improved dashboard, fixed language tags, tests graph plotting
parent fb4437a3a2
commit 2656780907

pdm.lock (generated): 176 changed lines
@@ -2,10 +2,62 @@
 # It is not intended for manual editing.
 
 [metadata]
-groups = ["default", "notebooks", "trials"]
+groups = ["default", "notebooks", "trials", "trails"]
 strategy = ["cross_platform", "inherit_metadata"]
-lock_version = "4.4.1"
-content_hash = "sha256:e00f157f833ee7615d96375c352e2caa6b4f6b50e5615ccbefa79446189594c7"
+lock_version = "4.4.2"
+content_hash = "sha256:36979d60d30dad28e15e0f93496b4ea25af7fc5a12b91e82c12e1b957325c0af"
 
+[[package]]
+name = "alph"
+version = "0.4.10"
+requires_python = "<4.0,>=3.9"
+summary = "alph"
+groups = ["trails", "trials"]
+dependencies = [
+    "altair>=5.0.1",
+    "networkx>=2.6.3",
+    "pandas>=1.3.5",
+    "scikit-network>=0.27.1",
+]
+files = [
+    {file = "alph-0.4.10-py3-none-any.whl", hash = "sha256:47649ef9d12ac7bddaa9cfc7510ab333e7fe1c76d4da4d1b09629bbd174fbe63"},
+    {file = "alph-0.4.10.tar.gz", hash = "sha256:a2cfe932c7a6a973c718f8c5bc1b1dbc7a1d18a122cb5e438db5ea3a61d6b5c3"},
+]
+
+[[package]]
+name = "alph"
+version = "0.4.10"
+extras = ["graphviz"]
+requires_python = "<4.0,>=3.9"
+summary = "alph"
+groups = ["trails", "trials"]
+dependencies = [
+    "alph==0.4.10",
+    "pygraphviz>=1.10",
+]
+files = [
+    {file = "alph-0.4.10-py3-none-any.whl", hash = "sha256:47649ef9d12ac7bddaa9cfc7510ab333e7fe1c76d4da4d1b09629bbd174fbe63"},
+    {file = "alph-0.4.10.tar.gz", hash = "sha256:a2cfe932c7a6a973c718f8c5bc1b1dbc7a1d18a122cb5e438db5ea3a61d6b5c3"},
+]
+
+[[package]]
+name = "altair"
+version = "5.3.0"
+requires_python = ">=3.8"
+summary = "Vega-Altair: A declarative statistical visualization library for Python."
+groups = ["trails", "trials"]
+dependencies = [
+    "jinja2",
+    "jsonschema>=3.0",
+    "numpy",
+    "packaging",
+    "pandas>=0.25",
+    "toolz",
+]
+files = [
+    {file = "altair-5.3.0-py3-none-any.whl", hash = "sha256:7084a1dab4d83c5e7e5246b92dc1b4451a6c68fd057f3716ee9d315c8980e59a"},
+    {file = "altair-5.3.0.tar.gz", hash = "sha256:5a268b1a0983b23d8f9129f819f956174aa7aea2719ed55a52eba9979b9f6675"},
+]
+
 [[package]]
 name = "annotated-types"
@@ -136,7 +188,7 @@ name = "attrs"
 version = "23.2.0"
 requires_python = ">=3.7"
 summary = "Classes Without Boilerplate"
-groups = ["notebooks"]
+groups = ["notebooks", "trails", "trials"]
 files = [
     {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
     {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
@@ -187,7 +239,7 @@ name = "blinker"
 version = "1.8.2"
 requires_python = ">=3.8"
 summary = "Fast, simple object-to-object and broadcast signaling"
-groups = ["trials"]
+groups = ["default"]
 files = [
     {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"},
     {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"},
@@ -231,7 +283,7 @@ name = "certifi"
 version = "2024.2.2"
 requires_python = ">=3.6"
 summary = "Python package for providing Mozilla's CA Bundle."
-groups = ["default", "notebooks", "trials"]
+groups = ["default", "notebooks"]
 files = [
     {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
     {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
@@ -276,7 +328,7 @@ name = "charset-normalizer"
 version = "3.3.2"
 requires_python = ">=3.7.0"
 summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-groups = ["default", "notebooks", "trials"]
+groups = ["default", "notebooks"]
 files = [
     {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
     {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
@@ -317,7 +369,7 @@ name = "click"
 version = "8.1.7"
 requires_python = ">=3.7"
 summary = "Composable command line interface toolkit"
-groups = ["default", "trials"]
+groups = ["default"]
 dependencies = [
     "colorama; platform_system == \"Windows\"",
 ]
@@ -342,7 +394,7 @@ name = "colorama"
 version = "0.4.6"
 requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
 summary = "Cross-platform colored terminal text."
-groups = ["default", "notebooks", "trials"]
+groups = ["default", "notebooks"]
 marker = "platform_system == \"Windows\" or sys_platform == \"win32\""
 files = [
     {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
@@ -402,7 +454,7 @@ name = "dash"
 version = "2.17.0"
 requires_python = ">=3.8"
 summary = "A Python framework for building reactive web-apps. Developed by Plotly."
-groups = ["trials"]
+groups = ["default"]
 dependencies = [
     "Flask<3.1,>=1.0.4",
     "Werkzeug<3.1",
@@ -426,7 +478,7 @@ files = [
 name = "dash-core-components"
 version = "2.0.0"
 summary = "Core component suite for Dash"
-groups = ["trials"]
+groups = ["default"]
 files = [
     {file = "dash_core_components-2.0.0-py3-none-any.whl", hash = "sha256:52b8e8cce13b18d0802ee3acbc5e888cb1248a04968f962d63d070400af2e346"},
     {file = "dash_core_components-2.0.0.tar.gz", hash = "sha256:c6733874af975e552f95a1398a16c2ee7df14ce43fa60bb3718a3c6e0b63ffee"},
@@ -437,7 +489,7 @@ name = "dash-cytoscape"
 version = "1.0.1"
 requires_python = ">=3.8"
 summary = "A Component Library for Dash aimed at facilitating network visualization in Python, wrapped around Cytoscape.js"
-groups = ["trials"]
+groups = ["default"]
 dependencies = [
     "dash",
 ]
@@ -449,7 +501,7 @@ files = [
 name = "dash-html-components"
 version = "2.0.0"
 summary = "Vanilla HTML components for Dash"
-groups = ["trials"]
+groups = ["default"]
 files = [
     {file = "dash_html_components-2.0.0-py3-none-any.whl", hash = "sha256:b42cc903713c9706af03b3f2548bda4be7307a7cf89b7d6eae3da872717d1b63"},
     {file = "dash_html_components-2.0.0.tar.gz", hash = "sha256:8703a601080f02619a6390998e0b3da4a5daabe97a1fd7a9cebc09d015f26e50"},
@@ -459,7 +511,7 @@ files = [
 name = "dash-table"
 version = "5.0.0"
 summary = "Dash table"
-groups = ["trials"]
+groups = ["default"]
 files = [
     {file = "dash_table-5.0.0-py3-none-any.whl", hash = "sha256:19036fa352bb1c11baf38068ec62d172f0515f73ca3276c79dee49b95ddc16c9"},
     {file = "dash_table-5.0.0.tar.gz", hash = "sha256:18624d693d4c8ef2ddec99a6f167593437a7ea0bf153aa20f318c170c5bc7308"},
@@ -543,7 +595,7 @@ name = "flask"
 version = "3.0.3"
 requires_python = ">=3.8"
 summary = "A simple framework for building complex web applications."
-groups = ["trials"]
+groups = ["default"]
 dependencies = [
     "Jinja2>=3.1.2",
     "Werkzeug>=3.0.0",
@@ -647,7 +699,7 @@ name = "idna"
 version = "3.7"
 requires_python = ">=3.5"
 summary = "Internationalized Domain Names in Applications (IDNA)"
-groups = ["default", "notebooks", "trials"]
+groups = ["default", "notebooks"]
 files = [
     {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
     {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
@@ -658,7 +710,7 @@ name = "importlib-metadata"
 version = "7.1.0"
 requires_python = ">=3.8"
 summary = "Read metadata from Python packages"
-groups = ["trials"]
+groups = ["default"]
 dependencies = [
     "zipp>=0.5",
 ]
@@ -767,7 +819,7 @@ name = "itsdangerous"
 version = "2.2.0"
 requires_python = ">=3.8"
 summary = "Safely pass data to untrusted environments and back."
-groups = ["trials"]
+groups = ["default"]
 files = [
     {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"},
     {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"},
@@ -792,7 +844,7 @@ name = "jinja2"
 version = "3.1.4"
 requires_python = ">=3.7"
 summary = "A very fast and expressive template engine."
-groups = ["default", "notebooks", "trials"]
+groups = ["default", "notebooks", "trails", "trials"]
 dependencies = [
     "MarkupSafe>=2.0",
 ]
@@ -839,7 +891,7 @@ name = "jsonschema"
 version = "4.22.0"
 requires_python = ">=3.8"
 summary = "An implementation of JSON Schema validation for Python"
-groups = ["notebooks"]
+groups = ["notebooks", "trails", "trials"]
 dependencies = [
     "attrs>=22.2.0",
     "jsonschema-specifications>=2023.03.6",
@@ -856,7 +908,7 @@ name = "jsonschema-specifications"
 version = "2023.12.1"
 requires_python = ">=3.8"
 summary = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
-groups = ["notebooks"]
+groups = ["notebooks", "trails", "trials"]
 dependencies = [
     "referencing>=0.31.0",
 ]
@@ -1160,7 +1212,7 @@ name = "markupsafe"
 version = "2.1.5"
 requires_python = ">=3.7"
 summary = "Safely add untrusted strings to HTML/XML markup."
-groups = ["default", "notebooks", "trials"]
+groups = ["default", "notebooks", "trails", "trials"]
 files = [
     {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
     {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
@@ -1325,7 +1377,7 @@ name = "nest-asyncio"
 version = "1.6.0"
 requires_python = ">=3.5"
 summary = "Patch asyncio to allow nested event loops"
-groups = ["notebooks", "trials"]
+groups = ["default", "notebooks"]
 files = [
     {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"},
     {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
@@ -1336,7 +1388,7 @@ name = "networkx"
 version = "3.3"
 requires_python = ">=3.10"
 summary = "Python package for creating and manipulating graphs and networks"
-groups = ["default"]
+groups = ["default", "trails", "trials"]
 files = [
     {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"},
     {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"},
@@ -1361,7 +1413,7 @@ name = "numpy"
 version = "1.26.4"
 requires_python = ">=3.9"
 summary = "Fundamental package for array computing in Python"
-groups = ["default"]
+groups = ["default", "trails", "trials"]
 files = [
     {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"},
     {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"},
@@ -1555,7 +1607,7 @@ name = "packaging"
 version = "24.0"
 requires_python = ">=3.7"
 summary = "Core utilities for Python packages"
-groups = ["default", "notebooks", "trials"]
+groups = ["default", "notebooks", "trails", "trials"]
 files = [
     {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"},
     {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
@@ -1566,7 +1618,7 @@ name = "pandas"
 version = "2.2.2"
 requires_python = ">=3.9"
 summary = "Powerful data structures for data analysis, time series, and statistics"
-groups = ["default"]
+groups = ["default", "trails", "trials"]
 dependencies = [
     "numpy>=1.23.2; python_version == \"3.11\"",
     "numpy>=1.26.0; python_version >= \"3.12\"",
@@ -1701,7 +1753,7 @@ name = "plotly"
 version = "5.22.0"
 requires_python = ">=3.8"
 summary = "An open-source, interactive data visualization library for Python"
-groups = ["trials"]
+groups = ["default"]
 dependencies = [
     "packaging",
     "tenacity>=6.2.0",
@@ -1890,12 +1942,22 @@ files = [
     {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
 ]
 
+[[package]]
+name = "pygraphviz"
+version = "1.13"
+requires_python = ">=3.10"
+summary = "Python interface to Graphviz"
+groups = ["trails", "trials"]
+files = [
+    {file = "pygraphviz-1.13.tar.gz", hash = "sha256:6ad8aa2f26768830a5a1cfc8a14f022d13df170a8f6fdfd68fd1aa1267000964"},
+]
+
 [[package]]
 name = "python-dateutil"
 version = "2.9.0.post0"
 requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
 summary = "Extensions to the standard Python datetime module"
-groups = ["default", "notebooks"]
+groups = ["default", "notebooks", "trails", "trials"]
 dependencies = [
     "six>=1.5",
 ]
@@ -1919,7 +1981,7 @@ files = [
 name = "pytz"
 version = "2024.1"
 summary = "World timezone definitions, modern and historical"
-groups = ["default"]
+groups = ["default", "trails", "trials"]
 files = [
     {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
     {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
@@ -2041,7 +2103,7 @@ name = "referencing"
 version = "0.35.1"
 requires_python = ">=3.8"
 summary = "JSON Referencing + Python"
-groups = ["notebooks"]
+groups = ["notebooks", "trails", "trials"]
 dependencies = [
     "attrs>=22.2.0",
     "rpds-py>=0.7.0",
@@ -2096,7 +2158,7 @@ name = "requests"
 version = "2.31.0"
 requires_python = ">=3.7"
 summary = "Python HTTP for Humans."
-groups = ["default", "notebooks", "trials"]
+groups = ["default", "notebooks"]
 dependencies = [
     "certifi>=2017.4.17",
     "charset-normalizer<4,>=2",
@@ -2112,7 +2174,7 @@ files = [
 name = "retrying"
 version = "1.3.4"
 summary = "Retrying"
-groups = ["trials"]
+groups = ["default"]
 dependencies = [
     "six>=1.7.0",
 ]
@@ -2151,7 +2213,7 @@ name = "rpds-py"
 version = "0.18.1"
 requires_python = ">=3.8"
 summary = "Python bindings to Rust's persistent data structures (rpds)"
-groups = ["notebooks"]
+groups = ["notebooks", "trails", "trials"]
 files = [
     {file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"},
     {file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"},
@@ -2302,12 +2364,29 @@ files = [
     {file = "scikit_learn-1.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:d762070980c17ba3e9a4a1e043ba0518ce4c55152032f1af0ca6f39b376b5928"},
 ]
 
+[[package]]
+name = "scikit-network"
+version = "0.32.1"
+requires_python = ">=3.8"
+summary = "Graph algorithms"
+groups = ["trails", "trials"]
+dependencies = [
+    "numpy>=1.22.4",
+    "scipy>=1.7.3",
+]
+files = [
+    {file = "scikit_network-0.32.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6228326cc5a813f4a7c031b64f1276d9cc1d5ccff082e62fb176f1e53ae2cc46"},
+    {file = "scikit_network-0.32.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d634aab1e379702134c877c8c4a6803b7d7ba4efc103198474b7e896ec37e242"},
+    {file = "scikit_network-0.32.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e32e247728815750b12b950279cae6b12041a0c4bf95d1accdd273b0c1a6ea1"},
+    {file = "scikit_network-0.32.1-cp311-cp311-win_amd64.whl", hash = "sha256:c7016dce37120aa527fcff4254985360f164306ed060c496a6cf296a6d0fd2c3"},
+]
+
 [[package]]
 name = "scipy"
 version = "1.13.0"
 requires_python = ">=3.9"
 summary = "Fundamental algorithms for scientific computing in Python"
-groups = ["default"]
+groups = ["default", "trails", "trials"]
 dependencies = [
     "numpy<2.3,>=1.22.4",
 ]
@@ -2364,7 +2443,7 @@ name = "setuptools"
 version = "69.5.1"
 requires_python = ">=3.8"
 summary = "Easily download, build, install, upgrade, and uninstall Python packages"
-groups = ["default", "trials"]
+groups = ["default"]
 files = [
     {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"},
     {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"},
@@ -2375,7 +2454,7 @@ name = "six"
 version = "1.16.0"
 requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
 summary = "Python 2 and 3 compatibility utilities"
-groups = ["default", "notebooks", "trials"]
+groups = ["default", "notebooks", "trails", "trials"]
 files = [
     {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
     {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
@@ -2635,7 +2714,7 @@ name = "tenacity"
 version = "8.3.0"
 requires_python = ">=3.8"
 summary = "Retry code until it succeeds"
-groups = ["trials"]
+groups = ["default"]
 files = [
     {file = "tenacity-8.3.0-py3-none-any.whl", hash = "sha256:3649f6443dbc0d9b01b9d8020a9c4ec7a1ff5f6f3c6c8a036ef371f573fe9185"},
     {file = "tenacity-8.3.0.tar.gz", hash = "sha256:953d4e6ad24357bceffbc9707bc74349aca9d245f68eb65419cf0c249a1949a2"},
@@ -2790,6 +2869,17 @@ files = [
     {file = "tokenizers-0.15.2.tar.gz", hash = "sha256:e6e9c6e019dd5484be5beafc775ae6c925f4c69a3487040ed09b45e13df2cb91"},
 ]
 
+[[package]]
+name = "toolz"
+version = "0.12.1"
+requires_python = ">=3.7"
+summary = "List processing tools and functional utilities"
+groups = ["trails", "trials"]
+files = [
+    {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"},
+    {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"},
+]
+
 [[package]]
 name = "torch"
 version = "2.3.0"
@@ -2941,7 +3031,7 @@ name = "typing-extensions"
 version = "4.12.2"
 requires_python = ">=3.8"
 summary = "Backported and Experimental Type Hints for Python 3.8+"
-groups = ["default", "notebooks", "trials"]
+groups = ["default", "notebooks"]
 files = [
     {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
     {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
@@ -2952,7 +3042,7 @@ name = "tzdata"
 version = "2024.1"
 requires_python = ">=2"
 summary = "Provider of IANA time zone data"
-groups = ["default"]
+groups = ["default", "trails", "trials"]
 files = [
     {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
     {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
@@ -2974,7 +3064,7 @@ name = "urllib3"
 version = "2.2.1"
 requires_python = ">=3.8"
 summary = "HTTP library with thread-safe connection pooling, file post, and more."
-groups = ["default", "notebooks", "trials"]
+groups = ["default", "notebooks"]
 files = [
     {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
     {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
@@ -3063,7 +3153,7 @@ name = "werkzeug"
 version = "3.0.3"
 requires_python = ">=3.8"
 summary = "The comprehensive WSGI web application library."
-groups = ["trials"]
+groups = ["default"]
 dependencies = [
     "MarkupSafe>=2.1.1",
 ]
@@ -3088,7 +3178,7 @@ name = "zipp"
 version = "3.18.2"
 requires_python = ">=3.8"
 summary = "Backport of pathlib-compatible object wrapper for zip files"
-groups = ["trials"]
+groups = ["default"]
 files = [
     {file = "zipp-3.18.2-py3-none-any.whl", hash = "sha256:dce197b859eb796242b0622af1b8beb0a722d52aa2f57133ead08edd5bf5374e"},
     {file = "zipp-3.18.2.tar.gz", hash = "sha256:6278d9ddbcfb1f1089a88fde84481528b07b0e10474e09dcfe53dad4069fa059"},

@@ -13,6 +13,9 @@ dependencies = [
     "numpy>=1.26.4",
     "pip>=24.0",
     "typing-extensions>=4.12.2",
+    "plotly>=5.22.0",
+    "dash>=2.17.0",
+    "dash-cytoscape>=1.0.1",
 ]
 requires-python = ">=3.11"
 readme = "README.md"
@@ -31,10 +34,13 @@ notebooks = [
     "jupyterlab>=4.2.0",
     "ipywidgets>=8.1.2",
 ]
-trials = [
-    "plotly>=5.22.0",
-    "dash>=2.17.0",
-    "dash-cytoscape>=1.0.1",
+# Alph relies on Pygraphviz, which in turn relies on Graphviz. To successfully build
+# Pygraphviz, the MSVC toolchain should be installed on the target system if it is Windows
+# (Linux not tested yet). The compiler needs additional information about the location of the Graphviz
+# files. This information is provided by the extra options below:
+# --config-setting="--global-option=build_ext" --config-setting="--global-option=-IC:\Program Files\Graphviz\include" --config-setting="--global-option=-LC:\Program Files\Graphviz\lib"
+trails = [
+    "alph[graphviz]>=0.4.10",
 ]
 
 [tool.ruff]

@@ -30,11 +30,11 @@ from lang_main.types import (
 )
 
 # ** build pipelines
-pipe_merge = build_merge_duplicates_pipe()
 pipe_target_feat = build_base_target_feature_pipe()
-pipe_timeline = build_timeline_pipe()
+pipe_merge = build_merge_duplicates_pipe()
 pipe_token_analysis = build_tk_graph_pipe()
 pipe_graph_postprocessing = build_tk_graph_post_pipe()
+pipe_timeline = build_timeline_pipe()
 
 
 # ** preprocessing pipeline
@@ -76,8 +76,6 @@ def run_graph_postprocessing() -> None:
     # filter graph by edge weight and remove single nodes (no connection)
     ret = cast(tuple[TokenGraph], pipe_graph_postprocessing.run(starting_values=(tk_graph,)))
     tk_graph_filtered = ret[0]
-    # tk_graph_filtered = tk_graph.filter_by_edge_weight(THRESHOLD_EDGE_WEIGHT, None)
-    # tk_graph_filtered = tk_graph_filtered.filter_by_node_degree(1, None)
     tk_graph_filtered.to_GraphML(
         SAVE_PATH_FOLDER, filename='TokenGraph-filtered', directed=False
     )

@@ -16,8 +16,8 @@ target = '../results/test_20240529/Pipe-Token_Analysis_Step-1_build_token_graph.
 p = Path(target).resolve()
 ret = lang_main.io.load_pickle(p)
 tk_graph = cast(graphs.TokenGraph, ret[0])
-tk_graph_filtered = tk_graph.filter_by_edge_weight(150, None)
-tk_graph_filtered = tk_graph_filtered.filter_by_node_degree(1, None)
+tk_graph_filtered = graphs.filter_graph_by_edge_weight(tk_graph, 150, None)
+tk_graph_filtered = graphs.filter_graph_by_node_degree(tk_graph_filtered, 1, None)
 cyto_data_base, weight_data = graphs.convert_graph_to_cytoscape(tk_graph_filtered)
 
 MIN_WEIGHT = weight_data['min']
@@ -235,8 +235,16 @@ def update_edge_weight(weight_min, weight_max):
         weight_min = MIN_WEIGHT
     if weight_max is None:
         weight_max = MAX_WEIGHT
-    tk_graph_filtered = tk_graph.filter_by_edge_weight(weight_min, weight_max)
-    tk_graph_filtered = tk_graph_filtered.filter_by_node_degree(1, None)
+    tk_graph_filtered = graphs.filter_graph_by_edge_weight(
+        tk_graph,
+        weight_min,
+        weight_max,
+    )
+    tk_graph_filtered = graphs.filter_graph_by_node_degree(
+        tk_graph_filtered,
+        1,
+        None,
+    )
     cyto_data, _ = graphs.convert_graph_to_cytoscape(tk_graph_filtered)
     return cyto_data
 

@@ -2,10 +2,9 @@ import time
 import webbrowser
 from pathlib import Path
 from threading import Thread
-from typing import cast
+from typing import Any, Final, cast
 
 import dash_cytoscape as cyto
-import pandas as pd
 import plotly.express as px
 from dash import (
     Dash,
@@ -19,45 +18,33 @@ from dash import (
 )
 from pandas import DataFrame
 
-from lang_main.analysis import graphs
-from lang_main.io import load_pickle
-from lang_main.types import ObjectID, TimelineCandidates
-from lang_main.analysis import tokens
+import lang_main.io
+from lang_main.analysis import graphs, tokens
 from lang_main.constants import SPCY_MODEL
-# df = pd.read_csv('https://raw.githubusercontent.com/plotly/datasets/master/gapminder_unfiltered.csv')
+from lang_main.types import ObjectID, TimelineCandidates
 
 # ** data
-# p_df = Path(r'../Pipe-TargetFeature_Step-3_remove_NA.pkl').resolve()
 p_df = Path(r'../results/test_20240619/TIMELINE.pkl').resolve()
-# p_tl = Path(r'/Pipe-Timeline_Analysis_Step-4_get_timeline_candidates.pkl').resolve()
+(data,) = cast(tuple[DataFrame], lang_main.io.load_pickle(p_df))
 p_tl = Path(r'../results/test_20240619/TIMELINE_POSTPROCESSING.pkl').resolve()
-ret = cast(tuple[DataFrame], load_pickle(p_df))
-data = ret[0]
-ret = cast(tuple[TimelineCandidates, dict[ObjectID, str]], load_pickle(p_tl))
-cands = ret[0]
-texts = ret[1]
+cands, texts = cast(
+    tuple[TimelineCandidates, dict[ObjectID, str]], lang_main.io.load_pickle(p_tl)
+)
 
-# p_df = Path(r'.\test-notebooks\dashboard\data.pkl')
-# p_cands = Path(r'.\test-notebooks\dashboard\map_candidates.pkl')
-# p_map = Path(r'.\test-notebooks\dashboard\map_texts.pkl')
-# data = cast(DataFrame, load_pickle(p_df))
-# cands = cast(TimelineCandidates, load_pickle(p_cands))
-# texts = cast(dict[ObjectID, str], load_pickle(p_map))
 
-table_feats = [
+TABLE_FEATS: Final[list[str]] = [
     'ErstellungsDatum',
     'ErledigungsDatum',
     'VorgangsTypName',
     'VorgangsBeschreibung',
 ]
-table_feats_dates = [
+TABLE_FEATS_DATES: Final[list[str]] = [
     'ErstellungsDatum',
     'ErledigungsDatum',
 ]
 
 # ** figure config
-markers = {
+MARKERS: Final[dict[str, Any]] = {
     'size': 12,
     'color': 'yellow',
     'line': {
@@ -65,15 +52,15 @@ markers = {
         'color': 'red',
     },
 }
-hover_data = {
+HOVER_DATA: Final[dict[str, Any]] = {
     'ErstellungsDatum': '|%d.%m.%Y',
     'VorgangsBeschreibung': True,
 }
 
-# ** graphs
+# ** graph
 target = '../results/test_20240529/Pipe-Token_Analysis_Step-1_build_token_graph.pkl'
 p = Path(target).resolve()
-ret = load_pickle(p)
+ret = lang_main.io.load_pickle(p)
 tk_graph = cast(graphs.TokenGraph, ret[0])
 tk_graph_filtered = graphs.filter_graph_by_edge_weight(tk_graph, 150, None)
 tk_graph_filtered = graphs.filter_graph_by_node_degree(tk_graph_filtered, 1, None)
@@ -142,14 +129,12 @@ my_stylesheet = [
     # {'selector': '.triangle', 'style': {'shape': 'triangle'}},
 ]
 
-# ** app
-external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
-app = Dash(__name__, external_stylesheets=external_stylesheets)
 
 graph_layout = html.Div(
     [
         html.Button('Trigger JS Weight', id='test_js_weight'),
-        html.Button('Trigger Candidate Graph', id='cand_graph'),
+        html.Button('Trigger Candidate Graph', id='graph-build-btn'),
+        dcc.Store(id='graph-store', storage_type='memory'),
+        dcc.Store(id='graph-store-cyto-curr_cands', storage_type='memory'),
         html.Div(id='output'),
         html.Div(
             [
@@ -184,7 +169,7 @@ graph_layout = html.Div(
             [
                 html.H3('Graph Filter'),
                 dcc.Input(
-                    id='weight_min',
+                    id='graph-weight_min',
                     type='number',
                     min=MIN_WEIGHT,
                     max=MAX_WEIGHT,
@@ -194,7 +179,7 @@
                     style={'width': '40%'},
                 ),
                 dcc.Input(
-                    id='weight_max',
+                    id='graph-weight_max',
                     type='number',
                     min=MIN_WEIGHT,
                     max=MAX_WEIGHT,
@@ -204,7 +189,7 @@
                    style={'width': '40%'},
                 ),
                 html.H3('Graph'),
-                html.Button('Re-Layout', id='trigger_relayout'),
+                html.Button('Re-Layout', id='graph-trigger_relayout'),
                 html.Div(
                     [
                         cyto.Cytoscape(
@@ -230,6 +215,12 @@ graph_layout = html.Div(
     ],
 )
 
 
+# ** app
+external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
+app = Dash(__name__, external_stylesheets=external_stylesheets)
+
+
 app.layout = html.Div(
     [
         html.H1(children='Demo Zeitreihenanalyse', style={'textAlign': 'center'}),
@@ -238,16 +229,16 @@ app.layout = html.Div(
                 html.H2('Wählen Sie ein Objekt aus (ObjektID):'),
                 dcc.Dropdown(
                     list(cands.keys()),
-                    id='dropdown-selection',
+                    id='selector-obj_id',
                     placeholder='ObjektID auswählen...',
                 ),
             ]
         ),
         html.Div(
             children=[
-                html.H3(id='object_text'),
-                dcc.Dropdown(id='choice-candidates'),
-                dcc.Graph(id='graph-output'),
+                html.H3(id='object-text'),
+                dcc.Dropdown(id='selector-candidates'),
+                dcc.Graph(id='graph-candidates'),
             ]
         ),
         html.Div(
@@ -260,8 +251,8 @@ app.layout = html.Div(
 
 
 @callback(
-    Output('object_text', 'children'),
-    Input('dropdown-selection', 'value'),
+    Output('object-text', 'children'),
+    Input('selector-obj_id', 'value'),
     prevent_initial_call=True,
 )
 def update_obj_text(obj_id):
@@ -272,21 +263,24 @@ def update_obj_text(obj_id):
 
 
 @callback(
-    Output('choice-candidates', 'options'),
-    Input('dropdown-selection', 'value'),
+    [Output('selector-candidates', 'options'), Output('selector-candidates', 'value')],
+    Input('selector-obj_id', 'value'),
     prevent_initial_call=True,
 )
 def update_choice_candidates(obj_id):
     obj_id = int(obj_id)
     cands_obj_id = cands[obj_id]
     choices = list(range(1, len(cands_obj_id) + 1))
-    return choices
+    return choices, choices[0]
 
 
+# TODO check possible storage of pre-filtered result
+# TODO change input of ``update_table_candidates`` and ``display_candidates_as_graph``
+# TODO to storage component
 @callback(
-    Output('graph-output', 'figure'),
-    Input('choice-candidates', 'value'),
-    State('dropdown-selection', 'value'),
+    Output('graph-candidates', 'figure'),
+    Input('selector-candidates', 'value'),
+    State('selector-obj_id', 'value'),
     prevent_initial_call=True,
 )
 def update_timeline(index, obj_id):
@@ -295,19 +289,20 @@ def update_timeline(index, obj_id):
     obj_text = texts[obj_id]
     title = f'HObjektText: {obj_text}'
     # cands
-    cands_obj_id = cands[obj_id]
-    cands_choice = cands_obj_id[int(index) - 1]
+    # cands_per_obj_id = cands[obj_id]
+    # cands_similar = cands_per_obj_id[int(index) - 1]
     # data
-    df = data.loc[list(cands_choice)].sort_index()  # type: ignore
+    # df = data.loc[list(cands_similar)].sort_index()  # type: ignore
+    df = pre_filter_data(data, idx=index, obj_id=obj_id)
     # figure
     fig = px.line(
         data_frame=df,
         x='ErstellungsDatum',
         y='ObjektID',
         title=title,
-        hover_data=hover_data,
+        hover_data=HOVER_DATA,
     )
-    fig.update_traces(mode='markers+lines', marker=markers, marker_symbol='diamond')
+    fig.update_traces(mode='markers+lines', marker=MARKERS, marker_symbol='diamond')
     fig.update_xaxes(
         tickformat='%B\n%Y',
         rangeslider_visible=True,
@@ -319,24 +314,18 @@
 
 @callback(
     [Output('table-candidates', 'data'), Output('table-candidates', 'columns')],
-    Input('choice-candidates', 'value'),
-    State('dropdown-selection', 'value'),
+    Input('selector-candidates', 'value'),
+    State('selector-obj_id', 'value'),
     prevent_initial_call=True,
 )
 def update_table_candidates(index, obj_id):
-    # obj_id = int(obj_id)
-    # # cands
-    # cands_obj_id = cands[obj_id]
-    # cands_choice = cands_obj_id[int(index) - 1]
-    # # data
-    # df = data.loc[list(cands_choice)].sort_index()  # type: ignore
     df = pre_filter_data(data, idx=index, obj_id=obj_id)
-    df = df.filter(items=table_feats, axis=1).sort_values(
+    df = df.filter(items=TABLE_FEATS, axis=1).sort_values(
         by='ErstellungsDatum', ascending=True
     )
     cols = [{'name': i, 'id': i} for i in df.columns]
     # convert dates to strings
-    for col in table_feats_dates:
+    for col in TABLE_FEATS_DATES:
         df[col] = df[col].dt.strftime(r'%Y-%m-%d')
 
     table_data = df.to_dict('records')
@@ -348,6 +337,7 @@ def pre_filter_data(
     idx: int,
     obj_id: ObjectID,
 ) -> DataFrame:
+    idx = int(idx)
     obj_id = int(obj_id)
     data = data.copy()
     # cands
@@ -359,33 +349,53 @@ def pre_filter_data(
     return data


-# ** graph
+# ** graph callbacks
+# TODO store pre-calculated graph
 @app.callback(
     Output('cytoscape-graph', 'elements', allow_duplicate=True),
-    Output('weight_min', 'min', allow_duplicate=True),
-    Output('weight_min', 'max', allow_duplicate=True),
-    Output('weight_min', 'placeholder', allow_duplicate=True),
-    Output('weight_max', 'min', allow_duplicate=True),
-    Output('weight_max', 'max', allow_duplicate=True),
-    Output('weight_max', 'placeholder', allow_duplicate=True),
-    Input('cand_graph', 'n_clicks'),
-    State('choice-candidates', 'value'),
-    State('dropdown-selection', 'value'),
+    Output('graph-weight_min', 'min', allow_duplicate=True),
+    Output('graph-weight_min', 'max', allow_duplicate=True),
+    Output('graph-weight_min', 'placeholder', allow_duplicate=True),
+    Output('graph-weight_max', 'min', allow_duplicate=True),
+    Output('graph-weight_max', 'max', allow_duplicate=True),
+    Output('graph-weight_max', 'placeholder', allow_duplicate=True),
+    Output('graph-store', 'data'),
+    Output('graph-store-cyto-curr_cands', 'data'),
+    # Input('graph-build-btn', 'n_clicks'),
+    Input('selector-candidates', 'value'),
+    State('selector-obj_id', 'value'),
     prevent_initial_call=True,
 )
-def update_graph_candidates(_, index, obj_id):
+def display_candidates_as_graph(index, obj_id):
+    t1 = time.perf_counter()
     df = pre_filter_data(data, idx=index, obj_id=obj_id)
+    t2 = time.perf_counter()
+    print(f'Time for filtering: {t2 - t1} s')

+    t1 = time.perf_counter()
     tk_graph_cands, _ = tokens.build_token_graph(
         data=df,
         model=SPCY_MODEL,
         target_feature='VorgangsBeschreibung',
         build_map=False,
+        logging_graph=False,
     )
+    t2 = time.perf_counter()
+    print(f'Time for graph building: {t2 - t1} s')

+    t1 = time.perf_counter()
     cyto_data, weight_info = graphs.convert_graph_to_cytoscape(tk_graph_cands)
     weight_min = weight_info['min']
     weight_max = weight_info['max']
     placeholder_min = f'Minimum edge weight: {weight_min} - {weight_max}'
-    placeholder_max = f'Minimum edge weight: {weight_min} - {weight_max}'
+    placeholder_max = f'Maximum edge weight: {weight_min} - {weight_max}'
+    t2 = time.perf_counter()
+    print(f'Time for graph metadata and conversion: {t2 - t1} s')

+    t1 = time.perf_counter()
+    graph_to_store = lang_main.io.encode_to_base64_str(tk_graph_cands)
+    t2 = time.perf_counter()
+    print(f'Time for encoding: {t2 - t1} s')
     return (
         cyto_data,
         weight_min,
@@ -394,6 +404,8 @@ def update_graph_candidates(_, index, obj_id):
         weight_min,
         weight_max,
         placeholder_max,
+        graph_to_store,
+        cyto_data,
     )


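The rewritten callback now returns two extra values: the base64-encoded TokenGraph and the current Cytoscape elements, both written into dcc.Store components. A hedged layout sketch of that pattern follows; only the two store ids are taken from the diff, the rest is illustrative.

from dash import Dash, dcc, html

app = Dash(__name__)
app.layout = html.Div([
    # holds the pickled, base64-encoded TokenGraph for later filtering callbacks
    dcc.Store(id='graph-store'),
    # holds the unfiltered Cytoscape elements of the current candidates
    dcc.Store(id='graph-store-cyto-curr_cands'),
])

Keeping both means the edge-weight callback can restore the full graph without rebuilding it from spaCy docs, which is what the timing printouts are meant to confirm.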
@@ -412,30 +424,44 @@ def update_layout_internal(layout_choice):
 @app.callback(
     Output('cytoscape-graph', 'zoom'),
     Output('cytoscape-graph', 'elements', allow_duplicate=True),
-    Output('weight_min', 'value'),
-    Output('weight_max', 'value'),
+    Output('graph-weight_min', 'value'),
+    Output('graph-weight_max', 'value'),
     Input('bt-reset', 'n_clicks'),
+    State('graph-store-cyto-curr_cands', 'data'),
     prevent_initial_call=True,
 )
-def reset_layout(n_clicks):
-    return (1, cyto_data_base, None, None)
+def reset_layout(_, current_cands_cyto_elements):
+    return (1, current_cands_cyto_elements, None, None)


 # update edge weight
 @app.callback(
     Output('cytoscape-graph', 'elements', allow_duplicate=True),
-    Input('weight_min', 'value'),
-    Input('weight_max', 'value'),
+    Input('graph-weight_min', 'value'),
+    Input('graph-weight_max', 'value'),
+    State('graph-store', 'data'),
+    State('graph-store-cyto-curr_cands', 'data'),
+    State('graph-weight_min', 'min'),
+    State('graph-weight_min', 'max'),
     prevent_initial_call=True,
 )
-def update_edge_weight(weight_min, weight_max):
-    if not any([weight_min, weight_max]):
-        return cyto_data_base
+def update_edge_weight(
+    weight_min,
+    weight_max,
+    current_graph,
+    current_cands_cyto_elements,
+    current_min,
+    current_max,
+):
+    if not any((weight_min, weight_max)):
+        return current_cands_cyto_elements

     if weight_min is None:
-        weight_min = MIN_WEIGHT
+        weight_min = current_min
     if weight_max is None:
-        weight_max = MAX_WEIGHT
+        weight_max = current_max

+    tk_graph = cast(graphs.TokenGraph, lang_main.io.decode_from_base64_str(current_graph))
     tk_graph_filtered = graphs.filter_graph_by_edge_weight(tk_graph, weight_min, weight_max)
     # tk_graph_filtered = tk_graph.filter_by_edge_weight(weight_min, weight_max)
     tk_graph_filtered = graphs.filter_graph_by_node_degree(tk_graph_filtered, 1, None)
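For readers unfamiliar with the helper, here is a generic illustration of what an edge-weight filter such as graphs.filter_graph_by_edge_weight is assumed to do. This is not the project's implementation, only a networkx sketch of the same idea: keep the edges whose weight lies within [weight_min, weight_max].

import networkx as nx

def filter_by_edge_weight(g: nx.Graph, weight_min: int, weight_max: int) -> nx.Graph:
    # collect the edges whose 'weight' attribute falls inside the closed interval
    kept = [
        (u, v)
        for u, v, w in g.edges(data='weight', default=0)
        if weight_min <= w <= weight_max
    ]
    return g.edge_subgraph(kept).copy()

g = nx.Graph()
g.add_edge('pumpe', 'defekt', weight=3)
g.add_edge('pumpe', 'neu', weight=1)
print(list(filter_by_edge_weight(g, 2, 10).edges))  # [('pumpe', 'defekt')]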
@@ -444,6 +470,7 @@ def update_edge_weight(weight_min, weight_max):
     return cyto_data


+# ** graph: layout with edge weight
 app.clientside_callback(
     """
     function(n_clicks, layout) {
@@ -458,11 +485,12 @@ app.clientside_callback(
     }
     """,
     Output('cytoscape-graph', 'layout', allow_duplicate=True),
-    Input('trigger_relayout', 'n_clicks'),
+    Input('graph-trigger_relayout', 'n_clicks'),
     State('cytoscape-graph', 'layout'),
     prevent_initial_call=True,
 )

+# ** graph: display edge weight (line thickness)
 app.clientside_callback(
     """
     function(n_clicks, stylesheet) {
@@ -1,4 +1,5 @@
 import logging
+import os
 import shutil
 import sys
 from pathlib import Path
@@ -41,3 +42,9 @@ else:
     loaded_cfg = load_toml_config(path_to_toml=cfg_path_external)

 CONFIG: Final[dict[str, Any]] = loaded_cfg.copy()
+
+# append Graphviz binary folder to system path if not already contained
+if sys.platform == 'win32':
+    path = Path(r'C:\Program Files\Graphviz\bin')
+    if path.is_dir() and str(path).lower() not in os.environ['PATH'].lower():
+        os.environ['PATH'] += f';{path}'
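A possible follow-up check, not part of the commit: after extending PATH, shutil.which('dot') reports whether the Graphviz executables are actually reachable, which is typically what Graphviz-based layouts need. A sketch under that assumption:

import os
import shutil
import sys
from pathlib import Path

if sys.platform == 'win32':
    graphviz_bin = Path(r'C:\Program Files\Graphviz\bin')
    if graphviz_bin.is_dir() and str(graphviz_bin).lower() not in os.environ['PATH'].lower():
        # mirror the new config-module logic: append the default install folder
        os.environ['PATH'] += f';{graphviz_bin}'

if shutil.which('dot') is None:
    # 'dot' is the Graphviz layout binary; without it Graphviz layouts will fail
    print('Graphviz not found on PATH')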
@@ -164,6 +164,9 @@ def convert_graph_to_cytoscape(
         }
         cyto_data.append(edge_data)

+    min_weight: int = 0
+    max_weight: int = 0
+    if weights:
         min_weight = min(weights)
         max_weight = max(weights)
     weight_metadata: WeightData = {'min': min_weight, 'max': max_weight}
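The three added lines guard against an empty weights list, which would otherwise make min()/max() raise a ValueError; the metadata then falls back to zeros. A minimal standalone illustration:

weights: list[int] = []  # e.g. a candidate set that produced no edges
min_weight: int = 0
max_weight: int = 0
if weights:
    min_weight = min(weights)
    max_weight = max(weights)
weight_metadata = {'min': min_weight, 'max': max_weight}
print(weight_metadata)  # {'min': 0, 'max': 0}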
@@ -284,6 +287,9 @@ class TokenGraph(DiGraph):
             f'{len(self.edges)})'
         )

+    def disable_logging(self) -> None:
+        self.logging = False
+
     # !! only used to verify that saving was done correctly
     """
     def __key(self) -> tuple[Hashable, ...]:
@@ -337,16 +343,9 @@ class TokenGraph(DiGraph):
         logging: bool | None = ...,
     ) -> Graph: ...

-    @overload
     def to_undirected(
         self,
-        inplace: bool = ...,
-        logging: bool | None = ...,
-    ) -> Graph | None: ...
-
-    def to_undirected(
-        self,
-        inplace=True,
+        inplace: bool = True,
         logging: bool | None = None,
     ) -> Graph | None:
         if logging is None:
@@ -21,7 +21,8 @@ from lang_main.types import PandasIndex
 # POS_OF_INTEREST: frozenset[str] = frozenset(['NOUN', 'PROPN', 'ADJ', 'VERB', 'AUX'])
 # POS_OF_INTEREST: frozenset[str] = frozenset(['NOUN', 'ADJ', 'VERB', 'AUX'])
 # POS_OF_INTEREST: frozenset[str] = frozenset(['NOUN', 'PROPN'])
-POS_OF_INTEREST: frozenset[str] = frozenset(['NOUN', 'PROPN', 'VERB', 'AUX'])
+# POS_OF_INTEREST: frozenset[str] = frozenset(['NOUN', 'PROPN', 'VERB', 'AUX'])
+POS_OF_INTEREST: frozenset[str] = frozenset(['NOUN', 'PROPN', 'VERB', 'AUX', 'ADV'])

 # POS_INDIRECT: frozenset[str] = frozenset(['AUX', 'VERB'])
 POS_INDIRECT: frozenset[str] = frozenset(['AUX'])
@@ -153,6 +154,7 @@ def build_token_graph(
     batch_idx_feature: str = ...,
     build_map: Literal[False],
     batch_size_model: int = ...,
+    logging_graph: bool = ...,
 ) -> tuple[TokenGraph, None]: ...


@@ -166,6 +168,7 @@ def build_token_graph(
     batch_idx_feature: str = ...,
     build_map: Literal[True] = ...,
     batch_size_model: int = ...,
+    logging_graph: bool = ...,
 ) -> tuple[TokenGraph, dict[PandasIndex, SpacyDoc]]: ...


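Both overloads gain the new logging_graph keyword; the Literal[...] annotations on build_map are what let type checkers know whether the second tuple element is a mapping or None. A minimal standalone sketch of that overload pattern, with illustrative names:

from typing import Literal, overload

@overload
def build(build_map: Literal[False]) -> tuple[str, None]: ...
@overload
def build(build_map: Literal[True] = ...) -> tuple[str, dict[int, str]]: ...

def build(build_map: bool = True) -> tuple[str, dict[int, str] | None]:
    # the runtime implementation handles both cases; only the overloads
    # expose the build_map-dependent return type to the type checker
    mapping = {0: 'doc'} if build_map else None
    return 'graph', mapping

graph, mapping = build(build_map=False)  # type checker infers mapping as None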
@@ -175,11 +178,12 @@ def build_token_graph(
     *,
     target_feature: str = 'entry',
     weights_feature: str | None = None,
-    batch_idx_feature: str = 'batched_idxs',
+    batch_idx_feature: str | None = 'batched_idxs',
     build_map: bool = True,
     batch_size_model: int = 50,
+    logging_graph: bool = True,
 ) -> tuple[TokenGraph, dict[PandasIndex, SpacyDoc] | None]:
-    graph = TokenGraph()
+    graph = TokenGraph(enable_logging=logging_graph)
     model_input = cast(tuple[str], tuple(data[target_feature].to_list()))
     if weights_feature is not None:
         weights = cast(tuple[int], tuple(data[weights_feature].to_list()))
@@ -187,7 +191,9 @@ def build_token_graph(
         weights = None

     docs_mapping: dict[PandasIndex, SpacyDoc] | None
-    if build_map:
+    if build_map and batch_idx_feature is None:
+        raise ValueError('Can not build mapping if batched indices are unknown.')
+    elif build_map:
         indices = cast(tuple[list[PandasIndex]], tuple(data[batch_idx_feature].to_list()))
         docs_mapping = {}
     else:
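The signature change makes batch_idx_feature optional, so the function now refuses to build a doc-to-index mapping when the batched-index column is unknown. A standalone sketch of just that guard (the helper name is made up for illustration):

def _check_mapping_args(build_map: bool, batch_idx_feature: str | None) -> None:
    # mirrors the new branch in build_token_graph
    if build_map and batch_idx_feature is None:
        raise ValueError('Can not build mapping if batched indices are unknown.')

_check_mapping_args(build_map=False, batch_idx_feature=None)  # fine: no mapping requested
_check_mapping_args(build_map=True, batch_idx_feature='batched_idxs')  # fine
_check_mapping_args(build_map=True, batch_idx_feature=None)  # raises ValueError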
@@ -199,10 +205,10 @@ def build_token_graph(
     for doc in tqdm(
         model.pipe(model_input, batch_size=batch_size_model), total=len(model_input)
     ):
+        weight: int | None = None
         if weights is not None:
             weight = weights[index]
-        else:
-            weight = None
         add_doc_info_to_graph(
             graph=graph,
             doc=doc,
@@ -219,7 +225,7 @@ def build_token_graph(
     # metadata
     graph.update_metadata()
     # convert to undirected
-    graph.to_undirected()
+    graph.to_undirected(logging=False)

     return graph, docs_mapping

@@ -250,7 +256,7 @@ def build_token_graph_simple(
     # metadata
     graph.update_metadata()
     # convert to undirected
-    graph.to_undirected()
+    graph.to_undirected(logging=False)

     return graph, docs_mapping

@@ -1,4 +1,5 @@
 import pickle
+import base64
 import shutil
 import tomllib
 from pathlib import Path
@@ -61,6 +62,24 @@ def load_pickle(
     return obj


+def encode_to_base64_str(
+    obj: Any,
+    encoding: str = 'utf-8',
+) -> str:
+    serialised = pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
+    b64_bytes = base64.b64encode(serialised)
+    return b64_bytes.decode(encoding=encoding)
+
+
+def decode_from_base64_str(
+    b64_str: str,
+    encoding: str = 'utf-8',
+) -> Any:
+    b64_bytes = b64_str.encode(encoding=encoding)
+    decoded = base64.b64decode(b64_bytes)
+    return pickle.loads(decoded)
+
+
 def get_entry_point(
     saving_path: Path,
     filename: str,
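A usage sketch for the two new helpers: any picklable object survives the round trip, which is what allows the dashboard to park a TokenGraph in a dcc.Store (stores only hold JSON-friendly data such as strings). The functions are reproduced here so the example is self-contained.

import base64
import pickle
from typing import Any

def encode_to_base64_str(obj: Any, encoding: str = 'utf-8') -> str:
    # pickle first, then base64, so the result is a plain (JSON-safe) string
    serialised = pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
    return base64.b64encode(serialised).decode(encoding=encoding)

def decode_from_base64_str(b64_str: str, encoding: str = 'utf-8') -> Any:
    return pickle.loads(base64.b64decode(b64_str.encode(encoding=encoding)))

payload = {'nodes': ['pumpe', 'defekt'], 'weight': 3}
assert decode_from_base64_str(encode_to_base64_str(payload)) == payload

Note that unpickling base64 strings is only safe for data the application produced itself.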
@@ -97,6 +97,18 @@ class BasePipeline(ABC):


 class PipelineContainer(BasePipeline):
+    """Container class for basic actions.
+    Basic actions are usually functions, which do not take any parameters
+    and return nothing. Indeed, if an action returns any values after its
+    procedure is finished, an error is raised. Therefore, PipelineContainers
+    can be seen as a concatenation of many (independent) simple procedures
+    which are executed in the order in which they were added to the pipe.
+    With a simple call of the ``run`` method the actions are performed.
+    Additionally, there is an option to skip actions which can be set in
+    the ``add`` method. This allows for easily configurable pipelines,
+    e.g., via a user configuration.
+    """
+
     def __init__(
         self,
         name: str,
@@ -169,9 +181,6 @@ class Pipeline(BasePipeline):
             f'working dir: {self.working_dir}, contents: {self.action_names})'
         )

-    # @property
-    # def intermediate_result(self) -> tuple[Any, ...] | None:
-    #     return self._intermediate_result
     @override
     def add(
         self,
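A hedged usage sketch based on the docstring above; the exact constructor and add()/run() signatures are assumptions rather than part of this diff.

def clean_temp_files() -> None:
    print('cleaning temporary files')

def rebuild_index() -> None:
    print('rebuilding index')

# assumed API: actions registered in order, optionally marked as skippable
pipe = PipelineContainer(name='maintenance')
pipe.add(clean_temp_files)
pipe.add(rebuild_index, skip=True)  # skipped at run time, e.g. via user config
pipe.run()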
File diff suppressed because one or more lines are too long