diff --git a/_typos.toml b/_typos.toml new file mode 100644 index 000000000..98af6e1aa --- /dev/null +++ b/_typos.toml @@ -0,0 +1,9 @@ +[default.extend-identifiers] +bck = "bck" # BiMap uses abbreviation +ser_it = "ser_it" +SerCollection = "SerCollection" +strat = "strat" # common abbreviation for strategy +# some arithmetic op names: +inot = "inot" +fle = "fle" +ine = "ine" diff --git a/hugr-cli/CHANGELOG.md b/hugr-cli/CHANGELOG.md index 018e58f46..b1390be3c 100644 --- a/hugr-cli/CHANGELOG.md +++ b/hugr-cli/CHANGELOG.md @@ -1,5 +1,6 @@ # Changelog + ## 0.1.1 (2024-06-07) ### Features diff --git a/hugr-cli/Cargo.toml b/hugr-cli/Cargo.toml index 0d6d01264..27fd2d13f 100644 --- a/hugr-cli/Cargo.toml +++ b/hugr-cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hugr-cli" -version = "0.1.1" +version = "0.1.2" edition = { workspace = true } rust-version = { workspace = true } license = { workspace = true } @@ -17,7 +17,7 @@ categories = ["compilers"] clap = {workspace = true, features = ["derive"]} clap-stdin.workspace = true clap-verbosity-flag.workspace = true -hugr-core = { path = "../hugr-core", version = "0.2.0" } +hugr-core = { path = "../hugr-core", version = "0.3.0" } serde_json.workspace = true thiserror.workspace = true diff --git a/hugr-core/CHANGELOG.md b/hugr-core/CHANGELOG.md index bc0079bfa..d5e6875fb 100644 --- a/hugr-core/CHANGELOG.md +++ b/hugr-core/CHANGELOG.md @@ -1,5 +1,32 @@ # Changelog +## 0.3.0 (2024-06-28) + +### Bug Fixes + +- SimpleReplacement panic on multiports ([#1191](https://github.com/CQCL/hugr/pull/1191)) +- Add some validation for const nodes ([#1222](https://github.com/CQCL/hugr/pull/1222)) +- Cfg not validating entry/exit types ([#1229](https://github.com/CQCL/hugr/pull/1229)) +- `extract_hugr` not removing root node ports ([#1239](https://github.com/CQCL/hugr/pull/1239)) + +### Documentation + +- Fix documentation of `ValidationError::ConstTypeError` ([#1227](https://github.com/CQCL/hugr/pull/1227)) + +### Features + +- 
CircuitBuilder::add_constant ([#1168](https://github.com/CQCL/hugr/pull/1168)) +- [**breaking**] Make the rewrite errors more useful ([#1174](https://github.com/CQCL/hugr/pull/1174)) +- [**breaking**] Validate Extensions using hierarchy, ignore input_extensions, RIP inference ([#1142](https://github.com/CQCL/hugr/pull/1142)) +- [**breaking**] Infer extension deltas for Case, Cfg, Conditional, DataflowBlock, Dfg, TailLoop ([#1195](https://github.com/CQCL/hugr/pull/1195)) +- Helper functions for requesting inference, use with builder in tests ([#1219](https://github.com/CQCL/hugr/pull/1219)) + +### Refactor + +- [**breaking**] Remove NodeType and input_extensions ([#1183](https://github.com/CQCL/hugr/pull/1183)) +- [**breaking**] FunctionBuilder takes impl Into ([#1220](https://github.com/CQCL/hugr/pull/1220)) + + ## 0.2.0 (2024-06-07) ### Bug Fixes diff --git a/hugr-core/Cargo.toml b/hugr-core/Cargo.toml index 21c52a5ec..2be3268df 100644 --- a/hugr-core/Cargo.toml +++ b/hugr-core/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hugr-core" -version = "0.2.0" +version = "0.3.0" edition = { workspace = true } rust-version = { workspace = true } diff --git a/hugr-core/src/extension/prelude.rs b/hugr-core/src/extension/prelude.rs index 8065c30fa..c96b9194f 100644 --- a/hugr-core/src/extension/prelude.rs +++ b/hugr-core/src/extension/prelude.rs @@ -350,7 +350,7 @@ pub struct ConstExternalSymbol { pub symbol: String, /// The type of the value found at this symbol reference. pub typ: Type, - /// Whether the value at the symbol referenence is constant or mutable. + /// Whether the value at the symbol reference is constant or mutable. 
pub constant: bool, } diff --git a/hugr-core/src/hugr.rs b/hugr-core/src/hugr.rs index c039d58e2..1cd8504e2 100644 --- a/hugr-core/src/hugr.rs +++ b/hugr-core/src/hugr.rs @@ -327,6 +327,7 @@ mod test { } #[test] + #[cfg_attr(miri, ignore)] // Opening files is not supported in (isolated) miri #[ignore = "issue 1225: In serialisation we do not distinguish between unknown CustomConst serialised value invalid but known CustomConst serialised values"] fn hugr_validation_0() { // https://github.com/CQCL/hugr/issues/1091 bad case @@ -341,6 +342,7 @@ mod test { } #[test] + #[cfg_attr(miri, ignore)] // Opening files is not supported in (isolated) miri fn hugr_validation_1() { // https://github.com/CQCL/hugr/issues/1091 good case let mut hugr: Hugr = serde_json::from_reader(BufReader::new( @@ -351,6 +353,7 @@ mod test { } #[test] + #[cfg_attr(miri, ignore)] // Opening files is not supported in (isolated) miri fn hugr_validation_2() { // https://github.com/CQCL/hugr/issues/1185 bad case let mut hugr: Hugr = serde_json::from_reader(BufReader::new( @@ -364,6 +367,7 @@ mod test { } #[test] + #[cfg_attr(miri, ignore)] // Opening files is not supported in (isolated) miri fn hugr_validation_3() { // https://github.com/CQCL/hugr/issues/1185 good case let mut hugr: Hugr = serde_json::from_reader(BufReader::new( diff --git a/hugr-core/src/hugr/views.rs b/hugr-core/src/hugr/views.rs index fd4974f31..a592b5f94 100644 --- a/hugr-core/src/hugr/views.rs +++ b/hugr-core/src/hugr/views.rs @@ -24,7 +24,7 @@ use itertools::{Itertools, MapInto}; use portgraph::render::{DotFormat, MermaidFormat}; use portgraph::{multiportgraph, LinkView, PortView}; -use super::internal::HugrInternals; +use super::internal::{HugrInternals, HugrMutInternals}; use super::{ Hugr, HugrError, HugrMut, NodeMetadata, NodeMetadataMap, ValidationError, DEFAULT_OPTYPE, }; @@ -511,6 +511,7 @@ pub trait ExtractHugr: HugrView + Sized { let old_root = hugr.root(); let new_root = hugr.insert_from_view(old_root, &self).new_root; 
hugr.set_root(new_root); + hugr.set_num_ports(new_root, 0, 0); hugr.remove_node(old_root); hugr } diff --git a/hugr-core/src/hugr/views/descendants.rs b/hugr-core/src/hugr/views/descendants.rs index 35a6e46c6..d2b66b250 100644 --- a/hugr-core/src/hugr/views/descendants.rs +++ b/hugr-core/src/hugr/views/descendants.rs @@ -201,6 +201,9 @@ where #[cfg(test)] pub(super) mod test { + use rstest::rstest; + + use crate::extension::PRELUDE_REGISTRY; use crate::{ builder::{Container, Dataflow, DataflowSubContainer, HugrBuilder, ModuleBuilder}, type_row, @@ -269,4 +272,20 @@ pub(super) mod test { Ok(()) } + + #[rstest] + fn extract_hugr() -> Result<(), Box> { + let (hugr, def, _inner) = make_module_hgr()?; + + let region: DescendantsGraph = DescendantsGraph::try_new(&hugr, def)?; + let extracted = region.extract_hugr(); + extracted.validate(&PRELUDE_REGISTRY)?; + + let region: DescendantsGraph = DescendantsGraph::try_new(&hugr, def)?; + + assert_eq!(region.node_count(), extracted.node_count()); + assert_eq!(region.root_type(), extracted.root_type()); + + Ok(()) + } } diff --git a/hugr-core/src/hugr/views/sibling.rs b/hugr-core/src/hugr/views/sibling.rs index 787cad1a2..7131c2451 100644 --- a/hugr-core/src/hugr/views/sibling.rs +++ b/hugr-core/src/hugr/views/sibling.rs @@ -491,12 +491,13 @@ mod test { #[rstest] fn extract_hugr() -> Result<(), Box> { - let (hugr, def, _inner) = make_module_hgr()?; + let (hugr, _def, inner) = make_module_hgr()?; - let region: SiblingGraph = SiblingGraph::try_new(&hugr, def)?; + let region: SiblingGraph = SiblingGraph::try_new(&hugr, inner)?; let extracted = region.extract_hugr(); + extracted.validate(&PRELUDE_REGISTRY)?; - let region: SiblingGraph = SiblingGraph::try_new(&hugr, def)?; + let region: SiblingGraph = SiblingGraph::try_new(&hugr, inner)?; assert_eq!(region.node_count(), extracted.node_count()); assert_eq!(region.root_type(), extracted.root_type()); diff --git a/hugr-core/src/hugr/views/sibling_subgraph.rs 
b/hugr-core/src/hugr/views/sibling_subgraph.rs index 889e5e5ae..fee4af9df 100644 --- a/hugr-core/src/hugr/views/sibling_subgraph.rs +++ b/hugr-core/src/hugr/views/sibling_subgraph.rs @@ -213,7 +213,7 @@ impl SiblingSubgraph { /// The in- and out-arity of the signature will match the /// number of incoming and outgoing edges respectively. In particular, the /// assumption is made that no two incoming edges have the same source - /// (no copy nodes at the input bounary). + /// (no copy nodes at the input boundary). pub fn try_from_nodes( nodes: impl Into>, hugr: &impl HugrView, diff --git a/hugr-core/src/proptest.rs b/hugr-core/src/proptest.rs index 344b32a70..5598cf75e 100644 --- a/hugr-core/src/proptest.rs +++ b/hugr-core/src/proptest.rs @@ -10,7 +10,7 @@ use crate::Hugr; /// [TypeParam], as well as several others, form a mutually recursive hierarchy. /// /// The proptest [proptest::strategy::Strategy::prop_recursive] is inadequate to -/// generate values for these types. Instead, ther Arbitrary instances take a +/// generate values for these types. Instead, the Arbitrary instances take a /// `RecursionDepth` as their (or part of their) /// [proptest::arbitrary::Arbitrary::Parameters]. We then use that parameter to /// generate children of that value. 
Usually we forward it unchanged, but in diff --git a/hugr-core/src/types/type_param.rs b/hugr-core/src/types/type_param.rs index c59411604..94d90fb16 100644 --- a/hugr-core/src/types/type_param.rs +++ b/hugr-core/src/types/type_param.rs @@ -105,7 +105,7 @@ impl TypeParam { } } - /// Make a new `TypeParam::List` (an arbitrary-length homogenous list) + /// Make a new `TypeParam::List` (an arbitrary-length homogeneous list) pub fn new_list(elem: impl Into) -> Self { Self::List { param: Box::new(elem.into()), @@ -491,15 +491,15 @@ mod test { #[test] fn type_arg_fits_param() { let rowvar = TypeRV::new_row_var_use; - fn check(arg: impl Into, parm: &TypeParam) -> Result<(), TypeArgError> { - check_type_arg(&arg.into(), parm) + fn check(arg: impl Into, param: &TypeParam) -> Result<(), TypeArgError> { + check_type_arg(&arg.into(), param) } fn check_seq>( args: &[T], - parm: &TypeParam, + param: &TypeParam, ) -> Result<(), TypeArgError> { let arg = args.iter().cloned().map_into().collect_vec().into(); - check_type_arg(&arg, parm) + check_type_arg(&arg, param) } // Simple cases: a TypeArg::Type is a TypeParam::Type but singleton sequences are lists check(USIZE_T, &TypeBound::Eq.into()).unwrap(); diff --git a/hugr-passes/CHANGELOG.md b/hugr-passes/CHANGELOG.md index dee4ec6c3..667dd35ee 100644 --- a/hugr-passes/CHANGELOG.md +++ b/hugr-passes/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## 0.3.0 (2024-06-28) + +### Features + +- [**breaking**] Validate Extensions using hierarchy, ignore input_extensions, RIP inference ([#1142](https://github.com/CQCL/hugr/pull/1142)) +- Helper functions for requesting inference, use with builder in tests ([#1219](https://github.com/CQCL/hugr/pull/1219)) + + ## 0.2.0 (2024-06-07) ### Features diff --git a/hugr-passes/Cargo.toml b/hugr-passes/Cargo.toml index 231b04d05..a714d39e6 100644 --- a/hugr-passes/Cargo.toml +++ b/hugr-passes/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hugr-passes" -version = "0.2.0" +version = "0.3.0" edition = { workspace 
= true } rust-version = { workspace = true } license = { workspace = true } @@ -13,7 +13,7 @@ keywords = ["Quantum", "Quantinuum"] categories = ["compilers"] [dependencies] -hugr-core = { path = "../hugr-core", version = "0.2.0" } +hugr-core = { path = "../hugr-core", version = "0.3.0" } itertools = { workspace = true } lazy_static = { workspace = true } paste = { workspace = true } diff --git a/hugr-passes/src/const_fold/test.rs b/hugr-passes/src/const_fold/test.rs index 258ff8617..928ad519c 100644 --- a/hugr-passes/src/const_fold/test.rs +++ b/hugr-passes/src/const_fold/test.rs @@ -229,7 +229,7 @@ fn orphan_output() { // x2 := or(x0,x1) // output x2 == true; // - // We arange things so that the `or` folds away first, leaving the not + // We arrange things so that the `or` folds away first, leaving the not // with no outputs. use hugr_core::ops::handle::NodeHandle; diff --git a/hugr-py/docs/.gitignore b/hugr-py/docs/.gitignore new file mode 100644 index 000000000..378eac25d --- /dev/null +++ b/hugr-py/docs/.gitignore @@ -0,0 +1 @@ +build diff --git a/hugr-py/docs/_static/Quantinuum_logo_black.png b/hugr-py/docs/_static/Quantinuum_logo_black.png new file mode 100644 index 000000000..5569581b8 Binary files /dev/null and b/hugr-py/docs/_static/Quantinuum_logo_black.png differ diff --git a/hugr-py/docs/_static/Quantinuum_logo_white.png b/hugr-py/docs/_static/Quantinuum_logo_white.png new file mode 100644 index 000000000..e896db91c Binary files /dev/null and b/hugr-py/docs/_static/Quantinuum_logo_white.png differ diff --git a/hugr-py/docs/_static/custom.css b/hugr-py/docs/_static/custom.css new file mode 100644 index 000000000..ce78012b3 --- /dev/null +++ b/hugr-py/docs/_static/custom.css @@ -0,0 +1,38 @@ +.wy-side-nav-search, +.wy-nav-top { + background: #5A46BE; +} + +.wy-grid-for-nav, +.wy-body-for-nav, +.wy-nav-side, +.wy-side-scroll, +.wy-menu, +.wy-menu-vertical { + background-color: #FFFFFF; +} + +.wy-menu-vertical a:hover { + background-color: #d9d9d9; +} + 
+.btn-link:visited, +.btn-link, +a:visited, +.a.reference.external, +.a.reference.internal, +.wy-menu-vertical a, +.wy-menu-vertical li, +.wy-menu-vertical ul, +.span.pre, +.sig-param, +.std.std-ref, + + +html[data-theme=light] { + --pst-color-inline-code: rgb(199, 37, 78) !important; +} + +.sig-name { + font-size: 1.25rem; +} diff --git a/hugr-py/docs/api-docs/.gitignore b/hugr-py/docs/api-docs/.gitignore new file mode 100644 index 000000000..cd6936e25 --- /dev/null +++ b/hugr-py/docs/api-docs/.gitignore @@ -0,0 +1,3 @@ +_autosummary +_build +generated diff --git a/hugr-py/docs/api-docs/_templates/autosummary/base.rst b/hugr-py/docs/api-docs/_templates/autosummary/base.rst new file mode 100644 index 000000000..5536fa108 --- /dev/null +++ b/hugr-py/docs/api-docs/_templates/autosummary/base.rst @@ -0,0 +1,5 @@ +{{ name | escape | underline}} + +.. currentmodule:: {{ module }} + +.. auto{{ objtype }}:: {{ objname }} diff --git a/hugr-py/docs/api-docs/_templates/autosummary/class.rst b/hugr-py/docs/api-docs/_templates/autosummary/class.rst new file mode 100644 index 000000000..cd41a4111 --- /dev/null +++ b/hugr-py/docs/api-docs/_templates/autosummary/class.rst @@ -0,0 +1,37 @@ +.. + Custom class template to make sphinx-autosummary list the full API doc after + the summary. See https://github.com/sphinx-doc/sphinx/issues/7912 + +{{ name | escape | underline}} + +.. currentmodule:: {{ module }} + +.. autoclass:: {{ objname }} + :members: + :show-inheritance: + :inherited-members: + + {% block methods %} + {% if methods %} + .. rubric:: {{ _('Methods') }} + + .. autosummary:: + :nosignatures: + {% for item in methods %} + {%- if not item.startswith('_') %} + ~{{ name }}.{{ item }} + {%- endif -%} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block attributes %} + {% if attributes %} + .. rubric:: {{ _('Attributes') }} + + .. 
autosummary:: + {% for item in attributes %} + ~{{ name }}.{{ item }} + {%- endfor %} + {% endif %} + {% endblock %} diff --git a/hugr-py/docs/api-docs/_templates/autosummary/module.rst b/hugr-py/docs/api-docs/_templates/autosummary/module.rst new file mode 100644 index 000000000..981555c13 --- /dev/null +++ b/hugr-py/docs/api-docs/_templates/autosummary/module.rst @@ -0,0 +1,68 @@ +.. + Custom module template to make sphinx-autosummary list the full API doc after + the summary. See https://github.com/sphinx-doc/sphinx/issues/7912 + +{{ name | escape | underline}} + +.. automodule:: {{ fullname }} + + {% block attributes %} + {% if attributes %} + .. rubric:: Module attributes + + .. autosummary:: + :toctree: + {% for item in attributes %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block functions %} + {% if functions %} + .. rubric:: {{ _('Functions') }} + + .. autosummary:: + :toctree: + :nosignatures: + {% for item in functions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block classes %} + {% if classes %} + .. rubric:: {{ _('Classes') }} + + .. autosummary:: + :toctree: + :nosignatures: + {% for item in classes %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block exceptions %} + {% if exceptions %} + .. rubric:: {{ _('Exceptions') }} + + .. autosummary:: + :toctree: + {% for item in exceptions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + +{% block modules %} +{% if modules %} +.. autosummary:: + :toctree: + :recursive: +{% for item in modules %} + {{ item }} +{%- endfor %} +{% endif %} +{% endblock %} diff --git a/hugr-py/docs/api-docs/conf.py b/hugr-py/docs/api-docs/conf.py new file mode 100644 index 000000000..98ecdd0db --- /dev/null +++ b/hugr-py/docs/api-docs/conf.py @@ -0,0 +1,44 @@ +# Configuration file for the Sphinx documentation builder. 
# noqa: INP001 +# See https://www.sphinx-doc.org/en/master/usage/configuration.html + + +project = "HUGR Python" +copyright = "2024, Quantinuum" +author = "Quantinuum" + +extensions = [ + "sphinx.ext.napoleon", + "sphinx.ext.autodoc", + "sphinx.ext.coverage", + "sphinx.ext.autosummary", + "sphinx.ext.viewcode", + "sphinx.ext.intersphinx", +] + +html_theme = "sphinx_book_theme" + +html_title = "HUGR python package API documentation." + +html_theme_options = { + "repository_url": "https://github.com/CQCL/hugr", + "use_repository_button": True, + "navigation_with_keys": True, + "logo": { + "image_light": "_static/Quantinuum_logo_black.png", + "image_dark": "_static/Quantinuum_logo_white.png", + }, +} + +html_static_path = ["../_static"] +html_css_files = ["custom.css"] + +autosummary_generate = True + +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "conftest.py"] + +intersphinx_mapping = { + "python": ("https://docs.python.org/3/", None), +} + +html_show_sourcelink = False diff --git a/hugr-py/docs/api-docs/index.rst b/hugr-py/docs/api-docs/index.rst new file mode 100644 index 000000000..fc7be3bb0 --- /dev/null +++ b/hugr-py/docs/api-docs/index.rst @@ -0,0 +1,20 @@ +HUGR Python API Documentation +================================== + +This is the API documentation for the HUGR Python package. + + +.. autosummary:: + :toctree: generated + :template: autosummary/module.rst + :recursive: + + hugr + + +Indices and tables +~~~~~~~~~~~~~~~~~~ + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/hugr-py/docs/build.sh b/hugr-py/docs/build.sh new file mode 100755 index 000000000..57eef23b6 --- /dev/null +++ b/hugr-py/docs/build.sh @@ -0,0 +1,7 @@ +#! 
/bin/bash + +mkdir build + +touch build/.nojekyll # Disable jekyll to keep files starting with underscores + +sphinx-build -b html ./api-docs ./build/api-docs diff --git a/hugr-py/poetry.lock b/hugr-py/poetry.lock new file mode 100644 index 000000000..2006a16d2 --- /dev/null +++ b/hugr-py/poetry.lock @@ -0,0 +1,715 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. + +[[package]] +name = "accessible-pygments" +version = "0.0.5" +description = "A collection of accessible pygments styles" +optional = false +python-versions = ">=3.9" +files = [ + {file = "accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7"}, + {file = "accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872"}, +] + +[package.dependencies] +pygments = ">=1.5" + +[package.extras] +dev = ["pillow", "pkginfo (>=1.10)", "playwright", "pre-commit", "setuptools", "twine (>=5.0)"] +tests = ["hypothesis", "pytest"] + +[[package]] +name = "alabaster" +version = "0.7.16" +description = "A light, configurable Sphinx theme" +optional = false +python-versions = ">=3.9" +files = [ + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "babel" +version = "2.15.0" +description = 
"Internationalization utilities" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, + {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "certifi" +version = "2024.6.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, + {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "docutils" +version = "0.21.2" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.9" +files = [ + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, +] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = 
"sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = 
"MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = 
"sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pydantic" +version = "2.7.4" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, + {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.18.4" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.18.4" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, + {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, + {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, + {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, + {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, + {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, + {file = 
"pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, + {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, + {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, + {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, + {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, + {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, + {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, + {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, + 
{file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, + {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, + {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, + {file = 
"pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, + {file 
= "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, + {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydata-sphinx-theme" +version = "0.15.4" +description = "Bootstrap-based Sphinx theme from the PyData community" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pydata_sphinx_theme-0.15.4-py3-none-any.whl", hash = "sha256:2136ad0e9500d0949f96167e63f3e298620040aea8f9c74621959eda5d4cf8e6"}, + {file = "pydata_sphinx_theme-0.15.4.tar.gz", hash = "sha256:7762ec0ac59df3acecf49fd2f889e1b4565dbce8b88b2e29ee06fdd90645a06d"}, +] + +[package.dependencies] +accessible-pygments = "*" +Babel = "*" +beautifulsoup4 = "*" +docutils = "!=0.17.0" +packaging = "*" +pygments = ">=2.7" +sphinx = ">=5" +typing-extensions = "*" + +[package.extras] +a11y = ["pytest-playwright"] +dev = ["pandoc", "pre-commit", "pydata-sphinx-theme[doc,test]", "pyyaml", "sphinx-theme-builder[cli]", "tox"] +doc = ["ablog (>=0.11.8)", "colorama", "graphviz", "ipykernel", "ipyleaflet", "ipywidgets", "jupyter_sphinx", "jupyterlite-sphinx", "linkify-it-py", "matplotlib", "myst-parser", "nbsphinx", "numpy", "numpydoc", "pandas", "plotly", "rich", "sphinx-autoapi (>=3.0.0)", "sphinx-copybutton", "sphinx-design", "sphinx-favicon (>=1.0.1)", "sphinx-sitemap", "sphinx-togglebutton", "sphinxcontrib-youtube (>=1.4.1)", "sphinxext-rediraffe", "xarray"] +i18n = ["Babel", "jinja2"] +test = ["pytest", "pytest-cov", "pytest-regressions", "sphinx[test]"] + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "sphinx" +version = "7.3.7" +description = "Python documentation generator" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinx-7.3.7-py3-none-any.whl", hash = "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3"}, + {file = "sphinx-7.3.7.tar.gz", hash = "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc"}, +] + +[package.dependencies] +alabaster = ">=0.7.14,<0.8.0" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.18.1,<0.22" +imagesize = ">=1.3" +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.14" +requests = ">=2.25.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.9" +tomli = {version = ">=2", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=3.5.0)", "importlib_metadata", "mypy (==1.9.0)", "pytest (>=6.0)", "ruff (==0.3.7)", "sphinx-lint", "tomli", "types-docutils", "types-requests"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=6.0)", "setuptools (>=67.0)"] + +[[package]] +name = "sphinx-book-theme" +version = "1.1.3" +description = "A clean book theme for scientific explanations and documentation with Sphinx" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinx_book_theme-1.1.3-py3-none-any.whl", hash = "sha256:a554a9a7ac3881979a87a2b10f633aa2a5706e72218a10f71be38b3c9e831ae9"}, + {file = "sphinx_book_theme-1.1.3.tar.gz", hash = 
"sha256:1f25483b1846cb3d353a6bc61b3b45b031f4acf845665d7da90e01ae0aef5b4d"}, +] + +[package.dependencies] +pydata-sphinx-theme = ">=0.15.2" +sphinx = ">=5" + +[package.extras] +code-style = ["pre-commit"] +doc = ["ablog", "folium", "ipywidgets", "matplotlib", "myst-nb", "nbclient", "numpy", "numpydoc", "pandas", "plotly", "sphinx-copybutton", "sphinx-design", "sphinx-examples", "sphinx-tabs", "sphinx-thebe", "sphinx-togglebutton", "sphinxcontrib-bibtex", "sphinxcontrib-youtube", "sphinxext-opengraph"] +test = ["beautifulsoup4", "coverage", "defusedxml", "myst-nb", "pytest", "pytest-cov", "pytest-regressions", "sphinx_thebe"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.8" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, + {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.6" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, + {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.5" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML 
help files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, + {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.7" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, + {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.10" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = 
"sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, + {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.10" +content-hash = "0e927e63660da3f23f2e241a006c7255a780303326e967baec4f0436f16bbd84" diff --git a/hugr-py/pyproject.toml b/hugr-py/pyproject.toml index 0c55a9476..739ced174 100644 --- a/hugr-py/pyproject.toml +++ b/hugr-py/pyproject.toml @@ -27,9 +27,12 @@ repository = "https://github.com/CQCL/hugr" python = ">=3.10" pydantic = "~2.7.0" -[tool.pytest.ini_options] -# Lark throws deprecation warnings for `src_parse` and `src_constants`. -filterwarnings = "ignore::DeprecationWarning:lark.*" +[tool.poetry.group.docs] +optional = true + +[tool.poetry.group.docs.dependencies] +sphinx = "^7.2.6" +sphinx-book-theme = "^1.1.2" [build-system] requires = ["poetry-core"] diff --git a/hugr-py/src/hugr/cfg.py b/hugr-py/src/hugr/cfg.py index 0cec794b1..61b4a8750 100644 --- a/hugr-py/src/hugr/cfg.py +++ b/hugr-py/src/hugr/cfg.py @@ -1,18 +1,24 @@ +"""Builder classes for HUGR control flow graphs.""" + from __future__ import annotations from dataclasses import dataclass +from typing import TYPE_CHECKING -import hugr.ops as ops +from hugr import ops, val from .dfg import _DfBase -from .exceptions import NoSiblingAncestor, NotInSameCfg, MismatchedExit +from .exceptions import MismatchedExit, NoSiblingAncestor, NotInSameCfg from .hugr import Hugr, ParentBuilder -from .node_port import Node, Wire, ToNode -from .tys import TypeRow, Type -import hugr.val as val + +if TYPE_CHECKING: + from .node_port import Node, ToNode, Wire + from .tys import Type, TypeRow 
class Block(_DfBase[ops.DataflowBlock]): + """Builder class for a basic block in a HUGR control flow graph.""" + def set_block_outputs(self, branching: Wire, *other_outputs: Wire) -> None: self.set_outputs(branching, *other_outputs) @@ -27,13 +33,13 @@ def _wire_up_port(self, node: Node, offset: int, p: Wire) -> Type: src_parent = self.hugr[src.node].parent try: super()._wire_up_port(node, offset, p) - except NoSiblingAncestor: + except NoSiblingAncestor as e: # note this just checks if there is a common CFG ancestor # it does not check for valid dominance between basic blocks # that is deferred to full HUGR validation. while cfg_node != src_parent: if src_parent is None or src_parent == self.hugr.root: - raise NotInSameCfg(src.node.idx, node.idx) + raise NotInSameCfg(src.node.idx, node.idx) from e src_parent = self.hugr[src_parent].parent self.hugr.add_link(src, node.inp(offset)) @@ -42,15 +48,32 @@ def _wire_up_port(self, node: Node, offset: int, p: Wire) -> Type: @dataclass class Cfg(ParentBuilder[ops.CFG]): + """Builder class for a HUGR control flow graph, with the HUGR root node + being a :class:`CFG `. + + Args: + input_types: The input types for the CFG. Outputs are computed + by propagating types through the control flow graph to the exit block. + + Examples: + >>> cfg = Cfg(tys.Bool, tys.Unit) + >>> cfg.parent_op + CFG(inputs=[Bool, Unit]) + """ + + #: The HUGR instance this CFG is part of. hugr: Hugr + #: The parent node of the CFG. parent_node: Node _entry_block: Block + #: The node holding the root of the exit block. 
exit: Node - def __init__(self, input_types: TypeRow) -> None: - root_op = ops.CFG(inputs=input_types) + def __init__(self, *input_types: Type) -> None: + input_typs = list(input_types) + root_op = ops.CFG(inputs=input_typs) hugr = Hugr(root_op) - self._init_impl(hugr, hugr.root, input_types) + self._init_impl(hugr, hugr.root, input_typs) def _init_impl(self: Cfg, hugr: Hugr, root: Node, input_types: TypeRow) -> None: self.hugr = hugr @@ -67,6 +90,23 @@ def new_nested( hugr: Hugr, parent: ToNode | None = None, ) -> Cfg: + """Start building a CFG nested inside an existing HUGR graph. + + Args: + input_types: The input types for the CFG. + hugr: The HUGR instance this CFG is part of. + parent: The parent node for the CFG: defaults to the root of the HUGR + instance. + + Returns: + The new CFG builder. + + Examples: + >>> hugr = Hugr() + >>> cfg = Cfg.new_nested([tys.Bool], hugr) + >>> cfg.parent_op + CFG(inputs=[Bool]) + """ new = cls.__new__(cls) root = hugr.add_node( ops.CFG(inputs=input_types), @@ -77,6 +117,13 @@ def new_nested( @property def entry(self) -> Node: + """Node for entry block of the CFG. + + Examples: + >>> cfg = Cfg(tys.Bool) + >>> cfg.entry + Node(1) + """ return self._entry_block.parent_node @property @@ -88,11 +135,34 @@ def _exit_op(self) -> ops.ExitBlock: return self.hugr._get_typed_op(self.exit, ops.ExitBlock) def add_entry(self) -> Block: + """Start building the entry block of the CFG. + + Returns: + The entry block builder. + + Examples: + >>> cfg = Cfg(tys.Bool) + >>> entry = cfg.add_entry() + >>> entry.set_outputs(*entry.inputs()) + """ return self._entry_block - def add_block(self, input_types: TypeRow) -> Block: + def add_block(self, *input_types: Type) -> Block: + """Add a new block to the CFG and start building it. + + Args: + input_types: The input types for the block. + + Returns: + The block builder. 
+ + Examples: + >>> cfg = Cfg(tys.Bool) + >>> b = cfg.add_block(tys.Unit) + >>> b.set_single_succ_outputs(*b.inputs()) + """ new_block = Block.new_nested( - ops.DataflowBlock(input_types), + ops.DataflowBlock(list(input_types)), self.hugr, self.parent_node, ) @@ -101,7 +171,24 @@ def add_block(self, input_types: TypeRow) -> Block: # TODO insert_block def add_successor(self, pred: Wire) -> Block: - b = self.add_block(self._nth_outputs(pred)) + """Start building a block that succeeds an existing block. + + Args: + pred: The wire from the predecessor block to the new block. The + port of the wire determines the branching index of the new block. + + + Returns: + The new block builder. + + Examples: + >>> cfg = Cfg(tys.Bool) + >>> entry = cfg.add_entry() + >>> entry.set_single_succ_outputs() + >>> b = cfg.add_successor(entry[0]) + >>> b.set_single_succ_outputs(*b.inputs()) + """ + b = self.add_block(*self._nth_outputs(pred)) self.branch(pred, b) return b @@ -112,6 +199,19 @@ def _nth_outputs(self, wire: Wire) -> TypeRow: return block.nth_outputs(port.offset) def branch(self, src: Wire, dst: ToNode) -> None: + """Add a branching control flow link between blocks. + + Args: + src: The wire from the predecessor block. + dst: The destination block. + + Examples: + >>> cfg = Cfg(tys.Bool) + >>> entry = cfg.add_entry() + >>> entry.set_single_succ_outputs() + >>> b = cfg.add_block(tys.Unit) + >>> cfg.branch(entry[0], b) + """ # TODO check for existing link/type compatibility if dst.to_node() == self.exit: return self.branch_exit(src) @@ -119,6 +219,17 @@ def branch(self, src: Wire, dst: ToNode) -> None: self.hugr.add_link(src, dst.inp(0)) def branch_exit(self, src: Wire) -> None: + """Branch from a block to the exit block. + + Args: + src: The wire from the predecessor block. 
+ + Examples: + >>> cfg = Cfg(tys.Bool) + >>> entry = cfg.add_entry() + >>> entry.set_single_succ_outputs() + >>> cfg.branch_exit(entry[0]) + """ src = src.out_port() self.hugr.add_link(src, self.exit.inp(0)) diff --git a/hugr-py/src/hugr/cond_loop.py b/hugr-py/src/hugr/cond_loop.py index a1ac42830..037a7ee0a 100644 --- a/hugr-py/src/hugr/cond_loop.py +++ b/hugr-py/src/hugr/cond_loop.py @@ -1,17 +1,25 @@ +"""Builder classes for structured control flow +in HUGR graphs (Conditional, TailLoop). +""" + from __future__ import annotations from dataclasses import dataclass +from typing import TYPE_CHECKING -import hugr.ops as ops +from hugr import ops from .dfg import _DfBase from .hugr import Hugr, ParentBuilder -from .node_port import Node, Wire, ToNode -from .tys import Sum, TypeRow +if TYPE_CHECKING: + from .node_port import Node, ToNode, Wire + from .tys import Sum, TypeRow class Case(_DfBase[ops.Case]): + """Dataflow graph builder for a case in a conditional.""" + _parent_cond: Conditional | None = None def set_outputs(self, *outputs: Wire) -> None: @@ -21,7 +29,7 @@ def set_outputs(self, *outputs: Wire) -> None: class ConditionalError(Exception): - pass + """Error building a :class:`Conditional`.""" @dataclass @@ -35,22 +43,62 @@ def __init__(self, case: Case) -> None: def _parent_conditional(self) -> Conditional: if self._parent_cond is None: - raise ConditionalError("If must have a parent conditional.") + msg = "If must have a parent conditional." + raise ConditionalError(msg) return self._parent_cond class If(_IfElse): + """Build the 'if' branch of a conditional branching on a boolean value. 
+ + Examples: + >>> from hugr.dfg import Dfg + >>> dfg = Dfg(tys.Qubit) + >>> (q,) = dfg.inputs() + >>> if_ = dfg.add_if(dfg.load(val.TRUE), q) + >>> if_.set_outputs(if_.input_node[0]) + >>> else_= if_.add_else() + >>> else_.set_outputs(else_.input_node[0]) + >>> dfg.hugr[else_.finish()].op + Conditional(sum_ty=Bool, other_inputs=[Qubit]) + """ + def add_else(self) -> Else: + """Finish building the 'if' branch and start building the 'else' branch.""" return Else(self._parent_conditional().add_case(0)) class Else(_IfElse): + """Build the 'else' branch of a conditional branching on a boolean value. + + See :class:`If` for an example. + """ + def finish(self) -> Node: + """Finish building the if/else. + + Returns: + The node that represents the parent conditional. + """ return self._parent_conditional().parent_node @dataclass class Conditional(ParentBuilder[ops.Conditional]): + """Build a conditional branching on a sum type. + + Args: + sum_ty: The sum type to branch on. + other_inputs: The inputs for the conditional that aren't included in the + sum variants. These are passed to all cases. + + Examples: + >>> cond = Conditional(tys.Bool, [tys.Qubit]) + >>> cond.parent_op + Conditional(sum_ty=Bool, other_inputs=[Qubit]) + """ + + #: map from case index to node holding the :class:`Case ` cases: dict[int, Node | None] def __init__(self, sum_ty: Sum, other_inputs: TypeRow) -> None: @@ -71,6 +119,19 @@ def new_nested( hugr: Hugr, parent: ToNode | None = None, ) -> Conditional: + """Build a Conditional nested inside an existing HUGR graph. + + Args: + sum_ty: The sum type to branch on. + other_inputs: The inputs for the conditional that aren't included in the + sum variants. These are passed to all cases. + hugr: The HUGR instance this Conditional is part of. + parent: The parent node for the Conditional: defaults to the root of + the HUGR instance. + + Returns: + The new Conditional builder. 
+ """ new = cls.__new__(cls) root = hugr.add_node( ops.Conditional(sum_ty, other_inputs), @@ -84,11 +145,31 @@ def _update_outputs(self, outputs: TypeRow) -> None: self.parent_op._outputs = outputs else: if outputs != self.parent_op._outputs: - raise ConditionalError("Mismatched case outputs.") + msg = "Mismatched case outputs." + raise ConditionalError(msg) def add_case(self, case_id: int) -> Case: + """Start building a case for the conditional. + + Args: + case_id: The index of the case to build. Input types for the case + are the corresponding variant of the sum type concatenated with the + other inputs to the conditional. + + Returns: + The new case builder. + + Raises: + ConditionalError: If the case index is out of range. + + Examples: + >>> cond = Conditional(tys.Bool, [tys.Qubit]) + >>> case = cond.add_case(0) + >>> case.set_outputs(*case.inputs()) + """ if case_id not in self.cases: - raise ConditionalError(f"Case {case_id} out of possible range.") + msg = f"Case {case_id} out of possible range." + raise ConditionalError(msg) input_types = self.parent_op.nth_inputs(case_id) new_case = Case.new_nested( ops.Case(input_types), @@ -104,9 +185,28 @@ def add_case(self, case_id: int) -> Case: @dataclass class TailLoop(_DfBase[ops.TailLoop]): + """Builder for a tail-controlled loop. + + Args: + just_inputs: Types that are only inputs to the loop body. + rest: The remaining input types that are also output types. + + Examples: + >>> tl = TailLoop([tys.Bool], [tys.Qubit]) + >>> tl.parent_op + TailLoop(just_inputs=[Bool], rest=[Qubit]) + """ + def __init__(self, just_inputs: TypeRow, rest: TypeRow) -> None: root_op = ops.TailLoop(just_inputs, rest) super().__init__(root_op) def set_loop_outputs(self, sum_wire: Wire, *rest: Wire) -> None: + """Set the outputs of the loop body. The first wire must be the sum type + that controls loop termination. + + Args: + sum_wire: The wire holding the sum type that controls loop termination. 
+ rest: The remaining output wires (corresponding to the 'rest' types). + """ self.set_outputs(sum_wire, *rest) diff --git a/hugr-py/src/hugr/conftest.py b/hugr-py/src/hugr/conftest.py new file mode 100644 index 000000000..2289f8128 --- /dev/null +++ b/hugr-py/src/hugr/conftest.py @@ -0,0 +1,19 @@ +"""Testing setup.""" + +import pytest + +from hugr import dfg, hugr, node_port, ops, tys, val + + +@pytest.fixture(autouse=True) +def _add_hugr(doctest_namespace): + doctest_namespace.update( + { + "hugr": hugr, + "node_port": node_port, + "dfg": dfg, + "ops": ops, + "tys": tys, + "val": val, + } + ) diff --git a/hugr-py/src/hugr/dfg.py b/hugr-py/src/hugr/dfg.py index 99091b6c6..e905917d7 100644 --- a/hugr-py/src/hugr/dfg.py +++ b/hugr-py/src/hugr/dfg.py @@ -1,35 +1,26 @@ +"""Builder for HUGR datflow graphs.""" + from __future__ import annotations -from dataclasses import dataclass, replace +from dataclasses import dataclass, field, replace from typing import ( TYPE_CHECKING, - Iterable, - Sequence, TypeVar, ) from typing_extensions import Self -import hugr.ops as ops -import hugr.val as val -from hugr.tys import ( - Type, - TypeRow, - get_first_sum, - FunctionType, - TypeArg, - FunctionKind, - PolyFuncType, - ExtensionSet, -) +from hugr import ops, tys, val from .exceptions import NoSiblingAncestor from .hugr import Hugr, ParentBuilder -from .node_port import Node, OutPort, Wire, ToNode if TYPE_CHECKING: + from collections.abc import Iterable, Sequence + from .cfg import Cfg from .cond_loop import Conditional, If, TailLoop + from .node_port import Node, OutPort, ToNode, Wire DP = TypeVar("DP", bound=ops.DfParentOp) @@ -37,10 +28,20 @@ @dataclass() class _DfBase(ParentBuilder[DP]): - hugr: Hugr + """Base class for dataflow graph builders. + + Args: + parent_op: The parent operation of the dataflow graph. + """ + + #: The Hugr instance that the builder is using. + hugr: Hugr = field(repr=False) + #: The parent node of the dataflow graph. 
parent_node: Node - input_node: Node - output_node: Node + #: The input node of the dataflow graph. + input_node: Node = field(repr=False) + #: The output node of the dataflow graph. + output_node: Node = field(repr=False) def __init__(self, parent_op: DP) -> None: self.hugr = Hugr(parent_op) @@ -59,6 +60,20 @@ def _init_io_nodes(self, parent_op: DP): def new_nested( cls, parent_op: DP, hugr: Hugr, parent: ToNode | None = None ) -> Self: + """Start building a dataflow graph nested inside a larger HUGR. + + Args: + parent_op: The parent operation of the new dataflow graph. + hugr: The host HUGR instance to build the dataflow graph in. + parent: Parent of new dataflow graph's root node: defaults to the + host HUGR root. + + Example: + >>> hugr = Hugr() + >>> dfg = Dfg.new_nested(ops.DFG([]), hugr) + >>> dfg.parent_node + Node(1) + """ new = cls.__new__(cls) new.hugr = hugr @@ -73,15 +88,49 @@ def _output_op(self) -> ops.Output: return self.hugr._get_typed_op(self.output_node, ops.Output) def inputs(self) -> list[OutPort]: + """List all incoming wires (output ports of the input node). + + Example: + >>> dfg = Dfg(tys.Bool) + >>> dfg.inputs() + [OutPort(Node(1), 0)] + """ return [self.input_node.out(i) for i in range(len(self._input_op().types))] def add_op(self, op: ops.DataflowOp, /, *args: Wire) -> Node: + """Add a dataflow operation to the graph, wiring in input ports. + + Args: + op: The operation to add. + args: The input wires to the operation. + + Returns: + The node holding the new operation. + + Example: + >>> dfg = Dfg(tys.Bool) + >>> dfg.add_op(ops.Noop(), dfg.inputs()[0]) + Node(3) + """ new_n = self.hugr.add_node(op, self.parent_node) self._wire_up(new_n, args) return replace(new_n, _num_out_ports=op.num_out) def add(self, com: ops.Command) -> Node: + """Add a command (holding a dataflow operation and the incoming wires) + to the graph. + + Args: + com: The command to add. 
+ + Example: + >>> dfg = Dfg(tys.Bool) + >>> (i,) = dfg.inputs() + >>> dfg.add(ops.Noop()(i)) + Node(3) + + """ return self.add_op(com.op, *com.incoming) def _insert_nested_impl(self, builder: ParentBuilder, *args: Wire) -> Node: @@ -90,12 +139,42 @@ def _insert_nested_impl(self, builder: ParentBuilder, *args: Wire) -> Node: return mapping[builder.parent_node] def insert_nested(self, dfg: Dfg, *args: Wire) -> Node: + """Insert a nested dataflow graph into the current graph, wiring in the + inputs. + + Args: + dfg: The dataflow graph to insert. + args: The input wires to the graph. + + Returns: + The root node of the inserted graph. + + Example: + >>> dfg = Dfg(tys.Bool) + >>> dfg2 = Dfg(tys.Bool) + >>> dfg.insert_nested(dfg2, dfg.inputs()[0]) + Node(3) + """ return self._insert_nested_impl(dfg, *args) def add_nested( self, *args: Wire, ) -> Dfg: + """Start building a nested dataflow graph. + + Args: + args: The input wires to the nested DFG. + + Returns: + Builder for new nested dataflow graph. + + Example: + >>> dfg = Dfg(tys.Bool) + >>> dfg2 = dfg.add_nested(dfg.inputs()[0]) + >>> dfg2.parent_node + Node(3) + """ from .dfg import Dfg parent_op = ops.DFG(self._wire_types(args)) @@ -103,13 +182,27 @@ def add_nested( self._wire_up(dfg.parent_node, args) return dfg - def _wire_types(self, args: Iterable[Wire]) -> TypeRow: + def _wire_types(self, args: Iterable[Wire]) -> tys.TypeRow: return [self._get_dataflow_type(w) for w in args] def add_cfg( self, *args: Wire, ) -> Cfg: + """Start building a new CFG nested inside the current dataflow graph. + + Args: + args: The input wires to the new CFG. + + Returns: + Builder for new nested CFG. 
+ + Example: + >>> dfg = Dfg(tys.Bool) + >>> cfg = dfg.add_cfg(dfg.inputs()[0]) + >>> cfg.parent_op + CFG(inputs=[Bool]) + """ from .cfg import Cfg cfg = Cfg.new_nested(self._wire_types(args), self.hugr, self.parent_node) @@ -117,29 +210,122 @@ def add_cfg( return cfg def insert_cfg(self, cfg: Cfg, *args: Wire) -> Node: + """Insert a CFG into the current dataflow graph, wiring in the inputs. + + Args: + cfg: The CFG to insert. + args: The input wires to the CFG. + + Returns: + The root node of the inserted CFG. + + Example: + >>> from hugr.cfg import Cfg + >>> dfg = Dfg(tys.Bool) + >>> cfg = Cfg(tys.Bool) + >>> dfg.insert_cfg(cfg, dfg.inputs()[0]) + Node(3) + """ return self._insert_nested_impl(cfg, *args) - def add_conditional(self, cond: Wire, *args: Wire) -> Conditional: + def add_conditional(self, cond_wire: Wire, *args: Wire) -> Conditional: + """Start building a new conditional nested inside the current dataflow + graph. + + Args: + cond_wire: The wire holding the value (of Sum type) to branch the + conditional on. + args: Remaining input wires to the conditional. + + Returns: + Builder for new nested conditional. 
+ + Example: + >>> dfg = Dfg(tys.Bool, tys.Unit) + >>> (cond, unit) = dfg.inputs() + >>> cond = dfg.add_conditional(cond, unit) + >>> cond.parent_node + Node(3) + """ from .cond_loop import Conditional - args = (cond, *args) - (sum_, other_inputs) = get_first_sum(self._wire_types(args)) - cond = Conditional.new_nested(sum_, other_inputs, self.hugr, self.parent_node) - self._wire_up(cond.parent_node, args) - return cond - - def insert_conditional(self, cond: Conditional, *args: Wire) -> Node: - return self._insert_nested_impl(cond, *args) + args = (cond_wire, *args) + (sum_, other_inputs) = tys.get_first_sum(self._wire_types(args)) + cond_wire = Conditional.new_nested( + sum_, other_inputs, self.hugr, self.parent_node + ) + self._wire_up(cond_wire.parent_node, args) + return cond_wire - def add_if(self, cond: Wire, *args: Wire) -> If: + def insert_conditional( + self, cond: Conditional, cond_wire: Wire, *args: Wire + ) -> Node: + """Insert a conditional into the current dataflow graph, wiring in the + inputs. + + Args: + cond: The conditional to insert. + cond_wire: The wire holding the value (of Sum type) + to branch the Conditional on. + args: Remaining input wires to the conditional. + + Returns: + The root node of the inserted conditional. + + Example: + >>> from hugr.cond_loop import Conditional + >>> cond = Conditional(tys.Bool, []) + >>> dfg = Dfg(tys.Bool) + >>> cond_n = dfg.insert_conditional(cond, dfg.inputs()[0]) + >>> dfg.hugr[cond_n].op + Conditional(sum_ty=Bool, other_inputs=[]) + """ + return self._insert_nested_impl(cond, *(cond_wire, *args)) + + def add_if(self, cond_wire: Wire, *args: Wire) -> If: + """Start building a new if block nested inside the current dataflow + graph. + + Args: + cond_wire: The wire holding the Bool value to branch the If on. + args: Remaining input wires to the If (and subsequent Else). + + Returns: + Builder for new nested If. 
+ + Example: + >>> dfg = Dfg(tys.Bool) + >>> (cond,) = dfg.inputs() + >>> if_ = dfg.add_if(cond, cond) + >>> if_.parent_op + Case(inputs=[Bool]) + """ from .cond_loop import If - conditional = self.add_conditional(cond, *args) + conditional = self.add_conditional(cond_wire, *args) return If(conditional.add_case(1)) def add_tail_loop( self, just_inputs: Sequence[Wire], rest: Sequence[Wire] ) -> TailLoop: + """Start building a new tail loop nested inside the current dataflow + graph. + + Args: + just_inputs: input wires for types that are only inputs to the loop body. + rest: input wires for types that are inputs and outputs of the loop + body. + + Returns: + Builder for new nested TailLoop. + + Example: + >>> dfg = Dfg(tys.Bool) + >>> (cond,) = dfg.inputs() + >>> tl = dfg.add_tail_loop([cond], [cond]) + >>> tl.parent_op + TailLoop(just_inputs=[Bool], rest=[Bool]) + """ from .cond_loop import TailLoop just_input_types = self._wire_types(just_inputs) @@ -149,21 +335,99 @@ def add_tail_loop( self._wire_up(tl.parent_node, (*just_inputs, *rest)) return tl - def insert_tail_loop(self, tl: TailLoop, *args: Wire) -> Node: - return self._insert_nested_impl(tl, *args) + def insert_tail_loop( + self, tl: TailLoop, just_inputs: Sequence[Wire], rest: Sequence[Wire] + ) -> Node: + """Insert a tail loop into the current dataflow graph, wiring in the + inputs. + + Args: + tl: The tail loop to insert. + just_inputs: input wires for types that are only inputs to the loop body. + rest: input wires for types that are inputs and outputs of the loop + body. + + Returns: + The root node of the inserted tail loop. 
+ + Example: + >>> from hugr.cond_loop import TailLoop + >>> tl = TailLoop([tys.Bool], [tys.Bool]) + >>> dfg = Dfg(tys.Bool) + >>> (b,) = dfg.inputs() + >>> tl_n = dfg.insert_tail_loop(tl, [b], [b]) + >>> dfg.hugr[tl_n].op + TailLoop(just_inputs=[Bool], rest=[Bool]) + """ + return self._insert_nested_impl(tl, *(*just_inputs, *rest)) def set_outputs(self, *args: Wire) -> None: + """Set the outputs of the dataflow graph. + Connects wires to the output node. + + Args: + args: Wires to connect to the output node. + + Example: + >>> dfg = Dfg(tys.Bool) + >>> dfg.set_outputs(dfg.inputs()[0]) # connect input to output + """ self._wire_up(self.output_node, args) self.parent_op._set_out_types(self._output_op().types) def add_state_order(self, src: Node, dst: Node) -> None: + """Add a state order link between two nodes. + + Args: + src: The source node. + dst: The destination node. + + Examples: + >>> df = dfg.Dfg() + >>> df.add_state_order(df.input_node, df.output_node) + >>> list(df.hugr.outgoing_order_links(df.input_node)) + [Node(2)] + """ # adds edge to the right of all existing edges self.hugr.add_link(src.out(-1), dst.inp(-1)) def add_const(self, val: val.Value) -> Node: + """Add a static constant to the graph. + + Args: + val: The value to add. + + Returns: + The node holding the :class:`Const ` operation. + + Example: + >>> dfg = Dfg() + >>> const_n = dfg.add_const(val.TRUE) + >>> dfg.hugr[const_n].op + Const(TRUE) + """ return self.hugr.add_const(val, self.parent_node) def load(self, const: ToNode | val.Value) -> Node: + """Load a constant into the graph as a dataflow value. + + Args: + const: The constant to load, either a Value that will be added as a + child Const node then loaded, or a node corresponding to an existing + Const. + + Returns: + The node holding the :class:`LoadConst ` + operation. 
+ + Example: + >>> dfg = Dfg() + >>> const_n = dfg.load(val.TRUE) + >>> len(dfg.hugr) # parent, input, output, const, load + 5 + >>> dfg.hugr[const_n].op + LoadConst(Bool) + """ if isinstance(const, val.Value): const = self.add_const(const) const_op = self.hugr._get_typed_op(const, ops.Const) @@ -178,13 +442,27 @@ def call( self, func: ToNode, *args: Wire, - instantiation: FunctionType | None = None, - type_args: Sequence[TypeArg] | None = None, + instantiation: tys.FunctionType | None = None, + type_args: Sequence[tys.TypeArg] | None = None, ) -> Node: + """Call a static function in the graph. + See :class:`Call ` for more on how polymorphic functions + are handled. + + Args: + func: The node corresponding to the function definition/declaration to call. + args: The input wires to the function call. + instantiation: The concrete function type to call (needed if polymorphic). + type_args: The type arguments for the function (needed if + polymorphic). + + Returns: + The node holding the :class:`Call ` operation. + """ signature = self._fn_sig(func) call_op = ops.Call(signature, instantiation, type_args) call_n = self.hugr.add_node(call_op, self.parent_node, call_op.num_out) - self.hugr.add_link(func.out(0), call_n.inp(call_op.function_port_offset())) + self.hugr.add_link(func.out(0), call_n.inp(call_op._function_port_offset())) self._wire_up(call_n, args) @@ -193,9 +471,20 @@ def call( def load_function( self, func: ToNode, - instantiation: FunctionType | None = None, - type_args: Sequence[TypeArg] | None = None, + instantiation: tys.FunctionType | None = None, + type_args: Sequence[tys.TypeArg] | None = None, ) -> Node: + """Load a static function into the graph as a higher-order value. + + Args: + func: The node corresponding to the function definition/declaration to load. + instantiation: The concrete function type to load (needed if polymorphic). + type_args: The type arguments for the function (needed if + polymorphic). 
+ + Returns: + The node holding the :class:`LoadFunc ` operation. + """ signature = self._fn_sig(func) load_op = ops.LoadFunc(signature, instantiation, type_args) load_n = self.hugr.add_node(load_op, self.parent_node) @@ -203,30 +492,32 @@ def load_function( return load_n - def _fn_sig(self, func: ToNode) -> PolyFuncType: + def _fn_sig(self, func: ToNode) -> tys.PolyFuncType: f_op = self.hugr[func] f_kind = f_op.op.port_kind(func.out(0)) match f_kind: - case FunctionKind(sig): + case tys.FunctionKind(sig): signature = sig case _: - raise ValueError("Expected 'func' to be a function") + msg = "Expected 'func' to be a function" + raise ValueError(msg) return signature - def _wire_up(self, node: Node, ports: Iterable[Wire]) -> TypeRow: + def _wire_up(self, node: Node, ports: Iterable[Wire]) -> tys.TypeRow: tys = [self._wire_up_port(node, i, p) for i, p in enumerate(ports)] - if isinstance(op := self.hugr[node].op, ops.PartialOp): - op.set_in_types(tys) + if isinstance(op := self.hugr[node].op, ops._PartialOp): + op._set_in_types(tys) return tys - def _get_dataflow_type(self, wire: Wire) -> Type: + def _get_dataflow_type(self, wire: Wire) -> tys.Type: port = wire.out_port() ty = self.hugr.port_type(port) if ty is None: - raise ValueError(f"Port {port} is not a dataflow port.") + msg = f"Port {port} is not a dataflow port." 
+        raise ValueError(msg)
         return ty
 
-    def _wire_up_port(self, node: Node, offset: int, p: Wire) -> Type:
+    def _wire_up_port(self, node: Node, offset: int, p: Wire) -> tys.Type:
         src = p.out_port()
         node_ancestor = _ancestral_sibling(self.hugr, src.node, node)
         if node_ancestor is None:
@@ -238,14 +529,26 @@ def _wire_up_port(self, node: Node, offset: int, p: Wire) -> Type:
 
 
 class Dfg(_DfBase[ops.DFG]):
-    def __init__(
-        self, *input_types: Type, extension_delta: ExtensionSet | None = None
-    ) -> None:
-        parent_op = ops.DFG(list(input_types), None, extension_delta or [])
+    """Builder for a simple nested Dataflow graph, with root node of type
+    :class:`DFG `.
+
+    Args:
+        input_types: The input types of the dataflow graph. Output types are
+            calculated by propagating types through the graph.
+
+    Example:
+        >>> dfg = Dfg(tys.Bool)
+        >>> dfg.parent_op
+        DFG(inputs=[Bool])
+    """
+
+    def __init__(self, *input_types: tys.Type) -> None:
+        parent_op = ops.DFG(list(input_types), None)
         super().__init__(parent_op)
 
 
 def _ancestral_sibling(h: Hugr, src: Node, tgt: Node) -> Node | None:
+    """Find the ancestor of `tgt` that is a sibling of `src`, if one exists."""
     src_parent = h[src].parent
 
     while (tgt_parent := h[tgt].parent) is not None:
diff --git a/hugr-py/src/hugr/exceptions.py b/hugr-py/src/hugr/exceptions.py
index f003fc6c7..5d86e7945 100644
--- a/hugr-py/src/hugr/exceptions.py
+++ b/hugr-py/src/hugr/exceptions.py
@@ -1,28 +1,42 @@
+"""HUGR builder exceptions."""
+
 from dataclasses import dataclass
 
 
 @dataclass
 class NoSiblingAncestor(Exception):
+    """No sibling ancestor of target for valid inter-graph edge."""
+
     src: int
     tgt: int
 
     @property
     def msg(self):
-        return f"Source {self.src} has no sibling ancestor of target {self.tgt}, so cannot wire up."
+        return (
+            f"Source {self.src} has no sibling ancestor of target {self.tgt},"
+            " so cannot wire up."
+ ) @dataclass class NotInSameCfg(Exception): + """Source and target nodes are not in the same CFG.""" + src: int tgt: int @property def msg(self): - return f"Source {self.src} is not in the same CFG as target {self.tgt}, so cannot wire up." + return ( + f"Source {self.src} is not in the same CFG as target {self.tgt}," + " so cannot wire up." + ) @dataclass class MismatchedExit(Exception): + """Edge to exit block signature mismatch.""" + src: int @property @@ -33,9 +47,6 @@ def msg(self): class ParentBeforeChild(Exception): - msg: str = "Parent node must be added before child node." + """Parent added before child.""" - -@dataclass -class IncompleteOp(Exception): - msg: str = "Operation is incomplete, may require set_in_types to be called." + msg: str = "Parent node must be added before child node." diff --git a/hugr-py/src/hugr/function.py b/hugr-py/src/hugr/function.py index 2e698c5d1..bdc388ccf 100644 --- a/hugr-py/src/hugr/function.py +++ b/hugr-py/src/hugr/function.py @@ -1,18 +1,37 @@ +"""Builder classes for defining functions and modules in HUGR.""" + from __future__ import annotations from dataclasses import dataclass +from typing import TYPE_CHECKING -import hugr.ops as ops -import hugr.val as val +from hugr import ops, val from .dfg import _DfBase -from hugr.node_port import Node from .hugr import Hugr -from .tys import TypeRow, TypeParam, PolyFuncType, Type, TypeBound + +if TYPE_CHECKING: + from hugr.node_port import Node + + from .tys import PolyFuncType, Type, TypeBound, TypeParam, TypeRow @dataclass class Function(_DfBase[ops.FuncDefn]): + """Build a function definition as a HUGR dataflow graph. + + Args: + name: The name of the function. + input_types: The input types for the function (output types are + computed by propagating types from input node through the graph). + type_params: The type parameters for the function, if polymorphic. 
+ + Examples: + >>> f = Function("f", [tys.Bool]) + >>> f.parent_op + FuncDefn(name='f', inputs=[Bool], params=[]) + """ + def __init__( self, name: str, @@ -25,7 +44,15 @@ def __init__( @dataclass class Module: - hugr: Hugr + """Build a top-level HUGR module. + + Examples: + >>> m = Module() + >>> m.hugr.root_op() + Module() + """ + + hugr: Hugr[ops.Module] def __init__(self) -> None: self.hugr = Hugr(ops.Module()) @@ -36,20 +63,61 @@ def define_function( input_types: TypeRow, type_params: list[TypeParam] | None = None, ) -> Function: + """Start building a function definition in the module. + + Args: + name: The name of the function. + input_types: The input types for the function. + type_params: The type parameters for the function, if polymorphic. + + Returns: + The new function builder. + """ parent_op = ops.FuncDefn(name, input_types, type_params or []) return Function.new_nested(parent_op, self.hugr) def define_main(self, input_types: TypeRow) -> Function: + """Define the 'main' function in the module. See :meth:`define_function`.""" return self.define_function("main", input_types) def declare_function(self, name: str, signature: PolyFuncType) -> Node: + """Add a function declaration to the module. + + Args: + name: The name of the function. + signature: The (polymorphic) signature of the function. + + Returns: + The node representing the function declaration. + + Examples: + >>> m = Module() + >>> sig = tys.PolyFuncType([], tys.FunctionType.empty()) + >>> m.declare_function("f", sig) + Node(1) + """ return self.hugr.add_node(ops.FuncDecl(name, signature), self.hugr.root) def add_const(self, value: val.Value) -> Node: + """Add a static constant to the module. + + Args: + value: The constant value to add. + + Returns: + The node holding the constant. 
+ + Examples: + >>> m = Module() + >>> m.add_const(val.FALSE) + Node(1) + """ return self.hugr.add_node(ops.Const(value), self.hugr.root) def add_alias_defn(self, name: str, ty: Type) -> Node: + """Add a type alias definition.""" return self.hugr.add_node(ops.AliasDefn(name, ty), self.hugr.root) def add_alias_decl(self, name: str, bound: TypeBound) -> Node: + """Add a type alias declaration.""" return self.hugr.add_node(ops.AliasDecl(name, bound), self.hugr.root) diff --git a/hugr-py/src/hugr/hugr.py b/hugr-py/src/hugr/hugr.py index 7fb9bc70c..4df2dc67a 100644 --- a/hugr-py/src/hugr/hugr.py +++ b/hugr-py/src/hugr/hugr.py @@ -1,39 +1,46 @@ +"""Core data structures for HUGR.""" + from __future__ import annotations -from collections.abc import Mapping +from collections.abc import Iterable, Mapping from dataclasses import dataclass, field, replace from typing import ( + TYPE_CHECKING, Generic, - Iterable, Protocol, TypeVar, cast, overload, - Type as PyType, ) - -from hugr.ops import Op, DataflowOp, Const, Call -from hugr.tys import Type, Kind, ValueKind -from hugr.val import Value -from hugr.node_port import Direction, InPort, OutPort, ToNode, Node, _SubPort +from hugr.node_port import Direction, InPort, Node, OutPort, ToNode, _SubPort +from hugr.ops import Call, Const, DataflowOp, Module, Op from hugr.serialization.ops import OpType as SerialOp from hugr.serialization.serial_hugr import SerialHugr +from hugr.tys import Kind, Type, ValueKind from hugr.utils import BiMap +from hugr.val import Value from .exceptions import ParentBeforeChild +if TYPE_CHECKING: + from hugr.val import Value + @dataclass() class NodeData: + """Node weights in HUGR graph. Defined by an operation and parent node.""" + + #: The operation of the node. op: Op + #: The parent node, or None for the root node. 
parent: Node | None - _num_inps: int = 0 - _num_outs: int = 0 - children: list[Node] = field(default_factory=list) + _num_inps: int = field(default=0, repr=False) + _num_outs: int = field(default=0, repr=False) + children: list[Node] = field(default_factory=list, repr=False) - def to_serial(self, node: Node, hugr: Hugr) -> SerialOp: - o = self.op.to_serial(node, self.parent if self.parent else node, hugr) + def to_serial(self, node: Node) -> SerialOp: + o = self.op.to_serial(self.parent if self.parent else node) return SerialOp(root=o) # type: ignore[arg-type] @@ -48,7 +55,11 @@ def to_serial(self, node: Node, hugr: Hugr) -> SerialOp: class ParentBuilder(ToNode, Protocol[OpVar]): + """Abstract interface implemented by builders of nodes that contain child HUGRs.""" + + #: The child HUGR. hugr: Hugr[OpVar] + # Unique parent node. parent_node: Node def to_node(self) -> Node: @@ -56,21 +67,39 @@ def to_node(self) -> Node: @property def parent_op(self) -> OpVar: + """The parent node's operation.""" return cast(OpVar, self.hugr[self.parent_node].op) @dataclass() class Hugr(Mapping[Node, NodeData], Generic[OpVar]): + """The core HUGR datastructure. + + Args: + root_op: The operation for the root node. Defaults to a Module. + + Examples: + >>> h = Hugr() + >>> h.root_op() + Module() + >>> h[h.root].op + Module() + """ + + #: Root node of the HUGR. root: Node + # List of nodes, with None for deleted nodes. _nodes: list[NodeData | None] + # Bidirectional map of links between ports. _links: BiMap[_SO, _SI] + # List of free node indices, populated when nodes are deleted. 
_free_nodes: list[Node] - def __init__(self, root_op: OpVar) -> None: + def __init__(self, root_op: OpVar | None = None) -> None: self._free_nodes = [] self._links = BiMap() self._nodes = [] - self.root = self._add_node(root_op, None, 0) + self.root = self._add_node(root_op or Module(), None, 0) def __getitem__(self, key: ToNode) -> NodeData: key = key.to_node() @@ -88,12 +117,26 @@ def __iter__(self): def __len__(self) -> int: return self.num_nodes() - def _get_typed_op(self, node: ToNode, cl: PyType[OpVar2]) -> OpVar2: + def _get_typed_op(self, node: ToNode, cl: type[OpVar2]) -> OpVar2: op = self[node].op assert isinstance(op, cl) return op def children(self, node: ToNode | None = None) -> list[Node]: + """The child nodes of a given `node`. + + Args: + node: Parent node. Defaults to the HUGR root. + + Returns: + List of child nodes. + + Examples: + >>> h = Hugr() + >>> n = h.add_node(ops.Const(val.TRUE)) + >>> h.children(h.root) + [Node(1)] + """ node = node or self.root return self[node].children @@ -123,13 +166,55 @@ def add_node( parent: ToNode | None = None, num_outs: int | None = None, ) -> Node: + """Add a node to the HUGR. + + Args: + op: Operation of the node. + parent: Parent node of added node. Defaults to HUGR root if None. + num_outs: Number of output ports expected for this node. Defaults to None. + + Returns: + Handle to the added node. + """ parent = parent or self.root return self._add_node(op, parent, num_outs) def add_const(self, value: Value, parent: ToNode | None = None) -> Node: + """Add a constant node to the HUGR. + + Args: + value: Value of the constant. + parent: Parent node of added node. Defaults to HUGR root if None. + + Returns: + Handle to the added node. + + Examples: + >>> h = Hugr() + >>> n = h.add_const(val.TRUE) + >>> h[n].op + Const(TRUE) + """ return self.add_node(Const(value), parent) def delete_node(self, node: ToNode) -> NodeData | None: + """Delete a node from the HUGR. + + Args: + node: Node to delete. 
+ + Returns: + The deleted node data, or None if the node was not found. + + Examples: + >>> h = Hugr() + >>> n = h.add_const(val.TRUE) + >>> deleted = h.delete_node(n) + >>> deleted.op + Const(TRUE) + >>> len(h) + 1 + """ node = node.to_node() parent = self[node].parent if parent: @@ -156,6 +241,19 @@ def _unused_sub_offset(self, port: P) -> _SubPort[P]: return sub_port def add_link(self, src: OutPort, dst: InPort) -> None: + """Add a link (edge) between two nodes to the HUGR, + from an outgoing port to an incoming port. + + Args: + src: Source port. + dst: Destination port. + + Examples: + >>> df = dfg.Dfg(tys.Bool) + >>> df.hugr.add_link(df.input_node.out(0), df.output_node.inp(0)) + >>> list(df.hugr.linked_ports(df.input_node[0])) + [InPort(Node(2), 0)] + """ src_sub = self._unused_sub_offset(src) dst_sub = self._unused_sub_offset(dst) # if self._links.get_left(dst_sub) is not None: @@ -166,6 +264,12 @@ def add_link(self, src: OutPort, dst: InPort) -> None: self[dst.node]._num_inps = max(self[dst.node]._num_inps, dst.offset + 1) def delete_link(self, src: OutPort, dst: InPort) -> None: + """Delete a link (edge) between two nodes from the HUGR. + + Args: + src: Source port. + dst: Destination port. + """ try: sub_offset = next( i for i, inp in enumerate(self.linked_ports(src)) if inp == dst @@ -176,12 +280,45 @@ def delete_link(self, src: OutPort, dst: InPort) -> None: # TODO make sure sub-offset is handled correctly def root_op(self) -> OpVar: + """The operation of the root node. + + Examples: + >>> h = Hugr() + >>> h.root_op() + Module() + """ return cast(OpVar, self[self.root].op) def num_nodes(self) -> int: + """The number of nodes in the HUGR. + + Examples: + >>> h = Hugr() + >>> n = h.add_const(val.TRUE) + >>> h.num_nodes() + 2 + """ return len(self._nodes) - len(self._free_nodes) def num_ports(self, node: ToNode, direction: Direction) -> int: + """The number of ports of a node in a given direction. 
+ Not necessarily the number of connected ports - if port `i` is + connected, then all ports `0..i` are assumed to exist. + + Args: + node: Node to query. + direction: Direction of ports to count. + + Examples: + >>> h = Hugr() + >>> n1 = h.add_const(val.TRUE) + >>> n2 = h.add_const(val.FALSE) + >>> h.add_link(n1.out(0), n2.inp(2)) # not a valid link! + >>> h.num_ports(n1, Direction.OUTGOING) + 1 + >>> h.num_ports(n2, Direction.INCOMING) + 3 + """ return ( self.num_in_ports(node) if direction == Direction.INCOMING @@ -189,9 +326,11 @@ def num_ports(self, node: ToNode, direction: Direction) -> int: ) def num_in_ports(self, node: ToNode) -> int: + """The number of incoming ports of a node. See :meth:`num_ports`.""" return self[node]._num_inps def num_out_ports(self, node: ToNode) -> int: + """The number of outgoing ports of a node. See :meth:`num_ports`.""" return self[node]._num_outs def _linked_ports( @@ -208,6 +347,21 @@ def linked_ports(self, port: OutPort) -> Iterable[InPort]: ... @overload def linked_ports(self, port: InPort) -> Iterable[OutPort]: ... def linked_ports(self, port: OutPort | InPort): + """Return an iterable of In(Out)Ports linked to given Out(In)Port. + + Args: + port: Given port. + + Returns: + Iterator over linked ports. + + Examples: + >>> df = dfg.Dfg(tys.Bool) + >>> df.set_outputs(df.input_node[0]) + >>> list(df.hugr.linked_ports(df.input_node[0])) + [InPort(Node(2), 0)] + + """ match port: case OutPort(_): return self._linked_ports(port, self._links.fwd) @@ -217,9 +371,33 @@ def linked_ports(self, port: OutPort | InPort): # TODO: single linked port def outgoing_order_links(self, node: ToNode) -> Iterable[Node]: + """Iterator over nodes connected by an outgoing state order link from a + given node. + + Args: + node: Source node of state order link. 
+ + Examples: + >>> df = dfg.Dfg() + >>> df.add_state_order(df.input_node, df.output_node) + >>> list(df.hugr.outgoing_order_links(df.input_node)) + [Node(2)] + """ return (p.node for p in self.linked_ports(node.out(-1))) def incoming_order_links(self, node: ToNode) -> Iterable[Node]: + """Iterator over nodes connected by an incoming state order link to a + given node. + + Args: + node: Destination node of state order link. + + Examples: + >>> df = dfg.Dfg() + >>> df.add_state_order(df.input_node, df.output_node) + >>> list(df.hugr.incoming_order_links(df.output_node)) + [Node(1)] + """ return (p.node for p in self.linked_ports(node.inp(-1))) def _node_links( @@ -235,25 +413,86 @@ def _node_links( yield port, list(self._linked_ports(port, links)) def outgoing_links(self, node: ToNode) -> Iterable[tuple[OutPort, list[InPort]]]: + """Iterator over outgoing links from a given node. + + Args: + node: Node to query. + + Returns: + Iterator of pairs of outgoing port and the incoming ports connected + to that port. + + Examples: + >>> df = dfg.Dfg() + >>> df.hugr.add_link(df.input_node.out(0), df.output_node.inp(0)) + >>> df.hugr.add_link(df.input_node.out(0), df.output_node.inp(1)) + >>> list(df.hugr.outgoing_links(df.input_node)) + [(OutPort(Node(1), 0), [InPort(Node(2), 0), InPort(Node(2), 1)])] + """ return self._node_links(node, self._links.fwd) def incoming_links(self, node: ToNode) -> Iterable[tuple[InPort, list[OutPort]]]: + """Iterator over incoming links to a given node. + + Args: + node: Node to query. + + Returns: + Iterator of pairs of incoming port and the outgoing ports connected + to that port. 
+ + Examples: + >>> df = dfg.Dfg() + >>> df.hugr.add_link(df.input_node.out(0), df.output_node.inp(0)) + >>> df.hugr.add_link(df.input_node.out(0), df.output_node.inp(1)) + >>> list(df.hugr.incoming_links(df.output_node)) + [(InPort(Node(2), 0), [OutPort(Node(1), 0)]), (InPort(Node(2), 1), [OutPort(Node(1), 0)])] + """ # noqa: E501 return self._node_links(node, self._links.bck) def num_incoming(self, node: Node) -> int: - # connecetd links + """The number of incoming links to a `node`. + + Examples: + >>> df = dfg.Dfg() + >>> df.hugr.add_link(df.input_node.out(0), df.output_node.inp(0)) + >>> df.hugr.num_incoming(df.output_node) + 1 + """ return sum(1 for _ in self.incoming_links(node)) def num_outgoing(self, node: ToNode) -> int: - # connecetd links + """The number of outgoing links from a `node`. + + Examples: + >>> df = dfg.Dfg() + >>> df.hugr.add_link(df.input_node.out(0), df.output_node.inp(0)) + >>> df.hugr.num_outgoing(df.input_node) + 1 + """ return sum(1 for _ in self.outgoing_links(node)) # TODO: num_links and _linked_ports def port_kind(self, port: InPort | OutPort) -> Kind: + """The kind of a `port`. + + Examples: + >>> df = dfg.Dfg(tys.Bool) + >>> df.hugr.port_kind(df.input_node.out(0)) + ValueKind(Bool) + """ return self[port.node].op.port_kind(port) def port_type(self, port: InPort | OutPort) -> Type | None: + """The type of a `port`, if the kind is + :class:`ValueKind `, else None. + + Examples: + >>> df = dfg.Dfg(tys.Bool) + >>> df.hugr.port_type(df.input_node.out(0)) + Bool + """ op = self[port.node].op if isinstance(op, DataflowOp): return op.port_type(port) @@ -264,6 +503,22 @@ def port_type(self, port: InPort | OutPort) -> Type | None: return None def insert_hugr(self, hugr: Hugr, parent: ToNode | None = None) -> dict[Node, Node]: + """Insert a HUGR into this HUGR. + + Args: + hugr: HUGR to insert. + parent: Parent for root of inserted HUGR. Defaults to None. 
+ + Returns: + Mapping from node indices in inserted HUGR to their new indices + in this HUGR. + + Examples: + >>> d = dfg.Dfg() + >>> h = Hugr() + >>> h.insert_hugr(d.hugr) + {Node(0): Node(1), Node(1): Node(2), Node(2): Node(3)} + """ mapping: dict[Node, Node] = {} for idx, node_data in enumerate(hugr._nodes): @@ -274,7 +529,7 @@ def insert_hugr(self, hugr: Hugr, parent: ToNode | None = None) -> dict[Node, No mapping[node_data.parent] if node_data.parent else parent ) except KeyError as e: - raise ParentBeforeChild() from e + raise ParentBeforeChild from e mapping[Node(idx)] = self.add_node(node_data.op, node_parent) for src, dst in hugr._links.items(): @@ -285,9 +540,10 @@ def insert_hugr(self, hugr: Hugr, parent: ToNode | None = None) -> dict[Node, No return mapping def to_serial(self) -> SerialHugr: + """Serialize the HUGR.""" node_it = (node for node in self._nodes if node is not None) - def _serialise_link( + def _serialize_link( link: tuple[_SO, _SI], ) -> tuple[tuple[int, int], tuple[int, int]]: src, dst = link @@ -297,8 +553,8 @@ def _serialise_link( return SerialHugr( version="v1", # non contiguous indices will be erased - nodes=[node.to_serial(Node(idx), self) for idx, node in enumerate(node_it)], - edges=[_serialise_link(link) for link in self._links.items()], + nodes=[node.to_serial(Node(idx)) for idx, node in enumerate(node_it)], + edges=[_serialize_link(link) for link in self._links.items()], ) def _constrain_offset(self, p: P) -> int: @@ -317,6 +573,7 @@ def _constrain_offset(self, p: P) -> int: @classmethod def from_serial(cls, serial: SerialHugr) -> Hugr: + """Load a HUGR from a serialized form.""" assert serial.nodes, "Empty Hugr is invalid" hugr = Hugr.__new__(Hugr) diff --git a/hugr-py/src/hugr/node_port.py b/hugr-py/src/hugr/node_port.py index 23e10291a..0ef173bb6 100644 --- a/hugr-py/src/hugr/node_port.py +++ b/hugr-py/src/hugr/node_port.py @@ -1,19 +1,27 @@ +"""Node and port classes for Hugr graphs.""" + from __future__ import annotations 
 from dataclasses import dataclass, field, replace
 from enum import Enum
 from typing import (
+    TYPE_CHECKING,
     ClassVar,
-    Iterator,
+    Generic,
     Protocol,
-    overload,
     TypeVar,
-    Generic,
+    overload,
 )
+
 from typing_extensions import Self
 
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
 
 class Direction(Enum):
+    """Enum over port directions, INCOMING and OUTGOING."""
+
     INCOMING = 0
     OUTGOING = 1
 
@@ -27,23 +35,41 @@ class _Port:
 
 @dataclass(frozen=True, eq=True, order=True)
 class InPort(_Port):
+    """Incoming port, defined by the `node` it belongs to and the port `offset`."""
+
     direction: ClassVar[Direction] = Direction.INCOMING
 
+    def __repr__(self) -> str:
+        return f"InPort({self.node}, {self.offset})"
+
 
 class Wire(Protocol):
-    def out_port(self) -> OutPort: ...
+    """Protocol for objects that can provide a dataflow output port."""
+
+    def out_port(self) -> OutPort:
+        """OutPort corresponding to this :class:`Wire`."""
+        ...  # pragma: no cover
 
 
 @dataclass(frozen=True, eq=True, order=True)
 class OutPort(_Port, Wire):
+    """Outgoing port, defined by the `node` it belongs to and the port `offset`."""
+
     direction: ClassVar[Direction] = Direction.OUTGOING
 
     def out_port(self) -> OutPort:
         return self
 
+    def __repr__(self) -> str:
+        return f"OutPort({self.node}, {self.offset})"
+
 
 class ToNode(Wire, Protocol):
-    def to_node(self) -> Node: ...
+    """Protocol implemented by any object that can be treated as a :class:`Node`."""
+
+    def to_node(self) -> Node:
+        """Convert to a :class:`Node`."""
+        ...  # pragma: no cover
 
     @overload
     def __getitem__(self, index: int) -> OutPort: ...
@@ -57,16 +83,48 @@ def __getitem__(
     ) -> OutPort | Iterator[OutPort]:
         return self.to_node()._index(index)
 
-    def out_port(self) -> "OutPort":
+    def out_port(self) -> OutPort:
         return OutPort(self.to_node(), 0)
 
     def inp(self, offset: int) -> InPort:
+        """Generate an input port for this node.
+
+        Args:
+            offset: port offset.
+
+        Returns:
+            Incoming port for this node.
+ + Examples: + >>> Node(0).inp(1) + InPort(Node(0), 1) + """ return InPort(self.to_node(), offset) def out(self, offset: int) -> OutPort: + """Generate an output port for this node. + + Args: + offset: port offset. + + Returns: + Outgoing port for this node. + + Examples: + >>> Node(0).out(1) + OutPort(Node(0), 1) + """ return OutPort(self.to_node(), offset) def port(self, offset: int, direction: Direction) -> InPort | OutPort: + """Generate a port in `direction` for this node with `offset`. + + Examples: + >>> Node(0).port(1, Direction.INCOMING) + InPort(Node(0), 1) + >>> Node(0).port(1, Direction.OUTGOING) + OutPort(Node(0), 1) + """ if direction == Direction.INCOMING: return self.inp(offset) else: @@ -75,6 +133,10 @@ def port(self, offset: int, direction: Direction) -> InPort | OutPort: @dataclass(frozen=True, eq=True, order=True) class Node(ToNode): + """Node in hierarchical :class:`Hugr ` graph, + with globally unique index. + """ + idx: int _num_out_ports: int | None = field(default=None, compare=False) @@ -83,17 +145,16 @@ def _index( ) -> OutPort | Iterator[OutPort]: match index: case int(index): - if self._num_out_ports is not None: - if index >= self._num_out_ports: - raise IndexError("Index out of range") + if self._num_out_ports is not None and index >= self._num_out_ports: + msg = "Index out of range" + raise IndexError(msg) return self.out(index) case slice(): start = index.start or 0 stop = index.stop or self._num_out_ports if stop is None: - raise ValueError( - "Stop must be specified when number of outputs unknown" - ) + msg = "Stop must be specified when number of outputs unknown" + raise ValueError(msg) step = index.step or 1 return (self[i] for i in range(start, stop, step)) case tuple(xs): @@ -102,6 +163,9 @@ def _index( def to_node(self) -> Node: return self + def __repr__(self) -> str: + return f"Node({self.idx})" + P = TypeVar("P", InPort, OutPort) diff --git a/hugr-py/src/hugr/ops.py b/hugr-py/src/hugr/ops.py index 07ab362dc..ef9f86d91 
100644 --- a/hugr-py/src/hugr/ops.py +++ b/hugr-py/src/hugr/ops.py @@ -1,42 +1,69 @@ +"""Definitions of HUGR operations.""" + from __future__ import annotations from dataclasses import dataclass, field -from typing import Protocol, TYPE_CHECKING, Sequence, runtime_checkable, TypeVar -from hugr.serialization.ops import BaseOp +from typing import TYPE_CHECKING, Protocol, TypeVar, runtime_checkable + import hugr.serialization.ops as sops +from hugr import tys, val +from hugr.node_port import Direction, InPort, Node, OutPort, Wire from hugr.utils import ser_it -import hugr.tys as tys -from hugr.node_port import Node, InPort, OutPort, Wire -import hugr.val as val -from .exceptions import IncompleteOp if TYPE_CHECKING: - from hugr.hugr import Hugr + from collections.abc import Sequence + + from hugr.serialization.ops import BaseOp @dataclass class InvalidPort(Exception): + """Port is not valid for this operation.""" + port: InPort | OutPort + op: Op @property def msg(self) -> str: - return f"Invalid port {self.port}" + return f"Port {self.port} is invalid for operation {self.op}." @runtime_checkable class Op(Protocol): + """An abstract HUGR operation. Must be convertible + to a serialisable :class:`BaseOp`. + """ + @property - def num_out(self) -> int | None: - return None + def num_out(self) -> int: + """The number of output ports for this operation. + + Example: + >>> op = Const(val.TRUE) + >>> op.num_out + 1 + """ + ... # pragma: no cover - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> BaseOp: ... + def to_serial(self, parent: Node) -> BaseOp: + """Convert this operation to a serialisable form.""" + ... # pragma: no cover - def port_kind(self, port: InPort | OutPort) -> tys.Kind: ... + def port_kind(self, port: InPort | OutPort) -> tys.Kind: + """Get the kind of the given port. + Example: + >>> op = Const(val.TRUE) + >>> op.port_kind(OutPort(Node(0), 0)) + ConstKind(Bool) + """ + ... 
# pragma: no cover -def _sig_port_type(sig: tys.FunctionType, port: InPort | OutPort) -> tys.Type: - from hugr.node_port import Direction + def _invalid_port(self, port: InPort | OutPort) -> InvalidPort: + return InvalidPort(port, self) + +def _sig_port_type(sig: tys.FunctionType, port: InPort | OutPort) -> tys.Type: if port.direction == Direction.INCOMING: return sig.input[port.offset] return sig.output[port.offset] @@ -44,7 +71,15 @@ def _sig_port_type(sig: tys.FunctionType, port: InPort | OutPort) -> tys.Type: @runtime_checkable class DataflowOp(Op, Protocol): - def outer_signature(self) -> tys.FunctionType: ... + """Abstract dataflow operation. Can be assumed to have a signature and Value- + kind ports. + """ + + def outer_signature(self) -> tys.FunctionType: + """The external signature of this operation. Defines the valid external + connectivity of the node the operation belongs to. + """ + ... # pragma: no cover def port_kind(self, port: InPort | OutPort) -> tys.Kind: if port.offset == -1: @@ -52,32 +87,81 @@ def port_kind(self, port: InPort | OutPort) -> tys.Kind: return tys.ValueKind(self.port_type(port)) def port_type(self, port: InPort | OutPort) -> tys.Type: + """Get the type of the given dataflow port from the signature of the + operation. + + Example: + >>> op = Input([tys.Bool]) + >>> op.port_type(OutPort(Node(0), 0)) + Bool + + """ return _sig_port_type(self.outer_signature(), port) def __call__(self, *args) -> Command: + """Calling with incoming :class:`Wire` arguments returns a + :class:`Command` which can be used to wire the operation into a + dataflow graph. + """ return Command(self, list(args)) @runtime_checkable -class PartialOp(Protocol): - def set_in_types(self, types: tys.TypeRow) -> None: ... +class _PartialOp(Protocol): + def _set_in_types(self, types: tys.TypeRow) -> None: ... 
+ + +@dataclass +class IncompleteOp(Exception): + """Op types have not been set during building.""" + + op: Op + + @property + def msg(self) -> str: + return ( + f"Operation {self.op} is incomplete, may require set_in_types to be called." + ) + + +V = TypeVar("V") + + +def _check_complete(op, v: V | None) -> V: + if v is None: + raise IncompleteOp(op) + return v @dataclass(frozen=True) class Command: + """A :class:`DataflowOp` and its incoming :class:`Wire ` + arguments. + + Ephemeral: used to wire operations into a dataflow graph. + + Example: + >>> Noop()(Node(0).out(0)) + Command(op=Noop, incoming=[OutPort(Node(0), 0)]) + """ + op: DataflowOp incoming: list[Wire] @dataclass() class Input(DataflowOp): + """Input operation in dataflow graph. Outputs of this operation are the + inputs to the graph. + """ + types: tys.TypeRow @property - def num_out(self) -> int | None: + def num_out(self) -> int: return len(self.types) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.Input: + def to_serial(self, parent: Node) -> sops.Input: return sops.Input(parent=parent.idx, types=ser_it(self.types)) def outer_signature(self) -> tys.FunctionType: @@ -87,35 +171,33 @@ def __call__(self) -> Command: return super().__call__() -V = TypeVar("V") - - -def _check_complete(v: V | None) -> V: - if v is None: - raise IncompleteOp() - return v - - @dataclass() -class Output(DataflowOp, PartialOp): - _types: tys.TypeRow | None = None +class Output(DataflowOp, _PartialOp): + """Output operation in dataflow graph. Inputs of this operation are the + outputs of the graph. 
+ """ + + _types: tys.TypeRow | None = field(default=None, repr=False) + num_out: int = field(default=0, repr=False) @property def types(self) -> tys.TypeRow: - return _check_complete(self._types) + return _check_complete(self, self._types) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.Output: + def to_serial(self, parent: Node) -> sops.Output: return sops.Output(parent=parent.idx, types=ser_it(self.types)) def outer_signature(self) -> tys.FunctionType: return tys.FunctionType(input=self.types, output=[]) - def set_in_types(self, types: tys.TypeRow) -> None: + def _set_in_types(self, types: tys.TypeRow) -> None: self._types = types @dataclass() class Custom(DataflowOp): + """A non-core dataflow operation defined in an extension.""" + op_name: str signature: tys.FunctionType = field(default_factory=tys.FunctionType.empty) description: str = "" @@ -123,10 +205,10 @@ class Custom(DataflowOp): args: list[tys.TypeArg] = field(default_factory=list) @property - def num_out(self) -> int | None: + def num_out(self) -> int: return len(self.signature.output) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.CustomOp: + def to_serial(self, parent: Node) -> sops.CustomOp: return sops.CustomOp( parent=parent.idx, extension=self.extension, @@ -141,15 +223,22 @@ def outer_signature(self) -> tys.FunctionType: @dataclass() -class MakeTupleDef(DataflowOp, PartialOp): - _types: tys.TypeRow | None = None - num_out: int | None = 1 +class MakeTuple(DataflowOp, _PartialOp): + """Operation to create a tuple from a sequence of wires.""" + + _types: tys.TypeRow | None = field(default=None, repr=False) + num_out: int = field(default=1, repr=False) @property def types(self) -> tys.TypeRow: - return _check_complete(self._types) + """If set, the types of the tuple elements. + + Raises: + IncompleteOp: If the types have not been set. 
+ """ + return _check_complete(self, self._types) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.MakeTuple: + def to_serial(self, parent: Node) -> sops.MakeTuple: return sops.MakeTuple( parent=parent.idx, tys=ser_it(self.types), @@ -161,26 +250,33 @@ def __call__(self, *elements: Wire) -> Command: def outer_signature(self) -> tys.FunctionType: return tys.FunctionType(input=self.types, output=[tys.Tuple(*self.types)]) - def set_in_types(self, types: tys.TypeRow) -> None: + def _set_in_types(self, types: tys.TypeRow) -> None: self._types = types - -MakeTuple = MakeTupleDef() + def __repr__(self) -> str: + return "MakeTuple" + (f"({self._types})" if self._types is not None else "") @dataclass() -class UnpackTupleDef(DataflowOp, PartialOp): - _types: tys.TypeRow | None = None +class UnpackTuple(DataflowOp, _PartialOp): + """Operation to unpack a tuple into its elements.""" + + _types: tys.TypeRow | None = field(default=None, repr=False) @property def types(self) -> tys.TypeRow: - return _check_complete(self._types) + """If set, the types of the tuple elements. + + Raises: + IncompleteOp: If the types have not been set. 
+ """ + return _check_complete(self, self._types) @property - def num_out(self) -> int | None: + def num_out(self) -> int: return len(self.types) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.UnpackTuple: + def to_serial(self, parent: Node) -> sops.UnpackTuple: return sops.UnpackTuple( parent=parent.idx, tys=ser_it(self.types), @@ -190,25 +286,28 @@ def __call__(self, tuple_: Wire) -> Command: return super().__call__(tuple_) def outer_signature(self) -> tys.FunctionType: - return MakeTupleDef(self.types).outer_signature().flip() + return MakeTuple(self.types).outer_signature().flip() - def set_in_types(self, types: tys.TypeRow) -> None: + def _set_in_types(self, types: tys.TypeRow) -> None: (t,) = types assert isinstance(t, tys.Sum), f"Expected unary Sum, got {t}" (row,) = t.variant_rows self._types = row -UnpackTuple = UnpackTupleDef() - - @dataclass() class Tag(DataflowOp): + """Tag a row of incoming values to make them a variant of a sum type. + + Requires `sum_ty` to be set as it is not possible to extract all the variants from + just the input wires for one variant. + """ + tag: int sum_ty: tys.Sum - num_out: int | None = 1 + num_out: int = field(default=1, repr=False) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.Tag: + def to_serial(self, parent: Node) -> sops.Tag: return sops.Tag( parent=parent.idx, tag=self.tag, @@ -222,7 +321,13 @@ def outer_signature(self) -> tys.FunctionType: class DfParentOp(Op, Protocol): - def inner_signature(self) -> tys.FunctionType: ... + """Abstract parent of dataflow graph operations. Can be queried for the + dataflow signature of its child graph. + """ + + def inner_signature(self) -> tys.FunctionType: + """Inner signature of the child dataflow graph.""" + ... # pragma: no cover def _set_out_types(self, types: tys.TypeRow) -> None: ... @@ -231,23 +336,36 @@ def _inputs(self) -> tys.TypeRow: ... @dataclass class DFG(DfParentOp, DataflowOp): + """Simple dataflow graph operation. 
Outer signature matches inner signature.""" + + #: Inputs types of the operation. inputs: tys.TypeRow - _outputs: tys.TypeRow | None = None - extension_delta: tys.ExtensionSet = field(default_factory=list) + _outputs: tys.TypeRow | None = field(default=None, repr=False) + _extension_delta: tys.ExtensionSet = field(default_factory=list, repr=False) @property def outputs(self) -> tys.TypeRow: - return _check_complete(self._outputs) + """Output types of the operation. + + Raises: + IncompleteOp: If the outputs have not been set. + """ + return _check_complete(self, self._outputs) @property def signature(self) -> tys.FunctionType: - return tys.FunctionType(self.inputs, self.outputs, self.extension_delta) + """Signature of the operation. + + Raises: + IncompleteOp: If the outputs have not been set. + """ + return tys.FunctionType(self.inputs, self.outputs, self._extension_delta) @property - def num_out(self) -> int | None: + def num_out(self) -> int: return len(self.signature.output) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.DFG: + def to_serial(self, parent: Node) -> sops.DFG: return sops.DFG( parent=parent.idx, signature=self.signature.to_serial(), @@ -268,22 +386,35 @@ def _inputs(self) -> tys.TypeRow: @dataclass() class CFG(DataflowOp): + """Parent operation of a control flow graph.""" + + #: Inputs types of the operation. inputs: tys.TypeRow - _outputs: tys.TypeRow | None = None + _outputs: tys.TypeRow | None = field(default=None, repr=False) @property def outputs(self) -> tys.TypeRow: - return _check_complete(self._outputs) + """Output types of the operation, if set. + + Raises: + IncompleteOp: If the outputs have not been set. + """ + return _check_complete(self, self._outputs) @property def signature(self) -> tys.FunctionType: + """Dataflow signature of the CFG operation. + + Raises: + IncompleteOp: If the outputs have not been set. 
+ """ return tys.FunctionType(self.inputs, self.outputs) @property - def num_out(self) -> int | None: + def num_out(self) -> int: return len(self.outputs) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.CFG: + def to_serial(self, parent: Node) -> sops.CFG: return sops.CFG( parent=parent.idx, signature=self.signature.to_serial(), @@ -295,24 +426,40 @@ def outer_signature(self) -> tys.FunctionType: @dataclass class DataflowBlock(DfParentOp): + """Parent of non-entry basic block in a control flow graph.""" + + #: Inputs types of the innner dataflow graph. inputs: tys.TypeRow _sum: tys.Sum | None = None - _other_outputs: tys.TypeRow | None = None + _other_outputs: tys.TypeRow | None = field(default=None, repr=False) extension_delta: tys.ExtensionSet = field(default_factory=list) @property def sum_ty(self) -> tys.Sum: - return _check_complete(self._sum) + """If set, the sum type that defines the potential branching of the + block. + + + Raises: + IncompleteOp: If the sum type has not been set. + """ + return _check_complete(self, self._sum) @property def other_outputs(self) -> tys.TypeRow: - return _check_complete(self._other_outputs) + """The non-branching outputs of the block which are passed to all + successors. + + Raises: + IncompleteOp: If the outputs have not been set. + """ + return _check_complete(self, self._other_outputs) @property - def num_out(self) -> int | None: + def num_out(self) -> int: return len(self.sum_ty.variant_rows) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.DataflowBlock: + def to_serial(self, parent: Node) -> sops.DataflowBlock: return sops.DataflowBlock( parent=parent.idx, inputs=ser_it(self.inputs), @@ -338,19 +485,29 @@ def _inputs(self) -> tys.TypeRow: return self.inputs def nth_outputs(self, n: int) -> tys.TypeRow: + """The outputs passed to the nth successor of the block. + Concatenation of the nth variant of the sum type and the other outputs. 
+ """ return [*self.sum_ty.variant_rows[n], *self.other_outputs] @dataclass class ExitBlock(Op): - _cfg_outputs: tys.TypeRow | None = None - num_out: int | None = 0 + """Unique exit block of a control flow graph.""" + + _cfg_outputs: tys.TypeRow | None = field(default=None, repr=False) + num_out: int = field(default=0, repr=False) @property def cfg_outputs(self) -> tys.TypeRow: - return _check_complete(self._cfg_outputs) + """Output types of the parent control flow graph of this exit block. + + Raises: + IncompleteOp: If the outputs have not been set. + """ + return _check_complete(self, self._cfg_outputs) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.ExitBlock: + def to_serial(self, parent: Node) -> sops.ExitBlock: return sops.ExitBlock( parent=parent.idx, cfg_outputs=ser_it(self.cfg_outputs), @@ -362,10 +519,14 @@ def port_kind(self, port: InPort | OutPort) -> tys.Kind: @dataclass class Const(Op): + """A static constant value. Can be used with a :class:`LoadConst` to load into + a dataflow graph. + """ + val: val.Value - num_out: int | None = 1 + num_out: int = field(default=1, repr=False) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.Const: + def to_serial(self, parent: Node) -> sops.Const: return sops.Const( parent=parent.idx, v=self.val.to_serial_root(), @@ -376,56 +537,86 @@ def port_kind(self, port: InPort | OutPort) -> tys.Kind: case OutPort(_, 0): return tys.ConstKind(self.val.type_()) case _: - raise InvalidPort(port) + raise self._invalid_port(port) + + def __repr__(self) -> str: + return f"Const({self.val})" @dataclass class LoadConst(DataflowOp): - typ: tys.Type | None = None - num_out: int | None = 1 + """Load a constant value into a dataflow graph. Connects to a :class:`Const`.""" + _typ: tys.Type | None = None + num_out: int = field(default=1, repr=False) + + @property def type_(self) -> tys.Type: - return _check_complete(self.typ) + """The type of the loaded value. 
- def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.LoadConstant: + Raises: + IncompleteOp: If the type has not been set. + """ + return _check_complete(self, self._typ) + + def to_serial(self, parent: Node) -> sops.LoadConstant: return sops.LoadConstant( parent=parent.idx, - datatype=self.type_().to_serial_root(), + datatype=self.type_.to_serial_root(), ) def outer_signature(self) -> tys.FunctionType: - return tys.FunctionType(input=[], output=[self.type_()]) + return tys.FunctionType(input=[], output=[self.type_]) def port_kind(self, port: InPort | OutPort) -> tys.Kind: match port: case InPort(_, 0): - return tys.ConstKind(self.type_()) + return tys.ConstKind(self.type_) case OutPort(_, 0): - return tys.ValueKind(self.type_()) + return tys.ValueKind(self.type_) case _: - raise InvalidPort(port) + raise self._invalid_port(port) + + def __repr__(self) -> str: + return "LoadConst" + (f"({self._typ})" if self._typ is not None else "") @dataclass() class Conditional(DataflowOp): + """'Switch' operation on the variants of an incoming sum type, evaluating the + corresponding one of the child :class:`Case` operations. + """ + + #: Sum type to switch on. sum_ty: tys.Sum + #: Non-sum inputs that are passed to all cases. other_inputs: tys.TypeRow - _outputs: tys.TypeRow | None = None + _outputs: tys.TypeRow | None = field(default=None, repr=False) @property def outputs(self) -> tys.TypeRow: - return _check_complete(self._outputs) + """Outputs of the conditional, common to all cases. + + Raises: + IncompleteOp: If the outputs have not been set. + """ + return _check_complete(self, self._outputs) @property def signature(self) -> tys.FunctionType: + """Dataflow signature of the conditional operation. + + Raises: + IncompleteOp: If the outputs have not been set. 
+ """ inputs = [self.sum_ty, *self.other_inputs] return tys.FunctionType(inputs, self.outputs) @property - def num_out(self) -> int | None: + def num_out(self) -> int: return len(self.outputs) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.Conditional: + def to_serial(self, parent: Node) -> sops.Conditional: return sops.Conditional( parent=parent.idx, sum_rows=[ser_it(r) for r in self.sum_ty.variant_rows], @@ -437,20 +628,31 @@ def outer_signature(self) -> tys.FunctionType: return self.signature def nth_inputs(self, n: int) -> tys.TypeRow: + """The inputs passed to the nth child case. + Concatenation of the nth variant of the sum type and the other inputs. + """ return [*self.sum_ty.variant_rows[n], *self.other_inputs] @dataclass class Case(DfParentOp): + """Parent of a dataflow graph that is a branch of a :class:`Conditional`.""" + + #: Inputs types of the innner dataflow graph. inputs: tys.TypeRow - _outputs: tys.TypeRow | None = None - num_out: int | None = 0 + _outputs: tys.TypeRow | None = field(default=None, repr=False) + num_out: int = field(default=0, repr=False) @property def outputs(self) -> tys.TypeRow: - return _check_complete(self._outputs) + """Outputs of the case operation. + + Raises: + IncompleteOp: If the outputs have not been set. 
+ """ + return _check_complete(self, self._outputs) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.Case: + def to_serial(self, parent: Node) -> sops.Case: return sops.Case( parent=parent.idx, signature=self.inner_signature().to_serial() ) @@ -459,7 +661,7 @@ def inner_signature(self) -> tys.FunctionType: return tys.FunctionType(self.inputs, self.outputs) def port_kind(self, port: InPort | OutPort) -> tys.Kind: - raise InvalidPort(port) + raise self._invalid_port(port) def _set_out_types(self, types: tys.TypeRow) -> None: self._outputs = types @@ -470,20 +672,31 @@ def _inputs(self) -> tys.TypeRow: @dataclass class TailLoop(DfParentOp, DataflowOp): + """Tail controlled loop operation, child dataflow graph iterates while it + outputs the first variant of a sum type. + """ + + #: Types that are only inputs of the child graph. just_inputs: tys.TypeRow + #: Types that are appended to both inputs and outputs of the graph. rest: tys.TypeRow - _just_outputs: tys.TypeRow | None = None - extension_delta: tys.ExtensionSet = field(default_factory=list) + _just_outputs: tys.TypeRow | None = field(default=None, repr=False) + extension_delta: tys.ExtensionSet = field(default_factory=list, repr=False) @property def just_outputs(self) -> tys.TypeRow: - return _check_complete(self._just_outputs) + """Types that are only outputs of the child graph. + + Raises: + IncompleteOp: If the outputs have not been set. 
+ """ + return _check_complete(self, self._just_outputs) @property - def num_out(self) -> int | None: + def num_out(self) -> int: return len(self.just_outputs) + len(self.rest) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.TailLoop: + def to_serial(self, parent: Node) -> sops.TailLoop: return sops.TailLoop( parent=parent.idx, just_inputs=ser_it(self.just_inputs), @@ -514,23 +727,40 @@ def _inputs(self) -> tys.TypeRow: @dataclass class FuncDefn(DfParentOp): + """Function definition operation, parent of a dataflow graph that defines + the function. + """ + + #: function name name: str + #: input types of the function inputs: tys.TypeRow + # ? type parameters of the function if polymorphic params: list[tys.TypeParam] = field(default_factory=list) - _outputs: tys.TypeRow | None = None - num_out: int | None = 1 + _outputs: tys.TypeRow | None = field(default=None, repr=False) + num_out: int = field(default=1, repr=False) @property def outputs(self) -> tys.TypeRow: - return _check_complete(self._outputs) + """Output types of the function. + + Raises: + IncompleteOp: If the outputs have not been set. + """ + return _check_complete(self, self._outputs) @property def signature(self) -> tys.PolyFuncType: + """Polymorphic signature of the function. + + Raises: + IncompleteOp: If the outputs have not been set. 
+ """ return tys.PolyFuncType( self.params, tys.FunctionType(self.inputs, self.outputs) ) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.FuncDefn: + def to_serial(self, parent: Node) -> sops.FuncDefn: return sops.FuncDefn( parent=parent.idx, name=self.name, @@ -551,16 +781,20 @@ def port_kind(self, port: InPort | OutPort) -> tys.Kind: case OutPort(_, 0): return tys.FunctionKind(self.signature) case _: - raise InvalidPort(port) + raise self._invalid_port(port) @dataclass class FuncDecl(Op): + """Function declaration operation, defines the signature of a function.""" + + #: function name name: str + #: polymorphic function signature signature: tys.PolyFuncType - num_out: int | None = 0 + num_out: int = field(default=1, repr=False) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.FuncDecl: + def to_serial(self, parent: Node) -> sops.FuncDecl: return sops.FuncDecl( parent=parent.idx, name=self.name, @@ -572,47 +806,33 @@ def port_kind(self, port: InPort | OutPort) -> tys.Kind: case OutPort(_, 0): return tys.FunctionKind(self.signature) case _: - raise InvalidPort(port) + raise self._invalid_port(port) @dataclass class Module(Op): - num_out: int | None = 0 + """Root operation of a HUGR which corresponds to a full module definition.""" - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.Module: + num_out: int = field(default=0, repr=False) + + def to_serial(self, parent: Node) -> sops.Module: return sops.Module(parent=parent.idx) def port_kind(self, port: InPort | OutPort) -> tys.Kind: - raise InvalidPort(port) + raise self._invalid_port(port) class NoConcreteFunc(Exception): - pass - - -def _fn_instantiation( - signature: tys.PolyFuncType, - instantiation: tys.FunctionType | None = None, - type_args: Sequence[tys.TypeArg] | None = None, -) -> tuple[tys.FunctionType, list[tys.TypeArg]]: - if len(signature.params) == 0: - return signature.body, [] - - else: - # TODO substitute type args into signature to get 
instantiation - if instantiation is None: - raise NoConcreteFunc("Missing instantiation for polymorphic function.") - type_args = type_args or [] - - if len(signature.params) != len(type_args): - raise NoConcreteFunc("Mismatched number of type arguments.") - return instantiation, list(type_args) + """Could not instantiate a polymorphic function.""" @dataclass -class Call(Op): +class _CallOrLoad: + #: polymorphic function signature signature: tys.PolyFuncType + #: concrete function signature instantiation: tys.FunctionType + #: type arguments for polymorphic function type_args: list[tys.TypeArg] def __init__( @@ -622,11 +842,40 @@ def __init__( type_args: Sequence[tys.TypeArg] | None = None, ) -> None: self.signature = signature - self.instantiation, self.type_args = _fn_instantiation( - signature, instantiation, type_args - ) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.Call: + if len(signature.params) == 0: + self.instantiation = signature.body + self.type_args = [] + + else: + # TODO substitute type args into signature to get instantiation + if instantiation is None: + msg = "Missing instantiation for polymorphic function." + raise NoConcreteFunc(msg) + type_args = type_args or [] + + if len(signature.params) != len(type_args): + msg = "Mismatched number of type arguments." + raise NoConcreteFunc(msg) + self.instantiation = instantiation + self.type_args = list(type_args) + + +class Call(_CallOrLoad, Op): + """Call a function inside a dataflow graph. Connects to :class:`FuncDefn` or + :class:`FuncDecl` operations. + + Args: + signature: Polymorphic function signature. + instantiation: Concrete function signature. Defaults to None. + type_args: Type arguments for polymorphic function. Defaults to None. + + Raises: + NoConcreteFunc: If the signature is polymorphic and no instantiation + is provided. 
+ """ + + def to_serial(self, parent: Node) -> sops.Call: return sops.Call( parent=parent.idx, func_sig=self.signature.to_serial(), @@ -635,33 +884,42 @@ def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.Call: ) @property - def num_out(self) -> int | None: + def num_out(self) -> int: return len(self.signature.body.output) - def function_port_offset(self) -> int: + def _function_port_offset(self) -> int: return len(self.signature.body.input) def port_kind(self, port: InPort | OutPort) -> tys.Kind: match port: - case InPort(_, offset) if offset == self.function_port_offset(): + case InPort(_, offset) if offset == self._function_port_offset(): return tys.FunctionKind(self.signature) case _: return tys.ValueKind(_sig_port_type(self.instantiation, port)) @dataclass() -class CallIndirectDef(DataflowOp, PartialOp): +class CallIndirect(DataflowOp, _PartialOp): + """Higher order evaluation of a + :class:`FunctionType ` value. + """ + _signature: tys.FunctionType | None = None @property - def num_out(self) -> int | None: + def num_out(self) -> int: return len(self.signature.output) @property def signature(self) -> tys.FunctionType: - return _check_complete(self._signature) + """The signature of the function being called. + + Raises: + IncompleteOp: If the signature has not been set. 
+ """ + return _check_complete(self, self._signature) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.CallIndirect: + def to_serial(self, parent: Node) -> sops.CallIndirect: return sops.CallIndirect( parent=parent.idx, signature=self.signature.to_serial(), @@ -675,7 +933,7 @@ def outer_signature(self) -> tys.FunctionType: return tys.FunctionType(input=[sig, *sig.input], output=sig.output) - def set_in_types(self, types: tys.TypeRow) -> None: + def _set_in_types(self, types: tys.TypeRow) -> None: func_sig, *_ = types assert isinstance( func_sig, tys.FunctionType @@ -683,29 +941,23 @@ def set_in_types(self, types: tys.TypeRow) -> None: self._signature = func_sig -# rename to eval? -CallIndirect = CallIndirectDef() +class LoadFunc(_CallOrLoad, DataflowOp): + """Load a statically defined function as a higher order value. + Connects to :class:`FuncDefn` or :class:`FuncDecl` operations. + Args: + signature: Polymorphic function signature. + instantiation: Concrete function signature. Defaults to None. + type_args: Type arguments for polymorphic function. Defaults to None. -@dataclass -class LoadFunc(DataflowOp): - signature: tys.PolyFuncType - instantiation: tys.FunctionType - type_args: list[tys.TypeArg] - num_out: int | None = 1 + Raises: + NoConcreteFunc: If the signature is polymorphic and no instantiation + is provided. 
+ """ - def __init__( - self, - signature: tys.PolyFuncType, - instantiation: tys.FunctionType | None = None, - type_args: Sequence[tys.TypeArg] | None = None, - ) -> None: - self.signature = signature - self.instantiation, self.type_args = _fn_instantiation( - signature, instantiation, type_args - ) + num_out: int = field(default=1, repr=False) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.LoadFunction: + def to_serial(self, parent: Node) -> sops.LoadFunction: return sops.LoadFunction( parent=parent.idx, func_sig=self.signature.to_serial(), @@ -723,43 +975,50 @@ def port_kind(self, port: InPort | OutPort) -> tys.Kind: case OutPort(_, 0): return tys.ValueKind(self.instantiation) case _: - raise InvalidPort(port) + raise self._invalid_port(port) @dataclass -class NoopDef(DataflowOp, PartialOp): +class Noop(DataflowOp, _PartialOp): + """Identity operation that passes through its input.""" + _type: tys.Type | None = None - num_out: int | None = 1 + num_out: int = field(default=1, repr=False) @property def type_(self) -> tys.Type: - return _check_complete(self._type) + """The type of the input and output of the operation.""" + return _check_complete(self, self._type) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.Noop: + def to_serial(self, parent: Node) -> sops.Noop: return sops.Noop(parent=parent.idx, ty=self.type_.to_serial_root()) def outer_signature(self) -> tys.FunctionType: return tys.FunctionType.endo([self.type_]) - def set_in_types(self, types: tys.TypeRow) -> None: + def _set_in_types(self, types: tys.TypeRow) -> None: (t,) = types self._type = t - -Noop = NoopDef() + def __repr__(self) -> str: + return "Noop" + (f"({self._type})" if self._type is not None else "") @dataclass -class Lift(DataflowOp, PartialOp): +class Lift(DataflowOp, _PartialOp): + """Add an extension requirement to input values and pass them through.""" + + #: Extension added. 
new_extension: tys.ExtensionId - _type_row: tys.TypeRow | None = None - num_out: int | None = 1 + _type_row: tys.TypeRow | None = field(default=None, repr=False) + num_out: int = field(default=1, repr=False) @property def type_row(self) -> tys.TypeRow: - return _check_complete(self._type_row) + """Types of the input and output of the operation.""" + return _check_complete(self, self._type_row) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.Lift: + def to_serial(self, parent: Node) -> sops.Lift: return sops.Lift( parent=parent.idx, new_extension=self.new_extension, @@ -769,17 +1028,21 @@ def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.Lift: def outer_signature(self) -> tys.FunctionType: return tys.FunctionType.endo(self.type_row) - def set_in_types(self, types: tys.TypeRow) -> None: + def _set_in_types(self, types: tys.TypeRow) -> None: self._type_row = types @dataclass class AliasDecl(Op): + """Declare an external type alias.""" + + #: Alias name. name: str + #: Type bound. bound: tys.TypeBound - num_out: int | None = 0 + num_out: int = field(default=0, repr=False) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.AliasDecl: + def to_serial(self, parent: Node) -> sops.AliasDecl: return sops.AliasDecl( parent=parent.idx, name=self.name, @@ -787,16 +1050,20 @@ def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.AliasDecl: ) def port_kind(self, port: InPort | OutPort) -> tys.Kind: - raise InvalidPort(port) + raise self._invalid_port(port) @dataclass class AliasDefn(Op): + """Declare a type alias.""" + + #: Alias name. name: str + #: Type definition. 
definition: tys.Type - num_out: int | None = 0 + num_out: int = field(default=0, repr=False) - def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.AliasDefn: + def to_serial(self, parent: Node) -> sops.AliasDefn: return sops.AliasDefn( parent=parent.idx, name=self.name, @@ -804,4 +1071,4 @@ def to_serial(self, node: Node, parent: Node, hugr: Hugr) -> sops.AliasDefn: ) def port_kind(self, port: InPort | OutPort) -> tys.Kind: - raise InvalidPort(port) + raise self._invalid_port(port) diff --git a/hugr-py/src/hugr/serialization/__init__.py b/hugr-py/src/hugr/serialization/__init__.py index ad1db81e8..c3f433fb4 100644 --- a/hugr-py/src/hugr/serialization/__init__.py +++ b/hugr-py/src/hugr/serialization/__init__.py @@ -1,3 +1 @@ -from .serial_hugr import SerialHugr - -__all__ = ["SerialHugr"] +"""Serialized HUGR objects as pydantic models.""" diff --git a/hugr-py/src/hugr/serialization/ops.py b/hugr-py/src/hugr/serialization/ops.py index 65706b047..e778939c1 100644 --- a/hugr-py/src/hugr/serialization/ops.py +++ b/hugr-py/src/hugr/serialization/ops.py @@ -1,46 +1,52 @@ from __future__ import annotations + import inspect import sys from abc import ABC, abstractmethod from typing import Any, Literal -from pydantic import Field, RootModel, ConfigDict +from pydantic import ConfigDict, Field, RootModel + +from hugr.utils import deser_it from . 
import tys as stys from .tys import ( + ConfiguredBaseModel, ExtensionId, ExtensionSet, FunctionType, PolyFuncType, - Type, - TypeRow, SumType, + Type, TypeBound, - ConfiguredBaseModel, + TypeRow, +) +from .tys import ( classes as tys_classes, +) +from .tys import ( model_rebuild as tys_model_rebuild, ) -from hugr.utils import deser_it - NodeID = int class BaseOp(ABC, ConfiguredBaseModel): - """Base class for ops that store their node's input/output types""" + """Base class for ops that store their node's input/output types.""" # Parent node index of node the op belongs to, used only at serialization time parent: NodeID def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: """Hook to insert type information from the input and output ports into the - op""" + op. + """ def insert_child_dfg_signature(self, inputs: TypeRow, outputs: TypeRow) -> None: - """Hook to insert type information from a child dataflow graph""" + """Hook to insert type information from a child dataflow graph.""" def display_name(self) -> str: - """Name of the op for visualisation""" + """Name of the op for visualisation.""" return self.__class__.__name__ @abstractmethod @@ -128,14 +134,14 @@ class TupleValue(BaseValue): """A constant tuple value.""" v: Literal["Tuple"] = Field(default="Tuple", title="ValueTag") - vs: list["Value"] + vs: list[Value] def deserialize(self) -> val.Value: - return val.Tuple(deser_it((v.root for v in self.vs))) + return val.Tuple(*deser_it(v.root for v in self.vs)) class SumValue(BaseValue): - """A Sum variant + """A Sum variant. 
For any Sum type where this value meets the type of the variant indicated by the tag """ @@ -143,7 +149,7 @@ class SumValue(BaseValue): v: Literal["Sum"] = Field(default="Sum", title="ValueTag") tag: int typ: SumType - vs: list["Value"] + vs: list[Value] model_config = ConfigDict( json_schema_extra={ "description": ( @@ -155,7 +161,7 @@ class SumValue(BaseValue): def deserialize(self) -> val.Value: return val.Sum( - self.tag, self.typ.deserialize(), deser_it((v.root for v in self.vs)) + self.tag, self.typ.deserialize(), deser_it(v.root for v in self.vs) ) @@ -189,7 +195,8 @@ def deserialize(self) -> ops.Const: class DataflowBlock(BaseOp): """A CFG basic block node. The signature is that of the internal Dataflow - graph.""" + graph. + """ op: Literal["DataflowBlock"] = "DataflowBlock" inputs: TypeRow = Field(default_factory=list) @@ -224,14 +231,16 @@ def deserialize(self) -> ops.DataflowBlock: model_config = ConfigDict( json_schema_extra={ - "description": "A CFG basic block node. The signature is that of the internal Dataflow graph.", + "description": "A CFG basic block node." + " The signature is that of the internal Dataflow graph.", } ) class ExitBlock(BaseOp): """The single exit node of the CFG, has no children, stores the types of - the CFG node output.""" + the CFG node output. + """ op: Literal["ExitBlock"] = "ExitBlock" cfg_outputs: TypeRow @@ -239,7 +248,8 @@ class ExitBlock(BaseOp): model_config = ConfigDict( json_schema_extra={ # Needed to avoid random '\n's in the pydantic description - "description": "The single exit node of the CFG, has no children, stores the types of the CFG node output.", + "description": "The single exit node of the CFG, has no children," + " stores the types of the CFG node output.", } ) @@ -285,8 +295,7 @@ def deserialize(self) -> ops.Output: class Call(DataflowOp): - """ - Call a function directly. + """Call a function directly. 
The first port is connected to the def/declare of the function being called directly, with a `ConstE` edge. The signature of the remaining ports matches @@ -334,8 +343,8 @@ def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: assert len(fun_ty.output) == len(out_types) self.signature = fun_ty - def deserialize(self) -> ops.CallIndirectDef: - return ops.CallIndirectDef(self.signature.deserialize()) + def deserialize(self) -> ops.CallIndirect: + return ops.CallIndirect(self.signature.deserialize()) class LoadConstant(DataflowOp): @@ -362,7 +371,7 @@ def deserialize(self) -> ops.LoadFunc: (f_ty,) = signature.output assert isinstance( f_ty, tys.FunctionType - ), "Expected single funciton type output" + ), "Expected single function type output" return ops.LoadFunc( self.func_sig.deserialize(), f_ty, @@ -490,7 +499,8 @@ def deserialize(self) -> ops.CFG: class CustomOp(DataflowOp): """A user-defined operation that can be downcasted by the extensions that define - it.""" + it. + """ op: Literal["CustomOp"] = "CustomOp" extension: ExtensionId @@ -517,8 +527,8 @@ def deserialize(self) -> ops.Custom: # Needed to avoid random '\n's in the pydantic description json_schema_extra={ "description": ( - "A user-defined operation that can be downcasted by the extensions that " - "define it." + "A user-defined operation that can be downcasted by the extensions that" + " define it." 
) } ) @@ -536,8 +546,8 @@ def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: assert in_types[0] == out_types[0] self.ty = in_types[0] - def deserialize(self) -> ops.NoopDef: - return ops.NoopDef(self.ty.deserialize()) + def deserialize(self) -> ops.Noop: + return ops.Noop(self.ty.deserialize()) class MakeTuple(DataflowOp): @@ -552,8 +562,8 @@ def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: in_types = [] self.tys = list(in_types) - def deserialize(self) -> ops.MakeTupleDef: - return ops.MakeTupleDef(deser_it(self.tys)) + def deserialize(self) -> ops.MakeTuple: + return ops.MakeTuple(deser_it(self.tys)) class UnpackTuple(DataflowOp): @@ -565,8 +575,8 @@ class UnpackTuple(DataflowOp): def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: self.tys = list(out_types) - def deserialize(self) -> ops.UnpackTupleDef: - return ops.UnpackTupleDef(deser_it(self.tys)) + def deserialize(self) -> ops.UnpackTuple: + return ops.UnpackTuple(deser_it(self.tys)) class Tag(DataflowOp): @@ -682,7 +692,8 @@ class OpDef(ConfiguredBaseModel, populate_by_name=True): tys_model_rebuild(dict(classes)) -# needed to avoid circular imports -from hugr import ops # noqa: E402 -from hugr import val # noqa: E402 -from hugr import tys # noqa: E402 +from hugr import ( # noqa: E402 # needed to avoid circular imports + ops, + tys, + val, +) diff --git a/hugr-py/src/hugr/serialization/serial_hugr.py b/hugr-py/src/hugr/serialization/serial_hugr.py index 49bfbd2f7..619eaca90 100644 --- a/hugr-py/src/hugr/serialization/serial_hugr.py +++ b/hugr-py/src/hugr/serialization/serial_hugr.py @@ -1,11 +1,13 @@ from typing import Any, Literal -from pydantic import Field, ConfigDict +from pydantic import ConfigDict, Field -from .ops import NodeID, OpType, classes as ops_classes -from .tys import model_rebuild, ConfiguredBaseModel import hugr +from .ops import NodeID, OpType +from .ops import classes as ops_classes +from .tys import 
ConfiguredBaseModel, model_rebuild + Port = tuple[NodeID, int | None] # (node, offset) Edge = tuple[Port, Port] @@ -37,7 +39,8 @@ def get_version(cls) -> str: return cls(nodes=[], edges=[]).version @classmethod - def _pydantic_rebuild(cls, config: ConfigDict = ConfigDict(), **kwargs): + def _pydantic_rebuild(cls, config: ConfigDict | None = None, **kwargs): + config = config or ConfigDict() my_classes = dict(ops_classes) my_classes[cls.__name__] = cls model_rebuild(my_classes, config=config, **kwargs) diff --git a/hugr-py/src/hugr/serialization/testing_hugr.py b/hugr-py/src/hugr/serialization/testing_hugr.py index 32bf2b95f..43acd1d43 100644 --- a/hugr-py/src/hugr/serialization/testing_hugr.py +++ b/hugr-py/src/hugr/serialization/testing_hugr.py @@ -1,12 +1,16 @@ -from pydantic import ConfigDict from typing import Literal -from .tys import Type, SumType, PolyFuncType, ConfiguredBaseModel, model_rebuild -from .ops import Value, OpType, OpDef, classes as ops_classes + +from pydantic import ConfigDict + +from .ops import OpDef, OpType, Value +from .ops import classes as ops_classes +from .tys import ConfiguredBaseModel, PolyFuncType, SumType, Type, model_rebuild class TestingHugr(ConfiguredBaseModel): """A serializable representation of a Hugr Type, SumType, PolyFuncType, - Value, OpType. Intended for testing only.""" + Value, OpType. Intended for testing only. 
+ """ version: Literal["v1"] = "v1" typ: Type | None = None @@ -22,7 +26,8 @@ def get_version(cls) -> str: return cls().version @classmethod - def _pydantic_rebuild(cls, config: ConfigDict = ConfigDict(), **kwargs): + def _pydantic_rebuild(cls, config: ConfigDict | None = None, **kwargs): + config = config or ConfigDict() my_classes = dict(ops_classes) my_classes[cls.__name__] = cls model_rebuild(my_classes, config=config, **kwargs) diff --git a/hugr-py/src/hugr/serialization/tys.py b/hugr-py/src/hugr/serialization/tys.py index 053deefcd..d8f6688b9 100644 --- a/hugr-py/src/hugr/serialization/tys.py +++ b/hugr-py/src/hugr/serialization/tys.py @@ -1,24 +1,28 @@ from __future__ import annotations -from abc import ABC, abstractmethod import inspect import sys +from abc import ABC, abstractmethod from enum import Enum -from typing import Annotated, Any, Literal, Union, Mapping +from typing import TYPE_CHECKING, Annotated, Any, Literal -from hugr.utils import deser_it from pydantic import ( BaseModel, + ConfigDict, Field, RootModel, ValidationError, ValidationInfo, ValidatorFunctionWrapHandler, WrapValidator, - ConfigDict, ) from pydantic_core import PydanticCustomError +from hugr.utils import deser_it + +if TYPE_CHECKING: + from collections.abc import Mapping + def _json_custom_error_validator( value: Any, handler: ValidatorFunctionWrapHandler, _info: ValidationInfo @@ -36,8 +40,9 @@ def _json_custom_error_validator( try: return handler(value) except ValidationError as err: + msg = "invalid_json" raise PydanticCustomError( - "invalid_json", + msg, "Input is not valid json", ) from err @@ -68,7 +73,7 @@ def deserialize(self) -> tys.TypeParam: ... 
class TypeTypeParam(BaseTypeParam): tp: Literal["Type"] = "Type" - b: "TypeBound" + b: TypeBound def deserialize(self) -> tys.TypeTypeParam: return tys.TypeTypeParam(bound=self.b) @@ -84,7 +89,7 @@ def deserialize(self) -> tys.BoundedNatParam: class OpaqueParam(BaseTypeParam): tp: Literal["Opaque"] = "Opaque" - ty: "Opaque" + ty: Opaque def deserialize(self) -> tys.OpaqueParam: return tys.OpaqueParam(ty=self.ty.deserialize()) @@ -92,7 +97,7 @@ def deserialize(self) -> tys.OpaqueParam: class ListParam(BaseTypeParam): tp: Literal["List"] = "List" - param: "TypeParam" + param: TypeParam def deserialize(self) -> tys.ListParam: return tys.ListParam(param=self.param.deserialize()) @@ -100,7 +105,7 @@ def deserialize(self) -> tys.ListParam: class TupleParam(BaseTypeParam): tp: Literal["Tuple"] = "Tuple" - params: list["TypeParam"] + params: list[TypeParam] def deserialize(self) -> tys.TupleParam: return tys.TupleParam(params=deser_it(self.params)) @@ -144,7 +149,7 @@ def deserialize(self) -> tys.TypeArg: ... class TypeTypeArg(BaseTypeArg): tya: Literal["Type"] = "Type" - ty: "Type" + ty: Type def deserialize(self) -> tys.TypeTypeArg: return tys.TypeTypeArg(ty=self.ty.deserialize()) @@ -160,7 +165,7 @@ def deserialize(self) -> tys.BoundedNatArg: class OpaqueArg(BaseTypeArg): tya: Literal["Opaque"] = "Opaque" - typ: "Opaque" + typ: Opaque value: Any def deserialize(self) -> tys.OpaqueArg: @@ -169,7 +174,7 @@ def deserialize(self) -> tys.OpaqueArg: class SequenceArg(BaseTypeArg): tya: Literal["Sequence"] = "Sequence" - elems: list["TypeArg"] + elems: list[TypeArg] def deserialize(self) -> tys.SequenceArg: return tys.SequenceArg(elems=deser_it(self.elems)) @@ -222,7 +227,7 @@ def deserialize(self) -> tys.Type: ... 
class MultiContainer(BaseType): - ty: "Type" + ty: Type class Array(MultiContainer): @@ -251,14 +256,14 @@ class GeneralSum(BaseType): t: Literal["Sum"] = "Sum" s: Literal["General"] = "General" - rows: list["TypeRow"] + rows: list[TypeRow] def deserialize(self) -> tys.Sum: return tys.Sum(variant_rows=[[t.deserialize() for t in r] for r in self.rows]) class SumType(RootModel): - root: Annotated[Union[UnitSum, GeneralSum], Field(discriminator="s")] + root: Annotated[UnitSum | GeneralSum, Field(discriminator="s")] # This seems to be required for nested discriminated unions to work @property @@ -281,7 +286,7 @@ class Variable(BaseType): t: Literal["V"] = "V" i: int - b: "TypeBound" + b: TypeBound def deserialize(self) -> tys.Variable: return tys.Variable(idx=self.i, bound=self.b) @@ -289,11 +294,12 @@ def deserialize(self) -> tys.Variable: class RowVar(BaseType): """A variable standing for a row of some (unknown) number of types. - May occur only within a row; not a node input/output.""" + May occur only within a row; not a node input/output. + """ t: Literal["R"] = "R" i: int - b: "TypeBound" + b: TypeBound def deserialize(self) -> tys.RowVariable: return tys.RowVariable(idx=self.i, bound=self.b) @@ -310,17 +316,18 @@ def deserialize(self) -> tys.USize: class FunctionType(BaseType): """A graph encoded as a value. It contains a concrete signature and a set of - required resources.""" + required resources. + """ t: Literal["G"] = "G" - input: "TypeRow" # Value inputs of the function. - output: "TypeRow" # Value outputs of the function. + input: TypeRow # Value inputs of the function. + output: TypeRow # Value outputs of the function. 
# The extension requirements which are added by the operation extension_reqs: ExtensionSet = Field(default_factory=ExtensionSet) @classmethod - def empty(cls) -> "FunctionType": + def empty(cls) -> FunctionType: return FunctionType(input=[], output=[], extension_reqs=[]) def deserialize(self) -> tys.FunctionType: @@ -343,7 +350,8 @@ def deserialize(self) -> tys.FunctionType: class PolyFuncType(BaseType): """A polymorphic type scheme, i.e. of a FuncDecl, FuncDefn or OpDef. - (Nodes/operations in the Hugr are not polymorphic.)""" + (Nodes/operations in the Hugr are not polymorphic.). + """ # The declared type parameters, i.e., these must be instantiated with the same # number of TypeArgs before the function can be called. This defines the indices @@ -354,7 +362,7 @@ class PolyFuncType(BaseType): body: FunctionType @classmethod - def empty(cls) -> "PolyFuncType": + def empty(cls) -> PolyFuncType: return PolyFuncType(params=[], body=FunctionType.empty()) def deserialize(self) -> tys.PolyFuncType: @@ -380,7 +388,7 @@ class TypeBound(Enum): Any = "A" @staticmethod - def join(*bs: "TypeBound") -> "TypeBound": + def join(*bs: TypeBound) -> TypeBound: """Computes the least upper bound for a sequence of bounds.""" res = TypeBound.Eq for b in bs: @@ -410,7 +418,7 @@ def deserialize(self) -> tys.Opaque: class Alias(BaseType): - """An Alias Type""" + """An Alias Type.""" t: Literal["Alias"] = "Alias" bound: TypeBound @@ -430,7 +438,7 @@ class Qubit(BaseType): t: Literal["Q"] = "Q" - def deserialize(self) -> tys.QubitDef: + def deserialize(self) -> tys._QubitDef: return tys.Qubit @@ -475,9 +483,10 @@ def deserialize(self) -> tys.Type: def model_rebuild( classes: Mapping[str, type], - config: ConfigDict = ConfigDict(), + config: ConfigDict | None = None, **kwargs, ): + config = config or ConfigDict() for c in classes.values(): if issubclass(c, ConfiguredBaseModel): c.update_model_config(config) diff --git a/hugr-py/src/hugr/tys.py b/hugr-py/src/hugr/tys.py index 
cfbb7c294..7ad773b49 100644 --- a/hugr-py/src/hugr/tys.py +++ b/hugr-py/src/hugr/tys.py @@ -1,8 +1,12 @@ +"""HUGR edge kinds, types, type parameters and type arguments.""" + from __future__ import annotations + from dataclasses import dataclass, field +from typing import Any, Protocol, runtime_checkable + import hugr.serialization.tys as stys from hugr.utils import ser_it -from typing import Any, Protocol, runtime_checkable ExtensionId = stys.ExtensionId ExtensionSet = stys.ExtensionSet @@ -10,18 +14,22 @@ class TypeParam(Protocol): - """A type parameter.""" + """A HUGR type parameter.""" - def to_serial(self) -> stys.BaseTypeParam: ... + def to_serial(self) -> stys.BaseTypeParam: + """Convert to serialisable model.""" + ... # pragma: no cover def to_serial_root(self) -> stys.TypeParam: return stys.TypeParam(root=self.to_serial()) # type: ignore[arg-type] class TypeArg(Protocol): - """A type argument.""" + """A HUGR type argument, which can be bound to a :class:TypeParam.""" - def to_serial(self) -> stys.BaseTypeArg: ... + def to_serial(self) -> stys.BaseTypeArg: + """Convert to serialisable model.""" + ... # pragma: no cover def to_serial_root(self) -> stys.TypeArg: return stys.TypeArg(root=self.to_serial()) # type: ignore[arg-type] @@ -29,17 +37,26 @@ def to_serial_root(self) -> stys.TypeArg: @runtime_checkable class Type(Protocol): - """A type.""" + """A HUGR type.""" - def to_serial(self) -> stys.BaseType: ... + def to_serial(self) -> stys.BaseType: + """Convert to serialisable model.""" + ... # pragma: no cover def to_serial_root(self) -> stys.Type: return stys.Type(root=self.to_serial()) # type: ignore[arg-type] def type_arg(self) -> TypeTypeArg: + """The :class:`TypeTypeArg` for this type. + + Example: + >>> Qubit.type_arg() + TypeTypeArg(ty=Qubit) + """ return TypeTypeArg(self) +#: Row of types. 
TypeRow = list[Type] # -------------------------------------------- @@ -49,6 +66,8 @@ def type_arg(self) -> TypeTypeArg: @dataclass(frozen=True) class TypeTypeParam(TypeParam): + """A type parameter indicating a type with a given bound.""" + bound: TypeBound def to_serial(self) -> stys.TypeTypeParam: @@ -57,6 +76,8 @@ def to_serial(self) -> stys.TypeTypeParam: @dataclass(frozen=True) class BoundedNatParam(TypeParam): + """A type parameter indicating a natural number with an optional upper bound.""" + upper_bound: int | None def to_serial(self) -> stys.BoundedNatParam: @@ -65,6 +86,8 @@ def to_serial(self) -> stys.BoundedNatParam: @dataclass(frozen=True) class OpaqueParam(TypeParam): + """Opaque type parameter.""" + ty: Opaque def to_serial(self) -> stys.OpaqueParam: @@ -73,6 +96,8 @@ def to_serial(self) -> stys.OpaqueParam: @dataclass(frozen=True) class ListParam(TypeParam): + """Type parameter which requires a list of type arguments.""" + param: TypeParam def to_serial(self) -> stys.ListParam: @@ -81,6 +106,8 @@ def to_serial(self) -> stys.ListParam: @dataclass(frozen=True) class TupleParam(TypeParam): + """Type parameter which requires a tuple of type arguments.""" + params: list[TypeParam] def to_serial(self) -> stys.TupleParam: @@ -89,6 +116,8 @@ def to_serial(self) -> stys.TupleParam: @dataclass(frozen=True) class ExtensionsParam(TypeParam): + """An extension set parameter.""" + def to_serial(self) -> stys.ExtensionsParam: return stys.ExtensionsParam() @@ -100,6 +129,8 @@ def to_serial(self) -> stys.ExtensionsParam: @dataclass(frozen=True) class TypeTypeArg(TypeArg): + """A type argument for a :class:`TypeTypeParam`.""" + ty: Type def to_serial(self) -> stys.TypeTypeArg: @@ -108,6 +139,8 @@ def to_serial(self) -> stys.TypeTypeArg: @dataclass(frozen=True) class BoundedNatArg(TypeArg): + """A type argument for a :class:`BoundedNatParam`.""" + n: int def to_serial(self) -> stys.BoundedNatArg: @@ -116,6 +149,8 @@ def to_serial(self) -> stys.BoundedNatArg: 
@dataclass(frozen=True) class OpaqueArg(TypeArg): + """An opaque type argument for a :class:`OpaqueParam`.""" + ty: Opaque value: Any @@ -125,6 +160,8 @@ def to_serial(self) -> stys.OpaqueArg: @dataclass(frozen=True) class SequenceArg(TypeArg): + """Sequence of type arguments, for a :class:`ListParam` or :class:`TupleParam`.""" + elems: list[TypeArg] def to_serial(self) -> stys.SequenceArg: @@ -133,6 +170,8 @@ def to_serial(self) -> stys.SequenceArg: @dataclass(frozen=True) class ExtensionsArg(TypeArg): + """Type argument for an :class:`ExtensionsParam`.""" + extensions: ExtensionSet def to_serial(self) -> stys.ExtensionsArg: @@ -141,6 +180,8 @@ def to_serial(self) -> stys.ExtensionsArg: @dataclass(frozen=True) class VariableArg(TypeArg): + """A type argument variable.""" + idx: int param: TypeParam @@ -155,6 +196,8 @@ def to_serial(self) -> stys.VariableArg: @dataclass(frozen=True) class Array(Type): + """Prelude fixed `size` array of `ty` elements.""" + ty: Type size: int @@ -164,6 +207,11 @@ def to_serial(self) -> stys.Array: @dataclass() class Sum(Type): + """Algebraic sum-over-product type. Instances of this type correspond to + tuples (products) over one of the `variant_rows` in the sum type, tagged by + the index of the row. 
+ """ + variant_rows: list[TypeRow] def to_serial(self) -> stys.GeneralSum: @@ -175,9 +223,14 @@ def as_tuple(self) -> Tuple: ), "Sum type must have exactly one row to be converted to a Tuple" return Tuple(*self.variant_rows[0]) + def __repr__(self) -> str: + return f"Sum({self.variant_rows})" + @dataclass() class UnitSum(Sum): + """Simple :class:`Sum` type with `size` variants of empty rows.""" + size: int def __init__(self, size: int): @@ -187,15 +240,31 @@ def __init__(self, size: int): def to_serial(self) -> stys.UnitSum: # type: ignore[override] return stys.UnitSum(size=self.size) + def __repr__(self) -> str: + if self == Bool: + return "Bool" + elif self == Unit: + return "Unit" + return f"UnitSum({self.size})" + @dataclass() class Tuple(Sum): + """Product type with `tys` elements. Instances of this type correspond to + :class:`Sum` with a single variant. + """ + def __init__(self, *tys: Type): self.variant_rows = [list(tys)] + def __repr__(self) -> str: + return f"Tuple{tuple(self.variant_rows[0])}" + @dataclass(frozen=True) class Variable(Type): + """A type variable with a given bound, identified by index.""" + idx: int bound: TypeBound @@ -205,6 +274,8 @@ def to_serial(self) -> stys.Variable: @dataclass(frozen=True) class RowVariable(Type): + """A type variable standing in for a row of types, identified by index.""" + idx: int bound: TypeBound @@ -214,12 +285,16 @@ def to_serial(self) -> stys.RowVar: @dataclass(frozen=True) class USize(Type): + """The Prelude unsigned size type.""" + def to_serial(self) -> stys.USize: return stys.USize() @dataclass(frozen=True) class Alias(Type): + """Type alias.""" + name: str bound: TypeBound @@ -229,6 +304,10 @@ def to_serial(self) -> stys.Alias: @dataclass(frozen=True) class FunctionType(Type): + """A function type, defined by input types, + output types and extension requirements. 
+ """ + input: TypeRow output: TypeRow extension_reqs: ExtensionSet = field(default_factory=ExtensionSet) @@ -242,18 +321,43 @@ def to_serial(self) -> stys.FunctionType: @classmethod def empty(cls) -> FunctionType: + """Generate an empty function type. + + Example: + >>> FunctionType.empty() + FunctionType([], []) + """ return cls(input=[], output=[]) @classmethod def endo(cls, tys: TypeRow) -> FunctionType: + """Function type with the same input and output types. + + Example: + >>> FunctionType.endo([Qubit]) + FunctionType([Qubit], [Qubit]) + """ return cls(input=tys, output=tys) def flip(self) -> FunctionType: + """Return a new function type with input and output types swapped. + + Example: + >>> FunctionType([Qubit], [Bool]).flip() + FunctionType([Bool], [Qubit]) + """ return FunctionType(input=list(self.output), output=list(self.input)) + def __repr__(self) -> str: + return f"FunctionType({self.input}, {self.output})" + @dataclass(frozen=True) class PolyFuncType(Type): + """Polymorphic function type or type scheme. Defined by a list of type + parameters that may appear in the :class:`FunctionType` body. + """ + params: list[TypeParam] body: FunctionType @@ -265,6 +369,8 @@ def to_serial(self) -> stys.PolyFuncType: @dataclass class Opaque(Type): + """Opaque type, identified by `id` and with optional type arguments and bound.""" + id: str bound: TypeBound args: list[TypeArg] = field(default_factory=list) @@ -280,43 +386,86 @@ def to_serial(self) -> stys.Opaque: @dataclass -class QubitDef(Type): +class _QubitDef(Type): def to_serial(self) -> stys.Qubit: return stys.Qubit() + def __repr__(self) -> str: + return "Qubit" -Qubit = QubitDef() + +#: Qubit type. +Qubit = _QubitDef() +#: Boolean type (:class:`UnitSum` of size 2). Bool = UnitSum(size=2) +#: Unit type (:class:`UnitSum` of size 1). Unit = UnitSum(size=1) @dataclass(frozen=True) class ValueKind: + """Dataflow value edges.""" + + #: Type of the value. 
ty: Type + def __repr__(self) -> str: + return f"ValueKind({self.ty})" + @dataclass(frozen=True) class ConstKind: + """Static constant value edges.""" + + #: Type of the constant. ty: Type + def __repr__(self) -> str: + return f"ConstKind({self.ty})" + @dataclass(frozen=True) class FunctionKind: + """Statically defined function edges.""" + + #: Type of the function. ty: PolyFuncType + def __repr__(self) -> str: + return f"FunctionKind({self.ty})" + @dataclass(frozen=True) -class CFKind: ... +class CFKind: + """Control flow edges.""" @dataclass(frozen=True) -class OrderKind: ... +class OrderKind: + """State order edges.""" +#: The kind of a HUGR graph edge. Kind = ValueKind | ConstKind | FunctionKind | CFKind | OrderKind def get_first_sum(types: TypeRow) -> tuple[Sum, TypeRow]: + """Check the first type in a row of types is a :class:`Sum`, returning it + and the rest. + + Args: + types: row of types. + + Raises: + AssertionError: if the first type is not a :class:`Sum`. + + Example: + >>> get_first_sum([UnitSum(3), Qubit]) + (UnitSum(3), [Qubit]) + >>> get_first_sum([Qubit, UnitSum(3)]) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + AssertionError: Expected Sum, got Qubit + """ (sum_, *other) = types assert isinstance(sum_, Sum), f"Expected Sum, got {sum_}" return sum_, other diff --git a/hugr-py/src/hugr/utils.py b/hugr-py/src/hugr/utils.py index dd4bdae1a..6c892db76 100644 --- a/hugr-py/src/hugr/utils.py +++ b/hugr-py/src/hugr/utils.py @@ -1,24 +1,63 @@ -from collections.abc import Hashable, ItemsView, MutableMapping -from dataclasses import dataclass, field -from typing import Generic, Iterable, Protocol, TypeVar +"""Shared utility classes and functions.""" +from collections.abc import Hashable, ItemsView, Iterable, Mapping, MutableMapping +from dataclasses import dataclass, field +from typing import Generic, Protocol, TypeVar L = TypeVar("L", bound=Hashable) R = TypeVar("R", bound=Hashable) +class NotBijection(Exception): + """Initial 
map is not a bijection.""" + + @dataclass() class BiMap(MutableMapping, Generic[L, R]): + """Bidirectional map backed by two dictionaries, between left types `L` and + right types `R`. + """ + fwd: dict[L, R] = field(default_factory=dict) bck: dict[R, L] = field(default_factory=dict) + def __init__(self, fwd: Mapping[L, R] | None = None) -> None: + """Initialize a bidirectional map. + + Args: + fwd: Left to right mapping. Defaults to empty. + + Raises: + NotBijection: If the initial map is not a bijection. + """ + fwd = fwd or {} + if len(fwd) != len(set(fwd.values())): + raise NotBijection + self.fwd = dict(fwd) + self.bck = {v: k for k, v in fwd.items()} + def __getitem__(self, key: L) -> R: + """Get the right value for a left key. + + Args: + key: Left key. + + Raises: + KeyError: If the key is not found. + + Example: + >>> bm = BiMap({"a": 1}) + >>> bm["a"] + 1 + """ return self.fwd[key] def __setitem__(self, key: L, value: R) -> None: + """See :meth:`insert_left`.""" self.insert_left(key, value) def __delitem__(self, key: L) -> None: + """See :meth:`delete_left`.""" self.delete_left(key) def __iter__(self): @@ -28,15 +67,51 @@ def __len__(self) -> int: return len(self.fwd) def items(self) -> ItemsView[L, R]: + """Iterator over left, right pairs. + + Example: + >>> bm = BiMap({"a": 1, "b": 2}) + >>> list(bm.items()) + [('a', 1), ('b', 2)] + """ return self.fwd.items() def get_left(self, key: R) -> L | None: + """Get a left value using a right key. + + Example: + >>> bm = BiMap({"a": 1}) + >>> bm.get_left(1) + 'a' + >>> bm.get_left(2) + """ return self.bck.get(key) def get_right(self, key: L) -> R | None: + """Get a right value using a left key. + + Example: + >>> bm = BiMap({"a": 1}) + >>> bm.get_right("a") + 1 + >>> bm.get_right("b") + """ return self.fwd.get(key) def insert_left(self, key: L, value: R) -> None: + """Insert a left key and right value. + If the key or value already exist, the existing key-value pair is replaced. + + Args: + key: Left key. 
+ value: Right value. + + Example: + >>> bm = BiMap() + >>> bm.insert_left("a", 1) + >>> bm["a"] + 1 + """ if (existing_key := self.bck.get(value)) is not None: del self.fwd[existing_key] if (existing_value := self.fwd.get(key)) is not None: @@ -45,31 +120,85 @@ def insert_left(self, key: L, value: R) -> None: self.bck[value] = key def insert_right(self, key: R, value: L) -> None: + """Insert a right key and left value. + If the key or value already exist, the existing key-value pair is replaced. + + Args: + key: Right key. + value: Left value. + + Example: + >>> bm = BiMap() + >>> bm.insert_right(1, "a") + >>> bm["a"] + 1 + """ self.insert_left(value, key) def delete_left(self, key: L) -> None: + """Delete a left key and its right value. + + Args: + key: Left key. + + Raises: + KeyError: If the key is not found. + + Example: + >>> bm = BiMap({"a": 1}) + >>> bm.delete_left("a") + >>> bm + BiMap({}) + """ del self.bck[self.fwd[key]] del self.fwd[key] def delete_right(self, key: R) -> None: + """Delete a right key and its left value. + + Args: + key: Right key. + + Raises: + KeyError: If the key is not found. + + Example: + >>> bm = BiMap({"a": 1}) + >>> bm.delete_right(1) + >>> bm + BiMap({}) + """ del self.fwd[self.bck[key]] del self.bck[key] + def __repr__(self) -> str: + return f"BiMap({self.fwd})" + S = TypeVar("S", covariant=True) class SerCollection(Protocol[S]): - def to_serial_root(self) -> S: ... + """Protocol for serialisable objects.""" + + def to_serial_root(self) -> S: + """Convert to serialisable root model.""" + ... # pragma: no cover class DeserCollection(Protocol[S]): - def deserialize(self) -> S: ... + """Protocol for deserialisable objects.""" + + def deserialize(self) -> S: + """Deserialize from model.""" + ... 
# pragma: no cover def ser_it(it: Iterable[SerCollection[S]]) -> list[S]: + """Serialize an iterable of serializable objects.""" return [v.to_serial_root() for v in it] def deser_it(it: Iterable[DeserCollection[S]]) -> list[S]: + """Deserialize an iterable of deserializable objects.""" return [v.deserialize() for v in it] diff --git a/hugr-py/src/hugr/val.py b/hugr-py/src/hugr/val.py index deb898c09..9d6d9630f 100644 --- a/hugr-py/src/hugr/val.py +++ b/hugr-py/src/hugr/val.py @@ -1,9 +1,13 @@ +"""HUGR values, used for static constants in HUGR programs.""" + from __future__ import annotations + from dataclasses import dataclass, field -from typing import Any, Protocol, runtime_checkable, TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable + import hugr.serialization.ops as sops import hugr.serialization.tys as stys -import hugr.tys as tys +from hugr import tys from hugr.utils import ser_it if TYPE_CHECKING: @@ -12,19 +16,45 @@ @runtime_checkable class Value(Protocol): - def to_serial(self) -> sops.BaseValue: ... + """Abstract value definition. Must be serialisable into a HUGR value.""" + + def to_serial(self) -> sops.BaseValue: + """Convert to serialisable model.""" + ... # pragma: no cover + def to_serial_root(self) -> sops.Value: return sops.Value(root=self.to_serial()) # type: ignore[arg-type] - def type_(self) -> tys.Type: ... + def type_(self) -> tys.Type: + """Report the type of the value. + + Example: + >>> TRUE.type_() + Bool + """ + ... # pragma: no cover @dataclass class Sum(Value): + """Sum-of-product value. + + Example: + >>> Sum(0, tys.Sum([[tys.Bool], [tys.Unit]]), [TRUE]) + Sum(tag=0, typ=Sum([[Bool], [Unit]]), vals=[TRUE]) + """ + + #: Tag identifying the variant. tag: int + #: Type of the sum: defines all possible variants. typ: tys.Sum + #: The values of this variant row. 
vals: list[Value] + @property + def n_variants(self) -> int: + return len(self.typ.variant_rows) + def type_(self) -> tys.Sum: return self.typ @@ -36,34 +66,93 @@ def to_serial(self) -> sops.SumValue: ) -def bool_value(b: bool) -> Sum: - return Sum( - tag=int(b), - typ=tys.Bool, - vals=[], - ) +class UnitSum(Sum): + """Simple :class:`Sum` with each variant being an empty row. + + Example: + >>> UnitSum(0, 3) + UnitSum(0, 3) + >>> UnitSum(0, 1) + Unit + >>> assert UnitSum(0, 2) == FALSE + >>> assert UnitSum(1, 2) == TRUE + """ + + def __init__(self, tag: int, size: int): + super().__init__( + tag=tag, + typ=tys.UnitSum(size), + vals=[], + ) + def __repr__(self) -> str: + if self == TRUE: + return "TRUE" + if self == FALSE: + return "FALSE" + if self == Unit: + return "Unit" + return f"UnitSum({self.tag}, {self.n_variants})" -Unit = Sum(0, tys.Unit, []) + +def bool_value(b: bool) -> UnitSum: + """Convert a python bool to a HUGR boolean value. + + Example: + >>> bool_value(True) + TRUE + >>> bool_value(False) + FALSE + """ + return UnitSum(int(b), 2) + + +#: HUGR unit type. Sum with a single empty row variant. +Unit = UnitSum(0, 1) +#: HUGR true value. TRUE = bool_value(True) +#: HUGR false value. FALSE = bool_value(False) @dataclass -class Tuple(Value): +class Tuple(Sum): + """Tuple or product value, defined by a list of values. + Internally a :class:`Sum` with a single variant row. + + Example: + >>> tup = Tuple(TRUE, FALSE) + >>> tup + Tuple(TRUE, FALSE) + >>> tup.type_() + Tuple(Bool, Bool) + + """ + + #: The values of this tuple. 
vals: list[Value] - def type_(self) -> tys.Tuple: - return tys.Tuple(*(v.type_() for v in self.vals)) + def __init__(self, *vals: Value): + val_list = list(vals) + super().__init__( + tag=0, typ=tys.Tuple(*(v.type_() for v in val_list)), vals=val_list + ) - def to_serial(self) -> sops.TupleValue: + # sops.TupleValue isn't an instance of sops.SumValue + # so mypy doesn't like the override of Sum.to_serial + def to_serial(self) -> sops.TupleValue: # type: ignore[override] return sops.TupleValue( vs=ser_it(self.vals), ) + def __repr__(self) -> str: + return f"Tuple({', '.join(map(repr, self.vals))})" + @dataclass class Function(Value): + """Higher order function value, defined by a :class:`Hugr `.""" + body: Hugr def type_(self) -> tys.FunctionType: @@ -77,8 +166,13 @@ def to_serial(self) -> sops.FunctionValue: @dataclass class Extension(Value): + """Non-core extension value.""" + + #: Value name. name: str + #: Value type. typ: tys.Type + #: Value payload. val: Any extensions: tys.ExtensionSet = field(default_factory=tys.ExtensionSet) @@ -94,7 +188,11 @@ def to_serial(self) -> sops.ExtensionValue: class ExtensionValue(Value, Protocol): - def to_value(self) -> Extension: ... + """Protocol which types can implement to be a HUGR extension value.""" + + def to_value(self) -> Extension: + """Convert to a HUGR extension value.""" + ... 
# pragma: no cover def type_(self) -> tys.Type: return self.to_value().type_() diff --git a/hugr-py/tests/conftest.py b/hugr-py/tests/conftest.py index 33f792d0b..f1eff88b7 100644 --- a/hugr-py/tests/conftest.py +++ b/hugr-py/tests/conftest.py @@ -1,17 +1,19 @@ from __future__ import annotations -from dataclasses import dataclass, field -import subprocess +import json import os import pathlib -from hugr.node_port import Wire +import subprocess +from dataclasses import dataclass, field +from typing import TYPE_CHECKING +from hugr import tys, val from hugr.hugr import Hugr -from hugr.ops import Custom, Command -from hugr.serialization import SerialHugr -import hugr.tys as tys -import hugr.val as val -import json +from hugr.ops import Command, Custom +from hugr.serialization.serial_hugr import SerialHugr + +if TYPE_CHECKING: + from hugr.node_port import Wire def int_t(width: int) -> tys.Opaque: @@ -39,12 +41,15 @@ class LogicOps(Custom): extension: tys.ExtensionId = "logic" +_NotSig = tys.FunctionType.endo([tys.Bool]) + + # TODO get from YAML @dataclass class NotDef(LogicOps): - num_out: int | None = 1 + num_out: int = 1 op_name: str = "Not" - signature: tys.FunctionType = tys.FunctionType.endo([tys.Bool]) + signature: tys.FunctionType = _NotSig def __call__(self, a: Wire) -> Command: return super().__call__(a) @@ -58,11 +63,14 @@ class QuantumOps(Custom): extension: tys.ExtensionId = "tket2.quantum" +_OneQbSig = tys.FunctionType.endo([tys.Qubit]) + + @dataclass class OneQbGate(QuantumOps): op_name: str - num_out: int | None = 1 - signature: tys.FunctionType = tys.FunctionType.endo([tys.Qubit]) + num_out: int = 1 + signature: tys.FunctionType = _OneQbSig def __call__(self, q: Wire) -> Command: return super().__call__(q) @@ -70,12 +78,14 @@ def __call__(self, q: Wire) -> Command: H = OneQbGate("H") +_MeasSig = tys.FunctionType([tys.Qubit], [tys.Qubit, tys.Bool]) + @dataclass class MeasureDef(QuantumOps): op_name: str = "Measure" - num_out: int | None = 2 - signature: 
tys.FunctionType = tys.FunctionType([tys.Qubit], [tys.Qubit, tys.Bool]) + num_out: int = 2 + signature: tys.FunctionType = _MeasSig def __call__(self, q: Wire) -> Command: return super().__call__(q) @@ -94,7 +104,7 @@ class IntOps(Custom): @dataclass class DivModDef(IntOps): - num_out: int | None = 2 + num_out: int = 2 extension: tys.ExtensionId = "arithmetic.int" op_name: str = "idivmod_u" signature: tys.FunctionType = field( @@ -115,7 +125,7 @@ def validate(h: Hugr, mermaid: bool = False, roundtrip: bool = True): if mermaid: cmd.append("--mermaid") serial = h.to_serial().to_json() - subprocess.run(cmd, check=True, input=serial.encode()) + subprocess.run(cmd, check=True, input=serial.encode()) # noqa: S603 if roundtrip: h2 = Hugr.from_serial(SerialHugr.load_json(json.loads(serial))) diff --git a/hugr-py/tests/serialization/test_basic.py b/hugr-py/tests/serialization/test_basic.py index 5c3b41ace..1479888eb 100644 --- a/hugr-py/tests/serialization/test_basic.py +++ b/hugr-py/tests/serialization/test_basic.py @@ -1,4 +1,4 @@ -from hugr.serialization import SerialHugr +from hugr.serialization.serial_hugr import SerialHugr def test_empty(): diff --git a/hugr-py/tests/test_bimap.py b/hugr-py/tests/test_bimap.py index 028498c9c..244eb76dd 100644 --- a/hugr-py/tests/test_bimap.py +++ b/hugr-py/tests/test_bimap.py @@ -1,4 +1,6 @@ -from hugr.utils import BiMap +import pytest + +from hugr.utils import BiMap, NotBijection def test_insert_left() -> None: @@ -61,3 +63,19 @@ def test_existing_key() -> None: assert bimap.get_left(1) == "b" assert bimap.get_right("a") is None + + +def test_bimap_init(): + # Test with empty initial map + bm = BiMap() + assert len(bm) == 0 + + # Test with non-empty initial map + initial_map = {"a": 1, "b": 2} + bm = BiMap(initial_map) + assert len(bm) == 2 + + # Test with non-bijection initial map + invalid_map = {"a": 1, "b": 1} + with pytest.raises(NotBijection): + bm = BiMap(invalid_map) diff --git a/hugr-py/tests/test_cfg.py 
b/hugr-py/tests/test_cfg.py index ddd3b9c32..8a44a5344 100644 --- a/hugr-py/tests/test_cfg.py +++ b/hugr-py/tests/test_cfg.py @@ -1,9 +1,8 @@ +from hugr import ops, tys, val from hugr.cfg import Cfg -import hugr.tys as tys -import hugr.val as val from hugr.dfg import Dfg -import hugr.ops as ops -from .conftest import validate, INT_T, DivMod, IntVal + +from .conftest import INT_T, DivMod, IntVal, validate def build_basic_cfg(cfg: Cfg) -> None: @@ -14,13 +13,13 @@ def build_basic_cfg(cfg: Cfg) -> None: def test_basic_cfg() -> None: - cfg = Cfg([tys.Bool]) + cfg = Cfg(tys.Bool) build_basic_cfg(cfg) validate(cfg.hugr) def test_branch() -> None: - cfg = Cfg([tys.Bool, INT_T]) + cfg = Cfg(tys.Bool, INT_T) entry = cfg.add_entry() entry.set_block_outputs(*entry.inputs()) @@ -49,7 +48,7 @@ def test_nested_cfg() -> None: def test_dom_edge() -> None: - cfg = Cfg([tys.Bool, tys.Unit, INT_T]) + cfg = Cfg(tys.Bool, tys.Unit, INT_T) entry = cfg.add_entry() b, u, i = entry.inputs() entry.set_block_outputs(b, i) @@ -69,7 +68,7 @@ def test_dom_edge() -> None: def test_asymm_types() -> None: # test different types going to entry block's susccessors - cfg = Cfg([]) + cfg = Cfg() entry = cfg.add_entry() int_load = entry.load(IntVal(34)) diff --git a/hugr-py/tests/test_cond_loop.py b/hugr-py/tests/test_cond_loop.py index 75791317b..01c1fc1a4 100644 --- a/hugr-py/tests/test_cond_loop.py +++ b/hugr-py/tests/test_cond_loop.py @@ -1,10 +1,10 @@ +import pytest + +from hugr import ops, tys, val from hugr.cond_loop import Conditional, ConditionalError, TailLoop from hugr.dfg import Dfg -import hugr.tys as tys -import hugr.ops as ops -import hugr.val as val -import pytest -from .conftest import INT_T, validate, IntVal, H, Measure + +from .conftest import INT_T, H, IntVal, Measure, validate SUM_T = tys.Sum([[tys.Qubit], [tys.Qubit, INT_T]]) @@ -88,7 +88,7 @@ def build_tl(tl: TailLoop) -> None: h = Dfg(tys.Qubit) (q,) = h.inputs() - tl_n = h.insert_tail_loop(tl, q) + tl_n = h.insert_tail_loop(tl, 
[q], []) h.set_outputs(tl_n) validate(h.hugr) diff --git a/hugr-py/tests/test_hugr_build.py b/hugr-py/tests/test_hugr_build.py index a01b89d97..1660ec4ee 100644 --- a/hugr-py/tests/test_hugr_build.py +++ b/hugr-py/tests/test_hugr_build.py @@ -1,16 +1,15 @@ from __future__ import annotations -from hugr.node_port import Node, _SubPort -from hugr.hugr import Hugr +import pytest + +from hugr import ops, tys, val from hugr.dfg import Dfg, _ancestral_sibling -from hugr.ops import NoConcreteFunc -import hugr.ops as ops -import hugr.tys as tys -import hugr.val as val from hugr.function import Module -import pytest +from hugr.hugr import Hugr +from hugr.node_port import Node, _SubPort +from hugr.ops import NoConcreteFunc -from .conftest import Not, INT_T, IntVal, validate, DivMod +from .conftest import INT_T, DivMod, IntVal, Not, validate def test_stable_indices(): @@ -95,16 +94,16 @@ def test_tuple(): row = [tys.Bool, tys.Qubit] h = Dfg(*row) a, b = h.inputs() - t = h.add(ops.MakeTuple(a, b)) - a, b = h.add(ops.UnpackTuple(t)) + t = h.add(ops.MakeTuple()(a, b)) + a, b = h.add(ops.UnpackTuple()(t)) h.set_outputs(a, b) validate(h.hugr) h1 = Dfg(*row) a, b = h1.inputs() - mt = h1.add_op(ops.MakeTuple, a, b) - a, b = h1.add_op(ops.UnpackTuple, mt)[0, 1] + mt = h1.add_op(ops.MakeTuple(), a, b) + a, b = h1.add_op(ops.UnpackTuple(), mt)[0, 1] h1.set_outputs(a, b) assert h.hugr.to_serial() == h1.hugr.to_serial() @@ -196,7 +195,7 @@ def test_ancestral_sibling(): [ val.Function(simple_id().hugr), val.Sum(1, tys.Sum([[INT_T], [tys.Bool, INT_T]]), [val.TRUE, IntVal(34)]), - val.Tuple([val.TRUE, IntVal(23)]), + val.Tuple(val.TRUE, IntVal(23)), ], ) def test_vals(val: val.Value): @@ -232,7 +231,7 @@ def test_poly_function(direct_call: bool) -> None: load = f_main.load_function( f_id, instantiation=instantiation, type_args=type_args ) - call = f_main.add(ops.CallIndirect(load, q)) + call = f_main.add(ops.CallIndirect()(load, q)) f_main.set_outputs(call) @@ -252,7 +251,7 @@ def 
test_mono_function(direct_call: bool) -> None: call = f_main.call(f_id, q) else: load = f_main.load_function(f_id) - call = f_main.add(ops.CallIndirect(load, q)) + call = f_main.add(ops.CallIndirect()(load, q)) f_main.set_outputs(call) validate(mod.hugr) @@ -260,19 +259,20 @@ def test_mono_function(direct_call: bool) -> None: def test_higher_order() -> None: noop_fn = Dfg(tys.Qubit) - noop_fn.set_outputs(noop_fn.add(ops.Noop(noop_fn.input_node[0]))) + noop_fn.set_outputs(noop_fn.add(ops.Noop()(noop_fn.input_node[0]))) d = Dfg(tys.Qubit) (q,) = d.inputs() f_val = d.load(val.Function(noop_fn.hugr)) - call = d.add(ops.CallIndirect(f_val, q))[0] + call = d.add(ops.CallIndirect()(f_val, q))[0] d.set_outputs(call) validate(d.hugr) def test_lift() -> None: - d = Dfg(tys.Qubit, extension_delta=["X"]) + d = Dfg(tys.Qubit) + d.parent_op._extension_delta = ["X"] (q,) = d.inputs() lift = d.add(ops.Lift("X")(q)) d.set_outputs(lift) diff --git a/hugr-py/tests/test_version.py b/hugr-py/tests/test_version.py index ac9e154d6..5ed309b90 100644 --- a/hugr-py/tests/test_version.py +++ b/hugr-py/tests/test_version.py @@ -1,6 +1,8 @@ # from https://github.com/python-poetry/poetry/issues/144#issuecomment-877835259 -import toml # type: ignore[import-untyped] from pathlib import Path + +import toml # type: ignore[import-untyped] + import hugr @@ -8,7 +10,8 @@ def test_versions_are_in_sync(): """Checks if the pyproject.toml and package.__init__.py __version__ are in sync.""" path = Path(__file__).resolve().parents[1] / "pyproject.toml" - pyproject = toml.loads(open(str(path)).read()) + with Path.open(path, "r") as f: + pyproject = toml.loads(f.read()) pyproject_version = pyproject["tool"]["poetry"]["version"] package_init_version = hugr.__version__ diff --git a/hugr/CHANGELOG.md b/hugr/CHANGELOG.md index 3bddd8307..b6b1cdc66 100644 --- a/hugr/CHANGELOG.md +++ b/hugr/CHANGELOG.md @@ -1,5 +1,32 @@ # Changelog +## 0.6.0 (2024-06-28) + +### Bug Fixes + +- SimpleReplacement panic on multiports 
([#1191](https://github.com/CQCL/hugr/pull/1191)) +- Add some validation for const nodes ([#1222](https://github.com/CQCL/hugr/pull/1222)) +- Cfg not validating entry/exit types ([#1229](https://github.com/CQCL/hugr/pull/1229)) +- `extract_hugr` not removing root node ports ([#1239](https://github.com/CQCL/hugr/pull/1239)) + +### Documentation + +- Fix documentation of `ValidationError::ConstTypeError` ([#1227](https://github.com/CQCL/hugr/pull/1227)) + +### Features + +- CircuitBuilder::add_constant ([#1168](https://github.com/CQCL/hugr/pull/1168)) +- [**breaking**] Make the rewrite errors more useful ([#1174](https://github.com/CQCL/hugr/pull/1174)) +- [**breaking**] Validate Extensions using hierarchy, ignore input_extensions, RIP inference ([#1142](https://github.com/CQCL/hugr/pull/1142)) +- [**breaking**] Infer extension deltas for Case, Cfg, Conditional, DataflowBlock, Dfg, TailLoop ([#1195](https://github.com/CQCL/hugr/pull/1195)) +- Helper functions for requesting inference, use with builder in tests ([#1219](https://github.com/CQCL/hugr/pull/1219)) + +### Refactor + +- [**breaking**] FunctionBuilder takes impl Into ([#1220](https://github.com/CQCL/hugr/pull/1220)) +- [**breaking**] Remove NodeType and input_extensions ([#1183](https://github.com/CQCL/hugr/pull/1183)) + + ## 0.5.1 (2024-06-07) ### Bug Fixes diff --git a/hugr/Cargo.toml b/hugr/Cargo.toml index dff1dd10b..87c685c85 100644 --- a/hugr/Cargo.toml +++ b/hugr/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hugr" -version = "0.5.1" +version = "0.6.0" edition = { workspace = true } rust-version = { workspace = true } @@ -25,8 +25,8 @@ path = "src/lib.rs" extension_inference = [] [dependencies] -hugr-core = { path = "../hugr-core", version = "0.2.0" } -hugr-passes = { path = "../hugr-passes", version = "0.2.0" } +hugr-core = { path = "../hugr-core", version = "0.3.0" } +hugr-passes = { path = "../hugr-passes", version = "0.3.0" } [dev-dependencies] rstest = { workspace = true } diff --git a/poetry.lock 
b/poetry.lock index 36f24f9e5..0a82d9f3c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -35,63 +35,63 @@ files = [ [[package]] name = "coverage" -version = "7.5.3" +version = "7.5.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, - {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, - {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, - {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, - {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, - {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, - {file = 
"coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, - {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, - {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, - {file = 
"coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, - {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, - {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, - 
{file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, - {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, - {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, - {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, + {file = "coverage-7.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cfb5a4f556bb51aba274588200a46e4dd6b505fb1a5f8c5ae408222eb416f99"}, + {file = "coverage-7.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2174e7c23e0a454ffe12267a10732c273243b4f2d50d07544a91198f05c48f47"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2214ee920787d85db1b6a0bd9da5f8503ccc8fcd5814d90796c2f2493a2f4d2e"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1137f46adb28e3813dec8c01fefadcb8c614f33576f672962e323b5128d9a68d"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b385d49609f8e9efc885790a5a0e89f2e3ae042cdf12958b6034cc442de428d3"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4a474f799456e0eb46d78ab07303286a84a3140e9700b9e154cfebc8f527016"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5cd64adedf3be66f8ccee418473c2916492d53cbafbfcff851cbec5a8454b136"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e564c2cf45d2f44a9da56f4e3a26b2236504a496eb4cb0ca7221cd4cc7a9aca9"}, + {file = "coverage-7.5.4-cp310-cp310-win32.whl", hash = "sha256:7076b4b3a5f6d2b5d7f1185fde25b1e54eb66e647a1dfef0e2c2bfaf9b4c88c8"}, + {file = 
"coverage-7.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:018a12985185038a5b2bcafab04ab833a9a0f2c59995b3cec07e10074c78635f"}, + {file = "coverage-7.5.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db14f552ac38f10758ad14dd7b983dbab424e731588d300c7db25b6f89e335b5"}, + {file = "coverage-7.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3257fdd8e574805f27bb5342b77bc65578e98cbc004a92232106344053f319ba"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a6612c99081d8d6134005b1354191e103ec9705d7ba2754e848211ac8cacc6b"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d45d3cbd94159c468b9b8c5a556e3f6b81a8d1af2a92b77320e887c3e7a5d080"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed550e7442f278af76d9d65af48069f1fb84c9f745ae249c1a183c1e9d1b025c"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a892be37ca35eb5019ec85402c3371b0f7cda5ab5056023a7f13da0961e60da"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8192794d120167e2a64721d88dbd688584675e86e15d0569599257566dec9bf0"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:820bc841faa502e727a48311948e0461132a9c8baa42f6b2b84a29ced24cc078"}, + {file = "coverage-7.5.4-cp311-cp311-win32.whl", hash = "sha256:6aae5cce399a0f065da65c7bb1e8abd5c7a3043da9dceb429ebe1b289bc07806"}, + {file = "coverage-7.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2e344d6adc8ef81c5a233d3a57b3c7d5181f40e79e05e1c143da143ccb6377d"}, + {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"}, + {file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"}, + {file = "coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"}, + {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"}, + {file = "coverage-7.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdd31315fc20868c194130de9ee6bfd99755cc9565edff98ecc12585b90be882"}, + {file = "coverage-7.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02ff6e898197cc1e9fa375581382b72498eb2e6d5fc0b53f03e496cfee3fac6d"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05c16cf4b4c2fc880cb12ba4c9b526e9e5d5bb1d81313d4d732a5b9fe2b9d53"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c5986ee7ea0795a4095ac4d113cbb3448601efca7f158ec7f7087a6c705304e4"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df54843b88901fdc2f598ac06737f03d71168fd1175728054c8f5a2739ac3e4"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ab73b35e8d109bffbda9a3e91c64e29fe26e03e49addf5b43d85fc426dde11f9"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:aea072a941b033813f5e4814541fc265a5c12ed9720daef11ca516aeacd3bd7f"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:16852febd96acd953b0d55fc842ce2dac1710f26729b31c80b940b9afcd9896f"}, + {file = "coverage-7.5.4-cp38-cp38-win32.whl", hash = "sha256:8f894208794b164e6bd4bba61fc98bf6b06be4d390cf2daacfa6eca0a6d2bb4f"}, + {file = "coverage-7.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:e2afe743289273209c992075a5a4913e8d007d569a406ffed0bd080ea02b0633"}, + {file = "coverage-7.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b95c3a8cb0463ba9f77383d0fa8c9194cf91f64445a63fc26fb2327e1e1eb088"}, + {file = "coverage-7.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7564cc09dd91b5a6001754a5b3c6ecc4aba6323baf33a12bd751036c998be4"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44da56a2589b684813f86d07597fdf8a9c6ce77f58976727329272f5a01f99f7"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e16f3d6b491c48c5ae726308e6ab1e18ee830b4cdd6913f2d7f77354b33f91c8"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbc5958cb471e5a5af41b0ddaea96a37e74ed289535e8deca404811f6cb0bc3d"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a04e990a2a41740b02d6182b498ee9796cf60eefe40cf859b016650147908029"}, + 
{file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ddbd2f9713a79e8e7242d7c51f1929611e991d855f414ca9996c20e44a895f7c"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b1ccf5e728ccf83acd313c89f07c22d70d6c375a9c6f339233dcf792094bcbf7"}, + {file = "coverage-7.5.4-cp39-cp39-win32.whl", hash = "sha256:56b4eafa21c6c175b3ede004ca12c653a88b6f922494b023aeb1e836df953ace"}, + {file = "coverage-7.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:65e528e2e921ba8fd67d9055e6b9f9e34b21ebd6768ae1c1723f4ea6ace1234d"}, + {file = "coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"}, + {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"}, ] [package.dependencies] @@ -127,18 +127,18 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.14.0" +version = "3.15.4" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, - {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -184,38 +184,38 @@ files = [ [[package]] name = "mypy" -version = "1.10.0" +version = "1.10.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, - {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, - {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, - {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, - {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, - {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, - {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, - {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, - {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, - {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, - {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, - {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, - {file = 
"mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, - {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, - {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, - {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, - {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, - {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, - {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, + {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, + {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, + {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, + {file = 
"mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, + {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, + {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, + {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, + {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, + {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, + {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, + {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, + {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, + {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, + {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, + {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, + {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, + {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, + {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, ] [package.dependencies] @@ -253,13 +253,13 @@ files = [ [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -313,13 +313,13 @@ virtualenv = ">=20.10.0" [[package]] name = 
"pydantic" -version = "2.7.3" +version = "2.7.4" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.3-py3-none-any.whl", hash = "sha256:ea91b002777bf643bb20dd717c028ec43216b24a6001a280f83877fd2655d0b4"}, - {file = "pydantic-2.7.3.tar.gz", hash = "sha256:c46c76a40bb1296728d7a8b99aa73dd70a48c3510111ff290034f860c99c419e"}, + {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, + {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, ] [package.dependencies] @@ -570,24 +570,24 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.1" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.12.1-py3-none-any.whl", hash = "sha256:6024b58b69089e5a89c347397254e35f1bf02a907728ec7fee9bf0fe837d203a"}, - {file = "typing_extensions-4.12.1.tar.gz", hash = "sha256:915f5e35ff76f56588223f15fdd5938f9a1cf9195c0de25130c627e4d597f6d1"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "virtualenv" -version = "20.26.2" +version = "20.26.3" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, - {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = 
"sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, ] [package.dependencies] diff --git a/pyproject.toml b/pyproject.toml index a67f62af5..22f314844 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,3 +27,7 @@ typing-extensions = "^4.12.0" [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +addopts = "--doctest-modules" +filterwarnings = "ignore::DeprecationWarning:lark.*" diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 000000000..6aeaa0c53 --- /dev/null +++ b/ruff.toml @@ -0,0 +1,84 @@ +# See https://docs.astral.sh/ruff/rules/ +target-version = "py310" + +line-length = 88 + +exclude = ["tests/error"] + +[lint] + +select = [ + "F", # pyflakes + "E", # pycodestyle Errors + "W", # pycodestyle Warnings + + # "A", # flake8-builtins + # "ANN", # flake8-annotations + # "ARG", # flake8-unused-arguments + "B", # flake8-Bugbear + "BLE", # flake8-blind-except + "C4", # flake8-comprehensions + # "C90", # mccabe + # "COM", # flake8-commas + # "CPY", # flake8-copyright + "D", # pydocstyle + "EM", # flake8-errmsg + # "ERA", # eradicate + "EXE", # flake8-executable + "FA", # flake8-future-annotations + # "FBT", # flake8-boolean-trap + # "FIX", # flake8-fixme + "FLY", # flynt + # "FURB", # refurb + "G", # flake8-logging-format + "I", # isort + "ICN", # flake8-import-conventions + "INP", # flake8-no-pep420 + "INT", # flake8-gettext + # "ISC", # flake8-implicit-str-concat + # "LOG", # flake8-logging + # "N", # pep8-Naming + "NPY", # NumPy-specific + "PERF", # Perflint + "PGH", # pygrep-hooks + "PIE", # flake8-pie + # "PL", # pylint + "PT", # flake8-pytest-style + "PTH", # flake8-use-pathlib + "PYI", # flake8-pyi + "Q", # flake8-quotes + # "RET", # flake8-return + "RSE", # flake8-raise + "RUF", # Ruff-specific + "S", # flake8-bandit (Security) + "SIM", # 
flake8-simplify + # "SLF", # flake8-self + "SLOT", # flake8-slots + "T10", # flake8-debugger + "T20", # flake8-print + "TCH", # flake8-type-checking + # "TD", # flake8-todos + "TID", # flake8-tidy-imports + "TRY", # tryceratops + "UP", # pyupgrade + "YTT", # flake8-2020 +] + + +ignore = [ + "S101", # Use of `assert` detected + "TRY003", # Avoid specifying long messages outside the exception class + "D102", # Can't detect that abstract method overrides can use the same docstring + "D105", # Missing docstrings in dunder methods (repr etc.) + "D205", # Blank line requirement + "D107", # Missing docstrings in __init__ (covered by class level docstring) +] + +[lint.per-file-ignores] +"hugr-py/tests/**" = ["D"] +"hugr-py/docs/**" = ["D"] +"hugr-py/src/hugr/serialization/**" = ["D"] +"scripts/*" = ["T201", "EXE001", "D"] + +[lint.pydocstyle] +convention = "google" diff --git a/scripts/generate_schema.py b/scripts/generate_schema.py index 882f0d7e6..78d66fb19 100644 --- a/scripts/generate_schema.py +++ b/scripts/generate_schema.py @@ -1,28 +1,29 @@ #!/usr/bin/env python """Dumps the json schema for `hugr.serialization.SerialHugr` to a file. -The schema is written to a file named `hugr_schema_v#.json` in the specified output directory. -If no output directory is specified, the schema is written to the current working directory. +The schema is written to a file named `hugr_schema_v#.json` +in the specified output directory. +If no output directory is specified, +the schema is written to the current working directory. 
usage: python generate_schema.py [] """ import json import sys -from typing import Type, Optional from pathlib import Path from pydantic import ConfigDict -from hugr.serialization import SerialHugr +from hugr.serialization.serial_hugr import SerialHugr from hugr.serialization.testing_hugr import TestingHugr def write_schema( out_dir: Path, name_prefix: str, - schema: Type[SerialHugr] | Type[TestingHugr], - config: Optional[ConfigDict] = None, + schema: type[SerialHugr] | type[TestingHugr], + config: ConfigDict | None = None, **kwargs, ): version = schema.get_version() diff --git a/specification/hugr.md b/specification/hugr.md index 7dad8b064..127e0635a 100644 --- a/specification/hugr.md +++ b/specification/hugr.md @@ -186,7 +186,7 @@ these are listed in [hierarchical node relationships](#hierarchical-relationships-and-constraints). In a valid HUGR the hierarchy edges form a tree joining all nodes of the HUGR, with a unique root node. The HUGR is characterized by the type of its root node. -The root node has no non-hierarchy edges (and this supercedes any other requirements on the +The root node has no non-hierarchy edges (and this supersedes any other requirements on the edges of specific node types). A *sibling graph* is a subgraph of the HUGR containing all nodes with @@ -837,7 +837,7 @@ such declarations may include (bind) any number of type parameters, of kinds as TypeParam ::= Type(Any|Copyable|Eq) | BoundedUSize(u64|) -- note optional bound | Extensions - | List(TypeParam) -- homogenous, any sized + | List(TypeParam) -- homogeneous, any sized | Tuple([TypeParam]) -- heterogenous, fixed size | Opaque(Name, [TypeArg]) -- e.g. Opaque("Array", [5, Opaque("usize", [])]) ``` @@ -1694,7 +1694,7 @@ including the names of the operations. Where WebAssembly specifies a "partial" operation (i.e. when the result is not defined on certain inputs), we use a Sum type to hold the result. 
-A few additonal operations not included in WebAssembly are also +A few additional operations not included in WebAssembly are also specified, and there are some other small differences (highlighted below). diff --git a/specification/schema/hugr_schema_strict_v1.json b/specification/schema/hugr_schema_strict_v1.json index fb201688a..1cfd6bb61 100644 --- a/specification/schema/hugr_schema_strict_v1.json +++ b/specification/schema/hugr_schema_strict_v1.json @@ -2,7 +2,7 @@ "$defs": { "Alias": { "additionalProperties": false, - "description": "An Alias Type", + "description": "An Alias Type.", "properties": { "t": { "const": "Alias", diff --git a/specification/schema/hugr_schema_v1.json b/specification/schema/hugr_schema_v1.json index 39c488e44..82c7f1137 100644 --- a/specification/schema/hugr_schema_v1.json +++ b/specification/schema/hugr_schema_v1.json @@ -2,7 +2,7 @@ "$defs": { "Alias": { "additionalProperties": true, - "description": "An Alias Type", + "description": "An Alias Type.", "properties": { "t": { "const": "Alias", diff --git a/specification/schema/testing_hugr_schema_strict_v1.json b/specification/schema/testing_hugr_schema_strict_v1.json index 497b14cfc..631308ca0 100644 --- a/specification/schema/testing_hugr_schema_strict_v1.json +++ b/specification/schema/testing_hugr_schema_strict_v1.json @@ -2,7 +2,7 @@ "$defs": { "Alias": { "additionalProperties": false, - "description": "An Alias Type", + "description": "An Alias Type.", "properties": { "t": { "const": "Alias", diff --git a/specification/schema/testing_hugr_schema_v1.json b/specification/schema/testing_hugr_schema_v1.json index 989e86509..63a2cc84b 100644 --- a/specification/schema/testing_hugr_schema_v1.json +++ b/specification/schema/testing_hugr_schema_v1.json @@ -2,7 +2,7 @@ "$defs": { "Alias": { "additionalProperties": true, - "description": "An Alias Type", + "description": "An Alias Type.", "properties": { "t": { "const": "Alias",