diff --git a/.github/workflows/ci_python.yml b/.github/workflows/ci_python.yml index 3c8a89803..339ad6df4 100644 --- a/.github/workflows/ci_python.yml +++ b/.github/workflows/ci_python.yml @@ -69,13 +69,13 @@ jobs: poetry install --only=main --only=lint - name: black run: | - poetry run black --diff pysrc pytests docs/source + poetry run black --diff pysrc pytests docs - name: flake8 run: | - poetry run flake8 pysrc pytests docs/source + poetry run flake8 pysrc pytests docs - name: isort run: | - poetry run isort --filter-files --diff pysrc pytests docs/source + poetry run isort --filter-files --diff pysrc pytests docs - name: pydocstyle run: | poetry run pydocstyle pysrc @@ -227,28 +227,33 @@ jobs: echo "::endgroup::" deactivate done - - name: Setup QT - # Needed by sphinx-social-cards. - # https://github.com/2bndy5/sphinx-social-cards/blob/main/.github/workflows/build.yml#L54 + - name: Lint reference docs run: | - sudo apt-get install -y libgl1-mesa-dev libxkbcommon-x11-0 - echo "QT_QPA_PLATFORM=offscreen" >> "$GITHUB_ENV" + poetry env use 3.11 + source $(poetry env info --path)/bin/activate + poetry install --with=docs + pip install ${WHEEL} --force-reinstall + cd docs + python _scripts/lint_reference.py + deactivate + - name: Set up Quarto + uses: quarto-dev/quarto-actions/setup@v2 - name: Build docs - # ablog doesn't currently indicate whether it supports parallel reads, - # leading to a warning. - # when possible, add `"-j", "auto",` to do parallel builds (and in nox). 
run: | poetry env use 3.11 source $(poetry env info --path)/bin/activate poetry install --with=docs pip install ${WHEEL} --force-reinstall - sphinx-build docs/source docs/_build -W # -j auto + cd docs + python _scripts/gen_reference.py + python -m quartodoc interlinks + quarto render --output-dir _site deactivate - name: Upload docs uses: actions/upload-pages-artifact@v2 with: # Automatically uploads an artifact from the './_site' directory by default - path: ${{ github.workspace }}/python/docs/_build + path: ${{ github.workspace }}/python/docs/_site rust-format: runs-on: ubuntu-20.04 diff --git a/python/README.md b/python/README.md index 2ea1da6cb..bb2b2b59f 100644 --- a/python/README.md +++ b/python/README.md @@ -33,3 +33,41 @@ Alternatively, install nox and run the tests inside an isolated environment: ```shell nox ``` + + +## Previewing Docs + +* Install `quarto-cli` on your machine. Also consider installing an IDE extension. + + See: https://quarto.org/docs/get-started/ + +* Generate reference docs + + ```shell + nox -s docs-gen + ``` + + You should re-run this after making any updates to the `pysrc` docstrings. + If _Preview Docs_ is running in another shell, the system should auto-refresh with your changes. + +* Preview docs (with auto-refresh on edit) + + ```shell + nox -s docs + ``` + +* Cleanup generated and cached docs + + ```shell + nox -s docs-clean + ``` + + Try this if you see something unexpected (especially after deleting or renaming). + +* Builds docs to `docs/_site` + + ```shell + nox -s docs-build + ``` + + This is primarily used in CI. 
diff --git a/python/docs/.gitignore b/python/docs/.gitignore index c52066621..489c9246a 100644 --- a/python/docs/.gitignore +++ b/python/docs/.gitignore @@ -1,5 +1,21 @@ -_build -.jupyter_cache -jupyter_execute -source/reference/apidocs -source/iframe_figures \ No newline at end of file +# quarto build cache +/.quarto/ + +# quartodoc interlink artifacts +/_inv/ +/objects.json + +# generated API docs +/reference/ + +# files that might remain if a quarto build is interrupted +index.html +/blog/*.html +/examples/*.html +/guide/*.html +index-listing.json +data_types-listing.json +/site_libs/ + +# output of the site from `nox -s docs-build` +/_site/ diff --git a/python/docs/_extensions/machow/interlinks/.gitignore b/python/docs/_extensions/machow/interlinks/.gitignore new file mode 100644 index 000000000..5a1bf0b4e --- /dev/null +++ b/python/docs/_extensions/machow/interlinks/.gitignore @@ -0,0 +1,3 @@ +*.html +*.pdf +*_files/ diff --git a/python/docs/_extensions/machow/interlinks/_extension.yml b/python/docs/_extensions/machow/interlinks/_extension.yml new file mode 100644 index 000000000..c8a812136 --- /dev/null +++ b/python/docs/_extensions/machow/interlinks/_extension.yml @@ -0,0 +1,7 @@ +title: Interlinks +author: Michael Chow +version: 1.1.0 +quarto-required: ">=1.2.0" +contributes: + filters: + - interlinks.lua diff --git a/python/docs/_extensions/machow/interlinks/interlinks.lua b/python/docs/_extensions/machow/interlinks/interlinks.lua new file mode 100644 index 000000000..47aa61fa6 --- /dev/null +++ b/python/docs/_extensions/machow/interlinks/interlinks.lua @@ -0,0 +1,254 @@ +local function read_inv_text(filename) + -- read file + local file = io.open(filename, "r") + if file == nil then + return nil + end + local str = file:read("a") + file:close() + + + local project = str:match("# Project: (%S+)") + local version = str:match("# Version: (%S+)") + + local data = {project = project, version = version, items = {}} + + local ptn_data = + "^" .. + "(.-)%s+" .. 
-- name + "([%S:]-):" .. -- domain + "([%S]+)%s+" .. -- role + "(%-?%d+)%s+" .. -- priority + "(%S*)%s+" .. -- uri + "(.-)\r?$" -- dispname + + + -- Iterate through each line in the file content + for line in str:gmatch("[^\r\n]+") do + if not line:match("^#") then + -- Match each line against the pattern + local name, domain, role, priority, uri, dispName = line:match(ptn_data) + + -- if name is nil, raise an error + if name == nil then + error("Error parsing line: " .. line) + end + + data.items[#data.items + 1] = { + name = name, + domain = domain, + role = role, + priority = priority, + uri = uri, + dispName = dispName + } + end + end + return data +end + +local function read_json(filename) + + local file = io.open(filename, "r") + if file == nil then + return nil + end + local str = file:read("a") + file:close() + + local decoded = quarto.json.decode(str) + return decoded +end + +local function read_inv_text_or_json(base_name) + local file = io.open(base_name .. ".txt", "r") + if file then + -- TODO: refactors so we don't just close the file immediately + io.close(file) + json = read_inv_text(base_name .. ".txt") + + else + json = read_json(base_name .. ".json") + end + + return json +end + +local inventory = {} + +local function lookup(search_object) + + local results = {} + for _, inv in ipairs(inventory) do + for _, item in ipairs(inv.items) do + -- e.g. 
:external+:::`` + if item.inv_name and item.inv_name ~= search_object.inv_name then + goto continue + end + + if item.name ~= search_object.name then + goto continue + end + + if search_object.role and item.role ~= search_object.role then + goto continue + end + + if search_object.domain and item.domain ~= search_object.domain then + goto continue + else + if search_object.domain or item.domain == "py" then + table.insert(results, item) + end + + goto continue + end + + ::continue:: + end + end + + if #results == 1 then + return results[1] + end + if #results > 1 then + quarto.log.warning("Found multiple matches for " .. search_object.name .. ", using the first match.") + return results[1] + end + if #results == 0 then + quarto.log.warning("Found no matches for object:\n", search_object) + end + + return nil +end + +local function mysplit (inputstr, sep) + if sep == nil then + sep = "%s" + end + local t={} + for str in string.gmatch(inputstr, "([^"..sep.."]+)") do + table.insert(t, str) + end + return t +end + +local function normalize_role(role) + if role == "func" then + return "function" + end + return role +end + +local function build_search_object(str) + local starts_with_colon = str:sub(1, 1) == ":" + local search = {} + if starts_with_colon then + local t = mysplit(str, ":") + if #t == 2 then + -- e.g. :py:func:`my_func` + search.role = normalize_role(t[1]) + search.name = t[2]:match("%%60(.*)%%60") + elseif #t == 3 then + -- e.g. :py:func:`my_func` + search.domain = t[1] + search.role = normalize_role(t[2]) + search.name = t[3]:match("%%60(.*)%%60") + elseif #t == 4 then + -- e.g. :ext+inv:py:func:`my_func` + search.external = true + + search.inv_name = t[1]:match("external%+(.*)") + search.domain = t[2] + search.role = normalize_role(t[3]) + search.name = t[4]:match("%%60(.*)%%60") + else + quarto.log.warning("couldn't parse this link: " .. 
str) + return {} + end + else + search.name = str:match("%%60(.*)%%60") + end + + if search.name == nil then + quarto.log.warning("couldn't parse this link: " .. str) + return {} + end + + if search.name:sub(1, 1) == "~" then + search.shortened = true + search.name = search.name:sub(2, -1) + end + return search +end + +local function report_broken_link(link, search_object, replacement) + -- TODO: how to unescape html elements like [? + return pandoc.Code(pandoc.utils.stringify(link.content)) +end + +function Link(link) + -- do not process regular links ---- + if not link.target:match("%%60") then + return link + end + + -- lookup item ---- + local search = build_search_object(link.target) + local item = lookup(search) + + -- determine replacement, used if no link text specified ---- + local original_text = pandoc.utils.stringify(link.content) + local replacement = search.name + if search.shortened then + local t = mysplit(search.name, ".") + replacement = t[#t] + end + + -- set link text ---- + if original_text == "" and replacement ~= nil then + link.content = pandoc.Code(replacement) + end + + -- report broken links ---- + if item == nil then + return report_broken_link(link, search) + end + link.target = item.uri:gsub("%$$", search.name) + + + return link +end + +local function fixup_json(json, prefix) + for _, item in ipairs(json.items) do + item.uri = prefix .. item.uri + end + table.insert(inventory, json) +end + +return { + { + Meta = function(meta) + local json + local prefix + if meta.interlinks and meta.interlinks.sources then + for k, v in pairs(meta.interlinks.sources) do + local base_name = quarto.project.offset .. "/_inv/" .. k .. "_objects" + json = read_inv_text_or_json(base_name) + prefix = pandoc.utils.stringify(v.url) + if json ~= nil then + fixup_json(json, prefix) + end + end + end + json = read_inv_text_or_json(quarto.project.offset .. 
"/objects") + if json ~= nil then + fixup_json(json, "/") + end + end + }, + { + Link = Link + } +} diff --git a/python/docs/_extensions/quarto-ext/include-code-files/_extension.yml b/python/docs/_extensions/quarto-ext/include-code-files/_extension.yml new file mode 100644 index 000000000..13c496060 --- /dev/null +++ b/python/docs/_extensions/quarto-ext/include-code-files/_extension.yml @@ -0,0 +1,9 @@ +title: Include Code Files +author: Bruno Beaufils +version: 1.0.0 +quarto-required: ">=1.2" +contributes: + filters: + - include-code-files.lua + + diff --git a/python/docs/_extensions/quarto-ext/include-code-files/include-code-files.lua b/python/docs/_extensions/quarto-ext/include-code-files/include-code-files.lua new file mode 100644 index 000000000..c5f5cbf66 --- /dev/null +++ b/python/docs/_extensions/quarto-ext/include-code-files/include-code-files.lua @@ -0,0 +1,63 @@ +--- include-code-files.lua – filter to include code from source files +--- +--- Copyright: © 2020 Bruno BEAUFILS +--- License: MIT – see LICENSE file for details + +--- Dedent a line +local function dedent (line, n) + return line:sub(1,n):gsub(" ","") .. line:sub(n+1) +end + +--- Filter function for code blocks +local function transclude (cb) + if cb.attributes.include then + local content = "" + local fh = io.open(cb.attributes.include) + if not fh then + io.stderr:write("Cannot open file " .. cb.attributes.include .. 
" | Skipping includes\n") + else + local number = 1 + local start = 1 + + -- change hyphenated attributes to PascalCase + for i,pascal in pairs({"startLine", "endLine"}) + do + local hyphen = pascal:gsub("%u", "-%0"):lower() + if cb.attributes[hyphen] then + cb.attributes[pascal] = cb.attributes[hyphen] + cb.attributes[hyphen] = nil + end + end + + if cb.attributes.startLine then + cb.attributes.startFrom = cb.attributes.startLine + start = tonumber(cb.attributes.startLine) + end + for line in fh:lines ("L") + do + if cb.attributes.dedent then + line = dedent(line, cb.attributes.dedent) + end + if number >= start then + if not cb.attributes.endLine or number <= tonumber(cb.attributes.endLine) then + content = content .. line + end + end + number = number + 1 + end + fh:close() + end + -- remove key-value pair for used keys + cb.attributes.include = nil + cb.attributes.startLine = nil + cb.attributes.endLine = nil + cb.attributes.dedent = nil + -- return final code block + return pandoc.CodeBlock(content, cb.attr) + end +end + +return { + { CodeBlock = transclude } +} + diff --git a/python/docs/_quarto.yml b/python/docs/_quarto.yml new file mode 100644 index 000000000..0530bde15 --- /dev/null +++ b/python/docs/_quarto.yml @@ -0,0 +1,94 @@ +project: + type: website + output-dir: .quarto/_site + preview: + port: 4200 + browser: true # Open a web browser to view the preview + navigate: true # Navigate the browser automatically when outputs are updated + watch-inputs: true # Re-render input files when they change + +website: + favicon: "_static/images/favicon.png" + google-analytics: G-HR9E2E6TG4 + page-navigation: true + navbar: + background: primary + logo: "_static/images/kaskada-negative.svg" + logo-alt: "Kaskada logo." 
+ title: false + left: + - href: guide/index.qmd + text: User Guide + - href: examples/index.qmd + text: Examples + - href: community/index.qmd + text: Community + - href: reference/index.qmd + text: API + - href: blog/index.qmd + text: Blog + right: + - icon: slack + href: https://join.slack.com/t/kaskada-hq/shared_invite/zt-1t1lms085-bqs2jtGO2TYr9kuuam~c9w + - icon: github + href: https://github.com/kaskada-ai/kaskada + sidebar: + - title: User Guide + style: docked + contents: + - guide/index.qmd + - guide/quickstart.qmd + - guide/tour.qmd + - guide/why.qmd + - guide/installation.qmd + - guide/timestreams.qmd + - guide/data_types.qmd + - guide/entities.qmd + - guide/aggregation.qmd + - guide/joins.qmd + - guide/sources.qmd + - guide/execution.qmd + - title: Examples + contents: examples + - title: API + style: docked + contents: reference + + page-footer: + border: true + left: "© Copyright 2023, Kaskada Contributors." + repo-url: https://github.com/kaskada-ai/kaskada/ + repo-subdir: python/docs + repo-actions: + - edit + - source + site-url: https://kaskada.io + title: Kaskada + +format: + html: + theme: + light: flatly + dark: darkly #vapor + css: + - _static/css/styles.css + include-in-header: _templates/announcement.html + toc: true + link-external-icon: false + link-external-newwindow: false + link-external-filter: '^(?:http:|https:)\/\/kaskada\.io\/' + +filters: + - interlinks + +interlinks: + fast: true + sources: + numpy: + url: https://numpy.org/doc/stable/ + pandas: + url: https://pandas.pydata.org/docs/ + pyarrow: + url: https://arrow.apache.org/docs/ + python: + url: https://docs.python.org/3/ diff --git a/python/docs/_reference.yml b/python/docs/_reference.yml new file mode 100644 index 000000000..3a8b39f73 --- /dev/null +++ b/python/docs/_reference.yml @@ -0,0 +1,200 @@ +quartodoc: + title: API + dir: reference + package: kaskada + source_dir: ../pysrc + options: + children: separate + include_empty: true + dynamic: true + + sections: + - title: 
Sessions + options: + children: embedded + contents: + - init_session + - title: Timestream + options: + children: flat + contents: + - name: Timestream + members: + - data_type + - is_continuous + - name: LiteralValue + dynamic: false + - name: Arg + dynamic: false + - subtitle: Aggregation + desc: | + The User Guide has details on [aggregations in general](../../../guide/aggregation.qmd). + + ::: {.callout-important} + It is important to remember that aggregations are partitioned by entity and windowed, with the default behavior being cumulative up to the current time. + ::: + contents: + - Timestream.collect + - Timestream.count + - Timestream.count_if + - Timestream.first + - Timestream.last + - Timestream.max + - Timestream.mean + - Timestream.min + - Timestream.stddev + - Timestream.sum + - Timestream.variance + - subtitle: Arithmetic + desc: | + Timestreams support a variety of arithmetic operations. + + ::: {.callout-important} + In addition to the chainable methods, standard operators are implemented where appropriate. + For instance, `a.add(b)` may be written as `a + b`. + See the notes on the specific functions for more information. + ::: + contents: + - Timestream.add + - Timestream.ceil + - Timestream.clamp + - Timestream.div + - Timestream.exp + - Timestream.floor + - Timestream.greatest + - Timestream.least + - Timestream.mul + - Timestream.neg + - Timestream.powf + - Timestream.round + - Timestream.sqrt + - Timestream.sub + - subtitle: Collection + desc: | + Timestreams allow each point to contain a collection -- a `list` or `map` -- of elements. + contents: + - Timestream.__getitem__ + - Timestream.flatten + - Timestream.index + - Timestream.length + - Timestream.union + - subtitle: Comparison + desc: | + Comparison operations produce boolean Timestreams. + + ::: {.callout-important} + In addition to the chainable methods, standard operators are implemented where appropriate. + For instance, `a.ge(b)` may be written as `a >= b`. 
+ See the notes on the specific functions for more information. + + To respect the semantics of `__eq__` and `__ne__`, `a == b` and `a != b` are *not* overloaded. + ::: + contents: + - Timestream.eq + - Timestream.ge + - Timestream.gt + - Timestream.le + - Timestream.lt + - Timestream.ne + - Timestream.is_null + - Timestream.is_not_null + - subtitle: Execution + contents: + - Timestream.preview + - Timestream.to_pandas + - Timestream.run_iter + - Timestream.write + - subtitle: Grouping + contents: + - Timestream.lookup + - Timestream.with_key + - subtitle: Logical + contents: + - Timestream.and_ + - Timestream.or_ + - Timestream.not_ + - subtitle: Misc + contents: + - Timestream.cast + - Timestream.coalesce + - Timestream.else_ + - Timestream.explain + - Timestream.filter + - Timestream.hash + - Timestream.if_ + - Timestream.lag + - Timestream.null_if + - Timestream.pipe + - subtitle: Records + desc: | + Record operations create, extract or manipulate Timestreams of records. + contents: + - Timestream.col + - Timestream.select + - Timestream.remove + - Timestream.extend + - Timestream.record + - record + - subtitle: String + contents: + - Timestream.len + - Timestream.lower + - Timestream.upper + - Timestream.substring + - subtitle: Time + contents: + - Timestream.shift_by + - Timestream.shift_to + - Timestream.shift_until + - Timestream.time + - Timestream.seconds_since + - Timestream.seconds_since_previous + - title: Windows + package: kaskada.windows + options: + children: embedded + contents: + - Window + - Since + - Sliding + - Trailing + - Tumbling + - title: Sources + package: kaskada.sources + options: + children: embedded + contents: + # - Source + - CsvString + - JsonlString + - Pandas + - Parquet + - PyDict + # - package: kaskada.sources.source + # name: TimeUnit + - subtitle: Source + package: kaskada.sources + options: + children: flat + contents: + - Source + - source.TimeUnit + - title: Destinations + package: kaskada.destinations + options: + 
children: embedded + contents: + - Destination + - title: Execution + options: + children: embedded + contents: + - Execution + - ResultIterator + - title: Results + package: kaskada.results + options: + children: embedded + contents: + - History + - Snapshot diff --git a/python/docs/_scripts/builder.py b/python/docs/_scripts/builder.py new file mode 100644 index 000000000..b4453187c --- /dev/null +++ b/python/docs/_scripts/builder.py @@ -0,0 +1,292 @@ +from __future__ import annotations + +import logging +import sys +from pathlib import Path +from typing import Any + +from pydantic import ValidationError +from quartodoc import blueprint, collect, layout +from quartodoc.inventory import convert_inventory, create_inventory +from quartodoc.validation import fmt +from renderer import Renderer +from summarizer import Summarizer + + +# `preview()` can be used to help debug doc generation. +# use it on a `section` or `page` element to see a visual +# representation of the element contents. Use the `max_depth` +# named param to limit how much is returned to stdout. +# from quartodoc import preview + +_log = logging.getLogger("quartodoc") + + +class Builder: + """Base class for building API docs. + + Parameters + ---------- + package: str + The name of the package. + sections: ConfigSection + A list of sections, with items to document. + dir: + Name of API directory. + title: + Title of the API index page. + options: + Default options to set for all pieces of content (e.g. include_attributes). + rewrite_all_pages: + Whether to rewrite all rendered doc pages, or only those with changes. + source_dir: + A directory where source files to be documented live. This is only necessary + if you are not documenting a package, but collection of scripts. Use a "." + to refer to the current directory. + dynamic: + Whether to dynamically load all python objects. By default, objects are + loaded using static analysis. + parser: + Docstring parser to use. 
This corresponds to different docstring styles, + and can be one of "google", "sphinx", and "numpy". Defaults to "google". + + """ + + # misc config + out_inventory: str = "objects.json" + out_index: str = "index.qmd" + out_page_suffix = ".qmd" + + package: str + dir: str + title: str + + page_map: {str: layout.Page} + item_map: {str: layout.Item} + items: [layout.Item] + + blueprint: layout.Layout + + def __init__( + self, + package: str, + sections: "list[Any]" = tuple(), + dir: str = "reference", + title: str = "Function reference", + options: "dict | None" = None, + rewrite_all_pages=False, + source_dir: "str | None" = None, + dynamic: bool | None = None, + parser="google", + ): + self.layout = self.load_layout( + sections=sections, package=package, options=options + ) + + self.package = package + self.dir = dir + self.title = title + self.rewrite_all_pages = rewrite_all_pages + self.renderer = Renderer() + self.summarizer = Summarizer() + + if source_dir: + self.source_dir = str(Path(source_dir).absolute()) + sys.path.append(self.source_dir) + + self.blueprint = blueprint(self.layout, dynamic=dynamic, parser=parser) + + pages, items = collect(self.blueprint, base_dir=self.dir) + + self.page_map = {} + for page in pages: + self.page_map[page.path] = page + + self.items = [] + self.item_map = {} + for item in items: + self.item_map[item.name] = item + + def load_layout(self, sections: dict, package: str, options=None): + # TODO: currently returning the list of sections, to make work with + # previous code. We should make Layout a first-class citizen of the + # process. 
+ try: + return layout.Layout(sections=sections, package=package, options=options) + except ValidationError as e: + msg = "Configuration error for YAML:\n - " + errors = [fmt(err) for err in e.errors() if fmt(err)] + first_error = errors[ + 0 + ] # we only want to show one error at a time b/c it is confusing otherwise + msg += first_error + raise ValueError(msg) from None + + # building ---------------------------------------------------------------- + + def build(self): + """Build index page, sphinx inventory, and individual doc pages.""" + + # writing pages ---- + + _log.info("Writing pages") + self.write_pages() + + # inventory ---- + + _log.info("Creating inventory file") + inv = create_inventory(self.package, "0.0.9999", self.items) + convert_inventory(inv, self.out_inventory) + + def write_pages(self): + root = layout.Section( + title=self.title, + desc="This is the API Reference", + ) + + root_text = self.renderer.render(root) + root_path = Path(self.dir) / self.out_index + self.write_page_if_not_exists(root_path, root_text) + + last_title = None + order = 1 + + for section in self.blueprint.sections: + if section.title: + last_title = section.title + section_text = self.renderer.render(section, order=order) + order += 1 + location = Path(self.dir) / section.title + elif section.subtitle: + section_text = self.renderer.render(section) + location = Path(self.dir) / last_title / section.subtitle + + section_path = location / self.out_index + self.write_page_if_not_exists(section_path, section_text) + + is_flat = ( + section.options + and section.options.children == layout.ChoicesChildren.flat + ) + + for page in section.contents: + if isinstance(page, layout.Page): + # don't make separate pages for flat sections + if not is_flat: + _log.info(f"Rendering {page.path}") + # preview(page, max_depth=4) + page_text = self.renderer.render(page) + page_path = location / (page.path + self.out_page_suffix) + self.write_page_if_not_exists(page_path, page_text) + if 
page.path in self.page_map: + del self.page_map[page.path] + + self.update_page_items(page, location, is_flat) + else: + raise NotImplementedError(f"Unsupported section item: {type(page)}") + + if len(self.page_map.keys()) > 0: + _log.warning(f"Extra pages: {self.page_map.keys()}") + _log.error( + "Linking between pages may not work properly. Fix the issue and try again" + ) + + if len(self.item_map.keys()) > 0: + _log.warning(f"Extra items: {self.item_map.keys()}") + _log.error( + "Linking between pages may not work properly. Fix the issue and try again" + ) + + def update_page_items(self, page: layout.Page, location: Path, is_flat: bool): + for doc in page.contents: + if isinstance(doc, layout.Doc): + page_path = ( + f"{location}/index.html" + if is_flat + else f"{location}/{page.path}.html" + ) + self.update_items(doc, page_path) + else: + raise NotImplementedError(f"Unsupported page item: {type(doc)}") + + def update_items(self, doc: layout.Doc, page_path: str): + name = doc.obj.path + uri = f"{page_path}#{doc.anchor}" + + # item corresponding to the specified path ---- + # e.g. 
this might be a top-level import + if name in self.item_map: + item = self.item_map[name] + item.uri = uri + del self.item_map[name] + else: + item = layout.Item(uri=uri, name=name, obj=doc.obj, dispname=None) + _log.warning(f"Missing item, adding it: {item}") + self.items.append(item) + + canonical_path = doc.obj.canonical_path + if name != canonical_path: + # item corresponding to the canonical path ---- + # this is where the object is defined (which may be deep in a submodule) + if canonical_path in self.item_map: + item = self.item_map[canonical_path] + item.uri = uri + del self.item_map[canonical_path] + else: + item = layout.Item( + uri=uri, name=canonical_path, obj=doc.obj, dispname=name + ) + _log.warning(f"Missing item, adding it: {item}") + self.items.append(item) + + # recurse in 😊 + if isinstance(doc, layout.DocClass): + for member in doc.members: + self.update_items(member, page_path) + + def write_index_old(self, bp: layout.Layout): + """Write API index page.""" + + _log.info("Summarizing docs for index page.") + content = self.summarizer.summarize(bp) + _log.info(f"Writing index to directory: {self.dir}") + + final = f"# {self.title}\n\n{content}" + + p_index = Path(self.dir) / self.out_index + p_index.parent.mkdir(exist_ok=True, parents=True) + p_index.write_text(final) + + return str(p_index) + + def write_page_if_not_exists(self, path: Path, content): + if ( + self.rewrite_all_pages + or (not path.exists()) + or (path.read_text() != content) + ): + _log.info(f"Writing: {path}") + path.parent.mkdir(exist_ok=True, parents=True) + path.write_text(content) + else: + _log.info("Skipping write (content unchanged)") + + # constructors ---- + + @classmethod + def from_quarto_config(cls, quarto_cfg: "str | dict"): + """Construct a Builder from a configuration object (or yaml file).""" + + # TODO: validation / config model loading + if isinstance(quarto_cfg, str): + import yaml + + quarto_cfg = yaml.safe_load(open(quarto_cfg)) + + cfg = 
quarto_cfg.get("quartodoc") + if cfg is None: + raise KeyError("No `quartodoc:` section found in your _quarto.yml.") + + return Builder( + **{k: v for k, v in cfg.items()}, + ) diff --git a/python/docs/_scripts/gen_reference.py b/python/docs/_scripts/gen_reference.py new file mode 100644 index 000000000..8141ee44d --- /dev/null +++ b/python/docs/_scripts/gen_reference.py @@ -0,0 +1,20 @@ +import logging +import sys + +from builder import Builder + + +if __name__ == "__main__": + root = logging.getLogger("quartodoc") + root.setLevel(logging.INFO) + + handler = logging.StreamHandler(sys.stdout) + handler.setLevel(logging.INFO) + formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + ) + handler.setFormatter(formatter) + root.addHandler(handler) + + b = Builder.from_quarto_config("_reference.yml") + b.build() diff --git a/python/docs/_scripts/lint_reference.py b/python/docs/_scripts/lint_reference.py new file mode 100644 index 000000000..09f6b390d --- /dev/null +++ b/python/docs/_scripts/lint_reference.py @@ -0,0 +1,19 @@ +import logging +import sys + +from linter import Linter + + +if __name__ == "__main__": + root = logging.getLogger("quartodoc") + root.setLevel(logging.WARNING) + + handler = logging.StreamHandler(sys.stdout) + handler.setLevel(logging.INFO) + formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + ) + handler.setFormatter(formatter) + root.addHandler(handler) + + Linter.from_quarto_config("_reference.yml").lint() diff --git a/python/docs/_scripts/linter.py b/python/docs/_scripts/linter.py new file mode 100644 index 000000000..3d622f929 --- /dev/null +++ b/python/docs/_scripts/linter.py @@ -0,0 +1,120 @@ +from __future__ import annotations + +import logging +import sys +from pathlib import Path +from typing import Any + +from pydantic import ValidationError +from quartodoc import blueprint, collect, layout +from quartodoc.validation import fmt + + +_log = 
logging.getLogger("quartodoc") + + +def load_layout(sections: dict, package: str, options=None): + try: + return layout.Layout(sections=sections, package=package, options=options) + except ValidationError as e: + msg = "Configuration error for YAML:\n - " + errors = [fmt(err) for err in e.errors() if fmt(err)] + first_error = errors[ + 0 + ] # we only want to show one error at a time b/c it is confusing otherwise + msg += first_error + raise ValueError(msg) from None + + +class Linter: + """Base class for linting API docs. + + Parameters + ---------- + package: str + The name of the package. + sections: ConfigSection + A list of sections, with items to document. + options: + Default options to set for all pieces of content (e.g. include_attributes). + source_dir: + A directory where source files to be documented live. This is only necessary + if you are not documenting a package, but a collection of scripts. Use a "." + to refer to the current directory. + parser: + Docstring parser to use. This corresponds to different docstring styles, + and can be one of "google", "sphinx", and "numpy". Defaults to "google". 
+ + """ + + package: str + sections: list[Any] + options: dict | None + source_dir: str | None + parser: str + + def __init__( + self, + package: str, + sections: list[Any] = tuple(), + options: dict | None = None, + source_dir: str | None = None, + parser="google", + ): + self.package = package + self.sections = sections + self.options = options + self.parser = parser + + if source_dir: + self.source_dir = str(Path(source_dir).absolute()) + sys.path.append(self.source_dir) + + def get_items(self, use_sections: bool): + sections = self.sections if use_sections else [] + + layout = load_layout( + sections=sections, package=self.package, options=self.options + ) + + _, items = collect(blueprint(layout, parser=self.parser), base_dir="") + + return [item.name for item in items] + + def lint(self): + """Lints the config and lets you know about any missing items""" + + ref_items = self.get_items(True) + pkg_items = self.get_items(False) + + issue_count = 0 + for pkg_item in pkg_items: + if pkg_item not in ref_items: + _log.warning(f"Missing item: {pkg_item}") + issue_count += 1 + + if issue_count > 0: + _log.error("Encountered un-documented items. 
Please fix.") + sys.exit(1) + + @classmethod + def from_quarto_config(cls, quarto_cfg: "str | dict"): + """Construct a Builder from a configuration object (or yaml file).""" + + # TODO: validation / config model loading + if isinstance(quarto_cfg, str): + import yaml + + quarto_cfg = yaml.safe_load(open(quarto_cfg)) + + cfg = quarto_cfg.get("quartodoc") + if cfg is None: + raise KeyError("No `quartodoc:` section found in your _quarto.yml.") + + return Linter( + **{ + k: v + for k, v in cfg.items() + if k in ["package", "sections", "options", "parser"] + }, + ) diff --git a/python/docs/_scripts/renderer.py b/python/docs/_scripts/renderer.py new file mode 100644 index 000000000..84cb58e58 --- /dev/null +++ b/python/docs/_scripts/renderer.py @@ -0,0 +1,520 @@ +from __future__ import annotations + +from typing import Optional, Union + +import quartodoc.ast as qast +from griffe import dataclasses as dc +from griffe.docstrings import dataclasses as ds +from plum import dispatch +from quartodoc import layout +from summarizer import Summarizer +from tabulate import tabulate + + +try: + # Name and Expression were moved to expressions in v0.28 + from griffe import expressions as expr +except ImportError: + from griffe import dataclasses as expr + +skip_annotation_types = [ + "kaskada", + "kaskada.destinations", + "kaskada.results", + "kaskada.windows", + "pyarrow", +] + + +def _has_attr_section(el: dc.Docstring | None): + if el is None: + return False + + return any([isinstance(x, ds.DocstringSectionAttributes) for x in el.parsed]) + + +def escape(val: str): + return f"`{val}`" + + +def sanitize(val: str, allow_markdown=False): + # sanitize common tokens that break tables + res = val.replace("\n", " ").replace("|", "\\|") + + # sanitize elements that can get interpreted as markdown links + # or citations + if not allow_markdown: + return res.replace("[", "\\[").replace("]", "\\]") + + return res + + +class Renderer: + """Render docstrings to markdown.""" + + summarizer = 
Summarizer() + + def _get_display_name(self, el: "dc.Alias | dc.Object") -> str: + parts = el.path.split(".")[1:] + name = parts.pop() + prefix = ".".join(parts) if len(parts) > 0 else "kaskada" + display_name = f"**{prefix}.**[**{name}**]{{.red}}" + + if isinstance(el, dc.Object): + if "staticmethod" in el.labels: + display_name = "***static*** " + display_name + + text = [display_name] + + # if isinstance(el, dc.Object) and el.kind == dc.Kind.CLASS: + # text.append(f"Bases: []({el.parent.name})") + + return "\n\n".join(text) + + def _fetch_method_parameters(self, el: dc.Function): + if el.parent and el.parent.is_class and len(el.parameters) > 0: + if el.parameters[0].name in {"self", "cls"}: + return dc.Parameters(*list(el.parameters)[1:]) + + return el.parameters + + def _render_definition_list( + self, title: str, items: [str], title_class: Optional[str] = None + ) -> str: + rows = [title] + for item in items: + if len(rows) == 1: + rows.append(f"~ {item}") + else: + rows.append(f" {item}") + if title_class: + rows.insert(0, f":::{{.{title_class}}}") + rows.append(":::") + text = "\n\n".join(rows) + # fix extra indenting for nested definition lists + return text.replace("\n\n \n\n", "\n\n") + + def _render_header(self, title: str, order: Optional[int] = None) -> str: + text = ["---"] + text.append(f"title: {title}") + if order: + text.append(f"order: {order}") + text.append("---") + return "\n".join(text) + + def _render_table(self, rows, headers) -> str: + table = tabulate(rows, headers=headers, tablefmt="github") + + return table + + # render_annotation method -------------------------------------------------------- + + @dispatch + def render_annotation(self, el: str) -> str: # noqa: F811 + # hack to get Timestream in the correct format for the kaskada.Arg + # alias docs + if el == "'Timestream'": + return "[Timestream](`kaskada.Timestream`)" + return sanitize(el) + + @dispatch + def render_annotation(self, el: None) -> str: # noqa: F811 + return "" + + 
@dispatch + def render_annotation(self, el: expr.Name) -> str: # noqa: F811 + if el.full not in skip_annotation_types: + return f"[{sanitize(el.source)}](`{el.full}`)" + return "" + + @dispatch + def render_annotation(self, el: expr.Expression) -> str: # noqa: F811 + text = "".join(map(self.render_annotation, el)) + return text.lstrip(".") + + @dispatch + def render_annotation(self, el: dc.Attribute) -> str: # noqa: F811 + text = "".join(map(self.render_annotation, el.value)) + return text.lstrip(".") + + # signature method -------------------------------------------------------- + + @dispatch + def signature(self, el: layout.Doc) -> str: # noqa: F811 + return self.signature(el.obj) + + @dispatch + def signature( # noqa: F811 + self, el: dc.Alias, source: Optional[dc.Alias] = None + ) -> str: + """Return a string representation of an object's signature.""" + return self.signature(el.target, el) + + @dispatch + def signature( # noqa: F811 + self, el: dc.Function, source: Optional[dc.Alias] = None + ) -> str: + name = self._get_display_name(source or el) + pars = self.render(self._fetch_method_parameters(el)) + return f"{name}([{pars}]{{.bold-italic}})" + + @dispatch + def signature( # noqa: F811 + self, el: dc.Class, source: Optional[dc.Alias] = None + ) -> str: + name = self._get_display_name(source or el) + return f"***class*** {name}" + + @dispatch + def signature( # noqa: F811 + self, el: Union[dc.Module, dc.Attribute], source: Optional[dc.Alias] = None + ) -> str: + name = self._get_display_name(source or el) + return f"`{name}`" + + # render method ----------------------------------------------------------- + + @dispatch + def render(self, el) -> str: # noqa: F811 + """Return a string representation of an object, or layout element.""" + + raise NotImplementedError(f"Unsupported type: {type(el)}") + + @dispatch + def render(self, el: str) -> str: # noqa: F811 + return el + + # render layouts ========================================================== + + 
@dispatch + def render( # noqa: F811 + self, el: layout.Section, order: Optional[int] = None + ) -> str: + rows = [self._render_header(el.title or el.subtitle, order=order)] + + if el.desc: + rows.append(el.desc) + + if el.options and el.options.children == layout.ChoicesChildren.flat: + for page in el.contents: + rows.append(self.render(page, is_flat=True)) + text = "\n\n".join(rows) + else: + text = "\n\n".join(rows) + text += "\n\n" + self.summarizer.summarize(el.contents) + + return text + + @dispatch + def render(self, el: layout.Page, is_flat: bool = False) -> str: # noqa: F811 + rows = [] + if el.summary: + if el.summary.name: + if is_flat: + rows.append(f"## {el.summary.name}") + else: + rows.append(self._render_header(el.summary.name)) + if el.summary.desc: + rows.append(sanitize(el.summary.desc, allow_markdown=True)) + + for item in el.contents: + rows.append(self.render(item, is_flat=is_flat)) + + return "\n\n".join(rows) + + @dispatch + def render(self, el: layout.Doc) -> str: # noqa: F811 + raise NotImplementedError(f"Unsupported Doc type: {type(el)}") + + @dispatch + def render( # noqa: F811 + self, el: Union[layout.DocClass, layout.DocModule], is_flat: bool = False + ) -> str: + title = "" if is_flat else self._render_header(el.name) + + sig = self.signature(el) + body_rows = self.render(el.obj).split("\n") + + if el.members: + # add attributes + # skip if docstring has an attributes section + raw_attrs = [x for x in el.members if x.obj.is_attribute] + if raw_attrs and not _has_attr_section(el.obj.docstring): + attr_rows = map(self.render, raw_attrs) + attr_text = self._render_definition_list( + "Attributes:", attr_rows, title_class="highlight" + ) + body_rows.extend(attr_text.split("\n")) + + # add classes + for raw_class in el.members: + if raw_class.obj.is_class and isinstance(raw_class, layout.Doc): + body_rows.extend(self.render(raw_class, is_flat=True).split("\n")) + + # add methods + for raw_method in el.members: + if 
raw_method.obj.is_function and isinstance(raw_method, layout.Doc): + body_rows.extend(self.render(raw_method, is_flat=True).split("\n")) + + text = self._render_definition_list(sig, body_rows) + + return "\n\n".join([title, text]) + + @dispatch + def render( # noqa: F811 + self, el: layout.DocFunction, is_flat: bool = False + ) -> str: + title = "" if is_flat else self._render_header(el.name) + + sig = self.signature(el) + body_rows = self.render(el.obj).split("\n") + text = self._render_definition_list(sig, body_rows) + + return "\n\n".join([title, text]) + + @dispatch + def render( # noqa: F811 + self, el: layout.DocAttribute, is_flat: bool = False + ) -> str: + link = f"[{el.name}](#{el.anchor})" + description = self.summarizer.summarize(el.obj) + + # check for alias like "IntStr: TypeAlias = Optional[Union[int, str]]" + if isinstance(el.obj, dc.Alias) and el.obj.target and el.obj.target.value: + alias = f"alias of {self.render_annotation(el.obj.target)}" + return self._render_definition_list(title=link, items=[description, alias]) + + return " -- ".join([link, description]) + + # render griffe objects =================================================== + + @dispatch + def render(self, el: Union[dc.Object, dc.Alias]) -> str: # noqa: F811 + """Render high level objects representing functions, classes, etc..""" + + str_body = [] + if el.docstring is None: + pass + else: + patched_sections = qast.transform(el.docstring.parsed) + for section in patched_sections: + str_body.append(self.render(section)) + + parts = [*str_body] + + return "\n\n".join(parts) + + # signature parts ------------------------------------------------------------- + + @dispatch + def render(self, el: dc.Parameters) -> str: # noqa: F811 + # index for switch from positional to kw args (via an unnamed *) + try: + kw_only = [par.kind for par in el].index(dc.ParameterKind.keyword_only) + except ValueError: + kw_only = None + + # index for final positionly only args (via /) + try: + pos_only = max( 
+ [ + ii + for ii, el in enumerate(el) + if el.kind == dc.ParameterKind.positional_only + ] + ) + except ValueError: + pos_only = None + + pars = list(map(self.render, el)) + + # insert a single `*,` argument to represent the shift to kw only arguments, + # only if the shift to kw_only was not triggered by *args (var_positional) + if ( + kw_only is not None + and kw_only > 0 + and el[kw_only - 1].kind != dc.ParameterKind.var_positional + ): + pars.insert(kw_only, sanitize("*")) + + # insert a single `/, ` argument to represent shift from positional only arguments + # note that this must come before a single *, so it's okay that both this + # and block above insert into pars + if pos_only is not None: + pars.insert(pos_only + 1, sanitize("/")) + + return ", ".join(pars) + + @dispatch + def render(self, el: dc.Parameter) -> str: # noqa: F811 + splats = {dc.ParameterKind.var_keyword, dc.ParameterKind.var_positional} + has_default = el.default and el.kind not in splats + + if el.kind == dc.ParameterKind.var_keyword: + glob = "**" + elif el.kind == dc.ParameterKind.var_positional: + glob = "*" + else: + glob = "" + + name = sanitize(el.name) + + if has_default: + res = f"{glob}{name}={el.default}" + else: + res = f"{glob}{name}" + return res + + # docstring parts ------------------------------------------------------------- + + # text ---- + # note this can be a number of things. 
for example, opening docstring text, + # or a section with a header not included in the numpydoc standard + @dispatch + def render(self, el: ds.DocstringSectionText) -> str: # noqa: F811 + new_el = qast.transform(el) + if isinstance(new_el, ds.DocstringSectionText): + # ensures we don't recurse forever + return el.value + + return self.render(new_el) + + # parameters ---- + + @dispatch + def render(self, el: ds.DocstringSectionParameters) -> str: # noqa: F811 + # if more than one param, render as un-ordered list + prefix = "* " if len(el.value) > 1 else "" + follow = " " if len(el.value) > 1 else "" + + rows = [] + for param in el.value: + name = sanitize(param.name) + anno = self.render_annotation(param.annotation) + default = f", default: {escape(param.default)}" if param.default else "" + + rows.append(f"{prefix}**{name}** ({anno}{default})") + rows.append("") + for row in param.description.split("\n"): + rows.append(f"{follow}{row}") + rows.append("") + + return self._render_definition_list( + "Parameters:", rows, title_class="highlight" + ) + + # attributes ---- + + @dispatch + def render(self, el: ds.DocstringSectionAttributes) -> str: # noqa: F811 + # if more than one param, render as un-ordered list + prefix = "* " if len(el.value) > 1 else "" + follow = " " if len(el.value) > 1 else "" + + rows = [] + for attr in el.value: + name = sanitize(attr.name) + anno = self.render_annotation(attr.annotation) + rows.append(f"{prefix}**{name}** ({anno})") + rows.append("") + for row in attr.description.split("\n"): + rows.append(f"{follow}{row}") + rows.append("") + + return self._render_definition_list( + "Attributes:", rows, title_class="highlight" + ) + + # examples ---- + + @dispatch + def render(self, el: ds.DocstringSectionExamples) -> str: # noqa: F811 + # its value is a tuple: DocstringSectionKind["text" | "examples"], str + data = map(qast.transform, el.value) + return "\n\n".join(list(map(self.render, data))) + + @dispatch + def render(self, el: 
qast.ExampleCode): # noqa: F811 + return f"""```python +{el.value} +```""" + + @dispatch + def render(self, el: qast.ExampleText): # noqa: F811 + return el.value + + # returns ---- + + @dispatch + def render(self, el: ds.DocstringSectionReturns) -> str: # noqa: F811 + # if more than one param, render as un-ordered list + prefix = "* " if len(el.value) > 1 else "" + follow = " " if len(el.value) > 1 else "" + + rows = [] + for item in el.value: + title = prefix + name = sanitize(item.name) + if name: + title += f"**{name}**" + + return_type = self.render_annotation(item.annotation) + if return_type: + title += return_type + + if title != prefix: + rows.append(title) + + if item.description: + rows.append("") + for row in item.description.split("\n"): + rows.append(f"{follow}{row}") + rows.append("") + + return self._render_definition_list("Returns:", rows, title_class="highlight") + + @dispatch + def render(self, el: ds.DocstringSectionRaises) -> str: # noqa: F811 + # if more than one param, render as un-ordered list + prefix = "* " if len(el.value) > 1 else "" + follow = " " if len(el.value) > 1 else "" + + rows = [] + for item in el.value: + # name = sanitize(item.name) + anno = self.render_annotation(item.annotation) + rows.append(f"{prefix}{anno}") + rows.append("") + for row in item.description.split("\n"): + rows.append(f"{follow}{row}") + rows.append("") + + return self._render_definition_list("Raises:", rows, title_class="highlight") + + @dispatch + def render(self, el: ds.DocstringSectionAdmonition) -> str: # noqa: F811 + rows = [] + if el.title.lower().startswith("note"): + rows.append(f"::: {{.callout-note title={el.title!r}}}") + elif el.title.lower().startswith("warn"): + rows.append(f"::: {{.callout-warning title={el.title!r}}}") + else: + rows.append(f"::: {{.callout-tip title={el.title!r}}}") + + rows.append(sanitize(el.value.description, allow_markdown=True)) + rows.append(":::") + + return "\n".join(rows) + + # unsupported parts ---- + + 
@dispatch.multi( + (ds.DocstringAdmonition,), + (ds.DocstringDeprecated,), + (ds.DocstringWarn,), + (ds.DocstringYield,), + (ds.DocstringReceive,), + (ds.DocstringAttribute,), + ) + def render(self, el): # noqa: F811 + raise NotImplementedError(f"{type(el)}") diff --git a/python/docs/_scripts/summarizer.py b/python/docs/_scripts/summarizer.py new file mode 100644 index 000000000..015f81fb9 --- /dev/null +++ b/python/docs/_scripts/summarizer.py @@ -0,0 +1,118 @@ +from __future__ import annotations + +from typing import Optional, Union + +from griffe import dataclasses as dc +from griffe.docstrings import dataclasses as ds +from plum import dispatch +from quartodoc import layout + + +class Summarizer: + """Summarize docstrings to markdown.""" + + @staticmethod + def _summary_row(link, description): + return f"| {link} | {description} |" + + @dispatch + def summarize(self, el): # noqa: F811 + raise NotImplementedError(f"Unsupported type: {type(el)}") + + @dispatch + def summarize(self, el: layout.Layout): # noqa: F811 + rendered_sections = list(map(self.summarize, el.sections)) + return "\n\n".join(rendered_sections) + + @dispatch + def summarize(self, el: layout.Section): # noqa: F811 + desc = f"\n\n{el.desc}" if el.desc is not None else "" + if el.title is not None: + header = f"## {el.title}{desc}" + elif el.subtitle is not None: + header = f"### {el.subtitle}{desc}" + else: + header = "" + + if el.contents: + return f"{header}\n\n{self.summarize(el.contents)}" + + return header + + @dispatch + def summarize(self, contents: layout.ContentList): # noqa: F811 + thead = "| | |\n| --- | --- |" + + rendered = [] + for child in contents: + rendered.append(self.summarize(child)) + + return "\n".join([thead, *rendered]) + + @dispatch + def summarize(self, el: layout.Page): # noqa: F811 + if el.summary is not None: + # TODO: assumes that files end with .qmd + return self._summary_row( + f"[{el.summary.name}]({el.path}.qmd)", el.summary.desc + ) + + if len(el.contents) > 1 
and not el.flatten: + raise ValueError( + "Cannot summarize Page. Either set its `summary` attribute with name " + "and description details, or set `flatten` to True." + ) + + else: + rows = [self.summarize(entry, el.path) for entry in el.contents] + return "\n".join(rows) + + @dispatch + def summarize(self, el: layout.MemberPage): # noqa: F811 + # TODO: model should validate these only have a single entry + return self.summarize(el.contents[0], el.path, shorten=True) + + @dispatch + def summarize(self, el: layout.Interlaced, *args, **kwargs): # noqa: F811 + rows = [self.summarize(doc, *args, **kwargs) for doc in el.contents] + + return "\n".join(rows) + + @dispatch + def summarize( # noqa: F811 + self, el: layout.Doc, path: Optional[str] = None, shorten: bool = False + ): + # this is where summary page method links are created + if path is None: + link = f"[{el.name}](#{el.anchor})" + else: + # TODO: assumes that files end with .qmd + link = f"[{el.name}]({path}.qmd#{el.anchor})" + + description = self.summarize(el.obj) + return self._summary_row(link, description) + + @dispatch + def summarize(self, el: layout.Link): # noqa: F811 + description = self.summarize(el.obj) + return self._summary_row(f"[](`{el.name}`)", description) + + @dispatch + def summarize(self, obj: Union[dc.Object, dc.Alias]) -> str: # noqa: F811 + """Test""" + # get high-level description + doc = obj.docstring + if doc is None: + docstring_parts = [] + else: + docstring_parts = doc.parsed + + if len(docstring_parts) and isinstance( + docstring_parts[0], ds.DocstringSectionText + ): + description = docstring_parts[0].value + short = description.split("\n")[0] + + return short + + return "" diff --git a/python/docs/_static/css/design-style.css b/python/docs/_static/css/design-style.css new file mode 100644 index 000000000..d8fce0a7a --- /dev/null +++ b/python/docs/_static/css/design-style.css @@ -0,0 +1,3183 @@ +.sd-bg-primary { + background-color: var(--sd-color-primary) !important +} + 
+.sd-bg-text-primary { + color: var(--sd-color-primary-text) !important +} + +button.sd-bg-primary:focus,button.sd-bg-primary:hover { + background-color: var(--sd-color-primary-highlight) !important +} + +a.sd-bg-primary:focus,a.sd-bg-primary:hover { + background-color: var(--sd-color-primary-highlight) !important +} + +.sd-bg-secondary { + background-color: var(--sd-color-secondary) !important +} + +.sd-bg-text-secondary { + color: var(--sd-color-secondary-text) !important +} + +button.sd-bg-secondary:focus,button.sd-bg-secondary:hover { + background-color: var(--sd-color-secondary-highlight) !important +} + +a.sd-bg-secondary:focus,a.sd-bg-secondary:hover { + background-color: var(--sd-color-secondary-highlight) !important +} + +.sd-bg-success { + background-color: var(--sd-color-success) !important +} + +.sd-bg-text-success { + color: var(--sd-color-success-text) !important +} + +button.sd-bg-success:focus,button.sd-bg-success:hover { + background-color: var(--sd-color-success-highlight) !important +} + +a.sd-bg-success:focus,a.sd-bg-success:hover { + background-color: var(--sd-color-success-highlight) !important +} + +.sd-bg-info { + background-color: var(--sd-color-info) !important +} + +.sd-bg-text-info { + color: var(--sd-color-info-text) !important +} + +button.sd-bg-info:focus,button.sd-bg-info:hover { + background-color: var(--sd-color-info-highlight) !important +} + +a.sd-bg-info:focus,a.sd-bg-info:hover { + background-color: var(--sd-color-info-highlight) !important +} + +.sd-bg-warning { + background-color: var(--sd-color-warning) !important +} + +.sd-bg-text-warning { + color: var(--sd-color-warning-text) !important +} + +button.sd-bg-warning:focus,button.sd-bg-warning:hover { + background-color: var(--sd-color-warning-highlight) !important +} + +a.sd-bg-warning:focus,a.sd-bg-warning:hover { + background-color: var(--sd-color-warning-highlight) !important +} + +.sd-bg-danger { + background-color: var(--sd-color-danger) !important +} + 
+.sd-bg-text-danger { + color: var(--sd-color-danger-text) !important +} + +button.sd-bg-danger:focus,button.sd-bg-danger:hover { + background-color: var(--sd-color-danger-highlight) !important +} + +a.sd-bg-danger:focus,a.sd-bg-danger:hover { + background-color: var(--sd-color-danger-highlight) !important +} + +.sd-bg-light { + background-color: var(--sd-color-light) !important +} + +.sd-bg-text-light { + color: var(--sd-color-light-text) !important +} + +button.sd-bg-light:focus,button.sd-bg-light:hover { + background-color: var(--sd-color-light-highlight) !important +} + +a.sd-bg-light:focus,a.sd-bg-light:hover { + background-color: var(--sd-color-light-highlight) !important +} + +.sd-bg-muted { + background-color: var(--sd-color-muted) !important +} + +.sd-bg-text-muted { + color: var(--sd-color-muted-text) !important +} + +button.sd-bg-muted:focus,button.sd-bg-muted:hover { + background-color: var(--sd-color-muted-highlight) !important +} + +a.sd-bg-muted:focus,a.sd-bg-muted:hover { + background-color: var(--sd-color-muted-highlight) !important +} + +.sd-bg-dark { + background-color: var(--sd-color-dark) !important +} + +.sd-bg-text-dark { + color: var(--sd-color-dark-text) !important +} + +button.sd-bg-dark:focus,button.sd-bg-dark:hover { + background-color: var(--sd-color-dark-highlight) !important +} + +a.sd-bg-dark:focus,a.sd-bg-dark:hover { + background-color: var(--sd-color-dark-highlight) !important +} + +.sd-bg-black { + background-color: var(--sd-color-black) !important +} + +.sd-bg-text-black { + color: var(--sd-color-black-text) !important +} + +button.sd-bg-black:focus,button.sd-bg-black:hover { + background-color: var(--sd-color-black-highlight) !important +} + +a.sd-bg-black:focus,a.sd-bg-black:hover { + background-color: var(--sd-color-black-highlight) !important +} + +.sd-bg-white { + background-color: var(--sd-color-white) !important +} + +.sd-bg-text-white { + color: var(--sd-color-white-text) !important +} + 
+button.sd-bg-white:focus,button.sd-bg-white:hover { + background-color: var(--sd-color-white-highlight) !important +} + +a.sd-bg-white:focus,a.sd-bg-white:hover { + background-color: var(--sd-color-white-highlight) !important +} + +.sd-text-primary,.sd-text-primary>p { + color: var(--sd-color-primary) !important +} + +a.sd-text-primary:focus,a.sd-text-primary:hover { + color: var(--sd-color-primary-highlight) !important +} + +.sd-text-secondary,.sd-text-secondary>p { + color: var(--sd-color-secondary) !important +} + +a.sd-text-secondary:focus,a.sd-text-secondary:hover { + color: var(--sd-color-secondary-highlight) !important +} + +.sd-text-success,.sd-text-success>p { + color: var(--sd-color-success) !important +} + +a.sd-text-success:focus,a.sd-text-success:hover { + color: var(--sd-color-success-highlight) !important +} + +.sd-text-info,.sd-text-info>p { + color: var(--sd-color-info) !important +} + +a.sd-text-info:focus,a.sd-text-info:hover { + color: var(--sd-color-info-highlight) !important +} + +.sd-text-warning,.sd-text-warning>p { + color: var(--sd-color-warning) !important +} + +a.sd-text-warning:focus,a.sd-text-warning:hover { + color: var(--sd-color-warning-highlight) !important +} + +.sd-text-danger,.sd-text-danger>p { + color: var(--sd-color-danger) !important +} + +a.sd-text-danger:focus,a.sd-text-danger:hover { + color: var(--sd-color-danger-highlight) !important +} + +.sd-text-light,.sd-text-light>p { + color: var(--sd-color-light) !important +} + +a.sd-text-light:focus,a.sd-text-light:hover { + color: var(--sd-color-light-highlight) !important +} + +.sd-text-muted,.sd-text-muted>p { + color: var(--sd-color-muted) !important +} + +a.sd-text-muted:focus,a.sd-text-muted:hover { + color: var(--sd-color-muted-highlight) !important +} + +.sd-text-dark,.sd-text-dark>p { + color: var(--sd-color-dark) !important +} + +a.sd-text-dark:focus,a.sd-text-dark:hover { + color: var(--sd-color-dark-highlight) !important +} + +.sd-text-black,.sd-text-black>p { + 
color: var(--sd-color-black) !important +} + +a.sd-text-black:focus,a.sd-text-black:hover { + color: var(--sd-color-black-highlight) !important +} + +.sd-text-white,.sd-text-white>p { + color: var(--sd-color-white) !important +} + +a.sd-text-white:focus,a.sd-text-white:hover { + color: var(--sd-color-white-highlight) !important +} + +.sd-outline-primary { + border-color: var(--sd-color-primary) !important; + border-style: solid !important; + border-width: 1px !important +} + +a.sd-outline-primary:focus,a.sd-outline-primary:hover { + border-color: var(--sd-color-primary-highlight) !important +} + +.sd-outline-secondary { + border-color: var(--sd-color-secondary) !important; + border-style: solid !important; + border-width: 1px !important +} + +a.sd-outline-secondary:focus,a.sd-outline-secondary:hover { + border-color: var(--sd-color-secondary-highlight) !important +} + +.sd-outline-success { + border-color: var(--sd-color-success) !important; + border-style: solid !important; + border-width: 1px !important +} + +a.sd-outline-success:focus,a.sd-outline-success:hover { + border-color: var(--sd-color-success-highlight) !important +} + +.sd-outline-info { + border-color: var(--sd-color-info) !important; + border-style: solid !important; + border-width: 1px !important +} + +a.sd-outline-info:focus,a.sd-outline-info:hover { + border-color: var(--sd-color-info-highlight) !important +} + +.sd-outline-warning { + border-color: var(--sd-color-warning) !important; + border-style: solid !important; + border-width: 1px !important +} + +a.sd-outline-warning:focus,a.sd-outline-warning:hover { + border-color: var(--sd-color-warning-highlight) !important +} + +.sd-outline-danger { + border-color: var(--sd-color-danger) !important; + border-style: solid !important; + border-width: 1px !important +} + +a.sd-outline-danger:focus,a.sd-outline-danger:hover { + border-color: var(--sd-color-danger-highlight) !important +} + +.sd-outline-light { + border-color: var(--sd-color-light) 
!important; + border-style: solid !important; + border-width: 1px !important +} + +a.sd-outline-light:focus,a.sd-outline-light:hover { + border-color: var(--sd-color-light-highlight) !important +} + +.sd-outline-muted { + border-color: var(--sd-color-muted) !important; + border-style: solid !important; + border-width: 1px !important +} + +a.sd-outline-muted:focus,a.sd-outline-muted:hover { + border-color: var(--sd-color-muted-highlight) !important +} + +.sd-outline-dark { + border-color: var(--sd-color-dark) !important; + border-style: solid !important; + border-width: 1px !important +} + +a.sd-outline-dark:focus,a.sd-outline-dark:hover { + border-color: var(--sd-color-dark-highlight) !important +} + +.sd-outline-black { + border-color: var(--sd-color-black) !important; + border-style: solid !important; + border-width: 1px !important +} + +a.sd-outline-black:focus,a.sd-outline-black:hover { + border-color: var(--sd-color-black-highlight) !important +} + +.sd-outline-white { + border-color: var(--sd-color-white) !important; + border-style: solid !important; + border-width: 1px !important +} + +a.sd-outline-white:focus,a.sd-outline-white:hover { + border-color: var(--sd-color-white-highlight) !important +} + +.sd-bg-transparent { + background-color: transparent !important +} + +.sd-outline-transparent { + border-color: transparent !important +} + +.sd-text-transparent { + color: transparent !important +} + +.sd-p-0 { + padding: 0 !important +} + +.sd-pt-0,.sd-py-0 { + padding-top: 0 !important +} + +.sd-pr-0,.sd-px-0 { + padding-right: 0 !important +} + +.sd-pb-0,.sd-py-0 { + padding-bottom: 0 !important +} + +.sd-pl-0,.sd-px-0 { + padding-left: 0 !important +} + +.sd-p-1 { + padding: .25rem !important +} + +.sd-pt-1,.sd-py-1 { + padding-top: .25rem !important +} + +.sd-pr-1,.sd-px-1 { + padding-right: .25rem !important +} + +.sd-pb-1,.sd-py-1 { + padding-bottom: .25rem !important +} + +.sd-pl-1,.sd-px-1 { + padding-left: .25rem !important +} + +.sd-p-2 { + padding: 
.5rem !important +} + +.sd-pt-2,.sd-py-2 { + padding-top: .5rem !important +} + +.sd-pr-2,.sd-px-2 { + padding-right: .5rem !important +} + +.sd-pb-2,.sd-py-2 { + padding-bottom: .5rem !important +} + +.sd-pl-2,.sd-px-2 { + padding-left: .5rem !important +} + +.sd-p-3 { + padding: 1rem !important +} + +.sd-pt-3,.sd-py-3 { + padding-top: 1rem !important +} + +.sd-pr-3,.sd-px-3 { + padding-right: 1rem !important +} + +.sd-pb-3,.sd-py-3 { + padding-bottom: 1rem !important +} + +.sd-pl-3,.sd-px-3 { + padding-left: 1rem !important +} + +.sd-p-4 { + padding: 1.5rem !important +} + +.sd-pt-4,.sd-py-4 { + padding-top: 1.5rem !important +} + +.sd-pr-4,.sd-px-4 { + padding-right: 1.5rem !important +} + +.sd-pb-4,.sd-py-4 { + padding-bottom: 1.5rem !important +} + +.sd-pl-4,.sd-px-4 { + padding-left: 1.5rem !important +} + +.sd-p-5 { + padding: 3rem !important +} + +.sd-pt-5,.sd-py-5 { + padding-top: 3rem !important +} + +.sd-pr-5,.sd-px-5 { + padding-right: 3rem !important +} + +.sd-pb-5,.sd-py-5 { + padding-bottom: 3rem !important +} + +.sd-pl-5,.sd-px-5 { + padding-left: 3rem !important +} + +.sd-m-auto { + margin: auto !important +} + +.sd-mt-auto,.sd-my-auto { + margin-top: auto !important +} + +.sd-mr-auto,.sd-mx-auto { + margin-right: auto !important +} + +.sd-mb-auto,.sd-my-auto { + margin-bottom: auto !important +} + +.sd-ml-auto,.sd-mx-auto { + margin-left: auto !important +} + +.sd-m-0 { + margin: 0 !important +} + +.sd-mt-0,.sd-my-0 { + margin-top: 0 !important +} + +.sd-mr-0,.sd-mx-0 { + margin-right: 0 !important +} + +.sd-mb-0,.sd-my-0 { + margin-bottom: 0 !important +} + +.sd-ml-0,.sd-mx-0 { + margin-left: 0 !important +} + +.sd-m-1 { + margin: .25rem !important +} + +.sd-mt-1,.sd-my-1 { + margin-top: .25rem !important +} + +.sd-mr-1,.sd-mx-1 { + margin-right: .25rem !important +} + +.sd-mb-1,.sd-my-1 { + margin-bottom: .25rem !important +} + +.sd-ml-1,.sd-mx-1 { + margin-left: .25rem !important +} + +.sd-m-2 { + margin: .5rem !important +} + 
+.sd-mt-2,.sd-my-2 { + margin-top: .5rem !important +} + +.sd-mr-2,.sd-mx-2 { + margin-right: .5rem !important +} + +.sd-mb-2,.sd-my-2 { + margin-bottom: .5rem !important +} + +.sd-ml-2,.sd-mx-2 { + margin-left: .5rem !important +} + +.sd-m-3 { + margin: 1rem !important +} + +.sd-mt-3,.sd-my-3 { + margin-top: 1rem !important +} + +.sd-mr-3,.sd-mx-3 { + margin-right: 1rem !important +} + +.sd-mb-3,.sd-my-3 { + margin-bottom: 1rem !important +} + +.sd-ml-3,.sd-mx-3 { + margin-left: 1rem !important +} + +.sd-m-4 { + margin: 1.5rem !important +} + +.sd-mt-4,.sd-my-4 { + margin-top: 1.5rem !important +} + +.sd-mr-4,.sd-mx-4 { + margin-right: 1.5rem !important +} + +.sd-mb-4,.sd-my-4 { + margin-bottom: 1.5rem !important +} + +.sd-ml-4,.sd-mx-4 { + margin-left: 1.5rem !important +} + +.sd-m-5 { + margin: 3rem !important +} + +.sd-mt-5,.sd-my-5 { + margin-top: 3rem !important +} + +.sd-mr-5,.sd-mx-5 { + margin-right: 3rem !important +} + +.sd-mb-5,.sd-my-5 { + margin-bottom: 3rem !important +} + +.sd-ml-5,.sd-mx-5 { + margin-left: 3rem !important +} + +.sd-w-25 { + width: 25% !important +} + +.sd-w-50 { + width: 50% !important +} + +.sd-w-75 { + width: 75% !important +} + +.sd-w-100 { + width: 100% !important +} + +.sd-w-auto { + width: auto !important +} + +.sd-h-25 { + height: 25% !important +} + +.sd-h-50 { + height: 50% !important +} + +.sd-h-75 { + height: 75% !important +} + +.sd-h-100 { + height: 100% !important +} + +.sd-h-auto { + height: auto !important +} + +.sd-d-none { + display: none !important +} + +.sd-d-inline { + display: inline !important +} + +.sd-d-inline-block { + display: inline-block !important +} + +.sd-d-block { + display: block !important +} + +.sd-d-grid { + display: grid !important +} + +.sd-d-flex-row { + display: -ms-flexbox !important; + display: flex !important; + flex-direction: row !important +} + +.sd-d-flex-column { + display: -ms-flexbox !important; + display: flex !important; + flex-direction: column !important +} + +.sd-d-inline-flex 
{ + display: -ms-inline-flexbox !important; + display: inline-flex !important +} + +@media(min-width: 576px) { + .sd-d-sm-none { + display:none !important + } + + .sd-d-sm-inline { + display: inline !important + } + + .sd-d-sm-inline-block { + display: inline-block !important + } + + .sd-d-sm-block { + display: block !important + } + + .sd-d-sm-grid { + display: grid !important + } + + .sd-d-sm-flex { + display: -ms-flexbox !important; + display: flex !important + } + + .sd-d-sm-inline-flex { + display: -ms-inline-flexbox !important; + display: inline-flex !important + } +} + +@media(min-width: 768px) { + .sd-d-md-none { + display:none !important + } + + .sd-d-md-inline { + display: inline !important + } + + .sd-d-md-inline-block { + display: inline-block !important + } + + .sd-d-md-block { + display: block !important + } + + .sd-d-md-grid { + display: grid !important + } + + .sd-d-md-flex { + display: -ms-flexbox !important; + display: flex !important + } + + .sd-d-md-inline-flex { + display: -ms-inline-flexbox !important; + display: inline-flex !important + } +} + +@media(min-width: 992px) { + .sd-d-lg-none { + display:none !important + } + + .sd-d-lg-inline { + display: inline !important + } + + .sd-d-lg-inline-block { + display: inline-block !important + } + + .sd-d-lg-block { + display: block !important + } + + .sd-d-lg-grid { + display: grid !important + } + + .sd-d-lg-flex { + display: -ms-flexbox !important; + display: flex !important + } + + .sd-d-lg-inline-flex { + display: -ms-inline-flexbox !important; + display: inline-flex !important + } +} + +@media(min-width: 1200px) { + .sd-d-xl-none { + display:none !important + } + + .sd-d-xl-inline { + display: inline !important + } + + .sd-d-xl-inline-block { + display: inline-block !important + } + + .sd-d-xl-block { + display: block !important + } + + .sd-d-xl-grid { + display: grid !important + } + + .sd-d-xl-flex { + display: -ms-flexbox !important; + display: flex !important + } + + .sd-d-xl-inline-flex { 
+ display: -ms-inline-flexbox !important; + display: inline-flex !important + } +} + +.sd-align-major-start { + justify-content: flex-start !important +} + +.sd-align-major-end { + justify-content: flex-end !important +} + +.sd-align-major-center { + justify-content: center !important +} + +.sd-align-major-justify { + justify-content: space-between !important +} + +.sd-align-major-spaced { + justify-content: space-evenly !important +} + +.sd-align-minor-start { + align-items: flex-start !important +} + +.sd-align-minor-end { + align-items: flex-end !important +} + +.sd-align-minor-center { + align-items: center !important +} + +.sd-align-minor-stretch { + align-items: stretch !important +} + +.sd-text-justify { + text-align: justify !important +} + +.sd-text-left { + text-align: left !important +} + +.sd-text-right { + text-align: right !important +} + +.sd-text-center { + text-align: center !important +} + +.sd-font-weight-light { + font-weight: 300 !important +} + +.sd-font-weight-lighter { + font-weight: lighter !important +} + +.sd-font-weight-normal { + font-weight: 400 !important +} + +.sd-font-weight-bold { + font-weight: 700 !important +} + +.sd-font-weight-bolder { + font-weight: bolder !important +} + +.sd-font-italic { + font-style: italic !important +} + +.sd-text-decoration-none { + text-decoration: none !important +} + +.sd-text-lowercase { + text-transform: lowercase !important +} + +.sd-text-uppercase { + text-transform: uppercase !important +} + +.sd-text-capitalize { + text-transform: capitalize !important +} + +.sd-text-wrap { + white-space: normal !important +} + +.sd-text-nowrap { + white-space: nowrap !important +} + +.sd-text-truncate { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap +} + +.sd-fs-1,.sd-fs-1>p { + font-size: calc(1.375rem + 1.5vw) !important; + line-height: unset !important +} + +.sd-fs-2,.sd-fs-2>p { + font-size: calc(1.325rem + 0.9vw) !important; + line-height: unset !important +} + +.sd-fs-3,.sd-fs-3>p 
{ + font-size: calc(1.3rem + 0.6vw) !important; + line-height: unset !important +} + +.sd-fs-4,.sd-fs-4>p { + font-size: calc(1.275rem + 0.3vw) !important; + line-height: unset !important +} + +.sd-fs-5,.sd-fs-5>p { + font-size: 1.25rem !important; + line-height: unset !important +} + +.sd-fs-6,.sd-fs-6>p { + font-size: 1rem !important; + line-height: unset !important +} + +.sd-border-0 { + border: 0 solid !important +} + +.sd-border-top-0 { + border-top: 0 solid !important +} + +.sd-border-bottom-0 { + border-bottom: 0 solid !important +} + +.sd-border-right-0 { + border-right: 0 solid !important +} + +.sd-border-left-0 { + border-left: 0 solid !important +} + +.sd-border-1 { + border: 1px solid !important +} + +.sd-border-top-1 { + border-top: 1px solid !important +} + +.sd-border-bottom-1 { + border-bottom: 1px solid !important +} + +.sd-border-right-1 { + border-right: 1px solid !important +} + +.sd-border-left-1 { + border-left: 1px solid !important +} + +.sd-border-2 { + border: 2px solid !important +} + +.sd-border-top-2 { + border-top: 2px solid !important +} + +.sd-border-bottom-2 { + border-bottom: 2px solid !important +} + +.sd-border-right-2 { + border-right: 2px solid !important +} + +.sd-border-left-2 { + border-left: 2px solid !important +} + +.sd-border-3 { + border: 3px solid !important +} + +.sd-border-top-3 { + border-top: 3px solid !important +} + +.sd-border-bottom-3 { + border-bottom: 3px solid !important +} + +.sd-border-right-3 { + border-right: 3px solid !important +} + +.sd-border-left-3 { + border-left: 3px solid !important +} + +.sd-border-4 { + border: 4px solid !important +} + +.sd-border-top-4 { + border-top: 4px solid !important +} + +.sd-border-bottom-4 { + border-bottom: 4px solid !important +} + +.sd-border-right-4 { + border-right: 4px solid !important +} + +.sd-border-left-4 { + border-left: 4px solid !important +} + +.sd-border-5 { + border: 5px solid !important +} + +.sd-border-top-5 { + border-top: 5px solid !important +} + 
+.sd-border-bottom-5 { + border-bottom: 5px solid !important +} + +.sd-border-right-5 { + border-right: 5px solid !important +} + +.sd-border-left-5 { + border-left: 5px solid !important +} + +.sd-rounded-0 { + border-radius: 0 !important +} + +.sd-rounded-1 { + border-radius: .2rem !important +} + +.sd-rounded-2 { + border-radius: .3rem !important +} + +.sd-rounded-3 { + border-radius: .5rem !important +} + +.sd-rounded-pill { + border-radius: 50rem !important +} + +.sd-rounded-circle { + border-radius: 50% !important +} + +.shadow-none { + box-shadow: none !important +} + +.sd-shadow-sm { + box-shadow: 0 .125rem .25rem var(--sd-color-shadow) !important +} + +.sd-shadow-md { + box-shadow: 0 .5rem 1rem var(--sd-color-shadow) !important +} + +.sd-shadow-lg { + box-shadow: 0 1rem 3rem var(--sd-color-shadow) !important +} + +@keyframes sd-slide-from-left { + 0% { + transform: translateX(-100%) + } + + 100% { + transform: translateX(0) + } +} + +@keyframes sd-slide-from-right { + 0% { + transform: translateX(200%) + } + + 100% { + transform: translateX(0) + } +} + +@keyframes sd-grow100 { + 0% { + transform: scale(0); + opacity: .5 + } + + 100% { + transform: scale(1); + opacity: 1 + } +} + +@keyframes sd-grow50 { + 0% { + transform: scale(0.5); + opacity: .5 + } + + 100% { + transform: scale(1); + opacity: 1 + } +} + +@keyframes sd-grow50-rot20 { + 0% { + transform: scale(0.5) rotateZ(-20deg); + opacity: .5 + } + + 75% { + transform: scale(1) rotateZ(5deg); + opacity: 1 + } + + 95% { + transform: scale(1) rotateZ(-1deg); + opacity: 1 + } + + 100% { + transform: scale(1) rotateZ(0); + opacity: 1 + } +} + +.sd-animate-slide-from-left { + animation: 1s ease-out 0s 1 normal none running sd-slide-from-left +} + +.sd-animate-slide-from-right { + animation: 1s ease-out 0s 1 normal none running sd-slide-from-right +} + +.sd-animate-grow100 { + animation: 1s ease-out 0s 1 normal none running sd-grow100 +} + +.sd-animate-grow50 { + animation: 1s ease-out 0s 1 normal none 
running sd-grow50 +} + +.sd-animate-grow50-rot20 { + animation: 1s ease-out 0s 1 normal none running sd-grow50-rot20 +} + +.sd-badge { + display: inline-block; + padding: .35em .65em; + font-size: .75em; + font-weight: 700; + line-height: 1; + text-align: center; + white-space: nowrap; + vertical-align: baseline; + border-radius: .25rem +} + +.sd-badge:empty { + display: none +} + +a.sd-badge { + text-decoration: none +} + +.sd-btn .sd-badge { + position: relative; + top: -1px +} + +.sd-btn { + background-color: transparent; + border: 1px solid transparent; + border-radius: .25rem; + cursor: pointer; + display: inline-block; + font-weight: 400; + font-size: 1rem; + line-height: 1.5; + padding: .375rem .75rem; + text-align: center; + text-decoration: none; + transition: color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out; + vertical-align: middle; + user-select: none; + -moz-user-select: none; + -ms-user-select: none; + -webkit-user-select: none +} + +.sd-btn:hover { + text-decoration: none +} + +@media(prefers-reduced-motion: reduce) { + .sd-btn { + transition: none + } +} + +.sd-btn-primary,.sd-btn-outline-primary:hover,.sd-btn-outline-primary:focus { + color: var(--sd-color-primary-text) !important; + background-color: var(--sd-color-primary) !important; + border-color: var(--sd-color-primary) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-primary:hover,.sd-btn-primary:focus { + color: var(--sd-color-primary-text) !important; + background-color: var(--sd-color-primary-highlight) !important; + border-color: var(--sd-color-primary-highlight) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-outline-primary { + color: var(--sd-color-primary) !important; + border-color: var(--sd-color-primary) !important; + border-width: 1px !important; + border-style: solid !important +} + 
+.sd-btn-secondary,.sd-btn-outline-secondary:hover,.sd-btn-outline-secondary:focus { + color: var(--sd-color-secondary-text) !important; + background-color: var(--sd-color-secondary) !important; + border-color: var(--sd-color-secondary) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-secondary:hover,.sd-btn-secondary:focus { + color: var(--sd-color-secondary-text) !important; + background-color: var(--sd-color-secondary-highlight) !important; + border-color: var(--sd-color-secondary-highlight) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-outline-secondary { + color: var(--sd-color-secondary) !important; + border-color: var(--sd-color-secondary) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-success,.sd-btn-outline-success:hover,.sd-btn-outline-success:focus { + color: var(--sd-color-success-text) !important; + background-color: var(--sd-color-success) !important; + border-color: var(--sd-color-success) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-success:hover,.sd-btn-success:focus { + color: var(--sd-color-success-text) !important; + background-color: var(--sd-color-success-highlight) !important; + border-color: var(--sd-color-success-highlight) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-outline-success { + color: var(--sd-color-success) !important; + border-color: var(--sd-color-success) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-info,.sd-btn-outline-info:hover,.sd-btn-outline-info:focus { + color: var(--sd-color-info-text) !important; + background-color: var(--sd-color-info) !important; + border-color: var(--sd-color-info) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-info:hover,.sd-btn-info:focus { + color: var(--sd-color-info-text) !important; + 
background-color: var(--sd-color-info-highlight) !important; + border-color: var(--sd-color-info-highlight) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-outline-info { + color: var(--sd-color-info) !important; + border-color: var(--sd-color-info) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-warning,.sd-btn-outline-warning:hover,.sd-btn-outline-warning:focus { + color: var(--sd-color-warning-text) !important; + background-color: var(--sd-color-warning) !important; + border-color: var(--sd-color-warning) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-warning:hover,.sd-btn-warning:focus { + color: var(--sd-color-warning-text) !important; + background-color: var(--sd-color-warning-highlight) !important; + border-color: var(--sd-color-warning-highlight) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-outline-warning { + color: var(--sd-color-warning) !important; + border-color: var(--sd-color-warning) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-danger,.sd-btn-outline-danger:hover,.sd-btn-outline-danger:focus { + color: var(--sd-color-danger-text) !important; + background-color: var(--sd-color-danger) !important; + border-color: var(--sd-color-danger) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-danger:hover,.sd-btn-danger:focus { + color: var(--sd-color-danger-text) !important; + background-color: var(--sd-color-danger-highlight) !important; + border-color: var(--sd-color-danger-highlight) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-outline-danger { + color: var(--sd-color-danger) !important; + border-color: var(--sd-color-danger) !important; + border-width: 1px !important; + border-style: solid !important +} + 
+.sd-btn-light,.sd-btn-outline-light:hover,.sd-btn-outline-light:focus { + color: var(--sd-color-light-text) !important; + background-color: var(--sd-color-light) !important; + border-color: var(--sd-color-light) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-light:hover,.sd-btn-light:focus { + color: var(--sd-color-light-text) !important; + background-color: var(--sd-color-light-highlight) !important; + border-color: var(--sd-color-light-highlight) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-outline-light { + color: var(--sd-color-light) !important; + border-color: var(--sd-color-light) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-muted,.sd-btn-outline-muted:hover,.sd-btn-outline-muted:focus { + color: var(--sd-color-muted-text) !important; + background-color: var(--sd-color-muted) !important; + border-color: var(--sd-color-muted) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-muted:hover,.sd-btn-muted:focus { + color: var(--sd-color-muted-text) !important; + background-color: var(--sd-color-muted-highlight) !important; + border-color: var(--sd-color-muted-highlight) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-outline-muted { + color: var(--sd-color-muted) !important; + border-color: var(--sd-color-muted) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-dark,.sd-btn-outline-dark:hover,.sd-btn-outline-dark:focus { + color: var(--sd-color-dark-text) !important; + background-color: var(--sd-color-dark) !important; + border-color: var(--sd-color-dark) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-dark:hover,.sd-btn-dark:focus { + color: var(--sd-color-dark-text) !important; + background-color: var(--sd-color-dark-highlight) !important; + border-color: 
var(--sd-color-dark-highlight) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-outline-dark { + color: var(--sd-color-dark) !important; + border-color: var(--sd-color-dark) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-black,.sd-btn-outline-black:hover,.sd-btn-outline-black:focus { + color: var(--sd-color-black-text) !important; + background-color: var(--sd-color-black) !important; + border-color: var(--sd-color-black) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-black:hover,.sd-btn-black:focus { + color: var(--sd-color-black-text) !important; + background-color: var(--sd-color-black-highlight) !important; + border-color: var(--sd-color-black-highlight) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-outline-black { + color: var(--sd-color-black) !important; + border-color: var(--sd-color-black) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-white,.sd-btn-outline-white:hover,.sd-btn-outline-white:focus { + color: var(--sd-color-white-text) !important; + background-color: var(--sd-color-white) !important; + border-color: var(--sd-color-white) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-white:hover,.sd-btn-white:focus { + color: var(--sd-color-white-text) !important; + background-color: var(--sd-color-white-highlight) !important; + border-color: var(--sd-color-white-highlight) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-btn-outline-white { + color: var(--sd-color-white) !important; + border-color: var(--sd-color-white) !important; + border-width: 1px !important; + border-style: solid !important +} + +.sd-stretched-link::after { + position: absolute; + top: 0; + right: 0; + bottom: 0; + left: 0; + z-index: 1; + content: "" +} + +.sd-hide-link-text { + font-size: 
0 +} + +.sd-octicon,.sd-material-icon { + display: inline-block; + fill: currentColor; + vertical-align: middle +} + +.sd-avatar-xs { + border-radius: 50%; + object-fit: cover; + object-position: center; + width: 1rem; + height: 1rem +} + +.sd-avatar-sm { + border-radius: 50%; + object-fit: cover; + object-position: center; + width: 3rem; + height: 3rem +} + +.sd-avatar-md { + border-radius: 50%; + object-fit: cover; + object-position: center; + width: 5rem; + height: 5rem +} + +.sd-avatar-lg { + border-radius: 50%; + object-fit: cover; + object-position: center; + width: 7rem; + height: 7rem +} + +.sd-avatar-xl { + border-radius: 50%; + object-fit: cover; + object-position: center; + width: 10rem; + height: 10rem +} + +.sd-avatar-inherit { + border-radius: 50%; + object-fit: cover; + object-position: center; + width: inherit; + height: inherit +} + +.sd-avatar-initial { + border-radius: 50%; + object-fit: cover; + object-position: center; + width: initial; + height: initial +} + +.sd-card { + background-clip: border-box; + background-color: var(--sd-color-card-background); + border: 1px solid var(--sd-color-card-border); + border-radius: .25rem; + color: var(--sd-color-card-text); + display: -ms-flexbox; + display: flex; + -ms-flex-direction: column; + flex-direction: column; + min-width: 0; + position: relative; + word-wrap: break-word +} + +.sd-card>hr { + margin-left: 0; + margin-right: 0 +} + +.sd-card-hover:hover { + border-color: var(--sd-color-card-border-hover); + transform: scale(1.01) +} + +.sd-card-body { + -ms-flex: 1 1 auto; + flex: 1 1 auto; + padding: 1rem 1rem +} + +.sd-card-title { + margin-bottom: .5rem +} + +.sd-card-subtitle { + margin-top: -0.25rem; + margin-bottom: 0 +} + +.sd-card-text:last-child { + margin-bottom: 0 +} + +.sd-card-link:hover { + text-decoration: none +} + +.sd-card-link+.card-link { + margin-left: 1rem +} + +.sd-card-header { + padding: .5rem 1rem; + margin-bottom: 0; + background-color: var(--sd-color-card-header); + 
border-bottom: 1px solid var(--sd-color-card-border) +} + +.sd-card-header:first-child { + border-radius: calc(0.25rem - 1px) calc(0.25rem - 1px) 0 0 +} + +.sd-card-footer { + padding: .5rem 1rem; + background-color: var(--sd-color-card-footer); + border-top: 1px solid var(--sd-color-card-border) +} + +.sd-card-footer:last-child { + border-radius: 0 0 calc(0.25rem - 1px) calc(0.25rem - 1px) +} + +.sd-card-header-tabs { + margin-right: -0.5rem; + margin-bottom: -0.5rem; + margin-left: -0.5rem; + border-bottom: 0 +} + +.sd-card-header-pills { + margin-right: -0.5rem; + margin-left: -0.5rem +} + +.sd-card-img-overlay { + position: absolute; + top: 0; + right: 0; + bottom: 0; + left: 0; + padding: 1rem; + border-radius: calc(0.25rem - 1px) +} + +.sd-card-img,.sd-card-img-bottom,.sd-card-img-top { + width: 100% +} + +.sd-card-img,.sd-card-img-top { + border-top-left-radius: calc(0.25rem - 1px); + border-top-right-radius: calc(0.25rem - 1px) +} + +.sd-card-img,.sd-card-img-bottom { + border-bottom-left-radius: calc(0.25rem - 1px); + border-bottom-right-radius: calc(0.25rem - 1px) +} + +.sd-cards-carousel { + width: 100%; + display: flex; + flex-wrap: nowrap; + -ms-flex-direction: row; + flex-direction: row; + overflow-x: hidden; + scroll-snap-type: x mandatory +} + +.sd-cards-carousel.sd-show-scrollbar { + overflow-x: auto +} + +.sd-cards-carousel:hover,.sd-cards-carousel:focus { + overflow-x: auto +} + +.sd-cards-carousel>.sd-card { + flex-shrink: 0; + scroll-snap-align: start +} + +.sd-cards-carousel>.sd-card:not(:last-child) { + margin-right: 3px +} + +.sd-card-cols-1>.sd-card { + width: 90% +} + +.sd-card-cols-2>.sd-card { + width: 45% +} + +.sd-card-cols-3>.sd-card { + width: 30% +} + +.sd-card-cols-4>.sd-card { + width: 22.5% +} + +.sd-card-cols-5>.sd-card { + width: 18% +} + +.sd-card-cols-6>.sd-card { + width: 15% +} + +.sd-card-cols-7>.sd-card { + width: 12.8571428571% +} + +.sd-card-cols-8>.sd-card { + width: 11.25% +} + +.sd-card-cols-9>.sd-card { + width: 10% 
+} + +.sd-card-cols-10>.sd-card { + width: 9% +} + +.sd-card-cols-11>.sd-card { + width: 8.1818181818% +} + +.sd-card-cols-12>.sd-card { + width: 7.5% +} + +.sd-container,.sd-container-fluid,.sd-container-lg,.sd-container-md,.sd-container-sm,.sd-container-xl { + margin-left: auto; + margin-right: auto; + padding-left: var(--sd-gutter-x, 0.75rem); + padding-right: var(--sd-gutter-x, 0.75rem); + width: 100% +} + +@media(min-width: 576px) { + .sd-container-sm,.sd-container { + max-width:540px + } +} + +@media(min-width: 768px) { + .sd-container-md,.sd-container-sm,.sd-container { + max-width:720px + } +} + +@media(min-width: 992px) { + .sd-container-lg,.sd-container-md,.sd-container-sm,.sd-container { + max-width:960px + } +} + +@media(min-width: 1200px) { + .sd-container-xl,.sd-container-lg,.sd-container-md,.sd-container-sm,.sd-container { + max-width:1140px + } +} + +.sd-row { + --sd-gutter-x: 1.5rem; + --sd-gutter-y: 0; + display: -ms-flexbox; + display: flex; + -ms-flex-wrap: wrap; + flex-wrap: wrap; + margin-top: calc(var(--sd-gutter-y) * -1); + margin-right: calc(var(--sd-gutter-x) * -0.5); + margin-left: calc(var(--sd-gutter-x) * -0.5) +} + +.sd-row>* { + box-sizing: border-box; + flex-shrink: 0; + width: 100%; + max-width: 100%; + padding-right: calc(var(--sd-gutter-x) * 0.5); + padding-left: calc(var(--sd-gutter-x) * 0.5); + margin-top: var(--sd-gutter-y) +} + +.sd-col { + flex: 1 0 0%; + -ms-flex: 1 0 0% +} + +.sd-row-cols-auto>* { + flex: 0 0 auto; + width: auto +} + +.sd-row-cols-1>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 100% +} + +.sd-row-cols-2>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 50% +} + +.sd-row-cols-3>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 33.3333333333% +} + +.sd-row-cols-4>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 25% +} + +.sd-row-cols-5>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 20% +} + +.sd-row-cols-6>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 16.6666666667% +} + 
+.sd-row-cols-7>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 14.2857142857% +} + +.sd-row-cols-8>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 12.5% +} + +.sd-row-cols-9>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 11.1111111111% +} + +.sd-row-cols-10>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 10% +} + +.sd-row-cols-11>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 9.0909090909% +} + +.sd-row-cols-12>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 8.3333333333% +} + +@media(min-width: 576px) { + .sd-col-sm { + flex:1 0 0%; + -ms-flex: 1 0 0% + } + + .sd-row-cols-sm-auto { + flex: 1 0 auto; + -ms-flex: 1 0 auto; + width: 100% + } + + .sd-row-cols-sm-1>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 100% + } + + .sd-row-cols-sm-2>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 50% + } + + .sd-row-cols-sm-3>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 33.3333333333% + } + + .sd-row-cols-sm-4>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 25% + } + + .sd-row-cols-sm-5>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 20% + } + + .sd-row-cols-sm-6>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 16.6666666667% + } + + .sd-row-cols-sm-7>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 14.2857142857% + } + + .sd-row-cols-sm-8>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 12.5% + } + + .sd-row-cols-sm-9>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 11.1111111111% + } + + .sd-row-cols-sm-10>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 10% + } + + .sd-row-cols-sm-11>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 9.0909090909% + } + + .sd-row-cols-sm-12>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 8.3333333333% + } +} + +@media(min-width: 768px) { + .sd-col-md { + flex:1 0 0%; + -ms-flex: 1 0 0% + } + + .sd-row-cols-md-auto { + flex: 1 0 auto; + -ms-flex: 1 0 auto; + width: 100% + } + + .sd-row-cols-md-1>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 100% + 
} + + .sd-row-cols-md-2>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 50% + } + + .sd-row-cols-md-3>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 33.3333333333% + } + + .sd-row-cols-md-4>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 25% + } + + .sd-row-cols-md-5>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 20% + } + + .sd-row-cols-md-6>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 16.6666666667% + } + + .sd-row-cols-md-7>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 14.2857142857% + } + + .sd-row-cols-md-8>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 12.5% + } + + .sd-row-cols-md-9>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 11.1111111111% + } + + .sd-row-cols-md-10>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 10% + } + + .sd-row-cols-md-11>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 9.0909090909% + } + + .sd-row-cols-md-12>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 8.3333333333% + } +} + +@media(min-width: 992px) { + .sd-col-lg { + flex:1 0 0%; + -ms-flex: 1 0 0% + } + + .sd-row-cols-lg-auto { + flex: 1 0 auto; + -ms-flex: 1 0 auto; + width: 100% + } + + .sd-row-cols-lg-1>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 100% + } + + .sd-row-cols-lg-2>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 50% + } + + .sd-row-cols-lg-3>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 33.3333333333% + } + + .sd-row-cols-lg-4>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 25% + } + + .sd-row-cols-lg-5>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 20% + } + + .sd-row-cols-lg-6>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 16.6666666667% + } + + .sd-row-cols-lg-7>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 14.2857142857% + } + + .sd-row-cols-lg-8>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 12.5% + } + + .sd-row-cols-lg-9>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 11.1111111111% + } + + .sd-row-cols-lg-10>* { + flex: 0 0 auto; + 
-ms-flex: 0 0 auto; + width: 10% + } + + .sd-row-cols-lg-11>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 9.0909090909% + } + + .sd-row-cols-lg-12>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 8.3333333333% + } +} + +@media(min-width: 1200px) { + .sd-col-xl { + flex:1 0 0%; + -ms-flex: 1 0 0% + } + + .sd-row-cols-xl-auto { + flex: 1 0 auto; + -ms-flex: 1 0 auto; + width: 100% + } + + .sd-row-cols-xl-1>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 100% + } + + .sd-row-cols-xl-2>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 50% + } + + .sd-row-cols-xl-3>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 33.3333333333% + } + + .sd-row-cols-xl-4>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 25% + } + + .sd-row-cols-xl-5>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 20% + } + + .sd-row-cols-xl-6>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 16.6666666667% + } + + .sd-row-cols-xl-7>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 14.2857142857% + } + + .sd-row-cols-xl-8>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 12.5% + } + + .sd-row-cols-xl-9>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 11.1111111111% + } + + .sd-row-cols-xl-10>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 10% + } + + .sd-row-cols-xl-11>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 9.0909090909% + } + + .sd-row-cols-xl-12>* { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 8.3333333333% + } +} + +.sd-col-auto { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: auto +} + +.sd-col-1 { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 8.3333333333% +} + +.sd-col-2 { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 16.6666666667% +} + +.sd-col-3 { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 25% +} + +.sd-col-4 { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 33.3333333333% +} + +.sd-col-5 { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 41.6666666667% +} + +.sd-col-6 { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + 
width: 50% +} + +.sd-col-7 { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 58.3333333333% +} + +.sd-col-8 { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 66.6666666667% +} + +.sd-col-9 { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 75% +} + +.sd-col-10 { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 83.3333333333% +} + +.sd-col-11 { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 91.6666666667% +} + +.sd-col-12 { + flex: 0 0 auto; + -ms-flex: 0 0 auto; + width: 100% +} + +.sd-g-0,.sd-gy-0 { + --sd-gutter-y: 0 +} + +.sd-g-0,.sd-gx-0 { + --sd-gutter-x: 0 +} + +.sd-g-1,.sd-gy-1 { + --sd-gutter-y: 0.25rem +} + +.sd-g-1,.sd-gx-1 { + --sd-gutter-x: 0.25rem +} + +.sd-g-2,.sd-gy-2 { + --sd-gutter-y: 0.5rem +} + +.sd-g-2,.sd-gx-2 { + --sd-gutter-x: 0.5rem +} + +.sd-g-3,.sd-gy-3 { + --sd-gutter-y: 1rem +} + +.sd-g-3,.sd-gx-3 { + --sd-gutter-x: 1rem +} + +.sd-g-4,.sd-gy-4 { + --sd-gutter-y: 1.5rem +} + +.sd-g-4,.sd-gx-4 { + --sd-gutter-x: 1.5rem +} + +.sd-g-5,.sd-gy-5 { + --sd-gutter-y: 3rem +} + +.sd-g-5,.sd-gx-5 { + --sd-gutter-x: 3rem +} + +@media(min-width: 576px) { + .sd-col-sm-auto { + -ms-flex:0 0 auto; + flex: 0 0 auto; + width: auto + } + + .sd-col-sm-1 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 8.3333333333% + } + + .sd-col-sm-2 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 16.6666666667% + } + + .sd-col-sm-3 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 25% + } + + .sd-col-sm-4 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 33.3333333333% + } + + .sd-col-sm-5 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 41.6666666667% + } + + .sd-col-sm-6 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 50% + } + + .sd-col-sm-7 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 58.3333333333% + } + + .sd-col-sm-8 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 66.6666666667% + } + + .sd-col-sm-9 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 75% + } + + .sd-col-sm-10 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 83.3333333333% 
+ } + + .sd-col-sm-11 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 91.6666666667% + } + + .sd-col-sm-12 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 100% + } + + .sd-g-sm-0,.sd-gy-sm-0 { + --sd-gutter-y: 0 + } + + .sd-g-sm-0,.sd-gx-sm-0 { + --sd-gutter-x: 0 + } + + .sd-g-sm-1,.sd-gy-sm-1 { + --sd-gutter-y: 0.25rem + } + + .sd-g-sm-1,.sd-gx-sm-1 { + --sd-gutter-x: 0.25rem + } + + .sd-g-sm-2,.sd-gy-sm-2 { + --sd-gutter-y: 0.5rem + } + + .sd-g-sm-2,.sd-gx-sm-2 { + --sd-gutter-x: 0.5rem + } + + .sd-g-sm-3,.sd-gy-sm-3 { + --sd-gutter-y: 1rem + } + + .sd-g-sm-3,.sd-gx-sm-3 { + --sd-gutter-x: 1rem + } + + .sd-g-sm-4,.sd-gy-sm-4 { + --sd-gutter-y: 1.5rem + } + + .sd-g-sm-4,.sd-gx-sm-4 { + --sd-gutter-x: 1.5rem + } + + .sd-g-sm-5,.sd-gy-sm-5 { + --sd-gutter-y: 3rem + } + + .sd-g-sm-5,.sd-gx-sm-5 { + --sd-gutter-x: 3rem + } +} + +@media(min-width: 768px) { + .sd-col-md-auto { + -ms-flex:0 0 auto; + flex: 0 0 auto; + width: auto + } + + .sd-col-md-1 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 8.3333333333% + } + + .sd-col-md-2 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 16.6666666667% + } + + .sd-col-md-3 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 25% + } + + .sd-col-md-4 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 33.3333333333% + } + + .sd-col-md-5 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 41.6666666667% + } + + .sd-col-md-6 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 50% + } + + .sd-col-md-7 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 58.3333333333% + } + + .sd-col-md-8 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 66.6666666667% + } + + .sd-col-md-9 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 75% + } + + .sd-col-md-10 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 83.3333333333% + } + + .sd-col-md-11 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 91.6666666667% + } + + .sd-col-md-12 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 100% + } + + .sd-g-md-0,.sd-gy-md-0 { + --sd-gutter-y: 0 
+ } + + .sd-g-md-0,.sd-gx-md-0 { + --sd-gutter-x: 0 + } + + .sd-g-md-1,.sd-gy-md-1 { + --sd-gutter-y: 0.25rem + } + + .sd-g-md-1,.sd-gx-md-1 { + --sd-gutter-x: 0.25rem + } + + .sd-g-md-2,.sd-gy-md-2 { + --sd-gutter-y: 0.5rem + } + + .sd-g-md-2,.sd-gx-md-2 { + --sd-gutter-x: 0.5rem + } + + .sd-g-md-3,.sd-gy-md-3 { + --sd-gutter-y: 1rem + } + + .sd-g-md-3,.sd-gx-md-3 { + --sd-gutter-x: 1rem + } + + .sd-g-md-4,.sd-gy-md-4 { + --sd-gutter-y: 1.5rem + } + + .sd-g-md-4,.sd-gx-md-4 { + --sd-gutter-x: 1.5rem + } + + .sd-g-md-5,.sd-gy-md-5 { + --sd-gutter-y: 3rem + } + + .sd-g-md-5,.sd-gx-md-5 { + --sd-gutter-x: 3rem + } +} + +@media(min-width: 992px) { + .sd-col-lg-auto { + -ms-flex:0 0 auto; + flex: 0 0 auto; + width: auto + } + + .sd-col-lg-1 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 8.3333333333% + } + + .sd-col-lg-2 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 16.6666666667% + } + + .sd-col-lg-3 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 25% + } + + .sd-col-lg-4 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 33.3333333333% + } + + .sd-col-lg-5 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 41.6666666667% + } + + .sd-col-lg-6 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 50% + } + + .sd-col-lg-7 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 58.3333333333% + } + + .sd-col-lg-8 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 66.6666666667% + } + + .sd-col-lg-9 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 75% + } + + .sd-col-lg-10 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 83.3333333333% + } + + .sd-col-lg-11 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 91.6666666667% + } + + .sd-col-lg-12 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 100% + } + + .sd-g-lg-0,.sd-gy-lg-0 { + --sd-gutter-y: 0 + } + + .sd-g-lg-0,.sd-gx-lg-0 { + --sd-gutter-x: 0 + } + + .sd-g-lg-1,.sd-gy-lg-1 { + --sd-gutter-y: 0.25rem + } + + .sd-g-lg-1,.sd-gx-lg-1 { + --sd-gutter-x: 0.25rem + } + + .sd-g-lg-2,.sd-gy-lg-2 { + --sd-gutter-y: 
0.5rem + } + + .sd-g-lg-2,.sd-gx-lg-2 { + --sd-gutter-x: 0.5rem + } + + .sd-g-lg-3,.sd-gy-lg-3 { + --sd-gutter-y: 1rem + } + + .sd-g-lg-3,.sd-gx-lg-3 { + --sd-gutter-x: 1rem + } + + .sd-g-lg-4,.sd-gy-lg-4 { + --sd-gutter-y: 1.5rem + } + + .sd-g-lg-4,.sd-gx-lg-4 { + --sd-gutter-x: 1.5rem + } + + .sd-g-lg-5,.sd-gy-lg-5 { + --sd-gutter-y: 3rem + } + + .sd-g-lg-5,.sd-gx-lg-5 { + --sd-gutter-x: 3rem + } +} + +@media(min-width: 1200px) { + .sd-col-xl-auto { + -ms-flex:0 0 auto; + flex: 0 0 auto; + width: auto + } + + .sd-col-xl-1 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 8.3333333333% + } + + .sd-col-xl-2 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 16.6666666667% + } + + .sd-col-xl-3 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 25% + } + + .sd-col-xl-4 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 33.3333333333% + } + + .sd-col-xl-5 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 41.6666666667% + } + + .sd-col-xl-6 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 50% + } + + .sd-col-xl-7 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 58.3333333333% + } + + .sd-col-xl-8 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 66.6666666667% + } + + .sd-col-xl-9 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 75% + } + + .sd-col-xl-10 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 83.3333333333% + } + + .sd-col-xl-11 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 91.6666666667% + } + + .sd-col-xl-12 { + -ms-flex: 0 0 auto; + flex: 0 0 auto; + width: 100% + } + + .sd-g-xl-0,.sd-gy-xl-0 { + --sd-gutter-y: 0 + } + + .sd-g-xl-0,.sd-gx-xl-0 { + --sd-gutter-x: 0 + } + + .sd-g-xl-1,.sd-gy-xl-1 { + --sd-gutter-y: 0.25rem + } + + .sd-g-xl-1,.sd-gx-xl-1 { + --sd-gutter-x: 0.25rem + } + + .sd-g-xl-2,.sd-gy-xl-2 { + --sd-gutter-y: 0.5rem + } + + .sd-g-xl-2,.sd-gx-xl-2 { + --sd-gutter-x: 0.5rem + } + + .sd-g-xl-3,.sd-gy-xl-3 { + --sd-gutter-y: 1rem + } + + .sd-g-xl-3,.sd-gx-xl-3 { + --sd-gutter-x: 1rem + } + + .sd-g-xl-4,.sd-gy-xl-4 { + 
--sd-gutter-y: 1.5rem + } + + .sd-g-xl-4,.sd-gx-xl-4 { + --sd-gutter-x: 1.5rem + } + + .sd-g-xl-5,.sd-gy-xl-5 { + --sd-gutter-y: 3rem + } + + .sd-g-xl-5,.sd-gx-xl-5 { + --sd-gutter-x: 3rem + } +} + +.sd-flex-row-reverse { + flex-direction: row-reverse !important +} + +details.sd-dropdown { + position: relative +} + +details.sd-dropdown .sd-summary-title { + font-weight: 700; + padding-right: 3em !important; + -moz-user-select: none; + -ms-user-select: none; + -webkit-user-select: none; + user-select: none +} + +details.sd-dropdown:hover { + cursor: pointer +} + +details.sd-dropdown .sd-summary-content { + cursor: default +} + +details.sd-dropdown summary { + list-style: none; + padding: 1em +} + +details.sd-dropdown summary .sd-octicon.no-title { + vertical-align: middle +} + +details.sd-dropdown[open] summary .sd-octicon.no-title { + visibility: hidden +} + +details.sd-dropdown summary::-webkit-details-marker { + display: none +} + +details.sd-dropdown summary:focus { + outline: none +} + +details.sd-dropdown .sd-summary-icon { + margin-right: .5em +} + +details.sd-dropdown .sd-summary-icon svg { + opacity: .8 +} + +details.sd-dropdown summary:hover .sd-summary-up svg,details.sd-dropdown summary:hover .sd-summary-down svg { + opacity: 1; + transform: scale(1.1) +} + +details.sd-dropdown .sd-summary-up svg,details.sd-dropdown .sd-summary-down svg { + display: block; + opacity: .6 +} + +details.sd-dropdown .sd-summary-up,details.sd-dropdown .sd-summary-down { + pointer-events: none; + position: absolute; + right: 1em; + top: 1em +} + +details.sd-dropdown[open]>.sd-summary-title .sd-summary-down { + visibility: hidden +} + +details.sd-dropdown:not([open])>.sd-summary-title .sd-summary-up { + visibility: hidden +} + +details.sd-dropdown:not([open]).sd-card { + border: none +} + +details.sd-dropdown:not([open])>.sd-card-header { + border: 1px solid var(--sd-color-card-border); + border-radius: .25rem +} + +details.sd-dropdown.sd-fade-in[open] summary~* { + 
-moz-animation: sd-fade-in .5s ease-in-out; + -webkit-animation: sd-fade-in .5s ease-in-out; + animation: sd-fade-in .5s ease-in-out +} + +details.sd-dropdown.sd-fade-in-slide-down[open] summary~* { + -moz-animation: sd-fade-in .5s ease-in-out,sd-slide-down .5s ease-in-out; + -webkit-animation: sd-fade-in .5s ease-in-out,sd-slide-down .5s ease-in-out; + animation: sd-fade-in .5s ease-in-out,sd-slide-down .5s ease-in-out +} + +.sd-col>.sd-dropdown { + width: 100% +} + +.sd-summary-content>.sd-tab-set:first-child { + margin-top: 0 +} + +@keyframes sd-fade-in { + 0% { + opacity: 0 + } + + 100% { + opacity: 1 + } +} + +@keyframes sd-slide-down { + 0% { + transform: translate(0, -10px) + } + + 100% { + transform: translate(0, 0) + } +} + +.sd-tab-set { + border-radius: .125rem; + display: flex; + flex-wrap: wrap; + margin: 1em 0; + position: relative +} + +.sd-tab-set>input { + opacity: 0; + position: absolute +} + +.sd-tab-set>input:checked+label { + border-color: var(--sd-color-tabs-underline-active); + color: var(--sd-color-tabs-label-active) +} + +.sd-tab-set>input:checked+label+.sd-tab-content { + display: block +} + +.sd-tab-set>input:not(:checked)+label:hover { + color: var(--sd-color-tabs-label-hover); + border-color: var(--sd-color-tabs-underline-hover) +} + +.sd-tab-set>input:focus+label { + outline-style: auto +} + +.sd-tab-set>input:not(.focus-visible)+label { + outline: none; + -webkit-tap-highlight-color: transparent +} + +.sd-tab-set>label { + border-bottom: .125rem solid transparent; + margin-bottom: 0; + color: var(--sd-color-tabs-label-inactive); + border-color: var(--sd-color-tabs-underline-inactive); + cursor: pointer; + font-size: var(--sd-fontsize-tabs-label); + font-weight: 700; + padding: 1em 1.25em .5em; + transition: color 250ms; + width: auto; + z-index: 1 +} + +html .sd-tab-set>label:hover { + color: var(--sd-color-tabs-label-active) +} + +.sd-col>.sd-tab-set { + width: 100% +} + +.sd-tab-content { + box-shadow: 0 -0.0625rem 
var(--sd-color-tabs-overline),0 .0625rem var(--sd-color-tabs-underline); + display: none; + order: 99; + padding-bottom: .75rem; + padding-top: .75rem; + width: 100% +} + +.sd-tab-content>:first-child { + margin-top: 0 !important +} + +.sd-tab-content>:last-child { + margin-bottom: 0 !important +} + +.sd-tab-content>.sd-tab-set { + margin: 0 +} + +.sd-sphinx-override,.sd-sphinx-override * { + -moz-box-sizing: border-box; + -webkit-box-sizing: border-box; + box-sizing: border-box +} + +.sd-sphinx-override p { + margin-top: 0 +} + +:root { + --sd-color-primary: #0071bc; + --sd-color-secondary: #6c757d; + --sd-color-success: #28a745; + --sd-color-info: #17a2b8; + --sd-color-warning: #f0b37e; + --sd-color-danger: #dc3545; + --sd-color-light: #f8f9fa; + --sd-color-muted: #6c757d; + --sd-color-dark: #212529; + --sd-color-black: black; + --sd-color-white: white; + --sd-color-primary-highlight: #0060a0; + --sd-color-secondary-highlight: #5c636a; + --sd-color-success-highlight: #228e3b; + --sd-color-info-highlight: #148a9c; + --sd-color-warning-highlight: #cc986b; + --sd-color-danger-highlight: #bb2d3b; + --sd-color-light-highlight: #d3d4d5; + --sd-color-muted-highlight: #5c636a; + --sd-color-dark-highlight: #1c1f23; + --sd-color-black-highlight: black; + --sd-color-white-highlight: #d9d9d9; + --sd-color-primary-text: #fff; + --sd-color-secondary-text: #fff; + --sd-color-success-text: #fff; + --sd-color-info-text: #fff; + --sd-color-warning-text: #212529; + --sd-color-danger-text: #fff; + --sd-color-light-text: #212529; + --sd-color-muted-text: #fff; + --sd-color-dark-text: #fff; + --sd-color-black-text: #fff; + --sd-color-white-text: #212529; + --sd-color-shadow: rgba(0, 0, 0, 0.15); + --sd-color-card-border: rgba(0, 0, 0, 0.125); + --sd-color-card-border-hover: hsla(231, 99%, 66%, 1); + --sd-color-card-background: transparent; + --sd-color-card-text: inherit; + --sd-color-card-header: transparent; + --sd-color-card-footer: transparent; + --sd-color-tabs-label-active: 
hsla(231, 99%, 66%, 1); + --sd-color-tabs-label-hover: hsla(231, 99%, 66%, 1); + --sd-color-tabs-label-inactive: hsl(0, 0%, 66%); + --sd-color-tabs-underline-active: hsla(231, 99%, 66%, 1); + --sd-color-tabs-underline-hover: rgba(178, 206, 245, 0.62); + --sd-color-tabs-underline-inactive: transparent; + --sd-color-tabs-overline: rgb(222, 222, 222); + --sd-color-tabs-underline: rgb(222, 222, 222); + --sd-fontsize-tabs-label: 1rem +} diff --git a/python/docs/_static/css/styles.css b/python/docs/_static/css/styles.css new file mode 100644 index 000000000..2b9bf8b30 --- /dev/null +++ b/python/docs/_static/css/styles.css @@ -0,0 +1,87 @@ +/* css styles */ + +.navbar-brand-logo img { + max-width: 100%; + width: auto; + height: 36px; + max-height: 36px; + vertical-align: middle; +} + +body.quarto-light nav.navbar { + box-shadow: 0 0.125rem 0.25rem 0 var(--bs-gray-400); +} + +body.quarto-light .only-dark { + display: none !important; +} + +body.quarto-dark .only-light { + display: none !important; +} + +.announcement { + align-items: center; + display: flex; + justify-content: center; + min-height: 3rem; + padding: 0.5rem 12.5%; + position: relative; + text-align: center; + width: 100%; +} + +body.quarto-light .announcement { + background-color: rgb(208, 231, 233); +} + +body.quarto-dark .announcement { + background-color: rgb(73, 73, 110); +} + +h2 { + border-bottom: none; +} + +body.quarto-dark .sd-card { + border: 1px solid white; +} + +body.quarto-dark .sd-card-header { + border-bottom: 1px solid white; +} + +th> :last-child, +td> :last-child { + margin-bottom: 0px; +} + +dd { + margin-bottom: 10px; + margin-left: 30px; + margin-top: 3px; +} + +.red { + color: #e83e8c +} + +.highlight dt { + padding-bottom: 4px; + padding-left: 8px; + padding-right: 8px; + padding-top: 2px; +} + +body.quarto-light .highlight dt { + background-color: #f5f5f5; +} + +body.quarto-dark .highlight dt { + background-color: #2f2f2f; +} + +.bold-italic { + font-weight: bolder; + font-style: 
italic; +} \ No newline at end of file diff --git a/python/docs/source/_static/favicon.png b/python/docs/_static/images/favicon.png similarity index 100% rename from python/docs/source/_static/favicon.png rename to python/docs/_static/images/favicon.png diff --git a/python/docs/source/_static/kaskada-negative.svg b/python/docs/_static/images/kaskada-negative.svg similarity index 100% rename from python/docs/source/_static/kaskada-negative.svg rename to python/docs/_static/images/kaskada-negative.svg diff --git a/python/docs/source/_static/kaskada-positive.svg b/python/docs/_static/images/kaskada-positive.svg similarity index 100% rename from python/docs/source/_static/kaskada-positive.svg rename to python/docs/_static/images/kaskada-positive.svg diff --git a/python/docs/_templates/announcement.html b/python/docs/_templates/announcement.html new file mode 100644 index 000000000..c022c76ba --- /dev/null +++ b/python/docs/_templates/announcement.html @@ -0,0 +1,3 @@ +
+
This describes the next version of Kaskada. It is currently available as an alpha release.
+
\ No newline at end of file diff --git a/python/docs/_templates/card.ejs b/python/docs/_templates/card.ejs new file mode 100644 index 000000000..321764d27 --- /dev/null +++ b/python/docs/_templates/card.ejs @@ -0,0 +1,30 @@ +```{=html} + +``` \ No newline at end of file diff --git a/python/docs/_templates/data_types_table.ejs b/python/docs/_templates/data_types_table.ejs new file mode 100644 index 000000000..142b808f5 --- /dev/null +++ b/python/docs/_templates/data_types_table.ejs @@ -0,0 +1,44 @@ +```{=html} + + + + + + + + + + + + + <% for (const item of items) { %> + + + + + <% } %> + +
+

Types

+
+

Description

+
+

+ <% for (const index in item.types) { %> + <%= item.types[index] %> + <% if (index < item.types.length - 1) { %>,<% } %> + <% } %> +

+
+

+ <%= item.desc %> +

+

+ Examples: + <% for (const index in item.examples) { %> + <%= item.examples[index] %> + <% if (index < item.examples.length - 1) { %>,<% } %> + <% } %> +

+
+``` \ No newline at end of file diff --git a/python/docs/blog/_metadata.yml b/python/docs/blog/_metadata.yml new file mode 100644 index 000000000..954e649d0 --- /dev/null +++ b/python/docs/blog/_metadata.yml @@ -0,0 +1,4 @@ +format: + html: + link-external-icon: true + link-external-newwindow: true diff --git a/python/docs/blog/index.qmd b/python/docs/blog/index.qmd new file mode 100644 index 000000000..e2c74b6fd --- /dev/null +++ b/python/docs/blog/index.qmd @@ -0,0 +1,12 @@ +--- +title: Blog +listing: + contents: posts + sort: "date desc" + type: default + categories: true + sort-ui: true + filter-ui: true + fields: ["title", "date", "author", "subtitle", "description", "reading-time", "categories"] # "image", "image-alt", +page-layout: full +--- diff --git a/python/docs/source/blog/posts/2023-03-28-announcing-kaskada-oss.md b/python/docs/blog/posts/2023-03-28-announcing-kaskada-oss.qmd similarity index 98% rename from python/docs/source/blog/posts/2023-03-28-announcing-kaskada-oss.md rename to python/docs/blog/posts/2023-03-28-announcing-kaskada-oss.qmd index 84095abdd..d925cf241 100644 --- a/python/docs/source/blog/posts/2023-03-28-announcing-kaskada-oss.md +++ b/python/docs/blog/posts/2023-03-28-announcing-kaskada-oss.qmd @@ -1,10 +1,10 @@ --- -blogpost: true -author: ben +author: Ben Chambers date: 2023-Mar-28 -tags: releases -excerpt: 1 -description: From Startup to Open Source Project +categories: + - releases +title: Announcing Kaskada OSS +subtitle: From Startup to Open Source Project --- # Announcing Kaskada OSS diff --git a/python/docs/source/blog/posts/2023-08-25-new-kaskada.md b/python/docs/blog/posts/2023-08-25-new-kaskada.qmd similarity index 95% rename from python/docs/source/blog/posts/2023-08-25-new-kaskada.md rename to python/docs/blog/posts/2023-08-25-new-kaskada.qmd index 9025e85c4..c89e3754a 100644 --- a/python/docs/source/blog/posts/2023-08-25-new-kaskada.md +++ b/python/docs/blog/posts/2023-08-25-new-kaskada.qmd @@ -1,14 +1,12 @@ --- 
-blogpost: true date: 2023-Aug-25 -author: ryan -tags: releases -excerpt: 2 -description: Embedded in Python for accessible Real-Time AI +author: Ryan Michael +categories: + - releases +title: Introducing the New Kaskada +subtitle: Embedded in Python for accessible Real-Time AI --- -# Introducing the New Kaskada - We started Kaskada with the goal of simplifying the real-time AI/ML lifecycle, and in the past year AI has exploded in usefulness and accessibility. Generative models and Large Language Models (LLMs) have revolutionized how we approach AI. Their accessibility and incredible capabilities have made AI more valuable than it has ever been and democratized the practice of AI. Still, a challenge remains: building and managing real-time AI applications. @@ -29,7 +27,7 @@ These challenges have made it difficult for all but the largest companies with t ## Welcome to the New Kaskada -We originally built Kaskada as a managed service. Earlier this year, we [released Kaskada as an open-source, self-managed service](./2023-03-28-announcing-kaskada-oss.md), simplifying data onboarding and allowing Kaskada to be deployed anywhere. +We originally built Kaskada as a managed service. Earlier this year, we [released Kaskada as an open-source, self-managed service](./2023-03-28-announcing-kaskada-oss.qmd), simplifying data onboarding and allowing Kaskada to be deployed anywhere. Today, we take the next step in improving Kaskada’s usability by providing its core compute engine as an embedded Python library. Because Kaskada is written in Rust, we’re able to leverage the excellent [PyO3](https://pyo3.rs/) project to compile Python-native bindings for our compute engine and support Python-defined UDF’s. Additionally, Kaskada is built using [Apache Arrow](https://arrow.apache.org/), which allows zero-copy data transfers between Kaskada and other Python libraries such as [Pandas](https://pandas.pydata.org/), allowing you to operate on your data in-place. 
diff --git a/python/docs/source/community/index.md b/python/docs/community/index.qmd similarity index 60% rename from python/docs/source/community/index.md rename to python/docs/community/index.qmd index 37e6f3100..2e992a267 100644 --- a/python/docs/source/community/index.md +++ b/python/docs/community/index.qmd @@ -1,27 +1,40 @@ --- -html_theme.sidebar_secondary.remove: true +title: Community +format: + html: + css: + - /_static/css/design-style.css + link-external-icon: true + link-external-newwindow: true + +listing: + - id: cards + template: ../_templates/card.ejs + contents: + - name: GitHub Discussions + icon: bi bi-github + content: Ask questions and discuss Kaskada on GitHub. + link: https://github.com/kaskada-ai/kaskada/discussions + link_label: See GitHub Discussions + - name: Slack + icon: bi bi-slack + content: Join us on Slack to chat about Proactive AI! + link: https://join.slack.com/t/kaskada-hq/shared_invite/zt-1t1lms085-bqs2jtGO2TYr9kuuam~c9w + link_label: Join Slack + - name: Office Hours + icon: bi bi-camera-video-fill + content: Discuss your real-time Gen AI needs and ideas with us! + link: https://calendly.com/kaskada-io/office-hours + link_label: Schedule a video call --- -# Community +::: {#cards } +::: Welcome to the Kaskada community! The Kaskada project was open-sourced in 2023. As a growing community, we're here to explore new ways to interact with event-based data at scale and have some fun along the way. We build in the open and use the channels below for communication. -```{gallery-grid} -:grid-columns: 1 2 2 3 - -- header: "{fab}`github;pst-color-primary` GitHub Discussions" - content: "Ask questions and discuss Kaskada on GitHub." - link: https://github.com/kaskada-ai/kaskada/discussions -- header: "{fab}`slack;pst-color-primary` Join Slack" - content: "Join us on Slack to chat about Proactive AI!" 
- link: https://join.slack.com/t/kaskada-hq/shared_invite/zt-1t1lms085-bqs2jtGO2TYr9kuuam~c9w -- header: "{fas}`video;pst-color-primary` Meet with Us" - content: "Schedule time to meet with Kaskada developers and discuss your needs and ideas!" - link: https://calendly.com/kaskada-io/office-hours -``` - ## Contributing Nothing makes us happier than community contributions. See the README in the main Kaskada repository to get a grasp of the project layout and reference the [`CONTRIBUTING.md`](https://github.com/kaskada-ai/kaskada/blob/main/CONTRIBUTING.md){target="_blank" rel="noopener noreferrer"} file for detailed steps. @@ -35,4 +48,4 @@ If you're interested in contributing but don't know where to start, we also reco ## Code of Conduct To make Kaskada a welcoming and harassment-free experience for everyone, we adopt and follow the [Contributor Covenant](https://www.contributor-covenant.org/). -Please read our [Code of Conduct](https://github.com/kaskada-ai/kaskada/blob/main/CODE_OF_CONDUCT.md) before engaging with the community. \ No newline at end of file +Please read our [Code of Conduct](https://github.com/kaskada-ai/kaskada/blob/main/CODE_OF_CONDUCT.md) before engaging with the community. diff --git a/python/docs/examples/_metadata.yml b/python/docs/examples/_metadata.yml new file mode 100644 index 000000000..dc16c1f14 --- /dev/null +++ b/python/docs/examples/_metadata.yml @@ -0,0 +1,7 @@ +filters: + - include-code-files + +format: + html: + link-external-icon: true + link-external-newwindow: true diff --git a/python/docs/source/examples/bluesky.md b/python/docs/examples/bluesky.md similarity index 76% rename from python/docs/source/examples/bluesky.md rename to python/docs/examples/bluesky.md index 53b3936ee..4b6b97081 100644 --- a/python/docs/source/examples/bluesky.md +++ b/python/docs/examples/bluesky.md @@ -1,4 +1,9 @@ -# Bluesky Firehose Example +--- +title: Bluesky Firehose Example +subtitle: | + Read and aggregate messages from the Bluesky firehose. 
Use Kaskada to connect in real-time and parse messages as part of the query. +order: 2 +--- Bluesky is a "distributed social network" that aims to improve on some of the perceived shortcomings of X (nee Twitter). Bluesky uses a distributed protocol name the [AT Protocol](https://atproto.com/) to exchange messages between users, and provides a "firehose" delivering every message sent over the protocol in real-time. @@ -12,13 +17,7 @@ You can see the full example in the file [bluesky.py](https://github.com/kaskada Before we can receive events from Bluesky, we need to create a data source to tell Kaskada how to handle the events. We'll provide a schema and configure the time and entity fields. -```{literalinclude} bluesky.py -:language: python -:start-after: "[start_setup]" -:end-before: "[end_setup]" -:linenos: -:lineno-match: -:dedent: 4 +```{.python include="bluesky.py" code-line-numbers="true" start-line=26 end-line=53 dedent=4} ``` ## Define the incoming event handler @@ -28,13 +27,7 @@ This handler parses the message to find [Commit](https://atproto.com/specs/repos For each Commit, we'll parse out any [Post](https://atproto.com/blog/create-post#post-record-structure) messages. Finally we do some schema munging to get the Post into the event format we described when creating the data source. -```{literalinclude} bluesky.py -:language: python -:start-after: "[start_incoming]" -:end-before: "[end_incoming]" -:linenos: -:lineno-match: -:dedent: 4 +```{.python include="bluesky.py" code-line-numbers="true" start-line=55 end-line=79 dedent=4} ``` ## Construct a real-time query and result handler @@ -44,26 +37,14 @@ First we'll use `with_key` to regroup events by language, then we'll apply a sim Finally, we create a handler for the transformed results - here just printing them out. 
-```{literalinclude} bluesky.py -:language: python -:start-after: "[start_result]" -:end-before: "[end_result]" -:linenos: -:lineno-match: -:dedent: 4 +```{.python include="bluesky.py" code-line-numbers="true" start-line=81 end-line=89 dedent=4} ``` ## Final touches Now we just need to kick it all off by calling `asyncio.gather` on the two handler coroutines. This kicks off all the async processing. -```{literalinclude} bluesky.py -:start-after: "[start_run]" -:end-before: "[end_run]" -:language: python -:linenos: -:lineno-match: -:dedent: 4 +```{.python include="bluesky.py" code-line-numbers="true" start-line=91 end-line=92 dedent=4} ``` Try running it yourself and playing different transformations! diff --git a/python/docs/source/examples/bluesky.py b/python/docs/examples/bluesky.py similarity index 97% rename from python/docs/source/examples/bluesky.py rename to python/docs/examples/bluesky.py index c260fc960..906a1c0aa 100644 --- a/python/docs/source/examples/bluesky.py +++ b/python/docs/examples/bluesky.py @@ -26,7 +26,6 @@ async def main(): # The firehose doesn't (currently) require authentication. at_client = AsyncFirehoseSubscribeReposClient() - # [start_setup] # Setup the data source. # This defintes (most of) the schema of the events we'll receive, # and tells Kaskada which fields to use for time and initial entity. @@ -62,9 +61,7 @@ async def main(): key_column="author", time_unit="s", ) - # [end_setup] - # [start_incoming] # Handler for newly-arrived messages from BlueSky. async def receive_at(message) -> None: # Extract the contents of the message and bail if it's not a "commit" @@ -92,9 +89,6 @@ async def receive_at(message) -> None: } ) - # [end_incoming] - - # [start_result] # Handler for values emitted by Kaskada. async def receive_outputs(): # We'll perform a very simple aggregation - key by language and count. 
@@ -104,12 +98,8 @@ async def receive_outputs(): async for row in posts_by_first_lang.count().run_iter(kind="row", mode="live"): print(f"{row['_key']} has posted {row['result']} times since startup") - # [end_result] - - # [start_run] # Kickoff the two async processes concurrently. await asyncio.gather(at_client.start(receive_at), receive_outputs()) - # [end_run] # Copied from https://raw.githubusercontent.com/MarshalX/atproto/main/examples/firehose/process_commits.py diff --git a/python/docs/examples/index.qmd b/python/docs/examples/index.qmd new file mode 100644 index 000000000..b325ba517 --- /dev/null +++ b/python/docs/examples/index.qmd @@ -0,0 +1,7 @@ +--- +title: Examples +listing: + type: default + sort: order + fields: [title, subtitle, reading-time, categories] +--- diff --git a/python/docs/source/examples/reddit.md b/python/docs/examples/reddit.md similarity index 100% rename from python/docs/source/examples/reddit.md rename to python/docs/examples/reddit.md diff --git a/python/docs/source/examples/reddit.py b/python/docs/examples/reddit.py similarity index 100% rename from python/docs/source/examples/reddit.py rename to python/docs/examples/reddit.py diff --git a/python/docs/examples/time_centric.ipynb b/python/docs/examples/time_centric.ipynb new file mode 100644 index 000000000..5935cba1a --- /dev/null +++ b/python/docs/examples/time_centric.ipynb @@ -0,0 +1,357 @@ +{ + "cells": [ + { + "cell_type": "raw", + "id": "cbf72170", + "metadata": {}, + "source": [ + "---\n", + "title: \"Time-centric Calculations\"\n", + "subtitle: \"Work with time and produce past training examples and recent results for applying models.\"\n", + "order: 1\n", + "---" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "5a20a51f", + "metadata": { + "id": "5a20a51f" + }, + "source": [ + "Kaskada was built to process and perform temporal calculations on event streams,\n", + "with real-time analytics and machine learning in mind. 
It is not exclusively for\n", + "real-time applications, but Kaskada excels at time-centric computations and\n", + "aggregations on event-based data.\n", + "\n", + "For example, let's say you're building a user analytics dashboard at an\n", + "ecommerce retailer. You have event streams showing all actions the user has\n", + "taken, and you'd like to include in the dashboard:\n", + "\n", + "* the total number of events the user has ever generated\n", + "* the total number of purchases the user has made\n", + "* the total revenue from the user\n", + "* the number of purchases made by the user today\n", + "* the total revenue from the user today\n", + "* the number of events the user has generated in the past hour\n", + "\n", + "Because the calculations needed here are a mix of hourly, daily, and over all of\n", + "history, more than one type of event aggregation needs to happen. Table-centric\n", + "tools like those based on SQL would require multiple JOINs and window functions,\n", + "which would be spread over multiple queries or CTEs. 
\n", + "\n", + "Kaskada was designed for these types of time-centric calculations, so we can do\n", + "each of the calculations in the list in one line:\n", + "\n", + "```python\n", + "record({\n", + " \"event_count_total\": DemoEvents.count(),\n", + " \"purchases_total_count\": DemoEvents.filter(DemoEvents.col(\"event_name\").eq(\"purchase\")).count(),\n", + " \"revenue_total\": DemoEvents.col(\"revenue\").sum(),\n", + " \"purchases_daily\": DemoEvents.filter(DemoEvents.col(\"event_name\").eq(\"purchase\")).count(window=Daily()),\n", + " \"revenue_daily\": DemoEvents.col(\"revenue\").sum(window=Daily()),\n", + " \"event_count_hourly\": DemoEvents.count(window=Hourly()),\n", + "})\n", + "```\n", + "\n", + "::: {.callout-warning}\n", + "The previous example demonstrates the use of `Daily()` and `Hourly()` windowing which aren't yet part of the new Python library.\n", + ":::\n", + "\n", + "Of course, a few more lines of code are needed to put these calculations to work,\n", + "but these six lines are all that is needed to specify the calculations\n", + "themselves. Each line may specify:\n", + "\n", + "* the name of a calculation (e.g. `event_count_total`)\n", + "* the input data to start with (e.g. `DemoEvents`)\n", + "* selecting event fields (e.g. `DemoEvents.col(\"revenue\")`)\n", + "* function calls (e.g. `count()`)\n", + "* event filtering (e.g. `filter(DemoEvents.col(\"event_name\").eq(\"purchase\"))`)\n", + "* time windows to calculate over (e.g. 
`window=Daily()`)\n", + "\n", + "...with consecutive steps chained together in a familiar way.\n", + "\n", + "Because Kaskada was built for time-centric calculations on event-based data, a\n", + "calculation we might describe as \"total number of purchase events for the user\"\n", + "can be defined in Kaskada in roughly the same number of terms as the verbal\n", + "description itself.\n", + "\n", + "Continue through the demo below to find out how it works.\n", + "\n", + "See [the Kaskada documentation](../guide/index) for lots more information." + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "BJ2EE9mSGtGB", + "metadata": { + "id": "BJ2EE9mSGtGB" + }, + "source": [ + "## Kaskada Client Setup\n", + "\n", + "```\n", + "%pip install kaskada>=0.6.0-a.0\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "37db47ba", + "metadata": { + "tags": [ + "hide-output" + ] + }, + "outputs": [], + "source": [ + "import kaskada as kd\n", + "\n", + "kd.init_session()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "5b838eef", + "metadata": {}, + "source": [ + "## Example dataset\n", + "\n", + "For this demo, we'll use a very small example data set, which, for simplicity and portability of this demo notebook, we'll read from a string.\n", + "\n", + "::: {.callout-note}\n", + "For simplicity, instead of a CSV file or other file format we read and then parse data from a CSV string.\n", + "You can load your own event data from many common sources, including Pandas DataFrames and Parquet files.\n", + "See `kaskada.sources` for more information on the available sources.\n", + ":::" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ba4bb6b6", + "metadata": {}, + "outputs": [], + "source": [ + "import asyncio\n", + "\n", + "# For demo simplicity, instead of a CSV file, we read and then parse data from a\n", + "# CSV string. 
Kaskada\n", + "event_data_string = \"\"\"\n", + " event_id,event_at,entity_id,event_name,revenue\n", + " ev_00001,2022-01-01 22:01:00,user_001,login,0\n", + " ev_00002,2022-01-01 22:05:00,user_001,view_item,0\n", + " ev_00003,2022-01-01 22:20:00,user_001,view_item,0\n", + " ev_00004,2022-01-01 23:10:00,user_001,view_item,0\n", + " ev_00005,2022-01-01 23:20:00,user_001,view_item,0\n", + " ev_00006,2022-01-01 23:40:00,user_001,purchase,12.50\n", + " ev_00007,2022-01-01 23:45:00,user_001,view_item,0\n", + " ev_00008,2022-01-01 23:59:00,user_001,view_item,0\n", + " ev_00009,2022-01-02 05:30:00,user_001,login,0\n", + " ev_00010,2022-01-02 05:35:00,user_001,view_item,0\n", + " ev_00011,2022-01-02 05:45:00,user_001,view_item,0\n", + " ev_00012,2022-01-02 06:10:00,user_001,view_item,0\n", + " ev_00013,2022-01-02 06:15:00,user_001,view_item,0\n", + " ev_00014,2022-01-02 06:25:00,user_001,purchase,25\n", + " ev_00015,2022-01-02 06:30:00,user_001,view_item,0\n", + " ev_00016,2022-01-02 06:31:00,user_001,purchase,5.75\n", + " ev_00017,2022-01-02 07:01:00,user_001,view_item,0\n", + " ev_00018,2022-01-01 22:17:00,user_002,view_item,0\n", + " ev_00019,2022-01-01 22:18:00,user_002,view_item,0\n", + " ev_00020,2022-01-01 22:20:00,user_002,view_item,0\n", + "\"\"\"\n", + "\n", + "events = await kd.sources.CsvString.create(\n", + " event_data_string, time_column=\"event_at\", key_column=\"entity_id\"\n", + ")\n", + "\n", + "# Inspect the event data\n", + "events.preview()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "568d1272", + "metadata": {}, + "source": [ + "## Define queries and calculations" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "c2c5a298", + "metadata": {}, + "source": [ + "Kaskada queries are defined in Python, using the `{py}Timestream` class.\n", + "Sources are Timestreams generally containing [records](../guide/data_types.qmd#record-types).\n", + "\n", + "Let's do a simple query for events for a specific entity ID.\n" 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bce22e47", + "metadata": {}, + "outputs": [], + "source": [ + "events.filter(events.col(\"entity_id\").eq(\"user_002\")).preview()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "6b5f2725", + "metadata": {}, + "source": [ + "\n", + "Beyond querying for events, Kaskada has a powerful syntax for defining\n", + "calculations on events, temporally across history.\n", + "\n", + "The six calculations discussed at the top of this demo notebook are below.\n", + "\n", + "(Note that some functions return `NaN` if no events for that user have occurred\n", + "within the time window.)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3ad6d596", + "metadata": {}, + "outputs": [], + "source": [ + "purchases = events.filter(events.col(\"event_name\").eq(\"purchase\"))\n", + "\n", + "features = kd.record(\n", + " {\n", + " \"event_count_total\": events.count(),\n", + " # \"event_count_hourly\": events.count(window=Hourly()),\n", + " \"purchases_total_count\": purchases.count(),\n", + " # \"purchases_today\": purchases.count(window=Since(Daily()),\n", + " # \"revenue_today\": events.col(\"revenue\").sum(window=Since(Daily())),\n", + " \"revenue_total\": events.col(\"revenue\").sum(),\n", + " }\n", + ")\n", + "features.preview()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "1c315938", + "metadata": {}, + "source": [ + "## At Any Time\n", + "\n", + "A key feature of Kaskada's time-centric design is the ability to query for\n", + "calculation values at any point in time. Traditional query languages (e.g. SQL)\n", + "can only return data that already exists---if we want to return a row of\n", + "computed/aggregated data, we have to compute the row first, then return it. 
As a\n", + "specific example, suppose we have SQL queries that produce daily aggregations\n", + "over event data, and now we want to have the same aggregations on an hourly\n", + "basis. In SQL, we would need to write new queries for hourly aggregations; the\n", + "queries would look very similar to the daily ones, but they would still be\n", + "different queries.\n", + "\n", + "With Kaskada, we can define the calculations once, and then specify the points\n", + "in time at which we want to know the calculation values when we query them.\n", + "\n", + "In the examples so far, we have used `preview()` to get a DataFrame containing\n", + "some of the rows from the Timestreams we've defined. By default, this produces\n", + "a _history_ containing all the times the result changed. This is useful for\n", + "using past values to create training examples.\n", + "\n", + "We can also execute the query for the values at a specific point in time." + ] + }, + { + "cell_type": "markdown", + "id": "082e174d", + "metadata": { + "tags": [ + "hide-output" + ] + }, + "source": [ + "```\n", + "features.preview(at=\"2022-01-01 22:00\")\n", + "``````" + ] + }, + { + "cell_type": "markdown", + "id": "5a44c5f7", + "metadata": {}, + "source": [ + "You can also compose a query that produces values at specific points in time.\n", + "\n", + "```\n", + "features.when(hourly())\n", + "```\n", + "\n", + "Regardless of the time cadence of the calculations themselves, the query output\n", + "can contain rows for whatever time points you specify. You can define a set of\n", + "daily calculations and then get hourly updates during the day. 
Or, you can\n", + "publish the definitions of some features in a Python module and different users\n", + "can query those same calculations for hourly, daily, and monthly\n", + "values---without editing the calculation definitions themselves.\n", + "\n", + "## Adding more calculations to the query\n", + "\n", + "We can add two new calculations, also in one line each, representing:\n", + "\n", + "* the time of the user's first event\n", + "* the time of the user's last event\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "2ba09e77-0fdf-43f4-960b-50a126262ec7", + "metadata": { + "id": "2ba09e77-0fdf-43f4-960b-50a126262ec7" + }, + "source": [ + "This is only a small sample of possible Kaskada queries and capabilities. See\n", + "everything that's possible with [Timestreams](../reference/Timestream/index.qmd)." + ] + } + ], + "metadata": { + "colab": { + "collapsed_sections": [ + "6924ca3e-28b3-4f93-b0cf-5f8afddc11d8", + "936700a9-e042-401c-9156-7bb18652e109", + "08f5921d-36dc-41d1-a2a6-ae800b7a11de" + ], + "private_outputs": true, + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/python/docs/guide/_metadata.yml b/python/docs/guide/_metadata.yml new file mode 100644 index 000000000..ffbf12790 --- /dev/null +++ b/python/docs/guide/_metadata.yml @@ -0,0 +1,10 @@ +execute: + daemon: false + warning: false + +format: + html: + link-external-icon: true + link-external-newwindow: true + +jupyter: python3 \ No newline at end of file diff --git a/python/docs/source/guide/aggregation.md b/python/docs/guide/aggregation.qmd similarity index 78% rename from 
python/docs/source/guide/aggregation.md rename to python/docs/guide/aggregation.qmd index 0c5bb7687..86a8f4c84 100644 --- a/python/docs/source/guide/aggregation.md +++ b/python/docs/guide/aggregation.qmd @@ -1,4 +1,6 @@ -# Aggregation +--- +title: Aggregations +--- Aggregations are _cumulative_, _grouped_, and _windowed_. @@ -9,7 +11,7 @@ Grouped : They reflect the values for each entity separately. Windowed -: They reflect the values within a specific [window](../reference/windows.md). +: They reflect the values within a specific [](`kaskada.windows.Window`). While most functions operate on specific points in the Timestream, aggregation functions operate on many points. Generally, the result of an aggregation represents the result for each key up to and including the current time. @@ -20,12 +22,11 @@ If no window is specified, the aggregation is over all rows for the entity, up t If a window is provided, the result of an aggregation is the result for that entity in the current window up to and including the current time. Aggregations produce cumulative results up to each point in time, so the result at a given point in time may represent an incomplete window. -```{code-block} python -:caption: Cumulative aggregation since the start of the day. +```{.python filename="Cumulative aggregation since the start of the day."} Purchases.sum(window = kd.windows.Since.daily()) ``` -The [windows reference](../reference/windows.md) has information on the supported kinds of windows. +The [windows reference](../reference/Windows/index.qmd) has information on the supported kinds of windows. ## Repeated Aggregation @@ -35,7 +36,6 @@ The result of the first aggregation is the same — a sequence of timestamped da Applying an additional aggregation simply aggregates over those times. We can compute the maximum of the average purchase amounts. -```{code-block} python -:caption: Repeated aggregation computing the maximum of the average purchases. 
+```{.python filename="Repeated aggregation computing the maximum of the average purchases."} Purchases.col("amount").mean().max() ``` \ No newline at end of file diff --git a/python/docs/source/guide/data_types.md b/python/docs/guide/data_types.qmd similarity index 74% rename from python/docs/source/guide/data_types.md rename to python/docs/guide/data_types.qmd index 6e25456e7..c33cbce55 100644 --- a/python/docs/source/guide/data_types.md +++ b/python/docs/guide/data_types.qmd @@ -1,7 +1,15 @@ -# Data Types +--- +title: Data Types + +listing: + - id: types + template: ../_templates/data_types_table.ejs + contents: + - data_types.yml +--- Kaskada operates on typed Timestreams. -Similar to how every Pandas `DataFrame` has an associated `dtype`, every Kaskada `Timestream` has an associated [PyArrow data type](https://arrow.apache.org/docs/python/api/datatypes.html) returned by {py:attr}`kaskada.Timestream.data_type`. +Similar to how every Pandas `DataFrame` has an associated `dtype`, every Kaskada `Timestream` has an associated [PyArrow data type](https://arrow.apache.org/docs/python/api/datatypes.html) returned by [](`kaskada.Timestream.data_type`). The set of supported types is based on the types supported by [Apache Arrow](https://arrow.apache.org/). Each `Timestream` contains points of the corresponding type. @@ -18,49 +26,7 @@ An expression producing a record type is a stream that produces a value of the g Scalar types include booleans, numbers, strings, timestamps, durations and calendar intervals. -:::{list-table} Scalar Types -:widths: 1, 3 -:header-rows: 1 - -- * Types - * Description -- * `bool` - * Booleans represent true or false. - - Examples: `true`, `false`. -- * `u8`, `u16`, `u32`, `u64` - * Unsigned integer numbers of the specified bit width. - - Examples: `0`, `1`, `1000` -- * `i8`, `i16`, `i32`, `i64` - * Signed integer numbers of the specified bit width. 
- - Examples: `0`, `1`, `-100` -- * `f32`, `f64` - * Floating point numbers of the specified bit width. - - Examples: `0`, `1`, `-100`, `1000`, `0.0`, `-1.0`, `-100837.631`. -- * `str` - * Unicode strings. - - Examples: `"hello", "hi 'bob'"`. - -- * `timestamp_s`, `timestamp_ms`, `timestamp_us`, `timestamp_ns` - * Points in time relative the Unix Epoch (00:00:00 UTC on January 1, 1970). - Time unit may be seconds (s), milliseconds (ms), microseconds (us) or nanoseconds (ns). - - Examples: `1639595174 as timestamp_s` -- * `duration_s`, `duration_ms`, `duration_us`, `duration_ns` - * A duration of a fixed amount of a specific time unit. - Time unit may be seconds (s), milliseconds (ms), microseconds (us) or nanoseconds (ns). - - Examples: `-100 as duration_ms` -- * `interval_days`, `interval_months` - * A calendar interval corresponding to the given amount of the corresponding time. - The length of an interval depends on the point in time it is added to. - For instance, adding 1 `interval_month` to a timestamp will shift to the same day of the next month. - - Examples: `1 as interval_days`, `-100 as interval_months` +::: {#types} ::: ## Record Types diff --git a/python/docs/guide/data_types.yml b/python/docs/guide/data_types.yml new file mode 100644 index 000000000..7a5baceb8 --- /dev/null +++ b/python/docs/guide/data_types.yml @@ -0,0 +1,31 @@ +- types: [bool] + desc: Booleans represent true or false. + examples: [true, false] +- types: [u8, u16, u32, u64] + desc: Unsigned integer numbers of the specified bit width. + examples: [0, 1, 1000] +- types: [i8, i16, i32, i64] + desc: Signed integer numbers of the specified bit width. + examples: [0, 1, -100] +- types: [f32, f64] + desc: Floating point numbers of the specified bit width. + examples: [0, 1, -100, 1000, 0.0, -1.0, -100837.631] +- types: [str] + desc: Unicode strings. 
+ examples: ["hello", "hi 'bob'"] +- types: [timestamp_s, timestamp_ms, timestamp_us, timestamp_ns] + desc: | + Points in time relative to the Unix Epoch (00:00:00 UTC on January 1, 1970). + Time unit may be seconds (s), milliseconds (ms), microseconds (us) or nanoseconds (ns). + examples: [1639595174 as timestamp_s] +- types: [duration_s, duration_ms, duration_us, duration_ns] + desc: | + A duration of a fixed amount of a specific time unit. + Time unit may be seconds (s), milliseconds (ms), microseconds (us) or nanoseconds (ns). + examples: [-100 as duration_ms] +- types: [interval_days, interval_months] + desc: | + A calendar interval corresponding to the given amount of the corresponding time. + The length of an interval depends on the point in time it is added to. + For instance, adding 1 `interval_month` to a timestamp will shift to the same day of the next month. + examples: [1 as interval_days, -100 as interval_months] diff --git a/python/docs/source/guide/entities.md b/python/docs/guide/entities.qmd similarity index 81% rename from python/docs/source/guide/entities.md rename to python/docs/guide/entities.qmd index 0b1285f35..70c0c7c1c 100644 --- a/python/docs/source/guide/entities.md +++ b/python/docs/guide/entities.qmd @@ -1,14 +1,7 @@ --- -file_format: mystnb -kernelspec: - name: python3 - disply_name: Python 3 -mystnb: - execution_mode: cache +title: Entities and Grouping --- -# Entities and Grouping - Entities organize data for use in feature engineering. They describe the particular objects that a prediction will be made for. The result of a feature computation is a _feature vector_ for each entity at various points in time. @@ -35,22 +28,15 @@ For example, using `Vancouver` to identify cities would lead to ambiguity betwee In these cases, you'd likely use some other identifier for instances. Others may be useful, such as using the airport code. -:::{list-table} Example Entities and corresponding keys. 
-:header-rows: 1 - -* - Example Entity - - Example Entity Instance -* - Houses - - 1600 Pennsylvania Avenue -* - Airports - - SEA -* - Customers - - John Doe -* - City - - Vancouver -* - State - - Washington -::: +| Example Entity | Example Entity Instance | +|-|-| +|Houses|1600 Pennsylvania Avenue| +|Airports|SEA| +|Customers|John Doe| +|City|Vancouver| +|State|Washington| + +: Example Entities and corresponding keys. ## Entities and Aggregation @@ -59,14 +45,16 @@ Entities provide an implicit grouping for the aggregation. When we write `sum(Purchases.amount)` it is an aggregation that returns the sum of purchases made _by each entity_. This is helpful since the _feature vector_ for an entity will depend only on events related to that entity. + + +```{python} +# | code-fold: true + import asyncio import kaskada as kd kd.init_session() @@ -91,19 +79,18 @@ kd.plot.render( ## Changing Keys -The key associated with each point may be changed using {py:meth}`kaskada.Timestream.with_key`. +The key associated with each point may be changed using [](`kaskada.Timestream.with_key`). For example, given a stream of purchases associated with each user, we could create a Timestream associated with the purchased item: -```{code-block} python -:caption: Using with-key to associate purchases with items +```{.python filename="Using with-key to associate purchases with items"} purchases_by_user.with_key(purchases_by_user.col("item_id")) ``` -This is particularly useful with the ability to [lookup](joins.md#explicit-lookups) values from associated with other keys. +This is particularly useful with the ability to [lookup](joins.qmd#explicit-lookups) values associated with other keys. For instance, we could re-key purchases as shown above to count the total spent on each item (across all users) and then lookup that information for each user's purchases. ## Joining Joining with the same entity happens automatically. 
Joining with other entities (and even other kinds of entities) is done using `lookup`. -See [Joins](joins.md) for more information. \ No newline at end of file +See [Joins](joins.qmd) for more information. \ No newline at end of file diff --git a/python/docs/source/guide/execution.md b/python/docs/guide/execution.qmd similarity index 75% rename from python/docs/source/guide/execution.md rename to python/docs/guide/execution.qmd index 558c2d778..0e5180403 100644 --- a/python/docs/source/guide/execution.md +++ b/python/docs/guide/execution.qmd @@ -1,6 +1,8 @@ -# Execution +--- +title: Execution +--- -A [Timestream](./timestreams.md) may be executed and written to a [destination](#destinations) in a variety of ways. +A [Timestream](./timestreams.qmd) may be executed and written to a [destination](#destinations) in a variety of ways. The things to consider when deciding how to execute the Timestream are: 1. Whether you want the _history_ of points or the _snapshot_ of values for each entity at a given time. @@ -10,12 +12,10 @@ The things to consider when deciding how to execute the Timestream are: [State](#state) can be used to provide fault-tolerance and allow incremental processing of only new events. -```{admonition} Preview during development -:class: tip - +::: {.callout-tip title="Preview during development"} While developing queries, it is often useful to view a few rows from the result. -Using {py:meth}`kaskada.Timestream.preview` you can retrieve a small set of rows from the result set as a Pandas DataFrame. -``` +Using [](`kaskada.Timestream.preview`) you can retrieve a small set of rows from the result set as a Pandas DataFrame. +::: ## History vs. Snapshot @@ -27,10 +27,11 @@ Executing a Timestream for a snapshot produces a value for each entity at a spec This means that each entity appears at-most once in the results. This is useful for maintaining a feature store based on the latest values. 
-```{todo} +::: {.callout-note title=Todo} Expose the configuration for snapshots. See https://github.com/kaskada-ai/kaskada/issues/719 -``` +::: + ## Query vs. Materialize Every Timestream may be executed as a single query or used to start a materialization. @@ -48,10 +49,10 @@ This allows incrementally outputting the entire history to some external store, For a snapshot query, only entities that have changed after this time are included in the output. This reduces the amount of data written when the past snapshot is already present in the destination. -```{todo} +::: {.callout-note title=Todo} Expose the configuration for changed since. See https://github.com/kaskada-ai/kaskada/issues/719 -``` +::: ## Up To @@ -60,15 +61,15 @@ Configuring the _up to time_ lets you control the maximum points output (and in For a historic query, only points occurring before or at the up to time are included in the output. For a snapshot query, this corresponds to the time at which the snapshot will be taken. -```{note} +::: {.callout-note title=Todo} Currently when not specified, the up to time is determined from the maximum event present in the data. We have plans to change this to a parameter to `run` defaulting to the current time. -``` +::: -```{todo} +::: {.callout-note title=Todo} Expose the configuration for up-to. See https://github.com/kaskada-ai/kaskada/issues/719 -``` +::: ## State @@ -81,11 +82,11 @@ Similarly, when producing a history, Kaskada can use any persisted state before ## Destinations -The methods {py:func}`Timestream.preview` and {py:func}`Timestream.to_pandas` provide the results of a query in a Pandas DataFrame for easy visualization and consumption within the Python process. +The methods [](`kaskada.Timestream.preview`) and [](`kaskada.Timestream.to_pandas`) provide the results of a query in a Pandas DataFrame for easy visualization and consumption within the Python process. 
-The {py:func}`Timestream.run_iter` methods provides synchronous and asynchronous iterators over the results in a variety of formats including Pandas DataFrames, PyArrow RecordBatches, and rows as Python dictionaries. +The [](`kaskada.Timestream.run_iter`) methods provides synchronous and asynchronous iterators over the results in a variety of formats including Pandas DataFrames, PyArrow RecordBatches, and rows as Python dictionaries. This allows you to run the entire retrieve-evaluate-respond loop within a single Python process. -The {py:func}`Timestream.write` function allows you to specify a destination from {py:mod}`kaskada.destinations` for results. -This supports both `once` and `live` queries -See the reference on [destinations](../reference/destinations.md) for more on the supported destinations. \ No newline at end of file +The [](`kaskada.Timestream.write`) function allows you to specify a destination from [`kaskada.destinations`](../reference/Destinations/index.qmd) for results. This supports both `once` and `live` queries. + +See the reference on [destinations](../reference/Destinations/index.qmd) for more on the supported destinations. diff --git a/python/docs/source/guide/index.md b/python/docs/guide/index.qmd similarity index 88% rename from python/docs/source/guide/index.md rename to python/docs/guide/index.qmd index b7d28a79b..96d5cc4a5 100644 --- a/python/docs/source/guide/index.md +++ b/python/docs/guide/index.qmd @@ -1,4 +1,6 @@ -# User Guide +--- +title: User Guide +--- Understanding and reacting to the world in real-time requires understanding what is happening _now_ in the context of what happened in the past. You need the ability to understand if what just happened is unusual, how it relates to what happened previously, and how it relates to other things that are happening at the same time. @@ -12,7 +14,7 @@ Use time-travel to compute training examples from historic data and understand h ## What are "Timestreams"? 
-A [Timestream](timestreams) describes how a value changes over time. +A [Timestream](timestreams.qmd) describes how a value changes over time. In the same way that SQL queries transform tables and graph queries transform nodes and edges, Kaskada queries transform Timestreams. In comparison to a timeseries which often contains simple values (e.g., numeric observations) defined at fixed, periodic times (i.e., every minute), a Timestream contains any kind of data (records or collections as well as primitives) and may be defined at arbitrary times corresponding to when the events occur. @@ -32,20 +34,3 @@ data = kd.sources.Parquet( # Get the count of events associated with each user over time, as a dataframe. data.count().to_pandas() ``` - -```{toctree} -:hidden: -:maxdepth: 2 - -quickstart -tour -why -installation -timestreams -data_types -entities -aggregation -joins -sources -execution -``` \ No newline at end of file diff --git a/python/docs/source/guide/installation.md b/python/docs/guide/installation.qmd similarity index 89% rename from python/docs/source/guide/installation.md rename to python/docs/guide/installation.qmd index a518c9334..d745dd4fb 100644 --- a/python/docs/source/guide/installation.md +++ b/python/docs/guide/installation.qmd @@ -1,23 +1,24 @@ -# Installation +--- +title: Installation +--- To install Kaskada, you need to be using Python >= 3.8. We suggest using 3.11 or newer, since that provides more precise error locations. -```{code-block} bash -:caption: Installing Kaskada +```{.bash filename="Installing Kaskada"} pip install kaskada>=0.6.0-a.3 ``` -```{warning} +::: {.callout-warning} This version of Kaskada is currently a pre-release, as indicated by the `-a.0` suffix. It will not be installed by default if you `pip install kaskada`. You need to either use `pip install --pre kaskada` or specify a specific version, as shown in the example. 
-``` +::: -```{tip} +::: {.callout-tip} Depending on you Python installation and configuration you may have `pip3` instead of `pip` available in your terminal. If you do have `pip3` replace pip with `pip3` in your command, i.e., `pip3 install kaskada`. If you get a permission error when running the `pip` command, you may need to run as an administrator using `sudo pip install kaskada`. If you don't have administrator access (e.g., in Google Colab, or other hosted environments) you amy use `pip`’s `--user` flag to install the package in your user directory. -``` +::: diff --git a/python/docs/source/guide/joins.md b/python/docs/guide/joins.qmd similarity index 91% rename from python/docs/source/guide/joins.md rename to python/docs/guide/joins.qmd index 73d935342..476379c00 100644 --- a/python/docs/source/guide/joins.md +++ b/python/docs/guide/joins.qmd @@ -1,4 +1,6 @@ -# Joins +--- +title: Joins +--- It is often necessary to use multiple Timestreams to define a query. Understanding user behavior requires considering their activity across a variety of event streams. @@ -9,9 +11,7 @@ Both of these are accomplished by joining the Timestreams. Timestreams associated with the same kind of entity -- for instance, a user -- are automatically joined. This makes it easy to write queries that consider multiple event streams in a single query. -```{code-block} python -:caption: Joining two event streams to compute page-views per purchase - +```{.python filename="Joining two event streams to compute page-views per purchase"} page_views.count() / purchases.count() ``` @@ -28,7 +28,7 @@ For any input table that is continuous, the join is `as of` the time of the outp ## Explicit Lookups -Values from associated with other entities may be retrieved using {py:meth}`kaskada.Timestream.lookup`. +Values associated with other entities may be retrieved using [](`kaskada.Timestream.lookup`). 
`left.lookup(right)` does a left-join, looking up the value from `right` for each computed key in `left`. Lookups are _temporally correct_ -- the value retrieved corresponds to the `right` value at the time the key occurred in `left`. \ No newline at end of file diff --git a/python/docs/source/guide/quickstart.md b/python/docs/guide/quickstart.qmd similarity index 73% rename from python/docs/source/guide/quickstart.md rename to python/docs/guide/quickstart.qmd index 42b140a95..6e19ac96d 100644 --- a/python/docs/source/guide/quickstart.md +++ b/python/docs/guide/quickstart.qmd @@ -1,14 +1,7 @@ --- -file_format: mystnb -kernelspec: - name: python3 - display_name: Python 3 -mystnb: - execution_mode: cache +title: Quick Start --- -# Quick Start - This shows the bare minimum needed to get started with Kaskada. ## Install @@ -20,14 +13,14 @@ This uses `kaskada>=0.6.0-a.3` to ensure the pre-release version is installed. pip install kaskada>=0.6.0-a.3 ``` -See the section on [installation](./installation.md) to learn more about installing Kaskada. +See the section on [installation](./installation.qmd) to learn more about installing Kaskada. ## Write a query The following Python code imports the Kaskada library, creates a session, and loads some CSV data. It then runs a query to produce a Pandas DataFrame. 
-```{code-cell} +```{python} import asyncio import kaskada as kd kd.init_session() @@ -43,5 +36,5 @@ content = "\n".join( ] ) source = await kd.sources.CsvString.create(content, time_column="time", key_column="key") -source.select("m", "n").extend({"sum_m": source.col("m").sum() }).to_pandas() +source.select("m", "n").extend({"sum_m": source.col("m").sum()}).to_pandas() ``` diff --git a/python/docs/source/guide/sources.md b/python/docs/guide/sources.qmd similarity index 68% rename from python/docs/source/guide/sources.md rename to python/docs/guide/sources.qmd index 6e82078e4..e19b39e8d 100644 --- a/python/docs/source/guide/sources.md +++ b/python/docs/guide/sources.qmd @@ -1,11 +1,13 @@ -# Sources +--- +title: Sources +--- Sources describe how events enter a Timestream. Every source is a Timestream containing the events that have been read. -Most often, these events are [records](data_types.md#record-types). +Most often, these events are [records](data_types.qmd#record-types). Each event from a source is associated with a specific time and entity key. These define how the resulting Timestream is ordered and grouped. Generally, the time and entity key are associated with a specific column from the source, using the `time_column` and `key_column` arguments. -The [Sources Reference](../reference/sources.md) has more details on the supported sources. \ No newline at end of file +The [Sources Reference](../reference/Sources/index.qmd) has more details on the supported sources. 
\ No newline at end of file diff --git a/python/docs/source/guide/timestreams.md b/python/docs/guide/timestreams.qmd similarity index 88% rename from python/docs/source/guide/timestreams.md rename to python/docs/guide/timestreams.qmd index a6b61c488..e34a1ed00 100644 --- a/python/docs/source/guide/timestreams.md +++ b/python/docs/guide/timestreams.qmd @@ -1,26 +1,21 @@ --- -file_format: mystnb -kernelspec: - name: python3 - disply_name: Python 3 -mystnb: - execution_mode: cache +title: Timestreams --- -# Timestreams - Kaskada is built on the concept of a _Timestream_. Each Timestream is ordered by _time_ and partitioned by _entity_. This makes it easy to focus on events happening over time and how aggregations change. + + +```{python} +# | code-fold: true +# | warning: false import asyncio import kaskada as kd kd.init_session() @@ -50,10 +45,10 @@ While a discrete timestream contains values at specific points in time, a contin For example, the result of aggregating a timestream produces a continuous stream that changes on each non-`null` input. -```{code-cell} ---- -tags: [remove-input] ---- +```{python} +# | echo: false +# | warning: false kd.plot.render( kd.plot.Plot(multi_entity.col("m").sum(), name="sum(m)") -) \ No newline at end of file +) +``` diff --git a/python/docs/source/guide/tour.md b/python/docs/guide/tour.qmd similarity index 88% rename from python/docs/source/guide/tour.md rename to python/docs/guide/tour.qmd index 487fa055a..824d40309 100644 --- a/python/docs/source/guide/tour.md +++ b/python/docs/guide/tour.qmd @@ -1,29 +1,21 @@ --- -file_format: mystnb -kernelspec: - name: python3 - display_name: Python 3 -mystnb: - execution_mode: cache +title: Tour of Kaskada --- - + This provides an overview of the key features in Kaskada that enable feature engineering on event-based data. -The [Quick Start](./quickstart.md) has details on how you can quickly get started running Kaskada queries. 
+The [Quick Start](./quickstart.qmd) has details on how you can quickly get started running Kaskada queries. For a more complete explanation, see the User Guide. This tour uses Kaskada and Plotly to render the illustrations. The initial setup / data is below. -```{code-cell} ---- -tags: [hide-cell] ---- +```{python} +# | code-fold: true import asyncio import kaskada as kd kd.init_session() @@ -52,21 +44,21 @@ A natural question to ask about the purchases is the total--or `sum`--of all pur This is accomplished by _aggregating_ the events. The results of an aggregation change over time as additional events occur. -```{code-cell} ---- -tags: [remove-input] ---- +```{python} +# | echo: false +# | warning: false + kd.plot.render( kd.plot.Plot(single_entity.col("m"), name="m"), kd.plot.Plot(single_entity.col("m").sum(), name="sum of m") ) ``` -The User Guide has [more details on aggregation](./aggregation.md), including how to use windows to control which events are aggregated. +The User Guide has [more details on aggregation](./aggregation.qmd), including how to use windows to control which events are aggregated. ## Discrete and Continuous We say that events (and values derived from them) are _discrete_ because they occur at specific in time. -and the results of the aggregation are [_continuous_](./timestreams.md#continuity). +and the results of the aggregation are [_continuous_](./timestreams.qmd#continuity). In the example, after the purchase with amount 13 the sum was 20. And it _continued_ to be 20 at every point in time until the next purchase was made, with amount 4. A continuous value is inclusive of the event that causes the value to change and exclusive of the next change. @@ -85,11 +77,11 @@ One way to understand this grouping is as a separate stream associated with each The stream of purchases for each user may be shown separately, as we do here, or it may be pictured flattened into a single stream keyed by user. 
The idea of grouped streams as separate, per-entity streams is often useful for understanding the behavior of Kaskada Timestreams. -```{todo} +::: {.callout-note title=Todo} Add example of multiple entity aggregation. -``` +::: -The User Guide has [more details on grouping](./entities.md), including how to change the grouping of a Timestream. +The User Guide has [more details on grouping](./entities.qmd), including how to change the grouping of a Timestream. ## History and Snapshots @@ -107,9 +99,9 @@ This would filter out points from the history, or limit the snapshot to only tho ## Windowed Aggregation -```{todo} +::: {.callout-note title=Todo} Update to reflect actual syntax. Include example. -``` +::: In addition to the default behavior of aggregating over all events up to a given time, aggregations may be performed over specific windows. For example, `hourly()` describes periodic windows of an hour. @@ -121,4 +113,4 @@ For instance, `sum(Purchases, window=hourly(), cumulative=false)` will produce t With the purchases in the previous example, this would mean that at 9:00 AM an event is produced containing the amount of the purchase at 8:45 AM, and at 10:00 AM an event is produced containing the sum of the purchases at 9:15 AM and 9:25 AM. A window must be specified when using a non-cumulative aggregation. -The section on [Aggregation](./aggregation.md#windowing) has more information on windowing. \ No newline at end of file +The section on [Aggregation](./aggregation.qmd#windowing) has more information on windowing. diff --git a/python/docs/source/guide/why.md b/python/docs/guide/why.qmd similarity index 91% rename from python/docs/source/guide/why.md rename to python/docs/guide/why.qmd index ff2e17b6d..3fb342c55 100644 --- a/python/docs/source/guide/why.md +++ b/python/docs/guide/why.qmd @@ -1,4 +1,6 @@ -# Why Kaskada? +--- +title: Why Kaskada? +--- Kaskada is a library for executing temporal queries over event-based data. 
An "event" can be any fact about the world associated with a time. @@ -12,4 +14,4 @@ With these traditional data processing systems, the answer changes based on when With Kaskada, the query "how many purchases has a given user made?" is expressed as a _Timestream_. This represents how the result of that query changes over time for each user. -Kaskada makes it easy to combine Timestreams to produce a new Timestream -- joining points from each input as needed. \ No newline at end of file +Kaskada makes it easy to combine Timestreams to produce a new Timestream -- joining points from each input as needed. diff --git a/python/docs/source/index.md b/python/docs/index.qmd similarity index 67% rename from python/docs/source/index.md rename to python/docs/index.qmd index 94ccd4ffc..070dd5d74 100644 --- a/python/docs/source/index.md +++ b/python/docs/index.qmd @@ -1,40 +1,48 @@ --- -html_theme.sidebar_secondary.remove: true -sd_hide_title: true +format: + html: + css: + - _static/css/design-style.css + link-external-icon: true + link-external-newwindow: true + toc: false + +listing: + - id: cards + template: _templates/card.ejs + contents: + - name: "Real-time Aggregation" + icon: bi bi-clock-history + content: Precompute model inputs from streaming data with robust data connectors, transformations & aggregations. + - name: Event Detection + icon: bi bi-binoculars-fill + content: Trigger pro-active AI behaviors by identifying important activities, as they happen. + - name: History Replay + icon: bi bi-skip-backward-fill + content: Backtest and fine-tune from historical data using per-example time travel and point-in-time joins. --- -# Real-Time AI without the fuss. - -
- - -

Real-Time AI without the fuss.

-
-

Kaskada is a next-generation streaming engine that connects AI models to real-time & historical data. -

-
-
+::::: {.px-4 .py-5 .my-5 .text-center} + + +

Real-Time AI without the fuss.

+::: {.col-lg-7 .mx-auto} +[Kaskada is a next-generation streaming engine that connects AI models to real-time & historical data.]{.lead .mb-4} +::: +::::: ## Kaskada completes the Real-Time AI stack, providing... -```{gallery-grid} -:grid-columns: 1 2 2 3 - -- header: "{fas}`timeline;pst-color-primary` Real-time Aggregation" - content: "Precompute model inputs from streaming data with robust data connectors, transformations & aggregations." -- header: "{fas}`binoculars;pst-color-primary` Event Detection" - content: "Trigger pro-active AI behaviors by identifying important activities, as they happen." -- header: "{fas}`backward;pst-color-primary` History Replay" - content: "Backtest and fine-tune from historical data using per-example time travel and point-in-time joins." -``` +::: {#cards .column-page} +::: ## Real-time AI in minutes -Connect and compute over databases, streaming data, _and_ data loaded dynamically using Python. +Connect and compute over databases, streaming data, _and_ data loaded dynamically using Python.. Kaskada is seamlessly integrated with Python's ecosystem of AI/ML tooling so you can load data, process it, train and serve models all in the same place. -There's no infrastructure to provision (and no JVM hiding under the covers), so you can jump right in - check out the [Quick Start](./guide/quickstart.md). +There's no infrastructure to provision (and no JVM hiding under the covers), so you can jump right in - check out the [Quick Start](./guide/quickstart.qmd). 
## Built for scale and reliability @@ -83,7 +91,7 @@ conversations = ( messages async for row in conversations.run_iter(mode='live'): # Use a pre-trained model to identify interested users - prompt = "\n\n".join([f' {msg["user"]} --> {msg["text"]} ' for msg in row["result"]]) + prompt = "\n\n".join([f'{msg["user"]} --> {msg["text"]}' for msg in row["result"]]) res = openai.Completion.create( model="davinci:ft-personal:coversation-users-full-kaskada-2023-08-05-14-25-30", prompt=prompt + "\n\n###\n\n", @@ -105,15 +113,4 @@ For more details, check out the [BeepGPT Github project](https://github.com/kask ## Get Started Getting started with Kaskda is a `pip install kaskada` away. -Check out the [Quick Start](./guide/quickstart.md) now! - -```{toctree} -:hidden: -:maxdepth: 3 - -guide/index -examples/index -community/index -reference/index -blog/index -``` \ No newline at end of file +Check out the [Quick Start](./guide/quickstart.qmd) now! diff --git a/python/docs/source/_extensions/gallery_directive.py b/python/docs/source/_extensions/gallery_directive.py deleted file mode 100644 index 5c3ef3fdc..000000000 --- a/python/docs/source/_extensions/gallery_directive.py +++ /dev/null @@ -1,144 +0,0 @@ -"""A directive to generate a gallery of images from structured data. - -Generating a gallery of images that are all the same size is a common -pattern in documentation, and this can be cumbersome if the gallery is -generated programmatically. This directive wraps this particular use-case -in a helper-directive to generate it with a single YAML configuration file. - -It currently exists for maintainers of the pydata-sphinx-theme, -but might be abstracted into a standalone package if it proves useful. 
-""" -from pathlib import Path -from typing import Any, Dict, List - -from docutils import nodes -from docutils.parsers.rst import directives -from sphinx.application import Sphinx -from sphinx.util import logging -from sphinx.util.docutils import SphinxDirective -from yaml import safe_load - - -logger = logging.getLogger(__name__) - - -TEMPLATE_GRID = """ -`````{{grid}} {columns} -{options} - -{content} - -````` -""" - -GRID_CARD = """ -````{{grid-item-card}} {title} -{options} - -{content} -```` -""" - - -class GalleryGridDirective(SphinxDirective): - """A directive to show a gallery of images and links in a Bootstrap grid. - - The grid can be generated from a YAML file that contains a list of items, or - from the content of the directive (also formatted in YAML). Use the parameter - "class-card" to add an additional CSS class to all cards. When specifying the grid - items, you can use all parameters from "grid-item-card" directive to customize - individual cards + ["image", "header", "content", "title"]. - - Danger: - This directive can only be used in the context of a Myst documentation page as - the templates use Markdown flavored formatting. 
- """ - - name = "gallery-grid" - has_content = True - required_arguments = 0 - optional_arguments = 1 - final_argument_whitespace = True - option_spec = { - # A class to be added to the resulting container - "grid-columns": directives.unchanged, - "class-container": directives.unchanged, - "class-card": directives.unchanged, - } - - def run(self) -> List[nodes.Node]: - """Create the gallery grid.""" - if self.arguments: - # If an argument is given, assume it's a path to a YAML file - # Parse it and load it into the directive content - path_data_rel = Path(self.arguments[0]) - path_doc, _ = self.get_source_info() - path_doc = Path(path_doc).parent - path_data = (path_doc / path_data_rel).resolve() - if not path_data.exists(): - logger.warn(f"Could not find grid data at {path_data}.") - nodes.text("No grid data found at {path_data}.") - return - yaml_string = path_data.read_text() - else: - yaml_string = "\n".join(self.content) - - # Use all the element with an img-bottom key as sites to show - # and generate a card item for each of them - grid_items = [] - for item in safe_load(yaml_string): - # remove parameters that are not needed for the card options - title = item.pop("title", "") - - # build the content of the card using some extra parameters - header = f"{item.pop('header')} \n^^^ \n" if "header" in item else "" - image = f"![image]({item.pop('image')}) \n" if "image" in item else "" - content = f"{item.pop('content')} \n" if "content" in item else "" - - # optional parameter that influence all cards - if "class-card" in self.options: - item["class-card"] = self.options["class-card"] - - loc_options_str = "\n".join(f":{k}: {v}" for k, v in item.items()) + " \n" - - card = GRID_CARD.format( - options=loc_options_str, content=header + image + content, title=title - ) - grid_items.append(card) - - # Parse the template with Sphinx Design to create an output container - # Prep the options for the template grid - class_ = "gallery-directive" + f' 
{self.options.get("class-container", "")}' - options = {"gutter": 2, "class-container": class_} - options_str = "\n".join(f":{k}: {v}" for k, v in options.items()) - - # Create the directive string for the grid - grid_directive = TEMPLATE_GRID.format( - columns=self.options.get("grid-columns", "1 2 3 4"), - options=options_str, - content="\n".join(grid_items), - ) - - # Parse content as a directive so Sphinx Design processes it - container = nodes.container() - self.state.nested_parse([grid_directive], 0, container) - - # Sphinx Design outputs a container too, so just use that - return [container.children[0]] - - -def setup(app: Sphinx) -> Dict[str, Any]: - """Add custom configuration to sphinx app. - - Args: - app: the Sphinx application - - Returns: - the 2 parallel parameters set to ``True``. - """ - app.add_directive("gallery-grid", GalleryGridDirective) - - return { - "parallel_read_safe": True, - "parallel_write_safe": True, - } diff --git a/python/docs/source/_layouts/default.yml b/python/docs/source/_layouts/default.yml deleted file mode 100644 index bc243933e..000000000 --- a/python/docs/source/_layouts/default.yml +++ /dev/null @@ -1,46 +0,0 @@ -layers: - # the base layer for the background - - background: - color: "#26364a" - image: >- - #% if page.meta.card_image -%# - '{{ page.meta.card_image }}' - #%- elif layout.background_image -%# - '{{ layout.background_image }}' - #%- endif %# - # the layer for the logo image - - size: { width: 300, height: 83 } - offset: { x: 60, y: 60 } - icon: - image: "_static/kaskada-negative.svg" - # the layer for the page's title - - size: { width: 920, height: 300 } - offset: { x: 60, y: 180 } - typography: - content: >- - #% if page.meta.title -%# - '{{ page.meta.title }}' - #%- elif page.title -%# - '{{ page.title }}' - #%- endif %# - line: - # height: 0.85 - amount: 3 - font: - weight: 500 - color: white - # the layer for the site's (or page's) description - - offset: { x: 60, y: 480 } - size: { width: 1080, height: 90 
} - typography: - content: >- - #% if page.meta and page.meta.description -%# - '{{ page.meta.description }}' - #%- else -%# - '{{ config.site_description }}' - #%- endif %# - line: - height: 0.87 - amount: 2 - align: start bottom - color: white \ No newline at end of file diff --git a/python/docs/source/_templates/autosummary/class.rst b/python/docs/source/_templates/autosummary/class.rst deleted file mode 100644 index bc23de59b..000000000 --- a/python/docs/source/_templates/autosummary/class.rst +++ /dev/null @@ -1,10 +0,0 @@ -:html_theme.sidebar_secondary.remove: - -{{ objname | escape | underline}} - -.. currentmodule:: {{ module }} - -.. auto{{ objtype }}:: {{ objname }} - :members: - :undoc-members: - :show-inheritance: \ No newline at end of file diff --git a/python/docs/source/_templates/autosummary/function.rst b/python/docs/source/_templates/autosummary/function.rst deleted file mode 100644 index 5a51f80a5..000000000 --- a/python/docs/source/_templates/autosummary/function.rst +++ /dev/null @@ -1,7 +0,0 @@ -:html_theme.sidebar_secondary.remove: - -{{ objname | escape | underline}} - -.. currentmodule:: {{ module }} - -.. auto{{ objtype }}:: {{ objname }} \ No newline at end of file diff --git a/python/docs/source/_templates/autosummary/method.rst b/python/docs/source/_templates/autosummary/method.rst deleted file mode 100644 index 5a51f80a5..000000000 --- a/python/docs/source/_templates/autosummary/method.rst +++ /dev/null @@ -1,7 +0,0 @@ -:html_theme.sidebar_secondary.remove: - -{{ objname | escape | underline}} - -.. currentmodule:: {{ module }} - -.. auto{{ objtype }}:: {{ objname }} \ No newline at end of file diff --git a/python/docs/source/blog/index.md b/python/docs/source/blog/index.md deleted file mode 100644 index 44144083b..000000000 --- a/python/docs/source/blog/index.md +++ /dev/null @@ -1,9 +0,0 @@ -# Blog - -```{eval-rst} -.. postlist:: - :list-style: circle - :format: {date} - {title} - :excerpts: - :expand: Read more ... 
-``` diff --git a/python/docs/source/conf.py b/python/docs/source/conf.py deleted file mode 100644 index 052726882..000000000 --- a/python/docs/source/conf.py +++ /dev/null @@ -1,183 +0,0 @@ -"""Sphinx configuration.""" -import sys -from pathlib import Path -from typing import Any, Dict - - -sys.path.append(str(Path(".").resolve())) - -project = "kaskada" -author = "Kaskada Contributors" -copyright = "2023, Kaskada Contributors" -extensions = [ - "ablog", - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.napoleon", - "sphinx.ext.intersphinx", - "sphinx.ext.todo", - "sphinx_design", - # "myst_parser", - "myst_nb", - "sphinx_copybutton", - "sphinx_autodoc_typehints", # must be after napoleon - "sphinx_social_cards", - "_extensions.gallery_directive", -] -autodoc_typehints = "description" -language = "en" - -html_theme = "pydata_sphinx_theme" -html_favicon = "_static/favicon.png" -html_title = "Kaskada" -html_js_files = [ - "https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js" -] - -# Configure the primary (left) sidebar. -# https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/layout.html#primary-sidebar-left -html_sidebars = { - # No primary (left) sidebar on the landing page. - "index.md": [], - # Blog sidebars - # https://ablog.readthedocs.io/manual/ablog-configuration-options/#blog-sidebars - "blog/**": [ - # Information about the post. - "ablog/postcard.html", - # 5 most recent posts - "ablog/recentposts.html", - # Tag cloud and links. - "ablog/tagcloud.html", - # Categories -- we just use tags for now. - # "ablog/categories.html", - # Show all authors on the sidebar. - # "ablog/authors.html", - # Show all languages on the sidebar. - # "ablog/languages.html", - # Show all locations on the sidebar. - # "ablog/locations.html", - "ablog/archives.html", - ], - "[!blog]*/*": ["sidebar-nav-bs"], -} - -html_theme_options: Dict[str, Any] = { - # Setup external links. 
- # https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/header-links.html - "icon_links": [ - { - "name": "GitHub", - "url": "https://github.com/kaskada-ai/kaskada", # required - "icon": "fa-brands fa-square-github", - "type": "fontawesome", - }, - { - "name": "Slack", - "url": "https://join.slack.com/t/kaskada-hq/shared_invite/zt-1t1lms085-bqs2jtGO2TYr9kuuam~c9w", - "icon": "fa-brands fa-slack", - }, - ], - # Setup edit buttons - # See https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/source-buttons.html - "use_edit_page_button": True, - # Include indices. - # https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/indices.html - "primary_sidebar_end": ["indices.html"], - # Provide an announcement at the top. - # https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/announcements.html# - "announcement": ( - "This describes the next version of Kaskada. " - "It is currently available as an alpha release." - ), - # Branding / logos. - # https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/branding.html - "logo": { - "image_light": "_static/kaskada-positive.svg", - "image_dark": "_static/kaskada-negative.svg", - }, - # Setup analytics. - # https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/analytics.html - "analytics": { - "google_analytics_id": "G-HR9E2E6TG4", - }, - # TODO: Version switcher. - # This would require hosting multiple versions of the docs. - # https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/version-dropdown.html -} - -templates_path = ["_templates"] -html_static_path = ["_static"] - -html_context = { - "github_user": "kaskada-ai", - "github_repo": "kaskada", - "github_version": "main", - "doc_path": "python/docs/source", -} - -# Setup links to other sphinx projects. 
-intersphinx_mapping: Dict[str, Any] = { - "python": ("http://docs.python.org/3", None), - "pandas": ("http://pandas.pydata.org/docs", None), - "pyarrow": ("https://arrow.apache.org/docs", None), -} - -# adds useful copy functionality to all the examples; also -# strips the '>>>' and '...' prompt/continuation prefixes. -copybutton_prompt_text = r">>> |\.\.\. " -copybutton_prompt_is_regexp = True - -# Options for Todos -todo_include_todos = True - -# Options for Myst (markdown) -# https://myst-parser.readthedocs.io/en/v0.17.1/syntax/optional.html -myst_enable_extensions = [ - "colon_fence", - "deflist", - "smartquotes", - "replacements", - "linkify", -] -myst_heading_anchors = 3 - -# -- Options for autodoc ---------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#configuration - -# Automatically extract typehints when specified and place them in -# descriptions of the relevant function/method. -autodoc_typehints = "description" -autodoc_type_aliases = {"kaskada.Arg": "kaskada.Arg"} - -# Don't show class signature with the class' name. 
-autodoc_class_signature = "separated" - -autosummary_generate = True - -napoleon_preprocess_types = True -napoleon_attr_annotations = True -napoleon_use_rtype = False -typehints_use_rtype = False -typehints_document_rtype = False -typehints_defaults = "comma" - -suppress_warnings = ["mystnb.unknown_mime_type"] - -blog_path = "blog" -blog_authors = { - "ben": ("Ben Chambers", "https://github.com/bjchambers"), - "ryan": ("Ryan Michael", "https://github.com/kerinin"), -} -post_date_format = "%Y-%b-%d" -post_date_format_short = "%Y-%b-%d" -post_show_prev_next = False - -# Generate social cards for blog posts -social_cards = { - "site_url": "https://kaskada.io/kaskada", - "description": "Kaskada: Real-Time AI without the fuss.", - "cards_layout_dir": ["_layouts"], - "cards_layout_options": { - "background_color": "#26364a", - }, -} diff --git a/python/docs/source/examples/index.md b/python/docs/source/examples/index.md deleted file mode 100644 index 3c8fa7cbe..000000000 --- a/python/docs/source/examples/index.md +++ /dev/null @@ -1,17 +0,0 @@ -# Examples - -The [Time-Centric Calculations example](./time_centric.ipynb) shows how to work with time and produce past training examples and recent results for applying models. - -The [Reddit example](./reddit.md) shows how to read and aggregate live messages from Reddit. - -The [Bluesky Firehose example](./bluesky.md) shows how to read and aggregate messages from the Bluesky firehose. -This demonstrates how to use Kaskada to connect in real-time and parse messages as part of the query. 
- -```{toctree} -:hidden: -:maxdepth: 2 - -time_centric -reddit -bluesky -``` \ No newline at end of file diff --git a/python/docs/source/examples/time_centric.ipynb b/python/docs/source/examples/time_centric.ipynb deleted file mode 100644 index 904e2203d..000000000 --- a/python/docs/source/examples/time_centric.ipynb +++ /dev/null @@ -1,344 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "5a20a51f", - "metadata": { - "id": "5a20a51f" - }, - "source": [ - "# Time-centric Calculations\n", - "\n", - "Kaskada was built to process and perform temporal calculations on event streams,\n", - "with real-time analytics and machine learning in mind. It is not exclusively for\n", - "real-time applications, but Kaskada excels at time-centric computations and\n", - "aggregations on event-based data.\n", - "\n", - "For example, let's say you're building a user analytics dashboard at an\n", - "ecommerce retailer. You have event streams showing all actions the user has\n", - "taken, and you'd like to include in the dashboard:\n", - "* the total number of events the user has ever generated\n", - "* the total number of purchases the user has made\n", - "* the total revenue from the user\n", - "* the number of purchases made by the user today\n", - "* the total revenue from the user today\n", - "* the number of events the user has generated in the past hour\n", - "\n", - "Because the calculations needed here are a mix of hourly, daily, and over all of\n", - "history, more than one type of event aggregation needs to happen. Table-centric\n", - "tools like those based on SQL would require multiple JOINs and window functions,\n", - "which would be spread over multiple queries or CTEs. 
\n", - "\n", - "Kaskada was designed for these types of time-centric calculations, so we can do\n", - "each of the calculations in the list in one line:\n", - "\n", - "```python\n", - "record({\n", - " \"event_count_total\": DemoEvents.count(),\n", - " \"purchases_total_count\": DemoEvents.filter(DemoEvents.col(\"event_name\").eq(\"purchase\")).count(),\n", - " \"revenue_total\": DemoEvents.col(\"revenue\").sum(),\n", - " \"purchases_daily\": DemoEvents.filter(DemoEvents.col(\"event_name\").eq(\"purchase\")).count(window=Daily()),\n", - " \"revenue_daily\": DemoEvents.col(\"revenue\").sum(window=Daily()),\n", - " \"event_count_hourly\": DemoEvents.count(window=Hourly()),\n", - "})\n", - "```\n", - "\n", - "```{warning}\n", - "The previous example demonstrates the use of `Daily()` and `Hourly()` windowing which aren't yet part of the new Python library.\n", - "```\n", - "\n", - "Of course, a few more lines of code are needed to put these calculations to work,\n", - "but these six lines are all that is needed to specify the calculations\n", - "themselves. Each line may specify:\n", - "* the name of a calculation (e.g. `event_count_total`)\n", - "* the input data to start with (e.g. `DemoEvents`)\n", - "* selecting event fields (e.g. `DemoEvents.col(\"revenue\")`)\n", - "* function calls (e.g. `count()`)\n", - "* event filtering (e.g. `filter(DemoEvents.col(\"event_name\").eq(\"purchase\"))`)\n", - "* time windows to calculate over (e.g. 
`window=Daily()`)\n", - "\n", - "...with consecutive steps chained together in a familiar way.\n", - "\n", - "Because Kaskada was built for time-centric calculations on event-based data, a\n", - "calculation we might describe as \"total number of purchase events for the user\"\n", - "can be defined in Kaskada in roughly the same number of terms as the verbal\n", - "description itself.\n", - "\n", - "Continue through the demo below to find out how it works.\n", - "\n", - "See [the Kaskada documentation](../guide/index) for lots more information." - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "BJ2EE9mSGtGB", - "metadata": { - "id": "BJ2EE9mSGtGB" - }, - "source": [ - "## Kaskada Client Setup\n", - "\n", - "```\n", - "%pip install kaskada>=0.6.0-a.0\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "37db47ba", - "metadata": { - "tags": [ - "hide-output" - ] - }, - "outputs": [], - "source": [ - "import kaskada as kd\n", - "\n", - "kd.init_session()" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "5b838eef", - "metadata": {}, - "source": [ - "## Example dataset\n", - "\n", - "For this demo, we'll use a very small example data set, which, for simplicity and portability of this demo notebook, we'll read from a string.\n", - "\n", - "```{note}\n", - "For simplicity, instead of a CSV file or other file format we read and then parse data from a CSV string.\n", - "You can load your own event data from many common sources, including Pandas DataFrames and Parquet files.\n", - "See {py:mod}`kaskada.sources` for more information on the available sources.\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ba4bb6b6", - "metadata": {}, - "outputs": [], - "source": [ - "import asyncio\n", - "\n", - "# For demo simplicity, instead of a CSV file, we read and then parse data from a\n", - "# CSV string. 
Kaskadaa\n", - "event_data_string = \"\"\"\n", - " event_id,event_at,entity_id,event_name,revenue\n", - " ev_00001,2022-01-01 22:01:00,user_001,login,0\n", - " ev_00002,2022-01-01 22:05:00,user_001,view_item,0\n", - " ev_00003,2022-01-01 22:20:00,user_001,view_item,0\n", - " ev_00004,2022-01-01 23:10:00,user_001,view_item,0\n", - " ev_00005,2022-01-01 23:20:00,user_001,view_item,0\n", - " ev_00006,2022-01-01 23:40:00,user_001,purchase,12.50\n", - " ev_00007,2022-01-01 23:45:00,user_001,view_item,0\n", - " ev_00008,2022-01-01 23:59:00,user_001,view_item,0\n", - " ev_00009,2022-01-02 05:30:00,user_001,login,0\n", - " ev_00010,2022-01-02 05:35:00,user_001,view_item,0\n", - " ev_00011,2022-01-02 05:45:00,user_001,view_item,0\n", - " ev_00012,2022-01-02 06:10:00,user_001,view_item,0\n", - " ev_00013,2022-01-02 06:15:00,user_001,view_item,0\n", - " ev_00014,2022-01-02 06:25:00,user_001,purchase,25\n", - " ev_00015,2022-01-02 06:30:00,user_001,view_item,0\n", - " ev_00016,2022-01-02 06:31:00,user_001,purchase,5.75\n", - " ev_00017,2022-01-02 07:01:00,user_001,view_item,0\n", - " ev_00018,2022-01-01 22:17:00,user_002,view_item,0\n", - " ev_00019,2022-01-01 22:18:00,user_002,view_item,0\n", - " ev_00020,2022-01-01 22:20:00,user_002,view_item,0\n", - "\"\"\"\n", - "\n", - "events = await kd.sources.CsvString.create(\n", - " event_data_string, time_column=\"event_at\", key_column=\"entity_id\"\n", - ")\n", - "\n", - "# Inspect the event data\n", - "events.preview()" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "568d1272", - "metadata": {}, - "source": [ - "## Define queries and calculations" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "c2c5a298", - "metadata": {}, - "source": [ - "Kaskada queries are defined in Python, using the `{py}Timestream` class.\n", - "Sources are Timestreams generally containing [records](../guide/data_types.md#record-types).\n", - "\n", - "Let's do a simple query for events for a specific entity ID.\n" 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bce22e47", - "metadata": {}, - "outputs": [], - "source": [ - "events.filter(events.col(\"entity_id\").eq(\"user_002\")).preview()" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "6b5f2725", - "metadata": {}, - "source": [ - "\n", - "Beyond querying for events, Kaskada has a powerful syntax for defining\n", - "calculations on events, temporally across history.\n", - "\n", - "The six calculations discussed at the top of this demo notebook are below.\n", - "\n", - "(Note that some functions return `NaN` if no events for that user have occurred\n", - "within the time window.)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3ad6d596", - "metadata": {}, - "outputs": [], - "source": [ - "purchases = events.filter(events.col(\"event_name\").eq(\"purchase\"))\n", - "\n", - "features = kd.record(\n", - " {\n", - " \"event_count_total\": events.count(),\n", - " # \"event_count_hourly\": events.count(window=Hourly()),\n", - " \"purchases_total_count\": purchases.count(),\n", - " # \"purchases_today\": purchases.count(window=Since(Daily()),\n", - " # \"revenue_today\": events.col(\"revenue\").sum(window=Since(Daily())),\n", - " \"revenue_total\": events.col(\"revenue\").sum(),\n", - " }\n", - ")\n", - "features.preview()" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "1c315938", - "metadata": {}, - "source": [ - "## At Any Time\n", - "\n", - "A key feature of Kaskada's time-centric design is the ability to query for\n", - "calculation values at any point in time. Traditional query languages (e.g. SQL)\n", - "can only return data that already exists---if we want to return a row of\n", - "computed/aggregated data, we have to compute the row first, then return it. 
As a\n", - "specific example, suppose we have SQL queries that produce daily aggregations\n", - "over event data, and now we want to have the same aggregations on an hourly\n", - "basis. In SQL, we would need to write new queries for hourly aggregations; the\n", - "queries would look very similar to the daily ones, but they would still be\n", - "different queries.\n", - "\n", - "With Kaskada, we can define the calculations once, and then specify the points\n", - "in time at which we want to know the calculation values when we query them.\n", - "\n", - "In the examples so far, we have used `preview()` to get a DataFrame containing\n", - "some of the rows from the Timestreams we've defined. By default, this produces\n", - "a _history_ containing all the times the result changed. This is useful for\n", - "using past values to create training examples.\n", - "\n", - "We can also execute the query for the values at a specific point in time." - ] - }, - { - "cell_type": "markdown", - "id": "082e174d", - "metadata": { - "tags": [ - "hide-output" - ] - }, - "source": [ - "```\n", - "features.preview(at=\"2022-01-01 22:00\")\n", - "``````" - ] - }, - { - "cell_type": "markdown", - "id": "5a44c5f7", - "metadata": {}, - "source": [ - "You can also compose a query that produces values at specific points in time.\n", - "\n", - "```\n", - "features.when(hourly())\n", - "```\n", - "\n", - "Regardless of the time cadence of the calculations themselves, the query output\n", - "can contain rows for whatever time points you specify. You can define a set of\n", - "daily calculations and then get hourly updates during the day. 
Or, you can\n", - "publish the definitions of some features in a Python module and different users\n", - "can query those same calculations for hourly, daily, and monthly\n", - "values---without editing the calculation definitions themselves.\n", - "\n", - "## Adding more calculations to the query\n", - "\n", - "We can add two new calculations, also in one line each, representing:\n", - "* the time of the user's first event\n", - "* the time of the user's last event\n" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "2ba09e77-0fdf-43f4-960b-50a126262ec7", - "metadata": { - "id": "2ba09e77-0fdf-43f4-960b-50a126262ec7" - }, - "source": [ - "This is only a small sample of possible Kaskada queries and capabilities. See\n", - "everything that's possible with [Timestreams](../reference/timestream/index.md)." - ] - } - ], - "metadata": { - "colab": { - "collapsed_sections": [ - "6924ca3e-28b3-4f93-b0cf-5f8afddc11d8", - "936700a9-e042-401c-9156-7bb18652e109", - "08f5921d-36dc-41d1-a2a6-ae800b7a11de" - ], - "private_outputs": true, - "provenance": [] - }, - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.4" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/python/docs/source/reference/destinations.md b/python/docs/source/reference/destinations.md deleted file mode 100644 index 4eb506273..000000000 --- a/python/docs/source/reference/destinations.md +++ /dev/null @@ -1,11 +0,0 @@ -# Destinations - -```{eval-rst} - -.. automodule:: kaskada.destinations - - .. 
autosummary:: - :toctree: apidocs/destinations - - Destination -``` \ No newline at end of file diff --git a/python/docs/source/reference/execution.md b/python/docs/source/reference/execution.md deleted file mode 100644 index 29b6ae883..000000000 --- a/python/docs/source/reference/execution.md +++ /dev/null @@ -1,11 +0,0 @@ -# Execution - -```{eval-rst} -.. currentmodule:: kaskada - -.. autosummary:: - :toctree: apidocs/ - - Execution - ResultIterator -``` \ No newline at end of file diff --git a/python/docs/source/reference/index.md b/python/docs/source/reference/index.md deleted file mode 100644 index 8ce92b2cb..000000000 --- a/python/docs/source/reference/index.md +++ /dev/null @@ -1,13 +0,0 @@ -# API - -```{toctree} -:hidden: -:maxdepth: 3 - -timestream/index -windows -sources -destinations -results -execution -``` diff --git a/python/docs/source/reference/results.md b/python/docs/source/reference/results.md deleted file mode 100644 index ce07a295c..000000000 --- a/python/docs/source/reference/results.md +++ /dev/null @@ -1,13 +0,0 @@ -# Results - -These classes allow configuring the results to be produced. - -```{eval-rst} -.. currentmodule:: kaskada.results - -.. autosummary:: - :toctree: apidocs/ - - History - Snapshot -``` \ No newline at end of file diff --git a/python/docs/source/reference/sources.md b/python/docs/source/reference/sources.md deleted file mode 100644 index 59a66a1be..000000000 --- a/python/docs/source/reference/sources.md +++ /dev/null @@ -1,17 +0,0 @@ -# Sources - -```{eval-rst} - -.. automodule:: kaskada.sources - - .. 
autosummary:: - :toctree: apidocs/sources - - Source - CsvString - JsonlFile - JsonlString - Pandas - Parquet - PyDict -``` \ No newline at end of file diff --git a/python/docs/source/reference/timestream/aggregation.md b/python/docs/source/reference/timestream/aggregation.md deleted file mode 100644 index 3adc384fe..000000000 --- a/python/docs/source/reference/timestream/aggregation.md +++ /dev/null @@ -1,28 +0,0 @@ -# Aggregation - -The User Guide has details on [aggregations in general](../../guide/aggregation.md). - -```{note} -It is important to remember that aggregations are partitioned by entity and windowed, with the default behavior being cumulative up to the current time. -``` - -## Aggregation Methods - -```{eval-rst} -.. currentmodule:: kaskada - -.. autosummary:: - :toctree: ../apidocs/ - - Timestream.collect - Timestream.count - Timestream.count_if - Timestream.first - Timestream.last - Timestream.max - Timestream.mean - Timestream.min - Timestream.stddev - Timestream.sum - Timestream.variance -``` \ No newline at end of file diff --git a/python/docs/source/reference/timestream/arithmetic.md b/python/docs/source/reference/timestream/arithmetic.md deleted file mode 100644 index 7f7a72564..000000000 --- a/python/docs/source/reference/timestream/arithmetic.md +++ /dev/null @@ -1,33 +0,0 @@ -# Arithmetic - -Timestreams support a variety of arithmetic operations. - -```{note} -Note: In addition to the chainable methods, standard operators are implemented where appropriate. -For instance, `a.add(b)` may be written as `a + b`. -See the notes on the specific functions for more information. -``` - -## Arithmetic Methods - -```{eval-rst} -.. currentmodule:: kaskada - -.. 
autosummary:: - :toctree: ../apidocs/ - - Timestream.add - Timestream.ceil - Timestream.clamp - Timestream.div - Timestream.exp - Timestream.floor - Timestream.greatest - Timestream.least - Timestream.mul - Timestream.neg - Timestream.powf - Timestream.round - Timestream.sqrt - Timestream.sub -``` \ No newline at end of file diff --git a/python/docs/source/reference/timestream/collection.md b/python/docs/source/reference/timestream/collection.md deleted file mode 100644 index 90da93a85..000000000 --- a/python/docs/source/reference/timestream/collection.md +++ /dev/null @@ -1,18 +0,0 @@ -# Collections - -Timestreams allow each point to contain a collection -- a `list` or `map` -- of elements. - -## Collection Methods - -```{eval-rst} -.. currentmodule:: kaskada - -.. autosummary:: - :toctree: ../apidocs/ - - Timestream.__getitem__ - Timestream.flatten - Timestream.index - Timestream.length - Timestream.union -``` \ No newline at end of file diff --git a/python/docs/source/reference/timestream/comparison.md b/python/docs/source/reference/timestream/comparison.md deleted file mode 100644 index bdf014ba9..000000000 --- a/python/docs/source/reference/timestream/comparison.md +++ /dev/null @@ -1,29 +0,0 @@ -# Comparison - -Comparison operations produce boolean Timestreams. - -```{note} -Note: In addition to the chainable methods, standard operators are implemented where appropriate. -For instance, `a.ge(b)` may be written as `a >= b`. -See the notes on the specific functions for more information. - -To respect the semantics of `__eq__` and `__ne__`, `a == b` and `a != b` are *not* overloaded. -``` - -## Comparison Methods - -```{eval-rst} -.. currentmodule:: kaskada - -.. 
autosummary:: - :toctree: ../apidocs/ - - Timestream.eq - Timestream.ge - Timestream.gt - Timestream.le - Timestream.lt - Timestream.ne - Timestream.is_null - Timestream.is_not_null -``` \ No newline at end of file diff --git a/python/docs/source/reference/timestream/execution.md b/python/docs/source/reference/timestream/execution.md deleted file mode 100644 index 32c87b574..000000000 --- a/python/docs/source/reference/timestream/execution.md +++ /dev/null @@ -1,13 +0,0 @@ -# Execution - -```{eval-rst} -.. currentmodule:: kaskada - -.. autosummary:: - :toctree: ../apidocs/ - - Timestream.preview - Timestream.to_pandas - Timestream.run_iter - Timestream.write -``` \ No newline at end of file diff --git a/python/docs/source/reference/timestream/grouping.md b/python/docs/source/reference/timestream/grouping.md deleted file mode 100644 index 47575a856..000000000 --- a/python/docs/source/reference/timestream/grouping.md +++ /dev/null @@ -1,11 +0,0 @@ -# Grouping - -```{eval-rst} -.. currentmodule:: kaskada - -.. autosummary:: - :toctree: ../apidocs/ - - Timestream.lookup - Timestream.with_key -``` \ No newline at end of file diff --git a/python/docs/source/reference/timestream/index.md b/python/docs/source/reference/timestream/index.md deleted file mode 100644 index eaf46a2d6..000000000 --- a/python/docs/source/reference/timestream/index.md +++ /dev/null @@ -1,33 +0,0 @@ ---- -html_theme.sidebar_secondary.remove: ---- - -# Timestream - -```{eval-rst} -.. currentmodule:: kaskada - -.. autoclass:: kaskada.Timestream - :exclude-members: __init__ - - .. autoproperty:: data_type - -.. autoclass:: kaskada.Arg -.. 
autoclass:: kaskada.LiteralValue -``` - -```{toctree} -:hidden: - -aggregation -arithmetic -collection -comparison -execution -grouping -logical -misc -records -string -time -``` \ No newline at end of file diff --git a/python/docs/source/reference/timestream/logical.md b/python/docs/source/reference/timestream/logical.md deleted file mode 100644 index e2eeb9927..000000000 --- a/python/docs/source/reference/timestream/logical.md +++ /dev/null @@ -1,12 +0,0 @@ -# Logical - -```{eval-rst} -.. currentmodule:: kaskada - -.. autosummary:: - :toctree: ../apidocs/ - - Timestream.and_ - Timestream.or_ - Timestream.not_ -``` \ No newline at end of file diff --git a/python/docs/source/reference/timestream/misc.md b/python/docs/source/reference/timestream/misc.md deleted file mode 100644 index 609c29f4a..000000000 --- a/python/docs/source/reference/timestream/misc.md +++ /dev/null @@ -1,18 +0,0 @@ -# Miscellaneous - -```{eval-rst} -.. currentmodule:: kaskada - -.. autosummary:: - :toctree: ../apidocs/ - - Timestream.cast - Timestream.coalesce - Timestream.else_ - Timestream.filter - Timestream.hash - Timestream.if_ - Timestream.lag - Timestream.null_if - Timestream.pipe -``` \ No newline at end of file diff --git a/python/docs/source/reference/timestream/records.md b/python/docs/source/reference/timestream/records.md deleted file mode 100644 index 24a2b4743..000000000 --- a/python/docs/source/reference/timestream/records.md +++ /dev/null @@ -1,19 +0,0 @@ -# Records - -Record operations create, extract or manipulate Timestreams of records. - -## Record Methods - -```{eval-rst} -.. currentmodule:: kaskada - -.. 
autosummary:: - :toctree: ../apidocs/ - - Timestream.col - Timestream.select - Timestream.remove - Timestream.extend - Timestream.record - record -``` \ No newline at end of file diff --git a/python/docs/source/reference/timestream/string.md b/python/docs/source/reference/timestream/string.md deleted file mode 100644 index 6649e8ed5..000000000 --- a/python/docs/source/reference/timestream/string.md +++ /dev/null @@ -1,13 +0,0 @@ -# String - -```{eval-rst} -.. currentmodule:: kaskada - -.. autosummary:: - :toctree: ../apidocs/ - - Timestream.len - Timestream.lower - Timestream.substring - Timestream.upper -``` \ No newline at end of file diff --git a/python/docs/source/reference/timestream/time.md b/python/docs/source/reference/timestream/time.md deleted file mode 100644 index a1e2e3aab..000000000 --- a/python/docs/source/reference/timestream/time.md +++ /dev/null @@ -1,15 +0,0 @@ -# Time - -```{eval-rst} -.. currentmodule:: kaskada - -.. autosummary:: - :toctree: ../apidocs/ - - Timestream.shift_by - Timestream.shift_to - Timestream.shift_until - Timestream.time - Timestream.seconds_since - Timestream.seconds_since_previous -``` \ No newline at end of file diff --git a/python/docs/source/reference/windows.md b/python/docs/source/reference/windows.md deleted file mode 100644 index d14513a98..000000000 --- a/python/docs/source/reference/windows.md +++ /dev/null @@ -1,13 +0,0 @@ -# Windows - -```{eval-rst} -.. currentmodule:: kaskada.windows - -.. autosummary:: - :toctree: apidocs/windows/ - - Window - Since - Sliding - Trailing -``` \ No newline at end of file diff --git a/python/noxfile.py b/python/noxfile.py index 5648b05eb..4f62424e1 100644 --- a/python/noxfile.py +++ b/python/noxfile.py @@ -50,7 +50,7 @@ def fix_lint(session: nox.Session) -> None: @nox.session(python=python_versions[0]) def safety(session: nox.Session) -> None: """Scan dependencies for insecure packages.""" - # NOTE: Pass `extras` to `export_requirements` if the project supports any. 
+ # NOTE: Pass `extras` to `export_requirements` if the project supports any. requirements = export_requirements(session) install(session, groups=["safety"], root=False) session.run("safety", "check", "--full-report", f"--file={requirements}") @@ -114,36 +114,60 @@ def xdoctest(session: nox.Session) -> None: install(session, groups=["test"]) session.run("python", "-m", "xdoctest", *args) -@nox.session(name="docs-build", python=python_versions[0]) -def docs_build(session: nox.Session) -> None: - """Build the documentation.""" - # ablog doesn't currently indicate whether it supports parallel reads, - # leading to a warning. - # when possible, add `"-j", "auto",` to do parallel builds (and in CI). - args = session.posargs or ["docs/source", "docs/_build", "-W"] - if not session.posargs and "FORCE_COLOR" in os.environ: - args.insert(0, "--color") - install(session, groups=["typecheck", "docs"]) +@nox.session(name="docs-clean", python=python_versions[0]) +def docs_clean(session: nox.Session) -> None: + """Clean up generated and cached docs""" + + for item in ["_inv", "_site", ".quarto", "objects.json", "reference"]: + p = Path("docs", item) + if p.exists() and p.is_dir(): + shutil.rmtree(p) + elif p.exists() and p.is_file(): + p.unlink() - build_dir = Path("docs", "_build") - if build_dir.exists(): - shutil.rmtree(build_dir) - session.run("sphinx-build", *args) +@nox.session(name="docs-gen", python=python_versions[0]) +def docs_gen(session: nox.Session) -> None: + """Generate API reference docs""" + install(session, groups=["docs"]) + + with session.chdir("docs"): + session.run("python", "_scripts/gen_reference.py") + session.run("python", "-m", "quartodoc", "interlinks") @nox.session(python=python_versions[0]) def docs(session: nox.Session) -> None: """Build and serve the documentation with live reloading on file changes.""" - args = ["--open-browser", "docs/source", "docs/_build", "-j", "auto", "--ignore", "*/apidocs/*", "--watch", "pysrc/kaskada"] - install(session, 
groups=["typecheck", "docs"]) +    if not shutil.which("quarto"): +        session.error("Unable to execute `quarto`. Is it installed and in your path?\n" +                      "See https://quarto.org/docs/get-started/") + +    install(session, groups=["docs"]) + +    with session.chdir("docs"): +        session.run("quarto", "preview", external=True) + +@nox.session(name="docs-lint", python=python_versions[0]) +def docs_lint(session: nox.Session) -> None: +    """Lints the API reference config and lets you know about any missing items""" +    install(session, groups=["docs"]) + +    with session.chdir("docs"): +        session.run("python", "_scripts/lint_reference.py") + +@nox.session(name="docs-build", python=python_versions[0]) +def docs_build(session: nox.Session) -> None: +    """Build the docs.""" +    if not shutil.which("quarto"): +        session.error("Unable to execute `quarto`. Is it installed and in your path?\n" +                      "See https://quarto.org/docs/get-started/") 
+name = "anyio" +version = "4.0.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "ablog-0.11.5-py3-none-any.whl", hash = "sha256:158a9638244c3e9183d5b0f8368857ec125d3a2087d0e943363a915df0afc3ce"}, - {file = "ablog-0.11.5.tar.gz", hash = "sha256:b9d2d3d14c9f164278822cd17b6deb36d88356282a309a0aa8ca9e7f166d6889"}, + {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"}, + {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"}, ] [package.dependencies] -docutils = ">=0.18" -feedgen = ">=0.9.0" -invoke = ">=1.6.0" -packaging = ">=19.0" -python-dateutil = ">=2.8.2" -sphinx = ">=5.0.0" -watchdog = ">=2.1.0" +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" [package.extras] -all = ["ipython (>=7.30.0)", "myst-parser (>=0.17.0)", "nbsphinx (>=0.8.0)"] -dev = ["alabaster", "ipython (>=7.30.0)", "myst-parser (>=0.17.0)", "nbsphinx (>=0.8.0)", "pytest", "sphinx-automodapi"] -docs = ["alabaster", "sphinx-automodapi"] -markdown = ["myst-parser (>=0.17.0)"] -notebook = ["ipython (>=7.30.0)", "nbsphinx (>=0.8.0)"] -tests = ["pytest"] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.22)"] [[package]] -name = "accessible-pygments" -version = "0.0.4" -description = "A collection of accessible pygments styles" +name = "appnope" +version = "0.1.3" +description = "Disable App Nap on macOS >= 10.9" optional = false python-versions = "*" files = [ - {file = "accessible-pygments-0.0.4.tar.gz", hash = 
"sha256:e7b57a9b15958e9601c7e9eb07a440c813283545a20973f2574a5f453d0e953e"}, - {file = "accessible_pygments-0.0.4-py2.py3-none-any.whl", hash = "sha256:416c6d8c1ea1c5ad8701903a20fcedf953c6e720d64f33dc47bfb2d3f2fa4e8d"}, + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, ] -[package.dependencies] -pygments = ">=1.5" - [[package]] -name = "alabaster" -version = "0.7.13" -description = "A configurable sidebar-enabled Sphinx theme" +name = "argcomplete" +version = "3.1.2" +description = "Bash tab completion for argparse" optional = false python-versions = ">=3.6" files = [ - {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, - {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, + {file = "argcomplete-3.1.2-py3-none-any.whl", hash = "sha256:d97c036d12a752d1079f190bc1521c545b941fda89ad85d15afa909b4d1b9a99"}, + {file = "argcomplete-3.1.2.tar.gz", hash = "sha256:d5d1e5efd41435260b8f85673b74ea2e883affcbec9f4230c582689e8e78251b"}, ] +[package.extras] +test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] + [[package]] -name = "annotated-types" -version = "0.5.0" -description = "Reusable constraint types to use with typing.Annotated" +name = "argon2-cffi" +version = "23.1.0" +description = "Argon2 for Python" optional = false python-versions = ">=3.7" files = [ - {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, - {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = 
"sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, ] +[package.dependencies] +argon2-cffi-bindings = "*" + +[package.extras] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] + [[package]] -name = "appnope" -version = "0.1.3" -description = "Disable App Nap on macOS >= 10.9" +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" optional = false -python-versions = "*" +python-versions = ">=3.6" files = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, -] + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = 
"sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = 
"argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] +tests = ["pytest"] [[package]] -name = "argcomplete" -version = "3.1.2" -description = "Bash tab completion for argparse" +name = "arrow" +version = "1.3.0" +description = "Better dates & times for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "argcomplete-3.1.2-py3-none-any.whl", hash = "sha256:d97c036d12a752d1079f190bc1521c545b941fda89ad85d15afa909b4d1b9a99"}, - {file = "argcomplete-3.1.2.tar.gz", hash = "sha256:d5d1e5efd41435260b8f85673b74ea2e883affcbec9f4230c582689e8e78251b"}, + {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, + {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, ] +[package.dependencies] +python-dateutil = ">=2.7.0" +types-python-dateutil = ">=2.8.10" + [package.extras] -test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] +doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] +test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", 
"pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] [[package]] name = "asttokens" @@ -106,6 +139,20 @@ six = ">=1.12.0" [package.extras] test = ["astroid", "pytest"] +[[package]] +name = "async-lru" +version = "2.0.4" +description = "Simple LRU cache for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, + {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + [[package]] name = "attrs" version = "23.1.0" @@ -164,6 +211,24 @@ files = [ {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, ] +[[package]] +name = "beartype" +version = "0.16.2" +description = "Unbearably fast runtime type checking in pure Python." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "beartype-0.16.2-py3-none-any.whl", hash = "sha256:72d133615fe674affc8c49365dd24dfe2260552b9a8a2b7193cdd48021527782"}, + {file = "beartype-0.16.2.tar.gz", hash = "sha256:47ec1c8c3be3f999f4f9f829e8913f65926aa7e85b180d9ffd305dc78d3e7d7b"}, +] + +[package.extras] +all = ["typing-extensions (>=3.10.0.0)"] +dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "mypy (>=0.800)", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] +doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] +test-tox = ["mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] +test-tox-coverage = ["coverage (>=5.5)"] + [[package]] name = "beautifulsoup4" version = "4.12.2" @@ -230,19 +295,37 @@ d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "bleach" +version = "6.1.0" +description = "An easy safelist-based HTML-sanitizing tool." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, +] + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.3)"] + [[package]] name = "boto3" -version = "1.28.61" +version = "1.28.62" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.28.61-py3-none-any.whl", hash = "sha256:ec49986e6c9549177e351494de64886c3f9daffd1a7af9e40302208aa1ffff1c"}, - {file = "boto3-1.28.61.tar.gz", hash = "sha256:7a539aaf00eb45aea1ae857ef5d05e67def24fc07af4cb36c202fa45f8f30590"}, + {file = "boto3-1.28.62-py3-none-any.whl", hash = "sha256:0dfa2fc96ccafce4feb23044d6cba8b25075ad428a0c450d369d099c6a1059d2"}, + {file = "boto3-1.28.62.tar.gz", hash = "sha256:148eeba0f1867b3db5b3e5ae2997d75a94d03fad46171374a0819168c36f7ed0"}, ] [package.dependencies] -botocore = ">=1.31.61,<1.32.0" +botocore = ">=1.31.62,<1.32.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.7.0,<0.8.0" @@ -631,19 +714,22 @@ xray = ["mypy-boto3-xray (>=1.28.0,<1.29.0)"] [[package]] name = "botocore" -version = "1.31.61" +version = "1.31.62" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.31.61-py3-none-any.whl", hash = "sha256:433bf93af09ad205d6db4c2ffc1f0e3193ddad4e0aced0a68ad8b0fa9de903e2"}, - {file = "botocore-1.31.61.tar.gz", hash = "sha256:39b059603f0e92a26599eecc7fe9b141f13eb412c964786ca3a7df5375928c87"}, + {file = "botocore-1.31.62-py3-none-any.whl", hash = "sha256:be792d806afc064694a2d0b9b25779f3ca0c1584b29a35ac32e67f0064ddb8b7"}, + {file = "botocore-1.31.62.tar.gz", hash = "sha256:272b78ac65256b6294cb9cdb0ac484d447ad3a85642e33cb6a3b1b8afee15a4c"}, ] [package.dependencies] jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" -urllib3 = ">=1.25.4,<1.27" +urllib3 = [ + {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}, +] [package.extras] crt = ["awscrt (==0.16.26)"] @@ -1002,6 +1088,17 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] + [[package]] name = "distlib" version = "0.3.7" @@ -1106,20 +1203,6 @@ files = [ [package.extras] devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] -[[package]] -name = "feedgen" -version = "0.9.0" -description = "Feed Generator (ATOM, RSS, Podcasts)" -optional = false -python-versions = "*" -files = [ - {file = "feedgen-0.9.0.tar.gz", hash = "sha256:8e811bdbbed6570034950db23a4388453628a70e689a6e8303ccec430f5a804a"}, -] - 
-[package.dependencies] -lxml = "*" -python-dateutil = "*" - [[package]] name = "filelock" version = "3.12.4" @@ -1189,6 +1272,17 @@ restructuredtext-lint = "*" [package.extras] develop = ["build", "twine"] +[[package]] +name = "fqdn" +version = "1.5.1" +description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +optional = false +python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +files = [ + {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, + {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, +] + [[package]] name = "graphviz" version = "0.20.1" @@ -1206,79 +1300,18 @@ docs = ["sphinx (>=5)", "sphinx-autodoc-typehints", "sphinx-rtd-theme"] test = ["coverage", "mock (>=4)", "pytest (>=7)", "pytest-cov", "pytest-mock (>=3)"] [[package]] -name = "greenlet" -version = "3.0.0" -description = "Lightweight in-process concurrent programming" +name = "griffe" +version = "0.32.3" +description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "greenlet-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e09dea87cc91aea5500262993cbd484b41edf8af74f976719dd83fe724644cd6"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47932c434a3c8d3c86d865443fadc1fbf574e9b11d6650b656e602b1797908a"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdfaeecf8cc705d35d8e6de324bf58427d7eafb55f67050d8f28053a3d57118c"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a68d670c8f89ff65c82b936275369e532772eebc027c3be68c6b87ad05ca695"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ad562a104cd41e9d4644f46ea37167b93190c6d5e4048fcc4b80d34ecb278f"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02a807b2a58d5cdebb07050efe3d7deaf915468d112dfcf5e426d0564aa3aa4a"}, - {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1660a15a446206c8545edc292ab5c48b91ff732f91b3d3b30d9a915d5ec4779"}, - {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:813720bd57e193391dfe26f4871186cf460848b83df7e23e6bef698a7624b4c9"}, - {file = "greenlet-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:aa15a2ec737cb609ed48902b45c5e4ff6044feb5dcdfcf6fa8482379190330d7"}, - {file = "greenlet-3.0.0-cp310-universal2-macosx_11_0_x86_64.whl", hash = "sha256:7709fd7bb02b31908dc8fd35bfd0a29fc24681d5cc9ac1d64ad07f8d2b7db62f"}, - {file = "greenlet-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:211ef8d174601b80e01436f4e6905aca341b15a566f35a10dd8d1e93f5dbb3b7"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6512592cc49b2c6d9b19fbaa0312124cd4c4c8a90d28473f86f92685cc5fef8e"}, - {file 
= "greenlet-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:871b0a8835f9e9d461b7fdaa1b57e3492dd45398e87324c047469ce2fc9f516c"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b505fcfc26f4148551826a96f7317e02c400665fa0883fe505d4fcaab1dabfdd"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123910c58234a8d40eaab595bc56a5ae49bdd90122dde5bdc012c20595a94c14"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:96d9ea57292f636ec851a9bb961a5cc0f9976900e16e5d5647f19aa36ba6366b"}, - {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b72b802496cccbd9b31acea72b6f87e7771ccfd7f7927437d592e5c92ed703c"}, - {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:527cd90ba3d8d7ae7dceb06fda619895768a46a1b4e423bdb24c1969823b8362"}, - {file = "greenlet-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:37f60b3a42d8b5499be910d1267b24355c495064f271cfe74bf28b17b099133c"}, - {file = "greenlet-3.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1482fba7fbed96ea7842b5a7fc11d61727e8be75a077e603e8ab49d24e234383"}, - {file = "greenlet-3.0.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:be557119bf467d37a8099d91fbf11b2de5eb1fd5fc5b91598407574848dc910f"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2f1922a39d5d59cc0e597987300df3396b148a9bd10b76a058a2f2772fc04"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1e22c22f7826096ad503e9bb681b05b8c1f5a8138469b255eb91f26a76634f2"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d363666acc21d2c204dd8705c0e0457d7b2ee7a76cb16ffc099d6799744ac99"}, - {file = 
"greenlet-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:334ef6ed8337bd0b58bb0ae4f7f2dcc84c9f116e474bb4ec250a8bb9bd797a66"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6672fdde0fd1a60b44fb1751a7779c6db487e42b0cc65e7caa6aa686874e79fb"}, - {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:952256c2bc5b4ee8df8dfc54fc4de330970bf5d79253c863fb5e6761f00dda35"}, - {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:269d06fa0f9624455ce08ae0179430eea61085e3cf6457f05982b37fd2cefe17"}, - {file = "greenlet-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9adbd8ecf097e34ada8efde9b6fec4dd2a903b1e98037adf72d12993a1c80b51"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b5ce7f40f0e2f8b88c28e6691ca6806814157ff05e794cdd161be928550f4c"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf94aa539e97a8411b5ea52fc6ccd8371be9550c4041011a091eb8b3ca1d810"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80dcd3c938cbcac986c5c92779db8e8ce51a89a849c135172c88ecbdc8c056b7"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e52a712c38e5fb4fd68e00dc3caf00b60cb65634d50e32281a9d6431b33b4af1"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5539f6da3418c3dc002739cb2bb8d169056aa66e0c83f6bacae0cd3ac26b423"}, - {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:343675e0da2f3c69d3fb1e894ba0a1acf58f481f3b9372ce1eb465ef93cf6fed"}, - {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:abe1ef3d780de56defd0c77c5ba95e152f4e4c4e12d7e11dd8447d338b85a625"}, - {file = "greenlet-3.0.0-cp37-cp37m-win32.whl", hash = 
"sha256:e693e759e172fa1c2c90d35dea4acbdd1d609b6936115d3739148d5e4cd11947"}, - {file = "greenlet-3.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bdd696947cd695924aecb3870660b7545a19851f93b9d327ef8236bfc49be705"}, - {file = "greenlet-3.0.0-cp37-universal2-macosx_11_0_x86_64.whl", hash = "sha256:cc3e2679ea13b4de79bdc44b25a0c4fcd5e94e21b8f290791744ac42d34a0353"}, - {file = "greenlet-3.0.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:63acdc34c9cde42a6534518e32ce55c30f932b473c62c235a466469a710bfbf9"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a1a6244ff96343e9994e37e5b4839f09a0207d35ef6134dce5c20d260d0302c"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b822fab253ac0f330ee807e7485769e3ac85d5eef827ca224feaaefa462dc0d0"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8060b32d8586e912a7b7dac2d15b28dbbd63a174ab32f5bc6d107a1c4143f40b"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:621fcb346141ae08cb95424ebfc5b014361621b8132c48e538e34c3c93ac7365"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6bb36985f606a7c49916eff74ab99399cdfd09241c375d5a820bb855dfb4af9f"}, - {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10b5582744abd9858947d163843d323d0b67be9432db50f8bf83031032bc218d"}, - {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f351479a6914fd81a55c8e68963609f792d9b067fb8a60a042c585a621e0de4f"}, - {file = "greenlet-3.0.0-cp38-cp38-win32.whl", hash = "sha256:9de687479faec7db5b198cc365bc34addd256b0028956501f4d4d5e9ca2e240a"}, - {file = "greenlet-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:3fd2b18432e7298fcbec3d39e1a0aa91ae9ea1c93356ec089421fabc3651572b"}, - {file = "greenlet-3.0.0-cp38-universal2-macosx_11_0_x86_64.whl", hash = 
"sha256:3c0d36f5adc6e6100aedbc976d7428a9f7194ea79911aa4bf471f44ee13a9464"}, - {file = "greenlet-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4cd83fb8d8e17633ad534d9ac93719ef8937568d730ef07ac3a98cb520fd93e4"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a5b2d4cdaf1c71057ff823a19d850ed5c6c2d3686cb71f73ae4d6382aaa7a06"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e7dcdfad252f2ca83c685b0fa9fba00e4d8f243b73839229d56ee3d9d219314"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94e4e924d09b5a3e37b853fe5924a95eac058cb6f6fb437ebb588b7eda79870"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6fb737e46b8bd63156b8f59ba6cdef46fe2b7db0c5804388a2d0519b8ddb99"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d55db1db455c59b46f794346efce896e754b8942817f46a1bada2d29446e305a"}, - {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:56867a3b3cf26dc8a0beecdb4459c59f4c47cdd5424618c08515f682e1d46692"}, - {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a812224a5fb17a538207e8cf8e86f517df2080c8ee0f8c1ed2bdaccd18f38f4"}, - {file = "greenlet-3.0.0-cp39-cp39-win32.whl", hash = "sha256:0d3f83ffb18dc57243e0151331e3c383b05e5b6c5029ac29f754745c800f8ed9"}, - {file = "greenlet-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:831d6f35037cf18ca5e80a737a27d822d87cd922521d18ed3dbc8a6967be50ce"}, - {file = "greenlet-3.0.0-cp39-universal2-macosx_11_0_x86_64.whl", hash = "sha256:a048293392d4e058298710a54dfaefcefdf49d287cd33fb1f7d63d55426e4355"}, - {file = "greenlet-3.0.0.tar.gz", hash = "sha256:19834e3f91f485442adc1ee440171ec5d9a4840a1f7bd5ed97833544719ce10b"}, -] - -[package.extras] -docs = ["Sphinx"] -test = ["objgraph", "psutil"] + {file = 
"griffe-0.32.3-py3-none-any.whl", hash = "sha256:d9471934225818bf8f309822f70451cc6abb4b24e59e0bb27402a45f9412510f"}, + {file = "griffe-0.32.3.tar.gz", hash = "sha256:14983896ad581f59d5ad7b6c9261ff12bdaa905acccc1129341d13e545da8521"}, +] + +[package.dependencies] +colorama = ">=0.4" [[package]] name = "idna" @@ -1291,17 +1324,6 @@ files = [ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] -[[package]] -name = "imagesize" -version = "1.4.1" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] - [[package]] name = "importlib-metadata" version = "6.8.0" @@ -1321,6 +1343,24 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +[[package]] +name = "importlib-resources" +version = "6.1.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.1.0-py3-none-any.whl", hash = "sha256:aa50258bbfa56d4e33fbd8aa3ef48ded10d1735f11532b8df95388cc6bdb7e83"}, + {file = "importlib_resources-6.1.0.tar.gz", hash = "sha256:9d48dcccc213325e810fd723e7fbb45ccb39f6cf5c31f00cf2b965f5f10f3cb9"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", 
"rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -1332,17 +1372,6 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "invoke" -version = "2.2.0" -description = "Pythonic task execution" -optional = false -python-versions = ">=3.6" -files = [ - {file = "invoke-2.2.0-py3-none-any.whl", hash = "sha256:6ea924cc53d4f78e3d98bc436b08069a03077e6f85ad1ddaa8a116d7dad15820"}, - {file = "invoke-2.2.0.tar.gz", hash = "sha256:ee6cbb101af1a859c7fe84f2a264c059020b0cb7fe3535f9424300ab568f6bd5"}, -] - [[package]] name = "ipykernel" version = "6.25.2" @@ -1416,6 +1445,52 @@ qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] +[[package]] +name = "ipython-genutils" +version = "0.2.0" +description = "Vestigial utilities from IPython" +optional = false +python-versions = "*" +files = [ + {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, + {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, +] + +[[package]] +name = "ipywidgets" +version = "8.1.1" +description = "Jupyter interactive widgets" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ipywidgets-8.1.1-py3-none-any.whl", hash = "sha256:2b88d728656aea3bbfd05d32c747cfd0078f9d7e159cf982433b58ad717eed7f"}, + {file = "ipywidgets-8.1.1.tar.gz", hash = "sha256:40211efb556adec6fa450ccc2a77d59ca44a060f4f9f136833df59c9f538e6e8"}, +] + 
+[package.dependencies] +comm = ">=0.1.3" +ipython = ">=6.1.0" +jupyterlab-widgets = ">=3.0.9,<3.1.0" +traitlets = ">=4.3.1" +widgetsnbextension = ">=4.0.9,<4.1.0" + +[package.extras] +test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] + +[[package]] +name = "isoduration" +version = "20.11.0" +description = "Operations with ISO 8601 durations" +optional = false +python-versions = ">=3.7" +files = [ + {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, + {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, +] + +[package.dependencies] +arrow = ">=0.15.0" + [[package]] name = "isort" version = "5.12.0" @@ -1480,6 +1555,31 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "json5" +version = "0.9.14" +description = "A Python implementation of the JSON5 data format." 
+optional = false +python-versions = "*" +files = [ + {file = "json5-0.9.14-py2.py3-none-any.whl", hash = "sha256:740c7f1b9e584a468dbb2939d8d458db3427f2c93ae2139d05f47e453eae964f"}, + {file = "json5-0.9.14.tar.gz", hash = "sha256:9ed66c3a6ca3510a976a9ef9b8c0787de24802724ab1860bc0153c7fdd589b02"}, +] + +[package.extras] +dev = ["hypothesis"] + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + [[package]] name = "jsonschema" version = "4.19.1" @@ -1493,9 +1593,17 @@ files = [ [package.dependencies] attrs = ">=22.2.0" +fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} jsonschema-specifications = ">=2023.03.6" referencing = ">=0.28.4" +rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} rpds-py = ">=0.7.1" +uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] @@ -1516,31 +1624,24 @@ files = [ referencing = ">=0.28.0" [[package]] -name 
= "jupyter-cache" -version = "0.6.1" -description = "A defined interface for working with a cache of jupyter notebooks." +name = "jupyter" +version = "1.0.0" +description = "Jupyter metapackage. Install all the Jupyter components in one go." optional = false -python-versions = "~=3.8" +python-versions = "*" files = [ - {file = "jupyter-cache-0.6.1.tar.gz", hash = "sha256:26f83901143edf4af2f3ff5a91e2d2ad298e46e2cee03c8071d37a23a63ccbfc"}, - {file = "jupyter_cache-0.6.1-py3-none-any.whl", hash = "sha256:2fce7d4975805c77f75bdfc1bc2e82bc538b8e5b1af27f2f5e06d55b9f996a82"}, + {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, + {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, + {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, ] [package.dependencies] -attrs = "*" -click = "*" -importlib-metadata = "*" -nbclient = ">=0.2,<0.8" -nbformat = "*" -pyyaml = "*" -sqlalchemy = ">=1.3.12,<3" -tabulate = "*" - -[package.extras] -cli = ["click-log"] -code-style = ["pre-commit (>=2.12,<4.0)"] -rtd = ["ipykernel", "jupytext", "myst-nb", "nbdime", "sphinx-book-theme", "sphinx-copybutton"] -testing = ["coverage", "ipykernel", "jupytext", "matplotlib", "nbdime", "nbformat (>=5.1)", "numpy", "pandas", "pytest (>=6,<8)", "pytest-cov", "pytest-regressions", "sympy"] +ipykernel = "*" +ipywidgets = "*" +jupyter-console = "*" +nbconvert = "*" +notebook = "*" +qtconsole = "*" [[package]] name = "jupyter-client" @@ -1565,15 +1666,39 @@ traitlets = ">=5.3" docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] +[[package]] +name = 
"jupyter-console" +version = "6.6.3" +description = "Jupyter terminal console" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485"}, + {file = "jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539"}, +] + +[package.dependencies] +ipykernel = ">=6.14" +ipython = "*" +jupyter-client = ">=7.0.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +prompt-toolkit = ">=3.0.30" +pygments = "*" +pyzmq = ">=17" +traitlets = ">=5.4" + +[package.extras] +test = ["flaky", "pexpect", "pytest"] + [[package]] name = "jupyter-core" -version = "5.3.2" +version = "5.4.0" description = "Jupyter core package. A base package on which Jupyter projects rely." optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.3.2-py3-none-any.whl", hash = "sha256:a4af53c3fa3f6330cebb0d9f658e148725d15652811d1c32dc0f63bb96f2e6d6"}, - {file = "jupyter_core-5.3.2.tar.gz", hash = "sha256:0c28db6cbe2c37b5b398e1a1a5b22f84fd64cd10afc1f6c05b02fb09481ba45f"}, + {file = "jupyter_core-5.4.0-py3-none-any.whl", hash = "sha256:66e252f675ac04dcf2feb6ed4afb3cd7f68cf92f483607522dc251f32d471571"}, + {file = "jupyter_core-5.4.0.tar.gz", hash = "sha256:e4b98344bb94ee2e3e6c4519a97d001656009f9cb2b7f2baf15b3c205770011d"}, ] [package.dependencies] @@ -1586,170 +1711,179 @@ docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", " test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] [[package]] -name = "linkify-it-py" -version = "2.0.2" -description = "Links recognition library with FULL unicode support." 
+name = "jupyter-events" +version = "0.7.0" +description = "Jupyter Event System library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "linkify-it-py-2.0.2.tar.gz", hash = "sha256:19f3060727842c254c808e99d465c80c49d2c7306788140987a1a7a29b0d6ad2"}, - {file = "linkify_it_py-2.0.2-py3-none-any.whl", hash = "sha256:a3a24428f6c96f27370d7fe61d2ac0be09017be5190d68d8658233171f1b6541"}, + {file = "jupyter_events-0.7.0-py3-none-any.whl", hash = "sha256:4753da434c13a37c3f3c89b500afa0c0a6241633441421f6adafe2fb2e2b924e"}, + {file = "jupyter_events-0.7.0.tar.gz", hash = "sha256:7be27f54b8388c03eefea123a4f79247c5b9381c49fb1cd48615ee191eb12615"}, ] [package.dependencies] -uc-micro-py = "*" +jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} +python-json-logger = ">=2.0.4" +pyyaml = ">=5.3" +referencing = "*" +rfc3339-validator = "*" +rfc3986-validator = ">=0.1.1" +traitlets = ">=5.3" [package.extras] -benchmark = ["pytest", "pytest-benchmark"] -dev = ["black", "flake8", "isort", "pre-commit", "pyproject-flake8"] -doc = ["myst-parser", "sphinx", "sphinx-book-theme"] -test = ["coverage", "pytest", "pytest-cov"] +cli = ["click", "rich"] +docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] +test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] [[package]] -name = "livereload" -version = "2.6.3" -description = "Python LiveReload is an awesome tool for web developers" +name = "jupyter-lsp" +version = "2.2.0" +description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"}, - {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, + {file = 
"jupyter-lsp-2.2.0.tar.gz", hash = "sha256:8ebbcb533adb41e5d635eb8fe82956b0aafbf0fd443b6c4bfa906edeeb8635a1"}, + {file = "jupyter_lsp-2.2.0-py3-none-any.whl", hash = "sha256:9e06b8b4f7dd50300b70dd1a78c0c3b0c3d8fa68e0f2d8a5d1fbab62072aca3f"}, ] [package.dependencies] -six = "*" -tornado = {version = "*", markers = "python_version > \"2.7\""} - -[[package]] -name = "lxml" -version = "4.9.3" -description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" -files = [ - {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, - {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, - {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, - {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, - {file = 
"lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, - {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, - {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, - {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, - {file = 
"lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, - {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, - {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, - {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, - {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, - {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, - {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, - {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, - {file = 
"lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, - {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, - {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, - {file = 
"lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, - {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, - {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, - {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, - {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, - {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, - {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, - {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, - {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, - {file = 
"lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, - {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, -] - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.35)"] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jupyter-server = ">=1.1.2" + +[[package]] +name = "jupyter-server" +version = "2.7.3" +description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server-2.7.3-py3-none-any.whl", hash = "sha256:8e4b90380b59d7a1e31086c4692231f2a2ea4cb269f5516e60aba72ce8317fc9"}, + {file = "jupyter_server-2.7.3.tar.gz", hash = "sha256:d4916c8581c4ebbc534cebdaa8eca2478d9f3bfdd88eae29fcab0120eac57649"}, +] + +[package.dependencies] +anyio = ">=3.1.0" +argon2-cffi = "*" +jinja2 = "*" +jupyter-client = ">=7.4.4" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-events = ">=0.6.0" +jupyter-server-terminals = "*" +nbconvert = ">=6.4.4" +nbformat = ">=5.3.0" +overrides = "*" +packaging = "*" +prometheus-client = "*" +pywinpty = {version = "*", markers = "os_name == \"nt\""} +pyzmq = ">=24" +send2trash = ">=1.8.2" +terminado = ">=0.8.3" +tornado = ">=6.2.0" +traitlets = ">=5.6.0" +websocket-client = "*" + +[package.extras] +docs = ["ipykernel", "jinja2", "jupyter-client", "jupyter-server", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] +test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] + +[[package]] +name = "jupyter-server-terminals" +version = "0.4.4" +description = "A Jupyter Server Extension Providing Terminals." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server_terminals-0.4.4-py3-none-any.whl", hash = "sha256:75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36"}, + {file = "jupyter_server_terminals-0.4.4.tar.gz", hash = "sha256:57ab779797c25a7ba68e97bcfb5d7740f2b5e8a83b5e8102b10438041a7eac5d"}, +] + +[package.dependencies] +pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} +terminado = ">=0.8.3" + +[package.extras] +docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] + +[[package]] +name = "jupyterlab" +version = "4.0.6" +description = "JupyterLab computational environment" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab-4.0.6-py3-none-any.whl", hash = "sha256:7d9dacad1e3f30fe4d6d4efc97fda25fbb5012012b8f27cc03a2283abcdee708"}, + {file = "jupyterlab-4.0.6.tar.gz", hash = "sha256:6c43ae5a6a1fd2fdfafcb3454004958bde6da76331abb44cffc6f9e436b19ba1"}, +] + +[package.dependencies] +async-lru = ">=1.0.0" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +ipykernel = "*" +jinja2 = ">=3.0.3" +jupyter-core = "*" +jupyter-lsp = ">=2.0.0" +jupyter-server = ">=2.4.0,<3" +jupyterlab-server = ">=2.19.0,<3" +notebook-shim = ">=0.2" +packaging = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} +tornado = ">=6.2.0" +traitlets = "*" + +[package.extras] +dev = ["black[jupyter] (==23.7.0)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.0.286)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8,<7.2.0)", "sphinx-copybutton"] 
+docs-screenshots = ["altair (==5.0.1)", "ipython (==8.14.0)", "ipywidgets (==8.0.6)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post0)", "matplotlib (==3.7.1)", "nbconvert (>=7.0.0)", "pandas (==2.0.2)", "scipy (==1.10.1)", "vega-datasets (==0.9.0)"] +test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] + +[[package]] +name = "jupyterlab-pygments" +version = "0.2.2" +description = "Pygments theme using JupyterLab CSS variables" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, + {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, +] + +[[package]] +name = "jupyterlab-server" +version = "2.25.0" +description = "A set of server components for JupyterLab and JupyterLab like applications." 
optional = false python-versions = ">=3.8" files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, + {file = "jupyterlab_server-2.25.0-py3-none-any.whl", hash = "sha256:c9f67a98b295c5dee87f41551b0558374e45d449f3edca153dd722140630dcb2"}, + {file = "jupyterlab_server-2.25.0.tar.gz", hash = "sha256:77c2f1f282d610f95e496e20d5bf1d2a7706826dfb7b18f3378ae2870d272fb7"}, ] [package.dependencies] -mdurl = ">=0.1,<1.0" +babel = ">=2.10" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0.3" +json5 = ">=0.9.0" +jsonschema = ">=4.18.0" +jupyter-server = ">=1.21,<3" +packaging = ">=21.3" +requests = ">=2.31" [package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] +docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] +openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] +test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.7.0)", "pytest (>=7.0)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] + +[[package]] +name = "jupyterlab-widgets" 
+version = "3.0.9" +description = "Jupyter interactive widgets for JupyterLab" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jupyterlab_widgets-3.0.9-py3-none-any.whl", hash = "sha256:3cf5bdf5b897bf3bccf1c11873aa4afd776d7430200f765e0686bd352487b58d"}, + {file = "jupyterlab_widgets-3.0.9.tar.gz", hash = "sha256:6005a4e974c7beee84060fdfba341a3218495046de8ae3ec64888e5fe19fdb4c"}, +] [[package]] name = "markupsafe" @@ -1778,6 +1912,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1865,33 +2009,14 @@ files = [ ] [[package]] -name = "mdit-py-plugins" -version = "0.4.0" -description = "Collection of plugins for markdown-it-py" +name = "mistune" +version = "3.0.2" +description = "A sane and fast Markdown parser with useful plugins and renderers" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "mdit_py_plugins-0.4.0-py3-none-any.whl", hash = "sha256:b51b3bb70691f57f974e257e367107857a93b36f322a9e6d44ca5bf28ec2def9"}, - {file = "mdit_py_plugins-0.4.0.tar.gz", hash = "sha256:d8ab27e9aed6c38aa716819fedfde15ca275715955f8a185a8e1cf90fb1d2c1b"}, -] - -[package.dependencies] -markdown-it-py = ">=1.0.0,<4.0.0" - -[package.extras] -code-style = ["pre-commit"] -rtd = ["myst-parser", "sphinx-book-theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = 
"mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, + {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, + {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, ] [[package]] @@ -1966,84 +2091,64 @@ files = [ ] [[package]] -name = "myst-nb" -version = "0.18.0" -description = "A Jupyter Notebook Sphinx reader built on top of the MyST markdown parser." -optional = false -python-versions = ">=3.8" -files = [] -develop = false - -[package.dependencies] -importlib_metadata = "*" -ipykernel = "*" -ipython = "*" -jupyter-cache = ">=0.5,<0.7" -myst-parser = ">=0.18.0" -nbclient = "*" -nbformat = ">=5.0,<6.0" -pyyaml = "*" -sphinx = ">=4" -typing-extensions = "*" - -[package.extras] -code-style = ["pre-commit"] -rtd = ["alabaster", "altair", "bokeh", "coconut (>=1.4.3,<3.1.0)", "ipykernel (>=5.5,<7.0)", "ipywidgets", "jupytext (>=1.11.2,<1.16.0)", "matplotlib", "numpy", "pandas", "plotly", "sphinx-book-theme (>=0.3)", "sphinx-copybutton", "sphinx-design (>=0.4.0,<0.5.0)", "sphinxcontrib-bibtex", "sympy"] -testing = ["beautifulsoup4", "coverage (>=6.4,<8.0)", "ipykernel (>=5.5,<7.0)", "ipython (!=8.1.0,<8.16)", "ipywidgets (>=8)", "jupytext (>=1.11.2,<1.16.0)", "matplotlib (>=3.5.3,<3.6)", "nbdime", "numpy", "pandas", "pytest (>=7.1,<8.0)", "pytest-cov (>=3,<5)", "pytest-param-files (>=0.3.3,<0.4.0)", "pytest-regressions", "sympy (>=1.10.1)"] - -[package.source] -type = "git" -url = "https://github.com/executablebooks/MyST-NB.git" -reference = "59854c270deb76f297f228477be5d5088babd143" -resolved_reference = "59854c270deb76f297f228477be5d5088babd143" - -[[package]] -name = "myst-parser" -version = "2.0.0" -description = "An extended 
[CommonMark](https://spec.commonmark.org/) compliant parser," +name = "nbclient" +version = "0.8.0" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." optional = false -python-versions = ">=3.8" +python-versions = ">=3.8.0" files = [ - {file = "myst_parser-2.0.0-py3-none-any.whl", hash = "sha256:7c36344ae39c8e740dad7fdabf5aa6fc4897a813083c6cc9990044eb93656b14"}, - {file = "myst_parser-2.0.0.tar.gz", hash = "sha256:ea929a67a6a0b1683cdbe19b8d2e724cd7643f8aa3e7bb18dd65beac3483bead"}, + {file = "nbclient-0.8.0-py3-none-any.whl", hash = "sha256:25e861299e5303a0477568557c4045eccc7a34c17fc08e7959558707b9ebe548"}, + {file = "nbclient-0.8.0.tar.gz", hash = "sha256:f9b179cd4b2d7bca965f900a2ebf0db4a12ebff2f36a711cb66861e4ae158e55"}, ] [package.dependencies] -docutils = ">=0.16,<0.21" -jinja2 = "*" -markdown-it-py = ">=3.0,<4.0" -mdit-py-plugins = ">=0.4,<1.0" -pyyaml = "*" -sphinx = ">=6,<8" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +nbformat = ">=5.1" +traitlets = ">=5.4" [package.extras] -code-style = ["pre-commit (>=3.0,<4.0)"] -linkify = ["linkify-it-py (>=2.0,<3.0)"] -rtd = ["ipython", "pydata-sphinx-theme (==v0.13.0rc4)", "sphinx-autodoc2 (>=0.4.2,<0.5.0)", "sphinx-book-theme (==1.0.0rc2)", "sphinx-copybutton", "sphinx-design2", "sphinx-pyscript", "sphinx-tippy (>=0.3.1)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.8.2,<0.9.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] -testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=7,<8)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx-pytest"] -testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,<0.4.0)"] +dev = ["pre-commit"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] +test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", 
"pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] [[package]] -name = "nbclient" -version = "0.7.4" -description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." +name = "nbconvert" +version = "7.9.2" +description = "Converting Jupyter Notebooks" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8" files = [ - {file = "nbclient-0.7.4-py3-none-any.whl", hash = "sha256:c817c0768c5ff0d60e468e017613e6eae27b6fa31e43f905addd2d24df60c125"}, - {file = "nbclient-0.7.4.tar.gz", hash = "sha256:d447f0e5a4cfe79d462459aec1b3dc5c2e9152597262be8ee27f7d4c02566a0d"}, + {file = "nbconvert-7.9.2-py3-none-any.whl", hash = "sha256:39fe4b8bdd1b0104fdd86fc8a43a9077ba64c720bda4c6132690d917a0a154ee"}, + {file = "nbconvert-7.9.2.tar.gz", hash = "sha256:e56cc7588acc4f93e2bb5a34ec69028e4941797b2bfaf6462f18a41d1cc258c9"}, ] [package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -nbformat = ">=5.1" -traitlets = ">=5.3" +beautifulsoup4 = "*" +bleach = "!=5.0.0" +defusedxml = "*" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0" +jupyter-core = ">=4.7" +jupyterlab-pygments = "*" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<4" +nbclient = ">=0.5.0" +nbformat = ">=5.7" +packaging = "*" +pandocfilters = ">=1.4.1" +pygments = ">=2.4.1" +tinycss2 = "*" +traitlets = ">=5.1" [package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] -test = ["flaky", "ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] +all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] +docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] +qtpdf = ["nbconvert[qtpng]"] +qtpng = 
["pyqtwebengine (>=5.15)"] +serve = ["tornado (>=6.1)"] +test = ["flaky", "ipykernel", "ipywidgets (>=7)", "pytest", "pytest-dependency"] +webpdf = ["playwright"] [[package]] name = "nbformat" @@ -2077,6 +2182,46 @@ files = [ {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, ] +[[package]] +name = "notebook" +version = "7.0.4" +description = "Jupyter Notebook - A web-based notebook environment for interactive computing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "notebook-7.0.4-py3-none-any.whl", hash = "sha256:ee738414ac01773c1ad6834cf76cc6f1ce140ac8197fd13b3e2d44d89e257f72"}, + {file = "notebook-7.0.4.tar.gz", hash = "sha256:0c1b458f72ce8774445c8ef9ed2492bd0b9ce9605ac996e2b066114f69795e71"}, +] + +[package.dependencies] +jupyter-server = ">=2.4.0,<3" +jupyterlab = ">=4.0.2,<5" +jupyterlab-server = ">=2.22.1,<3" +notebook-shim = ">=0.2,<0.3" +tornado = ">=6.2.0" + +[package.extras] +dev = ["hatch", "pre-commit"] +docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.22.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] + +[[package]] +name = "notebook-shim" +version = "0.2.3" +description = "A shim layer for notebook traits and config" +optional = false +python-versions = ">=3.7" +files = [ + {file = "notebook_shim-0.2.3-py3-none-any.whl", hash = "sha256:a83496a43341c1674b093bfcebf0fe8e74cbe7eda5fd2bbc56f8e39e1486c0c7"}, + {file = "notebook_shim-0.2.3.tar.gz", hash = "sha256:f69388ac283ae008cd506dda10d0288b09a017d822d5e8c7129a152cbd3ce7e9"}, +] + +[package.dependencies] +jupyter-server = ">=1.8,<3" + +[package.extras] +test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] + [[package]] name = 
"nox" version = "2023.4.22" @@ -2131,6 +2276,17 @@ files = [ {file = "numpy-1.25.2.tar.gz", hash = "sha256:fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760"}, ] +[[package]] +name = "overrides" +version = "7.4.0" +description = "A decorator to automatically detect mismatch when overriding a method." +optional = false +python-versions = ">=3.6" +files = [ + {file = "overrides-7.4.0-py3-none-any.whl", hash = "sha256:3ad24583f86d6d7a49049695efe9933e67ba62f0c7625d53c59fa832ce4b8b7d"}, + {file = "overrides-7.4.0.tar.gz", hash = "sha256:9502a3cca51f4fac40b5feca985b6703a5c1f6ad815588a7ca9e285b9dca6757"}, +] + [[package]] name = "packaging" version = "23.2" @@ -2282,6 +2438,17 @@ files = [ numpy = {version = ">=1.25.0", markers = "python_version >= \"3.9\""} types-pytz = ">=2022.1.1" +[[package]] +name = "pandocfilters" +version = "1.5.0" +description = "Utilities for writing pandoc filters in python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, + {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, +] + [[package]] name = "parso" version = "0.8.3" @@ -2392,6 +2559,49 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "plum-dispatch" +version = "1.7.4" +description = "Multiple dispatch in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "plum-dispatch-1.7.4.tar.gz", hash = "sha256:1c1d15b2842b5fa98405fd3dff6fad4887bdc77b60bd200e209d76ebfe9990fe"}, + {file = "plum_dispatch-1.7.4-py3-none-any.whl", hash = "sha256:c40dbeab269bbbf972ce0dbc078380da19ebaee1a370a2c564e1814a11bde216"}, +] + +[[package]] +name = "plum-dispatch" +version = "2.2.2" +description = "Multiple dispatch in Python" +optional = false +python-versions = ">=3.8" +files = 
[ + {file = "plum_dispatch-2.2.2-py3-none-any.whl", hash = "sha256:d7ee415bd166ffa90eaa4b24d7c9dc7ca6f8875750586001e7c9baff706223bd"}, + {file = "plum_dispatch-2.2.2.tar.gz", hash = "sha256:d5d180225c9fbf0277375bb558b649d97d0b651a91037bb7155cedbe9f52764b"}, +] + +[package.dependencies] +beartype = ">=0.16.2" +typing-extensions = {version = "*", markers = "python_version <= \"3.10\""} + +[package.extras] +dev = ["black (==23.9.0)", "build", "coveralls", "ghp-import", "ipython", "jupyter-book", "mypy", "numpy", "pre-commit", "pyright", "pytest (>=6)", "pytest-cov", "tox", "wheel"] + +[[package]] +name = "prometheus-client" +version = "0.17.1" +description = "Python client for the Prometheus monitoring system." +optional = false +python-versions = ">=3.6" +files = [ + {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, + {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, +] + +[package.extras] +twisted = ["twisted"] + [[package]] name = "prompt-toolkit" version = "3.0.39" @@ -2518,183 +2728,55 @@ files = [ [[package]] name = "pydantic" -version = "2.4.2" -description = "Data validation using Python type hints" +version = "1.10.13" +description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"}, - {file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, + {file = 
"pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, + {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, + {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, + {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, + {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, + {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, + {file = 
"pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, + {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, + {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, + {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.10.1" -typing-extensions = ">=4.6.1" - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.10.1" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"}, - {file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"}, - {file = 
"pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"}, - {file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"}, - {file = "pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"}, - {file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"}, - {file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"}, - {file = 
"pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"}, - {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"}, - {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"}, - {file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"}, - {file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"}, - {file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"}, - {file = "pydantic_core-2.10.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921"}, - {file = "pydantic_core-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d"}, - {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f"}, - {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c"}, - {file = "pydantic_core-2.10.1-cp312-none-win32.whl", hash = "sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f"}, - {file = "pydantic_core-2.10.1-cp312-none-win_amd64.whl", hash = "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430"}, - {file = "pydantic_core-2.10.1-cp312-none-win_arm64.whl", hash = "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = 
"sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f"}, - {file = "pydantic_core-2.10.1-cp37-none-win32.whl", hash = "sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c"}, - {file = "pydantic_core-2.10.1-cp37-none-win_amd64.whl", hash = "sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e"}, - {file = "pydantic_core-2.10.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = 
"sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc"}, - {file = "pydantic_core-2.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e"}, - {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561"}, - {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de"}, - {file = "pydantic_core-2.10.1-cp38-none-win32.whl", hash = "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee"}, - {file = "pydantic_core-2.10.1-cp38-none-win_amd64.whl", hash = "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e"}, - {file = "pydantic_core-2.10.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970"}, - 
{file = "pydantic_core-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429"}, - {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7"}, - {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595"}, - {file = "pydantic_core-2.10.1-cp39-none-win32.whl", hash = "sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a"}, - {file = "pydantic_core-2.10.1-cp39-none-win_amd64.whl", hash = "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"}, - {file = 
"pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"}, - {file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pydantic-extra-types" -version = "2.1.0" -description = "Extra Pydantic types." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "pydantic_extra_types-2.1.0-py3-none-any.whl", hash = "sha256:1b8aa83a2986b0bc6a7179834fdb423c5e0bcef6b2b4cd9261bf753ad7dcc483"}, - {file = "pydantic_extra_types-2.1.0.tar.gz", hash = "sha256:d07b869e733d33712b07d6b8cd7b0223077c23ae5a1e23bd0699a00401259ec7"}, -] - -[package.dependencies] -pydantic = ">=2.0.3" - -[package.extras] -all = ["phonenumbers (>=8,<9)", "pycountry (>=22,<23)"] - -[[package]] -name = "pydata-sphinx-theme" -version = "0.13.3" -description = "Bootstrap-based Sphinx theme from the PyData community" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydata_sphinx_theme-0.13.3-py3-none-any.whl", hash = "sha256:bf41ca6c1c6216e929e28834e404bfc90e080b51915bbe7563b5e6fda70354f0"}, - {file = "pydata_sphinx_theme-0.13.3.tar.gz", hash = "sha256:827f16b065c4fd97e847c11c108bf632b7f2ff53a3bca3272f63f3f3ff782ecc"}, -] - -[package.dependencies] -accessible-pygments = "*" -Babel = "*" -beautifulsoup4 = "*" -docutils = "!=0.17.0" -packaging = "*" -pygments = ">=2.7" -sphinx = ">=4.2" -typing-extensions = "*" +typing-extensions = ">=4.2.0" [package.extras] -dev = ["nox", "pre-commit", "pydata-sphinx-theme[doc,test]", "pyyaml"] -doc = ["ablog (>=0.11.0rc2)", "colorama", "ipyleaflet", "jupyter_sphinx", "linkify-it-py", "matplotlib", "myst-nb", "nbsphinx", "numpy", "numpydoc", "pandas", "plotly", "rich", "sphinx-copybutton", "sphinx-design", "sphinx-favicon (>=1.0.1)", "sphinx-sitemap", "sphinx-togglebutton", "sphinxcontrib-youtube", "sphinxext-rediraffe", "xarray"] -test = ["codecov", "pytest", "pytest-cov", "pytest-regressions"] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] [[package]] name = "pydocstyle" @@ -2738,57 +2820,6 @@ files = [ [package.extras] plugins = ["importlib-metadata"] -[[package]] -name = "pyside6" -version = "6.5.3" -description = "Python bindings for the Qt cross-platform application and UI framework" -optional = false 
-python-versions = "<3.12,>=3.7" -files = [ - {file = "PySide6-6.5.3-cp37-abi3-macosx_11_0_universal2.whl", hash = "sha256:be53e7c64710fc4307afd33147e241a06cd97b18fae887ee611d8d4b373dbb04"}, - {file = "PySide6-6.5.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:84f3d3e278e5ea00f1558ac7e1eeb382bba1df7732bdb025ee654e7b4b3cd451"}, - {file = "PySide6-6.5.3-cp37-abi3-manylinux_2_31_aarch64.whl", hash = "sha256:48f4579ca49225cfff8f512178551bdf6aa9031198527f71799bcc061a0f2327"}, - {file = "PySide6-6.5.3-cp37-abi3-win_amd64.whl", hash = "sha256:aaaf5acfaaf9575740df03ee1aa706e2f38d8fcca2255acbbd3a5701f6f2f416"}, -] - -[package.dependencies] -PySide6-Addons = "6.5.3" -PySide6-Essentials = "6.5.3" -shiboken6 = "6.5.3" - -[[package]] -name = "pyside6-addons" -version = "6.5.3" -description = "Python bindings for the Qt cross-platform application and UI framework (Addons)" -optional = false -python-versions = "<3.12,>=3.7" -files = [ - {file = "PySide6_Addons-6.5.3-cp37-abi3-macosx_11_0_universal2.whl", hash = "sha256:047162b158ee929d43c21cdc3ac48e75fec612f2e5492b317190fac98d2de5c6"}, - {file = "PySide6_Addons-6.5.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:e5bc1fa95351182dc2c003e07320d5509218ccc0840d10197d7d452aa5de5d2e"}, - {file = "PySide6_Addons-6.5.3-cp37-abi3-manylinux_2_31_aarch64.whl", hash = "sha256:be0dcfb15d44c2973c3c122058f1df8c3c9d93abd4170534e06dbf986aa30e26"}, - {file = "PySide6_Addons-6.5.3-cp37-abi3-win_amd64.whl", hash = "sha256:dd1d294d48798bd297bde02d3ea02f313a86e38ed3944519228466bdfb537961"}, -] - -[package.dependencies] -PySide6-Essentials = "6.5.3" -shiboken6 = "6.5.3" - -[[package]] -name = "pyside6-essentials" -version = "6.5.3" -description = "Python bindings for the Qt cross-platform application and UI framework (Essentials)" -optional = false -python-versions = "<3.12,>=3.7" -files = [ - {file = "PySide6_Essentials-6.5.3-cp37-abi3-macosx_11_0_universal2.whl", hash = 
"sha256:4d9c95ded938e557052fc67efe68d57108856df141a1b499497fd7999419e3eb"}, - {file = "PySide6_Essentials-6.5.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:45580138be91f5fdcefb4d28dadb56d3640eb658575af97b49057e10c22a024d"}, - {file = "PySide6_Essentials-6.5.3-cp37-abi3-manylinux_2_31_aarch64.whl", hash = "sha256:8244bc185b0243ba7c4491033e592b247e44a63d69213e9a45ee38e87e0f1f90"}, - {file = "PySide6_Essentials-6.5.3-cp37-abi3-win_amd64.whl", hash = "sha256:f928b98ec349c87f9ccc63a482917779f59fa646893722c53c2fe2a1e4f335e0"}, -] - -[package.dependencies] -shiboken6 = "6.5.3" - [[package]] name = "pytest" version = "7.4.2" @@ -2866,6 +2897,17 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-json-logger" +version = "2.0.7" +description = "A python library adding a json log formatter" +optional = false +python-versions = ">=3.6" +files = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] + [[package]] name = "pytz" version = "2023.3.post1" @@ -2879,13 +2921,13 @@ files = [ [[package]] name = "pyupgrade" -version = "3.14.0" +version = "3.15.0" description = "A tool to automatically upgrade syntax for newer versions." 
optional = false python-versions = ">=3.8.1" files = [ - {file = "pyupgrade-3.14.0-py2.py3-none-any.whl", hash = "sha256:221923c5cd6171e4adb78bfd331ce95500112294c36fb61a0947c55c78cb1924"}, - {file = "pyupgrade-3.14.0.tar.gz", hash = "sha256:e5caa64798256e341afcee6fe42be8d75e4f88f20809b25dc6174548dbc3bace"}, + {file = "pyupgrade-3.15.0-py2.py3-none-any.whl", hash = "sha256:8dc8ebfaed43566e2c65994162795017c7db11f531558a74bc8aa077907bc305"}, + {file = "pyupgrade-3.15.0.tar.gz", hash = "sha256:a7fde381060d7c224f55aef7a30fae5ac93bbc428367d27e70a603bc2acd4f00"}, ] [package.dependencies] @@ -2914,6 +2956,21 @@ files = [ {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] +[[package]] +name = "pywinpty" +version = "2.0.12" +description = "Pseudo terminal support for Windows from Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pywinpty-2.0.12-cp310-none-win_amd64.whl", hash = "sha256:21319cd1d7c8844fb2c970fb3a55a3db5543f112ff9cfcd623746b9c47501575"}, + {file = "pywinpty-2.0.12-cp311-none-win_amd64.whl", hash = "sha256:853985a8f48f4731a716653170cd735da36ffbdc79dcb4c7b7140bce11d8c722"}, + {file = "pywinpty-2.0.12-cp312-none-win_amd64.whl", hash = "sha256:1617b729999eb6713590e17665052b1a6ae0ad76ee31e60b444147c5b6a35dca"}, + {file = "pywinpty-2.0.12-cp38-none-win_amd64.whl", hash = "sha256:189380469ca143d06e19e19ff3fba0fcefe8b4a8cc942140a6b863aed7eebb2d"}, + {file = "pywinpty-2.0.12-cp39-none-win_amd64.whl", hash = "sha256:7520575b6546db23e693cbd865db2764097bd6d4ef5dc18c92555904cd62c3d4"}, + {file = "pywinpty-2.0.12.tar.gz", hash = "sha256:8197de460ae8ebb7f5d1701dfa1b5df45b157bb832e92acba316305e18ca00dd"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -2926,6 +2983,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = 
"PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2933,8 +2991,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = 
"PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2951,6 +3016,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = 
"PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2958,6 +3024,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3068,6 +3135,79 @@ files = [ [package.dependencies] cffi = {version = "*", markers = "implementation_name == \"pypy\""} +[[package]] +name = "qtconsole" +version = "5.4.4" +description = "Jupyter Qt console" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "qtconsole-5.4.4-py3-none-any.whl", hash = "sha256:a3b69b868e041c2c698bdc75b0602f42e130ffb256d6efa48f9aa756c97672aa"}, + {file = "qtconsole-5.4.4.tar.gz", hash = "sha256:b7ffb53d74f23cee29f4cdb55dd6fabc8ec312d94f3c46ba38e1dde458693dfb"}, +] + +[package.dependencies] +ipykernel = ">=4.1" +ipython-genutils = "*" +jupyter-client = ">=4.1" +jupyter-core = "*" +packaging = "*" +pygments = "*" +pyzmq = ">=17.1" +qtpy = 
">=2.4.0" +traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" + +[package.extras] +doc = ["Sphinx (>=1.3)"] +test = ["flaky", "pytest", "pytest-qt"] + +[[package]] +name = "qtpy" +version = "2.4.0" +description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." +optional = false +python-versions = ">=3.7" +files = [ + {file = "QtPy-2.4.0-py3-none-any.whl", hash = "sha256:4d4f045a41e09ac9fa57fcb47ef05781aa5af294a0a646acc1b729d14225e741"}, + {file = "QtPy-2.4.0.tar.gz", hash = "sha256:db2d508167aa6106781565c8da5c6f1487debacba33519cedc35fa8997d424d4"}, +] + +[package.dependencies] +packaging = "*" + +[package.extras] +test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] + +[[package]] +name = "quartodoc" +version = "0.6.3" +description = "Generate API documentation with Quarto." +optional = false +python-versions = ">=3.9" +files = [ + {file = "quartodoc-0.6.3-py3-none-any.whl", hash = "sha256:3d3d69a85d5a89793d06877a6cf3718d8e713d699b4e2a8e794b4baf9c6e842e"}, + {file = "quartodoc-0.6.3.tar.gz", hash = "sha256:c299451e49b8be2ff0330210d8d294e8cbc66526dd9ae7c9eae72c3008a7a16b"}, +] + +[package.dependencies] +click = "*" +griffe = "<=0.32.3" +importlib-metadata = ">=5.1.0" +importlib-resources = ">=5.10.2" +plum-dispatch = [ + {version = "<2.0.0", markers = "python_version < \"3.10\""}, + {version = ">2.0.0", markers = "python_version >= \"3.10\""}, +] +pydantic = "<2.0" +pyyaml = "*" +sphobjinv = ">=2.3.1" +tabulate = ">=0.9.0" +typing-extensions = ">=4.4.0" +watchdog = ">=3.0.0" + +[package.extras] +dev = ["jupyterlab", "jupytext", "pre-commit", "pytest", "syrupy"] + [[package]] name = "referencing" version = "0.30.2" @@ -3117,6 +3257,31 @@ files = [ [package.dependencies] docutils = ">=0.11,<1.0" +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +description = "A pure python RFC3339 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + 
{file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, + {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "rfc3986-validator" +version = "0.1.1" +description = "Pure python rfc3986 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, + {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, +] + [[package]] name = "rpds-py" version = "0.10.4" @@ -3252,30 +3417,36 @@ python-versions = ">=3.6" files = [ {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, 
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash 
= "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, @@ -3322,6 +3493,22 @@ setuptools = ">=19.3" github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"] gitlab = ["python-gitlab (>=1.3.0)"] +[[package]] +name = "send2trash" +version = "1.8.2" +description = "Send file to trash natively under Mac OS X, Windows and Linux" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "Send2Trash-1.8.2-py3-none-any.whl", hash = "sha256:a384719d99c07ce1eefd6905d2decb6f8b7ed054025bb0e618919f945de4f679"}, + {file = "Send2Trash-1.8.2.tar.gz", hash = "sha256:c132d59fa44b9ca2b1699af5c86f57ce9f4c5eb56629d5d55fbb7a35f84e2312"}, +] + +[package.extras] +nativelib = ["pyobjc-framework-Cocoa", "pywin32"] +objc = ["pyobjc-framework-Cocoa"] +win32 = ["pywin32"] + [[package]] name = "setuptools" version = "68.2.2" @@ -3338,19 +3525,6 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] -[[package]] -name = "shiboken6" -version = "6.5.3" -description = 
"Python/C++ bindings helper module" -optional = false -python-versions = "<3.12,>=3.7" -files = [ - {file = "shiboken6-6.5.3-cp37-abi3-macosx_11_0_universal2.whl", hash = "sha256:faaca92dcbbf26c0ae13f189746c38482e40859e0897b0ed4dee5e04f69fda71"}, - {file = "shiboken6-6.5.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4cdda98df511243c40f1dd4d9eac25a7191c2583ac673147ecdae0ffa3b9223f"}, - {file = "shiboken6-6.5.3-cp37-abi3-manylinux_2_31_aarch64.whl", hash = "sha256:1bc928ca9f1c1d16ff8fe0585627738a15552bb3329c04fca2c74a443618a6b3"}, - {file = "shiboken6-6.5.3-cp37-abi3-win_amd64.whl", hash = "sha256:a013367e38a12b3f69ba02e79f133df4fba8d21b55a78c6999cdb31c25609524"}, -] - [[package]] name = "six" version = "1.16.0" @@ -3362,6 +3536,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -3385,323 +3570,20 @@ files = [ ] [[package]] -name = "sphinx" -version = "7.2.6" -description = "Python documentation generator" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinx-7.2.6-py3-none-any.whl", hash = "sha256:1e09160a40b956dc623c910118fa636da93bd3ca0b9876a7b3df90f07d691560"}, - {file = "sphinx-7.2.6.tar.gz", hash = "sha256:9a5160e1ea90688d5963ba09a2dcd8bdd526620edbb65c328728f1b2228d5ab5"}, -] - -[package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=2.9" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.18.1,<0.21" -imagesize = ">=1.3" 
-importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} -Jinja2 = ">=3.0" -packaging = ">=21.0" -Pygments = ">=2.14" -requests = ">=2.25.0" -snowballstemmer = ">=2.0" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = ">=2.0.0" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.9" - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] -test = ["cython (>=3.0)", "filelock", "html5lib", "pytest (>=4.6)", "setuptools (>=67.0)"] - -[[package]] -name = "sphinx-autobuild" -version = "2021.3.14" -description = "Rebuild Sphinx documentation on changes, with live-reload in the browser." +name = "sphobjinv" +version = "2.3.1" +description = "Sphinx objects.inv Inspection/Manipulation Tool" optional = false python-versions = ">=3.6" files = [ - {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"}, - {file = "sphinx_autobuild-2021.3.14-py3-none-any.whl", hash = "sha256:8fe8cbfdb75db04475232f05187c776f46f6e9e04cacf1e49ce81bdac649ccac"}, -] - -[package.dependencies] -colorama = "*" -livereload = "*" -sphinx = "*" - -[package.extras] -test = ["pytest", "pytest-cov"] - -[[package]] -name = "sphinx-autodoc-typehints" -version = "1.24.0" -description = "Type hints (PEP 484) support for the Sphinx autodoc extension" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sphinx_autodoc_typehints-1.24.0-py3-none-any.whl", hash = "sha256:6a73c0c61a9144ce2ed5ef2bed99d615254e5005c1cc32002017d72d69fb70e6"}, - {file = "sphinx_autodoc_typehints-1.24.0.tar.gz", hash = "sha256:94e440066941bb237704bb880785e2d05e8ae5406c88674feefbb938ad0dc6af"}, + {file = "sphobjinv-2.3.1-py3-none-any.whl", hash = "sha256:f3efe68bb0ba6e32cb50df064fe6349b8f94681589b400dea753a2860dd576b5"}, + 
{file = "sphobjinv-2.3.1.tar.gz", hash = "sha256:1442a47fc93587a0177be95346904e388ef85a8366f90a1835a7c3eeeb122eb7"}, ] [package.dependencies] -sphinx = ">=7.0.1" - -[package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)"] -numpy = ["nptyping (>=2.5)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.6.3)"] - -[[package]] -name = "sphinx-copybutton" -version = "0.5.2" -description = "Add a copy button to each of your code cells." -optional = false -python-versions = ">=3.7" -files = [ - {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, - {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, -] - -[package.dependencies] -sphinx = ">=1.8" - -[package.extras] -code-style = ["pre-commit (==2.12.1)"] -rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] - -[[package]] -name = "sphinx-design" -version = "0.5.0" -description = "A sphinx extension for designing beautiful, view size responsive web components." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "sphinx_design-0.5.0-py3-none-any.whl", hash = "sha256:1af1267b4cea2eedd6724614f19dcc88fe2e15aff65d06b2f6252cee9c4f4c1e"}, - {file = "sphinx_design-0.5.0.tar.gz", hash = "sha256:e8e513acea6f92d15c6de3b34e954458f245b8e761b45b63950f65373352ab00"}, -] - -[package.dependencies] -sphinx = ">=5,<8" - -[package.extras] -code-style = ["pre-commit (>=3,<4)"] -rtd = ["myst-parser (>=1,<3)"] -testing = ["myst-parser (>=1,<3)", "pytest (>=7.1,<8.0)", "pytest-cov", "pytest-regressions"] -theme-furo = ["furo (>=2023.7.0,<2023.8.0)"] -theme-pydata = ["pydata-sphinx-theme (>=0.13.0,<0.14.0)"] -theme-rtd = ["sphinx-rtd-theme (>=1.0,<2.0)"] -theme-sbt = ["sphinx-book-theme (>=1.0,<2.0)"] - -[[package]] -name = "sphinx-social-cards" -version = "0.3.0" -description = "Generate social media cards for documentation pages with Sphinx" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sphinx-social-cards-0.3.0.tar.gz", hash = "sha256:149b18921f5244768a1200e3554d596fd093a7562df96f0d76ce32e8bb6cc70f"}, - {file = "sphinx_social_cards-0.3.0-py3-none-any.whl", hash = "sha256:322a044b54a8241423c157c4e43a09bba22a02b658ecb24cc146e40034bf8027"}, -] - -[package.dependencies] -pydantic = ">=2.0" -pydantic-extra-types = "*" -PySide6 = "*" -pyyaml = "*" -requests = "*" -sphinx = "*" -typing-extensions = "*" - -[package.extras] -github = ["appdirs"] - -[[package]] -name = "sphinxcontrib-applehelp" -version = "1.0.7" -description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_applehelp-1.0.7-py3-none-any.whl", hash = "sha256:094c4d56209d1734e7d252f6e0b3ccc090bd52ee56807a5d9315b19c122ab15d"}, - {file = "sphinxcontrib_applehelp-1.0.7.tar.gz", hash = "sha256:39fdc8d762d33b01a7d8f026a3b7d71563ea3b72787d5f00ad8465bd9d6dfbfa"}, -] - -[package.dependencies] -Sphinx = ">=5" - -[package.extras] -lint = 
["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "1.0.5" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_devhelp-1.0.5-py3-none-any.whl", hash = "sha256:fe8009aed765188f08fcaadbb3ea0d90ce8ae2d76710b7e29ea7d047177dae2f"}, - {file = "sphinxcontrib_devhelp-1.0.5.tar.gz", hash = "sha256:63b41e0d38207ca40ebbeabcf4d8e51f76c03e78cd61abe118cf4435c73d4212"}, -] - -[package.dependencies] -Sphinx = ">=5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.0.4" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_htmlhelp-2.0.4-py3-none-any.whl", hash = "sha256:8001661c077a73c29beaf4a79968d0726103c5605e27db92b9ebed8bab1359e9"}, - {file = "sphinxcontrib_htmlhelp-2.0.4.tar.gz", hash = "sha256:6c26a118a05b76000738429b724a0568dbde5b72391a688577da08f11891092a"}, -] - -[package.dependencies] -Sphinx = ">=5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["html5lib", "pytest"] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" -optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] - -[package.extras] -test = ["flake8", "mypy", "pytest"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "1.0.6" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" 
-optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_qthelp-1.0.6-py3-none-any.whl", hash = "sha256:bf76886ee7470b934e363da7a954ea2825650013d367728588732c7350f49ea4"}, - {file = "sphinxcontrib_qthelp-1.0.6.tar.gz", hash = "sha256:62b9d1a186ab7f5ee3356d906f648cacb7a6bdb94d201ee7adf26db55092982d"}, -] - -[package.dependencies] -Sphinx = ">=5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "1.1.9" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_serializinghtml-1.1.9-py3-none-any.whl", hash = "sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1"}, - {file = "sphinxcontrib_serializinghtml-1.1.9.tar.gz", hash = "sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54"}, -] - -[package.dependencies] -Sphinx = ">=5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sqlalchemy" -version = "2.0.21" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.21-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e7dc99b23e33c71d720c4ae37ebb095bebebbd31a24b7d99dfc4753d2803ede"}, - {file = "SQLAlchemy-2.0.21-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7f0c4ee579acfe6c994637527c386d1c22eb60bc1c1d36d940d8477e482095d4"}, - {file = "SQLAlchemy-2.0.21-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f7d57a7e140efe69ce2d7b057c3f9a595f98d0bbdfc23fd055efdfbaa46e3a5"}, - {file = "SQLAlchemy-2.0.21-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca38746eac23dd7c20bec9278d2058c7ad662b2f1576e4c3dbfcd7c00cc48fa"}, - {file = 
"SQLAlchemy-2.0.21-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3cf229704074bce31f7f47d12883afee3b0a02bb233a0ba45ddbfe542939cca4"}, - {file = "SQLAlchemy-2.0.21-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb87f763b5d04a82ae84ccff25554ffd903baafba6698e18ebaf32561f2fe4aa"}, - {file = "SQLAlchemy-2.0.21-cp310-cp310-win32.whl", hash = "sha256:89e274604abb1a7fd5c14867a412c9d49c08ccf6ce3e1e04fffc068b5b6499d4"}, - {file = "SQLAlchemy-2.0.21-cp310-cp310-win_amd64.whl", hash = "sha256:e36339a68126ffb708dc6d1948161cea2a9e85d7d7b0c54f6999853d70d44430"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bf8eebccc66829010f06fbd2b80095d7872991bfe8415098b9fe47deaaa58063"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b977bfce15afa53d9cf6a632482d7968477625f030d86a109f7bdfe8ce3c064a"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ff3dc2f60dbf82c9e599c2915db1526d65415be323464f84de8db3e361ba5b9"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44ac5c89b6896f4740e7091f4a0ff2e62881da80c239dd9408f84f75a293dae9"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:87bf91ebf15258c4701d71dcdd9c4ba39521fb6a37379ea68088ce8cd869b446"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b69f1f754d92eb1cc6b50938359dead36b96a1dcf11a8670bff65fd9b21a4b09"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-win32.whl", hash = "sha256:af520a730d523eab77d754f5cf44cc7dd7ad2d54907adeb3233177eeb22f271b"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-win_amd64.whl", hash = "sha256:141675dae56522126986fa4ca713739d00ed3a6f08f3c2eb92c39c6dfec463ce"}, - {file = "SQLAlchemy-2.0.21-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7614f1eab4336df7dd6bee05bc974f2b02c38d3d0c78060c5faa4cd1ca2af3b8"}, - {file = 
"SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d59cb9e20d79686aa473e0302e4a82882d7118744d30bb1dfb62d3c47141b3ec"}, - {file = "SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a95aa0672e3065d43c8aa80080cdd5cc40fe92dc873749e6c1cf23914c4b83af"}, - {file = "SQLAlchemy-2.0.21-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8c323813963b2503e54d0944813cd479c10c636e3ee223bcbd7bd478bf53c178"}, - {file = "SQLAlchemy-2.0.21-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:419b1276b55925b5ac9b4c7044e999f1787c69761a3c9756dec6e5c225ceca01"}, - {file = "SQLAlchemy-2.0.21-cp37-cp37m-win32.whl", hash = "sha256:4615623a490e46be85fbaa6335f35cf80e61df0783240afe7d4f544778c315a9"}, - {file = "SQLAlchemy-2.0.21-cp37-cp37m-win_amd64.whl", hash = "sha256:cca720d05389ab1a5877ff05af96551e58ba65e8dc65582d849ac83ddde3e231"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b4eae01faee9f2b17f08885e3f047153ae0416648f8e8c8bd9bc677c5ce64be9"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3eb7c03fe1cd3255811cd4e74db1ab8dca22074d50cd8937edf4ef62d758cdf4"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2d494b6a2a2d05fb99f01b84cc9af9f5f93bf3e1e5dbdafe4bed0c2823584c1"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b19ae41ef26c01a987e49e37c77b9ad060c59f94d3b3efdfdbf4f3daaca7b5fe"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fc6b15465fabccc94bf7e38777d665b6a4f95efd1725049d6184b3a39fd54880"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:014794b60d2021cc8ae0f91d4d0331fe92691ae5467a00841f7130fe877b678e"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-win32.whl", hash = "sha256:0268256a34806e5d1c8f7ee93277d7ea8cc8ae391f487213139018b6805aeaf6"}, - {file = 
"SQLAlchemy-2.0.21-cp38-cp38-win_amd64.whl", hash = "sha256:73c079e21d10ff2be54a4699f55865d4b275fd6c8bd5d90c5b1ef78ae0197301"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:785e2f2c1cb50d0a44e2cdeea5fd36b5bf2d79c481c10f3a88a8be4cfa2c4615"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c111cd40910ffcb615b33605fc8f8e22146aeb7933d06569ac90f219818345ef"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9cba4e7369de663611ce7460a34be48e999e0bbb1feb9130070f0685e9a6b66"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50a69067af86ec7f11a8e50ba85544657b1477aabf64fa447fd3736b5a0a4f67"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ccb99c3138c9bde118b51a289d90096a3791658da9aea1754667302ed6564f6e"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:513fd5b6513d37e985eb5b7ed89da5fd9e72354e3523980ef00d439bc549c9e9"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-win32.whl", hash = "sha256:f9fefd6298433b6e9188252f3bff53b9ff0443c8fde27298b8a2b19f6617eeb9"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-win_amd64.whl", hash = "sha256:2e617727fe4091cedb3e4409b39368f424934c7faa78171749f704b49b4bb4ce"}, - {file = "SQLAlchemy-2.0.21-py3-none-any.whl", hash = "sha256:ea7da25ee458d8f404b93eb073116156fd7d8c2a776d8311534851f28277b4ce"}, - {file = "SQLAlchemy-2.0.21.tar.gz", hash = "sha256:05b971ab1ac2994a14c56b35eaaa91f86ba080e9ad481b20d99d77f381bb6258"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""} -typing-extensions = ">=4.2.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet 
(!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=7)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3-binary"] +attrs = ">=19.2" +certifi = "*" +jsonschema = ">=3.0" [[package]] name = "stack-data" @@ -3750,6 +3632,44 @@ files = [ [package.extras] doc = ["reno", "sphinx", "tornado (>=4.5)"] +[[package]] +name = "terminado" +version = "0.17.1" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "terminado-0.17.1-py3-none-any.whl", hash = "sha256:8650d44334eba354dd591129ca3124a6ba42c3d5b70df5051b6921d506fdaeae"}, + {file = "terminado-0.17.1.tar.gz", hash = "sha256:6ccbbcd3a4f8a25a5ec04991f39a0b8db52dfcd487ea0e578d977e6752380333"}, +] + +[package.dependencies] +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} +tornado = ">=6.1.0" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] + +[[package]] +name = "tinycss2" +version = "1.2.1" +description = "A tiny CSS parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, + {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, +] + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["flake8", "isort", "pytest"] + [[package]] name = "tokenize-rt" version = "5.2.0" @@ -3848,6 +3768,17 @@ files = [ {file = "types_awscrt-0.19.2.tar.gz", hash = "sha256:d8c379420ba75b1e43687d12b0b772a5bb17f352859a2bef6aa8f0abde123f55"}, ] +[[package]] +name = "types-python-dateutil" +version = "2.8.19.14" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = "*" +files = [ + {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, + {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, +] + [[package]] name = "types-pytz" version = "2023.3.1.1" @@ -3893,18 +3824,18 @@ files = [ ] [[package]] -name = "uc-micro-py" -version = "1.0.2" -description = "Micro subset of 
unicode data files for linkify-it-py projects." +name = "uri-template" +version = "1.3.0" +description = "RFC 6570 URI Template Processor" optional = false python-versions = ">=3.7" files = [ - {file = "uc-micro-py-1.0.2.tar.gz", hash = "sha256:30ae2ac9c49f39ac6dce743bd187fcd2b574b16ca095fa74cd9396795c954c54"}, - {file = "uc_micro_py-1.0.2-py3-none-any.whl", hash = "sha256:8c9110c309db9d9e87302e2f4ad2c3152770930d88ab385cd544e7a7e75f3de0"}, + {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, + {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, ] [package.extras] -test = ["coverage", "pytest", "pytest-cov"] +dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] [[package]] name = "urllib3" @@ -3922,6 +3853,23 @@ brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotl secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +[[package]] +name = "urllib3" +version = "2.0.6" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, + {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "virtualenv" version = "20.24.5" @@ -3992,15 +3940,41 @@ files = [ {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, ] +[[package]] +name = "webcolors" +version = "1.13" +description = "A library for working with the color formats defined by HTML and CSS." +optional = false +python-versions = ">=3.7" +files = [ + {file = "webcolors-1.13-py3-none-any.whl", hash = "sha256:29bc7e8752c0a1bd4a1f03c14d6e6a72e93d82193738fa860cbff59d0fcc11bf"}, + {file = "webcolors-1.13.tar.gz", hash = "sha256:c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"}, +] + +[package.extras] +docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] +tests = ["pytest", "pytest-cov"] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +optional = false +python-versions = "*" +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + [[package]] name = "websocket-client" -version = "1.6.3" +version = "1.6.4" description = "WebSocket client for Python with low level API options" optional = false 
python-versions = ">=3.8" files = [ - {file = "websocket-client-1.6.3.tar.gz", hash = "sha256:3aad25d31284266bcfcfd1fd8a743f63282305a364b8d0948a43bd606acc652f"}, - {file = "websocket_client-1.6.3-py3-none-any.whl", hash = "sha256:6cfc30d051ebabb73a5fa246efdcc14c8fbebbd0330f8984ac3bb6d9edd2ad03"}, + {file = "websocket-client-1.6.4.tar.gz", hash = "sha256:b3324019b3c28572086c4a319f91d1dcd44e6e11cd340232978c684a7650d0df"}, + {file = "websocket_client-1.6.4-py3-none-any.whl", hash = "sha256:084072e0a7f5f347ef2ac3d8698a5e0b4ffbfcab607628cadabc650fc9a83a24"}, ] [package.extras] @@ -4008,6 +3982,17 @@ docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] +[[package]] +name = "widgetsnbextension" +version = "4.0.9" +description = "Jupyter interactive widgets for Jupyter Notebook" +optional = false +python-versions = ">=3.7" +files = [ + {file = "widgetsnbextension-4.0.9-py3-none-any.whl", hash = "sha256:91452ca8445beb805792f206e560c1769284267a30ceb1cec9f5bcc887d15175"}, + {file = "widgetsnbextension-4.0.9.tar.gz", hash = "sha256:3c1f5e46dc1166dfd40a42d685e6a51396fd34ff878742a3e47c6f0cc4a2a385"}, +] + [[package]] name = "xdoctest" version = "1.1.1" @@ -4059,4 +4044,4 @@ plot = ["plotly"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "f7d10b778061e5e832f9ffdad4031a07e8cc62c517c61af62e47a43951cc1d55" +content-hash = "1747c1b5ec5175ae3bee9752b29402ee18147d0c246ef1b3450e98ebadd64132" diff --git a/python/pyproject.toml b/python/pyproject.toml index 93b6ceebf..2349daf5c 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -3,9 +3,7 @@ name = "kaskada" version = "0.6.0-a.2" description = "Kaskada query builder and local execution engine." requires-python = ">=3.9,<4.0" -classifiers = [ - "Development Status :: 3 - Alpha", -] +classifiers = ["Development Status :: 3 - Alpha"] # We need to list dependencies here for maturin to put them in the package. 
# They should match what poetry believes we need. dependencies = [ @@ -15,12 +13,8 @@ dependencies = [ ] [project.optional-dependencies] -plot = [ - "plotly >= 5.0.0, < 6.0.0", -] -explain = [ - "graphviz >= 0.20.1, < 1.0.0", -] +plot = ["plotly >= 5.0.0, < 6.0.0"] +explain = ["graphviz >= 0.20.1, < 1.0.0"] [tool.poetry] name = "kaskada" @@ -37,7 +31,7 @@ python = ">=3.9,<4.0" pyarrow = "^12.0.1" typing-extensions = "^4.7.1" graphviz = { version = "^0.20.1", optional = true } -plotly = {version = "^5.16.1", optional = true} +plotly = { version = "^5.16.1", optional = true } [tool.poetry.extras] plot = ["plotly"] @@ -79,28 +73,18 @@ mypy = ">=0.930" pandas-stubs = "^2.0.2" typeguard = ">=2.13.3" graphviz = { version = "^0.20.1" } -boto3-stubs = {extras = ["s3"], version = "^1.28.62"} +boto3-stubs = { extras = ["s3"], version = "^1.28.62" } [tool.poetry.group.docs] # Dependencies for documentation. optional = true [tool.poetry.group.docs.dependencies] -sphinx = ">=6.0.0" -sphinx-autobuild = ">=2021.3.14" -sphinx-autodoc-typehints = ">=1.23.0" -sphinx-copybutton = "^0.5.2" -sphinx-design = "^0.5.0" -myst-parser = {version = ">=0.16.1"} -# Use myst-nb from git since the currently released version (0.17.2) pins -# Sphinx to < 6. Once a new release occurs we can upgrade to `0.18.0` or newer. 
-# https://github.com/executablebooks/MyST-NB/issues/530 -myst-nb = { git = "https://github.com/executablebooks/MyST-NB.git", rev = "59854c270deb76f297f228477be5d5088babd143" } -plotly = {version = "^5.16.1"} -linkify-it-py = "^2.0.2" -pydata-sphinx-theme = "^0.13.3" -ablog = "^0.11.4.post1" -sphinx-social-cards = { version = "^0.3.0", python = ">=3.9,<3.12" } +plotly = { version = "^5.16.1" } +quartodoc = "~0.6.3" +nbformat = "^5.9.2" +nbclient = "^0.8.0" +jupyter = "^1.0.0" [tool.poetry.group.test] # Dependencies for testing @@ -108,11 +92,11 @@ optional = true [tool.poetry.group.test.dependencies] boto3 = "^1.28.54" -coverage = { extras = ["toml"], version = ">=6.2"} +coverage = { extras = ["toml"], version = ">=6.2" } pytest = ">=6.2.5" pytest-asyncio = "^0.21.1" pytest-docker-fixtures = "^1.3.17" -xdoctest = {extras = ["colors"], version = ">=0.15.10"} +xdoctest = { extras = ["colors"], version = ">=0.15.10" } [tool.poetry.group.release] # Dependencies for performing the @@ -163,7 +147,5 @@ show_error_context = true # pyproject.toml [tool.pytest.ini_options] -testpaths = [ - "pytests", -] +testpaths = ["pytests"] asyncio_mode = "auto" diff --git a/python/pysrc/kaskada/_execution.py b/python/pysrc/kaskada/_execution.py index ca9ea7742..e9bb9f70e 100644 --- a/python/pysrc/kaskada/_execution.py +++ b/python/pysrc/kaskada/_execution.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from typing import AsyncIterator, Callable, Iterator, Literal, Optional, TypeVar, Union -import kaskada as kd +import kaskada import pyarrow as pa from . 
import _ffi @@ -43,7 +43,7 @@ class _ExecutionOptions: @staticmethod def create( - results: Optional[Union[kd.results.History, kd.results.Snapshot]], + results: Optional[Union[kaskada.results.History, kaskada.results.Snapshot]], row_limit: Optional[int], max_batch_size: Optional[int], mode: Literal["once", "live"] = "once", @@ -56,15 +56,15 @@ def create( ) if results is None: - results = kd.results.History() + results = kaskada.results.History() - if isinstance(results, kd.results.History): + if isinstance(results, kaskada.results.History): options.results = "history" if results.since is not None: options.changed_since = int(results.since.timestamp()) if results.until is not None: options.final_at = int(results.until.timestamp()) - elif isinstance(results, kd.results.Snapshot): + elif isinstance(results, kaskada.results.Snapshot): options.results = "snapshot" if results.changed_since is not None: options.changed_since = int(results.changed_since.timestamp()) diff --git a/python/pysrc/kaskada/_timestream.py b/python/pysrc/kaskada/_timestream.py index 197689223..ffef219db 100644 --- a/python/pysrc/kaskada/_timestream.py +++ b/python/pysrc/kaskada/_timestream.py @@ -19,7 +19,7 @@ overload, ) -import kaskada as kd +import kaskada import kaskada._ffi as _ffi import pandas as pd import pyarrow as pa @@ -31,14 +31,18 @@ if TYPE_CHECKING: import graphviz -#: A literal value that can be used as an argument to a Timestream operation. +# docstring must be under the definition :( LiteralValue: TypeAlias = Optional[Union[int, str, float, bool, timedelta, datetime]] -#: A Timestream or literal which can be used as an argument to a Timestream operation. 
+"""A literal value that can be used as an argument to a Timestream operation.""" + +# docstring must be under the definition :( Arg: TypeAlias = Union[ "Timestream", Callable[["Timestream"], "Timestream"], LiteralValue ] +"""A Timestream or literal which can be used as an argument to a Timestream operation.""" + def _augment_error(args: Sequence[Arg], e: Exception) -> Exception: """Augment an error with information about the arguments.""" @@ -111,12 +115,12 @@ def _call( Args: func: Name of the function to apply. input: The input to use for any "deferred" arguments. - If `None`, then any arguments that require a `Timestream` argument - will produce an error. + If `None`, then any arguments that require a `Timestream` argument + will produce an error. *args: List of arguments to the expression. session: FFI Session to create the expression in. - If unspecified, will infer from the arguments. - Will fail if all arguments are literals and the session is not provided. + If unspecified, will infer from the arguments. + Will fail if all arguments are literals and the session is not provided. Returns: Timestream representing the result of the function applied to the arguments. @@ -191,8 +195,8 @@ def pipe( Args: func: Function to apply to this Timestream. - Alternatively a `(func, keyword)` tuple where `keyword` is a string - indicating the keyword of `func` that expects the Timestream. + Alternatively a `(func, keyword)` tuple where `keyword` is a string + indicating the keyword of `func` that expects the Timestream. *args: Positional arguments passed into ``func``. **kwargs: A dictionary of keyword arguments passed into ``func``. @@ -201,7 +205,7 @@ def pipe( Raises: ValueError: When using `self` with a specific `keyword` if the `keyword` also - appears on in the `kwargs`. + appears on in the `kwargs`. Notes: Use ``.pipe`` when chaining together functions that expect Timestreams. 
@@ -646,7 +650,7 @@ def collect( *, max: Optional[int], min: Optional[int] = 0, - window: Optional[kd.windows.Window] = None, + window: Optional[kaskada.windows.Window] = None, ) -> Timestream: """Return a Timestream collecting up to the last `max` values in the `window`. @@ -654,11 +658,11 @@ def collect( Args: max: The maximum number of values to collect. - If `None` all values are collected. + If `None` all values are collected. min: The minimum number of values to collect before producing a value. - Defaults to 0. + Defaults to 0. window: The window to use for the aggregation. If not specified, - the entire Timestream is used. + the entire Timestream is used. Returns: A Timestream containing the list of collected elements at each point. @@ -720,7 +724,7 @@ def with_key(self, key: Arg, grouping: Optional[str] = None) -> Timestream: Args: key: The new key to use for the grouping. grouping: A string literal naming the new grouping. If no `grouping` is specified, - one will be computed from the type of the `key`. + one will be computed from the type of the `key`. """ return Timestream._call("with_key", key, self, grouping, input=self) @@ -746,7 +750,7 @@ def coalesce(self, arg: Arg, *args: Arg) -> Timestream: Returns: Timestream containing the first non-null value from each point. - If all values are null, then returns null. + If all values are null, then returns null. """ return Timestream._call("coalesce", self, arg, *args, input=self) @@ -804,7 +808,7 @@ def shift_until(self, predicate: Arg) -> Timestream: """ return Timestream._call("shift_until", predicate, self, input=self) - def sum(self, *, window: Optional[kd.windows.Window] = None) -> Timestream: + def sum(self, *, window: Optional[kaskada.windows.Window] = None) -> Timestream: """Return a Timestream summing the values in the `window`. Computes the sum for each key separately. 
@@ -814,7 +818,7 @@ def sum(self, *, window: Optional[kd.windows.Window] = None) -> Timestream: """ return _aggregation("sum", self, window) - def first(self, *, window: Optional[kd.windows.Window] = None) -> Timestream: + def first(self, *, window: Optional[kaskada.windows.Window] = None) -> Timestream: """Return a Timestream containing the first value in the `window`. Computed for each key separately. @@ -824,7 +828,7 @@ def first(self, *, window: Optional[kd.windows.Window] = None) -> Timestream: """ return _aggregation("first", self, window) - def last(self, window: Optional[kd.windows.Window] = None) -> Timestream: + def last(self, window: Optional[kaskada.windows.Window] = None) -> Timestream: """Return a Timestream containing the last value in the `window`. Computed for each key separately. @@ -834,7 +838,7 @@ def last(self, window: Optional[kd.windows.Window] = None) -> Timestream: """ return _aggregation("last", self, window) - def count(self, window: Optional[kd.windows.Window] = None) -> Timestream: + def count(self, window: Optional[kaskada.windows.Window] = None) -> Timestream: """Return a Timestream containing the count value in the `window`. Computed for each key separately. @@ -844,7 +848,7 @@ def count(self, window: Optional[kd.windows.Window] = None) -> Timestream: """ return _aggregation("count", self, window) - def count_if(self, window: Optional[kd.windows.Window] = None) -> Timestream: + def count_if(self, window: Optional[kaskada.windows.Window] = None) -> Timestream: """Return a Timestream containing the count of `true` values in `window`. Computed for each key separately. @@ -854,7 +858,7 @@ def count_if(self, window: Optional[kd.windows.Window] = None) -> Timestream: """ return _aggregation("count_if", self, window) - def max(self, window: Optional[kd.windows.Window] = None) -> Timestream: + def max(self, window: Optional[kaskada.windows.Window] = None) -> Timestream: """Return a Timestream containing the max value in the `window`. 
Computed for each key separately. @@ -864,12 +868,12 @@ def max(self, window: Optional[kd.windows.Window] = None) -> Timestream: See Also: This returns the maximum of values in a column. See - :func:`greatest` to get the maximum value + [](`~kaskada.Timestream.greatest`) to get the maximum value between Timestreams at each point. """ return _aggregation("max", self, window) - def min(self, window: Optional[kd.windows.Window] = None) -> Timestream: + def min(self, window: Optional[kaskada.windows.Window] = None) -> Timestream: """Return a Timestream containing the min value in the `window`. Computed for each key separately. @@ -879,12 +883,12 @@ def min(self, window: Optional[kd.windows.Window] = None) -> Timestream: See Also: This returns the minimum of values in a column. See - :func:`least` to get the minimum value + [](`~kaskada.Timestream.least`) to get the minimum value between Timestreams at each point. """ return _aggregation("min", self, window) - def mean(self, window: Optional[kd.windows.Window] = None) -> Timestream: + def mean(self, window: Optional[kaskada.windows.Window] = None) -> Timestream: """Return a Timestream containing the mean value in the `window`. Computed for each key separately. @@ -894,7 +898,7 @@ def mean(self, window: Optional[kd.windows.Window] = None) -> Timestream: """ return _aggregation("mean", self, window) - def stddev(self, window: Optional[kd.windows.Window] = None) -> Timestream: + def stddev(self, window: Optional[kaskada.windows.Window] = None) -> Timestream: """Return a Timestream containing the standard deviation in the `window`. Computed for each key separately. 
@@ -904,7 +908,7 @@ def stddev(self, window: Optional[kd.windows.Window] = None) -> Timestream: """ return _aggregation("stddev", self, window) - def variance(self, window: Optional[kd.windows.Window] = None) -> Timestream: + def variance(self, window: Optional[kaskada.windows.Window] = None) -> Timestream: """Return a Timestream containing the variance in the `window`. Computed for each key separately. @@ -955,9 +959,9 @@ def seconds_since_previous(self, n: int = 1) -> Timestream: Args: n: The number of points to look back. For example, `n=1` refers to - the previous point. + the previous point. - Defaults to 1 (the previous point). + Defaults to 1 (the previous point). """ time_of_current = Timestream._call("time_of", self).cast(pa.int64()) time_of_previous = Timestream._call("time_of", self).lag(n).cast(pa.int64()) @@ -986,8 +990,8 @@ def record(self, fields: Callable[[Timestream], Mapping[str, Arg]]) -> Timestrea fields: The fields to include in the record. See Also: - kaskada.record: Function for creating a record from one or more - timestreams. + [](`kaskada.record`): Function for creating a record from one or more + timestreams. """ return record(fields(self)) @@ -996,8 +1000,8 @@ def round(self) -> Timestream: Returns: A Timestream of the same type as `self`. The result contains `null` - if the value was `null` at that point. Otherwise, it contains - the result of rounding the value to the nearest integer. + if the value was `null` at that point. Otherwise, it contains + the result of rounding the value to the nearest integer. Notes: This method may be applied to any numeric type. For anything other @@ -1005,8 +1009,8 @@ def round(self) -> Timestream: are already integers. 
See Also: - - :func:`ceil` - - :func:`floor` + - [](`~kaskada.Timestream.ceil`) + - [](`~kaskada.Timestream.floor`) """ return Timestream._call("round", self) @@ -1026,13 +1030,13 @@ def greatest(self, rhs: Arg) -> Timestream: Returns: Each point contains the value from `self` if `self` - is greater than `rhs`, otherwise it contains `rhs`. - If any input is `null` or `NaN`, then that will be - the result. + is greater than `rhs`, otherwise it contains `rhs`. + If any input is `null` or `NaN`, then that will be + the result. See Also: This returns the greatest of two values. See - :func:`max` for the maximum of values in + [](`~kaskada.Timestream.max`) for the maximum of values in a column. """ return Timestream._call("zip_max", self, rhs) @@ -1045,13 +1049,13 @@ def least(self, rhs: Arg) -> Timestream: Returns: Each point contains the value from `self` if `self` - is less than `rhs`, otherwise it contains `rhs`. - If any input is `null` or `NaN`, then that will be - the result. + is less than `rhs`, otherwise it contains `rhs`. + If any input is `null` or `NaN`, then that will be + the result. See Also: This returns the least of two values. See - :func:`min` for the minimum of values in + [](`~kaskada.Timestream.min`) for the minimum of values in a column. """ return Timestream._call("zip_min", self, rhs) @@ -1059,7 +1063,9 @@ def least(self, rhs: Arg) -> Timestream: def preview( self, limit: int = 10, - results: Optional[Union[kd.results.History, kd.results.Snapshot]] = None, + results: Optional[ + Union[kaskada.results.History, kaskada.results.Snapshot] + ] = None, ) -> pd.DataFrame: """Preview the points in this TimeStream as a DataFrame. 
@@ -1071,7 +1077,9 @@ def preview( def to_pandas( self, - results: Optional[Union[kd.results.History, kd.results.Snapshot]] = None, + results: Optional[ + Union[kaskada.results.History, kaskada.results.Snapshot] + ] = None, *, row_limit: Optional[int] = None, ) -> pd.DataFrame: @@ -1080,14 +1088,13 @@ def to_pandas( Args: results: The results to produce in the DataFrame. Defaults to `History()` producing all points. row_limit: The maximum number of rows to return. Defaults to `None` for no limit. - max_batch_size: The maximum number of rows to return in each batch. - Defaults to `None` for no limit. See Also: - - :func:`preview`: For quick peeks at the contents of a TimeStream during development. - - :func:`write`: For writing results to supported destinations without passing through - Pandas. - - :func:`run_iter`: For non-blocking (iterator or async iterator) execution. + - [](`~kaskada.Timestream.preview`): For quick peeks at the contents of a TimeStream during + development. + - [](`~kaskada.Timestream.write`): For writing results to supported destinations without + passing through Pandas. + - [](`~kaskada.Timestream.run_iter`): For non-blocking (iterator or async iterator) execution. """ execution = self._execute(results, row_limit=row_limit) batches = execution.collect_pyarrow() @@ -1097,23 +1104,25 @@ def to_pandas( def write( self, - destination: kd.destinations.Destination, + destination: kaskada.destinations.Destination, mode: Literal["once", "live"] = "once", - results: Optional[Union[kd.results.History, kd.results.Snapshot]] = None, + results: Optional[ + Union[kaskada.results.History, kaskada.results.Snapshot] + ] = None, ) -> Execution: """Execute the TimeStream writing to the given destination. Args: destination: The destination to write to. mode: The execution mode to use. Defaults to `'once'` to produce the results - from the currently available data. Use `'live'` to start a standing query - that continues to process new data until stopped. 
- results: The results to produce. Defaults to `History()` producing all points. + from the currently available data. Use `'live'` to start a standing query + that continues to process new data until stopped. + results: The results to produce. Defaults to `History()` producing all points. Returns: An `ExecutionProgress` which allows iterating (synchronously or asynchronously) - over the progress information, as well as cancelling the query if it is no longer - needed. + over the progress information, as well as cancelling the query if it is no longer + needed. """ raise NotImplementedError @@ -1123,7 +1132,9 @@ def run_iter( kind: Literal["pandas"] = "pandas", *, mode: Literal["once", "live"] = "once", - results: Optional[Union[kd.results.History, kd.results.Snapshot]] = None, + results: Optional[ + Union[kaskada.results.History, kaskada.results.Snapshot] + ] = None, row_limit: Optional[int] = None, max_batch_size: Optional[int] = None, ) -> ResultIterator[pd.DataFrame]: @@ -1135,7 +1146,9 @@ def run_iter( kind: Literal["pyarrow"], *, mode: Literal["once", "live"] = "once", - results: Optional[Union[kd.results.History, kd.results.Snapshot]] = None, + results: Optional[ + Union[kaskada.results.History, kaskada.results.Snapshot] + ] = None, row_limit: Optional[int] = None, max_batch_size: Optional[int] = None, ) -> ResultIterator[pa.RecordBatch]: @@ -1147,7 +1160,9 @@ def run_iter( kind: Literal["row"], *, mode: Literal["once", "live"] = "once", - results: Optional[Union[kd.results.History, kd.results.Snapshot]] = None, + results: Optional[ + Union[kaskada.results.History, kaskada.results.Snapshot] + ] = None, row_limit: Optional[int] = None, max_batch_size: Optional[int] = None, ) -> ResultIterator[dict]: @@ -1158,7 +1173,9 @@ def run_iter( kind: Literal["pandas", "pyarrow", "row"] = "pandas", *, mode: Literal["once", "live"] = "once", - results: Optional[Union[kd.results.History, kd.results.Snapshot]] = None, + results: Optional[ + Union[kaskada.results.History, 
kaskada.results.Snapshot] + ] = None, row_limit: Optional[int] = None, max_batch_size: Optional[int] = None, ) -> Union[ @@ -1171,19 +1188,19 @@ def run_iter( Args: kind: The kind of iterator to produce. Defaults to `pandas`. mode: The execution mode to use. Defaults to `'once'` to produce the results - from the currently available data. Use `'live'` to start a standing query - that continues to process new data until stopped. + from the currently available data. Use `'live'` to start a standing query + that continues to process new data until stopped. results: The results to produce. Defaults to `History()` producing all points. row_limit: The maximum number of rows to return. Defaults to `None` for no limit. max_batch_size: The maximum number of rows to return in each batch. - Defaults to `None` for no limit. + Defaults to `None` for no limit. Returns: Iterator over data of the corresponding kind. The `QueryIterator` allows - cancelling the query or materialization as well as iterating. + cancelling the query or materialization as well as iterating. See Also: - - :func:`write`: To write the results directly to a + - [](`~kaskada.Timestream.write`): To write the results directly to a :class:`Destination`. """ execution = self._execute( @@ -1201,7 +1218,9 @@ def run_iter( def explain( self, kind: Literal["initial_dfg", "final_dfg", "final_plan"] = "final_plan", - results: Optional[Union[kd.results.History, kd.results.Snapshot]] = None, + results: Optional[ + Union[kaskada.results.History, kaskada.results.Snapshot] + ] = None, mode: Literal["once", "live"] = "once", ) -> "graphviz.Source": """Return an explanation of this Timestream will be executed. @@ -1214,15 +1233,15 @@ def explain( results: The results to produce. Defaults to `History()` producing all points. kind: The kind of plan to produce. mode: The execution mode to use. Defaults to `'once'` to produce the results - from the currently available data. 
Use `'live'` to start a standing query - that continues to process new data until stopped. + from the currently available data. Use `'live'` to start a standing query + that continues to process new data until stopped. Returns: A GraphViz representation of the execution plan as a string, SVG string, or SVG. - Specific representation depends on the `format` argument. + Specific representation depends on the `format` argument. Raises: - ValueError if the `kind` is not recognized or the `format` is not supported. + ValueError: if the `kind` is not recognized or the `format` is not supported. Caution: This method is intended for debugging and development purposes only. @@ -1245,7 +1264,7 @@ def explain( def _execute( self, - results: Optional[Union[kd.results.History, kd.results.Snapshot]], + results: Optional[Union[kaskada.results.History, kaskada.results.Snapshot]], *, row_limit: Optional[int] = None, max_batch_size: Optional[int] = None, @@ -1264,28 +1283,28 @@ def _execute( def _aggregation( op: str, input: Timestream, - window: Optional[kd.windows.Window], + window: Optional[kaskada.windows.Window], *args: Union[Timestream, LiteralValue], ) -> Timestream: """Return the aggregation `op` with the given `input`, `window` and `args`. Args: - op: The operation to create. - input: The input to the aggregation. - window: The window to use for the aggregation. - *args: Additional arguments to provide after `input` and before the flattened window. + op: The operation to create. + input: The input to the aggregation. + window: The window to use for the aggregation. + *args: Additional arguments to provide after `input` and before the flattened window. Raises: NotImplementedError: If the window is not a known type. 
""" if window is None: return Timestream._call(op, input, *args, None, None) - elif isinstance(window, kd.windows.Since): + elif isinstance(window, kaskada.windows.Since): predicate = window.predicate if callable(predicate): predicate = predicate(input) return Timestream._call(op, input, *args, predicate, None) - elif isinstance(window, kd.windows.Sliding): + elif isinstance(window, kaskada.windows.Sliding): predicate = window.predicate if callable(predicate): predicate = predicate(input) @@ -1293,7 +1312,7 @@ def _aggregation( return Timestream._call(op, input, *args, predicate, window.duration).filter( predicate ) - elif isinstance(window, kd.windows.Trailing): + elif isinstance(window, kaskada.windows.Trailing): if op != "collect": raise NotImplementedError( f"Aggregation '{op} does not support trailing windows" @@ -1315,7 +1334,7 @@ def _aggregation( # `duration` has passed with no "real" inputs. merged_input = record({"input": input, "shift": input_shift}).col("input") return Timestream._call("collect", merged_input, *args, None, trailing_ns) - elif isinstance(window, kd.windows.Tumbling): + elif isinstance(window, kaskada.windows.Tumbling): # Tumbling windows are analogous to Since windows, aside from output behavior. # Tumbling windows only emit once per window. However, this behavior is not implemented # in Sparrow yet, so we hack this by using a Since window with a filter applied afterwards @@ -1337,8 +1356,8 @@ def record(fields: Mapping[str, Arg]) -> Timestream: fields: The fields to include in the record. See Also: - Timestream.record: Method for creating a record from fields computed from - a timestream. + [Timestream.record](`kaskada.Timestream.record`): Method for creating a record + from fields computed from a timestream. 
""" import itertools diff --git a/python/pysrc/kaskada/results.py b/python/pysrc/kaskada/results.py index a661ad744..4d6286965 100644 --- a/python/pysrc/kaskada/results.py +++ b/python/pysrc/kaskada/results.py @@ -7,32 +7,40 @@ @dataclass class History: - """Execution options for queries producing all historic points.""" + """Execution options for queries producing all historic points. + + Args: + since: If set, only returns points after this time. + + Setting this allows incremental execution to use a checkpoint + from a time before the `since` time. + + until: Only return points less than or equal to this time. + If not set, the current time will be used. + """ - #: If set, only returns points after this time. - #: - #: Setting this allows incremental execution to use a checkpoint - #: from a time before the `since` time. since: Optional[datetime] = None - #: Only return points less than or equal to this time. - #: If not set, the current time will be used. until: Optional[datetime] = None @dataclass class Snapshot: - """Execution options for queries producing snapshots at a specific time.""" - - #: If set, only includes entities that changed after this time. - #: - #: Snapshot queries support incremental execution even when this isn't set. - #: However, every snapshot will include every entity unless this is set. - #: When writing results to an external store that already has values - #: from an earlier snapshot, this can be used to reduce the amount of - #: data to be written. + """Execution options for queries producing snapshots at a specific time. + + Args: + changed_since: If set, only includes entities that changed after this time. + + Snapshot queries support incremental execution even when this isn't set. + However, every snapshot will include every entity unless this is set. + When writing results to an external store that already has values + from an earlier snapshot, this can be used to reduce the amount of + data to be written. 
+ + at: If set, produces the snapshot at the given time. + If not set, the current time will be used. + """ + changed_since: Optional[datetime] = None - #: If set, produces the snapshot at the given time. - #: If not set, the current time will be used. at: Optional[datetime] = None diff --git a/python/pysrc/kaskada/sources/arrow.py b/python/pysrc/kaskada/sources/arrow.py index 4684a7124..53fe90e75 100644 --- a/python/pysrc/kaskada/sources/arrow.py +++ b/python/pysrc/kaskada/sources/arrow.py @@ -35,12 +35,12 @@ def __init__( key_column: The name of the column containing the key. schema: The schema to use. subsort_column: The name of the column containing the subsort. - If not provided, the subsort will be assigned by the system. + If not provided, the subsort will be assigned by the system. grouping_name: The name of the group associated with each key. - This is used to ensure implicit joins are only performed between data grouped - by the same entity. + This is used to ensure implicit joins are only performed between data grouped + by the same entity. time_unit: The unit of the time column. One of `ns`, `us`, `ms`, or `s`. - If not specified (and not specified in the data), nanosecond will be assumed. + If not specified (and not specified in the data), nanosecond will be assumed. """ super().__init__( schema=schema, @@ -69,13 +69,13 @@ async def create( time_column: The name of the column containing the time. key_column: The name of the column containing the key. subsort_column: The name of the column containing the subsort. - If not provided, the subsort will be assigned by the system. + If not provided, the subsort will be assigned by the system. schema: The schema to use. If not provided, it will be inferred from the input. grouping_name: The name of the group associated with each key. - This is used to ensure implicit joins are only performed between data grouped - by the same entity. 
+ This is used to ensure implicit joins are only performed between data grouped + by the same entity. time_unit: The unit of the time column. One of `ns`, `us`, `ms`, or `s`. - If not specified (and not specified in the data), nanosecond will be assumed. + If not specified (and not specified in the data), nanosecond will be assumed. """ if schema is None: if dataframe is None: @@ -123,17 +123,17 @@ def __init__( key_column: The name of the column containing the key. schema: The schema to use. queryable: Whether added rows will be available for running queries. - If True, rows (both provided to the constructor and added later) will be available - for interactive queries. If False, rows will be discarded after being sent to any - running materializations. Consider setting this to False when the source will only - be used for materialization to avoid unnecessary memory usage. + If True, rows (both provided to the constructor and added later) will be available + for interactive queries. If False, rows will be discarded after being sent to any + running materializations. Consider setting this to False when the source will only + be used for materialization to avoid unnecessary memory usage. subsort_column: The name of the column containing the subsort. - If not provided, the subsort will be assigned by the system. + If not provided, the subsort will be assigned by the system. grouping_name: The name of the group associated with each key. - This is used to ensure implicit joins are only performed between data grouped - by the same entity. + This is used to ensure implicit joins are only performed between data grouped + by the same entity. time_unit: The unit of the time column. One of `ns`, `us`, `ms`, or `s`. - If not specified (and not specified in the data), nanosecond will be assumed. + If not specified (and not specified in the data), nanosecond will be assumed. 
""" super().__init__( queryable=queryable, @@ -166,18 +166,18 @@ async def create( time_column: The name of the column containing the time. key_column: The name of the column containing the key. queryable: Whether added rows will be available for running queries. - If True, rows (both provided to the constructor and added later) will be available - for interactive queries. If False, rows will be discarded after being sent to any - running materializations. Consider setting this to False when the source will only - be used for materialization to avoid unnecessary memory usage. + If True, rows (both provided to the constructor and added later) will be available + for interactive queries. If False, rows will be discarded after being sent to any + running materializations. Consider setting this to False when the source will only + be used for materialization to avoid unnecessary memory usage. subsort_column: The name of the column containing the subsort. - If not provided, the subsort will be assigned by the system. + If not provided, the subsort will be assigned by the system. schema: The schema to use. If not provided, it will be inferred from the input. grouping_name: The name of the group associated with each key. - This is used to ensure implicit joins are only performed between data grouped - by the same entity. + This is used to ensure implicit joins are only performed between data grouped + by the same entity. time_unit: The unit of the time column. One of `ns`, `us`, `ms`, or `s`. - If not specified (and not specified in the data), nanosecond will be assumed. + If not specified (and not specified in the data), nanosecond will be assumed. """ if schema is None: if rows is None: @@ -227,12 +227,12 @@ def __init__( key_column: The name of the column containing the key. schema: The schema to use. subsort_column: The name of the column containing the subsort. - If not provided, the subsort will be assigned by the system. 
+ If not provided, the subsort will be assigned by the system. grouping_name: The name of the group associated with each key. - This is used to ensure implicit joins are only performed between data grouped - by the same entity. + This is used to ensure implicit joins are only performed between data grouped + by the same entity. time_unit: The unit of the time column. One of `ns`, `us`, `ms`, or `s`. - If not specified (and not specified in the data), nanosecond will be assumed. + If not specified (and not specified in the data), nanosecond will be assumed. """ super().__init__( schema=schema, @@ -270,13 +270,13 @@ async def create( time_column: The name of the column containing the time. key_column: The name of the column containing the key. subsort_column: The name of the column containing the subsort. - If not provided, the subsort will be assigned by the system. + If not provided, the subsort will be assigned by the system. schema: The schema to use. If not provided, it will be inferred from the input. grouping_name: The name of the group associated with each key. - This is used to ensure implicit joins are only performed between data grouped - by the same entity. + This is used to ensure implicit joins are only performed between data grouped + by the same entity. time_unit: The unit of the time column. One of `ns`, `us`, `ms`, or `s`. - If not specified (and not specified in the data), nanosecond will be assumed. + If not specified (and not specified in the data), nanosecond will be assumed. """ if isinstance(csv_string, str): csv_string = BytesIO(csv_string.encode("utf-8")) @@ -427,12 +427,12 @@ def __init__( key_column: The name of the column containing the key. schema: The schema to use. subsort_column: The name of the column containing the subsort. - If not provided, the subsort will be assigned by the system. + If not provided, the subsort will be assigned by the system. grouping_name: The name of the group associated with each key. 
- This is used to ensure implicit joins are only performed between data grouped - by the same entity. + This is used to ensure implicit joins are only performed between data grouped + by the same entity. time_unit: The unit of the time column. One of `ns`, `us`, `ms`, or `s`. - If not specified (and not specified in the data), nanosecond will be assumed. + If not specified (and not specified in the data), nanosecond will be assumed. """ super().__init__( schema=schema, @@ -462,13 +462,13 @@ async def create( time_column: The name of the column containing the time. key_column: The name of the column containing the key. subsort_column: The name of the column containing the subsort. - If not provided, the subsort will be assigned by the system. + If not provided, the subsort will be assigned by the system. schema: The schema to use. If not provided, it will be inferred from the input. grouping_name: The name of the group associated with each key. - This is used to ensure implicit joins are only performed between data grouped - by the same entity. + This is used to ensure implicit joins are only performed between data grouped + by the same entity. time_unit: The unit of the time column. One of `ns`, `us`, `ms`, or `s`. - If not specified (and not specified in the data), nanosecond will be assumed. + If not specified (and not specified in the data), nanosecond will be assumed. """ if isinstance(json_string, str): json_string = BytesIO(json_string.encode("utf-8")) @@ -520,12 +520,12 @@ def __init__( key_column: The name of the column containing the key. schema: The schema to use. subsort_column: The name of the column containing the subsort. - If not provided, the subsort will be assigned by the system. + If not provided, the subsort will be assigned by the system. grouping_name: The name of the group associated with each key. - This is used to ensure implicit joins are only performed between data grouped - by the same entity. 
+ This is used to ensure implicit joins are only performed between data grouped + by the same entity. time_unit: The unit of the time column. One of `ns`, `us`, `ms`, or `s`. - If not specified (and not specified in the data), nanosecond will be assumed. + If not specified (and not specified in the data), nanosecond will be assumed. """ super().__init__( schema=schema, @@ -552,18 +552,18 @@ async def create( Args: file: The url or path of the Parquet file to add. Paths should be relative to the - current working directory or absolute. URLs may describe local file paths or - object-store locations. + current working directory or absolute. URLs may describe local file paths or + object-store locations. time_column: The name of the column containing the time. key_column: The name of the column containing the key. schema: The schema to use. If not provided, it will be inferred from the input. subsort_column: The name of the column containing the subsort. - If not provided, the subsort will be assigned by the system. + If not provided, the subsort will be assigned by the system. grouping_name: The name of the group associated with each key. - This is used to ensure implicit joins are only performed between data grouped - by the same entity. + This is used to ensure implicit joins are only performed between data grouped + by the same entity. time_unit: The unit of the time column. One of `ns`, `us`, `ms`, or `s`. - If not specified (and not specified in the data), nanosecond will be assumed. + If not specified (and not specified in the data), nanosecond will be assumed. """ if schema is None: if file is None: diff --git a/python/pysrc/kaskada/windows.py b/python/pysrc/kaskada/windows.py index f878e7c54..018ec595e 100644 --- a/python/pysrc/kaskada/windows.py +++ b/python/pysrc/kaskada/windows.py @@ -24,9 +24,6 @@ class Since(Window): Args: predicate: the condition used to determine when the window resets. 
- - Returns: - Window for aggregating cumulative values since the predicate. """ #: The boolean Timestream to use as predicate for the window. @@ -38,27 +35,47 @@ class Since(Window): @staticmethod def minutely() -> Since: - """Return a window since the start of each calendar minute.""" + """Return a window since the start of each calendar minute. + + Returns: + Window for aggregating cumulative values since the predicate. + """ return Since(predicate=lambda domain: Timestream._call("minutely", domain)) @staticmethod def hourly() -> Since: - """Return a window since the start of each calendar hour.""" + """Return a window since the start of each calendar hour. + + Returns: + Window for aggregating cumulative values since the predicate. + """ return Since(predicate=lambda domain: Timestream._call("hourly", domain)) @staticmethod def daily() -> Since: - """Return a window since the start of each calendar day.""" + """Return a window since the start of each calendar day. + + Returns: + Window for aggregating cumulative values since the predicate. + """ return Since(predicate=lambda domain: Timestream._call("daily", domain)) @staticmethod def monthly() -> Since: - """Return a window since the start of each calendar month.""" + """Return a window since the start of each calendar month. + + Returns: + Window for aggregating cumulative values since the predicate. + """ return Since(predicate=lambda domain: Timestream._call("monthly", domain)) @staticmethod def yearly() -> Since: - """Return a window since the start of each calendar year.""" + """Return a window since the start of each calendar year. + + Returns: + Window for aggregating cumulative values since the predicate. + """ return Since(predicate=lambda domain: Timestream._call("yearly", domain)) @@ -73,10 +90,7 @@ class Tumbling(Window): Args: predicate: the condition used to determine when the window resets. - Returns: - Window for aggregating values since the predicate. 
- - Note: + Notes: Like other systems, Kaskada treats tumbling windows as non-overlapping. When one window ends the next starts. @@ -94,27 +108,47 @@ class Tumbling(Window): @staticmethod def minutely() -> Tumbling: - """Return a tumbling window that resets at the start of each calendar minute.""" + """Return a tumbling window that resets at the start of each calendar minute. + + Returns: + Window for aggregating values since the predicate. + """ return Tumbling(predicate=lambda domain: Timestream._call("minutely", domain)) @staticmethod def hourly() -> Tumbling: - """Return a tumbling window that resets at the start of each calendar hour.""" + """Return a tumbling window that resets at the start of each calendar hour. + + Returns: + Window for aggregating values since the predicate. + """ return Tumbling(predicate=lambda domain: Timestream._call("hourly", domain)) @staticmethod def daily() -> Tumbling: - """Return a tumbling window that resets at the start of each calendar day.""" + """Return a tumbling window that resets at the start of each calendar day. + + Returns: + Window for aggregating values since the predicate. + """ return Tumbling(predicate=lambda domain: Timestream._call("daily", domain)) @staticmethod def monthly() -> Tumbling: - """Return a tumbling window that resets at the start of each calendar month.""" + """Return a tumbling window that resets at the start of each calendar month. + + Returns: + Window for aggregating values since the predicate. + """ return Tumbling(predicate=lambda domain: Timestream._call("monthly", domain)) @staticmethod def yearly() -> Tumbling: - """Return a tumbling window that resets at the start of each calendar year.""" + """Return a tumbling window that resets at the start of each calendar year. + + Returns: + Window for aggregating values since the predicate. 
+ """ return Tumbling(predicate=lambda domain: Timestream._call("yearly", domain)) @@ -129,10 +163,7 @@ class Sliding(Window): Args: duration: the number of active windows at any given time. predicate: the condition used to determine when the oldest window ends and a new - window starts. - - Returns: - Overlapping windows for aggregating values. + window starts. """ #: The number of sliding intervals to use in the window. @@ -155,6 +186,9 @@ def minutely(duration: int) -> Sliding: Args: duration: The number of minutes to use in the window. + + Returns: + Overlapping windows for aggregating values. """ return Sliding( duration=duration, @@ -167,6 +201,9 @@ def hourly(duration: int) -> Sliding: Args: duration: The number of hours to use in the window. + + Returns: + Overlapping windows for aggregating values. """ return Sliding( duration=duration, @@ -179,6 +216,9 @@ def daily(duration: int) -> Sliding: Args: duration: The number of days to use in the window. + + Returns: + Overlapping windows for aggregating values. """ return Sliding( duration=duration, @@ -191,6 +231,9 @@ def monthly(duration: int) -> Sliding: Args: duration: The number of months to use in the window. + + Returns: + Overlapping windows for aggregating values. """ return Sliding( duration=duration, @@ -203,6 +246,9 @@ def yearly(duration: int) -> Sliding: Args: duration: The number of years to use in the window. + + Returns: + Overlapping windows for aggregating values. """ return Sliding( duration=duration,