diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4d4dbef1a..c244fc1ae 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -12,6 +12,28 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- added new method `Grid::delete_orders` and the corresponding switch
`--delete-orders` in the subcommand `write` of the CLI
+### Changed
+
+- Python API: dropped the top-level Python interface layer
+- Python API: renamed `lumi` to `channel` in the PyO3 Python interface. This
+  concerns 1) the argument names of `convolute_with_one` and similar functions;
+  2) the module `pineappl.lumi`, which was moved to `pineappl.boc`; and 3) the
+  class `LumiEntry`, which was renamed to `Channel`
+- Python API: `.into()` needs to be explicitly called on subgrids when calling
+ `pineappl.grid.set_subgrid()`
+- Python API: replaced `pineappl.grid.PyPidBasis` with
+ `pineappl.evolution.PidBasis`
+- Python API: replaced `pineappl.grid.PyOperatorSliceInfo` with
+ `pineappl.evolution.OperatorSliceInfo`
+- Python API: dropped all `Py` prefixes; for instance, `PyEvolveInfo` was
+  renamed to `EvolveInfo`
+
+### Removed
+
+- Python API: removed `pineappl.grid.Grid.create()` and
+ `pineappl.fk_table.FkTable.from_grid()` methods; use the constructors
+ of the respective class instead
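+
+  A minimal, hedged sketch of the replacement (the path is illustrative and
+  `FkTable(grid)` assumes the new constructor accepts a `Grid`, mirroring the
+  removed `from_grid()`):
+
+  ```python
+  import pineappl
+
+  grid = pineappl.grid.Grid.read("path/to/grid.pineappl.lz4")
+  # previously: fk = pineappl.fk_table.FkTable.from_grid(grid)
+  fk = pineappl.fk_table.FkTable(grid)
+  ```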
+
## [0.8.2] - 22/07/2024
### Changed
diff --git a/pineappl_py/.gitignore b/pineappl_py/.gitignore
index 21abbfe8b..83eb351cd 100644
--- a/pineappl_py/.gitignore
+++ b/pineappl_py/.gitignore
@@ -21,8 +21,11 @@ coverage.xml
.coverage
# Sphinx documentation
-docs/_build/
-doc/_build/
+# Ignore auto-generated module references
+# TODO: as long as apidoc does not work, we need to keep this folder
+# docs/source/modules
+# Ignore temporary build files
+docs/build/
# Environments
.env
diff --git a/pineappl_py/README.md b/pineappl_py/README.md
index da57beb55..a91bb744e 100644
--- a/pineappl_py/README.md
+++ b/pineappl_py/README.md
@@ -12,3 +12,49 @@ For installation instructions see the [documentation].
[PyO3]: https://pyo3.rs
[Rust API]: https://docs.rs/pineappl
[documentation]: https://pineappl.readthedocs.io/en/latest/installation.html
+
+## Development
+
+Run
+
+```shell
+python -m venv env && . env/bin/activate
+```
+
+to set up a new environment and check that `pip --version` returns at least `pip
+22.0 from ...`. If not, upgrade `pip` via
+
+```shell
+pip install -U pip
+```
+
+Next, install `maturin`:
+
+```shell
+pip install maturin
+```
+
+Run
+
+```shell
+maturin develop
+```
+
+to build the project; this also installs it into the active environment, so
+that it can be used from any Python project running in that environment.
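+
+As a quick smoke test (not an official workflow step), you can print the
+version exposed by the freshly built module:
+
+```shell
+python -c 'import pineappl; print(pineappl.version)'
+```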
+
+### Documentation
+
+Run the following once to install the documentation's dependencies:
+
+```shell
+pip install '.[docs]'
+```
+
+Then run
+
+```shell
+( cd docs && make clean html )
+```
+
+to generate the documentation.
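+
+The HTML output ends up in the Sphinx build directory (`docs/build/`, which is
+ignored by Git). To browse it locally you can, for instance, serve it with
+Python's built-in web server (the exact path assumes the default Sphinx
+layout):
+
+```shell
+python -m http.server --directory docs/build/html
+```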
diff --git a/pineappl_py/docs/.gitignore b/pineappl_py/docs/.gitignore
deleted file mode 100644
index 3025cff24..000000000
--- a/pineappl_py/docs/.gitignore
+++ /dev/null
@@ -1,9 +0,0 @@
-# autogenerated file
-source/development/code_todos.rst
-# Ignore auto generated module references
-source/modules
-source/development/ekomark
-# ignore temporary build files
-_build/
-# Ignore generated sphinx-bibtex file
-source/bibtex.json
diff --git a/pineappl_py/docs/Makefile b/pineappl_py/docs/Makefile
index ea76ec019..db4a541c0 100644
--- a/pineappl_py/docs/Makefile
+++ b/pineappl_py/docs/Makefile
@@ -24,8 +24,9 @@ clean:
rm -rf build
rm -rf _build
+# TODO: as long as apidoc does not work, we must not delete $(PINEAPPLOUT)
cleanall: clean
- rm -rf $(PINEAPPLOUT)
+ # rm -rf $(PINEAPPLOUT)
.PHONY: help Makefile
diff --git a/pineappl_py/docs/source/conf.py b/pineappl_py/docs/source/conf.py
index 0026f9267..8c3b56cf3 100644
--- a/pineappl_py/docs/source/conf.py
+++ b/pineappl_py/docs/source/conf.py
@@ -1,174 +1,65 @@
# Configuration file for the Sphinx documentation builder.
#
-# This file only contains a selection of the most common options. For a full
-# list see the documentation:
+# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
-# -- Path setup --------------------------------------------------------------
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#
-# import os
-# import sys
-# sys.path.insert(0, os.path.abspath('.'))
-
-import pathlib
-import os
-import sys
-
-here = pathlib.Path(__file__).absolute().parent
-
# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
-project = "pineappl"
-copyright = "2020-2021, the PineAPPL team"
-author = "the PineAPPL team"
+import pineappl
+import sys
+import pathlib
+project = 'pineappl'
+copyright = '2020–2024, the PineAPPL team'
+author = 'the PineAPPL team'
+release = pineappl.version
+version = release
-# -- General configuration ---------------------------------------------------
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
extensions = [
- "sphinx.ext.autodoc",
- "sphinx.ext.doctest",
- "sphinx.ext.intersphinx",
- "sphinx.ext.todo",
- "sphinx.ext.coverage",
- "sphinx.ext.mathjax",
- "sphinx.ext.ifconfig",
- "sphinx.ext.viewcode",
- "sphinx.ext.autosectionlabel",
- "sphinx.ext.napoleon",
- "sphinxcontrib.bibtex",
- "sphinx.ext.graphviz",
- "sphinx.ext.extlinks",
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.autosectionlabel',
+ 'sphinx.ext.extlinks',
+ 'sphinx.ext.inheritance_diagram',
+ 'sphinx.ext.intersphinx',
+ 'sphinx.ext.napoleon',
+ 'sphinx.ext.todo',
+ 'sphinx_rtd_theme',
]
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-#
-source_suffix = {
- ".rst": "restructuredtext",
- ".txt": "restructuredtext",
-}
-
-autosectionlabel_prefix_document = True
-# autosectionlabel_maxdepth = 10
-# Allow to embed rst syntax in markdown files.
-enable_eval_rst = True
-
-# The master toctree document.
-master_doc = "index"
-bibtex_bibfiles = ["refs.bib"]
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-# This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = ["shared/*"]
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = None
-
-# A string to be included at the beginning of all files
-shared = here / "shared"
-rst_prolog = "\n".join([open(x).read() for x in os.scandir(shared)])
extlinks = {
- "yadism": ("https://nnpdf.github.io/yadism/%s", "yadism - %s"),
"rustdoc": ("https://docs.rs/pineappl/latest/pineappl/%s", "PineAPPL - %s"),
- "pineko": ("https://github.com/NNPDF/pineko/%s", "pineko - %s"),
}
+templates_path = ['_templates']
+exclude_patterns = []
+
# -- Options for HTML output -------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-#
-html_theme = "sphinx_rtd_theme"
+html_theme = 'sphinx_rtd_theme'
+html_static_path = ['_static']
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-# -- Extension configuration -------------------------------------------------
+# TODO: find a way to reactivate apidoc, which doesn't seem to work for the moment.
-# -- Options for intersphinx extension ---------------------------------------
+# here = pathlib.Path(__file__).absolute().parent
+# # https://github.com/readthedocs/readthedocs.org/issues/1139#issuecomment-312626491
+# def run_apidoc(_):
+# from sphinx.ext.apidoc import main # pylint: disable=import-outside-toplevel
+
+# sys.path.append(str(here.parent))
+# # analyse 'pineappl'
+# docs_dest = here / "modules"
+# import pineappl # pylint: disable=import-outside-toplevel
+
+# # note that we can NOT point to the local directory (`here.parents[1] / "pineappl"`)
+# # but we need the package built by `maturin` and installed by `pip`
+# package = pathlib.Path(pineappl.__file__).parent / "pineappl"
+# main(["--module-first", "--no-toc", "-o", str(docs_dest), str(package)])
-# Example configuration for intersphinx: refer to the Python standard library.
-# Thanks https://github.com/bskinn/sphobjinv
-intersphinx_mapping = {
- "python": ("https://docs.python.org/3/", None),
- "scipy": ("https://docs.scipy.org/doc/scipy/", None),
- "numpy": ("https://numpy.org/doc/stable", None),
-}
-# -- Options for todo extension ----------------------------------------------
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-mathjax3_config = {
- "tex": {
- "macros": {
- # fncs
- # "atan": [r"\text{atan}", 0],
- # "span": [r"\text{span}", 0],
- }
- }
-}
-# https://stackoverflow.com/questions/1871549/determine-if-python-is-running-inside-virtualenv
-def get_base_prefix_compat():
- """Get base/real prefix, or sys.prefix if there is none."""
- return (
- getattr(sys, "base_prefix", None)
- or getattr(sys, "real_prefix", None)
- or sys.prefix
- )
-
-
-def in_virtualenv():
- return get_base_prefix_compat() != sys.prefix
-
-
-# https://github.com/readthedocs/readthedocs.org/issues/1139#issuecomment-312626491
-def run_apidoc(_):
- import subprocess # pylint: disable=import-outside-toplevel
-
- from sphinx.ext.apidoc import main # pylint: disable=import-outside-toplevel
-
- sys.path.append(str(here.parent))
- # run maturin to have the latest stuff
- pkg_root = here.parents[1]
- # if in_virtualenv(): # in local repos we're always in a virtualenv
- # subprocess.run(["maturin", "develop"], cwd=pkg_root)
- # else: # on RTD we can't (for some reason we're not inside the virtualenv - or maybe only the subshell isn't)
- # subprocess.run(["maturin", "build"], cwd=pkg_root)
- # # On RTD we were already installing before, but of course this was fake
- # # as it only had the raw Python stuff, so let's do it again
- # subprocess.run(["pip", "uninstall", "pineappl", "-y"], cwd=pkg_root)
- # wheels = list((pkg_root / "target" / "wheels").glob("pineappl*.whl"))
- # # In case there are several wheels (as on RTD) find the one matching (and let the others happily fail)
- # for wheel in wheels:
- # subprocess.run(["pip", "install", str(wheel.absolute())], cwd=pkg_root)
-
- # analyse 'pineappl'
- docs_dest = here / "modules" / "pineappl"
- import pineappl
-
- # note that we can NOT point to the local directory (`here.parents[1] / "pineappl"`)
- # but we need the package built by `maturin` and installed by `pip`
- package = pathlib.Path(pineappl.__file__).parent
- main(["--module-first", "-o", str(docs_dest), str(package)])
- (docs_dest / "modules.rst").unlink()
-
-
-def setup(app):
- app.connect("builder-inited", run_apidoc)
+# def setup(app):
+# app.connect("builder-inited", run_apidoc)
diff --git a/pineappl_py/docs/source/implementation.rst b/pineappl_py/docs/source/implementation.rst
deleted file mode 100644
index 32a5e90ae..000000000
--- a/pineappl_py/docs/source/implementation.rst
+++ /dev/null
@@ -1,9 +0,0 @@
-Implementation
-==============
-
-The wrapper is built using PyO3 to interface Rust to Python. This requires (for the moment) unfortunately a bit of overhead code so there exist four layers of code:
-
-- The original Rust library that actually provides all the functionality. Any true code should be placed here as this is shared by all interfaces.
-- The PyO3 wrapper objects written in Rust that define which methods are exposed to Python and that takes care of the type conversion from Python-understandable types to Rust types.
-- The PyO3 wrapper objects in Python which are an exact mirror of their Rust equivalent. This translation is provided by PyO3 and note that it also preserves the associated documentation.
-- The additional Python wrappers around the raw PyO3 objects which provide convenience wrappers to cast arbitrary Python objects to objects that can actually used by PyO3.
diff --git a/pineappl_py/docs/source/index.rst b/pineappl_py/docs/source/index.rst
index 632e0addc..600c36b30 100644
--- a/pineappl_py/docs/source/index.rst
+++ b/pineappl_py/docs/source/index.rst
@@ -1,14 +1,15 @@
Welcome to PineAPPL
===================
-This is the Python wrapper for the `Rust PineAPPL library `_.
+This is the Python wrapper for the `Rust PineAPPL library <https://docs.rs/pineappl>`_ using `PyO3 <https://pyo3.rs>`_.
PineAPPL is a computer library that makes it possible to produce fast-interpolation grids for fitting parton distribution functions (PDFs) including corrections of strong and electroweak origin.
The :doc:`installation` instructions are given :doc:`here `.
A practical example can be found in the ``example/`` subfolder of the `repository `_.
-The Python wrapper is also used in :yadism:`\ ` and :pineko:`\ `. We also list some common :doc:`recipes` here.
+The Python wrapper is also used in `yadism <https://nnpdf.github.io/yadism>`_ and `pineko <https://github.com/NNPDF/pineko>`_.
+We also list some common :doc:`recipes` here.
.. toctree::
:maxdepth: 1
@@ -17,17 +18,5 @@ The Python wrapper is also used in :yadism:`\ ` and :pineko:`\ `. We also list s
installation
recipes
- implementation
- API
+ API
indices
-
-.. important::
-
- If you are looking for the methods of a specific class, be aware that part of
- them are just passed to the underlying Rust object, whose class is the same
- of the user-facing one, but prefixed with a ``Py``, e.g.:
- :class:`pineappl.grid.Grid` and :class:`pineappl.pineappl.grid.PyGrid`.
-
- You will find the documentation of the unwrapped method in the raw ``Py``
- class, while part of the methods are wrapped and thus even documented in the
- user-facing class.
diff --git a/pineappl_py/docs/source/modules/pineappl.rst b/pineappl_py/docs/source/modules/pineappl.rst
new file mode 100644
index 000000000..3bbe4b8ed
--- /dev/null
+++ b/pineappl_py/docs/source/modules/pineappl.rst
@@ -0,0 +1,42 @@
+PineAPPL's Python API
+=====================
+
+.. automodule:: pineappl
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+.. automodule:: pineappl.bin
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+.. automodule:: pineappl.channel
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+.. automodule:: pineappl.evolution
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+.. automodule:: pineappl.fk_table
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+.. automodule:: pineappl.grid
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+.. automodule:: pineappl.subgrid
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+.. automodule:: pineappl.import_only_subgrid
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pineappl_py/docs/source/recipes.rst b/pineappl_py/docs/source/recipes.rst
index f026bf4ea..a599a888a 100644
--- a/pineappl_py/docs/source/recipes.rst
+++ b/pineappl_py/docs/source/recipes.rst
@@ -13,7 +13,7 @@ How can I convolve a given PineAPPL grid with my PDF?
import lhapdf
g = pineappl.grid.Grid.read("path/to/grid.pineappl.lz4")
pdf = lhapdf.mkPDF("YourPDF", 0)
- bins = g.convolve(pdf.xfxQ2, pdf.xfxQ2, pdf.alphasQ2)
+ bins = g.convolve_with_one(2212, pdf.xfxQ2, pdf.alphasQ2)
If the grid is actually an FkTable just replace
@@ -23,16 +23,11 @@ If the grid is actually an FkTable just replace
.. note::
- For the :meth:`pineappl.pineappl.PyGrid.read` function, both ``.pineappl``
+ For the :meth:`pineappl.grid.Grid.read` function, both ``.pineappl``
and ``.pineappl.lz4`` extensions are acceptable, as long as they are
consistent (without ``.lz4`` the grid is assumed not to be compressed, with
it is assumed compressed).
- This is asymmetric with respect to the
- :meth:`pineappl.pineappl.PyGrid.write` function, in which the function will
- refuse to guess, so another version is provided to write a compressed grid,
- :meth:`pineappl.pineappl.PyGrid.write_lz4`
-
How can I edit a grid?
----------------------
@@ -60,9 +55,8 @@ change even the bins themselves.
remapper = pineappl.bin.BinRemapper(normalizations, limits)
g.set_remapper(remapper)
-For more details about :class:`pineappl.bin.BinRemapper` check also the `Rust
-documentation
-`_, e.g.
+For more details about :class:`pineappl.bin.BinRemapper` check also
+the Rust documentation of :rustdoc:`pineappl::bin::BinRemapper <bin/struct.BinRemapper.html>`, e.g.
on how to treat multidimensional distributions.
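+
+A hedged, one-dimensional sketch (the numbers are purely illustrative):
+
+.. code-block:: python
+
+    normalizations = [1.0, 1.0]        # one normalization factor per bin
+    limits = [(0.0, 0.5), (0.5, 1.0)]  # one (left, right) pair per bin
+    remapper = pineappl.bin.BinRemapper(normalizations, limits)
+    g.set_remapper(remapper)
+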
How can I get the bin configurations from a given PineAPPL grid?
diff --git a/pineappl_py/docs/source/refs.bib b/pineappl_py/docs/source/refs.bib
deleted file mode 100644
index e69de29bb..000000000
diff --git a/pineappl_py/package/Containerfile b/pineappl_py/package/Containerfile
deleted file mode 100644
index 49e5821a8..000000000
--- a/pineappl_py/package/Containerfile
+++ /dev/null
@@ -1,27 +0,0 @@
-FROM quay.io/pypa/manylinux2014_x86_64
-
-ARG MATURIN_TAR='maturin-x86_64-unknown-linux-musl.tar.gz'
-ARG MATURIN_TAG='v0.13.0-beta.9'
-
-# install c compiler
-# and create a dedicated user
-RUN /bin/bash -c "yum install gcc -y; \
- useradd -m pineappl;\
- su - pineappl"
-
-USER pineappl
-
-# install dependencies
-# - rust
-# - maturin
-RUN /bin/bash -c "cd ${HOME}; \
- mkdir -p local/bin; \
- curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y; \
- curl --remote-name -L https://github.com/PyO3/maturin/releases/download/${MATURIN_TAG}/${MATURIN_TAR}; \
- tar -xvzf ${MATURIN_TAR} --directory=local/bin/"
-
-COPY maturin /home/pineappl
-
-ENTRYPOINT ["/home/pineappl/maturin"]
-# To minimize the size of the wheel use '--strip'
-CMD ["build --release --interpreter 3.7 3.8 3.9 3.10 pypy3.7"]
diff --git a/pineappl_py/package/README.md b/pineappl_py/package/README.md
deleted file mode 100644
index 69defc670..000000000
--- a/pineappl_py/package/README.md
+++ /dev/null
@@ -1,89 +0,0 @@
-# Packaging python interface
-
-In order to compile wheels to distribute some requirements have to be met:
-
-- `linux`: the compilation process has to be run in a
- [`manylinux`](https://github.com/pypa/manylinux) compliant environment, for
- this reason a suitable container image is provided (see
- [published packages](https://github.com/orgs/NNPDF/packages?repo_name=pineappl)
- and the respective [`Containerfile`](./Containerfile))
-
- Notice that the default container provided by
- [pypa](https://github.com/pypa/manylinux) is not sufficient, since it does not
- ship a C compiler (required to compile the `syn` crate).
-- `macOS`: it just needs to be run in a macOS environment, see
- [publishing workflow](https://github.com/NNPDF/pineappl/tree/master/.github/workflows/wheels.yml)
-- `windows`: it just needs to be run in a windows environment, see
- [publishing workflow](https://github.com/NNPDF/pineappl/tree/master/.github/workflows/wheels.yml)
-
-## `maturin` container image
-
-`maturin` image has its own version (following [semver](https://semver.org/)),
-and:
-
-- it is based on `manylinux2014_x86_64`
-- build wheels for a range of CPython versions (the actual one depends on the
- `maturin` version inside the container)
-
-### Using `maturin` to compile for `manylinux`
-
-This is the easy part: you just need to download the
-[image](https://github.com/NNPDF/pineappl/pkgs/container/maturin) and run with
-your favorite container tool.
-
-Here the explicit commands with `podman` [[1]](#docker)
-
-```sh
-podman pull ghcr.io/n3pdf/maturin:latest
-podman run ghcr.io/n3pdf/maturin
-podman cp :root/pineappl/pineappl_py/target/wheels/ .
-```
-
-Now wheels are available outside the container and can be uploaded in your
-favorite way.
-
-#### Interactive use
-
-If you want to use the container environment interactively, you need to provide
-an alternative entry point:
-
-```sh
-podman run --entrypoint bash -it
-```
-
-### Create a new `maturin` image
-
-_Use case_: if a new rust or maturin version is released, it might be needed to
-upgrade also those inside the `maturin` image (since they are pre-installed in
-the image itself)
-
-To upgrade the build instructions ([_Containerfile_](./Containerfile)):
-
-- change `FROM` to choose a different manylinux base image (see the
- [official source](https://github.com/pypa/manylinux))
-- change `ARG` for `MATURIN_TAG` to choose a different `maturin` version
-- to change architecture both `FROM` and `ARG MATURIN_TAR=` have to be updated
- from `x86_64`
-- `rust` version is always taken to be the latest one at each build (so
- rerunning the build **without changing** anything might generate a **different
- image**)
-
-Once `Containerfile` has been updated then rerun:
-
-```sh
-# inside pineappl/pineappl_py/package
-podman build -t ghcr.io/n3pdf/maturin .
-podman tag ghcr.io/n3pdf/maturin:
-# login to GitHub registry with user credentials (not organization), see [2]
-echo ${PAT} | podman login ghcr.io -u --password-stdin
-# finally publish
-podman push ghcr.io/n3pdf/maturin:
-# and publish the new latest (all layers already available, it's just an alias)
-podman push ghcr.io/n3pdf/maturin:latest
-```
-
-[1]: In the following I will use `podman` as the container
-runtime for the examples. To use `docker` instead, you can simply replace
-`podman -> docker`, they have compatible subcommands
-[2]: official
-[GitHub registry docs](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry)
diff --git a/pineappl_py/package/maturin b/pineappl_py/package/maturin
deleted file mode 100755
index 2b33a9240..000000000
--- a/pineappl_py/package/maturin
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-export PATH=${HOME}/local/bin:${HOME}/.cargo/bin:${PATH}
-cd ${HOME}
-# clone pineappl code
-git clone https://github.com/NNPDF/pineappl.git
-# compile pineappl python package
-cd pineappl/pineappl_py
-maturin $@
diff --git a/pineappl_py/pineappl/__init__.py b/pineappl_py/pineappl/__init__.py
deleted file mode 100644
index deab97d90..000000000
--- a/pineappl_py/pineappl/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .pineappl import version as __version__
-from . import bin, fk_table, grid, import_only_subgrid, channel, subgrid
diff --git a/pineappl_py/pineappl/bin.py b/pineappl_py/pineappl/bin.py
deleted file mode 100644
index 0abb45630..000000000
--- a/pineappl_py/pineappl/bin.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import numpy as np
-
-from .pineappl import PyBinRemapper
-from .utils import PyWrapper
-
-
-class BinRemapper(PyWrapper):
- """
- Python wrapper object for :class:`~pineappl.pineappl.PyBinRemapper`.
-
- Parameters
- ----------
- normalizations : sequence(float)
- list with normalizations
- limits : list(tuple(float,float))
- all bin limits as a flat list
- """
-
- def __init__(self, normalizations, limits):
- self._raw = PyBinRemapper(np.array(normalizations), limits)
diff --git a/pineappl_py/pineappl/channel.py b/pineappl_py/pineappl/channel.py
deleted file mode 100644
index 8b23bb09f..000000000
--- a/pineappl_py/pineappl/channel.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from .pineappl import PyChannel
-from .utils import PyWrapper
-
-
-class Channel(PyWrapper):
- """
- Python wrapper object to :class:`~pineappl.pineappl.PyChannel`.
-
- Parameters
- ----------
- entry : list(tuple(int,int,float))
- sequence describing a channel combination.
- """
-
- def __init__(self, entry):
- self._raw = PyChannel(entry)
diff --git a/pineappl_py/pineappl/fk_table.py b/pineappl_py/pineappl/fk_table.py
deleted file mode 100644
index 96eed4c9a..000000000
--- a/pineappl_py/pineappl/fk_table.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from .pineappl import PyFkTable, PyFkAssumptions
-from .utils import PyWrapper
-
-
-class FkTable(PyWrapper):
- """Python wrapper object to interface :class:`~pineappl.pineappl.PyFkTable`.
-
- Parameters
- ----------
- pyfktable : PyFkTable
- raw wrapper object
- """
-
- def __init__(self, pyfktable):
- self._raw = pyfktable
-
- @classmethod
- def from_grid(cls, grid):
- return cls(PyFkTable(grid.raw))
-
- @classmethod
- def read(cls, path):
- """Load an existing grid from file.
-
- Convenience wrapper for :meth:`pineappl.pineappl.PyFkTable.read()`.
-
- Parameters
- ----------
- path : pathlike
- file path
-
- Returns
- -------
- FkTable
- grid object
- """
- return cls(PyFkTable.read(path))
-
- def optimize(self, assumptions="Nf6Ind"):
- """Optimize FK table storage.
-
- In order to perform any relevant optimization, assumptions are needed, and they are passed
- as parameters to the function.
-
- Parameters
- ----------
- assumptions : FkAssumptions or str
- assumptions about the FkTable properties, declared by the user, deciding which
- optimizations are possible
- """
- if not isinstance(assumptions, FkAssumptions):
- assumptions = FkAssumptions(assumptions)
- return self._raw.optimize(assumptions._raw)
-
-
-class FkAssumptions(PyWrapper):
- """Python wrapper object to interface :class:`~pineappl.pineappl.PyFkAssumptions`.
-
- Parameters
- ----------
- assumption : str
- assumption identifier
- """
-
- def __init__(self, assumption):
- self._raw = PyFkAssumptions(assumption)
diff --git a/pineappl_py/pineappl/grid.py b/pineappl_py/pineappl/grid.py
deleted file mode 100644
index e3fdd78b4..000000000
--- a/pineappl_py/pineappl/grid.py
+++ /dev/null
@@ -1,390 +0,0 @@
-import numpy as np
-
-from .fk_table import FkTable
-from .pineappl import PyGrid, PyOrder, PyOperatorSliceInfo, PyPidBasis
-from .utils import PyWrapper
-
-
-class Order(PyWrapper):
- r"""Python wrapper object to interface :class:`~pineappl.pineappl.PyOrder`.
-
- Parameters
- ----------
- alphas : int
- power of :math:`\alpha_s`
- alpha : int
- power of :math:`\alpha`
- logxir : int
- power of :math:`\log(\xi_r)`
- logxif : int
- power of :math:`\log(\xi_f)`
- """
-
- def __init__(self, alphas, alpha, logxir, logxif):
- self._raw = PyOrder(alphas, alpha, logxir, logxif)
-
- @staticmethod
- def create_mask(orders, max_as, max_al, logs):
- r"""Return a mask suitable to pass as the `order_mask` parameter.
-
- Parameters
- ----------
- orders : list(Order)
- list of available orders
- max_as : int
- maximum power of :math:`\alpha_s`
- max_al : int
- maximum power of :math:`\alpha`
- logs : bool
- whether to include log grids or not
-
- Returns
- -------
- list(bool)
- boolean mask
-
- """
- return PyOrder.create_mask([o._raw for o in orders], max_as, max_al, logs)
-
-
-class Grid(PyWrapper):
- r"""Python wrapper object to interface :class:`~pineappl.pineappl.PyGrid`.
-
- To create an object, you should call either :meth:`create`
- or :meth:`read`.
-
- Parameters
- ----------
- pygrid : PyGrid
- raw wrapper object
- """
-
- def __init__(self, pygrid):
- self._raw = pygrid
-
- @classmethod
- def create(cls, channels, orders, bin_limits, subgrid_params):
- """Create a grid object from its ingredients.
-
- Parameters
- ---------
- channels : list(Channel)
- List of active channels
- orders: list(Order)
- List of available orders
- bin_limits: sequence(float)
- Bin limits
- subgrid_params : SubgridParams
- subgrid parameters
- """
- channels = [centry.raw for centry in channels]
- orders = [o.raw for o in orders]
- return cls(PyGrid(channels, orders, np.array(bin_limits), subgrid_params.raw))
-
- def subgrid(self, order, bin_, channel):
- """Retrieve the subgrid at the given position.
-
- Convenience wrapper for :meth:`pineappl.pineappl.PyGrid.set_subgrid()`.
-
- Parameters
- ----------
- order : int
- index of order
- bin_ : int
- index of bin
- channel : int
- index of channel
-
- Returns
- -------
- subgrid : Subgrid
- subgrid content
- """
- return self.raw.subgrid(order, bin_, channel)
-
- def __getitem__(self, key):
- """Retrieve the subgrid at the given position.
-
- Syntactic sugar for :meth:`subgrid`.
-
- Parameters
- ----------
- key : (int, int, int)
- a 3-element integers tuple, consisting of `(order, bin, channel)`
-
- Returns
- -------
- subgrid : Subgrid
- subgrid content
- """
- if len(key) != 3:
- raise ValueError("A tuple with `(order, bin, channel)` is required as key.")
-
- return self.subgrid(*key)
-
- def set_subgrid(self, order, bin_, channel, subgrid):
- """Set the subgrid at the given position.
-
- Convenience wrapper for :meth:`pineappl.pineappl.PyGrid.set_subgrid()`.
-
- Parameters
- ----------
- order : int
- index of order
- bin_ : int
- index of bin
- channel : int
- index of channel
- subgrid : ImportOnlySubgridV1
- subgrid content
- """
- self.raw.set_subgrid(order, bin_, channel, subgrid.into())
-
- def __setitem__(self, key, subgrid):
- """Set the subgrid at the given position.
-
- Syntactic sugar for :meth:`set_subgrid`
-
- Parameters
- ----------
- key : (int, int, int)
- a 3-element integers tuple, consisting of `(order, bin, channel)`
- subgrid : ImportOnlySubgridV1
- subgrid content
- """
- if len(key) != 3:
- raise ValueError("A tuple with `(order, bin, channel)` is required as key.")
-
- self.set_subgrid(*key, subgrid)
-
- def set_remapper(self, remapper):
- """Set the normalizations.
-
- Convenience wrapper for :meth:`pineappl.pineappl.PyGrid.set_remapper()`.
-
- Parameters
- ----------
- remapper: BinRemapper
- Remapper object
- """
- self.raw.set_remapper(remapper.raw)
-
- def orders(self):
- """Extract the available perturbative orders and scale variations.
-
- Convenience wrapper for :meth:`pineappl.pineappl.PyGrid.orders()`.
-
- Parameters
- ----------
- list(Order) :
- list with perturbative orders and scale variations
- """
- return [Order(*pyorder.as_tuple()) for pyorder in self.raw.orders()]
-
- def convolve_with_one(
- self,
- pdg_id,
- xfx,
- alphas,
- order_mask=np.array([], dtype=bool),
- bin_indices=np.array([], dtype=np.uint64),
- channel_mask=np.array([], dtype=bool),
- xi=((1.0, 1.0),),
- ):
- r"""Convolve with a single distribution.
-
- Parameters
- ----------
- pdg_id : int
- PDG Monte Carlo ID of the hadronic particle
- xfx : callable
- lhapdf like callable with arguments `pid, x, Q2` returning x*pdf for :math:`x`-grid
- alphas : callable
- lhapdf like callable with arguments `Q2` returning :math:`\alpha_s`
- order_mask : sequence(bool)
- Mask for selecting specific orders. The value `True` means the corresponding order
- is included. An empty list corresponds to all orders being enabled.
- bin_indices : sequence(int)
- A list with the indices of the corresponding bins that should be calculated. An
- empty list means that all orders should be calculated.
- channel_mask : sequence(bool)
- Mask for selecting specific channels. The value `True` means the
- corresponding channel is included. An empty list corresponds to all channels being
- enabled.
- xi : list((float, float))
- A list with the scale variation factors that should be used to calculate
- scale-varied results. The first entry of a tuple corresponds to the variation of
- the renormalization scale, the second entry to the variation of the factorization
- scale. If only results for the central scale are need the list should contain
- `(1.0, 1.0)`.
-
- Returns
- -------
- list(float) :
- cross sections for all bins, for each scale-variation tuple (first all bins, then
- the scale variation)
- """
- return self.raw.convolve_with_one(
- pdg_id,
- xfx,
- alphas,
- np.array(order_mask),
- np.array(bin_indices),
- np.array(channel_mask),
- xi,
- )
-
- def convolve_with_two(
- self,
- pdg_id1,
- xfx1,
- pdg_id2,
- xfx2,
- alphas,
- order_mask=np.array([], dtype=bool),
- bin_indices=np.array([], dtype=np.uint64),
- channel_mask=np.array([], dtype=bool),
- xi=((1.0, 1.0),),
- ):
- r"""Convolve with two distributions.
-
- Parameters
- ----------
- pdg_id1 : int
- PDG Monte Carlo ID of the first hadronic particle
- xfx1 : callable
- lhapdf like callable with arguments `pid, x, Q2` returning x*pdf for :math:`x`-grid
- pdg_id2 : int
- PDG Monte Carlo ID of the second hadronic particle
- xfx2 : callable
- lhapdf like callable with arguments `pid, x, Q2` returning x*pdf for :math:`x`-grid
- alphas : callable
- lhapdf like callable with arguments `Q2` returning :math:`\alpha_s`
- order_mask : sequence(bool)
- Mask for selecting specific orders. The value `True` means the corresponding order
- is included. An empty list corresponds to all orders being enabled.
- bin_indices : sequence(int)
- A list with the indices of the corresponding bins that should be calculated. An
- empty list means that all orders should be calculated.
- channel_mask : sequence(bool)
- Mask for selecting specific channels. The value `True` means the
- corresponding channel is included. An empty list corresponds to all channels being
- enabled.
- xi : list((float, float))
- A list with the scale variation factors that should be used to calculate
- scale-varied results. The first entry of a tuple corresponds to the variation of
- the renormalization scale, the second entry to the variation of the factorization
- scale. If only results for the central scale are needed the list should contain
- `(1.0, 1.0)`.
-
- Returns
- -------
- list(float) :
- cross sections for all bins, for each scale-variation tuple (first all bins, then
- the scale variation)
- """
- return self.raw.convolve_with_two(
- pdg_id1,
- xfx1,
- pdg_id2,
- xfx2,
- alphas,
- np.array(order_mask),
- np.array(bin_indices),
- np.array(channel_mask),
- xi,
- )
-
- def evolve(
- self,
- operators,
- mur2_grid,
- alphas_values,
- pid_basis="pdg_mc_ids",
- order_mask=(),
- xi=(1.0, 1.0),
- ):
- """Create an FKTable with the EKO.
-
- Convenience wrapper for :meth:`pineappl.pineappl.PyGrid.evolve()`.
-
- Parameters
- ----------
- operators : dict
- EKO Output
- mur2_grid : list[float]
- renormalization scales
- alphas_values : list[float]
- alpha_s values associated to the renormalization scales
- pid_basis : str
- kind of channel types (e.g. "pdg_mc_ids" for flavor basis, "evol"
- for evolution basis)
- order_mask : list(bool)
- Mask for selecting specific orders. The value `True` means the corresponding order
- is included. An empty list corresponds to all orders being enabled.
- xi : (float, float)
- A tuple with the scale variation factors that should be used.
- The first entry of a tuple corresponds to the variation of
- the renormalization scale, the second entry to the variation of the factorization
- scale. If only results for the central scale are need the tuple should be
- `(1.0, 1.0)`.
-
- Returns
- ------
- PyFkTable :
- raw grid as an FKTable
- """
- operator_grid = np.array(
- [op["operators"] for op in operators["Q2grid"].values()]
- )
- q2grid = list(operators["Q2grid"].keys())
- return FkTable(
- self.raw.evolve(
- np.array(operator_grid),
- operators["q2_ref"],
- np.array(operators["inputpids"], dtype=np.int32),
- np.array(operators["inputgrid"]),
- np.array(q2grid, dtype=np.float64),
- np.array(operators["targetpids"], dtype=np.int32),
- np.array(operators["targetgrid"]),
- np.array(mur2_grid, dtype=np.float64),
- np.array(alphas_values, dtype=np.float64),
- xi,
- pid_basis,
- np.array(order_mask, dtype=bool),
- )
- )
-
- @classmethod
- def read(cls, path):
- """Load an existing grid from file.
-
- Convenience wrapper for :meth:`pineappl.pineappl.PyGrid.read()`.
-
- Parameters
- ----------
- path : pathlike
- file path
-
- Returns
- -------
- Grid :
- grid object
- """
- return cls(PyGrid.read(path))
-
- def merge(self, other: "Grid"):
- """Merge a second grid in the current one."""
- self.raw.merge(other.raw)
-
- def delete_bins(self, bin_indices):
- """Delete bins.
-
- Repeated bins and those exceeding length are ignored.
-
- Parameters
- ----------
- bin_indices : sequence(int)
- list of indices of bins to removed
- """
- self.raw.delete_bins(np.array(bin_indices, dtype=np.uint))
diff --git a/pineappl_py/pineappl/import_only_subgrid.py b/pineappl_py/pineappl/import_only_subgrid.py
deleted file mode 100644
index 5baf8893c..000000000
--- a/pineappl_py/pineappl/import_only_subgrid.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import numpy as np
-
-from .pineappl import PyImportOnlySubgridV1
-from .pineappl import PyImportOnlySubgridV2
-from .utils import PyWrapper
-
-
-class ImportOnlySubgridV1(PyWrapper):
- """Python wrapper object to :class:`~pineappl.pineappl.PyImportOnlySubgridV1`.
-
- Parameters
- ----------
- array : numpy.ndarray(float, dim=3)
- 3-dimensional subgrid content
- q2_grid : sequence(float)
- scale grid
- x1_grid : sequence(float)
- interpolation grid for :math:`x_1`
- x2_grid : sequence(float)
- interpolation grid for :math:`x_2`
- """
-
- def __init__(self, array, q2_grid, x1_grid, x2_grid):
- self._raw = PyImportOnlySubgridV1(
- np.array(array), np.array(q2_grid), np.array(x1_grid), np.array(x2_grid)
- )
-
-class ImportOnlySubgridV2(PyWrapper):
- """Python wrapper object to :class:`~pineappl.pineappl.PyImportOnlySubgridV2`.
-
- Parameters
- ----------
- array : numpy.ndarray(float, dim=3)
- 3-dimensional subgrid content
- mu2_grid : sequence(float)
- scale grid
- x1_grid : sequence(float)
- interpolation grid for :math:`x_1`
- x2_grid : sequence(float)
- interpolation grid for :math:`x_2`
- """
-
- def __init__(self, array, mu2_grid, x1_grid, x2_grid):
- self._raw = PyImportOnlySubgridV2(
- np.array(array), mu2_grid, np.array(x1_grid), np.array(x2_grid)
- )
diff --git a/pineappl_py/pineappl/subgrid.py b/pineappl_py/pineappl/subgrid.py
deleted file mode 100644
index 642940b46..000000000
--- a/pineappl_py/pineappl/subgrid.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from .pineappl import PySubgridParams, PyMu2
-from .utils import PyWrapper
-
-
-class SubgridParams(PyWrapper):
- """Python wrapper object to :class:`~pineappl.pineappl.PySubgridParams`."""
-
- def __init__(self):
- self._raw = PySubgridParams()
-
-class Mu2(PyWrapper):
-
- def __init__(self, ren, fac):
- self._raw = PyMu2(ren, fac)
diff --git a/pineappl_py/pineappl/utils.py b/pineappl_py/pineappl/utils.py
deleted file mode 100644
index 20449b39b..000000000
--- a/pineappl_py/pineappl/utils.py
+++ /dev/null
@@ -1,18 +0,0 @@
-"""Helper tools."""
-
-class PyWrapper:
- """Python wrapper helper to delegate function calls to the underlying raw object."""
-
- _raw = None
-
- @property
- def raw(self):
- """Raw PyO3 object"""
- return self._raw
-
- def __getattr__(self, name):
- """Delegate function calls down."""
- if name[0] != "_":
- return self._raw.__getattribute__(name)
- else:
- raise AttributeError
diff --git a/pineappl_py/pyproject.toml b/pineappl_py/pyproject.toml
index cb0886107..2b0864f26 100644
--- a/pineappl_py/pyproject.toml
+++ b/pineappl_py/pyproject.toml
@@ -28,8 +28,6 @@ cli = ["pineappl-cli"]
docs = [
"sphinx>=6.2.1",
"sphinx_rtd_theme>=1.2.2",
- "sphinxcontrib-bibtex>=2.5.0",
- "nbsphinx>=0.9.2",
]
test = ["pytest", "pytest-cov"]
diff --git a/pineappl_py/src/bin.rs b/pineappl_py/src/bin.rs
index 426b029d4..17398e175 100644
--- a/pineappl_py/src/bin.rs
+++ b/pineappl_py/src/bin.rs
@@ -1,22 +1,17 @@
-use pineappl::bin::BinRemapper;
+//! Binning interface.
use numpy::{PyArrayMethods, PyReadonlyArray1};
+use pineappl::bin::BinRemapper;
use pyo3::prelude::*;
/// PyO3 wrapper to :rustdoc:`pineappl::bin::BinRemapper `.
-#[pyclass]
+#[pyclass(name = "BinRemapper")]
#[derive(Clone)]
#[repr(transparent)]
pub struct PyBinRemapper {
pub(crate) bin_remapper: BinRemapper,
}
-impl PyBinRemapper {
- pub(crate) fn new(bin_remapper: BinRemapper) -> Self {
- Self { bin_remapper }
- }
-}
-
#[pymethods]
impl PyBinRemapper {
/// Constructor.
@@ -28,7 +23,22 @@ impl PyBinRemapper {
/// limits : list(tuple(float, float))
/// bin limits
#[new]
- pub fn new_f64(normalizations: PyReadonlyArray1, limits: Vec<(f64, f64)>) -> Self {
- Self::new(BinRemapper::new(normalizations.to_vec().unwrap(), limits).unwrap())
+ pub fn new(normalizations: PyReadonlyArray1<f64>, limits: Vec<(f64, f64)>) -> Self {
+ Self {
+ bin_remapper: BinRemapper::new(normalizations.to_vec().unwrap(), limits).unwrap(),
+ }
}
}
+
+/// Register submodule in parent.
+pub fn register(parent_module: &Bound<'_, PyModule>) -> PyResult<()> {
+ let m = PyModule::new_bound(parent_module.py(), "bin")?;
+ m.setattr(pyo3::intern!(m.py(), "__doc__"), "Binning interface.")?;
+ pyo3::py_run!(
+ parent_module.py(),
+ m,
+ "import sys; sys.modules['pineappl.bin'] = m"
+ );
+ m.add_class::<PyBinRemapper>()?;
+ parent_module.add_submodule(&m)
+}
diff --git a/pineappl_py/src/boc.rs b/pineappl_py/src/boc.rs
new file mode 100644
index 000000000..343aa71bb
--- /dev/null
+++ b/pineappl_py/src/boc.rs
@@ -0,0 +1,144 @@
+//! Interface for bins, orders and channels.
+
+use numpy::{IntoPyArray, PyArray1};
+use pineappl::boc::{Channel, Order};
+use pyo3::prelude::*;
+
+/// PyO3 wrapper to :rustdoc:`pineappl::boc::Channel <boc/struct.Channel.html>`.
+///
+/// Each entry consists of a tuple, which contains, in the following order:
+///
+/// 1. the PDG id of the first incoming parton
+/// 2. the PDG id of the second parton
+/// 3. a numerical factor that will multiply the result for this specific combination.
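+///
+/// For example, the entry ``[(2, 2, 1.0)]`` describes two incoming up quarks
+/// contributing with unit weight (an illustrative example, not taken from the
+/// crate documentation).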
+#[pyclass(name = "Channel")]
+#[repr(transparent)]
+pub struct PyChannel {
+ pub(crate) entry: Channel,
+}
+
+#[pymethods]
+impl PyChannel {
+ /// Constructor.
+ ///
+ /// Parameters
+ /// ----------
+ /// entry: list(tuple(int, int, float))
+ /// channel configuration
+ #[new]
+ pub fn new(entry: Vec<(i32, i32, f64)>) -> Self {
+ Self {
+ entry: Channel::new(entry),
+ }
+ }
+
+ /// Get list representation.
+ ///
+ /// Returns
+ /// -------
+ /// list(tuple(int,int,float)) :
+ /// list representation
+ pub fn into_array(&self) -> Vec<(i32, i32, f64)> {
+ self.entry.entry().to_vec()
+ }
+}
+
+/// Register submodule in parent.
+pub fn register(parent_module: &Bound<'_, PyModule>) -> PyResult<()> {
+ let m = PyModule::new_bound(parent_module.py(), "boc")?;
+ m.setattr(
+ pyo3::intern!(m.py(), "__doc__"),
+ "Interface for bins, orders and channels.",
+ )?;
+ pyo3::py_run!(
+ parent_module.py(),
+ m,
+ "import sys; sys.modules['pineappl.channel'] = m"
+ );
+ m.add_class::<PyChannel>()?;
+ parent_module.add_submodule(&m)
+}
+
+/// PyO3 wrapper to :rustdoc:`pineappl::boc::Order <boc/struct.Order.html>`.
+#[pyclass(name = "Order")]
+#[repr(transparent)]
+pub struct PyOrder {
+ pub(crate) order: Order,
+}
+
+impl PyOrder {
+ pub(crate) fn new(order: Order) -> Self {
+ Self { order }
+ }
+}
+
+#[pymethods]
+impl PyOrder {
+ /// Constructor.
+ ///
+ /// Parameters
+ /// ----------
+ /// alphas : int
+ /// power of :math:`\alpha_s`
+ /// alpha : int
+ /// power of :math:`\alpha`
+ /// logxir : int
+ /// power of :math:`\ln(\xi_r)`
+ /// logxif : int
+ /// power of :math:`\ln(\xi_f)`
+ #[new]
+ pub fn new_order(alphas: u32, alpha: u32, logxir: u32, logxif: u32) -> Self {
+ Self::new(Order::new(alphas, alpha, logxir, logxif))
+ }
+
+ /// Tuple representation.
+ ///
+ /// Returns
+ /// -------
+ /// alphas : int
+ /// power of :math:`\alpha_s`
+ /// alpha : int
+ /// power of :math:`\alpha`
+ /// logxir : int
+ /// power of :math:`\ln(\xi_r)`
+ /// logxif : int
+ /// power of :math:`\ln(\xi_f)`
+ pub fn as_tuple(&self) -> (u32, u32, u32, u32) {
+ (
+ self.order.alphas,
+ self.order.alpha,
+ self.order.logxir,
+ self.order.logxif,
+ )
+ }
+
+ /// Return a mask suitable to pass as the `order_mask` parameter of [`Grid::convolve`].
+ ///
+ /// The selection of `orders` is controlled using the `max_as` and `max_al` parameters, for
+ /// instance setting `max_as = 1` and `max_al = 0` selects the LO QCD only, `max_as = 2` and
+ /// `max_al = 0` the NLO QCD; setting `max_as = 3` and `max_al = 2` would select all NLOs, and
+ /// the NNLO QCD.
+ ///
+ /// See `pineappl` crate docs for more examples.
+ ///
+ /// Returns
+ /// -------
+ /// numpy.ndarray(bool)
+ /// boolean array, to be used as orders' mask
+ #[staticmethod]
+ pub fn create_mask<'py>(
+ orders: Vec<PyRef<PyOrder>>,
+ max_as: u32,
+ max_al: u32,
+ logs: bool,
+ py: Python<'py>,
+ ) -> Bound<'py, PyArray1<bool>> {
+ Order::create_mask(
+ &orders.iter().map(|o| o.order.clone()).collect::<Vec<_>>(),
+ max_as,
+ max_al,
+ logs,
+ )
+ .into_pyarray_bound(py)
+ }
+}
diff --git a/pineappl_py/src/channel.rs b/pineappl_py/src/channel.rs
deleted file mode 100644
index 684aa4714..000000000
--- a/pineappl_py/src/channel.rs
+++ /dev/null
@@ -1,46 +0,0 @@
-use pineappl::boc::Channel;
-
-use pyo3::prelude::*;
-
-/// PyO3 wrapper to :rustdoc:`pineappl::boc::Channel `.
-///
-/// Each entry consists of a tuple, which contains, in the following order:
-///
-/// 1. the PDG id of the first incoming parton
-/// 2. the PDG id of the second parton
-/// 3. a numerical factor that will multiply the result for this specific combination.
-#[pyclass]
-#[repr(transparent)]
-pub struct PyChannel {
- pub(crate) entry: Channel,
-}
-
-impl PyChannel {
- pub(crate) fn new(entry: Channel) -> Self {
- Self { entry }
- }
-}
-
-#[pymethods]
-impl PyChannel {
- /// Constructor.
- ///
- /// Parameters
- /// ----------
- /// entry: list(tuple(int, int, float))
- /// channel configuration
- #[new]
- pub fn new_entry(entry: Vec<(i32, i32, f64)>) -> Self {
- Self::new(Channel::new(entry))
- }
-
- /// Get list representation.
- ///
- /// Returns
- /// -------
- /// list(tuple(int,int,float)) :
- /// list representation
- pub fn into_array(&self) -> Vec<(i32, i32, f64)> {
- self.entry.entry().to_vec()
- }
-}
diff --git a/pineappl_py/src/evolution.rs b/pineappl_py/src/evolution.rs
index 24723f38a..4a28a3de6 100644
--- a/pineappl_py/src/evolution.rs
+++ b/pineappl_py/src/evolution.rs
@@ -1,10 +1,64 @@
+//! Evolution interface.
+
+use super::pids::PyPidBasis;
use numpy::{IntoPyArray, PyArray1};
use pineappl::evolution::{EvolveInfo, OperatorSliceInfo};
-
use pyo3::prelude::*;
+/// PyO3 wrapper to :rustdoc:`pineappl::evolution::OperatorSliceInfo <evolution/struct.OperatorSliceInfo.html>`.
+#[pyclass(name = "OperatorSliceInfo")]
+#[derive(Clone)]
+#[repr(transparent)]
+pub struct PyOperatorSliceInfo {
+ pub(crate) info: OperatorSliceInfo,
+}
+
+#[pymethods]
+impl PyOperatorSliceInfo {
+ /// Constructor.
+ ///
+ /// Parameters
+ /// ----------
+ /// fac0 : float
+ /// initial factorization scale
+ /// pids0 : list(int)
+ /// flavors available at the initial scale
+ /// x0 : list(float)
+ /// x-grid at the initial scale
+ /// fac1 : float
+ /// evolved final scale
+ /// pids1 : list(int)
+ /// flavors available at the final scale
+ /// x1 : list(float)
+ /// x-grid at the final scale
+ /// pid_basis : PyPidBasis
+ /// flavor basis representation at the initial scale
+ #[new]
+ pub fn new(
+ fac0: f64,
+ pids0: Vec<i32>,
+ x0: Vec<f64>,
+ fac1: f64,
+ pids1: Vec<i32>,
+ x1: Vec<f64>,
+ pid_basis: PyPidBasis,
+ ) -> Self {
+ Self {
+ info: OperatorSliceInfo {
+ fac0,
+ pids0,
+ x0,
+ fac1,
+ pids1,
+ x1,
+ pid_basis: pid_basis.into(),
+ },
+ }
+ }
+}
+
/// PyO3 wrapper to :rustdoc:`pineappl::evolution::EvolveInfo `.
-#[pyclass]
+#[pyclass(name = "EvolveInfo")]
#[repr(transparent)]
pub struct PyEvolveInfo {
pub(crate) evolve_info: EvolveInfo,
@@ -37,10 +91,16 @@ impl PyEvolveInfo {
}
}
-/// PyO3 wrapper to :rustdoc:`pineappl::evolution::OperatorSliceInfo `.
-#[pyclass]
-#[repr(transparent)]
-#[derive(Clone)]
-pub struct PyOperatorSliceInfo {
- pub(crate) slice_info: OperatorSliceInfo,
+/// Register submodule in parent.
+pub fn register(parent_module: &Bound<'_, PyModule>) -> PyResult<()> {
+ let m = PyModule::new_bound(parent_module.py(), "evolution")?;
+ m.setattr(pyo3::intern!(m.py(), "__doc__"), "Evolution interface.")?;
+ pyo3::py_run!(
+ parent_module.py(),
+ m,
+ "import sys; sys.modules['pineappl.evolution'] = m"
+ );
+ m.add_class::<PyEvolveInfo>()?;
+ m.add_class::<PyOperatorSliceInfo>()?;
+ parent_module.add_submodule(&m)
}
diff --git a/pineappl_py/src/fk_table.rs b/pineappl_py/src/fk_table.rs
index 5eaafe282..33af7ffe7 100644
--- a/pineappl_py/src/fk_table.rs
+++ b/pineappl_py/src/fk_table.rs
@@ -1,27 +1,26 @@
+//! FK table interface.
+
+use super::grid::PyGrid;
+use numpy::{IntoPyArray, PyArray1, PyArray4, PyArrayMethods, PyReadonlyArray1};
use pineappl::convolutions::LumiCache;
use pineappl::fk_table::{FkAssumptions, FkTable};
use pineappl::grid::Grid;
-
-use numpy::{IntoPyArray, PyArray1, PyArray4, PyArrayMethods, PyReadonlyArray1};
use pyo3::prelude::*;
-
use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;
use std::path::PathBuf;
use std::str::FromStr;
-use crate::grid::PyGrid;
-
/// PyO3 wrapper to :rustdoc:`pineappl::fk_table::FkTable `.
-#[pyclass]
+#[pyclass(name = "FkTable")]
#[repr(transparent)]
pub struct PyFkTable {
pub(crate) fk_table: FkTable,
}
-/// PyO3 wrapper to :rustdoc:`pineappl::fk_table::PyFkAssumptions `.
-#[pyclass]
+/// PyO3 wrapper to :rustdoc:`pineappl::fk_table::FkAssumptions <fk_table/struct.FkAssumptions.html>`.
+#[pyclass(name = "FkAssumptions")]
#[repr(transparent)]
pub struct PyFkAssumptions {
pub(crate) fk_assumptions: FkAssumptions,
@@ -260,15 +259,15 @@ impl PyFkTable {
pub fn convolve_with_two<'py>(
&self,
pdg_id1: i32,
- xfx1: &PyAny,
+ xfx1: &Bound<'py, PyAny>,
pdg_id2: i32,
- xfx2: &PyAny,
+ xfx2: &Bound<'py, PyAny>,
bin_indices: Option>,
channel_mask: Option>,
py: Python<'py>,
) -> Bound<'py, PyArray1> {
- let mut xfx1 = |id, x, q2| f64::extract(xfx1.call1((id, x, q2)).unwrap()).unwrap();
- let mut xfx2 = |id, x, q2| f64::extract(xfx2.call1((id, x, q2)).unwrap()).unwrap();
+ let mut xfx1 = |id, x, q2| xfx1.call1((id, x, q2)).unwrap().extract().unwrap();
+ let mut xfx2 = |id, x, q2| xfx2.call1((id, x, q2)).unwrap().extract().unwrap();
let mut alphas = |_| 1.0;
let mut lumi_cache =
LumiCache::with_two(pdg_id1, &mut xfx1, pdg_id2, &mut xfx2, &mut alphas);
@@ -295,3 +294,17 @@ impl PyFkTable {
self.fk_table.optimize(assumptions.fk_assumptions)
}
}
+
+/// Register submodule in parent.
+pub fn register(parent_module: &Bound<'_, PyModule>) -> PyResult<()> {
+ let m = PyModule::new_bound(parent_module.py(), "fk_table")?;
+ m.setattr(pyo3::intern!(m.py(), "__doc__"), "FK table interface.")?;
+ pyo3::py_run!(
+ parent_module.py(),
+ m,
+ "import sys; sys.modules['pineappl.fk_table'] = m"
+ );
+ m.add_class::<PyFkTable>()?;
+ m.add_class::<PyFkAssumptions>()?;
+ parent_module.add_submodule(&m)
+}
diff --git a/pineappl_py/src/grid.rs b/pineappl_py/src/grid.rs
index 6ef35f8c1..dbca4db7d 100644
--- a/pineappl_py/src/grid.rs
+++ b/pineappl_py/src/grid.rs
@@ -1,198 +1,32 @@
-use ndarray::CowArray;
-use pineappl::boc::Order;
-use pineappl::convolutions::LumiCache;
-use pineappl::evolution::{AlphasTable, OperatorInfo, OperatorSliceInfo};
-use pineappl::grid::{Grid, Ntuple};
-use pineappl::pids::PidBasis;
+//! Grid interface.
use super::bin::PyBinRemapper;
-use super::channel::PyChannel;
-use super::evolution::PyEvolveInfo;
+use super::boc::{PyChannel, PyOrder};
+use super::evolution::{PyEvolveInfo, PyOperatorSliceInfo};
use super::fk_table::PyFkTable;
use super::subgrid::{PySubgridEnum, PySubgridParams};
-
use itertools::izip;
-use numpy::{
- IntoPyArray, PyArray1, PyArrayMethods, PyReadonlyArray1, PyReadonlyArray4, PyReadonlyArray5,
-};
-
+use ndarray::CowArray;
+use numpy::{IntoPyArray, PyArray1, PyArrayMethods, PyReadonlyArray1, PyReadonlyArray4};
+use pineappl::convolutions::LumiCache;
+use pineappl::evolution::AlphasTable;
+use pineappl::grid::{Grid, Ntuple};
+use pyo3::exceptions::PyValueError;
+use pyo3::prelude::*;
+use pyo3::types::PyIterator;
use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;
use std::path::PathBuf;
-use pyo3::exceptions::PyValueError;
-use pyo3::prelude::*;
-use pyo3::types::PyIterator;
-
-/// PyO3 wrapper to :rustdoc:`pineappl::grid::Order `.
-#[pyclass]
-#[repr(transparent)]
-pub struct PyOrder {
- pub(crate) order: Order,
-}
-
-// TODO: should probably be in a different module
-// TODO: rename to `PidBasis`
-#[pyclass]
-#[derive(Clone)]
-pub enum PyPidBasis {
- Pdg,
- Evol,
-}
-
-impl From for PidBasis {
- fn from(basis: PyPidBasis) -> Self {
- match basis {
- PyPidBasis::Pdg => Self::Pdg,
- PyPidBasis::Evol => Self::Evol,
- }
- }
-}
-
-// TODO: should probably be in a different module
-// TODO: rename to `OperatorSliceInfo`
-#[pyclass]
-#[derive(Clone)]
-pub struct PyOperatorSliceInfo {
- info: OperatorSliceInfo,
-}
-
-#[pymethods]
-impl PyOperatorSliceInfo {
- /// Constructor.
- ///
- /// Parameteters
- /// ------------
- /// fac0 : float
- /// initial factorization scale
- /// pids0 : list(int)
- /// flavors available at the initial scale
- /// x0 : list(float)
- /// x-grid at the initial scale
- /// fac1 : float
- /// evolved final scale
- /// pids1 : list(int)
- /// flavors available at the final scale
- /// x1 : list(float)
- /// x-grid at the final scale
- /// pid_basis : PyPidBasis
- /// flavor basis reprentation at the initial scale
- #[new]
- pub fn new(
- fac0: f64,
- pids0: Vec,
- x0: Vec,
- fac1: f64,
- pids1: Vec,
- x1: Vec,
- pid_basis: PyPidBasis,
- ) -> Self {
- Self {
- info: OperatorSliceInfo {
- fac0,
- pids0,
- x0,
- fac1,
- pids1,
- x1,
- pid_basis: pid_basis.into(),
- },
- }
- }
-}
-
-impl PyOrder {
- pub(crate) fn new(order: Order) -> Self {
- Self { order }
- }
-}
-
-#[pymethods]
-impl PyOrder {
- /// Constructor.
- ///
- /// Parameters
- /// ----------
- /// alphas : int
- /// power of :math:`\alpha_s`
- /// alpha : int
- /// power of :math:`\alpha`
- /// logxir : int
- /// power of :math:`\ln(\xi_r)`
- /// logxif : int
- /// power of :math:`\ln(\xi_f)`
- #[new]
- pub fn new_order(alphas: u32, alpha: u32, logxir: u32, logxif: u32) -> Self {
- Self::new(Order::new(alphas, alpha, logxir, logxif))
- }
-
- /// Tuple representation.
- ///
- /// Returns
- /// -------
- /// alphas : int
- /// power of :math:`\alpha_s`
- /// alpha : int
- /// power of :math:`\alpha`
- /// logxir : int
- /// power of :math:`\ln(\xi_r)`
- /// logxif : int
- /// power of :math:`\ln(\xi_f)`
- pub fn as_tuple(&self) -> (u32, u32, u32, u32) {
- (
- self.order.alphas,
- self.order.alpha,
- self.order.logxir,
- self.order.logxif,
- )
- }
-
- /// Return a mask suitable to pass as the `order_mask` parameter of [`Grid::convolve`].
- ///
- /// The selection of `orders` is controlled using the `max_as` and `max_al` parameters, for
- /// instance setting `max_as = 1` and `max_al = 0` selects the LO QCD only, `max_as = 2` and
- /// `max_al = 0` the NLO QCD; setting `max_as = 3` and `max_al = 2` would select all NLOs, and
- /// the NNLO QCD.
- ///
- /// See `pineappl` crate docs for more examples.
- ///
- /// Returns
- /// -------
- /// numpy.ndarray(bool)
- /// boolean array, to be used as orders' mask
- #[staticmethod]
- pub fn create_mask<'py>(
- orders: Vec>,
- max_as: u32,
- max_al: u32,
- logs: bool,
- py: Python<'py>,
- ) -> Bound<'py, PyArray1> {
- Order::create_mask(
- &orders.iter().map(|o| o.order.clone()).collect::>(),
- max_as,
- max_al,
- logs,
- )
- .into_pyarray_bound(py)
- }
-}
-
/// PyO3 wrapper to :rustdoc:`pineappl::grid::Grid `.
-#[pyclass]
+#[pyclass(name = "Grid")]
#[repr(transparent)]
#[derive(Clone)]
pub struct PyGrid {
pub(crate) grid: Grid,
}
-impl PyGrid {
- pub(crate) fn new(grid: Grid) -> Self {
- Self { grid }
- }
-}
-
#[pymethods]
impl PyGrid {
/// Constructor.
@@ -214,12 +48,14 @@ impl PyGrid {
bin_limits: PyReadonlyArray1,
subgrid_params: PySubgridParams,
) -> Self {
- Self::new(Grid::new(
- channels.iter().map(|pyc| pyc.entry.clone()).collect(),
- orders.iter().map(|pyo| pyo.order.clone()).collect(),
- bin_limits.to_vec().unwrap(),
- subgrid_params.subgrid_params,
- ))
+ Self {
+ grid: Grid::new(
+ channels.iter().map(|pyc| pyc.entry.clone()).collect(),
+ orders.iter().map(|pyo| pyo.order.clone()).collect(),
+ bin_limits.to_vec().unwrap(),
+ subgrid_params.subgrid_params,
+ ),
+ }
}
/// Add a point to the grid.
@@ -425,15 +261,16 @@ impl PyGrid {
/// numpy.ndarray(float) :
/// cross sections for all bins, for each scale-variation tuple (first all bins, then
/// the scale variation)
+ #[pyo3(signature = (pdg_id, xfx, alphas, order_mask = None, bin_indices = None, channel_mask = None, xi = None))]
pub fn convolve_with_one<'py>(
&self,
pdg_id: i32,
xfx: &Bound<'py, PyAny>,
alphas: &Bound<'py, PyAny>,
- order_mask: PyReadonlyArray1,
- bin_indices: PyReadonlyArray1,
- channel_mask: PyReadonlyArray1,
- xi: Vec<(f64, f64)>,
+ order_mask: Option<PyReadonlyArray1<bool>>,
+ bin_indices: Option<PyReadonlyArray1<usize>>,
+ channel_mask: Option<PyReadonlyArray1<bool>>,
+ xi: Option<Vec<(f64, f64)>>,
py: Python<'py>,
) -> Bound<'py, PyArray1> {
let mut xfx = |id, x, q2| xfx.call1((id, x, q2)).unwrap().extract().unwrap();
@@ -443,10 +280,10 @@ impl PyGrid {
self.grid
.convolve(
&mut lumi_cache,
- &order_mask.to_vec().unwrap(),
- &bin_indices.to_vec().unwrap(),
- &channel_mask.to_vec().unwrap(),
- &xi,
+ &order_mask.map_or(vec![], |b| b.to_vec().unwrap()),
+ &bin_indices.map_or(vec![], |c| c.to_vec().unwrap()),
+ &channel_mask.map_or(vec![], |d| d.to_vec().unwrap()),
+ &xi.map_or(vec![(1.0, 1.0)], |m| m),
)
.into_pyarray_bound(py)
}
@@ -487,6 +324,7 @@ impl PyGrid {
/// numpy.ndarray(float) :
/// cross sections for all bins, for each scale-variation tuple (first all bins, then
/// the scale variation)
+ #[pyo3(signature = (pdg_id1, xfx1, pdg_id2, xfx2, alphas, order_mask = None, bin_indices = None, channel_mask = None, xi = None))]
pub fn convolve_with_two<'py>(
&self,
pdg_id1: i32,
@@ -494,10 +332,10 @@ impl PyGrid {
pdg_id2: i32,
xfx2: &Bound<'py, PyAny>,
alphas: &Bound<'py, PyAny>,
- order_mask: PyReadonlyArray1<bool>,
- bin_indices: PyReadonlyArray1<usize>,
- channel_mask: PyReadonlyArray1<bool>,
- xi: Vec<(f64, f64)>,
+ order_mask: Option<PyReadonlyArray1<bool>>,
+ bin_indices: Option<PyReadonlyArray1<usize>>,
+ channel_mask: Option<PyReadonlyArray1<bool>>,
+ xi: Option<Vec<(f64, f64)>>,
py: Python<'py>,
) -> Bound<'py, PyArray1<f64>> {
let mut xfx1 = |id, x, q2| xfx1.call1((id, x, q2)).unwrap().extract().unwrap();
@@ -509,90 +347,14 @@ impl PyGrid {
self.grid
.convolve(
&mut lumi_cache,
- &order_mask.to_vec().unwrap(),
- &bin_indices.to_vec().unwrap(),
- &channel_mask.to_vec().unwrap(),
- &xi,
+ &order_mask.map_or(vec![], |b| b.to_vec().unwrap()),
+ &bin_indices.map_or(vec![], |c| c.to_vec().unwrap()),
+ &channel_mask.map_or(vec![], |d| d.to_vec().unwrap()),
+ &xi.map_or(vec![(1.0, 1.0)], |m| m),
)
.into_pyarray_bound(py)
}
- /// Convolve with an evolution operator.
- ///
- /// Parameters
- /// ----------
- /// operator : numpy.ndarray(int, rank=5)
- /// evolution tensor
- /// fac0 : float
- /// reference scale
- /// pids0 : numpy.ndarray(int)
- /// sorting of the particles in the tensor for final FkTable
- /// x0 : numpy.ndarray(float)
- /// final FKTable interpolation grid
- /// fac1 : numpy.ndarray(float)
- /// list of factorization scales
- /// pids1 : numpy.ndarray(int)
- /// sorting of the particles in the grid
- /// x1 : numpy.ndarray(float)
- /// interpolation grid at process level
- /// ren1 : numpy.ndarray(float)
- /// list of renormalization scales
- /// alphas : numpy.ndarray(float)
- /// list with :math:`\alpha_s(Q2)` for the process scales
- /// xi : (float, float)
- /// factorization and renormalization variation
- /// pid_basis : str
- /// type of channel identifier
- /// order_mask : numpy.ndarray(bool)
- /// boolean mask to activate orders
- ///
- /// Returns
- /// -------
- /// PyFkTable :
- /// produced FK table
- #[deprecated(since = "0.7.4", note = "use evolve_with_slice_iter instead")]
- pub fn evolve(
- &self,
- operator: PyReadonlyArray5<f64>,
- fac0: f64,
- pids0: PyReadonlyArray1<i32>,
- x0: PyReadonlyArray1<f64>,
- fac1: PyReadonlyArray1<f64>,
- pids1: PyReadonlyArray1<i32>,
- x1: PyReadonlyArray1<f64>,
- ren1: PyReadonlyArray1<f64>,
- alphas: PyReadonlyArray1<f64>,
- xi: (f64, f64),
- pid_basis: String,
- order_mask: PyReadonlyArray1<bool>,
- ) -> PyFkTable {
- let op_info = OperatorInfo {
- fac0: fac0,
- pids0: pids0.to_vec().unwrap(),
- x0: x0.to_vec().unwrap(),
- fac1: fac1.to_vec().unwrap(),
- pids1: pids1.to_vec().unwrap(),
- x1: x1.to_vec().unwrap(),
- ren1: ren1.to_vec().unwrap(),
- alphas: alphas.to_vec().unwrap(),
- xir: xi.0,
- xif: xi.1,
- pid_basis: pid_basis.parse().unwrap(),
- };
-
- let evolved_grid = self
- .grid
- .evolve(
- operator.as_array(),
- &op_info,
- order_mask.as_slice().unwrap(),
- )
- .expect("Nothing returned from evolution.");
- PyFkTable {
- fk_table: evolved_grid,
- }
- }
-
/// Collect information for convolution with an evolution operator.
///
/// Parameters
@@ -739,7 +501,9 @@ impl PyGrid {
/// grid
#[staticmethod]
pub fn read(path: PathBuf) -> Self {
- Self::new(Grid::read(BufReader::new(File::open(path).unwrap())).unwrap())
+ Self {
+ grid: Grid::read(BufReader::new(File::open(path).unwrap())).unwrap(),
+ }
}
/// Write to file.
@@ -775,20 +539,6 @@ impl PyGrid {
}
}
- /// Merge with another grid, loaded from file.
- ///
- /// Note
- /// ----
- /// For a current limitation with the implementation of the bound object `Grid` is not possible
- /// to operate with two `Grid`s in memory, since is not possible to pass a `Grid` by argument
- #[deprecated = "Deprecated in favor of PyGrid::merge"]
- pub fn merge_from_file(&mut self, path: PathBuf) -> PyResult<()> {
- match self.grid.merge(Self::read(path).grid) {
- Ok(()) => Ok(()),
- Err(x) => Err(PyValueError::new_err(format!("{:?}", x))),
- }
- }
-
/// Extract the number of dimensions for bins.
///
/// E.g.: two differential cross-sections will return 2.
@@ -914,3 +664,17 @@ impl PyGrid {
self.grid.delete_bins(&bin_indices.to_vec().unwrap())
}
}
+
+/// Register submodule in parent.
+pub fn register(parent_module: &Bound<'_, PyModule>) -> PyResult<()> {
+ let m = PyModule::new_bound(parent_module.py(), "grid")?;
+ m.setattr(pyo3::intern!(m.py(), "__doc__"), "Grid interface.")?;
+ pyo3::py_run!(
+ parent_module.py(),
+ m,
+ "import sys; sys.modules['pineappl.grid'] = m"
+ );
+ m.add_class::<PyOrder>()?;
+ m.add_class::<PyGrid>()?;
+ parent_module.add_submodule(&m)
+}
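
Taken together, the `grid.rs` changes mean that a grid is now built directly with the `Grid` constructor and that the convolution masks default to sensible values. The following is a minimal Python sketch of the resulting call pattern, using only names that appear in this diff and in the updated tests further down; the zero-returning callbacks are placeholders, not part of the library:

```python
import numpy as np
import pineappl

# build a toy grid with the plain constructor (Grid.create() is gone)
channels = [pineappl.boc.Channel([(1, 21, 0.1)])]
orders = [pineappl.grid.Order(3, 0, 0, 0)]
bin_limits = np.array([1e-7, 1e-3, 1.0])
grid = pineappl.grid.Grid(
    channels, orders, bin_limits, pineappl.subgrid.SubgridParams()
)

# order_mask, bin_indices, channel_mask and xi are now optional; omitting
# them convolves every order, bin and channel at xi = (1.0, 1.0)
result = grid.convolve_with_one(
    2212,                    # PDG id of the (single) hadron
    lambda pid, x, q2: 0.0,  # placeholder PDF callback
    lambda q2: 0.0,          # placeholder alpha_s callback
)
```
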
diff --git a/pineappl_py/src/import_only_subgrid.rs b/pineappl_py/src/import_only_subgrid.rs
index 6b81060ff..b58a006cc 100644
--- a/pineappl_py/src/import_only_subgrid.rs
+++ b/pineappl_py/src/import_only_subgrid.rs
@@ -1,5 +1,6 @@
-use super::subgrid::PySubgridEnum;
+//! PyImportOnlySubgrid* interface.
+use super::subgrid::PySubgridEnum;
use numpy::{PyArrayMethods, PyReadonlyArray1, PyReadonlyArray3};
use pineappl::import_only_subgrid::ImportOnlySubgridV1;
use pineappl::import_only_subgrid::ImportOnlySubgridV2;
@@ -8,7 +9,7 @@ use pineappl::subgrid::Mu2;
use pyo3::prelude::*;
/// PyO3 wrapper to :rustdoc:`pineappl::import_only_subgrid::ImportOnlySubgridV2 `.
-#[pyclass]
+#[pyclass(name = "ImportOnlySubgridV2")]
#[derive(Clone)]
#[repr(transparent)]
pub struct PyImportOnlySubgridV2 {
@@ -79,7 +80,7 @@ impl PyImportOnlySubgridV2 {
}
/// PyO3 wrapper to :rustdoc:`pineappl::import_only_subgrid::ImportOnlySubgridV1 `.
-#[pyclass]
+#[pyclass(name = "ImportOnlySubgridV1")]
#[derive(Clone)]
#[repr(transparent)]
pub struct PyImportOnlySubgridV1 {
@@ -149,3 +150,20 @@ impl PyImportOnlySubgridV1 {
}
}
}
+
+/// Register submodule in parent.
+pub fn register(parent_module: &Bound<'_, PyModule>) -> PyResult<()> {
+ let m = PyModule::new_bound(parent_module.py(), "import_only_subgrid")?;
+ m.setattr(
+ pyo3::intern!(m.py(), "__doc__"),
+ "ImportOnlySubgrid* interface.",
+ )?;
+ pyo3::py_run!(
+ parent_module.py(),
+ m,
+ "import sys; sys.modules['pineappl.import_only_subgrid'] = m"
+ );
+ m.add_class::<PyImportOnlySubgridV1>()?;
+ m.add_class::<PyImportOnlySubgridV2>()?;
+ parent_module.add_submodule(&m)
+}
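
The renamed subgrid classes are reached through the `pineappl.import_only_subgrid` submodule and, per the changelog entry, need an explicit `.into()` before being attached to a grid. A short sketch mirroring the updated tests (the toy grid and random array are illustrative only):

```python
import numpy as np
import pineappl

channels = [pineappl.boc.Channel([(1, 21, 1.0)])]
orders = [pineappl.grid.Order(0, 0, 0, 0)]
grid = pineappl.grid.Grid(
    channels, orders, np.array([1e-7, 1e-3, 1.0]), pineappl.subgrid.SubgridParams()
)

# toy subgrid data
x1s = np.linspace(0.1, 1, 2)
x2s = np.linspace(0.5, 1, 2)
Q2s = np.linspace(10, 20, 2)
array = np.random.rand(len(Q2s), len(x1s), len(x2s))

# the class is exposed under pineappl.import_only_subgrid and has to be
# converted with .into() before it can be attached to the grid
subgrid = pineappl.import_only_subgrid.ImportOnlySubgridV1(array, Q2s, x1s, x2s)
grid.set_subgrid(0, 0, 0, subgrid.into())
```
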
diff --git a/pineappl_py/src/lib.rs b/pineappl_py/src/lib.rs
index 54db39f56..5c0c5e1d2 100644
--- a/pineappl_py/src/lib.rs
+++ b/pineappl_py/src/lib.rs
@@ -1,35 +1,29 @@
-// this is needed for PyO3 to work
+//! Generate PyO3 interface.
+
#![allow(unsafe_op_in_unsafe_fn)]
use pyo3::prelude::*;
pub mod bin;
-pub mod channel;
+pub mod boc;
pub mod evolution;
pub mod fk_table;
pub mod grid;
pub mod import_only_subgrid;
+pub mod pids;
pub mod subgrid;
/// PyO3 Python module that contains all exposed classes from Rust.
-///
-/// NOTE: this name has to match the one in Cargo.toml 'lib.name'
#[pymodule]
fn pineappl(m: &Bound<'_, PyModule>) -> PyResult<()> {
- m.add_class::()?;
- m.add_class::()?;
- m.add_class::()?;
- m.add_class::()?;
- m.add_class::()?;
- m.add_class::()?;
- m.add_class::()?;
- m.add_class::()?;
- m.add_class::()?;
- m.add_class::()?;
- m.add_class::()?;
- m.add_class::()?;
- m.add_class::()?;
- m.add_class::()?;
+ bin::register(m)?;
+ boc::register(m)?;
+ grid::register(m)?;
+ import_only_subgrid::register(m)?;
+ evolution::register(m)?;
+ fk_table::register(m)?;
+ pids::register(m)?;
+ subgrid::register(m)?;
m.add("version", env!("CARGO_PKG_VERSION"))?;
Ok(())
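
With the flat class list replaced by per-module `register` calls, each Rust submodule installs itself into `sys.modules`, so the exposed classes are addressed through their submodules. A hedged sketch of the resulting import surface, restricted to names visible in this diff and the tests below:

```python
import pineappl

# each submodule registers itself in sys.modules, so after importing
# pineappl both attribute access and direct submodule imports work
from pineappl.boc import Channel

order = pineappl.grid.Order(0, 0, 0, 0)
channel = Channel([(2, 2, 0.5)])
basis = pineappl.pids.PidBasis.Evol

# the version string is still attached to the top-level module
print(pineappl.version)
```
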
diff --git a/pineappl_py/src/pids.rs b/pineappl_py/src/pids.rs
new file mode 100644
index 000000000..81b3b67e4
--- /dev/null
+++ b/pineappl_py/src/pids.rs
@@ -0,0 +1,36 @@
+//! PIDs interface.
+
+use pineappl::pids::PidBasis;
+use pyo3::prelude::*;
+
+/// PyO3 wrapper to :rustdoc:`pineappl::pids::PidBasis `.
+#[pyclass(name = "PidBasis")]
+#[derive(Clone)]
+pub enum PyPidBasis {
+ /// PDG Monte Carlo IDs.
+ Pdg,
+ /// NNPDF's evolution basis IDs.
+ Evol,
+}
+
+impl From<PyPidBasis> for PidBasis {
+ fn from(basis: PyPidBasis) -> Self {
+ match basis {
+ PyPidBasis::Pdg => Self::Pdg,
+ PyPidBasis::Evol => Self::Evol,
+ }
+ }
+}
+
+/// Register submodule in parent.
+pub fn register(parent_module: &Bound<'_, PyModule>) -> PyResult<()> {
+ let m = PyModule::new_bound(parent_module.py(), "pids")?;
+ m.setattr(pyo3::intern!(m.py(), "__doc__"), "PIDs interface.")?;
+ pyo3::py_run!(
+ parent_module.py(),
+ m,
+ "import sys; sys.modules['pineappl.pids'] = m"
+ );
+ m.add_class::<PyPidBasis>()?;
+ parent_module.add_submodule(&m)
+}
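
The new `pineappl.pids.PidBasis` enum replaces the old `pineappl.grid.PyPidBasis`; its variants convert into the Rust `PidBasis` via the `From` impl above. A small usage sketch, following the updated `test_fk_table.py` below (the empty lists passed to `OperatorSliceInfo` are the same placeholders used there):

```python
import pineappl

# the two variants of the new enum
pdg_basis = pineappl.pids.PidBasis.Pdg
evol_basis = pineappl.pids.PidBasis.Evol

# e.g. selecting the PID basis of an evolution-operator slice
info = pineappl.evolution.OperatorSliceInfo(
    1.0, [], [], 1.0, [], [], pdg_basis
)
```
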
diff --git a/pineappl_py/src/subgrid.rs b/pineappl_py/src/subgrid.rs
index eb323ce81..5eb89a78a 100644
--- a/pineappl_py/src/subgrid.rs
+++ b/pineappl_py/src/subgrid.rs
@@ -1,46 +1,27 @@
+//! Subgrid interface.
+
use ndarray::Array3;
use numpy::{IntoPyArray, PyArray1, PyArray3};
use pineappl::subgrid::Mu2;
use pineappl::subgrid::{Subgrid, SubgridEnum, SubgridParams};
use pyo3::prelude::*;
-/// PyO3 wrapper to :rustdoc:`pineappl::subgrid::SubgridParams `
-#[pyclass]
+/// PyO3 wrapper to :rustdoc:`pineappl::subgrid::SubgridParams `
+#[pyclass(name = "SubgridParams")]
+#[derive(Clone)]
#[repr(transparent)]
pub struct PySubgridParams {
pub(crate) subgrid_params: SubgridParams,
}
-impl PySubgridParams {
- pub(crate) fn new(subgrid_params: SubgridParams) -> Self {
- Self { subgrid_params }
- }
-}
-
-impl Clone for PySubgridParams {
- fn clone(&self) -> Self {
- let mut subgrid_params = SubgridParams::default();
- subgrid_params.set_q2_bins(self.subgrid_params.q2_bins());
- subgrid_params.set_q2_max(self.subgrid_params.q2_max());
- subgrid_params.set_q2_min(self.subgrid_params.q2_min());
- subgrid_params.set_q2_order(self.subgrid_params.q2_order());
- subgrid_params.set_reweight(self.subgrid_params.reweight());
- subgrid_params.set_x_bins(self.subgrid_params.x_bins());
- subgrid_params.set_x_max(self.subgrid_params.x_max());
- subgrid_params.set_x_min(self.subgrid_params.x_min());
- subgrid_params.set_x_order(self.subgrid_params.x_order());
- Self { subgrid_params }
- }
-}
-
#[pymethods]
impl PySubgridParams {
/// Constructor using the defaults.
#[new]
pub fn default() -> Self {
- let subgrid_params = SubgridParams::default();
-
- Self::new(subgrid_params)
+ Self {
+ subgrid_params: SubgridParams::default(),
+ }
}
/// Set number of :math:`Q^2` bins.
@@ -135,10 +116,10 @@ impl PySubgridParams {
}
/// PyO3 wrapper to :rustdoc:`pineappl::subgrid::Mu2 `
-#[pyclass]
+#[pyclass(name = "Mu2")]
#[repr(transparent)]
pub struct PyMu2 {
- pub mu2: Mu2,
+ pub(crate) mu2: Mu2,
}
#[pymethods]
@@ -182,7 +163,7 @@ impl PyMu2 {
}
/// PyO3 wrapper to :rustdoc:`pineappl::subgrid::SubgridEnum `
-#[pyclass]
+#[pyclass(name = "SubgridEnum")]
#[derive(Clone)]
#[repr(transparent)]
pub struct PySubgridEnum {
@@ -231,3 +212,18 @@ impl PySubgridEnum {
PyArray1::from_slice_bound(py, &self.subgrid_enum.x2_grid())
}
}
+
+/// Register submodule in parent.
+pub fn register(parent_module: &Bound<'_, PyModule>) -> PyResult<()> {
+ let m = PyModule::new_bound(parent_module.py(), "subgrid")?;
+ m.setattr(pyo3::intern!(m.py(), "__doc__"), "Subgrid interface.")?;
+ pyo3::py_run!(
+ parent_module.py(),
+ m,
+ "import sys; sys.modules['pineappl.subgrid'] = m"
+ );
+ m.add_class::<PySubgridParams>()?;
+ m.add_class::<PyMu2>()?;
+ m.add_class::<PySubgridEnum>()?;
+ parent_module.add_submodule(&m)
+}
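
After the rename, `SubgridParams`, `Mu2` and `SubgridEnum` are exposed through `pineappl.subgrid`, and an extracted subgrid can be inspected from Python. A sketch mirroring the updated subgrid tests below (random toy data, illustrative only):

```python
import numpy as np
import pineappl

channels = [pineappl.boc.Channel([(1, 2, 1.0)])]
orders = [pineappl.grid.Order(0, 0, 0, 0)]
grid = pineappl.grid.Grid(
    channels, orders, np.array([0.0, 1.0]), pineappl.subgrid.SubgridParams()
)

x1s = np.linspace(0.1, 1, 2)
x2s = np.linspace(0.5, 1, 2)
mu2s = [(q2, q2) for q2 in np.linspace(10, 20, 2)]
array = np.random.rand(2, 2, 2)
grid.set_subgrid(
    0, 0, 0,
    pineappl.import_only_subgrid.ImportOnlySubgridV2(array, mu2s, x1s, x2s).into(),
)

# read the subgrid back: Mu2 objects expose .ren and .fac, and the nodes and
# raw values are available through x1_grid(), x2_grid() and to_array3()
extracted = grid.subgrid(0, 0, 0)
fac_scales = [mu2.fac for mu2 in extracted.mu2_grid()]
values = extracted.to_array3()
```
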
diff --git a/pineappl_py/tests/test_bin.py b/pineappl_py/tests/test_bin.py
index e8484a924..66044c782 100644
--- a/pineappl_py/tests/test_bin.py
+++ b/pineappl_py/tests/test_bin.py
@@ -8,7 +8,6 @@ def test_init(self):
br = pineappl.bin.BinRemapper(np.array([1.0]), [(2, 3)])
assert isinstance(br, pineappl.bin.BinRemapper)
- assert isinstance(br.raw, pineappl.pineappl.PyBinRemapper)
with pytest.raises(AttributeError):
br._bla()
diff --git a/pineappl_py/tests/test_boc.py b/pineappl_py/tests/test_boc.py
new file mode 100644
index 000000000..9b00171d5
--- /dev/null
+++ b/pineappl_py/tests/test_boc.py
@@ -0,0 +1,7 @@
+import pineappl
+
+
+class TestChannel:
+ def test_init(self):
+ le = pineappl.boc.Channel([(2, 2, 0.5)])
+ assert isinstance(le, pineappl.boc.Channel)
diff --git a/pineappl_py/tests/test_channel.py b/pineappl_py/tests/test_channel.py
deleted file mode 100644
index 16dce2e38..000000000
--- a/pineappl_py/tests/test_channel.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import pineappl
-
-
-class TestChannel:
- def test_init(self):
- le = pineappl.channel.Channel([(2, 2, 0.5)])
-
- assert isinstance(le, pineappl.channel.Channel)
- assert isinstance(le.raw, pineappl.pineappl.PyChannel)
diff --git a/pineappl_py/tests/test_fk_table.py b/pineappl_py/tests/test_fk_table.py
index 9eeac5a5a..af047ed8d 100644
--- a/pineappl_py/tests/test_fk_table.py
+++ b/pineappl_py/tests/test_fk_table.py
@@ -5,11 +5,11 @@
class TestFkTable:
def fake_grid(self, bins=None):
- channels = [pineappl.channel.Channel([(1, 21, 1.0)])]
+ channels = [pineappl.boc.Channel([(1, 21, 1.0)])]
orders = [pineappl.grid.Order(0, 0, 0, 0)]
bin_limits = np.array([1e-7, 1e-3, 1] if bins is None else bins, dtype=float)
subgrid_params = pineappl.subgrid.SubgridParams()
- g = pineappl.grid.Grid.create(channels, orders, bin_limits, subgrid_params)
+ g = pineappl.grid.Grid(channels, orders, bin_limits, subgrid_params)
return g
def test_convolve_with_one(self):
@@ -24,8 +24,8 @@ def test_convolve_with_one(self):
xs,
np.array([1.0]),
)
- g.set_subgrid(0, 0, 0, subgrid)
- fk = pineappl.fk_table.FkTable.from_grid(g)
+ g.set_subgrid(0, 0, 0, subgrid.into())
+ fk = pineappl.fk_table.FkTable(g)
np.testing.assert_allclose(
fk.convolve_with_one(2212, lambda pid, x, q2: 0.0),
[0.0] * 2,
@@ -35,8 +35,8 @@ def test_convolve_with_one(self):
[5e7 / 9999, 0.0],
)
- info = pineappl.grid.PyOperatorSliceInfo(
- 1.0, [], [], 1.0, [], [], pineappl.grid.PyPidBasis.Pdg
+ info = pineappl.evolution.OperatorSliceInfo(
+ 1.0, [], [], 1.0, [], [], pineappl.pids.PidBasis.Pdg
)
# TODO: write a better test
diff --git a/pineappl_py/tests/test_grid.py b/pineappl_py/tests/test_grid.py
index 5208674c3..f0678db99 100644
--- a/pineappl_py/tests/test_grid.py
+++ b/pineappl_py/tests/test_grid.py
@@ -10,23 +10,21 @@ def test_init(self):
o = pineappl.grid.Order(*args)
assert isinstance(o, pineappl.grid.Order)
- assert isinstance(o.raw, pineappl.pineappl.PyOrder)
assert o.as_tuple() == args
class TestGrid:
def fake_grid(self, bins=None):
- channels = [pineappl.channel.Channel([(1, 21, 0.1)])]
+ channels = [pineappl.boc.Channel([(1, 21, 0.1)])]
orders = [pineappl.grid.Order(3, 0, 0, 0)]
bin_limits = np.array([1e-7, 1e-3, 1] if bins is None else bins, dtype=float)
subgrid_params = pineappl.subgrid.SubgridParams()
- g = pineappl.grid.Grid.create(channels, orders, bin_limits, subgrid_params)
+ g = pineappl.grid.Grid(channels, orders, bin_limits, subgrid_params)
return g
def test_init(self):
g = self.fake_grid()
assert isinstance(g, pineappl.grid.Grid)
- assert isinstance(g.raw, pineappl.pineappl.PyGrid)
# orders
assert len(g.orders()) == 1
assert g.orders()[0].as_tuple() == (3, 0, 0, 0)
@@ -43,7 +41,7 @@ def test_set_subgrid(self):
np.array(xs),
np.array([1.0]),
)
- g.set_subgrid(0, 0, 0, subgrid)
+ g.set_subgrid(0, 0, 0, subgrid.into())
# let's mix it for fun with an hadronic one
x1s = np.linspace(0.1, 1, 2)
@@ -52,7 +50,7 @@ def test_set_subgrid(self):
subgrid = pineappl.import_only_subgrid.ImportOnlySubgridV1(
np.random.rand(len(Q2s), len(x1s), len(x2s)), Q2s, x1s, x2s
)
- g.set_subgrid(0, 1, 0, subgrid)
+ g.set_subgrid(0, 1, 0, subgrid.into())
g.optimize()
def test_set_key_value(self):
@@ -64,7 +62,7 @@ def test_set_key_value(self):
def test_bins(self):
g = self.fake_grid()
# 1D
- normalizations = [1.0] * 2
+ normalizations = np.array([1.0, 1.0])
limits = [(1, 1), (2, 2)]
remapper = pineappl.bin.BinRemapper(normalizations, limits)
g.set_remapper(remapper)
@@ -93,7 +91,7 @@ def test_convolve_with_one(self):
xs,
np.array([1.0]),
)
- g.set_subgrid(0, 0, 0, subgrid)
+ g.set_subgrid(0, 0, 0, subgrid.into())
np.testing.assert_allclose(
g.convolve_with_one(2212, lambda pid, x, q2: 0.0, lambda q2: 0.0),
[0.0] * 2,
diff --git a/pineappl_py/tests/test_sugrid.py b/pineappl_py/tests/test_sugrid.py
index f8dd84720..91a0d423b 100644
--- a/pineappl_py/tests/test_sugrid.py
+++ b/pineappl_py/tests/test_sugrid.py
@@ -7,18 +7,17 @@
class TestSubgridParams:
def test_init(self):
sp = pineappl.subgrid.SubgridParams()
-
assert isinstance(sp, pineappl.subgrid.SubgridParams)
- assert isinstance(sp.raw, pineappl.pineappl.PySubgridParams)
def test_issue_164(pdf):
- channels = [pineappl.channel.Channel([(1, 2, 1.0)])]
+ channels = [pineappl.boc.Channel([(1, 2, 1.0)])]
orders = [pineappl.grid.Order(0, 0, 0, 0)]
params = pineappl.subgrid.SubgridParams()
def convolve_grid():
- grid = pineappl.grid.Grid.create(channels, orders, [0.0, 1.0], params)
+ bin_limits = np.array([0.0, 1.0])
+ grid = pineappl.grid.Grid(channels, orders, bin_limits, params)
grid.fill(0.2, 0.2, 10, 0, 0.5, 0, 0.5)
return grid.convolve_with_one(2212, pdf.xfxQ, pdf.alphasQ)
@@ -31,42 +30,46 @@ def convolve_grid():
res = convolve_grid()
assert pytest.approx(res) != 0.0
+
class TestSubgrid:
def fake_grid(self):
- channels = [pineappl.channel.Channel([(1, 2, 1.0)])]
+ channels = [pineappl.boc.Channel([(1, 2, 1.0)])]
orders = [pineappl.grid.Order(0, 0, 0, 0)]
params = pineappl.subgrid.SubgridParams()
- grid = pineappl.grid.Grid.create(channels, orders, [0.0, 1.0], params)
+ bin_limits = np.array([0.0, 1.0])
+ grid = pineappl.grid.Grid(channels, orders, bin_limits, params)
return grid
-
+
def fake_importonlysubgrid(self):
x1s = np.linspace(0.1, 1, 2)
x2s = np.linspace(0.5, 1, 2)
Q2s = np.linspace(10, 20, 2)
mu2s = [tuple([q2, q2]) for q2 in Q2s]
array = np.random.rand(len(Q2s), len(x1s), len(x2s))
- subgrid = pineappl.import_only_subgrid.ImportOnlySubgridV2(array, mu2s , x1s, x2s)
+ subgrid = pineappl.import_only_subgrid.ImportOnlySubgridV2(
+ array, mu2s, x1s, x2s
+ )
return subgrid, [x1s, x2s, mu2s, array]
def test_subgrid_methods(self):
grid = self.fake_grid()
test_subgrid, infos = self.fake_importonlysubgrid()
x1s, x2s, mu2s, _ = (obj for obj in infos)
- grid.set_subgrid(0,0,0, test_subgrid)
- extr_subgrid = grid.subgrid(0,0,0)
+ grid.set_subgrid(0, 0, 0, test_subgrid.into())
+ extr_subgrid = grid.subgrid(0, 0, 0)
facgrid = np.array([mu2.fac for mu2 in extr_subgrid.mu2_grid()])
rengrid = np.array([mu2.ren for mu2 in extr_subgrid.mu2_grid()])
np.testing.assert_allclose([mu2[0] for mu2 in mu2s], rengrid)
np.testing.assert_allclose([mu2[1] for mu2 in mu2s], facgrid)
np.testing.assert_allclose(extr_subgrid.x1_grid(), x1s)
np.testing.assert_allclose(extr_subgrid.x2_grid(), x2s)
-
+
def test_to_array3(self):
grid = self.fake_grid()
test_subgrid, infos = self.fake_importonlysubgrid()
_, _, _, array = (obj for obj in infos)
- grid.set_subgrid(0,0,0, test_subgrid)
- extr_subgrid = grid.subgrid(0,0,0)
+ grid.set_subgrid(0, 0, 0, test_subgrid.into())
+ extr_subgrid = grid.subgrid(0, 0, 0)
test_array = extr_subgrid.to_array3()
print(test_array)
print(array)