feat: Implement the hashes key for archive URLs
edgarrmondragon committed Jan 24, 2024
1 parent 7818fb2 commit 8f86f64
Showing 9 changed files with 187 additions and 83 deletions.
7 changes: 3 additions & 4 deletions .github/workflows/test.yaml
@@ -94,6 +94,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
id: setup-python
with:
cache: pip
python-version: ${{ matrix.python-version }}
@@ -102,12 +103,10 @@
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
-pipx install hatch
+pipx install --python=${{ steps.setup-python.outputs.python-path }} hatch
- name: Run tests
-env:
-HATCH_ENV: "test"
run: |
-hatch run +py=${{ matrix.python-version }} cov
+hatch run cov
- uses: actions/upload-artifact@v4
with:
name: coverage-data-${{ matrix.os }}-${{ matrix.python-version }}
3 changes: 1 addition & 2 deletions .pre-commit-config.yaml
@@ -45,10 +45,9 @@ repos:
- id: validate_manifest

- repo: https://github.com/tox-dev/pyproject-fmt
rev: "1.6.0"
rev: "1.7.0"
hooks:
- id: pyproject-fmt
args: ["--max-supported-python", "3.13"]

- repo: https://github.com/crate-ci/typos
rev: v1.17.0
29 changes: 28 additions & 1 deletion README.md
@@ -24,6 +24,32 @@ pip install pep610

## Usage

You can use `pep610.read_from_distribution` to parse the [Direct URL Origin structure][pep610-structure] from a `Distribution` object:

```python
from importlib import metadata

import pep610

dist = metadata.distribution('pep610')
data = pep610.read_from_distribution(dist)

if isinstance(data, pep610.DirData):
print(f"URL: {data.url}")
print(f"Editable: {data.dir_info.editable}")
elif isinstance(data, pep610.VCSData):
print(f"URL: {data.url}")
print(f"VCS: {data.vcs_info.vcs}")
print(f"Commit: {data.vcs_info.commit_id}")
elif isinstance(data, pep610.ArchiveData):
print(f"URL: {data.url}")
print(f"Hashes: {data.archive_info.hashes}")
else:
print("Unknown data")
```

Or, in Python 3.10+ using pattern matching:

```python
from importlib import metadata

@@ -42,7 +68,7 @@ match data:
print(f"Commit: {vcs_info.commit_id}")
case pep610.ArchiveData(url, archive_info):
print(f"URL: {url}")
print(f"Hash: {archive_info.hash}")
print(f"Hashes: {archive_info.hashes}")
case _:
print("Unknown data")
```
@@ -53,4 +79,5 @@ match data:

[pep610]: https://www.python.org/dev/peps/pep-0610/
[pep610-pypa]: https://packaging.python.org/en/latest/specifications/direct-url/#direct-url
[pep610-structure]: https://packaging.python.org/en/latest/specifications/direct-url-data-structure/
[pypa-specs]: https://packaging.python.org/en/latest/specifications/
13 changes: 9 additions & 4 deletions pyproject.toml
@@ -11,6 +11,7 @@ description = "Python helpers for PEP 610"
readme = "README.md"
keywords = [
"pep",
"pep610",
]
license = {file = "LICENSE"}
authors = [{ name = "Edgar Ramírez Mondragón", email = "edgarrmondragon@hey.com" }]
@@ -33,6 +34,7 @@ dynamic = [
"version",
]
dependencies = [
'importlib-resources>=5.3; python_version < "3.9"',
]
optional-dependencies.dev = [
"coverage[toml]>=6.5",
@@ -47,20 +49,20 @@ urls.Source = "https://github.com/unknown/pep610"
[tool.hatch.version]
source = "vcs"

-[tool.hatch.envs.test]
+[tool.hatch.envs.default]
dependencies = [
"coverage[toml]>=6.5",
]
features = ["dev"]
-[tool.hatch.envs.test.overrides]
+[tool.hatch.envs.default.overrides]
matrix.python.env-vars = [
{ key = "COVERAGE_CORE", value = "sysmon", if = ["3.12", "3.13"] }
]
-[tool.hatch.envs.test.scripts]
+[tool.hatch.envs.default.scripts]
test = "pytest {args:tests}"
cov = "coverage run -m pytest {args:tests}"

-[[tool.hatch.envs.test.matrix]]
+[[tool.hatch.envs.all.matrix]]
python = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "pypy3.10"]

[tool.hatch.envs.coverage]
@@ -176,6 +178,9 @@ ban-relative-imports = "all"
[tool.ruff.lint.pydocstyle]
convention = "google"

[tool.pyproject-fmt]
max_supported_python = "3.13"

[tool.pytest.ini_options]
addopts = "-v"

26 changes: 21 additions & 5 deletions src/pep610/__init__.py
@@ -3,13 +3,18 @@
from __future__ import annotations

import json
import sys
import typing as t
from dataclasses import dataclass
from functools import singledispatch
from importlib.metadata import version

if sys.version_info < (3, 9):
import importlib_resources
else:
import importlib.resources as importlib_resources

if t.TYPE_CHECKING:
-import sys
from importlib.metadata import Distribution, PathDistribution

if sys.version_info <= (3, 10):
@@ -40,6 +45,7 @@
"write_to_distribution",
]

SCHEMA_FILE = importlib_resources.files(__package__) / "direct_url.schema.json"
__version__ = version(__package__)


@@ -79,7 +85,11 @@ class HashData(t.NamedTuple):
class ArchiveInfo:
"""Archive information."""

-hash: HashData | None
+hashes: dict[str, str] | None = None
+"""Dictionary mapping a hash name to a hex encoded digest of the file."""
+
+hash: HashData | None = None
+"""The archive hash (deprecated)."""


@dataclass
@@ -146,6 +156,9 @@ def _(data: VCSData) -> VCSDict:
@to_dict.register(ArchiveData)
def _(data: ArchiveData) -> ArchiveDict:
archive_info: ArchiveInfoDict = {}
if data.archive_info.hashes is not None:
archive_info["hashes"] = data.archive_info.hashes

if data.archive_info.hash is not None:
archive_info["hash"] = f"{data.archive_info.hash.algorithm}={data.archive_info.hash.value}"

@@ -164,11 +177,14 @@ def _parse(content: str) -> VCSData | ArchiveData | DirData | None:
data = json.loads(content)

if "archive_info" in data:
-hash_value = data["archive_info"].get("hash")
-hash_data = HashData(*hash_value.split("=", 1)) if hash_value else None
+hashes = data["archive_info"].get("hashes")
+hash_data = None
+if hash_value := data["archive_info"].get("hash"):
+hash_data = HashData(*hash_value.split("=", 1)) if hash_value else None

return ArchiveData(
url=data["url"],
-archive_info=ArchiveInfo(hash=hash_data),
+archive_info=ArchiveInfo(hashes=hashes, hash=hash_data),
)

if "dir_info" in data:
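For illustration, here is a minimal sketch of how the new field surfaces on the data classes changed above. It assumes `ArchiveData`, `ArchiveInfo`, and `HashData` are importable from the package top level, as the README examples suggest; the digests are placeholders:

```python
import pep610

# New-style metadata: a mapping of hash algorithm names to hex-encoded digests.
data = pep610.ArchiveData(
    url="https://example.com/pkg-1.0.tar.gz",
    archive_info=pep610.ArchiveInfo(hashes={"sha256": "<hex digest>"}),
)
print(data.archive_info.hashes)  # {'sha256': '<hex digest>'}

# Legacy metadata: the single, now-deprecated `hash` field is still accepted,
# so existing direct_url.json files keep parsing.
legacy = pep610.ArchiveData(
    url="https://example.com/pkg-1.0.tar.gz",
    archive_info=pep610.ArchiveInfo(hash=pep610.HashData("sha256", "<hex digest>")),
)
print(legacy.archive_info.hash.algorithm)  # sha256
```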
1 change: 1 addition & 0 deletions src/pep610/_types.py
@@ -37,6 +37,7 @@ class VCSDict(t.TypedDict):
class ArchiveInfoDict(t.TypedDict, total=False):
"""Archive information."""

hashes: dict[str, str]
hash: str


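A small sketch of dictionaries matching the updated `ArchiveInfoDict` (importing from the underscore-prefixed module is shown for illustration only; it is internal to the package):

```python
from pep610._types import ArchiveInfoDict  # internal module; illustration only

# The TypedDict is declared with total=False, so either key (or both) may appear.
new_style: ArchiveInfoDict = {"hashes": {"sha256": "<hex digest>"}}
legacy: ArchiveInfoDict = {"hash": "sha256=<hex digest>"}
```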
96 changes: 96 additions & 0 deletions src/pep610/direct_url.schema.json
@@ -0,0 +1,96 @@
{
"allOf": [
{
"type": "object",
"properties": {
"url": {
"type": "string",
"format": "uri"
}
},
"required": [
"url"
]
},
{
"anyOf": [
{
"type": "object",
"properties": {
"dir_info": {
"type": "object",
"properties": {
"editable": {
"type": "boolean"
}
}
}
},
"required": [
"dir_info"
]
},
{
"type": "object",
"properties": {
"vcs_info": {
"type": "object",
"properties": {
"vcs": {
"type": "string",
"enum": [
"git",
"hg",
"bzr",
"svn"
]
},
"requested_revision": {
"type": "string"
},
"commit_id": {
"type": "string"
},
"resolved_revision": {
"type": "string"
}
},
"required": [
"vcs",
"commit_id"
]
}
},
"required": [
"vcs_info"
]
},
{
"type": "object",
"properties": {
"archive_info": {
"type": "object",
"properties": {
"hash": {
"type": "string",
"pattern": "^[a-f0-9]+=[a-f0-9]+$"
},
"hashes": {
"type": "object",
"patternProperties": {
"^[a-f0-9]+$": {
"type": "string"
}
}
}
}
}
},
"required": [
"archive_info"
]
}
]
}
]
}
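The bundled schema can also be used directly for one-off validation of a direct URL document. A minimal sketch, assuming the third-party `jsonschema` package is available (it is not a dependency of this project):

```python
import json

import jsonschema  # third-party validator; an assumption here, not a project dependency

from pep610 import SCHEMA_FILE

schema = json.loads(SCHEMA_FILE.read_text())

# A direct URL document using the new `hashes` key.
document = {
    "url": "https://example.com/pkg-1.0.tar.gz",
    "archive_info": {"hashes": {"sha256": "<hex digest>"}},
}

# Raises jsonschema.exceptions.ValidationError if the document does not match.
jsonschema.validate(instance=document, schema=schema)
```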
70 changes: 5 additions & 65 deletions tests/test_generic.py
@@ -1,77 +1,17 @@
import json
from importlib.metadata import PathDistribution

import pytest
from hypothesis import HealthCheck, given, settings
from hypothesis_jsonschema import from_schema

-from pep610 import read_from_distribution, write_to_distribution
+from pep610 import SCHEMA_FILE, read_from_distribution, write_to_distribution

SCHEMA = json.loads(SCHEMA_FILE.read_text())


@settings(suppress_health_check=[HealthCheck.too_slow])
@given(
from_schema(
{
"allOf": [
{
"type": "object",
"properties": {
"url": {"type": "string", "format": "uri"},
},
"required": ["url"],
},
{
"anyOf": [
{
"type": "object",
"properties": {
"dir_info": {
"type": "object",
"properties": {"editable": {"type": "boolean"}},
},
},
"required": ["dir_info"],
},
{
"type": "object",
"properties": {
"vcs_info": {
"type": "object",
"properties": {
"vcs": {
"type": "string",
"enum": ["git", "hg", "bzr", "svn"],
},
"requested_revision": {"type": "string"},
"commit_id": {"type": "string"},
"resolved_revision": {"type": "string"},
"resolved_revision_type": {"type": "string"},
},
"required": ["vcs", "commit_id"],
},
},
"required": ["vcs_info"],
},
{
"type": "object",
"properties": {
"archive_info": {
"type": "object",
"properties": {
"hash": {
"type": "string",
"pattern": r"^[a-f0-9]+=[a-f0-9]+$",
},
},
},
},
"required": ["archive_info"],
},
],
},
],
},
),
)
@given(from_schema(SCHEMA))
def test_generic(tmp_path_factory: pytest.TempPathFactory, value: dict):
"""Test parsing a local directory."""
dist_path = tmp_path_factory.mktemp("pep610")
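For context, `from_schema(SCHEMA)` builds a Hypothesis strategy that generates documents satisfying the bundled schema, so this single test now exercises `hash`, `hashes`, VCS, and directory payloads alike. A quick interactive sketch (`.example()` is meant for exploration, not for use inside tests):

```python
import json

from hypothesis_jsonschema import from_schema

from pep610 import SCHEMA_FILE

SCHEMA = json.loads(SCHEMA_FILE.read_text())

# Draw one sample document that satisfies the Direct URL schema.
sample = from_schema(SCHEMA).example()
print(sample)  # e.g. {"url": "...", "archive_info": {"hashes": {...}}}
```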