Merge pull request #77 from vlasovskikh/py3-only
Drop support for Python 2.7
vlasovskikh committed Nov 19, 2023
2 parents 343a65d + ecaa1cc commit dae0c79
Showing 24 changed files with 1,004 additions and 1,144 deletions.
15 changes: 15 additions & 0 deletions .fleet/run.json
@@ -0,0 +1,15 @@
{
"configurations": [
{
"type": "python-tests",
"name": "Unit tests",
"testFramework": "unittest"
},
{
"type": "command",
"name": "pre-commit",
"program": "pre-commit",
"args": ["run", "-a"]
}
]
}
4 changes: 2 additions & 2 deletions .github/workflows/gh-pages.yml
@@ -10,10 +10,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.11
- name: Set up Python 3.12
uses: actions/setup-python@v3
with:
python-version: "3.11"
python-version: "3.12"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
4 changes: 2 additions & 2 deletions .github/workflows/publish-to-pypi.yml
@@ -10,10 +10,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.11
- name: Set up Python 3.12
uses: actions/setup-python@v3
with:
python-version: "3.11"
python-version: "3.12"
- name: Install build tools
run: |
python -m pip install --upgrade pip
22 changes: 1 addition & 21 deletions .github/workflows/python-checks.yml
@@ -9,27 +9,6 @@ on:
- master

jobs:
unittest-checks:
runs-on: ubuntu-latest
strategy:
matrix:
python-version:
- "2.7"
- "3.7"
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install poetry
poetry install
- name: Test with unittest
run: |
poetry run python -m unittest discover
pre-commit-checks:
runs-on: ubuntu-latest
strategy:
@@ -39,6 +18,7 @@ jobs:
- "3.9"
- "3.10"
- "3.11"
- "3.12"
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
10 changes: 4 additions & 6 deletions .pre-commit-config.yaml
@@ -1,19 +1,17 @@
repos:
- repo: https://github.com/psf/black
rev: "22.10.0"
rev: "23.11.0"
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
rev: "3.9.2"
- repo: https://github.com/PyCQA/flake8
rev: "6.1.0"
hooks:
- id: flake8
args: ["--max-line-length=88"]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: "v0.982"
rev: "v1.7.0"
hooks:
- id: mypy
args: []
files: "^tests/"
- repo: local
hooks:
- id: unittest
2 changes: 1 addition & 1 deletion LICENSE
@@ -1,4 +1,4 @@
Copyright © 2009/2021 Andrey Vlasovskikh
Copyright © 2009/2023 Andrey Vlasovskikh

Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
15 changes: 15 additions & 0 deletions docs/changes.md
@@ -1,6 +1,21 @@
The Changelog
=============

2.0.0 — to be released
----------------------

Dropped support for Python 2.7 (end of life). For compatibility with Python 2.7 please
use version `>=1.0,==1.*` (`~=1.0`).

### Added

* Added support for Python 3.12

### Changed

* Dropped support for Python 2.7
* Dropped support for Python 3.7


1.0.1 — 2022-11-04
------------------
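The changelog entry above tells Python 2.7 users to stay on the 1.x line. As a minimal sketch, a hypothetical downstream project that still needs Python 2.7 could express that pin in a requirements file (the file name and layout here are assumptions, not part of this commit):

```
# Hypothetical requirements.txt: stay on the 1.x line, which still supports Python 2.7.
# The two spellings below are equivalent PEP 440 specifiers.
funcparserlib >= 1.0, == 1.*
# funcparserlib ~= 1.0
```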
2 changes: 1 addition & 1 deletion docs/index.md
@@ -162,7 +162,7 @@ Used By
Some open-source projects that use `funcparserlib` as an explicit dependency:

* [Hy](https://github.com/hylang/hy), a Lisp dialect that's embedded in Python
* 4.2K stars, version `>= 1.0.0a0`, Python 3.7+
* 4.2K stars, version `~= 1.0`, Python 3.8+
* [Splash](https://github.com/scrapinghub/splash), a JavaScript rendering service with HTTP API, by Scrapinghub
* 3.6K stars, version `*`. Python 3 in Docker
* [graphite-beacon](https://github.com/klen/graphite-beacon), a simple alerting system for Graphite metrics
57 changes: 34 additions & 23 deletions funcparserlib/lexer.py
@@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-

# Copyright © 2009/2021 Andrey Vlasovskikh
# Copyright © 2009/2023 Andrey Vlasovskikh
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
@@ -19,28 +17,31 @@
# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from __future__ import unicode_literals

__all__ = ["make_tokenizer", "TokenSpec", "Token", "LexerError"]

import re
from typing import Callable, Iterable, List, Tuple, Optional, Sequence, Pattern, Union


_Place = Tuple[int, int]
_Spec = Tuple[str, Tuple]


class LexerError(Exception):
def __init__(self, place, msg):
def __init__(self, place: _Place, msg: str) -> None:
self.place = place
self.msg = msg

def __str__(self):
def __str__(self) -> str:
s = "cannot tokenize data"
line, pos = self.place
return '%s: %d,%d: "%s"' % (s, line, pos, self.msg)


class TokenSpec(object):
class TokenSpec:
"""A token specification for generating a lexer via `make_tokenizer()`."""

def __init__(self, type, pattern, flags=0):
def __init__(self, type: str, pattern: str, flags: int = 0) -> None:
"""Initialize a `TokenSpec` object.
Parameters:
@@ -53,11 +54,11 @@ def __init__(self, type, pattern, flags=0):
self.pattern = pattern
self.flags = flags

def __repr__(self):
def __repr__(self) -> str:
return "TokenSpec(%r, %r, %r)" % (self.type, self.pattern, self.flags)


class Token(object):
class Token:
"""A token object that represents a substring of certain type in your text.
You can compare tokens for equality using the `==` operator. Tokens also define
@@ -71,48 +72,56 @@ class Token(object):
end (Optional[Tuple[int, int]]): End position (_line_, _column_)
"""

def __init__(self, type, value, start=None, end=None):
def __init__(
self,
type: str,
value: str,
start: Optional[_Place] = None,
end: Optional[_Place] = None,
) -> None:
"""Initialize a `Token` object."""
self.type = type
self.value = value
self.start = start
self.end = end

def __repr__(self):
def __repr__(self) -> str:
return "Token(%r, %r)" % (self.type, self.value)

def __eq__(self, other):
def __eq__(self, other: object) -> bool:
# FIXME: Case sensitivity is assumed here
if other is None:
if not isinstance(other, Token):
return False
else:
return self.type == other.type and self.value == other.value

def _pos_str(self):
def _pos_str(self) -> str:
if self.start is None or self.end is None:
return ""
else:
sl, sp = self.start
el, ep = self.end
return "%d,%d-%d,%d:" % (sl, sp, el, ep)

def __str__(self):
def __str__(self) -> str:
s = "%s %s '%s'" % (self._pos_str(), self.type, self.value)
return s.strip()

@property
def name(self):
def name(self) -> str:
return self.value

def pformat(self):
def pformat(self) -> str:
return "%s %s '%s'" % (
self._pos_str().ljust(20), # noqa
self.type.ljust(14),
self.value,
)


def make_tokenizer(specs):
def make_tokenizer(
specs: Sequence[Union[TokenSpec, _Spec]],
) -> Callable[[str], Iterable[Token]]:
# noinspection GrazieInspection
"""Make a function that tokenizes text based on the regexp specs.
@@ -150,7 +159,7 @@ def make_tokenizer(specs):
```
"""
compiled = []
compiled: List[Tuple[str, Pattern[str]]] = []
for spec in specs:
if isinstance(spec, TokenSpec):
c = spec.type, re.compile(spec.pattern, spec.flags)
@@ -159,7 +168,7 @@ def match_specs(s, i, position):
c = name, re.compile(*args)
compiled.append(c)

def match_specs(s, i, position):
def match_specs(s: str, i: int, position: Tuple[int, int]) -> Token:
line, pos = position
for type, regexp in compiled:
m = regexp.match(s, i)
@@ -176,13 +185,15 @@ def match_specs(s, i, position):
err_line = s.splitlines()[line - 1]
raise LexerError((line, pos + 1), err_line)

def f(s):
def f(s: str) -> Iterable[Token]:
length = len(s)
line, pos = 1, 0
i = 0
while i < length:
t = match_specs(s, i, (line, pos))
yield t
if t.end is None:
raise ValueError("Token %r has no end specified", (t,))
line, pos = t.end
i += len(t.value)

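Taken together, the new annotations document the lexer API: `make_tokenizer()` accepts a sequence of `TokenSpec` objects (or plain `(name, args)` tuples) and returns a callable that yields `Token` objects with `(line, column)` start/end positions. A minimal usage sketch against this API follows; the token names and regexps are invented for illustration and are not part of the commit:

```python
from funcparserlib.lexer import TokenSpec, make_tokenizer

# Hypothetical token specs; only the TokenSpec/make_tokenizer API comes from the diff above.
specs = [
    TokenSpec("whitespace", r"\s+"),
    TokenSpec("number", r"\d+"),
    TokenSpec("op", r"[+\-*/]"),
]
tokenize = make_tokenizer(specs)

# The returned function is a generator of Token objects; each token carries
# (line, column) start/end positions, which the new `t.end is None` check guards.
tokens = [t for t in tokenize("1 + 2") if t.type != "whitespace"]
print(tokens)  # [Token('number', '1'), Token('op', '+'), Token('number', '2')]
```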
34 changes: 0 additions & 34 deletions funcparserlib/lexer.pyi

This file was deleted.
