Remove redundant six dependency (#781)
* Don't create universal wheel for Python 3 only

Signed-off-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>

* Update Black target version to match min Python supported

Signed-off-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>

* Upgrade files using six to Python 3 syntax

Signed-off-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>

* Remove redundant six dependency

Signed-off-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>

* Format with Black

Signed-off-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>

* Add changelog entry

Signed-off-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>

---------

Signed-off-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
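
The bullet about upgrading files from six to Python 3 syntax covers a handful of recurring substitutions, all visible in the diffs below: `iteritems`/`itervalues` become plain dict methods, `string_types` becomes `str`, `@add_metaclass` becomes a `metaclass=` keyword in the class statement, and two-argument `super(...)` calls become zero-argument `super()`. The sketch below is a minimal, self-contained illustration of those substitutions; the class and variable names (`MyMeta`, `Base`, `MyDocument`, `mapping`) are hypothetical and are not taken from opensearch-py.

```python
# A minimal sketch of the six -> Python 3 substitutions applied in this commit.
# All names here are hypothetical examples, not identifiers from opensearch-py.


class MyMeta(type):
    """Stand-in metaclass (the real code uses e.g. AsyncIndexMeta)."""


class Base:
    def to_dict(self):
        return {}


# six era: @add_metaclass(MyMeta) decorating a plain class statement
class MyDocument(Base, metaclass=MyMeta):
    def to_dict(self):
        # six era: super(MyDocument, self).to_dict()
        return super().to_dict()


mapping = {"title": "text", "views": "integer"}

# six era: for name, value in iteritems(mapping)
for name, value in mapping.items():
    print(name, value)

# six era: for value in itervalues(mapping)
for value in mapping.values():
    print(value)

# six era: isinstance(key, string_types)
key = "title"
if isinstance(key, str):
    print("key is a string")
```

With these substitutions in place, the `types-six` stub package in `noxfile.py` and the runtime dependency on six itself become unnecessary, which is what the remaining diffs remove.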
hugovk committed Jul 20, 2024
1 parent 1ef788a commit de96d28
Showing 18 changed files with 116 additions and 162 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -7,6 +7,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- Removed deprecated `numpy.float_` and update NumPy/Pandas imports ([#762](https://github.com/opensearch-project/opensearch-py/pull/762))
### Deprecated
### Removed
- Removed redundant dependency on six ([#781](https://github.com/opensearch-project/opensearch-py/pull/781))
### Fixed
- Fixed Search helper to ensure proper retention of the _collapse attribute in chained operations. ([#771](https://github.com/opensearch-project/opensearch-py/pull/771))
### Updated APIs
1 change: 0 additions & 1 deletion noxfile.py
@@ -90,7 +90,6 @@ def lint(session: Any) -> None:
"isort",
"pylint",
"types-requests",
"types-six",
"types-simplejson",
"types-python-dateutil",
"types-PyYAML",
9 changes: 3 additions & 6 deletions opensearchpy/_async/helpers/document.py
@@ -11,8 +11,6 @@
from fnmatch import fnmatch
from typing import Any, Optional, Tuple, Type

from six import add_metaclass

from opensearchpy._async.client import AsyncOpenSearch
from opensearchpy._async.helpers.index import AsyncIndex
from opensearchpy._async.helpers.search import AsyncSearch
@@ -38,7 +36,7 @@ def __new__(
bases: Tuple[Type[ObjectBase]],
attrs: Any,
) -> Any:
new_cls = super(AsyncIndexMeta, cls).__new__(cls, name, bases, attrs)
new_cls = super().__new__(cls, name, bases, attrs)
if cls._document_initialized:
index_opts = attrs.pop("Index", None)
index = cls.construct_index(index_opts, bases)
@@ -67,8 +65,7 @@ def construct_index(cls, opts: Any, bases: Any) -> Any:
return i


@add_metaclass(AsyncIndexMeta)
class AsyncDocument(ObjectBase):
class AsyncDocument(ObjectBase, metaclass=AsyncIndexMeta):
"""
Model-like class for persisting documents in opensearch.
"""
@@ -297,7 +294,7 @@ def to_dict( # type: ignore
``[]``, ``{}``) to be left on the document. Those values will be
stripped out otherwise as they make no difference in opensearch.
"""
d = super(AsyncDocument, self).to_dict(skip_empty)
d = super().to_dict(skip_empty)
if not include_meta:
return d

12 changes: 5 additions & 7 deletions opensearchpy/_async/helpers/faceted_search.py
@@ -10,14 +10,12 @@

from typing import Any

from six import iteritems, itervalues

from opensearchpy._async.helpers.search import AsyncSearch
from opensearchpy.helpers.faceted_search import FacetedResponse
from opensearchpy.helpers.query import MatchAll


class AsyncFacetedSearch(object):
class AsyncFacetedSearch:
"""
Abstraction for creating faceted navigation searches that takes care of
composing the queries, aggregations and filters as needed as well as
@@ -75,7 +73,7 @@ def __init__(self, query: Any = None, filters: Any = {}, sort: Any = ()) -> None
self._filters: Any = {}
self._sort = sort
self.filter_values: Any = {}
for name, value in iteritems(filters):
for name, value in filters.items():
self.add_filter(name, value)

self._s = self.build_search()
@@ -140,10 +138,10 @@ def aggregate(self, search: Any) -> Any:
Add aggregations representing the facets selected, including potential
filters.
"""
for f, facet in iteritems(self.facets):
for f, facet in self.facets.items():
agg = facet.get_aggregation()
agg_filter = MatchAll()
for field, filter in iteritems(self._filters):
for field, filter in self._filters.items():
if f == field:
continue
agg_filter &= filter
@@ -160,7 +158,7 @@ def filter(self, search: Any) -> Any:
return search

post_filter = MatchAll()
for f in itervalues(self._filters):
for f in self._filters.values():
post_filter &= f
return search.post_filter(post_filter)

8 changes: 3 additions & 5 deletions opensearchpy/_async/helpers/mapping.py
@@ -11,14 +11,12 @@
from itertools import chain
from typing import Any

from six import iteritems

from opensearchpy.connection.async_connections import get_connection
from opensearchpy.helpers.field import Nested, Text
from opensearchpy.helpers.mapping import META_FIELDS, Properties


class AsyncMapping(object):
class AsyncMapping:
_meta: Any
properties: Properties

@@ -104,11 +102,11 @@ async def update_from_opensearch(self, index: Any, using: str = "default") -> No
self._update_from_dict(raw["mappings"])

def _update_from_dict(self, raw: Any) -> None:
for name, definition in iteritems(raw.get("properties", {})):
for name, definition in raw.get("properties", {}).items():
self.field(name, definition)

# metadata like _all etc
for name, value in iteritems(raw):
for name, value in raw.items():
if name != "properties":
if isinstance(value, collections_abc.Mapping):
self.meta(name, **value)
16 changes: 7 additions & 9 deletions opensearchpy/_async/helpers/search.py
@@ -10,8 +10,6 @@
import copy
from typing import Any, Sequence

from six import iteritems, string_types

from opensearchpy._async.helpers.actions import aiter, async_scan
from opensearchpy.connection.async_connections import get_connection
from opensearchpy.exceptions import IllegalOperation, TransportError
@@ -37,7 +35,7 @@ def __init__(self, **kwargs: Any) -> None:
All the parameters supplied (or omitted) at creation type can be later
overridden by methods (`using`, `index` and `doc_type` respectively).
"""
super(AsyncSearch, self).__init__(**kwargs)
super().__init__(**kwargs)

self.aggs = AggsProxy(self)
self._sort: Sequence[Any] = []
@@ -119,7 +117,7 @@ def _clone(self) -> Any:
of all the underlying objects. Used internally by most state modifying
APIs.
"""
s = super(AsyncSearch, self)._clone()
s = super()._clone()

s._response_class = self._response_class
s._sort = self._sort[:]
@@ -158,7 +156,7 @@ def update_from_dict(self, d: Any) -> "AsyncSearch":
aggs = d.pop("aggs", d.pop("aggregations", {}))
if aggs:
self.aggs._params = {
"aggs": {name: A(value) for (name, value) in iteritems(aggs)}
"aggs": {name: A(value) for (name, value) in aggs.items()}
}
if "sort" in d:
self._sort = d.pop("sort")
@@ -200,7 +198,7 @@ def script_fields(self, **kwargs: Any) -> Any:
"""
s = self._clone()
for name in kwargs:
if isinstance(kwargs[name], string_types):
if isinstance(kwargs[name], str):
kwargs[name] = {"script": kwargs[name]}
s._script_fields.update(kwargs)
return s
@@ -276,7 +274,7 @@ def sort(self, *keys: Any) -> Any:
s = self._clone()
s._sort = []
for k in keys:
if isinstance(k, string_types) and k.startswith("-"):
if isinstance(k, str) and k.startswith("-"):
if k[1:] == "_score":
raise IllegalOperation("Sorting by `-_score` is not allowed.")
k = {k[1:]: {"order": "desc"}}
@@ -470,7 +468,7 @@ class AsyncMultiSearch(Request):
"""

def __init__(self, **kwargs: Any) -> None:
super(AsyncMultiSearch, self).__init__(**kwargs)
super().__init__(**kwargs)
self._searches: Any = []

def __getitem__(self, key: Any) -> Any:
@@ -480,7 +478,7 @@ def __iter__(self) -> Any:
return iter(self._searches)

def _clone(self) -> Any:
ms = super(AsyncMultiSearch, self)._clone()
ms = super()._clone()
ms._searches = self._searches[:]
return ms

6 changes: 2 additions & 4 deletions opensearchpy/connection/async_connections.py
@@ -9,14 +9,12 @@

from typing import Any

from six import string_types

import opensearchpy
from opensearchpy._async.helpers.actions import aiter
from opensearchpy.serializer import serializer


class AsyncConnections(object):
class AsyncConnections:
_conns: Any

"""
@@ -92,7 +90,7 @@ async def get_connection(self, alias: str = "default") -> Any:
"""
# do not check isinstance(AsyncOpenSearch) so that people can wrap their
# clients
if not isinstance(alias, string_types):
if not isinstance(alias, str):
return alias

# connection already established
6 changes: 2 additions & 4 deletions opensearchpy/connection/connections.py
@@ -26,13 +26,11 @@

from typing import Any

from six import string_types

import opensearchpy
from opensearchpy.serializer import serializer


class Connections(object):
class Connections:
"""
Class responsible for holding connections to different clusters. Used as a
singleton in this module.
@@ -106,7 +104,7 @@ def get_connection(self, alias: str = "default") -> Any:
"""
# do not check isinstance(OpenSearch) so that people can wrap their
# clients
if not isinstance(alias, string_types):
if not isinstance(alias, str):
return alias

# connection already established
20 changes: 9 additions & 11 deletions opensearchpy/helpers/analysis.py
@@ -26,14 +26,12 @@

from typing import Any, Optional

import six

from opensearchpy.connection.connections import get_connection

from .utils import AttrDict, DslBase, merge


class AnalysisBase(object):
class AnalysisBase:
@classmethod
def _type_shortcut(
cls: Any, name_or_instance: Any, type: Any = None, **kwargs: Any
@@ -51,22 +49,22 @@ def _type_shortcut(
)


class CustomAnalysis(object):
class CustomAnalysis:
name: Optional[str] = "custom"

def __init__(
self, filter_name: str, builtin_type: str = "custom", **kwargs: Any
) -> None:
self._builtin_type = builtin_type
self._name = filter_name
super(CustomAnalysis, self).__init__(**kwargs)
super().__init__(**kwargs)

def to_dict(self) -> Any:
# only name to present in lists
return self._name

def get_definition(self) -> Any:
d = super(CustomAnalysis, self).to_dict() # type: ignore
d = super().to_dict() # type: ignore
d = d.pop(self.name)
d["type"] = self._builtin_type
return d
@@ -106,12 +104,12 @@ def get_analysis_definition(self: Any) -> Any:
return out


class BuiltinAnalysis(object):
class BuiltinAnalysis:
name: Optional[str] = "builtin"

def __init__(self, name: Any) -> None:
self._name = name
super(BuiltinAnalysis, self).__init__()
super().__init__()

def to_dict(self) -> Any:
# only name to present in lists
@@ -168,7 +166,7 @@ def simulate(
sec_def = definition.get(section, {})
sec_names = analyzer_def[section]

if isinstance(sec_names, six.string_types):
if isinstance(sec_names, str):
body[section] = sec_def.get(sec_names, sec_names)
else:
body[section] = [
@@ -235,7 +233,7 @@ def get_definition(self) -> Any:
# comma delimited string given by user
(
fs
if isinstance(fs, six.string_types)
if isinstance(fs, str)
else
# list of strings or TokenFilter objects
", ".join(f.to_dict() if hasattr(f, "to_dict") else f for f in fs)
@@ -251,7 +249,7 @@ def get_analysis_definition(self) -> Any:
fs: Any = {}
d = {"filter": fs}
for filters in self.filters:
if isinstance(filters, six.string_types):
if isinstance(filters, str):
continue
fs.update(
{
(Diffs for the remaining changed files are not shown here.)
