
Commit 9363639

Removed 'out/opensearchpy' folder which was produced while generating pyi files for plugins (#288)

Signed-off-by: saimedhi <saimedhi@amazon.com>
saimedhi authored Feb 14, 2023
1 parent 3d1dd7b · commit 9363639

Showing 10 changed files with 3 additions and 110 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -41,6 +41,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)

 ### Removed
 - Removed patch versions in integration tests for OpenSearch 1.0.0 - 2.3.0 to reduce Github Action jobs ([#262](https://github.com/opensearch-project/opensearch-py/pull/262))
+- Removed 'out/opensearchpy' folder which was produced while generating pyi files for plugins ([#288](https://github.com/opensearch-project/opensearch-py/pull/288))
 ### Fixed
 - Fixed DeprecationWarning emitted from urllib3 1.26.13+ ([#246](https://github.com/opensearch-project/opensearch-py/pull/246))
 ### Security
4 changes: 0 additions & 4 deletions opensearchpy/_async/helpers.py
@@ -143,7 +143,6 @@ async def async_streaming_bulk(
     *args,
     **kwargs
 ):
-
     """
     Streaming bulk consumes actions from the iterable passed in and yields
     results per action. For non-streaming usecases use
@@ -185,7 +184,6 @@ async def map_actions():
     async for bulk_data, bulk_actions in _chunk_actions(
         map_actions(), chunk_size, max_chunk_bytes, client.transport.serializer
     ):
-
         for attempt in range(max_retries + 1):
             to_retry, to_retry_data = [], []
             if attempt:
@@ -207,7 +205,6 @@ async def map_actions():
                         **kwargs,
                     ),
                 ):
-
                     if not ok:
                         action, info = info.popitem()
                         # retry if retries enabled, we get 429, and we are not
@@ -422,7 +419,6 @@ async def async_reindex(
     scan_kwargs={},
     bulk_kwargs={},
 ):
-
     """
     Reindex all documents from one index that satisfy a given query
     to another, potentially (if `target_client` is specified) on a different cluster.
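For context, the async_streaming_bulk helper modified above is consumed as an async generator of per-action results. A minimal usage sketch, assuming the async extras (aiohttp) are installed and a cluster is reachable on localhost; the index name and documents are illustrative:

import asyncio

from opensearchpy import AsyncOpenSearch
from opensearchpy.helpers import async_streaming_bulk

async def main():
    client = AsyncOpenSearch(hosts=["http://localhost:9200"])  # illustrative host
    # Each action is a plain dict; _index and _id route it, the rest is the document.
    actions = ({"_index": "my-index", "_id": i, "value": i} for i in range(100))
    async for ok, result in async_streaming_bulk(client, actions, max_retries=3):
        if not ok:
            print("failed:", result)  # one (ok, result) pair is yielded per action
    await client.close()

asyncio.run(main())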
1 change: 0 additions & 1 deletion opensearchpy/_async/transport.py
@@ -160,7 +160,6 @@ async def _async_init(self):
 
         # ... and we can start sniffing in the background.
         if self.sniffing_task is None and self.sniff_on_start:
-
             # Create an asyncio.Event for future calls to block on
             # until the initial sniffing task completes.
             self._sniff_on_start_event = asyncio.Event()
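The sniff-on-startup path touched here is exercised by constructing the async client with sniffing enabled. A sketch, with an illustrative seed host:

from opensearchpy import AsyncOpenSearch

# With sniff_on_start=True, the transport's _async_init() schedules the initial
# node discovery as a background task and creates an asyncio.Event that later
# calls can wait on until that first sniff completes.
client = AsyncOpenSearch(
    hosts=["http://localhost:9200"],  # illustrative seed node
    sniff_on_start=True,
    sniff_on_connection_fail=True,
)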
1 change: 0 additions & 1 deletion opensearchpy/connection/http_async.py
@@ -196,7 +196,6 @@ async def perform_request(
 
         start = self.loop.time()
         try:
-
             async with self.session.request(
                 method,
                 url,
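For orientation, the try block shown above wraps the standard aiohttp request pattern. A generic sketch of that pattern, not the connection class itself:

import asyncio

import aiohttp

async def timed_request(method, url):
    async with aiohttp.ClientSession() as session:
        loop = asyncio.get_running_loop()
        start = loop.time()  # mirrors start = self.loop.time() in the hunk
        async with session.request(method, url) as response:
            raw = await response.text()
        return response.status, loop.time() - start, raw

# e.g. status, duration, body = asyncio.run(timed_request("GET", "http://localhost:9200"))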
4 changes: 0 additions & 4 deletions opensearchpy/helpers/actions.py
@@ -280,7 +280,6 @@ def streaming_bulk(
     *args,
     **kwargs
 ):
-
     """
     Streaming bulk consumes actions from the iterable passed in and yields
     results per action. For non-streaming usecases use
@@ -319,7 +318,6 @@ def streaming_bulk(
     for bulk_data, bulk_actions in _chunk_actions(
         actions, chunk_size, max_chunk_bytes, client.transport.serializer
     ):
-
         for attempt in range(max_retries + 1):
             to_retry, to_retry_data = [], []
             if attempt:
@@ -339,7 +337,6 @@ def streaming_bulk(
                         **kwargs
                     ),
                 ):
-
                     if not ok:
                         action, info = info.popitem()
                         # retry if retries enabled, we get 429, and we are not
@@ -621,7 +618,6 @@ def reindex(
     scan_kwargs={},
     bulk_kwargs={},
 ):
-
     """
     Reindex all documents from one index that satisfy a given query
     to another, potentially (if `target_client` is specified) on a different cluster.
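The synchronous counterparts mirror the async helpers above. A minimal sketch of streaming_bulk and reindex, assuming a reachable local cluster; index names and documents are illustrative:

from opensearchpy import OpenSearch
from opensearchpy.helpers import reindex, streaming_bulk

client = OpenSearch(hosts=["http://localhost:9200"])  # illustrative host

# streaming_bulk yields one (ok, result) pair per action as chunks are sent.
docs = ({"_index": "source-index", "_id": i, "value": i} for i in range(1000))
for ok, result in streaming_bulk(client, docs, max_retries=3):
    if not ok:
        print("failed:", result)

# reindex copies documents matching a query into another index, optionally on a
# different cluster when target_client is given.
reindex(client, source_index="source-index", target_index="dest-index")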
3 changes: 2 additions & 1 deletion opensearchpy/transport.py
@@ -197,13 +197,14 @@ def set_connections(self, hosts):
         :arg hosts: same as `__init__`
         """
+
         # construct the connections
         def _create_connection(host):
             # if this is not the initial setup look at the existing connection
             # options and identify connections that haven't changed and can be
             # kept around.
             if hasattr(self, "connection_pool"):
-                for (connection, old_host) in self.connection_pool.connection_opts:
+                for connection, old_host in self.connection_pool.connection_opts:
                     if old_host == host:
                         return connection
 
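The loop rewrite in this hunk is purely stylistic: parentheses around a tuple-unpacking target in a for statement are redundant, as a quick sketch shows.

pairs = [("conn-a", {"host": "a"}), ("conn-b", {"host": "b"})]

# These two loops are equivalent; the second form is the idiomatic one.
for (connection, old_host) in pairs:
    print(connection, old_host)
for connection, old_host in pairs:
    print(connection, old_host)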
16 changes: 0 additions & 16 deletions out/opensearchpy/client/plugins.pyi

This file was deleted.

8 changes: 0 additions & 8 deletions out/opensearchpy/plugins/__init__.pyi

This file was deleted.

73 changes: 0 additions & 73 deletions out/opensearchpy/plugins/alerting.pyi

This file was deleted.

2 changes: 0 additions & 2 deletions test_opensearchpy/test_async/test_server/test_helpers.py
@@ -530,7 +530,6 @@ async def test_initial_search_error(self, async_client, scan_teardown):
             ),
         ):
             with patch.object(async_client, "scroll", MockScroll()):
-
                 data = [
                     x
                     async for x in helpers.async_scan(
@@ -554,7 +553,6 @@ async def test_initial_search_error(self, async_client, scan_teardown):
             ),
         ):
             with patch.object(async_client, "scroll", MockScroll()) as mock_scroll:
-
                 with pytest.raises(ScanError):
                     data = [
                         x
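For reference, helpers.async_scan, which these tests exercise, pages through results with the scroll API and yields raw hits. A minimal consumption sketch, assuming the async extras are installed, a cluster is reachable, and the index exists; names are illustrative:

import asyncio

from opensearchpy import AsyncOpenSearch, helpers

async def main():
    client = AsyncOpenSearch(hosts=["http://localhost:9200"])  # illustrative host
    async for hit in helpers.async_scan(
        client, index="test-index", query={"query": {"match_all": {}}}
    ):
        print(hit["_source"])
    await client.close()

asyncio.run(main())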
