Commit
Add-pathlib-package (#26)
* feat: Add Path to list functions
* feat: Add Path to check functions
* feat: Add Path to copy functions
* feat: Add Path to delete functions
* feat: Add Path to download functions
* feat: Add Path to move functions
* feat: Add Path to read functions
* feat: Add Path to upload functions
* feat: Add Path to presigned functions
* chore: Bump package patch version
FerrariDG authored Aug 18, 2022
1 parent bdeffa7 commit 3969bd0
Showing 22 changed files with 619 additions and 457 deletions.
3 changes: 2 additions & 1 deletion docs/source/introduction.rst
@@ -14,12 +14,13 @@ All S3 objects functions, in this package, have the option to set AWS Session authentication
To understand more about AWS authentication mechanism, `read boto3 documentation <https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html>`_.

.. code-block:: python

    aws_auth = {
        'region_name': 'REGION',
        'aws_access_key_id': 'ACCESS_KEY',
        'aws_secret_access_key': 'SECRET_KEY',
        'aws_session_token': 'SESSION_TOKEN',
        'profile_name': 'PROFILE_NAME'
        'profile_name': 'PROFILE_NAME',
    }
Installation
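For context, a minimal usage sketch of the aws_auth mapping shown above (bucket and key names are placeholders): the dictionary is passed unchanged to any object function and forwarded to boto3.session.Session, so any subset of the keys may be set and anything omitted falls back to boto3's default credential chain.

    from s3_tools.objects.check import object_exists

    # Placeholder credentials; a profile name alone is enough for this sketch.
    aws_auth = {'profile_name': 'PROFILE_NAME'}

    # Forwarded internally as boto3.session.Session(**aws_auth).
    exists = object_exists('my-bucket', 'myFiles/song.mp3', aws_auth=aws_auth)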
86 changes: 31 additions & 55 deletions poetry.lock

Some generated files are not rendered by default.

4 changes: 3 additions & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "aws-s3-tools"
version = "0.2.0"
version = "0.2.1"
description = "AWS S3 tools package"
authors = ["Daniel Ferrari"]
maintainers = ["Daniel Ferrari, Carlos Alves, Tomás Osório"]
@@ -52,6 +52,8 @@ pytest-cov = "^3.0.0"
toml = "^0.10.2"
sphinx-rtd-theme = "^0.5.2"
sphinx = "^3.5.4"
requests = "^2.28.1"
types-requests = "^2.28.8"

[build-system]
requires = ["poetry_core>=1.0.0"]
12 changes: 8 additions & 4 deletions s3_tools/objects/check.py
@@ -1,19 +1,23 @@
"""Check objects on S3 bucket."""
from typing import Dict
from pathlib import Path
from typing import (
Dict,
Union,
)

import boto3
from botocore.exceptions import ClientError


def object_exists(bucket: str, key: str, aws_auth: Dict[str, str] = {}) -> bool:
def object_exists(bucket: str, key: Union[str, Path], aws_auth: Dict[str, str] = {}) -> bool:
"""Check if an object exists for a given bucket and key.
Parameters
----------
bucket : str
Bucket name where the object is stored.
key : str
key : Union[str, Path]
Full key for the object.
aws_auth: Dict[str, str]
@@ -38,7 +42,7 @@ def object_exists(bucket: str, key: str, aws_auth: Dict[str, str] = {}) -> bool:
s3 = session.client("s3")

try:
s3.head_object(Bucket=bucket, Key=key)
s3.head_object(Bucket=bucket, Key=Path(key).as_posix())
except Exception as error:
if isinstance(error, ClientError) and (error.response["Error"]["Code"] == "404"):
return False
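A short, hypothetical usage sketch of the updated object_exists signature: str and pathlib.Path keys are now interchangeable, since the key is normalized with Path(key).as_posix() before the head_object call.

    from pathlib import Path

    from s3_tools.objects.check import object_exists

    # Both calls check the same object; the Path argument is converted
    # to the POSIX-style key 'myFiles/song.mp3' internally.
    object_exists('my-bucket', 'myFiles/song.mp3')
    object_exists('my-bucket', Path('myFiles') / 'song.mp3')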
43 changes: 24 additions & 19 deletions s3_tools/objects/copy.py
@@ -1,10 +1,12 @@
"""Copy S3 objects."""
from concurrent import futures
from pathlib import Path
from typing import (
Dict,
List,
Optional,
Tuple,
Union,
)

import boto3
@@ -14,9 +16,9 @@

def copy_object(
source_bucket: str,
source_key: str,
source_key: Union[str, Path],
destination_bucket: str,
destination_key: str,
destination_key: Union[str, Path],
aws_auth: Dict[str, str] = {}
) -> None:
"""Copy S3 object from source bucket and key to destination.
@@ -26,13 +28,13 @@ def copy_object(
source_bucket : str
S3 bucket where the object is stored.
source_key : str
source_key : Union[str, Path]
S3 key where the object is referenced.
destination_bucket : str
S3 destination bucket.
destination_key : str
destination_key : Union[str, Path]
S3 destination key.
aws_auth: Dict[str, str]
@@ -44,25 +46,25 @@
... source_bucket='bucket',
... source_key='myFiles/song.mp3',
... destination_bucket='bucket',
... destination_key='myMusic/song.mp3'
... destination_key='myMusic/song.mp3',
... )
"""
session = boto3.session.Session(**aws_auth)
s3 = session.resource("s3")

s3.meta.client.copy(
{'Bucket': source_bucket, 'Key': source_key},
{'Bucket': source_bucket, 'Key': Path(source_key).as_posix()},
destination_bucket,
destination_key
Path(destination_key).as_posix()
)


def copy_keys(
source_bucket: str,
source_keys: List[str],
source_keys: List[Union[str, Path]],
destination_bucket: str,
destination_keys: List[str],
destination_keys: List[Union[str, Path]],
threads: int = 5,
aws_auth: Dict[str, str] = {}
) -> None:
@@ -73,13 +75,13 @@ def copy_keys(
source_bucket : str
S3 bucket where the objects are stored.
source_keys : List[str]
source_keys : List[Union[str, Path]]
S3 keys where the objects are referenced.
destination_bucket : str
S3 destination bucket.
destination_keys : List[str]
destination_keys : List[Union[str, Path]]
S3 destination keys.
threads : int, optional
@@ -102,12 +104,12 @@ def copy_keys(
... source_bucket='bucket',
... source_keys=[
... 'myFiles/song.mp3',
... 'myFiles/photo.jpg'
... Path('myFiles/photo.jpg'),
... ],
... destination_bucket='bucket',
... destination_keys=[
... 'myMusic/song.mp3',
... 'myPhotos/photo.jpg'
... Path('myMusic/song.mp3'),
... 'myPhotos/photo.jpg',
... ]
... )
@@ -130,9 +132,9 @@

def copy_prefix(
source_bucket: str,
source_prefix: str,
source_prefix: Union[str, Path],
destination_bucket: str,
change_prefix: Optional[Tuple[str, str]] = None,
change_prefix: Optional[Tuple[Union[str, Path], Union[str, Path]]] = None,
filter_keys: Optional[str] = None,
threads: int = 5,
aws_auth: Dict[str, str] = {}
@@ -144,13 +146,13 @@ def copy_prefix(
source_bucket : str
S3 bucket where the objects are stored.
source_prefix : str
source_prefix : Union[str, Path]
S3 prefix where the objects are referenced.
destination_bucket : str
S3 destination bucket.
change_prefix : Tuple[str, str], optional
change_prefix : Tuple[Union[str, Path], Union[str, Path]], optional
Text to be replaced in keys prefixes, by default is None.
The first element is the text to be replaced, the second is the replacement text.
@@ -183,7 +185,10 @@ def copy_prefix(
)

destination_keys = source_keys if change_prefix is None else [
key.replace(change_prefix[0], change_prefix[1])
Path(key).as_posix().replace(
Path(change_prefix[0]).as_posix(),
Path(change_prefix[1]).as_posix()
)
for key in source_keys
]

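To illustrate the copy functions after this change (bucket names and keys are placeholders), str and Path arguments can be mixed freely; in copy_prefix, both the listed keys and the change_prefix pair are normalized with Path(...).as_posix() before the prefix text is replaced.

    from pathlib import Path

    from s3_tools.objects.copy import copy_object, copy_prefix

    # Copy a single object; source and destination keys may be str or Path.
    copy_object(
        source_bucket='bucket',
        source_key=Path('myFiles/song.mp3'),
        destination_bucket='bucket',
        destination_key='myMusic/song.mp3',
    )

    # Copy every object under a prefix, rewriting 'myFiles' to 'myMusicFiles'
    # in each destination key.
    copy_prefix(
        source_bucket='bucket',
        source_prefix=Path('myFiles'),
        destination_bucket='another-bucket',
        change_prefix=('myFiles', 'myMusicFiles'),
    )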
