Skip to content

Commit

Permalink
Refactor creation of file-metadata
Browse files Browse the repository at this point in the history
  • Loading branch information
pcrespov committed Apr 27, 2021
1 parent edbdd11 commit 0fcd5b7
Showing 1 changed file with 65 additions and 92 deletions.
157 changes: 65 additions & 92 deletions services/storage/tests/unit/test_dsm.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,54 @@
ANY_DATCORE_TOKENS = os.environ.get("BF_API_KEY") or os.environ.get("BF_API_SECRET")


@pytest.fixture()
def file_meta_data(
    postgres_service_url,
    s3_client,
    tmp_file: Path,
    user_id: int,
    bucket_name: str,
) -> FileMetaData:
    """Builds a FileMetaData object describing *tmp_file* as an object in the
    simcore S3 bucket.

    NOTE(review): despite what the original docstring claimed, nothing is
    uploaded here — this fixture only assembles metadata.  The
    ``postgres_service_url`` and ``s3_client`` parameters are not used
    directly; they are kept as fixture dependencies so the backing services
    are up before any test consumes this metadata.
    """
    filename = os.path.basename(tmp_file)
    project_id = "api"  # "357879cc-f65d-48b2-ad6c-074e2b9aa1c7"
    project_name = "battlestar"
    node_name = "galactica"
    node_id = "b423b654-686d-4157-b74b-08fa9d90b36e"
    file_name = filename
    # file_uuid doubles as object_name, file_id and raw_file_path below
    file_uuid = os.path.join(str(project_id), str(node_id), str(file_name))
    display_name = os.path.join(str(project_name), str(node_name), str(file_name))
    # NOTE(review): naive local timestamp, stored as str — presumably fine for
    # test fixtures; confirm FileMetaData does not expect ISO/UTC
    created_at = str(datetime.datetime.now())
    file_size = 1234  # arbitrary size, not the real size of tmp_file

    fmd = FileMetaData(
        object_name=file_uuid,
        bucket_name=bucket_name,
        file_name=filename,
        user_id=user_id,
        user_name="starbucks",
        location=SIMCORE_S3_STR,
        location_id=SIMCORE_S3_ID,
        project_id=project_id,
        project_name=project_name,
        node_id=node_id,
        node_name=node_name,
        file_uuid=file_uuid,
        file_id=file_uuid,
        raw_file_path=file_uuid,
        display_file_path=display_name,
        created_at=created_at,
        last_modified=created_at,
        file_size=file_size,
    )
    return fmd


def test_mockup(dsm_mockup_db):
assert len(dsm_mockup_db) == 100

Expand Down Expand Up @@ -109,62 +157,12 @@ async def test_dsm_s3(dsm_mockup_db, dsm_fixture):
assert len(dsm_mockup_db) == new_size + len(bobs_biostromy_files)


def _create_file_meta_for_s3(
    postgres_url, s3_client, tmp_file, user_id: int, bucket_name: str
):
    """Builds a FileMetaData object describing *tmp_file* as an object in the
    simcore S3 bucket (metadata only — nothing is uploaded here).

    NOTE(review): ``postgres_url`` and ``s3_client`` are unused in the body;
    they are kept in the signature so existing call sites remain valid.
    """
    filename = os.path.basename(tmp_file)
    project_id = "api"  # "357879cc-f65d-48b2-ad6c-074e2b9aa1c7"
    project_name = "battlestar"
    node_name = "galactica"
    node_id = "b423b654-686d-4157-b74b-08fa9d90b36e"
    file_name = filename
    # file_uuid doubles as object_name, file_id and raw_file_path below
    file_uuid = os.path.join(str(project_id), str(node_id), str(file_name))
    display_name = os.path.join(str(project_name), str(node_name), str(file_name))
    # NOTE(review): naive local timestamp, stored as str — presumably fine for
    # test data; confirm FileMetaData does not expect ISO/UTC
    created_at = str(datetime.datetime.now())
    file_size = 1234  # arbitrary size, not the real size of tmp_file

    fmd = FileMetaData(
        object_name=file_uuid,
        bucket_name=bucket_name,
        file_name=filename,
        user_id=user_id,
        user_name="starbucks",
        location=SIMCORE_S3_STR,
        location_id=SIMCORE_S3_ID,
        project_id=project_id,
        project_name=project_name,
        node_id=node_id,
        node_name=node_name,
        file_uuid=file_uuid,
        file_id=file_uuid,
        raw_file_path=file_uuid,
        display_file_path=display_name,
        created_at=created_at,
        last_modified=created_at,
        file_size=file_size,
    )
    return fmd


async def test_links_s3(
postgres_service_url,
s3_client,
mock_files_factory,
file_meta_data: FileMetaData,
tmp_file: Path,
dsm_fixture,
user_id: int,
bucket_name: str,
):

tmp_file = mock_files_factory(1)[0]
fmd = _create_file_meta_for_s3(
postgres_service_url, s3_client, tmp_file, user_id, bucket_name
)

fmd = file_meta_data
dsm = dsm_fixture

up_url = await dsm.upload_link(fmd.user_id, fmd.file_uuid)
Expand All @@ -184,20 +182,13 @@ async def test_links_s3(


async def test_copy_s3_s3(
postgres_service_url,
s3_client,
mock_files_factory,
file_meta_data: FileMetaData,
tmp_file: Path,
dsm_fixture,
user_id: int,
bucket_name: str,
):

tmp_file = mock_files_factory(1)[0]
fmd = _create_file_meta_for_s3(
postgres_service_url, s3_client, tmp_file, user_id, bucket_name
)

fmd = file_meta_data
dsm = dsm_fixture

data = await dsm.list_files(user_id=fmd.user_id, location=SIMCORE_S3_STR)
assert len(data) == 0

Expand Down Expand Up @@ -264,21 +255,14 @@ async def test_dsm_datcore(

@pytest.mark.skipif(not ANY_DATCORE_TOKENS)
async def test_dsm_s3_to_datcore(
postgres_service_url,
s3_client,
mock_files_factory,
file_meta_data: FileMetaData,
tmp_file: Path,
dsm_fixture,
datcore_structured_testbucket,
user_id: int,
bucket_name: str,
):

tmp_file = mock_files_factory(1)[0]

fmd = _create_file_meta_for_s3(
postgres_service_url, s3_client, tmp_file, user_id, bucket_name
)

fmd = file_meta_data
dsm = dsm_fixture

up_url = await dsm.upload_link(fmd.user_id, fmd.file_uuid)
Expand Down Expand Up @@ -320,10 +304,8 @@ async def test_dsm_s3_to_datcore(

@pytest.mark.skipif(not ANY_DATCORE_TOKENS)
async def test_dsm_datcore_to_local(
postgres_service_url,
dsm_fixture,
mock_files_factory,
datcore_structured_testbucket,
datcore_structured_testbucket,
tmp_path: Path,
user_id: int,
bucket_name: str,
):
Expand All @@ -347,20 +329,15 @@ async def test_dsm_datcore_to_local(

@pytest.mark.skipif(not ANY_DATCORE_TOKENS)
async def test_dsm_datcore_to_S3(
postgres_service_url,
s3_client,
file_meta_data: FileMetaData,
tmp_file: Path,
dsm_fixture,
mock_files_factory,
datcore_structured_testbucket,
user_id: int,
bucket_name: str,
):

# create temporary file
tmp_file = mock_files_factory(1)[0]
dest_fmd = _create_file_meta_for_s3(
postgres_service_url, s3_client, tmp_file, user_id
)
dest_fmd = file_meta_data
user_id = dest_fmd.user_id
dest_uuid = dest_fmd.file_uuid

Expand Down Expand Up @@ -403,10 +380,9 @@ async def test_dsm_datcore_to_S3(

@pytest.mark.skipif(not ANY_DATCORE_TOKENS)
async def test_copy_datcore(
postgres_service_url,
s3_client,
file_meta_data: FileMetaData,
tmp_file: Path,
dsm_fixture,
mock_files_factory,
datcore_structured_testbucket,
user_id: int,
bucket_name: str,
Expand All @@ -420,10 +396,7 @@ async def test_copy_datcore(
assert len(data) == 3

# create temporary file and upload to s3
tmp_file = mock_files_factory(1)[0]
fmd = _create_file_meta_for_s3(
postgres_service_url, s3_client, tmp_file, user_id, bucket_name
)
fmd = file_meta_data

up_url = await dsm.upload_link(fmd.user_id, fmd.file_uuid)
with io.open(tmp_file, "rb") as fp:
Expand Down

0 comments on commit 0fcd5b7

Please sign in to comment.