Temporarily disable s3 log file processing
danlamanna committed May 23, 2023
1 parent 1328fc7 commit e286ef8
Showing 2 changed files with 3 additions and 0 deletions.
1 change: 1 addition & 0 deletions dandiapi/analytics/tasks/__init__.py
@@ -58,6 +58,7 @@ def process_s3_log_file_task(bucket: LogBucket, s3_log_key: str) -> None:
     asset blobs. Prevents duplicate processing with a unique constraint on the ProcessedS3Log name
     and embargoed fields.
     """
+    return
     assert bucket in [
         settings.DANDI_DANDISETS_LOG_BUCKET_NAME,
         settings.DANDI_DANDISETS_EMBARGO_LOG_BUCKET_NAME,
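For context, a minimal runnable sketch (not the project's actual code; the names and body below are hypothetical stand-ins) of the disable pattern used here: an unconditional return placed right after the docstring makes the task a no-op without deleting its body.

# Illustrative sketch only; not taken from dandi-archive.
def process_s3_log_file_task(bucket: str, s3_log_key: str) -> None:
    """Stand-in for the real task; the signature mirrors the one in the diff."""
    return  # temporarily disabled: nothing below this line runs
    print(f'processing {s3_log_key} from {bucket}')  # unreachable while disabled


process_s3_log_file_task('dandi-logs', '2023-05-23-00-00-00-EXAMPLE')  # returns immediately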
2 changes: 2 additions & 0 deletions dandiapi/analytics/tests/test_download_counts.py
@@ -49,6 +49,7 @@ def s3_log_file(s3_log_bucket, asset_blob):
 
 
 @pytest.mark.django_db
+@pytest.mark.skip(reason='Temporarily disabled')
 def test_processing_s3_log_files(s3_log_bucket, s3_log_file, asset_blob):
     collect_s3_log_records_task(s3_log_bucket)
     asset_blob.refresh_from_db()
@@ -58,6 +59,7 @@ def test_processing_s3_log_files(s3_log_bucket, s3_log_file, asset_blob):
 
 
 @pytest.mark.django_db
+@pytest.mark.skip(reason='Temporarily disabled')
 def test_processing_s3_log_files_idempotent(s3_log_bucket, s3_log_file, asset_blob):
     collect_s3_log_records_task(s3_log_bucket)
     # run the task again, it should skip the existing log record
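Likewise, a minimal sketch (not from the repository) of how the pytest.mark.skip marker behaves: the decorated test is still collected, but its body never runs and the reason string is shown in the test report.

# Illustrative sketch only.
import pytest


@pytest.mark.skip(reason='Temporarily disabled')
def test_example():
    # pytest reports this test as skipped; the assertion below is never evaluated
    raise AssertionError('never reached while the skip marker is present')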
