diff --git a/dandi/exceptions.py b/dandi/exceptions.py
index cd2379cb4..1a6c6111f 100644
--- a/dandi/exceptions.py
+++ b/dandi/exceptions.py
@@ -80,3 +80,7 @@ class UnknownAssetError(ValueError):
 
 class HTTP404Error(requests.HTTPError):
     pass
+
+
+class UploadError(Exception):
+    pass
diff --git a/dandi/tests/test_upload.py b/dandi/tests/test_upload.py
index cabed70dd..6fc90b3e9 100644
--- a/dandi/tests/test_upload.py
+++ b/dandi/tests/test_upload.py
@@ -15,7 +15,7 @@
 from ..dandiapi import AssetType, RemoteBlobAsset, RemoteZarrAsset
 from ..dandiset import APIDandiset
 from ..download import download
-from ..exceptions import NotFoundError
+from ..exceptions import NotFoundError, UploadError
 from ..files import LocalFileAsset
 from ..pynwb_utils import make_nwb_file
 from ..utils import list_paths, yaml_dump
@@ -121,7 +121,8 @@ def test_upload_extant_bad_existing(
     mocker: MockerFixture, text_dandiset: SampleDandiset
 ) -> None:
     iter_upload_spy = mocker.spy(LocalFileAsset, "iter_upload")
-    text_dandiset.upload(existing="foobar")
+    with pytest.raises(ValueError):
+        text_dandiset.upload(existing="foobar")
     iter_upload_spy.assert_not_called()
 
 
@@ -185,7 +186,8 @@ def test_upload_bids_invalid(
     mocker: MockerFixture, bids_dandiset_invalid: SampleDandiset
 ) -> None:
     iter_upload_spy = mocker.spy(LocalFileAsset, "iter_upload")
-    bids_dandiset_invalid.upload(existing="force")
+    with pytest.raises(UploadError):
+        bids_dandiset_invalid.upload(existing="force")
     iter_upload_spy.assert_not_called()
     # Does validation ignoring work?
     bids_dandiset_invalid.upload(existing="force", validation="ignore")
@@ -265,7 +267,8 @@ def test_upload_invalid_metadata(
         ),
         **simple1_nwb_metadata,
     )
-    new_dandiset.upload()
+    with pytest.raises(UploadError):
+        new_dandiset.upload()
     with pytest.raises(NotFoundError):
         new_dandiset.dandiset.get_asset_by_path("broken.nwb")
 
diff --git a/dandi/upload.py b/dandi/upload.py
index 29be508b3..78ffde4f7 100644
--- a/dandi/upload.py
+++ b/dandi/upload.py
@@ -7,11 +7,12 @@
 from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Set, Tuple, Union
 
 import click
+from packaging.version import Version
 
-from . import lgr
+from . import __version__, lgr
 from .consts import DRAFT, dandiset_identifier_regex, dandiset_metadata_file
 from .dandiapi import RemoteAsset
-from .exceptions import NotFoundError
+from .exceptions import NotFoundError, UploadError
 from .files import (
     DandiFile,
     DandisetMetadataFile,
@@ -109,6 +110,9 @@ def upload(
         lambda: {"size": 0, "errors": []}
     )
 
+    upload_err: Optional[Exception] = None
+    validate_ok = True
+
     # TODO: we might want to always yield a full record so no field is not
     # provided to pyout to cause it to halt
     def process_path(dfile: DandiFile) -> Iterator[dict]:
@@ -123,18 +127,17 @@ def process_path(dfile: DandiFile) -> Iterator[dict]:
         dict
           Records for pyout
         """
+        nonlocal upload_err, validate_ok
         strpath = str(dfile.filepath)
        try:
             if not isinstance(dfile, LocalDirectoryAsset):
                 try:
                     yield {"size": dfile.size}
                 except FileNotFoundError:
-                    yield skip_file("ERROR: File not found")
-                    return
+                    raise UploadError("File not found")
                 except Exception as exc:
                     # without limiting [:50] it might cause some pyout indigestion
-                    yield skip_file("ERROR: %s" % str(exc)[:50])
-                    return
+                    raise UploadError(str(exc)[:50])
 
             #
             # Validate first, so we do not bother server at all if not kosher
@@ -157,8 +160,8 @@ def process_path(dfile: DandiFile) -> Iterator[dict]:
                         )
                         for i, e in enumerate(validation_errors, start=1):
                             lgr.warning(" Error %d: %s", i, e)
-                        yield skip_file("failed validation")
-                        return
+                        validate_ok = False
+                        raise UploadError("failed validation")
                 else:
                     yield {"status": "validated"}
             else:
@@ -196,8 +199,7 @@ def process_path(dfile: DandiFile) -> Iterator[dict]:
             try:
                 file_etag = dfile.get_digest()
             except Exception as exc:
-                yield skip_file("failed to compute digest: %s" % str(exc))
-                return
+                raise UploadError("failed to compute digest: %s" % str(exc))
 
             try:
                 extant = remote_dandiset.get_asset_by_path(dfile.path)
@@ -228,8 +230,7 @@ def process_path(dfile: DandiFile) -> Iterator[dict]:
                     digest=file_etag, ignore_errors=allow_any_path
                 ).json_dict()
             except Exception as e:
-                yield skip_file("failed to extract metadata: %s" % str(e))
-                return
+                raise UploadError("failed to extract metadata: %s" % str(e))
 
             #
             # Upload file
@@ -253,6 +254,8 @@ def process_path(dfile: DandiFile) -> Iterator[dict]:
             yield {"status": "done"}
 
         except Exception as exc:
+            if upload_err is None:
+                upload_err = exc
             if devel_debug:
                 raise
             lgr.exception("Error uploading %s:", strpath)
@@ -260,7 +263,7 @@ def process_path(dfile: DandiFile) -> Iterator[dict]:
             # user-meaningful message
             message = str(exc)
             uploaded_paths[strpath]["errors"].append(message)
-            yield {"status": "ERROR", "message": message}
+            yield error_file(message)
         finally:
             process_paths.remove(strpath)
 
@@ -314,9 +317,31 @@ def upload_agg(*ignored: Any) -> str:
                 else:
                     rec[tuple(rec_fields[1:])] = process_path(dfile)
             except ValueError as exc:
-                rec.update(skip_file(exc))
+                rec.update(error_file(exc))
             out(rec)
 
+    if not validate_ok:
+        lgr.warning(
+            "One or more assets failed validation. Consult the logfile for"
+            " details."
+        )
+
+    if upload_err is not None:
+        try:
+            import etelemetry
+
+            latest_version = etelemetry.get_project("dandi/dandi-cli")["version"]
+        except Exception:
+            pass
+        else:
+            if Version(latest_version) > Version(__version__):
+                lgr.warning(
+                    "Upload failed, and you are not using the latest"
+                    " version of dandi.  We suggest upgrading dandi to v%s"
+                    " and trying again.",
+                    latest_version,
+                )
+        raise upload_err
+
     if sync:
         relpaths: List[str] = []
         for p in paths:
@@ -390,3 +415,7 @@ def check_replace_asset(
 
 def skip_file(msg: Any) -> Dict[str, str]:
     return {"status": "skipped", "message": str(msg)}
+
+
+def error_file(msg: Any) -> Dict[str, str]:
+    return {"status": "ERROR", "message": str(msg)}