Skip to content

Commit

Permalink
Fix CI sync script
Browse files Browse the repository at this point in the history
  • Loading branch information
pd0wm committed Jan 23, 2020
1 parent e2c7847 commit 09283f4
Showing 1 changed file with 52 additions and 29 deletions.
81 changes: 52 additions & 29 deletions selfdrive/test/update_ci_routes.py
Original file line number Diff line number Diff line change
@@ -1,49 +1,72 @@
#!/usr/bin/env python3
"""Sync CI test routes into the public openpilotci Azure container."""
import subprocess

from azure.storage.blob import BlockBlobService

from common.basedir import BASEDIR
from selfdrive.test.test_car_models import routes as test_car_models_routes, non_public_routes
from selfdrive.test.process_replay.test_processes import segments as replay_segments
from xx.chffr.lib import azureutil
from xx.chffr.lib.storage import upload_dir_serial, download_dir_tpe, key_prefix_exists
from xx.chffr.lib.storage import _DATA_ACCOUNT_PRODUCTION, _DATA_ACCOUNT_CI, _DATA_BUCKET_PRODUCTION, _DATA_BUCKET_CI

# (account, bucket) pairs to try as copy sources, in priority order.
SOURCES = [
  (_DATA_ACCOUNT_PRODUCTION, _DATA_BUCKET_PRODUCTION),
  (_DATA_ACCOUNT_PRODUCTION, "preserve"),
]

# SAS tokens: one write token for the destination container, one read token per source.
DEST_KEY = azureutil.get_user_token(_DATA_ACCOUNT_CI, "openpilotci")
SOURCE_KEYS = [azureutil.get_user_token(account, bucket) for account, bucket in SOURCES]
SERVICE = BlockBlobService(_DATA_ACCOUNT_CI, sas_token=DEST_KEY)

def sync_to_ci_public(route):
  """Copy one route's data into the public openpilotci container.

  Args:
    route: route name in "dongle_id|timestamp" form.

  Returns:
    True if the route is already synced or a copy from any source
    succeeds; False if every source fails.
  """
  print(f"Uploading {route}")
  key_prefix = route.replace('|', '/')

  # Skip routes already present in the destination container.
  if next(azureutil.list_all_blobs(SERVICE, "openpilotci", prefix=key_prefix), None) is not None:
    print("Already synced")
    return True

  # Try each source bucket in order until one copy succeeds.
  for (source_account, source_bucket), source_key in zip(SOURCES, SOURCE_KEYS):
    print(f"Trying {source_account}/{source_bucket}")
    cmd = [
      f"{BASEDIR}/external/bin/azcopy",
      "copy",
      "https://{}.blob.core.windows.net/{}/{}?{}".format(source_account, source_bucket, key_prefix, source_key),
      "https://{}.blob.core.windows.net/{}?{}".format(_DATA_ACCOUNT_CI, "openpilotci", DEST_KEY),
      "--recursive=true",
      "--overwrite=false",
    ]

    # subprocess.call returns the exit code and never raises CalledProcessError
    # (only check_call does), so the original `except` clause was unreachable;
    # report failure from the return code instead.
    result = subprocess.call(cmd, stdout=subprocess.DEVNULL)
    if result == 0:
      print("Success")
      return True
    print("Failed")

  return False

if __name__ == "__main__":
  # Collect routes that could not be copied from any source so the CI job
  # can report them at the end instead of stopping at the first failure.
  failed_routes = []

  # sync process replay routes (segment names are "route--segnum")
  for segment in replay_segments:
    route_name, _ = segment.rsplit('--', 1)
    if not sync_to_ci_public(route_name):
      failed_routes.append(route_name)

  # sync test_car_models routes, skipping routes that must stay private
  for route in test_car_models_routes:
    if route not in non_public_routes:
      if not sync_to_ci_public(route):
        failed_routes.append(route)

  if len(failed_routes):
    print("failed routes:")
    print(failed_routes)

0 comments on commit 09283f4

Please sign in to comment.