update azure-storage-blob (#29411)

Co-authored-by: Cameron Clough <cameronjclough@gmail.com>

old-commit-hash: c9e227a9c0
Authored by Adeeb Shihadeh, committed via GitHub (2 years ago)
parent 8960f76597
commit 34edef6a60
Changed files:
  poetry.lock                         (4 changed lines)
  pyproject.toml                      (3 changed lines)
  selfdrive/test/openpilotci.py       (79 changed lines)
  selfdrive/test/update_ci_routes.py  (44 changed lines)

poetry.lock (generated)

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:022ad1dc837484b37034c71bd6855e317a61039e96a4b1d7c7b6ea80c6faf893
-size 371531
+oid sha256:d819ba33852e8be375cc0914a1ac5b344df6fc200441f8daf45e3ab3c8787a88
+size 376782

pyproject.toml

@@ -96,7 +96,8 @@ sconscontrib = {git = "https://github.com/SCons/scons-contrib.git"}
 [tool.poetry.group.dev.dependencies]
 av = "*"
-azure-storage-blob = "~2.1"
+azure-identity = "*"
+azure-storage-blob = "*"
 breathe = "*"
 carla = { url = "https://github.com/commaai/carla/releases/download/3.11.4/carla-0.9.14-cp311-cp311-linux_x86_64.whl", platform = "linux", markers = "platform_machine == 'x86_64'" }
 coverage = "*"

selfdrive/test/openpilotci.py

@@ -1,43 +1,64 @@
 #!/usr/bin/env python3
 import os
 import sys
-import subprocess
+from datetime import datetime, timedelta
+from functools import lru_cache
+from pathlib import Path
+from typing import IO, Union
 
-BASE_URL = "https://commadataci.blob.core.windows.net/openpilotci/"
-TOKEN_PATH = "/data/azure_token"
+DATA_CI_ACCOUNT = "commadataci"
+DATA_CI_ACCOUNT_URL = f"https://{DATA_CI_ACCOUNT}.blob.core.windows.net"
+DATA_CI_CONTAINER = "openpilotci"
+BASE_URL = f"{DATA_CI_ACCOUNT_URL}/{DATA_CI_CONTAINER}/"
+
+TOKEN_PATH = Path("/data/azure_token")
 
-def get_url(route_name, segment_num, log_type="rlog"):
+
+def get_url(route_name: str, segment_num, log_type="rlog") -> str:
   ext = "hevc" if log_type.endswith('camera') else "bz2"
   return BASE_URL + f"{route_name.replace('|', '/')}/{segment_num}/{log_type}.{ext}"
 
-def get_sas_token():
-  sas_token = os.environ.get("AZURE_TOKEN", None)
-  if os.path.isfile(TOKEN_PATH):
-    sas_token = open(TOKEN_PATH).read().strip()
-
-  if sas_token is None:
-    sas_token = subprocess.check_output("az storage container generate-sas --account-name commadataci --name openpilotci \
-                                         --https-only --permissions lrw --expiry $(date -u '+%Y-%m-%dT%H:%M:%SZ' -d '+1 hour') \
-                                         --auth-mode login --as-user --output tsv", shell=True).decode().strip("\n")
-
-  return sas_token
+
+@lru_cache
+def get_azure_credential():
+  if "AZURE_TOKEN" in os.environ:
+    return os.environ["AZURE_TOKEN"]
+  elif TOKEN_PATH.is_file():
+    return TOKEN_PATH.read_text().strip()
+  else:
+    from azure.identity import AzureCliCredential
+    return AzureCliCredential()
+
+
+@lru_cache
+def get_container_sas(account_name: str, container_name: str):
+  from azure.storage.blob import BlobServiceClient, ContainerSasPermissions, generate_container_sas
+  start_time = datetime.utcnow()
+  expiry_time = start_time + timedelta(hours=1)
+  blob_service = BlobServiceClient(
+    account_url=f"https://{account_name}.blob.core.windows.net",
+    credential=get_azure_credential(),
+  )
+  return generate_container_sas(
+    account_name,
+    container_name,
+    user_delegation_key=blob_service.get_user_delegation_key(start_time, expiry_time),
+    permission=ContainerSasPermissions(read=True, write=True, list=True),
+    expiry=expiry_time,
+  )
 
-def upload_bytes(data, name):
-  from azure.storage.blob import BlockBlobService
-  service = BlockBlobService(account_name="commadataci", sas_token=get_sas_token())
-  service.create_blob_from_bytes("openpilotci", name, data)
-  return BASE_URL + name
 
-def upload_file(path, name):
-  from azure.storage.blob import BlockBlobService
-  service = BlockBlobService(account_name="commadataci", sas_token=get_sas_token())
-  service.create_blob_from_path("openpilotci", name, path)
-  return BASE_URL + name
+def upload_bytes(data: Union[bytes, IO], blob_name: str) -> str:
+  from azure.storage.blob import BlobClient
+  blob = BlobClient(
+    account_url=DATA_CI_ACCOUNT_URL,
+    container_name=DATA_CI_CONTAINER,
+    blob_name=blob_name,
+    credential=get_azure_credential(),
+  )
+  blob.upload_blob(data)
+  return BASE_URL + blob_name
+
+
+def upload_file(path: Union[str, os.PathLike], blob_name: str) -> str:
+  with open(path, "rb") as f:
+    return upload_bytes(f, blob_name)
 
 if __name__ == "__main__":
   for f in sys.argv[1:]:
     name = os.path.basename(f)
     url = upload_file(f, name)
     print(url)

selfdrive/test/update_ci_routes.py

@@ -1,32 +1,38 @@
 #!/usr/bin/env python3
-from functools import lru_cache
-import sys
 import subprocess
+import sys
+from functools import lru_cache
+from typing import Iterable, Optional
 
+from azure.storage.blob import ContainerClient
 from tqdm import tqdm
-from azure.storage.blob import BlockBlobService
 
 from openpilot.selfdrive.car.tests.routes import routes as test_car_models_routes
 from openpilot.selfdrive.locationd.test.test_laikad import UBLOX_TEST_ROUTE, QCOM_TEST_ROUTE
 from openpilot.selfdrive.test.process_replay.test_processes import source_segments as replay_segments
-from xx.chffr.lib import azureutil
-from xx.chffr.lib.storage import _DATA_ACCOUNT_PRODUCTION, _DATA_ACCOUNT_CI, _DATA_BUCKET_PRODUCTION
+from openpilot.selfdrive.test.openpilotci import (DATA_CI_ACCOUNT, DATA_CI_ACCOUNT_URL, DATA_CI_CONTAINER,
+                                                  get_azure_credential, get_container_sas)
+
+DATA_PROD_ACCOUNT = "commadata2"
+DATA_PROD_CONTAINER = "commadata2"
 
 SOURCES = [
-  (_DATA_ACCOUNT_PRODUCTION, _DATA_BUCKET_PRODUCTION),
-  (_DATA_ACCOUNT_CI, "commadataci"),
+  (DATA_PROD_ACCOUNT, DATA_PROD_CONTAINER),
+  (DATA_CI_ACCOUNT, DATA_CI_CONTAINER),
 ]
 
+
 @lru_cache
 def get_azure_keys():
-  dest_key = azureutil.get_user_token(_DATA_ACCOUNT_CI, "openpilotci")
-  source_keys = [azureutil.get_user_token(account, bucket) for account, bucket in SOURCES]
-  service = BlockBlobService(_DATA_ACCOUNT_CI, sas_token=dest_key)
-  return dest_key, source_keys, service
+  dest_container = ContainerClient(DATA_CI_ACCOUNT_URL, DATA_CI_CONTAINER, credential=get_azure_credential())
+  dest_key = get_container_sas(DATA_CI_ACCOUNT, DATA_CI_CONTAINER)
+  source_keys = [get_container_sas(*s) for s in SOURCES]
+  return dest_container, dest_key, source_keys
 
 
-def upload_route(path, exclude_patterns=None):
-  dest_key, _, _ = get_azure_keys()
+def upload_route(path: str, exclude_patterns: Optional[Iterable[str]] = None) -> None:
+  # TODO: use azure-storage-blob instead of azcopy, simplifies auth
+  dest_key = get_container_sas(DATA_CI_ACCOUNT, DATA_CI_CONTAINER)
   if exclude_patterns is None:
     exclude_patterns = ['*/dcamera.hevc']

@@ -37,28 +43,30 @@ def upload_route(path, exclude_patterns=None):
     "azcopy",
     "copy",
     f"{path}/*",
-    f"https://{_DATA_ACCOUNT_CI}.blob.core.windows.net/openpilotci/{destpath}?{dest_key}",
+    f"https://{DATA_CI_ACCOUNT}.blob.core.windows.net/{DATA_CI_CONTAINER}/{destpath}?{dest_key}",
     "--recursive=false",
     "--overwrite=false",
   ] + [f"--exclude-pattern={p}" for p in exclude_patterns]
   subprocess.check_call(cmd)
 
 
-def sync_to_ci_public(route):
-  dest_key, source_keys, service = get_azure_keys()
+def sync_to_ci_public(route: str) -> bool:
+  dest_container, dest_key, source_keys = get_azure_keys()
 
   key_prefix = route.replace('|', '/')
   dongle_id = key_prefix.split('/')[0]
 
-  if next(azureutil.list_all_blobs(service, "openpilotci", prefix=key_prefix), None) is not None:
+  if next(dest_container.list_blob_names(name_starts_with=key_prefix), None) is not None:
     return True
 
   print(f"Uploading {route}")
   for (source_account, source_bucket), source_key in zip(SOURCES, source_keys, strict=True):
     # assumes az login has been run
     print(f"Trying {source_account}/{source_bucket}")
     cmd = [
       "azcopy",
       "copy",
       f"https://{source_account}.blob.core.windows.net/{source_bucket}/{key_prefix}?{source_key}",
-      f"https://{_DATA_ACCOUNT_CI}.blob.core.windows.net/openpilotci/{dongle_id}?{dest_key}",
+      f"https://{DATA_CI_ACCOUNT}.blob.core.windows.net/{DATA_CI_CONTAINER}/{dongle_id}?{dest_key}",
       "--recursive=true",
       "--overwrite=false",
       "--exclude-pattern=*/dcamera.hevc",
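The TODO added in upload_route points at replacing azcopy with azure-storage-blob for the copy itself. A rough sketch of what a server-side copy could look like with the 12.x SDK, assuming `az login` has been run and using an illustrative route prefix (a SAS on the source URL is still needed so the copy can read across accounts):

# Sketch for the azcopy-replacement TODO, not part of this commit.
# Assumes `az login` has been run; account names are from SOURCES above, the route prefix is illustrative.
from azure.identity import AzureCliCredential
from azure.storage.blob import ContainerClient

from openpilot.selfdrive.test.openpilotci import get_container_sas

SRC_ACCOUNT, SRC_CONTAINER = "commadata2", "commadata2"
SRC_URL = f"https://{SRC_ACCOUNT}.blob.core.windows.net/{SRC_CONTAINER}"
DST_URL = "https://commadataci.blob.core.windows.net/openpilotci"

cred = AzureCliCredential()
src = ContainerClient.from_container_url(SRC_URL, credential=cred)
dst = ContainerClient.from_container_url(DST_URL, credential=cred)

key_prefix = "0982d79ebb0de295/2021-01-04--17-13-21"  # illustrative route prefix
src_sas = get_container_sas(SRC_ACCOUNT, SRC_CONTAINER)  # lets the copy read the source blobs

for name in src.list_blob_names(name_starts_with=key_prefix):
  # server-side, asynchronous copy of each blob under the prefix
  dst.get_blob_client(name).start_copy_from_url(f"{SRC_URL}/{name}?{src_sas}")

The azcopy behavior this would replace (skipping dcamera.hevc, never overwriting existing blobs) would still have to be replicated on top of such a loop.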
