diff --git a/Jenkinsfile b/Jenkinsfile
index 5380d5d698..c42817a556 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -25,6 +25,8 @@ export TEST_DIR=${env.TEST_DIR}
 export SOURCE_DIR=${env.SOURCE_DIR}
 export GIT_BRANCH=${env.GIT_BRANCH}
 export GIT_COMMIT=${env.GIT_COMMIT}
+export CI_ARTIFACTS_TOKEN=${env.CI_ARTIFACTS_TOKEN}
+export GITHUB_COMMENTS_TOKEN=${env.GITHUB_COMMENTS_TOKEN}
 export AZURE_TOKEN='${env.AZURE_TOKEN}'
 # only use 1 thread for tici tests since most require HIL
 export PYTEST_ADDOPTS="-n 0"
@@ -133,6 +135,18 @@ def setupCredentials() {
   ]) {
     env.AZURE_TOKEN = "${AZURE_TOKEN}"
   }
+
+  withCredentials([
+    string(credentialsId: 'ci_artifacts_pat', variable: 'CI_ARTIFACTS_TOKEN'),
+  ]) {
+    env.CI_ARTIFACTS_TOKEN = "${CI_ARTIFACTS_TOKEN}"
+  }
+
+  withCredentials([
+    string(credentialsId: 'post_comments_github_pat', variable: 'GITHUB_COMMENTS_TOKEN'),
+  ]) {
+    env.GITHUB_COMMENTS_TOKEN = "${GITHUB_COMMENTS_TOKEN}"
+  }
 }
diff --git a/selfdrive/test/process_replay/model_replay.py b/selfdrive/test/process_replay/model_replay.py
index 9ee1b6be4e..f88ccab96c 100755
--- a/selfdrive/test/process_replay/model_replay.py
+++ b/selfdrive/test/process_replay/model_replay.py
@@ -3,14 +3,18 @@ import os
 import sys
 from collections import defaultdict
 from typing import Any
+import tempfile
+from itertools import zip_longest
+
+import matplotlib.pyplot as plt
 
-from openpilot.common.git import get_commit
 from openpilot.system.hardware import PC
-from openpilot.tools.lib.openpilotci import BASE_URL, get_url
+from openpilot.tools.lib.openpilotci import get_url
 from openpilot.selfdrive.test.process_replay.compare_logs import compare_logs, format_diff
 from openpilot.selfdrive.test.process_replay.process_replay import get_process_config, replay_process
 from openpilot.tools.lib.framereader import FrameReader
 from openpilot.tools.lib.logreader import LogReader, save_log
+from openpilot.tools.lib.github_utils import GithubUtils
 
 TEST_ROUTE = "2f4452b03ccb98f0|2022-12-03--13-45-30"
 SEGMENT = 6
@@ -19,10 +23,55 @@ MAX_FRAMES = 100 if PC else 600
 NO_MODEL = "NO_MODEL" in os.environ
 SEND_EXTRA_INPUTS = bool(int(os.getenv("SEND_EXTRA_INPUTS", "0")))
 
+DATA_TOKEN = os.getenv("CI_ARTIFACTS_TOKEN","")
+API_TOKEN = os.getenv("GITHUB_COMMENTS_TOKEN","")
+MODEL_REPLAY_BUCKET="model_replay_master"
+GITHUB = GithubUtils(API_TOKEN, DATA_TOKEN)
+
+
+def get_log_fn(test_route):
+  return f"{test_route}_model_tici_master.bz2"
+
+def plot(proposed, master, title, tmp):
+  fig, ax = plt.subplots()
+  ax.plot(list(proposed), label='PROPOSED')
+  ax.plot(list(master), label='MASTER')
+  plt.legend(loc='best')
+  plt.title(title)
+  plt.savefig(f'{tmp}/{title}.png')
+  return title + '.png'
+
+def get_event(logs, event):
+  return (getattr(m, m.which()) for m in filter(lambda m: m.which() == event, logs))
+
+def zl(array, fill):
+  return zip_longest(array, [], fillvalue=fill)
+
+def generate_report(proposed, master, tmp):
+  ModelV2_Plots = zl([
+                      (lambda x: x.velocity.x[0], "velocity.x"),
+                      (lambda x: x.action.desiredCurvature, "desiredCurvature"),
+                      (lambda x: x.leadsV3[0].x[0], "leadsV3.x"),
+                      (lambda x: x.laneLines[1].y[0], "laneLines.y"),
+                      (lambda x: x.meta.disengagePredictions.gasPressProbs[1], "gasPressProbs")
+                    ], "modelV2")
 
-def get_log_fn(ref_commit, test_route):
-  return f"{test_route}_model_tici_{ref_commit}.bz2"
+  return [plot(map(v[0], get_event(proposed, event)), \
+               map(v[0], get_event(master, event)), v[1], tmp) \
+          for v,event in [*ModelV2_Plots]]
 
+def comment_replay_report(proposed, master):
+  with tempfile.TemporaryDirectory() as tmp:
+    PR_BRANCH=os.getenv("GIT_BRANCH","")
+    DATA_BUCKET=f"model_replay_{PR_BRANCH}"
+
+    files = generate_report(proposed, master, tmp)
+
+    GITHUB.comment_images_on_pr("Model Replay Plots",
+                                "commaci-public",
+                                PR_BRANCH,
+                                DATA_BUCKET,
+                                [(x, tmp + '/' + x) for x in files])
 
 def trim_logs_to_max_frames(logs, max_frames, frs_types, include_all_types):
   all_msgs = []
@@ -68,9 +117,8 @@ def model_replay(lr, frs):
 
 
 if __name__ == "__main__":
-  update = "--update" in sys.argv
+  update = "--update" in sys.argv or (os.getenv("GIT_BRANCH", "") == 'master')
   replay_dir = os.path.dirname(os.path.abspath(__file__))
-  ref_commit_fn = os.path.join(replay_dir, "model_replay_ref_commit")
 
   # load logs
   lr = list(LogReader(get_url(TEST_ROUTE, SEGMENT, "rlog.bz2")))
@@ -88,11 +136,9 @@ if __name__ == "__main__":
   # get diff
   failed = False
   if not update:
-    with open(ref_commit_fn) as f:
-      ref_commit = f.read().strip()
-    log_fn = get_log_fn(ref_commit, TEST_ROUTE)
+    log_fn = get_log_fn(TEST_ROUTE)
     try:
-      all_logs = list(LogReader(BASE_URL + log_fn))
+      all_logs = list(LogReader(GITHUB.get_file_url(MODEL_REPLAY_BUCKET, log_fn)))
       cmp_log = []
 
       # logs are ordered based on type: modelV2, drivingModelData, driverStateV2
@@ -134,11 +180,14 @@ if __name__ == "__main__":
             ignore.append(f'modelV2.roadEdges.{i}.{field}')
 
       tolerance = .3 if PC else None
       results: Any = {TEST_ROUTE: {}}
-      log_paths: Any = {TEST_ROUTE: {"models": {'ref': BASE_URL + log_fn, 'new': log_fn}}}
+      log_paths: Any = {TEST_ROUTE: {"models": {'ref': log_fn, 'new': log_fn}}}
       results[TEST_ROUTE]["models"] = compare_logs(cmp_log, log_msgs, tolerance=tolerance, ignore_fields=ignore)
-      diff_short, diff_long, failed = format_diff(results, log_paths, ref_commit)
+      diff_short, diff_long, failed = format_diff(results, log_paths, 'master')
 
       if "CI" in os.environ:
+        if not PC:
+          comment_replay_report(log_msgs, cmp_log)
+          failed = False
         print(diff_long)
         print('-------------\n'*5)
         print(diff_short)
@@ -149,22 +198,13 @@ if __name__ == "__main__":
       failed = True
 
   # upload new refs
-  if (update or failed) and not PC:
-    from openpilot.tools.lib.openpilotci import upload_file
-
+  if update and not PC:
     print("Uploading new refs")
-
-    new_commit = get_commit()
-    log_fn = get_log_fn(new_commit, TEST_ROUTE)
+    log_fn = get_log_fn(TEST_ROUTE)
     save_log(log_fn, log_msgs)
     try:
-      upload_file(log_fn, os.path.basename(log_fn))
+      GITHUB.upload_file(MODEL_REPLAY_BUCKET, os.path.basename(log_fn), log_fn)
     except Exception as e:
       print("failed to upload", e)
 
-    with open(ref_commit_fn, 'w') as f:
-      f.write(str(new_commit))
-
-    print("\n\nNew ref commit: ", new_commit)
-
   sys.exit(int(failed))
diff --git a/selfdrive/test/process_replay/model_replay_ref_commit b/selfdrive/test/process_replay/model_replay_ref_commit
deleted file mode 100644
index bfa63a007c..0000000000
--- a/selfdrive/test/process_replay/model_replay_ref_commit
+++ /dev/null
@@ -1 +0,0 @@
-ef4faa7e90e530ce20c345ee68467c7e1f7e7a14
diff --git a/tools/lib/github_utils.py b/tools/lib/github_utils.py
new file mode 100644
index 0000000000..607fdfdae9
--- /dev/null
+++ b/tools/lib/github_utils.py
@@ -0,0 +1,109 @@
+import base64
+import requests
+from http import HTTPMethod
+
+class GithubUtils:
+  def __init__(self, api_token, data_token, owner='commaai', api_repo='openpilot', data_repo='ci-artifacts'):
+    self.OWNER = owner
+    self.API_REPO = api_repo
+    self.DATA_REPO = data_repo
+    self.API_TOKEN = api_token
+    self.DATA_TOKEN = data_token
+
+  @property
+  def API_ROUTE(self):
f"https://api.github.com/repos/{self.OWNER}/{self.API_REPO}" + + @property + def DATA_ROUTE(self): + return f"https://api.github.com/repos/{self.OWNER}/{self.DATA_REPO}" + + def api_call(self, path, data="", method=HTTPMethod.GET, accept="", data_call=False, raise_on_failure=True): + token = self.DATA_TOKEN if data_call else self.API_TOKEN + if token: + headers = {"Authorization": f"Bearer {self.DATA_TOKEN if data_call else self.API_TOKEN}", \ + "Accept": f"application/vnd.github{accept}+json"} + else: + headers = {} + path = f'{self.DATA_ROUTE if data_call else self.API_ROUTE}/{path}' + r = requests.request(method, path, headers=headers, data=data) + if not r.ok and raise_on_failure: + raise Exception(f"Call to {path} failed with {r.status_code}") + else: + return r + + def upload_file(self, bucket, path, file_name): + with open(path, "rb") as f: + encoded = base64.b64encode(f.read()).decode() + + # check if file already exists + sha = self.get_file_sha(bucket, file_name) + sha = f'"sha":"{sha}",' if sha else '' + + data = f'{{"message":"uploading {file_name}", \ + "branch":"{bucket}", \ + "committer":{{"name":"Vehicle Researcher", "email": "user@comma.ai"}}, \ + {sha} \ + "content":"{encoded}"}}' + github_path = f"contents/{file_name}" + self.api_call(github_path, data=data, method=HTTPMethod.PUT, data_call=True) + + def upload_files(self, bucket, files): + self.create_bucket(bucket) + for file_name,path in files: + self.upload_file(bucket, path, file_name) + + def create_bucket(self, bucket): + if self.get_bucket_sha(bucket): + return + master_sha = self.get_bucket_sha('master') + github_path = "git/refs" + data = f'{{"ref":"refs/heads/{bucket}", "sha":"{master_sha}"}}' + self.api_call(github_path, data=data, method=HTTPMethod.POST, data_call=True) + + def get_bucket_sha(self, bucket): + github_path = f"git/refs/heads/{bucket}" + r = self.api_call(github_path, data_call=True, raise_on_failure=False) + return r.json()['object']['sha'] if r.ok else None + + def get_file_url(self, bucket, file_name): + github_path = f"contents/{file_name}?ref={bucket}" + r = self.api_call(github_path, data_call=True) + return r.json()['download_url'] + + def get_file_sha(self, bucket, file_name): + github_path = f"contents/{file_name}?ref={bucket}" + r = self.api_call(github_path, data_call=True, raise_on_failure=False) + return r.json()['sha'] if r.ok else None + + def get_pr_number(self, pr_branch): + github_path = f"commits/{pr_branch}/pulls" + r = self.api_call(github_path) + return r.json()[0]['number'] + + def comment_on_pr(self, comment, commenter, pr_branch): + pr_number = self.get_pr_number(pr_branch) + data = f'{{"body": "{comment}"}}' + github_path = f'issues/{pr_number}/comments' + r = self.api_call(github_path) + comments = [x['id'] for x in r.json() if x['user']['login'] == commenter] + if comments: + github_path = f'issues/comments/{comments[0]}' + self.api_call(github_path, data=data, method=HTTPMethod.PATCH) + else: + github_path=f'issues/{pr_number}/comments' + self.api_call(github_path, data=data, method=HTTPMethod.POST) + + # upload files to github and comment them on the pr + def comment_images_on_pr(self, title, commenter, pr_branch, bucket, images): + self.upload_files(bucket, images) + table = [f'
+    table = [f'<details><summary>{title}</summary><table>']
+    for i,f in enumerate(images):
+      if not (i % 2):
+        table.append('<tr>')
+      table.append(f'<td><img src=\\"https://raw.githubusercontent.com/{self.OWNER}/{self.DATA_REPO}/{bucket}/{f[0]}\\"></td>')
+      if (i % 2):
+        table.append('</tr>')
+    table.append('</table></details>')
+    table = ''.join(table)
+    self.comment_on_pr(table, commenter, pr_branch)
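
Illustrative usage sketch (not part of the patch): how the GithubUtils helper added above is driven from CI. It assumes the two tokens exported by the Jenkinsfile; the bucket, file names, and local paths below are hypothetical examples.

# sketch only -- assumes the tokens from the Jenkinsfile and the GithubUtils class added in this diff
import os

from openpilot.tools.lib.github_utils import GithubUtils

data_token = os.getenv("CI_ARTIFACTS_TOKEN", "")    # write access to the commaai/ci-artifacts repo
api_token = os.getenv("GITHUB_COMMENTS_TOKEN", "")  # PR-comment access to commaai/openpilot
github = GithubUtils(api_token, data_token)

# upload a reference log into a "bucket" (a branch of ci-artifacts); args are (bucket, local_path, repo_file_name)
github.upload_file("model_replay_master", "example_ref.bz2", "example_ref.bz2")

# read it back later via its raw download URL
ref_url = github.get_file_url("model_replay_master", "example_ref.bz2")

# upload plots to a per-PR bucket and post (or update) a single comment containing them
pr_branch = os.getenv("GIT_BRANCH", "")
github.comment_images_on_pr("Model Replay Plots", "commaci-public", pr_branch,
                            f"model_replay_{pr_branch}",
                            [("velocity.x.png", "/tmp/velocity.x.png")])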