process replay: regen in parallel (#24628)
* regen in parallel
* prefixes
* clean regen
* clean output
* tqdm loc
* del swp file
* add routes back
* cleanup
* disable tqdm
* unique dirs
* unique dirs
* outdir in regen_all
* formatting when played from other dirs
* prefix dongle id
* local disable_tqdm
* formatting
* bug fix
* dont spam fakedata
* 16 char fake dongle ids
* formatting
* formatting
* more descriptive dongle
* fix azure path
* couple more fixes
* handle failures nicely
Co-authored-by: Adeeb Shihadeh <adeebshihadeh@gmail.com>
old-commit-hash: 397da56c85
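The bullets above amount to two isolation tricks that make parallel regen safe: every worker runs inside its own OpenpilotPrefix so concurrently running processes don't collide, and every worker writes into its own 16-character fake dongle directory ('regen' plus 11 random hex characters) under FAKEDATA so uploads don't spam a shared fakedata dir. As a rough sketch of the prefix idea (this is an assumption about how the helper in selfdrive/test/process_replay/helpers works; its internals are not part of this diff):

# Hypothetical sketch, not the real OpenpilotPrefix: hand each process a
# unique namespace so parallel regens don't stomp on each other's state.
import os
import random
import shutil
import string

class PrefixSketch:
  def __enter__(self):
    # assumption: isolation hinges on pointing OPENPILOT_PREFIX at a random name
    self.prefix = ''.join(random.choice(string.hexdigits) for _ in range(15))
    os.environ['OPENPILOT_PREFIX'] = self.prefix
    self.shm_path = os.path.join('/dev/shm', self.prefix)
    os.makedirs(self.shm_path, exist_ok=True)
    return self

  def __exit__(self, exc_type, exc_val, exc_tb):
    # tear down the per-process scratch dir and env var on exit
    shutil.rmtree(self.shm_path, ignore_errors=True)
    os.environ.pop('OPENPILOT_PREFIX', None)
    return False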
parent 1f5667285b
commit 4171e45e9c
4 changed files with 48 additions and 33 deletions
@@ -1,22 +1,36 @@
 #!/usr/bin/env python3
+import argparse
+import concurrent.futures
+import os
+import random
+from tqdm import tqdm
+
+from selfdrive.test.process_replay.helpers import OpenpilotPrefix
 from selfdrive.test.process_replay.regen import regen_and_save
-from selfdrive.test.process_replay.test_processes import original_segments as segments
+from selfdrive.test.process_replay.test_processes import FAKEDATA, original_segments as segments
 
-if __name__ == "__main__":
-  new_segments = []
-  for segment in segments:
+def regen_job(segment):
+  with OpenpilotPrefix():
     route = segment[1].rsplit('--', 1)[0]
     sidx = int(segment[1].rsplit('--', 1)[1])
-    relr = regen_and_save(route, sidx, upload=True, use_route_meta=False)
+    print("Regen", route, sidx)
+    fake_dongle_id = 'regen' + ''.join(random.choice('0123456789ABCDEF') for i in range(11))
+    try:
+      relr = regen_and_save(route, sidx, upload=True, use_route_meta=False, outdir=os.path.join(FAKEDATA, fake_dongle_id), disable_tqdm=True)
+      relr = '|'.join(relr.split('/')[-2:])
+      return f'  ("{segment[0]}", "{relr}"), '
+    except Exception as e:
+      return f"  {segment} failed: {str(e)}"
 
-    print("\n\n", "*"*30, "\n\n")
-    print("New route:", relr, "\n")
-    relr = relr.replace('/', '|')
-    new_segments.append(f'  ("{segment[0]}", "{relr}"), ')
-    print()
-  print()
-  print()
-  print('COPY THIS INTO test_processes.py')
-  for seg in new_segments:
-    print(seg)
+if __name__ == "__main__":
+  parser = argparse.ArgumentParser(description="Generate new segments from old ones")
+  parser.add_argument("-j", "--jobs", type=int, default=1)
+  args = parser.parse_args()
+
+  with concurrent.futures.ProcessPoolExecutor(max_workers=args.jobs) as pool:
+    p = list(pool.map(regen_job, segments))
+  msg = "Copy these new segments into test_processes.py:"
+  for seg in tqdm(p, desc="Generating segments"):
+    msg += "\n" + str(seg)
+  print(msg)
 
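Two design choices in the new script are worth noting. regen_job never raises: a failing segment is caught inside the worker and reported as a "... failed: ..." string, so one bad route can't kill the pool and the final printout still covers every segment. And because workers return strings instead of appending to a shared list, pool.map can collect results across process boundaries. A self-contained sketch of that pattern, with a toy job standing in for regen_and_save:

#!/usr/bin/env python3
# Sketch of the fan-out pattern above: each worker returns either a result
# line or a failure message, so pool.map always yields one line per job.
import concurrent.futures

def job(n):
  try:
    if n == 2:
      raise ValueError("flaky segment")
    return f"  segment {n}: ok"
  except Exception as e:
    return f"  segment {n} failed: {e}"

if __name__ == "__main__":
  # workers must be module-level functions so they can be pickled
  with concurrent.futures.ProcessPoolExecutor(max_workers=4) as pool:
    for line in pool.map(job, range(5)):
      print(line)

In the real script the worker count comes from the new -j flag, e.g. python regen_all.py -j 4 (script name assumed from the "outdir in regen_all" bullet above).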