process replay: regen in parallel (#24628)
	
		
	
				
					
				
			* regen in parallel * prefixes * clean regen * clean output * tqdm loc * del swp file * add routes back * cleanup * disable tqdm * unique dirs * unique dirs * outdir in regen_all * formatting when played from other dirs * prefix dongle id * local disable_tqdm * formatting * bug fix * dont spam fakedata * 16 char fake dongle ids * formatting * formatting * more descriptive dongle * fix azure path * couple more fixes * handle failures nicely Co-authored-by: Adeeb Shihadeh <adeebshihadeh@gmail.com>pull/83/head
							parent
							
								
									7e5eec2e4f
								
							
						
					
					
						commit
						397da56c85
					
				
				 4 changed files with 48 additions and 33 deletions
			
			
		| @ -1,22 +1,36 @@ | |||||||
| #!/usr/bin/env python3 | #!/usr/bin/env python3 | ||||||
|  | import argparse | ||||||
|  | import concurrent.futures | ||||||
|  | import os | ||||||
|  | import random | ||||||
|  | from tqdm import tqdm | ||||||
|  | 
 | ||||||
|  | from selfdrive.test.process_replay.helpers import OpenpilotPrefix | ||||||
| from selfdrive.test.process_replay.regen import regen_and_save | from selfdrive.test.process_replay.regen import regen_and_save | ||||||
| from selfdrive.test.process_replay.test_processes import original_segments as segments | from selfdrive.test.process_replay.test_processes import FAKEDATA, original_segments as segments | ||||||
| 
 | 
 | ||||||
def regen_job(segment):
  """Regenerate one segment inside an isolated openpilot prefix.

  segment: a (process_name, "route--segment_index") pair, as listed in
  test_processes.py.

  Returns a ready-to-paste test_processes.py entry line on success, or a
  human-readable failure message on any exception — failures are reported
  inline rather than raised, so one bad segment cannot abort the whole
  parallel run.
  """
  with OpenpilotPrefix():
    # Split "route--index" once instead of calling rsplit twice.
    route, sidx_str = segment[1].rsplit('--', 1)
    sidx = int(sidx_str)
    # 'regen' + 11 hex chars -> 16-char fake dongle id; randomized per job so
    # parallel workers don't collide in the same FAKEDATA subdirectory.
    fake_dongle_id = 'regen' + ''.join(random.choices('0123456789ABCDEF', k=11))
    try:
      relr = regen_and_save(route, sidx, upload=True, use_route_meta=False,
                            outdir=os.path.join(FAKEDATA, fake_dongle_id), disable_tqdm=True)
      # Keep only "dongle_id|segment_name" from the returned path.
      relr = '|'.join(relr.split('/')[-2:])
      return f'  ("{segment[0]}", "{relr}"), '
    except Exception as e:
      return f"  {segment} failed: {str(e)}"
if __name__ == "__main__":
  parser = argparse.ArgumentParser(description="Generate new segments from old ones")
  # Number of parallel regen worker processes.
  parser.add_argument("-j", "--jobs", type=int, default=1)
  args = parser.parse_args()

  with concurrent.futures.ProcessPoolExecutor(max_workers=args.jobs) as pool:
    # Wrap the executor's lazy result iterator in tqdm so the bar advances as
    # each segment actually finishes. (Iterating tqdm over an already-collected
    # list would render an instantly-complete, useless progress bar.)
    results = list(tqdm(pool.map(regen_job, segments),
                        desc="Generating segments", total=len(segments)))

  # Emit one copy-pasteable block; join avoids quadratic string concatenation.
  msg = "\n".join(["Copy these new segments into test_processes.py:"] +
                  [str(seg) for seg in results])
  print()
  print()
  print(msg)
					Loading…
					
					
				
		Reference in new issue