Revert "make test_onroad faster (#34704) (#34783)

* Revert "make test_onroad faster (#34704)"

This reverts commit 470ec46830.

* try this

* add there

* cleanup

* try this

* lil more

* list

* classic

* don't skip test
Adeeb Shihadeh committed by GitHub
parent d15599a30b
commit 821148150b
Changed files:
  conftest.py (4 changed lines)
  pyproject.toml (3 changed lines)
  selfdrive/test/test_onroad.py (83 changed lines)

conftest.py
@@ -77,8 +77,10 @@ def openpilot_class_fixture():

 @pytest.fixture(scope="function")
-def tici_setup_fixture(openpilot_function_fixture):
+def tici_setup_fixture(request, openpilot_function_fixture):
   """Ensure a consistent state for tests on-device. Needs the openpilot function fixture to run first."""
+  if 'skip_tici_setup' in request.keywords:
+    return
   HARDWARE.initialize_hardware()
   HARDWARE.set_power_save(False)
   os.system("pkill -9 -f athena")

pyproject.toml
@@ -143,6 +143,7 @@ asyncio_default_fixture_loop_scope = "function"
 markers = [
   "slow: tests that take awhile to run and can be skipped with -m 'not slow'",
   "tici: tests that are only meant to run on the C3/C3X",
+  "skip_tici_setup: mark test to skip tici setup fixture"
 ]
 testpaths = [
   "common",
@@ -215,7 +216,7 @@ lint.select = [
   "E", "F", "W", "PIE", "C4", "ISC", "A", "B",
   "NPY", # numpy
   "UP", # pyupgrade
-  "TRY302", "TRY400", "TRY401", # try/excepts
+  "TRY203", "TRY400", "TRY401", # try/excepts
   "RUF008", "RUF100",
   "TID251",
   "PLR1704",

selfdrive/test/test_onroad.py
@@ -1,8 +1,6 @@
 import math
 import json
 import os
-import pathlib
-import psutil
 import pytest
 import shutil
 import subprocess
@@ -12,7 +10,7 @@ from collections import Counter, defaultdict
 from pathlib import Path
 from tabulate import tabulate

-from cereal import car, log
+from cereal import log
 import cereal.messaging as messaging
 from cereal.services import SERVICE_LIST
 from openpilot.common.basedir import BASEDIR
@@ -23,6 +21,7 @@ from openpilot.selfdrive.test.helpers import set_params_enabled, release_only
 from openpilot.system.hardware import HARDWARE
 from openpilot.system.hardware.hw import Paths
 from openpilot.tools.lib.logreader import LogReader
+from openpilot.tools.lib.log_time_series import msgs_to_time_series

 """
 CPU usage budget
@@ -112,6 +111,7 @@ def cputime_total(ct):

 @pytest.mark.tici
+@pytest.mark.skip_tici_setup
 class TestOnroad:

   @classmethod
@@ -119,7 +119,8 @@ class TestOnroad:
     if "DEBUG" in os.environ:
       segs = filter(lambda x: os.path.exists(os.path.join(x, "rlog.zst")), Path(Paths.log_root()).iterdir())
       segs = sorted(segs, key=lambda x: x.stat().st_mtime)
-      cls.lr = list(LogReader(os.path.join(segs[-3], "rlog.zst")))
+      cls.lr = list(LogReader(os.path.join(segs[-1], "rlog.zst")))
+      cls.ts = msgs_to_time_series(cls.lr)
       return

     # setup env
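
The DEBUG branch is what makes iterating on these checks cheap: it now picks the newest segment on disk rather than the third-newest and builds the time series up front, so the assertions can be re-run against an existing drive without restarting manager. Usage is something like the following (assuming a standard on-device pytest invocation):

    DEBUG=1 pytest selfdrive/test/test_onroad.py -s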
@@ -140,44 +141,31 @@ class TestOnroad:
       proc = subprocess.Popen(["python", manager_path])

       sm = messaging.SubMaster(['carState'])
-      with Timeout(150, "controls didn't start"):
-        while sm.recv_frame['carState'] < 0:
+      with Timeout(30, "controls didn't start"):
+        while not sm.seen['carState']:
           sm.update(1000)

-      route = None
-      cls.segments = []
-      with Timeout(300, "timed out waiting for logs"):
-        while route is None:
-          route = params.get("CurrentRoute", encoding="utf-8")
-          time.sleep(0.01)
-
-        # test car params caching
-        params.put("CarParamsCache", car.CarParams().to_bytes())
-
-        while len(cls.segments) < 1:
-          segs = set()
-          if Path(Paths.log_root()).exists():
-            segs = set(Path(Paths.log_root()).glob(f"{route}--*"))
-          cls.segments = sorted(segs, key=lambda s: int(str(s).rsplit('--')[-1]))
-          time.sleep(0.01)
+      route = params.get("CurrentRoute", encoding="utf-8")
+      assert route is not None
+      segs = list(Path(Paths.log_root()).glob(f"{route}--*"))
+      assert len(segs) == 1

       time.sleep(TEST_DURATION)
     finally:
-      cls.gpu_procs = {psutil.Process(int(f.name)).name() for f in pathlib.Path('/sys/devices/virtual/kgsl/kgsl/proc/').iterdir() if f.is_dir()}
       if proc is not None:
         proc.terminate()
         if proc.wait(60) is None:
           proc.kill()

-    cls.lrs = [list(LogReader(os.path.join(str(s), "rlog.zst"))) for s in cls.segments]
-
-    cls.lr = list(LogReader(os.path.join(str(cls.segments[0]), "rlog.zst")))
-    cls.log_path = cls.segments[0]
+    cls.lr = list(LogReader(os.path.join(str(segs[0]), "rlog.zst")))
+    st = time.monotonic()
+    cls.ts = msgs_to_time_series(cls.lr)
+    print("msgs to time series", time.monotonic() - st)
+
+    log_path = segs[0]

     cls.log_sizes = {}
-    for f in cls.log_path.iterdir():
+    for f in log_path.iterdir():
       assert f.is_file()
       cls.log_sizes[f] = f.stat().st_size / 1e6
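
Everything below indexes `cls.ts` as `ts[service][field]`. From the usages in this diff, `msgs_to_time_series` evidently returns, per service, an array per field plus a `'t'` array of message times in seconds. A rough sketch of that shape only, not the real implementation in `openpilot.tools.lib.log_time_series`:

    from collections import defaultdict
    import numpy as np

    def msgs_to_time_series_sketch(lr):
      # shape only: {service: {'t': array of seconds, field: array of values}}
      ts = defaultdict(lambda: defaultdict(list))
      for msg in lr:
        which = msg.which()
        ts[which]['t'].append(msg.logMonoTime / 1e9)  # matches the self.ts[...]['t'][0] usage below
        for field, value in getattr(msg, which).to_dict().items():
          ts[which][field].append(value)
      return {s: {k: np.asarray(v) for k, v in d.items()} for s, d in ts.items()}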
@@ -198,7 +186,7 @@ class TestOnroad:
       assert len(msgs) >= math.floor(SERVICE_LIST[s].frequency*int(TEST_DURATION*0.8))

   def test_manager_starting_time(self):
-    st = self.msgs['managerState'][0].logMonoTime / 1e9
+    st = self.ts['managerState']['t'][0]
     assert (st - self.manager_st) < 10, f"manager.py took {st - self.manager_st}s to publish the first 'managerState' msg"

   def test_cloudlog_size(self):
@@ -226,7 +214,7 @@ class TestOnroad:
     result += "-------------- UI Draw Timing ------------------\n"
     result += "------------------------------------------------\n"

-    ts = [m.uiDebug.drawTimeMillis for m in self.msgs['uiDebug']]
+    ts = self.ts['uiDebug']['drawTimeMillis']
     result += f"min {min(ts):.2f}ms\n"
     result += f"max {max(ts):.2f}ms\n"
     result += f"std {np.std(ts):.2f}ms\n"
@@ -309,9 +297,6 @@ class TestOnroad:
     assert np.max(np.diff(mems)) <= 4, "Max memory increase too high"
     assert np.average(np.diff(mems)) <= 1, "Average memory increase too high"

-  def test_gpu_usage(self):
-    assert self.gpu_procs == {"weston", "ui", "camerad", "selfdrive.modeld.modeld", "selfdrive.modeld.dmonitoringmodeld"}
-
   def test_camera_frame_timings(self, subtests):
     # test timing within a single camera
     result = "\n"
@@ -319,7 +304,7 @@ class TestOnroad:
     result += "----------------- SOF Timing ------------------\n"
     result += "------------------------------------------------\n"
     for name in ['roadCameraState', 'wideRoadCameraState', 'driverCameraState']:
-      ts = [getattr(m, m.which()).timestampSof for m in self.lr if name in m.which()]
+      ts = self.ts[name]['timestampSof']
       d_ms = np.diff(ts) / 1e6
       d50 = np.abs(d_ms-50)
       result += f"{name} sof delta vs 50ms: min {min(d50):.2f}ms\n"
@@ -338,33 +323,30 @@ class TestOnroad:
     # sanity checks within a single cam
     for cam in cams:
       with subtests.test(test="frame_skips", camera=cam):
-        cam_log = [getattr(x, x.which()) for x in self.msgs[cam]]
-        assert set(np.diff([x.frameId for x in cam_log])) == {1, }, "Frame ID skips"
+        assert set(np.diff(self.ts[cam]['frameId'])) == {1, }, "Frame ID skips"

         # EOF > SOF
-        eof_sof_diff = np.array([x.timestampEof - x.timestampSof for x in cam_log])
+        eof_sof_diff = self.ts[cam]['timestampEof'] - self.ts[cam]['timestampSof']
         assert np.all(eof_sof_diff > 0)
         assert np.all(eof_sof_diff < 50*1e6)

-    fid = {c: [getattr(m, m.which()).frameId for m in self.msgs[c]] for c in cams}
-    first_fid = [min(x) for x in fid.values()]
+    first_fid = {c: min(self.ts[c]['frameId']) for c in cams}
     if cam.endswith('CameraState'):
       # camerad guarantees that all cams start on frame ID 0
       # (note loggerd also needs to start up fast enough to catch it)
-      assert set(first_fid) == {0, }, "Cameras don't start on frame ID 0"
+      assert set(first_fid.values()) == {0, }, "Cameras don't start on frame ID 0"
     else:
       # encoder guarantees all cams start on the same frame ID
-      assert len(set(first_fid)) == 1, "Cameras don't start on same frame ID"
+      assert len(set(first_fid.values())) == 1, "Cameras don't start on same frame ID"

     # we don't do a full segment rotation, so these might not match exactly
-    last_fid = [max(x) for x in fid.values()]
-    assert max(last_fid) - min(last_fid) < 10
+    last_fid = {c: max(self.ts[c]['frameId']) for c in cams}
+    assert max(last_fid.values()) - min(last_fid.values()) < 10

-    start, end = min(first_fid), min(last_fid)
-    all_ts = [[getattr(m, m.which()).timestampSof for m in self.msgs[c]] for c in cams]
+    start, end = min(first_fid.values()), min(last_fid.values())
     for i in range(end-start):
-      ts = [round(x[i]/1e6, 1) for x in all_ts]
-      diff = max(ts) - min(ts)
+      ts = {c: round(self.ts[c]['timestampSof'][i]/1e6, 1) for c in cams}
+      diff = (max(ts.values()) - min(ts.values()))
       assert diff < 2, f"Cameras not synced properly: frame_id={start+i}, {diff=:.1f}ms, {ts=}"

   def test_mpc_execution_timings(self):
@@ -438,12 +420,7 @@ class TestOnroad:

   @release_only
   def test_startup(self):
-    startup_alert = None
-    for msg in self.lrs[0]:
-      # can't use onroadEvents because the first msg can be dropped while loggerd is starting up
-      if msg.which() == "selfdriveState":
-        startup_alert = msg.selfdriveState.alertText1
-        break
+    startup_alert = self.ts['selfdriveState']['alertText1'][0]
     expected = EVENTS[log.OnroadEvent.EventName.startup][ET.PERMANENT].alert_text_1
     assert startup_alert == expected, "wrong startup alert"
