@@ -97,7 +97,8 @@ def cputime_total(ct):
 
 
 def check_cpu_usage(first_proc, last_proc):
-  result = "------------------------------------------------\n"
+  result = "\n"
+  result += "------------------------------------------------\n"
   result += "------------------ CPU Usage -------------------\n"
   result += "------------------------------------------------\n"
 
@@ -112,10 +113,9 @@ def check_cpu_usage(first_proc, last_proc):
       cpu_usage = cpu_time / dt * 100.
       if cpu_usage > max(normal_cpu_usage * 1.15, normal_cpu_usage + 5.0):
         # cpu usage is high while playing sounds
-        if proc_name == "./_soundd" and cpu_usage < 65.:
-          continue
-        result += f"Warning {proc_name} using more CPU than normal\n"
-        r = False
+        if not (proc_name == "./_soundd" and cpu_usage < 65.):
+          result += f"Warning {proc_name} using more CPU than normal\n"
+          r = False
       elif cpu_usage < min(normal_cpu_usage * 0.65, max(normal_cpu_usage - 1.0, 0.0)):
         result += f"Warning {proc_name} using less CPU than normal\n"
         r = False
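
Note on this hunk: the old code expressed the _soundd exception as a guard with continue, while the new code inverts the predicate and nests the warning instead. Because the warning lines are the last statements in the branch, the two forms behave identically; the high-CPU threshold itself (flag only when usage exceeds the larger of +15% relative or +5 percentage points over baseline) is unchanged. A minimal sketch of the equivalence, with hypothetical helper names and inputs:

  # Sketch (hypothetical old_style/new_style helpers): the guard-with-continue
  # and inverted-condition forms emit the same warnings when the guarded
  # statements are the last ones in the branch.
  def old_style(proc_name, cpu_usage):
    warnings = []
    if proc_name == "./_soundd" and cpu_usage < 65.:  # exception case: skip the warning
      return warnings
    warnings.append(f"Warning {proc_name} using more CPU than normal")
    return warnings

  def new_style(proc_name, cpu_usage):
    warnings = []
    if not (proc_name == "./_soundd" and cpu_usage < 65.):  # same predicate, inverted
      warnings.append(f"Warning {proc_name} using more CPU than normal")
    return warnings

  for args in [("./_soundd", 50.), ("./_soundd", 70.), ("selfdrived", 90.)]:
    assert old_style(*args) == new_style(*args)
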
@@ -201,7 +201,8 @@ class TestOnroad(unittest.TestCase):
     self.assertTrue(cpu_ok)
 
   def test_model_execution_timings(self):
-    result = "------------------------------------------------\n"
+    result = "\n"
+    result += "------------------------------------------------\n"
     result += "----------------- Model Timing -----------------\n"
     result += "------------------------------------------------\n"
     # TODO: this went up when plannerd cpu usage increased, why?
@@ -212,12 +213,15 @@ class TestOnroad(unittest.TestCase):
       self.assertLess(np.mean(ts), avg_max, f"high avg '{s}' execution time: {np.mean(ts)}")
       result += f"'{s}' execution time: {min(ts)}\n"
       result += f"'{s}' avg execution time: {np.mean(ts)}\n"
+    result += "------------------------------------------------\n"
     print(result)
 
   def test_timings(self):
     passed = True
-    print("\n\n")
-    print("="*25, "service timings", "="*25)
+    result = "\n"
+    result += "------------------------------------------------\n"
+    result += "----------------- Service Timings --------------\n"
+    result += "------------------------------------------------\n"
     for s, (maxmin, rsd) in TIMINGS.items():
       msgs = [m.logMonoTime for m in self.lr if m.which() == s]
       if not len(msgs):
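
This hunk and the next move test_timings from printing incrementally to accumulating everything into a single result string that is printed once at the end, presumably so the timing table stays contiguous in the test log instead of interleaving with other output. A minimal sketch of the pattern, with hypothetical service results standing in for TIMINGS:

  result = "\n"
  result += "------------------------------------------------\n"
  result += "----------------- Service Timings --------------\n"
  result += "------------------------------------------------\n"
  for name, ok in [("carState", True), ("modelV2", False)]:  # hypothetical results
    if not ok:
      result += f"{name} - failed timing check\n"
  print(result)  # a single print call, so the block lands in the log as one unit
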
@@ -226,12 +230,22 @@ class TestOnroad(unittest.TestCase):
       ts = np.diff(msgs) / 1e9
       dt = 1 / service_list[s].frequency
 
-      np.testing.assert_allclose(np.mean(ts), dt, rtol=0.03, err_msg=f"{s} - failed mean timing check")
-      np.testing.assert_allclose([np.max(ts), np.min(ts)], dt, rtol=maxmin, err_msg=f"{s} - failed max/min timing check")
-      self.assertLess(np.std(ts) / dt, rsd, msg=f"{s} - failed RSD timing check")
-      print(f"{s}: {np.array([np.mean(ts), np.max(ts), np.min(ts)])*1e3}")
-      print(f"  {np.max(np.absolute([np.max(ts)/dt, np.min(ts)/dt]))} {np.std(ts)/dt}")
-    print("="*67)
+      try:
+        np.testing.assert_allclose(np.mean(ts), dt, rtol=0.03, err_msg=f"{s} - failed mean timing check")
+        np.testing.assert_allclose([np.max(ts), np.min(ts)], dt, rtol=maxmin, err_msg=f"{s} - failed max/min timing check")
+      except Exception as e:
+        result += str(e) + "\n"
+        passed = False
+
+      if np.std(ts) / dt > rsd:
+        result += f"{s} - failed RSD timing check\n"
+        passed = False
+
+      result += f"{s.ljust(40)}: {np.array([np.mean(ts), np.max(ts), np.min(ts)])*1e3}\n"
+      result += f"{''.ljust(40)} {np.max(np.absolute([np.max(ts)/dt, np.min(ts)/dt]))} {np.std(ts)/dt}\n"
+    result += "="*67
+    print(result)
+    self.assertTrue(passed)
 
   @release_only
   def test_startup(self):
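
This last hunk changes test_timings from fail-fast asserts to a collect-then-assert pattern: each service's checks run inside a try/except, failures are appended to result, and one assertTrue(passed) fires after every service has been reported. A self-contained sketch of the pattern follows; the TIMINGS entries and timestamp arrays are made up, but np.testing.assert_allclose and its rtol semantics (raise unless |actual - desired| <= rtol * |desired|) are the real NumPy API:

  import numpy as np

  TIMINGS = {  # hypothetical: service -> (max/min rtol, relative-std-dev limit)
    "fastService": (0.05, 0.1),
    "slowService": (0.05, 0.1),
  }
  fake_ts = {  # made-up gaps (seconds) between consecutive messages
    "fastService": np.array([0.0100, 0.0101, 0.0099]),
    "slowService": np.array([0.0100, 0.0200, 0.0050]),
  }
  dt = 0.01  # expected period: both services nominally run at 100Hz

  passed = True
  result = "\n"
  for s, (maxmin, rsd) in TIMINGS.items():
    ts = fake_ts[s]
    try:
      # each call raises AssertionError when outside the relative tolerance
      np.testing.assert_allclose(np.mean(ts), dt, rtol=0.03, err_msg=f"{s} - failed mean timing check")
      np.testing.assert_allclose([np.max(ts), np.min(ts)], dt, rtol=maxmin, err_msg=f"{s} - failed max/min timing check")
    except Exception as e:
      result += str(e) + "\n"  # record the failure, keep checking other services
      passed = False
    if np.std(ts) / dt > rsd:  # relative standard deviation check
      result += f"{s} - failed RSD timing check\n"
      passed = False
  print(result)
  print(f"passed: {passed}")  # a real test would end with self.assertTrue(passed)

Running the sketch reports both of slowService's failures (mean and RSD) in one pass, which is the point of the change: a flaky run now surfaces every out-of-spec service at once instead of stopping at the first bad assert.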