Lots of cleaning

pull/2217/head
Gregor Kikelj 5 years ago
parent ecebc831b4
commit 24863affda
  1. selfdrive/locationd/ubloxd_main.cc (3 changed lines)
  2. selfdrive/manager.py (29 changed lines)
  3. selfdrive/test/process_replay/process_replay.py (1 changed line)
  4. selfdrive/test/process_replay/test_processes.py (5 changed lines)
  5. tools/lib/tests/test_caching.py (5 changed lines)

selfdrive/locationd/ubloxd_main.cc

@@ -12,7 +12,6 @@
 #include <math.h>
 #include <ctime>
 #include <chrono>
-#include <iostream>
 #include "messaging.hpp"
 #include "common/util.h"

@@ -42,6 +41,7 @@ int ubloxd_main(poll_ubloxraw_msg_func poll_func, send_gps_event_func send_func)
   subscriber->setTimeout(100);
   PubMaster pm({"ubloxGnss", "gpsLocationExternal"});
   while (!do_exit) {
     Message * msg = subscriber->receive();
     if (!msg){

@@ -53,6 +53,7 @@ int ubloxd_main(poll_ubloxraw_msg_func poll_func, send_gps_event_func send_func)
     auto amsg = kj::heapArray<capnp::word>((msg->getSize() / sizeof(capnp::word)) + 1);
     memcpy(amsg.begin(), msg->getData(), msg->getSize());
     capnp::FlatArrayMessageReader cmsg(amsg);
     cereal::Event::Reader event = cmsg.getRoot<cereal::Event>();
     auto ubloxRaw = event.getUbloxRaw();

selfdrive/manager.py

@@ -22,7 +22,6 @@ os.environ['BASEDIR'] = BASEDIR
 TOTAL_SCONS_NODES = 1005
 prebuilt = os.path.exists(os.path.join(BASEDIR, 'prebuilt'))
-VALGRIND = os.getenv("VALGRIND") is not None
 # Create folders needed for msgq
 try:
   os.mkdir("/dev/shm")

@@ -170,16 +169,16 @@ managed_processes = {
   "plannerd": "selfdrive.controls.plannerd",
   "radard": "selfdrive.controls.radard",
   "dmonitoringd": "selfdrive.monitoring.dmonitoringd",
-  "ubloxd": ("selfdrive/locationd", ["./ubloxd"]), # 0
-  "loggerd": ("selfdrive/loggerd", ["./loggerd"]), # 1
+  "ubloxd": ("selfdrive/locationd", ["./ubloxd"]),
+  "loggerd": ("selfdrive/loggerd", ["./loggerd"]),
   "logmessaged": "selfdrive.logmessaged",
   "locationd": "selfdrive.locationd.locationd",
   "tombstoned": "selfdrive.tombstoned",
   "logcatd": ("selfdrive/logcatd", ["./logcatd"]),
   "proclogd": ("selfdrive/proclogd", ["./proclogd"]),
-  "boardd": ("selfdrive/boardd", ["./boardd"]),
+  "boardd": ("selfdrive/boardd", ["./boardd"]), # not used directly
   "pandad": "selfdrive.pandad",
-  "ui": ("selfdrive/ui", ["./ui"]), # 2
+  "ui": ("selfdrive/ui", ["./ui"]),
   "calibrationd": "selfdrive.locationd.calibrationd",
   "paramsd": "selfdrive.locationd.paramsd",
   "camerad": ("selfdrive/camerad", ["./camerad"]),

@@ -292,18 +291,6 @@ def nativelauncher(pargs, cwd):
   os.chmod(pargs[0], 0o700)
   os.execvp(pargs[0], pargs)

-def valgrindlauncher(pargs, cwd):
-  # exec the process
-  os.chdir(cwd)
-  # Run valgrind on a process
-  command = "valgrind --leak-check=full " + pargs[0]
-  output = os.popen(command)
-  while True:
-    s = output.read()
-    if s == "":
-      break
-    # print(s)

 def start_managed_process(name):
   if name in running or name not in managed_processes:

@@ -315,15 +302,7 @@ def start_managed_process(name):
   else:
     pdir, pargs = proc
     cwd = os.path.join(BASEDIR, pdir)
-    if VALGRIND:
-      print("\nVALGRIND" * 5)
-      print(pargs)
-      print(cwd)
-      running[name] = Process(name=name, target=valgrindlauncher, args=(pargs, cwd))
-    else:
     cloudlog.info("starting process %s" % name)
-    print(pargs)
-    print(cwd)
     running[name] = Process(name=name, target=nativelauncher, args=(pargs, cwd))
   running[name].start()
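
For context, the removed valgrindlauncher drained the child process's output through os.popen in a busy loop. Below is a minimal sketch of the same idea using subprocess; the function name is illustrative, and it assumes valgrind is on PATH and that pargs/cwd follow manager.py's native process entries. It is not part of this commit.

    # Hedged sketch only: roughly what the removed valgrindlauncher did,
    # rewritten with subprocess instead of os.popen.
    import os
    import subprocess

    def valgrind_launcher_sketch(pargs, cwd):
      os.chdir(cwd)
      # valgrind writes the --leak-check=full report to stderr when the
      # child exits, so no manual read loop is needed
      subprocess.run(["valgrind", "--leak-check=full"] + pargs, check=False)

subprocess.run also surfaces the child's exit status, which the removed read loop ignored.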

selfdrive/test/process_replay/process_replay.py

@@ -288,7 +288,6 @@ CONFIGS = [
 ]

 def replay_process(cfg, lr):
-  print("Running replay on inputs " + str(cfg) + " " + str(lr))
   sub_sockets = [s for _, sub in cfg.pub_sub.items() for s in sub]
   pub_sockets = [s for s in cfg.pub_sub.keys() if s != 'can']

selfdrive/test/process_replay/test_processes.py

@@ -105,7 +105,6 @@ def format_diff(results, ref_commit):
   return diff1, diff2, failed

 if __name__ == "__main__":
-  # os.environ["VALGRIND"] = "1"
   parser = argparse.ArgumentParser(description="Regression test to identify changes in a process's output")

@@ -141,8 +140,9 @@ if __name__ == "__main__":
   tested_cars = set(c.lower() for c, _ in segments)
   untested = (set(interface_names) - set(excluded_interfaces)) - tested_cars
   assert len(untested) == 0, "Cars missing routes: %s" % (str(untested))
   results: Any = {}
-  for car_brand, segment in segments: # Runs all tests, can focus on inside of the loop :)
+  for car_brand, segment in segments:
     if (cars_whitelisted and car_brand.upper() not in args.whitelist_cars) or \
        (not cars_whitelisted and car_brand.upper() in args.blacklist_cars):
       continue

@@ -152,7 +152,6 @@ if __name__ == "__main__":
     results[segment] = {}
     rlog_fn = get_segment(segment)
-    print(rlog_fn)
     lr = LogReader(rlog_fn)
     for cfg in CONFIGS:
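
The loop above is the whole replay harness: fetch a segment's rlog, wrap it in LogReader, and hand it to replay_process once per entry in CONFIGS. A standalone sketch of that flow follows; the import locations are my assumption of where these names live based on the files in this diff, and the segment string is a placeholder rather than one of the test's routes.

    # Hedged sketch of the replay loop; assumes these names are importable
    # from the files touched in this diff.
    from selfdrive.test.process_replay.process_replay import CONFIGS, replay_process
    from selfdrive.test.process_replay.test_processes import get_segment
    from tools.lib.logreader import LogReader

    segment = "0000000000000000|2020-01-01--00-00-00--0"  # placeholder segment ID
    rlog_fn = get_segment(segment)  # fetch the segment's rlog, as in the loop above
    lr = LogReader(rlog_fn)
    for cfg in CONFIGS:
      log_msgs = replay_process(cfg, lr)  # the real test diffs these against a reference commit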

tools/lib/tests/test_caching.py

@@ -38,6 +38,8 @@ class TestFileDownload(unittest.TestCase):
     # Make sure we don't force cache
     os.environ["FILEREADER_CACHE"] = "0"
     small_file_url = "https://raw.githubusercontent.com/commaai/openpilot/master/SAFETY.md"
+    # If you want large file to be larger than a chunk
+    # large_file_url = "https://commadataci.blob.core.windows.net/openpilotci/0375fdf7b1ce594d/2019-06-13--08-32-25/3/fcamera.hevc"
     # Load full small file
     self.compare_loads(small_file_url)

@@ -53,10 +55,7 @@ class TestFileDownload(unittest.TestCase):
       self.compare_loads(small_file_url, 100 * i, 100)

   def test_large_file(self):
-    # If you want large file to be larger than a chunk
-    # large_file_url = "https://commadataci.blob.core.windows.net/openpilotci/0375fdf7b1ce594d/2019-06-13--08-32-25/3/fcamera.hevc"
     large_file_url = "https://commadataci.blob.core.windows.net/openpilotci/0375fdf7b1ce594d/2019-06-13--08-32-25/3/qlog.bz2"
     # Load the end 100 bytes of both files
     file_large = URLFile(large_file_url)
     length = file_large.get_length()
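
The moved comment points to the fcamera.hevc URL as the file to swap in when the "large" file should exceed a single download chunk. A sketch of a test built on that hint, meant to sit inside TestFileDownload and reusing this class's compare_loads(url, start, length) helper, is below; CHUNK_SIZE is an illustrative guess, not the value tools/lib/url_file.py actually uses.

    # Hedged sketch: read a window that straddles an assumed chunk boundary,
    # using the fcamera.hevc URL the moved comment suggests for a file larger
    # than one chunk.
    CHUNK_SIZE = 512 * 1000  # illustrative only

    def test_read_across_chunk_boundary(self):
      large_file_url = "https://commadataci.blob.core.windows.net/openpilotci/0375fdf7b1ce594d/2019-06-13--08-32-25/3/fcamera.hevc"
      # 100 bytes before the assumed boundary plus 100 bytes after it
      self.compare_loads(large_file_url, CHUNK_SIZE - 100, 200)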
