diff --git a/common/loader.py b/common/loader.py
deleted file mode 100644
index 5b8c680a34..0000000000
--- a/common/loader.py
+++ /dev/null
@@ -1,284 +0,0 @@
-import os
-import struct
-import bisect
-import numpy as np
-import _io
-
-import capnp
-from cereal import log as capnp_log
-
-class RawData():
-  def __init__(self, f):
-    self.f = _io.FileIO(f, 'rb')
-    self.lenn = struct.unpack("I", self.f.read(4))[0]
-    self.count = os.path.getsize(f) / (self.lenn+4)
-
-  def read(self, i):
-    self.f.seek((self.lenn+4)*i + 4)
-    return self.f.read(self.lenn)
-
-def yuv420_to_rgb(raw, image_dim=None, swizzled=False):
-  def expand(x):
-    x = np.repeat(x, 2, axis=0)
-    return np.repeat(x, 2, axis=1)
-
-  if image_dim is None:
-    image_dim = (raw.shape[1]*2, raw.shape[2]*2)
-    swizzled = True
-
-  if not swizzled:
-    img_data = np.array(raw, copy=False, dtype=np.uint8)
-    uv_len = (image_dim[0]/2)*(image_dim[1]/2)
-    img_data_u = expand(img_data[image_dim[0]*image_dim[1]: \
-                                 image_dim[0]*image_dim[1]+uv_len]. \
-                        reshape(image_dim[0]/2, image_dim[1]/2))
-
-    img_data_v = expand(img_data[image_dim[0]*image_dim[1]+uv_len: \
-                                 image_dim[0]*image_dim[1]+2*uv_len]. \
-                        reshape(image_dim[0]/2, image_dim[1]/2))
-    img_data_y = img_data[0:image_dim[0]*image_dim[1]].reshape(image_dim)
-  else:
-    img_data_y = np.zeros(image_dim, dtype=np.uint8)
-    img_data_y[0::2, 0::2] = raw[0]
-    img_data_y[1::2, 0::2] = raw[1]
-    img_data_y[0::2, 1::2] = raw[2]
-    img_data_y[1::2, 1::2] = raw[3]
-    img_data_u = expand(raw[4])
-    img_data_v = expand(raw[5])
-
-  yuv = np.stack((img_data_y, img_data_u, img_data_v)).swapaxes(0,2).swapaxes(0,1)
-  yuv = yuv.astype(np.int16)
-
-  # http://maxsharabayko.blogspot.com/2016/01/fast-yuv-to-rgb-conversion-in-python-3.html
-  # according to ITU-R BT.709
-  yuv[:,:, 0] = yuv[:,:, 0].clip(16, 235).astype(yuv.dtype) - 16
-  yuv[:,:,1:] = yuv[:,:,1:].clip(16, 240).astype(yuv.dtype) - 128
-
-  A = np.array([[1.164, 0.000, 1.793],
-                [1.164, -0.213, -0.533],
-                [1.164, 2.112, 0.000]])
-
-  # our result
-  img = np.dot(yuv, A.T).clip(0, 255).astype('uint8')
-  return img
-
-
-class YuvData():
-  def __init__(self, f, dim=(160,320)):
-    self.f = _io.FileIO(f, 'rb')
-    self.image_dim = dim
-    self.image_size = self.image_dim[0]/2 * self.image_dim[1]/2 * 6
-    self.count = os.path.getsize(f) / self.image_size
-
-  def read_frame(self, frame):
-    self.f.seek(self.image_size*frame)
-    raw = self.f.read(self.image_size)
-    return raw
-
-  def read_frames(self, range_start, range_len):
-    self.f.seek(self.image_size*range_start)
-    raw = self.f.read(self.image_size*range_len)
-    return raw
-
-  def read_frames_into(self, range_start, buf):
-    self.f.seek(self.image_size*range_start)
-    return self.f.readinto(buf)
-
-  def read(self, frame):
-    return yuv420_to_rgb(self.read_frame(frame), self.image_dim)
-
-  def close(self):
-    self.f.close()
-
-  def __enter__(self):
-    return self
-
-  def __exit__(self, type, value, traceback):
-    self.close()
-
-
-
-class OneReader():
-  def __init__(self, base_path, goofy=False, segment_range=None):
-    self.base_path = base_path
-
-    route_name = os.path.basename(base_path)
-
-    self.rcamera_size = (304, 560)
-
-    if segment_range is None:
-      parent_path = os.path.dirname(base_path)
-      self.segment_nums = []
-      for p in os.listdir(parent_path):
-        if not p.startswith(route_name+"--"):
-          continue
-        self.segment_nums.append(int(p.rsplit("--", 1)[-1]))
-      if not self.segment_nums:
-        raise Exception("no route segments found")
-      self.segment_nums.sort()
-      self.segment_range = (self.segment_nums[0], self.segment_nums[-1])
-    else:
-      self.segment_range = segment_range
-      self.segment_nums = range(segment_range[0], segment_range[1]+1)
-      for i in self.segment_nums:
-        if not os.path.exists(base_path+"--"+str(i)):
-          raise Exception("missing segment in provided range")
-
-    # goofy data is broken with discontinuous logs
-    if goofy and (self.segment_range[0] != 0
-                  or self.segment_nums != range(self.segment_range[0], self.segment_range[1]+1)):
-      raise Exception("goofy data needs all the segments for a route")
-
-    self.cur_seg = None
-    self.cur_seg_f = None
-
-    # index the frames
-    print("indexing frames {}...".format(self.segment_nums))
-
-    self.rcamera_encode_map = {}  # frame_id -> (segment num, segment id, frame_time)
-    last_frame_id = -1
-
-    if goofy:
-      # goofy is goofy
-
-      frame_size = self.rcamera_size[0]*self.rcamera_size[1]*3/2
-
-      # find the encode id ranges for each segment by using the rcamera file size
-      segment_encode_ids = []
-      cur_encode_id = 0
-      for n in self.segment_nums:
-        camera_path = os.path.join(self.seg_path(n), "rcamera")
-        if not os.path.exists(camera_path):
-          # for goofy, missing camera files means a bad route
-          raise Exception("Missing camera file {}".format(camera_path))
-        camera_size = os.path.getsize(camera_path)
-        assert (camera_size % frame_size) == 0
-
-        num_frames = camera_size / frame_size
-        segment_encode_ids.append(cur_encode_id)
-        cur_encode_id += num_frames
-
-      last_encode_id = -1
-      # use the segment encode id map and frame events to build the frame index
-      for n in self.segment_nums:
-        log_path = os.path.join(self.seg_path(n), "rlog")
-        if os.path.exists(log_path):
-          with open(log_path, "rb") as f:
-            for evt in capnp_log.Event.read_multiple(f):
-              if evt.which() == 'frame':
-
-                if evt.frame.frameId < last_frame_id:
-                  # a non-increasing frame id is bad route (eg visiond was restarted)
-                  raise Exception("non-increasing frame id")
-                last_frame_id = evt.frame.frameId
-
-                seg_i = bisect.bisect_right(segment_encode_ids, evt.frame.encodeId)-1
-                assert seg_i >= 0
-                seg_num = self.segment_nums[seg_i]
-                seg_id = evt.frame.encodeId-segment_encode_ids[seg_i]
-                frame_time = evt.logMonoTime / 1.0e9
-
-                self.rcamera_encode_map[evt.frame.frameId] = (seg_num, seg_id,
-                                                              frame_time)
-
-                last_encode_id = evt.frame.encodeId
-
-      if last_encode_id-cur_encode_id > 10:
-        # too many missing frames is a bad route (eg route from before encoder rotating worked)
-        raise Exception("goofy route is missing frames: {}, {}".format(
-          last_encode_id, cur_encode_id))
-
-    else:
-      # for harry data, build the index from encodeIdx events
-      for n in self.segment_nums:
-        log_path = os.path.join(self.seg_path(n), "rlog")
-        if os.path.exists(log_path):
-          with open(log_path, "rb") as f:
-            for evt in capnp_log.Event.read_multiple(f):
-              if evt.which() == 'encodeIdx' and evt.encodeIdx.type == 'bigBoxLossless':
-                frame_time = evt.logMonoTime / 1.0e9
-                self.rcamera_encode_map[evt.encodeIdx.frameId] = (
-                  evt.encodeIdx.segmentNum, evt.encodeIdx.segmentId,
-                  frame_time)
-
-    print("done")
-
-    # read the first event to find the start time
-    self.reset_to_seg(self.segment_range[0])
-    for evt in self.events():
-      if evt.which() != 'initData':
-        self.start_mono = evt.logMonoTime
-        break
-    self.reset_to_seg(self.segment_range[0])
-
-
-  def seg_path(self, num):
-    return self.base_path+"--"+str(num)
-
-  def reset_to_seg(self, seg):
-    self.cur_seg = seg
-    if self.cur_seg_f:
-      self.cur_seg_f.close()
-      self.cur_seg_f = None
-
-  def seek_ts(self, ts):
-    seek_seg = int(ts/60)
-    if seek_seg < self.segment_range[0] or seek_seg > self.segment_range[1]:
-      raise ValueError
-
-    self.reset_to_seg(seek_seg)
-    target_mono = self.start_mono + int(ts*1e9)
-    for evt in self.events():
-      if evt.logMonoTime >= target_mono:
-        break
-
-  def read_event(self):
-    while True:
-      if self.cur_seg > self.segment_range[1]:
-        return None
-      if self.cur_seg_f is None:
-        log_path = os.path.join(self.seg_path(self.cur_seg), "rlog")
-        if not os.path.exists(log_path):
-          print("missing log file!", log_path)
-          self.cur_seg += 1
-          continue
-        self.cur_seg_f = open(log_path, "rb")
-
-      try:
-        return capnp_log.Event.read(self.cur_seg_f)
-      except capnp.lib.capnp.KjException as e:
-        if 'EOF' in str(e):  # dumb, but pycapnp does this too
-          self.cur_seg_f.close()
-          self.cur_seg_f = None
-          self.cur_seg += 1
-        else:
-          raise
-
-  def events(self):
-    while True:
-      r = self.read_event()
-      if r is None:
-        break
-      yield r
-
-  def read_frame(self, frame_id):
-    encode_idx = self.rcamera_encode_map.get(frame_id)
-    if encode_idx is None:
-      return None
-
-    seg_num, seg_id, _ = encode_idx
-    camera_path = os.path.join(self.seg_path(seg_num), "rcamera")
-    if not os.path.exists(camera_path):
-      return None
-    with YuvData(camera_path, self.rcamera_size) as data:
-      return data.read_frame(seg_id)
-
-  def close(self):
-    if self.cur_seg_f is not None:
-      self.cur_seg_f.close()
-
-  def __enter__(self):
-    return self
-
-  def __exit__(self, type, value, traceback):
-    self.close()
diff --git a/common/numpy_helpers.py b/common/numpy_helpers.py
deleted file mode 100644
index cb71f56022..0000000000
--- a/common/numpy_helpers.py
+++ /dev/null
@@ -1,66 +0,0 @@
-import bisect
-import numpy as np
-from scipy.interpolate import interp1d
-
-
-def deep_interp_0_fast(dx, x, y):
-  FIX = False
-  if len(y.shape) == 1:
-    y = y[:, None]
-    FIX = True
-  ret = np.zeros((dx.shape[0], y.shape[1]))
-  index = list(x)
-  for i in range(dx.shape[0]):
-    idx = bisect.bisect_left(index, dx[i])
-    if idx == x.shape[0]:
-      idx = x.shape[0] - 1
-    ret[i] = y[idx]
-
-  if FIX:
-    return ret[:, 0]
-  else:
-    return ret
-
-
-def running_mean(x, N):
-  cumsum = np.cumsum(np.insert(x, [0]*(int(N/2)) + [-1]*(N-int(N/2)), [x[0]]*int(N/2) + [x[-1]]*(N-int(N/2))))
-  return (cumsum[N:] - cumsum[:-N]) / N
-
-
-def deep_interp_np(x, xp, fp):
-  x = np.atleast_1d(x)
-  xp = np.array(xp)
-  if len(xp) < 2:
-    return np.repeat(fp, len(x), axis=0)
-  if min(np.diff(xp)) < 0:
-    raise RuntimeError('Bad x array for interpolation')
-  j = np.searchsorted(xp, x) - 1
-  j = np.clip(j, 0, len(xp)-2)
-  d = np.divide(x - xp[j], xp[j + 1] - xp[j], out=np.ones_like(x, dtype=np.float64), where=xp[j + 1] - xp[j] != 0)
-  vals_interp = (fp[j].T*(1 - d)).T + (fp[j + 1].T*d).T
-  if len(vals_interp) == 1:
-    return vals_interp[0]
-  else:
-    return vals_interp
-
-
-def clipping_deep_interp(x, xp, fp):
-  if len(xp) < 2:
-    return deep_interp_np(x, xp, fp)
-  bad_idx = np.where(np.diff(xp) < 0)[0]
-  if len(bad_idx) > 0:
-    if bad_idx[0] ==1:
-      return np.zeros([] + list(fp.shape[1:]))
-    return deep_interp_np(x, xp[:bad_idx[0]], fp[:bad_idx[0]])
-  else:
-    return deep_interp_np(x, xp, fp)
-
-
-def deep_interp(dx, x, y, kind="slinear"):
-  return interp1d(
-    x, y,
-    axis=0,
-    kind=kind,
-    bounds_error=False,
-    fill_value="extrapolate",
-    assume_sorted=True)(dx)
diff --git a/common/sampling_buffer.py b/common/sampling_buffer.py
deleted file mode 100644
index dbfa7c66a7..0000000000
--- a/common/sampling_buffer.py
+++ /dev/null
@@ -1,78 +0,0 @@
-import os
-import numpy as np
-import random
-
-class SamplingBuffer():
-  def __init__(self, fn, size, write=False):
-    self._fn = fn
-    self._write = write
-    if self._write:
-      self._f = open(self._fn, "ab")
-    else:
-      self._f = open(self._fn, "rb")
-    self._size = size
-    self._refresh()
-
-  def _refresh(self):
-    self.cnt = os.path.getsize(self._fn) / self._size
-
-  @property
-  def count(self):
-    self._refresh()
-    return self.cnt
-
-  def _fetch_one(self, x):
-    assert self._write == False
-    self._f.seek(x*self._size)
-    return self._f.read(self._size)
-
-  def sample(self, count, indices = None):
-    if indices == None:
-      cnt = self.count
-      assert cnt != 0
-      indices = map(lambda x: random.randint(0, cnt-1), range(count))
-    return map(self._fetch_one, indices)
-
-  def write(self, dat):
-    assert self._write == True
-    assert (len(dat) % self._size) == 0
-    self._f.write(dat)
-    self._f.flush()
-
-class NumpySamplingBuffer():
-  def __init__(self, fn, size, dtype, write=False):
-    self._size = size
-    self._dtype = dtype
-    self._buf = SamplingBuffer(fn, len(np.zeros(size, dtype=dtype).tobytes()), write)
-
-  @property
-  def count(self):
-    return self._buf.count
-
-  def write(self, dat):
-    self._buf.write(dat.tobytes())
-
-  def sample(self, count, indices = None):
-    return np.fromstring(''.join(self._buf.sample(count, indices)), dtype=self._dtype).reshape([count]+list(self._size))
-
-# TODO: n IOPS needed where n is the Multi
-class MultiNumpySamplingBuffer():
-  def __init__(self, fn, npa, write=False):
-    self._bufs = []
-    for i,n in enumerate(npa):
-      self._bufs.append(NumpySamplingBuffer(fn + ("_%d" % i), n[0], n[1], write))
-
-  def write(self, dat):
-    for b,x in zip(self._bufs, dat):
-      b.write(x)
-
-  @property
-  def count(self):
-    return min(map(lambda x: x.count, self._bufs))
-
-  def sample(self, count):
-    cnt = self.count
-    assert cnt != 0
-    indices = map(lambda x: random.randint(0, cnt-1), range(count))
-    return map(lambda x: x.sample(count, indices), self._bufs)
-
diff --git a/common/stat_tracker.py b/common/stat_tracker.py
deleted file mode 100644
index c765f55484..0000000000
--- a/common/stat_tracker.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import numpy as np
-
-_DESC_FMT = """
-{} (n={}):
-MEAN={}
-VAR={}
-MIN={}
-MAX={}
-"""
-
-class StatTracker():
-  def __init__(self, name):
-    self._name = name
-    self._mean = 0.
-    self._var = 0.
-    self._n = 0
-    self._min = -float("-inf")
-    self._max = -float("inf")
-
-  @property
-  def mean(self):
-    return self._mean
-
-  @property
-  def var(self):
-    return (self._n * self._var) / (self._n - 1.)
-
-  @property
-  def min(self):
-    return self._min
-
-  @property
-  def max(self):
-    return self._max
-
-  def update(self, samples):
-    # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm
-    data = samples.reshape(-1)
-    n_a = data.size
-    mean_a = np.mean(data)
-    var_a = np.var(data, ddof=0)
-
-    n_b = self._n
-    mean_b = self._mean
-
-    delta = mean_b - mean_a
-    m_a = var_a * (n_a - 1)
-    m_b = self._var * (n_b - 1)
-    m2 = m_a + m_b + delta**2 * n_a * n_b / (n_a + n_b)
-
-    self._var = m2 / (n_a + n_b)
-    self._mean = (n_a * mean_a + n_b * mean_b) / (n_a + n_b)
-    self._n = n_a + n_b
-
-    self._min = min(self._min, np.min(data))
-    self._max = max(self._max, np.max(data))
-
-  def __str__(self):
-    return _DESC_FMT.format(self._name, self._n, self._mean, self.var, self._min,
-                            self._max)
-
-# FIXME(mgraczyk): The variance computation does not work with 1 sample batches.
-class VectorStatTracker(StatTracker):
-  def __init__(self, name, dim):
-    self._name = name
-    self._mean = np.zeros((dim, ))
-    self._var = np.zeros((dim, dim))
-    self._n = 0
-    self._min = np.full((dim, ), -float("-inf"))
-    self._max = np.full((dim, ), -float("inf"))
-
-  @property
-  def cov(self):
-    return self.var
-
-  def update(self, samples):
-    n_a = samples.shape[0]
-    mean_a = np.mean(samples, axis=0)
-    var_a = np.cov(samples, ddof=0, rowvar=False)
-
-    n_b = self._n
-    mean_b = self._mean
-
-    delta = mean_b - mean_a
-    m_a = var_a * (n_a - 1)
-    m_b = self._var * (n_b - 1)
-    m2 = m_a + m_b + delta**2 * n_a * n_b / (n_a + n_b)
-
-    self._var = m2 / (n_a + n_b)
-    self._mean = (n_a * mean_a + n_b * mean_b) / (n_a + n_b)
-    self._n = n_a + n_b
-
-    self._min = np.minimum(self._min, np.min(samples, axis=0))
-    self._max = np.maximum(self._max, np.max(samples, axis=0))
diff --git a/selfdrive/debug/internal/camera_small_box.py b/selfdrive/debug/internal/camera_small_box.py
deleted file mode 100755
index 082e711baf..0000000000
--- a/selfdrive/debug/internal/camera_small_box.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env python3
-import os
-
-from common.basedir import BASEDIR
-os.environ['BASEDIR'] = BASEDIR
-SCALE = 3
-
-import argparse
-import zmq
-import pygame
-import numpy as np
-import cv2
-import sys
-import traceback
-from collections import namedtuple
-
-from cereal import car
-from common.params import Params
-from common.lazy_property import lazy_property
-from cereal.messaging import sub_sock, recv_one_or_none, recv_one
-from cereal.services import service_list
-
-_BB_OFFSET = 290, 332
-_BB_TO_FULL_FRAME = np.asarray([[1., 0., _BB_OFFSET[0]], [0., 1., _BB_OFFSET[1]],
-                                [0., 0., 1.]])
-_FULL_FRAME_TO_BB = np.linalg.inv(_BB_TO_FULL_FRAME)
-_FULL_FRAME_SIZE = 1164, 874
-
-
-
-def pygame_modules_have_loaded():
-  return pygame.display.get_init() and pygame.font.get_init()
-
-
-def ui_thread(addr, frame_address):
-  context = zmq.Context()
-
-  pygame.init()
-  pygame.font.init()
-  assert pygame_modules_have_loaded()
-
-  size = (640 * SCALE, 480 * SCALE)
-  pygame.display.set_caption("comma one debug UI")
-  screen = pygame.display.set_mode(size, pygame.DOUBLEBUF)
-
-  camera_surface = pygame.surface.Surface((640 * SCALE, 480 * SCALE), 0, 24).convert()
-
-  frame = context.socket(zmq.SUB)
-  frame.connect(frame_address or "tcp://%s:%d" % (addr, 'frame'))
-  frame.setsockopt(zmq.SUBSCRIBE, "")
-
-  img = np.zeros((480, 640, 3), dtype='uint8')
-  imgff = np.zeros((_FULL_FRAME_SIZE[1], _FULL_FRAME_SIZE[0], 3), dtype=np.uint8)
-
-  while 1:
-    list(pygame.event.get())
-    screen.fill((64, 64, 64))
-
-    # ***** frame *****
-    fpkt = recv_one(frame)
-    yuv_img = fpkt.frame.image
-
-    if fpkt.frame.transform:
-      yuv_transform = np.array(fpkt.frame.transform).reshape(3, 3)
-    else:
-      # assume frame is flipped
-      yuv_transform = np.array([[-1.0, 0.0, _FULL_FRAME_SIZE[0] - 1],
-                                [0.0, -1.0, _FULL_FRAME_SIZE[1] - 1], [0.0, 0.0, 1.0]])
-
-    if yuv_img and len(yuv_img) == _FULL_FRAME_SIZE[0] * _FULL_FRAME_SIZE[1] * 3 // 2:
-      yuv_np = np.frombuffer(
-        yuv_img, dtype=np.uint8).reshape(_FULL_FRAME_SIZE[1] * 3 // 2, -1)
-      cv2.cvtColor(yuv_np, cv2.COLOR_YUV2RGB_I420, dst=imgff)
-      cv2.warpAffine(
-        imgff,
-        np.dot(yuv_transform, _BB_TO_FULL_FRAME)[:2], (img.shape[1], img.shape[0]),
-        dst=img,
-        flags=cv2.WARP_INVERSE_MAP)
-    else:
-      img.fill(0)
-
-    height, width = img.shape[:2]
-    img_resized = cv2.resize(
-      img, (SCALE * width, SCALE * height), interpolation=cv2.INTER_CUBIC)
-    # *** blits ***
-    pygame.surfarray.blit_array(camera_surface, img_resized.swapaxes(0, 1))
-    screen.blit(camera_surface, (0, 0))
-
-    # this takes time...vsync or something
-    pygame.display.flip()
-
-
-def get_arg_parser():
-  parser = argparse.ArgumentParser(
-    description="Show replay data in a UI.",
-    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
-
-  parser.add_argument(
-    "ip_address",
-    nargs="?",
-    default="127.0.0.1",
-    help="The ip address on which to receive zmq messages.")
-
-  parser.add_argument(
-    "--frame-address",
-    default=None,
-    help="The ip address on which to receive zmq messages.")
-  return parser
-
-
-if __name__ == "__main__":
-  args = get_arg_parser().parse_args(sys.argv[1:])
-  ui_thread(args.ip_address, args.frame_address)
diff --git a/selfdrive/debug/internal/convert_csv_to_route.py b/selfdrive/debug/internal/convert_csv_to_route.py
deleted file mode 100755
index f97e4617cb..0000000000
--- a/selfdrive/debug/internal/convert_csv_to_route.py
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/usr/bin/env python
-from common.kalman.ned import ecef2geodetic
-
-import csv
-import numpy as np
-import webbrowser
-import os
-import sys
-import json
-import numpy.linalg as LA
-import gmplot
-from dateutil.parser import parse
-from common.numpy_helpers import deep_interp
-# import cvxpy as cvx
-MPH_TO_MS = 0.44704
-
-
-def downsample(positions, speeds, start_idx, end_idx, dist):
-  # TODO: save headings too
-  track = []
-  last_position = positions[start_idx]
-  valid_indeces = []
-  track_speeds = []
-  for pi in range(start_idx, end_idx):
-    # only save points that are at least 10 cm far away
-    if LA.norm(positions[pi] - last_position) >= dist:
-      #print LA.norm(positions[pi] - last_position)
-      last_position = positions[pi]
-      track.append(positions[pi])
-      valid_indeces.append(pi)
-      track_speeds.append(speeds[pi])
-  print(-start_idx + end_idx, len(valid_indeces))
-  # this compare the original point count vs the filtered count
-
-  track = np.array(track)
-  track_speeds = np.array(track_speeds)
-  return track, track_speeds
-
-def converter(date):
-
-  filename = "/home/batman/one/selfdrive/locationd/liveloc_dumps/" + date + "/canonical.csv"  # Point one (OK!)
-
-  c = csv.DictReader(open(filename, 'rb'), delimiter=',')
-
-  start_time = None
-
-  t = []
-  ll_positions = []
-  positions = []
-  sats = []
-  flag = []
-  speeds = []
-
-  for row in c:
-    t.append(float(row['pctime']))
-    x = float(row['ecefX'])
-    y = float(row['ecefY'])
-    z = float(row['ecefZ'])
-    ecef = np.array((x, y, z))
-    speeds.append(float(row['velSpeed']))
-
-    pos = ecef2geodetic(ecef)
-    ll_positions.append(pos)
-    positions.append(ecef)
-
-  t = np.array(t)
-  ll_positions = np.array(ll_positions)
-  positions = np.array(positions)
-
-  #distances = ll_positions[:,0:2] - START_POS[:2]
-  #i_start = np.argmin(LA.norm(distances, axis=1))
-
-  #for i in range(i_start + 500):
-  #  distances[i] += np.array([100, 100])
-  #i_end = np.argmin(LA.norm(distances, axis=1))
-
-  i_start = 0
-  i_end = len(positions)
-
-  print(i_start, i_end)
-  track, track_speeds = downsample(positions, speeds, i_start, i_end, 0.2)
-  ll_track = np.array([ecef2geodetic(pos) for pos in track])
-
-  track_struct = {}
-  print(track_speeds.shape)
-  print(track.shape)
-  track_struct['race'] = np.hstack((track,
-                                    np.expand_dims(track_speeds, axis=1),
-                                    np.zeros((len(track_speeds), 1))))
-
-  f = open('/home/batman/one/selfdrive/controls/tracks/loop_city.npy', 'w')
-  np.save(f, track_struct)
-  f.close()
-  print("SAVED!")
-
-
-if __name__ == "__main__":
-  converter(sys.argv[1])
diff --git a/selfdrive/debug/internal/honda_sniff_steering.py b/selfdrive/debug/internal/honda_sniff_steering.py
deleted file mode 100755
index 2dc32d90b1..0000000000
--- a/selfdrive/debug/internal/honda_sniff_steering.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env python3
-import os
-import zmq
-
-import cereal.messaging as messaging
-from cereal.services import service_list
-
-from panda.lib.panda import Panda
-from hexdump import hexdump
-import time
-
-def raw_panda():
-  p = Panda()
-  print(p)
-
-  p.set_uart_baud(2, 9600)
-  p.set_uart_baud(3, 9600)
-
-  p.set_uart_parity(2, 1)
-  p.set_uart_parity(3, 1)
-
-  p.set_uart_callback(2, 1)
-  p.set_uart_callback(3, 1)
-
-  idx = 0
-  while 1:
-    """
-    dat = p.serial_read(2)
-    if len(dat) > 0:
-      print "2:",
-      hexdump(dat)
-
-    dat = p.serial_read(3)
-    if len(dat) > 0:
-      print "3:",
-      hexdump(dat)
-
-    print "read done, waiting"
-    time.sleep(0.01)
-    """
-
-    if idx%2 == 1:
-      dat = "\x20\x80\xc0\xa0"
-    else:
-      dat = "\x00\x80\xc0\xc0"
-    p.can_send(0, dat, 8)
-
-    for r in p.can_recv():
-      if r[-1] in [8, 9]:
-        print(r[-1], r[2].encode("hex"))
-
-    time.sleep(0.01)
-    idx += 1
-
-if __name__ == "__main__":
-  #raw_panda()
-  #exit(0)
-
-  logcan = messaging.sub_sock('can')
-
-  t1 = []
-  t2 = []
-  t3 = []
-
-  while len(t1) < 1000 or os.uname()[-1] == "aarch64":
-    rr = messaging.recv_sock(logcan, wait=True)
-    for c in rr.can:
-      if c.src in [9] and len(c.dat) == 5:
-        aa = map(lambda x: ord(x)&0x7f, c.dat)
-
-        # checksum
-        assert (-(aa[0]+aa[1]+aa[2]+aa[3]))&0x7f == aa[4]
-
-        #print map(bin, aa[0:4])
-
-        aa[0] &= ~0x20
-        aa[1] &= ~0x20
-
-        st = (aa[0] << 5) + aa[1]
-        if st >= 256:
-          st = -(512-st)
-
-        mt = ((aa[2] >> 3) << 7) + aa[3]
-        if mt >= 512:
-          mt = -(1024-mt)
-
-        print(st, mt)
-        t1.append(st)
-        t2.append(mt)
-        #print map(bin, aa), "apply", st
-
-      if c.src in [8] and len(c.dat) == 4:
-        aa = map(lambda x: ord(x)&0x7f, c.dat)
-
-        # checksum
-        assert (-(aa[0]+aa[1]+aa[2]))&0x7f == aa[3]
-
-        aa[0] &= ~0x20
-        aa[1] &= ~0x20
-
-        st = (aa[0] << 5) + aa[1]
-        if st >= 256:
-          st = -(512-st)
-        print(aa, "apply", st)
-
-        t3.append(st)
-
-  import matplotlib.pyplot as plt
-  plt.plot(t1)
-  plt.plot(t2)
-  plt.plot(t3)
-  plt.show()
-
diff --git a/selfdrive/debug/internal/run_segment_through_visiond.py b/selfdrive/debug/internal/run_segment_through_visiond.py
deleted file mode 100755
index e10ddcfe6a..0000000000
--- a/selfdrive/debug/internal/run_segment_through_visiond.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python3
-
-import argparse
-import time
-import os
-
-from tqdm import tqdm
-
-from cereal.messaging import PubMaster, recv_one, sub_sock
-from cereal.services import service_list
-from tools.lib.logreader import LogReader
-from xx.chffr.lib.route import Route, RouteSegment
-from tools.lib.route_framereader import RouteFrameReader
-from xx.uncommon.column_store import save_dict_as_column_store
-from xx.pipeline.lib.log_time_series import append_dict
-from selfdrive.test.process_replay.compare_logs import save_log
-
-if __name__ == "__main__":
-  parser = argparse.ArgumentParser(description="Run visiond on segment")
-  parser.add_argument("segment_name", help="The segment to run")
-  parser.add_argument("output_path", help="The output file")
-
-  args = parser.parse_args()
-  segment = RouteSegment.from_canonical_name(args.segment_name)
-  route = Route(segment._name._route_name)
-
-  frame_id_lookup = {}
-  frame_reader = RouteFrameReader(route.camera_paths(), None, frame_id_lookup, readahead=True)
-
-  msgs = list(LogReader(segment.log_path))
-
-  pm = PubMaster(['liveCalibration', 'frame'])
-  model_sock = sub_sock('model')
-
-  # Read encodeIdx
-  for msg in msgs:
-    if msg.which() == 'encodeIdx':
-      frame_id_lookup[msg.encodeIdx.frameId] = (msg.encodeIdx.segmentNum, msg.encodeIdx.segmentId)
-
-  # Send some livecalibration messages to initalize visiond
-  for msg in msgs:
-    if msg.which() == 'liveCalibration':
-      pm.send('liveCalibration', msg.as_builder())
-
-  time.sleep(1.0)
-  values = {}
-
-  out_msgs = []
-  for msg in tqdm(msgs):
-    w = msg.which()
-
-    if w == 'liveCalibration':
-      pm.send(w, msg.as_builder())
-
-    if w == 'frame':
-      msg = msg.as_builder()
-
-      frame_id = msg.frame.frameId
-      img = frame_reader.get(frame_id, pix_fmt="rgb24")[:,:,::-1]
-
-      msg.frame.image = img.flatten().tobytes()
-      pm.send(w, msg)
-
-      model = recv_one(model_sock)
-      model = model.as_builder()
-      model.logMonoTime = 0
-      model = model.as_reader()
-      out_msgs.append(model)
-
-  save_log(args.output_path, out_msgs)
-
-  # tm = model.logMonoTime / 1.0e9
-  # model = model.model
-  # append_dict("model/data/path", tm, model.path.to_dict(), values)
-  # append_dict("model/data/left_lane", tm, model.leftLane.to_dict(), values)
-  # append_dict("model/data/right_lane", tm, model.rightLane.to_dict(), values)
-  # append_dict("model/data/lead", tm, model.lead.to_dict(), values)
-
-  # save_dict_as_column_store(values, os.path.join(args.output_path, "LiveVisionD", args.segment_name))
diff --git a/tools/replay/mapd.py b/tools/replay/mapd.py
deleted file mode 100755
index 9b5e0bccd5..0000000000
--- a/tools/replay/mapd.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python
-
-import matplotlib
-matplotlib.use('TkAgg')
-import matplotlib.pyplot as plt
-
-import numpy as np
-import zmq
-from cereal.services import service_list
-from selfdrive.config import Conversions as CV
-import cereal.messaging as messaging
-
-
-if __name__ == "__main__":
-  live_map_sock = messaging.sub_sock(service_list['liveMapData'].port, conflate=True)
-  plan_sock = messaging.sub_sock(service_list['plan'].port, conflate=True)
-
-  plt.ion()
-  fig = plt.figure(figsize=(8, 16))
-  ax = fig.add_subplot(2, 1, 1)
-  ax.set_title('Map')
-
-  SCALE = 1000
-  ax.set_xlim([-SCALE, SCALE])
-  ax.set_ylim([-SCALE, SCALE])
-  ax.set_xlabel('x [m]')
-  ax.set_ylabel('y [m]')
-  ax.grid(True)
-
-  points_plt, = ax.plot([0.0], [0.0], "--xk")
-  cur, = ax.plot([0.0], [0.0], "xr")
-
-  speed_txt = ax.text(-500, 900, '')
-  curv_txt = ax.text(-500, 775, '')
-
-  ax = fig.add_subplot(2, 1, 2)
-  ax.set_title('Curvature')
-  curvature_plt, = ax.plot([0.0], [0.0], "--xk")
-  ax.set_xlim([0, 500])
-  ax.set_ylim([0, 1e-2])
-  ax.set_xlabel('Distance along path [m]')
-  ax.set_ylabel('Curvature [1/m]')
-  ax.grid(True)
-
-  plt.show()
-
-  while True:
-    m = messaging.recv_one_or_none(live_map_sock)
-    p = messaging.recv_one_or_none(plan_sock)
-    if p is not None:
-      v = p.plan.vCurvature * CV.MS_TO_MPH
-      speed_txt.set_text('Desired curvature speed: %.2f mph' % v)
-
-    if m is not None:
-      print("Current way id: %d" % m.liveMapData.wayId)
-      curv_txt.set_text('Curvature valid: %s Dist: %03.0f m\nSpeedlimit valid: %s Speed: %.0f mph' %
-                        (str(m.liveMapData.curvatureValid),
-                         m.liveMapData.distToTurn,
-                         str(m.liveMapData.speedLimitValid),
-                         m.liveMapData.speedLimit * CV.MS_TO_MPH))
-
-      points_plt.set_xdata(m.liveMapData.roadX)
-      points_plt.set_ydata(m.liveMapData.roadY)
-      curvature_plt.set_xdata(m.liveMapData.roadCurvatureX)
-      curvature_plt.set_ydata(m.liveMapData.roadCurvature)
-
-    fig.canvas.draw()
-    fig.canvas.flush_events()