diff --git a/.github/workflows/test.yaml b/.github/workflows/selfdrive_tests.yaml similarity index 99% rename from .github/workflows/test.yaml rename to .github/workflows/selfdrive_tests.yaml index 71b797b34c..823689a642 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/selfdrive_tests.yaml @@ -1,4 +1,4 @@ -name: tests +name: selfdrive on: push: branches-ignore: diff --git a/.github/workflows/tools_tests.yaml b/.github/workflows/tools_tests.yaml new file mode 100644 index 0000000000..32badb1267 --- /dev/null +++ b/.github/workflows/tools_tests.yaml @@ -0,0 +1,18 @@ +name: tools +on: + push: + pull_request: + +jobs: + plotjuggler: + name: plotjuggler + runs-on: ubuntu-20.04 + timeout-minutes: 30 + steps: + - uses: actions/checkout@v2 + with: + submodules: true + - name: Install + run: cd tools/plotjuggler && ./install.sh + #- name: Juggle Demo + # run: cd tools/plotjuggler && ./juggle.py diff --git a/tools/LICENSE b/tools/LICENSE deleted file mode 100644 index b8fd9e299c..0000000000 --- a/tools/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 comma.ai - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/tools/README.md b/tools/README.md index c5033a931f..4fcc2f530b 100644 --- a/tools/README.md +++ b/tools/README.md @@ -16,7 +16,6 @@ Table of Contents * [Replay driving data](#replay-driving-data) * [Debug car controls](#debug-car-controls) * [Stream replayed CAN messages to EON](#stream-replayed-can-messages-to-eon) - * [Stream EON video data to a PC](#stream-eon-video-data-to-a-pc) * [Welcomed contributions](#welcomed-contributions) @@ -24,7 +23,7 @@ Table of Contents Requirements ============ -openpilot tools and the following setup steps are developed and tested on Ubuntu 20.04, MacOS 10.14.2 and Python 3.8.2. +openpilot tools and the following setup steps are developed and tested on Ubuntu 20.04, MacOS 10.14.2, and Python 3.8.2. Setup ============ @@ -136,33 +135,6 @@ python replay/unlogger.py ``` ![Imgur](https://i.imgur.com/AcurZk8.jpg) - -Stream EON video data to a PC ------------- - -**Hardware needed**: [EON](https://comma.ai/shop/products/eon-gold-dashcam-devkit/), [comma Smays](https://comma.ai/shop/products/comma-smays-adapter/). - -You can connect your EON to your pc using the Ethernet cable provided with the comma Smays and you'll be able to stream data from your EON, in real time, with low latency. A useful application is being able to stream the raw video frames at 20fps, as captured by the EON's camera. - -Usage: -``` -# ssh into the eon and run loggerd with the flag "--stream". In ../selfdrive/manager.py you can change: -# ... -# "loggerd": ("selfdrive/loggerd", ["./loggerd"]), -# ... -# with: -# ... -# "loggerd": ("selfdrive/loggerd", ["./loggerd", "--stream"]), -# ... - -# On the PC: -# To receive frames from the EON and re-publish them. 
Set PYGAME env variable if you want to display the video stream -python streamer/streamerd.py -``` - -![Imgur](stream.gif) - - Welcomed contributions ============= diff --git a/tools/nui/README b/tools/nui/README deleted file mode 100644 index 3033bcadc0..0000000000 --- a/tools/nui/README +++ /dev/null @@ -1,9 +0,0 @@ -== Ubuntu == - -sudo apt-get install capnproto libyaml-cpp-dev qt5-default - -== Mac == - -brew install qt5 ffmpeg capnp yaml-cpp zmq -brew link qt5 --force - diff --git a/tools/plotjuggler/install.sh b/tools/plotjuggler/install.sh index 1256136774..b97bbf0408 100755 --- a/tools/plotjuggler/install.sh +++ b/tools/plotjuggler/install.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/bin/bash -e wget https://github.com/commaai/PlotJuggler/releases/download/latest/bin.tar.gz tar -xf bin.tar.gz diff --git a/tools/stream.gif b/tools/stream.gif deleted file mode 100644 index cd266fb6a8..0000000000 Binary files a/tools/stream.gif and /dev/null differ diff --git a/tools/streamer/streamerd.py b/tools/streamer/streamerd.py deleted file mode 100755 index b26e0eafc9..0000000000 --- a/tools/streamer/streamerd.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python -# pylint: skip-file - -import os -import sys -import zmq -import cv2 -import numpy as np -import struct - -# sudo pip install git+git://github.com/mikeboers/PyAV.git -import av - -import cereal.messaging as messaging -from cereal.services import service_list - -PYGAME = os.getenv("PYGAME") is not None -if PYGAME: - import pygame - imgff = np.zeros((874, 1164, 3), dtype=np.uint8) - -# first 74 bytes in any stream -start = "0000000140010c01ffff016000000300b0000003000003005dac5900000001420101016000000300b0000003000003005da0025080381c5c665aee4c92ec80000000014401c0f1800420" - -def receiver_thread(): - if PYGAME: - pygame.init() - pygame.display.set_caption("vnet debug UI") - screen = pygame.display.set_mode((1164, 874), pygame.DOUBLEBUF) - camera_surface = pygame.surface.Surface((1164, 874), 0, 24).convert() - - addr = 
"192.168.5.11" - if len(sys.argv) >= 2: - addr = sys.argv[1] - - context = zmq.Context() - s = messaging.sub_sock(context, 9002, addr=addr) - frame_sock = messaging.pub_sock(context, service_list['roadCameraState'].port) - - ctx = av.codec.codec.Codec('hevc', 'r').create() - ctx.decode(av.packet.Packet(start.decode("hex"))) - - # import time - while 1: - # t1 = time.time() - ts, raw = s.recv_multipart() - ts = struct.unpack('q', ts)[0] * 1000 - # t1, t2 = time.time(), t1 - #print 'ms to get frame:', (t1-t2)*1000 - - pkt = av.packet.Packet(raw) - f = ctx.decode(pkt) - if not f: - continue - f = f[0] - # t1, t2 = time.time(), t1 - #print 'ms to decode:', (t1-t2)*1000 - - y_plane = np.frombuffer(f.planes[0], np.uint8).reshape((874, 1216))[:, 0:1164] - u_plane = np.frombuffer(f.planes[1], np.uint8).reshape((437, 608))[:, 0:582] - v_plane = np.frombuffer(f.planes[2], np.uint8).reshape((437, 608))[:, 0:582] - yuv_img = y_plane.tobytes() + u_plane.tobytes() + v_plane.tobytes() - # t1, t2 = time.time(), t1 - #print 'ms to make yuv:', (t1-t2)*1000 - #print 'tsEof:', ts - - dat = messaging.new_message('roadCameraState') - dat.roadCameraState.image = yuv_img - dat.roadCameraState.timestampEof = ts - dat.roadCameraState.transform = map(float, list(np.eye(3).flatten())) - frame_sock.send(dat.to_bytes()) - - if PYGAME: - yuv_np = np.frombuffer(yuv_img, dtype=np.uint8).reshape(874 * 3 // 2, -1) - cv2.cvtColor(yuv_np, cv2.COLOR_YUV2RGB_I420, dst=imgff) - #print yuv_np.shape, imgff.shape - - #scipy.misc.imsave("tmp.png", imgff) - - pygame.surfarray.blit_array(camera_surface, imgff.swapaxes(0, 1)) - screen.blit(camera_surface, (0, 0)) - pygame.display.flip() - - -def main(gctx=None): - receiver_thread() - -if __name__ == "__main__": - main() diff --git a/tools/webcam/README.md b/tools/webcam/README.md index 472b95177a..6d559b3c31 100644 --- a/tools/webcam/README.md +++ b/tools/webcam/README.md @@ -1,16 +1,15 @@ -Run openpilot with webcam on PC/laptop -===================== +# Run 
openpilot with webcam on PC + What's needed: - Ubuntu 20.04 - Python 3.8.2 - GPU (recommended) - Two USB webcams, at least 720p and 78 degrees FOV (e.g. Logitech C920/C615) - [Car harness](https://comma.ai/shop/products/comma-car-harness) with black panda to connect to your car -- [Panda paw](https://comma.ai/shop/products/panda-paw) (or USB-A to USB-A cable) to connect panda to your computer -- Tape, Charger, ... +- [Panda paw](https://comma.ai/shop/products/panda-paw) or USB-A to USB-A cable to connect panda to your computer That's it! -## Clone openpilot and install the requirements +## Setup openpilot ``` cd ~ git clone https://github.com/commaai/openpilot.git @@ -37,10 +36,8 @@ USE_WEBCAM=1 scons -j$(nproc) ## GO ``` -cd ~/openpilot/tools/webcam -./accept_terms.py # accept the user terms so that thermald can detect the car started cd ~/openpilot/selfdrive -PASSIVE=0 NOSENSOR=1 WEBCAM=1 ./manager.py +PASSIVE=0 NOSENSOR=1 USE_WEBCAM=1 ./manager.py ``` - Start the car, then the UI should show the road webcam's view - Adjust and secure the webcams (you can run tools/webcam/front_mount_helper.py to help mount the driver camera) diff --git a/tools/webcam/accept_terms.py b/tools/webcam/accept_terms.py deleted file mode 100755 index a5445a5719..0000000000 --- a/tools/webcam/accept_terms.py +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env python -from common.params import Params -from selfdrive.version import terms_version, training_version - -if __name__ == '__main__': - params = Params() - params.put("HasAcceptedTerms", str(terms_version, 'utf-8')) - params.put("CompletedTrainingVersion", str(training_version, 'utf-8')) - print("Terms Accepted!")