rename yuv streams (#23071)

* rename yuv streams

* bump cereal

Co-authored-by: Adeeb Shihadeh <adeebshihadeh@gmail.com>
old-commit-hash: 348d2d2b0d
commatwo_master
Dean Lee 3 years ago committed by GitHub
parent 0781870e69
commit 9b0fcaeb84
  1. 2
      cereal
  2. 4
      selfdrive/camerad/cameras/camera_qcom.cc
  3. 6
      selfdrive/camerad/cameras/camera_qcom2.cc
  4. 4
      selfdrive/camerad/cameras/camera_replay.cc
  5. 4
      selfdrive/camerad/cameras/camera_webcam.cc
  6. 6
      selfdrive/loggerd/loggerd.h
  7. 2
      selfdrive/modeld/dmonitoringmodeld.cc
  8. 2
      selfdrive/modeld/modeld.cc
  9. 8
      selfdrive/test/process_replay/model_replay.py
  10. 4
      selfdrive/test/process_replay/regen.py
  11. 6
      selfdrive/ui/replay/camera.h
  12. 4
      tools/sim/bridge.py

@@ -1 +1 @@
-Subproject commit 8cbd71836e4170fa175070796396a91771624e82
+Subproject commit d6f233bf7bd6d1ee3508b17667e44420ce38de0d

@@ -211,12 +211,12 @@ void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_i
 /*fps*/ 20,
 #endif
 device_id, ctx,
-VISION_STREAM_RGB_BACK, VISION_STREAM_YUV_BACK);
+VISION_STREAM_RGB_BACK, VISION_STREAM_ROAD);
 camera_init(v, &s->driver_cam, CAMERA_ID_OV8865, 1,
 /*pixel_clock=*/72000000, /*line_length_pclk=*/1602,
 /*max_gain=*/510, 10, device_id, ctx,
-VISION_STREAM_RGB_FRONT, VISION_STREAM_YUV_FRONT);
+VISION_STREAM_RGB_FRONT, VISION_STREAM_DRIVER);
 s->sm = new SubMaster({"driverState"});
 s->pm = new PubMaster({"roadCameraState", "driverCameraState", "thumbnail"});

@@ -709,13 +709,13 @@ static void camera_open(CameraState *s) {
 void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_id, cl_context ctx) {
 camera_init(s, v, &s->road_cam, CAMERA_ID_AR0231, 1, 20, device_id, ctx,
-VISION_STREAM_RGB_BACK, VISION_STREAM_YUV_BACK); // swap left/right
+VISION_STREAM_RGB_BACK, VISION_STREAM_ROAD); // swap left/right
 printf("road camera initted \n");
 camera_init(s, v, &s->wide_road_cam, CAMERA_ID_AR0231, 0, 20, device_id, ctx,
-VISION_STREAM_RGB_WIDE, VISION_STREAM_YUV_WIDE);
+VISION_STREAM_RGB_WIDE, VISION_STREAM_WIDE_ROAD);
 printf("wide road camera initted \n");
 camera_init(s, v, &s->driver_cam, CAMERA_ID_AR0231, 2, 20, device_id, ctx,
-VISION_STREAM_RGB_FRONT, VISION_STREAM_YUV_FRONT);
+VISION_STREAM_RGB_FRONT, VISION_STREAM_DRIVER);
 printf("driver camera initted \n");
 s->sm = new SubMaster({"driverState"});

@@ -98,9 +98,9 @@ void process_road_camera(MultiCameraState *s, CameraState *c, int cnt) {
 void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_id, cl_context ctx) {
 camera_init(v, &s->road_cam, CAMERA_ID_LGC920, 20, device_id, ctx,
-VISION_STREAM_RGB_BACK, VISION_STREAM_YUV_BACK, get_url(road_camera_route, "fcamera", 0));
+VISION_STREAM_RGB_BACK, VISION_STREAM_ROAD, get_url(road_camera_route, "fcamera", 0));
 // camera_init(v, &s->driver_cam, CAMERA_ID_LGC615, 10, device_id, ctx,
-// VISION_STREAM_RGB_FRONT, VISION_STREAM_YUV_FRONT, get_url(driver_camera_route, "dcamera", 0));
+// VISION_STREAM_RGB_FRONT, VISION_STREAM_DRIVER, get_url(driver_camera_route, "dcamera", 0));
 s->pm = new PubMaster({"roadCameraState", "driverCameraState", "thumbnail"});
 }

@@ -141,9 +141,9 @@ void driver_camera_thread(CameraState *s) {
 void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_id, cl_context ctx) {
 camera_init(v, &s->road_cam, CAMERA_ID_LGC920, 20, device_id, ctx,
-VISION_STREAM_RGB_BACK, VISION_STREAM_YUV_BACK);
+VISION_STREAM_RGB_BACK, VISION_STREAM_ROAD);
 camera_init(v, &s->driver_cam, CAMERA_ID_LGC615, 10, device_id, ctx,
-VISION_STREAM_RGB_FRONT, VISION_STREAM_YUV_FRONT);
+VISION_STREAM_RGB_FRONT, VISION_STREAM_DRIVER);
 s->pm = new PubMaster({"roadCameraState", "driverCameraState", "thumbnail"});
 }

@@ -44,7 +44,7 @@ const int SEGMENT_LENGTH = LOGGERD_TEST ? atoi(getenv("LOGGERD_SEGMENT_LENGTH"))
 const LogCameraInfo cameras_logged[] = {
 {
 .type = RoadCam,
-.stream_type = VISION_STREAM_YUV_BACK,
+.stream_type = VISION_STREAM_ROAD,
 .filename = "fcamera.hevc",
 .frame_packet_name = "roadCameraState",
 .fps = MAIN_FPS,
@@ -58,7 +58,7 @@ const LogCameraInfo cameras_logged[] = {
 },
 {
 .type = DriverCam,
-.stream_type = VISION_STREAM_YUV_FRONT,
+.stream_type = VISION_STREAM_DRIVER,
 .filename = "dcamera.hevc",
 .frame_packet_name = "driverCameraState",
 .fps = MAIN_FPS, // on EONs, more compressed this way
@@ -72,7 +72,7 @@ const LogCameraInfo cameras_logged[] = {
 },
 {
 .type = WideRoadCam,
-.stream_type = VISION_STREAM_YUV_WIDE,
+.stream_type = VISION_STREAM_WIDE_ROAD,
 .filename = "ecamera.hevc",
 .frame_packet_name = "wideRoadCameraState",
 .fps = MAIN_FPS,

@@ -39,7 +39,7 @@ int main(int argc, char **argv) {
 DMonitoringModelState model;
 dmonitoring_init(&model);
-VisionIpcClient vipc_client = VisionIpcClient("camerad", VISION_STREAM_YUV_FRONT, true);
+VisionIpcClient vipc_client = VisionIpcClient("camerad", VISION_STREAM_DRIVER, true);
 while (!do_exit && !vipc_client.connect(false)) {
 util::sleep_for(100);
 }

@@ -153,7 +153,7 @@ int main(int argc, char **argv) {
 model_init(&model, device_id, context);
 LOGW("models loaded, modeld starting");
-VisionIpcClient vipc_client = VisionIpcClient("camerad", wide_camera ? VISION_STREAM_YUV_WIDE : VISION_STREAM_YUV_BACK, true, device_id, context);
+VisionIpcClient vipc_client = VisionIpcClient("camerad", wide_camera ? VISION_STREAM_WIDE_ROAD : VISION_STREAM_ROAD, true, device_id, context);
 while (!do_exit && !vipc_client.connect(false)) {
 util::sleep_for(100);
 }

@@ -50,10 +50,10 @@ def process_frame(msg, pm, sm, log_msgs, vipc_server, spinner, frs, frame_idxs,
 img = frs[msg.which()].get(frame_idxs[msg.which()], pix_fmt="yuv420p")[0]
 if msg.which == "roadCameraState":
-vipc_server.send(VisionStreamType.VISION_STREAM_YUV_BACK, img.flatten().tobytes(), f.roadCameraState.frameId,
+vipc_server.send(VisionStreamType.VISION_STREAM_ROAD, img.flatten().tobytes(), f.roadCameraState.frameId,
 f.roadCameraState.timestampSof, f.roadCameraState.timestampEof)
 else:
-vipc_server.send(VisionStreamType.VISION_STREAM_YUV_FRONT, img.flatten().tobytes(), f.driverCameraState.frameId,
+vipc_server.send(VisionStreamType.VISION_STREAM_DRIVER, img.flatten().tobytes(), f.driverCameraState.frameId,
 f.driverCameraState.timestampSof, f.driverCameraState.timestampEof)
 with Timeout(seconds=15):
 log_msgs.append(messaging.recv_one(sm.sock[packet_from_camera[msg.which()]]))
@@ -73,8 +73,8 @@ def model_replay(lr_list, frs):
 spinner.update("starting model replay")
 vipc_server = VisionIpcServer("camerad")
-vipc_server.create_buffers(VisionStreamType.VISION_STREAM_YUV_BACK, 40, False, *(tici_f_frame_size if TICI else eon_f_frame_size))
-vipc_server.create_buffers(VisionStreamType.VISION_STREAM_YUV_FRONT, 40, False, *(tici_d_frame_size if TICI else eon_d_frame_size))
+vipc_server.create_buffers(VisionStreamType.VISION_STREAM_ROAD, 40, False, *(tici_f_frame_size if TICI else eon_f_frame_size))
+vipc_server.create_buffers(VisionStreamType.VISION_STREAM_DRIVER, 40, False, *(tici_d_frame_size if TICI else eon_d_frame_size))
 vipc_server.start_listener()
 pm = messaging.PubMaster(['roadCameraState', 'driverCameraState', 'liveCalibration', 'lateralPlan'])

@@ -39,8 +39,8 @@ def replay_service(s, msgs):
 vs = None
 def replay_cameras(lr, frs):
 cameras = [
-("roadCameraState", DT_MDL, eon_f_frame_size, VisionStreamType.VISION_STREAM_YUV_BACK),
-("driverCameraState", DT_DMON, eon_d_frame_size, VisionStreamType.VISION_STREAM_YUV_FRONT),
+("roadCameraState", DT_MDL, eon_f_frame_size, VisionStreamType.VISION_STREAM_ROAD),
+("driverCameraState", DT_DMON, eon_d_frame_size, VisionStreamType.VISION_STREAM_DRIVER),
 ]
 def replay_camera(s, stream, dt, vipc_server, fr, size):

@@ -32,9 +32,9 @@ protected:
 void cameraThread(Camera &cam);
 Camera cameras_[MAX_CAMERAS] = {
-{.type = RoadCam, .rgb_type = VISION_STREAM_RGB_BACK, .yuv_type = VISION_STREAM_YUV_BACK},
-{.type = DriverCam, .rgb_type = VISION_STREAM_RGB_FRONT, .yuv_type = VISION_STREAM_YUV_FRONT},
-{.type = WideRoadCam, .rgb_type = VISION_STREAM_RGB_WIDE, .yuv_type = VISION_STREAM_YUV_WIDE},
+{.type = RoadCam, .rgb_type = VISION_STREAM_RGB_BACK, .yuv_type = VISION_STREAM_ROAD},
+{.type = DriverCam, .rgb_type = VISION_STREAM_RGB_FRONT, .yuv_type = VISION_STREAM_DRIVER},
+{.type = WideRoadCam, .rgb_type = VISION_STREAM_RGB_WIDE, .yuv_type = VISION_STREAM_WIDE_ROAD},
 };
 std::atomic<int> publishing_ = 0;
 std::unique_ptr<VisionIpcServer> vipc_server_;

@@ -68,7 +68,7 @@ class Camerad:
 # TODO: remove RGB buffers once the last RGB vipc subscriber is removed
 self.vipc_server.create_buffers(VisionStreamType.VISION_STREAM_RGB_BACK, 4, True, W, H)
-self.vipc_server.create_buffers(VisionStreamType.VISION_STREAM_YUV_BACK, 40, False, W, H)
+self.vipc_server.create_buffers(VisionStreamType.VISION_STREAM_ROAD, 40, False, W, H)
 self.vipc_server.start_listener()
 # set up for pyopencl rgb to yuv conversion
@@ -98,7 +98,7 @@ class Camerad:
 # TODO: remove RGB send once the last RGB vipc subscriber is removed
 self.vipc_server.send(VisionStreamType.VISION_STREAM_RGB_BACK, img.tobytes(), self.frame_id, eof, eof)
-self.vipc_server.send(VisionStreamType.VISION_STREAM_YUV_BACK, yuv.data.tobytes(), self.frame_id, eof, eof)
+self.vipc_server.send(VisionStreamType.VISION_STREAM_ROAD, yuv.data.tobytes(), self.frame_id, eof, eof)
 dat = messaging.new_message('roadCameraState')
 dat.roadCameraState = {

Loading…
Cancel
Save