loggerd: switch to v4l encoder try 2 (#24380)

* start v4l encoder

* v4l encoder starts

* start and stop

* fill in proper controls

* it dequeued a buffer

* getting bytes

* it made a video

* it does make files

* getting close

* ahh, so that's how dequeue works

* qcam works (no remuxing)

* remuxing works

* we just need to make shutdown and rollover graceful

* graceful destruction

* switch to polling

* should work now

* fix pc build

* refactors, stop properly

* touchups, remove a copy

* add v4l encoder to release

* include file

* move writing to its own thread

* fix minor memory leak

* block instead of dropping frames

* add counter, fix tests maybe

* better debugging and test print

* print file path in assert

* format string in test

* no more oversized qlogs

* match qcam

* touchups, remove omx encoder

* remove omx include files

* checked ioctl, better debugging, open by name

* unused import

* move linux includes to third_party/linux/include

* simple encoderd

* full packet

* encoderd should be complete

* lagging print

* updates

* name dq thread

* subset idx

* video file writing works

* debug

* potential bugfix

* rotation works

* iframe

* keep writing support

* ci should pass

* loggerd, not encoderd

* remote encoder code

* support remote encoder

* cereal to master, add encoderd

* header no longer required

* put that back there

* realtime

* lower decoder latency

* don't use queue for VisionIpcBufExtra, disable realtime again

* assert all written

* hmm simpler

* only push to to_write if we are writing

* assert timestamp is right

* use at and remove assert

* revert to queue

Co-authored-by: Comma Device <device@comma.ai>
old-commit-hash: 0baa4c3e2a
branch: taco
George Hotz authored 3 years ago, committed by GitHub
parent 576ae03e43
commit 7119a98414
38 changed files (lines changed in parentheses):
  1. SConstruct (1)
  2. cereal (2)
  3. release/files_common (9)
  4. selfdrive/loggerd/.gitignore (1)
  5. selfdrive/loggerd/SConscript (10)
  6. selfdrive/loggerd/encoder.h (4)
  7. selfdrive/loggerd/encoderd.cc (145)
  8. selfdrive/loggerd/loggerd.cc (15)
  9. selfdrive/loggerd/loggerd.h (12)
  10. selfdrive/loggerd/omx_encoder.cc (547)
  11. selfdrive/loggerd/omx_encoder.h (79)
  12. selfdrive/loggerd/raw_logger.cc (2)
  13. selfdrive/loggerd/raw_logger.h (3)
  14. selfdrive/loggerd/remote_encoder.cc (84)
  15. selfdrive/loggerd/remote_encoder.h (12)
  16. selfdrive/loggerd/tests/test_encoder.py (3)
  17. selfdrive/loggerd/v4l_encoder.cc (401)
  18. selfdrive/loggerd/v4l_encoder.h (48)
  19. selfdrive/loggerd/video_writer.cc (1)
  20. selfdrive/loggerd/video_writer.h (2)
  21. third_party/linux/include/msm_media_info.h (0)
  22. third_party/linux/include/v4l2-controls.h (1696)
  23. third_party/openmax/include/OMX_Audio.h (3)
  24. third_party/openmax/include/OMX_Component.h (3)
  25. third_party/openmax/include/OMX_ContentPipe.h (3)
  26. third_party/openmax/include/OMX_Core.h (3)
  27. third_party/openmax/include/OMX_CoreExt.h (3)
  28. third_party/openmax/include/OMX_IVCommon.h (3)
  29. third_party/openmax/include/OMX_Image.h (3)
  30. third_party/openmax/include/OMX_Index.h (3)
  31. third_party/openmax/include/OMX_IndexExt.h (3)
  32. third_party/openmax/include/OMX_Other.h (3)
  33. third_party/openmax/include/OMX_QCOMExtns.h (3)
  34. third_party/openmax/include/OMX_Skype_VideoExtensions.h (3)
  35. third_party/openmax/include/OMX_Types.h (3)
  36. third_party/openmax/include/OMX_Video.h (3)
  37. third_party/openmax/include/OMX_VideoExt.h (3)
  38. tools/camerastream/compressed_vipc.py (26)

@@ -184,7 +184,6 @@ env = Environment(
 "#third_party/acados/include/hpipm/include",
 "#third_party/catch2/include",
 "#third_party/libyuv/include",
-"#third_party/openmax/include",
 "#third_party/json11",
 "#third_party/curl/include",
 "#third_party/libgralloc/include",

@@ -1 +1 @@
-Subproject commit a057aed16747d0e414145d83d4861c50315781ad
+Subproject commit aea23111ee0a08efb549d1d318405b9af8f456d9

@@ -299,19 +299,21 @@ selfdrive/proclogd/proclog.h
 selfdrive/loggerd/SConscript
 selfdrive/loggerd/encoder.h
-selfdrive/loggerd/omx_encoder.cc
-selfdrive/loggerd/omx_encoder.h
+selfdrive/loggerd/v4l_encoder.cc
+selfdrive/loggerd/v4l_encoder.h
 selfdrive/loggerd/video_writer.cc
 selfdrive/loggerd/video_writer.h
+selfdrive/loggerd/remote_encoder.cc
+selfdrive/loggerd/remote_encoder.h
 selfdrive/loggerd/logger.cc
 selfdrive/loggerd/logger.h
 selfdrive/loggerd/loggerd.cc
 selfdrive/loggerd/loggerd.h
+selfdrive/loggerd/encoderd.cc
 selfdrive/loggerd/main.cc
 selfdrive/loggerd/bootlog.cc
 selfdrive/loggerd/raw_logger.cc
 selfdrive/loggerd/raw_logger.h
-selfdrive/loggerd/include/msm_media_info.h
 selfdrive/loggerd/__init__.py
 selfdrive/loggerd/config.py
@@ -444,7 +446,6 @@ third_party/SConscript
 third_party/linux/**
 third_party/opencl/**
-third_party/openmax/**
 third_party/json11/json11.cpp
 third_party/json11/json11.hpp

@@ -1,2 +1,3 @@
 loggerd
+encoderd
 tests/test_logger

@@ -1,15 +1,13 @@
-Import('env', 'arch', 'cereal', 'messaging', 'common', 'visionipc', 'gpucommon')
+Import('env', 'arch', 'cereal', 'messaging', 'common', 'visionipc')
 libs = [common, cereal, messaging, visionipc,
         'zmq', 'capnp', 'kj', 'z',
         'avformat', 'avcodec', 'swscale', 'avutil',
         'yuv', 'bz2', 'OpenCL', 'pthread']
-src = ['logger.cc', 'loggerd.cc', 'video_writer.cc']
+src = ['logger.cc', 'loggerd.cc', 'video_writer.cc', 'remote_encoder.cc']
 if arch == "larch64":
-  src += ['omx_encoder.cc']
-  libs += ['OmxCore', 'gsl', 'CB'] + gpucommon
+  src += ['v4l_encoder.cc']
 else:
   src += ['raw_logger.cc']
@@ -22,6 +20,8 @@ logger_lib = env.Library('logger', src)
 libs.insert(0, logger_lib)
 env.Program('loggerd', ['main.cc'], LIBS=libs)
+if arch == "larch64":
+  env.Program('encoderd', ['encoderd.cc'], LIBS=libs)
 env.Program('bootlog.cc', LIBS=libs)
 if GetOption('test'):

@@ -1,13 +1,13 @@
 #pragma once
 #include <cstdint>
-#include "selfdrive/loggerd/loggerd.h"
+#include "cereal/visionipc/visionipc.h"
 class VideoEncoder {
 public:
   virtual ~VideoEncoder() {}
   virtual int encode_frame(const uint8_t *y_ptr, const uint8_t *u_ptr, const uint8_t *v_ptr,
-                           int in_width, int in_height, uint64_t ts) = 0;
+                           int in_width, int in_height, VisionIpcBufExtra *extra) = 0;
   virtual void encoder_open(const char* path) = 0;
   virtual void encoder_close() = 0;
 };
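The signature change above (uint64_t ts becomes VisionIpcBufExtra *extra) ripples through every encoder in this commit. A purely illustrative stub of the updated interface, not part of the diff; VisionIpcBufExtra carries frame_id, timestamp_sof, and timestamp_eof:

class NullEncoder : public VideoEncoder {
public:
  int encode_frame(const uint8_t *y_ptr, const uint8_t *u_ptr, const uint8_t *v_ptr,
                   int in_width, int in_height, VisionIpcBufExtra *extra) override {
    // a real encoder would consume the planes and publish a packet tagged with
    // extra->frame_id; the stub just pretends the frame was accepted
    return counter++;
  }
  void encoder_open(const char* path) override {}
  void encoder_close() override {}
private:
  int counter = 0;
};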

@@ -0,0 +1,145 @@
#include "selfdrive/loggerd/loggerd.h"
ExitHandler do_exit;
struct EncoderdState {
int max_waiting = 0;
// Sync logic for startup
std::atomic<int> encoders_ready = 0;
std::atomic<uint32_t> start_frame_id = 0;
bool camera_ready[WideRoadCam + 1] = {};
bool camera_synced[WideRoadCam + 1] = {};
};
// Handle initial encoder syncing by waiting for all encoders to reach the same frame id
bool sync_encoders(EncoderdState *s, CameraType cam_type, uint32_t frame_id) {
if (s->camera_synced[cam_type]) return true;
if (s->max_waiting > 1 && s->encoders_ready != s->max_waiting) {
// add a small margin to the start frame id in case one of the encoders already dropped the next frame
update_max_atomic(s->start_frame_id, frame_id + 2);
if (std::exchange(s->camera_ready[cam_type], true) == false) {
++s->encoders_ready;
LOGD("camera %d encoder ready", cam_type);
}
return false;
} else {
if (s->max_waiting == 1) update_max_atomic(s->start_frame_id, frame_id);
bool synced = frame_id >= s->start_frame_id;
s->camera_synced[cam_type] = synced;
if (!synced) LOGD("camera %d waiting for frame %d, cur %d", cam_type, (int)s->start_frame_id, frame_id);
return synced;
}
}
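(update_max_atomic comes from openpilot's common utilities and isn't shown in this diff; a minimal sketch, assuming the usual compare-and-swap loop:)

template <typename T>
void update_max_atomic(std::atomic<T> &max, T value) {
  T prev = max.load();
  // compare_exchange_weak reloads prev on failure, so this retries until the
  // store lands or another thread has already raced in a larger value
  while (prev < value && !max.compare_exchange_weak(prev, value)) {}
}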
void encoder_thread(EncoderdState *s, const LogCameraInfo &cam_info) {
util::set_thread_name(cam_info.filename);
std::vector<Encoder *> encoders;
VisionIpcClient vipc_client = VisionIpcClient("camerad", cam_info.stream_type, false);
int cur_seg = 0;
while (!do_exit) {
if (!vipc_client.connect(false)) {
util::sleep_for(5);
continue;
}
// init encoders
if (encoders.empty()) {
VisionBuf buf_info = vipc_client.buffers[0];
LOGD("encoder init %dx%d", buf_info.width, buf_info.height);
// main encoder
encoders.push_back(new Encoder(cam_info.filename, cam_info.type, buf_info.width, buf_info.height,
cam_info.fps, cam_info.bitrate, cam_info.is_h265,
buf_info.width, buf_info.height, false));
// qcamera encoder
if (cam_info.has_qcamera) {
encoders.push_back(new Encoder(qcam_info.filename, cam_info.type, buf_info.width, buf_info.height,
qcam_info.fps, qcam_info.bitrate, qcam_info.is_h265,
qcam_info.frame_width, qcam_info.frame_height, false));
}
}
for (int i = 0; i < encoders.size(); ++i) {
encoders[i]->encoder_open(NULL);
}
bool lagging = false;
while (!do_exit) {
VisionIpcBufExtra extra;
VisionBuf* buf = vipc_client.recv(&extra);
if (buf == nullptr) continue;
// detect loop around and drop the frames
if (buf->get_frame_id() != extra.frame_id) {
if (!lagging) {
LOGE("encoder %s lag buffer id: %d extra id: %d", cam_info.filename, buf->get_frame_id(), extra.frame_id);
lagging = true;
}
continue;
}
lagging = false;
if (cam_info.trigger_rotate) {
if (!sync_encoders(s, cam_info.type, extra.frame_id)) {
continue;
}
if (do_exit) break;
}
// encode a frame
for (int i = 0; i < encoders.size(); ++i) {
int out_id = encoders[i]->encode_frame(buf->y, buf->u, buf->v,
buf->width, buf->height, &extra);
if (out_id == -1) {
LOGE("Failed to encode frame. frame_id: %d", extra.frame_id);
}
}
const int frames_per_seg = SEGMENT_LENGTH * MAIN_FPS;
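// e.g. with loggerd's usual SEGMENT_LENGTH = 60 and MAIN_FPS = 20, this rolls the files over every 1200 frames (one minute)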
if (cur_seg >= 0 && extra.frame_id >= ((cur_seg + 1) * frames_per_seg) + s->start_frame_id) {
for (auto &e : encoders) {
e->encoder_close();
e->encoder_open(NULL);
}
++cur_seg;
}
}
}
LOG("encoder destroy");
for(auto &e : encoders) {
e->encoder_close();
delete e;
}
}
void encoderd_thread() {
EncoderdState s;
std::vector<std::thread> encoder_threads;
for (const auto &cam : cameras_logged) {
if (cam.enable) {
encoder_threads.push_back(std::thread(encoder_thread, &s, cam));
if (cam.trigger_rotate) s.max_waiting++;
}
}
for (auto &t : encoder_threads) t.join();
}
int main() {
if (Hardware::TICI()) {
int ret;
ret = util::set_realtime_priority(52);
assert(ret == 0);
ret = util::set_core_affinity({7});
assert(ret == 0);
}
encoderd_thread();
return 0;
}
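(util::set_realtime_priority and util::set_core_affinity live in selfdrive/common/util.cc and aren't part of this diff; a plausible sketch of the former, assuming the usual SCHED_FIFO approach -- the real helper may differ:)

#include <sched.h>

int set_realtime_priority(int level) {
  struct sched_param sp = {};
  sp.sched_priority = level;
  // pid 0 targets the calling thread on Linux
  return sched_setscheduler(0, SCHED_FIFO, &sp);
}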

@@ -1,4 +1,6 @@
 #include "selfdrive/loggerd/loggerd.h"
+#include "selfdrive/loggerd/remote_encoder.h"
+bool USE_REMOTE_ENCODER = false;
 ExitHandler do_exit;
@@ -107,7 +109,7 @@ void encoder_thread(LoggerdState *s, const LogCameraInfo &cam_info) {
 // encode a frame
 for (int i = 0; i < encoders.size(); ++i) {
   int out_id = encoders[i]->encode_frame(buf->y, buf->u, buf->v,
-                                         buf->width, buf->height, extra.timestamp_eof);
+                                         buf->width, buf->height, &extra);
   if (out_id == -1) {
     LOGE("Failed to encode frame. frame_id: %d encode_id: %d", extra.frame_id, encode_idx);
@@ -188,15 +190,18 @@ void loggerd_thread() {
 typedef struct QlogState {
   std::string name;
   int counter, freq;
+  bool encoder;
 } QlogState;
 std::unordered_map<SubSocket*, QlogState> qlog_states;
+std::unordered_map<SubSocket*, struct RemoteEncoder> remote_encoders;
 std::unique_ptr<Context> ctx(Context::create());
 std::unique_ptr<Poller> poller(Poller::create());
 // subscribe to all socks
 for (const auto& it : services) {
-  if (!it.should_log) continue;
+  const bool encoder = USE_REMOTE_ENCODER & (strcmp(it.name+strlen(it.name)-strlen("EncodeData"), "EncodeData") == 0);
+  if (!it.should_log && !encoder) continue;
   LOGD("logging %s (on port %d)", it.name, it.port);
   SubSocket * sock = SubSocket::create(ctx.get(), it.name);
@@ -206,6 +211,7 @@ void loggerd_thread() {
   .name = it.name,
   .counter = 0,
   .freq = it.decimation,
+  .encoder = encoder,
 };
 }
@@ -238,9 +244,14 @@ void loggerd_thread() {
 Message *msg = nullptr;
 while (!do_exit && (msg = sock->receive(true))) {
   const bool in_qlog = qs.freq != -1 && (qs.counter++ % qs.freq == 0);
+  if (qs.encoder) {
+    bytes_count += handle_encoder_msg(&s, msg, qs.name, remote_encoders[sock]);
+  } else {
     logger_log(&s.logger, (uint8_t *)msg->getData(), msg->getSize(), in_qlog);
     bytes_count += msg->getSize();
     delete msg;
+  }
 rotate_if_needed(&s);

@@ -25,8 +25,8 @@
 #include "selfdrive/loggerd/encoder.h"
 #include "selfdrive/loggerd/logger.h"
 #ifdef QCOM2
-#include "selfdrive/loggerd/omx_encoder.h"
-#define Encoder OmxEncoder
+#include "selfdrive/loggerd/v4l_encoder.h"
+#define Encoder V4LEncoder
 #else
 #include "selfdrive/loggerd/raw_logger.h"
 #define Encoder RawLogger
@@ -67,6 +67,8 @@ const LogCameraInfo cameras_logged[] = {
   .trigger_rotate = true,
   .enable = true,
   .record = true,
+  .frame_width = 1928,
+  .frame_height = 1208,
 },
 {
   .type = DriverCam,
@@ -79,6 +81,8 @@ const LogCameraInfo cameras_logged[] = {
   .trigger_rotate = true,
   .enable = true,
   .record = Params().getBool("RecordFront"),
+  .frame_width = 1928,
+  .frame_height = 1208,
 },
 {
   .type = WideRoadCam,
@@ -91,6 +95,8 @@ const LogCameraInfo cameras_logged[] = {
   .trigger_rotate = true,
   .enable = Hardware::TICI(),
   .record = Hardware::TICI(),
+  .frame_width = 1928,
+  .frame_height = 1208,
 },
 };
 const LogCameraInfo qcam_info = {
@@ -98,6 +104,8 @@ const LogCameraInfo qcam_info = {
   .fps = MAIN_FPS,
   .bitrate = 256000,
   .is_h265 = false,
+  .enable = true,
+  .record = true,
   .frame_width = Hardware::TICI() ? 526 : 480,
   .frame_height = Hardware::TICI() ? 330 : 360 // keep pixel count the same?
 };

@@ -1,547 +0,0 @@
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
#include "selfdrive/loggerd/omx_encoder.h"
#include "cereal/messaging/messaging.h"
#include <fcntl.h>
#include <sys/stat.h>
#include <unistd.h>
#include <cassert>
#include <cstdlib>
#include <cstdio>
#include <OMX_Component.h>
#include <OMX_IndexExt.h>
#include <OMX_QCOMExtns.h>
#include <OMX_VideoExt.h>
#include "libyuv.h"
#include "selfdrive/common/swaglog.h"
#include "selfdrive/common/util.h"
#include "selfdrive/loggerd/include/msm_media_info.h"
// Check the OMX error code and assert if an error occurred.
#define OMX_CHECK(_expr) \
do { \
assert(OMX_ErrorNone == (_expr)); \
} while (0)
extern ExitHandler do_exit;
// ***** OMX callback functions *****
void OmxEncoder::wait_for_state(OMX_STATETYPE state_) {
std::unique_lock lk(this->state_lock);
while (this->state != state_) {
this->state_cv.wait(lk);
}
}
static OMX_CALLBACKTYPE omx_callbacks = {
.EventHandler = OmxEncoder::event_handler,
.EmptyBufferDone = OmxEncoder::empty_buffer_done,
.FillBufferDone = OmxEncoder::fill_buffer_done,
};
OMX_ERRORTYPE OmxEncoder::event_handler(OMX_HANDLETYPE component, OMX_PTR app_data, OMX_EVENTTYPE event,
OMX_U32 data1, OMX_U32 data2, OMX_PTR event_data) {
OmxEncoder *e = (OmxEncoder*)app_data;
if (event == OMX_EventCmdComplete) {
assert(data1 == OMX_CommandStateSet);
LOG("set state event 0x%x", data2);
{
std::unique_lock lk(e->state_lock);
e->state = (OMX_STATETYPE)data2;
}
e->state_cv.notify_all();
} else if (event == OMX_EventError) {
LOGE("OMX error 0x%08x", data1);
} else {
LOGE("OMX unhandled event %d", event);
assert(false);
}
return OMX_ErrorNone;
}
OMX_ERRORTYPE OmxEncoder::empty_buffer_done(OMX_HANDLETYPE component, OMX_PTR app_data,
OMX_BUFFERHEADERTYPE *buffer) {
OmxEncoder *e = (OmxEncoder*)app_data;
e->free_in.push(buffer);
return OMX_ErrorNone;
}
OMX_ERRORTYPE OmxEncoder::fill_buffer_done(OMX_HANDLETYPE component, OMX_PTR app_data,
OMX_BUFFERHEADERTYPE *buffer) {
OmxEncoder *e = (OmxEncoder*)app_data;
e->done_out.push(buffer);
return OMX_ErrorNone;
}
#define PORT_INDEX_IN 0
#define PORT_INDEX_OUT 1
static const char* omx_color_fomat_name(uint32_t format) __attribute__((unused));
static const char* omx_color_fomat_name(uint32_t format) {
switch (format) {
case OMX_COLOR_FormatUnused: return "OMX_COLOR_FormatUnused";
case OMX_COLOR_FormatMonochrome: return "OMX_COLOR_FormatMonochrome";
case OMX_COLOR_Format8bitRGB332: return "OMX_COLOR_Format8bitRGB332";
case OMX_COLOR_Format12bitRGB444: return "OMX_COLOR_Format12bitRGB444";
case OMX_COLOR_Format16bitARGB4444: return "OMX_COLOR_Format16bitARGB4444";
case OMX_COLOR_Format16bitARGB1555: return "OMX_COLOR_Format16bitARGB1555";
case OMX_COLOR_Format16bitRGB565: return "OMX_COLOR_Format16bitRGB565";
case OMX_COLOR_Format16bitBGR565: return "OMX_COLOR_Format16bitBGR565";
case OMX_COLOR_Format18bitRGB666: return "OMX_COLOR_Format18bitRGB666";
case OMX_COLOR_Format18bitARGB1665: return "OMX_COLOR_Format18bitARGB1665";
case OMX_COLOR_Format19bitARGB1666: return "OMX_COLOR_Format19bitARGB1666";
case OMX_COLOR_Format24bitRGB888: return "OMX_COLOR_Format24bitRGB888";
case OMX_COLOR_Format24bitBGR888: return "OMX_COLOR_Format24bitBGR888";
case OMX_COLOR_Format24bitARGB1887: return "OMX_COLOR_Format24bitARGB1887";
case OMX_COLOR_Format25bitARGB1888: return "OMX_COLOR_Format25bitARGB1888";
case OMX_COLOR_Format32bitBGRA8888: return "OMX_COLOR_Format32bitBGRA8888";
case OMX_COLOR_Format32bitARGB8888: return "OMX_COLOR_Format32bitARGB8888";
case OMX_COLOR_FormatYUV411Planar: return "OMX_COLOR_FormatYUV411Planar";
case OMX_COLOR_FormatYUV411PackedPlanar: return "OMX_COLOR_FormatYUV411PackedPlanar";
case OMX_COLOR_FormatYUV420Planar: return "OMX_COLOR_FormatYUV420Planar";
case OMX_COLOR_FormatYUV420PackedPlanar: return "OMX_COLOR_FormatYUV420PackedPlanar";
case OMX_COLOR_FormatYUV420SemiPlanar: return "OMX_COLOR_FormatYUV420SemiPlanar";
case OMX_COLOR_FormatYUV422Planar: return "OMX_COLOR_FormatYUV422Planar";
case OMX_COLOR_FormatYUV422PackedPlanar: return "OMX_COLOR_FormatYUV422PackedPlanar";
case OMX_COLOR_FormatYUV422SemiPlanar: return "OMX_COLOR_FormatYUV422SemiPlanar";
case OMX_COLOR_FormatYCbYCr: return "OMX_COLOR_FormatYCbYCr";
case OMX_COLOR_FormatYCrYCb: return "OMX_COLOR_FormatYCrYCb";
case OMX_COLOR_FormatCbYCrY: return "OMX_COLOR_FormatCbYCrY";
case OMX_COLOR_FormatCrYCbY: return "OMX_COLOR_FormatCrYCbY";
case OMX_COLOR_FormatYUV444Interleaved: return "OMX_COLOR_FormatYUV444Interleaved";
case OMX_COLOR_FormatRawBayer8bit: return "OMX_COLOR_FormatRawBayer8bit";
case OMX_COLOR_FormatRawBayer10bit: return "OMX_COLOR_FormatRawBayer10bit";
case OMX_COLOR_FormatRawBayer8bitcompressed: return "OMX_COLOR_FormatRawBayer8bitcompressed";
case OMX_COLOR_FormatL2: return "OMX_COLOR_FormatL2";
case OMX_COLOR_FormatL4: return "OMX_COLOR_FormatL4";
case OMX_COLOR_FormatL8: return "OMX_COLOR_FormatL8";
case OMX_COLOR_FormatL16: return "OMX_COLOR_FormatL16";
case OMX_COLOR_FormatL24: return "OMX_COLOR_FormatL24";
case OMX_COLOR_FormatL32: return "OMX_COLOR_FormatL32";
case OMX_COLOR_FormatYUV420PackedSemiPlanar: return "OMX_COLOR_FormatYUV420PackedSemiPlanar";
case OMX_COLOR_FormatYUV422PackedSemiPlanar: return "OMX_COLOR_FormatYUV422PackedSemiPlanar";
case OMX_COLOR_Format18BitBGR666: return "OMX_COLOR_Format18BitBGR666";
case OMX_COLOR_Format24BitARGB6666: return "OMX_COLOR_Format24BitARGB6666";
case OMX_COLOR_Format24BitABGR6666: return "OMX_COLOR_Format24BitABGR6666";
case OMX_COLOR_FormatAndroidOpaque: return "OMX_COLOR_FormatAndroidOpaque";
case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar: return "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar";
case OMX_QCOM_COLOR_FormatYVU420SemiPlanar: return "OMX_QCOM_COLOR_FormatYVU420SemiPlanar";
case OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka: return "OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka";
case OMX_SEC_COLOR_FormatNV12Tiled: return "OMX_SEC_COLOR_FormatNV12Tiled";
case OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar32m: return "OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar32m";
// case QOMX_COLOR_FormatYVU420SemiPlanar: return "QOMX_COLOR_FormatYVU420SemiPlanar";
case QOMX_COLOR_FormatYVU420PackedSemiPlanar32m4ka: return "QOMX_COLOR_FormatYVU420PackedSemiPlanar32m4ka";
case QOMX_COLOR_FormatYUV420PackedSemiPlanar16m2ka: return "QOMX_COLOR_FormatYUV420PackedSemiPlanar16m2ka";
// case QOMX_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka: return "QOMX_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka";
// case QOMX_COLOR_FORMATYUV420PackedSemiPlanar32m: return "QOMX_COLOR_FORMATYUV420PackedSemiPlanar32m";
case QOMX_COLOR_FORMATYUV420PackedSemiPlanar32mMultiView: return "QOMX_COLOR_FORMATYUV420PackedSemiPlanar32mMultiView";
case QOMX_COLOR_FORMATYUV420PackedSemiPlanar32mCompressed: return "QOMX_COLOR_FORMATYUV420PackedSemiPlanar32mCompressed";
case QOMX_COLOR_Format32bitRGBA8888: return "QOMX_COLOR_Format32bitRGBA8888";
case QOMX_COLOR_Format32bitRGBA8888Compressed: return "QOMX_COLOR_Format32bitRGBA8888Compressed";
default:
return "unkn";
}
}
// ***** encoder functions *****
OmxEncoder::OmxEncoder(const char* filename, CameraType type, int in_width, int in_height, int fps, int bitrate, bool h265, int out_width, int out_height, bool write)
: in_width_(in_width), in_height_(in_height), width(out_width), height(out_height) {
this->filename = filename;
this->type = type;
this->write = write;
this->fps = fps;
this->remuxing = !h265;
this->downscale = in_width != out_width || in_height != out_height;
if (this->downscale) {
this->y_ptr2 = (uint8_t *)malloc(this->width*this->height);
this->u_ptr2 = (uint8_t *)malloc(this->width*this->height/4);
this->v_ptr2 = (uint8_t *)malloc(this->width*this->height/4);
}
auto component = (OMX_STRING)(h265 ? "OMX.qcom.video.encoder.hevc" : "OMX.qcom.video.encoder.avc");
int err = OMX_GetHandle(&this->handle, component, this, &omx_callbacks);
if (err != OMX_ErrorNone) {
LOGE("error getting codec: %x", err);
}
assert(err == OMX_ErrorNone);
// printf("handle: %p\n", this->handle);
// setup input port
OMX_PARAM_PORTDEFINITIONTYPE in_port = {0};
in_port.nSize = sizeof(in_port);
in_port.nPortIndex = (OMX_U32) PORT_INDEX_IN;
OMX_CHECK(OMX_GetParameter(this->handle, OMX_IndexParamPortDefinition, (OMX_PTR) &in_port));
in_port.format.video.nFrameWidth = this->width;
in_port.format.video.nFrameHeight = this->height;
in_port.format.video.nStride = VENUS_Y_STRIDE(COLOR_FMT_NV12, this->width);
in_port.format.video.nSliceHeight = this->height;
// in_port.nBufferSize = (this->width * this->height * 3) / 2;
in_port.nBufferSize = VENUS_BUFFER_SIZE(COLOR_FMT_NV12, this->width, this->height);
in_port.format.video.xFramerate = (this->fps * 65536);
in_port.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
// in_port.format.video.eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
in_port.format.video.eColorFormat = (OMX_COLOR_FORMATTYPE)QOMX_COLOR_FORMATYUV420PackedSemiPlanar32m;
OMX_CHECK(OMX_SetParameter(this->handle, OMX_IndexParamPortDefinition, (OMX_PTR) &in_port));
OMX_CHECK(OMX_GetParameter(this->handle, OMX_IndexParamPortDefinition, (OMX_PTR) &in_port));
this->in_buf_headers.resize(in_port.nBufferCountActual);
// setup output port
OMX_PARAM_PORTDEFINITIONTYPE out_port = {0};
out_port.nSize = sizeof(out_port);
out_port.nPortIndex = (OMX_U32) PORT_INDEX_OUT;
OMX_CHECK(OMX_GetParameter(this->handle, OMX_IndexParamPortDefinition, (OMX_PTR)&out_port));
out_port.format.video.nFrameWidth = this->width;
out_port.format.video.nFrameHeight = this->height;
out_port.format.video.xFramerate = 0;
out_port.format.video.nBitrate = bitrate;
if (h265) {
out_port.format.video.eCompressionFormat = OMX_VIDEO_CodingHEVC;
} else {
out_port.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
}
out_port.format.video.eColorFormat = OMX_COLOR_FormatUnused;
OMX_CHECK(OMX_SetParameter(this->handle, OMX_IndexParamPortDefinition, (OMX_PTR) &out_port));
OMX_CHECK(OMX_GetParameter(this->handle, OMX_IndexParamPortDefinition, (OMX_PTR) &out_port));
this->out_buf_headers.resize(out_port.nBufferCountActual);
OMX_VIDEO_PARAM_BITRATETYPE bitrate_type = {0};
bitrate_type.nSize = sizeof(bitrate_type);
bitrate_type.nPortIndex = (OMX_U32) PORT_INDEX_OUT;
OMX_CHECK(OMX_GetParameter(this->handle, OMX_IndexParamVideoBitrate, (OMX_PTR) &bitrate_type));
bitrate_type.eControlRate = OMX_Video_ControlRateVariable;
bitrate_type.nTargetBitrate = bitrate;
OMX_CHECK(OMX_SetParameter(this->handle, OMX_IndexParamVideoBitrate, (OMX_PTR) &bitrate_type));
if (h265) {
// setup HEVC
OMX_VIDEO_PARAM_PROFILELEVELTYPE hevc_type = {0};
OMX_INDEXTYPE index_type = OMX_IndexParamVideoProfileLevelCurrent;
hevc_type.nSize = sizeof(hevc_type);
hevc_type.nPortIndex = (OMX_U32) PORT_INDEX_OUT;
OMX_CHECK(OMX_GetParameter(this->handle, index_type, (OMX_PTR) &hevc_type));
hevc_type.eProfile = OMX_VIDEO_HEVCProfileMain;
hevc_type.eLevel = OMX_VIDEO_HEVCHighTierLevel5;
OMX_CHECK(OMX_SetParameter(this->handle, index_type, (OMX_PTR) &hevc_type));
} else {
// setup h264
OMX_VIDEO_PARAM_AVCTYPE avc = { 0 };
avc.nSize = sizeof(avc);
avc.nPortIndex = (OMX_U32) PORT_INDEX_OUT;
OMX_CHECK(OMX_GetParameter(this->handle, OMX_IndexParamVideoAvc, &avc));
avc.nBFrames = 0;
avc.nPFrames = 15;
avc.eProfile = OMX_VIDEO_AVCProfileHigh;
avc.eLevel = OMX_VIDEO_AVCLevel31;
avc.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB;
avc.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
avc.nRefFrames = 1;
avc.bUseHadamard = OMX_TRUE;
avc.bEntropyCodingCABAC = OMX_TRUE;
avc.bWeightedPPrediction = OMX_TRUE;
avc.bconstIpred = OMX_TRUE;
OMX_CHECK(OMX_SetParameter(this->handle, OMX_IndexParamVideoAvc, &avc));
}
// for (int i = 0; ; i++) {
// OMX_VIDEO_PARAM_PORTFORMATTYPE video_port_format = {0};
// video_port_format.nSize = sizeof(video_port_format);
// video_port_format.nIndex = i;
// video_port_format.nPortIndex = PORT_INDEX_IN;
// if (OMX_GetParameter(this->handle, OMX_IndexParamVideoPortFormat, &video_port_format) != OMX_ErrorNone)
// break;
// printf("in %d: compression 0x%x format 0x%x %s\n", i,
// video_port_format.eCompressionFormat, video_port_format.eColorFormat,
// omx_color_fomat_name(video_port_format.eColorFormat));
// }
// for (int i=0; i<32; i++) {
// OMX_VIDEO_PARAM_PROFILELEVELTYPE params = {0};
// params.nSize = sizeof(params);
// params.nPortIndex = PORT_INDEX_OUT;
// params.nProfileIndex = i;
// if (OMX_GetParameter(this->handle, OMX_IndexParamVideoProfileLevelQuerySupported, &params) != OMX_ErrorNone)
// break;
// printf("profile %d level 0x%x\n", params.eProfile, params.eLevel);
// }
OMX_CHECK(OMX_SendCommand(this->handle, OMX_CommandStateSet, OMX_StateIdle, NULL));
for (auto &buf : this->in_buf_headers) {
OMX_CHECK(OMX_AllocateBuffer(this->handle, &buf, PORT_INDEX_IN, this,
in_port.nBufferSize));
}
for (auto &buf : this->out_buf_headers) {
OMX_CHECK(OMX_AllocateBuffer(this->handle, &buf, PORT_INDEX_OUT, this,
out_port.nBufferSize));
}
wait_for_state(OMX_StateIdle);
OMX_CHECK(OMX_SendCommand(this->handle, OMX_CommandStateSet, OMX_StateExecuting, NULL));
wait_for_state(OMX_StateExecuting);
// give omx all the output buffers
for (auto &buf : this->out_buf_headers) {
// printf("fill %p\n", this->out_buf_headers[i]);
OMX_CHECK(OMX_FillThisBuffer(this->handle, buf));
}
// fill the input free queue
for (auto &buf : this->in_buf_headers) {
this->free_in.push(buf);
}
LOGE("omx initialized - in: %d - out %d", this->in_buf_headers.size(), this->out_buf_headers.size());
service_name = this->type == DriverCam ? "driverEncodeData" :
(this->type == WideRoadCam ? "wideRoadEncodeData" :
(this->remuxing ? "qRoadEncodeData" : "roadEncodeData"));
pm.reset(new PubMaster({service_name}));
}
void OmxEncoder::callback_handler(OmxEncoder *e) {
// OMX documentation specifies to not empty the buffer from the callback function
// so we use this intermediate handler to copy the buffer for further writing
// and give it back to OMX. We could also send the data over msgq from here.
bool exit = false;
while (!exit) {
OMX_BUFFERHEADERTYPE *buffer = e->done_out.pop();
OmxBuffer *new_buffer = (OmxBuffer*)malloc(sizeof(OmxBuffer) + buffer->nFilledLen);
assert(new_buffer);
new_buffer->header = *buffer;
memcpy(new_buffer->data, buffer->pBuffer + buffer->nOffset, buffer->nFilledLen);
e->to_write.push(new_buffer);
#ifdef QCOM2
if (buffer->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
buffer->nTimeStamp = 0;
}
if (buffer->nFlags & OMX_BUFFERFLAG_EOS) {
buffer->nTimeStamp = 0;
}
#endif
if (buffer->nFlags & OMX_BUFFERFLAG_EOS) {
exit = true;
}
// give omx back the buffer
// TODO: fails when shutting down
OMX_CHECK(OMX_FillThisBuffer(e->handle, buffer));
}
}
void OmxEncoder::write_and_broadcast_handler(OmxEncoder *e){
bool exit = false;
e->segment_num++;
uint32_t idx = 0;
while (!exit) {
OmxBuffer *out_buf = e->to_write.pop();
MessageBuilder msg;
auto edata = (e->type == DriverCam) ? msg.initEvent(true).initDriverEncodeData() :
((e->type == WideRoadCam) ? msg.initEvent(true).initWideRoadEncodeData() :
(e->remuxing ? msg.initEvent(true).initQRoadEncodeData() : msg.initEvent(true).initRoadEncodeData()));
edata.setData(kj::heapArray<capnp::byte>(out_buf->data, out_buf->header.nFilledLen));
edata.setTimestampEof(out_buf->header.nTimeStamp);
edata.setIdx(idx++);
edata.setSegmentNum(e->segment_num);
edata.setFlags(out_buf->header.nFlags);
e->pm->send(e->service_name, msg);
OmxEncoder::handle_out_buf(e, out_buf);
if (out_buf->header.nFlags & OMX_BUFFERFLAG_EOS) {
exit = true;
}
free(out_buf);
}
}
void OmxEncoder::handle_out_buf(OmxEncoder *e, OmxBuffer *out_buf) {
if (!(out_buf->header.nFlags & OMX_BUFFERFLAG_EOS) && e->writer) {
e->writer->write(out_buf->data,
out_buf->header.nFilledLen,
out_buf->header.nTimeStamp,
out_buf->header.nFlags & OMX_BUFFERFLAG_CODECCONFIG,
out_buf->header.nFlags & OMX_BUFFERFLAG_SYNCFRAME);
}
}
int OmxEncoder::encode_frame(const uint8_t *y_ptr, const uint8_t *u_ptr, const uint8_t *v_ptr,
int in_width, int in_height, uint64_t ts) {
assert(in_width == this->in_width_);
assert(in_height == this->in_height_);
int err;
if (!this->is_open) {
return -1;
}
// this sometimes freezes... put it outside the encoder lock so we can still trigger rotates...
// THIS IS A REALLY BAD IDEA, but apparently the race has to happen 30 times to trigger this
//pthread_mutex_unlock(&this->lock);
OMX_BUFFERHEADERTYPE* in_buf = nullptr;
while (!this->free_in.try_pop(in_buf, 20)) {
if (do_exit) {
return -1;
}
}
//pthread_mutex_lock(&this->lock);
int ret = this->counter;
uint8_t *in_buf_ptr = in_buf->pBuffer;
// printf("in_buf ptr %p\n", in_buf_ptr);
uint8_t *in_y_ptr = in_buf_ptr;
int in_y_stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, this->width);
int in_uv_stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, this->width);
// uint8_t *in_uv_ptr = in_buf_ptr + (this->width * this->height);
uint8_t *in_uv_ptr = in_buf_ptr + (in_y_stride * VENUS_Y_SCANLINES(COLOR_FMT_NV12, this->height));
if (this->downscale) {
I420Scale(y_ptr, in_width,
u_ptr, in_width/2,
v_ptr, in_width/2,
in_width, in_height,
this->y_ptr2, this->width,
this->u_ptr2, this->width/2,
this->v_ptr2, this->width/2,
this->width, this->height,
libyuv::kFilterNone);
y_ptr = this->y_ptr2;
u_ptr = this->u_ptr2;
v_ptr = this->v_ptr2;
}
err = libyuv::I420ToNV12(y_ptr, this->width,
u_ptr, this->width/2,
v_ptr, this->width/2,
in_y_ptr, in_y_stride,
in_uv_ptr, in_uv_stride,
this->width, this->height);
assert(err == 0);
// in_buf->nFilledLen = (this->width*this->height) + (this->width*this->height/2);
in_buf->nFilledLen = VENUS_BUFFER_SIZE(COLOR_FMT_NV12, this->width, this->height);
in_buf->nFlags = OMX_BUFFERFLAG_ENDOFFRAME;
in_buf->nOffset = 0;
in_buf->nTimeStamp = ts/1000LL; // OMX_TICKS, in microseconds
this->last_t = in_buf->nTimeStamp;
OMX_CHECK(OMX_EmptyThisBuffer(this->handle, in_buf));
this->dirty = true;
this->counter++;
return ret;
}
void OmxEncoder::encoder_open(const char* path) {
if (this->write) {
writer.reset(new VideoWriter(path, this->filename, this->remuxing, this->width, this->height, this->fps, !this->remuxing, false));
}
// start writer threads
callback_handler_thread = std::thread(OmxEncoder::callback_handler, this);
write_handler_thread = std::thread(OmxEncoder::write_and_broadcast_handler, this);
this->is_open = true;
this->counter = 0;
}
void OmxEncoder::encoder_close() {
if (this->is_open) {
if (this->dirty) {
// drain output only if there could be frames in the encoder
OMX_BUFFERHEADERTYPE* in_buf = this->free_in.pop();
in_buf->nFilledLen = 0;
in_buf->nOffset = 0;
in_buf->nFlags = OMX_BUFFERFLAG_EOS;
in_buf->nTimeStamp = this->last_t + 1000000LL/this->fps;
OMX_CHECK(OMX_EmptyThisBuffer(this->handle, in_buf));
this->dirty = false;
}
callback_handler_thread.join();
write_handler_thread.join();
writer.reset();
}
this->is_open = false;
}
OmxEncoder::~OmxEncoder() {
assert(!this->is_open);
OMX_CHECK(OMX_SendCommand(this->handle, OMX_CommandStateSet, OMX_StateIdle, NULL));
wait_for_state(OMX_StateIdle);
OMX_CHECK(OMX_SendCommand(this->handle, OMX_CommandStateSet, OMX_StateLoaded, NULL));
for (auto &buf : this->in_buf_headers) {
OMX_CHECK(OMX_FreeBuffer(this->handle, PORT_INDEX_IN, buf));
}
for (auto &buf : this->out_buf_headers) {
OMX_CHECK(OMX_FreeBuffer(this->handle, PORT_INDEX_OUT, buf));
}
wait_for_state(OMX_StateLoaded);
OMX_CHECK(OMX_FreeHandle(this->handle));
OMX_BUFFERHEADERTYPE *buf;
while (this->free_in.try_pop(buf));
while (this->done_out.try_pop(buf));
OmxBuffer *write_buf;
while (this->to_write.try_pop(write_buf)) {
free(write_buf);
};
if (this->downscale) {
free(this->y_ptr2);
free(this->u_ptr2);
free(this->v_ptr2);
}
}

@@ -1,79 +0,0 @@
#pragma once
#include <cstdint>
#include <cstdio>
#include <vector>
#include <thread>
#include <OMX_Component.h>
#include "selfdrive/common/queue.h"
#include "selfdrive/loggerd/encoder.h"
#include "selfdrive/loggerd/video_writer.h"
struct OmxBuffer {
OMX_BUFFERHEADERTYPE header;
OMX_U8 data[];
};
// OmxEncoder, lossy codec using hardware HEVC
class OmxEncoder : public VideoEncoder {
public:
OmxEncoder(const char* filename, CameraType type, int width, int height, int fps, int bitrate, bool h265, int out_width, int out_height, bool write = true);
~OmxEncoder();
int encode_frame(const uint8_t *y_ptr, const uint8_t *u_ptr, const uint8_t *v_ptr,
int in_width, int in_height, uint64_t ts);
void encoder_open(const char* path);
void encoder_close();
// OMX callbacks
static OMX_ERRORTYPE event_handler(OMX_HANDLETYPE component, OMX_PTR app_data, OMX_EVENTTYPE event,
OMX_U32 data1, OMX_U32 data2, OMX_PTR event_data);
static OMX_ERRORTYPE empty_buffer_done(OMX_HANDLETYPE component, OMX_PTR app_data,
OMX_BUFFERHEADERTYPE *buffer);
static OMX_ERRORTYPE fill_buffer_done(OMX_HANDLETYPE component, OMX_PTR app_data,
OMX_BUFFERHEADERTYPE *buffer);
private:
void wait_for_state(OMX_STATETYPE state);
static void callback_handler(OmxEncoder *e);
static void write_and_broadcast_handler(OmxEncoder *e);
static void handle_out_buf(OmxEncoder *e, OmxBuffer *out_buf);
int in_width_, in_height_;
int width, height, fps;
bool is_open = false;
bool dirty = false;
bool write = false;
int counter = 0;
std::thread callback_handler_thread;
std::thread write_handler_thread;
int segment_num = -1;
std::unique_ptr<PubMaster> pm;
const char *service_name;
const char* filename;
CameraType type;
std::mutex state_lock;
std::condition_variable state_cv;
OMX_STATETYPE state = OMX_StateLoaded;
OMX_HANDLETYPE handle;
std::vector<OMX_BUFFERHEADERTYPE *> in_buf_headers;
std::vector<OMX_BUFFERHEADERTYPE *> out_buf_headers;
uint64_t last_t;
SafeQueue<OMX_BUFFERHEADERTYPE *> free_in;
SafeQueue<OMX_BUFFERHEADERTYPE *> done_out;
SafeQueue<OmxBuffer *> to_write;
bool remuxing;
std::unique_ptr<VideoWriter> writer;
bool downscale;
uint8_t *y_ptr2, *u_ptr2, *v_ptr2;
};

@@ -59,7 +59,7 @@ void RawLogger::encoder_close() {
 }
 int RawLogger::encode_frame(const uint8_t *y_ptr, const uint8_t *u_ptr, const uint8_t *v_ptr,
-                            int in_width, int in_height, uint64_t ts) {
+                            int in_width, int in_height, VisionIpcBufExtra *extra) {
   assert(in_width == this->in_width_);
   assert(in_height == this->in_height_);

@@ -12,6 +12,7 @@ extern "C" {
 }
 #include "selfdrive/loggerd/encoder.h"
+#include "selfdrive/loggerd/loggerd.h"
 #include "selfdrive/loggerd/video_writer.h"
 class RawLogger : public VideoEncoder {
@@ -20,7 +21,7 @@ class RawLogger : public VideoEncoder {
   int bitrate, bool h265, int out_width, int out_height, bool write = true);
   ~RawLogger();
   int encode_frame(const uint8_t *y_ptr, const uint8_t *u_ptr, const uint8_t *v_ptr,
-                   int in_width, int in_height, uint64_t ts);
+                   int in_width, int in_height, VisionIpcBufExtra *extra);
   void encoder_open(const char* path);
   void encoder_close();

@@ -0,0 +1,84 @@
#include "selfdrive/loggerd/loggerd.h"
#include "selfdrive/loggerd/remote_encoder.h"
int handle_encoder_msg(LoggerdState *s, Message *msg, std::string &name, struct RemoteEncoder &re) {
const LogCameraInfo &cam_info = (name == "driverEncodeData") ? cameras_logged[1] :
((name == "wideRoadEncodeData") ? cameras_logged[2] :
((name == "qRoadEncodeData") ? qcam_info : cameras_logged[0]));
if (!cam_info.record) return 0; // TODO: handle this by not subscribing
int bytes_count = 0;
// TODO: AlignedBuffer is making a copy and allocing
//AlignedBuffer aligned_buf;
//capnp::FlatArrayMessageReader cmsg(aligned_buf.align(msg->getData(), msg->getSize()));
capnp::FlatArrayMessageReader cmsg(kj::ArrayPtr<capnp::word>((capnp::word *)msg->getData(), msg->getSize()));
auto event = cmsg.getRoot<cereal::Event>();
auto edata = (name == "driverEncodeData") ? event.getDriverEncodeData() :
((name == "wideRoadEncodeData") ? event.getWideRoadEncodeData() :
((name == "qRoadEncodeData") ? event.getQRoadEncodeData() : event.getRoadEncodeData()));
auto idx = edata.getIdx();
auto flags = idx.getFlags();
// rotation happened, process the queue (happens before the current message)
if (re.logger_segment != s->rotate_segment) {
re.logger_segment = s->rotate_segment;
for (auto &qmsg: re.q) {
bytes_count += handle_encoder_msg(s, qmsg, name, re);
}
re.q.clear();
}
if (!re.writer) {
// only create on iframe
if (flags & V4L2_BUF_FLAG_KEYFRAME) {
if (re.dropped_frames) {
// this should only happen for the first segment, maybe
LOGD("%s: dropped %d non iframe packets before init", name.c_str(), re.dropped_frames);
re.dropped_frames = 0;
}
re.writer.reset(new VideoWriter(s->segment_path,
cam_info.filename, !cam_info.is_h265,
cam_info.frame_width, cam_info.frame_height,
cam_info.fps, cam_info.is_h265, false));
// write the header
auto header = edata.getHeader();
re.writer->write((uint8_t *)header.begin(), header.size(), idx.getTimestampEof()/1000, true, false);
re.segment = idx.getSegmentNum();
} else {
++re.dropped_frames;
return bytes_count;
}
}
if (re.segment != idx.getSegmentNum()) {
if (re.writer) {
// encoder is on the next segment, this segment is over so we close the videowriter
re.writer.reset();
++s->ready_to_rotate;
LOGD("rotate %d -> %d ready %d/%d", re.segment, idx.getSegmentNum(), s->ready_to_rotate.load(), s->max_waiting);
}
// queue up all the new segment messages, they go in after the rotate
re.q.push_back(msg);
} else {
auto data = edata.getData();
re.writer->write((uint8_t *)data.begin(), data.size(), idx.getTimestampEof()/1000, false, flags & V4L2_BUF_FLAG_KEYFRAME);
// put it in log stream as the idx packet
MessageBuilder bmsg;
auto evt = bmsg.initEvent(event.getValid());
evt.setLogMonoTime(event.getLogMonoTime());
if (name == "driverEncodeData") { evt.setDriverEncodeIdx(idx); }
if (name == "wideRoadEncodeData") { evt.setWideRoadEncodeIdx(idx); }
if (name == "qRoadEncodeData") { evt.setQRoadEncodeIdx(idx); }
if (name == "roadEncodeData") { evt.setRoadEncodeIdx(idx); }
auto new_msg = bmsg.toBytes();
logger_log(&s->logger, (uint8_t *)new_msg.begin(), new_msg.size(), true); // always in qlog?
delete msg;
bytes_count += new_msg.size();
}
return bytes_count;
}

@@ -0,0 +1,12 @@
#include "selfdrive/loggerd/video_writer.h"
#define V4L2_BUF_FLAG_KEYFRAME 0x00000008
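// 0x00000008 matches V4L2_BUF_FLAG_KEYFRAME in <linux/videodev2.h>; presumably
// redefined here so loggerd doesn't pull in the kernel header for one flag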
struct RemoteEncoder {
std::unique_ptr<VideoWriter> writer;
int segment = -1;
std::vector<Message *> q;
int logger_segment = -1;
int dropped_frames = 0;
};
int handle_encoder_msg(LoggerdState *s, Message *msg, std::string &name, struct RemoteEncoder &re);

@@ -107,7 +107,8 @@ class TestEncoder(unittest.TestCase):
 # sanity check file size
 file_size = os.path.getsize(file_path)
-self.assertTrue(math.isclose(file_size, size, rel_tol=FILE_SIZE_TOLERANCE))
+self.assertTrue(math.isclose(file_size, size, rel_tol=FILE_SIZE_TOLERANCE),
+                f"{file_path} size {file_size} isn't close to target size {size}")
 # Check encodeIdx
 if encode_idx_name is not None:

@@ -0,0 +1,401 @@
#include <cassert>
#include <sys/ioctl.h>
#include <poll.h>
#include "selfdrive/loggerd/v4l_encoder.h"
#include "selfdrive/common/util.h"
#include "selfdrive/common/timing.h"
#include "libyuv.h"
#include "msm_media_info.h"
// has to be in this order
#include "v4l2-controls.h"
#include <linux/videodev2.h>
#define V4L2_QCOM_BUF_FLAG_CODECCONFIG 0x00020000
#define V4L2_QCOM_BUF_FLAG_EOS 0x02000000
// echo 0x7fffffff > /sys/kernel/debug/msm_vidc/debug_level
const int env_debug_encoder = (getenv("DEBUG_ENCODER") != NULL) ? atoi(getenv("DEBUG_ENCODER")) : 0;
#define checked_ioctl(x,y,z) { int _ret = HANDLE_EINTR(ioctl(x,y,z)); if (_ret!=0) { LOGE("checked_ioctl failed %d %lx %p", x, y, z); } assert(_ret==0); }
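// HANDLE_EINTR (from selfdrive/common/util.h) retries the ioctl while it fails
// with EINTR, so a stray signal can't abort an encoder command mid-flight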
static void dequeue_buffer(int fd, v4l2_buf_type buf_type, unsigned int *index=NULL, unsigned int *bytesused=NULL, unsigned int *flags=NULL, struct timeval *timestamp=NULL) {
v4l2_plane plane = {0};
v4l2_buffer v4l_buf = {
.type = buf_type,
.memory = V4L2_MEMORY_USERPTR,
.m = { .planes = &plane, },
.length = 1,
};
checked_ioctl(fd, VIDIOC_DQBUF, &v4l_buf);
if (index) *index = v4l_buf.index;
if (bytesused) *bytesused = v4l_buf.m.planes[0].bytesused;
if (flags) *flags = v4l_buf.flags;
if (timestamp) *timestamp = v4l_buf.timestamp;
assert(v4l_buf.m.planes[0].data_offset == 0);
}
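// V4L2 memory-to-memory naming is from the codec's point of view: OUTPUT
// buffers carry raw frames *into* the encoder, CAPTURE buffers carry the
// compressed bitstream back *out*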
static void queue_buffer(int fd, v4l2_buf_type buf_type, unsigned int index, VisionBuf *buf, struct timeval timestamp={0}) {
v4l2_plane plane = {
.length = (unsigned int)buf->len,
.m = { .userptr = (unsigned long)buf->addr, },
.reserved = {(unsigned int)buf->fd}
};
v4l2_buffer v4l_buf = {
.type = buf_type,
.index = index,
.memory = V4L2_MEMORY_USERPTR,
.m = { .planes = &plane, },
.length = 1,
.bytesused = 0,
.flags = V4L2_BUF_FLAG_TIMESTAMP_COPY,
.timestamp = timestamp
};
checked_ioctl(fd, VIDIOC_QBUF, &v4l_buf);
}
static void request_buffers(int fd, v4l2_buf_type buf_type, unsigned int count) {
struct v4l2_requestbuffers reqbuf = {
.type = buf_type,
.memory = V4L2_MEMORY_USERPTR,
.count = count
};
checked_ioctl(fd, VIDIOC_REQBUFS, &reqbuf);
}
// TODO: writing should be moved to loggerd
void V4LEncoder::write_handler(V4LEncoder *e, const char *path) {
VideoWriter writer(path, e->filename, !e->h265, e->width, e->height, e->fps, e->h265, false);
bool first = true;
kj::Array<capnp::word>* out_buf;
while ((out_buf = e->to_write.pop())) {
capnp::FlatArrayMessageReader cmsg(*out_buf);
cereal::Event::Reader event = cmsg.getRoot<cereal::Event>();
auto edata = (e->type == DriverCam) ? event.getDriverEncodeData() :
((e->type == WideRoadCam) ? event.getWideRoadEncodeData() :
(e->h265 ? event.getRoadEncodeData() : event.getQRoadEncodeData()));
auto idx = edata.getIdx();
auto flags = idx.getFlags();
if (first) {
assert(flags & V4L2_BUF_FLAG_KEYFRAME);
auto header = edata.getHeader();
writer.write((uint8_t *)header.begin(), header.size(), idx.getTimestampEof()/1000, true, false);
first = false;
}
// dangerous cast from const, but should be fine
auto data = edata.getData();
if (data.size() > 0) {
writer.write((uint8_t *)data.begin(), data.size(), idx.getTimestampEof()/1000, false, flags & V4L2_BUF_FLAG_KEYFRAME);
}
// free the data
delete out_buf;
}
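// the NULL that dequeue_handler pushes on EOS pops as nullptr and ends the loop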
// VideoWriter is freed when it goes out of scope
}
void V4LEncoder::dequeue_handler(V4LEncoder *e) {
std::string dequeue_thread_name = "dq-"+std::string(e->filename);
util::set_thread_name(dequeue_thread_name.c_str());
e->segment_num++;
uint32_t idx = -1;
bool exit = false;
// POLLIN is capture, POLLOUT is frame
struct pollfd pfd;
pfd.events = POLLIN | POLLOUT;
pfd.fd = e->fd;
// save the header
kj::Array<capnp::byte> header;
while (!exit) {
int rc = poll(&pfd, 1, 1000);
if (!rc) { LOGE("encoder dequeue poll timeout"); continue; }
if (env_debug_encoder >= 2) {
printf("%20s poll %x at %.2f ms\n", e->filename, pfd.revents, millis_since_boot());
}
int frame_id = -1;
if (pfd.revents & POLLIN) {
unsigned int bytesused, flags, index;
struct timeval timestamp;
dequeue_buffer(e->fd, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, &index, &bytesused, &flags, &timestamp);
e->buf_out[index].sync(VISIONBUF_SYNC_FROM_DEVICE);
uint8_t *buf = (uint8_t*)e->buf_out[index].addr;
int64_t ts = timestamp.tv_sec * 1000000 + timestamp.tv_usec;
// eof packet, we exit
if (flags & V4L2_QCOM_BUF_FLAG_EOS) {
if (e->write) e->to_write.push(NULL);
exit = true;
} else if (flags & V4L2_QCOM_BUF_FLAG_CODECCONFIG) {
// save header
header = kj::heapArray<capnp::byte>(buf, bytesused);
} else {
VisionIpcBufExtra extra = e->extras.pop();
assert(extra.timestamp_eof/1000 == ts); // stay in sync
frame_id = extra.frame_id;
++idx;
// broadcast packet
MessageBuilder msg;
auto event = msg.initEvent(true);
auto edat = (e->type == DriverCam) ? event.initDriverEncodeData() :
((e->type == WideRoadCam) ? event.initWideRoadEncodeData() :
(e->h265 ? event.initRoadEncodeData() : event.initQRoadEncodeData()));
auto edata = edat.initIdx();
edata.setFrameId(extra.frame_id);
edata.setTimestampSof(extra.timestamp_sof);
edata.setTimestampEof(extra.timestamp_eof);
edata.setType(e->h265 ? cereal::EncodeIndex::Type::FULL_H_E_V_C : cereal::EncodeIndex::Type::QCAMERA_H264);
edata.setEncodeId(idx);
edata.setSegmentNum(e->segment_num);
edata.setSegmentId(idx);
edata.setFlags(flags);
edata.setLen(bytesused);
edat.setData(kj::arrayPtr<capnp::byte>(buf, bytesused));
if (flags & V4L2_BUF_FLAG_KEYFRAME) edat.setHeader(header);
auto words = new kj::Array<capnp::word>(capnp::messageToFlatArray(msg));
auto bytes = words->asBytes();
e->pm->send(e->service_name, bytes.begin(), bytes.size());
if (e->write) {
e->to_write.push(words);
} else {
delete words;
}
}
if (env_debug_encoder) {
printf("%20s got(%d) %6d bytes flags %8x idx %4d id %8d ts %ld lat %.2f ms (%lu frames free)\n",
e->filename, index, bytesused, flags, idx, frame_id, ts, millis_since_boot()-(ts/1000.), e->free_buf_in.size());
}
// requeue the buffer
queue_buffer(e->fd, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, index, &e->buf_out[index]);
}
if (pfd.revents & POLLOUT) {
unsigned int index;
dequeue_buffer(e->fd, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, &index);
e->free_buf_in.push(index);
}
}
}
V4LEncoder::V4LEncoder(
const char* filename, CameraType type, int in_width, int in_height,
int fps, int bitrate, bool h265, int out_width, int out_height, bool write)
: type(type), in_width_(in_width), in_height_(in_height),
filename(filename), h265(h265),
width(out_width), height(out_height), fps(fps), write(write) {
fd = open("/dev/v4l/by-path/platform-aa00000.qcom_vidc-video-index1", O_RDWR|O_NONBLOCK);
assert(fd >= 0);
struct v4l2_capability cap;
checked_ioctl(fd, VIDIOC_QUERYCAP, &cap);
LOGD("opened encoder device %s %s = %d", cap.driver, cap.card, fd);
assert(strcmp((const char *)cap.driver, "msm_vidc_driver") == 0);
assert(strcmp((const char *)cap.card, "msm_vidc_venc") == 0);
struct v4l2_format fmt_out = {
.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE,
.fmt = {
.pix_mp = {
// downscales are free with v4l
.width = (unsigned int)out_width,
.height = (unsigned int)out_height,
.pixelformat = h265 ? V4L2_PIX_FMT_HEVC : V4L2_PIX_FMT_H264,
.field = V4L2_FIELD_ANY,
.colorspace = V4L2_COLORSPACE_DEFAULT,
}
}
};
checked_ioctl(fd, VIDIOC_S_FMT, &fmt_out);
v4l2_streamparm streamparm = {
.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE,
.parm = {
.output = {
// TODO: more stuff here? we don't know
.timeperframe = {
.numerator = 1,
.denominator = 20
}
}
}
};
checked_ioctl(fd, VIDIOC_S_PARM, &streamparm);
struct v4l2_format fmt_in = {
.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE,
.fmt = {
.pix_mp = {
.width = (unsigned int)in_width,
.height = (unsigned int)in_height,
.pixelformat = V4L2_PIX_FMT_NV12,
.field = V4L2_FIELD_ANY,
.colorspace = V4L2_COLORSPACE_470_SYSTEM_BG,
}
}
};
checked_ioctl(fd, VIDIOC_S_FMT, &fmt_in);
LOGD("in buffer size %d, out buffer size %d",
fmt_in.fmt.pix_mp.plane_fmt[0].sizeimage,
fmt_out.fmt.pix_mp.plane_fmt[0].sizeimage);
// shared ctrls
{
struct v4l2_control ctrls[] = {
{ .id = V4L2_CID_MPEG_VIDEO_HEADER_MODE, .value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE},
{ .id = V4L2_CID_MPEG_VIDEO_BITRATE, .value = bitrate},
{ .id = V4L2_CID_MPEG_VIDC_VIDEO_RATE_CONTROL, .value = V4L2_CID_MPEG_VIDC_VIDEO_RATE_CONTROL_VBR_CFR},
{ .id = V4L2_CID_MPEG_VIDC_VIDEO_PRIORITY, .value = V4L2_MPEG_VIDC_VIDEO_PRIORITY_REALTIME_DISABLE},
{ .id = V4L2_CID_MPEG_VIDC_VIDEO_IDR_PERIOD, .value = 1},
};
for (auto ctrl : ctrls) {
checked_ioctl(fd, VIDIOC_S_CTRL, &ctrl);
}
}
if (h265) {
struct v4l2_control ctrls[] = {
{ .id = V4L2_CID_MPEG_VIDC_VIDEO_HEVC_PROFILE, .value = V4L2_MPEG_VIDC_VIDEO_HEVC_PROFILE_MAIN},
{ .id = V4L2_CID_MPEG_VIDC_VIDEO_HEVC_TIER_LEVEL, .value = V4L2_MPEG_VIDC_VIDEO_HEVC_LEVEL_HIGH_TIER_LEVEL_5},
{ .id = V4L2_CID_MPEG_VIDC_VIDEO_NUM_P_FRAMES, .value = 29},
{ .id = V4L2_CID_MPEG_VIDC_VIDEO_NUM_B_FRAMES, .value = 0},
};
for (auto ctrl : ctrls) {
checked_ioctl(fd, VIDIOC_S_CTRL, &ctrl);
}
} else {
struct v4l2_control ctrls[] = {
{ .id = V4L2_CID_MPEG_VIDEO_H264_PROFILE, .value = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH},
{ .id = V4L2_CID_MPEG_VIDEO_H264_LEVEL, .value = V4L2_MPEG_VIDEO_H264_LEVEL_UNKNOWN},
{ .id = V4L2_CID_MPEG_VIDC_VIDEO_NUM_P_FRAMES, .value = 15},
{ .id = V4L2_CID_MPEG_VIDC_VIDEO_NUM_B_FRAMES, .value = 0},
{ .id = V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE, .value = V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CABAC},
{ .id = V4L2_CID_MPEG_VIDC_VIDEO_H264_CABAC_MODEL, .value = V4L2_CID_MPEG_VIDC_VIDEO_H264_CABAC_MODEL_0},
{ .id = V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_MODE, .value = 0},
{ .id = V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_ALPHA, .value = 0},
{ .id = V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_BETA, .value = 0},
{ .id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MODE, .value = 0},
};
for (auto ctrl : ctrls) {
checked_ioctl(fd, VIDIOC_S_CTRL, &ctrl);
}
}
// allocate buffers
request_buffers(fd, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, BUF_OUT_COUNT);
request_buffers(fd, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, BUF_IN_COUNT);
// start encoder
v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
checked_ioctl(fd, VIDIOC_STREAMON, &buf_type);
buf_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
checked_ioctl(fd, VIDIOC_STREAMON, &buf_type);
// queue up output buffers
for (unsigned int i = 0; i < BUF_OUT_COUNT; i++) {
buf_out[i].allocate(fmt_out.fmt.pix_mp.plane_fmt[0].sizeimage);
queue_buffer(fd, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, i, &buf_out[i]);
}
// queue up input buffers
for (unsigned int i = 0; i < BUF_IN_COUNT; i++) {
buf_in[i].allocate(fmt_in.fmt.pix_mp.plane_fmt[0].sizeimage);
free_buf_in.push(i);
}
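// input buffers are only tracked here, not queued: encode_frame() pops an
// index off free_buf_in, fills the VisionBuf, and queues it on demand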
// publish
service_name = this->type == DriverCam ? "driverEncodeData" :
(this->type == WideRoadCam ? "wideRoadEncodeData" :
(this->h265 ? "roadEncodeData" : "qRoadEncodeData"));
pm.reset(new PubMaster({service_name}));
}
void V4LEncoder::encoder_open(const char* path) {
dequeue_handler_thread = std::thread(V4LEncoder::dequeue_handler, this);
if (this->write) write_handler_thread = std::thread(V4LEncoder::write_handler, this, path);
this->is_open = true;
this->counter = 0;
}
int V4LEncoder::encode_frame(const uint8_t *y_ptr, const uint8_t *u_ptr, const uint8_t *v_ptr,
int in_width, int in_height, VisionIpcBufExtra *extra) {
assert(in_width == in_width_);
assert(in_height == in_height_);
assert(is_open);
// reserve buffer
int buffer_in = free_buf_in.pop();
uint8_t *in_y_ptr = (uint8_t*)buf_in[buffer_in].addr;
int in_y_stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, in_width);
int in_uv_stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, in_width);
uint8_t *in_uv_ptr = in_y_ptr + (in_y_stride * VENUS_Y_SCANLINES(COLOR_FMT_NV12, in_height));
// GRRR COPY
int err = libyuv::I420ToNV12(y_ptr, in_width,
u_ptr, in_width/2,
v_ptr, in_width/2,
in_y_ptr, in_y_stride,
in_uv_ptr, in_uv_stride,
in_width, in_height);
assert(err == 0);
struct timeval timestamp {
.tv_sec = (long)(extra->timestamp_eof/1000000000),
.tv_usec = (long)((extra->timestamp_eof/1000) % 1000000),
};
// push buffer
extras.push(*extra);
buf_in[buffer_in].sync(VISIONBUF_SYNC_TO_DEVICE);
queue_buffer(fd, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, buffer_in, &buf_in[buffer_in], timestamp);
return this->counter++;
}
void V4LEncoder::encoder_close() {
if (this->is_open) {
// pop all the frames before closing, then put the buffers back
for (int i = 0; i < BUF_IN_COUNT; i++) free_buf_in.pop();
for (int i = 0; i < BUF_IN_COUNT; i++) free_buf_in.push(i);
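// once all BUF_IN_COUNT indices pop, every in-flight input frame has been consumed (and returned) by the POLLOUT path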
// no frames, stop the encoder
struct v4l2_encoder_cmd encoder_cmd = { .cmd = V4L2_ENC_CMD_STOP };
checked_ioctl(fd, VIDIOC_ENCODER_CMD, &encoder_cmd);
// join waits for V4L2_QCOM_BUF_FLAG_EOS
dequeue_handler_thread.join();
assert(extras.empty());
if (this->write) write_handler_thread.join();
assert(to_write.empty());
}
this->is_open = false;
}
V4LEncoder::~V4LEncoder() {
encoder_close();
v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
checked_ioctl(fd, VIDIOC_STREAMOFF, &buf_type);
request_buffers(fd, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, 0);
buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
checked_ioctl(fd, VIDIOC_STREAMOFF, &buf_type);
request_buffers(fd, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, 0);
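// requesting 0 buffers is the standard V4L2 idiom for releasing the driver's buffer bookkeeping before close()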
close(fd);
}

@@ -0,0 +1,48 @@
#pragma once

#include "selfdrive/common/queue.h"
#include "selfdrive/loggerd/encoder.h"
#include "selfdrive/loggerd/loggerd.h"
#include "selfdrive/loggerd/video_writer.h"

#define BUF_IN_COUNT 7
#define BUF_OUT_COUNT 6

class V4LEncoder : public VideoEncoder {
public:
  V4LEncoder(const char* filename, CameraType type, int width, int height, int fps,
             int bitrate, bool h265, int out_width, int out_height, bool write = true);
  ~V4LEncoder();
  int encode_frame(const uint8_t *y_ptr, const uint8_t *u_ptr, const uint8_t *v_ptr,
                   int in_width, int in_height, VisionIpcBufExtra *extra);
  void encoder_open(const char* path);
  void encoder_close();

private:
  int fd;
  const char* filename;
  CameraType type;
  unsigned int in_width_, in_height_;
  bool h265;
  bool is_open = false;
  int segment_num = -1;
  int counter = 0;
  std::unique_ptr<PubMaster> pm;
  const char *service_name;

  static void dequeue_handler(V4LEncoder *e);
  std::thread dequeue_handler_thread;

  VisionBuf buf_in[BUF_IN_COUNT];
  VisionBuf buf_out[BUF_OUT_COUNT];
  SafeQueue<unsigned int> free_buf_in;
  SafeQueue<VisionIpcBufExtra> extras;

  // writing support
  int width, height, fps;
  bool write;
  static void write_handler(V4LEncoder *e, const char *path);
  std::thread write_handler_thread;
  SafeQueue<kj::Array<capnp::word>* > to_write;
};
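A hedged sketch of how a caller presumably drives this class across segment rollovers; the VisionIPC receive loop and the names vipc_client, do_exit, should_rotate, and the segment paths are assumptions for illustration, not code from this PR:

// Illustrative lifecycle only; constructor arguments mirror the declaration above.
V4LEncoder encoder("fcamera.hevc", RoadCam, 1928, 1208, 20, 10000000,
                   /*h265=*/true, 1928, 1208, /*write=*/true);

encoder.encoder_open(segment_path);            // starts dequeue + write threads
while (!do_exit) {
  VisionIpcBufExtra extra;
  VisionBuf *frame = vipc_client.recv(&extra);  // hypothetical frame source
  if (frame == nullptr) continue;
  encoder.encode_frame(frame->y, frame->u, frame->v,
                       frame->width, frame->height, &extra);
  if (should_rotate) {                          // segment rollover
    encoder.encoder_close();                    // drains in-flight frames
    encoder.encoder_open(next_segment_path);
  }
}
encoder.encoder_close();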

@ -48,7 +48,6 @@ VideoWriter::VideoWriter(const char *path, const char *filename, bool remuxing,
     int err = avio_open(&this->ofmt_ctx->pb, this->vid_path.c_str(), AVIO_FLAG_WRITE);
     assert(err >= 0);
-    this->wrote_codec_config = false;
   } else {
     this->of = util::safe_fopen(this->vid_path.c_str(), "wb");
     assert(this->of);

@ -20,6 +20,4 @@ private:
   AVFormatContext *ofmt_ctx;
   AVStream *out_stream;
   bool remuxing, raw;
-  bool wrote_codec_config;
 };
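Why this field could go: with the V4L encoder, the codec config (the VPS/SPS/PPS packet the hardware emits first) is forwarded to the writer explicitly rather than tracked as writer state. Presumably the per-packet write call now carries the flag, along these lines; the exact signature isn't shown in this excerpt:

// Assumed shape of the per-packet write call after this change.
void VideoWriter::write(uint8_t *data, int len, long long timestamp,
                        bool codecconfig, bool keyframe);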

File diff suppressed because it is too large

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a79a84bd1d498a6bb4f2be0c72bfc31e9549509953027ed06daed495f8603200
size 77117

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1511c182af584c7605682d1b9cba4949cdb99169a78730a677ea1a9e985f778b
size 23869

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3d83ce4f1313757b4db838a9122d99048176e7edeb104e52b90ff9d52b8955ec
size 9223

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:759bccdbaa1c6afa189e5a528dd75602763c4a77ff14b581e9178c387342dff1
size 70034

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cd5af1232e30e239bf82f42322ac623e116272a2ba7968a0050a7602c417790e
size 2593

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:242b6d449fadd52df0ed1935e3803ac8e12b48e19a466968a27901fa52714fd4
size 33141

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7a3f1a490953a4453347fb2c99dc4d9dcc9f3418202942aef6aaeb5e7b65dde7
size 13267

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f4e3deb7606f867f327e8d6f2ef6345187db712fedd43e9f1b34b37b923485c4
size 17977

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fa8479b1ffa0a9357e32a162052901c6277fcc1f78b4c7f7503f145e4990f5bc
size 4239

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a20c53c71ba440d6f1293625a79377dbc2f688c6ec7e81560790acdc134191e5
size 17932

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a1d61fb5649544c473a98bf955b4bea2e51ea7a10841732640b27ac2e16e7280
size 67453

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bb0b58976083bf927080d193d896396735171b479bc2efd9ac6d7453284a1c33
size 4729

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ff5592a4396b7f1331e39f886429eda3d57eb424ce1aea0ffe7d86c15236bad5
size 12542

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9e019a14f0204796d40cc20b869ad654ea3a643349a8cc591bd5178e7090fc8d
size 44945

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e685e54fc7058a951b91339df4622f6cfdd42f9e0c7811b38bb97b412f84803a
size 5628

@ -11,10 +11,7 @@ V4L2_BUF_FLAG_KEYFRAME = 8

 def writer(fn, addr, sock_name):
   import cereal.messaging as messaging
-  HEADER = b"\x00\x00\x00\x01\x40\x01\x0c\x01\xff\xff\x01\x60\x00\x00\x03\x00\xb0\x00\x00\x03\x00\x00\x03\x00\x96\xac\x09\x00\x00\x00\x01\x42\x01\x01\x01\x60\x00\x00\x03\x00\xb0\x00\x00\x03\x00\x00\x03\x00\x96\xa0\x03\xd0\x80\x13\x07\x1b\x2e\x5a\xee\x4c\x92\xea\x00\xbb\x42\x84\xa0\x00\x00\x00\x01\x44\x01\xc0\xe2\x4f\x09\xc1\x80\xc6\x08\x40\x00"
   fifo_file = open(fn, "wb")
-  fifo_file.write(HEADER)
-  fifo_file.flush()

   os.environ["ZMQ"] = "1"
   messaging.context = messaging.Context()
@ -26,23 +23,25 @@ def writer(fn, addr, sock_name):
     msgs = messaging.drain_sock(sock, wait_for_one=True)
     for evt in msgs:
       evta = getattr(evt, evt.which())
-      lat = ((evt.logMonoTime/1e9) - (evta.timestampEof/1e6))*1000
-      print("%2d %4d %.3f %.3f latency %.2f ms" % (len(msgs), evta.idx, evt.logMonoTime/1e9, evta.timestampEof/1e6, lat), len(evta.data), sock_name)
-      if evta.idx != 0 and evta.idx != (last_idx+1):
+      lat = ((evt.logMonoTime/1e9) - (evta.idx.timestampEof/1e9))*1000
+      print("%2d %4d %.3f %.3f latency %.2f ms" % (len(msgs), evta.idx.encodeId, evt.logMonoTime/1e9, evta.idx.timestampEof/1e6, lat), len(evta.data), sock_name)
+      if evta.idx.encodeId != 0 and evta.idx.encodeId != (last_idx+1):
         print("DROP!")
-      last_idx = evta.idx
+      last_idx = evta.idx.encodeId

-      if evta.flags & V4L2_BUF_FLAG_KEYFRAME or evta.flags == 0x7f001030:
+      if evta.idx.flags & V4L2_BUF_FLAG_KEYFRAME:
+        fifo_file.write(evta.header)
         seen_iframe = True
       if not seen_iframe:
         print("waiting for iframe")
         continue
       fifo_file.write(evta.data)
+      fifo_file.flush()

+FFMPEG_OPTIONS = {"probesize": "32", "flags": "low_delay"}

 def decoder_nvidia(fn, vipc_server, vst, yuv=True, rgb=False):
   sys.path.append("/raid.dell2/PyNvCodec")
   import PyNvCodec as nvc # pylint: disable=import-error
-  decoder = nvc.PyNvDecoder(fn, 0, {"probesize": "32"})
+  decoder = nvc.PyNvDecoder(fn, 0, FFMPEG_OPTIONS)

   cc1 = nvc.ColorspaceConversionContext(nvc.ColorSpace.BT_709, nvc.ColorRange.JPEG)
   if rgb:
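Note the writer change here: instead of priming the FIFO with a hardcoded HEVC VPS/SPS/PPS blob, the tool now writes evta.header, the codec config the encoder itself publishes alongside each keyframe, and flushes after every packet. Together with the low_delay flag added to FFMPEG_OPTIONS, this keeps decode latency down and guarantees the header matches what the hardware actually produced.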
@ -72,7 +71,7 @@ def decoder_nvidia(fn, vipc_server, vst, yuv=True, rgb=False):

 def decoder_ffmpeg(fn, vipc_server, vst, yuv=True, rgb=False):
   import av # pylint: disable=import-error
-  container = av.open(fn, options={"probesize": "32"})
+  container = av.open(fn, options=FFMPEG_OPTIONS)
   cnt = 0
   for frame in container.decode(video=0):
     if rgb:
@ -87,6 +86,7 @@ import argparse
 if __name__ == "__main__":
   parser = argparse.ArgumentParser(description='Decode video streams and broadcast on VisionIPC')
   parser.add_argument("addr", help="Address of comma 3")
+  parser.add_argument('--pipes', action='store_true', help='Only create pipes')
   parser.add_argument('--nvidia', action='store_true', help='Use nvidia instead of ffmpeg')
   parser.add_argument('--rgb', action='store_true', help='Also broadcast RGB')
   parser.add_argument("--cams", default="0,1,2", help="Cameras to decode")
@ -112,7 +112,9 @@ if __name__ == "__main__":
       os.unlink(FIFO_NAME)
     os.mkfifo(FIFO_NAME)
     multiprocessing.Process(target=writer, args=(FIFO_NAME, sys.argv[1], k)).start()
-    if args.nvidia:
+    if args.pipes:
+      print("connect to", FIFO_NAME)
+    elif args.nvidia:
       multiprocessing.Process(target=decoder_nvidia, args=(FIFO_NAME, vipc_server, v, True, args.rgb)).start()
     else:
       multiprocessing.Process(target=decoder_ffmpeg, args=(FIFO_NAME, vipc_server, v, True, args.rgb)).start()
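Usage note: with --pipes the script only creates the FIFOs and prints their paths, so the raw H.265 elementary streams can be consumed by an external decoder instead of the bundled nvidia/ffmpeg paths. Pointing any ffmpeg-based player at the printed FIFO path should work, though that workflow is an assumption on my part, not something this diff exercises.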
