framereader: use swscale to handle the YUV conversion (#22710)

old-commit-hash: 921600b95c
commatwo_master
Dean Lee authored 4 years ago; committed by GitHub
parent da6ebc2f9d
commit c742e81f3e
  1. selfdrive/camerad/cameras/camera_replay.cc (3)
  2. selfdrive/ui/SConscript (2)
  3. selfdrive/ui/replay/framereader.cc (46)
  4. selfdrive/ui/replay/framereader.h (6)

@@ -49,12 +49,13 @@ void run_camera(CameraState *s) {
   uint32_t stream_frame_id = 0, frame_id = 0;
   size_t buf_idx = 0;
   std::unique_ptr<uint8_t[]> rgb_buf = std::make_unique<uint8_t[]>(s->frame->getRGBSize());
+  std::unique_ptr<uint8_t[]> yuv_buf = std::make_unique<uint8_t[]>(s->frame->getYUVSize());
   while (!do_exit) {
     if (stream_frame_id == s->frame->getFrameCount()) {
       // loop stream
       stream_frame_id = 0;
     }
-    if (s->frame->get(stream_frame_id++, rgb_buf.get(), nullptr)) {
+    if (s->frame->get(stream_frame_id++, rgb_buf.get(), yuv_buf.get())) {
       s->buf.camera_bufs_metadata[buf_idx] = {.frame_id = frame_id};
       auto &buf = s->buf.camera_bufs[buf_idx];
       CL_CHECK(clEnqueueWriteBuffer(buf.copy_q, buf.buf_cl, CL_TRUE, 0, s->frame->getRGBSize(), rgb_buf.get(), 0, NULL, NULL));
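
For reference, a rough sketch of how the two buffer sizes above relate to the frame dimensions. The helpers below are illustrative, not the project's getRGBSize()/getYUVSize(), and assume packed 3-byte RGB output plus planar 4:2:0 YUV:

    #include <cstddef>

    // Illustrative size helpers for a width x height frame (assumed layouts:
    // packed 3 bytes per pixel for RGB, planar YUV 4:2:0 for YUV).
    static size_t rgb_buffer_size(int width, int height) {
      return (size_t)width * height * 3;       // one B, G, R byte per pixel
    }
    static size_t yuv_buffer_size(int width, int height) {
      return (size_t)width * height * 3 / 2;   // full Y plane + quarter-size U and V planes
    }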

@@ -113,7 +113,7 @@ if arch in ['x86_64', 'Darwin'] and os.path.exists(Dir("#tools/").get_abspath())
   replay_lib_src = ["replay/replay.cc", "replay/camera.cc", "replay/logreader.cc", "replay/framereader.cc", "replay/route.cc", "replay/util.cc"]
   replay_lib = qt_env.Library("qt_replay", replay_lib_src, LIBS=base_libs)
-  replay_libs = [replay_lib, 'avutil', 'avcodec', 'avformat', 'bz2', 'curl', 'yuv'] + qt_libs
+  replay_libs = [replay_lib, 'avutil', 'avcodec', 'avformat', 'bz2', 'curl', 'swscale'] + qt_libs
   qt_env.Program("replay/replay", ["replay/main.cc"], LIBS=replay_libs)
   qt_env.Program("watch3", ["watch3.cc"], LIBS=qt_libs + ['common', 'json11'])

@@ -3,7 +3,6 @@
 #include <unistd.h>
 #include <cassert>
 #include <mutex>
-#include "libyuv.h"

 static int ffmpeg_lockmgr_cb(void **arg, enum AVLockOp op) {
   std::mutex *mutex = (std::mutex *)*arg;
@@ -45,12 +44,10 @@ FrameReader::~FrameReader() {
     avcodec_close(pCodecCtx_);
     avcodec_free_context(&pCodecCtx_);
   }
-  if (pFormatCtx_) {
-    avformat_close_input(&pFormatCtx_);
-  }
-  if (av_frame_) {
-    av_frame_free(&av_frame_);
-  }
+  if (pFormatCtx_) avformat_close_input(&pFormatCtx_);
+  if (av_frame_) av_frame_free(&av_frame_);
+  if (rgb_frame_) av_frame_free(&rgb_frame_);
+  if (sws_ctx_) sws_freeContext(sws_ctx_);
 }

 bool FrameReader::load(const std::string &url) {
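
The destructor now releases four resources in matching pairs (av_frame_alloc/av_frame_free, sws_getContext/sws_freeContext). As an aside, the same cleanup could be expressed with RAII; a minimal sketch assuming only the FFmpeg and swscale APIs already used above (not what this commit does, just an alternative shape):

    #include <memory>
    extern "C" {
    #include <libavutil/frame.h>
    #include <libswscale/swscale.h>
    }

    // Deleters mirroring the manual cleanup in ~FrameReader(): av_frame_free()
    // takes an AVFrame**, so it needs a small wrapper; sws_freeContext() does not.
    struct AVFrameDeleter {
      void operator()(AVFrame *f) const { av_frame_free(&f); }
    };
    struct SwsContextDeleter {
      void operator()(SwsContext *c) const { sws_freeContext(c); }
    };

    using AVFramePtr = std::unique_ptr<AVFrame, AVFrameDeleter>;
    using SwsContextPtr = std::unique_ptr<SwsContext, SwsContextDeleter>;

    // Usage: AVFramePtr frame{av_frame_alloc()};  SwsContextPtr sws{sws_getContext(...)};
    // Both are released automatically when the owning object is destroyed.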
@@ -77,9 +74,14 @@ bool FrameReader::load(const std::string &url) {
   if (ret < 0) return false;

   av_frame_ = av_frame_alloc();
+  rgb_frame_ = av_frame_alloc();

   width = pCodecCtxOrig->width;
   height = pCodecCtxOrig->height;
+  sws_ctx_ = sws_getContext(width, height, AV_PIX_FMT_YUV420P,
+                            width, height, AV_PIX_FMT_BGR24,
+                            SWS_FAST_BILINEAR, NULL, NULL, NULL);
+  if (!sws_ctx_) return false;

   frames_.reserve(60 * 20); // 20fps, one minute
   while (true) {
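
A self-contained sketch of the context setup added here. The resolution is an arbitrary example, not taken from the diff; the key point is that one SwsContext is created up front and reused for every decoded frame:

    extern "C" {
    #include <libswscale/swscale.h>
    }

    int main() {
      const int width = 1928, height = 1208;  // example dimensions only
      // Planar YUV 4:2:0 in, packed BGR24 out; since source and destination sizes
      // match, SWS_FAST_BILINEAR only selects the (unused) scaling algorithm.
      SwsContext *sws_ctx = sws_getContext(width, height, AV_PIX_FMT_YUV420P,
                                           width, height, AV_PIX_FMT_BGR24,
                                           SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
      if (!sws_ctx) return 1;
      // ... call sws_scale() once per decoded frame ...
      sws_freeContext(sws_ctx);
      return 0;
    }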
@@ -97,7 +99,7 @@ bool FrameReader::load(const std::string &url) {
 }

 bool FrameReader::get(int idx, uint8_t *rgb, uint8_t *yuv) {
-  assert(rgb != nullptr);
+  assert(rgb != nullptr && yuv != nullptr);
   if (!valid_ || idx < 0 || idx >= frames_.size()) {
     return false;
   }
@@ -140,27 +142,11 @@ bool FrameReader::decode(int idx, uint8_t *rgb, uint8_t *yuv) {
 }

 bool FrameReader::decodeFrame(AVFrame *f, uint8_t *rgb, uint8_t *yuv) {
-  uint8_t *y = yuv;
-  if (!yuv) {
-    if (yuv_buf_.empty()) {
-      yuv_buf_.resize(getYUVSize());
-    }
-    y = yuv_buf_.data();
-  }
-
-  int i, j, k;
-  for (i = 0; i < f->height; i++) {
-    memcpy(y + f->width * i, f->data[0] + f->linesize[0] * i, f->width);
-  }
-  for (j = 0; j < f->height / 2; j++) {
-    memcpy(y + f->width * i + f->width / 2 * j, f->data[1] + f->linesize[1] * j, f->width / 2);
-  }
-  for (k = 0; k < f->height / 2; k++) {
-    memcpy(y + f->width * i + f->width / 2 * j + f->width / 2 * k, f->data[2] + f->linesize[2] * k, f->width / 2);
-  }
-
-  uint8_t *u = y + f->width * f->height;
-  uint8_t *v = u + (f->width / 2) * (f->height / 2);
-  libyuv::I420ToRGB24(y, f->width, u, f->width / 2, v, f->width / 2, rgb, f->width * 3, f->width, f->height);
-  return true;
+  // image is written directly to the output buffers, no alignment (align = 1)
+  int ret = av_image_copy_to_buffer(yuv, getYUVSize(), f->data, f->linesize, AV_PIX_FMT_YUV420P, f->width, f->height, 1);
+  if (ret < 0) return false;
+
+  av_image_fill_arrays(rgb_frame_->data, rgb_frame_->linesize, rgb, AV_PIX_FMT_BGR24, f->width, f->height, 1);
+  ret = sws_scale(sws_ctx_, (const uint8_t **)f->data, f->linesize, 0, f->height, rgb_frame_->data, rgb_frame_->linesize);
+  return ret >= 0;
 }
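
Taken together, the new decodeFrame() body does two things: av_image_copy_to_buffer() packs the decoder's (possibly padded) YUV planes into the caller's tightly packed buffer, and sws_scale() converts the same planes to BGR24. A standalone sketch of that pattern, with illustrative names and the buffer-size assumptions stated in the comments:

    extern "C" {
    #include <libavutil/frame.h>
    #include <libavutil/imgutils.h>
    #include <libswscale/swscale.h>
    }

    // Assumes f is a decoded AV_PIX_FMT_YUV420P frame, yuv points to at least
    // width*height*3/2 bytes, and bgr points to at least width*height*3 bytes.
    static bool convert_frame(SwsContext *sws_ctx, const AVFrame *f,
                              uint8_t *yuv, int yuv_size, uint8_t *bgr) {
      // copy the Y/U/V planes into one contiguous buffer, dropping any line padding
      if (av_image_copy_to_buffer(yuv, yuv_size, f->data, f->linesize,
                                  AV_PIX_FMT_YUV420P, f->width, f->height, 1) < 0) {
        return false;
      }
      // convert to packed BGR directly into the caller's buffer
      uint8_t *dst_data[4] = {bgr, nullptr, nullptr, nullptr};
      int dst_linesize[4] = {f->width * 3, 0, 0, 0};  // 3 bytes per pixel, no row padding
      return sws_scale(sws_ctx, f->data, f->linesize, 0, f->height,
                       dst_data, dst_linesize) >= 0;
    }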

@@ -6,6 +6,8 @@
 extern "C" {
 #include <libavcodec/avcodec.h>
 #include <libavformat/avformat.h>
+#include <libswscale/swscale.h>
+#include <libavutil/imgutils.h>
 }

 class FrameReader {
@@ -31,10 +33,10 @@ private:
     bool failed = false;
   };

   std::vector<Frame> frames_;
-  AVFrame *av_frame_ = nullptr;
+  SwsContext *sws_ctx_ = nullptr;
+  AVFrame *av_frame_ = nullptr, *rgb_frame_ = nullptr;
   AVFormatContext *pFormatCtx_ = nullptr;
   AVCodecContext *pCodecCtx_ = nullptr;
   int key_frames_count_ = 0;
-  std::vector<uint8_t> yuv_buf_;
   bool valid_ = false;
 };
