Merge branch 'master' of ssh://github.com/commaai/openpilot into improve-debugging-for-safety

pull/36055/head
Adeeb Shihadeh 3 weeks ago
commit 31ae45970f
  Changed files:
  1. opendbc_repo (2 changes)
  2. selfdrive/car/tests/test_car_interfaces.py (45 changes)
  3. selfdrive/modeld/models/driving_policy.onnx (2 changes)
  4. selfdrive/modeld/models/driving_vision.onnx (2 changes)
  5. selfdrive/test/process_replay/ref_commit (2 changes)
  6. system/hardware/base.py (7 changes)
  7. system/hardware/esim.py (27 changes)
  8. system/hardware/tici/esim.py (29 changes)
  9. system/hardware/tici/hardware.py (10 changes)
  10. tools/auto_source.py (4 changes)
  11. tools/replay/SConscript (2 changes)
  12. tools/replay/framereader.cc (65 changes)
  13. tools/replay/framereader.h (27 changes)
  14. tools/replay/qcom_decoder.cc (346 changes)
  15. tools/replay/qcom_decoder.h (88 changes)

@@ -1 +1 @@
Subproject commit ec4e322b052c7386312d0765b496c8b1264038ae
Subproject commit 7afc25d8d4096bb31e25c0b7ae0b961ea05f5394

@@ -1,15 +1,12 @@
import os
import math
import hypothesis.strategies as st
from hypothesis import Phase, given, settings
from parameterized import parameterized
from cereal import car
from opendbc.car import DT_CTRL
from opendbc.car.car_helpers import interfaces
from opendbc.car.structs import CarParams
from opendbc.car.tests.test_car_interfaces import get_fuzzy_car_interface_args
from opendbc.car.fw_versions import FW_VERSIONS, FW_QUERY_CONFIGS
from opendbc.car.tests.test_car_interfaces import get_fuzzy_car_interface
from opendbc.car.mock.values import CAR as MOCK
from opendbc.car.values import PLATFORMS
from openpilot.selfdrive.controls.lib.latcontrol_angle import LatControlAngle
@@ -18,11 +15,6 @@ from openpilot.selfdrive.controls.lib.latcontrol_torque import LatControlTorque
from openpilot.selfdrive.controls.lib.longcontrol import LongControl
from openpilot.selfdrive.test.fuzzy_generation import FuzzyGenerator
ALL_ECUS = {ecu for ecus in FW_VERSIONS.values() for ecu in ecus.keys()}
ALL_ECUS |= {ecu for config in FW_QUERY_CONFIGS.values() for ecu in config.extra_ecus}
ALL_REQUESTS = {tuple(r.request) for config in FW_QUERY_CONFIGS.values() for r in config.requests}
MAX_EXAMPLES = int(os.environ.get('MAX_EXAMPLES', '60'))
@@ -34,39 +26,8 @@ class TestCarInterfaces:
phases=(Phase.reuse, Phase.generate, Phase.shrink))
@given(data=st.data())
def test_car_interfaces(self, car_name, data):
CarInterface = interfaces[car_name]
args = get_fuzzy_car_interface_args(data.draw)
car_params = CarInterface.get_params(car_name, args['fingerprints'], args['car_fw'],
alpha_long=args['alpha_long'], is_release=False, docs=False)
car_params = car_params.as_reader()
car_interface = CarInterface(car_params)
assert car_params
assert car_interface
assert car_params.mass > 1
assert car_params.wheelbase > 0
# centerToFront is center of gravity to front wheels, assert a reasonable range
assert car_params.wheelbase * 0.3 < car_params.centerToFront < car_params.wheelbase * 0.7
assert car_params.maxLateralAccel > 0
# Longitudinal sanity checks
assert len(car_params.longitudinalTuning.kpV) == len(car_params.longitudinalTuning.kpBP)
assert len(car_params.longitudinalTuning.kiV) == len(car_params.longitudinalTuning.kiBP)
# Lateral sanity checks
if car_params.steerControlType != CarParams.SteerControlType.angle:
tune = car_params.lateralTuning
if tune.which() == 'pid':
if car_name != MOCK.MOCK:
assert not math.isnan(tune.pid.kf) and tune.pid.kf > 0
assert len(tune.pid.kpV) > 0 and len(tune.pid.kpV) == len(tune.pid.kpBP)
assert len(tune.pid.kiV) > 0 and len(tune.pid.kiV) == len(tune.pid.kiBP)
elif tune.which() == 'torque':
assert not math.isnan(tune.torque.kf) and tune.torque.kf > 0
assert not math.isnan(tune.torque.friction) and tune.torque.friction > 0
car_interface = get_fuzzy_car_interface(car_name, data.draw)
car_params = car_interface.CP.as_reader()
cc_msg = FuzzyGenerator.get_random_msg(data.draw, car.CarControl, real_floats=True)
# Run car interface
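Note: the inline parameter setup and sanity checks removed above now live in opendbc, and this test only pulls in get_fuzzy_car_interface. A rough sketch of what that helper presumably bundles, reconstructed from the removed lines (the real implementation is in opendbc.car.tests.test_car_interfaces and may differ, e.g. in how CP is stored on the returned interface):
def get_fuzzy_car_interface(car_name, draw):
  # Hypothetical reconstruction for illustration only.
  CarInterface = interfaces[car_name]
  args = get_fuzzy_car_interface_args(draw)
  car_params = CarInterface.get_params(car_name, args['fingerprints'], args['car_fw'],
                                        alpha_long=args['alpha_long'], is_release=False, docs=False)
  return CarInterface(car_params.as_reader())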

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:72e98a95541f200bd2faeae8d718997483696fd4801fc7d718c167b05854707d
oid sha256:ebb38a934d6472c061cc6010f46d9720ca132d631a47e585a893bdd41ade2419
size 12343535

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e66bb8d53eced3786ed71a59b55ffc6810944cb217f0518621cc76303260a1ef
oid sha256:befac016a247b7ad5dc5b55d339d127774ed7bd2b848f1583f72aa4caee37781
size 46271991

@@ -1 +1 @@
4c677a3ebcbd3d4faa3de98e3fb9c0bb83b47926
afcab1abb62b9d5678342956cced4712f44e909e

@@ -65,6 +65,10 @@ class ThermalConfig:
return ret
class LPABase(ABC):
@abstractmethod
def bootstrap(self) -> None:
pass
@abstractmethod
def list_profiles(self) -> list[Profile]:
pass
@@ -89,6 +93,9 @@ class LPABase(ABC):
def switch_profile(self, iccid: str) -> None:
pass
def is_comma_profile(self, iccid: str) -> bool:
return any(iccid.startswith(prefix) for prefix in ('8985235',))
class HardwareBase(ABC):
@staticmethod
def get_cmdline() -> dict[str, str]:
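Note: the new is_comma_profile() default is a plain ICCID prefix check; a minimal illustration with invented ICCIDs (the prefix tuple currently holds only '8985235'):
from openpilot.system.hardware import HARDWARE

lpa = HARDWARE.get_sim_lpa()
lpa.is_comma_profile('8985235000000000000')   # True: comma-provisioned prefix
lpa.is_comma_profile('8944500000000000000')   # False: user-provisioned profile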

@@ -3,10 +3,32 @@
import argparse
import time
from openpilot.system.hardware import HARDWARE
from openpilot.system.hardware.base import LPABase
def bootstrap(lpa: LPABase) -> None:
print('┌──────────────────────────────────────────────────────────────────────────────┐')
print('│ WARNING, PLEASE READ BEFORE PROCEEDING │')
print('│ │')
print('│ this is an irreversible operation that will remove the comma-provisioned │')
print('│ profile. │')
print('│ │')
print('│ after this operation, you must purchase a new eSIM from comma in order to │')
print('│ use the comma prime subscription again. │')
print('└──────────────────────────────────────────────────────────────────────────────┘')
print()
for severity in ('sure', '100% sure'):
print(f'are you {severity} you want to proceed? (y/N) ', end='')
confirm = input()
if confirm != 'y':
print('aborting')
exit(0)
lpa.bootstrap()
if __name__ == '__main__':
parser = argparse.ArgumentParser(prog='esim.py', description='manage eSIM profiles on your comma device', epilog='comma.ai')
parser.add_argument('--bootstrap', action='store_true', help='bootstrap the eUICC (required before downloading profiles)')
parser.add_argument('--backend', choices=['qmi', 'at'], default='qmi', help='use the specified backend, defaults to qmi')
parser.add_argument('--switch', metavar='iccid', help='switch to profile')
parser.add_argument('--delete', metavar='iccid', help='delete profile (warning: this cannot be undone)')
@@ -16,7 +38,10 @@ if __name__ == '__main__':
mutated = False
lpa = HARDWARE.get_sim_lpa()
if args.switch:
if args.bootstrap:
bootstrap(lpa)
mutated = True
elif args.switch:
lpa.switch_profile(args.switch)
mutated = True
elif args.delete:
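Usage sketch: the flow above is reached with python system/hardware/esim.py --bootstrap (optionally --backend at). The programmatic equivalent, minus the double confirmation, is a one-liner; this is illustrative only and just as irreversible:
from openpilot.system.hardware import HARDWARE

HARDWARE.get_sim_lpa().bootstrap()  # removes the comma-provisioned profile from the eUICC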

@@ -40,6 +40,7 @@ class TiciLPA(LPABase):
self._process_notifications()
def download_profile(self, qr: str, nickname: str | None = None) -> None:
self._check_bootstrapped()
msgs = self._invoke('profile', 'download', '-a', qr)
self._validate_successful(msgs)
new_profile = next((m for m in msgs if m['payload']['message'] == 'es8p_meatadata_parse'), None)
@@ -54,6 +55,7 @@ class TiciLPA(LPABase):
self._validate_successful(self._invoke('profile', 'nickname', iccid, nickname))
def switch_profile(self, iccid: str) -> None:
self._check_bootstrapped()
self._validate_profile_exists(iccid)
latest = self.get_active_profile()
if latest and latest.iccid == iccid:
@@ -61,6 +63,33 @@
self._validate_successful(self._invoke('profile', 'enable', iccid))
self._process_notifications()
def bootstrap(self) -> None:
"""
find all comma-provisioned profiles and delete them. they conflict with user-provisioned profiles
and must be deleted.
**note**: this is a **very** destructive operation. you **must** purchase a new comma SIM in order
to use comma prime again.
"""
if self._is_bootstrapped():
return
for p in self.list_profiles():
if self.is_comma_profile(p.iccid):
self._disable_profile(p.iccid)
self.delete_profile(p.iccid)
def _disable_profile(self, iccid: str) -> None:
self._validate_successful(self._invoke('profile', 'disable', iccid))
self._process_notifications()
def _check_bootstrapped(self) -> None:
assert self._is_bootstrapped(), 'eUICC is not bootstrapped, please bootstrap before performing this operation'
def _is_bootstrapped(self) -> bool:
""" check if any comma provisioned profiles are on the eUICC """
return not any(self.is_comma_profile(iccid) for iccid in (p.iccid for p in self.list_profiles()))
def _invoke(self, *cmd: str):
proc = subprocess.Popen(['sudo', '-E', 'lpac'] + list(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=self.env)
try:
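A hedged example of the new gating: while the comma-provisioned profile is still on the eUICC, _check_bootstrapped() makes profile downloads and switches fail fast (the activation code below is a placeholder):
from openpilot.system.hardware import HARDWARE

lpa = HARDWARE.get_sim_lpa()
try:
  lpa.download_profile('LPA:1$rsp.example.com$PLACEHOLDER')
except AssertionError as e:
  print(e)  # eUICC is not bootstrapped, please bootstrap before performing this operation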

@@ -487,7 +487,7 @@ class Tici(HardwareBase):
# eSIM prime
dest = "/etc/NetworkManager/system-connections/esim.nmconnection"
if sim_id.startswith('8985235') and not os.path.exists(dest):
if self.get_sim_lpa().is_comma_profile(sim_id) and not os.path.exists(dest):
with open(Path(__file__).parent/'esim.nmconnection') as f, tempfile.NamedTemporaryFile(mode='w') as tf:
dat = f.read()
dat = dat.replace("sim-id=", f"sim-id={sim_id}")
@@ -498,6 +498,14 @@
os.system(f"sudo cp {tf.name} {dest}")
os.system(f"sudo nmcli con load {dest}")
def reboot_modem(self):
modem = self.get_modem()
for state in (0, 1):
try:
modem.Command(f'AT+CFUN={state}', math.ceil(TIMEOUT), dbus_interface=MM_MODEM, timeout=TIMEOUT)
except Exception:
pass
def get_networks(self):
r = {}

@@ -1,6 +1,6 @@
#!/usr/bin/env python3
import sys
from openpilot.tools.lib.logreader import LogReader
from openpilot.tools.lib.logreader import LogReader, ReadMode
def main():
@@ -9,7 +9,7 @@ def main():
sys.exit(1)
log_path = sys.argv[1]
lr = LogReader(log_path, sort_by_time=True)
lr = LogReader(log_path, default_mode=ReadMode.AUTO, sort_by_time=True)
print("\n".join(lr.logreader_identifiers))

@@ -12,7 +12,7 @@ else:
base_libs.append('OpenCL')
replay_lib_src = ["replay.cc", "consoleui.cc", "camera.cc", "filereader.cc", "logreader.cc", "framereader.cc",
"route.cc", "util.cc", "seg_mgr.cc", "timeline.cc", "api.cc"]
"route.cc", "util.cc", "seg_mgr.cc", "timeline.cc", "api.cc", "qcom_decoder.cc"]
replay_lib = replay_env.Library("replay", replay_lib_src, LIBS=base_libs, FRAMEWORKS=base_frameworks)
Export('replay_lib')
replay_libs = [replay_lib, 'avutil', 'avcodec', 'avformat', 'bz2', 'zstd', 'curl', 'yuv', 'ncurses'] + base_libs

@@ -8,6 +8,7 @@
#include "common/util.h"
#include "third_party/libyuv/include/libyuv.h"
#include "tools/replay/util.h"
#include "system/hardware/hw.h"
#ifdef __APPLE__
#define HW_DEVICE_TYPE AV_HWDEVICE_TYPE_VIDEOTOOLBOX
@@ -37,7 +38,13 @@ struct DecoderManager {
return it->second.get();
}
auto decoder = std::make_unique<VideoDecoder>();
std::unique_ptr<VideoDecoder> decoder;
if (Hardware::TICI() && hw_decoder) {
decoder = std::make_unique<QcomVideoDecoder>();
} else {
decoder = std::make_unique<FFmpegVideoDecoder>();
}
if (!decoder->open(codecpar, hw_decoder)) {
decoder.reset(nullptr);
}
@@ -114,19 +121,19 @@ bool FrameReader::get(int idx, VisionBuf *buf) {
// class VideoDecoder
VideoDecoder::VideoDecoder() {
FFmpegVideoDecoder::FFmpegVideoDecoder() {
av_frame_ = av_frame_alloc();
hw_frame_ = av_frame_alloc();
}
VideoDecoder::~VideoDecoder() {
FFmpegVideoDecoder::~FFmpegVideoDecoder() {
if (hw_device_ctx) av_buffer_unref(&hw_device_ctx);
if (decoder_ctx) avcodec_free_context(&decoder_ctx);
av_frame_free(&av_frame_);
av_frame_free(&hw_frame_);
}
bool VideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) {
bool FFmpegVideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) {
const AVCodec *decoder = avcodec_find_decoder(codecpar->codec_id);
if (!decoder) return false;
@@ -149,7 +156,7 @@ bool VideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) {
return true;
}
bool VideoDecoder::initHardwareDecoder(AVHWDeviceType hw_device_type) {
bool FFmpegVideoDecoder::initHardwareDecoder(AVHWDeviceType hw_device_type) {
const AVCodecHWConfig *config = nullptr;
for (int i = 0; (config = avcodec_get_hw_config(decoder_ctx->codec, i)) != nullptr; i++) {
if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX && config->device_type == hw_device_type) {
@@ -175,7 +182,7 @@ bool VideoDecoder::initHardwareDecoder(AVHWDeviceType hw_device_type) {
return true;
}
bool VideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) {
bool FFmpegVideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) {
int current_idx = idx;
if (idx != reader->prev_idx + 1) {
// seeking to the nearest key frame
@@ -219,7 +226,7 @@ bool VideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) {
return false;
}
AVFrame *VideoDecoder::decodeFrame(AVPacket *pkt) {
AVFrame *FFmpegVideoDecoder::decodeFrame(AVPacket *pkt) {
int ret = avcodec_send_packet(decoder_ctx, pkt);
if (ret < 0) {
rError("Error sending a packet for decoding: %d", ret);
@@ -239,7 +246,7 @@ AVFrame *VideoDecoder::decodeFrame(AVPacket *pkt) {
return (av_frame_->format == hw_pix_fmt) ? hw_frame_ : av_frame_;
}
bool VideoDecoder::copyBuffer(AVFrame *f, VisionBuf *buf) {
bool FFmpegVideoDecoder::copyBuffer(AVFrame *f, VisionBuf *buf) {
if (hw_pix_fmt == HW_PIX_FMT) {
for (int i = 0; i < height/2; i++) {
memcpy(buf->y + (i*2 + 0)*buf->stride, f->data[0] + (i*2 + 0)*f->linesize[0], width);
@@ -256,3 +263,45 @@ bool VideoDecoder::copyBuffer(AVFrame *f, VisionBuf *buf) {
}
return true;
}
bool QcomVideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) {
if (codecpar->codec_id != AV_CODEC_ID_HEVC) {
rError("Hardware decoder only supports HEVC codec");
return false;
}
width = codecpar->width;
height = codecpar->height;
msm_vidc.init(VIDEO_DEVICE, width, height, V4L2_PIX_FMT_HEVC);
return true;
}
bool QcomVideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) {
int from_idx = idx;
if (idx != reader->prev_idx + 1) {
// seeking to the nearest key frame
for (int i = idx; i >= 0; --i) {
if (reader->packets_info[i].flags & AV_PKT_FLAG_KEY) {
from_idx = i;
break;
}
}
auto pos = reader->packets_info[from_idx].pos;
int ret = avformat_seek_file(reader->input_ctx, 0, pos, pos, pos, AVSEEK_FLAG_BYTE);
if (ret < 0) {
rError("Failed to seek to byte position %lld: %d", pos, AVERROR(ret));
return false;
}
}
reader->prev_idx = idx;
bool result = false;
AVPacket pkt;
msm_vidc.avctx = reader->input_ctx;
for (int i = from_idx; i <= idx; ++i) {
if (av_read_frame(reader->input_ctx, &pkt) == 0) {
result = msm_vidc.decodeFrame(&pkt, buf) && (i == idx);
av_packet_unref(&pkt);
}
}
return result;
}
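The seek path mirrors the FFmpeg decoder: rewind to the nearest preceding keyframe, then decode forward and keep only the requested frame. A small Python illustration of that index arithmetic (AV_PKT_FLAG_KEY is 0x0001):
def frames_to_decode(packet_flags, prev_idx, idx, key_flag=0x0001):
  # Returns the packet indices that must be fed to the decoder to produce frame idx.
  from_idx = idx
  if idx != prev_idx + 1:
    for i in range(idx, -1, -1):
      if packet_flags[i] & key_flag:   # AV_PKT_FLAG_KEY
        from_idx = i
        break
  return range(from_idx, idx + 1)

# keyframe every 10 frames: decoding frame 25 after a random seek touches packets 20..25
flags = [0x0001 if i % 10 == 0 else 0 for i in range(100)]
assert list(frames_to_decode(flags, prev_idx=99, idx=25)) == [20, 21, 22, 23, 24, 25]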

@@ -6,6 +6,7 @@
#include "msgq/visionipc/visionbuf.h"
#include "tools/replay/filereader.h"
#include "tools/replay/util.h"
#include "tools/replay/qcom_decoder.h"
extern "C" {
#include <libavcodec/avcodec.h>
@@ -40,11 +41,18 @@ public:
class VideoDecoder {
public:
VideoDecoder();
~VideoDecoder();
bool open(AVCodecParameters *codecpar, bool hw_decoder);
bool decode(FrameReader *reader, int idx, VisionBuf *buf);
virtual ~VideoDecoder() = default;
virtual bool open(AVCodecParameters *codecpar, bool hw_decoder) = 0;
virtual bool decode(FrameReader *reader, int idx, VisionBuf *buf) = 0;
int width = 0, height = 0;
};
class FFmpegVideoDecoder : public VideoDecoder {
public:
FFmpegVideoDecoder();
~FFmpegVideoDecoder() override;
bool open(AVCodecParameters *codecpar, bool hw_decoder) override;
bool decode(FrameReader *reader, int idx, VisionBuf *buf) override;
private:
bool initHardwareDecoder(AVHWDeviceType hw_device_type);
@@ -56,3 +64,14 @@ private:
AVPixelFormat hw_pix_fmt = AV_PIX_FMT_NONE;
AVBufferRef *hw_device_ctx = nullptr;
};
class QcomVideoDecoder : public VideoDecoder {
public:
QcomVideoDecoder() {};
~QcomVideoDecoder() override {};
bool open(AVCodecParameters *codecpar, bool hw_decoder) override;
bool decode(FrameReader *reader, int idx, VisionBuf *buf) override;
private:
MsmVidc msm_vidc = MsmVidc();
};

@@ -0,0 +1,346 @@
#include "qcom_decoder.h"
#include <assert.h>
#include "third_party/linux/include/v4l2-controls.h"
#include <linux/videodev2.h>
#include "common/swaglog.h"
#include "common/util.h"
// echo "0xFFFF" > /sys/kernel/debug/msm_vidc/debug_level
static void copyBuffer(VisionBuf *src_buf, VisionBuf *dst_buf) {
// Copy Y plane
memcpy(dst_buf->y, src_buf->y, src_buf->height * src_buf->stride);
// Copy UV plane
memcpy(dst_buf->uv, src_buf->uv, src_buf->height / 2 * src_buf->stride);
}
static void request_buffers(int fd, v4l2_buf_type buf_type, unsigned int count) {
struct v4l2_requestbuffers reqbuf = {
.count = count,
.type = buf_type,
.memory = V4L2_MEMORY_USERPTR
};
util::safe_ioctl(fd, VIDIOC_REQBUFS, &reqbuf, "VIDIOC_REQBUFS failed");
}
MsmVidc::~MsmVidc() {
if (fd > 0) {
close(fd);
}
}
bool MsmVidc::init(const char* dev, size_t width, size_t height, uint64_t codec) {
LOG("Initializing msm_vidc device %s", dev);
this->w = width;
this->h = height;
this->fd = open(dev, O_RDWR, 0);
if (fd < 0) {
LOGE("failed to open video device %s", dev);
return false;
}
subscribeEvents();
v4l2_buf_type out_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
setPlaneFormat(out_type, V4L2_PIX_FMT_HEVC); // Also allocates the output buffer
setFPS(FPS);
request_buffers(fd, out_type, OUTPUT_BUFFER_COUNT);
util::safe_ioctl(fd, VIDIOC_STREAMON, &out_type, "VIDIOC_STREAMON OUTPUT failed");
restartCapture();
setupPolling();
this->initialized = true;
return true;
}
VisionBuf* MsmVidc::decodeFrame(AVPacket *pkt, VisionBuf *buf) {
assert(initialized && (pkt != nullptr) && (buf != nullptr));
this->frame_ready = false;
this->current_output_buf = buf;
bool sent_packet = false;
while (!this->frame_ready) {
if (!sent_packet) {
int buf_index = getBufferUnlocked();
if (buf_index >= 0) {
assert(buf_index < out_buf_cnt);
sendPacket(buf_index, pkt);
sent_packet = true;
}
}
if (poll(pfd, nfds, -1) < 0) {
LOGE("poll() error: %d", errno);
return nullptr;
}
if (VisionBuf* result = processEvents()) {
return result;
}
}
return buf;
}
VisionBuf* MsmVidc::processEvents() {
for (int idx = 0; idx < nfds; idx++) {
short revents = pfd[idx].revents;
if (!revents) continue;
if (idx == ev[EV_VIDEO]) {
if (revents & (POLLIN | POLLRDNORM)) {
VisionBuf *result = handleCapture();
if (result == this->current_output_buf) {
this->frame_ready = true;
}
}
if (revents & (POLLOUT | POLLWRNORM)) {
handleOutput();
}
if (revents & POLLPRI) {
handleEvent();
}
} else {
LOGE("Unexpected event on fd %d", pfd[idx].fd);
}
}
return nullptr;
}
VisionBuf* MsmVidc::handleCapture() {
struct v4l2_buffer buf = {0};
struct v4l2_plane planes[1] = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
buf.memory = V4L2_MEMORY_USERPTR;
buf.m.planes = planes;
buf.length = 1;
util::safe_ioctl(this->fd, VIDIOC_DQBUF, &buf, "VIDIOC_DQBUF CAPTURE failed");
if (this->reconfigure_pending || buf.m.planes[0].bytesused == 0) {
return nullptr;
}
copyBuffer(&cap_bufs[buf.index], this->current_output_buf);
queueCaptureBuffer(buf.index);
return this->current_output_buf;
}
bool MsmVidc::subscribeEvents() {
for (uint32_t event : subscriptions) {
struct v4l2_event_subscription sub = { .type = event};
util::safe_ioctl(fd, VIDIOC_SUBSCRIBE_EVENT, &sub, "VIDIOC_SUBSCRIBE_EVENT failed");
}
return true;
}
bool MsmVidc::setPlaneFormat(enum v4l2_buf_type type, uint32_t fourcc) {
struct v4l2_format fmt = {.type = type};
struct v4l2_pix_format_mplane *pix = &fmt.fmt.pix_mp;
*pix = {
.width = (__u32)this->w,
.height = (__u32)this->h,
.pixelformat = fourcc
};
util::safe_ioctl(fd, VIDIOC_S_FMT, &fmt, "VIDIOC_S_FMT failed");
if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
this->out_buf_size = pix->plane_fmt[0].sizeimage;
int ion_size = this->out_buf_size * OUTPUT_BUFFER_COUNT; // Output (input) buffers are ION buffer.
this->out_buf.allocate(ion_size); // mmap rw
for (int i = 0; i < OUTPUT_BUFFER_COUNT; i++) {
this->out_buf_off[i] = i * this->out_buf_size;
this->out_buf_addr[i] = (char *)this->out_buf.addr + this->out_buf_off[i];
this->out_buf_flag[i] = false;
}
LOGD("Set output buffer size to %d, count %d, addr %p", this->out_buf_size, OUTPUT_BUFFER_COUNT, this->out_buf.addr);
} else if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
request_buffers(this->fd, type, CAPTURE_BUFFER_COUNT);
util::safe_ioctl(fd, VIDIOC_G_FMT, &fmt, "VIDIOC_G_FMT failed");
const __u32 y_size = pix->plane_fmt[0].sizeimage;
const __u32 y_stride = pix->plane_fmt[0].bytesperline;
for (int i = 0; i < CAPTURE_BUFFER_COUNT; i++) {
size_t uv_offset = (size_t)y_stride * pix->height;
size_t required = uv_offset + (y_stride * pix->height / 2); // enough for Y + UV. For linear NV12, UV plane starts at y_stride * height.
size_t alloc_size = std::max<size_t>(y_size, required);
this->cap_bufs[i].allocate(alloc_size);
this->cap_bufs[i].init_yuv(pix->width, pix->height, y_stride, uv_offset);
}
LOGD("Set capture buffer size to %d, count %d, addr %p, extradata size %d",
pix->plane_fmt[0].sizeimage, CAPTURE_BUFFER_COUNT, this->cap_bufs[0].addr, pix->plane_fmt[1].sizeimage);
}
return true;
}
bool MsmVidc::setFPS(uint32_t fps) {
struct v4l2_streamparm streamparam = {
.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE,
.parm.output.timeperframe = {1, fps}
};
util::safe_ioctl(fd, VIDIOC_S_PARM, &streamparam, "VIDIOC_S_PARM failed");
return true;
}
bool MsmVidc::restartCapture() {
// stop if already initialized
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
if (this->initialized) {
LOGD("Restarting capture, flushing buffers...");
util::safe_ioctl(this->fd, VIDIOC_STREAMOFF, &type, "VIDIOC_STREAMOFF CAPTURE failed");
struct v4l2_requestbuffers reqbuf = {.type = type, .memory = V4L2_MEMORY_USERPTR};
util::safe_ioctl(this->fd, VIDIOC_REQBUFS, &reqbuf, "VIDIOC_REQBUFS failed");
for (size_t i = 0; i < CAPTURE_BUFFER_COUNT; ++i) {
this->cap_bufs[i].free();
this->cap_buf_flag[i] = false; // mark as not queued
cap_bufs[i].~VisionBuf();
new (&cap_bufs[i]) VisionBuf();
}
}
// setup, start and queue capture buffers
setDBP();
setPlaneFormat(type, V4L2_PIX_FMT_NV12);
util::safe_ioctl(this->fd, VIDIOC_STREAMON, &type, "VIDIOC_STREAMON CAPTURE failed");
for (size_t i = 0; i < CAPTURE_BUFFER_COUNT; ++i) {
queueCaptureBuffer(i);
}
return true;
}
bool MsmVidc::queueCaptureBuffer(int i) {
struct v4l2_buffer buf = {0};
struct v4l2_plane planes[1] = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
buf.memory = V4L2_MEMORY_USERPTR;
buf.index = i;
buf.m.planes = planes;
buf.length = 1;
// decoded frame plane
planes[0].m.userptr = (unsigned long)this->cap_bufs[i].addr; // no security
planes[0].length = this->cap_bufs[i].len;
planes[0].reserved[0] = this->cap_bufs[i].fd; // ION fd
planes[0].reserved[1] = 0;
planes[0].bytesused = this->cap_bufs[i].len;
planes[0].data_offset = 0;
util::safe_ioctl(this->fd, VIDIOC_QBUF, &buf, "VIDIOC_QBUF failed");
this->cap_buf_flag[i] = true; // mark as queued
return true;
}
bool MsmVidc::queueOutputBuffer(int i, size_t size) {
struct v4l2_buffer buf = {0};
struct v4l2_plane planes[1] = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
buf.memory = V4L2_MEMORY_USERPTR;
buf.index = i;
buf.m.planes = planes;
buf.length = 1;
// decoded frame plane
planes[0].m.userptr = (unsigned long)this->out_buf_off[i]; // check this
planes[0].length = this->out_buf_size;
planes[0].reserved[0] = this->out_buf.fd; // ION fd
planes[0].reserved[1] = 0;
planes[0].bytesused = size;
planes[0].data_offset = 0;
assert((this->out_buf_off[i] & 0xfff) == 0); // must be 4 KiB aligned
assert(this->out_buf_size % 4096 == 0); // ditto for size
util::safe_ioctl(this->fd, VIDIOC_QBUF, &buf, "VIDIOC_QBUF failed");
this->out_buf_flag[i] = true; // mark as queued
return true;
}
bool MsmVidc::setDBP() {
struct v4l2_ext_control control[2] = {0};
struct v4l2_ext_controls controls = {0};
control[0].id = V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_MODE;
control[0].value = 1; // V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_SECONDARY
control[1].id = V4L2_CID_MPEG_VIDC_VIDEO_DPB_COLOR_FORMAT;
control[1].value = 0; // V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_NONE
controls.count = 2;
controls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
controls.controls = control;
util::safe_ioctl(fd, VIDIOC_S_EXT_CTRLS, &controls, "VIDIOC_S_EXT_CTRLS failed");
return true;
}
bool MsmVidc::setupPolling() {
// Initialize poll array
pfd[EV_VIDEO] = {fd, POLLIN | POLLOUT | POLLWRNORM | POLLRDNORM | POLLPRI, 0};
ev[EV_VIDEO] = EV_VIDEO;
nfds = 1;
return true;
}
bool MsmVidc::sendPacket(int buf_index, AVPacket *pkt) {
assert(buf_index >= 0 && buf_index < out_buf_cnt);
assert(pkt != nullptr && pkt->data != nullptr && pkt->size > 0);
// Prepare output buffer
memset(this->out_buf_addr[buf_index], 0, this->out_buf_size);
uint8_t * data = (uint8_t *)this->out_buf_addr[buf_index];
memcpy(data, pkt->data, pkt->size);
queueOutputBuffer(buf_index, pkt->size);
return true;
}
int MsmVidc::getBufferUnlocked() {
for (int i = 0; i < this->out_buf_cnt; i++) {
if (!out_buf_flag[i]) {
return i;
}
}
return -1;
}
bool MsmVidc::handleOutput() {
struct v4l2_buffer buf = {0};
struct v4l2_plane planes[1];
buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
buf.memory = V4L2_MEMORY_USERPTR;
buf.m.planes = planes;
buf.length = 1;
util::safe_ioctl(this->fd, VIDIOC_DQBUF, &buf, "VIDIOC_DQBUF OUTPUT failed");
this->out_buf_flag[buf.index] = false; // mark as not queued
return true;
}
bool MsmVidc::handleEvent() {
// dequeue event
struct v4l2_event event = {0};
util::safe_ioctl(this->fd, VIDIOC_DQEVENT, &event, "VIDIOC_DQEVENT failed");
switch (event.type) {
case V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT: {
unsigned int *ptr = (unsigned int *)event.u.data;
unsigned int height = ptr[0];
unsigned int width = ptr[1];
this->w = width;
this->h = height;
LOGD("Port Reconfig received insufficient, new size %ux%u, flushing capture bufs...", width, height); // This is normal
struct v4l2_decoder_cmd dec;
dec.flags = V4L2_QCOM_CMD_FLUSH_CAPTURE;
dec.cmd = V4L2_QCOM_CMD_FLUSH;
util::safe_ioctl(this->fd, VIDIOC_DECODER_CMD, &dec, "VIDIOC_DECODER_CMD FLUSH_CAPTURE failed");
this->reconfigure_pending = true;
LOGD("Waiting for flush done event to reconfigure capture queue");
break;
}
case V4L2_EVENT_MSM_VIDC_FLUSH_DONE: {
unsigned int *ptr = (unsigned int *)event.u.data;
unsigned int flags = ptr[0];
if (flags & V4L2_QCOM_CMD_FLUSH_CAPTURE) {
if (this->reconfigure_pending) {
this->restartCapture();
this->reconfigure_pending = false;
}
}
break;
}
default:
break;
}
return true;
}
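Two pieces of buffer arithmetic above are easy to miss: capture buffers are linear NV12, so the UV plane starts at y_stride * height and needs half as many rows as Y; and the bitstream (output) buffers are slices of one ION allocation at 4 KiB-aligned offsets, which queueOutputBuffer asserts. A quick Python check using this decoder's defaults (1928x1208; the stride and sizeimage values are assumptions for illustration, the real ones come from VIDIOC_G_FMT / VIDIOC_S_FMT):
width, height = 1928, 1208
y_stride = 2048                                  # assumed; the driver reports bytesperline
uv_offset = y_stride * height                    # UV plane directly follows Y
required = uv_offset + y_stride * height // 2    # Y plane + interleaved UV plane (NV12)
assert required == 3_710_976                     # what each cap_bufs[i] must at least hold

OUTPUT_BUFFER_COUNT = 8
out_buf_size = 0x200000                          # assumed sizeimage, a 4 KiB multiple
offsets = [i * out_buf_size for i in range(OUTPUT_BUFFER_COUNT)]
assert out_buf_size % 4096 == 0 and all(off & 0xfff == 0 for off in offsets)  # matches the asserts in queueOutputBuffer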

@@ -0,0 +1,88 @@
#pragma once
#include <linux/videodev2.h>
#include <poll.h>
#include "msgq/visionipc/visionbuf.h"
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}
#define V4L2_EVENT_MSM_VIDC_START (V4L2_EVENT_PRIVATE_START + 0x00001000)
#define V4L2_EVENT_MSM_VIDC_FLUSH_DONE (V4L2_EVENT_MSM_VIDC_START + 1)
#define V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT (V4L2_EVENT_MSM_VIDC_START + 3)
#define V4L2_CID_MPEG_MSM_VIDC_BASE 0x00992000
#define V4L2_CID_MPEG_VIDC_VIDEO_DPB_COLOR_FORMAT (V4L2_CID_MPEG_MSM_VIDC_BASE + 44)
#define V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_MODE (V4L2_CID_MPEG_MSM_VIDC_BASE + 22)
#define V4L2_QCOM_CMD_FLUSH_CAPTURE (1 << 1)
#define V4L2_QCOM_CMD_FLUSH (4)
#define VIDEO_DEVICE "/dev/video32"
#define OUTPUT_BUFFER_COUNT 8
#define CAPTURE_BUFFER_COUNT 8
#define FPS 20
class MsmVidc {
public:
MsmVidc() = default;
~MsmVidc();
bool init(const char* dev, size_t width, size_t height, uint64_t codec);
VisionBuf* decodeFrame(AVPacket* pkt, VisionBuf* buf);
AVFormatContext* avctx = nullptr;
int fd = 0;
private:
bool initialized = false;
bool reconfigure_pending = false;
bool frame_ready = false;
VisionBuf* current_output_buf = nullptr;
VisionBuf out_buf; // Single input buffer
VisionBuf cap_bufs[CAPTURE_BUFFER_COUNT]; // Capture (output) buffers
size_t w = 1928, h = 1208;
size_t cap_height = 0, cap_width = 0;
int cap_buf_size = 0;
int out_buf_size = 0;
size_t cap_plane_off[CAPTURE_BUFFER_COUNT] = {0};
size_t cap_plane_stride[CAPTURE_BUFFER_COUNT] = {0};
bool cap_buf_flag[CAPTURE_BUFFER_COUNT] = {false};
size_t out_buf_off[OUTPUT_BUFFER_COUNT] = {0};
void* out_buf_addr[OUTPUT_BUFFER_COUNT] = {0};
bool out_buf_flag[OUTPUT_BUFFER_COUNT] = {false};
const int out_buf_cnt = OUTPUT_BUFFER_COUNT;
const int subscriptions[2] = {
V4L2_EVENT_MSM_VIDC_FLUSH_DONE,
V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT
};
enum { EV_VIDEO, EV_COUNT };
struct pollfd pfd[EV_COUNT] = {0};
int ev[EV_COUNT] = {-1};
int nfds = 0;
VisionBuf* processEvents();
bool setupOutput();
bool subscribeEvents();
bool setPlaneFormat(v4l2_buf_type type, uint32_t fourcc);
bool setFPS(uint32_t fps);
bool restartCapture();
bool queueCaptureBuffer(int i);
bool queueOutputBuffer(int i, size_t size);
bool setDBP();
bool setupPolling();
bool sendPacket(int buf_index, AVPacket* pkt);
int getBufferUnlocked();
VisionBuf* handleCapture();
bool handleOutput();
bool handleEvent();
};