camerad: full buffer size for IFE processing (#34141)

* camerad: full buffer size for IFE processing

* assert

* revert

---------

Co-authored-by: Comma Device <device@comma.ai>
pull/34143/head
Authored by Adeeb Shihadeh 5 months ago; committed via GitHub
parent 24dfa0e1bf
commit 556060f793
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
Changed files (4):
  1. system/camerad/cameras/camera_common.cc — 11 lines changed
  2. system/camerad/cameras/camera_common.h — 2 lines changed
  3. system/camerad/cameras/spectra.cc — 43 lines changed
  4. system/camerad/cameras/spectra.h — 13 lines changed

@@ -65,16 +65,19 @@ void CameraBuf::init(cl_device_id device_id, cl_context context, SpectraCamera *
const SensorInfo *sensor = cam->sensor.get();
is_raw = cam->is_raw;
camera_bufs_raw = std::make_unique<VisionBuf[]>(frame_buf_count);
frame_metadata = std::make_unique<FrameMetadata[]>(frame_buf_count);
// RAW + final frames from ISP
// RAW frames from ISP
if (is_raw) {
camera_bufs_raw = std::make_unique<VisionBuf[]>(frame_buf_count);
const int raw_frame_size = (sensor->frame_height + sensor->extra_height) * sensor->frame_stride;
for (int i = 0; i < frame_buf_count; i++) {
camera_bufs_raw[i].allocate(raw_frame_size);
camera_bufs_raw[i].init_cl(device_id, context);
}
LOGD("allocated %d CL buffers", frame_buf_count);
}
out_img_width = sensor->frame_width;
out_img_height = sensor->hdr_offset > 0 ? (sensor->frame_height - sensor->hdr_offset) / 2 : sensor->frame_height;
@@ -83,8 +86,8 @@ void CameraBuf::init(cl_device_id device_id, cl_context context, SpectraCamera *
// TODO: VENUS_BUFFER_SIZE should give the size, but it's too small. dependent on encoder settings?
size_t nv12_size = (out_img_width <= 1344 ? 2900 : 2346)*cam->stride;
vipc_server->create_buffers_with_sizes(stream_type, YUV_BUFFER_COUNT, out_img_width, out_img_height, nv12_size, cam->stride, cam->uv_offset);
LOGD("created %d YUV vipc buffers with size %dx%d", YUV_BUFFER_COUNT, cam->stride, cam->y_height);
vipc_server->create_buffers_with_sizes(stream_type, VIPC_BUFFER_COUNT, out_img_width, out_img_height, nv12_size, cam->stride, cam->uv_offset);
LOGD("created %d YUV vipc buffers with size %dx%d", VIPC_BUFFER_COUNT, cam->stride, cam->y_height);
imgproc = new ImgProc(device_id, context, this, sensor, cam->cc.camera_num, cam->stride, cam->uv_offset);
}

@@ -8,7 +8,7 @@
#include "common/util.h"
const int YUV_BUFFER_COUNT = 20;
const int VIPC_BUFFER_COUNT = 18;
typedef struct FrameMetadata {
uint32_t frame_id;

@@ -242,6 +242,9 @@ SpectraCamera::SpectraCamera(SpectraMaster *master, const CameraConfig &config,
cc(config),
is_raw(raw) {
mm.init(m->video0_fd);
ife_buf_depth = is_raw ? 4 : VIPC_BUFFER_COUNT;
assert(ife_buf_depth < MAX_IFE_BUFS);
}
SpectraCamera::~SpectraCamera() {
@@ -288,14 +291,15 @@ void SpectraCamera::camera_open(VisionIpcServer *v, cl_device_id device_id, cl_c
linkDevices();
LOGD("camera init %d", cc.camera_num);
buf.init(device_id, ctx, this, v, FRAME_BUF_COUNT, cc.stream_type);
buf.init(device_id, ctx, this, v, ife_buf_depth, cc.stream_type);
camera_map_bufs();
enqueue_req_multi(1, ife_buf_depth, 0);
}
void SpectraCamera::enqueue_req_multi(uint64_t start, int n, bool dp) {
for (uint64_t i = start; i < start + n; ++i) {
request_ids[(i - 1) % FRAME_BUF_COUNT] = i;
enqueue_buffer((i - 1) % FRAME_BUF_COUNT, dp);
request_ids[(i - 1) % ife_buf_depth] = i;
enqueue_buffer((i - 1) % ife_buf_depth, dp);
}
}
@@ -670,7 +674,7 @@ void SpectraCamera::enqueue_buffer(int i, bool dp) {
int ret;
uint64_t request_id = request_ids[i];
if (buf_handle_raw[i] && sync_objs[i]) {
if (sync_objs[i]) {
// wait
struct cam_sync_wait sync_wait = {0};
sync_wait.sync_obj = sync_objs[i];
@@ -733,23 +737,23 @@ void SpectraCamera::enqueue_buffer(int i, bool dp) {
void SpectraCamera::camera_map_bufs() {
int ret;
for (int i = 0; i < FRAME_BUF_COUNT; i++) {
for (int i = 0; i < ife_buf_depth; i++) {
// configure ISP to put the image in place
struct cam_mem_mgr_map_cmd mem_mgr_map_cmd = {0};
mem_mgr_map_cmd.flags = CAM_MEM_FLAG_HW_READ_WRITE;
mem_mgr_map_cmd.mmu_hdls[0] = m->device_iommu;
mem_mgr_map_cmd.num_hdl = 1;
//mem_mgr_map_cmd.mmu_hdls[1] = m->icp_device_iommu;
//mem_mgr_map_cmd.num_hdl = 2;
mem_mgr_map_cmd.num_hdl = 1;
mem_mgr_map_cmd.flags = CAM_MEM_FLAG_HW_READ_WRITE;
if (is_raw) {
// RAW bayer images
mem_mgr_map_cmd.fd = buf.camera_bufs_raw[i].fd;
ret = do_cam_control(m->video0_fd, CAM_REQ_MGR_MAP_BUF, &mem_mgr_map_cmd, sizeof(mem_mgr_map_cmd));
assert(ret == 0);
LOGD("map buf req: (fd: %d) 0x%x %d", buf.camera_bufs_raw[i].fd, mem_mgr_map_cmd.out.buf_handle, ret);
buf_handle_raw[i] = mem_mgr_map_cmd.out.buf_handle;
// TODO: this needs to match camera bufs length
} else {
// final processed images
VisionBuf *vb = buf.vipc_server->get_buffer(buf.stream_type, i);
mem_mgr_map_cmd.fd = vb->fd;
@@ -757,7 +761,7 @@ void SpectraCamera::camera_map_bufs() {
LOGD("map buf req: (fd: %d) 0x%x %d", vb->fd, mem_mgr_map_cmd.out.buf_handle, ret);
buf_handle_yuv[i] = mem_mgr_map_cmd.out.buf_handle;
}
enqueue_req_multi(1, FRAME_BUF_COUNT, 0);
}
}
bool SpectraCamera::openSensor() {
@@ -870,7 +874,7 @@ void SpectraCamera::configISP() {
// allocate IFE memory, then configure it
ife_cmd.init(m, 67984, 0x20,
CAM_MEM_FLAG_HW_READ_WRITE | CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE,
m->device_iommu, m->cdm_iommu, FRAME_BUF_COUNT);
m->device_iommu, m->cdm_iommu, ife_buf_depth);
if (!is_raw) {
ife_gamma_lut.init(m, 64*sizeof(uint32_t), 0x20,
CAM_MEM_FLAG_HW_READ_WRITE | CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE,
@@ -925,7 +929,7 @@ void SpectraCamera::configICP() {
// BPS CMD buffer
unsigned char striping_out[] = "\x00";
bps_cmd.init(m, FRAME_BUF_COUNT*ALIGNED_SIZE(464, 0x20), 0x20,
bps_cmd.init(m, ife_buf_depth*ALIGNED_SIZE(464, 0x20), 0x20,
CAM_MEM_FLAG_HW_READ_WRITE | CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE | CAM_MEM_FLAG_HW_SHARED_ACCESS,
m->icp_device_iommu);
@@ -1046,9 +1050,8 @@ void SpectraCamera::camera_close() {
ret = device_control(csiphy_fd, CAM_RELEASE_DEV, session_handle, csiphy_dev_handle);
LOGD("release csiphy: %d", ret);
for (int i = 0; i < FRAME_BUF_COUNT; i++) {
release(m->video0_fd, buf_handle_yuv[i]);
release(m->video0_fd, buf_handle_raw[i]);
for (int i = 0; i < ife_buf_depth; i++) {
release(m->video0_fd, is_raw ? buf_handle_raw[i] : buf_handle_yuv[i]);
}
LOGD("released buffers");
}
@@ -1071,13 +1074,13 @@ void SpectraCamera::handle_camera_event(const cam_req_mgr_message *event_data) {
if (real_id != 0) { // next ready
if (real_id == 1) {idx_offset = main_id;}
int buf_idx = (real_id - 1) % FRAME_BUF_COUNT;
int buf_idx = (real_id - 1) % ife_buf_depth;
// check for skipped frames
if (main_id > frame_id_last + 1 && !skipped) {
LOGE("camera %d realign", cc.camera_num);
clear_req_queue();
enqueue_req_multi(real_id + 1, FRAME_BUF_COUNT - 1, 0);
enqueue_req_multi(real_id + 1, ife_buf_depth - 1, 0);
skipped = true;
} else if (main_id == frame_id_last + 1) {
skipped = false;
@@ -1086,7 +1089,7 @@ void SpectraCamera::handle_camera_event(const cam_req_mgr_message *event_data) {
// check for dropped requests
if (real_id > request_id_last + 1) {
LOGE("camera %d dropped requests %ld %ld", cc.camera_num, real_id, request_id_last);
enqueue_req_multi(request_id_last + 1 + FRAME_BUF_COUNT, real_id - (request_id_last + 1), 0);
enqueue_req_multi(request_id_last + 1 + ife_buf_depth, real_id - (request_id_last + 1), 0);
}
// metas
@@ -1099,12 +1102,12 @@ void SpectraCamera::handle_camera_event(const cam_req_mgr_message *event_data) {
meta_data.timestamp_sof = timestamp; // this is timestamped in the kernel's SOF IRQ callback
// dispatch
enqueue_req_multi(real_id + FRAME_BUF_COUNT, 1, 1);
enqueue_req_multi(real_id + ife_buf_depth, 1, 1);
} else { // not ready
if (main_id > frame_id_last + 10) {
LOGE("camera %d reset after half second of no response", cc.camera_num);
clear_req_queue();
enqueue_req_multi(request_id_last + 1, FRAME_BUF_COUNT, 0);
enqueue_req_multi(request_id_last + 1, ife_buf_depth, 0);
frame_id_last = main_id;
skipped = true;
}

@@ -12,7 +12,7 @@
#include "system/camerad/cameras/camera_common.h"
#include "system/camerad/sensors/sensor.h"
#define FRAME_BUF_COUNT 4
#define MAX_IFE_BUFS 20
const int MIPI_SETTLE_CNT = 33; // Calculated by camera_freqs.py
@@ -116,6 +116,7 @@ public:
// *** state ***
int ife_buf_depth = -1;
bool open = false;
bool enabled = true;
CameraConfig cc;
@@ -151,11 +152,11 @@ public:
SpectraBuf bps_iq;
SpectraBuf bps_striping;
int buf_handle_yuv[FRAME_BUF_COUNT] = {};
int buf_handle_raw[FRAME_BUF_COUNT] = {};
int sync_objs[FRAME_BUF_COUNT] = {};
int sync_objs_bps_out[FRAME_BUF_COUNT] = {};
uint64_t request_ids[FRAME_BUF_COUNT] = {};
int buf_handle_yuv[MAX_IFE_BUFS] = {};
int buf_handle_raw[MAX_IFE_BUFS] = {};
int sync_objs[MAX_IFE_BUFS] = {};
int sync_objs_bps_out[MAX_IFE_BUFS] = {};
uint64_t request_ids[MAX_IFE_BUFS] = {};
uint64_t request_id_last = 0;
uint64_t frame_id_last = 0;
uint64_t idx_offset = 0;

Loading…
Cancel
Save