diff --git a/selfdrive/ui/qt/widgets/cameraview.h b/selfdrive/ui/qt/widgets/cameraview.h
index 73f4858c4b..29aa8493c7 100644
--- a/selfdrive/ui/qt/widgets/cameraview.h
+++ b/selfdrive/ui/qt/widgets/cameraview.h
@@ -23,11 +23,9 @@
 #endif
 
 #include "msgq/visionipc/visionipc_client.h"
-#include "system/camerad/cameras/camera_common.h"
 #include "selfdrive/ui/ui.h"
 
 const int FRAME_BUFFER_SIZE = 5;
-static_assert(FRAME_BUFFER_SIZE <= YUV_BUFFER_COUNT);
 
 class CameraWidget : public QOpenGLWidget, protected QOpenGLFunctions {
   Q_OBJECT
diff --git a/system/camerad/SConscript b/system/camerad/SConscript
index 511664c275..4e2eddeb42 100644
--- a/system/camerad/SConscript
+++ b/system/camerad/SConscript
@@ -3,7 +3,7 @@ Import('env', 'arch', 'messaging', 'common', 'gpucommon', 'visionipc')
 libs = ['m', 'pthread', common, 'jpeg', 'OpenCL', 'yuv', messaging, visionipc, gpucommon, 'atomic']
 
 camera_obj = env.Object(['cameras/camera_qcom2.cc', 'cameras/camera_common.cc', 'cameras/camera_util.cc',
-                         'sensors/ar0231.cc', 'sensors/ox03c10.cc', 'sensors/os04c10.cc'])
+                         'cameras/spectra.cc', 'sensors/ar0231.cc', 'sensors/ox03c10.cc', 'sensors/os04c10.cc'])
 env.Program('camerad', ['main.cc', camera_obj], LIBS=libs)
 
 if GetOption("extras") and arch == "x86_64":
diff --git a/system/camerad/cameras/camera_common.cc b/system/camerad/cameras/camera_common.cc
index c89fc7f83a..a4ac385440 100644
--- a/system/camerad/cameras/camera_common.cc
+++ b/system/camerad/cameras/camera_common.cc
@@ -10,16 +10,13 @@
 #include "common/swaglog.h"
 #include "third_party/linux/include/msm_media_info.h"
 
-#include "system/camerad/cameras/camera_qcom2.h"
-#ifdef QCOM2
-#include "CL/cl_ext_qcom.h"
-#endif
+#include "system/camerad/cameras/spectra.h"
+
 
 class ImgProc {
 public:
-  ImgProc(cl_device_id device_id, cl_context context, const CameraBuf *b, const CameraState *s, int buf_width, int uv_offset) {
+  ImgProc(cl_device_id device_id, cl_context context, const CameraBuf *b, const SensorInfo *sensor, int camera_num, int buf_width, int uv_offset) {
    char args[4096];
-    const SensorInfo *sensor = s->sensor.get();
    snprintf(args, sizeof(args),
             "-cl-fast-relaxed-math -cl-denorms-are-zero -Isensors "
             "-DFRAME_WIDTH=%d -DFRAME_HEIGHT=%d -DFRAME_STRIDE=%d -DFRAME_OFFSET=%d "
@@ -27,7 +24,7 @@ public:
             "-DSENSOR_ID=%hu -DHDR_OFFSET=%d -DVIGNETTING=%d ",
             sensor->frame_width, sensor->frame_height, sensor->hdr_offset > 0 ? sensor->frame_stride * 2 : sensor->frame_stride, sensor->frame_offset,
             b->out_img_width, b->out_img_height, buf_width, uv_offset,
-             static_cast(sensor->image_sensor), sensor->hdr_offset, s->cc.camera_num == 1);
+             static_cast(sensor->image_sensor), sensor->hdr_offset, camera_num == 1);
    const char *cl_file = "cameras/process_raw.cl";
    cl_program prg_imgproc = cl_program_from_file(context, device_id, cl_file, args);
    krnl_ = CL_CHECK_ERR(clCreateKernel(prg_imgproc, "process_raw", &err));
@@ -62,12 +59,13 @@ private:
  cl_command_queue queue;
 };
 
-void CameraBuf::init(cl_device_id device_id, cl_context context, CameraState *s, VisionIpcServer * v, int frame_cnt, VisionStreamType type) {
+void CameraBuf::init(cl_device_id device_id, cl_context context, SpectraCamera *cam, VisionIpcServer * v, int frame_cnt, VisionStreamType type) {
  vipc_server = v;
  stream_type = type;
  frame_buf_count = frame_cnt;
 
-  const SensorInfo *sensor = s->sensor.get();
+  const SensorInfo *sensor = cam->sensor.get();
+
+  // RAW frame
  const int frame_size = (sensor->frame_height + sensor->extra_height) * sensor->frame_stride;
  camera_bufs = std::make_unique(frame_buf_count);
@@ -95,7 +93,7 @@ void CameraBuf::init(cl_device_id device_id, cl_context context, CameraState *s,
  vipc_server->create_buffers_with_sizes(stream_type, YUV_BUFFER_COUNT, false, out_img_width, out_img_height, nv12_size, nv12_width, nv12_uv_offset);
  LOGD("created %d YUV vipc buffers with size %dx%d", YUV_BUFFER_COUNT, nv12_width, nv12_height);
 
-  imgproc = new ImgProc(device_id, context, this, s, nv12_width, nv12_uv_offset);
+  imgproc = new ImgProc(device_id, context, this, sensor, cam->cc.camera_num, nv12_width, nv12_uv_offset);
 }
 
 CameraBuf::~CameraBuf() {
@@ -138,24 +136,6 @@ void CameraBuf::queue(size_t buf_idx) {
 
 // common functions
 
-void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &frame_data, CameraState *c) {
-  framed.setFrameId(frame_data.frame_id);
-  framed.setRequestId(frame_data.request_id);
-  framed.setTimestampEof(frame_data.timestamp_eof);
-  framed.setTimestampSof(frame_data.timestamp_sof);
-  framed.setIntegLines(frame_data.integ_lines);
-  framed.setGain(frame_data.gain);
-  framed.setHighConversionGain(frame_data.high_conversion_gain);
-  framed.setMeasuredGreyFraction(frame_data.measured_grey_fraction);
-  framed.setTargetGreyFraction(frame_data.target_grey_fraction);
-  framed.setProcessingTime(frame_data.processing_time);
-
-  const float ev = c->cur_ev[frame_data.frame_id % 3];
-  const float perc = util::map_val(ev, c->sensor->min_ev, c->sensor->max_ev, 0.0f, 100.0f);
-  framed.setExposureValPercent(perc);
-  framed.setSensor(c->sensor->image_sensor);
-}
-
 kj::Array get_raw_frame_image(const CameraBuf *b) {
  const uint8_t *dat = (const uint8_t *)b->cur_camera_buf->addr;
@@ -283,30 +263,6 @@ float set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_sk
  return lum_med / 256.0;
 }
 
-void camerad_thread() {
-  cl_device_id device_id = cl_get_device_id(CL_DEVICE_TYPE_DEFAULT);
-#ifdef QCOM2
-  const cl_context_properties props[] = {CL_CONTEXT_PRIORITY_HINT_QCOM, CL_PRIORITY_HINT_HIGH_QCOM, 0};
-  cl_context context = CL_CHECK_ERR(clCreateContext(props, 1, &device_id, NULL, NULL, &err));
-#else
-  cl_context context = CL_CHECK_ERR(clCreateContext(NULL, 1, &device_id, NULL, NULL, &err));
-#endif
-
-  {
-    MultiCameraState cameras;
-    cameras.pm = new PubMaster({"roadCameraState", "driverCameraState", "wideRoadCameraState", "thumbnail"});
-    VisionIpcServer vipc_server("camerad", device_id, context);
-
cameras_init(&cameras, &vipc_server, device_id, context);
-    vipc_server.start_listener();
-    cameras_run(&cameras);
-  }
-
-  CL_CHECK(clReleaseContext(context));
-}
-
 int open_v4l_by_name_and_index(const char name[], int index, int flags) {
  for (int v4l_index = 0; /**/; ++v4l_index) {
    std::string v4l_name = util::read_file(util::string_format("/sys/class/video4linux/v4l-subdev%d/name", v4l_index));
diff --git a/system/camerad/cameras/camera_common.h b/system/camerad/cameras/camera_common.h
index 567a03c193..d1f0b0d0e1 100644
--- a/system/camerad/cameras/camera_common.h
+++ b/system/camerad/cameras/camera_common.h
@@ -8,6 +8,7 @@
 #include "common/queue.h"
 #include "common/util.h"
 
+
 const int YUV_BUFFER_COUNT = 20;
 
 enum CameraType {
@@ -39,7 +40,7 @@ typedef struct FrameMetadata {
  float processing_time;
 } FrameMetadata;
 
-struct MultiCameraState;
+class SpectraCamera;
 class CameraState;
 class ImgProc;
 
@@ -62,18 +63,13 @@ public:
  CameraBuf() = default;
  ~CameraBuf();
-  void init(cl_device_id device_id, cl_context context, CameraState *s, VisionIpcServer * v, int frame_cnt, VisionStreamType type);
+  void init(cl_device_id device_id, cl_context context, SpectraCamera *cam, VisionIpcServer * v, int frame_cnt, VisionStreamType type);
  bool acquire();
  void queue(size_t buf_idx);
 };
 
-void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &frame_data, CameraState *c);
+void camerad_thread();
 kj::Array get_raw_frame_image(const CameraBuf *b);
 float set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_skip);
 void publish_thumbnail(PubMaster *pm, const CameraBuf *b);
-void cameras_init(MultiCameraState *s, VisionIpcServer *v, cl_device_id device_id, cl_context ctx);
-void cameras_run(MultiCameraState *s);
-void cameras_close(MultiCameraState *s);
-void camerad_thread();
-
 int open_v4l_by_name_and_index(const char name[], int index = 0, int flags = O_RDWR | O_NONBLOCK);
diff --git a/system/camerad/cameras/camera_qcom2.cc b/system/camerad/cameras/camera_qcom2.cc
index 6febbf36a9..3b2b9db330 100644
--- a/system/camerad/cameras/camera_qcom2.cc
+++ b/system/camerad/cameras/camera_qcom2.cc
@@ -1,4 +1,5 @@
-#include "system/camerad/cameras/camera_qcom2.h"
+#include "system/camerad/cameras/camera_common.h"
+#include "system/camerad/cameras/spectra.h"
 
 #include
 #include
@@ -11,759 +12,62 @@
 #include
 #include
 
+#include "CL/cl_ext_qcom.h"
+
 #include "media/cam_defs.h"
-#include "media/cam_isp.h"
-#include "media/cam_isp_ife.h"
-#include "media/cam_req_mgr.h"
 #include "media/cam_sensor_cmn_header.h"
 #include "media/cam_sync.h"
 
+#include "common/clutil.h"
 #include "common/params.h"
 #include "common/swaglog.h"
 
-const int MIPI_SETTLE_CNT = 33; // Calculated by camera_freqs.py
 
 ExitHandler do_exit;
 
-CameraState::CameraState(MultiCameraState *multi_camera_state, const CameraConfig &config)
-  : multi_cam_state(multi_camera_state),
-    enabled(config.enabled) ,
-    cc(config) {
-}
-
-CameraState::~CameraState() {
-  if (open) {
-    camera_close();
-  }
-}
-
-
-int CameraState::clear_req_queue() {
-  struct cam_req_mgr_flush_info req_mgr_flush_request = {0};
-  req_mgr_flush_request.session_hdl = session_handle;
-  req_mgr_flush_request.link_hdl = link_handle;
-  req_mgr_flush_request.flush_type = CAM_REQ_MGR_FLUSH_TYPE_ALL;
-  int ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_FLUSH_REQ, &req_mgr_flush_request, sizeof(req_mgr_flush_request));
-  // LOGD("flushed all req: %d", ret);
-  return ret;
-}
-
-// ************** high level camera helpers ****************
-
-void
CameraState::sensors_start() { - if (!enabled) return; - LOGD("starting sensor %d", cc.camera_num); - sensors_i2c(sensor->start_reg_array.data(), sensor->start_reg_array.size(), CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, sensor->data_word); -} - -void CameraState::sensors_poke(int request_id) { - uint32_t cam_packet_handle = 0; - int size = sizeof(struct cam_packet); - auto pkt = mm.alloc(size, &cam_packet_handle); - pkt->num_cmd_buf = 0; - pkt->kmd_cmd_buf_index = -1; - pkt->header.size = size; - pkt->header.op_code = CAM_SENSOR_PACKET_OPCODE_SENSOR_NOP; - pkt->header.request_id = request_id; - - int ret = device_config(sensor_fd, session_handle, sensor_dev_handle, cam_packet_handle); - if (ret != 0) { - LOGE("** sensor %d FAILED poke, disabling", cc.camera_num); - enabled = false; - return; - } -} - -void CameraState::sensors_i2c(const struct i2c_random_wr_payload* dat, int len, int op_code, bool data_word) { - // LOGD("sensors_i2c: %d", len); - uint32_t cam_packet_handle = 0; - int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*1; - auto pkt = mm.alloc(size, &cam_packet_handle); - pkt->num_cmd_buf = 1; - pkt->kmd_cmd_buf_index = -1; - pkt->header.size = size; - pkt->header.op_code = op_code; - struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; - - buf_desc[0].size = buf_desc[0].length = sizeof(struct i2c_rdwr_header) + len*sizeof(struct i2c_random_wr_payload); - buf_desc[0].type = CAM_CMD_BUF_I2C; - - auto i2c_random_wr = mm.alloc(buf_desc[0].size, (uint32_t*)&buf_desc[0].mem_handle); - i2c_random_wr->header.count = len; - i2c_random_wr->header.op_code = 1; - i2c_random_wr->header.cmd_type = CAMERA_SENSOR_CMD_TYPE_I2C_RNDM_WR; - i2c_random_wr->header.data_type = data_word ? CAMERA_SENSOR_I2C_TYPE_WORD : CAMERA_SENSOR_I2C_TYPE_BYTE; - i2c_random_wr->header.addr_type = CAMERA_SENSOR_I2C_TYPE_WORD; - memcpy(i2c_random_wr->random_wr_payload, dat, len*sizeof(struct i2c_random_wr_payload)); - - int ret = device_config(sensor_fd, session_handle, sensor_dev_handle, cam_packet_handle); - if (ret != 0) { - LOGE("** sensor %d FAILED i2c, disabling", cc.camera_num); - enabled = false; - return; - } -} - -static cam_cmd_power *power_set_wait(cam_cmd_power *power, int16_t delay_ms) { - cam_cmd_unconditional_wait *unconditional_wait = (cam_cmd_unconditional_wait *)((char *)power + (sizeof(struct cam_cmd_power) + (power->count - 1) * sizeof(struct cam_power_settings))); - unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT; - unconditional_wait->delay = delay_ms; - unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND; - return (struct cam_cmd_power *)(unconditional_wait + 1); -} - -int CameraState::sensors_init() { - uint32_t cam_packet_handle = 0; - int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2; - auto pkt = mm.alloc(size, &cam_packet_handle); - pkt->num_cmd_buf = 2; - pkt->kmd_cmd_buf_index = -1; - pkt->header.op_code = 0x1000000 | CAM_SENSOR_PACKET_OPCODE_SENSOR_PROBE; - pkt->header.size = size; - struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; - - buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_cmd_i2c_info) + sizeof(struct cam_cmd_probe); - buf_desc[0].type = CAM_CMD_BUF_LEGACY; - auto i2c_info = mm.alloc(buf_desc[0].size, (uint32_t*)&buf_desc[0].mem_handle); - auto probe = (struct cam_cmd_probe *)(i2c_info.get() + 1); - - probe->camera_id = cc.camera_num; - i2c_info->slave_addr = sensor->getSlaveAddress(cc.camera_num); - // 0(I2C_STANDARD_MODE) = 100khz, 1(I2C_FAST_MODE) = 
400khz - //i2c_info->i2c_freq_mode = I2C_STANDARD_MODE; - i2c_info->i2c_freq_mode = I2C_FAST_MODE; - i2c_info->cmd_type = CAMERA_SENSOR_CMD_TYPE_I2C_INFO; - - probe->data_type = CAMERA_SENSOR_I2C_TYPE_WORD; - probe->addr_type = CAMERA_SENSOR_I2C_TYPE_WORD; - probe->op_code = 3; // don't care? - probe->cmd_type = CAMERA_SENSOR_CMD_TYPE_PROBE; - probe->reg_addr = sensor->probe_reg_addr; - probe->expected_data = sensor->probe_expected_data; - probe->data_mask = 0; - - //buf_desc[1].size = buf_desc[1].length = 148; - buf_desc[1].size = buf_desc[1].length = 196; - buf_desc[1].type = CAM_CMD_BUF_I2C; - auto power_settings = mm.alloc(buf_desc[1].size, (uint32_t*)&buf_desc[1].mem_handle); - - // power on - struct cam_cmd_power *power = power_settings.get(); - power->count = 4; - power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP; - power->power_settings[0].power_seq_type = 3; // clock?? - power->power_settings[1].power_seq_type = 1; // analog - power->power_settings[2].power_seq_type = 2; // digital - power->power_settings[3].power_seq_type = 8; // reset low - power = power_set_wait(power, 1); - - // set clock - power->count = 1; - power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP; - power->power_settings[0].power_seq_type = 0; - power->power_settings[0].config_val_low = sensor->mclk_frequency; - power = power_set_wait(power, 1); - - // reset high - power->count = 1; - power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP; - power->power_settings[0].power_seq_type = 8; - power->power_settings[0].config_val_low = 1; - // wait 650000 cycles @ 19.2 mhz = 33.8 ms - power = power_set_wait(power, 34); - - // probe happens here - - // disable clock - power->count = 1; - power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; - power->power_settings[0].power_seq_type = 0; - power->power_settings[0].config_val_low = 0; - power = power_set_wait(power, 1); - - // reset high - power->count = 1; - power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; - power->power_settings[0].power_seq_type = 8; - power->power_settings[0].config_val_low = 1; - power = power_set_wait(power, 1); - - // reset low - power->count = 1; - power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; - power->power_settings[0].power_seq_type = 8; - power->power_settings[0].config_val_low = 0; - power = power_set_wait(power, 1); - - // power off - power->count = 3; - power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; - power->power_settings[0].power_seq_type = 2; - power->power_settings[1].power_seq_type = 1; - power->power_settings[2].power_seq_type = 3; - - int ret = do_cam_control(sensor_fd, CAM_SENSOR_PROBE_CMD, (void *)(uintptr_t)cam_packet_handle, 0); - LOGD("probing the sensor: %d", ret); - return ret; -} - -void CameraState::config_isp(int io_mem_handle, int fence, int request_id, int buf0_mem_handle, int buf0_offset) { - uint32_t cam_packet_handle = 0; - int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2; - if (io_mem_handle != 0) { - size += sizeof(struct cam_buf_io_cfg); - } - auto pkt = mm.alloc(size, &cam_packet_handle); - pkt->num_cmd_buf = 2; - pkt->kmd_cmd_buf_index = 0; - // YUV has kmd_cmd_buf_offset = 1780 - // I guess this is the ISP command - // YUV also has patch_offset = 0x1030 and num_patches = 10 - - if (io_mem_handle != 0) { - pkt->io_configs_offset = sizeof(struct cam_cmd_buf_desc)*pkt->num_cmd_buf; - pkt->num_io_configs = 1; - } - - if (io_mem_handle != 0) { - pkt->header.op_code = 0xf000001; - pkt->header.request_id = request_id; - } else { - pkt->header.op_code = 0xf000000; - } - pkt->header.size = size; - struct 
cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; - struct cam_buf_io_cfg *io_cfg = (struct cam_buf_io_cfg *)((char*)&pkt->payload + pkt->io_configs_offset); - - // TODO: support MMU - buf_desc[0].size = 65624; - buf_desc[0].length = 0; - buf_desc[0].type = CAM_CMD_BUF_DIRECT; - buf_desc[0].meta_data = 3; - buf_desc[0].mem_handle = buf0_mem_handle; - buf_desc[0].offset = buf0_offset; - - // parsed by cam_isp_packet_generic_blob_handler - struct isp_packet { - uint32_t type_0; - cam_isp_resource_hfr_config resource_hfr; - - uint32_t type_1; - cam_isp_clock_config clock; - uint64_t extra_rdi_hz[3]; - - uint32_t type_2; - cam_isp_bw_config bw; - struct cam_isp_bw_vote extra_rdi_vote[6]; - } __attribute__((packed)) tmp; - memset(&tmp, 0, sizeof(tmp)); - - tmp.type_0 = CAM_ISP_GENERIC_BLOB_TYPE_HFR_CONFIG; - tmp.type_0 |= sizeof(cam_isp_resource_hfr_config) << 8; - static_assert(sizeof(cam_isp_resource_hfr_config) == 0x20); - tmp.resource_hfr = { - .num_ports = 1, // 10 for YUV (but I don't think we need them) - .port_hfr_config[0] = { - .resource_type = CAM_ISP_IFE_OUT_RES_RDI_0, // CAM_ISP_IFE_OUT_RES_FULL for YUV - .subsample_pattern = 1, - .subsample_period = 0, - .framedrop_pattern = 1, - .framedrop_period = 0, - }}; - - tmp.type_1 = CAM_ISP_GENERIC_BLOB_TYPE_CLOCK_CONFIG; - tmp.type_1 |= (sizeof(cam_isp_clock_config) + sizeof(tmp.extra_rdi_hz)) << 8; - static_assert((sizeof(cam_isp_clock_config) + sizeof(tmp.extra_rdi_hz)) == 0x38); - tmp.clock = { - .usage_type = 1, // dual mode - .num_rdi = 4, - .left_pix_hz = 404000000, - .right_pix_hz = 404000000, - .rdi_hz[0] = 404000000, - }; - - - tmp.type_2 = CAM_ISP_GENERIC_BLOB_TYPE_BW_CONFIG; - tmp.type_2 |= (sizeof(cam_isp_bw_config) + sizeof(tmp.extra_rdi_vote)) << 8; - static_assert((sizeof(cam_isp_bw_config) + sizeof(tmp.extra_rdi_vote)) == 0xe0); - tmp.bw = { - .usage_type = 1, // dual mode - .num_rdi = 4, - .left_pix_vote = { - .resource_id = 0, - .cam_bw_bps = 450000000, - .ext_bw_bps = 450000000, - }, - .rdi_vote[0] = { - .resource_id = 0, - .cam_bw_bps = 8706200000, - .ext_bw_bps = 8706200000, - }, - }; - - static_assert(offsetof(struct isp_packet, type_2) == 0x60); - - buf_desc[1].size = sizeof(tmp); - buf_desc[1].offset = io_mem_handle != 0 ? 0x60 : 0; - buf_desc[1].length = buf_desc[1].size - buf_desc[1].offset; - buf_desc[1].type = CAM_CMD_BUF_GENERIC; - buf_desc[1].meta_data = CAM_ISP_PACKET_META_GENERIC_BLOB_COMMON; - auto buf2 = mm.alloc(buf_desc[1].size, (uint32_t*)&buf_desc[1].mem_handle); - memcpy(buf2.get(), &tmp, sizeof(tmp)); - - if (io_mem_handle != 0) { - io_cfg[0].mem_handle[0] = io_mem_handle; - io_cfg[0].planes[0] = (struct cam_plane_cfg){ - .width = sensor->frame_width, - .height = sensor->frame_height + sensor->extra_height, - .plane_stride = sensor->frame_stride, - .slice_height = sensor->frame_height + sensor->extra_height, - .meta_stride = 0x0, // YUV has meta(stride=0x400, size=0x5000) - .meta_size = 0x0, - .meta_offset = 0x0, - .packer_config = 0x0, // 0xb for YUV - .mode_config = 0x0, // 0x9ef for YUV - .tile_config = 0x0, - .h_init = 0x0, - .v_init = 0x0, - }; - io_cfg[0].format = sensor->mipi_format; // CAM_FORMAT_UBWC_TP10 for YUV - io_cfg[0].color_space = CAM_COLOR_SPACE_BASE; // CAM_COLOR_SPACE_BT601_FULL for YUV - io_cfg[0].color_pattern = 0x5; // 0x0 for YUV - io_cfg[0].bpp = (sensor->mipi_format == CAM_FORMAT_MIPI_RAW_10 ? 
0xa : 0xc); // bits per pixel - io_cfg[0].resource_type = CAM_ISP_IFE_OUT_RES_RDI_0; // CAM_ISP_IFE_OUT_RES_FULL for YUV - io_cfg[0].fence = fence; - io_cfg[0].direction = CAM_BUF_OUTPUT; - io_cfg[0].subsample_pattern = 0x1; - io_cfg[0].framedrop_pattern = 0x1; - } - - int ret = device_config(multi_cam_state->isp_fd, session_handle, isp_dev_handle, cam_packet_handle); - assert(ret == 0); - if (ret != 0) { - LOGE("isp config failed"); - } -} - -void CameraState::enqueue_buffer(int i, bool dp) { - int ret; - uint64_t request_id = request_ids[i]; - - if (buf_handle[i] && sync_objs[i]) { - // wait - struct cam_sync_wait sync_wait = {0}; - sync_wait.sync_obj = sync_objs[i]; - sync_wait.timeout_ms = 50; // max dt tolerance, typical should be 23 - ret = do_cam_control(multi_cam_state->cam_sync_fd, CAM_SYNC_WAIT, &sync_wait, sizeof(sync_wait)); - if (ret != 0) { - LOGE("failed to wait for sync: %d %d", ret, sync_wait.sync_obj); - // TODO: handle frame drop cleanly - } - - buf.camera_bufs_metadata[i].timestamp_eof = (uint64_t)nanos_since_boot(); // set true eof - if (dp) buf.queue(i); - - // destroy old output fence - struct cam_sync_info sync_destroy = {0}; - sync_destroy.sync_obj = sync_objs[i]; - ret = do_cam_control(multi_cam_state->cam_sync_fd, CAM_SYNC_DESTROY, &sync_destroy, sizeof(sync_destroy)); - if (ret != 0) { - LOGE("failed to destroy sync object: %d %d", ret, sync_destroy.sync_obj); - } - } - - // create output fence - struct cam_sync_info sync_create = {0}; - strcpy(sync_create.name, "NodeOutputPortFence"); - ret = do_cam_control(multi_cam_state->cam_sync_fd, CAM_SYNC_CREATE, &sync_create, sizeof(sync_create)); - if (ret != 0) { - LOGE("failed to create fence: %d %d", ret, sync_create.sync_obj); - } - sync_objs[i] = sync_create.sync_obj; - - // schedule request with camera request manager - struct cam_req_mgr_sched_request req_mgr_sched_request = {0}; - req_mgr_sched_request.session_hdl = session_handle; - req_mgr_sched_request.link_hdl = link_handle; - req_mgr_sched_request.req_id = request_id; - ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_SCHED_REQ, &req_mgr_sched_request, sizeof(req_mgr_sched_request)); - if (ret != 0) { - LOGE("failed to schedule cam mgr request: %d %lu", ret, request_id); - } - - // poke sensor, must happen after schedule - sensors_poke(request_id); - - // submit request to the ife - config_isp(buf_handle[i], sync_objs[i], request_id, buf0_handle, 65632*(i+1)); -} - -void CameraState::enqueue_req_multi(uint64_t start, int n, bool dp) { - for (uint64_t i = start; i < start + n; ++i) { - request_ids[(i - 1) % FRAME_BUF_COUNT] = i; - enqueue_buffer((i - 1) % FRAME_BUF_COUNT, dp); - } -} - -// ******************* camera ******************* - -void CameraState::set_exposure_rect() { - // set areas for each camera, shouldn't be changed - std::vector> ae_targets = { - // (Rect, F) - std::make_pair((Rect){96, 250, 1734, 524}, 567.0), // wide - std::make_pair((Rect){96, 160, 1734, 986}, 2648.0), // road - std::make_pair((Rect){96, 242, 1736, 906}, 567.0) // driver - }; - int h_ref = 1208; - /* - exposure target intrinics is - [ - [F, 0, 0.5*ae_xywh[2]] - [0, F, 0.5*H-ae_xywh[1]] - [0, 0, 1] - ] - */ - auto ae_target = ae_targets[cc.camera_num]; - Rect xywh_ref = ae_target.first; - float fl_ref = ae_target.second; - - ae_xywh = (Rect){ - std::max(0, buf.out_img_width / 2 - (int)(fl_pix / fl_ref * xywh_ref.w / 2)), - std::max(0, buf.out_img_height / 2 - (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y))), - std::min((int)(fl_pix / fl_ref * xywh_ref.w), 
buf.out_img_width / 2 + (int)(fl_pix / fl_ref * xywh_ref.w / 2)), - std::min((int)(fl_pix / fl_ref * xywh_ref.h), buf.out_img_height / 2 + (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y))) - }; -} - -void CameraState::sensor_set_parameters() { - dc_gain_weight = sensor->dc_gain_min_weight; - gain_idx = sensor->analog_gain_rec_idx; - cur_ev[0] = cur_ev[1] = cur_ev[2] = (1 + dc_gain_weight * (sensor->dc_gain_factor-1) / sensor->dc_gain_max_weight) * sensor->sensor_analog_gains[gain_idx] * exposure_time; -} - -void CameraState::camera_map_bufs() { - for (int i = 0; i < FRAME_BUF_COUNT; i++) { - // configure ISP to put the image in place - struct cam_mem_mgr_map_cmd mem_mgr_map_cmd = {0}; - mem_mgr_map_cmd.mmu_hdls[0] = multi_cam_state->device_iommu; - mem_mgr_map_cmd.num_hdl = 1; - mem_mgr_map_cmd.flags = CAM_MEM_FLAG_HW_READ_WRITE; - mem_mgr_map_cmd.fd = buf.camera_bufs[i].fd; - int ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_MAP_BUF, &mem_mgr_map_cmd, sizeof(mem_mgr_map_cmd)); - LOGD("map buf req: (fd: %d) 0x%x %d", buf.camera_bufs[i].fd, mem_mgr_map_cmd.out.buf_handle, ret); - buf_handle[i] = mem_mgr_map_cmd.out.buf_handle; - } - enqueue_req_multi(1, FRAME_BUF_COUNT, 0); -} - -void CameraState::camera_init(VisionIpcServer * v, cl_device_id device_id, cl_context ctx) { - if (!enabled) return; - - LOGD("camera init %d", cc.camera_num); - buf.init(device_id, ctx, this, v, FRAME_BUF_COUNT, cc.stream_type); - camera_map_bufs(); - - fl_pix = cc.focal_len / sensor->pixel_size_mm; - set_exposure_rect(); -} - -void CameraState::camera_open() { - if (!enabled) return; - if (!openSensor()) { - return; - } +// high level camera state +class CameraState : public SpectraCamera { +public: + std::mutex exp_lock; - open = true; - configISP(); - configCSIPHY(); - linkDevices(); -} + int exposure_time = 5; + bool dc_gain_enabled = false; + int dc_gain_weight = 0; + int gain_idx = 0; + float analog_gain_frac = 0; -bool CameraState::openSensor() { - sensor_fd = open_v4l_by_name_and_index("cam-sensor-driver", cc.camera_num); - assert(sensor_fd >= 0); - LOGD("opened sensor for %d", cc.camera_num); + float cur_ev[3] = {}; + float best_ev_score = 0; + int new_exp_g = 0; + int new_exp_t = 0; - // init memorymanager for this camera - mm.init(multi_cam_state->video0_fd); + Rect ae_xywh = {}; + float measured_grey_fraction = 0; + float target_grey_fraction = 0.3; - LOGD("-- Probing sensor %d", cc.camera_num); + float fl_pix = 0; - auto init_sensor_lambda = [this](SensorInfo *s) { - sensor.reset(s); - int ret = sensors_init(); - if (ret == 0) { - sensor_set_parameters(); - } - return ret == 0; - }; + CameraState(SpectraMaster *master, const CameraConfig &config) : SpectraCamera(master, config) {}; + void update_exposure_score(float desired_ev, int exp_t, int exp_g_idx, float exp_gain); + void set_camera_exposure(float grey_frac); - // Try different sensors one by one until it success. 
- if (!init_sensor_lambda(new AR0231) && - !init_sensor_lambda(new OX03C10) && - !init_sensor_lambda(new OS04C10)) { - LOGE("** sensor %d FAILED bringup, disabling", cc.camera_num); - enabled = false; - return false; - } - LOGD("-- Probing sensor %d success", cc.camera_num); - - // create session - struct cam_req_mgr_session_info session_info = {}; - int ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_CREATE_SESSION, &session_info, sizeof(session_info)); - LOGD("get session: %d 0x%X", ret, session_info.session_hdl); - session_handle = session_info.session_hdl; - - // access the sensor - LOGD("-- Accessing sensor"); - auto sensor_dev_handle_ = device_acquire(sensor_fd, session_handle, nullptr); - assert(sensor_dev_handle_); - sensor_dev_handle = *sensor_dev_handle_; - LOGD("acquire sensor dev"); - - LOG("-- Configuring sensor"); - sensors_i2c(sensor->init_reg_array.data(), sensor->init_reg_array.size(), CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, sensor->data_word); - return true; -} + void set_exposure_rect(); + void run(); -void CameraState::configISP() { - // NOTE: to be able to disable road and wide road, we still have to configure the sensor over i2c - // If you don't do this, the strobe GPIO is an output (even in reset it seems!) - if (!enabled) return; + void init() { + fl_pix = cc.focal_len / sensor->pixel_size_mm; + set_exposure_rect(); - struct cam_isp_in_port_info in_port_info = { - .res_type = cc.phy, - .lane_type = CAM_ISP_LANE_TYPE_DPHY, - .lane_num = 4, - .lane_cfg = 0x3210, - - .vc = 0x0, - .dt = sensor->frame_data_type, - .format = sensor->mipi_format, - - .test_pattern = 0x2, // 0x3? - .usage_type = 0x0, - - .left_start = 0, - .left_stop = sensor->frame_width - 1, - .left_width = sensor->frame_width, - - .right_start = 0, - .right_stop = sensor->frame_width - 1, - .right_width = sensor->frame_width, - - .line_start = 0, - .line_stop = sensor->frame_height + sensor->extra_height - 1, - .height = sensor->frame_height + sensor->extra_height, - - .pixel_clk = 0x0, - .batch_size = 0x0, - .dsp_mode = CAM_ISP_DSP_MODE_NONE, - .hbi_cnt = 0x0, - .custom_csid = 0x0, - - .num_out_res = 0x1, - .data[0] = (struct cam_isp_out_port_info){ - .res_type = CAM_ISP_IFE_OUT_RES_RDI_0, - .format = sensor->mipi_format, - .width = sensor->frame_width, - .height = sensor->frame_height + sensor->extra_height, - .comp_grp_id = 0x0, .split_point = 0x0, .secure_mode = 0x0, - }, - }; - struct cam_isp_resource isp_resource = { - .resource_id = CAM_ISP_RES_ID_PORT, - .handle_type = CAM_HANDLE_USER_POINTER, - .res_hdl = (uint64_t)&in_port_info, - .length = sizeof(in_port_info), + dc_gain_weight = sensor->dc_gain_min_weight; + gain_idx = sensor->analog_gain_rec_idx; + cur_ev[0] = cur_ev[1] = cur_ev[2] = (1 + dc_gain_weight * (sensor->dc_gain_factor-1) / sensor->dc_gain_max_weight) * sensor->sensor_analog_gains[gain_idx] * exposure_time; }; - auto isp_dev_handle_ = device_acquire(multi_cam_state->isp_fd, session_handle, &isp_resource); - assert(isp_dev_handle_); - isp_dev_handle = *isp_dev_handle_; - LOGD("acquire isp dev"); - - // config ISP - alloc_w_mmu_hdl(multi_cam_state->video0_fd, 984480, (uint32_t*)&buf0_handle, 0x20, CAM_MEM_FLAG_HW_READ_WRITE | CAM_MEM_FLAG_KMD_ACCESS | - CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, multi_cam_state->device_iommu, multi_cam_state->cdm_iommu); - config_isp(0, 0, 1, buf0_handle, 0); -} - -void CameraState::configCSIPHY() { - csiphy_fd = open_v4l_by_name_and_index("cam-csiphy-driver", cc.camera_num); - assert(csiphy_fd >= 0); - LOGD("opened csiphy for 
%d", cc.camera_num); - - struct cam_csiphy_acquire_dev_info csiphy_acquire_dev_info = {.combo_mode = 0}; - auto csiphy_dev_handle_ = device_acquire(csiphy_fd, session_handle, &csiphy_acquire_dev_info); - assert(csiphy_dev_handle_); - csiphy_dev_handle = *csiphy_dev_handle_; - LOGD("acquire csiphy dev"); - - // config csiphy - LOG("-- Config CSI PHY"); - { - uint32_t cam_packet_handle = 0; - int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*1; - auto pkt = mm.alloc(size, &cam_packet_handle); - pkt->num_cmd_buf = 1; - pkt->kmd_cmd_buf_index = -1; - pkt->header.size = size; - struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; - - buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_csiphy_info); - buf_desc[0].type = CAM_CMD_BUF_GENERIC; - - auto csiphy_info = mm.alloc(buf_desc[0].size, (uint32_t*)&buf_desc[0].mem_handle); - csiphy_info->lane_mask = 0x1f; - csiphy_info->lane_assign = 0x3210;// skip clk. How is this 16 bit for 5 channels?? - csiphy_info->csiphy_3phase = 0x0; // no 3 phase, only 2 conductors per lane - csiphy_info->combo_mode = 0x0; - csiphy_info->lane_cnt = 0x4; - csiphy_info->secure_mode = 0x0; - csiphy_info->settle_time = MIPI_SETTLE_CNT * 200000000ULL; - csiphy_info->data_rate = 48000000; // Calculated by camera_freqs.py - - int ret_ = device_config(csiphy_fd, session_handle, csiphy_dev_handle, cam_packet_handle); - assert(ret_ == 0); - } -} - -void CameraState::linkDevices() { - LOG("-- Link devices"); - struct cam_req_mgr_link_info req_mgr_link_info = {0}; - req_mgr_link_info.session_hdl = session_handle; - req_mgr_link_info.num_devices = 2; - req_mgr_link_info.dev_hdls[0] = isp_dev_handle; - req_mgr_link_info.dev_hdls[1] = sensor_dev_handle; - int ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_LINK, &req_mgr_link_info, sizeof(req_mgr_link_info)); - link_handle = req_mgr_link_info.link_hdl; - LOGD("link: %d session: 0x%X isp: 0x%X sensors: 0x%X link: 0x%X", ret, session_handle, isp_dev_handle, sensor_dev_handle, link_handle); - - struct cam_req_mgr_link_control req_mgr_link_control = {0}; - req_mgr_link_control.ops = CAM_REQ_MGR_LINK_ACTIVATE; - req_mgr_link_control.session_hdl = session_handle; - req_mgr_link_control.num_links = 1; - req_mgr_link_control.link_hdls[0] = link_handle; - ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_LINK_CONTROL, &req_mgr_link_control, sizeof(req_mgr_link_control)); - LOGD("link control: %d", ret); - - ret = device_control(csiphy_fd, CAM_START_DEV, session_handle, csiphy_dev_handle); - LOGD("start csiphy: %d", ret); - ret = device_control(multi_cam_state->isp_fd, CAM_START_DEV, session_handle, isp_dev_handle); - LOGD("start isp: %d", ret); - assert(ret == 0); - - // TODO: this is unneeded, should we be doing the start i2c in a different way? 
- //ret = device_control(sensor_fd, CAM_START_DEV, session_handle, sensor_dev_handle); - //LOGD("start sensor: %d", ret); -} - -void cameras_init(MultiCameraState *s, VisionIpcServer *v, cl_device_id device_id, cl_context ctx) { - LOG("-- Opening devices"); - // video0 is req_mgr, the target of many ioctls - s->video0_fd = HANDLE_EINTR(open("/dev/v4l/by-path/platform-soc:qcom_cam-req-mgr-video-index0", O_RDWR | O_NONBLOCK)); - assert(s->video0_fd >= 0); - LOGD("opened video0"); - - // video1 is cam_sync, the target of some ioctls - s->cam_sync_fd = HANDLE_EINTR(open("/dev/v4l/by-path/platform-cam_sync-video-index0", O_RDWR | O_NONBLOCK)); - assert(s->cam_sync_fd >= 0); - LOGD("opened video1 (cam_sync)"); - - // looks like there's only one of these - s->isp_fd = open_v4l_by_name_and_index("cam-isp"); - assert(s->isp_fd >= 0); - LOGD("opened isp"); - - // query icp for MMU handles - LOG("-- Query ICP for MMU handles"); - struct cam_isp_query_cap_cmd isp_query_cap_cmd = {0}; - struct cam_query_cap_cmd query_cap_cmd = {0}; - query_cap_cmd.handle_type = 1; - query_cap_cmd.caps_handle = (uint64_t)&isp_query_cap_cmd; - query_cap_cmd.size = sizeof(isp_query_cap_cmd); - int ret = do_cam_control(s->isp_fd, CAM_QUERY_CAP, &query_cap_cmd, sizeof(query_cap_cmd)); - assert(ret == 0); - LOGD("using MMU handle: %x", isp_query_cap_cmd.device_iommu.non_secure); - LOGD("using MMU handle: %x", isp_query_cap_cmd.cdm_iommu.non_secure); - s->device_iommu = isp_query_cap_cmd.device_iommu.non_secure; - s->cdm_iommu = isp_query_cap_cmd.cdm_iommu.non_secure; - - // subscribe - LOG("-- Subscribing"); - struct v4l2_event_subscription sub = {0}; - sub.type = V4L_EVENT_CAM_REQ_MGR_EVENT; - sub.id = V4L_EVENT_CAM_REQ_MGR_SOF_BOOT_TS; - ret = HANDLE_EINTR(ioctl(s->video0_fd, VIDIOC_SUBSCRIBE_EVENT, &sub)); - LOGD("req mgr subscribe: %d", ret); - - // open - s->driver_cam.camera_open(); - LOGD("driver camera opened"); - s->road_cam.camera_open(); - LOGD("road camera opened"); - s->wide_road_cam.camera_open(); - LOGD("wide road camera opened"); - - // init - s->driver_cam.camera_init(v, device_id, ctx); - s->road_cam.camera_init(v, device_id, ctx); - s->wide_road_cam.camera_init(v, device_id, ctx); -} - -void CameraState::camera_close() { - // stop devices - LOG("-- Stop devices %d", cc.camera_num); - - if (enabled) { - // ret = device_control(sensor_fd, CAM_STOP_DEV, session_handle, sensor_dev_handle); - // LOGD("stop sensor: %d", ret); - int ret = device_control(multi_cam_state->isp_fd, CAM_STOP_DEV, session_handle, isp_dev_handle); - LOGD("stop isp: %d", ret); - ret = device_control(csiphy_fd, CAM_STOP_DEV, session_handle, csiphy_dev_handle); - LOGD("stop csiphy: %d", ret); - // link control stop - LOG("-- Stop link control"); - struct cam_req_mgr_link_control req_mgr_link_control = {0}; - req_mgr_link_control.ops = CAM_REQ_MGR_LINK_DEACTIVATE; - req_mgr_link_control.session_hdl = session_handle; - req_mgr_link_control.num_links = 1; - req_mgr_link_control.link_hdls[0] = link_handle; - ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_LINK_CONTROL, &req_mgr_link_control, sizeof(req_mgr_link_control)); - LOGD("link control stop: %d", ret); - - // unlink - LOG("-- Unlink"); - struct cam_req_mgr_unlink_info req_mgr_unlink_info = {0}; - req_mgr_unlink_info.session_hdl = session_handle; - req_mgr_unlink_info.link_hdl = link_handle; - ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_UNLINK, &req_mgr_unlink_info, sizeof(req_mgr_unlink_info)); - LOGD("unlink: %d", ret); - - // release devices - LOGD("-- 
Release devices"); - ret = device_control(multi_cam_state->isp_fd, CAM_RELEASE_DEV, session_handle, isp_dev_handle); - LOGD("release isp: %d", ret); - ret = device_control(csiphy_fd, CAM_RELEASE_DEV, session_handle, csiphy_dev_handle); - LOGD("release csiphy: %d", ret); - - for (int i = 0; i < FRAME_BUF_COUNT; i++) { - release(multi_cam_state->video0_fd, buf_handle[i]); - } - LOGD("released buffers"); - } + // TODO: this should move to SpectraCamera + void handle_camera_event(void *evdat); +}; - int ret = device_control(sensor_fd, CAM_RELEASE_DEV, session_handle, sensor_dev_handle); - LOGD("release sensor: %d", ret); - - // destroyed session - struct cam_req_mgr_session_info session_info = {.session_hdl = session_handle}; - ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_DESTROY_SESSION, &session_info, sizeof(session_info)); - LOGD("destroyed session %d: %d", cc.camera_num, ret); -} void CameraState::handle_camera_event(void *evdat) { if (!enabled) return; @@ -824,6 +128,35 @@ void CameraState::handle_camera_event(void *evdat) { } } +void CameraState::set_exposure_rect() { + // set areas for each camera, shouldn't be changed + std::vector> ae_targets = { + // (Rect, F) + std::make_pair((Rect){96, 250, 1734, 524}, 567.0), // wide + std::make_pair((Rect){96, 160, 1734, 986}, 2648.0), // road + std::make_pair((Rect){96, 242, 1736, 906}, 567.0) // driver + }; + int h_ref = 1208; + /* + exposure target intrinics is + [ + [F, 0, 0.5*ae_xywh[2]] + [0, F, 0.5*H-ae_xywh[1]] + [0, 0, 1] + ] + */ + auto ae_target = ae_targets[cc.camera_num]; + Rect xywh_ref = ae_target.first; + float fl_ref = ae_target.second; + + ae_xywh = (Rect){ + std::max(0, buf.out_img_width / 2 - (int)(fl_pix / fl_ref * xywh_ref.w / 2)), + std::max(0, buf.out_img_height / 2 - (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y))), + std::min((int)(fl_pix / fl_ref * xywh_ref.w), buf.out_img_width / 2 + (int)(fl_pix / fl_ref * xywh_ref.w / 2)), + std::min((int)(fl_pix / fl_ref * xywh_ref.h), buf.out_img_height / 2 + (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y))) + }; +} + void CameraState::update_exposure_score(float desired_ev, int exp_t, int exp_g_idx, float exp_gain) { float score = sensor->getExposureScore(desired_ev, exp_t, exp_g_idx, exp_gain, gain_idx); if (score < best_ev_score) { @@ -939,13 +272,31 @@ void CameraState::set_camera_exposure(float grey_frac) { void CameraState::run() { util::set_thread_name(cc.publish_name); + std::vector pubs = {cc.publish_name}; + if (cc.stream_type == VISION_STREAM_ROAD) pubs.push_back("thumbnail"); + PubMaster pm(pubs); + for (uint32_t cnt = 0; !do_exit; ++cnt) { // Acquire the buffer; continue if acquisition fails if (!buf.acquire()) continue; MessageBuilder msg; auto framed = (msg.initEvent().*cc.init_camera_state)(); - fill_frame_data(framed, buf.cur_frame_data, this); + framed.setFrameId(buf.cur_frame_data.frame_id); + framed.setRequestId(buf.cur_frame_data.request_id); + framed.setTimestampEof(buf.cur_frame_data.timestamp_eof); + framed.setTimestampSof(buf.cur_frame_data.timestamp_sof); + framed.setIntegLines(buf.cur_frame_data.integ_lines); + framed.setGain(buf.cur_frame_data.gain); + framed.setHighConversionGain(buf.cur_frame_data.high_conversion_gain); + framed.setMeasuredGreyFraction(buf.cur_frame_data.measured_grey_fraction); + framed.setTargetGreyFraction(buf.cur_frame_data.target_grey_fraction); + framed.setProcessingTime(buf.cur_frame_data.processing_time); + + const float ev = cur_ev[buf.cur_frame_data.frame_id % 3]; + const float perc = util::map_val(ev, 
sensor->min_ev, sensor->max_ev, 0.0f, 100.0f);
+    framed.setExposureValPercent(perc);
+    framed.setSensor(sensor->image_sensor);
 
    // Log raw frames for road camera
    if (env_log_raw_frames && cc.stream_type == VISION_STREAM_ROAD && cnt % 100 == 5) {  // no overlap with qlog decimation
@@ -957,38 +308,58 @@ void CameraState::run() {
    set_camera_exposure(set_exposure_target(&buf, ae_xywh, 2, cc.stream_type != VISION_STREAM_DRIVER ? 2 : 4));
 
    // Send the message
-    multi_cam_state->pm->send(cc.publish_name, msg);
+    pm.send(cc.publish_name, msg);
    if (cc.stream_type == VISION_STREAM_ROAD && cnt % 100 == 3) {
-      publish_thumbnail(multi_cam_state->pm, &buf);  // this takes 10ms???
+      publish_thumbnail(&pm, &buf);  // this takes 10ms???
    }
  }
 }
 
-MultiCameraState::MultiCameraState()
-  : driver_cam(this, DRIVER_CAMERA_CONFIG),
-    road_cam(this, ROAD_CAMERA_CONFIG),
-    wide_road_cam(this, WIDE_ROAD_CAMERA_CONFIG) {
-}
+void camerad_thread() {
+  /*
+  TODO: future cleanups
+  - centralize enabled handling
+  - open/init/etc mess
+  - no ISP stuff in this file
+  */
+
+  cl_device_id device_id = cl_get_device_id(CL_DEVICE_TYPE_DEFAULT);
+  const cl_context_properties props[] = {CL_CONTEXT_PRIORITY_HINT_QCOM, CL_PRIORITY_HINT_HIGH_QCOM, 0};
+  cl_context ctx = CL_CHECK_ERR(clCreateContext(props, 1, &device_id, NULL, NULL, &err));
+
+  VisionIpcServer v("camerad", device_id, ctx);
+
+  // *** initial ISP init ***
+  SpectraMaster m;
+  m.init();
+
+  // *** per-cam init ***
+  std::vector cams = {
+    new CameraState(&m, ROAD_CAMERA_CONFIG),
+    new CameraState(&m, WIDE_ROAD_CAMERA_CONFIG),
+    new CameraState(&m, DRIVER_CAMERA_CONFIG),
+  };
+  for (auto cam : cams) cam->camera_open();
+  for (auto cam : cams) cam->camera_init(&v, device_id, ctx);
+  for (auto cam : cams) cam->init();
+  v.start_listener();
 
-void cameras_run(MultiCameraState *s) {
  LOG("-- Starting threads");
  std::vector threads;
-  if (s->driver_cam.enabled) threads.emplace_back(&CameraState::run, &s->driver_cam);
-  if (s->road_cam.enabled) threads.emplace_back(&CameraState::run, &s->road_cam);
-  if (s->wide_road_cam.enabled) threads.emplace_back(&CameraState::run, &s->wide_road_cam);
+  for (auto cam : cams) {
+    if (cam->enabled) threads.emplace_back(&CameraState::run, cam);
+  }
 
  // start devices
  LOG("-- Starting devices");
-  s->driver_cam.sensors_start();
-  s->road_cam.sensors_start();
-  s->wide_road_cam.sensors_start();
+  for (auto cam : cams) cam->sensors_start();
 
  // poll events
  LOG("-- Dequeueing Video events");
  while (!do_exit) {
    struct pollfd fds[1] = {{0}};
-    fds[0].fd = s->video0_fd;
+    fds[0].fd = m.video0_fd;
    fds[0].events = POLLPRI;
 
    int ret = poll(fds, std::size(fds), 1000);
@@ -1011,15 +382,11 @@ void cameras_run(MultiCameraState *s) {
        do_exit = do_exit || event_data->u.frame_msg.frame_id > (1*20);
      }
 
-      if (event_data->session_hdl == s->road_cam.session_handle) {
-        s->road_cam.handle_camera_event(event_data);
-      } else if (event_data->session_hdl == s->wide_road_cam.session_handle) {
-        s->wide_road_cam.handle_camera_event(event_data);
-      } else if (event_data->session_hdl == s->driver_cam.session_handle) {
-        s->driver_cam.handle_camera_event(event_data);
-      } else {
-        LOGE("Unknown vidioc event source");
-        assert(false);
+      for (auto cam : cams) {
+        if (event_data->session_hdl == cam->session_handle) {
+          cam->handle_camera_event(event_data);
+          break;
+        }
      }
    } else {
      LOGE("unhandled event %d\n", ev.type);
@@ -1030,6 +397,6 @@
  }
 
  LOG(" ************** STOPPING **************");
-  for (auto &t : threads) t.join();
+  for (auto cam : cams)
delete cam; } diff --git a/system/camerad/cameras/camera_qcom2.h b/system/camerad/cameras/camera_qcom2.h deleted file mode 100644 index bd53a326f3..0000000000 --- a/system/camerad/cameras/camera_qcom2.h +++ /dev/null @@ -1,156 +0,0 @@ -#pragma once - -#include -#include - -#include "media/cam_isp_ife.h" - -#include "system/camerad/cameras/camera_common.h" -#include "system/camerad/cameras/camera_util.h" -#include "system/camerad/sensors/sensor.h" -#include "common/util.h" - -#define FRAME_BUF_COUNT 4 - -struct CameraConfig { - int camera_num; - VisionStreamType stream_type; - float focal_len; // millimeters - const char *publish_name; - cereal::FrameData::Builder (cereal::Event::Builder::*init_camera_state)(); - bool enabled; - uint32_t phy; -}; - -const CameraConfig WIDE_ROAD_CAMERA_CONFIG = { - .camera_num = 0, - .stream_type = VISION_STREAM_WIDE_ROAD, - .focal_len = 1.71, - .publish_name = "wideRoadCameraState", - .init_camera_state = &cereal::Event::Builder::initWideRoadCameraState, - .enabled = !getenv("DISABLE_WIDE_ROAD"), - .phy = CAM_ISP_IFE_IN_RES_PHY_0, -}; - -const CameraConfig ROAD_CAMERA_CONFIG = { - .camera_num = 1, - .stream_type = VISION_STREAM_ROAD, - .focal_len = 8.0, - .publish_name = "roadCameraState", - .init_camera_state = &cereal::Event::Builder::initRoadCameraState, - .enabled = !getenv("DISABLE_ROAD"), - .phy = CAM_ISP_IFE_IN_RES_PHY_1, -}; - -const CameraConfig DRIVER_CAMERA_CONFIG = { - .camera_num = 2, - .stream_type = VISION_STREAM_DRIVER, - .focal_len = 1.71, - .publish_name = "driverCameraState", - .init_camera_state = &cereal::Event::Builder::initDriverCameraState, - .enabled = !getenv("DISABLE_DRIVER"), - .phy = CAM_ISP_IFE_IN_RES_PHY_2, -}; - -class CameraState { -public: - CameraConfig cc; - MultiCameraState *multi_cam_state = nullptr; - std::unique_ptr sensor; - bool enabled = true; - bool open = false; - - std::mutex exp_lock; - - int exposure_time = 5; - bool dc_gain_enabled = false; - int dc_gain_weight = 0; - int gain_idx = 0; - float analog_gain_frac = 0; - - float cur_ev[3] = {}; - float best_ev_score = 0; - int new_exp_g = 0; - int new_exp_t = 0; - - Rect ae_xywh = {}; - float measured_grey_fraction = 0; - float target_grey_fraction = 0.3; - - unique_fd sensor_fd; - unique_fd csiphy_fd; - - float fl_pix = 0; - - CameraState(MultiCameraState *multi_camera_state, const CameraConfig &config); - ~CameraState(); - void handle_camera_event(void *evdat); - void update_exposure_score(float desired_ev, int exp_t, int exp_g_idx, float exp_gain); - void set_camera_exposure(float grey_frac); - - void sensors_start(); - - void camera_open(); - void set_exposure_rect(); - void sensor_set_parameters(); - void camera_map_bufs(); - void camera_init(VisionIpcServer *v, cl_device_id device_id, cl_context ctx); - void camera_close(); - void run(); - - int32_t session_handle = -1; - int32_t sensor_dev_handle = -1; - int32_t isp_dev_handle = -1; - int32_t csiphy_dev_handle = -1; - - int32_t link_handle = -1; - - int buf0_handle = 0; - int buf_handle[FRAME_BUF_COUNT] = {}; - int sync_objs[FRAME_BUF_COUNT] = {}; - uint64_t request_ids[FRAME_BUF_COUNT] = {}; - uint64_t request_id_last = 0; - uint64_t frame_id_last = 0; - uint64_t idx_offset = 0; - bool skipped = true; - - CameraBuf buf; - MemoryManager mm; - - void config_isp(int io_mem_handle, int fence, int request_id, int buf0_mem_handle, int buf0_offset); - void enqueue_req_multi(uint64_t start, int n, bool dp); - void enqueue_buffer(int i, bool dp); - int clear_req_queue(); - - int sensors_init(); - void 
sensors_poke(int request_id); - void sensors_i2c(const struct i2c_random_wr_payload* dat, int len, int op_code, bool data_word); - -private: - bool openSensor(); - void configISP(); - void configCSIPHY(); - void linkDevices(); -}; - -class MultiCameraState { -public: - MultiCameraState(); - ~MultiCameraState() { - if (pm != nullptr) { - delete pm; - } - }; - - unique_fd video0_fd; - unique_fd cam_sync_fd; - unique_fd isp_fd; - int device_iommu = -1; - int cdm_iommu = -1; - - CameraState road_cam; - CameraState wide_road_cam; - CameraState driver_cam; - - PubMaster *pm = nullptr; -}; diff --git a/system/camerad/cameras/spectra.cc b/system/camerad/cameras/spectra.cc new file mode 100644 index 0000000000..609d77bf0f --- /dev/null +++ b/system/camerad/cameras/spectra.cc @@ -0,0 +1,695 @@ +#include + +#include +#include +#include +#include + +#include "media/cam_defs.h" +#include "media/cam_isp.h" +#include "media/cam_isp_ife.h" +#include "media/cam_req_mgr.h" +#include "media/cam_sensor_cmn_header.h" +#include "media/cam_sync.h" + +#include "common/util.h" +#include "common/swaglog.h" + +#include "system/camerad/cameras/spectra.h" + + +// *** helpers *** + +static cam_cmd_power *power_set_wait(cam_cmd_power *power, int16_t delay_ms) { + cam_cmd_unconditional_wait *unconditional_wait = (cam_cmd_unconditional_wait *)((char *)power + (sizeof(struct cam_cmd_power) + (power->count - 1) * sizeof(struct cam_power_settings))); + unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT; + unconditional_wait->delay = delay_ms; + unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND; + return (struct cam_cmd_power *)(unconditional_wait + 1); +} + +// *** SpectraMaster *** + +void SpectraMaster::init() { + LOG("-- Opening devices"); + // video0 is req_mgr, the target of many ioctls + video0_fd = HANDLE_EINTR(open("/dev/v4l/by-path/platform-soc:qcom_cam-req-mgr-video-index0", O_RDWR | O_NONBLOCK)); + assert(video0_fd >= 0); + LOGD("opened video0"); + + // video1 is cam_sync, the target of some ioctls + cam_sync_fd = HANDLE_EINTR(open("/dev/v4l/by-path/platform-cam_sync-video-index0", O_RDWR | O_NONBLOCK)); + assert(cam_sync_fd >= 0); + LOGD("opened video1 (cam_sync)"); + + // looks like there's only one of these + isp_fd = open_v4l_by_name_and_index("cam-isp"); + assert(isp_fd >= 0); + LOGD("opened isp"); + + // query icp for MMU handles + LOG("-- Query ICP for MMU handles"); + struct cam_isp_query_cap_cmd isp_query_cap_cmd = {0}; + struct cam_query_cap_cmd query_cap_cmd = {0}; + query_cap_cmd.handle_type = 1; + query_cap_cmd.caps_handle = (uint64_t)&isp_query_cap_cmd; + query_cap_cmd.size = sizeof(isp_query_cap_cmd); + int ret = do_cam_control(isp_fd, CAM_QUERY_CAP, &query_cap_cmd, sizeof(query_cap_cmd)); + assert(ret == 0); + LOGD("using MMU handle: %x", isp_query_cap_cmd.device_iommu.non_secure); + LOGD("using MMU handle: %x", isp_query_cap_cmd.cdm_iommu.non_secure); + device_iommu = isp_query_cap_cmd.device_iommu.non_secure; + cdm_iommu = isp_query_cap_cmd.cdm_iommu.non_secure; + + // subscribe + LOG("-- Subscribing"); + struct v4l2_event_subscription sub = {0}; + sub.type = V4L_EVENT_CAM_REQ_MGR_EVENT; + sub.id = V4L_EVENT_CAM_REQ_MGR_SOF_BOOT_TS; + ret = HANDLE_EINTR(ioctl(video0_fd, VIDIOC_SUBSCRIBE_EVENT, &sub)); + LOGD("req mgr subscribe: %d", ret); +} + +// *** SpectraCamera *** + +SpectraCamera::SpectraCamera(SpectraMaster *master, const CameraConfig &config) + : m(master), + enabled(config.enabled) , + cc(config) { + mm.init(m->video0_fd); +} + +SpectraCamera::~SpectraCamera() { + if 
(open) { + camera_close(); + } +} + +int SpectraCamera::clear_req_queue() { + struct cam_req_mgr_flush_info req_mgr_flush_request = {0}; + req_mgr_flush_request.session_hdl = session_handle; + req_mgr_flush_request.link_hdl = link_handle; + req_mgr_flush_request.flush_type = CAM_REQ_MGR_FLUSH_TYPE_ALL; + int ret = do_cam_control(m->video0_fd, CAM_REQ_MGR_FLUSH_REQ, &req_mgr_flush_request, sizeof(req_mgr_flush_request)); + // LOGD("flushed all req: %d", ret); + return ret; +} + +void SpectraCamera::camera_open() { + if (!enabled) return; + + if (!openSensor()) { + return; + } + + open = true; + configISP(); + configCSIPHY(); + linkDevices(); +} + +void SpectraCamera::camera_init(VisionIpcServer *v, cl_device_id device_id, cl_context ctx) { + if (!enabled) return; + + LOGD("camera init %d", cc.camera_num); + buf.init(device_id, ctx, this, v, FRAME_BUF_COUNT, cc.stream_type); + camera_map_bufs(); +} + +void SpectraCamera::enqueue_req_multi(uint64_t start, int n, bool dp) { + for (uint64_t i = start; i < start + n; ++i) { + request_ids[(i - 1) % FRAME_BUF_COUNT] = i; + enqueue_buffer((i - 1) % FRAME_BUF_COUNT, dp); + } +} + +void SpectraCamera::sensors_start() { + if (!enabled) return; + LOGD("starting sensor %d", cc.camera_num); + sensors_i2c(sensor->start_reg_array.data(), sensor->start_reg_array.size(), CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, sensor->data_word); +} + +void SpectraCamera::sensors_poke(int request_id) { + uint32_t cam_packet_handle = 0; + int size = sizeof(struct cam_packet); + auto pkt = mm.alloc(size, &cam_packet_handle); + pkt->num_cmd_buf = 0; + pkt->kmd_cmd_buf_index = -1; + pkt->header.size = size; + pkt->header.op_code = CAM_SENSOR_PACKET_OPCODE_SENSOR_NOP; + pkt->header.request_id = request_id; + + int ret = device_config(sensor_fd, session_handle, sensor_dev_handle, cam_packet_handle); + if (ret != 0) { + LOGE("** sensor %d FAILED poke, disabling", cc.camera_num); + enabled = false; + return; + } +} + +void SpectraCamera::sensors_i2c(const struct i2c_random_wr_payload* dat, int len, int op_code, bool data_word) { + // LOGD("sensors_i2c: %d", len); + uint32_t cam_packet_handle = 0; + int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*1; + auto pkt = mm.alloc(size, &cam_packet_handle); + pkt->num_cmd_buf = 1; + pkt->kmd_cmd_buf_index = -1; + pkt->header.size = size; + pkt->header.op_code = op_code; + struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; + + buf_desc[0].size = buf_desc[0].length = sizeof(struct i2c_rdwr_header) + len*sizeof(struct i2c_random_wr_payload); + buf_desc[0].type = CAM_CMD_BUF_I2C; + + auto i2c_random_wr = mm.alloc(buf_desc[0].size, (uint32_t*)&buf_desc[0].mem_handle); + i2c_random_wr->header.count = len; + i2c_random_wr->header.op_code = 1; + i2c_random_wr->header.cmd_type = CAMERA_SENSOR_CMD_TYPE_I2C_RNDM_WR; + i2c_random_wr->header.data_type = data_word ? 
CAMERA_SENSOR_I2C_TYPE_WORD : CAMERA_SENSOR_I2C_TYPE_BYTE; + i2c_random_wr->header.addr_type = CAMERA_SENSOR_I2C_TYPE_WORD; + memcpy(i2c_random_wr->random_wr_payload, dat, len*sizeof(struct i2c_random_wr_payload)); + + int ret = device_config(sensor_fd, session_handle, sensor_dev_handle, cam_packet_handle); + if (ret != 0) { + LOGE("** sensor %d FAILED i2c, disabling", cc.camera_num); + enabled = false; + return; + } +} + +int SpectraCamera::sensors_init() { + uint32_t cam_packet_handle = 0; + int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2; + auto pkt = mm.alloc(size, &cam_packet_handle); + pkt->num_cmd_buf = 2; + pkt->kmd_cmd_buf_index = -1; + pkt->header.op_code = 0x1000000 | CAM_SENSOR_PACKET_OPCODE_SENSOR_PROBE; + pkt->header.size = size; + struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; + + buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_cmd_i2c_info) + sizeof(struct cam_cmd_probe); + buf_desc[0].type = CAM_CMD_BUF_LEGACY; + auto i2c_info = mm.alloc(buf_desc[0].size, (uint32_t*)&buf_desc[0].mem_handle); + auto probe = (struct cam_cmd_probe *)(i2c_info.get() + 1); + + probe->camera_id = cc.camera_num; + i2c_info->slave_addr = sensor->getSlaveAddress(cc.camera_num); + // 0(I2C_STANDARD_MODE) = 100khz, 1(I2C_FAST_MODE) = 400khz + //i2c_info->i2c_freq_mode = I2C_STANDARD_MODE; + i2c_info->i2c_freq_mode = I2C_FAST_MODE; + i2c_info->cmd_type = CAMERA_SENSOR_CMD_TYPE_I2C_INFO; + + probe->data_type = CAMERA_SENSOR_I2C_TYPE_WORD; + probe->addr_type = CAMERA_SENSOR_I2C_TYPE_WORD; + probe->op_code = 3; // don't care? + probe->cmd_type = CAMERA_SENSOR_CMD_TYPE_PROBE; + probe->reg_addr = sensor->probe_reg_addr; + probe->expected_data = sensor->probe_expected_data; + probe->data_mask = 0; + + //buf_desc[1].size = buf_desc[1].length = 148; + buf_desc[1].size = buf_desc[1].length = 196; + buf_desc[1].type = CAM_CMD_BUF_I2C; + auto power_settings = mm.alloc(buf_desc[1].size, (uint32_t*)&buf_desc[1].mem_handle); + + // power on + struct cam_cmd_power *power = power_settings.get(); + power->count = 4; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP; + power->power_settings[0].power_seq_type = 3; // clock?? 
+ power->power_settings[1].power_seq_type = 1; // analog + power->power_settings[2].power_seq_type = 2; // digital + power->power_settings[3].power_seq_type = 8; // reset low + power = power_set_wait(power, 1); + + // set clock + power->count = 1; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP; + power->power_settings[0].power_seq_type = 0; + power->power_settings[0].config_val_low = sensor->mclk_frequency; + power = power_set_wait(power, 1); + + // reset high + power->count = 1; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP; + power->power_settings[0].power_seq_type = 8; + power->power_settings[0].config_val_low = 1; + // wait 650000 cycles @ 19.2 mhz = 33.8 ms + power = power_set_wait(power, 34); + + // probe happens here + + // disable clock + power->count = 1; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; + power->power_settings[0].power_seq_type = 0; + power->power_settings[0].config_val_low = 0; + power = power_set_wait(power, 1); + + // reset high + power->count = 1; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; + power->power_settings[0].power_seq_type = 8; + power->power_settings[0].config_val_low = 1; + power = power_set_wait(power, 1); + + // reset low + power->count = 1; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; + power->power_settings[0].power_seq_type = 8; + power->power_settings[0].config_val_low = 0; + power = power_set_wait(power, 1); + + // power off + power->count = 3; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; + power->power_settings[0].power_seq_type = 2; + power->power_settings[1].power_seq_type = 1; + power->power_settings[2].power_seq_type = 3; + + int ret = do_cam_control(sensor_fd, CAM_SENSOR_PROBE_CMD, (void *)(uintptr_t)cam_packet_handle, 0); + LOGD("probing the sensor: %d", ret); + return ret; +} + +void SpectraCamera::config_isp(int io_mem_handle, int fence, int request_id, int buf0_mem_handle, int buf0_offset) { + uint32_t cam_packet_handle = 0; + int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2; + if (io_mem_handle != 0) { + size += sizeof(struct cam_buf_io_cfg); + } + auto pkt = mm.alloc(size, &cam_packet_handle); + pkt->num_cmd_buf = 2; + pkt->kmd_cmd_buf_index = 0; + + if (io_mem_handle != 0) { + pkt->io_configs_offset = sizeof(struct cam_cmd_buf_desc)*pkt->num_cmd_buf; + pkt->num_io_configs = 1; + } + + if (io_mem_handle != 0) { + pkt->header.op_code = 0xf000001; + pkt->header.request_id = request_id; + } else { + pkt->header.op_code = 0xf000000; + pkt->header.request_id = 1; + } + pkt->header.size = size; + struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; + struct cam_buf_io_cfg *io_cfg = (struct cam_buf_io_cfg *)((char*)&pkt->payload + pkt->io_configs_offset); + + // TODO: support MMU + buf_desc[0].size = 65624; + buf_desc[0].length = 0; + buf_desc[0].type = CAM_CMD_BUF_DIRECT; + buf_desc[0].meta_data = 3; + buf_desc[0].mem_handle = buf0_mem_handle; + buf_desc[0].offset = buf0_offset; + + // parsed by cam_isp_packet_generic_blob_handler + struct isp_packet { + uint32_t type_0; + cam_isp_resource_hfr_config resource_hfr; + + uint32_t type_1; + cam_isp_clock_config clock; + uint64_t extra_rdi_hz[3]; + + uint32_t type_2; + cam_isp_bw_config bw; + struct cam_isp_bw_vote extra_rdi_vote[6]; + } __attribute__((packed)) tmp; + memset(&tmp, 0, sizeof(tmp)); + + tmp.type_0 = CAM_ISP_GENERIC_BLOB_TYPE_HFR_CONFIG; + tmp.type_0 |= sizeof(cam_isp_resource_hfr_config) << 8; + static_assert(sizeof(cam_isp_resource_hfr_config) == 0x20); + tmp.resource_hfr = { + .num_ports = 1, 
+void SpectraCamera::config_isp(int io_mem_handle, int fence, int request_id, int buf0_mem_handle, int buf0_offset) {
+  uint32_t cam_packet_handle = 0;
+  int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2;
+  if (io_mem_handle != 0) {
+    size += sizeof(struct cam_buf_io_cfg);
+  }
+  auto pkt = mm.alloc<struct cam_packet>(size, &cam_packet_handle);
+  pkt->num_cmd_buf = 2;
+  pkt->kmd_cmd_buf_index = 0;
+
+  if (io_mem_handle != 0) {
+    pkt->io_configs_offset = sizeof(struct cam_cmd_buf_desc)*pkt->num_cmd_buf;
+    pkt->num_io_configs = 1;
+  }
+
+  if (io_mem_handle != 0) {
+    pkt->header.op_code = 0xf000001;
+    pkt->header.request_id = request_id;
+  } else {
+    pkt->header.op_code = 0xf000000;
+    pkt->header.request_id = 1;
+  }
+  pkt->header.size = size;
+  struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload;
+  struct cam_buf_io_cfg *io_cfg = (struct cam_buf_io_cfg *)((char*)&pkt->payload + pkt->io_configs_offset);
+
+  // TODO: support MMU
+  buf_desc[0].size = 65624;
+  buf_desc[0].length = 0;
+  buf_desc[0].type = CAM_CMD_BUF_DIRECT;
+  buf_desc[0].meta_data = 3;
+  buf_desc[0].mem_handle = buf0_mem_handle;
+  buf_desc[0].offset = buf0_offset;
+
+  // parsed by cam_isp_packet_generic_blob_handler
+  struct isp_packet {
+    uint32_t type_0;
+    cam_isp_resource_hfr_config resource_hfr;
+
+    uint32_t type_1;
+    cam_isp_clock_config clock;
+    uint64_t extra_rdi_hz[3];
+
+    uint32_t type_2;
+    cam_isp_bw_config bw;
+    struct cam_isp_bw_vote extra_rdi_vote[6];
+  } __attribute__((packed)) tmp;
+  memset(&tmp, 0, sizeof(tmp));
+
+  tmp.type_0 = CAM_ISP_GENERIC_BLOB_TYPE_HFR_CONFIG;
+  tmp.type_0 |= sizeof(cam_isp_resource_hfr_config) << 8;
+  static_assert(sizeof(cam_isp_resource_hfr_config) == 0x20);
+  tmp.resource_hfr = {
+    .num_ports = 1, // 10 for YUV (but I don't think we need them)
+    .port_hfr_config[0] = {
+      .resource_type = CAM_ISP_IFE_OUT_RES_RDI_0, // CAM_ISP_IFE_OUT_RES_FULL for YUV
+      .subsample_pattern = 1,
+      .subsample_period = 0,
+      .framedrop_pattern = 1,
+      .framedrop_period = 0,
+    }};
+
+  tmp.type_1 = CAM_ISP_GENERIC_BLOB_TYPE_CLOCK_CONFIG;
+  tmp.type_1 |= (sizeof(cam_isp_clock_config) + sizeof(tmp.extra_rdi_hz)) << 8;
+  static_assert((sizeof(cam_isp_clock_config) + sizeof(tmp.extra_rdi_hz)) == 0x38);
+  tmp.clock = {
+    .usage_type = 1, // dual mode
+    .num_rdi = 4,
+    .left_pix_hz = 404000000,
+    .right_pix_hz = 404000000,
+    .rdi_hz[0] = 404000000,
+  };
+
+  tmp.type_2 = CAM_ISP_GENERIC_BLOB_TYPE_BW_CONFIG;
+  tmp.type_2 |= (sizeof(cam_isp_bw_config) + sizeof(tmp.extra_rdi_vote)) << 8;
+  static_assert((sizeof(cam_isp_bw_config) + sizeof(tmp.extra_rdi_vote)) == 0xe0);
+  tmp.bw = {
+    .usage_type = 1, // dual mode
+    .num_rdi = 4,
+    .left_pix_vote = {
+      .resource_id = 0,
+      .cam_bw_bps = 450000000,
+      .ext_bw_bps = 450000000,
+    },
+    .rdi_vote[0] = {
+      .resource_id = 0,
+      .cam_bw_bps = 8706200000,
+      .ext_bw_bps = 8706200000,
+    },
+  };
+
+  static_assert(offsetof(struct isp_packet, type_2) == 0x60);
+
+  buf_desc[1].size = sizeof(tmp);
+  buf_desc[1].offset = io_mem_handle != 0 ? 0x60 : 0;
+  buf_desc[1].length = buf_desc[1].size - buf_desc[1].offset;
+  buf_desc[1].type = CAM_CMD_BUF_GENERIC;
+  buf_desc[1].meta_data = CAM_ISP_PACKET_META_GENERIC_BLOB_COMMON;
+  auto buf2 = mm.alloc<uint32_t>(buf_desc[1].size, (uint32_t*)&buf_desc[1].mem_handle);
+  memcpy(buf2.get(), &tmp, sizeof(tmp));
+
+  if (io_mem_handle != 0) {
+    io_cfg[0].mem_handle[0] = io_mem_handle;
+    io_cfg[0].planes[0] = (struct cam_plane_cfg){
+      .width = sensor->frame_width,
+      .height = sensor->frame_height + sensor->extra_height,
+      .plane_stride = sensor->frame_stride,
+      .slice_height = sensor->frame_height + sensor->extra_height,
+      .meta_stride = 0x0, // YUV has meta(stride=0x400, size=0x5000)
+      .meta_size = 0x0,
+      .meta_offset = 0x0,
+      .packer_config = 0x0, // 0xb for YUV
+      .mode_config = 0x0, // 0x9ef for YUV
+      .tile_config = 0x0,
+      .h_init = 0x0,
+      .v_init = 0x0,
+    };
+    io_cfg[0].format = sensor->mipi_format; // CAM_FORMAT_UBWC_TP10 for YUV
+    io_cfg[0].color_space = CAM_COLOR_SPACE_BASE; // CAM_COLOR_SPACE_BT601_FULL for YUV
+    io_cfg[0].color_pattern = 0x5; // 0x0 for YUV
+    io_cfg[0].bpp = (sensor->mipi_format == CAM_FORMAT_MIPI_RAW_10 ? 0xa : 0xc); // bits per pixel
+    io_cfg[0].resource_type = CAM_ISP_IFE_OUT_RES_RDI_0; // CAM_ISP_IFE_OUT_RES_FULL for YUV
+    io_cfg[0].fence = fence;
+    io_cfg[0].direction = CAM_BUF_OUTPUT;
+    io_cfg[0].subsample_pattern = 0x1;
+    io_cfg[0].framedrop_pattern = 0x1;
+  }
+
+  int ret = device_config(m->isp_fd, session_handle, isp_dev_handle, cam_packet_handle);
+  assert(ret == 0);
+  if (ret != 0) {
+    LOGE("isp config failed");
+  }
+}
+
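+// Per-slot frame loop: wait on the slot's previous output fence, optionally hand the buffer
+// to VisionIPC (buf.queue), then recycle the slot with a fresh fence, schedule the next
+// request with the request manager, poke the sensor, and point the IFE at the buffer via
+// config_isp. The 50 ms fence timeout doubles as the frame-drop detector.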
+void SpectraCamera::enqueue_buffer(int i, bool dp) {
+  int ret;
+  uint64_t request_id = request_ids[i];
+
+  if (buf_handle[i] && sync_objs[i]) {
+    // wait
+    struct cam_sync_wait sync_wait = {0};
+    sync_wait.sync_obj = sync_objs[i];
+    sync_wait.timeout_ms = 50; // max dt tolerance, typical should be 23
+    ret = do_cam_control(m->cam_sync_fd, CAM_SYNC_WAIT, &sync_wait, sizeof(sync_wait));
+    if (ret != 0) {
+      LOGE("failed to wait for sync: %d %d", ret, sync_wait.sync_obj);
+      // TODO: handle frame drop cleanly
+    }
+
+    buf.camera_bufs_metadata[i].timestamp_eof = (uint64_t)nanos_since_boot(); // set true eof
+    if (dp) buf.queue(i);
+
+    // destroy old output fence
+    struct cam_sync_info sync_destroy = {0};
+    sync_destroy.sync_obj = sync_objs[i];
+    ret = do_cam_control(m->cam_sync_fd, CAM_SYNC_DESTROY, &sync_destroy, sizeof(sync_destroy));
+    if (ret != 0) {
+      LOGE("failed to destroy sync object: %d %d", ret, sync_destroy.sync_obj);
+    }
+  }
+
+  // create output fence
+  struct cam_sync_info sync_create = {0};
+  strcpy(sync_create.name, "NodeOutputPortFence");
+  ret = do_cam_control(m->cam_sync_fd, CAM_SYNC_CREATE, &sync_create, sizeof(sync_create));
+  if (ret != 0) {
+    LOGE("failed to create fence: %d %d", ret, sync_create.sync_obj);
+  }
+  sync_objs[i] = sync_create.sync_obj;
+
+  // schedule request with camera request manager
+  struct cam_req_mgr_sched_request req_mgr_sched_request = {0};
+  req_mgr_sched_request.session_hdl = session_handle;
+  req_mgr_sched_request.link_hdl = link_handle;
+  req_mgr_sched_request.req_id = request_id;
+  ret = do_cam_control(m->video0_fd, CAM_REQ_MGR_SCHED_REQ, &req_mgr_sched_request, sizeof(req_mgr_sched_request));
+  if (ret != 0) {
+    LOGE("failed to schedule cam mgr request: %d %lu", ret, request_id);
+  }
+
+  // poke sensor, must happen after schedule
+  sensors_poke(request_id);
+
+  // submit request to the ife
+  config_isp(buf_handle[i], sync_objs[i], request_id, buf0_handle, 65632*(i+1));
+}
+
+void SpectraCamera::camera_map_bufs() {
+  for (int i = 0; i < FRAME_BUF_COUNT; i++) {
+    // configure ISP to put the image in place
+    struct cam_mem_mgr_map_cmd mem_mgr_map_cmd = {0};
+    mem_mgr_map_cmd.mmu_hdls[0] = m->device_iommu;
+    mem_mgr_map_cmd.num_hdl = 1;
+    mem_mgr_map_cmd.flags = CAM_MEM_FLAG_HW_READ_WRITE;
+    mem_mgr_map_cmd.fd = buf.camera_bufs[i].fd;
+    int ret = do_cam_control(m->video0_fd, CAM_REQ_MGR_MAP_BUF, &mem_mgr_map_cmd, sizeof(mem_mgr_map_cmd));
+    LOGD("map buf req: (fd: %d) 0x%x %d", buf.camera_bufs[i].fd, mem_mgr_map_cmd.out.buf_handle, ret);
+    buf_handle[i] = mem_mgr_map_cmd.out.buf_handle;
+  }
+  enqueue_req_multi(1, FRAME_BUF_COUNT, 0);
+}
+
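+// Sensor bringup is probe-by-trial: each supported sensor driver is tried in turn and the
+// first one whose probe (sensors_init) succeeds is kept; if none respond, only this camera
+// is disabled rather than failing camerad as a whole.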
+bool SpectraCamera::openSensor() {
+  sensor_fd = open_v4l_by_name_and_index("cam-sensor-driver", cc.camera_num);
+  assert(sensor_fd >= 0);
+  LOGD("opened sensor for %d", cc.camera_num);
+
+  LOGD("-- Probing sensor %d", cc.camera_num);
+
+  auto init_sensor_lambda = [this](SensorInfo *s) {
+    sensor.reset(s);
+    return (sensors_init() == 0);
+  };
+
+  // Figure out which sensor we have
+  if (!init_sensor_lambda(new AR0231) &&
+      !init_sensor_lambda(new OX03C10) &&
+      !init_sensor_lambda(new OS04C10)) {
+    LOGE("** sensor %d FAILED bringup, disabling", cc.camera_num);
+    enabled = false;
+    return false;
+  }
+  LOGD("-- Probing sensor %d success", cc.camera_num);
+
+  // create session
+  struct cam_req_mgr_session_info session_info = {};
+  int ret = do_cam_control(m->video0_fd, CAM_REQ_MGR_CREATE_SESSION, &session_info, sizeof(session_info));
+  LOGD("get session: %d 0x%X", ret, session_info.session_hdl);
+  session_handle = session_info.session_hdl;
+
+  // access the sensor
+  LOGD("-- Accessing sensor");
+  auto sensor_dev_handle_ = device_acquire(sensor_fd, session_handle, nullptr);
+  assert(sensor_dev_handle_);
+  sensor_dev_handle = *sensor_dev_handle_;
+  LOGD("acquire sensor dev");
+
+  LOG("-- Configuring sensor");
+  sensors_i2c(sensor->init_reg_array.data(), sensor->init_reg_array.size(), CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, sensor->data_word);
+  return true;
+}
+
+void SpectraCamera::configISP() {
+  // NOTE: to be able to disable road and wide road, we still have to configure the sensor over i2c
+  // If you don't do this, the strobe GPIO is an output (even in reset it seems!)
+  if (!enabled) return;
+
+  struct cam_isp_in_port_info in_port_info = {
+    .res_type = cc.phy,
+    .lane_type = CAM_ISP_LANE_TYPE_DPHY,
+    .lane_num = 4,
+    .lane_cfg = 0x3210,
+
+    .vc = 0x0,
+    .dt = sensor->frame_data_type,
+    .format = sensor->mipi_format,
+
+    .test_pattern = 0x2, // 0x3?
+    .usage_type = 0x0,
+
+    .left_start = 0,
+    .left_stop = sensor->frame_width - 1,
+    .left_width = sensor->frame_width,
+
+    .right_start = 0,
+    .right_stop = sensor->frame_width - 1,
+    .right_width = sensor->frame_width,
+
+    .line_start = 0,
+    .line_stop = sensor->frame_height + sensor->extra_height - 1,
+    .height = sensor->frame_height + sensor->extra_height,
+
+    .pixel_clk = 0x0,
+    .batch_size = 0x0,
+    .dsp_mode = CAM_ISP_DSP_MODE_NONE,
+    .hbi_cnt = 0x0,
+    .custom_csid = 0x0,
+
+    .num_out_res = 0x1,
+    .data[0] = (struct cam_isp_out_port_info){
+      .res_type = CAM_ISP_IFE_OUT_RES_RDI_0,
+      .format = sensor->mipi_format,
+      .width = sensor->frame_width,
+      .height = sensor->frame_height + sensor->extra_height,
+      .comp_grp_id = 0x0, .split_point = 0x0, .secure_mode = 0x0,
+    },
+  };
+  struct cam_isp_resource isp_resource = {
+    .resource_id = CAM_ISP_RES_ID_PORT,
+    .handle_type = CAM_HANDLE_USER_POINTER,
+    .res_hdl = (uint64_t)&in_port_info,
+    .length = sizeof(in_port_info),
+  };
+
+  auto isp_dev_handle_ = device_acquire(m->isp_fd, session_handle, &isp_resource);
+  assert(isp_dev_handle_);
+  isp_dev_handle = *isp_dev_handle_;
+  LOGD("acquire isp dev");
+
+  // config ISP
+  alloc_w_mmu_hdl(m->video0_fd, 984480, (uint32_t*)&buf0_handle, 0x20, CAM_MEM_FLAG_HW_READ_WRITE | CAM_MEM_FLAG_KMD_ACCESS |
+                  CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, m->device_iommu, m->cdm_iommu);
+  config_isp(0, 0, 1, buf0_handle, 0);
+}
+
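+// The CSIPHY is acquired and configured separately from the IFE: 4 data lanes in D-PHY
+// mode, with the settle time and data rate taken from values calculated offline by
+// camera_freqs.py.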
+void SpectraCamera::configCSIPHY() {
+  csiphy_fd = open_v4l_by_name_and_index("cam-csiphy-driver", cc.camera_num);
+  assert(csiphy_fd >= 0);
+  LOGD("opened csiphy for %d", cc.camera_num);
+
+  struct cam_csiphy_acquire_dev_info csiphy_acquire_dev_info = {.combo_mode = 0};
+  auto csiphy_dev_handle_ = device_acquire(csiphy_fd, session_handle, &csiphy_acquire_dev_info);
+  assert(csiphy_dev_handle_);
+  csiphy_dev_handle = *csiphy_dev_handle_;
+  LOGD("acquire csiphy dev");
+
+  // config csiphy
+  LOG("-- Config CSI PHY");
+  {
+    uint32_t cam_packet_handle = 0;
+    int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*1;
+    auto pkt = mm.alloc<struct cam_packet>(size, &cam_packet_handle);
+    pkt->num_cmd_buf = 1;
+    pkt->kmd_cmd_buf_index = -1;
+    pkt->header.size = size;
+    struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload;
+
+    buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_csiphy_info);
+    buf_desc[0].type = CAM_CMD_BUF_GENERIC;
+
+    auto csiphy_info = mm.alloc<struct cam_csiphy_info>(buf_desc[0].size, (uint32_t*)&buf_desc[0].mem_handle);
+    csiphy_info->lane_mask = 0x1f;
+    csiphy_info->lane_assign = 0x3210; // skip clk. How is this 16 bit for 5 channels??
+    csiphy_info->csiphy_3phase = 0x0; // no 3 phase, only 2 conductors per lane
+    csiphy_info->combo_mode = 0x0;
+    csiphy_info->lane_cnt = 0x4;
+    csiphy_info->secure_mode = 0x0;
+    csiphy_info->settle_time = MIPI_SETTLE_CNT * 200000000ULL;
+    csiphy_info->data_rate = 48000000; // Calculated by camera_freqs.py
+
+    int ret_ = device_config(csiphy_fd, session_handle, csiphy_dev_handle, cam_packet_handle);
+    assert(ret_ == 0);
+  }
+}
+
+void SpectraCamera::linkDevices() {
+  LOG("-- Link devices");
+  struct cam_req_mgr_link_info req_mgr_link_info = {0};
+  req_mgr_link_info.session_hdl = session_handle;
+  req_mgr_link_info.num_devices = 2;
+  req_mgr_link_info.dev_hdls[0] = isp_dev_handle;
+  req_mgr_link_info.dev_hdls[1] = sensor_dev_handle;
+  int ret = do_cam_control(m->video0_fd, CAM_REQ_MGR_LINK, &req_mgr_link_info, sizeof(req_mgr_link_info));
+  link_handle = req_mgr_link_info.link_hdl;
+  LOGD("link: %d session: 0x%X isp: 0x%X sensors: 0x%X link: 0x%X", ret, session_handle, isp_dev_handle, sensor_dev_handle, link_handle);
+
+  struct cam_req_mgr_link_control req_mgr_link_control = {0};
+  req_mgr_link_control.ops = CAM_REQ_MGR_LINK_ACTIVATE;
+  req_mgr_link_control.session_hdl = session_handle;
+  req_mgr_link_control.num_links = 1;
+  req_mgr_link_control.link_hdls[0] = link_handle;
+  ret = do_cam_control(m->video0_fd, CAM_REQ_MGR_LINK_CONTROL, &req_mgr_link_control, sizeof(req_mgr_link_control));
+  LOGD("link control: %d", ret);
+
+  ret = device_control(csiphy_fd, CAM_START_DEV, session_handle, csiphy_dev_handle);
+  LOGD("start csiphy: %d", ret);
+  ret = device_control(m->isp_fd, CAM_START_DEV, session_handle, isp_dev_handle);
+  LOGD("start isp: %d", ret);
+  assert(ret == 0);
+}
+
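+// Teardown mirrors bringup in reverse: stop the IFE and CSIPHY, deactivate and unlink the
+// request manager link, release the acquired devices and mapped buffers, then release the
+// sensor and destroy the session.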
LOGD("release isp: %d", ret); + ret = device_control(csiphy_fd, CAM_RELEASE_DEV, session_handle, csiphy_dev_handle); + LOGD("release csiphy: %d", ret); + + for (int i = 0; i < FRAME_BUF_COUNT; i++) { + release(m->video0_fd, buf_handle[i]); + } + LOGD("released buffers"); + } + + int ret = device_control(sensor_fd, CAM_RELEASE_DEV, session_handle, sensor_dev_handle); + LOGD("release sensor: %d", ret); + + // destroyed session + struct cam_req_mgr_session_info session_info = {.session_hdl = session_handle}; + ret = do_cam_control(m->video0_fd, CAM_REQ_MGR_DESTROY_SESSION, &session_info, sizeof(session_info)); + LOGD("destroyed session %d: %d", cc.camera_num, ret); +} diff --git a/system/camerad/cameras/spectra.h b/system/camerad/cameras/spectra.h new file mode 100644 index 0000000000..baa5cbc32f --- /dev/null +++ b/system/camerad/cameras/spectra.h @@ -0,0 +1,87 @@ +#pragma once + +#include +#include + +#include "media/cam_isp_ife.h" + +#include "common/util.h" +#include "system/camerad/cameras/tici.h" +#include "system/camerad/cameras/camera_util.h" +#include "system/camerad/cameras/camera_common.h" +#include "system/camerad/sensors/sensor.h" + +#define FRAME_BUF_COUNT 4 + +const int MIPI_SETTLE_CNT = 33; // Calculated by camera_freqs.py + +// For use with the Spectra 280 ISP in the SDM845 +// https://github.com/commaai/agnos-kernel-sdm845 + + +class SpectraMaster { +public: + void init(); + + unique_fd video0_fd; + unique_fd cam_sync_fd; + unique_fd isp_fd; + int device_iommu = -1; + int cdm_iommu = -1; +}; + +class SpectraCamera { +public: + SpectraCamera(SpectraMaster *master, const CameraConfig &config); + ~SpectraCamera(); + + void camera_open(); + void camera_close(); + void camera_map_bufs(); + void camera_init(VisionIpcServer *v, cl_device_id device_id, cl_context ctx); + void config_isp(int io_mem_handle, int fence, int request_id, int buf0_mem_handle, int buf0_offset); + + int clear_req_queue(); + void enqueue_buffer(int i, bool dp); + void enqueue_req_multi(uint64_t start, int n, bool dp); + + int sensors_init(); + void sensors_start(); + void sensors_poke(int request_id); + void sensors_i2c(const struct i2c_random_wr_payload* dat, int len, int op_code, bool data_word); + + bool openSensor(); + void configISP(); + void configCSIPHY(); + void linkDevices(); + + // *** state *** + + bool open = false; + bool enabled = true; + CameraConfig cc; + std::unique_ptr sensor; + + unique_fd sensor_fd; + unique_fd csiphy_fd; + + int32_t session_handle = -1; + int32_t sensor_dev_handle = -1; + int32_t isp_dev_handle = -1; + int32_t csiphy_dev_handle = -1; + + int32_t link_handle = -1; + + int buf0_handle = 0; + int buf_handle[FRAME_BUF_COUNT] = {}; + int sync_objs[FRAME_BUF_COUNT] = {}; + uint64_t request_ids[FRAME_BUF_COUNT] = {}; + uint64_t request_id_last = 0; + uint64_t frame_id_last = 0; + uint64_t idx_offset = 0; + bool skipped = true; + + CameraBuf buf; + MemoryManager mm; + SpectraMaster *m; +}; diff --git a/system/camerad/cameras/tici.h b/system/camerad/cameras/tici.h new file mode 100644 index 0000000000..228ff8e284 --- /dev/null +++ b/system/camerad/cameras/tici.h @@ -0,0 +1,49 @@ +#pragma once + +#include "common/util.h" +#include "cereal/gen/cpp/log.capnp.h" +#include "msgq/visionipc/visionipc_server.h" + +#include "media/cam_isp_ife.h" + +// For the comma 3/3X three camera platform + +struct CameraConfig { + int camera_num; + VisionStreamType stream_type; + float focal_len; // millimeters + const char *publish_name; + cereal::FrameData::Builder 
+class SpectraCamera {
+public:
+  SpectraCamera(SpectraMaster *master, const CameraConfig &config);
+  ~SpectraCamera();
+
+  void camera_open();
+  void camera_close();
+  void camera_map_bufs();
+  void camera_init(VisionIpcServer *v, cl_device_id device_id, cl_context ctx);
+  void config_isp(int io_mem_handle, int fence, int request_id, int buf0_mem_handle, int buf0_offset);
+
+  int clear_req_queue();
+  void enqueue_buffer(int i, bool dp);
+  void enqueue_req_multi(uint64_t start, int n, bool dp);
+
+  int sensors_init();
+  void sensors_start();
+  void sensors_poke(int request_id);
+  void sensors_i2c(const struct i2c_random_wr_payload* dat, int len, int op_code, bool data_word);
+
+  bool openSensor();
+  void configISP();
+  void configCSIPHY();
+  void linkDevices();
+
+  // *** state ***
+
+  bool open = false;
+  bool enabled = true;
+  CameraConfig cc;
+  std::unique_ptr<const SensorInfo> sensor;
+
+  unique_fd sensor_fd;
+  unique_fd csiphy_fd;
+
+  int32_t session_handle = -1;
+  int32_t sensor_dev_handle = -1;
+  int32_t isp_dev_handle = -1;
+  int32_t csiphy_dev_handle = -1;
+
+  int32_t link_handle = -1;
+
+  int buf0_handle = 0;
+  int buf_handle[FRAME_BUF_COUNT] = {};
+  int sync_objs[FRAME_BUF_COUNT] = {};
+  uint64_t request_ids[FRAME_BUF_COUNT] = {};
+  uint64_t request_id_last = 0;
+  uint64_t frame_id_last = 0;
+  uint64_t idx_offset = 0;
+  bool skipped = true;
+
+  CameraBuf buf;
+  MemoryManager mm;
+  SpectraMaster *m;
+};
diff --git a/system/camerad/cameras/tici.h b/system/camerad/cameras/tici.h
new file mode 100644
index 0000000000..228ff8e284
--- /dev/null
+++ b/system/camerad/cameras/tici.h
@@ -0,0 +1,49 @@
+#pragma once
+
+#include "common/util.h"
+#include "cereal/gen/cpp/log.capnp.h"
+#include "msgq/visionipc/visionipc_server.h"
+
+#include "media/cam_isp_ife.h"
+
+// For the comma 3/3X three camera platform
+
+struct CameraConfig {
+  int camera_num;
+  VisionStreamType stream_type;
+  float focal_len; // millimeters
+  const char *publish_name;
+  cereal::FrameData::Builder (cereal::Event::Builder::*init_camera_state)();
+  bool enabled;
+  uint32_t phy;
+};
+
+const CameraConfig WIDE_ROAD_CAMERA_CONFIG = {
+  .camera_num = 0,
+  .stream_type = VISION_STREAM_WIDE_ROAD,
+  .focal_len = 1.71,
+  .publish_name = "wideRoadCameraState",
+  .init_camera_state = &cereal::Event::Builder::initWideRoadCameraState,
+  .enabled = !getenv("DISABLE_WIDE_ROAD"),
+  .phy = CAM_ISP_IFE_IN_RES_PHY_0,
+};
+
+const CameraConfig ROAD_CAMERA_CONFIG = {
+  .camera_num = 1,
+  .stream_type = VISION_STREAM_ROAD,
+  .focal_len = 8.0,
+  .publish_name = "roadCameraState",
+  .init_camera_state = &cereal::Event::Builder::initRoadCameraState,
+  .enabled = !getenv("DISABLE_ROAD"),
+  .phy = CAM_ISP_IFE_IN_RES_PHY_1,
+};
+
+const CameraConfig DRIVER_CAMERA_CONFIG = {
+  .camera_num = 2,
+  .stream_type = VISION_STREAM_DRIVER,
+  .focal_len = 1.71,
+  .publish_name = "driverCameraState",
+  .init_camera_state = &cereal::Event::Builder::initDriverCameraState,
+  .enabled = !getenv("DISABLE_DRIVER"),
+  .phy = CAM_ISP_IFE_IN_RES_PHY_2,
+};
diff --git a/system/camerad/sensors/ar0231.cc b/system/camerad/sensors/ar0231.cc
index 74f09dfa61..ce8b0c0a4b 100644
--- a/system/camerad/sensors/ar0231.cc
+++ b/system/camerad/sensors/ar0231.cc
@@ -1,7 +1,6 @@
 #include
 
 #include "common/swaglog.h"
-#include "system/camerad/cameras/camera_common.h"
 #include "system/camerad/sensors/sensor.h"
 
 namespace {
diff --git a/system/camerad/sensors/sensor.h b/system/camerad/sensors/sensor.h
index 22083eb62b..4ab09a9fd2 100644
--- a/system/camerad/sensors/sensor.h
+++ b/system/camerad/sensors/sensor.h
@@ -6,7 +6,8 @@
 #include
 #include
 #include "media/cam_sensor.h"
-#include "system/camerad/cameras/camera_common.h"
+
+#include "cereal/gen/cpp/log.capnp.h"
 #include "system/camerad/sensors/ar0231_registers.h"
 #include "system/camerad/sensors/ox03c10_registers.h"
 #include "system/camerad/sensors/os04c10_registers.h"