#include "system/camerad/cameras/camera_common.h" #include #include #include "common/swaglog.h" #include "system/camerad/cameras/spectra.h" void CameraBuf::init(cl_device_id device_id, cl_context context, SpectraCamera *cam, VisionIpcServer * v, int frame_cnt, VisionStreamType type) { vipc_server = v; stream_type = type; frame_buf_count = frame_cnt; const SensorInfo *sensor = cam->sensor.get(); is_raw = cam->output_type == ISP_RAW_OUTPUT; frame_metadata = std::make_unique(frame_buf_count); // RAW frames from ISP if (cam->output_type != ISP_IFE_PROCESSED) { camera_bufs_raw = std::make_unique(frame_buf_count); const int raw_frame_size = (sensor->frame_height + sensor->extra_height) * sensor->frame_stride; for (int i = 0; i < frame_buf_count; i++) { camera_bufs_raw[i].allocate(raw_frame_size); camera_bufs_raw[i].init_cl(device_id, context); } LOGD("allocated %d CL buffers", frame_buf_count); } out_img_width = sensor->frame_width; out_img_height = sensor->hdr_offset > 0 ? (sensor->frame_height - sensor->hdr_offset) / 2 : sensor->frame_height; // the encoder HW tells us the size it wants after setting it up. // TODO: VENUS_BUFFER_SIZE should give the size, but it's too small. dependent on encoder settings? size_t nv12_size = (out_img_width <= 1344 ? 2900 : 2346)*cam->stride; vipc_server->create_buffers_with_sizes(stream_type, VIPC_BUFFER_COUNT, out_img_width, out_img_height, nv12_size, cam->stride, cam->uv_offset); LOGD("created %d YUV vipc buffers with size %dx%d", VIPC_BUFFER_COUNT, cam->stride, cam->y_height); } CameraBuf::~CameraBuf() { if (camera_bufs_raw != nullptr) { for (int i = 0; i < frame_buf_count; i++) { camera_bufs_raw[i].free(); } } } bool CameraBuf::acquire() { if (!safe_queue.try_pop(cur_buf_idx, 0)) return false; if (frame_metadata[cur_buf_idx].frame_id == -1) { LOGE("no frame data? 
wtf"); return false; } cur_frame_data = frame_metadata[cur_buf_idx]; if (camera_bufs_raw) { cur_camera_buf = &camera_bufs_raw[cur_buf_idx]; } cur_yuv_buf = vipc_server->get_buffer(stream_type, cur_buf_idx); cur_frame_data.processing_time = (double)(cur_frame_data.timestamp_end_of_isp - cur_frame_data.timestamp_eof)*1e-9; VisionIpcBufExtra extra = { cur_frame_data.frame_id, cur_frame_data.timestamp_sof, cur_frame_data.timestamp_eof, }; cur_yuv_buf->set_frame_id(cur_frame_data.frame_id); vipc_server->send(cur_yuv_buf, &extra); return true; } void CameraBuf::queue(size_t buf_idx) { safe_queue.push(buf_idx); } // common functions kj::Array get_raw_frame_image(const CameraBuf *b) { const uint8_t *dat = (const uint8_t *)b->cur_camera_buf->addr; kj::Array frame_image = kj::heapArray(b->cur_camera_buf->len); uint8_t *resized_dat = frame_image.begin(); memcpy(resized_dat, dat, b->cur_camera_buf->len); return kj::mv(frame_image); } float calculate_exposure_value(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_skip) { int lum_med; uint32_t lum_binning[256] = {0}; const uint8_t *pix_ptr = b->cur_yuv_buf->y; unsigned int lum_total = 0; for (int y = ae_xywh.y; y < ae_xywh.y + ae_xywh.h; y += y_skip) { for (int x = ae_xywh.x; x < ae_xywh.x + ae_xywh.w; x += x_skip) { uint8_t lum = pix_ptr[(y * b->out_img_width) + x]; lum_binning[lum]++; lum_total += 1; } } // Find mean lumimance value unsigned int lum_cur = 0; for (lum_med = 255; lum_med >= 0; lum_med--) { lum_cur += lum_binning[lum_med]; if (lum_cur >= lum_total / 2) { break; } } return lum_med / 256.0; } int open_v4l_by_name_and_index(const char name[], int index, int flags) { for (int v4l_index = 0; /**/; ++v4l_index) { std::string v4l_name = util::read_file(util::string_format("/sys/class/video4linux/v4l-subdev%d/name", v4l_index)); if (v4l_name.empty()) return -1; if (v4l_name.find(name) == 0) { if (index == 0) { return HANDLE_EINTR(open(util::string_format("/dev/v4l-subdev%d", v4l_index).c_str(), flags)); } index--; } } }