data loss prevention, clean up later

pull/2047/head
Tici 5 years ago
parent 81bc145139
commit 4f503005a5
  1. 994
      selfdrive/camerad/1
  2. 1050
      selfdrive/camerad/@
  3. 189
      selfdrive/camerad/cameras/camera_qcom2.c
  4. 7
      selfdrive/camerad/cameras/camera_qcom2.h
  5. 1
      selfdrive/camerad/czmq
  6. 1
      selfdrive/camerad/exp
  7. 1
      selfdrive/camerad/libzmq
  8. 282
      selfdrive/camerad/main.cc
  9. 1083
      selfdrive/camerad/~
  10. 6
      selfdrive/common/visionipc.h

@ -0,0 +1,994 @@
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <unistd.h>
#include <signal.h>
#include <assert.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <poll.h>
#include <czmq.h>
#include "common/util.h"
#include "common/swaglog.h"
#include "camera_qcom2.h"
#include "media/cam_defs.h"
#include "media/cam_isp.h"
#include "media/cam_isp_ife.h"
#include "media/cam_sensor_cmn_header.h"
#include "media/cam_sensor.h"
#include "media/cam_sync.h"
#include "sensor2_i2c.h"
#define FRAME_WIDTH 1928
#define FRAME_HEIGHT 1208
//#define FRAME_STRIDE 1936 // for 8 bit output
#define FRAME_STRIDE 2416 // for 10 bit output
// Debug helper: print `len` bytes of `data` to stdout as hex, 16 bytes per row.
// `data` is never written, so it is const-qualified (backward compatible for callers).
static void hexdump(const uint8_t *data, int len) {
  for (int i = 0; i < len; i++) {
    if (i != 0 && i % 0x10 == 0) printf("\n");
    printf("%02X ", data[i]);
  }
  printf("\n");
}
extern volatile sig_atomic_t do_exit;
// Static capability table, indexed by camera id. Only the AR0231 entry is
// populated; camera_init() copies the entry for the requested id into
// CameraState and asserts frame_width != 0 to catch unsupported ids.
CameraInfo cameras_supported[CAMERA_ID_MAX] = {
[CAMERA_ID_AR0231] = {
.frame_width = FRAME_WIDTH,
.frame_height = FRAME_HEIGHT,
.frame_stride = FRAME_STRIDE, // 2416 bytes/row for 10-bit RAW output (see FRAME_STRIDE above)
.bayer = true,
.bayer_flip = 0,
.hdr = false
},
};
/*static void set_exposure(CameraState *s) {
int err = 0;
if (s->apply_exposure) {
err = s->apply_exposure(s);
}
if (err != 0) {
printf("%s\n", "err set exposure");
}
//LOGD("set exposure: %f %f - %d", exposure_frac, gain_frac, err);
}*/
// ************** low level camera helpers ****************
// Thin wrapper around the VIDIOC_CAM_CONTROL ioctl.
// size == 0 means `handle` is already a kernel mem handle (passed by value);
// otherwise `handle` is a user pointer to a struct of `size` bytes.
// Returns the raw ioctl result (-1 on failure, with errno set).
int cam_control(int fd, int op_code, void *handle, int size) {
  struct cam_control camcontrol = {0};
  camcontrol.op_code = op_code;
  camcontrol.handle = (uint64_t)handle;
  if (size == 0) {
    camcontrol.size = 8;
    camcontrol.handle_type = CAM_HANDLE_MEM_HANDLE;
  } else {
    camcontrol.size = size;
    camcontrol.handle_type = CAM_HANDLE_USER_POINTER;
  }
  int ret = ioctl(fd, VIDIOC_CAM_CONTROL, &camcontrol);
  if (ret == -1) {
    perror("VIDIOC_CAM_CONTROL failed");  // was perror("wat"): give a searchable message
  }
  return ret;
}
int device_control(int fd, int op_code, int session_handle, int dev_handle) {
// start stop and release are all the same
static struct cam_release_dev_cmd release_dev_cmd;
release_dev_cmd.session_handle = session_handle;
release_dev_cmd.dev_handle = dev_handle;
return cam_control(fd, op_code, &release_dev_cmd, sizeof(release_dev_cmd));
}
// Allocate a kernel camera buffer via CAM_REQ_MGR_ALLOC_BUF, optionally
// attaching up to two MMU handles, and mmap it into this process.
// Writes the kernel buf_handle into *handle; returns the mapped pointer
// (NULL if the kernel returned no usable fd).
void *alloc_w_mmu_hdl(int video0_fd, int len, int align, int flags, uint32_t *handle, int mmu_hdl, int mmu_hdl2) {
  // local (was static): a static struct kept stale mmu_hdls[] values across
  // calls; a zero-initialized local guarantees a clean request every time.
  struct cam_mem_mgr_alloc_cmd mem_mgr_alloc_cmd = {0};
  mem_mgr_alloc_cmd.len = len;
  mem_mgr_alloc_cmd.align = align;
  mem_mgr_alloc_cmd.flags = flags;
  mem_mgr_alloc_cmd.num_hdl = 0;
  if (mmu_hdl != 0) {
    mem_mgr_alloc_cmd.mmu_hdls[0] = mmu_hdl;
    mem_mgr_alloc_cmd.num_hdl++;
  }
  if (mmu_hdl2 != 0) {
    mem_mgr_alloc_cmd.mmu_hdls[1] = mmu_hdl2;
    mem_mgr_alloc_cmd.num_hdl++;
  }
  // previously the return value was silently ignored (and an unused `ret`
  // local declared); fail loudly like release() does
  int ret = cam_control(video0_fd, CAM_REQ_MGR_ALLOC_BUF, &mem_mgr_alloc_cmd, sizeof(mem_mgr_alloc_cmd));
  assert(ret == 0);
  *handle = mem_mgr_alloc_cmd.out.buf_handle;
  void *ptr = NULL;
  if (mem_mgr_alloc_cmd.out.fd > 0) {
    ptr = mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, mem_mgr_alloc_cmd.out.fd, 0);
    assert(ptr != MAP_FAILED);
  }
  LOGD("alloced: %x %d %llx mapped %p", mem_mgr_alloc_cmd.out.buf_handle, mem_mgr_alloc_cmd.out.fd, mem_mgr_alloc_cmd.out.vaddr, ptr);
  return ptr;
}
// Convenience wrapper: allocate a camera buffer with no MMU handles attached.
void *alloc(int video0_fd, int len, int align, int flags, uint32_t *handle) {
  return alloc_w_mmu_hdl(video0_fd, len, align, flags, handle, /*mmu_hdl=*/0, /*mmu_hdl2=*/0);
}
// Release a kernel camera buffer handle via CAM_REQ_MGR_RELEASE_BUF.
// Asserts on failure; there is no sane recovery from a leaked kernel buffer here.
void release(int video0_fd, uint32_t handle) {
  // local (was static): no reason to keep this struct alive between calls
  struct cam_mem_mgr_release_cmd mem_mgr_release_cmd = {0};
  mem_mgr_release_cmd.buf_handle = handle;
  int ret = cam_control(video0_fd, CAM_REQ_MGR_RELEASE_BUF, &mem_mgr_release_cmd, sizeof(mem_mgr_release_cmd));
  assert(ret == 0);
}
// Release a buffer handle and close the dma-buf fd that the kernel packs
// into the handle's upper 16 bits.
void release_fd(int video0_fd, uint32_t handle) {
  int buf_fd = handle >> 16;  // handle layout: fd in high 16 bits
  close(buf_fd);
  release(video0_fd, handle);
}
// ************** high level camera helpers ****************
// Submit a minimal sensor config packet (op_code 0x7f, no command buffers
// actually described) for `request_id`, so the request manager sees the
// sensor participate in that request.
void sensors_poke(struct CameraState *s, int request_id) {
  uint32_t cam_packet_handle = 0;
  int size = sizeof(struct cam_packet);
  struct cam_packet *pkt = alloc(s->video0_fd, size, 8,
    CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle);
  pkt->num_cmd_buf = 1;
  pkt->kmd_cmd_buf_index = -1;
  pkt->header.size = size;
  pkt->header.op_code = 0x7f;  // NOTE(review): presumably the sensor driver's NOP opcode — confirm against cam_sensor
  pkt->header.request_id = request_id;
  // removed: an unused `struct cam_cmd_buf_desc *buf_desc` local that was never read

  // local (was static): the static copy made this non-reentrant
  struct cam_config_dev_cmd config_dev_cmd = {0};
  config_dev_cmd.session_handle = s->session_handle;
  config_dev_cmd.dev_handle = s->sensor_dev_handle;
  config_dev_cmd.offset = 0;
  config_dev_cmd.packet_handle = cam_packet_handle;

  int ret = cam_control(s->sensor_fd, CAM_CONFIG_DEV, &config_dev_cmd, sizeof(config_dev_cmd));
  assert(ret == 0);

  release_fd(s->video0_fd, cam_packet_handle);
}
// Write `len` register/value pairs to the sensor over i2c by submitting a
// CAM_CONFIG_DEV packet (opcode `op_code`) containing one I2C random-write
// command buffer. Asserts on ioctl failure.
void sensors_i2c(struct CameraState *s, struct i2c_random_wr_payload* dat, int len, int op_code) {
  LOGD("sensors_i2c: %d", len);
  uint32_t cam_packet_handle = 0;
  int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*1;
  struct cam_packet *pkt = alloc(s->video0_fd, size, 8,
    CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle);
  pkt->num_cmd_buf = 1;
  pkt->kmd_cmd_buf_index = -1;
  pkt->header.size = size;
  pkt->header.op_code = op_code;
  struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload;

  // cam_cmd_i2c_random_wr already embeds one payload element, hence (len-1)
  buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_cmd_i2c_random_wr) + (len-1)*sizeof(struct i2c_random_wr_payload);
  buf_desc[0].type = CAM_CMD_BUF_I2C;
  struct cam_cmd_power *power = alloc(s->video0_fd, buf_desc[0].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &buf_desc[0].mem_handle);
  struct cam_cmd_i2c_random_wr *i2c_random_wr = (void*)power;  // same buffer, reinterpreted as the i2c command
  i2c_random_wr->header.count = len;
  i2c_random_wr->header.op_code = 1;
  i2c_random_wr->header.cmd_type = CAMERA_SENSOR_CMD_TYPE_I2C_RNDM_WR;
  i2c_random_wr->header.data_type = CAMERA_SENSOR_I2C_TYPE_WORD;
  i2c_random_wr->header.addr_type = CAMERA_SENSOR_I2C_TYPE_WORD;
  memcpy(i2c_random_wr->random_wr_payload, dat, len*sizeof(struct i2c_random_wr_payload));

  // local (was static): the static copy made this non-reentrant
  struct cam_config_dev_cmd config_dev_cmd = {0};
  config_dev_cmd.session_handle = s->session_handle;
  config_dev_cmd.dev_handle = s->sensor_dev_handle;
  config_dev_cmd.offset = 0;
  config_dev_cmd.packet_handle = cam_packet_handle;

  int ret = cam_control(s->sensor_fd, CAM_CONFIG_DEV, &config_dev_cmd, sizeof(config_dev_cmd));
  assert(ret == 0);

  release_fd(s->video0_fd, buf_desc[0].mem_handle);
  release_fd(s->video0_fd, cam_packet_handle);
}
// Debug/test exposure write: pushes a fixed register set (0x301D, 0x3012)
// to the sensor. Returns 0; sensors_i2c() asserts internally on failure.
static int test_apply_exposure(CameraState *s) {
  struct i2c_random_wr_payload exp_reg_array[] = {{0x301D, 0x03},
                                                  {0x3012, 0xFFF}};
  sensors_i2c(s, exp_reg_array, sizeof(exp_reg_array)/sizeof(struct i2c_random_wr_payload),
    CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG);
  printf("called \n");
  // BUGFIX: the old code declared `int err;` without ever assigning it and
  // returned it — reading an uninitialized variable is undefined behavior.
  return 0;
}
// Autoexposure stub: currently just applies the fixed test exposure.
// TODO: actually drive exposure from `grey_frac` toward a ~0.3 grey target.
static void do_autoexposure(CameraState *s, float grey_frac) {
  (void)grey_frac;  // unused until real AE is implemented; silences -Wunused-parameter
  test_apply_exposure(s);
}
// Probe for the image sensor on a camera port via CAM_SENSOR_PROBE_CMD.
// The packet carries two command buffers:
//   [0] i2c slave info + probe descriptor (expected chip id at reg 0x3000)
//   [1] the power sequence: power up rails, waits, (kernel probes here),
//       then power down — comments below describe each step.
void sensors_init(int video0_fd, int sensor_fd, int camera_num) {
  uint32_t cam_packet_handle = 0;
  int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2;
  struct cam_packet *pkt = alloc(video0_fd, size, 8,
    CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle);
  pkt->num_cmd_buf = 2;
  pkt->kmd_cmd_buf_index = -1;
  pkt->header.op_code = 0x1000003;
  pkt->header.size = size;
  struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload;

  // command buffer 0: i2c info followed immediately by the probe struct
  buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_cmd_i2c_info) + sizeof(struct cam_cmd_probe);
  buf_desc[0].type = CAM_CMD_BUF_LEGACY;
  struct cam_cmd_i2c_info *i2c_info = alloc(video0_fd, buf_desc[0].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &buf_desc[0].mem_handle);
  // BUGFIX: in the original, the cast bound before the `+`, so the addition was
  // scaled by sizeof(struct cam_cmd_probe) and `probe` pointed far past the
  // allocation. Do the byte arithmetic first, then cast.
  struct cam_cmd_probe *probe = (struct cam_cmd_probe *)((uint8_t *)i2c_info + sizeof(struct cam_cmd_i2c_info));

  switch (camera_num) {
    case 0:
      // port 0
      i2c_info->slave_addr = 0x20;
      probe->camera_id = 0;
      break;
    case 1:
      // port 1
      i2c_info->slave_addr = 0x30;
      probe->camera_id = 1;
      break;
    case 2:
      // port 2
      i2c_info->slave_addr = 0x20;
      probe->camera_id = 2;
      break;
  }

  // 0(I2C_STANDARD_MODE) = 100khz, 1(I2C_FAST_MODE) = 400khz
  //i2c_info->i2c_freq_mode = I2C_STANDARD_MODE;
  i2c_info->i2c_freq_mode = I2C_FAST_MODE;
  i2c_info->cmd_type = CAMERA_SENSOR_CMD_TYPE_I2C_INFO;

  probe->data_type = CAMERA_SENSOR_I2C_TYPE_WORD;
  probe->addr_type = CAMERA_SENSOR_I2C_TYPE_WORD;
  probe->op_code = 3; // don't care?
  probe->cmd_type = CAMERA_SENSOR_CMD_TYPE_PROBE;
  probe->reg_addr = 0x3000; //0x300a; //0x300b;
  probe->expected_data = 0x354; //0x7750; //0x885a;
  probe->data_mask = 0;

  // command buffer 1: power sequence (sized to hold the full sequence below)
  //buf_desc[1].size = buf_desc[1].length = 148;
  buf_desc[1].size = buf_desc[1].length = 196;
  buf_desc[1].type = CAM_CMD_BUF_I2C;
  struct cam_cmd_power *power = alloc(video0_fd, buf_desc[1].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &buf_desc[1].mem_handle);
  memset(power, 0, buf_desc[1].size);
  struct cam_cmd_unconditional_wait *unconditional_wait;

  // `power` walks forward through the buffer; each step appends one
  // variable-length cam_cmd_power or a cam_cmd_unconditional_wait.
  // (void* arithmetic is a GNU extension used throughout this file.)

  // 7750
  /*power->count = 2;
  power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP;
  power->power_settings[0].power_seq_type = 2;
  power->power_settings[1].power_seq_type = 8;
  power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));*/

  // 885a: bring up clock, analog, digital rails; hold reset low
  power->count = 4;
  power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP;
  power->power_settings[0].power_seq_type = 3; // clock??
  power->power_settings[1].power_seq_type = 1; // analog
  power->power_settings[2].power_seq_type = 2; // digital
  power->power_settings[3].power_seq_type = 8; // reset low
  power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));

  unconditional_wait = (void*)power;
  unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT;
  unconditional_wait->delay = 5;
  unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND;
  power = (void*)power + sizeof(struct cam_cmd_unconditional_wait);

  // set clock
  power->count = 1;
  power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP;
  power->power_settings[0].power_seq_type = 0;
  power->power_settings[0].config_val_low = 24000000; //Hz
  power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));

  unconditional_wait = (void*)power;
  unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT;
  unconditional_wait->delay = 10; // ms
  unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND;
  power = (void*)power + sizeof(struct cam_cmd_unconditional_wait);

  // 8,1 is this reset?
  power->count = 1;
  power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP;
  power->power_settings[0].power_seq_type = 8;
  power->power_settings[0].config_val_low = 1;
  power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));

  unconditional_wait = (void*)power;
  unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT;
  unconditional_wait->delay = 100; // ms
  unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND;
  power = (void*)power + sizeof(struct cam_cmd_unconditional_wait);

  // probe happens here

  // disable clock
  power->count = 1;
  power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN;
  power->power_settings[0].power_seq_type = 0;
  power->power_settings[0].config_val_low = 0;
  power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));

  unconditional_wait = (void*)power;
  unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT;
  unconditional_wait->delay = 1;
  unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND;
  power = (void*)power + sizeof(struct cam_cmd_unconditional_wait);

  // reset high
  power->count = 1;
  power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN;
  power->power_settings[0].power_seq_type = 8;
  power->power_settings[0].config_val_low = 1;
  power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));

  unconditional_wait = (void*)power;
  unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT;
  unconditional_wait->delay = 1;
  unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND;
  power = (void*)power + sizeof(struct cam_cmd_unconditional_wait);

  // reset low
  power->count = 1;
  power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN;
  power->power_settings[0].power_seq_type = 8;
  power->power_settings[0].config_val_low = 0;
  power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));

  unconditional_wait = (void*)power;
  unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT;
  unconditional_wait->delay = 1;
  unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND;
  power = (void*)power + sizeof(struct cam_cmd_unconditional_wait);

  // 7750
  /*power->count = 1;
  power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN;
  power->power_settings[0].power_seq_type = 2;
  power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));*/

  // 885a: take down digital, analog, clock rails
  power->count = 3;
  power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN;
  power->power_settings[0].power_seq_type = 2;
  power->power_settings[1].power_seq_type = 1;
  power->power_settings[2].power_seq_type = 3;
  power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));

  LOGD("probing the sensor");
  int ret = cam_control(sensor_fd, CAM_SENSOR_PROBE_CMD, (void *)cam_packet_handle, 0);
  assert(ret == 0);

  release_fd(video0_fd, buf_desc[0].mem_handle);
  release_fd(video0_fd, buf_desc[1].mem_handle);
  release_fd(video0_fd, cam_packet_handle);
}
// Build and submit an ISP (IFE) config packet via CAM_CONFIG_DEV.
//   io_mem_handle == 0: initial config packet (op 0xf000000, no IO config).
//   io_mem_handle != 0: per-request packet (op 0xf000001) that routes the
//     RDI0 output into the given buffer, signalled through `fence`.
//   buf0_mem_handle/buf0_offset: slice of the big KMD command buffer used
//     for this request (see the 984480-byte alloc in camera_open).
// NOTE(review): op codes and buffer sizes (65624, 324, 228) were presumably
// captured from a working trace — confirm against the cam_isp driver.
void config_isp(struct CameraState *s, int io_mem_handle, int fence, int request_id, int buf0_mem_handle, int buf0_offset) {
uint32_t cam_packet_handle = 0;
int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2;
if (io_mem_handle != 0) {
size += sizeof(struct cam_buf_io_cfg);
}
struct cam_packet *pkt = alloc(s->video0_fd, size, 8,
CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle);
pkt->num_cmd_buf = 2;
pkt->kmd_cmd_buf_index = 0;
if (io_mem_handle != 0) {
// IO config immediately follows the two command buffer descriptors
pkt->io_configs_offset = sizeof(struct cam_cmd_buf_desc)*2;
pkt->num_io_configs = 1;
}
if (io_mem_handle != 0) {
pkt->header.op_code = 0xf000001;
pkt->header.request_id = request_id;
} else {
pkt->header.op_code = 0xf000000;
}
pkt->header.size = size;
struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload;
// io_cfg only valid (and only written) when io_mem_handle != 0
struct cam_buf_io_cfg *io_cfg = (void*)&pkt->payload + pkt->io_configs_offset;
// TODO: support MMU
// buf 0: pre-allocated KMD direct command buffer slice
buf_desc[0].size = 65624;
buf_desc[0].length = 0;
buf_desc[0].type = CAM_CMD_BUF_DIRECT;
buf_desc[0].meta_data = 3;
buf_desc[0].mem_handle = buf0_mem_handle;
buf_desc[0].offset = buf0_offset;
// buf 1: generic blob with HFR/clock/bandwidth configs (see `tmp` below)
buf_desc[1].size = 324;
if (io_mem_handle != 0) {
// per-request packets only need the bandwidth blob at offset 0x60
buf_desc[1].length = 228; // 0 works here too
buf_desc[1].offset = 0x60;
} else {
buf_desc[1].length = 324;
}
buf_desc[1].type = CAM_CMD_BUF_GENERIC;
buf_desc[1].meta_data = CAM_ISP_PACKET_META_GENERIC_BLOB_COMMON;
uint32_t *buf2 = alloc(s->video0_fd, buf_desc[1].size, 0x20, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &buf_desc[1].mem_handle);
// cam_isp_packet_generic_blob_handler
uint32_t tmp[] = {
// size is 0x20, type is 0(CAM_ISP_GENERIC_BLOB_TYPE_HFR_CONFIG)
0x2000,
0x1, 0x0, CAM_ISP_IFE_OUT_RES_RDI_0, 0x1, 0x0, 0x1, 0x0, 0x0, // 1 port, CAM_ISP_IFE_OUT_RES_RDI_0
// size is 0x38, type is 1(CAM_ISP_GENERIC_BLOB_TYPE_CLOCK_CONFIG), clocks
0x3801,
0x1, 0x4, // Dual mode, 4 RDI wires
0x18148d00, 0x0, 0x18148d00, 0x0, 0x18148d00, 0x0, // rdi clock
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, // junk?
// offset 0x60
// size is 0xe0, type is 2(CAM_ISP_GENERIC_BLOB_TYPE_BW_CONFIG), bandwidth
0xe002,
0x1, 0x4, // 4 RDI
0x0, 0x0, 0x1ad27480, 0x0, 0x1ad27480, 0x0, // left_pix_vote
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, // right_pix_vote
0x0, 0x0, 0x6ee11c0, 0x2, 0x6ee11c0, 0x2, // rdi_vote
0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0};
memcpy(buf2, tmp, sizeof(tmp));
if (io_mem_handle != 0) {
// describe the output buffer: single-plane RAW10 frame, written by the IFE
io_cfg[0].mem_handle[0] = io_mem_handle;
io_cfg[0].planes[0] = (struct cam_plane_cfg){
.width = FRAME_WIDTH,
.height = FRAME_HEIGHT,
.plane_stride = FRAME_STRIDE,
.slice_height = FRAME_HEIGHT,
.meta_stride = 0x0,
.meta_size = 0x0,
.meta_offset = 0x0,
.packer_config = 0x0,
.mode_config = 0x0,
.tile_config = 0x0,
.h_init = 0x0,
.v_init = 0x0,
};
io_cfg[0].format = 0x3;
io_cfg[0].color_pattern = 0x5;
io_cfg[0].bpp = 0xc;
io_cfg[0].resource_type = CAM_ISP_IFE_OUT_RES_RDI_0;
io_cfg[0].fence = fence;
io_cfg[0].direction = 0x2; // output direction
io_cfg[0].subsample_pattern = 0x1;
io_cfg[0].framedrop_pattern = 0x1;
}
static struct cam_config_dev_cmd config_dev_cmd = {};
config_dev_cmd.session_handle = s->session_handle;
config_dev_cmd.dev_handle = s->isp_dev_handle;
config_dev_cmd.offset = 0;
config_dev_cmd.packet_handle = cam_packet_handle;
int ret = cam_control(s->isp_fd, CAM_CONFIG_DEV, &config_dev_cmd, sizeof(config_dev_cmd));
if (ret != 0) {
printf("ISP CONFIG FAILED\n");
}
release_fd(s->video0_fd, buf_desc[1].mem_handle);
//release(s->video0_fd, buf_desc[0].mem_handle);
release_fd(s->video0_fd, cam_packet_handle);
}
// Queue frame buffer `i` back to the ISP under a fresh request id:
// release the old mapping/fence (if any), schedule a request with the
// request manager, create a new output fence, map the VisionBuf's fd for
// the device IOMMU, poke the sensor, and point the ISP at the buffer.
void enqueue_buffer(struct CameraState *s, int i) {
int ret;
int request_id = (++s->sched_request_id);
bool first = true;
if (s->buf_handle[i]) {
// buffer was used before: drop the previous kernel mapping and fence
first = false;
release(s->video0_fd, s->buf_handle[i]);
// destroy old output fence
static struct cam_sync_info sync_destroy = {0};
strcpy(sync_destroy.name, "NodeOutputPortFence");
sync_destroy.sync_obj = s->sync_objs[i];
ret = cam_control(s->video1_fd, CAM_SYNC_DESTROY, &sync_destroy, sizeof(sync_destroy));
LOGD("fence destroy: %d %d", ret, sync_destroy.sync_obj);
}
// new request_ids
s->request_ids[i] = request_id;
// do stuff
static struct cam_req_mgr_sched_request req_mgr_sched_request = {0};
req_mgr_sched_request.session_hdl = s->session_handle;
req_mgr_sched_request.link_hdl = s->link_handle;
req_mgr_sched_request.req_id = request_id;
ret = cam_control(s->video0_fd, CAM_REQ_MGR_SCHED_REQ, &req_mgr_sched_request, sizeof(req_mgr_sched_request));
LOGD("sched req: %d %d", ret, request_id);
// create output fence (signalled by the ISP when the frame lands)
static struct cam_sync_info sync_create = {0};
strcpy(sync_create.name, "NodeOutputPortFence");
ret = cam_control(s->video1_fd, CAM_SYNC_CREATE, &sync_create, sizeof(sync_create));
LOGD("fence req: %d %d", ret, sync_create.sync_obj);
s->sync_objs[i] = sync_create.sync_obj;
// configure ISP to put the image in place
static struct cam_mem_mgr_map_cmd mem_mgr_map_cmd = {0};
mem_mgr_map_cmd.mmu_hdls[0] = s->device_iommu;
mem_mgr_map_cmd.num_hdl = 1;
mem_mgr_map_cmd.flags = 1;
mem_mgr_map_cmd.fd = s->bufs[i].fd;
ret = cam_control(s->video0_fd, CAM_REQ_MGR_MAP_BUF, &mem_mgr_map_cmd, sizeof(mem_mgr_map_cmd));
LOGD("map buf req: (fd: %d) 0x%x %d", s->bufs[i].fd, mem_mgr_map_cmd.out.buf_handle, ret);
s->buf_handle[i] = mem_mgr_map_cmd.out.buf_handle;
// poke sensor
sensors_poke(s, request_id);
LOGD("Poked sensor");
// push the buffer
// buf0 offsets are 65632-byte slices; slice 0 is used by the initial config
config_isp(s, s->buf_handle[i], s->sync_objs[i], request_id, s->buf0_handle, 65632*(i+1));
}
// ******************* camera *******************
// TBuffer release callback: when the consumer is done with frame buffer `i`,
// hand it straight back to the ISP for the next capture.
static void camera_release_buffer(void* cookie, int i) {
  CameraState *s = cookie;
  enqueue_buffer(s, i);  // removed an unused `int ret` local
}
// Initialize the software side of one camera: copy capability info, set up
// the frame tbuffer and identity transform.
// NOTE(review): `fps` is currently unused — presumably intended for a future
// frame-rate config; confirm before relying on it.
static void camera_init(CameraState *s, int camera_id, int camera_num, unsigned int fps) {
LOGD("camera init %d", camera_num);
// TODO: this is copied code from camera_webcam
assert(camera_id < ARRAYSIZE(cameras_supported));
s->ci = cameras_supported[camera_id];
assert(s->ci.frame_width != 0); // catches ids with no table entry
s->camera_num = camera_num;
s->frame_size = s->ci.frame_height * s->ci.frame_stride;
// buffers are recycled through camera_release_buffer -> enqueue_buffer
tbuffer_init2(&s->camera_tb, FRAME_BUF_COUNT, "frame", camera_release_buffer, s);
// identity transform: no warp applied to this camera's frames
s->transform = (mat3){{
1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
0.0, 0.0, 1.0,
}};
s->digital_gain = 1.0;
}
// Bring up one camera end-to-end: open sensor + csiphy subdevs, probe the
// sensor, create a session, acquire sensor/ISP/CSIPHY devices, configure the
// ISP and CSIPHY, program the sensor registers, link everything through the
// request manager, start the devices, and queue the initial frame buffers.
// The ioctl sequence is order-sensitive; do not reorder steps.
static void camera_open(CameraState *s, VisionBuf* b) {
int ret;
s->bufs = b;
// /dev/v4l-subdev10 is sensor, 11, 12, 13 are the other sensors
switch (s->camera_num) {
case 0:
s->sensor_fd = open("/dev/v4l-subdev10", O_RDWR | O_NONBLOCK);
break;
case 1:
s->sensor_fd = open("/dev/v4l-subdev11", O_RDWR | O_NONBLOCK);
break;
case 2:
s->sensor_fd = open("/dev/v4l-subdev12", O_RDWR | O_NONBLOCK);
break;
}
assert(s->sensor_fd >= 0);
LOGD("opened sensor");
// also at /dev/v4l-subdev3, 4, 5, 6
switch (s->camera_num) {
case 0:
s->csiphy_fd = open("/dev/v4l-subdev3", O_RDWR | O_NONBLOCK);
break;
case 1:
s->csiphy_fd = open("/dev/v4l-subdev4", O_RDWR | O_NONBLOCK);
break;
case 2:
s->csiphy_fd = open("/dev/v4l-subdev5", O_RDWR | O_NONBLOCK);
break;
}
assert(s->csiphy_fd >= 0);
LOGD("opened csiphy");
// probe the sensor
LOGD("-- Probing sensor %d", s->camera_num);
sensors_init(s->video0_fd, s->sensor_fd, s->camera_num);
// create the request manager session this camera's devices will join
memset(&s->req_mgr_session_info, 0, sizeof(s->req_mgr_session_info));
ret = cam_control(s->video0_fd, CAM_REQ_MGR_CREATE_SESSION, &s->req_mgr_session_info, sizeof(s->req_mgr_session_info));
LOGD("get session: %d 0x%X", ret, s->req_mgr_session_info.session_hdl);
s->session_handle = s->req_mgr_session_info.session_hdl;
// access the sensor
LOGD("-- Accessing sensor");
static struct cam_acquire_dev_cmd acquire_dev_cmd = {0};
acquire_dev_cmd.session_handle = s->session_handle;
acquire_dev_cmd.handle_type = CAM_HANDLE_USER_POINTER;
ret = cam_control(s->sensor_fd, CAM_ACQUIRE_DEV, &acquire_dev_cmd, sizeof(acquire_dev_cmd));
LOGD("acquire sensor dev: %d", ret);
s->sensor_dev_handle = acquire_dev_cmd.dev_handle;
// acquire the ISP: describe the CSI input port and one RDI0 RAW10 output
static struct cam_isp_resource isp_resource = {0};
acquire_dev_cmd.session_handle = s->session_handle;
acquire_dev_cmd.handle_type = CAM_HANDLE_USER_POINTER;
acquire_dev_cmd.num_resources = 1;
acquire_dev_cmd.resource_hdl = (uint64_t)&isp_resource;
isp_resource.resource_id = CAM_ISP_RES_ID_PORT;
// (1-1): cam_isp_in_port_info already embeds one out-port entry
isp_resource.length = sizeof(struct cam_isp_in_port_info) + sizeof(struct cam_isp_out_port_info)*(1-1);
isp_resource.handle_type = CAM_HANDLE_USER_POINTER;
struct cam_isp_in_port_info *in_port_info = malloc(isp_resource.length);
isp_resource.res_hdl = (uint64_t)in_port_info;
switch (s->camera_num) {
case 0:
in_port_info->res_type = CAM_ISP_IFE_IN_RES_PHY_0;
break;
case 1:
in_port_info->res_type = CAM_ISP_IFE_IN_RES_PHY_1;
break;
case 2:
in_port_info->res_type = CAM_ISP_IFE_IN_RES_PHY_2;
break;
}
in_port_info->lane_type = CAM_ISP_LANE_TYPE_DPHY;
in_port_info->lane_num = 4;
in_port_info->lane_cfg = 0x3210;
in_port_info->vc = 0x0;
//in_port_info->dt = 0x2C; //CSI_RAW12
//in_port_info->format = CAM_FORMAT_MIPI_RAW_12;
in_port_info->dt = 0x2B; //CSI_RAW10
in_port_info->format = CAM_FORMAT_MIPI_RAW_10;
in_port_info->test_pattern = 0x2; // 0x3?
in_port_info->usage_type = 0x0;
in_port_info->left_start = 0x0;
in_port_info->left_stop = FRAME_WIDTH - 1;
in_port_info->left_width = FRAME_WIDTH;
in_port_info->right_start = 0x0;
in_port_info->right_stop = FRAME_WIDTH - 1;
in_port_info->right_width = FRAME_WIDTH;
in_port_info->line_start = 0x0;
in_port_info->line_stop = FRAME_HEIGHT - 1;
in_port_info->height = FRAME_HEIGHT;
in_port_info->pixel_clk = 0x0;
in_port_info->batch_size = 0x0;
in_port_info->dsp_mode = 0x0;
in_port_info->hbi_cnt = 0x0;
in_port_info->custom_csid = 0x0;
in_port_info->num_out_res = 0x1;
in_port_info->data[0] = (struct cam_isp_out_port_info){
.res_type = CAM_ISP_IFE_OUT_RES_RDI_0,
//.format = CAM_FORMAT_MIPI_RAW_12,
.format = CAM_FORMAT_MIPI_RAW_10,
.width = FRAME_WIDTH,
.height = FRAME_HEIGHT,
.comp_grp_id = 0x0, .split_point = 0x0, .secure_mode = 0x0,
};
ret = cam_control(s->isp_fd, CAM_ACQUIRE_DEV, &acquire_dev_cmd, sizeof(acquire_dev_cmd));
LOGD("acquire isp dev: %d", ret);
free(in_port_info);
s->isp_dev_handle = acquire_dev_cmd.dev_handle;
// acquire the CSIPHY
static struct cam_csiphy_acquire_dev_info csiphy_acquire_dev_info = {0};
csiphy_acquire_dev_info.combo_mode = 0;
acquire_dev_cmd.session_handle = s->session_handle;
acquire_dev_cmd.handle_type = CAM_HANDLE_USER_POINTER;
acquire_dev_cmd.num_resources = 1;
acquire_dev_cmd.resource_hdl = (uint64_t)&csiphy_acquire_dev_info;
ret = cam_control(s->csiphy_fd, CAM_ACQUIRE_DEV, &acquire_dev_cmd, sizeof(acquire_dev_cmd));
LOGD("acquire csiphy dev: %d", ret);
s->csiphy_dev_handle = acquire_dev_cmd.dev_handle;
// acquires done
// config ISP
// 984480 = 15 * 65632: one 65632-byte KMD slice per request (see enqueue_buffer)
void *buf0 = alloc_w_mmu_hdl(s->video0_fd, 984480, 0x20, CAM_MEM_FLAG_HW_READ_WRITE | CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &s->buf0_handle, s->device_iommu, s->cdm_iommu);
config_isp(s, 0, 0, 1, s->buf0_handle, 0);
LOG("-- Configuring sensor");
sensors_i2c(s, init_array_ar0231, sizeof(init_array_ar0231)/sizeof(struct i2c_random_wr_payload),
CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG);
sensors_i2c(s, start_reg_array, sizeof(start_reg_array)/sizeof(struct i2c_random_wr_payload),
CAM_SENSOR_PACKET_OPCODE_SENSOR_STREAMON);
sensors_i2c(s, stop_reg_array, sizeof(stop_reg_array)/sizeof(struct i2c_random_wr_payload),
CAM_SENSOR_PACKET_OPCODE_SENSOR_STREAMOFF);
// config csiphy
LOG("-- Config CSI PHY");
{
uint32_t cam_packet_handle = 0;
struct cam_packet *pkt = alloc(s->video0_fd, sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*1, 8,
CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle);
pkt->num_cmd_buf = 1;
pkt->kmd_cmd_buf_index = -1;
struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload;
buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_csiphy_info);
buf_desc[0].type = CAM_CMD_BUF_GENERIC;
struct cam_csiphy_info *csiphy_info = alloc(s->video0_fd, buf_desc[0].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &buf_desc[0].mem_handle);
csiphy_info->lane_mask = 0x1f;
csiphy_info->lane_assign = 0x3210;// skip clk. How is this 16 bit for 5 channels??
csiphy_info->csiphy_3phase = 0x0; // no 3 phase, only 2 conductors per lane
csiphy_info->combo_mode = 0x0;
csiphy_info->lane_cnt = 0x4;
csiphy_info->secure_mode = 0x0;
csiphy_info->settle_time = 2800000000;
csiphy_info->data_rate = 44000000;
static struct cam_config_dev_cmd config_dev_cmd = {};
config_dev_cmd.session_handle = s->session_handle;
config_dev_cmd.dev_handle = s->csiphy_dev_handle;
config_dev_cmd.offset = 0;
config_dev_cmd.packet_handle = cam_packet_handle;
int ret = cam_control(s->csiphy_fd, CAM_CONFIG_DEV, &config_dev_cmd, sizeof(config_dev_cmd));
assert(ret == 0);
release(s->video0_fd, buf_desc[0].mem_handle);
release(s->video0_fd, cam_packet_handle);
}
// link devices
LOG("-- Link devices");
static struct cam_req_mgr_link_info req_mgr_link_info = {0};
req_mgr_link_info.session_hdl = s->session_handle;
req_mgr_link_info.num_devices = 2;
req_mgr_link_info.dev_hdls[0] = s->isp_dev_handle;
req_mgr_link_info.dev_hdls[1] = s->sensor_dev_handle;
ret = cam_control(s->video0_fd, CAM_REQ_MGR_LINK, &req_mgr_link_info, sizeof(req_mgr_link_info));
LOGD("link: %d", ret);
s->link_handle = req_mgr_link_info.link_hdl;
// ops = 0 activates the link (camera_close uses ops = 1 to stop it)
static struct cam_req_mgr_link_control req_mgr_link_control = {0};
req_mgr_link_control.ops = 0;
req_mgr_link_control.session_hdl = s->session_handle;
req_mgr_link_control.num_links = 1;
req_mgr_link_control.link_hdls[0] = s->link_handle;
ret = cam_control(s->video0_fd, CAM_REQ_MGR_LINK_CONTROL, &req_mgr_link_control, sizeof(req_mgr_link_control));
LOGD("link control: %d", ret);
// start devices
LOG("-- Start devices");
ret = device_control(s->isp_fd, CAM_START_DEV, s->session_handle, s->isp_dev_handle);
LOGD("start isp: %d", ret);
ret = device_control(s->csiphy_fd, CAM_START_DEV, s->session_handle, s->csiphy_dev_handle);
LOGD("start csiphy: %d", ret);
ret = device_control(s->sensor_fd, CAM_START_DEV, s->session_handle, s->sensor_dev_handle);
LOGD("start sensor: %d", ret);
// prime the pipeline with all frame buffers
for (int i = 0; i < FRAME_BUF_COUNT; i++) {
LOG("-- Initting buffer %d", i);
enqueue_buffer(s, i);
}
}
// Initialize all three camera states (all AR0231 sensors, ports 0-2) and
// open the debug streaming socket.
void cameras_init(DualCameraState *s) {
camera_init(&s->rear, CAMERA_ID_AR0231, 0, 20);
camera_init(&s->wide, CAMERA_ID_AR0231, 1, 20);
camera_init(&s->front, CAMERA_ID_AR0231, 2, 20);
// NOTE(review): hardcoded LAN address — presumably a dev-machine debug sink;
// remove or make configurable before shipping
zsock_t *live_sock = zsock_new_push("tcp://192.168.2.221:7766");
assert(live_sock);
s->live_sock = zsock_resolve(live_sock);
}
// Open the shared camera device nodes, query the ISP for IOMMU handles,
// subscribe to frame events, and open the rear camera.
// NOTE(review): camera_bufs_focus/stats/front are currently unused — front and
// wide bring-up is still TODO (see commented camera_open below).
void cameras_open(DualCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front) {
int ret;
LOG("-- Opening devices");
// video0 is the target of many ioctls
s->video0_fd = open("/dev/video0", O_RDWR | O_NONBLOCK);
assert(s->video0_fd >= 0);
LOGD("opened video0");
// all three cameras share the same device fds
s->rear.video0_fd = s->front.video0_fd = s->wide.video0_fd = s->video0_fd;
// video1 is the target of some ioctls
s->video1_fd = open("/dev/video1", O_RDWR | O_NONBLOCK);
assert(s->video1_fd >= 0);
LOGD("opened video1");
s->rear.video1_fd = s->front.video1_fd = s->wide.video1_fd = s->video1_fd;
// looks like there's only one of these
s->isp_fd = open("/dev/v4l-subdev1", O_RDWR | O_NONBLOCK);
assert(s->isp_fd >= 0);
LOGD("opened isp");
s->rear.isp_fd = s->front.isp_fd = s->wide.isp_fd = s->isp_fd;
// query icp for MMU handles
LOG("-- Query ICP for MMU handles");
static struct cam_isp_query_cap_cmd isp_query_cap_cmd = {0};
static struct cam_query_cap_cmd query_cap_cmd = {0};
query_cap_cmd.handle_type = 1;
query_cap_cmd.caps_handle = (uint64_t)&isp_query_cap_cmd;
query_cap_cmd.size = sizeof(isp_query_cap_cmd);
ret = cam_control(s->isp_fd, CAM_QUERY_CAP, &query_cap_cmd, sizeof(query_cap_cmd));
assert(ret == 0);
LOGD("using MMU handle: %x", isp_query_cap_cmd.device_iommu.non_secure);
LOGD("using MMU handle: %x", isp_query_cap_cmd.cdm_iommu.non_secure);
int device_iommu = isp_query_cap_cmd.device_iommu.non_secure;
int cdm_iommu = isp_query_cap_cmd.cdm_iommu.non_secure;
s->rear.device_iommu = s->front.device_iommu = s->wide.device_iommu = device_iommu;
s->rear.cdm_iommu = s->front.cdm_iommu = s->wide.cdm_iommu = cdm_iommu;
// subscribe to the frame events dequeued in cameras_run (ids 0 and 1)
LOG("-- Subscribing");
static struct v4l2_event_subscription sub = {0};
sub.type = 0x8000000; // NOTE(review): private event type — matches the check in cameras_run
sub.id = 0;
ret = ioctl(s->video0_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
LOGD("isp subscribe: %d", ret);
sub.id = 1;
ret = ioctl(s->video0_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
LOGD("isp subscribe: %d", ret);
camera_open(&s->rear, camera_bufs_rear);
//camera_open(&s->front, camera_bufs_front);
// TODO: add bufs for camera wide
}
// Tear down one camera: stop devices, stop and destroy the request-manager
// link, release devices and session, then stop the frame tbuffer.
// Mirrors the bring-up order in camera_open, in reverse.
static void camera_close(CameraState *s) {
  int ret;
  // stop devices
  LOG("-- Stop devices");
  ret = device_control(s->sensor_fd, CAM_STOP_DEV, s->session_handle, s->sensor_dev_handle);
  LOGD("stop sensor: %d", ret);
  ret = device_control(s->isp_fd, CAM_STOP_DEV, s->session_handle, s->isp_dev_handle);
  LOGD("stop isp: %d", ret);
  ret = device_control(s->csiphy_fd, CAM_STOP_DEV, s->session_handle, s->csiphy_dev_handle);
  LOGD("stop csiphy: %d", ret);

  // link control stop (ops = 1; camera_open activates with ops = 0)
  LOG("-- Stop link control");
  struct cam_req_mgr_link_control req_mgr_link_control = {0};  // local: was a needless non-reentrant static
  req_mgr_link_control.ops = 1;
  req_mgr_link_control.session_hdl = s->session_handle;
  req_mgr_link_control.num_links = 1;
  req_mgr_link_control.link_hdls[0] = s->link_handle;
  ret = cam_control(s->video0_fd, CAM_REQ_MGR_LINK_CONTROL, &req_mgr_link_control, sizeof(req_mgr_link_control));
  LOGD("link control stop: %d", ret);

  // unlink
  LOG("-- Unlink");
  struct cam_req_mgr_unlink_info req_mgr_unlink_info = {0};  // local: was a needless non-reentrant static
  req_mgr_unlink_info.session_hdl = s->session_handle;
  req_mgr_unlink_info.link_hdl = s->link_handle;
  ret = cam_control(s->video0_fd, CAM_REQ_MGR_UNLINK, &req_mgr_unlink_info, sizeof(req_mgr_unlink_info));
  LOGD("unlink: %d", ret);

  // release devices
  LOGD("-- Release devices");
  ret = device_control(s->sensor_fd, CAM_RELEASE_DEV, s->session_handle, s->sensor_dev_handle);
  LOGD("release sensor: %d", ret);
  ret = device_control(s->isp_fd, CAM_RELEASE_DEV, s->session_handle, s->isp_dev_handle);
  LOGD("release isp: %d", ret);
  ret = device_control(s->csiphy_fd, CAM_RELEASE_DEV, s->session_handle, s->csiphy_dev_handle);
  LOGD("release csiphy: %d", ret);

  ret = cam_control(s->video0_fd, CAM_REQ_MGR_DESTROY_SESSION, &s->req_mgr_session_info, sizeof(s->req_mgr_session_info));
  LOGD("destroyed session: %d", ret);

  tbuffer_stop(&s->camera_tb);
}
// Close every camera that cameras_open() actually opened.
// Only the rear camera is wired up today; front and wide remain TODO.
static void cameras_close(DualCameraState *s) {
  camera_close(&s->rear);
}
// Layout of the payload found in v4l2_event.u.data for request-manager
// events (ev.type == 0x8000000), as decoded by cameras_run().
// NOTE(review): this must mirror the kernel's event message byte-for-byte.
// Later revisions of this driver insert an `int64_t req_id` field before the
// timestamp fields; if the running kernel sends that field, tv_sec/tv_usec
// here are misaligned — verify against the cam_req_mgr UAPI headers.
struct video_event_data {
  int32_t session_hdl;  // session the event belongs to
  int32_t link_hdl;     // link within that session
  int32_t frame_id;     // frame counter; 0 means no frame attached
  int32_t reserved;
  uint64_t tv_sec;      // event timestamp, whole seconds
  uint64_t tv_usec;     // event timestamp, microseconds part
};
// Main frame-event loop: polls video0/video1 for request-manager events,
// matches each event's frame_id against the rear camera's outstanding
// request ids, dispatches the corresponding buffer to consumers, and kicks
// autoexposure every ~20 dispatched frames. Runs until do_exit is set,
// then closes all cameras.
void cameras_run(DualCameraState *s) {
  LOG("-- Dequeueing Video events");
  int frame_id = 1;
  int frames_since_ae = 0;
  while (!do_exit) {
    struct pollfd fds[2] = {{0}};
    fds[0].fd = s->video0_fd;
    fds[0].events = POLLPRI;
    fds[1].fd = s->video1_fd;
    fds[1].events = POLLPRI;
    int ret = poll(fds, ARRAYSIZE(fds), 1000);
    if (ret == 0) continue;  // timeout, not an error: errno is stale here
    if (ret < 0) {
      if (errno == EINTR || errno == EAGAIN) continue;
      LOGE("poll failed (%d - %d)", ret, errno);
      break;
    }
    for (int i = 0; i < 2; i++) {
      if (!fds[i].revents) continue;
      // local (not static): a failed DQEVENT must not leave us re-processing
      // the previous iteration's event
      struct v4l2_event ev = {0};
      ret = ioctl(fds[i].fd, VIDIOC_DQEVENT, &ev);
      if (ret != 0) {
        // without this check, ev would be read despite the dequeue failing
        LOGE("VIDIOC_DQEVENT failed on video%d (%d - %d)", i, ret, errno);
        continue;
      }
      if (ev.type == 0x8000000) {  // request-manager event
        struct video_event_data *event_data = (struct video_event_data *)ev.u.data;
        uint64_t timestamp = (event_data->tv_sec*1000000000ULL
                              + event_data->tv_usec*1000);
        LOGD("video%d dqevent: %d type:0x%x frame_id:%d timestamp: %llu", i, ret, ev.type, event_data->frame_id, timestamp);
        if (event_data->frame_id != 0) {
          for (int j = 0; j < FRAME_BUF_COUNT; j++) {
            if (s->rear.request_ids[j] == event_data->frame_id) {
              // TODO: support more than rear camera
              tbuffer_dispatch(&s->rear.camera_tb, j);
              s->rear.camera_bufs_metadata[j].frame_id = frame_id++;
              // run autoexposure roughly every 20 frames
              frames_since_ae += 1;
              if (frames_since_ae > 20) {
                do_autoexposure(&s->rear, 0.5);
                frames_since_ae = 0;
              }
              break;
            }
          }
        }
      }
    }
  }
  LOG(" ************** STOPPING **************");
  cameras_close(s);
}
// Intentional no-op stub kept so the common camerad interface links.
// NOTE(review): exposure updates for this sensor appear to be driven from
// cameras_run() via do_autoexposure() instead — confirm before wiring
// anything in here.
void camera_autoexposure(CameraState *s, float grey_frac) {
}
// Best-effort, non-blocking publish of a raw buffer on the live socket.
// Drops the message (ZMQ_DONTWAIT) rather than stalling the camera loop.
void sendout(DualCameraState *s, void* dat, int len) {
  int err = zmq_send(s->live_sock, dat, len, ZMQ_DONTWAIT);
  if (err == -1) {
    // zmq_errno() is only meaningful after a failed call; the old code read
    // and printed it unconditionally on every frame, spamming the log and
    // reporting a stale errno on success.
    LOGE("sendout: zmq_send of %d bytes failed (zmq errno %d)", len, zmq_errno());
  }
}

File diff suppressed because it is too large Load Diff

@ -28,6 +28,9 @@
//#define FRAME_STRIDE 1936 // for 8 bit output //#define FRAME_STRIDE 1936 // for 8 bit output
#define FRAME_STRIDE 2416 // for 10 bit output #define FRAME_STRIDE 2416 // for 10 bit output
int sr_id = 0;
static void hexdump(uint8_t *data, int len) { static void hexdump(uint8_t *data, int len) {
for (int i = 0; i < len; i++) { for (int i = 0; i < len; i++) {
if (i!=0&&i%0x10==0) printf("\n"); if (i!=0&&i%0x10==0) printf("\n");
@ -65,6 +68,7 @@ int cam_control(int fd, int op_code, void *handle, int size) {
int ret = ioctl(fd, VIDIOC_CAM_CONTROL, &camcontrol); int ret = ioctl(fd, VIDIOC_CAM_CONTROL, &camcontrol);
if (ret == -1) { if (ret == -1) {
printf("OP CODE ERR - %d \n", op_code);
perror("wat"); perror("wat");
} }
return ret; return ret;
@ -81,7 +85,7 @@ int device_control(int fd, int op_code, int session_handle, int dev_handle) {
void *alloc_w_mmu_hdl(int video0_fd, int len, int align, int flags, uint32_t *handle, int mmu_hdl, int mmu_hdl2) { void *alloc_w_mmu_hdl(int video0_fd, int len, int align, int flags, uint32_t *handle, int mmu_hdl, int mmu_hdl2) {
int ret; int ret;
static struct cam_mem_mgr_alloc_cmd mem_mgr_alloc_cmd = {0}; struct cam_mem_mgr_alloc_cmd mem_mgr_alloc_cmd = {0};
mem_mgr_alloc_cmd.len = len; mem_mgr_alloc_cmd.len = len;
mem_mgr_alloc_cmd.align = align; mem_mgr_alloc_cmd.align = align;
mem_mgr_alloc_cmd.flags = flags; mem_mgr_alloc_cmd.flags = flags;
@ -115,7 +119,7 @@ void *alloc(int video0_fd, int len, int align, int flags, uint32_t *handle) {
void release(int video0_fd, uint32_t handle) { void release(int video0_fd, uint32_t handle) {
int ret; int ret;
static struct cam_mem_mgr_release_cmd mem_mgr_release_cmd = {0}; struct cam_mem_mgr_release_cmd mem_mgr_release_cmd = {0};
mem_mgr_release_cmd.buf_handle = handle; mem_mgr_release_cmd.buf_handle = handle;
ret = cam_control(video0_fd, CAM_REQ_MGR_RELEASE_BUF, &mem_mgr_release_cmd, sizeof(mem_mgr_release_cmd)); ret = cam_control(video0_fd, CAM_REQ_MGR_RELEASE_BUF, &mem_mgr_release_cmd, sizeof(mem_mgr_release_cmd));
@ -134,6 +138,7 @@ void release_fd(int video0_fd, uint32_t handle) {
void sensors_poke(struct CameraState *s, int request_id) { void sensors_poke(struct CameraState *s, int request_id) {
uint32_t cam_packet_handle = 0; uint32_t cam_packet_handle = 0;
int size = sizeof(struct cam_packet); int size = sizeof(struct cam_packet);
printf("- poke allocing for req_id %d -\n", request_id);
struct cam_packet *pkt = alloc(s->video0_fd, size, 8, struct cam_packet *pkt = alloc(s->video0_fd, size, 8,
CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle); CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle);
pkt->num_cmd_buf = 1; pkt->num_cmd_buf = 1;
@ -143,7 +148,7 @@ void sensors_poke(struct CameraState *s, int request_id) {
pkt->header.request_id = request_id; pkt->header.request_id = request_id;
struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload;
static struct cam_config_dev_cmd config_dev_cmd = {}; struct cam_config_dev_cmd config_dev_cmd = {};
config_dev_cmd.session_handle = s->session_handle; config_dev_cmd.session_handle = s->session_handle;
config_dev_cmd.dev_handle = s->sensor_dev_handle; config_dev_cmd.dev_handle = s->sensor_dev_handle;
config_dev_cmd.offset = 0; config_dev_cmd.offset = 0;
@ -152,6 +157,7 @@ void sensors_poke(struct CameraState *s, int request_id) {
int ret = cam_control(s->sensor_fd, CAM_CONFIG_DEV, &config_dev_cmd, sizeof(config_dev_cmd)); int ret = cam_control(s->sensor_fd, CAM_CONFIG_DEV, &config_dev_cmd, sizeof(config_dev_cmd));
assert(ret == 0); assert(ret == 0);
printf("- poke releasing for req_id %d -\n", request_id);
release_fd(s->video0_fd, cam_packet_handle); release_fd(s->video0_fd, cam_packet_handle);
} }
@ -178,7 +184,7 @@ void sensors_i2c(struct CameraState *s, struct i2c_random_wr_payload* dat, int l
i2c_random_wr->header.addr_type = CAMERA_SENSOR_I2C_TYPE_WORD; i2c_random_wr->header.addr_type = CAMERA_SENSOR_I2C_TYPE_WORD;
memcpy(i2c_random_wr->random_wr_payload, dat, len*sizeof(struct i2c_random_wr_payload)); memcpy(i2c_random_wr->random_wr_payload, dat, len*sizeof(struct i2c_random_wr_payload));
static struct cam_config_dev_cmd config_dev_cmd = {}; struct cam_config_dev_cmd config_dev_cmd = {};
config_dev_cmd.session_handle = s->session_handle; config_dev_cmd.session_handle = s->session_handle;
config_dev_cmd.dev_handle = s->sensor_dev_handle; config_dev_cmd.dev_handle = s->sensor_dev_handle;
config_dev_cmd.offset = 0; config_dev_cmd.offset = 0;
@ -187,7 +193,9 @@ void sensors_i2c(struct CameraState *s, struct i2c_random_wr_payload* dat, int l
int ret = cam_control(s->sensor_fd, CAM_CONFIG_DEV, &config_dev_cmd, sizeof(config_dev_cmd)); int ret = cam_control(s->sensor_fd, CAM_CONFIG_DEV, &config_dev_cmd, sizeof(config_dev_cmd));
assert(ret == 0); assert(ret == 0);
printf("- I2C releasing 1 -\n");
release_fd(s->video0_fd, buf_desc[0].mem_handle); release_fd(s->video0_fd, buf_desc[0].mem_handle);
printf("- I2C releasing 2 -\n");
release_fd(s->video0_fd, cam_packet_handle); release_fd(s->video0_fd, cam_packet_handle);
} }
@ -353,17 +361,22 @@ void sensors_init(int video0_fd, int sensor_fd, int camera_num) {
int ret = cam_control(sensor_fd, CAM_SENSOR_PROBE_CMD, (void *)cam_packet_handle, 0); int ret = cam_control(sensor_fd, CAM_SENSOR_PROBE_CMD, (void *)cam_packet_handle, 0);
assert(ret == 0); assert(ret == 0);
printf("- init releasing 1 -\n");
release_fd(video0_fd, buf_desc[0].mem_handle); release_fd(video0_fd, buf_desc[0].mem_handle);
printf("- init releasing 2 -\n");
release_fd(video0_fd, buf_desc[1].mem_handle); release_fd(video0_fd, buf_desc[1].mem_handle);
printf("- init releasing 3 -\n");
release_fd(video0_fd, cam_packet_handle); release_fd(video0_fd, cam_packet_handle);
} }
void config_isp(struct CameraState *s, int io_mem_handle, int fence, int request_id, int buf0_mem_handle, int buf0_offset) { void config_isp(struct CameraState *s, int io_mem_handle, int fence, int request_id, int buf0_mem_handle, int buf0_offset) {
printf("io mem handle ? %d \n", io_mem_handle);
uint32_t cam_packet_handle = 0; uint32_t cam_packet_handle = 0;
int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2; int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2;
if (io_mem_handle != 0) { if (io_mem_handle != 0) {
size += sizeof(struct cam_buf_io_cfg); size += sizeof(struct cam_buf_io_cfg);
} }
printf("- ispc allocing 1 for sync_obj %d, req_id %d -\n", fence, request_id);
struct cam_packet *pkt = alloc(s->video0_fd, size, 8, struct cam_packet *pkt = alloc(s->video0_fd, size, 8,
CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle); CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle);
pkt->num_cmd_buf = 2; pkt->num_cmd_buf = 2;
@ -401,6 +414,7 @@ void config_isp(struct CameraState *s, int io_mem_handle, int fence, int request
} }
buf_desc[1].type = CAM_CMD_BUF_GENERIC; buf_desc[1].type = CAM_CMD_BUF_GENERIC;
buf_desc[1].meta_data = CAM_ISP_PACKET_META_GENERIC_BLOB_COMMON; buf_desc[1].meta_data = CAM_ISP_PACKET_META_GENERIC_BLOB_COMMON;
printf("- ispc allocing 2 for sync_obj %d, req_id %d -\n", fence, request_id);
uint32_t *buf2 = alloc(s->video0_fd, buf_desc[1].size, 0x20, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &buf_desc[1].mem_handle); uint32_t *buf2 = alloc(s->video0_fd, buf_desc[1].size, 0x20, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &buf_desc[1].mem_handle);
// cam_isp_packet_generic_blob_handler // cam_isp_packet_generic_blob_handler
@ -453,7 +467,7 @@ void config_isp(struct CameraState *s, int io_mem_handle, int fence, int request
io_cfg[0].framedrop_pattern = 0x1; io_cfg[0].framedrop_pattern = 0x1;
} }
static struct cam_config_dev_cmd config_dev_cmd = {}; struct cam_config_dev_cmd config_dev_cmd = {};
config_dev_cmd.session_handle = s->session_handle; config_dev_cmd.session_handle = s->session_handle;
config_dev_cmd.dev_handle = s->isp_dev_handle; config_dev_cmd.dev_handle = s->isp_dev_handle;
config_dev_cmd.offset = 0; config_dev_cmd.offset = 0;
@ -464,61 +478,77 @@ void config_isp(struct CameraState *s, int io_mem_handle, int fence, int request
printf("ISP CONFIG FAILED\n"); printf("ISP CONFIG FAILED\n");
} }
printf("- ispc releasing 1 for sync_obj %d, req_id %d -\n", fence, request_id);
release_fd(s->video0_fd, buf_desc[1].mem_handle); release_fd(s->video0_fd, buf_desc[1].mem_handle);
//release(s->video0_fd, buf_desc[0].mem_handle); // release_fd(s->video0_fd, buf_desc[0].mem_handle);
printf("- ispc releasing 2 for sync_obj %d, req_id %d -\n", fence, request_id);
release_fd(s->video0_fd, cam_packet_handle); release_fd(s->video0_fd, cam_packet_handle);
} }
void enqueue_buffer(struct CameraState *s, int i) { void enqueue_buffer(struct CameraState *s, int i) {
int ret; int ret;
int request_id = (++s->sched_request_id); int request_id = (++sr_id);//(++s->sched_request_id);
bool first = true; bool first = true;
printf("camera %d[%d] BUF unmapping %d \n", s->camera_num, i, s->buf_handle[i]);
if (s->buf_handle[i]) { if (s->buf_handle[i]) {
first = false; first = false;
release(s->video0_fd, s->buf_handle[i]); release(s->video0_fd, s->buf_handle[i]);
// wait
struct cam_sync_wait sync_wait = {0};
sync_wait.sync_obj = s->sync_objs[i];
sync_wait.timeout_ms = 100;
ret = cam_control(s->video1_fd, CAM_SYNC_WAIT, &sync_wait, sizeof(sync_wait));
printf("camera %d SYNC wait obj %d/%d ret %d \n", s->camera_num, sync_wait.sync_obj, s->sync_objs[i], ret);
// destroy old output fence // destroy old output fence
static struct cam_sync_info sync_destroy = {0}; struct cam_sync_info sync_destroy = {0};
strcpy(sync_destroy.name, "NodeOutputPortFence"); strcpy(sync_destroy.name, "NodeOutputPortFence");
sync_destroy.sync_obj = s->sync_objs[i]; sync_destroy.sync_obj = s->sync_objs[i];
ret = cam_control(s->video1_fd, CAM_SYNC_DESTROY, &sync_destroy, sizeof(sync_destroy)); ret = cam_control(s->video1_fd, CAM_SYNC_DESTROY, &sync_destroy, sizeof(sync_destroy));
LOGD("fence destroy: %d %d", ret, sync_destroy.sync_obj); LOGD("fence destroy: %d %d", ret, sync_destroy.sync_obj);
printf("camera %d SYNC destroy obj %d \n", s->camera_num, sync_destroy.sync_obj);
} }
// new request_ids // new request_ids
s->request_ids[i] = request_id; s->request_ids[i] = request_id;
// do stuff // do stuff
static struct cam_req_mgr_sched_request req_mgr_sched_request = {0}; struct cam_req_mgr_sched_request req_mgr_sched_request = {0};
req_mgr_sched_request.session_hdl = s->session_handle; req_mgr_sched_request.session_hdl = s->session_handle;
req_mgr_sched_request.link_hdl = s->link_handle; req_mgr_sched_request.link_hdl = s->link_handle;
req_mgr_sched_request.req_id = request_id; req_mgr_sched_request.req_id = request_id;
// req_mgr_sched_request.sync_mode = CAM_REQ_MGR_SYNC_MODE_SYNC;
ret = cam_control(s->video0_fd, CAM_REQ_MGR_SCHED_REQ, &req_mgr_sched_request, sizeof(req_mgr_sched_request)); ret = cam_control(s->video0_fd, CAM_REQ_MGR_SCHED_REQ, &req_mgr_sched_request, sizeof(req_mgr_sched_request));
LOGD("sched req: %d %d", ret, request_id); LOGD("sched req: %d %d", ret, request_id);
// create output fence // create output fence
static struct cam_sync_info sync_create = {0}; struct cam_sync_info sync_create = {0};
strcpy(sync_create.name, "NodeOutputPortFence"); strcpy(sync_create.name, "NodeOutputPortFence");
sync_create.sync_obj = s->camera_num*100 + i*10;
ret = cam_control(s->video1_fd, CAM_SYNC_CREATE, &sync_create, sizeof(sync_create)); ret = cam_control(s->video1_fd, CAM_SYNC_CREATE, &sync_create, sizeof(sync_create));
LOGD("fence req: %d %d", ret, sync_create.sync_obj); LOGD("fence req: %d %d", ret, sync_create.sync_obj);
s->sync_objs[i] = sync_create.sync_obj; s->sync_objs[i] = sync_create.sync_obj;
printf("camera %d SYNC create obj %d \n", s->camera_num, sync_create.sync_obj);
// configure ISP to put the image in place // configure ISP to put the image in place
static struct cam_mem_mgr_map_cmd mem_mgr_map_cmd = {0}; struct cam_mem_mgr_map_cmd mem_mgr_map_cmd = {0};
mem_mgr_map_cmd.mmu_hdls[0] = s->device_iommu; mem_mgr_map_cmd.mmu_hdls[0] = s->device_iommu;
mem_mgr_map_cmd.num_hdl = 1; mem_mgr_map_cmd.num_hdl = 1;
mem_mgr_map_cmd.flags = 1; mem_mgr_map_cmd.flags = 1;
mem_mgr_map_cmd.fd = s->bufs[i].fd; mem_mgr_map_cmd.fd = s->bufs[i].fd;
// printf("fd i for camera %d is %d\n", s->camera_num, i);
ret = cam_control(s->video0_fd, CAM_REQ_MGR_MAP_BUF, &mem_mgr_map_cmd, sizeof(mem_mgr_map_cmd)); ret = cam_control(s->video0_fd, CAM_REQ_MGR_MAP_BUF, &mem_mgr_map_cmd, sizeof(mem_mgr_map_cmd));
LOGD("map buf req: (fd: %d) 0x%x %d", s->bufs[i].fd, mem_mgr_map_cmd.out.buf_handle, ret); LOGD("map buf req: (fd: %d) 0x%x %d", s->bufs[i].fd, mem_mgr_map_cmd.out.buf_handle, ret);
s->buf_handle[i] = mem_mgr_map_cmd.out.buf_handle; s->buf_handle[i] = mem_mgr_map_cmd.out.buf_handle;
printf("camera %d[%d] BUF map %d with fd %d \n", s->camera_num, i, s->buf_handle[i], s->bufs[i].fd);
// poke sensor // poke sensor
sensors_poke(s, request_id); sensors_poke(s, request_id);
LOGD("Poked sensor"); LOGD("Poked sensor");
// push the buffer // push the buffer
printf("camera %d, enQ i %d, req_id %d\n", s->camera_num, i, request_id);
config_isp(s, s->buf_handle[i], s->sync_objs[i], request_id, s->buf0_handle, 65632*(i+1)); config_isp(s, s->buf_handle[i], s->sync_objs[i], request_id, s->buf0_handle, 65632*(i+1));
} }
@ -594,7 +624,6 @@ static void camera_open(CameraState *s, VisionBuf* b) {
ret = cam_control(s->video0_fd, CAM_REQ_MGR_CREATE_SESSION, &s->req_mgr_session_info, sizeof(s->req_mgr_session_info)); ret = cam_control(s->video0_fd, CAM_REQ_MGR_CREATE_SESSION, &s->req_mgr_session_info, sizeof(s->req_mgr_session_info));
LOGD("get session: %d 0x%X", ret, s->req_mgr_session_info.session_hdl); LOGD("get session: %d 0x%X", ret, s->req_mgr_session_info.session_hdl);
s->session_handle = s->req_mgr_session_info.session_hdl; s->session_handle = s->req_mgr_session_info.session_hdl;
// access the sensor // access the sensor
LOGD("-- Accessing sensor"); LOGD("-- Accessing sensor");
static struct cam_acquire_dev_cmd acquire_dev_cmd = {0}; static struct cam_acquire_dev_cmd acquire_dev_cmd = {0};
@ -686,6 +715,7 @@ static void camera_open(CameraState *s, VisionBuf* b) {
acquire_dev_cmd.resource_hdl = (uint64_t)&csiphy_acquire_dev_info; acquire_dev_cmd.resource_hdl = (uint64_t)&csiphy_acquire_dev_info;
ret = cam_control(s->csiphy_fd, CAM_ACQUIRE_DEV, &acquire_dev_cmd, sizeof(acquire_dev_cmd)); ret = cam_control(s->csiphy_fd, CAM_ACQUIRE_DEV, &acquire_dev_cmd, sizeof(acquire_dev_cmd));
LOGD("acquire csiphy dev: %d", ret); LOGD("acquire csiphy dev: %d", ret);
s->csiphy_dev_handle = acquire_dev_cmd.dev_handle; s->csiphy_dev_handle = acquire_dev_cmd.dev_handle;
@ -767,16 +797,19 @@ static void camera_open(CameraState *s, VisionBuf* b) {
ret = device_control(s->sensor_fd, CAM_START_DEV, s->session_handle, s->sensor_dev_handle); ret = device_control(s->sensor_fd, CAM_START_DEV, s->session_handle, s->sensor_dev_handle);
LOGD("start sensor: %d", ret); LOGD("start sensor: %d", ret);
for (int i = 0; i < FRAME_BUF_COUNT; i++) { //for (int i = 0; i < FRAME_BUF_COUNT; i++) {
LOG("-- Initting buffer %d", i); // LOG("-- Initting buffer %d", i);
enqueue_buffer(s, i); // enqueue_buffer(s, i);
} //}
} }
void cameras_init(DualCameraState *s) { void cameras_init(DualCameraState *s) {
camera_init(&s->rear, CAMERA_ID_AR0231, 0, 20); camera_init(&s->rear, CAMERA_ID_AR0231, 0, 20);
printf("rear initted \n");
camera_init(&s->wide, CAMERA_ID_AR0231, 1, 20); camera_init(&s->wide, CAMERA_ID_AR0231, 1, 20);
printf("wide initted \n");
camera_init(&s->front, CAMERA_ID_AR0231, 2, 20); camera_init(&s->front, CAMERA_ID_AR0231, 2, 20);
printf("front initted \n");
#ifdef NOSCREEN #ifdef NOSCREEN
zsock_t *rgb_sock = zsock_new_push("tcp://192.168.2.221:7768"); zsock_t *rgb_sock = zsock_new_push("tcp://192.168.2.221:7768");
assert(rgb_sock); assert(rgb_sock);
@ -784,7 +817,7 @@ void cameras_init(DualCameraState *s) {
#endif #endif
} }
void cameras_open(DualCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front) { void cameras_open(DualCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front, VisionBuf *camera_bufs_wide) {
int ret; int ret;
LOG("-- Opening devices"); LOG("-- Opening devices");
@ -829,13 +862,25 @@ void cameras_open(DualCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *ca
sub.id = 0; sub.id = 0;
ret = ioctl(s->video0_fd, VIDIOC_SUBSCRIBE_EVENT, &sub); ret = ioctl(s->video0_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
LOGD("isp subscribe: %d", ret); LOGD("isp subscribe: %d", ret);
sub.id = 1; //sub.id = 1;
ret = ioctl(s->video0_fd, VIDIOC_SUBSCRIBE_EVENT, &sub); //ret = ioctl(s->video0_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
LOGD("isp subscribe: %d", ret); //LOGD("isp subscribe: %d", ret);
sub.type =0x0;// 0x8000020;
camera_open(&s->rear, camera_bufs_rear); //sub.id = 2;
//camera_open(&s->front, camera_bufs_front); //ret = ioctl(s->video1_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
// TODO: add bufs for camera wide //LOGD("isp subscribe: %d", ret);
//sub.id = 2;
//ret = ioctl(s->isp_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
//LOGD("isp subscribe: %d", ret);
printf("opening \n");
camera_open(&s->rear, camera_bufs_rear); // s->rear.bufs
printf("rear opened \n");
camera_open(&s->wide, camera_bufs_wide); // s->rear.bufs
printf("wide opened \n");
camera_open(&s->front, camera_bufs_front); // s->front.bufs
printf("front opened \n");
// TODO: add bufs for camera wide0
} }
static void camera_close(CameraState *s) { static void camera_close(CameraState *s) {
@ -879,14 +924,13 @@ static void camera_close(CameraState *s) {
ret = cam_control(s->video0_fd, CAM_REQ_MGR_DESTROY_SESSION, &s->req_mgr_session_info, sizeof(s->req_mgr_session_info)); ret = cam_control(s->video0_fd, CAM_REQ_MGR_DESTROY_SESSION, &s->req_mgr_session_info, sizeof(s->req_mgr_session_info));
LOGD("destroyed session: %d", ret); LOGD("destroyed session: %d", ret);
tbuffer_stop(&s->camera_tb); tbuffer_stop(&s->camera_tb);
} }
static void cameras_close(DualCameraState *s) { static void cameras_close(DualCameraState *s) {
camera_close(&s->rear); camera_close(&s->rear);
//camera_close(&s->front); camera_close(&s->front);
//camera_close(&s->wide); camera_close(&s->wide);
#ifdef NOSCREEN #ifdef NOSCREEN
zsock_destroy(&s->rgb_sock); zsock_destroy(&s->rgb_sock);
#endif #endif
@ -897,6 +941,7 @@ struct video_event_data {
int32_t link_hdl; int32_t link_hdl;
int32_t frame_id; int32_t frame_id;
int32_t reserved; int32_t reserved;
int64_t req_id;
uint64_t tv_sec; uint64_t tv_sec;
uint64_t tv_usec; uint64_t tv_usec;
}; };
@ -904,9 +949,36 @@ struct video_event_data {
void cameras_run(DualCameraState *s) { void cameras_run(DualCameraState *s) {
LOG("-- Dequeueing Video events"); LOG("-- Dequeueing Video events");
int frame_id = 1; int frame_id = 1;
int q0 = 0;
int q1 = 0;
int q2 = 0;
for (int i = 0; i < FRAME_BUF_COUNT; i++) {
LOG("-- Initting buffer %d", i);
enqueue_buffer(&s->rear, i);
enqueue_buffer(&s->wide, i);
enqueue_buffer(&s->front, i);
}
struct cam_req_mgr_flush_info req_mgr_flush_request = {0};
req_mgr_flush_request.session_hdl = s->rear.session_handle;
req_mgr_flush_request.link_hdl = s->rear.link_handle;
req_mgr_flush_request.flush_type = CAM_REQ_MGR_FLUSH_TYPE_ALL;
//int ret = cam_control(s->video0_fd, CAM_REQ_MGR_FLUSH_REQ, &req_mgr_flush_request, sizeof(req_mgr_flush_request));
//assert(ret == 0);
req_mgr_flush_request.session_hdl = s->wide.session_handle;
req_mgr_flush_request.link_hdl = s->wide.link_handle;
req_mgr_flush_request.flush_type = CAM_REQ_MGR_FLUSH_TYPE_ALL;
//ret = cam_control(s->video0_fd, CAM_REQ_MGR_FLUSH_REQ, &req_mgr_flush_request, sizeof(req_mgr_flush_request));
//assert(ret == 0);
req_mgr_flush_request.session_hdl = s->front.session_handle;
req_mgr_flush_request.link_hdl = s->front.link_handle;
req_mgr_flush_request.flush_type = CAM_REQ_MGR_FLUSH_TYPE_ALL;
//ret = cam_control(s->video0_fd, CAM_REQ_MGR_FLUSH_REQ, &req_mgr_flush_request, sizeof(req_mgr_flush_request));
//assert(ret == 0);
while (!do_exit) { while (!do_exit) {
struct pollfd fds[2] = {{0}}; struct pollfd fds[3] = {{0}};
fds[0].fd = s->video0_fd; fds[0].fd = s->video0_fd;
fds[0].events = POLLPRI; fds[0].events = POLLPRI;
@ -914,6 +986,9 @@ void cameras_run(DualCameraState *s) {
fds[1].fd = s->video1_fd; fds[1].fd = s->video1_fd;
fds[1].events = POLLPRI; fds[1].events = POLLPRI;
fds[2].fd = s->isp_fd;
fds[2].events = POLLPRI;
int ret = poll(fds, ARRAYSIZE(fds), 1000); int ret = poll(fds, ARRAYSIZE(fds), 1000);
if (ret <= 0) { if (ret <= 0) {
if (errno == EINTR || errno == EAGAIN) continue; if (errno == EINTR || errno == EAGAIN) continue;
@ -921,25 +996,67 @@ void cameras_run(DualCameraState *s) {
break; break;
} }
for (int i=0; i<2; i++) { for (int i=0; i<3; i++) {
if (!fds[i].revents) continue; if (!fds[i].revents) continue;
//printf("%d rev = %d \n", i, fds[i].revents);
for(int event_idx=0; event_idx<1; event_idx++){
static struct v4l2_event ev = {0}; static struct v4l2_event ev = {0};
ret = ioctl(fds[i].fd, VIDIOC_DQEVENT, &ev); ret = ioctl(fds[i].fd, VIDIOC_DQEVENT, &ev);
if (ev.type == 0x8000000) { if (ev.type == 0x8000000) {
struct video_event_data *event_data = (struct video_event_data *)ev.u.data; struct video_event_data *event_data = (struct video_event_data *)ev.u.data;
// printf("pending? %d/%d \n", ev.pending, fds[i].revents);
printf("sess_hdl %d, link_hdl %d, frame_id %d, reserved %d, req_id %lld, tv_sec %lld, tv_usec %lld\n", event_data->session_hdl, event_data->link_hdl, event_data->frame_id, event_data->reserved, event_data->req_id, event_data->tv_sec, event_data->tv_usec);
uint64_t timestamp = (event_data->tv_sec*1000000000ULL uint64_t timestamp = (event_data->tv_sec*1000000000ULL
+ event_data->tv_usec*1000); + event_data->tv_usec*1000);
LOGD("video%d dqevent: %d type:0x%x frame_id:%d timestamp: %llu", i, ret, ev.type, event_data->frame_id, timestamp); LOGD("video%d dqevent: %d type:0x%x frame_id:%d timestamp: %llu", i, ret, ev.type, event_data->frame_id, timestamp);
if (event_data->session_hdl == s->rear.req_mgr_session_info.session_hdl) {
//q0 = event_data->req_id - event_data->frame_id;
q0 = 2;
q1 = 1;
q2 = 1;
} else if (event_data->session_hdl == s->wide.req_mgr_session_info.session_hdl) {
//q1 = event_data->req_id - event_data->frame_id;
q0 = 1;
q1 = 2;
q2 = 1;
} else if (event_data->session_hdl == s->front.req_mgr_session_info.session_hdl) {
//q2 = event_data->req_id - event_data->frame_id;
q0 = 1;
q1 = 1;
q2 = 2;
}
int bi = (event_data->frame_id + 3) % FRAME_BUF_COUNT;
//int bi=0;
if (event_data->frame_id != 0) { if (event_data->frame_id != 0) {
for (int j = 0; j < FRAME_BUF_COUNT; j++) { //for (int j = 0; j < FRAME_BUF_COUNT; j++) {
if (s->rear.request_ids[j] == event_data->frame_id) { if (q0 >= q1 && q0 >= q2) {
// TODO: support more than rear camera //TODO: support more than rear camera
tbuffer_dispatch(&s->rear.camera_tb, j); bi = (++s->rear.buf_index + 3) % FRAME_BUF_COUNT;
s->rear.camera_bufs_metadata[j].frame_id = frame_id++; printf("rear \n");
break; tbuffer_dispatch(&s->rear.camera_tb, bi);
s->rear.camera_bufs_metadata[bi].frame_id = event_data->frame_id;
//break;
}
if (q2 >= q0 && q2 >= q1) {
bi = (++s->front.buf_index + 3) % FRAME_BUF_COUNT;
printf("front \n");
tbuffer_dispatch(&s->front.camera_tb, bi);
s->front.camera_bufs_metadata[bi].frame_id = event_data->frame_id;
//break;
} }
if (q1 >= q0 && q1 >= q2) {
bi = (++s->wide.buf_index + 3) % FRAME_BUF_COUNT;
printf("wide \n");
tbuffer_dispatch(&s->wide.camera_tb, bi);
s->wide.camera_bufs_metadata[bi].frame_id = event_data->frame_id;
//break;
}
//}
} }
//printf("q0 %d, q1 %d, q2 %d, bi %d \n",q0,q1,q2,bi);
} }
} }
} }
@ -981,6 +1098,6 @@ void sendrgb(DualCameraState *s, void* dat, int len) {
int err, err2; int err, err2;
err = zmq_send(zsock_resolve(s->rgb_sock), dat, len, ZMQ_DONTWAIT); err = zmq_send(zsock_resolve(s->rgb_sock), dat, len, ZMQ_DONTWAIT);
err2 = zmq_errno(); err2 = zmq_errno();
printf("zmq errcode %d, %d\n", err ,err2); //printf("zmq errcode %d, %d\n", err ,err2);
} }
#endif #endif

@ -55,9 +55,10 @@ typedef struct CameraState {
int buf0_handle; int buf0_handle;
int buf_handle[FRAME_BUF_COUNT]; int buf_handle[FRAME_BUF_COUNT];
int sched_request_id;
int request_ids[FRAME_BUF_COUNT]; int request_ids[FRAME_BUF_COUNT];
int sched_request_id;
int sync_objs[FRAME_BUF_COUNT]; int sync_objs[FRAME_BUF_COUNT];
int buf_index;
struct cam_req_mgr_session_info req_mgr_session_info; struct cam_req_mgr_session_info req_mgr_session_info;
} CameraState; } CameraState;
@ -75,10 +76,12 @@ typedef struct DualCameraState {
#ifdef NOSCREEN #ifdef NOSCREEN
zsock_t *rgb_sock; zsock_t *rgb_sock;
#endif #endif
pthread_mutex_t isp_lock;
} DualCameraState; } DualCameraState;
void cameras_init(DualCameraState *s); void cameras_init(DualCameraState *s);
void cameras_open(DualCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front); void cameras_open(DualCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front, VisionBuf *camera_bufs_wide);
void cameras_run(DualCameraState *s); void cameras_run(DualCameraState *s);
void camera_autoexposure(CameraState *s, float grey_frac); void camera_autoexposure(CameraState *s, float grey_frac);
#ifdef NOSCREEN #ifdef NOSCREEN

@ -0,0 +1 @@
Subproject commit fabbfe64d7940aacdacd3a80ed4685fa64d23512

@ -0,0 +1 @@
0xFFFF, 0x512

@ -0,0 +1 @@
Subproject commit 4a863e334a0d47c55ae376ed73a7e34d9900eb22

@ -75,8 +75,10 @@ struct VisionState {
cl_program prg_debayer_rear; cl_program prg_debayer_rear;
cl_program prg_debayer_front; cl_program prg_debayer_front;
cl_program prg_debayer_wide;
cl_kernel krnl_debayer_rear; cl_kernel krnl_debayer_rear;
cl_kernel krnl_debayer_front; cl_kernel krnl_debayer_front;
cl_kernel krnl_debayer_wide;
cl_program prg_rgb_laplacian; cl_program prg_rgb_laplacian;
cl_kernel krnl_rgb_laplacian; cl_kernel krnl_rgb_laplacian;
@ -89,10 +91,12 @@ struct VisionState {
// processing // processing
TBuffer ui_tb; TBuffer ui_tb;
TBuffer ui_front_tb; TBuffer ui_front_tb;
TBuffer ui_wide_tb;
mat3 yuv_transform; mat3 yuv_transform;
TBuffer *yuv_tb; TBuffer *yuv_tb;
TBuffer *yuv_front_tb; TBuffer *yuv_front_tb;
TBuffer *yuv_wide_tb;
// TODO: refactor for both cameras? // TODO: refactor for both cameras?
Pool yuv_pool; Pool yuv_pool;
@ -114,6 +118,15 @@ struct VisionState {
int yuv_front_width, yuv_front_height; int yuv_front_width, yuv_front_height;
RGBToYUVState front_rgb_to_yuv_state; RGBToYUVState front_rgb_to_yuv_state;
Pool yuv_wide_pool;
VisionBuf yuv_wide_ion[YUV_COUNT];
cl_mem yuv_wide_cl[YUV_COUNT];
YUVBuf yuv_wide_bufs[YUV_COUNT];
FrameMetadata yuv_wide_metas[YUV_COUNT];
size_t yuv_wide_buf_size;
int yuv_wide_width, yuv_wide_height;
RGBToYUVState wide_rgb_to_yuv_state;
size_t rgb_buf_size; size_t rgb_buf_size;
int rgb_width, rgb_height, rgb_stride; int rgb_width, rgb_height, rgb_stride;
VisionBuf rgb_bufs[UI_BUF_COUNT]; VisionBuf rgb_bufs[UI_BUF_COUNT];
@ -130,6 +143,10 @@ struct VisionState {
int front_meteringbox_xmin, front_meteringbox_xmax; int front_meteringbox_xmin, front_meteringbox_xmax;
int front_meteringbox_ymin, front_meteringbox_ymax; int front_meteringbox_ymin, front_meteringbox_ymax;
size_t rgb_wide_buf_size;
int rgb_wide_width, rgb_wide_height, rgb_wide_stride;
VisionBuf rgb_wide_bufs[UI_BUF_COUNT];
cl_mem rgb_wide_bufs_cl[UI_BUF_COUNT];
cl_mem camera_bufs_cl[FRAME_BUF_COUNT]; cl_mem camera_bufs_cl[FRAME_BUF_COUNT];
VisionBuf camera_bufs[FRAME_BUF_COUNT]; VisionBuf camera_bufs[FRAME_BUF_COUNT];
@ -139,6 +156,10 @@ struct VisionState {
cl_mem front_camera_bufs_cl[FRAME_BUF_COUNT]; cl_mem front_camera_bufs_cl[FRAME_BUF_COUNT];
VisionBuf front_camera_bufs[FRAME_BUF_COUNT]; VisionBuf front_camera_bufs[FRAME_BUF_COUNT];
cl_mem wide_camera_bufs_cl[FRAME_BUF_COUNT];
VisionBuf wide_camera_bufs[FRAME_BUF_COUNT];
DualCameraState cameras; DualCameraState cameras;
zsock_t *terminate_pub; zsock_t *terminate_pub;
@ -146,6 +167,7 @@ struct VisionState {
Context * msg_context; Context * msg_context;
PubSocket *frame_sock; PubSocket *frame_sock;
PubSocket *front_frame_sock; PubSocket *front_frame_sock;
PubSocket *wide_frame_sock;
PubSocket *thumbnail_sock; PubSocket *thumbnail_sock;
pthread_mutex_t clients_lock; pthread_mutex_t clients_lock;
@ -228,6 +250,12 @@ void* frontview_thread(void *arg) {
} }
} }
#ifdef NOSCREEN
if ((frame_data.frame_id / 40) % 3 == 1) {
sendrgb(&s->cameras, (void*) s->rgb_front_bufs[rgb_idx].addr, s->rgb_front_bufs[rgb_idx].len);
}
#endif
Message *msg = monitoring_sock->receive(true); Message *msg = monitoring_sock->receive(true);
if (msg != NULL) { if (msg != NULL) {
auto amsg = kj::heapArray<capnp::word>((msg->getSize() / sizeof(capnp::word)) + 1); auto amsg = kj::heapArray<capnp::word>((msg->getSize() / sizeof(capnp::word)) + 1);
@ -372,6 +400,177 @@ void* frontview_thread(void *arg) {
return NULL; return NULL;
} }
// wide
void* wideview_thread(void *arg) {
int err;
VisionState *s = (VisionState*)arg;
set_thread_name("wideview");
err = set_realtime_priority(1);
LOG("setpriority returns %d", err);
// init cl stuff
const cl_queue_properties props[] = {0}; //CL_QUEUE_PRIORITY_KHR, CL_QUEUE_PRIORITY_HIGH_KHR, 0};
cl_command_queue q = clCreateCommandQueueWithProperties(s->context, s->device_id, props, &err);
assert(err == 0);
// init the net
LOG("wideview start!");
for (int cnt = 0; !do_exit; cnt++) {
int buf_idx = tbuffer_acquire(&s->cameras.wide.camera_tb);
// int buf_idx = camera_acquire_buffer(s);
if (buf_idx < 0) {
break;
}
double t1 = millis_since_boot();
FrameMetadata frame_data = s->cameras.wide.camera_bufs_metadata[buf_idx];
uint32_t frame_id = frame_data.frame_id;
if (frame_id == -1) {
LOGE("no frame data? wtf");
tbuffer_release(&s->cameras.wide.camera_tb, buf_idx);
continue;
}
int ui_idx = tbuffer_select(&s->ui_wide_tb);
int rgb_idx = ui_idx;
cl_event debayer_event;
if (s->cameras.wide.ci.bayer) {
err = clSetKernelArg(s->krnl_debayer_rear, 0, sizeof(cl_mem), &s->wide_camera_bufs_cl[buf_idx]); // TODO: add separate cl krl
assert(err == 0);
err = clSetKernelArg(s->krnl_debayer_rear, 1, sizeof(cl_mem), &s->rgb_wide_bufs_cl[rgb_idx]);
assert(err == 0);
err = clSetKernelArg(s->krnl_debayer_rear, 2, sizeof(float), &s->cameras.wide.digital_gain);
assert(err == 0);
const size_t debayer_work_size = s->rgb_height; // doesn't divide evenly, is this okay?
const size_t debayer_local_work_size = 128;
err = clEnqueueNDRangeKernel(q, s->krnl_debayer_rear, 1, NULL,
&debayer_work_size, &debayer_local_work_size, 0, 0, &debayer_event);
assert(err == 0);
} else {
assert(s->rgb_wide_buf_size >= s->frame_size);
assert(s->rgb_stride == s->frame_stride);
err = clEnqueueCopyBuffer(q, s->wide_camera_bufs_cl[buf_idx], s->rgb_wide_bufs_cl[rgb_idx],
0, 0, s->rgb_wide_buf_size, 0, 0, &debayer_event);
assert(err == 0);
}
clWaitForEvents(1, &debayer_event);
clReleaseEvent(debayer_event);
tbuffer_release(&s->cameras.wide.camera_tb, buf_idx);
visionbuf_sync(&s->rgb_wide_bufs[rgb_idx], VISIONBUF_SYNC_FROM_DEVICE);
#ifdef NOSCREEN
if ((frame_data.frame_id / 40) % 3 == 0) {
sendrgb(&s->cameras, (void*) s->rgb_wide_bufs[rgb_idx].addr, s->rgb_wide_bufs[rgb_idx].len);
}
#endif
double t2 = millis_since_boot();
uint8_t *bgr_ptr = (uint8_t*)s->rgb_wide_bufs[rgb_idx].addr;
double yt1 = millis_since_boot();
int yuv_idx = pool_select(&s->yuv_wide_pool);
s->yuv_wide_metas[yuv_idx] = frame_data;
uint8_t* yuv_ptr_y = s->yuv_wide_bufs[yuv_idx].y;
uint8_t* yuv_ptr_u = s->yuv_wide_bufs[yuv_idx].u;
uint8_t* yuv_ptr_v = s->yuv_wide_bufs[yuv_idx].v;
cl_mem yuv_wide_cl = s->yuv_wide_cl[yuv_idx];
rgb_to_yuv_queue(&s->wide_rgb_to_yuv_state, q, s->rgb_wide_bufs_cl[rgb_idx], yuv_wide_cl);
visionbuf_sync(&s->yuv_wide_ion[yuv_idx], VISIONBUF_SYNC_FROM_DEVICE);
double yt2 = millis_since_boot();
// keep another reference around till were done processing
pool_acquire(&s->yuv_wide_pool, yuv_idx);
pool_push(&s->yuv_wide_pool, yuv_idx);
// send frame event
{
if (s->wide_frame_sock != NULL) {
capnp::MallocMessageBuilder msg;
cereal::Event::Builder event = msg.initRoot<cereal::Event>();
event.setLogMonoTime(nanos_since_boot());
auto framed = event.initWideFrame();
framed.setFrameId(frame_data.frame_id);
framed.setEncodeId(cnt);
framed.setTimestampEof(frame_data.timestamp_eof);
framed.setFrameLength(frame_data.frame_length);
framed.setIntegLines(frame_data.integ_lines);
framed.setGlobalGain(frame_data.global_gain);
framed.setLensPos(frame_data.lens_pos);
framed.setLensSag(frame_data.lens_sag);
framed.setLensErr(frame_data.lens_err);
framed.setLensTruePos(frame_data.lens_true_pos);
framed.setGainFrac(frame_data.gain_frac);
#if !defined(QCOM) && !defined(QCOM2)
framed.setImage(kj::arrayPtr((const uint8_t*)s->yuv_ion[yuv_idx].addr, s->yuv_buf_size));
#endif
kj::ArrayPtr<const float> transform_vs(&s->yuv_transform.v[0], 9);
framed.setTransform(transform_vs);
auto words = capnp::messageToFlatArray(msg);
auto bytes = words.asBytes();
s->frame_sock->send((char*)bytes.begin(), bytes.size());
}
}
tbuffer_dispatch(&s->ui_wide_tb, ui_idx);
// auto exposure over big box
const int exposure_x = 290;
const int exposure_y = 282 + 40;
const int exposure_height = 314;
const int exposure_width = 560;
if (cnt % 3 == 0) {
// find median box luminance for AE
uint32_t lum_binning[256] = {0,};
for (int y=0; y<exposure_height; y++) {
for (int x=0; x<exposure_width; x++) {
uint8_t lum = yuv_ptr_y[((exposure_y+y)*s->yuv_wide_width) + exposure_x + x];
lum_binning[lum]++;
}
}
const unsigned int lum_total = exposure_height * exposure_width;
unsigned int lum_cur = 0;
int lum_med = 0;
for (lum_med=0; lum_med<256; lum_med++) {
// shouldn't be any values less than 16 - yuv footroom
lum_cur += lum_binning[lum_med];
if (lum_cur >= lum_total / 2) {
break;
}
}
// double avg = (double)acc / (big_box_width * big_box_height) - 16;
// printf("avg %d\n", lum_med);
camera_autoexposure(&s->cameras.wide, lum_med / 256.0);
}
pool_release(&s->yuv_wide_pool, yuv_idx);
double t5 = millis_since_boot();
LOGD("queued: %.2fms, yuv: %.2f, | processing: %.3fms", (t2-t1), (yt2-yt1), (t5-t1));
}
return NULL;
}
// processing // processing
void* processing_thread(void *arg) { void* processing_thread(void *arg) {
int err; int err;
@ -441,7 +640,9 @@ void* processing_thread(void *arg) {
visionbuf_sync(&s->rgb_bufs[rgb_idx], VISIONBUF_SYNC_FROM_DEVICE); visionbuf_sync(&s->rgb_bufs[rgb_idx], VISIONBUF_SYNC_FROM_DEVICE);
#ifdef NOSCREEN #ifdef NOSCREEN
if ((frame_data.frame_id / 40) % 3 == 2) {
sendrgb(&s->cameras, (void*) s->rgb_bufs[rgb_idx].addr, s->rgb_bufs[rgb_idx].len); sendrgb(&s->cameras, (void*) s->rgb_bufs[rgb_idx].addr, s->rgb_bufs[rgb_idx].len);
}
#endif #endif
#ifdef QCOM #ifdef QCOM
@ -1080,6 +1281,12 @@ void init_buffers(VisionState *s) {
&s->front_camera_bufs_cl[i]); &s->front_camera_bufs_cl[i]);
} }
for (int i=0; i<FRAME_BUF_COUNT; i++) {
s->wide_camera_bufs[i] = visionbuf_allocate_cl(s->cameras.wide.frame_size,
s->device_id, s->context,
&s->wide_camera_bufs_cl[i]);
}
// processing buffers // processing buffers
if (s->cameras.rear.ci.bayer) { if (s->cameras.rear.ci.bayer) {
s->rgb_width = s->frame_width/2; s->rgb_width = s->frame_width/2;
@ -1108,6 +1315,14 @@ void init_buffers(VisionState *s) {
s->rgb_front_height = s->cameras.front.ci.frame_height; s->rgb_front_height = s->cameras.front.ci.frame_height;
} }
if (s->cameras.wide.ci.bayer) {
s->rgb_wide_width = s->cameras.wide.ci.frame_width/2;
s->rgb_wide_height = s->cameras.wide.ci.frame_height/2;
} else {
s->rgb_wide_width = s->cameras.wide.ci.frame_width;
s->rgb_wide_height = s->cameras.wide.ci.frame_height;
}
for (int i=0; i<UI_BUF_COUNT; i++) { for (int i=0; i<UI_BUF_COUNT; i++) {
VisionImg img = visionimg_alloc_rgb24(s->rgb_front_width, s->rgb_front_height, &s->rgb_front_bufs[i]); VisionImg img = visionimg_alloc_rgb24(s->rgb_front_width, s->rgb_front_height, &s->rgb_front_bufs[i]);
@ -1119,6 +1334,17 @@ void init_buffers(VisionState *s) {
} }
tbuffer_init(&s->ui_front_tb, UI_BUF_COUNT, "frontrgb"); tbuffer_init(&s->ui_front_tb, UI_BUF_COUNT, "frontrgb");
for (int i=0; i<UI_BUF_COUNT; i++) {
VisionImg img = visionimg_alloc_rgb24(s->rgb_front_width, s->rgb_front_height, &s->rgb_wide_bufs[i]);
s->rgb_wide_bufs_cl[i] = visionbuf_to_cl(&s->rgb_wide_bufs[i], s->device_id, s->context);
if (i == 0){
s->rgb_front_stride = img.stride;
s->rgb_front_buf_size = img.size;
}
}
tbuffer_init(&s->ui_wide_tb, UI_BUF_COUNT, "widergb");
// yuv back for recording and orbd // yuv back for recording and orbd
pool_init(&s->yuv_pool, YUV_COUNT); pool_init(&s->yuv_pool, YUV_COUNT);
s->yuv_tb = pool_get_tbuffer(&s->yuv_pool); //only for visionserver... s->yuv_tb = pool_get_tbuffer(&s->yuv_pool); //only for visionserver...
@ -1149,6 +1375,22 @@ void init_buffers(VisionState *s) {
s->yuv_front_bufs[i].v = s->yuv_front_bufs[i].u + (s->yuv_front_width/2 * s->yuv_front_height/2); s->yuv_front_bufs[i].v = s->yuv_front_bufs[i].u + (s->yuv_front_width/2 * s->yuv_front_height/2);
} }
// yuv front for recording
pool_init(&s->yuv_wide_pool, YUV_COUNT);
s->yuv_wide_tb = pool_get_tbuffer(&s->yuv_wide_pool);
s->yuv_wide_width = s->rgb_wide_width;
s->yuv_wide_height = s->rgb_wide_height;
s->yuv_wide_buf_size = s->rgb_wide_width * s->rgb_wide_height * 3 / 2;
for (int i=0; i<YUV_COUNT; i++) {
s->yuv_wide_ion[i] = visionbuf_allocate_cl(s->yuv_front_buf_size, s->device_id, s->context, &s->yuv_wide_cl[i]);
s->yuv_wide_bufs[i].y = (uint8_t*)s->yuv_wide_ion[i].addr;
s->yuv_wide_bufs[i].u = s->yuv_wide_bufs[i].y + (s->yuv_front_width * s->yuv_front_height);
s->yuv_wide_bufs[i].v = s->yuv_wide_bufs[i].u + (s->yuv_front_width/2 * s->yuv_front_height/2);
}
if (s->cameras.rear.ci.bayer) { if (s->cameras.rear.ci.bayer) {
// debayering does a 2x downscale // debayering does a 2x downscale
s->yuv_transform = transform_scale_buffer(s->cameras.rear.transform, 0.5); s->yuv_transform = transform_scale_buffer(s->cameras.rear.transform, 0.5);
@ -1175,6 +1417,16 @@ void init_buffers(VisionState *s) {
assert(err == 0); assert(err == 0);
} }
if (s->cameras.wide.ci.bayer) {
s->prg_debayer_wide = build_debayer_program(s, s->cameras.wide.ci.frame_width, s->cameras.wide.ci.frame_height,
s->cameras.wide.ci.frame_stride,
s->rgb_wide_width, s->rgb_wide_height, s->rgb_wide_stride,
s->cameras.wide.ci.bayer_flip, s->cameras.wide.ci.hdr);
s->krnl_debayer_wide = clCreateKernel(s->prg_debayer_wide, "debayer10", &err);
assert(err == 0);
}
s->prg_rgb_laplacian = build_conv_program(s, s->rgb_width/NUM_SEGMENTS_X, s->rgb_height/NUM_SEGMENTS_Y, s->prg_rgb_laplacian = build_conv_program(s, s->rgb_width/NUM_SEGMENTS_X, s->rgb_height/NUM_SEGMENTS_Y,
3); 3);
s->krnl_rgb_laplacian = clCreateKernel(s->prg_rgb_laplacian, "rgb2gray_conv2d", &err); s->krnl_rgb_laplacian = clCreateKernel(s->prg_rgb_laplacian, "rgb2gray_conv2d", &err);
@ -1196,6 +1448,7 @@ void init_buffers(VisionState *s) {
rgb_to_yuv_init(&s->rgb_to_yuv_state, s->context, s->device_id, s->yuv_width, s->yuv_height, s->rgb_stride); rgb_to_yuv_init(&s->rgb_to_yuv_state, s->context, s->device_id, s->yuv_width, s->yuv_height, s->rgb_stride);
rgb_to_yuv_init(&s->front_rgb_to_yuv_state, s->context, s->device_id, s->yuv_front_width, s->yuv_front_height, s->rgb_front_stride); rgb_to_yuv_init(&s->front_rgb_to_yuv_state, s->context, s->device_id, s->yuv_front_width, s->yuv_front_height, s->rgb_front_stride);
rgb_to_yuv_init(&s->wide_rgb_to_yuv_state, s->context, s->device_id, s->yuv_wide_width, s->yuv_wide_height, s->rgb_wide_stride);
} }
void free_buffers(VisionState *s) { void free_buffers(VisionState *s) {
@ -1210,6 +1463,11 @@ void free_buffers(VisionState *s) {
visionbuf_free(&s->front_camera_bufs[i]); visionbuf_free(&s->front_camera_bufs[i]);
} }
for (int i=0; i<FRAME_BUF_COUNT; i++) {
visionbuf_free(&s->wide_camera_bufs[i]);
}
for (int i=0; i<UI_BUF_COUNT; i++) { for (int i=0; i<UI_BUF_COUNT; i++) {
visionbuf_free(&s->rgb_bufs[i]); visionbuf_free(&s->rgb_bufs[i]);
} }
@ -1218,6 +1476,10 @@ void free_buffers(VisionState *s) {
visionbuf_free(&s->rgb_front_bufs[i]); visionbuf_free(&s->rgb_front_bufs[i]);
} }
for (int i=0; i<UI_BUF_COUNT; i++) {
visionbuf_free(&s->rgb_wide_bufs[i]);
}
for (int i=0; i<YUV_COUNT; i++) { for (int i=0; i<YUV_COUNT; i++) {
visionbuf_free(&s->yuv_ion[i]); visionbuf_free(&s->yuv_ion[i]);
} }
@ -1239,13 +1501,16 @@ void party(VisionState *s) {
processing_thread, s); processing_thread, s);
assert(err == 0); assert(err == 0);
#ifndef QCOM2
// TODO: fix front camera on qcom2
pthread_t frontview_thread_handle; pthread_t frontview_thread_handle;
err = pthread_create(&frontview_thread_handle, NULL, err = pthread_create(&frontview_thread_handle, NULL,
frontview_thread, s); frontview_thread, s);
assert(err == 0); assert(err == 0);
#endif
pthread_t wideview_thread_handle;
err = pthread_create(&wideview_thread_handle, NULL,
wideview_thread, s);
assert(err == 0);
// priority for cameras // priority for cameras
err = set_realtime_priority(1); err = set_realtime_priority(1);
@ -1260,11 +1525,13 @@ void party(VisionState *s) {
zsock_signal(s->terminate_pub, 0); zsock_signal(s->terminate_pub, 0);
#ifndef QCOM2
LOG("joining frontview_thread"); LOG("joining frontview_thread");
err = pthread_join(frontview_thread_handle, NULL); err = pthread_join(frontview_thread_handle, NULL);
assert(err == 0); assert(err == 0);
#endif
LOG("joining wideview_thread");
err = pthread_join(wideview_thread_handle, NULL);
assert(err == 0);
LOG("joining visionserver_thread"); LOG("joining visionserver_thread");
err = pthread_join(visionserver_thread_handle, NULL); err = pthread_join(visionserver_thread_handle, NULL);
@ -1304,19 +1571,22 @@ int main(int argc, char *argv[]) {
s->msg_context = Context::create(); s->msg_context = Context::create();
s->frame_sock = PubSocket::create(s->msg_context, "frame"); s->frame_sock = PubSocket::create(s->msg_context, "frame");
s->front_frame_sock = PubSocket::create(s->msg_context, "frontFrame"); s->front_frame_sock = PubSocket::create(s->msg_context, "frontFrame");
s->wide_frame_sock = PubSocket::create(s->msg_context, "wideFrame");
s->thumbnail_sock = PubSocket::create(s->msg_context, "thumbnail"); s->thumbnail_sock = PubSocket::create(s->msg_context, "thumbnail");
assert(s->frame_sock != NULL); assert(s->frame_sock != NULL);
assert(s->front_frame_sock != NULL); assert(s->front_frame_sock != NULL);
assert(s->wide_frame_sock != NULL);
assert(s->thumbnail_sock != NULL); assert(s->thumbnail_sock != NULL);
#endif #endif
cameras_open(&s->cameras, &s->camera_bufs[0], &s->focus_bufs[0], &s->stats_bufs[0], &s->front_camera_bufs[0]); cameras_open(&s->cameras, &s->camera_bufs[0], &s->focus_bufs[0], &s->stats_bufs[0], &s->front_camera_bufs[0], &s->wide_camera_bufs[0]);
party(s); party(s);
#if defined(QCOM) || defined(QCOM2) #if defined(QCOM) || defined(QCOM2)
delete s->frame_sock; delete s->frame_sock;
delete s->front_frame_sock; delete s->front_frame_sock;
delete s->wide_frame_sock;
delete s->thumbnail_sock; delete s->thumbnail_sock;
delete s->msg_context; delete s->msg_context;
#endif #endif

File diff suppressed because it is too large Load Diff

@ -23,8 +23,10 @@ typedef enum VisionIPCPacketType {
typedef enum VisionStreamType { typedef enum VisionStreamType {
VISION_STREAM_RGB_BACK, VISION_STREAM_RGB_BACK,
VISION_STREAM_RGB_FRONT, VISION_STREAM_RGB_FRONT,
VISION_STREAM_RGB_WIDE,
VISION_STREAM_YUV, VISION_STREAM_YUV,
VISION_STREAM_YUV_FRONT, VISION_STREAM_YUV_FRONT,
VISION_STREAM_YUV_WIDE,
VISION_STREAM_MAX, VISION_STREAM_MAX,
} VisionStreamType; } VisionStreamType;
@ -35,6 +37,10 @@ typedef struct VisionUIInfo {
int front_box_x, front_box_y; int front_box_x, front_box_y;
int front_box_width, front_box_height; int front_box_width, front_box_height;
int wide_box_x, wide_box_y;
int wide_box_width, wide_box_height;
} VisionUIInfo; } VisionUIInfo;
typedef struct VisionStreamBufs { typedef struct VisionStreamBufs {

Loading…
Cancel
Save