diff --git a/selfdrive/camerad/cameras/camera_qcom2.c b/selfdrive/camerad/cameras/camera_qcom2.c
index 979219360b..97eda49f39 100644
--- a/selfdrive/camerad/cameras/camera_qcom2.c
+++ b/selfdrive/camerad/cameras/camera_qcom2.c
@@ -52,6 +52,11 @@ CameraInfo cameras_supported[CAMERA_ID_MAX] = {
   },
 };
 
+float sensor_analog_gains[ANALOG_GAIN_MAX_IDX] = {1.0/8.0, 2.0/8.0, 2.0/7.0, 3.0/7.0,
+                                                   3.0/6.0, 4.0/6.0, 4.0/5.0, 5.0/5.0,
+                                                   5.0/4.0, 6.0/4.0, 6.0/3.0, 7.0/3.0,
+                                                   7.0/2.0, 8.0/2.0, 8.0/1.0};
+
 // ************** low level camera helpers ****************
 int cam_control(int fd, int op_code, void *handle, int size) {
@@ -567,15 +572,16 @@ static void camera_init(CameraState *s, int camera_id, int camera_num, unsigned
     0.0, 1.0, 0.0,
     0.0, 0.0, 1.0,
   }};
-  // s->digital_gain = 1.0;
-  // s->digital_gain_pre = 4; // for WB
-  s->dc_opstate = 0;
+  s->dc_gain_enabled = false;
 
-  s->analog_gain_frac = 1.0;
-  s->analog_gain = 0x8;
-  s->exposure_time = 598;
+  s->analog_gain = 0x5;
+  s->analog_gain_frac = sensor_analog_gains[s->analog_gain];
+  s->exposure_time = 256;
+  s->exposure_time_max = 1.2 * EXPOSURE_TIME_MAX / 2;
+  s->exposure_time_min = 0.75 * EXPOSURE_TIME_MIN * 2;
 
   s->request_id_last = 0;
   s->skipped = true;
+  s->ef_filtered = 1.0;
 }
 
 static void camera_open(CameraState *s, VisionBuf* b) {
@@ -946,6 +952,9 @@ void handle_camera_event(CameraState *s, void *evdat) {
     s->request_id_last = real_id;
     s->camera_bufs_metadata[buf_idx].frame_id = main_id - s->idx_offset;
     s->camera_bufs_metadata[buf_idx].timestamp_eof = timestamp; // only has sof?
+    s->camera_bufs_metadata[buf_idx].global_gain = s->analog_gain + (100*s->dc_gain_enabled);
+    s->camera_bufs_metadata[buf_idx].gain_frac = s->analog_gain_frac;
+    s->camera_bufs_metadata[buf_idx].integ_lines = s->exposure_time;
 
     // dispatch
     enqueue_req_multi(s, real_id + FRAME_BUF_COUNT, 1);
@@ -1010,60 +1019,96 @@ void cameras_run(MultiCameraState *s) {
   cameras_close(s);
 }
 
-void camera_autoexposure(CameraState *s, float grey_frac) {
-  // TODO: get stats from sensor
-  const float target_grey = 0.3;
-  const float analog_gain_frac_min = 0.25;
-  float analog_gain_frac_max = s->dc_gain_enabled?8.0:2.0;
-  // const float digital_gain_min = 1.0;
-  // const float digital_gain_max = 3.99; // is the correct?
-  const int exposure_time_min = 64;
-  const int exposure_time_max = 1066; //1416; // no slower than 1/25 sec. calculated from 0x300C and clock freq
-  float exposure_factor = pow(1.05, (target_grey - grey_frac) / 0.16 );
-
-  if (s->analog_gain_frac > 1 && exposure_factor > 0.98 && exposure_factor < 1.02) { // high analog gains are coarse
-    return;
-  } else if (s->analog_gain_frac > 1 && exposure_factor > 1 && !s->dc_gain_enabled && s->dc_opstate != 1) { // switch to HCG at iso 800
+// ******************* exposure control helpers *******************
+
+void set_exposure_time_bounds(CameraState *s) {
+  switch (s->analog_gain) {
+    case 0: {
+      s->exposure_time_min = EXPOSURE_TIME_MIN;
+      s->exposure_time_max = EXPOSURE_TIME_MAX; // EXPOSURE_TIME_MIN * 4;
+      break;
+    }
+    case ANALOG_GAIN_MAX_IDX - 1: {
+      s->exposure_time_min = EXPOSURE_TIME_MIN; // EXPOSURE_TIME_MAX / 4;
+      s->exposure_time_max = EXPOSURE_TIME_MAX;
+      break;
+    }
+    default: {
+      // finetune margins on both ends
+      float k_up = sensor_analog_gains[s->analog_gain+1] / sensor_analog_gains[s->analog_gain];
+      float k_down = sensor_analog_gains[s->analog_gain-1] / sensor_analog_gains[s->analog_gain];
+      s->exposure_time_min = k_down * EXPOSURE_TIME_MIN * 2;
+      s->exposure_time_max = k_up * EXPOSURE_TIME_MAX / 2;
+    }
+  }
+}
+
+void switch_conversion_gain(CameraState *s) {
+  if (!s->dc_gain_enabled) {
     s->dc_gain_enabled = true;
-    s->analog_gain_frac *= 0.5;
-    s->dc_opstate = 1;
-  } else if (s->analog_gain_frac < 0.5 && exposure_factor < 1 && s->dc_gain_enabled && s->dc_opstate != 1) { // switch back to LCG at iso 400
-    s->dc_gain_enabled = false;
-    s->analog_gain_frac *= 2.0;
-    s->dc_opstate = 1;
-  } else if (s->analog_gain_frac > 1 && exposure_factor < 1) { // force gain down first
-    s->analog_gain_frac /= 2.0;
-    s->dc_opstate = 0;
-  } else if (s->analog_gain_frac > 0.5 && exposure_factor < 0.9) { // smoother transistion on large stepdowns
-    s->analog_gain_frac = max(min(s->analog_gain_frac * sqrt(exposure_factor), analog_gain_frac_max), analog_gain_frac_min);
-    s->exposure_time = max(min(s->exposure_time * sqrt(exposure_factor), exposure_time_max), exposure_time_min);
-    s->dc_opstate = 0;
-  } else if ((s->exposure_time < exposure_time_max || exposure_factor < 1) && (s->exposure_time > exposure_time_min || exposure_factor > 1)) { // ramp up shutter time before gain
-    s->exposure_time = max(min(s->exposure_time * exposure_factor, exposure_time_max), exposure_time_min);
-    s->dc_opstate = 0;
+    s->analog_gain -= 5;
   } else {
-    s->analog_gain_frac = max(min(s->analog_gain_frac * exposure_factor, analog_gain_frac_max), analog_gain_frac_min);
-    s->dc_opstate = 0;
+    s->dc_gain_enabled = false;
+    s->analog_gain += 4;
   }
-  // set up config
-  // gain mapping: [1/8, 2/8, 2/7, 3/7, 3/6, 4/6, 4/5, 5/5, 5/4, 6/4, 6/3, 7/3, 7/2, 8/2, 8/1, N/A] -> 0 to 15
-  uint16_t AG;
-  if (s->analog_gain_frac > 4) {
-    s->analog_gain_frac = 8.0;
-    AG = 0xEEEE;
-    // printf("cam %d gain_frac is %f, set AG to 0x%X, S to %d, dc %d \n", s->camera_num, s->analog_gain_frac, AG, s->exposure_time, s->dc_gain_enabled);
-  } else {
-    AG = -(1.147 * s->analog_gain_frac * s->analog_gain_frac) + (7.67 * s->analog_gain_frac) - 0.1;
-    if (AG - s->analog_gain == -1) {AG = s->analog_gain;}
-    s->analog_gain = AG;
-    AG = AG * 4096 + AG * 256 + AG * 16 + AG;
-    // printf("cam %d gain_frac is %f, set AG to 0x%X, S to %d, dc %d \n", s->camera_num, s->analog_gain_frac, AG, s->exposure_time, s->dc_gain_enabled);
+}
+
+void camera_autoexposure(CameraState *s, float grey_frac) {
+  // TODO: get stats from sensor?
+  float target_grey = 0.3 - (s->analog_gain / 105.0);
+  float exposure_factor = 1 + 30 * pow((target_grey - grey_frac), 3);
+
+  if (s->camera_num != 1) {
+    s->ef_filtered = (1 - EF_LOWPASS_K) * s->ef_filtered + EF_LOWPASS_K * exposure_factor;
+    exposure_factor = s->ef_filtered;
+  }
+
+  // always prioritize exposure time adjust
+  s->exposure_time *= exposure_factor;
+
+  // switch gain if max/min exposure time is reached
+  // or always switch down to a lower gain when possible
+  bool kd = false;
+  if (s->analog_gain > 0) {
+    kd = 1.1 * s->exposure_time / (sensor_analog_gains[s->analog_gain-1] / sensor_analog_gains[s->analog_gain]) < EXPOSURE_TIME_MAX / 2;
+  }
+
+  if (s->exposure_time > s->exposure_time_max) {
+    if (s->analog_gain < ANALOG_GAIN_MAX_IDX - 1) {
+      s->exposure_time = EXPOSURE_TIME_MAX / 2;
+      s->analog_gain += 1;
+      if (!s->dc_gain_enabled && sensor_analog_gains[s->analog_gain] == 1.0) { // switch to HCG at iso 800
+        switch_conversion_gain(s);
+      }
+      set_exposure_time_bounds(s);
+    } else {
+      s->exposure_time = s->exposure_time_max;
+    }
+  } else if (s->exposure_time < s->exposure_time_min || kd) {
+    if (s->analog_gain > 0) {
+      s->exposure_time = max(EXPOSURE_TIME_MIN * 2, s->exposure_time / (sensor_analog_gains[s->analog_gain-1] / sensor_analog_gains[s->analog_gain]));
+      s->analog_gain -= 1;
+      if (s->dc_gain_enabled && sensor_analog_gains[s->analog_gain] == 0.25) { // switch back to LCG at iso 200
+        switch_conversion_gain(s);
+      }
+      set_exposure_time_bounds(s);
+    } else {
+      s->exposure_time = s->exposure_time_min;
+    }
   }
 
+  // set up config
+  uint16_t AG = s->analog_gain;
+  AG = AG * 4096 + AG * 256 + AG * 16 + AG;
+  s->analog_gain_frac = sensor_analog_gains[s->analog_gain];
+
+  // printf("cam %d, min %d, max %d \n", s->camera_num, s->exposure_time_min, s->exposure_time_max);
+  // printf("cam %d, set AG to 0x%X, S to %d, dc %d \n", s->camera_num, AG, s->exposure_time, s->dc_gain_enabled);
+
   struct i2c_random_wr_payload exp_reg_array[] = {{0x3366, AG}, // analog gain
                                                   {0x3362, s->dc_gain_enabled?0x1:0x0}, // DC_GAIN
-                                                  {0x305A, 0x00D1}, // red gain
-                                                  {0x3058, 0x0118}, // blue gain
+                                                  {0x305A, 0x00D8}, // red gain
+                                                  {0x3058, 0x011B}, // blue gain
                                                   {0x3056, 0x009A}, // g1 gain
                                                   {0x305C, 0x009A}, // g2 gain
                                                   {0x3012, s->exposure_time}}; // integ time
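
Note (illustration only, not part of the patch): the reworked camera_autoexposure() replaces the exponential response and the dc_opstate bookkeeping with a cubic error curve, lowers target_grey as the gain index rises, and low-passes the factor for the non-front cameras before scaling exposure_time. The standalone C sketch below uses the constants from this diff; the sample grey_frac values and the loop are made up for illustration.

// Sketch of the new grey_frac -> exposure_factor response.
#include <stdio.h>
#include <math.h>

#define EF_LOWPASS_K 0.35f

int main(void) {
  float ef_filtered = 1.0f;   // s->ef_filtered starts at 1.0 in camera_init()
  int analog_gain = 5;        // current gain index, as initialized in the diff
  float target_grey = 0.3f - (analog_gain / 105.0f);

  const float grey_samples[] = {0.10f, 0.20f, 0.25f, 0.30f, 0.40f};
  for (int i = 0; i < 5; i++) {
    float grey_frac = grey_samples[i];
    // cubic error term: small errors barely move the exposure
    float exposure_factor = 1 + 30 * powf(target_grey - grey_frac, 3);
    // wide/rear path (s->camera_num != 1): smooth the factor before applying it
    ef_filtered = (1 - EF_LOWPASS_K) * ef_filtered + EF_LOWPASS_K * exposure_factor;
    printf("grey %.2f -> factor %.3f (filtered %.3f)\n", grey_frac, exposure_factor, ef_filtered);
  }
  return 0;
}

Because the error is cubed, factors near target_grey stay close to 1.0, which is what lets the loop settle without the old explicit dead-band cases.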
diff --git a/selfdrive/camerad/cameras/camera_qcom2.h b/selfdrive/camerad/cameras/camera_qcom2.h
index 295c9a925c..7401bba4be 100644
--- a/selfdrive/camerad/cameras/camera_qcom2.h
+++ b/selfdrive/camerad/cameras/camera_qcom2.h
@@ -16,24 +16,32 @@
 
 #define FRAME_BUF_COUNT 4
 
+#define ANALOG_GAIN_MAX_IDX 15 // 0xF is bypass
+#define EXPOSURE_TIME_MIN 8 // min time limited by HDR exp factor
+#define EXPOSURE_TIME_MAX 1132 // with HDR, no slower than 1/25 sec (1416 lines)
+
+#define HLC_THRESH 240
+#define HLC_A 80
+
+#define EF_LOWPASS_K 0.35
+
 #ifdef __cplusplus
 extern "C" {
 #endif
 
-
 typedef struct CameraState {
   CameraInfo ci;
   FrameMetadata camera_bufs_metadata[FRAME_BUF_COUNT];
   TBuffer camera_tb;
 
   int frame_size;
 
-  //float digital_gain;
-  //int digital_gain_pre;
   float analog_gain_frac;
   uint16_t analog_gain;
-  uint8_t dc_opstate;
   bool dc_gain_enabled;
   int exposure_time;
+  int exposure_time_min;
+  int exposure_time_max;
+  float ef_filtered;
 
   mat3 transform;
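
Note (illustration only, not part of the patch): the new defines bound the sensor integration time per analog gain step, and set_exposure_time_bounds() widens each window by the ratio to the neighbouring gain so the controller has some hysteresis between steps. The standalone sketch below tabulates those windows with the values from this diff; the table-printing program itself is made up for illustration.

// Sketch of the per-gain exposure-time windows set_exposure_time_bounds() would pick.
#include <stdio.h>

#define ANALOG_GAIN_MAX_IDX 15
#define EXPOSURE_TIME_MIN 8
#define EXPOSURE_TIME_MAX 1132

static const float sensor_analog_gains[ANALOG_GAIN_MAX_IDX] = {
  1.0/8.0, 2.0/8.0, 2.0/7.0, 3.0/7.0, 3.0/6.0, 4.0/6.0, 4.0/5.0, 5.0/5.0,
  5.0/4.0, 6.0/4.0, 6.0/3.0, 7.0/3.0, 7.0/2.0, 8.0/2.0, 8.0/1.0};

int main(void) {
  for (int g = 0; g < ANALOG_GAIN_MAX_IDX; g++) {
    int t_min, t_max;
    if (g == 0 || g == ANALOG_GAIN_MAX_IDX - 1) {
      // lowest and highest gains keep the full range
      t_min = EXPOSURE_TIME_MIN;
      t_max = EXPOSURE_TIME_MAX;
    } else {
      // finetune margins on both ends, as in the diff
      float k_up = sensor_analog_gains[g+1] / sensor_analog_gains[g];
      float k_down = sensor_analog_gains[g-1] / sensor_analog_gains[g];
      t_min = (int)(k_down * EXPOSURE_TIME_MIN * 2);
      t_max = (int)(k_up * EXPOSURE_TIME_MAX / 2);
    }
    printf("gain idx %2d (%.3fx): exposure_time in [%d, %d]\n",
           g, sensor_analog_gains[g], t_min, t_max);
  }
  return 0;
}

For the unity-gain index 7 (5/5), for example, this works out to roughly [12, 707] lines.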
diff --git a/selfdrive/camerad/cameras/real_debayer.cl b/selfdrive/camerad/cameras/real_debayer.cl
index 97631e52f9..1781a6d466 100644
--- a/selfdrive/camerad/cameras/real_debayer.cl
+++ b/selfdrive/camerad/cameras/real_debayer.cl
@@ -26,10 +26,10 @@ uint int_from_10(const uchar * source, uint start, uint offset) {
 
 float to_normal(uint x) {
   float pv = (float)(x);
-  const float black_level = 0;
+  const float black_level = 42.0;
   pv = max(0.0, pv - black_level);
   pv /= (1024.0f - black_level);
-  pv = 5*pv / (1.0f + 5*pv); // reinhard
+  pv = 20*pv / (1.0f + 20*pv); // reinhard
   return pv;
 }
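
Note (illustration only, not part of the patch): real_debayer.cl now subtracts a black level of 42 from the 10-bit raw value before normalizing and applies a stronger Reinhard curve (gain 20 instead of 5). The OpenCL kernel does this per pixel; the plain-C sketch below just evaluates both versions of the mapping for a few raw codes.

// Sketch comparing the old and new to_normal() mappings.
#include <stdio.h>

static float to_normal(float x, float black_level, float k) {
  float pv = x - black_level;
  if (pv < 0.0f) pv = 0.0f;          // clamp at the black level
  pv /= (1024.0f - black_level);     // normalize the 10-bit range
  return k * pv / (1.0f + k * pv);   // reinhard tone curve
}

int main(void) {
  const int raw[] = {42, 64, 128, 256, 512, 1023};
  for (int i = 0; i < 6; i++) {
    printf("raw %4d: old %.3f  new %.3f\n", raw[i],
           to_normal((float)raw[i], 0.0f, 5.0f),     // old: no black level, k = 5
           to_normal((float)raw[i], 42.0f, 20.0f));  // new: black level 42, k = 20
  }
  return 0;
}

The new curve maps everything at or below the black level to 0 and gives the shadows and midtones a much larger share of the output range.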
diff --git a/selfdrive/camerad/cameras/sensor2_i2c.h b/selfdrive/camerad/cameras/sensor2_i2c.h
index 9a1cbcaed6..73303d9a90 100644
--- a/selfdrive/camerad/cameras/sensor2_i2c.h
+++ b/selfdrive/camerad/cameras/sensor2_i2c.h
@@ -341,7 +341,7 @@ struct i2c_random_wr_payload init_array_ar0231[] = {
   // HDR Settings
   {0x3082, 0x0004}, // OPERATION_MODE_CTRL
-  {0x3238, 0x0444}, // EXPOSURE_RATIO
+  {0x3238, 0x0222}, // EXPOSURE_RATIO
   {0x3014, 0x098E}, // FINE_INTEGRATION_TIME_
   {0x321E, 0x098E}, // FINE_INTEGRATION_TIME2
   {0x3222, 0x098E}, // FINE_INTEGRATION_TIME3
@@ -357,10 +357,10 @@ struct i2c_random_wr_payload init_array_ar0231[] = {
   // Initial Gains
   {0x3022, 0x01}, // GROUPED_PARAMETER_HOLD_
-  {0x3366, 0x7777}, // ANALOG_GAIN
+  {0x3366, 0x5555}, // ANALOG_GAIN
   {0x3060, 0xBBBB}, // ANALOG_COLOR_GAIN
-  {0x305A, 0x00D1}, // RED_GAIN
-  {0x3058, 0x0118}, // BLUE_GAIN
+  {0x305A, 0x00D8}, // RED_GAIN
+  {0x3058, 0x011B}, // BLUE_GAIN
   {0x3056, 0x009A}, // GREEN1_GAIN
   {0x305C, 0x009A}, // GREEN2_GAIN
   {0x3022, 0x00}, // GROUPED_PARAMETER_HOLD_
diff --git a/selfdrive/camerad/main.cc b/selfdrive/camerad/main.cc
index c28d6edf55..c75ec29e93 100644
--- a/selfdrive/camerad/main.cc
+++ b/selfdrive/camerad/main.cc
@@ -298,11 +298,11 @@ void* frontview_thread(void *arg) {
       x_end = s->rhd_front ? s->rgb_front_width * 2 / 5:s->rgb_front_width;
     }
 #ifdef QCOM2
-    x_start = 0.15*s->rgb_front_width;
-    x_end = 0.85*s->rgb_front_width;
-    y_start = 0.5*s->rgb_front_height;
-    y_end = 0.75*s->rgb_front_height;
-    skip = 2;
+    x_start = 96;
+    x_end = 1832;
+    y_start = 242;
+    y_end = 1148;
+    skip = 4;
 #endif
     uint32_t lum_binning[256] = {0,};
     for (int y = y_start; y < y_end; y += skip) {
@@ -322,12 +322,19 @@ void* frontview_thread(void *arg) {
     const unsigned int lum_total = (y_end - y_start) * (x_end - x_start) / 2 / skip;
     unsigned int lum_cur = 0;
     int lum_med = 0;
-    for (lum_med=0; lum_med<256; lum_med++) {
+    int lum_med_alt = 0;
+    for (lum_med=255; lum_med>=0; lum_med--) {
       lum_cur += lum_binning[lum_med];
+#ifdef QCOM2
+      if (lum_cur > lum_total / HLC_A && lum_med > HLC_THRESH) {
+        lum_med_alt = 86;
+      }
+#endif
       if (lum_cur >= lum_total / 2) {
         break;
       }
     }
+    lum_med = lum_med_alt>lum_med?lum_med_alt:lum_med;
     camera_autoexposure(&s->cameras.front, lum_med / 256.0);
   }
@@ -497,10 +504,10 @@ void* wideview_thread(void *arg) {
 
       // auto exposure over big box
       // TODO: fix this? should not use med imo
-      const int exposure_x = 384;
-      const int exposure_y = 300;
-      const int exposure_height = 400;
-      const int exposure_width = 1152;
+      const int exposure_x = 96;
+      const int exposure_y = 250;
+      const int exposure_height = 524;
+      const int exposure_width = 1734;
       if (cnt % 3 == 0) {
         // find median box luminance for AE
         uint32_t lum_binning[256] = {0,};
@@ -513,14 +520,21 @@ void* wideview_thread(void *arg) {
         const unsigned int lum_total = exposure_height * exposure_width / 4;
         unsigned int lum_cur = 0;
         int lum_med = 0;
-        for (lum_med=0; lum_med<256; lum_med++) {
+        int lum_med_alt = 0;
+        for (lum_med=255; lum_med>=0; lum_med--) {
           // shouldn't be any values less than 16 - yuv footroom
           lum_cur += lum_binning[lum_med];
+#ifdef QCOM2
+          if (lum_cur > 2*lum_total / (3*HLC_A) && lum_med > HLC_THRESH) {
+            lum_med_alt = 86;
+          }
+#endif
           if (lum_cur >= lum_total / 2) {
             break;
           }
         }
+        lum_med = lum_med_alt>lum_med?lum_med_alt:lum_med;
         camera_autoexposure(&s->cameras.wide, lum_med / 256.0);
       }
@@ -841,10 +855,10 @@ void* processing_thread(void *arg) {
 
       // auto exposure over big box
 #ifdef QCOM2
-      const int exposure_x = 384;
-      const int exposure_y = 300;
-      const int exposure_height = 400;
-      const int exposure_width = 1152;
+      const int exposure_x = 96;
+      const int exposure_y = 160;
+      const int exposure_height = 986;
+      const int exposure_width = 1734;
       const int skip = 2;
 #else
       const int exposure_x = 290;
@@ -865,14 +879,21 @@ void* processing_thread(void *arg) {
        const unsigned int lum_total = exposure_height * exposure_width / skip / skip;
        unsigned int lum_cur = 0;
        int lum_med = 0;
-       for (lum_med=0; lum_med<256; lum_med++) {
+       int lum_med_alt = 0;
+       for (lum_med=255; lum_med>=0; lum_med--) {
          // shouldn't be any values less than 16 - yuv footroom
          lum_cur += lum_binning[lum_med];
+#ifdef QCOM2
+         if (lum_cur > lum_total / HLC_A && lum_med > HLC_THRESH) {
+           lum_med_alt = 86;
+         }
+#endif
          if (lum_cur >= lum_total / 2) {
            break;
          }
        }
+       lum_med = lum_med_alt>lum_med?lum_med_alt:lum_med;
        camera_autoexposure(&s->cameras.rear, lum_med / 256.0);
      }
@@ -1589,8 +1610,10 @@ void party(VisionState *s) {
 
 int main(int argc, char *argv[]) {
   set_realtime_priority(51);
-#ifdef QCOM
+#if defined(QCOM)
   set_core_affinity(2);
+#elif defined(QCOM2)
+  set_core_affinity(6);
 #endif
   zsys_handler_set(NULL);
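
Note (illustration only, not part of the patch): each AE loop in main.cc histograms luma over a fixed box and passes the median to camera_autoexposure(); on QCOM2 the loop now scans from the bright end and, when more than about 1/HLC_A of the sampled pixels sit above HLC_THRESH, reports at least 86/256 so clipped highlights pull the exposure down (the wide camera uses a slightly looser 2/(3*HLC_A) cutoff). The standalone C sketch below mirrors that logic; the helper name ae_grey_frac and the toy histogram are made up for illustration.

// Sketch of the highlight-compensated median used by the QCOM2 AE loops.
#include <stdint.h>
#include <stdio.h>

#define HLC_THRESH 240
#define HLC_A 80

static float ae_grey_frac(const uint32_t lum_binning[256], unsigned int lum_total) {
  unsigned int lum_cur = 0;
  int lum_med = 0;
  int lum_med_alt = 0;
  for (lum_med = 255; lum_med >= 0; lum_med--) {
    lum_cur += lum_binning[lum_med];
    // highlight compensation: a small clipped fraction is enough to force AE down
    if (lum_cur > lum_total / HLC_A && lum_med > HLC_THRESH) {
      lum_med_alt = 86;
    }
    if (lum_cur >= lum_total / 2) {
      break;
    }
  }
  lum_med = lum_med_alt > lum_med ? lum_med_alt : lum_med;
  return lum_med / 256.0f;  // passed to camera_autoexposure() as grey_frac
}

int main(void) {
  // toy histogram: a mostly dark scene with a small clipped bright patch
  uint32_t hist[256] = {0};
  hist[40] = 9000;   // bulk of the pixels
  hist[250] = 200;   // blown highlights (> HLC_THRESH)
  printf("grey_frac = %.3f\n", ae_grey_frac(hist, 9200));
  return 0;
}

With this toy histogram the plain median would be 40/256 (about 0.16) and AE would brighten further; the highlight override reports 86/256 (about 0.34) instead, so the exposure comes down.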
diff --git a/selfdrive/camerad/test/tici_zclient/livergb.py b/selfdrive/camerad/test/tici_zclient/livergb.py
index 13ed0b5e94..09b79b8a10 100755
--- a/selfdrive/camerad/test/tici_zclient/livergb.py
+++ b/selfdrive/camerad/test/tici_zclient/livergb.py
@@ -35,6 +35,7 @@ if __name__ == '__main__':
 
     hist = cv2.calcHist([cv2.cvtColor(cam_bufs[cam_id], cv2.COLOR_BGR2GRAY)],[0],None,[32],[0,256])
     hist = (H*hist/hist.max()).astype(np.uint8)
+    hist_bufs[cam_id] = 0
     for i,bb in enumerate(hist):
       hist_bufs[cam_id, H-bb[0]:,i*(200//32):(i+1)*(200//32), :] = (222,222,222)
diff --git a/selfdrive/modeld/modeld.cc b/selfdrive/modeld/modeld.cc
index 0f1c22c342..e2dcf69cb4 100644
--- a/selfdrive/modeld/modeld.cc
+++ b/selfdrive/modeld/modeld.cc
@@ -43,20 +43,21 @@ void* live_thread(void *arg) {
                      910.0, 0.0, 582.0,
                      0.0, 910.0, 437.0,
                      0.0, 0.0, 1.0;
+  float db_s = 0.5; // debayering does a 2x downscale
 #else
   Eigen::Matrix<float, 3, 3> eon_intrinsics; eon_intrinsics <<
                      2648.0, 0.0, 1928.0/2,
                      0.0, 2648.0, 1208.0/2,
                      0.0, 0.0, 1.0;
+  float db_s = 1.0;
 #endif
 
-  // debayering does a 2x downscale
   mat3 yuv_transform = transform_scale_buffer((mat3){{
     1.0, 0.0, 0.0,
     0.0, 1.0, 0.0,
     0.0, 0.0, 1.0,
-  }}, 0.5);
+  }}, db_s);
 
   while (!do_exit) {
     if (sm.update(10) > 0){
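
Note (illustration only, not part of the patch): the hard-coded 0.5 passed to transform_scale_buffer() becomes db_s, so only the build where debayering halves the frame scales the yuv transform. The sketch below shows the general relation being applied, composing a transform with diag(s, s, 1); the real transform_scale_buffer() helper may also correct for pixel centers, so treat this as the idea rather than its exact implementation. The matmul3 and scale_transform helpers here are written out only for this sketch.

// Sketch of scaling a frame transform by the debayer downscale factor.
#include <stdio.h>

typedef struct { float v[9]; } mat3;

static mat3 matmul3(mat3 a, mat3 b) {
  mat3 out = {{0}};
  for (int r = 0; r < 3; r++)
    for (int c = 0; c < 3; c++)
      for (int k = 0; k < 3; k++)
        out.v[3*r + c] += a.v[3*r + k] * b.v[3*k + c];
  return out;
}

static mat3 scale_transform(mat3 in, float s) {
  // compose with diag(s, s, 1): pixel coordinates shrink by s, the projective row is untouched
  mat3 scale = {{s, 0.0f, 0.0f,
                 0.0f, s, 0.0f,
                 0.0f, 0.0f, 1.0f}};
  return matmul3(scale, in);
}

int main(void) {
  mat3 identity = {{1.0f, 0.0f, 0.0f,
                    0.0f, 1.0f, 0.0f,
                    0.0f, 0.0f, 1.0f}};
  float db_s = 0.5f;  // branch where the debayered buffer is half resolution
  mat3 yuv_transform = scale_transform(identity, db_s);
  printf("scaled transform diagonal: %.1f %.1f %.1f\n",
         yuv_transform.v[0], yuv_transform.v[4], yuv_transform.v[8]);
  return 0;
}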