recover EON/C2 AF (#1665)

pull/1772/head
ZwX1616 5 years ago committed by GitHub
parent 59e77423c4
commit 3aa99a01d7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 139
      selfdrive/camerad/cameras/camera_qcom.cc
  2. 15
      selfdrive/camerad/cameras/camera_qcom.h
  3. 6
      selfdrive/camerad/imgproc/utils.h
  4. 27
      selfdrive/camerad/main.cc
  5. 5
      selfdrive/controls/controlsd.py
  6. 10
      selfdrive/controls/lib/events.py
  7. 4
      selfdrive/test/longitudinal_maneuvers/plant.py
  8. 2
      selfdrive/test/process_replay/process_replay.py

@ -123,6 +123,8 @@ static void camera_init(CameraState *s, int camera_id, int camera_num,
s->max_gain = max_gain;
s->fps = fps;
s->self_recover = 0;
zsock_t *ops_sock = zsock_new_push(">inproc://cameraops");
assert(ops_sock);
s->ops_sock = zsock_resolve(ops_sock);
@ -1745,8 +1747,13 @@ static void parse_autofocus(CameraState *s, uint8_t *d) {
avg_focus += s->focus[i];
}
}
// self recover override
if (s->self_recover > 1) {
s->focus_err = 200 * ((s->self_recover % 2 == 0) ? 1:-1); // far for even numbers, close for odd
s->self_recover -= 2;
return;
}
//printf("\n");
if (good_count < 4) {
s->focus_err = nan("");
return;
@ -1770,8 +1777,8 @@ static void do_autofocus(CameraState *s) {
float err = s->focus_err;
float sag = (s->last_sag_acc_z/9.8) * 128;
const int dac_up = s->device == DEVICE_LP3? 634:456;
const int dac_down = s->device == DEVICE_LP3? 366:224;
const int dac_up = s->device == DEVICE_LP3? LP3_AF_DAC_UP:OP3T_AF_DAC_UP;
const int dac_down = s->device == DEVICE_LP3? LP3_AF_DAC_DOWN:OP3T_AF_DAC_DOWN;
if (!isnan(err)) {
// learn lens_true_pos
@ -1980,43 +1987,6 @@ static FrameMetadata get_frame_metadata(CameraState *s, uint32_t frame_id) {
};
}
// Receive one message from the raw ZMQ sensor socket and, if it is a
// sensorEvents packet that contains an acceleration reading, copy the
// 3-axis acceleration vector into vs[0..2].
// Returns true iff vs was filled; vs is left untouched on false.
// NOTE(review): zmq_msg_recv is called without ZMQ_DONTWAIT, so this
// blocks until a message arrives — presumably only called after a poller
// reports the socket readable; confirm against the caller.
static bool acceleration_from_sensor_sock(void *sock, float *vs) {
int err;
bool ret = false;
zmq_msg_t msg;
err = zmq_msg_init(&msg);
assert(err == 0);
err = zmq_msg_recv(&msg, sock, 0);
assert(err >= 0);
void *data = zmq_msg_data(&msg);
size_t size = zmq_msg_size(&msg);
// capnp needs word-aligned input; copy into a heap word array sized one
// word larger so a partially-filled trailing word is safe to read.
auto amsg = kj::heapArray<capnp::word>(size / sizeof(capnp::word) + 1);
memcpy(amsg.begin(), data, size);
capnp::FlatArrayMessageReader cmsg(amsg);
auto event = cmsg.getRoot<cereal::Event>();
if (event.which() == cereal::Event::SENSOR_EVENTS) {
auto sensor_events = event.getSensorEvents();
for (auto sensor_event : sensor_events) {
if (sensor_event.which() == cereal::SensorEventData::ACCELERATION) {
auto v = sensor_event.getAcceleration().getV();
if (v.size() < 3) {
continue; // malformed acceleration vector; skip it
}
// copy x/y/z components out of the capnp list
for (int j = 0; j < 3; j++) {
vs[j] = v[j];
}
ret = true;
break; // only the first complete acceleration event is used
}
}
}
zmq_msg_close(&msg); // always release the message, filled or not
return ret;
}
static void ops_term() {
zsock_t *ops_sock = zsock_new_push(">inproc://cameraops");
assert(ops_sock);
@ -2036,66 +2006,81 @@ static void* ops_thread(void* arg) {
zsock_t *cameraops = zsock_new_pull("@inproc://cameraops");
assert(cameraops);
zsock_t *sensor_sock = zsock_new_sub(">tcp://127.0.0.1:8003", "");
assert(sensor_sock);
zsock_t *terminate = zsock_new_sub(">inproc://terminate", "");
assert(terminate);
zpoller_t *poller = zpoller_new(cameraops, sensor_sock, terminate, NULL);
zpoller_t *poller = zpoller_new(cameraops, terminate, NULL);
assert(poller);
while (!do_exit) {
SubMaster sm({"sensorEvents"}); // msgq submaster
while (!do_exit) {
// zmq handling
zsock_t *which = (zsock_t*)zpoller_wait(poller, -1);
if (which == terminate || which == NULL) {
if (which == terminate) {
break;
}
void* sockraw = zsock_resolve(which);
if (which == cameraops) {
zmq_msg_t msg;
err = zmq_msg_init(&msg);
assert(err == 0);
err = zmq_msg_recv(&msg, sockraw, 0);
assert(err >= 0);
CameraMsg cmsg;
if (zmq_msg_size(&msg) == sizeof(cmsg)) {
memcpy(&cmsg, zmq_msg_data(&msg), zmq_msg_size(&msg));
//LOGD("cameraops %d", cmsg.type);
} else if (which != NULL) {
void* sockraw = zsock_resolve(which);
if (which == cameraops) {
zmq_msg_t msg;
err = zmq_msg_init(&msg);
assert(err == 0);
err = zmq_msg_recv(&msg, sockraw, 0);
assert(err >= 0);
CameraMsg cmsg;
if (zmq_msg_size(&msg) == sizeof(cmsg)) {
memcpy(&cmsg, zmq_msg_data(&msg), zmq_msg_size(&msg));
//LOGD("cameraops %d", cmsg.type);
if (cmsg.type == CAMERA_MSG_AUTOEXPOSE) {
if (cmsg.camera_num == 0) {
do_autoexposure(&s->rear, cmsg.grey_frac);
do_autofocus(&s->rear);
} else {
do_autoexposure(&s->front, cmsg.grey_frac);
}
} else if (cmsg.type == -1) {
break;
}
}
if (cmsg.type == CAMERA_MSG_AUTOEXPOSE) {
if (cmsg.camera_num == 0) {
do_autoexposure(&s->rear, cmsg.grey_frac);
do_autofocus(&s->rear);
} else {
do_autoexposure(&s->front, cmsg.grey_frac);
zmq_msg_close(&msg);
}
}
// msgq handling
if (sm.update(0) > 0) {
float vals[3] = {0.0};
bool got_accel = false;
auto sensor_events = sm["sensorEvents"].getSensorEvents();
for (auto sensor_event : sensor_events) {
if (sensor_event.which() == cereal::SensorEventData::ACCELERATION) {
auto v = sensor_event.getAcceleration().getV();
if (v.size() < 3) {
continue; //wtf
}
} else if (cmsg.type == -1) {
for (int j = 0; j < 3; j++) {
vals[j] = v[j];
}
got_accel = true;
break;
}
}
zmq_msg_close(&msg);
} else if (which == sensor_sock) {
float vs[3] = {0.0};
bool got_accel = acceleration_from_sensor_sock(sockraw, vs);
uint64_t ts = nanos_since_boot();
if (got_accel && ts - s->rear.last_sag_ts > 10000000) { // 10 ms
s->rear.last_sag_ts = ts;
s->rear.last_sag_acc_z = -vs[2];
s->rear.last_sag_acc_z = -vals[2];
}
}
}
zpoller_destroy(&poller);
zsock_destroy(&cameraops);
zsock_destroy(&sensor_sock);
zsock_destroy(&terminate);
return NULL;

@ -4,6 +4,7 @@
#include <stdint.h>
#include <stdbool.h>
#include <pthread.h>
#include "messaging.hpp"
#include "msmb_isp.h"
#include "msmb_ispif.h"
@ -25,6 +26,18 @@
#define NUM_FOCUS 8
#define LP3_AF_DAC_DOWN 366
#define LP3_AF_DAC_UP 634
#define LP3_AF_DAC_M 440
#define LP3_AF_DAC_3SIG 52
#define OP3T_AF_DAC_DOWN 224
#define OP3T_AF_DAC_UP 456
#define OP3T_AF_DAC_M 300
#define OP3T_AF_DAC_3SIG 96
#define FOCUS_RECOVER_PATIENCE 50 // 2.5 seconds of complete blur
#define FOCUS_RECOVER_STEPS 240 // 6 seconds
#ifdef __cplusplus
extern "C" {
#endif
@ -100,6 +113,8 @@ typedef struct CameraState {
float last_sag_acc_z;
float lens_true_pos;
int self_recover; // af recovery counter, neg is patience, pos is active
int fps;
mat3 transform;

@ -11,8 +11,8 @@
#define ROI_Y_MIN 2
#define ROI_Y_MAX 3
#define LM_THRESH 222
#define LM_PREC_THRESH 0.9
#define LM_THRESH 120
#define LM_PREC_THRESH 0.9 // 90 perc is blur
// only apply to QCOM
#define FULL_STRIDE_X 1280
@ -27,4 +27,4 @@ const int16_t lapl_conv_krnl[9] = {0, 1, 0,
void get_lapmap_one(int16_t *lap, uint16_t *res, int x_pitch, int y_pitch);
bool is_blur(uint16_t *lapmap);
#endif
#endif

@ -475,6 +475,32 @@ void* processing_thread(void *arg) {
/*t11 = millis_since_boot();
printf("process time: %f ms\n ----- \n", t11 - t10);
t10 = millis_since_boot();*/
// setup self recover
if (is_blur(&s->lapres[0]) &&
(s->cameras.rear.lens_true_pos < (s->cameras.device == DEVICE_LP3? LP3_AF_DAC_DOWN:OP3T_AF_DAC_DOWN)+1 ||
s->cameras.rear.lens_true_pos > (s->cameras.device == DEVICE_LP3? LP3_AF_DAC_UP:OP3T_AF_DAC_UP)-1) &&
s->cameras.rear.self_recover < 2) {
// truly stuck, needs help
s->cameras.rear.self_recover -= 1;
if (s->cameras.rear.self_recover < -FOCUS_RECOVER_PATIENCE) {
LOGW("rear camera bad state detected. attempting recovery from %.1f, recover state is %d",
s->cameras.rear.lens_true_pos, s->cameras.rear.self_recover);
s->cameras.rear.self_recover = FOCUS_RECOVER_STEPS + ((s->cameras.rear.lens_true_pos < (s->cameras.device == DEVICE_LP3? LP3_AF_DAC_M:OP3T_AF_DAC_M))?1:0); // parity determined by which end is stuck at
}
} else if ((s->cameras.rear.lens_true_pos < (s->cameras.device == DEVICE_LP3? LP3_AF_DAC_M - LP3_AF_DAC_3SIG:OP3T_AF_DAC_M - OP3T_AF_DAC_3SIG) ||
s->cameras.rear.lens_true_pos > (s->cameras.device == DEVICE_LP3? LP3_AF_DAC_M + LP3_AF_DAC_3SIG:OP3T_AF_DAC_M + OP3T_AF_DAC_3SIG)) &&
s->cameras.rear.self_recover < 2) {
// in suboptimal position with high prob, but may still recover by itself
s->cameras.rear.self_recover -= 1;
if (s->cameras.rear.self_recover < -(FOCUS_RECOVER_PATIENCE*3)) {
LOGW("rear camera bad state detected. attempting recovery from %.1f, recover state is %d", s->cameras.rear.lens_true_pos, s->cameras.rear.self_recover);
s->cameras.rear.self_recover = FOCUS_RECOVER_STEPS/2 + ((s->cameras.rear.lens_true_pos < (s->cameras.device == DEVICE_LP3? LP3_AF_DAC_M:OP3T_AF_DAC_M))?1:0);
}
} else if (s->cameras.rear.self_recover < 0) {
s->cameras.rear.self_recover += 1; // reset if fine
}
#endif
double t2 = millis_since_boot();
@ -527,6 +553,7 @@ void* processing_thread(void *arg) {
framed.setFocusConf(focus_confs);
kj::ArrayPtr<const uint16_t> sharpness_score(&s->lapres[0], (ROI_X_MAX-ROI_X_MIN+1)*(ROI_Y_MAX-ROI_Y_MIN+1));
framed.setSharpnessScore(sharpness_score);
framed.setRecoverState(s->cameras.rear.self_recover);
#endif
// TODO: add this back

@ -52,7 +52,7 @@ class Controls:
self.sm = sm
if self.sm is None:
self.sm = messaging.SubMaster(['thermal', 'health', 'model', 'liveCalibration',
self.sm = messaging.SubMaster(['thermal', 'health', 'frame', 'model', 'liveCalibration',
'dMonitoringState', 'plan', 'pathPlan', 'liveLocationKalman'])
self.can_sock = can_sock
@ -212,6 +212,9 @@ class Controls:
self.events.add(EventName.vehicleModelInvalid)
if not self.sm['liveLocationKalman'].posenetOK:
self.events.add(EventName.posenetInvalid)
if not self.sm['frame'].recoverState < 2:
# counter>=2 is active
self.events.add(EventName.focusRecoverActive)
if not self.sm['plan'].radarValid:
self.events.add(EventName.radarFault)
if self.sm['plan'].radarCanError:

@ -208,8 +208,6 @@ EVENTS = {
EventName.laneChangeBlocked: {},
EventName.focusRecoverActive: {},
# ********** events only containing alerts displayed in all states **********
EventName.debugAlert: {
@ -524,6 +522,14 @@ EVENTS = {
ET.NO_ENTRY: NoEntryAlert("Vision Model Output Uncertain"),
},
EventName.focusRecoverActive: {
ET.WARNING: Alert(
"TAKE CONTROL",
"Attempting Refocus: Camera Focus Invalid",
AlertStatus.userPrompt, AlertSize.mid,
Priority.LOW, VisualAlert.steerRequired, AudibleAlert.chimeWarning1, .4, 2., 3.),
},
EventName.outOfSpace: {
ET.NO_ENTRY: NoEntryAlert("Out of Storage Space",
duration_hud_alert=0.),

@ -112,6 +112,7 @@ class Plant():
Plant.logcan = messaging.pub_sock('can')
Plant.sendcan = messaging.sub_sock('sendcan')
Plant.model = messaging.pub_sock('model')
Plant.frame_pub = messaging.pub_sock('frame')
Plant.live_params = messaging.pub_sock('liveParameters')
Plant.live_location_kalman = messaging.pub_sock('liveLocationKalman')
Plant.health = messaging.pub_sock('health')
@ -161,6 +162,7 @@ class Plant():
def close(self):
Plant.logcan.close()
Plant.model.close()
Plant.frame_pub.close()
Plant.live_params.close()
Plant.live_location_kalman.close()
@ -391,6 +393,7 @@ class Plant():
if publish_model and self.frame % 5 == 0:
md = messaging.new_message('model')
cal = messaging.new_message('liveCalibration')
fp = messaging.new_message('frame')
md.model.frameId = 0
for x in [md.model.path, md.model.leftLane, md.model.rightLane]:
x.points = [0.0]*50
@ -422,6 +425,7 @@ class Plant():
# fake values?
Plant.model.send(md.to_bytes())
Plant.cal.send(cal.to_bytes())
Plant.frame_pub.send(fp.to_bytes())
Plant.logcan.send(can_list_to_can_capnp(can_msgs))

@ -213,7 +213,7 @@ CONFIGS = [
pub_sub={
"can": ["controlsState", "carState", "carControl", "sendcan", "carEvents", "carParams"],
"thermal": [], "health": [], "liveCalibration": [], "dMonitoringState": [], "plan": [], "pathPlan": [], "gpsLocation": [], "liveLocationKalman": [],
"model": [],
"model": [], "frame": [],
},
ignore=["logMonoTime", "valid", "controlsState.startMonoTime", "controlsState.cumLagMs"],
init_callback=fingerprint,

Loading…
Cancel
Save