paramsd: fix unbounded yaw rate while calibrating (#34806)

* rm

* comments

* default values when yaw rate invalid

* clean up

Revert "default values when yaw rate invalid"

This reverts commit e983abb3b0.

do the same for the yaw rate as we do for roll

* 1 is fine

* update refs

---------

Co-authored-by: Kacper Rączy <gfw.kra@gmail.com>
Shane Smiskol (committed by GitHub)
parent e61a4ac76e
commit 0789877cd3
  1. selfdrive/locationd/locationd.py (2 lines changed)
  2. selfdrive/locationd/paramsd.py (29 lines changed)
  3. selfdrive/test/process_replay/ref_commit (2 lines changed)
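
The commit message above boils down to one behavioral change: while the device is still calibrating (or the localizer output is otherwise invalid), paramsd no longer feeds a potentially unbounded yaw rate into its filter, and instead falls back to a neutral value with a small standard deviation, mirroring the existing roll fallback. Below is a minimal, self-contained sketch of that idea; the function name and arguments are illustrative only, not the actual ParamsLearner API.

import numpy as np

# Hypothetical helper for illustration; paramsd implements this inline in handle_log.
def select_yaw_rate(yaw_rate, yaw_rate_std, sensor_valid, calib_valid):
    """Return a (yaw_rate, std) pair that is always safe to feed to the Kalman filter."""
    valid = sensor_valid and calib_valid
    valid = valid and 0 < yaw_rate_std < 10   # rad/s: reject absurd uncertainty
    valid = valid and abs(yaw_rate) < 1       # rad/s: reject absurd rates
    if valid:
        return yaw_rate, yaw_rate_std
    # Bounded default while calibrating or when the localizer values are invalid
    return 0.0, np.radians(1)

# Example: an implausible 5 rad/s reading during calibration is replaced by the default.
print(select_yaw_rate(5.0, 0.2, True, False))  # (0.0, 0.017453292519943295)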

@@ -147,13 +147,13 @@ class LocationEstimator:
      self.car_speed = abs(msg.vEgo)
    elif which == "liveCalibration":
      # Note that we use this message during calibration
      if len(msg.rpyCalib) > 0:
        calib = np.array(msg.rpyCalib)
        if calib.min() < -CALIB_RPY_SANITY_CHECK or calib.max() > CALIB_RPY_SANITY_CHECK:
          return HandleLogResult.INPUT_INVALID

        self.device_from_calib = rot_from_euler(calib)
        self.calibrated = msg.calStatus == log.LiveCalibrationData.Status.calibrated
    elif which == "cameraOdometry":
      if not self._validate_timestamp(t):

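For reference, the liveCalibration handling above rejects implausible roll/pitch/yaw calibration angles before the calibrated rotation is built. A standalone sketch of that sanity check follows, with a placeholder bound; the real CALIB_RPY_SANITY_CHECK is defined in locationd.py and may differ, and treating an empty calibration as unusable is a simplification of the hunk above.

import numpy as np

CALIB_RPY_SANITY_CHECK = 0.5  # rad, placeholder value for illustration only

def calib_is_sane(rpy_calib) -> bool:
    """Mirror of the check above: every calibration angle must stay within the bound."""
    calib = np.array(rpy_calib)
    if calib.size == 0:
        return False
    return not (calib.min() < -CALIB_RPY_SANITY_CHECK or calib.max() > CALIB_RPY_SANITY_CHECK)

print(calib_is_sane([0.01, 0.02, -0.03]))  # True
print(calib_is_sane([0.01, 2.0, -0.03]))   # False with this placeholder bound
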
@@ -46,18 +46,25 @@ class ParamsLearner:
     self.yaw_rate_std = 0.0
     self.roll = 0.0
     self.steering_angle = 0.0
-    self.roll_valid = False

   def handle_log(self, t, which, msg):
     if which == 'livePose':
       device_pose = Pose.from_live_pose(msg)
       calibrated_pose = self.calibrator.build_calibrated_pose(device_pose)
-      self.yaw_rate, self.yaw_rate_std = calibrated_pose.angular_velocity.z, calibrated_pose.angular_velocity.z_std
+
+      yaw_rate_valid = msg.angularVelocityDevice.valid and self.calibrator.calib_valid
+      yaw_rate_valid = yaw_rate_valid and 0 < self.yaw_rate_std < 10  # rad/s
+      yaw_rate_valid = yaw_rate_valid and abs(self.yaw_rate) < 1  # rad/s
+      if yaw_rate_valid:
+        self.yaw_rate, self.yaw_rate_std = calibrated_pose.angular_velocity.z, calibrated_pose.angular_velocity.z_std
+      else:
+        # This is done to bound the yaw rate estimate when localizer values are invalid or calibrating
+        self.yaw_rate, self.yaw_rate_std = 0.0, np.radians(1)

       localizer_roll, localizer_roll_std = device_pose.orientation.x, device_pose.orientation.x_std
       localizer_roll_std = np.radians(1) if np.isnan(localizer_roll_std) else localizer_roll_std
-      self.roll_valid = (localizer_roll_std < ROLL_STD_MAX) and (ROLL_MIN < localizer_roll < ROLL_MAX) and msg.sensorsOK
-      if self.roll_valid:
+      roll_valid = (localizer_roll_std < ROLL_STD_MAX) and (ROLL_MIN < localizer_roll < ROLL_MAX) and msg.sensorsOK
+      if roll_valid:
         roll = localizer_roll
         # Experimentally found multiplier of 2 to be best trade-off between stability and accuracy or similar?
         roll_std = 2 * localizer_roll_std
@@ -67,18 +74,12 @@ class ParamsLearner:
         roll_std = np.radians(10.0)
       self.roll = np.clip(roll, self.roll - ROLL_MAX_DELTA, self.roll + ROLL_MAX_DELTA)

-      yaw_rate_valid = msg.angularVelocityDevice.valid and self.calibrator.calib_valid
-      yaw_rate_valid = yaw_rate_valid and 0 < self.yaw_rate_std < 10  # rad/s
-      yaw_rate_valid = yaw_rate_valid and abs(self.yaw_rate) < 1  # rad/s
-
       if self.active:
         if msg.posenetOK:
-          if yaw_rate_valid:
-            self.kf.predict_and_observe(t,
-                                        ObservationKind.ROAD_FRAME_YAW_RATE,
-                                        np.array([[-self.yaw_rate]]),
-                                        np.array([np.atleast_2d(self.yaw_rate_std**2)]))
+          self.kf.predict_and_observe(t,
+                                      ObservationKind.ROAD_FRAME_YAW_RATE,
+                                      np.array([[-self.yaw_rate]]),
+                                      np.array([np.atleast_2d(self.yaw_rate_std**2)]))
           self.kf.predict_and_observe(t,
                                       ObservationKind.ROAD_ROLL,

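As a quick sanity check on the fallback values, the observation arrays passed to predict_and_observe for ROAD_FRAME_YAW_RATE have the shapes and magnitudes below when the bounded default (0.0 rad/s with a 1 degree std) is used; this snippet is illustrative only and does not touch the Kalman filter itself.

import numpy as np

yaw_rate, yaw_rate_std = 0.0, np.radians(1)           # fallback from the diff above
meas = np.array([[-yaw_rate]])                        # shape (1, 1)
cov = np.array([np.atleast_2d(yaw_rate_std**2)])      # shape (1, 1, 1)
print(meas.shape, cov.shape, float(cov[0, 0, 0]))     # (1, 1) (1, 1, 1) ~3.05e-4
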
@@ -1 +1 @@
-87088a3540ddfc26356d9b71a3a4f40efcbc9b3b
+465979047295dad5da3a552db9227ed776a26a79