Release 260111

Comma Device
2026-01-11 18:23:29 +08:00
commit 3721ecbf8a
2601 changed files with 855070 additions and 0 deletions

View File

@@ -0,0 +1,15 @@
# driver monitoring (DM)
Uploading driver-facing camera footage is opt-in, but you are encouraged to opt in to help improve the DM model. You can always change your preference with the "Record and Upload Driver Camera" toggle.
## Troubleshooting
Before creating a bug report, go through these troubleshooting steps.
* Ensure the driver-facing camera has a good view of the driver in normal driving positions.
* This can be checked in Settings -> Device -> Preview Driver Camera (when the car is off).
* If the camera can't see the driver, the device should be re-mounted.
## Bug report
For us to look into a DM bug report, we need the driver-facing camera footage. If you don't normally have this enabled, enable the toggle for a single drive. Also make sure the "Upload Raw Logs" toggle is enabled before going for the drive.
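For context, the code in this release reads such on-device preferences through the Params interface. A minimal, illustrative sketch follows; the key backing the "Record and Upload Driver Camera" toggle is not shown in this commit, so "AlwaysOnDM" below is just an example key that does appear in dmonitoringd.py.

# Illustrative only: read a DM-related preference the same way dmonitoringd.py does.
# "AlwaysOnDM" is a key used in this release; the keys behind the upload toggles
# mentioned above are not part of this commit.
from openpilot.common.params import Params

params = Params()
print("Always-on DM:", params.get_bool("AlwaysOnDM"))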

View File

@@ -0,0 +1,51 @@
#!/usr/bin/env python3
import cereal.messaging as messaging

from openpilot.common.params import Params
from openpilot.common.realtime import config_realtime_process
from openpilot.selfdrive.monitoring.helpers import DriverMonitoring


def dmonitoringd_thread():
  config_realtime_process([0, 1, 2, 3], 5)

  params = Params()
  pm = messaging.PubMaster(['driverMonitoringState'])
  sm = messaging.SubMaster(['driverStateV2', 'liveCalibration', 'carState', 'selfdriveState', 'modelV2'], poll='driverStateV2')

  DM = DriverMonitoring(rhd_saved=params.get_bool("IsRhdDetected"), always_on=params.get_bool("AlwaysOnDM"))
  demo_mode = False

  # 20Hz <- dmonitoringmodeld
  while True:
    sm.update()

    if not sm.updated['driverStateV2']:
      # iterate when model has new output
      continue

    valid = sm.all_checks()
    if demo_mode and sm.valid['driverStateV2']:
      DM.run_step(sm, demo=demo_mode)
    elif valid:
      DM.run_step(sm, demo=demo_mode)

    # publish
    dat = DM.get_state_packet(valid=valid)
    pm.send('driverMonitoringState', dat)

    # load live always-on toggle
    if sm['driverStateV2'].frameId % 40 == 1:
      DM.always_on = params.get_bool("AlwaysOnDM")
      demo_mode = params.get_bool("IsDriverViewEnabled")

    # save rhd virtual toggle every 5 mins
    if (sm['driverStateV2'].frameId % 6000 == 0 and not demo_mode and
        DM.wheelpos.prob_offseter.filtered_stat.n > DM.settings._WHEELPOS_FILTER_MIN_COUNT and
        DM.wheel_on_right == (DM.wheelpos.prob_offseter.filtered_stat.M > DM.settings._WHEELPOS_THRESHOLD)):
      params.put_bool_nonblocking("IsRhdDetected", DM.wheel_on_right)


def main():
  dmonitoringd_thread()


if __name__ == '__main__':
  main()
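For context, here is a minimal sketch of a consumer of the messages this daemon publishes. It only uses the 'driverMonitoringState' socket and fields filled in by DriverMonitoring.get_state_packet below; it is an illustration, not part of this release.

import cereal.messaging as messaging

# Illustrative consumer: print a few driverMonitoringState fields published by dmonitoringd.
sm = messaging.SubMaster(['driverMonitoringState'])
while True:
  sm.update()
  if sm.updated['driverMonitoringState']:
    dm = sm['driverMonitoringState']
    print(f"face={dm.faceDetected} distracted={dm.isDistracted} awareness={dm.awarenessStatus:.2f}")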

View File

@@ -0,0 +1,464 @@
from math import atan2
import numpy as np
from cereal import car, log
import cereal.messaging as messaging
from openpilot.selfdrive.selfdrived.events import Events
from openpilot.selfdrive.selfdrived.alertmanager import set_offroad_alert
from openpilot.common.realtime import DT_DMON
from openpilot.common.filter_simple import FirstOrderFilter
from openpilot.common.params import Params
from openpilot.common.stat_live import RunningStatFilter
from openpilot.common.transformations.camera import DEVICE_CAMERAS
from openpilot.system.hardware import HARDWARE
EventName = log.OnroadEvent.EventName
# ******************************************************************************************
# NOTE: To fork maintainers.
# Disabling or nerfing safety features will get you and your users banned from our servers.
# We recommend that you do not change these numbers from the defaults.
# ******************************************************************************************
class DRIVER_MONITOR_SETTINGS:
def __init__(self, device_type):
self._DT_DMON = DT_DMON
# ref (page15-16): https://eur-lex.europa.eu/legal-content/EN/TXT/PDF/?uri=CELEX:42018X1947&rid=2
self._AWARENESS_TIME = 30. # passive wheeltouch total timeout
self._AWARENESS_PRE_TIME_TILL_TERMINAL = 15.
self._AWARENESS_PROMPT_TIME_TILL_TERMINAL = 6.
self._DISTRACTED_TIME = 11. # active monitoring total timeout
self._DISTRACTED_PRE_TIME_TILL_TERMINAL = 8.
self._DISTRACTED_PROMPT_TIME_TILL_TERMINAL = 6.
self._FACE_THRESHOLD = 0.7
self._EYE_THRESHOLD = 0.65
self._SG_THRESHOLD = 0.9
self._BLINK_THRESHOLD = 0.865
self._PHONE_THRESH = 0.75 if device_type == 'mici' else 0.4
self._PHONE_THRESH2 = 15.0
self._PHONE_MAX_OFFSET = 0.06
self._PHONE_MIN_OFFSET = 0.025
self._POSE_PITCH_THRESHOLD = 0.3133
self._POSE_PITCH_THRESHOLD_SLACK = 0.3237
self._POSE_PITCH_THRESHOLD_STRICT = self._POSE_PITCH_THRESHOLD
self._POSE_YAW_THRESHOLD = 0.4020
self._POSE_YAW_THRESHOLD_SLACK = 0.5042
self._POSE_YAW_THRESHOLD_STRICT = self._POSE_YAW_THRESHOLD
self._PITCH_NATURAL_OFFSET = 0.029 # initial value before offset is learned
self._PITCH_NATURAL_THRESHOLD = 0.449
self._YAW_NATURAL_OFFSET = 0.097 # initial value before offset is learned
self._PITCH_MAX_OFFSET = 0.124
self._PITCH_MIN_OFFSET = -0.0881
self._YAW_MAX_OFFSET = 0.289
self._YAW_MIN_OFFSET = -0.0246
self._DCAM_UNCERTAIN_ALERT_THRESHOLD = 0.1
self._DCAM_UNCERTAIN_ALERT_COUNT = int(60 / self._DT_DMON)
self._DCAM_UNCERTAIN_RESET_COUNT = int(20 / self._DT_DMON)
self._POSESTD_THRESHOLD = 0.3
self._HI_STD_FALLBACK_TIME = int(10 / self._DT_DMON) # fall back to wheel touch if model is uncertain for 10s
self._DISTRACTED_FILTER_TS = 0.25 # 0.6Hz
self._ALWAYS_ON_ALERT_MIN_SPEED = 11
self._POSE_CALIB_MIN_SPEED = 13 # 30 mph
self._POSE_OFFSET_MIN_COUNT = int(60 / self._DT_DMON) # valid data counts before calibration completes, 1min cumulative
self._POSE_OFFSET_MAX_COUNT = int(360 / self._DT_DMON) # stop deweighting new data after 6 min, aka "short term memory"
self._WHEELPOS_CALIB_MIN_SPEED = 11
self._WHEELPOS_THRESHOLD = 0.5
self._WHEELPOS_FILTER_MIN_COUNT = int(15 / self._DT_DMON) # allow 15 seconds to converge wheel side
self._RECOVERY_FACTOR_MAX = 5. # relative to minus step change
self._RECOVERY_FACTOR_MIN = 1.25 # relative to minus step change
self._MAX_TERMINAL_ALERTS = 3 # not allowed to engage after 3 terminal alerts
self._MAX_TERMINAL_DURATION = int(30 / self._DT_DMON) # not allowed to engage after 30s of terminal alerts
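# Note on the count-style settings above (explanatory, assuming DT_DMON = 0.05s,
# i.e. the 20Hz model rate noted in dmonitoringd.py): values built as
# int(x / self._DT_DMON) are frame counts. For example, _POSE_OFFSET_MIN_COUNT =
# int(60 / 0.05) = 1200 frames, i.e. one cumulative minute of valid data before
# pose offset calibration is considered complete, and _HI_STD_FALLBACK_TIME =
# int(10 / 0.05) = 200 frames of high pose std before falling back to wheel touch.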
class DistractedType:
NOT_DISTRACTED = 0
DISTRACTED_POSE = 1 << 0
DISTRACTED_BLINK = 1 << 1
DISTRACTED_PHONE = 1 << 2
class DriverPose:
def __init__(self, max_trackable):
self.yaw = 0.
self.pitch = 0.
self.roll = 0.
self.yaw_std = 0.
self.pitch_std = 0.
self.roll_std = 0.
self.pitch_offseter = RunningStatFilter(max_trackable=max_trackable)
self.yaw_offseter = RunningStatFilter(max_trackable=max_trackable)
self.calibrated = False
self.low_std = True
self.cfactor_pitch = 1.
self.cfactor_yaw = 1.
class DriverProb:
def __init__(self, max_trackable):
self.prob = 0.
self.prob_offseter = RunningStatFilter(max_trackable=max_trackable)
self.prob_calibrated = False
class DriverBlink:
def __init__(self):
self.left = 0.
self.right = 0.
# model output refers to center of undistorted+leveled image
EFL = 598.0 # focal length (pixels) in the camera intrinsic matrix K
cam = DEVICE_CAMERAS[("tici", "ar0231")] # corrected image has the same size as raw
W, H = (cam.dcam.width, cam.dcam.height)
def face_orientation_from_net(angles_desc, pos_desc, rpy_calib):
# these angles are output in the device frame,
# so from the driver's perspective, pitch is up and yaw is right
pitch_net, yaw_net, roll_net = angles_desc
face_pixel_position = ((pos_desc[0]+0.5)*W, (pos_desc[1]+0.5)*H)
yaw_focal_angle = atan2(face_pixel_position[0] - W//2, EFL)
pitch_focal_angle = atan2(face_pixel_position[1] - H//2, EFL)
pitch = pitch_net + pitch_focal_angle
yaw = -yaw_net + yaw_focal_angle
# no calib for roll
pitch -= rpy_calib[1]
yaw -= rpy_calib[2]
return roll_net, pitch, yaw
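# Geometry note (explanatory, derived from the code above): the network reports
# angles relative to the camera's optical axis, so the face's pixel offset from the
# image center is first converted to the angle it subtends at the camera, e.g.
# yaw_focal_angle = atan2(x - W//2, EFL); a face 299 pixels right of center gives
# atan2(299, 598) ~= 0.46 rad. That focal angle is added to the net output, then the
# calibrated pitch/yaw (rpy_calib[1], rpy_calib[2]) are subtracted so the returned
# pose is expressed relative to the road frame; roll is returned uncalibrated.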
class DriverMonitoring:
def __init__(self, rhd_saved=False, settings=None, always_on=False):
# init policy settings
self.settings = settings if settings is not None else DRIVER_MONITOR_SETTINGS(device_type=HARDWARE.get_device_type())
# init driver status
self.wheelpos = DriverProb(-1)
self.pose = DriverPose(self.settings._POSE_OFFSET_MAX_COUNT)
self.phone = DriverProb(self.settings._POSE_OFFSET_MAX_COUNT)
self.blink = DriverBlink()
self.always_on = always_on
self.distracted_types = []
self.driver_distracted = False
self.driver_distraction_filter = FirstOrderFilter(0., self.settings._DISTRACTED_FILTER_TS, self.settings._DT_DMON)
self.wheel_on_right = False
self.wheel_on_right_last = None
self.wheel_on_right_default = rhd_saved
self.face_detected = False
self.terminal_alert_cnt = 0
self.terminal_time = 0
self.step_change = 0.
self.active_monitoring_mode = True
self.is_model_uncertain = False
self.hi_stds = 0
self.threshold_pre = self.settings._DISTRACTED_PRE_TIME_TILL_TERMINAL / self.settings._DISTRACTED_TIME
self.threshold_prompt = self.settings._DISTRACTED_PROMPT_TIME_TILL_TERMINAL / self.settings._DISTRACTED_TIME
self.dcam_uncertain_cnt = 0
self.dcam_uncertain_alerted = False # once per drive
self.dcam_reset_cnt = 0
self.params = Params()
self.too_distracted = self.params.get_bool("DriverTooDistracted")
self._reset_awareness()
self._set_timers(active_monitoring=True)
self._reset_events()
def _reset_awareness(self):
self.awareness = 1.
self.awareness_active = 1.
self.awareness_passive = 1.
def _reset_events(self):
self.current_events = Events()
def _set_timers(self, active_monitoring):
if self.active_monitoring_mode and self.awareness <= self.threshold_prompt:
if active_monitoring:
self.step_change = self.settings._DT_DMON / self.settings._DISTRACTED_TIME
else:
self.step_change = 0.
return # no exploit after orange alert
elif self.awareness <= 0.:
return
if active_monitoring:
# when falling back from passive mode to active mode, reset awareness to avoid false alert
if not self.active_monitoring_mode:
self.awareness_passive = self.awareness
self.awareness = self.awareness_active
self.threshold_pre = self.settings._DISTRACTED_PRE_TIME_TILL_TERMINAL / self.settings._DISTRACTED_TIME
self.threshold_prompt = self.settings._DISTRACTED_PROMPT_TIME_TILL_TERMINAL / self.settings._DISTRACTED_TIME
self.step_change = self.settings._DT_DMON / self.settings._DISTRACTED_TIME
self.active_monitoring_mode = True
else:
if self.active_monitoring_mode:
self.awareness_active = self.awareness
self.awareness = self.awareness_passive
self.threshold_pre = self.settings._AWARENESS_PRE_TIME_TILL_TERMINAL / self.settings._AWARENESS_TIME
self.threshold_prompt = self.settings._AWARENESS_PROMPT_TIME_TILL_TERMINAL / self.settings._AWARENESS_TIME
self.step_change = self.settings._DT_DMON / self.settings._AWARENESS_TIME
self.active_monitoring_mode = False
def _set_policy(self, brake_disengage_prob, car_speed):
bp = brake_disengage_prob
k1 = max(-0.00156*((car_speed-16)**2)+0.6, 0.2)
bp_normal = max(min(bp / k1, 0.5),0)
self.pose.cfactor_pitch = np.interp(bp_normal, [0, 0.5],
[self.settings._POSE_PITCH_THRESHOLD_SLACK,
self.settings._POSE_PITCH_THRESHOLD_STRICT]) / self.settings._POSE_PITCH_THRESHOLD
self.pose.cfactor_yaw = np.interp(bp_normal, [0, 0.5],
[self.settings._POSE_YAW_THRESHOLD_SLACK,
self.settings._POSE_YAW_THRESHOLD_STRICT]) / self.settings._POSE_YAW_THRESHOLD
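# Worked example for _set_policy (illustrative numbers): at car_speed = 16 m/s,
# k1 = max(-0.00156*(16-16)**2 + 0.6, 0.2) = 0.6. A brake disengage probability of
# 0.3 gives bp_normal = max(min(0.3 / 0.6, 0.5), 0) = 0.5, so cfactor_pitch
# interpolates all the way to _POSE_PITCH_THRESHOLD_STRICT / _POSE_PITCH_THRESHOLD
# = 1.0 (the strictest pose limits), while bp_normal = 0 keeps the slacker
# _POSE_PITCH_THRESHOLD_SLACK-based limits.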
def _get_distracted_types(self):
distracted_types = []
if not self.pose.calibrated:
pitch_error = self.pose.pitch - self.settings._PITCH_NATURAL_OFFSET
yaw_error = self.pose.yaw - self.settings._YAW_NATURAL_OFFSET
else:
pitch_error = self.pose.pitch - min(max(self.pose.pitch_offseter.filtered_stat.mean(),
self.settings._PITCH_MIN_OFFSET), self.settings._PITCH_MAX_OFFSET)
yaw_error = self.pose.yaw - min(max(self.pose.yaw_offseter.filtered_stat.mean(),
self.settings._YAW_MIN_OFFSET), self.settings._YAW_MAX_OFFSET)
pitch_error = 0 if pitch_error > 0 else abs(pitch_error) # no positive pitch limit
yaw_error = abs(yaw_error)
if pitch_error > (self.settings._POSE_PITCH_THRESHOLD*self.pose.cfactor_pitch if self.pose.calibrated else self.settings._PITCH_NATURAL_THRESHOLD) or \
yaw_error > self.settings._POSE_YAW_THRESHOLD*self.pose.cfactor_yaw:
distracted_types.append(DistractedType.DISTRACTED_POSE)
if (self.blink.left + self.blink.right)*0.5 > self.settings._BLINK_THRESHOLD:
distracted_types.append(DistractedType.DISTRACTED_BLINK)
if self.phone.prob_calibrated:
using_phone = self.phone.prob > max(min(self.phone.prob_offseter.filtered_stat.M, self.settings._PHONE_MAX_OFFSET), self.settings._PHONE_MIN_OFFSET) \
* self.settings._PHONE_THRESH2
else:
using_phone = self.phone.prob > self.settings._PHONE_THRESH
if using_phone:
distracted_types.append(DistractedType.DISTRACTED_PHONE)
return distracted_types
def _update_states(self, driver_state, cal_rpy, car_speed, op_engaged, standstill, demo_mode=False):
rhd_pred = driver_state.wheelOnRightProb
# calibrate the wheel side only when there's movement and either driver's face is detected
if car_speed > self.settings._WHEELPOS_CALIB_MIN_SPEED and (driver_state.leftDriverData.faceProb > self.settings._FACE_THRESHOLD or
driver_state.rightDriverData.faceProb > self.settings._FACE_THRESHOLD):
self.wheelpos.prob_offseter.push_and_update(rhd_pred)
self.wheelpos.prob_calibrated = self.wheelpos.prob_offseter.filtered_stat.n > self.settings._WHEELPOS_FILTER_MIN_COUNT
if self.wheelpos.prob_calibrated or demo_mode:
self.wheel_on_right = self.wheelpos.prob_offseter.filtered_stat.M > self.settings._WHEELPOS_THRESHOLD
else:
self.wheel_on_right = self.wheel_on_right_default # use default/saved if calibration is unfinished
# make sure no switching when engaged
if op_engaged and self.wheel_on_right_last is not None and self.wheel_on_right_last != self.wheel_on_right and not demo_mode:
self.wheel_on_right = self.wheel_on_right_last
driver_data = driver_state.rightDriverData if self.wheel_on_right else driver_state.leftDriverData
if not all(len(x) > 0 for x in (driver_data.faceOrientation, driver_data.facePosition,
driver_data.faceOrientationStd, driver_data.facePositionStd)):
return
self.face_detected = driver_data.faceProb > self.settings._FACE_THRESHOLD
self.pose.roll, self.pose.pitch, self.pose.yaw = face_orientation_from_net(driver_data.faceOrientation, driver_data.facePosition, cal_rpy)
if self.wheel_on_right:
self.pose.yaw *= -1
self.wheel_on_right_last = self.wheel_on_right
self.pose.pitch_std = driver_data.faceOrientationStd[0]
self.pose.yaw_std = driver_data.faceOrientationStd[1]
model_std_max = max(self.pose.pitch_std, self.pose.yaw_std)
self.pose.low_std = model_std_max < self.settings._POSESTD_THRESHOLD
self.blink.left = driver_data.leftBlinkProb * (driver_data.leftEyeProb > self.settings._EYE_THRESHOLD) \
* (driver_data.sunglassesProb < self.settings._SG_THRESHOLD)
self.blink.right = driver_data.rightBlinkProb * (driver_data.rightEyeProb > self.settings._EYE_THRESHOLD) \
* (driver_data.sunglassesProb < self.settings._SG_THRESHOLD)
self.phone.prob = driver_data.phoneProb
self.distracted_types = self._get_distracted_types()
self.driver_distracted = (DistractedType.DISTRACTED_PHONE in self.distracted_types
or DistractedType.DISTRACTED_POSE in self.distracted_types
or DistractedType.DISTRACTED_BLINK in self.distracted_types) \
and driver_data.faceProb > self.settings._FACE_THRESHOLD and self.pose.low_std
self.driver_distraction_filter.update(self.driver_distracted)
# update offseter
# only update when driver is actively driving the car above a certain speed
if self.face_detected and car_speed > self.settings._POSE_CALIB_MIN_SPEED and self.pose.low_std and (not op_engaged or not self.driver_distracted):
self.pose.pitch_offseter.push_and_update(self.pose.pitch)
self.pose.yaw_offseter.push_and_update(self.pose.yaw)
self.phone.prob_offseter.push_and_update(self.phone.prob)
self.pose.calibrated = self.pose.pitch_offseter.filtered_stat.n > self.settings._POSE_OFFSET_MIN_COUNT and \
self.pose.yaw_offseter.filtered_stat.n > self.settings._POSE_OFFSET_MIN_COUNT
self.phone.prob_calibrated = self.phone.prob_offseter.filtered_stat.n > self.settings._POSE_OFFSET_MIN_COUNT
if self.face_detected and not self.driver_distracted:
if model_std_max > self.settings._DCAM_UNCERTAIN_ALERT_THRESHOLD:
if not standstill:
self.dcam_uncertain_cnt += 1
self.dcam_reset_cnt = 0
else:
self.dcam_reset_cnt += 1
if self.dcam_reset_cnt > self.settings._DCAM_UNCERTAIN_RESET_COUNT:
self.dcam_uncertain_cnt = 0
self.is_model_uncertain = self.hi_stds > self.settings._HI_STD_FALLBACK_TIME
self._set_timers(self.face_detected and not self.is_model_uncertain)
if self.face_detected and not self.pose.low_std and not self.driver_distracted:
self.hi_stds += 1
elif self.face_detected and self.pose.low_std:
self.hi_stds = 0
def _update_events(self, driver_engaged, op_engaged, standstill, wrong_gear, car_speed):
self._reset_events()
# Block engagement until the next ignition cycle after the max number or duration of terminal distractions
if self.terminal_alert_cnt >= self.settings._MAX_TERMINAL_ALERTS or \
self.terminal_time >= self.settings._MAX_TERMINAL_DURATION:
if not self.too_distracted:
self.params.put_bool_nonblocking("DriverTooDistracted", True)
self.too_distracted = True
# Always-on distraction lockout is temporary
if self.too_distracted or (self.always_on and self.awareness <= self.threshold_prompt):
self.current_events.add(EventName.tooDistracted)
always_on_valid = self.always_on and not wrong_gear
if (driver_engaged and self.awareness > 0 and not self.active_monitoring_mode) or \
(not always_on_valid and not op_engaged) or \
(always_on_valid and not op_engaged and self.awareness <= 0):
# always reset on disengage with normal mode; disengage resets only on red if always on
self._reset_awareness()
return
driver_attentive = self.driver_distraction_filter.x < 0.37
awareness_prev = self.awareness
if (driver_attentive and self.face_detected and self.pose.low_std and self.awareness > 0):
if driver_engaged:
self._reset_awareness()
return
# only restore awareness when paying attention and alert is not red
self.awareness = min(self.awareness + ((self.settings._RECOVERY_FACTOR_MAX-self.settings._RECOVERY_FACTOR_MIN)*
(1.-self.awareness)+self.settings._RECOVERY_FACTOR_MIN)*self.step_change, 1.)
if self.awareness == 1.:
self.awareness_passive = min(self.awareness_passive + self.step_change, 1.)
# don't display alert banner when awareness is recovering and has cleared orange
if self.awareness > self.threshold_prompt:
return
_reaching_audible = self.awareness - self.step_change <= self.threshold_prompt
_reaching_terminal = self.awareness - self.step_change <= 0
standstill_orange_exemption = standstill and _reaching_audible
always_on_red_exemption = always_on_valid and not op_engaged and _reaching_terminal
always_on_lowspeed_exemption = always_on_valid and not op_engaged and car_speed < self.settings._ALWAYS_ON_ALERT_MIN_SPEED
certainly_distracted = self.driver_distraction_filter.x > 0.63 and self.driver_distracted and self.face_detected
maybe_distracted = self.hi_stds > self.settings._HI_STD_FALLBACK_TIME or not self.face_detected
if certainly_distracted or maybe_distracted:
# should always be counting if distracted unless at standstill (lowspeed for always-on) and reaching orange
# also will not be reaching 0 if DM is active when not engaged
if not (standstill_orange_exemption or always_on_red_exemption or (always_on_lowspeed_exemption and _reaching_audible)):
self.awareness = max(self.awareness - self.step_change, -0.1)
alert = None
if self.awareness <= 0.:
# terminal red alert: disengagement required
alert = EventName.driverDistracted if self.active_monitoring_mode else EventName.driverUnresponsive
self.terminal_time += 1
if awareness_prev > 0.:
self.terminal_alert_cnt += 1
elif self.awareness <= self.threshold_prompt:
# prompt orange alert
alert = EventName.promptDriverDistracted if self.active_monitoring_mode else EventName.promptDriverUnresponsive
elif self.awareness <= self.threshold_pre and not always_on_lowspeed_exemption:
# pre green alert
alert = EventName.preDriverDistracted if self.active_monitoring_mode else EventName.preDriverUnresponsive
if alert is not None:
self.current_events.add(alert)
if self.dcam_uncertain_cnt > self.settings._DCAM_UNCERTAIN_ALERT_COUNT and not self.dcam_uncertain_alerted:
set_offroad_alert("Offroad_DriverMonitoringUncertain", True)
self.dcam_uncertain_alerted = True
def get_state_packet(self, valid=True):
# build driverMonitoringState packet
dat = messaging.new_message('driverMonitoringState', valid=valid)
dat.driverMonitoringState = {
"events": self.current_events.to_msg(),
"faceDetected": self.face_detected,
"isDistracted": self.driver_distracted,
"distractedType": sum(self.distracted_types),
"awarenessStatus": self.awareness,
"posePitchOffset": self.pose.pitch_offseter.filtered_stat.mean(),
"posePitchValidCount": self.pose.pitch_offseter.filtered_stat.n,
"poseYawOffset": self.pose.yaw_offseter.filtered_stat.mean(),
"poseYawValidCount": self.pose.yaw_offseter.filtered_stat.n,
"phoneProbOffset": self.phone.prob_offseter.filtered_stat.mean(),
"phoneProbValidCount": self.phone.prob_offseter.filtered_stat.n,
"stepChange": self.step_change,
"awarenessActive": self.awareness_active,
"awarenessPassive": self.awareness_passive,
"isLowStd": self.pose.low_std,
"hiStdCount": self.hi_stds,
"isActiveMode": self.active_monitoring_mode,
"isRHD": self.wheel_on_right,
"uncertainCount": self.dcam_uncertain_cnt,
}
return dat
def run_step(self, sm, demo=False):
if demo:
highway_speed = 30
enabled = True
wrong_gear = False
standstill = False
driver_engaged = False
brake_disengage_prob = 1.0
rpyCalib = [0., 0., 0.]
else:
highway_speed = sm['carState'].vEgo
enabled = sm['selfdriveState'].enabled
wrong_gear = sm['carState'].gearShifter not in (car.CarState.GearShifter.drive, car.CarState.GearShifter.low)
standstill = sm['carState'].standstill
driver_engaged = sm['carState'].steeringPressed or sm['carState'].gasPressed
brake_disengage_prob = sm['modelV2'].meta.disengagePredictions.brakeDisengageProbs[0] # brake disengage prob in next 2s
rpyCalib = sm['liveCalibration'].rpyCalib
self._set_policy(
brake_disengage_prob=brake_disengage_prob,
car_speed=highway_speed,
)
# Parse data from dmonitoringmodeld
self._update_states(
driver_state=sm['driverStateV2'],
cal_rpy=rpyCalib,
car_speed=highway_speed,
op_engaged=enabled,
standstill=standstill,
demo_mode=demo,
)
# Update distraction events
self._update_events(
driver_engaged=driver_engaged,
op_engaged=enabled,
standstill=standstill,
wrong_gear=wrong_gear,
car_speed=highway_speed
)
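The distractedType field published above is the sum of the DistractedType flags; since each flag is a distinct bit, the sum behaves like a bitwise OR, so consumers can decode it with bit masks. An illustrative decoding sketch, not part of this release:

# Illustrative sketch: decode the distractedType bitmask from driverMonitoringState
# using the DistractedType flags defined earlier in this file.
def decode_distracted_type(value: int) -> list[str]:
  names = {
    DistractedType.DISTRACTED_POSE: "pose",
    DistractedType.DISTRACTED_BLINK: "blink",
    DistractedType.DISTRACTED_PHONE: "phone",
  }
  return [name for bit, name in names.items() if value & bit]

# e.g. decode_distracted_type(DistractedType.DISTRACTED_POSE | DistractedType.DISTRACTED_PHONE)
# returns ["pose", "phone"]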

View File

@@ -0,0 +1,206 @@
import numpy as np
from cereal import log
from openpilot.common.realtime import DT_DMON
from openpilot.selfdrive.monitoring.helpers import DriverMonitoring, DRIVER_MONITOR_SETTINGS
from openpilot.system.hardware import HARDWARE
EventName = log.OnroadEvent.EventName
dm_settings = DRIVER_MONITOR_SETTINGS(device_type=HARDWARE.get_device_type())
TEST_TIMESPAN = 120 # seconds
DISTRACTED_SECONDS_TO_ORANGE = dm_settings._DISTRACTED_TIME - dm_settings._DISTRACTED_PROMPT_TIME_TILL_TERMINAL + 1
DISTRACTED_SECONDS_TO_RED = dm_settings._DISTRACTED_TIME + 1
INVISIBLE_SECONDS_TO_ORANGE = dm_settings._AWARENESS_TIME - dm_settings._AWARENESS_PROMPT_TIME_TILL_TERMINAL + 1
INVISIBLE_SECONDS_TO_RED = dm_settings._AWARENESS_TIME + 1
def make_msg(face_detected, distracted=False, model_uncertain=False):
ds = log.DriverStateV2.new_message()
ds.leftDriverData.faceOrientation = [0., 0., 0.]
ds.leftDriverData.facePosition = [0., 0.]
ds.leftDriverData.faceProb = 1. * face_detected
ds.leftDriverData.leftEyeProb = 1.
ds.leftDriverData.rightEyeProb = 1.
ds.leftDriverData.leftBlinkProb = 1. * distracted
ds.leftDriverData.rightBlinkProb = 1. * distracted
ds.leftDriverData.faceOrientationStd = [1.*model_uncertain, 1.*model_uncertain, 1.*model_uncertain]
ds.leftDriverData.facePositionStd = [1.*model_uncertain, 1.*model_uncertain]
# TODO: test both separately when e2e is used
ds.leftDriverData.phoneProb = 0.
return ds
# driver state from the neural net, at the DT_DMON rate (20Hz per dmonitoringd)
msg_NO_FACE_DETECTED = make_msg(False)
msg_ATTENTIVE = make_msg(True)
msg_DISTRACTED = make_msg(True, distracted=True)
msg_ATTENTIVE_UNCERTAIN = make_msg(True, model_uncertain=True)
msg_DISTRACTED_UNCERTAIN = make_msg(True, distracted=True, model_uncertain=True)
msg_DISTRACTED_BUT_SOMEHOW_UNCERTAIN = make_msg(True, distracted=True, model_uncertain=dm_settings._POSESTD_THRESHOLD*1.5)
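# Note on the fixtures above: make_msg(True, distracted=True) sets both blink probs
# to 1.0 with eye probs of 1.0 and the capnp default sunglassesProb of 0, so the
# mean blink of 1.0 exceeds _BLINK_THRESHOLD (0.865) and the frame is flagged as
# DISTRACTED_BLINK; faceOrientation stays at [0, 0, 0], so these distracted
# fixtures exercise the blink path rather than the pose path.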
# driver interaction with car
car_interaction_DETECTED = True
car_interaction_NOT_DETECTED = False
# some common state vectors
always_no_face = [msg_NO_FACE_DETECTED] * int(TEST_TIMESPAN / DT_DMON)
always_attentive = [msg_ATTENTIVE] * int(TEST_TIMESPAN / DT_DMON)
always_distracted = [msg_DISTRACTED] * int(TEST_TIMESPAN / DT_DMON)
always_true = [True] * int(TEST_TIMESPAN / DT_DMON)
always_false = [False] * int(TEST_TIMESPAN / DT_DMON)
class TestMonitoring:
def _run_seq(self, msgs, interaction, engaged, standstill):
DM = DriverMonitoring()
events = []
for idx in range(len(msgs)):
DM._update_states(msgs[idx], [0, 0, 0], 0, engaged[idx], standstill[idx])
# cal_rpy and car_speed don't matter here
# evaluate events at every model step (DT_DMON) in these tests
DM._update_events(interaction[idx], engaged[idx], standstill[idx], 0, 0)
events.append(DM.current_events)
assert len(events) == len(msgs), f"got {len(events)} for {len(msgs)} driverState input msgs"
return events, DM
def _assert_no_events(self, events):
assert all(not len(e) for e in events)
# engaged, driver is attentive all the time
def test_fully_aware_driver(self):
events, _ = self._run_seq(always_attentive, always_false, always_true, always_false)
self._assert_no_events(events)
# engaged, driver is distracted and does nothing
def test_fully_distracted_driver(self):
events, d_status = self._run_seq(always_distracted, always_false, always_true, always_false)
assert len(events[int((d_status.settings._DISTRACTED_TIME-d_status.settings._DISTRACTED_PRE_TIME_TILL_TERMINAL)/2/DT_DMON)]) == 0
assert events[int((d_status.settings._DISTRACTED_TIME-d_status.settings._DISTRACTED_PRE_TIME_TILL_TERMINAL + \
((d_status.settings._DISTRACTED_PRE_TIME_TILL_TERMINAL-d_status.settings._DISTRACTED_PROMPT_TIME_TILL_TERMINAL)/2))/DT_DMON)].names[0] == \
EventName.preDriverDistracted
assert events[int((d_status.settings._DISTRACTED_TIME-d_status.settings._DISTRACTED_PROMPT_TIME_TILL_TERMINAL + \
((d_status.settings._DISTRACTED_PROMPT_TIME_TILL_TERMINAL)/2))/DT_DMON)].names[0] == EventName.promptDriverDistracted
assert events[int((d_status.settings._DISTRACTED_TIME + \
((TEST_TIMESPAN-10-d_status.settings._DISTRACTED_TIME)/2))/DT_DMON)].names[0] == EventName.driverDistracted
assert isinstance(d_status.awareness, float)
# engaged, no face detected the whole time, no action
def test_fully_invisible_driver(self):
events, d_status = self._run_seq(always_no_face, always_false, always_true, always_false)
assert len(events[int((d_status.settings._AWARENESS_TIME-d_status.settings._AWARENESS_PRE_TIME_TILL_TERMINAL)/2/DT_DMON)]) == 0
assert events[int((d_status.settings._AWARENESS_TIME-d_status.settings._AWARENESS_PRE_TIME_TILL_TERMINAL + \
((d_status.settings._AWARENESS_PRE_TIME_TILL_TERMINAL-d_status.settings._AWARENESS_PROMPT_TIME_TILL_TERMINAL)/2))/DT_DMON)].names[0] == \
EventName.preDriverUnresponsive
assert events[int((d_status.settings._AWARENESS_TIME-d_status.settings._AWARENESS_PROMPT_TIME_TILL_TERMINAL + \
((d_status.settings._AWARENESS_PROMPT_TIME_TILL_TERMINAL)/2))/DT_DMON)].names[0] == EventName.promptDriverUnresponsive
assert events[int((d_status.settings._AWARENESS_TIME + \
((TEST_TIMESPAN-10-d_status.settings._AWARENESS_TIME)/2))/DT_DMON)].names[0] == EventName.driverUnresponsive
# engaged, down to orange, driver pays attention, back to normal; then down to orange, driver touches wheel
# - should have short orange recovery time and no green afterwards; wheel touch only recovers when paying attention
def test_normal_driver(self):
ds_vector = [msg_DISTRACTED] * int(DISTRACTED_SECONDS_TO_ORANGE/DT_DMON) + \
[msg_ATTENTIVE] * int(DISTRACTED_SECONDS_TO_ORANGE/DT_DMON) + \
[msg_DISTRACTED] * int((DISTRACTED_SECONDS_TO_ORANGE+2)/DT_DMON) + \
[msg_ATTENTIVE] * (int(TEST_TIMESPAN/DT_DMON)-int((DISTRACTED_SECONDS_TO_ORANGE*3+2)/DT_DMON))
interaction_vector = [car_interaction_NOT_DETECTED] * int(DISTRACTED_SECONDS_TO_ORANGE*3/DT_DMON) + \
[car_interaction_DETECTED] * (int(TEST_TIMESPAN/DT_DMON)-int(DISTRACTED_SECONDS_TO_ORANGE*3/DT_DMON))
events, _ = self._run_seq(ds_vector, interaction_vector, always_true, always_false)
assert len(events[int(DISTRACTED_SECONDS_TO_ORANGE*0.5/DT_DMON)]) == 0
assert events[int((DISTRACTED_SECONDS_TO_ORANGE-0.1)/DT_DMON)].names[0] == EventName.promptDriverDistracted
assert len(events[int(DISTRACTED_SECONDS_TO_ORANGE*1.5/DT_DMON)]) == 0
assert events[int((DISTRACTED_SECONDS_TO_ORANGE*3-0.1)/DT_DMON)].names[0] == EventName.promptDriverDistracted
assert events[int((DISTRACTED_SECONDS_TO_ORANGE*3+0.1)/DT_DMON)].names[0] == EventName.promptDriverDistracted
assert len(events[int((DISTRACTED_SECONDS_TO_ORANGE*3+2.5)/DT_DMON)]) == 0
# engaged, down to orange, driver dodges camera, then comes back still distracted, down to red,
# driver dodges, and then touches wheel to no avail, disengages and reengages
# - orange/red alert should remain after disappearance, and only disengaging clears red
def test_biggest_comma_fan(self):
_invisible_time = 2 # seconds
ds_vector = always_distracted[:]
interaction_vector = always_false[:]
op_vector = always_true[:]
ds_vector[int(DISTRACTED_SECONDS_TO_ORANGE/DT_DMON):int((DISTRACTED_SECONDS_TO_ORANGE+_invisible_time)/DT_DMON)] \
= [msg_NO_FACE_DETECTED] * int(_invisible_time/DT_DMON)
ds_vector[int((DISTRACTED_SECONDS_TO_RED+_invisible_time)/DT_DMON):int((DISTRACTED_SECONDS_TO_RED+2*_invisible_time)/DT_DMON)] \
= [msg_NO_FACE_DETECTED] * int(_invisible_time/DT_DMON)
interaction_vector[int((DISTRACTED_SECONDS_TO_RED+2*_invisible_time+0.5)/DT_DMON):int((DISTRACTED_SECONDS_TO_RED+2*_invisible_time+1.5)/DT_DMON)] \
= [True] * int(1/DT_DMON)
op_vector[int((DISTRACTED_SECONDS_TO_RED+2*_invisible_time+2.5)/DT_DMON):int((DISTRACTED_SECONDS_TO_RED+2*_invisible_time+3)/DT_DMON)] \
= [False] * int(0.5/DT_DMON)
events, _ = self._run_seq(ds_vector, interaction_vector, op_vector, always_false)
assert events[int((DISTRACTED_SECONDS_TO_ORANGE+0.5*_invisible_time)/DT_DMON)].names[0] == EventName.promptDriverDistracted
assert events[int((DISTRACTED_SECONDS_TO_RED+1.5*_invisible_time)/DT_DMON)].names[0] == EventName.driverDistracted
assert events[int((DISTRACTED_SECONDS_TO_RED+2*_invisible_time+1.5)/DT_DMON)].names[0] == EventName.driverDistracted
assert len(events[int((DISTRACTED_SECONDS_TO_RED+2*_invisible_time+3.5)/DT_DMON)]) == 0
# engaged, invisible driver, down to orange, driver touches wheel; then down to orange again, driver appears
# - both actions should clear the alert, but momentary appearance should not
def test_sometimes_transparent_commuter(self):
_visible_time = np.random.choice([0.5, 10])
ds_vector = always_no_face[:]*2
interaction_vector = always_false[:]*2
ds_vector[int((2*INVISIBLE_SECONDS_TO_ORANGE+1)/DT_DMON):int((2*INVISIBLE_SECONDS_TO_ORANGE+1+_visible_time)/DT_DMON)] = \
[msg_ATTENTIVE] * int(_visible_time/DT_DMON)
interaction_vector[int((INVISIBLE_SECONDS_TO_ORANGE)/DT_DMON):int((INVISIBLE_SECONDS_TO_ORANGE+1)/DT_DMON)] = [True] * int(1/DT_DMON)
events, _ = self._run_seq(ds_vector, interaction_vector, 2*always_true, 2*always_false)
assert len(events[int(INVISIBLE_SECONDS_TO_ORANGE*0.5/DT_DMON)]) == 0
assert events[int((INVISIBLE_SECONDS_TO_ORANGE-0.1)/DT_DMON)].names[0] == EventName.promptDriverUnresponsive
assert len(events[int((INVISIBLE_SECONDS_TO_ORANGE+0.1)/DT_DMON)]) == 0
if _visible_time == 0.5:
assert events[int((INVISIBLE_SECONDS_TO_ORANGE*2+1-0.1)/DT_DMON)].names[0] == EventName.promptDriverUnresponsive
assert events[int((INVISIBLE_SECONDS_TO_ORANGE*2+1+0.1+_visible_time)/DT_DMON)].names[0] == EventName.preDriverUnresponsive
elif _visible_time == 10:
assert events[int((INVISIBLE_SECONDS_TO_ORANGE*2+1-0.1)/DT_DMON)].names[0] == EventName.promptDriverUnresponsive
assert len(events[int((INVISIBLE_SECONDS_TO_ORANGE*2+1+0.1+_visible_time)/DT_DMON)]) == 0
# engaged, invisible driver, down to red, driver appears and then touches wheel, then disengages/reengages
# - only disengage will clear the alert
def test_last_second_responder(self):
_visible_time = 2 # seconds
ds_vector = always_no_face[:]
interaction_vector = always_false[:]
op_vector = always_true[:]
ds_vector[int(INVISIBLE_SECONDS_TO_RED/DT_DMON):int((INVISIBLE_SECONDS_TO_RED+_visible_time)/DT_DMON)] = [msg_ATTENTIVE] * int(_visible_time/DT_DMON)
interaction_vector[int((INVISIBLE_SECONDS_TO_RED+_visible_time)/DT_DMON):int((INVISIBLE_SECONDS_TO_RED+_visible_time+1)/DT_DMON)] = [True] * int(1/DT_DMON)
op_vector[int((INVISIBLE_SECONDS_TO_RED+_visible_time+1)/DT_DMON):int((INVISIBLE_SECONDS_TO_RED+_visible_time+1.5)/DT_DMON)] = [False] * int(0.5/DT_DMON)
events, _ = self._run_seq(ds_vector, interaction_vector, op_vector, always_false)
assert len(events[int(INVISIBLE_SECONDS_TO_ORANGE*0.5/DT_DMON)]) == 0
assert events[int((INVISIBLE_SECONDS_TO_ORANGE-0.1)/DT_DMON)].names[0] == EventName.promptDriverUnresponsive
assert events[int((INVISIBLE_SECONDS_TO_RED-0.1)/DT_DMON)].names[0] == EventName.driverUnresponsive
assert events[int((INVISIBLE_SECONDS_TO_RED+0.5*_visible_time)/DT_DMON)].names[0] == EventName.driverUnresponsive
assert events[int((INVISIBLE_SECONDS_TO_RED+_visible_time+0.5)/DT_DMON)].names[0] == EventName.driverUnresponsive
assert len(events[int((INVISIBLE_SECONDS_TO_RED+_visible_time+1+0.1)/DT_DMON)]) == 0
# disengaged, always distracted driver
# - dm should stay quiet when not engaged
def test_pure_dashcam_user(self):
events, _ = self._run_seq(always_distracted, always_false, always_false, always_false)
assert sum(len(event) for event in events) == 0
# engaged, car stops at traffic light, down to orange, no action, then car starts moving
# - should only reach green when stopped, but continues counting down on launch
def test_long_traffic_light_victim(self):
_redlight_time = 60 # seconds
standstill_vector = always_true[:]
standstill_vector[int(_redlight_time/DT_DMON):] = [False] * int((TEST_TIMESPAN-_redlight_time)/DT_DMON)
events, d_status = self._run_seq(always_distracted, always_false, always_true, standstill_vector)
assert events[int((d_status.settings._DISTRACTED_TIME-d_status.settings._DISTRACTED_PRE_TIME_TILL_TERMINAL+1)/DT_DMON)].names[0] == \
EventName.preDriverDistracted
assert events[int((_redlight_time-0.1)/DT_DMON)].names[0] == EventName.preDriverDistracted
assert events[int((_redlight_time+0.5)/DT_DMON)].names[0] == EventName.promptDriverDistracted
# engaged, model is somehow uncertain and driver is distracted
# - should fall back to wheel touch after uncertain alert
def test_somehow_indecisive_model(self):
ds_vector = [msg_DISTRACTED_BUT_SOMEHOW_UNCERTAIN] * int(TEST_TIMESPAN/DT_DMON)
interaction_vector = always_false[:]
events, d_status = self._run_seq(ds_vector, interaction_vector, always_true, always_false)
assert EventName.preDriverUnresponsive in \
events[int((INVISIBLE_SECONDS_TO_ORANGE-1+DT_DMON*d_status.settings._HI_STD_FALLBACK_TIME-0.1)/DT_DMON)].names
assert EventName.promptDriverUnresponsive in \
events[int((INVISIBLE_SECONDS_TO_ORANGE-1+DT_DMON*d_status.settings._HI_STD_FALLBACK_TIME+0.1)/DT_DMON)].names
assert EventName.driverUnresponsive in \
events[int((INVISIBLE_SECONDS_TO_RED-1+DT_DMON*d_status.settings._HI_STD_FALLBACK_TIME+0.1)/DT_DMON)].names
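These are plain pytest-style tests (assert-based methods on TestMonitoring), so they can be run with pytest. A minimal sketch of invoking them programmatically; the module path below is inferred from the imports and is an assumption, not confirmed by this commit:

# Illustrative: run this test module with pytest. The path is assumed from the
# openpilot.selfdrive.monitoring package layout and may differ in the tree.
import pytest

if __name__ == "__main__":
  raise SystemExit(pytest.main(["-x", "selfdrive/monitoring/tests/test_monitoring.py"]))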