Release 260111
This commit is contained in:
0
common/__init__.py
Normal file
0
common/__init__.py
Normal file
46
common/api.py
Normal file
46
common/api.py
Normal file
@@ -0,0 +1,46 @@
|
||||
import jwt
|
||||
import os
|
||||
import requests
|
||||
from datetime import datetime, timedelta, UTC
|
||||
from openpilot.system.hardware.hw import Paths
|
||||
from openpilot.system.version import get_version
|
||||
|
||||
API_HOST = os.getenv('API_HOST', 'https://api.commadotai.com')
|
||||
|
||||
class Api:
|
||||
def __init__(self, dongle_id):
|
||||
self.dongle_id = dongle_id
|
||||
with open(Paths.persist_root()+'/comma/id_rsa') as f:
|
||||
self.private_key = f.read()
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
return self.request('GET', *args, **kwargs)
|
||||
|
||||
def post(self, *args, **kwargs):
|
||||
return self.request('POST', *args, **kwargs)
|
||||
|
||||
def request(self, method, endpoint, timeout=None, access_token=None, **params):
|
||||
return api_get(endpoint, method=method, timeout=timeout, access_token=access_token, **params)
|
||||
|
||||
def get_token(self, expiry_hours=1):
|
||||
now = datetime.now(UTC).replace(tzinfo=None)
|
||||
payload = {
|
||||
'identity': self.dongle_id,
|
||||
'nbf': now,
|
||||
'iat': now,
|
||||
'exp': now + timedelta(hours=expiry_hours)
|
||||
}
|
||||
token = jwt.encode(payload, self.private_key, algorithm='RS256')
|
||||
if isinstance(token, bytes):
|
||||
token = token.decode('utf8')
|
||||
return token
|
||||
|
||||
|
||||
def api_get(endpoint, method='GET', timeout=None, access_token=None, **params):
|
||||
headers = {}
|
||||
if access_token is not None:
|
||||
headers['Authorization'] = "JWT " + access_token
|
||||
|
||||
headers['User-Agent'] = "openpilot-" + get_version()
|
||||
|
||||
return requests.request(method, API_HOST + "/" + endpoint, timeout=timeout, headers=headers, params=params)
|
||||
4
common/basedir.py
Normal file
4
common/basedir.py
Normal file
@@ -0,0 +1,4 @@
|
||||
import os
|
||||
|
||||
|
||||
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../"))
|
||||
28
common/clutil.h
Normal file
28
common/clutil.h
Normal file
@@ -0,0 +1,28 @@
|
||||
#pragma once
|
||||
|
||||
#ifdef __APPLE__
|
||||
#include <OpenCL/cl.h>
|
||||
#else
|
||||
#include <CL/cl.h>
|
||||
#endif
|
||||
|
||||
#include <string>
|
||||
|
||||
#define CL_CHECK(_expr) \
|
||||
do { \
|
||||
assert(CL_SUCCESS == (_expr)); \
|
||||
} while (0)
|
||||
|
||||
#define CL_CHECK_ERR(_expr) \
|
||||
({ \
|
||||
cl_int err = CL_INVALID_VALUE; \
|
||||
__typeof__(_expr) _ret = _expr; \
|
||||
assert(_ret&& err == CL_SUCCESS); \
|
||||
_ret; \
|
||||
})
|
||||
|
||||
cl_device_id cl_get_device_id(cl_device_type device_type);
|
||||
cl_context cl_create_context(cl_device_id device_id);
|
||||
void cl_release_context(cl_context context);
|
||||
cl_program cl_program_from_source(cl_context ctx, cl_device_id device_id, const std::string& src, const char* args = nullptr);
|
||||
cl_program cl_program_from_file(cl_context ctx, cl_device_id device_id, const char* path, const char* args);
|
||||
23
common/constants.py
Normal file
23
common/constants.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import numpy as np
|
||||
|
||||
# conversions
|
||||
class CV:
|
||||
# Speed
|
||||
MPH_TO_KPH = 1.609344
|
||||
KPH_TO_MPH = 1. / MPH_TO_KPH
|
||||
MS_TO_KPH = 3.6
|
||||
KPH_TO_MS = 1. / MS_TO_KPH
|
||||
MS_TO_MPH = MS_TO_KPH * KPH_TO_MPH
|
||||
MPH_TO_MS = MPH_TO_KPH * KPH_TO_MS
|
||||
MS_TO_KNOTS = 1.9438
|
||||
KNOTS_TO_MS = 1. / MS_TO_KNOTS
|
||||
|
||||
# Angle
|
||||
DEG_TO_RAD = np.pi / 180.
|
||||
RAD_TO_DEG = 1. / DEG_TO_RAD
|
||||
|
||||
# Mass
|
||||
LB_TO_KG = 0.453592
|
||||
|
||||
|
||||
ACCELERATION_DUE_TO_GRAVITY = 9.81 # m/s^2
|
||||
19
common/conversions.py
Normal file
19
common/conversions.py
Normal file
@@ -0,0 +1,19 @@
|
||||
import numpy as np
|
||||
|
||||
class Conversions:
|
||||
# Speed
|
||||
MPH_TO_KPH = 1.609344
|
||||
KPH_TO_MPH = 1. / MPH_TO_KPH
|
||||
MS_TO_KPH = 3.6
|
||||
KPH_TO_MS = 1. / MS_TO_KPH
|
||||
MS_TO_MPH = MS_TO_KPH * KPH_TO_MPH
|
||||
MPH_TO_MS = MPH_TO_KPH * KPH_TO_MS
|
||||
MS_TO_KNOTS = 1.9438
|
||||
KNOTS_TO_MS = 1. / MS_TO_KNOTS
|
||||
|
||||
# Angle
|
||||
DEG_TO_RAD = np.pi / 180.
|
||||
RAD_TO_DEG = 1. / DEG_TO_RAD
|
||||
|
||||
# Mass
|
||||
LB_TO_KG = 0.453592
|
||||
9
common/dict_helpers.py
Normal file
9
common/dict_helpers.py
Normal file
@@ -0,0 +1,9 @@
|
||||
# remove all keys that end in DEPRECATED
|
||||
def strip_deprecated_keys(d):
|
||||
for k in list(d.keys()):
|
||||
if isinstance(k, str):
|
||||
if k.endswith('DEPRECATED'):
|
||||
d.pop(k)
|
||||
elif isinstance(d[k], dict):
|
||||
strip_deprecated_keys(d[k])
|
||||
return d
|
||||
8
common/ffi_wrapper.py
Normal file
8
common/ffi_wrapper.py
Normal file
@@ -0,0 +1,8 @@
|
||||
import platform
|
||||
|
||||
|
||||
def suffix():
|
||||
if platform.system() == "Darwin":
|
||||
return ".dylib"
|
||||
else:
|
||||
return ".so"
|
||||
58
common/file_helpers.py
Normal file
58
common/file_helpers.py
Normal file
@@ -0,0 +1,58 @@
|
||||
import io
|
||||
import os
|
||||
import tempfile
|
||||
import contextlib
|
||||
import zstandard as zstd
|
||||
|
||||
LOG_COMPRESSION_LEVEL = 10 # little benefit up to level 15. level ~17 is a small step change
|
||||
|
||||
|
||||
class CallbackReader:
|
||||
"""Wraps a file, but overrides the read method to also
|
||||
call a callback function with the number of bytes read so far."""
|
||||
def __init__(self, f, callback, *args):
|
||||
self.f = f
|
||||
self.callback = callback
|
||||
self.cb_args = args
|
||||
self.total_read = 0
|
||||
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self.f, attr)
|
||||
|
||||
def read(self, *args, **kwargs):
|
||||
chunk = self.f.read(*args, **kwargs)
|
||||
self.total_read += len(chunk)
|
||||
self.callback(*self.cb_args, self.total_read)
|
||||
return chunk
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def atomic_write_in_dir(path: str, mode: str = 'w', buffering: int = -1, encoding: str = None, newline: str = None,
|
||||
overwrite: bool = False):
|
||||
"""Write to a file atomically using a temporary file in the same directory as the destination file."""
|
||||
dir_name = os.path.dirname(path)
|
||||
|
||||
if not overwrite and os.path.exists(path):
|
||||
raise FileExistsError(f"File '{path}' already exists. To overwrite it, set 'overwrite' to True.")
|
||||
|
||||
with tempfile.NamedTemporaryFile(mode=mode, buffering=buffering, encoding=encoding, newline=newline, dir=dir_name, delete=False) as tmp_file:
|
||||
yield tmp_file
|
||||
tmp_file_name = tmp_file.name
|
||||
os.replace(tmp_file_name, path)
|
||||
|
||||
|
||||
def get_upload_stream(filepath: str, should_compress: bool) -> tuple[io.BufferedIOBase, int]:
|
||||
if not should_compress:
|
||||
file_size = os.path.getsize(filepath)
|
||||
file_stream = open(filepath, "rb")
|
||||
return file_stream, file_size
|
||||
|
||||
# Compress the file on the fly
|
||||
compressed_stream = io.BytesIO()
|
||||
compressor = zstd.ZstdCompressor(level=LOG_COMPRESSION_LEVEL)
|
||||
|
||||
with open(filepath, "rb") as f:
|
||||
compressor.copy_stream(f, compressed_stream)
|
||||
compressed_size = compressed_stream.tell()
|
||||
compressed_stream.seek(0)
|
||||
return compressed_stream, compressed_size
|
||||
69
common/filter_simple.py
Normal file
69
common/filter_simple.py
Normal file
@@ -0,0 +1,69 @@
|
||||
import numpy as np
|
||||
from collections import deque
|
||||
|
||||
class FirstOrderFilter:
|
||||
# first order filter
|
||||
def __init__(self, x0, rc, dt, initialized=True):
|
||||
self.x = x0
|
||||
self.dt = dt
|
||||
self.update_alpha(rc)
|
||||
self.initialized = initialized
|
||||
|
||||
def update_alpha(self, rc):
|
||||
self.alpha = self.dt / (rc + self.dt)
|
||||
|
||||
def update(self, x):
|
||||
if self.initialized:
|
||||
self.x = (1. - self.alpha) * self.x + self.alpha * x
|
||||
else:
|
||||
self.initialized = True
|
||||
self.x = x
|
||||
return self.x
|
||||
|
||||
|
||||
class BounceFilter(FirstOrderFilter):
|
||||
def __init__(self, x0, rc, dt, initialized=True, bounce=2):
|
||||
self.velocity = FirstOrderFilter(0.0, 0.15, dt)
|
||||
self.bounce = bounce
|
||||
super().__init__(x0, rc, dt, initialized)
|
||||
|
||||
def update(self, x):
|
||||
super().update(x)
|
||||
scale = self.dt / (1.0 / 60.0) # tuned at 60 fps
|
||||
self.velocity.x += (x - self.x) * self.bounce * scale * self.dt
|
||||
self.velocity.update(0.0)
|
||||
if abs(self.velocity.x) < 1e-5:
|
||||
self.velocity.x = 0.0
|
||||
self.x += self.velocity.x
|
||||
return self.x
|
||||
|
||||
class MyMovingAverage:
|
||||
def __init__(self, window_size, value=None):
|
||||
self.window_size = window_size
|
||||
if (value is not None):
|
||||
self.values = deque([value] * window_size, maxlen=window_size)
|
||||
self.sum = value * window_size
|
||||
self.result = value
|
||||
else:
|
||||
self.values = deque(maxlen=window_size)
|
||||
self.sum = 0
|
||||
self.result = 0
|
||||
|
||||
def set(self, value):
|
||||
self.values.clear()
|
||||
self.values.append(value)
|
||||
self.sum = value
|
||||
self.result = value
|
||||
return value
|
||||
|
||||
def set_all(self, value):
|
||||
self.values = deque([value] * self.window_size, maxlen=self.window_size)
|
||||
self.sum = value * self.window_size
|
||||
self.result = value
|
||||
return value
|
||||
|
||||
def process(self, value, median=False):
|
||||
self.values.append(value)
|
||||
self.sum = sum(self.values)
|
||||
self.result = float(np.median(self.values)) if median else float(self.sum) / len(self.values)
|
||||
return self.result
|
||||
42
common/git.py
Normal file
42
common/git.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from functools import cache
|
||||
import subprocess
|
||||
from openpilot.common.run import run_cmd, run_cmd_default
|
||||
|
||||
|
||||
@cache
|
||||
def get_commit(cwd: str = None, branch: str = "HEAD") -> str:
|
||||
return run_cmd_default(["git", "rev-parse", branch], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_commit_date(cwd: str = None, commit: str = "HEAD") -> str:
|
||||
return run_cmd_default(["git", "show", "--no-patch", "--format='%ct %ci'", commit], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_short_branch(cwd: str = None) -> str:
|
||||
return run_cmd_default(["git", "rev-parse", "--abbrev-ref", "HEAD"], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_branch(cwd: str = None) -> str:
|
||||
return run_cmd_default(["git", "rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{u}"], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_origin(cwd: str = None) -> str:
|
||||
try:
|
||||
local_branch = run_cmd(["git", "name-rev", "--name-only", "HEAD"], cwd=cwd)
|
||||
tracking_remote = run_cmd(["git", "config", "branch." + local_branch + ".remote"], cwd=cwd)
|
||||
return run_cmd(["git", "config", "remote." + tracking_remote + ".url"], cwd=cwd)
|
||||
except subprocess.CalledProcessError: # Not on a branch, fallback
|
||||
return run_cmd_default(["git", "config", "--get", "remote.origin.url"], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_normalized_origin(cwd: str = None) -> str:
|
||||
return get_origin(cwd) \
|
||||
.replace("git@", "", 1) \
|
||||
.replace(".git", "", 1) \
|
||||
.replace("https://", "", 1) \
|
||||
.replace(":", "/", 1)
|
||||
89
common/gpio.py
Normal file
89
common/gpio.py
Normal file
@@ -0,0 +1,89 @@
|
||||
import os
|
||||
import fcntl
|
||||
import ctypes
|
||||
from functools import cache
|
||||
|
||||
def gpio_init(pin: int, output: bool) -> None:
|
||||
try:
|
||||
with open(f"/sys/class/gpio/gpio{pin}/direction", 'wb') as f:
|
||||
f.write(b"out" if output else b"in")
|
||||
except Exception as e:
|
||||
print(f"Failed to set gpio {pin} direction: {e}")
|
||||
|
||||
def gpio_set(pin: int, high: bool) -> None:
|
||||
try:
|
||||
with open(f"/sys/class/gpio/gpio{pin}/value", 'wb') as f:
|
||||
f.write(b"1" if high else b"0")
|
||||
except Exception as e:
|
||||
print(f"Failed to set gpio {pin} value: {e}")
|
||||
|
||||
def gpio_read(pin: int) -> bool | None:
|
||||
val = None
|
||||
try:
|
||||
with open(f"/sys/class/gpio/gpio{pin}/value", 'rb') as f:
|
||||
val = bool(int(f.read().strip()))
|
||||
except Exception as e:
|
||||
print(f"Failed to set gpio {pin} value: {e}")
|
||||
|
||||
return val
|
||||
|
||||
def gpio_export(pin: int) -> None:
|
||||
if os.path.isdir(f"/sys/class/gpio/gpio{pin}"):
|
||||
return
|
||||
|
||||
try:
|
||||
with open("/sys/class/gpio/export", 'w') as f:
|
||||
f.write(str(pin))
|
||||
except Exception:
|
||||
print(f"Failed to export gpio {pin}")
|
||||
|
||||
@cache
|
||||
def get_irq_action(irq: int) -> list[str]:
|
||||
try:
|
||||
with open(f"/sys/kernel/irq/{irq}/actions") as f:
|
||||
actions = f.read().strip().split(',')
|
||||
return actions
|
||||
except FileNotFoundError:
|
||||
return []
|
||||
|
||||
def get_irqs_for_action(action: str) -> list[str]:
|
||||
ret = []
|
||||
with open("/proc/interrupts") as f:
|
||||
for l in f.readlines():
|
||||
irq = l.split(':')[0].strip()
|
||||
if irq.isdigit() and action in get_irq_action(irq):
|
||||
ret.append(irq)
|
||||
return ret
|
||||
|
||||
# *** gpiochip ***
|
||||
|
||||
class gpioevent_data(ctypes.Structure):
|
||||
_fields_ = [
|
||||
("timestamp", ctypes.c_uint64),
|
||||
("id", ctypes.c_uint32),
|
||||
]
|
||||
|
||||
class gpioevent_request(ctypes.Structure):
|
||||
_fields_ = [
|
||||
("lineoffset", ctypes.c_uint32),
|
||||
("handleflags", ctypes.c_uint32),
|
||||
("eventflags", ctypes.c_uint32),
|
||||
("label", ctypes.c_char * 32),
|
||||
("fd", ctypes.c_int)
|
||||
]
|
||||
|
||||
def gpiochip_get_ro_value_fd(label: str, gpiochip_id: int, pin: int) -> int:
|
||||
GPIOEVENT_REQUEST_BOTH_EDGES = 0x3
|
||||
GPIOHANDLE_REQUEST_INPUT = 0x1
|
||||
GPIO_GET_LINEEVENT_IOCTL = 0xc030b404
|
||||
|
||||
rq = gpioevent_request()
|
||||
rq.lineoffset = pin
|
||||
rq.handleflags = GPIOHANDLE_REQUEST_INPUT
|
||||
rq.eventflags = GPIOEVENT_REQUEST_BOTH_EDGES
|
||||
rq.label = label.encode('utf-8')[:31] + b'\0'
|
||||
|
||||
fd = os.open(f"/dev/gpiochip{gpiochip_id}", os.O_RDONLY)
|
||||
fcntl.ioctl(fd, GPIO_GET_LINEEVENT_IOCTL, rq)
|
||||
os.close(fd)
|
||||
return int(rq.fd)
|
||||
8
common/gps.py
Normal file
8
common/gps.py
Normal file
@@ -0,0 +1,8 @@
|
||||
from openpilot.common.params import Params
|
||||
|
||||
|
||||
def get_gps_location_service(params: Params) -> str:
|
||||
if params.get_bool("UbloxAvailable"):
|
||||
return "gpsLocationExternal"
|
||||
else:
|
||||
return "gpsLocation"
|
||||
250
common/logging_extra.py
Normal file
250
common/logging_extra.py
Normal file
@@ -0,0 +1,250 @@
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
import copy
|
||||
import json
|
||||
import time
|
||||
import uuid
|
||||
import socket
|
||||
import logging
|
||||
import traceback
|
||||
import numpy as np
|
||||
from threading import local
|
||||
from collections import OrderedDict
|
||||
from contextlib import contextmanager
|
||||
|
||||
LOG_TIMESTAMPS = "LOG_TIMESTAMPS" in os.environ
|
||||
|
||||
def json_handler(obj):
|
||||
if isinstance(obj, np.bool_):
|
||||
return bool(obj)
|
||||
# if isinstance(obj, (datetime.date, datetime.time)):
|
||||
# return obj.isoformat()
|
||||
return repr(obj)
|
||||
|
||||
def json_robust_dumps(obj):
|
||||
return json.dumps(obj, default=json_handler)
|
||||
|
||||
class NiceOrderedDict(OrderedDict):
|
||||
def __str__(self):
|
||||
return json_robust_dumps(self)
|
||||
|
||||
class SwagFormatter(logging.Formatter):
|
||||
def __init__(self, swaglogger):
|
||||
logging.Formatter.__init__(self, None, '%a %b %d %H:%M:%S %Z %Y')
|
||||
|
||||
self.swaglogger = swaglogger
|
||||
self.host = socket.gethostname()
|
||||
|
||||
def format_dict(self, record):
|
||||
record_dict = NiceOrderedDict()
|
||||
|
||||
if isinstance(record.msg, dict):
|
||||
record_dict['msg'] = record.msg
|
||||
else:
|
||||
try:
|
||||
record_dict['msg'] = record.getMessage()
|
||||
except (ValueError, TypeError):
|
||||
record_dict['msg'] = [record.msg]+record.args
|
||||
|
||||
record_dict['ctx'] = self.swaglogger.get_ctx()
|
||||
|
||||
if record.exc_info:
|
||||
record_dict['exc_info'] = self.formatException(record.exc_info)
|
||||
|
||||
record_dict['level'] = record.levelname
|
||||
record_dict['levelnum'] = record.levelno
|
||||
record_dict['name'] = record.name
|
||||
record_dict['filename'] = record.filename
|
||||
record_dict['lineno'] = record.lineno
|
||||
record_dict['pathname'] = record.pathname
|
||||
record_dict['module'] = record.module
|
||||
record_dict['funcName'] = record.funcName
|
||||
record_dict['host'] = self.host
|
||||
record_dict['process'] = record.process
|
||||
record_dict['thread'] = record.thread
|
||||
record_dict['threadName'] = record.threadName
|
||||
record_dict['created'] = record.created
|
||||
|
||||
return record_dict
|
||||
|
||||
def format(self, record):
|
||||
if self.swaglogger is None:
|
||||
raise Exception("must set swaglogger before calling format()")
|
||||
return json_robust_dumps(self.format_dict(record))
|
||||
|
||||
class SwagLogFileFormatter(SwagFormatter):
|
||||
def fix_kv(self, k, v):
|
||||
# append type to names to preserve legacy naming in logs
|
||||
# avoids overlapping key namespaces with different types
|
||||
# e.g. log.info() creates 'msg' -> 'msg$s'
|
||||
# log.event() creates 'msg.health.logMonoTime' -> 'msg.health.logMonoTime$i'
|
||||
# because overlapping namespace 'msg' caused problems
|
||||
if isinstance(v, (str, bytes)):
|
||||
k += "$s"
|
||||
elif isinstance(v, float):
|
||||
k += "$f"
|
||||
elif isinstance(v, bool):
|
||||
k += "$b"
|
||||
elif isinstance(v, int):
|
||||
k += "$i"
|
||||
elif isinstance(v, dict):
|
||||
nv = {}
|
||||
for ik, iv in v.items():
|
||||
ik, iv = self.fix_kv(ik, iv)
|
||||
nv[ik] = iv
|
||||
v = nv
|
||||
elif isinstance(v, list):
|
||||
k += "$a"
|
||||
return k, v
|
||||
|
||||
def format(self, record):
|
||||
if isinstance(record, str):
|
||||
v = json.loads(record)
|
||||
else:
|
||||
v = self.format_dict(record)
|
||||
|
||||
mk, mv = self.fix_kv('msg', v['msg'])
|
||||
del v['msg']
|
||||
v[mk] = mv
|
||||
v['id'] = uuid.uuid4().hex
|
||||
|
||||
return json_robust_dumps(v)
|
||||
|
||||
class SwagErrorFilter(logging.Filter):
|
||||
def filter(self, record):
|
||||
return record.levelno < logging.ERROR
|
||||
|
||||
def _tmpfunc():
|
||||
return 0
|
||||
|
||||
def _srcfile():
|
||||
return os.path.normcase(_tmpfunc.__code__.co_filename)
|
||||
|
||||
class SwagLogger(logging.Logger):
|
||||
def __init__(self):
|
||||
logging.Logger.__init__(self, "swaglog")
|
||||
|
||||
self.global_ctx = {}
|
||||
|
||||
self.log_local = local()
|
||||
self.log_local.ctx = {}
|
||||
|
||||
def local_ctx(self):
|
||||
try:
|
||||
return self.log_local.ctx
|
||||
except AttributeError:
|
||||
self.log_local.ctx = {}
|
||||
return self.log_local.ctx
|
||||
|
||||
def get_ctx(self):
|
||||
return dict(self.local_ctx(), **self.global_ctx)
|
||||
|
||||
@contextmanager
|
||||
def ctx(self, **kwargs):
|
||||
old_ctx = self.local_ctx()
|
||||
self.log_local.ctx = copy.copy(old_ctx) or {}
|
||||
self.log_local.ctx.update(kwargs)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.log_local.ctx = old_ctx
|
||||
|
||||
def bind(self, **kwargs):
|
||||
self.local_ctx().update(kwargs)
|
||||
|
||||
def bind_global(self, **kwargs):
|
||||
self.global_ctx.update(kwargs)
|
||||
|
||||
def event(self, event, *args, **kwargs):
|
||||
evt = NiceOrderedDict()
|
||||
evt['event'] = event
|
||||
if args:
|
||||
evt['args'] = args
|
||||
evt.update(kwargs)
|
||||
if 'error' in kwargs:
|
||||
self.error(evt)
|
||||
elif 'debug' in kwargs:
|
||||
self.debug(evt)
|
||||
else:
|
||||
self.info(evt)
|
||||
|
||||
def timestamp(self, event_name):
|
||||
if LOG_TIMESTAMPS:
|
||||
t = time.monotonic()
|
||||
tstp = NiceOrderedDict()
|
||||
tstp['timestamp'] = NiceOrderedDict()
|
||||
tstp['timestamp']["event"] = event_name
|
||||
tstp['timestamp']["time"] = t*1e9
|
||||
self.debug(tstp)
|
||||
|
||||
def findCaller(self, stack_info=False, stacklevel=1):
|
||||
"""
|
||||
Find the stack frame of the caller so that we can note the source
|
||||
file name, line number and function name.
|
||||
"""
|
||||
f = sys._getframe(3)
|
||||
#On some versions of IronPython, currentframe() returns None if
|
||||
#IronPython isn't run with -X:Frames.
|
||||
if f is not None:
|
||||
f = f.f_back
|
||||
orig_f = f
|
||||
while f and stacklevel > 1:
|
||||
f = f.f_back
|
||||
stacklevel -= 1
|
||||
if not f:
|
||||
f = orig_f
|
||||
rv = "(unknown file)", 0, "(unknown function)", None
|
||||
while hasattr(f, "f_code"):
|
||||
co = f.f_code
|
||||
filename = os.path.normcase(co.co_filename)
|
||||
|
||||
# TODO: is this pylint exception correct?
|
||||
if filename == _srcfile:
|
||||
f = f.f_back
|
||||
continue
|
||||
sinfo = None
|
||||
if stack_info:
|
||||
sio = io.StringIO()
|
||||
sio.write('Stack (most recent call last):\n')
|
||||
traceback.print_stack(f, file=sio)
|
||||
sinfo = sio.getvalue()
|
||||
if sinfo[-1] == '\n':
|
||||
sinfo = sinfo[:-1]
|
||||
sio.close()
|
||||
rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
|
||||
break
|
||||
return rv
|
||||
|
||||
if __name__ == "__main__":
|
||||
log = SwagLogger()
|
||||
|
||||
stdout_handler = logging.StreamHandler(sys.stdout)
|
||||
stdout_handler.setLevel(logging.INFO)
|
||||
stdout_handler.addFilter(SwagErrorFilter())
|
||||
log.addHandler(stdout_handler)
|
||||
|
||||
stderr_handler = logging.StreamHandler(sys.stderr)
|
||||
stderr_handler.setLevel(logging.ERROR)
|
||||
log.addHandler(stderr_handler)
|
||||
|
||||
log.info("asdasd %s", "a")
|
||||
log.info({'wut': 1})
|
||||
log.warning("warning")
|
||||
log.error("error")
|
||||
log.critical("critical")
|
||||
log.event("test", x="y")
|
||||
|
||||
with log.ctx():
|
||||
stdout_handler.setFormatter(SwagFormatter(log))
|
||||
stderr_handler.setFormatter(SwagFormatter(log))
|
||||
log.bind(user="some user")
|
||||
log.info("in req")
|
||||
print("")
|
||||
log.warning("warning")
|
||||
print("")
|
||||
log.error("error")
|
||||
print("")
|
||||
log.critical("critical")
|
||||
print("")
|
||||
log.event("do_req", a=1, b="c")
|
||||
45
common/markdown.py
Normal file
45
common/markdown.py
Normal file
@@ -0,0 +1,45 @@
|
||||
HTML_REPLACEMENTS = [
|
||||
(r'&', r'&'),
|
||||
(r'"', r'"'),
|
||||
]
|
||||
|
||||
def parse_markdown(text: str, tab_length: int = 2) -> str:
|
||||
lines = text.split("\n")
|
||||
output: list[str] = []
|
||||
list_level = 0
|
||||
|
||||
def end_outstanding_lists(level: int, end_level: int) -> int:
|
||||
while level > end_level:
|
||||
level -= 1
|
||||
output.append("</ul>")
|
||||
if level > 0:
|
||||
output.append("</li>")
|
||||
return end_level
|
||||
|
||||
for i, line in enumerate(lines):
|
||||
if i + 1 < len(lines) and lines[i + 1].startswith("==="): # heading
|
||||
output.append(f"<h1>{line}</h1>")
|
||||
elif line.startswith("==="):
|
||||
pass
|
||||
elif line.lstrip().startswith("* "): # list
|
||||
line_level = 1 + line.count(" " * tab_length, 0, line.index("*"))
|
||||
if list_level >= line_level:
|
||||
list_level = end_outstanding_lists(list_level, line_level)
|
||||
else:
|
||||
list_level += 1
|
||||
if list_level > 1:
|
||||
output[-1] = output[-1].replace("</li>", "")
|
||||
output.append("<ul>")
|
||||
output.append(f"<li>{line.replace('*', '', 1).lstrip()}</li>")
|
||||
else:
|
||||
list_level = end_outstanding_lists(list_level, 0)
|
||||
if len(line) > 0:
|
||||
output.append(line)
|
||||
|
||||
end_outstanding_lists(list_level, 0)
|
||||
output_str = "\n".join(output) + "\n"
|
||||
|
||||
for (fr, to) in HTML_REPLACEMENTS:
|
||||
output_str = output_str.replace(fr, to)
|
||||
|
||||
return output_str
|
||||
85
common/mat.h
Normal file
85
common/mat.h
Normal file
@@ -0,0 +1,85 @@
|
||||
#pragma once
|
||||
|
||||
typedef struct vec3 {
|
||||
float v[3];
|
||||
} vec3;
|
||||
|
||||
typedef struct vec4 {
|
||||
float v[4];
|
||||
} vec4;
|
||||
|
||||
typedef struct mat3 {
|
||||
float v[3*3];
|
||||
} mat3;
|
||||
|
||||
typedef struct mat4 {
|
||||
float v[4*4];
|
||||
} mat4;
|
||||
|
||||
static inline mat3 matmul3(const mat3 &a, const mat3 &b) {
|
||||
mat3 ret = {{0.0}};
|
||||
for (int r=0; r<3; r++) {
|
||||
for (int c=0; c<3; c++) {
|
||||
float v = 0.0;
|
||||
for (int k=0; k<3; k++) {
|
||||
v += a.v[r*3+k] * b.v[k*3+c];
|
||||
}
|
||||
ret.v[r*3+c] = v;
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
static inline vec3 matvecmul3(const mat3 &a, const vec3 &b) {
|
||||
vec3 ret = {{0.0}};
|
||||
for (int r=0; r<3; r++) {
|
||||
for (int c=0; c<3; c++) {
|
||||
ret.v[r] += a.v[r*3+c] * b.v[c];
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
static inline mat4 matmul(const mat4 &a, const mat4 &b) {
|
||||
mat4 ret = {{0.0}};
|
||||
for (int r=0; r<4; r++) {
|
||||
for (int c=0; c<4; c++) {
|
||||
float v = 0.0;
|
||||
for (int k=0; k<4; k++) {
|
||||
v += a.v[r*4+k] * b.v[k*4+c];
|
||||
}
|
||||
ret.v[r*4+c] = v;
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
static inline vec4 matvecmul(const mat4 &a, const vec4 &b) {
|
||||
vec4 ret = {{0.0}};
|
||||
for (int r=0; r<4; r++) {
|
||||
for (int c=0; c<4; c++) {
|
||||
ret.v[r] += a.v[r*4+c] * b.v[c];
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
// scales the input and output space of a transformation matrix
|
||||
// that assumes pixel-center origin.
|
||||
static inline mat3 transform_scale_buffer(const mat3 &in, float s) {
|
||||
// in_pt = ( transform(out_pt/s + 0.5) - 0.5) * s
|
||||
|
||||
mat3 transform_out = {{
|
||||
1.0f/s, 0.0f, 0.5f,
|
||||
0.0f, 1.0f/s, 0.5f,
|
||||
0.0f, 0.0f, 1.0f,
|
||||
}};
|
||||
|
||||
mat3 transform_in = {{
|
||||
s, 0.0f, -0.5f*s,
|
||||
0.0f, s, -0.5f*s,
|
||||
0.0f, 0.0f, 1.0f,
|
||||
}};
|
||||
|
||||
return matmul3(transform_in, matmul3(in, transform_out));
|
||||
}
|
||||
51
common/mock/__init__.py
Normal file
51
common/mock/__init__.py
Normal file
@@ -0,0 +1,51 @@
|
||||
"""
|
||||
Utilities for generating mock messages for testing.
|
||||
example in common/tests/test_mock.py
|
||||
"""
|
||||
|
||||
|
||||
import functools
|
||||
import threading
|
||||
from cereal.messaging import PubMaster
|
||||
from cereal.services import SERVICE_LIST
|
||||
from openpilot.common.mock.generators import generate_livePose, generate_liveLocationKalman
|
||||
from openpilot.common.realtime import Ratekeeper
|
||||
|
||||
|
||||
MOCK_GENERATOR = {
|
||||
"livePose": generate_livePose,
|
||||
"liveLocationKalman": generate_liveLocationKalman
|
||||
}
|
||||
|
||||
|
||||
def generate_messages_loop(services: list[str], done: threading.Event):
|
||||
pm = PubMaster(services)
|
||||
rk = Ratekeeper(100)
|
||||
i = 0
|
||||
while not done.is_set():
|
||||
for s in services:
|
||||
should_send = i % (100/SERVICE_LIST[s].frequency) == 0
|
||||
if should_send:
|
||||
message = MOCK_GENERATOR[s]()
|
||||
pm.send(s, message)
|
||||
i += 1
|
||||
rk.keep_time()
|
||||
|
||||
|
||||
def mock_messages(services: list[str] | str):
|
||||
if isinstance(services, str):
|
||||
services = [services]
|
||||
|
||||
def decorator(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
done = threading.Event()
|
||||
t = threading.Thread(target=generate_messages_loop, args=(services, done))
|
||||
t.start()
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
finally:
|
||||
done.set()
|
||||
t.join()
|
||||
return wrapper
|
||||
return decorator
|
||||
33
common/mock/generators.py
Normal file
33
common/mock/generators.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from cereal import messaging
|
||||
|
||||
|
||||
LOCATION1 = (32.7174, -117.16277)
|
||||
LOCATION2 = (32.7558, -117.2037)
|
||||
|
||||
LLK_DECIMATION = 10
|
||||
RENDER_FRAMES = 15
|
||||
DEFAULT_ITERATIONS = RENDER_FRAMES * LLK_DECIMATION
|
||||
|
||||
|
||||
def generate_liveLocationKalman(location=LOCATION1):
|
||||
msg = messaging.new_message('liveLocationKalman')
|
||||
msg.liveLocationKalman.positionGeodetic = {'value': [*location, 0], 'std': [0., 0., 0.], 'valid': True}
|
||||
msg.liveLocationKalman.positionECEF = {'value': [0., 0., 0.], 'std': [0., 0., 0.], 'valid': True}
|
||||
msg.liveLocationKalman.calibratedOrientationNED = {'value': [0., 0., 0.], 'std': [0., 0., 0.], 'valid': True}
|
||||
msg.liveLocationKalman.velocityCalibrated = {'value': [0., 0., 0.], 'std': [0., 0., 0.], 'valid': True}
|
||||
msg.liveLocationKalman.status = 'valid'
|
||||
msg.liveLocationKalman.gpsOK = True
|
||||
return msg
|
||||
|
||||
|
||||
def generate_livePose():
|
||||
msg = messaging.new_message('livePose')
|
||||
meas = {'x': 0.0, 'y': 0.0, 'z': 0.0, 'xStd': 0.0, 'yStd': 0.0, 'zStd': 0.0, 'valid': True}
|
||||
msg.livePose.orientationNED = meas
|
||||
msg.livePose.velocityDevice = meas
|
||||
msg.livePose.angularVelocityDevice = meas
|
||||
msg.livePose.accelerationDevice = meas
|
||||
msg.livePose.inputsOK = True
|
||||
msg.livePose.posenetOK = True
|
||||
msg.livePose.sensorsOK = True
|
||||
return msg
|
||||
92
common/params.h
Normal file
92
common/params.h
Normal file
@@ -0,0 +1,92 @@
|
||||
#pragma once
|
||||
|
||||
#include <future>
|
||||
#include <map>
|
||||
#include <string>
|
||||
#include <tuple>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
#include "common/queue.h"
|
||||
|
||||
enum ParamKeyType {
|
||||
PERSISTENT = 0x02,
|
||||
CLEAR_ON_MANAGER_START = 0x04,
|
||||
CLEAR_ON_ONROAD_TRANSITION = 0x08,
|
||||
CLEAR_ON_OFFROAD_TRANSITION = 0x10,
|
||||
DONT_LOG = 0x20,
|
||||
DEVELOPMENT_ONLY = 0x40,
|
||||
ALL = 0xFFFFFFFF
|
||||
};
|
||||
|
||||
class Params {
|
||||
public:
|
||||
explicit Params(const std::string &path = {});
|
||||
~Params();
|
||||
// Not copyable.
|
||||
Params(const Params&) = delete;
|
||||
Params& operator=(const Params&) = delete;
|
||||
|
||||
std::vector<std::string> allKeys() const;
|
||||
bool checkKey(const std::string &key);
|
||||
ParamKeyType getKeyType(const std::string &key);
|
||||
inline std::string getParamPath(const std::string &key = {}) {
|
||||
return params_path + params_prefix + (key.empty() ? "" : "/" + key);
|
||||
}
|
||||
|
||||
// Delete a value
|
||||
int remove(const std::string &key);
|
||||
void clearAll(ParamKeyType type);
|
||||
|
||||
// helpers for reading values
|
||||
std::string get(const std::string &key, bool block = false);
|
||||
inline bool getBool(const std::string &key, bool block = false) {
|
||||
return get(key, block) == "1";
|
||||
}
|
||||
inline int getInt(const std::string &key, bool block = false) {
|
||||
std::string value = get(key, block);
|
||||
return value.empty() ? 0 : std::stoi(value);
|
||||
}
|
||||
inline float getFloat(const std::string &key, bool block = false) {
|
||||
std::string value = get(key, block);
|
||||
return value.empty() ? 0.0 : std::stof(value);
|
||||
}
|
||||
std::map<std::string, std::string> readAll();
|
||||
|
||||
// helpers for writing values
|
||||
int put(const char *key, const char *val, size_t value_size);
|
||||
inline int put(const std::string &key, const std::string &val) {
|
||||
return put(key.c_str(), val.data(), val.size());
|
||||
}
|
||||
inline int putBool(const std::string &key, bool val) {
|
||||
return put(key.c_str(), val ? "1" : "0", 1);
|
||||
}
|
||||
inline int putInt(const std::string &key, int val) {
|
||||
return put(key.c_str(), std::to_string(val).c_str(), std::to_string(val).size());
|
||||
}
|
||||
inline int putFloat(const std::string &key, float val) {
|
||||
return put(key.c_str(), std::to_string(val).c_str(), std::to_string(val).size());
|
||||
}
|
||||
void putNonBlocking(const std::string &key, const std::string &val);
|
||||
inline void putBoolNonBlocking(const std::string &key, bool val) {
|
||||
putNonBlocking(key, val ? "1" : "0");
|
||||
}
|
||||
void putIntNonBlocking(const std::string &key, const std::string &val);
|
||||
inline void putIntNonBlocking(const std::string &key, int val) {
|
||||
putNonBlocking(key, std::to_string(val));
|
||||
}
|
||||
void putFloatNonBlocking(const std::string &key, const std::string &val);
|
||||
inline void putFloatNonBlocking(const std::string &key, float val) {
|
||||
putNonBlocking(key, std::to_string(val));
|
||||
}
|
||||
|
||||
private:
|
||||
void asyncWriteThread();
|
||||
|
||||
std::string params_path;
|
||||
std::string params_prefix;
|
||||
|
||||
// for nonblocking write
|
||||
std::future<void> future;
|
||||
SafeQueue<std::pair<std::string, std::string>> queue;
|
||||
};
|
||||
18
common/params.py
Normal file
18
common/params.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from openpilot.common.params_pyx import Params, ParamKeyType, UnknownKeyName
|
||||
assert Params
|
||||
assert ParamKeyType
|
||||
assert UnknownKeyName
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
|
||||
params = Params()
|
||||
key = sys.argv[1]
|
||||
assert params.check_key(key), f"unknown param: {key}"
|
||||
|
||||
if len(sys.argv) == 3:
|
||||
val = sys.argv[2]
|
||||
print(f"SET: {key} = {val}")
|
||||
params.put(key, val)
|
||||
elif len(sys.argv) == 2:
|
||||
print(f"GET: {key} = {params.get(key)}")
|
||||
335
common/params_keys.h
Normal file
335
common/params_keys.h
Normal file
@@ -0,0 +1,335 @@
|
||||
#pragma once
|
||||
|
||||
#include <string>
|
||||
#include <unordered_map>
|
||||
|
||||
inline static std::unordered_map<std::string, uint32_t> keys = {
|
||||
{"AccessToken", CLEAR_ON_MANAGER_START | DONT_LOG},
|
||||
{"AdbEnabled", PERSISTENT},
|
||||
{"AlwaysOnDM", PERSISTENT},
|
||||
{"ApiCache_Device", PERSISTENT},
|
||||
{"ApiCache_FirehoseStats", PERSISTENT},
|
||||
{"AssistNowToken", PERSISTENT},
|
||||
{"AthenadPid", PERSISTENT},
|
||||
{"AthenadUploadQueue", PERSISTENT},
|
||||
{"AthenadRecentlyViewedRoutes", PERSISTENT},
|
||||
{"BootCount", PERSISTENT},
|
||||
{"CalibrationParams", PERSISTENT},
|
||||
{"CameraDebugExpGain", CLEAR_ON_MANAGER_START},
|
||||
{"CameraDebugExpTime", CLEAR_ON_MANAGER_START},
|
||||
{"CarBatteryCapacity", PERSISTENT},
|
||||
{"CarParams", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"CarParamsCache", CLEAR_ON_MANAGER_START},
|
||||
{"CarParamsPersistent", PERSISTENT},
|
||||
{"CarParamsPrevRoute", PERSISTENT},
|
||||
{"CompletedTrainingVersion", PERSISTENT},
|
||||
{"ControlsReady", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"CurrentBootlog", PERSISTENT},
|
||||
{"CurrentRoute", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"DisableLogging", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"DisablePowerDown", PERSISTENT},
|
||||
{"DisableUpdates", PERSISTENT},
|
||||
{"DisengageOnAccelerator", PERSISTENT},
|
||||
{"DongleId", PERSISTENT},
|
||||
{"DoReboot", CLEAR_ON_MANAGER_START},
|
||||
{"DoShutdown", CLEAR_ON_MANAGER_START},
|
||||
{"DoUninstall", CLEAR_ON_MANAGER_START},
|
||||
{"DriverTooDistracted", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"AlphaLongitudinalEnabled", PERSISTENT | DEVELOPMENT_ONLY},
|
||||
{"ExperimentalMode", PERSISTENT},
|
||||
{"ExperimentalModeConfirmed", PERSISTENT},
|
||||
{"FirmwareQueryDone", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"ForcePowerDown", PERSISTENT},
|
||||
{"GitBranch", PERSISTENT},
|
||||
{"GitCommit", PERSISTENT},
|
||||
{"GitCommitDate", PERSISTENT},
|
||||
{"GitDiff", PERSISTENT},
|
||||
{"GithubSshKeys", PERSISTENT},
|
||||
{"GithubUsername", PERSISTENT},
|
||||
{"GitRemote", PERSISTENT},
|
||||
{"GsmApn", PERSISTENT},
|
||||
{"GsmMetered", PERSISTENT},
|
||||
{"GsmRoaming", PERSISTENT},
|
||||
{"HardwareSerial", PERSISTENT},
|
||||
{"HasAcceptedTerms", PERSISTENT},
|
||||
{"InstallDate", PERSISTENT},
|
||||
{"IsDriverViewEnabled", CLEAR_ON_MANAGER_START},
|
||||
{"IsEngaged", PERSISTENT},
|
||||
{"IsLdwEnabled", PERSISTENT},
|
||||
{"IsMetric", PERSISTENT},
|
||||
{"IsOffroad", CLEAR_ON_MANAGER_START},
|
||||
{"IsOnroad", PERSISTENT},
|
||||
{"IsRhdDetected", PERSISTENT},
|
||||
{"IsReleaseBranch", CLEAR_ON_MANAGER_START},
|
||||
{"IsTakingSnapshot", CLEAR_ON_MANAGER_START},
|
||||
{"IsTestedBranch", CLEAR_ON_MANAGER_START},
|
||||
{"JoystickDebugMode", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"LanguageSetting", PERSISTENT},
|
||||
{"LastAthenaPingTime", CLEAR_ON_MANAGER_START},
|
||||
{"LastGPSPosition", PERSISTENT},
|
||||
{"LastManagerExitReason", CLEAR_ON_MANAGER_START},
|
||||
{"LastOffroadStatusPacket", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"LastPowerDropDetected", CLEAR_ON_MANAGER_START},
|
||||
{"LastUpdateException", CLEAR_ON_MANAGER_START},
|
||||
{"LastUpdateTime", PERSISTENT},
|
||||
{"LiveDelay", PERSISTENT},
|
||||
{"LiveParameters", PERSISTENT},
|
||||
{"LiveParametersV2", PERSISTENT},
|
||||
{"LiveTorqueParameters", PERSISTENT | DONT_LOG},
|
||||
{"LocationFilterInitialState", PERSISTENT},
|
||||
{"LongitudinalManeuverMode", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"LongitudinalPersonality", PERSISTENT},
|
||||
{"NetworkMetered", PERSISTENT},
|
||||
{"ObdMultiplexingChanged", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"ObdMultiplexingEnabled", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"Offroad_BadNvme", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_CarUnrecognized", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"Offroad_ConnectivityNeeded", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_ConnectivityNeededPrompt", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_IsTakingSnapshot", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_NeosUpdate", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_NoFirmware", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"Offroad_Recalibration", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"Offroad_StorageMissing", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_TemperatureTooHigh", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_UnofficialHardware", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_UpdateFailed", CLEAR_ON_MANAGER_START},
|
||||
{"OpenpilotEnabledToggle", PERSISTENT},
|
||||
{"PandaHeartbeatLost", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"PandaSomResetTriggered", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"PandaSignatures", CLEAR_ON_MANAGER_START},
|
||||
{"PrimeType", PERSISTENT},
|
||||
{"RecordAudio", PERSISTENT},
|
||||
{"RecordFront", PERSISTENT},
|
||||
{"RecordFrontLock", PERSISTENT}, // for the internal fleet
|
||||
{"SecOCKey", PERSISTENT | DONT_LOG},
|
||||
{"RouteCount", PERSISTENT},
|
||||
{"SnoozeUpdate", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"SshEnabled", PERSISTENT},
|
||||
{"TermsVersion", PERSISTENT},
|
||||
{"TrainingVersion", PERSISTENT},
|
||||
{"UbloxAvailable", PERSISTENT},
|
||||
{"UpdateAvailable", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"UpdateFailedCount", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterAvailableBranches", PERSISTENT},
|
||||
{"UpdaterCurrentDescription", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterCurrentReleaseNotes", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterFetchAvailable", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterNewDescription", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterNewReleaseNotes", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterState", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterTargetBranch", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterLastFetchTime", PERSISTENT},
|
||||
{"Version", PERSISTENT},
|
||||
|
||||
{"ForceOffroad", CLEAR_ON_MANAGER_START},
|
||||
{"BydModifiedStockLong", PERSISTENT},
|
||||
{"AlwaysOnLKAS", PERSISTENT},
|
||||
{"BydAutoTuning", PERSISTENT},
|
||||
{"BydLatUseSiglin", PERSISTENT},
|
||||
{"CameraOffset", PERSISTENT},
|
||||
{"UseRedPanda", PERSISTENT},
|
||||
{"UseSteerRateLimiter", PERSISTENT},
|
||||
{"SteerRateLimLoSpd", PERSISTENT},
|
||||
{"SteerRateLimHiSpd", PERSISTENT},
|
||||
{"KeepLkasPassive", PERSISTENT},
|
||||
{"BydMpcTsr", PERSISTENT},
|
||||
{"BydLowSpdLong", PERSISTENT},
|
||||
{"SpeedCorrect30", PERSISTENT},
|
||||
{"SpeedCorrect60", PERSISTENT},
|
||||
{"SpeedCorrect90", PERSISTENT},
|
||||
{"SpeedCorrect120", PERSISTENT},
|
||||
{"AuthExpireDate", PERSISTENT},
|
||||
{"DeviceAuthKey", PERSISTENT},
|
||||
{"BydBsdType2", PERSISTENT},
|
||||
|
||||
{"LateralAngleSpdUp0", PERSISTENT},
|
||||
{"LateralAngleSpdDn0", PERSISTENT},
|
||||
{"LateralAngleSpdBp1", PERSISTENT},
|
||||
{"LateralAngleSpdUp1", PERSISTENT},
|
||||
{"LateralAngleSpdDn1", PERSISTENT},
|
||||
{"LateralAngleSpdBp2", PERSISTENT},
|
||||
{"LateralAngleSpdUp2", PERSISTENT},
|
||||
{"LateralAngleSpdDn2", PERSISTENT},
|
||||
{"LateralAngleTorqMax", PERSISTENT},
|
||||
{"LateralAngleTorqCut", PERSISTENT},
|
||||
|
||||
// carrot
|
||||
{"LongitudinalPersonalityMax", PERSISTENT},
|
||||
{"NetworkAddress", CLEAR_ON_MANAGER_START},
|
||||
|
||||
{"ApiCache_NavDestinations", PERSISTENT},
|
||||
{"NavDestination", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"NavDestinationWaypoints", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"NavPastDestinations", PERSISTENT},
|
||||
{"NavSettingLeftSide", PERSISTENT},
|
||||
{"NavSettingTime24h", PERSISTENT},
|
||||
{"MapboxStyle", PERSISTENT },
|
||||
{"MapboxPublicKey", PERSISTENT},
|
||||
{"MapboxSecretKey", PERSISTENT},
|
||||
{"GMapKey", PERSISTENT},
|
||||
{"SearchInput", PERSISTENT},
|
||||
|
||||
{"CarSelected3", PERSISTENT},
|
||||
{"SupportedCars", PERSISTENT},
|
||||
{"SupportedCars_gm", PERSISTENT},
|
||||
{"ShowDebugUI", PERSISTENT},
|
||||
{"ShowTpms", PERSISTENT},
|
||||
{"ShowDateTime", PERSISTENT},
|
||||
{"ShowPathEnd", PERSISTENT},
|
||||
{"ShowCustomBrightness", PERSISTENT},
|
||||
{"ShowLaneInfo", PERSISTENT},
|
||||
{"ShowRadarInfo", PERSISTENT},
|
||||
{"ShowDeviceState", PERSISTENT},
|
||||
{"ShowRouteInfo", PERSISTENT },
|
||||
{"ShowPathMode", PERSISTENT},
|
||||
{"ShowPathColor", PERSISTENT},
|
||||
{"ShowPathColorCruiseOff", PERSISTENT},
|
||||
{"ShowPathModeLane", PERSISTENT},
|
||||
{"ShowPathColorLane", PERSISTENT},
|
||||
{"ShowPlotMode", PERSISTENT},
|
||||
{"RecordRoadCam", PERSISTENT },
|
||||
{"HDPuse", PERSISTENT },
|
||||
|
||||
{"AutoCruiseControl", PERSISTENT},
|
||||
{"CruiseEcoControl", PERSISTENT},
|
||||
{"CarrotCruiseDecel", PERSISTENT},
|
||||
{"CarrotCruiseAtcDecel", PERSISTENT},
|
||||
{"CommaLongAcc", PERSISTENT},
|
||||
{"AutoGasTokSpeed", PERSISTENT},
|
||||
{"AutoGasSyncSpeed", PERSISTENT},
|
||||
{"AutoEngage", PERSISTENT},
|
||||
{"DisableMinSteerSpeed", PERSISTENT},
|
||||
{"AutoCurveSpeedLowerLimit", PERSISTENT},
|
||||
{"AutoCurveSpeedFactor", PERSISTENT},
|
||||
{"AutoCurveSpeedAggressiveness", PERSISTENT},
|
||||
{"AutoTurnControl", PERSISTENT},
|
||||
{"AutoTurnControlSpeedTurn", PERSISTENT},
|
||||
{"AutoTurnControlTurnEnd", PERSISTENT},
|
||||
{"AutoTurnMapChange", PERSISTENT },
|
||||
{"AutoNaviSpeedCtrlEnd", PERSISTENT},
|
||||
{"AutoNaviSpeedCtrlMode", PERSISTENT},
|
||||
{"AutoRoadSpeedLimitOffset", PERSISTENT},
|
||||
{"AutoNaviSpeedBumpTime", PERSISTENT},
|
||||
{"AutoNaviSpeedBumpSpeed", PERSISTENT},
|
||||
{"AutoNaviSpeedDecelRate", PERSISTENT},
|
||||
{"AutoNaviSpeedSafetyFactor", PERSISTENT},
|
||||
{"AutoNaviCountDownMode", PERSISTENT},
|
||||
{"TurnSpeedControlMode", PERSISTENT},
|
||||
{"CarrotSmartSpeedControl", PERSISTENT},
|
||||
{"MapTurnSpeedFactor", PERSISTENT},
|
||||
{"ModelTurnSpeedFactor", PERSISTENT},
|
||||
{"StoppingAccel", PERSISTENT},
|
||||
{"AutoSpeedUptoRoadSpeedLimit", PERSISTENT},
|
||||
{"AutoRoadSpeedAdjust", PERSISTENT},
|
||||
{"StopDistanceCarrot", PERSISTENT},
|
||||
{"ComfortBrake", PERSISTENT},
|
||||
{"JLeadFactor3", PERSISTENT},
|
||||
{"CruiseButtonMode", PERSISTENT},
|
||||
{"CancelButtonMode", PERSISTENT},
|
||||
{"LfaButtonMode", PERSISTENT},
|
||||
{"CruiseButtonTest1", PERSISTENT},
|
||||
{"CruiseButtonTest2", PERSISTENT},
|
||||
{"CruiseButtonTest3", PERSISTENT},
|
||||
{"CruiseSpeedUnit", PERSISTENT},
|
||||
{"CruiseSpeedUnitBasic", PERSISTENT},
|
||||
{"CruiseSpeed1", PERSISTENT},
|
||||
{"CruiseSpeed2", PERSISTENT},
|
||||
{"CruiseSpeed3", PERSISTENT},
|
||||
{"CruiseSpeed4", PERSISTENT},
|
||||
{"CruiseSpeed5", PERSISTENT},
|
||||
{"PaddleMode", PERSISTENT},
|
||||
{"MyDrivingMode", PERSISTENT},
|
||||
{"MyDrivingModeAuto", PERSISTENT},
|
||||
{"TrafficLightDetectMode", PERSISTENT},
|
||||
{"SteerActuatorDelay", PERSISTENT},
|
||||
{"LatSmoothSec", PERSISTENT},
|
||||
{"CruiseOnDist", PERSISTENT},
|
||||
{"CruiseMaxVals0", PERSISTENT},
|
||||
{"CruiseMaxVals1", PERSISTENT},
|
||||
{"CruiseMaxVals2", PERSISTENT},
|
||||
{"CruiseMaxVals3", PERSISTENT},
|
||||
{"CruiseMaxVals4", PERSISTENT},
|
||||
{"CruiseMaxVals5", PERSISTENT},
|
||||
{"CruiseMaxVals6", PERSISTENT},
|
||||
{"LongTuningKpV", PERSISTENT},
|
||||
{"LongTuningKiV", PERSISTENT},
|
||||
{"LongTuningKf", PERSISTENT},
|
||||
{"LongActuatorDelay", PERSISTENT },
|
||||
{"VEgoStopping", PERSISTENT },
|
||||
{"RadarReactionFactor", PERSISTENT},
|
||||
{"EnableRadarTracks", PERSISTENT},
|
||||
{"RadarLatFactor", PERSISTENT},
|
||||
{"EnableCornerRadar", PERSISTENT},
|
||||
{"EnableRadarTracksResult", PERSISTENT | CLEAR_ON_MANAGER_START},
|
||||
{"CanParserResult", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION },
|
||||
{"HotspotOnBoot", PERSISTENT},
|
||||
{"SoftwareMenu", PERSISTENT},
|
||||
{"HyundaiCameraSCC", PERSISTENT},
|
||||
{"FingerPrints", PERSISTENT | CLEAR_ON_MANAGER_START},
|
||||
{"IsLdwsCar", PERSISTENT},
|
||||
{"CanfdHDA2", PERSISTENT},
|
||||
{"CanfdDebug", PERSISTENT},
|
||||
{"SoundVolumeAdjust", PERSISTENT},
|
||||
{"SoundVolumeAdjustEngage", PERSISTENT},
|
||||
{"TFollowGap1", PERSISTENT},
|
||||
{"TFollowGap2", PERSISTENT},
|
||||
{"TFollowGap3", PERSISTENT},
|
||||
{"TFollowGap4", PERSISTENT},
|
||||
{"DynamicTFollow", PERSISTENT},
|
||||
{"DynamicTFollowLC", PERSISTENT},
|
||||
{"AChangeCostStarting", PERSISTENT},
|
||||
{"TrafficStopDistanceAdjust", PERSISTENT},
|
||||
{"HapticFeedbackWhenSpeedCamera", PERSISTENT},
|
||||
{"UseLaneLineSpeed", PERSISTENT},
|
||||
{"UseLaneLineCurveSpeed", PERSISTENT},
|
||||
{"AdjustLaneOffset", PERSISTENT},
|
||||
{"LaneChangeNeedTorque", PERSISTENT},
|
||||
{"LaneChangeDelay", PERSISTENT },
|
||||
{"LaneChangeBsd", PERSISTENT},
|
||||
{"MaxAngleFrames", PERSISTENT},
|
||||
{"SoftHoldMode", PERSISTENT},
|
||||
{"LatMpcPathCost", PERSISTENT},
|
||||
{"LatMpcMotionCost", PERSISTENT},
|
||||
{"LatMpcAccelCost", PERSISTENT},
|
||||
{"LatMpcJerkCost", PERSISTENT},
|
||||
{"LatMpcSteeringRateCost", PERSISTENT },
|
||||
{"LatMpcInputOffset", PERSISTENT},
|
||||
{"PathOffset", PERSISTENT},
|
||||
{"LateralTorqueCustom", PERSISTENT},
|
||||
{"LateralTorqueAccelFactor", PERSISTENT},
|
||||
{"LateralTorqueFriction", PERSISTENT},
|
||||
{"LateralTorqueKpV", PERSISTENT},
|
||||
{"LateralTorqueKiV", PERSISTENT},
|
||||
{"LateralTorqueKf", PERSISTENT},
|
||||
{"LateralTorqueKd", PERSISTENT},
|
||||
{"CustomSteerMax", PERSISTENT},
|
||||
{"CustomSteerDeltaUp", PERSISTENT},
|
||||
{"CustomSteerDeltaDown", PERSISTENT},
|
||||
{"CustomSteerDeltaUpLC", PERSISTENT},
|
||||
{"CustomSteerDeltaDownLC", PERSISTENT},
|
||||
{"SpeedFromPCM", PERSISTENT},
|
||||
{"MaxTimeOffroadMin", PERSISTENT},
|
||||
{"DisableDM", PERSISTENT},
|
||||
{"EnableConnect", PERSISTENT},
|
||||
{"MuteDoor", PERSISTENT},
|
||||
{"MuteSeatbelt", PERSISTENT},
|
||||
{"CarrotException", CLEAR_ON_MANAGER_START},
|
||||
{"CarrotSpeed", PERSISTENT},
|
||||
{"CarrotSpeedViz", PERSISTENT},
|
||||
{"CarrotSpeedTable", PERSISTENT},
|
||||
{"CarName", PERSISTENT},
|
||||
{"EVTable", PERSISTENT},
|
||||
{"LongPitch", PERSISTENT},
|
||||
{"ActivateCruiseAfterBrake", CLEAR_ON_MANAGER_START}, // for GM autoResume
|
||||
{"CustomSR", PERSISTENT},
|
||||
{"SteerRatioRate", PERSISTENT},
|
||||
{"SoftRestartTriggered", CLEAR_ON_MANAGER_START},
|
||||
|
||||
{"DevicePosition", CLEAR_ON_MANAGER_START},
|
||||
{"NNFF", PERSISTENT},
|
||||
{"NNFFLite", PERSISTENT},
|
||||
{"NNFFModelName", CLEAR_ON_OFFROAD_TRANSITION},
|
||||
|
||||
{"HardwareC3xLite", PERSISTENT},
|
||||
};
|
||||
163
common/params_pyx.pyx
Normal file
163
common/params_pyx.pyx
Normal file
@@ -0,0 +1,163 @@
|
||||
# distutils: language = c++
|
||||
# cython: language_level = 3
|
||||
from libcpp cimport bool
|
||||
from libcpp.string cimport string
|
||||
from libcpp.vector cimport vector
|
||||
|
||||
cdef extern from "common/params.h":
|
||||
cpdef enum ParamKeyType:
|
||||
PERSISTENT
|
||||
CLEAR_ON_MANAGER_START
|
||||
CLEAR_ON_ONROAD_TRANSITION
|
||||
CLEAR_ON_OFFROAD_TRANSITION
|
||||
DEVELOPMENT_ONLY
|
||||
ALL
|
||||
|
||||
cdef cppclass c_Params "Params":
|
||||
c_Params(string) except + nogil
|
||||
string get(string, bool) nogil
|
||||
bool getBool(string, bool) nogil
|
||||
int getInt(string, bool) nogil
|
||||
float getFloat(string, bool) nogil
|
||||
int remove(string) nogil
|
||||
int put(string, string) nogil
|
||||
void putNonBlocking(string, string) nogil
|
||||
void putBoolNonBlocking(string, bool) nogil
|
||||
void putIntNonBlocking(string, int) nogil
|
||||
void putFloatNonBlocking(string, float) nogil
|
||||
int putBool(string, bool) nogil
|
||||
int putInt(string, int) nogil
|
||||
int putFloat(string, float) nogil
|
||||
bool checkKey(string) nogil
|
||||
string getParamPath(string) nogil
|
||||
void clearAll(ParamKeyType)
|
||||
vector[string] allKeys()
|
||||
|
||||
|
||||
def ensure_bytes(v):
|
||||
return v.encode() if isinstance(v, str) else v
|
||||
|
||||
class UnknownKeyName(Exception):
|
||||
pass
|
||||
|
||||
cdef class Params:
|
||||
cdef c_Params* p
|
||||
cdef str d
|
||||
|
||||
def __cinit__(self, d=""):
|
||||
cdef string path = <string>d.encode()
|
||||
with nogil:
|
||||
self.p = new c_Params(path)
|
||||
self.d = d
|
||||
|
||||
def __reduce__(self):
|
||||
return (type(self), (self.d,))
|
||||
|
||||
def __dealloc__(self):
|
||||
del self.p
|
||||
|
||||
def clear_all(self, tx_type=ParamKeyType.ALL):
|
||||
self.p.clearAll(tx_type)
|
||||
|
||||
def check_key(self, key):
|
||||
key = ensure_bytes(key)
|
||||
if not self.p.checkKey(key):
|
||||
raise UnknownKeyName(key)
|
||||
return key
|
||||
|
||||
def get(self, key, bool block=False, encoding=None):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef string val
|
||||
with nogil:
|
||||
val = self.p.get(k, block)
|
||||
|
||||
if val == b"":
|
||||
if block:
|
||||
# If we got no value while running in blocked mode
|
||||
# it means we got an interrupt while waiting
|
||||
raise KeyboardInterrupt
|
||||
else:
|
||||
return None
|
||||
|
||||
return val if encoding is None else val.decode(encoding)
|
||||
|
||||
def get_bool(self, key, bool block=False):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef bool r
|
||||
with nogil:
|
||||
r = self.p.getBool(k, block)
|
||||
return r
|
||||
|
||||
def get_int(self, key, bool block=False):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef int r
|
||||
with nogil:
|
||||
r = self.p.getInt(k, block)
|
||||
return r
|
||||
|
||||
def get_float(self, key, bool block=False):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef float r
|
||||
with nogil:
|
||||
r = self.p.getFloat(k, block)
|
||||
return r
|
||||
|
||||
def put(self, key, dat):
|
||||
"""
|
||||
Warning: This function blocks until the param is written to disk!
|
||||
In very rare cases this can take over a second, and your code will hang.
|
||||
Use the put_nonblocking, put_bool_nonblocking in time sensitive code, but
|
||||
in general try to avoid writing params as much as possible.
|
||||
"""
|
||||
cdef string k = self.check_key(key)
|
||||
cdef string dat_bytes = ensure_bytes(dat)
|
||||
with nogil:
|
||||
self.p.put(k, dat_bytes)
|
||||
|
||||
def put_bool(self, key, bool val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putBool(k, val)
|
||||
|
||||
def put_int(self, key, int val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putInt(k, val)
|
||||
|
||||
def put_float(self, key, float val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putFloat(k, val)
|
||||
|
||||
def put_nonblocking(self, key, dat):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef string dat_bytes = ensure_bytes(dat)
|
||||
with nogil:
|
||||
self.p.putNonBlocking(k, dat_bytes)
|
||||
|
||||
def put_bool_nonblocking(self, key, bool val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putBoolNonBlocking(k, val)
|
||||
|
||||
def put_int_nonblocking(self, key, int val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putIntNonBlocking(k, val)
|
||||
|
||||
def put_float_nonblocking(self, key, float val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putFloatNonBlocking(k, val)
|
||||
|
||||
def remove(self, key):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.remove(k)
|
||||
|
||||
def get_param_path(self, key=""):
|
||||
cdef string key_bytes = ensure_bytes(key)
|
||||
return self.p.getParamPath(key_bytes).decode("utf-8")
|
||||
|
||||
def all_keys(self):
|
||||
return self.p.allKeys()
|
||||
BIN
common/params_pyx.so
Executable file
BIN
common/params_pyx.so
Executable file
Binary file not shown.
73
common/pid.py
Normal file
73
common/pid.py
Normal file
@@ -0,0 +1,73 @@
|
||||
import numpy as np
|
||||
from numbers import Number
|
||||
|
||||
class PIDController:
|
||||
def __init__(self, k_p, k_i, k_f=0., k_d=0., pos_limit=1e308, neg_limit=-1e308, rate=100):
|
||||
self._k_p = k_p
|
||||
self._k_i = k_i
|
||||
self._k_d = k_d
|
||||
self.k_f = k_f # feedforward gain
|
||||
if isinstance(self._k_p, Number):
|
||||
self._k_p = [[0], [self._k_p]]
|
||||
if isinstance(self._k_i, Number):
|
||||
self._k_i = [[0], [self._k_i]]
|
||||
if isinstance(self._k_d, Number):
|
||||
self._k_d = [[0], [self._k_d]]
|
||||
|
||||
self.pos_limit = pos_limit
|
||||
self.neg_limit = neg_limit
|
||||
|
||||
self.i_unwind_rate = 0.3 / rate
|
||||
self.i_rate = 1.0 / rate
|
||||
self.speed = 0.0
|
||||
|
||||
self.reset()
|
||||
|
||||
@property
|
||||
def k_p(self):
|
||||
return np.interp(self.speed, self._k_p[0], self._k_p[1])
|
||||
|
||||
@property
|
||||
def k_i(self):
|
||||
return np.interp(self.speed, self._k_i[0], self._k_i[1])
|
||||
|
||||
@property
|
||||
def k_d(self):
|
||||
return np.interp(self.speed, self._k_d[0], self._k_d[1])
|
||||
|
||||
@property
|
||||
def error_integral(self):
|
||||
return self.i/self.k_i
|
||||
|
||||
def reset(self):
|
||||
self.p = 0.0
|
||||
self.i = 0.0
|
||||
self.d = 0.0
|
||||
self.f = 0.0
|
||||
self.control = 0
|
||||
|
||||
def update(self, error, error_rate=0.0, speed=0.0, override=False, feedforward=0., freeze_integrator=False):
|
||||
self.speed = speed
|
||||
|
||||
self.p = float(error) * self.k_p
|
||||
self.f = feedforward * self.k_f
|
||||
self.d = error_rate * self.k_d
|
||||
|
||||
if override:
|
||||
self.i -= self.i_unwind_rate * float(np.sign(self.i))
|
||||
else:
|
||||
if not freeze_integrator:
|
||||
if(self.k_i != 0):
|
||||
self.i = self.i + error * self.k_i * self.i_rate
|
||||
else:
|
||||
self.i = 0
|
||||
|
||||
# Clip i to prevent exceeding control limits
|
||||
control_no_i = self.p + self.d + self.f
|
||||
control_no_i = np.clip(control_no_i, self.neg_limit, self.pos_limit)
|
||||
self.i = np.clip(self.i, self.neg_limit - control_no_i, self.pos_limit - control_no_i)
|
||||
|
||||
control = self.p + self.i + self.d + self.f
|
||||
|
||||
self.control = np.clip(control, self.neg_limit, self.pos_limit)
|
||||
return self.control
|
||||
39
common/prefix.h
Normal file
39
common/prefix.h
Normal file
@@ -0,0 +1,39 @@
|
||||
#pragma once
|
||||
|
||||
#include <cassert>
|
||||
#include <string>
|
||||
|
||||
#include "common/params.h"
|
||||
#include "common/util.h"
|
||||
#include "system/hardware/hw.h"
|
||||
|
||||
class OpenpilotPrefix {
|
||||
public:
|
||||
OpenpilotPrefix(std::string prefix = {}) {
|
||||
if (prefix.empty()) {
|
||||
prefix = util::random_string(15);
|
||||
}
|
||||
msgq_path = Path::shm_path() + "/" + prefix;
|
||||
bool ret = util::create_directories(msgq_path, 0777);
|
||||
assert(ret);
|
||||
setenv("OPENPILOT_PREFIX", prefix.c_str(), 1);
|
||||
}
|
||||
|
||||
~OpenpilotPrefix() {
|
||||
auto param_path = Params().getParamPath();
|
||||
if (util::file_exists(param_path)) {
|
||||
std::string real_path = util::readlink(param_path);
|
||||
system(util::string_format("rm %s -rf", real_path.c_str()).c_str());
|
||||
unlink(param_path.c_str());
|
||||
}
|
||||
if (getenv("COMMA_CACHE") == nullptr) {
|
||||
system(util::string_format("rm %s -rf", Path::download_cache_root().c_str()).c_str());
|
||||
}
|
||||
system(util::string_format("rm %s -rf", Path::comma_home().c_str()).c_str());
|
||||
system(util::string_format("rm %s -rf", msgq_path.c_str()).c_str());
|
||||
unsetenv("OPENPILOT_PREFIX");
|
||||
}
|
||||
|
||||
private:
|
||||
std::string msgq_path;
|
||||
};
|
||||
53
common/prefix.py
Normal file
@@ -0,0 +1,53 @@
|
||||
import os
|
||||
import shutil
|
||||
import uuid
|
||||
|
||||
|
||||
from openpilot.common.params import Params
|
||||
from openpilot.system.hardware import PC
|
||||
from openpilot.system.hardware.hw import Paths
|
||||
from openpilot.system.hardware.hw import DEFAULT_DOWNLOAD_CACHE_ROOT
|
||||
|
||||
class OpenpilotPrefix:
|
||||
def __init__(self, prefix: str = None, clean_dirs_on_exit: bool = True, shared_download_cache: bool = False):
|
||||
self.prefix = prefix if prefix else str(uuid.uuid4().hex[0:15])
|
||||
self.msgq_path = os.path.join(Paths.shm_path(), self.prefix)
|
||||
self.clean_dirs_on_exit = clean_dirs_on_exit
|
||||
self.shared_download_cache = shared_download_cache
|
||||
|
||||
def __enter__(self):
|
||||
self.original_prefix = os.environ.get('OPENPILOT_PREFIX', None)
|
||||
os.environ['OPENPILOT_PREFIX'] = self.prefix
|
||||
try:
|
||||
os.mkdir(self.msgq_path)
|
||||
except FileExistsError:
|
||||
pass
|
||||
os.makedirs(Paths.log_root(), exist_ok=True)
|
||||
|
||||
if self.shared_download_cache:
|
||||
os.environ["COMMA_CACHE"] = DEFAULT_DOWNLOAD_CACHE_ROOT
|
||||
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_obj, exc_tb):
|
||||
if self.clean_dirs_on_exit:
|
||||
self.clean_dirs()
|
||||
try:
|
||||
del os.environ['OPENPILOT_PREFIX']
|
||||
if self.original_prefix is not None:
|
||||
os.environ['OPENPILOT_PREFIX'] = self.original_prefix
|
||||
except KeyError:
|
||||
pass
|
||||
return False
|
||||
|
||||
def clean_dirs(self):
|
||||
symlink_path = Params().get_param_path()
|
||||
if os.path.exists(symlink_path):
|
||||
shutil.rmtree(os.path.realpath(symlink_path), ignore_errors=True)
|
||||
os.remove(symlink_path)
|
||||
shutil.rmtree(self.msgq_path, ignore_errors=True)
|
||||
if PC:
|
||||
shutil.rmtree(Paths.log_root(), ignore_errors=True)
|
||||
if not os.environ.get("COMMA_CACHE", False):
|
||||
shutil.rmtree(Paths.download_cache_root(), ignore_errors=True)
|
||||
shutil.rmtree(Paths.comma_home(), ignore_errors=True)
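A sketch of how the context manager above is typically used in tests so each run gets its own params/msgq namespace. The openpilot.common.prefix module path is an assumption based on the file path; Params and its byte-string return values are as shown in common/tests/test_params.py later in this release.

```
from openpilot.common.params import Params
from openpilot.common.prefix import OpenpilotPrefix  # assumed module path

with OpenpilotPrefix():
  # inside the block OPENPILOT_PREFIX points at a unique namespace, so params
  # and msgq state don't collide with another openpilot instance on this machine
  Params().put("DongleId", "0000000000000000")
  assert Params().get("DongleId") == b"0000000000000000"
# on exit the prefix directories are removed (clean_dirs_on_exit=True by default)
```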
|
||||
52
common/queue.h
Normal file
@@ -0,0 +1,52 @@
|
||||
#pragma once
|
||||
|
||||
#include <condition_variable>
|
||||
#include <mutex>
|
||||
#include <queue>
|
||||
|
||||
template <class T>
|
||||
class SafeQueue {
|
||||
public:
|
||||
SafeQueue() = default;
|
||||
|
||||
void push(const T& v) {
|
||||
{
|
||||
std::unique_lock lk(m);
|
||||
q.push(v);
|
||||
}
|
||||
cv.notify_one();
|
||||
}
|
||||
|
||||
T pop() {
|
||||
std::unique_lock lk(m);
|
||||
cv.wait(lk, [this] { return !q.empty(); });
|
||||
T v = q.front();
|
||||
q.pop();
|
||||
return v;
|
||||
}
|
||||
|
||||
bool try_pop(T& v, int timeout_ms = 0) {
|
||||
std::unique_lock lk(m);
|
||||
if (!cv.wait_for(lk, std::chrono::milliseconds(timeout_ms), [this] { return !q.empty(); })) {
|
||||
return false;
|
||||
}
|
||||
v = q.front();
|
||||
q.pop();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool empty() const {
|
||||
std::scoped_lock lk(m);
|
||||
return q.empty();
|
||||
}
|
||||
|
||||
size_t size() const {
|
||||
std::scoped_lock lk(m);
|
||||
return q.size();
|
||||
}
|
||||
|
||||
private:
|
||||
mutable std::mutex m;
|
||||
std::condition_variable cv;
|
||||
std::queue<T> q;
|
||||
};
|
||||
23
common/ratekeeper.h
Normal file
@@ -0,0 +1,23 @@
|
||||
#pragma once
|
||||
|
||||
#include <cstdint>
|
||||
#include <string>
|
||||
|
||||
class RateKeeper {
|
||||
public:
|
||||
RateKeeper(const std::string &name, float rate, float print_delay_threshold = 0);
|
||||
~RateKeeper() {}
|
||||
bool keepTime();
|
||||
bool monitorTime();
|
||||
inline uint64_t frame() const { return frame_; }
|
||||
inline double remaining() const { return remaining_; }
|
||||
|
||||
private:
|
||||
double interval;
|
||||
double next_frame_time;
|
||||
double last_monitor_time;
|
||||
double remaining_ = 0;
|
||||
float print_delay_threshold = 0;
|
||||
uint64_t frame_ = 0;
|
||||
std::string name;
|
||||
};
|
||||
96
common/realtime.py
Normal file
@@ -0,0 +1,96 @@
|
||||
"""Utilities for reading real time clocks and keeping soft real time constraints."""
|
||||
import gc
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
from setproctitle import getproctitle
|
||||
|
||||
from openpilot.common.util import MovingAverage
|
||||
from openpilot.system.hardware import PC
|
||||
|
||||
|
||||
# time step for each process
|
||||
DT_CTRL = 0.01 # controlsd
|
||||
DT_MDL = 0.05 # model
|
||||
DT_HW = 0.5 # hardwared and manager
|
||||
DT_DMON = 0.05 # driver monitoring
|
||||
|
||||
|
||||
class Priority:
|
||||
# CORE 2
|
||||
# - modeld = 55
|
||||
# - camerad = 54
|
||||
CTRL_LOW = 51 # plannerd & radard
|
||||
|
||||
# CORE 3
|
||||
# - pandad = 55
|
||||
CTRL_HIGH = 53
|
||||
|
||||
|
||||
def set_core_affinity(cores: list[int]) -> None:
|
||||
if sys.platform == 'linux' and not PC:
|
||||
os.sched_setaffinity(0, cores)
|
||||
|
||||
|
||||
def config_realtime_process(cores: int | list[int], priority: int) -> None:
|
||||
gc.disable()
|
||||
if sys.platform == 'linux' and not PC:
|
||||
os.sched_setscheduler(0, os.SCHED_FIFO, os.sched_param(priority))
|
||||
c = cores if isinstance(cores, list) else [cores, ]
|
||||
set_core_affinity(c)
|
||||
|
||||
|
||||
class Ratekeeper:
|
||||
def __init__(self, rate: float, print_delay_threshold: float | None = 0.0) -> None:
|
||||
"""Rate in Hz for ratekeeping. print_delay_threshold must be nonnegative."""
|
||||
self._interval = 1. / rate
|
||||
self._print_delay_threshold = print_delay_threshold
|
||||
self._frame = 0
|
||||
self._remaining = 0.0
|
||||
self._process_name = getproctitle()
|
||||
self._last_monitor_time = -1.
|
||||
self._next_frame_time = -1.
|
||||
|
||||
self.avg_dt = MovingAverage(100)
|
||||
self.avg_dt.add_value(self._interval)
|
||||
|
||||
@property
|
||||
def frame(self) -> int:
|
||||
return self._frame
|
||||
|
||||
@property
|
||||
def remaining(self) -> float:
|
||||
return self._remaining
|
||||
|
||||
@property
|
||||
def lagging(self) -> bool:
|
||||
expected_dt = self._interval * (1 / 0.9)
|
||||
return self.avg_dt.get_average() > expected_dt
|
||||
|
||||
# Maintain loop rate by calling this at the end of each loop
|
||||
def keep_time(self) -> bool:
|
||||
lagged = self.monitor_time()
|
||||
if self._remaining > 0:
|
||||
time.sleep(self._remaining)
|
||||
return lagged
|
||||
|
||||
# Monitors the cumulative lag, but does not enforce a rate
|
||||
def monitor_time(self) -> bool:
|
||||
if self._last_monitor_time < 0:
|
||||
self._next_frame_time = time.monotonic() + self._interval
|
||||
self._last_monitor_time = time.monotonic()
|
||||
|
||||
prev = self._last_monitor_time
|
||||
self._last_monitor_time = time.monotonic()
|
||||
self.avg_dt.add_value(self._last_monitor_time - prev)
|
||||
|
||||
lagged = False
|
||||
remaining = self._next_frame_time - time.monotonic()
|
||||
self._next_frame_time += self._interval
|
||||
if self._print_delay_threshold is not None and remaining < -self._print_delay_threshold:
|
||||
print(f"{self._process_name} lagging by {-remaining * 1000:.2f} ms")
|
||||
lagged = True
|
||||
self._frame += 1
|
||||
self._remaining = remaining
|
||||
return lagged
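A small loop sketch for the Ratekeeper above: keep_time() sleeps out the remainder of each interval and reports whether the loop fell behind. The openpilot.common.realtime module path is assumed from the file path; the 10 Hz rate and threshold are illustrative.

```
from openpilot.common.realtime import Ratekeeper  # assumed module path

rk = Ratekeeper(10.0, print_delay_threshold=0.01)  # 10 Hz, warn if more than 10 ms late
while rk.frame < 50:
  # ... one iteration of work goes here ...
  lagged = rk.keep_time()   # sleeps the remaining time, True if this frame was late
  if lagged:
    print(f"frame {rk.frame} lagged, sustained lag: {rk.lagging}")
```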
|
||||
30
common/retry.py
Normal file
@@ -0,0 +1,30 @@
|
||||
import time
|
||||
import functools
|
||||
|
||||
from openpilot.common.swaglog import cloudlog
|
||||
|
||||
|
||||
def retry(attempts=3, delay=1.0, ignore_failure=False):
|
||||
def decorator(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
for _ in range(attempts):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception:
|
||||
cloudlog.exception(f"{func.__name__} failed, trying again")
|
||||
time.sleep(delay)
|
||||
|
||||
if ignore_failure:
|
||||
cloudlog.error(f"{func.__name__} failed after retry")
|
||||
else:
|
||||
raise Exception(f"{func.__name__} failed after retry")
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
@retry(attempts=10)
|
||||
def abc():
|
||||
raise ValueError("abc failed :(")
|
||||
abc()
|
||||
13
common/run.py
Normal file
@@ -0,0 +1,13 @@
|
||||
import subprocess
|
||||
|
||||
|
||||
def run_cmd(cmd: list[str], cwd=None, env=None) -> str:
|
||||
return subprocess.check_output(cmd, encoding='utf8', cwd=cwd, env=env).strip()
|
||||
|
||||
|
||||
def run_cmd_default(cmd: list[str], default: str = "", cwd=None, env=None) -> str:
|
||||
try:
|
||||
return run_cmd(cmd, cwd=cwd, env=env)
|
||||
except subprocess.CalledProcessError:
|
||||
return default
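A quick sketch of the two helpers above (module path openpilot.common.run assumed): run_cmd raises on a failing command, while run_cmd_default swallows CalledProcessError and returns a fallback string.

```
from openpilot.common.run import run_cmd, run_cmd_default  # assumed module path

kernel = run_cmd(["uname", "-r"])   # raises subprocess.CalledProcessError on failure
branch = run_cmd_default(["git", "rev-parse", "--abbrev-ref", "HEAD"], default="unknown")
print(kernel, branch)
```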
|
||||
|
||||
54
common/simple_kalman.py
Normal file
@@ -0,0 +1,54 @@
|
||||
import numpy as np
|
||||
|
||||
|
||||
def get_kalman_gain(dt, A, C, Q, R, iterations=100):
|
||||
P = np.zeros_like(Q)
|
||||
for _ in range(iterations):
|
||||
P = A.dot(P).dot(A.T) + dt * Q
|
||||
S = C.dot(P).dot(C.T) + R
|
||||
K = P.dot(C.T).dot(np.linalg.inv(S))
|
||||
P = (np.eye(len(P)) - K.dot(C)).dot(P)
|
||||
return K
|
||||
|
||||
|
||||
class KF1D:
|
||||
# this EKF assumes constant covariance matrix, so calculations are much simpler
|
||||
# the Kalman gain also needs to be precomputed using the control module
|
||||
|
||||
def __init__(self, x0, A, C, K):
|
||||
self.x0_0 = x0[0][0]
|
||||
self.x1_0 = x0[1][0]
|
||||
self.A0_0 = A[0][0]
|
||||
self.A0_1 = A[0][1]
|
||||
self.A1_0 = A[1][0]
|
||||
self.A1_1 = A[1][1]
|
||||
self.C0_0 = C[0]
|
||||
self.C0_1 = C[1]
|
||||
self.K0_0 = K[0][0]
|
||||
self.K1_0 = K[1][0]
|
||||
|
||||
self.A_K_0 = self.A0_0 - self.K0_0 * self.C0_0
|
||||
self.A_K_1 = self.A0_1 - self.K0_0 * self.C0_1
|
||||
self.A_K_2 = self.A1_0 - self.K1_0 * self.C0_0
|
||||
self.A_K_3 = self.A1_1 - self.K1_0 * self.C0_1
|
||||
|
||||
# K matrix needs to be pre-computed as follows:
|
||||
# import control
|
||||
# (x, l, K) = control.dare(np.transpose(self.A), np.transpose(self.C), Q, R)
|
||||
# self.K = np.transpose(K)
|
||||
|
||||
def update(self, meas):
|
||||
#self.x = np.dot(self.A_K, self.x) + np.dot(self.K, meas)
|
||||
x0_0 = self.A_K_0 * self.x0_0 + self.A_K_1 * self.x1_0 + self.K0_0 * meas
|
||||
x1_0 = self.A_K_2 * self.x0_0 + self.A_K_3 * self.x1_0 + self.K1_0 * meas
|
||||
self.x0_0 = x0_0
|
||||
self.x1_0 = x1_0
|
||||
return [self.x0_0, self.x1_0]
|
||||
|
||||
@property
|
||||
def x(self):
|
||||
return [[self.x0_0], [self.x1_0]]
|
||||
|
||||
def set_x(self, x):
|
||||
self.x0_0 = x[0][0]
|
||||
self.x1_0 = x[1][0]
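A sketch tying the two pieces above together: precompute the steady-state gain with get_kalman_gain for a constant-velocity model, then run the simplified KF1D filter. The module path is the one the tests below import from; the Q and R values are illustrative only.

```
import numpy as np
from openpilot.common.simple_kalman import KF1D, get_kalman_gain

dt = 0.05
A = np.array([[1.0, dt], [0.0, 1.0]])   # constant-velocity state transition
C = np.array([[1.0, 0.0]])              # only position is measured
Q = np.diag([0.0, 10.0])                # illustrative process noise
R = np.array([[0.1]])                   # illustrative measurement noise

K = get_kalman_gain(dt, A, C, Q, R)     # steady-state gain, shape (2, 1)
kf = KF1D(x0=[[0.0], [0.0]], A=A.tolist(), C=C[0].tolist(), K=K.tolist())

for meas in [0.1, 0.22, 0.35]:
  pos, vel = kf.update(meas)            # update() returns [position, velocity]
print(pos, vel)
```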
|
||||
52
common/spinner.py
Normal file
@@ -0,0 +1,52 @@
|
||||
import os
|
||||
import subprocess
|
||||
from openpilot.common.basedir import BASEDIR
|
||||
|
||||
|
||||
class Spinner:
|
||||
def __init__(self):
|
||||
try:
|
||||
self.spinner_proc = subprocess.Popen(["./spinner"],
|
||||
stdin=subprocess.PIPE,
|
||||
cwd=os.path.join(BASEDIR, "selfdrive", "ui"),
|
||||
close_fds=True)
|
||||
except OSError:
|
||||
self.spinner_proc = None
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def update(self, spinner_text: str):
|
||||
if self.spinner_proc is not None:
|
||||
self.spinner_proc.stdin.write(spinner_text.encode('utf8') + b"\n")
|
||||
try:
|
||||
self.spinner_proc.stdin.flush()
|
||||
except BrokenPipeError:
|
||||
pass
|
||||
|
||||
def update_progress(self, cur: float, total: float):
|
||||
self.update(str(round(100 * cur / total)))
|
||||
|
||||
def close(self):
|
||||
if self.spinner_proc is not None:
|
||||
self.spinner_proc.kill()
|
||||
try:
|
||||
self.spinner_proc.communicate(timeout=2.)
|
||||
except subprocess.TimeoutExpired:
|
||||
print("WARNING: failed to kill spinner")
|
||||
self.spinner_proc = None
|
||||
|
||||
def __del__(self):
|
||||
self.close()
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
self.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import time
|
||||
with Spinner() as s:
|
||||
s.update("Spinner text")
|
||||
time.sleep(5.0)
|
||||
print("gone")
|
||||
time.sleep(5.0)
|
||||
73
common/stat_live.py
Normal file
@@ -0,0 +1,73 @@
|
||||
import numpy as np
|
||||
|
||||
class RunningStat:
|
||||
# tracks realtime mean and standard deviation without storing any data
|
||||
def __init__(self, priors=None, max_trackable=-1):
|
||||
self.max_trackable = max_trackable
|
||||
if priors is not None:
|
||||
# initialize from history
|
||||
self.M = priors[0]
|
||||
self.S = priors[1]
|
||||
self.n = priors[2]
|
||||
self.M_last = self.M
|
||||
self.S_last = self.S
|
||||
|
||||
else:
|
||||
self.reset()
|
||||
|
||||
def reset(self):
|
||||
self.M = 0.
|
||||
self.S = 0.
|
||||
self.M_last = 0.
|
||||
self.S_last = 0.
|
||||
self.n = 0
|
||||
|
||||
def push_data(self, new_data):
|
||||
# short term memory hack
|
||||
if self.max_trackable < 0 or self.n < self.max_trackable:
|
||||
self.n += 1
|
||||
if self.n == 0:
|
||||
self.M_last = new_data
|
||||
self.M = self.M_last
|
||||
self.S_last = 0.
|
||||
else:
|
||||
self.M = self.M_last + (new_data - self.M_last) / self.n
|
||||
self.S = self.S_last + (new_data - self.M_last) * (new_data - self.M)
|
||||
self.M_last = self.M
|
||||
self.S_last = self.S
|
||||
|
||||
def mean(self):
|
||||
return self.M
|
||||
|
||||
def variance(self):
|
||||
if self.n >= 2:
|
||||
return self.S / (self.n - 1.)
|
||||
else:
|
||||
return 0
|
||||
|
||||
def std(self):
|
||||
return np.sqrt(self.variance())
|
||||
|
||||
def params_to_save(self):
|
||||
return [self.M, self.S, self.n]
|
||||
|
||||
class RunningStatFilter:
|
||||
def __init__(self, raw_priors=None, filtered_priors=None, max_trackable=-1):
|
||||
self.raw_stat = RunningStat(raw_priors, -1)
|
||||
self.filtered_stat = RunningStat(filtered_priors, max_trackable)
|
||||
|
||||
def reset(self):
|
||||
self.raw_stat.reset()
|
||||
self.filtered_stat.reset()
|
||||
|
||||
def push_and_update(self, new_data):
|
||||
_std_last = self.raw_stat.std()
|
||||
self.raw_stat.push_data(new_data)
|
||||
_delta_std = self.raw_stat.std() - _std_last
|
||||
if _delta_std <= 0:
|
||||
self.filtered_stat.push_data(new_data)
|
||||
else:
|
||||
pass
|
||||
# self.filtered_stat.push_data(self.filtered_stat.mean())
|
||||
|
||||
# class SequentialBayesian():
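A short usage sketch of the RunningStat class above (module path openpilot.common.stat_live assumed): it maintains a Welford-style running mean and variance without storing samples, and params_to_save()/priors let you persist and later restore the state.

```
from openpilot.common.stat_live import RunningStat  # assumed module path

stat = RunningStat()
for x in [1.0, 2.0, 3.0, 4.0]:
  stat.push_data(x)
print(stat.mean(), stat.std())     # ~2.5 and ~1.29 (sample standard deviation)

saved = stat.params_to_save()      # [M, S, n]
restored = RunningStat(priors=saved)
assert restored.mean() == stat.mean()
```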
|
||||
76
common/swaglog.h
Normal file
@@ -0,0 +1,76 @@
|
||||
#pragma once
|
||||
|
||||
#include "common/timing.h"
|
||||
|
||||
#define CLOUDLOG_DEBUG 10
|
||||
#define CLOUDLOG_INFO 20
|
||||
#define CLOUDLOG_WARNING 30
|
||||
#define CLOUDLOG_ERROR 40
|
||||
#define CLOUDLOG_CRITICAL 50
|
||||
|
||||
|
||||
#ifdef __GNUC__
|
||||
#define SWAG_LOG_CHECK_FMT(a, b) __attribute__ ((format (printf, a, b)))
|
||||
#else
|
||||
#define SWAG_LOG_CHECK_FMT(a, b)
|
||||
#endif
|
||||
|
||||
void cloudlog_e(int levelnum, const char* filename, int lineno, const char* func,
|
||||
const char* fmt, ...) SWAG_LOG_CHECK_FMT(5, 6);
|
||||
|
||||
void cloudlog_te(int levelnum, const char* filename, int lineno, const char* func,
|
||||
const char* fmt, ...) SWAG_LOG_CHECK_FMT(5, 6);
|
||||
|
||||
void cloudlog_te(int levelnum, const char* filename, int lineno, const char* func,
|
||||
uint32_t frame_id, const char* fmt, ...) SWAG_LOG_CHECK_FMT(6, 7);
|
||||
|
||||
|
||||
#define cloudlog(lvl, fmt, ...) cloudlog_e(lvl, __FILE__, __LINE__, \
|
||||
__func__, \
|
||||
fmt, ## __VA_ARGS__)
|
||||
|
||||
#define cloudlog_t(lvl, ...) cloudlog_te(lvl, __FILE__, __LINE__, \
|
||||
__func__, \
|
||||
__VA_ARGS__)
|
||||
|
||||
|
||||
#define cloudlog_rl(burst, millis, lvl, fmt, ...) \
|
||||
{ \
|
||||
static uint64_t __begin = 0; \
|
||||
static int __printed = 0; \
|
||||
static int __missed = 0; \
|
||||
\
|
||||
int __burst = (burst); \
|
||||
int __millis = (millis); \
|
||||
uint64_t __ts = nanos_since_boot(); \
|
||||
\
|
||||
if (!__begin) { __begin = __ts; } \
|
||||
\
|
||||
if (__begin + __millis*1000000ULL < __ts) { \
|
||||
if (__missed) { \
|
||||
cloudlog(CLOUDLOG_WARNING, "cloudlog: %d messages suppressed", __missed); \
|
||||
} \
|
||||
__begin = 0; \
|
||||
__printed = 0; \
|
||||
__missed = 0; \
|
||||
} \
|
||||
\
|
||||
if (__printed < __burst) { \
|
||||
cloudlog(lvl, fmt, ## __VA_ARGS__); \
|
||||
__printed++; \
|
||||
} else { \
|
||||
__missed++; \
|
||||
} \
|
||||
}
|
||||
|
||||
|
||||
#define LOGT(...) cloudlog_t(CLOUDLOG_DEBUG, __VA_ARGS__)
|
||||
#define LOGD(fmt, ...) cloudlog(CLOUDLOG_DEBUG, fmt, ## __VA_ARGS__)
|
||||
#define LOG(fmt, ...) cloudlog(CLOUDLOG_INFO, fmt, ## __VA_ARGS__)
|
||||
#define LOGW(fmt, ...) cloudlog(CLOUDLOG_WARNING, fmt, ## __VA_ARGS__)
|
||||
#define LOGE(fmt, ...) cloudlog(CLOUDLOG_ERROR, fmt, ## __VA_ARGS__)
|
||||
|
||||
#define LOGD_100(fmt, ...) cloudlog_rl(2, 100, CLOUDLOG_DEBUG, fmt, ## __VA_ARGS__)
|
||||
#define LOG_100(fmt, ...) cloudlog_rl(2, 100, CLOUDLOG_INFO, fmt, ## __VA_ARGS__)
|
||||
#define LOGW_100(fmt, ...) cloudlog_rl(2, 100, CLOUDLOG_WARNING, fmt, ## __VA_ARGS__)
|
||||
#define LOGE_100(fmt, ...) cloudlog_rl(2, 100, CLOUDLOG_ERROR, fmt, ## __VA_ARGS__)
|
||||
144
common/swaglog.py
Normal file
@@ -0,0 +1,144 @@
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
import warnings
|
||||
from pathlib import Path
|
||||
from logging.handlers import BaseRotatingHandler
|
||||
|
||||
import zmq
|
||||
|
||||
from openpilot.common.logging_extra import SwagLogger, SwagFormatter, SwagLogFileFormatter
|
||||
from openpilot.system.hardware.hw import Paths
|
||||
|
||||
|
||||
def get_file_handler():
|
||||
Path(Paths.swaglog_root()).mkdir(parents=True, exist_ok=True)
|
||||
base_filename = os.path.join(Paths.swaglog_root(), "swaglog")
|
||||
handler = SwaglogRotatingFileHandler(base_filename)
|
||||
return handler
|
||||
|
||||
class SwaglogRotatingFileHandler(BaseRotatingHandler):
|
||||
def __init__(self, base_filename, interval=60, max_bytes=1024*256, backup_count=2500, encoding=None):
|
||||
super().__init__(base_filename, mode="a", encoding=encoding, delay=True)
|
||||
self.base_filename = base_filename
|
||||
self.interval = interval # seconds
|
||||
self.max_bytes = max_bytes
|
||||
self.backup_count = backup_count
|
||||
self.log_files = self.get_existing_logfiles()
|
||||
log_indexes = [f.split(".")[-1] for f in self.log_files]
|
||||
self.last_file_idx = max([int(i) for i in log_indexes if i.isdigit()] or [-1])
|
||||
self.last_rollover = None
|
||||
self.doRollover()
|
||||
|
||||
def _open(self):
|
||||
self.last_rollover = time.monotonic()
|
||||
self.last_file_idx += 1
|
||||
next_filename = f"{self.base_filename}.{self.last_file_idx:010}"
|
||||
stream = open(next_filename, self.mode, encoding=self.encoding)
|
||||
self.log_files.insert(0, next_filename)
|
||||
return stream
|
||||
|
||||
def get_existing_logfiles(self):
|
||||
log_files = list()
|
||||
base_dir = os.path.dirname(self.base_filename)
|
||||
for fn in os.listdir(base_dir):
|
||||
fp = os.path.join(base_dir, fn)
|
||||
if fp.startswith(self.base_filename) and os.path.isfile(fp):
|
||||
log_files.append(fp)
|
||||
return sorted(log_files)
|
||||
|
||||
def shouldRollover(self, record):
|
||||
size_exceeded = self.max_bytes > 0 and self.stream.tell() >= self.max_bytes
|
||||
time_exceeded = self.interval > 0 and self.last_rollover + self.interval <= time.monotonic()
|
||||
return size_exceeded or time_exceeded
|
||||
|
||||
def doRollover(self):
|
||||
if self.stream:
|
||||
self.stream.close()
|
||||
self.stream = self._open()
|
||||
|
||||
if self.backup_count > 0:
|
||||
while len(self.log_files) > self.backup_count:
|
||||
to_delete = self.log_files.pop()
|
||||
if os.path.exists(to_delete): # just being safe, should always exist
|
||||
os.remove(to_delete)
|
||||
|
||||
class UnixDomainSocketHandler(logging.Handler):
|
||||
def __init__(self, formatter):
|
||||
logging.Handler.__init__(self)
|
||||
self.setFormatter(formatter)
|
||||
self.pid = None
|
||||
|
||||
self.zctx = None
|
||||
self.sock = None
|
||||
|
||||
def __del__(self):
|
||||
self.close()
|
||||
|
||||
def close(self):
|
||||
if self.sock is not None:
|
||||
self.sock.close()
|
||||
if self.zctx is not None:
|
||||
self.zctx.term()
|
||||
|
||||
def connect(self):
|
||||
self.zctx = zmq.Context()
|
||||
self.sock = self.zctx.socket(zmq.PUSH)
|
||||
self.sock.setsockopt(zmq.LINGER, 10)
|
||||
self.sock.connect(Paths.swaglog_ipc())
|
||||
self.pid = os.getpid()
|
||||
|
||||
def emit(self, record):
|
||||
if os.getpid() != self.pid:
|
||||
# TODO suppresses warning about forking proc with zmq socket, fix root cause
|
||||
warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*<zmq.*>")
|
||||
self.connect()
|
||||
|
||||
msg = self.format(record).rstrip('\n')
|
||||
# print("SEND".format(repr(msg)))
|
||||
try:
|
||||
s = chr(record.levelno)+msg
|
||||
self.sock.send(s.encode('utf8'), zmq.NOBLOCK)
|
||||
except zmq.error.Again:
|
||||
# drop :/
|
||||
pass
|
||||
|
||||
|
||||
class ForwardingHandler(logging.Handler):
|
||||
def __init__(self, target_logger):
|
||||
super().__init__()
|
||||
self.target_logger = target_logger
|
||||
|
||||
def emit(self, record):
|
||||
self.target_logger.handle(record)
|
||||
|
||||
|
||||
def add_file_handler(log):
|
||||
"""
|
||||
Function to add the file log handler to swaglog.
|
||||
This can be used to store logs when logmessaged is not running.
|
||||
"""
|
||||
handler = get_file_handler()
|
||||
handler.setFormatter(SwagLogFileFormatter(log))
|
||||
log.addHandler(handler)
|
||||
|
||||
|
||||
cloudlog = log = SwagLogger()
|
||||
log.setLevel(logging.DEBUG)
|
||||
|
||||
|
||||
outhandler = logging.StreamHandler()
|
||||
|
||||
print_level = os.environ.get('LOGPRINT', 'warning')
|
||||
if print_level == 'debug':
|
||||
outhandler.setLevel(logging.DEBUG)
|
||||
elif print_level == 'info':
|
||||
outhandler.setLevel(logging.INFO)
|
||||
elif print_level == 'warning':
|
||||
outhandler.setLevel(logging.WARNING)
|
||||
|
||||
ipchandler = UnixDomainSocketHandler(SwagFormatter(log))
|
||||
|
||||
log.addHandler(outhandler)
|
||||
# logs are sent through IPC before writing to disk to prevent disk I/O blocking
|
||||
log.addHandler(ipchandler)
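A sketch of typical use of the logger configured above. cloudlog is used like a standard logging.Logger (the module calls setLevel/addHandler on it, and retry.py above calls cloudlog.exception); messages go to the stream handler and are pushed over the zmq socket by UnixDomainSocketHandler.

```
from openpilot.common.swaglog import cloudlog

cloudlog.info("starting up")
cloudlog.warning("battery low: %d%%", 12)
try:
  1 / 0
except ZeroDivisionError:
  cloudlog.exception("math failed")  # logs with traceback, same pattern as retry.py above
```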
|
||||
0
common/tests/__init__.py
Normal file
19
common/tests/test_file_helpers.py
Normal file
@@ -0,0 +1,19 @@
|
||||
import os
|
||||
from uuid import uuid4
|
||||
|
||||
from openpilot.common.file_helpers import atomic_write_in_dir
|
||||
|
||||
|
||||
class TestFileHelpers:
|
||||
def run_atomic_write_func(self, atomic_write_func):
|
||||
path = f"/tmp/tmp{uuid4()}"
|
||||
with atomic_write_func(path) as f:
|
||||
f.write("test")
|
||||
assert not os.path.exists(path)
|
||||
|
||||
with open(path) as f:
|
||||
assert f.read() == "test"
|
||||
os.remove(path)
|
||||
|
||||
def test_atomic_write_in_dir(self):
|
||||
self.run_atomic_write_func(atomic_write_in_dir)
|
||||
15
common/tests/test_markdown.py
Normal file
@@ -0,0 +1,15 @@
|
||||
import os
|
||||
|
||||
from openpilot.common.basedir import BASEDIR
|
||||
from openpilot.common.markdown import parse_markdown
|
||||
|
||||
|
||||
class TestMarkdown:
|
||||
def test_all_release_notes(self):
|
||||
with open(os.path.join(BASEDIR, "RELEASES.md")) as f:
|
||||
release_notes = f.read().split("\n\n")
|
||||
assert len(release_notes) > 10
|
||||
|
||||
for rn in release_notes:
|
||||
md = parse_markdown(rn)
|
||||
assert len(md) > 0
|
||||
109
common/tests/test_params.py
Normal file
@@ -0,0 +1,109 @@
|
||||
import pytest
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from openpilot.common.params import Params, ParamKeyType, UnknownKeyName
|
||||
|
||||
class TestParams:
|
||||
def setup_method(self):
|
||||
self.params = Params()
|
||||
|
||||
def test_params_put_and_get(self):
|
||||
self.params.put("DongleId", "cb38263377b873ee")
|
||||
assert self.params.get("DongleId") == b"cb38263377b873ee"
|
||||
|
||||
def test_params_non_ascii(self):
|
||||
st = b"\xe1\x90\xff"
|
||||
self.params.put("CarParams", st)
|
||||
assert self.params.get("CarParams") == st
|
||||
|
||||
def test_params_get_cleared_manager_start(self):
|
||||
self.params.put("CarParams", "test")
|
||||
self.params.put("DongleId", "cb38263377b873ee")
|
||||
assert self.params.get("CarParams") == b"test"
|
||||
|
||||
undefined_param = self.params.get_param_path(uuid.uuid4().hex)
|
||||
with open(undefined_param, "w") as f:
|
||||
f.write("test")
|
||||
assert os.path.isfile(undefined_param)
|
||||
|
||||
self.params.clear_all(ParamKeyType.CLEAR_ON_MANAGER_START)
|
||||
assert self.params.get("CarParams") is None
|
||||
assert self.params.get("DongleId") is not None
|
||||
assert not os.path.isfile(undefined_param)
|
||||
|
||||
def test_params_two_things(self):
|
||||
self.params.put("DongleId", "bob")
|
||||
self.params.put("AthenadPid", "123")
|
||||
assert self.params.get("DongleId") == b"bob"
|
||||
assert self.params.get("AthenadPid") == b"123"
|
||||
|
||||
def test_params_get_block(self):
|
||||
def _delayed_writer():
|
||||
time.sleep(0.1)
|
||||
self.params.put("CarParams", "test")
|
||||
threading.Thread(target=_delayed_writer).start()
|
||||
assert self.params.get("CarParams") is None
|
||||
assert self.params.get("CarParams", True) == b"test"
|
||||
|
||||
def test_params_unknown_key_fails(self):
|
||||
with pytest.raises(UnknownKeyName):
|
||||
self.params.get("swag")
|
||||
|
||||
with pytest.raises(UnknownKeyName):
|
||||
self.params.get_bool("swag")
|
||||
|
||||
with pytest.raises(UnknownKeyName):
|
||||
self.params.put("swag", "abc")
|
||||
|
||||
with pytest.raises(UnknownKeyName):
|
||||
self.params.put_bool("swag", True)
|
||||
|
||||
def test_remove_not_there(self):
|
||||
assert self.params.get("CarParams") is None
|
||||
self.params.remove("CarParams")
|
||||
assert self.params.get("CarParams") is None
|
||||
|
||||
def test_get_bool(self):
|
||||
self.params.remove("IsMetric")
|
||||
assert not self.params.get_bool("IsMetric")
|
||||
|
||||
self.params.put_bool("IsMetric", True)
|
||||
assert self.params.get_bool("IsMetric")
|
||||
|
||||
self.params.put_bool("IsMetric", False)
|
||||
assert not self.params.get_bool("IsMetric")
|
||||
|
||||
self.params.put("IsMetric", "1")
|
||||
assert self.params.get_bool("IsMetric")
|
||||
|
||||
self.params.put("IsMetric", "0")
|
||||
assert not self.params.get_bool("IsMetric")
|
||||
|
||||
def test_put_non_blocking_with_get_block(self):
|
||||
q = Params()
|
||||
def _delayed_writer():
|
||||
time.sleep(0.1)
|
||||
Params().put_nonblocking("CarParams", "test")
|
||||
threading.Thread(target=_delayed_writer).start()
|
||||
assert q.get("CarParams") is None
|
||||
assert q.get("CarParams", True) == b"test"
|
||||
|
||||
def test_put_bool_non_blocking_with_get_block(self):
|
||||
q = Params()
|
||||
def _delayed_writer():
|
||||
time.sleep(0.1)
|
||||
Params().put_bool_nonblocking("CarParams", True)
|
||||
threading.Thread(target=_delayed_writer).start()
|
||||
assert q.get("CarParams") is None
|
||||
assert q.get("CarParams", True) == b"1"
|
||||
|
||||
def test_params_all_keys(self):
|
||||
keys = Params().all_keys()
|
||||
|
||||
# sanity checks
|
||||
assert len(keys) > 20
|
||||
assert len(keys) == len(set(keys))
|
||||
assert b"CarParams" in keys
|
||||
29
common/tests/test_simple_kalman.py
Normal file
@@ -0,0 +1,29 @@
|
||||
from openpilot.common.simple_kalman import KF1D
|
||||
|
||||
|
||||
class TestSimpleKalman:
|
||||
def setup_method(self):
|
||||
dt = 0.01
|
||||
x0_0 = 0.0
|
||||
x1_0 = 0.0
|
||||
A0_0 = 1.0
|
||||
A0_1 = dt
|
||||
A1_0 = 0.0
|
||||
A1_1 = 1.0
|
||||
C0_0 = 1.0
|
||||
C0_1 = 0.0
|
||||
K0_0 = 0.12287673
|
||||
K1_0 = 0.29666309
|
||||
|
||||
self.kf = KF1D(x0=[[x0_0], [x1_0]],
|
||||
A=[[A0_0, A0_1], [A1_0, A1_1]],
|
||||
C=[C0_0, C0_1],
|
||||
K=[[K0_0], [K1_0]])
|
||||
|
||||
def test_getter_setter(self):
|
||||
self.kf.set_x([[1.0], [1.0]])
|
||||
assert self.kf.x == [[1.0], [1.0]]
|
||||
|
||||
def test_update_returns_state(self):
|
||||
x = self.kf.update(100)
|
||||
assert x == self.kf.x
|
||||
63
common/text_window.py
Executable file
@@ -0,0 +1,63 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import time
|
||||
import subprocess
|
||||
from openpilot.common.basedir import BASEDIR
|
||||
|
||||
|
||||
class TextWindow:
|
||||
def __init__(self, text):
|
||||
try:
|
||||
self.text_proc = subprocess.Popen(["./text", text],
|
||||
stdin=subprocess.PIPE,
|
||||
cwd=os.path.join(BASEDIR, "selfdrive", "ui"),
|
||||
close_fds=True)
|
||||
except OSError:
|
||||
self.text_proc = None
|
||||
|
||||
def get_status(self):
|
||||
if self.text_proc is not None:
|
||||
self.text_proc.poll()
|
||||
return self.text_proc.returncode
|
||||
return None
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def close(self):
|
||||
if self.text_proc is not None:
|
||||
self.text_proc.terminate()
|
||||
self.text_proc = None
|
||||
|
||||
def wait_for_exit(self):
|
||||
if self.text_proc is not None:
|
||||
while True:
|
||||
if self.get_status() == 1:
|
||||
return
|
||||
time.sleep(0.1)
|
||||
|
||||
def __del__(self):
|
||||
self.close()
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
self.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
text = """Traceback (most recent call last):
|
||||
File "./controlsd.py", line 608, in <module>
|
||||
main()
|
||||
File "./controlsd.py", line 604, in main
|
||||
controlsd_thread(sm, pm, logcan)
|
||||
File "./controlsd.py", line 455, in controlsd_thread
|
||||
1/0
|
||||
ZeroDivisionError: division by zero"""
|
||||
print(text)
|
||||
|
||||
with TextWindow(text) as s:
|
||||
for _ in range(100):
|
||||
if s.get_status() == 1:
|
||||
print("Got exit button")
|
||||
break
|
||||
time.sleep(0.1)
|
||||
print("gone")
|
||||
15
common/time_helpers.py
Normal file
@@ -0,0 +1,15 @@
|
||||
import datetime
|
||||
from pathlib import Path
|
||||
|
||||
MIN_DATE = datetime.datetime(year=2025, month=2, day=21)
|
||||
|
||||
def min_date():
|
||||
# on systemd systems, the default time is the systemd build time
|
||||
systemd_path = Path("/lib/systemd/systemd")
|
||||
if systemd_path.exists():
|
||||
d = datetime.datetime.fromtimestamp(systemd_path.stat().st_mtime)
|
||||
return max(MIN_DATE, d + datetime.timedelta(days=1))
|
||||
return MIN_DATE
|
||||
|
||||
def system_time_valid():
|
||||
return datetime.datetime.now() > min_date()
|
||||
27
common/timeout.py
Normal file
@@ -0,0 +1,27 @@
|
||||
import signal
|
||||
|
||||
class TimeoutException(Exception):
|
||||
pass
|
||||
|
||||
class Timeout:
|
||||
"""
|
||||
Timeout context manager.
|
||||
For example this code will raise a TimeoutException:
|
||||
with Timeout(seconds=5, error_msg="Sleep was too long"):
|
||||
time.sleep(10)
|
||||
"""
|
||||
def __init__(self, seconds, error_msg=None):
|
||||
if error_msg is None:
|
||||
error_msg = f'Timed out after {seconds} seconds'
|
||||
self.seconds = seconds
|
||||
self.error_msg = error_msg
|
||||
|
||||
def handle_timeout(self, signum, frame):
|
||||
raise TimeoutException(self.error_msg)
|
||||
|
||||
def __enter__(self):
|
||||
signal.signal(signal.SIGALRM, self.handle_timeout)
|
||||
signal.alarm(self.seconds)
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
signal.alarm(0)
|
||||
51
common/timing.h
Normal file
@@ -0,0 +1,51 @@
|
||||
#pragma once
|
||||
|
||||
#include <cstdint>
|
||||
#include <ctime>
|
||||
|
||||
#ifdef __APPLE__
|
||||
#define CLOCK_BOOTTIME CLOCK_MONOTONIC
|
||||
#endif
|
||||
|
||||
static inline uint64_t nanos_since_boot() {
|
||||
struct timespec t;
|
||||
clock_gettime(CLOCK_BOOTTIME, &t);
|
||||
return t.tv_sec * 1000000000ULL + t.tv_nsec;
|
||||
}
|
||||
|
||||
static inline double millis_since_boot() {
|
||||
struct timespec t;
|
||||
clock_gettime(CLOCK_BOOTTIME, &t);
|
||||
return t.tv_sec * 1000.0 + t.tv_nsec * 1e-6;
|
||||
}
|
||||
|
||||
static inline double seconds_since_boot() {
|
||||
struct timespec t;
|
||||
clock_gettime(CLOCK_BOOTTIME, &t);
|
||||
return (double)t.tv_sec + t.tv_nsec * 1e-9;
|
||||
}
|
||||
|
||||
static inline uint64_t nanos_since_epoch() {
|
||||
struct timespec t;
|
||||
clock_gettime(CLOCK_REALTIME, &t);
|
||||
return t.tv_sec * 1000000000ULL + t.tv_nsec;
|
||||
}
|
||||
|
||||
static inline double seconds_since_epoch() {
|
||||
struct timespec t;
|
||||
clock_gettime(CLOCK_REALTIME, &t);
|
||||
return (double)t.tv_sec + t.tv_nsec * 1e-9;
|
||||
}
|
||||
|
||||
// you probably should use nanos_since_boot instead
|
||||
static inline uint64_t nanos_monotonic() {
|
||||
struct timespec t;
|
||||
clock_gettime(CLOCK_MONOTONIC, &t);
|
||||
return t.tv_sec * 1000000000ULL + t.tv_nsec;
|
||||
}
|
||||
|
||||
static inline uint64_t nanos_monotonic_raw() {
|
||||
struct timespec t;
|
||||
clock_gettime(CLOCK_MONOTONIC_RAW, &t);
|
||||
return t.tv_sec * 1000000000ULL + t.tv_nsec;
|
||||
}
|
||||
70
common/transformations/README.md
Normal file
@@ -0,0 +1,70 @@
|
||||
|
||||
Reference Frames
|
||||
------
|
||||
Many reference frames are used throughout openpilot. This
|
||||
folder contains all helper functions needed to
|
||||
transform between them. Generally this is done
|
||||
by generating a rotation matrix and multiplying.
|
||||
|
||||
|
||||
| Name | [x, y, z] | Units | Notes |
|
||||
| :-------------: |:-------------:| :-----:| :----: |
|
||||
| Geodetic | [Latitude, Longitude, Altitude] | geodetic coordinates | Sometimes used as [lon, lat, alt], avoid this frame. |
|
||||
| ECEF | [x, y, z] | meters | We use **ITRF14 (IGS14)**, NOT NAD83. <br> This is the global Mesh3D frame. |
|
||||
| NED | [North, East, Down] | meters | Relative to earth's surface, useful for visualizing. |
|
||||
| Device | [Forward, Right, Down] | meters | This is the Mesh3D local frame. <br> Relative to camera, **not imu.** <br> |
|
||||
| Calibrated | [Forward, Right, Down] | meters | This is the frame the model outputs are in. <br> More details below. <br>|
|
||||
| Car | [Forward, Right, Down] | meters | This is useful for estimating position of points on the road. <br> More details below. <br>|
|
||||
| View | [Right, Down, Forward] | meters | Like device frame, but according to camera conventions. |
|
||||
| Camera | [u, v, focal] | pixels | Like view frame, but 2d on the camera image.|
|
||||
| Normalized Camera | [u / focal, v / focal, 1] | / | |
|
||||
| Model | [u, v, focal] | pixels | The sampled rectangle of the full camera frame the model uses. |
|
||||
| Normalized Model | [u / focal, v / focal, 1] | / | |
|
||||
|
||||
|
||||
|
||||
|
||||
Orientation Conventions
|
||||
------
|
||||
Quaternions, rotation matrices and euler angles are three
|
||||
equivalent representations of orientation and all three are
|
||||
used throughout the code base.
|
||||
|
||||
For euler angles the preferred convention is [roll, pitch, yaw]
|
||||
which corresponds to rotations around the [x, y, z] axes. All
|
||||
euler angles should always be in radians or radians/s unless
|
||||
used for plotting or display purposes. For quaternions the Hamilton
|
||||
notation is preferred, which is [q<sub>w</sub>, q<sub>x</sub>, q<sub>y</sub>, q<sub>z</sub>]. All quaternions
|
||||
should always be normalized with a strictly positive q<sub>w</sub>. **These
|
||||
quaternions are a unique representation of orientation whereas euler angles
|
||||
or rotation matrices are not.**
|
||||
|
||||
To rotate from one frame into another with euler angles the
|
||||
convention is to rotate around roll, then pitch and then yaw,
|
||||
while rotating around the rotated axes, not the original axes.
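For example, a short round-trip sketch of these conventions using the helpers in common/transformations/orientation.py:
```
from openpilot.common.transformations.orientation import euler2quat, quat2euler

euler = [0.1, -0.05, 1.5]       # [roll, pitch, yaw] in radians
quat = euler2quat(euler)        # [q_w, q_x, q_y, q_z], normalized, q_w kept positive
euler_again = quat2euler(quat)  # an equivalent set of euler angles for the same rotation
```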
|
||||
|
||||
|
||||
Car frame
|
||||
------
|
||||
Device frame is aligned with the road-facing camera used by openpilot. However, when controlling the vehicle it is helpful to think in a reference frame aligned with the vehicle. These two reference frames can be different.
|
||||
|
||||
The orientation of car frame is defined to be aligned with the car's direction of travel and the road plane when the vehicle is driving on a flat road and not turning. The origin of car frame is defined to be directly below device frame (in car frame), such that it is on the road plane. The position and orientation of this frame are not necessarily always aligned with the direction of travel or the road plane due to suspension movements and other effects.
|
||||
|
||||
|
||||
Calibrated frame
|
||||
------
|
||||
It is helpful for openpilot's driving model to take in images that look similar even when the device is mounted differently in different cars. To achieve this we "calibrate" the images by transforming them into calibrated frame. Calibrated frame is defined to be aligned with car frame in pitch and yaw, and aligned with device frame in roll. It also has the same origin as device frame.
|
||||
|
||||
|
||||
Example
|
||||
------
|
||||
To transform global Mesh3D positions and orientations (positions_ecef, quats_ecef) into the local frame described by the
|
||||
first position and orientation from Mesh3D, one would do:
|
||||
```
|
||||
ecef_from_local = rot_from_quat(quats_ecef[0])
|
||||
local_from_ecef = ecef_from_local.T
|
||||
positions_local = np.einsum('ij,kj->ki', local_from_ecef, positions_ecef - positions_ecef[0])
|
||||
rotations_global = rot_from_quat(quats_ecef)
|
||||
rotations_local = np.einsum('ij,kjl->kil', local_from_ecef, rotations_global)
|
||||
eulers_local = euler_from_rot(rotations_local)
|
||||
```
|
||||
0
common/transformations/__init__.py
Normal file
179
common/transformations/camera.py
Normal file
@@ -0,0 +1,179 @@
|
||||
import itertools
|
||||
import numpy as np
|
||||
from dataclasses import dataclass
|
||||
|
||||
import openpilot.common.transformations.orientation as orient
|
||||
|
||||
## -- hardcoded hardware params --
|
||||
@dataclass(frozen=True)
|
||||
class CameraConfig:
|
||||
width: int
|
||||
height: int
|
||||
focal_length: float
|
||||
|
||||
@property
|
||||
def size(self):
|
||||
return (self.width, self.height)
|
||||
|
||||
@property
|
||||
def intrinsics(self):
|
||||
# aka 'K' aka camera_frame_from_view_frame
|
||||
return np.array([
|
||||
[self.focal_length, 0.0, float(self.width)/2],
|
||||
[0.0, self.focal_length, float(self.height)/2],
|
||||
[0.0, 0.0, 1.0]
|
||||
])
|
||||
|
||||
@property
|
||||
def intrinsics_inv(self):
|
||||
# aka 'K_inv' aka view_frame_from_camera_frame
|
||||
return np.linalg.inv(self.intrinsics)
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class _NoneCameraConfig(CameraConfig):
|
||||
width: int = 0
|
||||
height: int = 0
|
||||
focal_length: float = 0
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class DeviceCameraConfig:
|
||||
fcam: CameraConfig
|
||||
dcam: CameraConfig
|
||||
ecam: CameraConfig
|
||||
|
||||
def all_cams(self):
|
||||
for cam in ['fcam', 'dcam', 'ecam']:
|
||||
if not isinstance(getattr(self, cam), _NoneCameraConfig):
|
||||
yield cam, getattr(self, cam)
|
||||
|
||||
_ar_ox_fisheye = CameraConfig(1928, 1208, 567.0) # focal length probably wrong? magnification is not consistent across frame
|
||||
_os_fisheye = CameraConfig(2688 // 2, 1520 // 2, 567.0 / 4 * 3)
|
||||
_ar_ox_config = DeviceCameraConfig(CameraConfig(1928, 1208, 2648.0), _ar_ox_fisheye, _ar_ox_fisheye)
|
||||
_os_config = DeviceCameraConfig(CameraConfig(2688 // 2, 1520 // 2, 1522.0 * 3 / 4), _os_fisheye, _os_fisheye)
|
||||
_neo_config = DeviceCameraConfig(CameraConfig(1164, 874, 910.0), CameraConfig(816, 612, 650.0), _NoneCameraConfig())
|
||||
|
||||
DEVICE_CAMERAS = {
|
||||
# A "device camera" is defined by a device type and sensor
|
||||
|
||||
# sensor type was never set on eon/neo/two
|
||||
("neo", "unknown"): _neo_config,
|
||||
# unknown here is AR0231, field was added with OX03C10 support
|
||||
("tici", "unknown"): _ar_ox_config,
|
||||
|
||||
# before deviceState.deviceType was set, assume tici AR config
|
||||
("unknown", "ar0231"): _ar_ox_config,
|
||||
("unknown", "ox03c10"): _ar_ox_config,
|
||||
|
||||
# simulator (emulates a tici)
|
||||
("pc", "unknown"): _ar_ox_config,
|
||||
}
|
||||
prods = itertools.product(('tici', 'tizi', 'mici'), (('ar0231', _ar_ox_config), ('ox03c10', _ar_ox_config), ('os04c10', _os_config)))
|
||||
DEVICE_CAMERAS.update({(d, c[0]): c[1] for d, c in prods})
|
||||
|
||||
# device/mesh : x->forward, y-> right, z->down
|
||||
# view : x->right, y->down, z->forward
|
||||
device_frame_from_view_frame = np.array([
|
||||
[ 0., 0., 1.],
|
||||
[ 1., 0., 0.],
|
||||
[ 0., 1., 0.]
|
||||
])
|
||||
view_frame_from_device_frame = device_frame_from_view_frame.T
|
||||
|
||||
|
||||
# aka 'extrinsic_matrix'
|
||||
# road : x->forward, y -> left, z->up
|
||||
def get_view_frame_from_road_frame(roll, pitch, yaw, height):
|
||||
device_from_road = orient.rot_from_euler([roll, pitch, yaw]).dot(np.diag([1, -1, -1]))
|
||||
view_from_road = view_frame_from_device_frame.dot(device_from_road)
|
||||
return np.hstack((view_from_road, [[0], [height], [0]]))
|
||||
|
||||
|
||||
|
||||
# aka 'extrinsic_matrix'
|
||||
def get_view_frame_from_calib_frame(roll, pitch, yaw, height):
|
||||
device_from_calib = orient.rot_from_euler([roll, pitch, yaw])
|
||||
view_from_calib = view_frame_from_device_frame.dot(device_from_calib)
|
||||
return np.hstack((view_from_calib, [[0], [height], [0]]))
|
||||
|
||||
|
||||
def vp_from_ke(m):
|
||||
"""
|
||||
Computes the vanishing point from the product of the intrinsic and extrinsic
|
||||
matrices C = KE.
|
||||
|
||||
The vanishing point is defined as lim x->infinity C (x, 0, 0, 1).T
|
||||
"""
|
||||
return (m[0, 0]/m[2, 0], m[1, 0]/m[2, 0])
|
||||
|
||||
|
||||
def roll_from_ke(m):
|
||||
# note: different from calibration.h/RollAnglefromKE: i think that one's just wrong
|
||||
return np.arctan2(-(m[1, 0] - m[1, 1] * m[2, 0] / m[2, 1]),
|
||||
-(m[0, 0] - m[0, 1] * m[2, 0] / m[2, 1]))
|
||||
|
||||
|
||||
def normalize(img_pts, intrinsics):
|
||||
# normalizes image coordinates
|
||||
# accepts single pt or array of pts
|
||||
intrinsics_inv = np.linalg.inv(intrinsics)
|
||||
img_pts = np.array(img_pts)
|
||||
input_shape = img_pts.shape
|
||||
img_pts = np.atleast_2d(img_pts)
|
||||
img_pts = np.hstack((img_pts, np.ones((img_pts.shape[0], 1))))
|
||||
img_pts_normalized = img_pts.dot(intrinsics_inv.T)
|
||||
img_pts_normalized[(img_pts < 0).any(axis=1)] = np.nan
|
||||
return img_pts_normalized[:, :2].reshape(input_shape)
|
||||
|
||||
|
||||
def denormalize(img_pts, intrinsics, width=np.inf, height=np.inf):
|
||||
# denormalizes image coordinates
|
||||
# accepts single pt or array of pts
|
||||
img_pts = np.array(img_pts)
|
||||
input_shape = img_pts.shape
|
||||
img_pts = np.atleast_2d(img_pts)
|
||||
img_pts = np.hstack((img_pts, np.ones((img_pts.shape[0], 1), dtype=img_pts.dtype)))
|
||||
img_pts_denormalized = img_pts.dot(intrinsics.T)
|
||||
if np.isfinite(width):
|
||||
img_pts_denormalized[img_pts_denormalized[:, 0] > width] = np.nan
|
||||
img_pts_denormalized[img_pts_denormalized[:, 0] < 0] = np.nan
|
||||
if np.isfinite(height):
|
||||
img_pts_denormalized[img_pts_denormalized[:, 1] > height] = np.nan
|
||||
img_pts_denormalized[img_pts_denormalized[:, 1] < 0] = np.nan
|
||||
return img_pts_denormalized[:, :2].reshape(input_shape)
|
||||
|
||||
|
||||
def get_calib_from_vp(vp, intrinsics):
|
||||
vp_norm = normalize(vp, intrinsics)
|
||||
yaw_calib = np.arctan(vp_norm[0])
|
||||
pitch_calib = -np.arctan(vp_norm[1]*np.cos(yaw_calib))
|
||||
roll_calib = 0
|
||||
return roll_calib, pitch_calib, yaw_calib
|
||||
|
||||
|
||||
def device_from_ecef(pos_ecef, orientation_ecef, pt_ecef):
|
||||
# device from ecef frame
|
||||
# device frame is x -> forward, y-> right, z -> down
|
||||
# accepts single pt or array of pts
|
||||
input_shape = pt_ecef.shape
|
||||
pt_ecef = np.atleast_2d(pt_ecef)
|
||||
ecef_from_device_rot = orient.rotations_from_quats(orientation_ecef)
|
||||
device_from_ecef_rot = ecef_from_device_rot.T
|
||||
pt_ecef_rel = pt_ecef - pos_ecef
|
||||
pt_device = np.einsum('jk,ik->ij', device_from_ecef_rot, pt_ecef_rel)
|
||||
return pt_device.reshape(input_shape)
|
||||
|
||||
|
||||
def img_from_device(pt_device):
|
||||
# img coordinates from pts in device frame
|
||||
# first transforms to view frame, then to img coords
|
||||
# accepts single pt or array of pts
|
||||
input_shape = pt_device.shape
|
||||
pt_device = np.atleast_2d(pt_device)
|
||||
pt_view = np.einsum('jk,ik->ij', view_frame_from_device_frame, pt_device)
|
||||
|
||||
# This function should never return negative depths
|
||||
pt_view[pt_view[:, 2] < 0] = np.nan
|
||||
|
||||
pt_img = pt_view/pt_view[:, 2:3]
|
||||
return pt_img.reshape(input_shape)[:, :2]
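A sketch of the intrinsics and the normalize/denormalize helpers above round-tripping a pixel coordinate. The DEVICE_CAMERAS lookup and CameraConfig fields are as defined in this file; the chosen pixel is arbitrary.

```
import numpy as np
from openpilot.common.transformations.camera import DEVICE_CAMERAS, normalize, denormalize

cam = DEVICE_CAMERAS[("tici", "ar0231")].fcam   # CameraConfig(1928, 1208, 2648.0)
K = cam.intrinsics

pt = np.array([1000.0, 600.0])                  # pixel [u, v]
pt_norm = normalize(pt, K)                      # point on the normalized image plane
pt_back = denormalize(pt_norm, K, width=cam.width, height=cam.height)
assert np.allclose(pt, pt_back)                 # in-bounds points round-trip exactly
```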
|
||||
|
||||
43
common/transformations/coordinates.hpp
Normal file
@@ -0,0 +1,43 @@
|
||||
#pragma once
|
||||
|
||||
#include <eigen3/Eigen/Dense>
|
||||
|
||||
#define DEG2RAD(x) ((x) * M_PI / 180.0)
|
||||
#define RAD2DEG(x) ((x) * 180.0 / M_PI)
|
||||
|
||||
struct ECEF {
|
||||
double x, y, z;
|
||||
Eigen::Vector3d to_vector() const {
|
||||
return Eigen::Vector3d(x, y, z);
|
||||
}
|
||||
};
|
||||
|
||||
struct NED {
|
||||
double n, e, d;
|
||||
Eigen::Vector3d to_vector() const {
|
||||
return Eigen::Vector3d(n, e, d);
|
||||
}
|
||||
};
|
||||
|
||||
struct Geodetic {
|
||||
double lat, lon, alt;
|
||||
bool radians=false;
|
||||
};
|
||||
|
||||
ECEF geodetic2ecef(const Geodetic &g);
|
||||
Geodetic ecef2geodetic(const ECEF &e);
|
||||
|
||||
class LocalCoord {
|
||||
public:
|
||||
Eigen::Matrix3d ned2ecef_matrix;
|
||||
Eigen::Matrix3d ecef2ned_matrix;
|
||||
Eigen::Vector3d init_ecef;
|
||||
LocalCoord(const Geodetic &g, const ECEF &e);
|
||||
LocalCoord(const Geodetic &g) : LocalCoord(g, ::geodetic2ecef(g)) {}
|
||||
LocalCoord(const ECEF &e) : LocalCoord(::ecef2geodetic(e), e) {}
|
||||
|
||||
NED ecef2ned(const ECEF &e);
|
||||
ECEF ned2ecef(const NED &n);
|
||||
NED geodetic2ned(const Geodetic &g);
|
||||
Geodetic ned2geodetic(const NED &n);
|
||||
};
|
||||
18
common/transformations/coordinates.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from openpilot.common.transformations.orientation import numpy_wrap
|
||||
from openpilot.common.transformations.transformations import (ecef2geodetic_single,
|
||||
geodetic2ecef_single)
|
||||
from openpilot.common.transformations.transformations import LocalCoord as LocalCoord_single
|
||||
|
||||
|
||||
class LocalCoord(LocalCoord_single):
|
||||
ecef2ned = numpy_wrap(LocalCoord_single.ecef2ned_single, (3,), (3,))
|
||||
ned2ecef = numpy_wrap(LocalCoord_single.ned2ecef_single, (3,), (3,))
|
||||
geodetic2ned = numpy_wrap(LocalCoord_single.geodetic2ned_single, (3,), (3,))
|
||||
ned2geodetic = numpy_wrap(LocalCoord_single.ned2geodetic_single, (3,), (3,))
|
||||
|
||||
|
||||
geodetic2ecef = numpy_wrap(geodetic2ecef_single, (3,), (3,))
|
||||
ecef2geodetic = numpy_wrap(ecef2geodetic_single, (3,), (3,))
|
||||
|
||||
geodetic_from_ecef = ecef2geodetic
|
||||
ecef_from_geodetic = geodetic2ecef
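A sketch of the wrapped converters above; the LocalCoord.from_geodetic/from_ecef constructors are the ones exercised by tests/test_coordinates.py later in this listing, and the sample position is taken from those tests.

```
import numpy as np
import openpilot.common.transformations.coordinates as coord

geodetic = np.array([37.7610403, -122.4778699, 115.0])  # [lat, lon, alt]
ecef = coord.geodetic2ecef(geodetic)                     # ITRF14 ECEF, meters
assert np.allclose(coord.ecef2geodetic(ecef), geodetic, atol=1e-4)

# local NED frame anchored at that position
converter = coord.LocalCoord.from_geodetic(geodetic)
ned = converter.ecef2ned(ecef)                           # ~[0, 0, 0] at the anchor point
```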
|
||||
70
common/transformations/model.py
Normal file
@@ -0,0 +1,70 @@
|
||||
import numpy as np
|
||||
|
||||
from openpilot.common.transformations.orientation import rot_from_euler
|
||||
from openpilot.common.transformations.camera import get_view_frame_from_calib_frame, view_frame_from_device_frame, _ar_ox_fisheye
|
||||
|
||||
# segnet
|
||||
SEGNET_SIZE = (512, 384)
|
||||
|
||||
# MED model
|
||||
MEDMODEL_INPUT_SIZE = (512, 256)
|
||||
MEDMODEL_YUV_SIZE = (MEDMODEL_INPUT_SIZE[0], MEDMODEL_INPUT_SIZE[1] * 3 // 2)
|
||||
MEDMODEL_CY = 47.6
|
||||
|
||||
medmodel_fl = 910.0
|
||||
medmodel_intrinsics = np.array([
|
||||
[medmodel_fl, 0.0, 0.5 * MEDMODEL_INPUT_SIZE[0]],
|
||||
[0.0, medmodel_fl, MEDMODEL_CY],
|
||||
[0.0, 0.0, 1.0]])
|
||||
|
||||
|
||||
# BIG model
|
||||
BIGMODEL_INPUT_SIZE = (1024, 512)
|
||||
BIGMODEL_YUV_SIZE = (BIGMODEL_INPUT_SIZE[0], BIGMODEL_INPUT_SIZE[1] * 3 // 2)
|
||||
|
||||
bigmodel_fl = 910.0
|
||||
bigmodel_intrinsics = np.array([
|
||||
[bigmodel_fl, 0.0, 0.5 * BIGMODEL_INPUT_SIZE[0]],
|
||||
[0.0, bigmodel_fl, 256 + MEDMODEL_CY],
|
||||
[0.0, 0.0, 1.0]])
|
||||
|
||||
|
||||
# SBIG model (big model with the size of small model)
|
||||
SBIGMODEL_INPUT_SIZE = (512, 256)
|
||||
SBIGMODEL_YUV_SIZE = (SBIGMODEL_INPUT_SIZE[0], SBIGMODEL_INPUT_SIZE[1] * 3 // 2)
|
||||
|
||||
sbigmodel_fl = 455.0
|
||||
sbigmodel_intrinsics = np.array([
|
||||
[sbigmodel_fl, 0.0, 0.5 * SBIGMODEL_INPUT_SIZE[0]],
|
||||
[0.0, sbigmodel_fl, 0.5 * (256 + MEDMODEL_CY)],
|
||||
[0.0, 0.0, 1.0]])
|
||||
|
||||
DM_INPUT_SIZE = (1440, 960)
|
||||
dmonitoringmodel_fl = _ar_ox_fisheye.focal_length
|
||||
dmonitoringmodel_intrinsics = np.array([
|
||||
[dmonitoringmodel_fl, 0.0, DM_INPUT_SIZE[0]/2],
|
||||
[0.0, dmonitoringmodel_fl, DM_INPUT_SIZE[1]/2 - (_ar_ox_fisheye.height - DM_INPUT_SIZE[1])/2],
|
||||
[0.0, 0.0, 1.0]])
|
||||
|
||||
bigmodel_frame_from_calib_frame = np.dot(bigmodel_intrinsics,
|
||||
get_view_frame_from_calib_frame(0, 0, 0, 0))
|
||||
|
||||
|
||||
sbigmodel_frame_from_calib_frame = np.dot(sbigmodel_intrinsics,
|
||||
get_view_frame_from_calib_frame(0, 0, 0, 0))
|
||||
|
||||
medmodel_frame_from_calib_frame = np.dot(medmodel_intrinsics,
|
||||
get_view_frame_from_calib_frame(0, 0, 0, 0))
|
||||
|
||||
medmodel_frame_from_bigmodel_frame = np.dot(medmodel_intrinsics, np.linalg.inv(bigmodel_intrinsics))
|
||||
|
||||
calib_from_medmodel = np.linalg.inv(medmodel_frame_from_calib_frame[:, :3])
|
||||
calib_from_sbigmodel = np.linalg.inv(sbigmodel_frame_from_calib_frame[:, :3])
|
||||
|
||||
# This function is verified to give similar results to xx.uncommon.utils.transform_img
|
||||
def get_warp_matrix(device_from_calib_euler: np.ndarray, intrinsics: np.ndarray, bigmodel_frame: bool = False) -> np.ndarray:
|
||||
calib_from_model = calib_from_sbigmodel if bigmodel_frame else calib_from_medmodel
|
||||
device_from_calib = rot_from_euler(device_from_calib_euler)
|
||||
camera_from_calib = intrinsics @ view_frame_from_device_frame @ device_from_calib
|
||||
warp_matrix: np.ndarray = camera_from_calib @ calib_from_model
|
||||
return warp_matrix
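A sketch of calling the warp computed above, which maps model-frame pixels into the camera frame for a given calibration. The openpilot.common.transformations.model module path is assumed from the file path, and the calibration angles are illustrative.

```
import numpy as np
from openpilot.common.transformations.camera import DEVICE_CAMERAS
from openpilot.common.transformations.model import get_warp_matrix  # assumed module path

device_from_calib_euler = np.array([0.0, 0.02, -0.01])  # illustrative [roll, pitch, yaw]
cam = DEVICE_CAMERAS[("tici", "ar0231")].fcam

warp = get_warp_matrix(device_from_calib_euler, cam.intrinsics)                 # medmodel frame
warp_big = get_warp_matrix(device_from_calib_euler, cam.intrinsics, bigmodel_frame=True)
print(warp.shape, warp_big.shape)                        # (3, 3) for both
```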
|
||||
17
common/transformations/orientation.hpp
Normal file
@@ -0,0 +1,17 @@
|
||||
#pragma once
|
||||
#include <eigen3/Eigen/Dense>
|
||||
#include "common/transformations/coordinates.hpp"
|
||||
|
||||
|
||||
Eigen::Quaterniond ensure_unique(const Eigen::Quaterniond &quat);
|
||||
|
||||
Eigen::Quaterniond euler2quat(const Eigen::Vector3d &euler);
|
||||
Eigen::Vector3d quat2euler(const Eigen::Quaterniond &quat);
|
||||
Eigen::Matrix3d quat2rot(const Eigen::Quaterniond &quat);
|
||||
Eigen::Quaterniond rot2quat(const Eigen::Matrix3d &rot);
|
||||
Eigen::Matrix3d euler2rot(const Eigen::Vector3d &euler);
|
||||
Eigen::Vector3d rot2euler(const Eigen::Matrix3d &rot);
|
||||
Eigen::Matrix3d rot_matrix(double roll, double pitch, double yaw);
|
||||
Eigen::Matrix3d rot(const Eigen::Vector3d &axis, double angle);
|
||||
Eigen::Vector3d ecef_euler_from_ned(const ECEF &ecef_init, const Eigen::Vector3d &ned_pose);
|
||||
Eigen::Vector3d ned_euler_from_ecef(const ECEF &ecef_init, const Eigen::Vector3d &ecef_pose);
|
||||
52
common/transformations/orientation.py
Normal file
@@ -0,0 +1,52 @@
|
||||
import numpy as np
|
||||
from collections.abc import Callable
|
||||
|
||||
from openpilot.common.transformations.transformations import (ecef_euler_from_ned_single,
|
||||
euler2quat_single,
|
||||
euler2rot_single,
|
||||
ned_euler_from_ecef_single,
|
||||
quat2euler_single,
|
||||
quat2rot_single,
|
||||
rot2euler_single,
|
||||
rot2quat_single)
|
||||
|
||||
|
||||
def numpy_wrap(function, input_shape, output_shape) -> Callable[..., np.ndarray]:
|
||||
"""Wrap a function to take either an input or list of inputs and return the correct shape"""
|
||||
def f(*inps):
|
||||
*args, inp = inps
|
||||
inp = np.array(inp)
|
||||
shape = inp.shape
|
||||
|
||||
if len(shape) == len(input_shape):
|
||||
out_shape = output_shape
|
||||
else:
|
||||
out_shape = (shape[0],) + output_shape
|
||||
|
||||
# Add empty dimension if inputs is not a list
|
||||
if len(shape) == len(input_shape):
|
||||
inp.shape = (1, ) + inp.shape
|
||||
|
||||
result = np.asarray([function(*args, i) for i in inp])
|
||||
result.shape = out_shape
|
||||
return result
|
||||
return f
|
||||
|
||||
|
||||
euler2quat = numpy_wrap(euler2quat_single, (3,), (4,))
|
||||
quat2euler = numpy_wrap(quat2euler_single, (4,), (3,))
|
||||
quat2rot = numpy_wrap(quat2rot_single, (4,), (3, 3))
|
||||
rot2quat = numpy_wrap(rot2quat_single, (3, 3), (4,))
|
||||
euler2rot = numpy_wrap(euler2rot_single, (3,), (3, 3))
|
||||
rot2euler = numpy_wrap(rot2euler_single, (3, 3), (3,))
|
||||
ecef_euler_from_ned = numpy_wrap(ecef_euler_from_ned_single, (3,), (3,))
|
||||
ned_euler_from_ecef = numpy_wrap(ned_euler_from_ecef_single, (3,), (3,))
|
||||
|
||||
quats_from_rotations = rot2quat
|
||||
quat_from_rot = rot2quat
|
||||
rotations_from_quats = quat2rot
|
||||
rot_from_quat = quat2rot
|
||||
euler_from_rot = rot2euler
|
||||
euler_from_quat = quat2euler
|
||||
rot_from_euler = euler2rot
|
||||
quat_from_euler = euler2quat
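A sketch of the single-vs-batch behavior that numpy_wrap above provides: the same wrapped function accepts either one euler angle or a stack of them, and the output shape follows the input.

```
import numpy as np
from openpilot.common.transformations.orientation import euler2quat, rot_from_euler

single = euler2quat([0.0, 0.1, 0.2])     # shape (4,): one quaternion
batch = euler2quat(np.zeros((5, 3)))     # shape (5, 4): one quaternion per row
rots = rot_from_euler(np.zeros((5, 3)))  # shape (5, 3, 3): one rotation matrix per row
print(single.shape, batch.shape, rots.shape)
```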
|
||||
0
common/transformations/tests/__init__.py
Normal file
104
common/transformations/tests/test_coordinates.py
Normal file
@@ -0,0 +1,104 @@
import numpy as np

import openpilot.common.transformations.coordinates as coord

geodetic_positions = np.array([[37.7610403, -122.4778699, 115],
                               [27.4840915, -68.5867592, 2380],
                               [32.4916858, -113.652821, -6],
                               [15.1392514, 103.6976037, 24],
                               [24.2302229, 44.2835412, 1650]])

ecef_positions = np.array([[-2711076.55270557, -4259167.14692758, 3884579.87669935],
                           [ 2068042.69652729, -5273435.40316622, 2927004.89190746],
                           [-2160412.60461669, -4932588.89873832, 3406542.29652851],
                           [-1458247.92550567, 5983060.87496612, 1654984.6099885 ],
                           [ 4167239.10867871, 4064301.90363223, 2602234.6065749 ]])

ecef_positions_offset = np.array([[-2711004.46961115, -4259099.33540613, 3884605.16002147],
                                  [ 2068074.30639499, -5273413.78835412, 2927012.48741131],
                                  [-2160344.53748176, -4932586.20092211, 3406636.2962545 ],
                                  [-1458211.98517094, 5983151.11161276, 1655077.02698447],
                                  [ 4167271.20055269, 4064398.22619263, 2602238.95265847]])


ned_offsets = np.array([[78.722153649976391, 24.396208657446344, 60.343017506838436],
                        [10.699003365155221, 37.319278617604269, 4.1084100025050407],
                        [95.282646251726959, 61.266689955574428, -25.376506058505054],
                        [68.535769283630003, -56.285970011848889, -100.54840137956515],
                        [-33.066609321880179, 46.549821994306861, -84.062540548335591]])

ecef_init_batch = np.array([2068042.69652729, -5273435.40316622, 2927004.89190746])
ecef_positions_offset_batch = np.array([[ 2068089.41454771, -5273434.46829148, 2927074.04783672],
                                        [ 2068103.31628647, -5273393.92275431, 2927102.08725987],
                                        [ 2068108.49939636, -5273359.27047121, 2927045.07091581],
                                        [ 2068075.12395611, -5273381.69432566, 2927041.08207992],
                                        [ 2068060.72033399, -5273430.6061505, 2927094.54928305]])

ned_offsets_batch = np.array([[ 53.88103168, 43.83445935, -46.27488057],
                              [ 93.83378995, 71.57943024, -30.23113187],
                              [ 57.26725796, 89.05602684, 23.02265814],
                              [ 49.71775195, 49.79767572, 17.15351015],
                              [ 78.56272609, 18.53100158, -43.25290759]])


class TestNED:
  def test_small_distances(self):
    start_geodetic = np.array([33.8042184, -117.888593, 0.0])
    local_coord = coord.LocalCoord.from_geodetic(start_geodetic)

    start_ned = local_coord.geodetic2ned(start_geodetic)
    np.testing.assert_array_equal(start_ned, np.zeros(3,))

    west_geodetic = start_geodetic + [0, -0.0005, 0]
    west_ned = local_coord.geodetic2ned(west_geodetic)
    assert np.abs(west_ned[0]) < 1e-3
    assert west_ned[1] < 0

    southwest_geodetic = start_geodetic + [-0.0005, -0.002, 0]
    southwest_ned = local_coord.geodetic2ned(southwest_geodetic)
    assert southwest_ned[0] < 0
    assert southwest_ned[1] < 0

  def test_ecef_geodetic(self):
    # testing single
    np.testing.assert_allclose(ecef_positions[0], coord.geodetic2ecef(geodetic_positions[0]), rtol=1e-9)
    np.testing.assert_allclose(geodetic_positions[0, :2], coord.ecef2geodetic(ecef_positions[0])[:2], rtol=1e-9)
    np.testing.assert_allclose(geodetic_positions[0, 2], coord.ecef2geodetic(ecef_positions[0])[2], rtol=1e-9, atol=1e-4)

    np.testing.assert_allclose(geodetic_positions[:, :2], coord.ecef2geodetic(ecef_positions)[:, :2], rtol=1e-9)
    np.testing.assert_allclose(geodetic_positions[:, 2], coord.ecef2geodetic(ecef_positions)[:, 2], rtol=1e-9, atol=1e-4)
    np.testing.assert_allclose(ecef_positions, coord.geodetic2ecef(geodetic_positions), rtol=1e-9)


  def test_ned(self):
    for ecef_pos in ecef_positions:
      converter = coord.LocalCoord.from_ecef(ecef_pos)
      ecef_pos_moved = ecef_pos + [25, -25, 25]
      ecef_pos_moved_double_converted = converter.ned2ecef(converter.ecef2ned(ecef_pos_moved))
      np.testing.assert_allclose(ecef_pos_moved, ecef_pos_moved_double_converted, rtol=1e-9)

    for geo_pos in geodetic_positions:
      converter = coord.LocalCoord.from_geodetic(geo_pos)
      geo_pos_moved = geo_pos + np.array([0, 0, 10])
      geo_pos_double_converted_moved = converter.ned2geodetic(converter.geodetic2ned(geo_pos) + np.array([0, 0, -10]))
      np.testing.assert_allclose(geo_pos_moved[:2], geo_pos_double_converted_moved[:2], rtol=1e-9, atol=1e-6)
      np.testing.assert_allclose(geo_pos_moved[2], geo_pos_double_converted_moved[2], rtol=1e-9, atol=1e-4)

  def test_ned_saved_results(self):
    for i, ecef_pos in enumerate(ecef_positions):
      converter = coord.LocalCoord.from_ecef(ecef_pos)
      np.testing.assert_allclose(converter.ned2ecef(ned_offsets[i]),
                                 ecef_positions_offset[i],
                                 rtol=1e-9, atol=1e-4)
      np.testing.assert_allclose(converter.ecef2ned(ecef_positions_offset[i]),
                                 ned_offsets[i],
                                 rtol=1e-9, atol=1e-4)

  def test_ned_batch(self):
    converter = coord.LocalCoord.from_ecef(ecef_init_batch)
    np.testing.assert_allclose(converter.ecef2ned(ecef_positions_offset_batch),
                               ned_offsets_batch,
                               rtol=1e-9, atol=1e-7)
    np.testing.assert_allclose(converter.ned2ecef(ned_offsets_batch),
                               ecef_positions_offset_batch,
                               rtol=1e-9, atol=1e-7)
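For reference, a small round-trip sketch of the API these tests exercise (illustrative only, not part of the diff; the sample position is taken from geodetic_positions above):

# Illustrative sketch -- not part of the release diff.
import numpy as np
import openpilot.common.transformations.coordinates as coord

geodetic = np.array([37.7610403, -122.4778699, 115.0])   # lat (deg), lon (deg), alt (m)
ecef = coord.geodetic2ecef(geodetic)
local = coord.LocalCoord.from_geodetic(geodetic)
ned = local.ecef2ned(ecef)                                # the origin maps to ~[0, 0, 0]
back = coord.ecef2geodetic(local.ned2ecef(ned))
assert np.allclose(back[:2], geodetic[:2])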
61
common/transformations/tests/test_orientation.py
Normal file
@@ -0,0 +1,61 @@
import numpy as np

from openpilot.common.transformations.orientation import euler2quat, quat2euler, euler2rot, rot2euler, \
                                                          rot2quat, quat2rot, \
                                                          ned_euler_from_ecef

eulers = np.array([[ 1.46520501, 2.78688383, 2.92780854],
                   [ 4.86909526, 3.60618161, 4.30648981],
                   [ 3.72175965, 2.68763705, 5.43895988],
                   [ 5.92306687, 5.69573614, 0.81100357],
                   [ 0.67838374, 5.02402037, 2.47106426]])

quats = np.array([[ 0.66855182, -0.71500939, 0.19539353, 0.06017818],
                  [ 0.43163717, 0.70013301, 0.28209145, 0.49389021],
                  [ 0.44121991, -0.08252646, 0.34257534, 0.82532207],
                  [ 0.88578382, -0.04515356, -0.32936046, 0.32383617],
                  [ 0.06578165, 0.61282835, 0.07126891, 0.78424163]])

ecef_positions = np.array([[-2711076.55270557, -4259167.14692758, 3884579.87669935],
                           [ 2068042.69652729, -5273435.40316622, 2927004.89190746],
                           [-2160412.60461669, -4932588.89873832, 3406542.29652851],
                           [-1458247.92550567, 5983060.87496612, 1654984.6099885 ],
                           [ 4167239.10867871, 4064301.90363223, 2602234.6065749 ]])

ned_eulers = np.array([[ 0.46806039, -0.4881889 , 1.65697808],
                       [-2.14525969, -0.36533066, 0.73813479],
                       [-1.39523364, -0.58540761, -1.77376356],
                       [-1.84220435, 0.61828016, -1.03310421],
                       [ 2.50450101, 0.36304151, 0.33136365]])


class TestOrientation:
  def test_quat_euler(self):
    for i, eul in enumerate(eulers):
      np.testing.assert_allclose(quats[i], euler2quat(eul), rtol=1e-7)
      np.testing.assert_allclose(quats[i], euler2quat(quat2euler(quats[i])), rtol=1e-6)
    for i, eul in enumerate(eulers):
      np.testing.assert_allclose(quats[i], euler2quat(list(eul)), rtol=1e-7)
      np.testing.assert_allclose(quats[i], euler2quat(quat2euler(list(quats[i]))), rtol=1e-6)
    np.testing.assert_allclose(quats, euler2quat(eulers), rtol=1e-7)
    np.testing.assert_allclose(quats, euler2quat(quat2euler(quats)), rtol=1e-6)

  def test_rot_euler(self):
    for eul in eulers:
      np.testing.assert_allclose(euler2quat(eul), euler2quat(rot2euler(euler2rot(eul))), rtol=1e-7)
    for eul in eulers:
      np.testing.assert_allclose(euler2quat(eul), euler2quat(rot2euler(euler2rot(list(eul)))), rtol=1e-7)
    np.testing.assert_allclose(euler2quat(eulers), euler2quat(rot2euler(euler2rot(eulers))), rtol=1e-7)

  def test_rot_quat(self):
    for quat in quats:
      np.testing.assert_allclose(quat, rot2quat(quat2rot(quat)), rtol=1e-7)
    for quat in quats:
      np.testing.assert_allclose(quat, rot2quat(quat2rot(list(quat))), rtol=1e-7)
    np.testing.assert_allclose(quats, rot2quat(quat2rot(quats)), rtol=1e-7)

  def test_euler_ned(self):
    for i in range(len(eulers)):
      np.testing.assert_allclose(ned_eulers[i], ned_euler_from_ecef(ecef_positions[i], eulers[i]), rtol=1e-7)
      #np.testing.assert_allclose(eulers[i], ecef_euler_from_ned(ecef_positions[i], ned_eulers[i]), rtol=1e-7)
    # np.testing.assert_allclose(ned_eulers, ned_euler_from_ecef(ecef_positions, eulers), rtol=1e-7)
72
common/transformations/transformations.pxd
Normal file
@@ -0,0 +1,72 @@
# cython: language_level=3
from libcpp cimport bool

cdef extern from "orientation.cc":
  pass

cdef extern from "orientation.hpp":
  cdef cppclass Quaternion "Eigen::Quaterniond":
    Quaternion()
    Quaternion(double, double, double, double)
    double w()
    double x()
    double y()
    double z()

  cdef cppclass Vector3 "Eigen::Vector3d":
    Vector3()
    Vector3(double, double, double)
    double operator()(int)

  cdef cppclass Matrix3 "Eigen::Matrix3d":
    Matrix3()
    Matrix3(double*)

    double operator()(int, int)

  Quaternion euler2quat(const Vector3 &)
  Vector3 quat2euler(const Quaternion &)
  Matrix3 quat2rot(const Quaternion &)
  Quaternion rot2quat(const Matrix3 &)
  Vector3 rot2euler(const Matrix3 &)
  Matrix3 euler2rot(const Vector3 &)
  Matrix3 rot_matrix(double, double, double)
  Vector3 ecef_euler_from_ned(const ECEF &, const Vector3 &)
  Vector3 ned_euler_from_ecef(const ECEF &, const Vector3 &)


cdef extern from "coordinates.cc":
  cdef struct ECEF:
    double x
    double y
    double z

  cdef struct NED:
    double n
    double e
    double d

  cdef struct Geodetic:
    double lat
    double lon
    double alt
    bool radians

  ECEF geodetic2ecef(const Geodetic &)
  Geodetic ecef2geodetic(const ECEF &)

  cdef cppclass LocalCoord_c "LocalCoord":
    Matrix3 ned2ecef_matrix
    Matrix3 ecef2ned_matrix

    LocalCoord_c(const Geodetic &, const ECEF &)
    LocalCoord_c(const Geodetic &)
    LocalCoord_c(const ECEF &)

    NED ecef2ned(const ECEF &)
    ECEF ned2ecef(const NED &)
    NED geodetic2ned(const Geodetic &)
    Geodetic ned2geodetic(const NED &)

cdef extern from "coordinates.hpp":
  pass
173
common/transformations/transformations.pyx
Normal file
@@ -0,0 +1,173 @@
# distutils: language = c++
# cython: language_level = 3
from openpilot.common.transformations.transformations cimport Matrix3, Vector3, Quaternion
from openpilot.common.transformations.transformations cimport ECEF, NED, Geodetic

from openpilot.common.transformations.transformations cimport euler2quat as euler2quat_c
from openpilot.common.transformations.transformations cimport quat2euler as quat2euler_c
from openpilot.common.transformations.transformations cimport quat2rot as quat2rot_c
from openpilot.common.transformations.transformations cimport rot2quat as rot2quat_c
from openpilot.common.transformations.transformations cimport euler2rot as euler2rot_c
from openpilot.common.transformations.transformations cimport rot2euler as rot2euler_c
from openpilot.common.transformations.transformations cimport rot_matrix as rot_matrix_c
from openpilot.common.transformations.transformations cimport ecef_euler_from_ned as ecef_euler_from_ned_c
from openpilot.common.transformations.transformations cimport ned_euler_from_ecef as ned_euler_from_ecef_c
from openpilot.common.transformations.transformations cimport geodetic2ecef as geodetic2ecef_c
from openpilot.common.transformations.transformations cimport ecef2geodetic as ecef2geodetic_c
from openpilot.common.transformations.transformations cimport LocalCoord_c


import numpy as np
cimport numpy as np

cdef np.ndarray[double, ndim=2] matrix2numpy(Matrix3 m):
  return np.array([
    [m(0, 0), m(0, 1), m(0, 2)],
    [m(1, 0), m(1, 1), m(1, 2)],
    [m(2, 0), m(2, 1), m(2, 2)],
  ])

cdef Matrix3 numpy2matrix(np.ndarray[double, ndim=2, mode="fortran"] m):
  assert m.shape[0] == 3
  assert m.shape[1] == 3
  return Matrix3(<double*>m.data)

cdef ECEF list2ecef(ecef):
  cdef ECEF e
  e.x = ecef[0]
  e.y = ecef[1]
  e.z = ecef[2]
  return e

cdef NED list2ned(ned):
  cdef NED n
  n.n = ned[0]
  n.e = ned[1]
  n.d = ned[2]
  return n

cdef Geodetic list2geodetic(geodetic):
  cdef Geodetic g
  g.lat = geodetic[0]
  g.lon = geodetic[1]
  g.alt = geodetic[2]
  return g

def euler2quat_single(euler):
  cdef Vector3 e = Vector3(euler[0], euler[1], euler[2])
  cdef Quaternion q = euler2quat_c(e)
  return [q.w(), q.x(), q.y(), q.z()]

def quat2euler_single(quat):
  cdef Quaternion q = Quaternion(quat[0], quat[1], quat[2], quat[3])
  cdef Vector3 e = quat2euler_c(q)
  return [e(0), e(1), e(2)]

def quat2rot_single(quat):
  cdef Quaternion q = Quaternion(quat[0], quat[1], quat[2], quat[3])
  cdef Matrix3 r = quat2rot_c(q)
  return matrix2numpy(r)

def rot2quat_single(rot):
  cdef Matrix3 r = numpy2matrix(np.asfortranarray(rot, dtype=np.double))
  cdef Quaternion q = rot2quat_c(r)
  return [q.w(), q.x(), q.y(), q.z()]

def euler2rot_single(euler):
  cdef Vector3 e = Vector3(euler[0], euler[1], euler[2])
  cdef Matrix3 r = euler2rot_c(e)
  return matrix2numpy(r)

def rot2euler_single(rot):
  cdef Matrix3 r = numpy2matrix(np.asfortranarray(rot, dtype=np.double))
  cdef Vector3 e = rot2euler_c(r)
  return [e(0), e(1), e(2)]

def rot_matrix(roll, pitch, yaw):
  return matrix2numpy(rot_matrix_c(roll, pitch, yaw))

def ecef_euler_from_ned_single(ecef_init, ned_pose):
  cdef ECEF init = list2ecef(ecef_init)
  cdef Vector3 pose = Vector3(ned_pose[0], ned_pose[1], ned_pose[2])

  cdef Vector3 e = ecef_euler_from_ned_c(init, pose)
  return [e(0), e(1), e(2)]

def ned_euler_from_ecef_single(ecef_init, ecef_pose):
  cdef ECEF init = list2ecef(ecef_init)
  cdef Vector3 pose = Vector3(ecef_pose[0], ecef_pose[1], ecef_pose[2])

  cdef Vector3 e = ned_euler_from_ecef_c(init, pose)
  return [e(0), e(1), e(2)]

def geodetic2ecef_single(geodetic):
  cdef Geodetic g = list2geodetic(geodetic)
  cdef ECEF e = geodetic2ecef_c(g)
  return [e.x, e.y, e.z]

def ecef2geodetic_single(ecef):
  cdef ECEF e = list2ecef(ecef)
  cdef Geodetic g = ecef2geodetic_c(e)
  return [g.lat, g.lon, g.alt]


cdef class LocalCoord:
  cdef LocalCoord_c * lc

  def __init__(self, geodetic=None, ecef=None):
    assert (geodetic is not None) or (ecef is not None)
    if geodetic is not None:
      self.lc = new LocalCoord_c(list2geodetic(geodetic))
    elif ecef is not None:
      self.lc = new LocalCoord_c(list2ecef(ecef))

  @property
  def ned2ecef_matrix(self):
    return matrix2numpy(self.lc.ned2ecef_matrix)

  @property
  def ecef2ned_matrix(self):
    return matrix2numpy(self.lc.ecef2ned_matrix)

  @property
  def ned_from_ecef_matrix(self):
    return self.ecef2ned_matrix

  @property
  def ecef_from_ned_matrix(self):
    return self.ned2ecef_matrix

  @classmethod
  def from_geodetic(cls, geodetic):
    return cls(geodetic=geodetic)

  @classmethod
  def from_ecef(cls, ecef):
    return cls(ecef=ecef)

  def ecef2ned_single(self, ecef):
    assert self.lc
    cdef ECEF e = list2ecef(ecef)
    cdef NED n = self.lc.ecef2ned(e)
    return [n.n, n.e, n.d]

  def ned2ecef_single(self, ned):
    assert self.lc
    cdef NED n = list2ned(ned)
    cdef ECEF e = self.lc.ned2ecef(n)
    return [e.x, e.y, e.z]

  def geodetic2ned_single(self, geodetic):
    assert self.lc
    cdef Geodetic g = list2geodetic(geodetic)
    cdef NED n = self.lc.geodetic2ned(g)
    return [n.n, n.e, n.d]

  def ned2geodetic_single(self, ned):
    assert self.lc
    cdef NED n = list2ned(ned)
    cdef Geodetic g = self.lc.ned2geodetic(n)
    return [g.lat, g.lon, g.alt]

  def __dealloc__(self):
    del self.lc
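A short consumption sketch for the cdef class above (illustrative only, not part of the diff; it assumes the built extension is importable as openpilot.common.transformations.transformations):

# Illustrative sketch -- not part of the release diff.
import numpy as np
from openpilot.common.transformations.transformations import LocalCoord

lc = LocalCoord.from_geodetic([32.4916858, -113.652821, -6.0])   # lat, lon, alt
R_ecef2ned = lc.ecef2ned_matrix                                  # 3x3 numpy rotation matrix
R_ned2ecef = lc.ned2ecef_matrix
# the two matrices are inverse rotations of each other
assert np.allclose(R_ecef2ned @ R_ned2ecef, np.eye(3), atol=1e-9)
print(lc.geodetic2ned_single([32.4916858, -113.652821, -6.0]))   # ~[0.0, 0.0, 0.0] at the origin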
BIN
common/transformations/transformations.so
Executable file
Binary file not shown.
187
common/util.h
Normal file
@@ -0,0 +1,187 @@
#pragma once

#include <fcntl.h>
#include <sys/stat.h>
#include <unistd.h>

#include <algorithm>
#include <atomic>
#include <chrono>
#include <csignal>
#include <map>
#include <memory>
#include <mutex>
#include <string>
#include <thread>
#include <vector>

// keep trying if x gets interrupted by a signal
#define HANDLE_EINTR(x)                                        \
  ({                                                           \
    decltype(x) ret_;                                          \
    int try_cnt = 0;                                           \
    do {                                                       \
      ret_ = (x);                                              \
    } while (ret_ == -1 && errno == EINTR && try_cnt++ < 100); \
    ret_;                                                      \
  })

#ifndef sighandler_t
typedef void (*sighandler_t)(int sig);
#endif

const double MILE_TO_KM = 1.609344;
const double KM_TO_MILE = 1. / MILE_TO_KM;
const double MS_TO_KPH = 3.6;
const double MS_TO_MPH = MS_TO_KPH * KM_TO_MILE;
const double METER_TO_MILE = KM_TO_MILE / 1000.0;
const double METER_TO_FOOT = 3.28084;

#define ALIGNED_SIZE(x, align) (((x) + (align)-1) & ~((align)-1))

namespace util {

void set_thread_name(const char* name);
int set_realtime_priority(int level);
int set_core_affinity(std::vector<int> cores);
int set_file_descriptor_limit(uint64_t limit);

// ***** math helpers *****

// map x from [a1, a2] to [b1, b2]
template <typename T>
T map_val(T x, T a1, T a2, T b1, T b2) {
  x = std::clamp(x, a1, a2);
  T ra = a2 - a1;
  T rb = b2 - b1;
  return (x - a1) * rb / ra + b1;
}

// ***** string helpers *****

template <typename... Args>
std::string string_format(const std::string& format, Args... args) {
  size_t size = snprintf(nullptr, 0, format.c_str(), args...) + 1;
  std::unique_ptr<char[]> buf(new char[size]);
  snprintf(buf.get(), size, format.c_str(), args...);
  return std::string(buf.get(), buf.get() + size - 1);
}

std::string getenv(const char* key, std::string default_val = "");
int getenv(const char* key, int default_val);
float getenv(const char* key, float default_val);

std::string hexdump(const uint8_t* in, const size_t size);
bool starts_with(const std::string &s1, const std::string &s2);
bool ends_with(const std::string &s, const std::string &suffix);
std::string strip(const std::string &str);

// ***** random helpers *****
int random_int(int min, int max);
std::string random_string(std::string::size_type length);

// **** file helpers *****
std::string read_file(const std::string& fn);
std::map<std::string, std::string> read_files_in_dir(const std::string& path);
int write_file(const char* path, const void* data, size_t size, int flags = O_WRONLY, mode_t mode = 0664);

FILE* safe_fopen(const char* filename, const char* mode);
size_t safe_fwrite(const void * ptr, size_t size, size_t count, FILE * stream);
int safe_fflush(FILE *stream);
int safe_ioctl(int fd, unsigned long request, void *argp);

std::string readlink(const std::string& path);
bool file_exists(const std::string& fn);
bool create_directories(const std::string &dir, mode_t mode);

std::string check_output(const std::string& command);

bool system_time_valid();

inline void sleep_for(const int milliseconds) {
  if (milliseconds > 0) {
    std::this_thread::sleep_for(std::chrono::milliseconds(milliseconds));
  }
}

} // namespace util

class ExitHandler {
public:
  ExitHandler() {
    std::signal(SIGINT, (sighandler_t)set_do_exit);
    std::signal(SIGTERM, (sighandler_t)set_do_exit);

#ifndef __APPLE__
    std::signal(SIGPWR, (sighandler_t)set_do_exit);
#endif
  }
  inline static std::atomic<bool> power_failure = false;
  inline static std::atomic<int> signal = 0;
  inline operator bool() { return do_exit; }
  inline ExitHandler& operator=(bool v) {
    signal = 0;
    do_exit = v;
    return *this;
  }
private:
  static void set_do_exit(int sig) {
#ifndef __APPLE__
    power_failure = (sig == SIGPWR);
#endif
    signal = sig;
    do_exit = true;
  }
  inline static std::atomic<bool> do_exit = false;
};

struct unique_fd {
  unique_fd(int fd = -1) : fd_(fd) {}
  unique_fd& operator=(unique_fd&& uf) {
    fd_ = uf.fd_;
    uf.fd_ = -1;
    return *this;
  }
  ~unique_fd() {
    if (fd_ != -1) close(fd_);
  }
  operator int() const { return fd_; }
  int fd_;
};

class FirstOrderFilter {
public:
  FirstOrderFilter(float x0, float ts, float dt, bool initialized = true) {
    k_ = (dt / ts) / (1.0 + dt / ts);
    x_ = x0;
    initialized_ = initialized;
  }
  inline float update(float x) {
    if (initialized_) {
      x_ = (1. - k_) * x_ + k_ * x;
    } else {
      initialized_ = true;
      x_ = x;
    }
    return x_;
  }
  inline void reset(float x) { x_ = x; }
  inline float x(){ return x_; }

private:
  float x_, k_;
  bool initialized_;
};

template<typename T>
void update_max_atomic(std::atomic<T>& max, T const& value) {
  T prev = max;
  while (prev < value && !max.compare_exchange_weak(prev, value)) {}
}

typedef struct Rect {
  int x;
  int y;
  int w;
  int h;
} Rect;
46
common/util.py
Normal file
@@ -0,0 +1,46 @@
import os
import subprocess

def sudo_write(val: str, path: str) -> None:
  try:
    with open(path, 'w') as f:
      f.write(str(val))
  except PermissionError:
    os.system(f"sudo chmod a+w {path}")
    try:
      with open(path, 'w') as f:
        f.write(str(val))
    except PermissionError:
      # fallback for debugfs files
      os.system(f"sudo su -c 'echo {val} > {path}'")

def sudo_read(path: str) -> str:
  try:
    return subprocess.check_output(f"sudo cat {path}", shell=True, encoding='utf8').strip()
  except Exception:
    return ""

class MovingAverage:
  def __init__(self, window_size: int):
    self.window_size: int = window_size
    self.buffer: list[float] = [0.0] * window_size
    self.index: int = 0
    self.count: int = 0
    self.sum: float = 0.0

  def add_value(self, new_value: float):
    # Update the sum: subtract the value being replaced and add the new value
    self.sum -= self.buffer[self.index]
    self.buffer[self.index] = new_value
    self.sum += new_value

    # Update the index in a circular manner
    self.index = (self.index + 1) % self.window_size

    # Track the number of added values (for partial windows)
    self.count = min(self.count + 1, self.window_size)

  def get_average(self) -> float:
    if self.count == 0:
      return float('nan')
    return self.sum / self.count
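A quick sketch of the ring-buffer average above (illustrative only, not part of the diff; it assumes the module is importable as openpilot.common.util): values older than the window drop out of the running sum, and partial windows divide by the count seen so far.

# Illustrative sketch -- not part of the release diff.
from openpilot.common.util import MovingAverage

avg = MovingAverage(window_size=3)
avg.add_value(1.0)
avg.add_value(2.0)
print(avg.get_average())   # 1.5  (partial window: sum 3.0 / count 2)
avg.add_value(3.0)
avg.add_value(4.0)         # evicts 1.0 from the window
print(avg.get_average())   # 3.0  (window now holds 2.0, 3.0, 4.0)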
118
common/utils.py
Normal file
@@ -0,0 +1,118 @@
import io
import os
import tempfile
import contextlib
import subprocess
import time
import functools
from subprocess import Popen, PIPE, TimeoutExpired
import zstandard as zstd
from openpilot.common.swaglog import cloudlog

LOG_COMPRESSION_LEVEL = 10  # little benefit up to level 15. level ~17 is a small step change


class CallbackReader:
  """Wraps a file, but overrides the read method to also
  call a callback function with the number of bytes read so far."""
  def __init__(self, f, callback, *args):
    self.f = f
    self.callback = callback
    self.cb_args = args
    self.total_read = 0

  def __getattr__(self, attr):
    return getattr(self.f, attr)

  def read(self, *args, **kwargs):
    chunk = self.f.read(*args, **kwargs)
    self.total_read += len(chunk)
    self.callback(*self.cb_args, self.total_read)
    return chunk


@contextlib.contextmanager
def atomic_write_in_dir(path: str, mode: str = 'w', buffering: int = -1, encoding: str | None = None, newline: str | None = None,
                        overwrite: bool = False):
  """Write to a file atomically using a temporary file in the same directory as the destination file."""
  dir_name = os.path.dirname(path)

  if not overwrite and os.path.exists(path):
    raise FileExistsError(f"File '{path}' already exists. To overwrite it, set 'overwrite' to True.")

  with tempfile.NamedTemporaryFile(mode=mode, buffering=buffering, encoding=encoding, newline=newline, dir=dir_name, delete=False) as tmp_file:
    yield tmp_file
    tmp_file_name = tmp_file.name
  os.replace(tmp_file_name, path)


def get_upload_stream(filepath: str, should_compress: bool) -> tuple[io.BufferedIOBase, int]:
  if not should_compress:
    file_size = os.path.getsize(filepath)
    file_stream = open(filepath, "rb")
    return file_stream, file_size

  # Compress the file on the fly
  compressed_stream = io.BytesIO()
  compressor = zstd.ZstdCompressor(level=LOG_COMPRESSION_LEVEL)

  with open(filepath, "rb") as f:
    compressor.copy_stream(f, compressed_stream)
    compressed_size = compressed_stream.tell()
    compressed_stream.seek(0)
    return compressed_stream, compressed_size


# remove all keys that end in DEPRECATED
def strip_deprecated_keys(d):
  for k in list(d.keys()):
    if isinstance(k, str):
      if k.endswith('DEPRECATED'):
        d.pop(k)
      elif isinstance(d[k], dict):
        strip_deprecated_keys(d[k])
  return d


def run_cmd(cmd: list[str], cwd=None, env=None) -> str:
  return subprocess.check_output(cmd, encoding='utf8', cwd=cwd, env=env).strip()


def run_cmd_default(cmd: list[str], default: str = "", cwd=None, env=None) -> str:
  try:
    return run_cmd(cmd, cwd=cwd, env=env)
  except subprocess.CalledProcessError:
    return default


@contextlib.contextmanager
def managed_proc(cmd: list[str], env: dict[str, str]):
  proc = Popen(cmd, env=env, stdout=PIPE, stderr=PIPE)
  try:
    yield proc
  finally:
    if proc.poll() is None:
      proc.terminate()
      try:
        proc.wait(timeout=5)
      except TimeoutExpired:
        proc.kill()


def retry(attempts=3, delay=1.0, ignore_failure=False):
  def decorator(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
      for _ in range(attempts):
        try:
          return func(*args, **kwargs)
        except Exception:
          cloudlog.exception(f"{func.__name__} failed, trying again")
          time.sleep(delay)

      if ignore_failure:
        cloudlog.error(f"{func.__name__} failed after retry")
      else:
        raise Exception(f"{func.__name__} failed after retry")
    return wrapper
  return decorator
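A minimal sketch of the retry decorator above (illustrative only, not part of the diff; flaky_fetch is a hypothetical function and an openpilot environment is assumed for the import): each failure is logged via cloudlog and retried after delay seconds, and with ignore_failure left False the final failure re-raises.

# Illustrative sketch -- flaky_fetch is a hypothetical example function.
from openpilot.common.utils import retry

calls = {"n": 0}

@retry(attempts=3, delay=0.1)
def flaky_fetch():
  calls["n"] += 1
  if calls["n"] < 3:
    raise RuntimeError("transient error")
  return "ok"

print(flaky_fetch())   # succeeds on the third attempt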
1
common/version.h
Normal file
@@ -0,0 +1 @@
#define COMMA_VERSION "0.9.9"
5
common/watchdog.h
Normal file
@@ -0,0 +1,5 @@
#pragma once

#include <cstdint>

bool watchdog_kick(uint64_t ts);