Release 260111
19  .clang-tidy  Normal file
@@ -0,0 +1,19 @@
---
Checks: '
  bugprone-*,
  -bugprone-integer-division,
  -bugprone-narrowing-conversions,
  performance-*,
  clang-analyzer-*,
  misc-*,
  -misc-unused-parameters,
  modernize-*,
  -modernize-avoid-c-arrays,
  -modernize-deprecated-headers,
  -modernize-use-auto,
  -modernize-use-using,
  -modernize-use-nullptr,
  -modernize-use-trailing-return-type,
'
CheckOptions:
...
39  .dockerignore  Normal file
@@ -0,0 +1,39 @@
**/.git
.DS_Store
*.dylib
*.DSYM
*.d
*.pyc
*.pyo
.*.swp
.*.swo
.*.un~
*.tmp
*.o
*.o-*
*.os
*.os-*
*.so
*.a

venv/
.venv/

notebooks
phone
massivemap
neos
installer
chffr/app2
chffr/backend/env
selfdrive/nav
selfdrive/baseui
selfdrive/test/simulator2
**/cache_data
xx/plus
xx/community
xx/projects
!xx/projects/eon_testing_master
!xx/projects/map3d
xx/ops
xx/junk
11  .editorconfig  Normal file
@@ -0,0 +1,11 @@
root = true

[*]
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

[{*.py, *.pyx, *.pxd}]
charset = utf-8
indent_style = space
indent_size = 2
112  .gitignore  vendored  Normal file
@@ -0,0 +1,112 @@
venv/
.venv/
.ci_cache
.env
.clang-format
.DS_Store
.tags
.ipynb_checkpoints
.idea
.overlay_init
.overlay_consistent
.sconsign.dblite
model2.png
a.out
.hypothesis

/docs_site/

*.dylib
*.DSYM
*.d
*.pyc
*.pyo
.*.swp
.*.swo
.*.un~
*.tmp
*.o
*.o-*
*.os
*.os-*
*.so
*.a
*.clb
*.class
*.pyxbldc
*.vcd
*.qm
*_pyx.cpp
config.json
clcache
compile_commands.json
compare_runtime*.html

persist
selfdrive/pandad/pandad
cereal/services.h
cereal/gen
cereal/messaging/bridge
selfdrive/logcatd/logcatd
selfdrive/mapd/default_speeds_by_region.json
system/proclogd/proclogd
selfdrive/ui/translations/alerts_generated.h
selfdrive/ui/translations/tmp
selfdrive/test/longitudinal_maneuvers/out
selfdrive/car/tests/cars_dump
system/camerad/camerad
system/camerad/test/ae_gray_test

notebooks
hyperthneed
provisioning

.coverage*
coverage.xml
htmlcov
pandaextra

.mypy_cache/
flycheck_*

cppcheck_report.txt
comma*.sh

selfdrive/modeld/thneed/compile
selfdrive/modeld/models/*.thneed
selfdrive/modeld/models/*.pkl

*.bz2
*.zst

build/

!**/.gitkeep

poetry.toml
Pipfile

### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets

# Local History for Visual Studio Code
.history/

# Built Visual Studio Code Extensions
*.vsix

### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide
.vs
opendbc_repo/opendbc/dbc/byd_han_2021.dbc
launch_ui_juggle.sh
opendbc_repo/opendbc/dbc/bydc3dbc.dbc
replay.sh
opendbc_repo/opendbc/dbc/byd_generic_pt_full.dbc
13  Dockerfile.openpilot  Normal file
@@ -0,0 +1,13 @@
FROM ghcr.io/commaai/openpilot-base:latest

ENV PYTHONUNBUFFERED=1

ENV OPENPILOT_PATH=/home/batman/openpilot
ENV PYTHONPATH=${OPENPILOT_PATH}:${PYTHONPATH}

RUN mkdir -p ${OPENPILOT_PATH}
WORKDIR ${OPENPILOT_PATH}

COPY . ${OPENPILOT_PATH}/

RUN scons --cache-readonly -j$(nproc)
81  Dockerfile.openpilot_base  Normal file
@@ -0,0 +1,81 @@
FROM ubuntu:24.04

ENV PYTHONUNBUFFERED=1

ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && \
    apt-get install -y --no-install-recommends sudo tzdata locales ssh pulseaudio xvfb x11-xserver-utils gnome-screenshot python3-tk python3-dev && \
    rm -rf /var/lib/apt/lists/*

RUN sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen
ENV LANG=en_US.UTF-8
ENV LANGUAGE=en_US:en
ENV LC_ALL=en_US.UTF-8

COPY tools/install_ubuntu_dependencies.sh /tmp/tools/
RUN /tmp/tools/install_ubuntu_dependencies.sh && \
    rm -rf /var/lib/apt/lists/* /tmp/* && \
    cd /usr/lib/gcc/arm-none-eabi/* && \
    rm -rf arm/ thumb/nofp thumb/v6* thumb/v8* thumb/v7+fp thumb/v7-r+fp.sp

# Add OpenCL
RUN apt-get update && apt-get install -y --no-install-recommends \
    apt-utils \
    alien \
    unzip \
    tar \
    curl \
    xz-utils \
    dbus \
    gcc-arm-none-eabi \
    tmux \
    vim \
    libx11-6 \
    wget \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir -p /tmp/opencl-driver-intel && \
    cd /tmp/opencl-driver-intel && \
    wget https://github.com/intel/llvm/releases/download/2024-WW14/oclcpuexp-2024.17.3.0.09_rel.tar.gz && \
    wget https://github.com/oneapi-src/oneTBB/releases/download/v2021.12.0/oneapi-tbb-2021.12.0-lin.tgz && \
    mkdir -p /opt/intel/oclcpuexp_2024.17.3.0.09_rel && \
    cd /opt/intel/oclcpuexp_2024.17.3.0.09_rel && \
    tar -zxvf /tmp/opencl-driver-intel/oclcpuexp-2024.17.3.0.09_rel.tar.gz && \
    mkdir -p /etc/OpenCL/vendors && \
    echo /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64/libintelocl.so > /etc/OpenCL/vendors/intel_expcpu.icd && \
    cd /opt/intel && \
    tar -zxvf /tmp/opencl-driver-intel/oneapi-tbb-2021.12.0-lin.tgz && \
    ln -s /opt/intel/oneapi-tbb-2021.12.0/lib/intel64/gcc4.8/libtbb.so /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 && \
    ln -s /opt/intel/oneapi-tbb-2021.12.0/lib/intel64/gcc4.8/libtbbmalloc.so /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 && \
    ln -s /opt/intel/oneapi-tbb-2021.12.0/lib/intel64/gcc4.8/libtbb.so.12 /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 && \
    ln -s /opt/intel/oneapi-tbb-2021.12.0/lib/intel64/gcc4.8/libtbbmalloc.so.2 /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 && \
    mkdir -p /etc/ld.so.conf.d && \
    echo /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 > /etc/ld.so.conf.d/libintelopenclexp.conf && \
    ldconfig -f /etc/ld.so.conf.d/libintelopenclexp.conf && \
    cd / && \
    rm -rf /tmp/opencl-driver-intel

ENV NVIDIA_VISIBLE_DEVICES=all
ENV NVIDIA_DRIVER_CAPABILITIES=graphics,utility,compute
ENV QTWEBENGINE_DISABLE_SANDBOX=1

RUN dbus-uuidgen > /etc/machine-id

ARG USER=batman
ARG USER_UID=1001
RUN useradd -m -s /bin/bash -u $USER_UID $USER
RUN usermod -aG sudo $USER
RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
USER $USER

COPY --chown=$USER pyproject.toml uv.lock /home/$USER
COPY --chown=$USER tools/install_python_dependencies.sh /home/$USER/tools/

ENV VIRTUAL_ENV=/home/$USER/.venv
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
RUN cd /home/$USER && \
    tools/install_python_dependencies.sh && \
    rm -rf tools/ pyproject.toml uv.lock .cache

USER root
RUN sudo git config --global --add safe.directory /tmp/openpilot
7  LICENSE  Normal file
@@ -0,0 +1,7 @@
Copyright (c) 2018, Comma.ai, Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
143  README.md  Normal file
@@ -0,0 +1,143 @@
## ⚠️ 법적 안내 / Legal Notice

🚫 대한민국 자동차관리법 개정안에 따라, 본 소프트웨어를 실제 차량에 장착하거나 주행에 사용하는 것은 법률에 위배될 수 있습니다.
이 저장소에 있는 모든 소프트웨어는 **연구, 실험, 시뮬레이션 목적**으로만 제공됩니다.
개발자는 본 소프트웨어의 실제 사용으로 인해 발생하는 **모든 법적 책임을 지지 않습니다.**

In accordance with the amended **Korean Motor Vehicle Management Act** (effective August 14, 2025),
**modifying or installing software that affects the safe operation of a vehicle** is prohibited.

This software is provided **for research and educational use only**.
The developer does **not take any responsibility** for real-world installation or usage.

**Carrotpilot에서 사용하는 차량(현대,기아)에 따라 Harness가 다릅니다.**
- CAN통신차량: Comma 정품 Harness, Camera에 연결
- CANFD-일반차량: Comma정품 Harness, Camera에 연결
- CANFD-HDA2(ADAS Module 장착)차량: 사제 Harness, ADAS Module에 연결
- 모든차량이 지원되는것이 아니니 반드시 확인바랍니다.

**In CarrotPilot, the harness used varies depending on the vehicle (HKG):**
* **CAN vehicles**: Use the official Comma harness, connected to the camera.
* **CAN FD (standard) vehicles**: Use the official Comma harness, connected to the camera.
* **CAN FD vehicles with HDA2 (ADAS module equipped)**: Use an aftermarket harness, connected to the ADAS module.
* Please note that not all vehicles are supported.


<div align="center" style="text-align: center;">

<h1>carrotpilot</h1>

<h3>
<a href="https://g4iwnl.gitbook.io/carrotpilot">Manual</a>
</h3>




<div align="center" style="text-align: center;">

<h1>openpilot</h1>

<p>
<b>openpilot is an operating system for robotics.</b>
<br>
Currently, it upgrades the driver assistance system in 275+ supported cars.
</p>

<h3>
<a href="https://docs.comma.ai">Docs</a>
<span> · </span>
<a href="https://docs.comma.ai/contributing/roadmap/">Roadmap</a>
<span> · </span>
<a href="https://github.com/commaai/openpilot/blob/master/docs/CONTRIBUTING.md">Contribute</a>
<span> · </span>
<a href="https://discord.comma.ai">Community</a>
<span> · </span>
<a href="https://comma.ai/shop">Try it on a comma 3X</a>
</h3>

Quick start: `bash <(curl -fsSL openpilot.comma.ai)`


[![codecov](https://codecov.io/gh/commaai/openpilot/branch/master/graph/badge.svg)](https://codecov.io/gh/commaai/openpilot)
[![license](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE)
[![x follow](https://img.shields.io/twitter/follow/comma_ai)](https://x.com/comma_ai)
[![discord](https://img.shields.io/discord/469524606043160576)](https://discord.comma.ai)

</div>

<table>
<tr>
<td><a href="https://youtu.be/NmBfgOanCyk" title="Video By Greer Viau"><img src="https://github.com/commaai/openpilot/assets/8762862/2f7112ae-f748-4f39-b617-fabd689c3772"></a></td>
<td><a href="https://youtu.be/VHKyqZ7t8Gw" title="Video By Logan LeGrand"><img src="https://github.com/commaai/openpilot/assets/8762862/92351544-2833-40d7-9e0b-7ef7ae37ec4c"></a></td>
<td><a href="https://youtu.be/SUIZYzxtMQs" title="A drive to Taco Bell"><img src="https://github.com/commaai/openpilot/assets/8762862/05ceefc5-2628-439c-a9b2-89ce77dc6f63"></a></td>
</tr>
</table>


Using openpilot in a car
------

To use openpilot in a car, you need four things:
1. **Supported Device:** a comma 3/3X, available at [comma.ai/shop](https://comma.ai/shop/comma-3x).
2. **Software:** The setup procedure for the comma 3/3X allows users to enter a URL for custom software. Use the URL `openpilot.comma.ai` to install the release version.
3. **Supported Car:** Ensure that you have one of [the 275+ supported cars](docs/CARS.md).
4. **Car Harness:** You will also need a [car harness](https://comma.ai/shop/car-harness) to connect your comma 3/3X to your car.

We have detailed instructions for [how to install the harness and device in a car](https://comma.ai/setup). Note that it's possible to run openpilot on [other hardware](https://blog.comma.ai/self-driving-car-for-free/), although it's not plug-and-play.

### Branches
| branch | URL | description |
|------------------|----------------------------------------|-------------------------------------------------------------------------------------|
| `release3` | openpilot.comma.ai | This is openpilot's release branch. |
| `release3-staging` | openpilot-test.comma.ai | This is the staging branch for releases. Use it to get new releases slightly early. |
| `nightly` | openpilot-nightly.comma.ai | This is the bleeding edge development branch. Do not expect this to be stable. |
| `nightly-dev` | installer.comma.ai/commaai/nightly-dev | Same as nightly, but includes experimental development features for some cars. |

To start developing openpilot
------

openpilot is developed by [comma](https://comma.ai/) and by users like you. We welcome both pull requests and issues on [GitHub](http://github.com/commaai/openpilot).

* Join the [community Discord](https://discord.comma.ai)
* Check out [the contributing docs](docs/CONTRIBUTING.md)
* Check out the [openpilot tools](tools/)
* Read about the [development workflow](docs/WORKFLOW.md)
* Code documentation lives at https://docs.comma.ai
* Information about running openpilot lives on the [community wiki](https://github.com/commaai/openpilot/wiki)

Want to get paid to work on openpilot? [comma is hiring](https://comma.ai/jobs#open-positions) and offers lots of [bounties](https://comma.ai/bounties) for external contributors.

Safety and Testing
----

* openpilot observes [ISO26262](https://en.wikipedia.org/wiki/ISO_26262) guidelines, see [SAFETY.md](docs/SAFETY.md) for more details.
* openpilot has software-in-the-loop [tests](.github/workflows/selfdrive_tests.yaml) that run on every commit.
* The code enforcing the safety model lives in panda and is written in C, see [code rigor](https://github.com/commaai/panda#code-rigor) for more details.
* panda has software-in-the-loop [safety tests](https://github.com/commaai/panda/tree/master/tests/safety).
* Internally, we have a hardware-in-the-loop Jenkins test suite that builds and unit tests the various processes.
* panda has additional hardware-in-the-loop [tests](https://github.com/commaai/panda/blob/master/Jenkinsfile).
* We run the latest openpilot in a testing closet containing 10 comma devices continuously replaying routes.

Licensing
------

openpilot is released under the MIT license. Some parts of the software are released under other licenses as specified.

Any user of this software shall indemnify and hold harmless Comma.ai, Inc. and its directors, officers, employees, agents, stockholders, affiliates, subcontractors and customers from and against all allegations, claims, actions, suits, demands, damages, liabilities, obligations, losses, settlements, judgments, costs and expenses (including without limitation attorneys’ fees and costs) which arise out of, relate to or result from any use of this software by user.

**THIS IS ALPHA QUALITY SOFTWARE FOR RESEARCH PURPOSES ONLY. THIS IS NOT A PRODUCT.
YOU ARE RESPONSIBLE FOR COMPLYING WITH LOCAL LAWS AND REGULATIONS.
NO WARRANTY EXPRESSED OR IMPLIED.**

User Data and comma Account
------

By default, openpilot uploads the driving data to our servers. You can also access your data through [comma connect](https://connect.comma.ai/). We use your data to train better models and improve openpilot for everyone.

openpilot is open source software: the user is free to disable data collection if they wish to do so.

openpilot logs the road-facing cameras, CAN, GPS, IMU, magnetometer, thermal sensors, crashes, and operating system logs.
The driver-facing camera is only logged if you explicitly opt-in in settings. The microphone is not recorded.

By using openpilot, you agree to [our Privacy Policy](https://comma.ai/privacy). You understand that use of this software or its related services will generate certain types of user data, which may be logged and stored at the sole discretion of comma. By accepting this agreement, you grant an irrevocable, perpetual, worldwide right to comma for the use of this data.
1235  RELEASES.md  Normal file
File diff suppressed because it is too large
5  SECURITY.md  Normal file
@@ -0,0 +1,5 @@
# Security Policy

## Reporting a Vulnerability

Suspected vulnerabilities can be reported to both `adeeb@comma.ai` and `security@comma.ai`.
1  apps/carrotman.txt  Normal file
@@ -0,0 +1 @@
http://shind0.synology.me/carrotman/CarrotMan51.apk
26  apps/외부네비사용시.txt  Normal file
@@ -0,0 +1,26 @@
Non-rooted phone setup

Install adb on your PC.

In a command prompt window, run:

adb shell pm grant com.ajouatom.carrotman android.permission.READ_LOGS


************ T-Navi setup...

Grant all permissions,

and turn on dashcam (black box) mode.

Go to the dashcam storage setting, set it to Download,

and create a Carrot folder there. (It must be created there.) Reason unknown..

Grant the storage permission there.

If you are not using the dashcam, turn it off again afterwards.


Works fine on rooted phones~
95  cereal/README.md  Normal file
@@ -0,0 +1,95 @@
# What is cereal?

cereal is the messaging system for openpilot. It uses [msgq](https://github.com/commaai/msgq) as a pub/sub backend, and [Cap'n proto](https://capnproto.org/capnp-tool.html) for serialization of the structs.


## Messaging Spec

You'll find the message types in [log.capnp](log.capnp). It uses [Cap'n proto](https://capnproto.org/capnp-tool.html) and defines one struct called `Event`.

All `Events` have a `logMonoTime` and a `valid`. Then a big union defines the packet type.
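
For orientation, here is a minimal sketch (not taken from the repo) of what that looks like from Python, using the `new_message` and `log_from_bytes` helpers in `cereal/messaging`; `sensorEvents` is simply the service used by the examples at the end of this README:

```python
import cereal.messaging as messaging

# build an Event: logMonoTime and valid live on the Event itself,
# the service name selects which member of the big union gets initialized
msg = messaging.new_message('sensorEvents', size=1)
msg.valid = True

# round-trip through bytes and inspect the union tag
parsed = messaging.log_from_bytes(msg.to_bytes())
print(parsed.logMonoTime, parsed.valid, parsed.which())  # which() == 'sensorEvents'
```
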
### Best Practices

- **All fields must describe quantities in SI units**, unless otherwise specified in the field name.
- In the context of the message they are in, field names should be completely unambiguous.
- All values should be easy to plot and be human-readable with minimal parsing.

### Maintaining backwards-compatibility

When making changes to the messaging spec you want to maintain backwards-compatibility, such that old logs can
be parsed with a new version of cereal. Adding structs and adding members to structs is generally safe, most other
things are not. Read more details [here](https://capnproto.org/language.html).

### Custom forks

Forks of [openpilot](https://github.com/commaai/openpilot) might want to add things to the messaging
spec, however this could conflict with future changes made in mainline cereal/openpilot. Rebasing against mainline openpilot
then means breaking backwards-compatibility with all old logs of your fork. So we added reserved events in
[custom.capnp](custom.capnp) that we will leave empty in mainline cereal/openpilot. **If you only modify those, you can ensure your
fork will remain backwards-compatible with all versions of mainline openpilot and your fork.**

An example of compatible changes:
```diff
diff --git a/cereal/custom.capnp b/cereal/custom.capnp
index 3348e859e..3365c7b98 100644
--- a/cereal/custom.capnp
+++ b/cereal/custom.capnp
@@ -10,7 +10,11 @@ $Cxx.namespace("cereal");
 # DO rename the structs
 # DON'T change the identifier (e.g. @0x81c2f05a394cf4af)

-struct CustomReserved0 @0x81c2f05a394cf4af {
+struct SteeringInfo @0x81c2f05a394cf4af {
+  active @0 :Bool;
+  steeringAngleDeg @1 :Float32;
+  steeringRateDeg @2 :Float32;
+  steeringAccelDeg @3 :Float32;
 }

 struct CustomReserved1 @0xaedffd8f31e7b55d {
diff --git a/cereal/log.capnp b/cereal/log.capnp
index 1209f3fd9..b189f58b6 100644
--- a/cereal/log.capnp
+++ b/cereal/log.capnp
@@ -2558,14 +2558,14 @@ struct Event {

   # DO change the name of the field
   # DON'T change anything after the "@"
-  customReservedRawData0 @124 :Data;
+  rawCanData @124 :Data;
   customReservedRawData1 @125 :Data;
   customReservedRawData2 @126 :Data;

   # DO change the name of the field and struct
   # DON'T change the ID (e.g. @107)
   # DON'T change which struct it points to
-  customReserved0 @107 :Custom.CustomReserved0;
+  steeringInfo @107 :Custom.SteeringInfo;
   customReserved1 @108 :Custom.CustomReserved1;
   customReserved2 @109 :Custom.CustomReserved2;
   customReserved3 @110 :Custom.CustomReserved3;
```

---

Example
---
```python
import cereal.messaging as messaging

# in subscriber
sm = messaging.SubMaster(['sensorEvents'])
while 1:
  sm.update()
  print(sm['sensorEvents'])

```

```python
# in publisher
pm = messaging.PubMaster(['sensorEvents'])
dat = messaging.new_message('sensorEvents', size=1)
dat.sensorEvents[0] = {"gyro": {"v": [0.1, -0.1, 0.1]}}
pm.send('sensorEvents', dat)
```
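
For quick experiments, both ends can also live in one script. A rough sketch (again assuming the `sensorEvents` service) that exercises the `SubMaster` bookkeeping (`updated`, `valid`, `all_checks`) defined in `cereal/messaging/__init__.py`:

```python
import time
import cereal.messaging as messaging

pm = messaging.PubMaster(['sensorEvents'])
sm = messaging.SubMaster(['sensorEvents'])

for _ in range(5):
  dat = messaging.new_message('sensorEvents', size=1)
  dat.valid = True
  pm.send('sensorEvents', dat)

  sm.update(100)                  # poll with a 100 ms timeout
  if sm.updated['sensorEvents']:  # True only on frames where a new message arrived
    print(sm.frame, sm.valid['sensorEvents'], sm['sensorEvents'])
  time.sleep(0.1)
```
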
11  cereal/__init__.py  Normal file
@@ -0,0 +1,11 @@
import os
import capnp
from importlib.resources import as_file, files

capnp.remove_import_hook()

with as_file(files("cereal")) as fspath:
  CEREAL_PATH = fspath.as_posix()
  log = capnp.load(os.path.join(CEREAL_PATH, "log.capnp"))
  car = capnp.load(os.path.join(CEREAL_PATH, "car.capnp"))
  custom = capnp.load(os.path.join(CEREAL_PATH, "custom.capnp"))
1  cereal/car.capnp  Symbolic link
@@ -0,0 +1 @@
../opendbc_repo/opendbc/car/car.capnp
101  cereal/custom.capnp  Normal file
@@ -0,0 +1,101 @@
using Cxx = import "./include/c++.capnp";
$Cxx.namespace("cereal");

@0xb526ba661d550a59;

# custom.capnp: a home for empty structs reserved for custom forks
# These structs are guaranteed to remain reserved and empty in mainline
# cereal, so use these if you want custom events in your fork.

# DO rename the structs
# DON'T change the identifier (e.g. @0x81c2f05a394cf4af)

# you can rename the struct, but don't change the identifier
struct CarrotMan @0x81c2f05a394cf4af {
  activeCarrot @0 : Int32;
  nRoadLimitSpeed @1 : Int32;
  remote @2 : Text;
  xSpdType @3 : Int32;
  xSpdLimit @4 : Int32;
  xSpdDist @5 : Int32;
  xSpdCountDown @6 : Int32;
  xTurnInfo @7 : Int32;
  xDistToTurn @8 : Int32;
  xTurnCountDown @9 : Int32;
  atcType @10 : Text;
  vTurnSpeed @11 : Int32;
  szPosRoadName @12 : Text;
  szTBTMainText @13 : Text;
  desiredSpeed @14 : Int32;
  desiredSource @15 : Text;
  carrotCmdIndex @16 : Int32;
  carrotCmd @17 : Text;
  carrotArg @18 : Text;
  xPosLat @19 : Float32;
  xPosLon @20 : Float32;
  xPosAngle @21 : Float32;
  xPosSpeed @22 : Float32;
  trafficState @23 : Int32;
  nGoPosDist @24 : Int32;
  nGoPosTime @25 : Int32;
  szSdiDescr @26 : Text;
  naviPaths @27 : Text;
  leftSec @28 : Int32;
}

struct CustomReserved1 @0xaedffd8f31e7b55d {
}

struct CustomReserved2 @0xf35cc4560bbf6ec2 {
}

struct CustomReserved3 @0xda96579883444c35 {
}

struct CustomReserved4 @0x80ae746ee2596b11 {
}

struct CustomReserved5 @0xa5cd762cd951a455 {
}

struct CustomReserved6 @0xf98d843bfd7004a3 {
}

struct CustomReserved7 @0xb86e6369214c01c8 {
}

struct CustomReserved8 @0xf416ec09499d9d19 {
}

struct CustomReserved9 @0xa1680744031fdb2d {
}

struct CustomReserved10 @0xcb9fd56c7057593a {
}

struct CustomReserved11 @0xc2243c65e0340384 {
}

struct CustomReserved12 @0x9ccdc8676701b412 {
}

struct CustomReserved13 @0xcd96dafb67a082d0 {
}

struct CustomReserved14 @0xb057204d7deadf3f {
}

struct CustomReserved15 @0xbd443b539493bc68 {
}

struct CustomReserved16 @0xfc6241ed8877b611 {
}

struct CustomReserved17 @0xa30662f84033036c {
}

struct CustomReserved18 @0xc86a3d38d13eb3ef {
}

struct CustomReserved19 @0xa4f1eb3323f5f582 {
}
8295  cereal/gen/cpp/car.capnp.c++  Normal file
File diff suppressed because it is too large
10537  cereal/gen/cpp/car.capnp.h  Normal file
File diff suppressed because it is too large
1238  cereal/gen/cpp/custom.capnp.c++  Normal file
File diff suppressed because it is too large
2550  cereal/gen/cpp/custom.capnp.h  Normal file
File diff suppressed because it is too large
6878  cereal/gen/cpp/legacy.capnp.c++  Normal file
File diff suppressed because it is too large
11415  cereal/gen/cpp/legacy.capnp.h  Normal file
File diff suppressed because it is too large
35295  cereal/gen/cpp/log.capnp.c++  Normal file
File diff suppressed because it is too large
60338  cereal/gen/cpp/log.capnp.h  Normal file
File diff suppressed because it is too large
26  cereal/include/c++.capnp  Normal file
@@ -0,0 +1,26 @@
# Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
# Licensed under the MIT License:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

@0xbdf87d7bb8304e81;
$namespace("capnp::annotations");

annotation namespace(file): Text;
annotation name(field, enumerant, struct, enum, interface, method, param, group, union): Text;
574  cereal/legacy.capnp  Normal file
@@ -0,0 +1,574 @@
using Cxx = import "./include/c++.capnp";
$Cxx.namespace("cereal");

@0x80ef1ec4889c2a63;

# legacy.capnp: a home for deprecated structs

struct LogRotate @0x9811e1f38f62f2d1 {
  segmentNum @0 :Int32;
  path @1 :Text;
}

struct LiveUI @0xc08240f996aefced {
  rearViewCam @0 :Bool;
  alertText1 @1 :Text;
  alertText2 @2 :Text;
  awarenessStatus @3 :Float32;
}

struct UiLayoutState @0x88dcce08ad29dda0 {
  activeApp @0 :App;
  sidebarCollapsed @1 :Bool;
  mapEnabled @2 :Bool;
  mockEngaged @3 :Bool;

  enum App @0x9917470acf94d285 {
    home @0;
    music @1;
    nav @2;
    settings @3;
    none @4;
  }
}

struct OrbslamCorrection @0x8afd33dc9b35e1aa {
  correctionMonoTime @0 :UInt64;
  prePositionECEF @1 :List(Float64);
  postPositionECEF @2 :List(Float64);
  prePoseQuatECEF @3 :List(Float32);
  postPoseQuatECEF @4 :List(Float32);
  numInliers @5 :UInt32;
}

struct EthernetPacket @0xa99a9d5b33cf5859 {
  pkt @0 :Data;
  ts @1 :Float32;
}

struct CellInfo @0xcff7566681c277ce {
  timestamp @0 :UInt64;
  repr @1 :Text; # android toString() for now
}

struct WifiScan @0xd4df5a192382ba0b {
  bssid @0 :Text;
  ssid @1 :Text;
  capabilities @2 :Text;
  frequency @3 :Int32;
  level @4 :Int32;
  timestamp @5 :Int64;

  centerFreq0 @6 :Int32;
  centerFreq1 @7 :Int32;
  channelWidth @8 :ChannelWidth;
  operatorFriendlyName @9 :Text;
  venueName @10 :Text;
  is80211mcResponder @11 :Bool;
  passpoint @12 :Bool;

  distanceCm @13 :Int32;
  distanceSdCm @14 :Int32;

  enum ChannelWidth @0xcb6a279f015f6b51 {
    w20Mhz @0;
    w40Mhz @1;
    w80Mhz @2;
    w160Mhz @3;
    w80Plus80Mhz @4;
  }
}

struct LiveEventData @0x94b7baa90c5c321e {
  name @0 :Text;
  value @1 :Int32;
}

struct ModelData @0xb8aad62cffef28a9 {
  frameId @0 :UInt32;
  frameAge @12 :UInt32;
  frameDropPerc @13 :Float32;
  timestampEof @9 :UInt64;
  modelExecutionTime @14 :Float32;
  gpuExecutionTime @16 :Float32;
  rawPred @15 :Data;

  path @1 :PathData;
  leftLane @2 :PathData;
  rightLane @3 :PathData;
  lead @4 :LeadData;
  freePath @6 :List(Float32);

  settings @5 :ModelSettings;
  leadFuture @7 :LeadData;
  speed @8 :List(Float32);
  meta @10 :MetaData;
  longitudinal @11 :LongitudinalData;

  struct PathData @0x8817eeea389e9f08 {
    points @0 :List(Float32);
    prob @1 :Float32;
    std @2 :Float32;
    stds @3 :List(Float32);
    poly @4 :List(Float32);
    validLen @5 :Float32;
  }

  struct LeadData @0xd1c9bef96d26fa91 {
    dist @0 :Float32;
    prob @1 :Float32;
    std @2 :Float32;
    relVel @3 :Float32;
    relVelStd @4 :Float32;
    relY @5 :Float32;
    relYStd @6 :Float32;
    relA @7 :Float32;
    relAStd @8 :Float32;
  }

  struct ModelSettings @0xa26e3710efd3e914 {
    bigBoxX @0 :UInt16;
    bigBoxY @1 :UInt16;
    bigBoxWidth @2 :UInt16;
    bigBoxHeight @3 :UInt16;
    boxProjection @4 :List(Float32);
    yuvCorrection @5 :List(Float32);
    inputTransform @6 :List(Float32);
  }

  struct MetaData @0x9744f25fb60f2bf8 {
    engagedProb @0 :Float32;
    desirePrediction @1 :List(Float32);
    brakeDisengageProb @2 :Float32;
    gasDisengageProb @3 :Float32;
    steerOverrideProb @4 :Float32;
    desireState @5 :List(Float32);
  }

  struct LongitudinalData @0xf98f999c6a071122 {
    distances @2 :List(Float32);
    speeds @0 :List(Float32);
    accelerations @1 :List(Float32);
  }
}

struct ECEFPoint @0xc25bbbd524983447 {
  x @0 :Float64;
  y @1 :Float64;
  z @2 :Float64;
}

struct ECEFPointDEPRECATED @0xe10e21168db0c7f7 {
  x @0 :Float32;
  y @1 :Float32;
  z @2 :Float32;
}

struct GPSPlannerPoints @0xab54c59699f8f9f3 {
  curPosDEPRECATED @0 :ECEFPointDEPRECATED;
  pointsDEPRECATED @1 :List(ECEFPointDEPRECATED);
  curPos @6 :ECEFPoint;
  points @7 :List(ECEFPoint);
  valid @2 :Bool;
  trackName @3 :Text;
  speedLimit @4 :Float32;
  accelTarget @5 :Float32;
}

struct GPSPlannerPlan @0xf5ad1d90cdc1dd6b {
  valid @0 :Bool;
  poly @1 :List(Float32);
  trackName @2 :Text;
  speed @3 :Float32;
  acceleration @4 :Float32;
  pointsDEPRECATED @5 :List(ECEFPointDEPRECATED);
  points @6 :List(ECEFPoint);
  xLookahead @7 :Float32;
}

struct UiNavigationEvent @0x90c8426c3eaddd3b {
  type @0: Type;
  status @1: Status;
  distanceTo @2: Float32;
  endRoadPointDEPRECATED @3: ECEFPointDEPRECATED;
  endRoadPoint @4: ECEFPoint;

  enum Type @0xe8db07dcf8fcea05 {
    none @0;
    laneChangeLeft @1;
    laneChangeRight @2;
    mergeLeft @3;
    mergeRight @4;
    turnLeft @5;
    turnRight @6;
  }

  enum Status @0xb9aa88c75ef99a1f {
    none @0;
    passive @1;
    approaching @2;
    active @3;
  }
}

struct LiveLocationData @0xb99b2bc7a57e8128 {
  status @0 :UInt8;

  # 3D fix
  lat @1 :Float64;
  lon @2 :Float64;
  alt @3 :Float32; # m

  # speed
  speed @4 :Float32; # m/s

  # NED velocity components
  vNED @5 :List(Float32);

  # roll, pitch, heading (x,y,z)
  roll @6 :Float32; # WRT to center of earth?
  pitch @7 :Float32; # WRT to center of earth?
  heading @8 :Float32; # WRT to north?

  # what are these?
  wanderAngle @9 :Float32;
  trackAngle @10 :Float32;

  # car frame -- https://upload.wikimedia.org/wikipedia/commons/f/f5/RPY_angles_of_cars.png

  # gyro, in car frame, deg/s
  gyro @11 :List(Float32);

  # accel, in car frame, m/s^2
  accel @12 :List(Float32);

  accuracy @13 :Accuracy;

  source @14 :SensorSource;
  # if we are fixing a location in the past
  fixMonoTime @15 :UInt64;

  gpsWeek @16 :Int32;
  timeOfWeek @17 :Float64;

  positionECEF @18 :List(Float64);
  poseQuatECEF @19 :List(Float32);
  pitchCalibration @20 :Float32;
  yawCalibration @21 :Float32;
  imuFrame @22 :List(Float32);

  struct Accuracy @0x943dc4625473b03f {
    pNEDError @0 :List(Float32);
    vNEDError @1 :List(Float32);
    rollError @2 :Float32;
    pitchError @3 :Float32;
    headingError @4 :Float32;
    ellipsoidSemiMajorError @5 :Float32;
    ellipsoidSemiMinorError @6 :Float32;
    ellipsoidOrientationError @7 :Float32;
  }

  enum SensorSource @0xc871d3cc252af657 {
    applanix @0;
    kalman @1;
    orbslam @2;
    timing @3;
    dummy @4;
  }
}

struct OrbOdometry @0xd7700859ed1f5b76 {
  # timing first
  startMonoTime @0 :UInt64;
  endMonoTime @1 :UInt64;

  # fundamental matrix and error
  f @2: List(Float64);
  err @3: Float64;

  # number of inlier points
  inliers @4: Int32;

  # for debug only
  # indexed by endMonoTime features
  # value is startMonoTime feature match
  # -1 if no match
  matches @5: List(Int16);
}

struct OrbFeatures @0xcd60164a8a0159ef {
  timestampEof @0 :UInt64;
  # transposed arrays of normalized image coordinates
  # len(xs) == len(ys) == len(descriptors) * 32
  xs @1 :List(Float32);
  ys @2 :List(Float32);
  descriptors @3 :Data;
  octaves @4 :List(Int8);

  # match index to last OrbFeatures
  # -1 if no match
  timestampLastEof @5 :UInt64;
  matches @6: List(Int16);
}

struct OrbFeaturesSummary @0xd500d30c5803fa4f {
  timestampEof @0 :UInt64;
  timestampLastEof @1 :UInt64;

  featureCount @2 :UInt16;
  matchCount @3 :UInt16;
  computeNs @4 :UInt64;
}

struct OrbKeyFrame @0xc8233c0345e27e24 {
  # this is a globally unique id for the KeyFrame
  id @0: UInt64;

  # this is the location of the KeyFrame
  pos @1: ECEFPoint;

  # these are the features in the world
  # len(dpos) == len(descriptors) * 32
  dpos @2 :List(ECEFPoint);
  descriptors @3 :Data;
}

struct KalmanOdometry @0x92e21bb7ea38793a {
  trans @0 :List(Float32); # m/s in device frame
  rot @1 :List(Float32); # rad/s in device frame
  transStd @2 :List(Float32); # std m/s in device frame
  rotStd @3 :List(Float32); # std rad/s in device frame
}

struct OrbObservation @0x9b326d4e436afec7 {
  observationMonoTime @0 :UInt64;
  normalizedCoordinates @1 :List(Float32);
  locationECEF @2 :List(Float64);
  matchDistance @3: UInt32;
}

struct CalibrationFeatures @0x8fdfadb254ea867a {
  frameId @0 :UInt32;

  p0 @1 :List(Float32);
  p1 @2 :List(Float32);
  status @3 :List(Int8);
}

struct NavStatus @0xbd8822120928120c {
  isNavigating @0 :Bool;
  currentAddress @1 :Address;

  struct Address @0xce7cd672cacc7814 {
    title @0 :Text;
    lat @1 :Float64;
    lng @2 :Float64;
    house @3 :Text;
    address @4 :Text;
    street @5 :Text;
    city @6 :Text;
    state @7 :Text;
    country @8 :Text;
  }
}

struct NavUpdate @0xdb98be6565516acb {
  isNavigating @0 :Bool;
  curSegment @1 :Int32;
  segments @2 :List(Segment);

  struct LatLng @0x9eaef9187cadbb9b {
    lat @0 :Float64;
    lng @1 :Float64;
  }

  struct Segment @0xa5b39b4fc4d7da3f {
    from @0 :LatLng;
    to @1 :LatLng;
    updateTime @2 :Int32;
    distance @3 :Int32;
    crossTime @4 :Int32;
    exitNo @5 :Int32;
    instruction @6 :Instruction;

    parts @7 :List(LatLng);

    enum Instruction @0xc5417a637451246f {
      turnLeft @0;
      turnRight @1;
      keepLeft @2;
      keepRight @3;
      straight @4;
      roundaboutExitNumber @5;
      roundaboutExit @6;
      roundaboutTurnLeft @7;
      unkn8 @8;
      roundaboutStraight @9;
      unkn10 @10;
      roundaboutTurnRight @11;
      unkn12 @12;
      roundaboutUturn @13;
      unkn14 @14;
      arrive @15;
      exitLeft @16;
      exitRight @17;
      unkn18 @18;
      uturn @19;
      # ...
    }
  }
}

struct TrafficEvent @0xacfa74a094e62626 {
  type @0 :Type;
  distance @1 :Float32;
  action @2 :Action;
  resuming @3 :Bool;

  enum Type @0xd85d75253435bf4b {
    stopSign @0;
    lightRed @1;
    lightYellow @2;
    lightGreen @3;
    stopLight @4;
  }

  enum Action @0xa6f6ce72165ccb49 {
    none @0;
    yield @1;
    stop @2;
    resumeReady @3;
  }

}


struct AndroidGnss @0xdfdf30d03fc485bd {
  union {
    measurements @0 :Measurements;
    navigationMessage @1 :NavigationMessage;
  }

  struct Measurements @0xa20710d4f428d6cd {
    clock @0 :Clock;
    measurements @1 :List(Measurement);

    struct Clock @0xa0e27b453a38f450 {
      timeNanos @0 :Int64;
      hardwareClockDiscontinuityCount @1 :Int32;

      hasTimeUncertaintyNanos @2 :Bool;
      timeUncertaintyNanos @3 :Float64;

      hasLeapSecond @4 :Bool;
      leapSecond @5 :Int32;

      hasFullBiasNanos @6 :Bool;
      fullBiasNanos @7 :Int64;

      hasBiasNanos @8 :Bool;
      biasNanos @9 :Float64;

      hasBiasUncertaintyNanos @10 :Bool;
      biasUncertaintyNanos @11 :Float64;

      hasDriftNanosPerSecond @12 :Bool;
      driftNanosPerSecond @13 :Float64;

      hasDriftUncertaintyNanosPerSecond @14 :Bool;
      driftUncertaintyNanosPerSecond @15 :Float64;
    }

    struct Measurement @0xd949bf717d77614d {
      svId @0 :Int32;
      constellation @1 :Constellation;

      timeOffsetNanos @2 :Float64;
      state @3 :Int32;
      receivedSvTimeNanos @4 :Int64;
      receivedSvTimeUncertaintyNanos @5 :Int64;
      cn0DbHz @6 :Float64;
      pseudorangeRateMetersPerSecond @7 :Float64;
      pseudorangeRateUncertaintyMetersPerSecond @8 :Float64;
      accumulatedDeltaRangeState @9 :Int32;
      accumulatedDeltaRangeMeters @10 :Float64;
      accumulatedDeltaRangeUncertaintyMeters @11 :Float64;

      hasCarrierFrequencyHz @12 :Bool;
      carrierFrequencyHz @13 :Float32;
      hasCarrierCycles @14 :Bool;
      carrierCycles @15 :Int64;
      hasCarrierPhase @16 :Bool;
      carrierPhase @17 :Float64;
      hasCarrierPhaseUncertainty @18 :Bool;
      carrierPhaseUncertainty @19 :Float64;
      hasSnrInDb @20 :Bool;
      snrInDb @21 :Float64;

      multipathIndicator @22 :MultipathIndicator;

      enum Constellation @0x9ef1f3ff0deb5ffb {
        unknown @0;
        gps @1;
        sbas @2;
        glonass @3;
        qzss @4;
        beidou @5;
        galileo @6;
      }

      enum State @0xcbb9490adce12d72 {
        unknown @0;
        codeLock @1;
        bitSync @2;
        subframeSync @3;
        towDecoded @4;
        msecAmbiguous @5;
        symbolSync @6;
        gloStringSync @7;
        gloTodDecoded @8;
        bdsD2BitSync @9;
        bdsD2SubframeSync @10;
        galE1bcCodeLock @11;
        galE1c2ndCodeLock @12;
        galE1bPageSync @13;
        sbasSync @14;
      }

      enum MultipathIndicator @0xc04e7b6231d4caa8 {
        unknown @0;
        detected @1;
        notDetected @2;
      }
    }
  }

  struct NavigationMessage @0xe2517b083095fd4e {
    type @0 :Int32;
    svId @1 :Int32;
    messageId @2 :Int32;
    submessageId @3 :Int32;
    data @4 :Data;
    status @5 :Status;

    enum Status @0xec1ff7996b35366f {
      unknown @0;
      parityPassed @1;
      parityRebuilt @2;
    }
  }
}

struct LidarPts @0xe3d6685d4e9d8f7a {
  r @0 :List(UInt16); # uint16 m*500.0
  theta @1 :List(UInt16); # uint16 deg*100.0
  reflect @2 :List(UInt8); # uint8 0-255

  # For storing out of file.
  idx @3 :UInt64;

  # For storing in file
  pkt @4 :Data;
}
2748  cereal/log.capnp  Normal file
File diff suppressed because it is too large
257  cereal/messaging/__init__.py  Normal file
@@ -0,0 +1,257 @@
# must be built with scons
from msgq.ipc_pyx import Context, Poller, SubSocket, PubSocket, SocketEventHandle, toggle_fake_events, \
                         set_fake_prefix, get_fake_prefix, delete_fake_prefix, wait_for_one_event
from msgq.ipc_pyx import MultiplePublishersError, IpcError
from msgq import fake_event_handle, pub_sock, sub_sock, drain_sock_raw
import msgq

import os
import capnp
import time

from typing import Optional, List, Union, Dict

from cereal import log
from cereal.services import SERVICE_LIST
from openpilot.common.util import MovingAverage

NO_TRAVERSAL_LIMIT = 2**64-1


def reset_context():
  msgq.context = Context()


def log_from_bytes(dat: bytes, struct: capnp.lib.capnp._StructModule = log.Event) -> capnp.lib.capnp._DynamicStructReader:
  with struct.from_bytes(dat, traversal_limit_in_words=NO_TRAVERSAL_LIMIT) as msg:
    return msg


def new_message(service: Optional[str], size: Optional[int] = None, **kwargs) -> capnp.lib.capnp._DynamicStructBuilder:
  args = {
    'valid': False,
    'logMonoTime': int(time.monotonic() * 1e9),
    **kwargs
  }
  dat = log.Event.new_message(**args)
  if service is not None:
    if size is None:
      dat.init(service)
    else:
      dat.init(service, size)
  return dat


def drain_sock(sock: SubSocket, wait_for_one: bool = False) -> List[capnp.lib.capnp._DynamicStructReader]:
  """Receive all message currently available on the queue"""
  msgs = drain_sock_raw(sock, wait_for_one=wait_for_one)
  return [log_from_bytes(m) for m in msgs]


# TODO: print when we drop packets?
def recv_sock(sock: SubSocket, wait: bool = False) -> Optional[capnp.lib.capnp._DynamicStructReader]:
  """Same as drain sock, but only returns latest message. Consider using conflate instead."""
  dat = None

  while 1:
    if wait and dat is None:
      recv = sock.receive()
    else:
      recv = sock.receive(non_blocking=True)

    if recv is None:  # Timeout hit
      break

    dat = recv

  if dat is not None:
    dat = log_from_bytes(dat)

  return dat


def recv_one(sock: SubSocket) -> Optional[capnp.lib.capnp._DynamicStructReader]:
  dat = sock.receive()
  if dat is not None:
    dat = log_from_bytes(dat)
  return dat


def recv_one_or_none(sock: SubSocket) -> Optional[capnp.lib.capnp._DynamicStructReader]:
  dat = sock.receive(non_blocking=True)
  if dat is not None:
    dat = log_from_bytes(dat)
  return dat


def recv_one_retry(sock: SubSocket) -> capnp.lib.capnp._DynamicStructReader:
  """Keep receiving until we get a message"""
  while True:
    dat = sock.receive()
    if dat is not None:
      return log_from_bytes(dat)


class FrequencyTracker:
  def __init__(self, service_freq: float, update_freq: float, is_poll: bool):
    freq = max(min(service_freq, update_freq), 1.)
    if is_poll:
      min_freq = max_freq = freq
    else:
      max_freq = min(freq, update_freq)
      if service_freq >= 2 * update_freq:
        min_freq = update_freq
      elif update_freq >= 2 * service_freq:
        min_freq = freq
      else:
        min_freq = min(freq, freq / 2.)

    self.min_freq = min_freq * 0.8
    self.max_freq = max_freq * 1.2
    self.avg_dt = MovingAverage(int(10 * freq))
    self.recent_avg_dt = MovingAverage(int(freq))
    self.prev_time = 0.0

  def record_recv_time(self, cur_time: float) -> None:
    # TODO: Handle case where cur_time is less than prev_time
    if self.prev_time > 1e-5:
      dt = cur_time - self.prev_time

      self.avg_dt.add_value(dt)
      self.recent_avg_dt.add_value(dt)

    self.prev_time = cur_time

  @property
  def valid(self) -> bool:
    if self.avg_dt.count == 0:
      return False

    avg_freq = 1.0 / self.avg_dt.get_average()
    if self.min_freq <= avg_freq <= self.max_freq:
      return True

    avg_freq_recent = 1.0 / self.recent_avg_dt.get_average()
    return self.min_freq <= avg_freq_recent <= self.max_freq


class SubMaster:
  def __init__(self, services: List[str], poll: Optional[str] = None,
               ignore_alive: Optional[List[str]] = None, ignore_avg_freq: Optional[List[str]] = None,
               ignore_valid: Optional[List[str]] = None, addr: str = "127.0.0.1", frequency: Optional[float] = None):
    self.frame = -1
    self.services = services
    self.seen = {s: False for s in services}
    self.updated = {s: False for s in services}
    self.recv_time = {s: 0. for s in services}
    self.recv_frame = {s: 0 for s in services}
    self.sock = {}
    self.data = {}
    self.logMonoTime = {s: 0 for s in services}

    # zero-frequency / on-demand services are always alive and presumed valid; all others must pass checks
    on_demand = {s: SERVICE_LIST[s].frequency <= 1e-5 for s in services}
    self.static_freq_services = set(s for s in services if not on_demand[s])
    self.alive = {s: on_demand[s] for s in services}
    self.freq_ok = {s: on_demand[s] for s in services}
    self.valid = {s: on_demand[s] for s in services}

    self.freq_tracker: Dict[str, FrequencyTracker] = {}
    self.poller = Poller()
    polled_services = set([poll, ] if poll is not None else services)
    self.non_polled_services = set(services) - polled_services

    self.ignore_average_freq = [] if ignore_avg_freq is None else ignore_avg_freq
    self.ignore_alive = [] if ignore_alive is None else ignore_alive
    self.ignore_valid = [] if ignore_valid is None else ignore_valid

    self.simulation = bool(int(os.getenv("SIMULATION", "0")))

    # if freq and poll aren't specified, assume the max to be conservative
    assert frequency is None or poll is None, "Do not specify 'frequency' - frequency of the polled service will be used."
    self.update_freq = frequency or max([SERVICE_LIST[s].frequency for s in polled_services])

    for s in services:
      p = self.poller if s not in self.non_polled_services else None
      self.sock[s] = sub_sock(s, poller=p, addr=addr, conflate=True)

      try:
        data = new_message(s)
      except capnp.lib.capnp.KjException:
        data = new_message(s, 0)  # lists

      self.data[s] = getattr(data.as_reader(), s)
      self.freq_tracker[s] = FrequencyTracker(SERVICE_LIST[s].frequency, self.update_freq, s == poll)

  def __getitem__(self, s: str) -> capnp.lib.capnp._DynamicStructReader:
    return self.data[s]

  def _check_avg_freq(self, s: str) -> bool:
    return SERVICE_LIST[s].frequency > 0.99 and (s not in self.ignore_average_freq) and (s not in self.ignore_alive)

  def update(self, timeout: int = 100) -> None:
    msgs = []
    for sock in self.poller.poll(timeout):
      msgs.append(recv_one_or_none(sock))

    # non-blocking receive for non-polled sockets
    for s in self.non_polled_services:
      msgs.append(recv_one_or_none(self.sock[s]))
    self.update_msgs(time.monotonic(), msgs)

  def update_msgs(self, cur_time: float, msgs: List[capnp.lib.capnp._DynamicStructReader]) -> None:
    self.frame += 1
    self.updated = dict.fromkeys(self.services, False)
    for msg in msgs:
      if msg is None:
        continue

      s = msg.which()
      self.seen[s] = True
      self.updated[s] = True

      self.freq_tracker[s].record_recv_time(cur_time)
      self.recv_time[s] = cur_time
      self.recv_frame[s] = self.frame
      self.data[s] = getattr(msg, s)
      self.logMonoTime[s] = msg.logMonoTime
      self.valid[s] = msg.valid

    for s in self.static_freq_services:
      # alive if delay is within 10x the expected frequency; checks relaxed in simulator
      self.alive[s] = (cur_time - self.recv_time[s]) < (10. / SERVICE_LIST[s].frequency) or (self.seen[s] and self.simulation)
      self.freq_ok[s] = self.freq_tracker[s].valid or self.simulation

  def all_alive(self, service_list: Optional[List[str]] = None) -> bool:
    return all(self.alive[s] for s in (service_list or self.services) if s not in self.ignore_alive)

  def all_freq_ok(self, service_list: Optional[List[str]] = None) -> bool:
    return all(self.freq_ok[s] for s in (service_list or self.services) if self._check_avg_freq(s))

  def all_valid(self, service_list: Optional[List[str]] = None) -> bool:
    return all(self.valid[s] for s in (service_list or self.services) if s not in self.ignore_valid)

  def all_checks(self, service_list: Optional[List[str]] = None) -> bool:
    return self.all_alive(service_list) and self.all_freq_ok(service_list) and self.all_valid(service_list)


class PubMaster:
  def __init__(self, services: List[str]):
    self.sock = {}
    for s in services:
      self.sock[s] = pub_sock(s)

  def send(self, s: str, dat: Union[bytes, capnp.lib.capnp._DynamicStructBuilder]) -> None:
    if not isinstance(dat, bytes):
      dat = dat.to_bytes()
    self.sock[s].send(dat)

  def wait_for_readers_to_update(self, s: str, timeout: int, dt: float = 0.05) -> bool:
    for _ in range(int(timeout*(1./dt))):
      if self.sock[s].all_readers_updated():
        return True
      time.sleep(dt)
    return False

  def all_readers_updated(self, s: str) -> bool:
    return self.sock[s].all_readers_updated()  # type: ignore
BIN  cereal/messaging/bridge  Executable file
Binary file not shown.
102  cereal/messaging/messaging.h  Normal file
@@ -0,0 +1,102 @@
#pragma once

#include <cstddef>
#include <map>
#include <string>
#include <vector>
#include <utility>

#include <capnp/serialize.h>

#include "cereal/gen/cpp/log.capnp.h"
#include "common/timing.h"
#include "msgq/ipc.h"

class SubMaster {
public:
  SubMaster(const std::vector<const char *> &service_list, const std::vector<const char *> &poll = {},
            const char *address = nullptr, const std::vector<const char *> &ignore_alive = {});
  void update(int timeout = 1000);
  void update_msgs(uint64_t current_time, const std::vector<std::pair<std::string, cereal::Event::Reader>> &messages);
  inline bool allAlive(const std::vector<const char *> &service_list = {}) { return all_(service_list, false, true); }
  inline bool allValid(const std::vector<const char *> &service_list = {}) { return all_(service_list, true, false); }
  inline bool allAliveAndValid(const std::vector<const char *> &service_list = {}) { return all_(service_list, true, true); }
  void drain();
  ~SubMaster();

  uint64_t frame = 0;
  bool updated(const char *name) const;
  bool alive(const char *name) const;
  bool valid(const char *name) const;
  uint64_t rcv_frame(const char *name) const;
  uint64_t rcv_time(const char *name) const;
  cereal::Event::Reader &operator[](const char *name) const;

private:
  bool all_(const std::vector<const char *> &service_list, bool valid, bool alive);
  Poller *poller_ = nullptr;
  struct SubMessage;
  std::map<SubSocket *, SubMessage *> messages_;
  std::map<std::string, SubMessage *> services_;
};

class MessageBuilder : public capnp::MallocMessageBuilder {
public:
  MessageBuilder() = default;

  cereal::Event::Builder initEvent(bool valid = true) {
    cereal::Event::Builder event = initRoot<cereal::Event>();
    event.setLogMonoTime(nanos_since_boot());
    event.setValid(valid);
    return event;
  }

  kj::ArrayPtr<capnp::byte> toBytes() {
    heapArray_ = capnp::messageToFlatArray(*this);
    return heapArray_.asBytes();
  }

  size_t getSerializedSize() {
    return capnp::computeSerializedSizeInWords(*this) * sizeof(capnp::word);
  }

  int serializeToBuffer(unsigned char *buffer, size_t buffer_size) {
    size_t serialized_size = getSerializedSize();
    if (serialized_size > buffer_size) { return -1; }
    kj::ArrayOutputStream out(kj::ArrayPtr<capnp::byte>(buffer, buffer_size));
    capnp::writeMessage(out, *this);
    return serialized_size;
  }

private:
  kj::Array<capnp::word> heapArray_;
};

class PubMaster {
public:
  PubMaster(const std::vector<const char *> &service_list);
  inline int send(const char *name, capnp::byte *data, size_t size) { return sockets_.at(name)->send((char *)data, size); }
  int send(const char *name, MessageBuilder &msg);
  ~PubMaster();

private:
  std::map<std::string, PubSocket *> sockets_;
};

class AlignedBuffer {
public:
  kj::ArrayPtr<const capnp::word> align(const char *data, const size_t size) {
    words_size = size / sizeof(capnp::word) + 1;
    if (aligned_buf.size() < words_size) {
      aligned_buf = kj::heapArray<capnp::word>(words_size < 512 ? 512 : words_size);
    }
    memcpy(aligned_buf.begin(), data, size);
    return aligned_buf.slice(0, words_size);
  }
  inline kj::ArrayPtr<const capnp::word> align(Message *m) {
    return align(m->getData(), m->getSize());
  }
private:
  kj::Array<capnp::word> aligned_buf;
  size_t words_size;
};
37
cereal/messaging/msgq_to_zmq.h
Normal file
37
cereal/messaging/msgq_to_zmq.h
Normal file
@@ -0,0 +1,37 @@
|
||||
#pragma once
|
||||
|
||||
#include <condition_variable>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <mutex>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#define private public  // NOTE: deliberate hack so the bridge can reach into the msgq/zmq impl internals
|
||||
#include "msgq/impl_msgq.h"
|
||||
#include "msgq/impl_zmq.h"
|
||||
|
||||
class MsgqToZmq {
|
||||
public:
|
||||
MsgqToZmq() {}
|
||||
void run(const std::vector<std::string> &endpoints, const std::string &ip);
|
||||
|
||||
protected:
|
||||
void registerSockets();
|
||||
void zmqMonitorThread();
|
||||
|
||||
struct SocketPair {
|
||||
std::string endpoint;
|
||||
std::unique_ptr<ZMQPubSocket> pub_sock;
|
||||
std::unique_ptr<MSGQSubSocket> sub_sock;
|
||||
int connected_clients = 0;
|
||||
};
|
||||
|
||||
std::unique_ptr<MSGQContext> msgq_context;
|
||||
std::unique_ptr<ZMQContext> zmq_context;
|
||||
std::mutex mutex;
|
||||
std::condition_variable cv;
|
||||
std::unique_ptr<MSGQPoller> msgq_poller;
|
||||
std::map<SubSocket *, ZMQPubSocket *> sub2pub;
|
||||
std::vector<SocketPair> socket_pairs;
|
||||
};
|
||||
0
cereal/messaging/tests/__init__.py
Normal file
0
cereal/messaging/tests/__init__.py
Normal file
185
cereal/messaging/tests/test_messaging.py
Normal file
185
cereal/messaging/tests/test_messaging.py
Normal file
@@ -0,0 +1,185 @@
|
||||
import os
|
||||
import capnp
|
||||
import multiprocessing
|
||||
import numbers
|
||||
import random
|
||||
import threading
|
||||
import time
|
||||
from parameterized import parameterized
|
||||
import pytest
|
||||
|
||||
from cereal import log, car
|
||||
import cereal.messaging as messaging
|
||||
from cereal.services import SERVICE_LIST
|
||||
|
||||
events = [evt for evt in log.Event.schema.union_fields if evt in SERVICE_LIST.keys()]
|
||||
|
||||
def random_sock():
|
||||
return random.choice(events)
|
||||
|
||||
def random_socks(num_socks=10):
|
||||
return list({random_sock() for _ in range(num_socks)})
|
||||
|
||||
def random_bytes(length=1000):
|
||||
return bytes([random.randrange(0xFF) for _ in range(length)])
|
||||
|
||||
def zmq_sleep(t=1):
|
||||
if "ZMQ" in os.environ:
|
||||
time.sleep(t)
|
||||
|
||||
|
||||
# TODO: this should take any capnp struct and return a msg with randomly populated data
|
||||
def random_carstate():
|
||||
fields = ["vEgo", "aEgo", "gas", "steeringAngleDeg"]
|
||||
msg = messaging.new_message("carState")
|
||||
cs = msg.carState
|
||||
for f in fields:
|
||||
setattr(cs, f, random.random() * 10)
|
||||
return msg
|
||||
|
||||
# TODO: this should compare any capnp structs
|
||||
def assert_carstate(cs1, cs2):
|
||||
for f in car.CarState.schema.non_union_fields:
|
||||
# TODO: check all types
|
||||
val1, val2 = getattr(cs1, f), getattr(cs2, f)
|
||||
if isinstance(val1, numbers.Number):
|
||||
assert val1 == val2, f"{f}: sent '{val1}' vs recvd '{val2}'"
|
||||
|
||||
def delayed_send(delay, sock, dat):
|
||||
def send_func():
|
||||
sock.send(dat)
|
||||
threading.Timer(delay, send_func).start()
|
||||
|
||||
|
||||
class TestMessaging:
|
||||
def setup_method(self):
|
||||
# TODO: ZMQ tests are too slow; all sleeps will need to be
|
||||
# replaced with logic to block on the necessary condition
|
||||
if "ZMQ" in os.environ:
|
||||
pytest.skip()
|
||||
|
||||
# ZMQ pub socket takes too long to die
|
||||
# sleep to prevent multiple publishers error between tests
|
||||
zmq_sleep()
|
||||
|
||||
@parameterized.expand(events)
|
||||
def test_new_message(self, evt):
|
||||
try:
|
||||
msg = messaging.new_message(evt)
|
||||
except capnp.lib.capnp.KjException:
|
||||
msg = messaging.new_message(evt, random.randrange(200))
|
||||
assert (time.monotonic() - msg.logMonoTime) < 0.1
|
||||
assert not msg.valid
|
||||
assert evt == msg.which()
|
||||
|
||||
@parameterized.expand(events)
|
||||
def test_pub_sock(self, evt):
|
||||
messaging.pub_sock(evt)
|
||||
|
||||
@parameterized.expand(events)
|
||||
def test_sub_sock(self, evt):
|
||||
messaging.sub_sock(evt)
|
||||
|
||||
@parameterized.expand([
|
||||
(messaging.drain_sock, capnp._DynamicStructReader),
|
||||
(messaging.drain_sock_raw, bytes),
|
||||
])
|
||||
def test_drain_sock(self, func, expected_type):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sub_sock = messaging.sub_sock(sock, timeout=1000)
|
||||
zmq_sleep()
|
||||
|
||||
# no wait and no msgs in queue
|
||||
msgs = func(sub_sock)
|
||||
assert isinstance(msgs, list)
|
||||
assert len(msgs) == 0
|
||||
|
||||
# no wait but msgs are queued up
|
||||
num_msgs = random.randrange(3, 10)
|
||||
for _ in range(num_msgs):
|
||||
pub_sock.send(messaging.new_message(sock).to_bytes())
|
||||
time.sleep(0.1)
|
||||
msgs = func(sub_sock)
|
||||
assert isinstance(msgs, list)
|
||||
assert all(isinstance(msg, expected_type) for msg in msgs)
|
||||
assert len(msgs) == num_msgs
|
||||
|
||||
def test_recv_sock(self):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sub_sock = messaging.sub_sock(sock, timeout=100)
|
||||
zmq_sleep()
|
||||
|
||||
# no wait and no msg in queue, socket should timeout
|
||||
recvd = messaging.recv_sock(sub_sock)
|
||||
assert recvd is None
|
||||
|
||||
# no wait and one msg in queue
|
||||
msg = random_carstate()
|
||||
pub_sock.send(msg.to_bytes())
|
||||
time.sleep(0.01)
|
||||
recvd = messaging.recv_sock(sub_sock)
|
||||
assert isinstance(recvd, capnp._DynamicStructReader)
|
||||
# https://github.com/python/mypy/issues/13038
|
||||
assert_carstate(msg.carState, recvd.carState)
|
||||
|
||||
def test_recv_one(self):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sub_sock = messaging.sub_sock(sock, timeout=1000)
|
||||
zmq_sleep()
|
||||
|
||||
# no msg in queue, socket should timeout
|
||||
recvd = messaging.recv_one(sub_sock)
|
||||
assert recvd is None
|
||||
|
||||
# one msg in queue
|
||||
msg = random_carstate()
|
||||
pub_sock.send(msg.to_bytes())
|
||||
recvd = messaging.recv_one(sub_sock)
|
||||
assert isinstance(recvd, capnp._DynamicStructReader)
|
||||
assert_carstate(msg.carState, recvd.carState)
|
||||
|
||||
@pytest.mark.xfail(condition="ZMQ" in os.environ, reason='ZMQ detected')
|
||||
def test_recv_one_or_none(self):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sub_sock = messaging.sub_sock(sock)
|
||||
zmq_sleep()
|
||||
|
||||
# no msg in queue, socket shouldn't block
|
||||
recvd = messaging.recv_one_or_none(sub_sock)
|
||||
assert recvd is None
|
||||
|
||||
# one msg in queue
|
||||
msg = random_carstate()
|
||||
pub_sock.send(msg.to_bytes())
|
||||
recvd = messaging.recv_one_or_none(sub_sock)
|
||||
assert isinstance(recvd, capnp._DynamicStructReader)
|
||||
assert_carstate(msg.carState, recvd.carState)
|
||||
|
||||
def test_recv_one_retry(self):
|
||||
sock = "carState"
|
||||
sock_timeout = 0.1
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sub_sock = messaging.sub_sock(sock, timeout=round(sock_timeout*1000))
|
||||
zmq_sleep()
|
||||
|
||||
# this test doesn't work with ZMQ since multiprocessing interrupts it
|
||||
if "ZMQ" not in os.environ:
|
||||
# wait 5 socket timeouts and make sure it's still retrying
|
||||
p = multiprocessing.Process(target=messaging.recv_one_retry, args=(sub_sock,))
|
||||
p.start()
|
||||
time.sleep(sock_timeout*5)
|
||||
assert p.is_alive()
|
||||
p.terminate()
|
||||
|
||||
# wait 5 socket timeouts before sending
|
||||
msg = random_carstate()
|
||||
delayed_send(sock_timeout*5, pub_sock, msg.to_bytes())
|
||||
start_time = time.monotonic()
|
||||
recvd = messaging.recv_one_retry(sub_sock)
|
||||
assert (time.monotonic() - start_time) >= sock_timeout*5
|
||||
assert isinstance(recvd, capnp._DynamicStructReader)
|
||||
assert_carstate(msg.carState, recvd.carState)
|
||||
156
cereal/messaging/tests/test_pub_sub_master.py
Normal file
156
cereal/messaging/tests/test_pub_sub_master.py
Normal file
@@ -0,0 +1,156 @@
|
||||
import random
|
||||
import time
|
||||
from typing import Sized, cast
|
||||
|
||||
import cereal.messaging as messaging
|
||||
from cereal.messaging.tests.test_messaging import events, random_sock, random_socks, \
|
||||
random_bytes, random_carstate, assert_carstate, \
|
||||
zmq_sleep
|
||||
|
||||
|
||||
class TestSubMaster:
|
||||
|
||||
def setup_method(self):
|
||||
# ZMQ pub socket takes too long to die
|
||||
# sleep to prevent multiple publishers error between tests
|
||||
zmq_sleep(3)
|
||||
|
||||
def test_init(self):
|
||||
sm = messaging.SubMaster(events)
|
||||
for p in [sm.updated, sm.recv_time, sm.recv_frame, sm.alive,
|
||||
sm.sock, sm.data, sm.logMonoTime, sm.valid]:
|
||||
assert len(cast(Sized, p)) == len(events)
|
||||
|
||||
def test_init_state(self):
|
||||
socks = random_socks()
|
||||
sm = messaging.SubMaster(socks)
|
||||
assert sm.frame == -1
|
||||
assert not any(sm.updated.values())
|
||||
assert not any(sm.alive.values())
|
||||
assert all(t == 0. for t in sm.recv_time.values())
|
||||
assert all(f == 0 for f in sm.recv_frame.values())
|
||||
assert all(t == 0 for t in sm.logMonoTime.values())
|
||||
|
||||
for p in [sm.updated, sm.recv_time, sm.recv_frame, sm.alive,
|
||||
sm.sock, sm.data, sm.logMonoTime, sm.valid]:
|
||||
assert len(cast(Sized, p)) == len(socks)
|
||||
|
||||
def test_getitem(self):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sm = messaging.SubMaster([sock,])
|
||||
zmq_sleep()
|
||||
|
||||
msg = random_carstate()
|
||||
pub_sock.send(msg.to_bytes())
|
||||
sm.update(1000)
|
||||
assert_carstate(msg.carState, sm[sock])
|
||||
|
||||
# TODO: break this test up to individually test SubMaster.update and SubMaster.update_msgs
|
||||
def test_update(self):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sm = messaging.SubMaster([sock,])
|
||||
zmq_sleep()
|
||||
|
||||
for i in range(10):
|
||||
msg = messaging.new_message(sock)
|
||||
pub_sock.send(msg.to_bytes())
|
||||
sm.update(1000)
|
||||
assert sm.frame == i
|
||||
assert all(sm.updated.values())
|
||||
|
||||
def test_update_timeout(self):
|
||||
sock = random_sock()
|
||||
sm = messaging.SubMaster([sock,])
|
||||
timeout = random.randrange(1000, 3000)
|
||||
start_time = time.monotonic()
|
||||
sm.update(timeout)
|
||||
t = time.monotonic() - start_time
|
||||
assert t >= timeout/1000.
|
||||
assert t < 3
|
||||
assert not any(sm.updated.values())
|
||||
|
||||
def test_avg_frequency_checks(self):
|
||||
for poll in (True, False):
|
||||
sm = messaging.SubMaster(["modelV2", "carParams", "carState", "cameraOdometry", "liveCalibration"],
|
||||
poll=("modelV2" if poll else None),
|
||||
frequency=(20. if not poll else None))
|
||||
|
||||
checks = {
|
||||
"carState": (20, 20),
|
||||
"modelV2": (20, 20 if poll else 10),
|
||||
"cameraOdometry": (20, 10),
|
||||
"liveCalibration": (4, 4),
|
||||
"carParams": (None, None),
|
||||
}
|
||||
|
||||
for service, (max_freq, min_freq) in checks.items():
|
||||
if max_freq is not None:
|
||||
assert sm._check_avg_freq(service)
|
||||
assert sm.freq_tracker[service].max_freq == max_freq*1.2
|
||||
assert sm.freq_tracker[service].min_freq == min_freq*0.8
|
||||
else:
|
||||
assert not sm._check_avg_freq(service)
|
||||
|
||||
def test_alive(self):
|
||||
pass
|
||||
|
||||
def test_ignore_alive(self):
|
||||
pass
|
||||
|
||||
def test_valid(self):
|
||||
pass
|
||||
|
||||
# SubMaster should always conflate
|
||||
def test_conflate(self):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sm = messaging.SubMaster([sock,])
|
||||
|
||||
n = 10
|
||||
for i in range(n+1):
|
||||
msg = messaging.new_message(sock)
|
||||
msg.carState.vEgo = i
|
||||
pub_sock.send(msg.to_bytes())
|
||||
time.sleep(0.01)
|
||||
sm.update(1000)
|
||||
assert sm[sock].vEgo == n
|
||||
|
||||
|
||||
class TestPubMaster:
|
||||
|
||||
def setup_method(self):
|
||||
# ZMQ pub socket takes too long to die
|
||||
# sleep to prevent multiple publishers error between tests
|
||||
zmq_sleep(3)
|
||||
|
||||
def test_init(self):
|
||||
messaging.PubMaster(events)
|
||||
|
||||
def test_send(self):
|
||||
socks = random_socks()
|
||||
pm = messaging.PubMaster(socks)
|
||||
sub_socks = {s: messaging.sub_sock(s, conflate=True, timeout=1000) for s in socks}
|
||||
zmq_sleep()
|
||||
|
||||
# PubMaster accepts either a capnp msg builder or bytes
|
||||
for capnp in [True, False]:
|
||||
for i in range(100):
|
||||
sock = socks[i % len(socks)]
|
||||
|
||||
if capnp:
|
||||
try:
|
||||
msg = messaging.new_message(sock)
|
||||
except Exception:
|
||||
msg = messaging.new_message(sock, random.randrange(50))
|
||||
else:
|
||||
msg = random_bytes()
|
||||
|
||||
pm.send(sock, msg)
|
||||
recvd = sub_socks[sock].receive()
|
||||
|
||||
if capnp:
|
||||
msg.clear_write_flag()
|
||||
msg = msg.to_bytes()
|
||||
assert msg == recvd, i
|
||||
21
cereal/messaging/tests/test_services.py
Normal file
21
cereal/messaging/tests/test_services.py
Normal file
@@ -0,0 +1,21 @@
|
||||
import os
|
||||
import tempfile
|
||||
from typing import Dict
|
||||
from parameterized import parameterized
|
||||
|
||||
import cereal.services as services
|
||||
from cereal.services import SERVICE_LIST
|
||||
|
||||
|
||||
class TestServices:
|
||||
|
||||
@parameterized.expand(SERVICE_LIST.keys())
|
||||
def test_services(self, s):
|
||||
service = SERVICE_LIST[s]
|
||||
assert service.frequency <= 104
|
||||
assert service.decimation != 0
|
||||
|
||||
def test_generated_header(self):
|
||||
with tempfile.NamedTemporaryFile(suffix=".h") as f:
|
||||
ret = os.system(f"python3 {services.__file__} > {f.name} && clang++ {f.name} -std=c++11")
|
||||
assert ret == 0, "generated services header is not valid C++"
|
||||
95
cereal/services.h
Normal file
95
cereal/services.h
Normal file
@@ -0,0 +1,95 @@
|
||||
/* THIS IS AN AUTOGENERATED FILE, PLEASE EDIT services.py */
|
||||
#ifndef __SERVICES_H
|
||||
#define __SERVICES_H
|
||||
#include <map>
|
||||
#include <string>
|
||||
struct service { std::string name; bool should_log; int frequency; int decimation; };
|
||||
static std::map<std::string, service> services = {
|
||||
{ "gyroscope", {"gyroscope", true, 104, 104}},
|
||||
{ "gyroscope2", {"gyroscope2", true, 100, 100}},
|
||||
{ "accelerometer", {"accelerometer", true, 104, 104}},
|
||||
{ "accelerometer2", {"accelerometer2", true, 100, 100}},
|
||||
{ "magnetometer", {"magnetometer", true, 25, -1}},
|
||||
{ "lightSensor", {"lightSensor", true, 100, 100}},
|
||||
{ "temperatureSensor", {"temperatureSensor", true, 2, 200}},
|
||||
{ "temperatureSensor2", {"temperatureSensor2", true, 2, 200}},
|
||||
{ "gpsNMEA", {"gpsNMEA", true, 9, -1}},
|
||||
{ "deviceState", {"deviceState", true, 2, 1}},
|
||||
{ "touch", {"touch", true, 20, 1}},
|
||||
{ "can", {"can", true, 100, 2053}},
|
||||
{ "controlsState", {"controlsState", true, 100, 10}},
|
||||
{ "selfdriveState", {"selfdriveState", true, 100, 10}},
|
||||
{ "pandaStates", {"pandaStates", true, 10, 1}},
|
||||
{ "peripheralState", {"peripheralState", true, 2, 1}},
|
||||
{ "radarState", {"radarState", true, 20, 5}},
|
||||
{ "roadEncodeIdx", {"roadEncodeIdx", false, 20, 1}},
|
||||
{ "liveTracks", {"liveTracks", true, 20, -1}},
|
||||
{ "sendcan", {"sendcan", true, 100, 139}},
|
||||
{ "logMessage", {"logMessage", true, 0, -1}},
|
||||
{ "errorLogMessage", {"errorLogMessage", true, 0, 1}},
|
||||
{ "liveCalibration", {"liveCalibration", true, 4, 4}},
|
||||
{ "liveTorqueParameters", {"liveTorqueParameters", true, 4, 1}},
|
||||
{ "liveDelay", {"liveDelay", true, 4, 1}},
|
||||
{ "androidLog", {"androidLog", true, 0, -1}},
|
||||
{ "carState", {"carState", true, 100, 10}},
|
||||
{ "carControl", {"carControl", true, 100, 10}},
|
||||
{ "carOutput", {"carOutput", true, 100, 10}},
|
||||
{ "longitudinalPlan", {"longitudinalPlan", true, 20, 10}},
|
||||
{ "driverAssistance", {"driverAssistance", true, 20, 20}},
|
||||
{ "procLog", {"procLog", true, 0, 15}},
|
||||
{ "gpsLocationExternal", {"gpsLocationExternal", true, 10, 10}},
|
||||
{ "gpsLocation", {"gpsLocation", true, 1, 1}},
|
||||
{ "ubloxGnss", {"ubloxGnss", true, 10, -1}},
|
||||
{ "qcomGnss", {"qcomGnss", true, 2, -1}},
|
||||
{ "gnssMeasurements", {"gnssMeasurements", true, 10, 10}},
|
||||
{ "clocks", {"clocks", true, 0, 1}},
|
||||
{ "ubloxRaw", {"ubloxRaw", true, 20, -1}},
|
||||
{ "livePose", {"livePose", true, 20, 4}},
|
||||
{ "liveParameters", {"liveParameters", true, 20, 5}},
|
||||
{ "cameraOdometry", {"cameraOdometry", true, 20, 10}},
|
||||
{ "thumbnail", {"thumbnail", true, 0, 1}},
|
||||
{ "lateralPlan", {"lateralPlan", true, 20, 5}},
|
||||
{ "onroadEvents", {"onroadEvents", true, 1, 1}},
|
||||
{ "carParams", {"carParams", true, 0, 1}},
|
||||
{ "roadCameraState", {"roadCameraState", true, 20, 20}},
|
||||
{ "driverCameraState", {"driverCameraState", true, 20, 20}},
|
||||
{ "driverEncodeIdx", {"driverEncodeIdx", false, 20, 1}},
|
||||
{ "driverStateV2", {"driverStateV2", true, 20, 10}},
|
||||
{ "driverMonitoringState", {"driverMonitoringState", true, 20, 10}},
|
||||
{ "wideRoadEncodeIdx", {"wideRoadEncodeIdx", false, 20, 1}},
|
||||
{ "wideRoadCameraState", {"wideRoadCameraState", true, 20, 20}},
|
||||
{ "drivingModelData", {"drivingModelData", true, 20, 10}},
|
||||
{ "modelV2", {"modelV2", true, 20, -1}},
|
||||
{ "managerState", {"managerState", true, 2, 1}},
|
||||
{ "uploaderState", {"uploaderState", true, 0, 1}},
|
||||
{ "navInstruction", {"navInstruction", true, 1, 10}},
|
||||
{ "navRoute", {"navRoute", true, 0, -1}},
|
||||
{ "navRouteNavd", {"navRouteNavd", true, 0, -1}},
|
||||
{ "navThumbnail", {"navThumbnail", true, 0, -1}},
|
||||
{ "navModel", {"navModel", true, 2, 4}},
|
||||
{ "mapRenderState", {"mapRenderState", true, 2, 1}},
|
||||
{ "qRoadEncodeIdx", {"qRoadEncodeIdx", false, 20, -1}},
|
||||
{ "userFlag", {"userFlag", true, 0, 1}},
|
||||
{ "soundPressure", {"soundPressure", true, 10, 10}},
|
||||
{ "rawAudioData", {"rawAudioData", false, 20, -1}},
|
||||
{ "carrotMan", {"carrotMan", true, 0, -1}},
|
||||
{ "navInstructionCarrot", {"navInstructionCarrot", true, 1, 10}},
|
||||
{ "uiDebug", {"uiDebug", true, 0, 1}},
|
||||
{ "testJoystick", {"testJoystick", true, 0, -1}},
|
||||
{ "alertDebug", {"alertDebug", true, 20, 5}},
|
||||
{ "roadEncodeData", {"roadEncodeData", false, 20, -1}},
|
||||
{ "driverEncodeData", {"driverEncodeData", false, 20, -1}},
|
||||
{ "wideRoadEncodeData", {"wideRoadEncodeData", false, 20, -1}},
|
||||
{ "qRoadEncodeData", {"qRoadEncodeData", false, 20, -1}},
|
||||
{ "livestreamWideRoadEncodeIdx", {"livestreamWideRoadEncodeIdx", false, 20, -1}},
|
||||
{ "livestreamRoadEncodeIdx", {"livestreamRoadEncodeIdx", false, 20, -1}},
|
||||
{ "livestreamDriverEncodeIdx", {"livestreamDriverEncodeIdx", false, 20, -1}},
|
||||
{ "livestreamWideRoadEncodeData", {"livestreamWideRoadEncodeData", false, 20, -1}},
|
||||
{ "livestreamRoadEncodeData", {"livestreamRoadEncodeData", false, 20, -1}},
|
||||
{ "livestreamDriverEncodeData", {"livestreamDriverEncodeData", false, 20, -1}},
|
||||
{ "customReservedRawData0", {"customReservedRawData0", true, 0, -1}},
|
||||
{ "customReservedRawData1", {"customReservedRawData1", true, 0, -1}},
|
||||
{ "customReservedRawData2", {"customReservedRawData2", true, 0, -1}},
|
||||
};
|
||||
#endif
|
||||
|
||||
132
cereal/services.py
Normal file
132
cereal/services.py
Normal file
@@ -0,0 +1,132 @@
|
||||
#!/usr/bin/env python3
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class Service:
|
||||
def __init__(self, should_log: bool, frequency: float, decimation: Optional[int] = None):
|
||||
self.should_log = should_log
|
||||
self.frequency = frequency
|
||||
self.decimation = decimation
|
||||
|
||||
|
||||
_services: dict[str, tuple] = {
|
||||
# service: (should_log, frequency, qlog decimation (optional))
|
||||
# note: the "EncodeIdx" packets will still be in the log
|
||||
"gyroscope": (True, 104., 104),
|
||||
"gyroscope2": (True, 100., 100),
|
||||
"accelerometer": (True, 104., 104),
|
||||
"accelerometer2": (True, 100., 100),
|
||||
"magnetometer": (True, 25.),
|
||||
"lightSensor": (True, 100., 100),
|
||||
"temperatureSensor": (True, 2., 200),
|
||||
"temperatureSensor2": (True, 2., 200),
|
||||
"gpsNMEA": (True, 9.),
|
||||
"deviceState": (True, 2., 1),
|
||||
"touch": (True, 20., 1),
|
||||
"can": (True, 100., 2053), # decimation gives ~3 msgs in a full segment
|
||||
"controlsState": (True, 100., 10),
|
||||
"selfdriveState": (True, 100., 10),
|
||||
"pandaStates": (True, 10., 1),
|
||||
"peripheralState": (True, 2., 1),
|
||||
"radarState": (True, 20., 5),
|
||||
"roadEncodeIdx": (False, 20., 1),
|
||||
"liveTracks": (True, 20.),
|
||||
"sendcan": (True, 100., 139),
|
||||
"logMessage": (True, 0.),
|
||||
"errorLogMessage": (True, 0., 1),
|
||||
"liveCalibration": (True, 4., 4),
|
||||
"liveTorqueParameters": (True, 4., 1),
|
||||
"liveDelay": (True, 4., 1),
|
||||
"androidLog": (True, 0.),
|
||||
"carState": (True, 100., 10),
|
||||
"carControl": (True, 100., 10),
|
||||
"carOutput": (True, 100., 10),
|
||||
"longitudinalPlan": (True, 20., 10),
|
||||
"driverAssistance": (True, 20., 20),
|
||||
"procLog": (True, 0.5, 15),
|
||||
"gpsLocationExternal": (True, 10., 10),
|
||||
"gpsLocation": (True, 1., 1),
|
||||
"ubloxGnss": (True, 10.),
|
||||
"qcomGnss": (True, 2.),
|
||||
"gnssMeasurements": (True, 10., 10),
|
||||
"clocks": (True, 0.1, 1),
|
||||
"ubloxRaw": (True, 20.),
|
||||
"livePose": (True, 20., 4),
|
||||
#"liveLocationKalman": (True, 20., 5),
|
||||
"liveParameters": (True, 20., 5),
|
||||
"cameraOdometry": (True, 20., 10),
|
||||
"thumbnail": (True, 1 / 60., 1),
|
||||
"lateralPlan": (True, 20., 5),
|
||||
"onroadEvents": (True, 1., 1),
|
||||
"carParams": (True, 0.02, 1),
|
||||
"roadCameraState": (True, 20., 20),
|
||||
"driverCameraState": (True, 20., 20),
|
||||
"driverEncodeIdx": (False, 20., 1),
|
||||
"driverStateV2": (True, 20., 10),
|
||||
"driverMonitoringState": (True, 20., 10),
|
||||
"wideRoadEncodeIdx": (False, 20., 1),
|
||||
"wideRoadCameraState": (True, 20., 20),
|
||||
"drivingModelData": (True, 20., 10),
|
||||
"modelV2": (True, 20.),
|
||||
"managerState": (True, 2., 1),
|
||||
"uploaderState": (True, 0., 1),
|
||||
"navInstruction": (True, 1., 10),
|
||||
"navRoute": (True, 0.),
|
||||
"navRouteNavd": (True, 0.),
|
||||
"navThumbnail": (True, 0.),
|
||||
"navModel": (True, 2., 4.),
|
||||
"mapRenderState": (True, 2., 1.),
|
||||
"qRoadEncodeIdx": (False, 20.),
|
||||
"userFlag": (True, 0., 1),
|
||||
"soundPressure": (True, 10., 10),
|
||||
"rawAudioData": (False, 20.),
|
||||
|
||||
"carrotMan": (True, 0.),
|
||||
"navInstructionCarrot": (True, 1., 10),
|
||||
|
||||
# debug
|
||||
"uiDebug": (True, 0., 1),
|
||||
"testJoystick": (True, 0.),
|
||||
"alertDebug": (True, 20., 5),
|
||||
"roadEncodeData": (False, 20.),
|
||||
"driverEncodeData": (False, 20.),
|
||||
"wideRoadEncodeData": (False, 20.),
|
||||
"qRoadEncodeData": (False, 20.),
|
||||
"livestreamWideRoadEncodeIdx": (False, 20.),
|
||||
"livestreamRoadEncodeIdx": (False, 20.),
|
||||
"livestreamDriverEncodeIdx": (False, 20.),
|
||||
"livestreamWideRoadEncodeData": (False, 20.),
|
||||
"livestreamRoadEncodeData": (False, 20.),
|
||||
"livestreamDriverEncodeData": (False, 20.),
|
||||
"customReservedRawData0": (True, 0.),
|
||||
"customReservedRawData1": (True, 0.),
|
||||
"customReservedRawData2": (True, 0.),
|
||||
}
|
||||
SERVICE_LIST = {name: Service(*vals) for name, vals in _services.items()}
|
||||
|
||||
|
||||
def build_header():
|
||||
h = ""
|
||||
h += "/* THIS IS AN AUTOGENERATED FILE, PLEASE EDIT services.py */\n"
|
||||
h += "#ifndef __SERVICES_H\n"
|
||||
h += "#define __SERVICES_H\n"
|
||||
|
||||
h += "#include <map>\n"
|
||||
h += "#include <string>\n"
|
||||
|
||||
h += "struct service { std::string name; bool should_log; int frequency; int decimation; };\n"
|
||||
h += "static std::map<std::string, service> services = {\n"
|
||||
for k, v in SERVICE_LIST.items():
|
||||
should_log = "true" if v.should_log else "false"
|
||||
decimation = -1 if v.decimation is None else v.decimation
|
||||
h += ' { "%s", {"%s", %s, %d, %d}},\n' % \
|
||||
(k, k, should_log, v.frequency, decimation)
|
||||
h += "};\n"
|
||||
|
||||
h += "#endif\n"
|
||||
return h
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print(build_header())
|
||||
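A small sketch of how the SERVICE_LIST table above can be consumed, for example to estimate how often a service lands in the decimated qlog. Not part of the commit; the qlog-rate arithmetic is an assumption for illustration, not something services.py defines.

from cereal.services import SERVICE_LIST

for name in ("carState", "deviceState"):
  svc = SERVICE_LIST[name]
  if svc.decimation:
    print(f"{name}: {svc.frequency} Hz, ~{svc.frequency / svc.decimation:.2f} Hz in qlog")
  else:
    print(f"{name}: {svc.frequency} Hz, not decimated into qlog")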
13
codecov.yml
Normal file
13
codecov.yml
Normal file
@@ -0,0 +1,13 @@
comment: false
coverage:
  status:
    project:
      default:
        informational: true
    patch: off

ignore:
  - "**/test_*.py"
  - "selfdrive/test/**"
  - "system/version.py" # codecov changes depending on if we are in a branch or not
  - "tools"
0
common/__init__.py
Normal file
0
common/__init__.py
Normal file
46
common/api.py
Normal file
46
common/api.py
Normal file
@@ -0,0 +1,46 @@
|
||||
import jwt
|
||||
import os
|
||||
import requests
|
||||
from datetime import datetime, timedelta, UTC
|
||||
from openpilot.system.hardware.hw import Paths
|
||||
from openpilot.system.version import get_version
|
||||
|
||||
API_HOST = os.getenv('API_HOST', 'https://api.commadotai.com')
|
||||
|
||||
class Api:
|
||||
def __init__(self, dongle_id):
|
||||
self.dongle_id = dongle_id
|
||||
with open(Paths.persist_root()+'/comma/id_rsa') as f:
|
||||
self.private_key = f.read()
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
return self.request('GET', *args, **kwargs)
|
||||
|
||||
def post(self, *args, **kwargs):
|
||||
return self.request('POST', *args, **kwargs)
|
||||
|
||||
def request(self, method, endpoint, timeout=None, access_token=None, **params):
|
||||
return api_get(endpoint, method=method, timeout=timeout, access_token=access_token, **params)
|
||||
|
||||
def get_token(self, expiry_hours=1):
|
||||
now = datetime.now(UTC).replace(tzinfo=None)
|
||||
payload = {
|
||||
'identity': self.dongle_id,
|
||||
'nbf': now,
|
||||
'iat': now,
|
||||
'exp': now + timedelta(hours=expiry_hours)
|
||||
}
|
||||
token = jwt.encode(payload, self.private_key, algorithm='RS256')
|
||||
if isinstance(token, bytes):
|
||||
token = token.decode('utf8')
|
||||
return token
|
||||
|
||||
|
||||
def api_get(endpoint, method='GET', timeout=None, access_token=None, **params):
|
||||
headers = {}
|
||||
if access_token is not None:
|
||||
headers['Authorization'] = "JWT " + access_token
|
||||
|
||||
headers['User-Agent'] = "openpilot-" + get_version()
|
||||
|
||||
return requests.request(method, API_HOST + "/" + endpoint, timeout=timeout, headers=headers, params=params)
|
||||
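A hedged usage sketch for the Api helper above. Not part of the commit: the dongle ID and endpoint strings are placeholders, and this assumes the RSA signing key exists under Paths.persist_root() as the constructor expects.

from openpilot.common.api import Api

api = Api("0123456789abcdef")            # placeholder dongle ID
token = api.get_token(expiry_hours=1)    # RS256-signed JWT
resp = api.get("some/endpoint", access_token=token, timeout=10)  # placeholder endpoint
print(resp.status_code)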
4
common/basedir.py
Normal file
4
common/basedir.py
Normal file
@@ -0,0 +1,4 @@
import os


BASEDIR = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../"))
28
common/clutil.h
Normal file
28
common/clutil.h
Normal file
@@ -0,0 +1,28 @@
|
||||
#pragma once
|
||||
|
||||
#ifdef __APPLE__
|
||||
#include <OpenCL/cl.h>
|
||||
#else
|
||||
#include <CL/cl.h>
|
||||
#endif
|
||||
|
||||
#include <string>
|
||||
|
||||
#define CL_CHECK(_expr) \
|
||||
do { \
|
||||
assert(CL_SUCCESS == (_expr)); \
|
||||
} while (0)
|
||||
|
||||
#define CL_CHECK_ERR(_expr) \
|
||||
({ \
|
||||
cl_int err = CL_INVALID_VALUE; \
|
||||
__typeof__(_expr) _ret = _expr; \
|
||||
assert(_ret && err == CL_SUCCESS); \
|
||||
_ret; \
|
||||
})
|
||||
|
||||
cl_device_id cl_get_device_id(cl_device_type device_type);
|
||||
cl_context cl_create_context(cl_device_id device_id);
|
||||
void cl_release_context(cl_context context);
|
||||
cl_program cl_program_from_source(cl_context ctx, cl_device_id device_id, const std::string& src, const char* args = nullptr);
|
||||
cl_program cl_program_from_file(cl_context ctx, cl_device_id device_id, const char* path, const char* args);
|
||||
23
common/constants.py
Normal file
23
common/constants.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import numpy as np
|
||||
|
||||
# conversions
|
||||
class CV:
|
||||
# Speed
|
||||
MPH_TO_KPH = 1.609344
|
||||
KPH_TO_MPH = 1. / MPH_TO_KPH
|
||||
MS_TO_KPH = 3.6
|
||||
KPH_TO_MS = 1. / MS_TO_KPH
|
||||
MS_TO_MPH = MS_TO_KPH * KPH_TO_MPH
|
||||
MPH_TO_MS = MPH_TO_KPH * KPH_TO_MS
|
||||
MS_TO_KNOTS = 1.9438
|
||||
KNOTS_TO_MS = 1. / MS_TO_KNOTS
|
||||
|
||||
# Angle
|
||||
DEG_TO_RAD = np.pi / 180.
|
||||
RAD_TO_DEG = 1. / DEG_TO_RAD
|
||||
|
||||
# Mass
|
||||
LB_TO_KG = 0.453592
|
||||
|
||||
|
||||
ACCELERATION_DUE_TO_GRAVITY = 9.81 # m/s^2
|
||||
19
common/conversions.py
Normal file
19
common/conversions.py
Normal file
@@ -0,0 +1,19 @@
|
||||
import numpy as np
|
||||
|
||||
class Conversions:
|
||||
# Speed
|
||||
MPH_TO_KPH = 1.609344
|
||||
KPH_TO_MPH = 1. / MPH_TO_KPH
|
||||
MS_TO_KPH = 3.6
|
||||
KPH_TO_MS = 1. / MS_TO_KPH
|
||||
MS_TO_MPH = MS_TO_KPH * KPH_TO_MPH
|
||||
MPH_TO_MS = MPH_TO_KPH * KPH_TO_MS
|
||||
MS_TO_KNOTS = 1.9438
|
||||
KNOTS_TO_MS = 1. / MS_TO_KNOTS
|
||||
|
||||
# Angle
|
||||
DEG_TO_RAD = np.pi / 180.
|
||||
RAD_TO_DEG = 1. / DEG_TO_RAD
|
||||
|
||||
# Mass
|
||||
LB_TO_KG = 0.453592
|
||||
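A quick example of the Conversions constants above in use (illustrative values only):

from openpilot.common.conversions import Conversions as CV

v_ego = 25.0                     # m/s
print(v_ego * CV.MS_TO_MPH)      # ~55.9 mph
print(100 * CV.KPH_TO_MS)        # 100 km/h in m/s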
9
common/dict_helpers.py
Normal file
9
common/dict_helpers.py
Normal file
@@ -0,0 +1,9 @@
# remove all keys that end in DEPRECATED
def strip_deprecated_keys(d):
  for k in list(d.keys()):
    if isinstance(k, str):
      if k.endswith('DEPRECATED'):
        d.pop(k)
      elif isinstance(d[k], dict):
        strip_deprecated_keys(d[k])
  return d
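For example (input dict is made up for illustration), strip_deprecated_keys drops deprecated keys recursively:

from openpilot.common.dict_helpers import strip_deprecated_keys

d = {"vEgoDEPRECATED": 1.0, "nested": {"oldFieldDEPRECATED": 2, "keep": 3}}
print(strip_deprecated_keys(d))   # {'nested': {'keep': 3}}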
8
common/ffi_wrapper.py
Normal file
8
common/ffi_wrapper.py
Normal file
@@ -0,0 +1,8 @@
import platform


def suffix():
  if platform.system() == "Darwin":
    return ".dylib"
  else:
    return ".so"
58
common/file_helpers.py
Normal file
58
common/file_helpers.py
Normal file
@@ -0,0 +1,58 @@
|
||||
import io
|
||||
import os
|
||||
import tempfile
|
||||
import contextlib
|
||||
import zstandard as zstd
|
||||
|
||||
LOG_COMPRESSION_LEVEL = 10 # little benefit up to level 15. level ~17 is a small step change
|
||||
|
||||
|
||||
class CallbackReader:
|
||||
"""Wraps a file, but overrides the read method to also
|
||||
call a callback function with the number of bytes read so far."""
|
||||
def __init__(self, f, callback, *args):
|
||||
self.f = f
|
||||
self.callback = callback
|
||||
self.cb_args = args
|
||||
self.total_read = 0
|
||||
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self.f, attr)
|
||||
|
||||
def read(self, *args, **kwargs):
|
||||
chunk = self.f.read(*args, **kwargs)
|
||||
self.total_read += len(chunk)
|
||||
self.callback(*self.cb_args, self.total_read)
|
||||
return chunk
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def atomic_write_in_dir(path: str, mode: str = 'w', buffering: int = -1, encoding: str = None, newline: str = None,
|
||||
overwrite: bool = False):
|
||||
"""Write to a file atomically using a temporary file in the same directory as the destination file."""
|
||||
dir_name = os.path.dirname(path)
|
||||
|
||||
if not overwrite and os.path.exists(path):
|
||||
raise FileExistsError(f"File '{path}' already exists. To overwrite it, set 'overwrite' to True.")
|
||||
|
||||
with tempfile.NamedTemporaryFile(mode=mode, buffering=buffering, encoding=encoding, newline=newline, dir=dir_name, delete=False) as tmp_file:
|
||||
yield tmp_file
|
||||
tmp_file_name = tmp_file.name
|
||||
os.replace(tmp_file_name, path)
|
||||
|
||||
|
||||
def get_upload_stream(filepath: str, should_compress: bool) -> tuple[io.BufferedIOBase, int]:
|
||||
if not should_compress:
|
||||
file_size = os.path.getsize(filepath)
|
||||
file_stream = open(filepath, "rb")
|
||||
return file_stream, file_size
|
||||
|
||||
# Compress the file on the fly
|
||||
compressed_stream = io.BytesIO()
|
||||
compressor = zstd.ZstdCompressor(level=LOG_COMPRESSION_LEVEL)
|
||||
|
||||
with open(filepath, "rb") as f:
|
||||
compressor.copy_stream(f, compressed_stream)
|
||||
compressed_size = compressed_stream.tell()
|
||||
compressed_stream.seek(0)
|
||||
return compressed_stream, compressed_size
|
||||
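A minimal sketch of the two helpers above. Not part of the commit; the path is a placeholder and overwrite=True is used only so the snippet can be rerun.

from openpilot.common.file_helpers import atomic_write_in_dir, get_upload_stream

with atomic_write_in_dir("/tmp/example.txt", overwrite=True) as f:
  f.write("written via a temp file, then os.replace()d into place\n")

stream, size = get_upload_stream("/tmp/example.txt", should_compress=True)
print(size, "bytes after zstd compression")
stream.close()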
69
common/filter_simple.py
Normal file
69
common/filter_simple.py
Normal file
@@ -0,0 +1,69 @@
|
||||
import numpy as np
|
||||
from collections import deque
|
||||
|
||||
class FirstOrderFilter:
|
||||
# first order filter
|
||||
def __init__(self, x0, rc, dt, initialized=True):
|
||||
self.x = x0
|
||||
self.dt = dt
|
||||
self.update_alpha(rc)
|
||||
self.initialized = initialized
|
||||
|
||||
def update_alpha(self, rc):
|
||||
self.alpha = self.dt / (rc + self.dt)
|
||||
|
||||
def update(self, x):
|
||||
if self.initialized:
|
||||
self.x = (1. - self.alpha) * self.x + self.alpha * x
|
||||
else:
|
||||
self.initialized = True
|
||||
self.x = x
|
||||
return self.x
|
||||
|
||||
|
||||
class BounceFilter(FirstOrderFilter):
|
||||
def __init__(self, x0, rc, dt, initialized=True, bounce=2):
|
||||
self.velocity = FirstOrderFilter(0.0, 0.15, dt)
|
||||
self.bounce = bounce
|
||||
super().__init__(x0, rc, dt, initialized)
|
||||
|
||||
def update(self, x):
|
||||
super().update(x)
|
||||
scale = self.dt / (1.0 / 60.0) # tuned at 60 fps
|
||||
self.velocity.x += (x - self.x) * self.bounce * scale * self.dt
|
||||
self.velocity.update(0.0)
|
||||
if abs(self.velocity.x) < 1e-5:
|
||||
self.velocity.x = 0.0
|
||||
self.x += self.velocity.x
|
||||
return self.x
|
||||
|
||||
class MyMovingAverage:
|
||||
def __init__(self, window_size, value=None):
|
||||
self.window_size = window_size
|
||||
if (value is not None):
|
||||
self.values = deque([value] * window_size, maxlen=window_size)
|
||||
self.sum = value * window_size
|
||||
self.result = value
|
||||
else:
|
||||
self.values = deque(maxlen=window_size)
|
||||
self.sum = 0
|
||||
self.result = 0
|
||||
|
||||
def set(self, value):
|
||||
self.values.clear()
|
||||
self.values.append(value)
|
||||
self.sum = value
|
||||
self.result = value
|
||||
return value
|
||||
|
||||
def set_all(self, value):
|
||||
self.values = deque([value] * self.window_size, maxlen=self.window_size)
|
||||
self.sum = value * self.window_size
|
||||
self.result = value
|
||||
return value
|
||||
|
||||
def process(self, value, median=False):
|
||||
self.values.append(value)
|
||||
self.sum = sum(self.values)
|
||||
self.result = float(np.median(self.values)) if median else float(self.sum) / len(self.values)
|
||||
return self.result
|
||||
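A short sketch of FirstOrderFilter from above, smoothing a noisy signal at a 100 Hz update rate (values are illustrative only):

from openpilot.common.filter_simple import FirstOrderFilter

DT = 0.01                          # 100 Hz update rate
f = FirstOrderFilter(0.0, rc=0.5, dt=DT)
for raw in (1.0, 1.2, 0.9, 1.1):   # noisy samples
  smoothed = f.update(raw)
print(round(smoothed, 3))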
42
common/git.py
Normal file
42
common/git.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from functools import cache
|
||||
import subprocess
|
||||
from openpilot.common.run import run_cmd, run_cmd_default
|
||||
|
||||
|
||||
@cache
|
||||
def get_commit(cwd: str = None, branch: str = "HEAD") -> str:
|
||||
return run_cmd_default(["git", "rev-parse", branch], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_commit_date(cwd: str = None, commit: str = "HEAD") -> str:
|
||||
return run_cmd_default(["git", "show", "--no-patch", "--format='%ct %ci'", commit], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_short_branch(cwd: str = None) -> str:
|
||||
return run_cmd_default(["git", "rev-parse", "--abbrev-ref", "HEAD"], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_branch(cwd: str = None) -> str:
|
||||
return run_cmd_default(["git", "rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{u}"], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_origin(cwd: str = None) -> str:
|
||||
try:
|
||||
local_branch = run_cmd(["git", "name-rev", "--name-only", "HEAD"], cwd=cwd)
|
||||
tracking_remote = run_cmd(["git", "config", "branch." + local_branch + ".remote"], cwd=cwd)
|
||||
return run_cmd(["git", "config", "remote." + tracking_remote + ".url"], cwd=cwd)
|
||||
except subprocess.CalledProcessError: # Not on a branch, fallback
|
||||
return run_cmd_default(["git", "config", "--get", "remote.origin.url"], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_normalized_origin(cwd: str = None) -> str:
|
||||
return get_origin(cwd) \
|
||||
.replace("git@", "", 1) \
|
||||
.replace(".git", "", 1) \
|
||||
.replace("https://", "", 1) \
|
||||
.replace(":", "/", 1)
|
||||
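The git helpers above are thin, cached wrappers around git CLI calls; a usage sketch (output shapes are examples, not guarantees):

from openpilot.common.git import get_commit, get_short_branch, get_normalized_origin

print(get_commit()[:8])          # short hash of HEAD
print(get_short_branch())        # current branch name
print(get_normalized_origin())   # e.g. github.com/<org>/<repo>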
89
common/gpio.py
Normal file
89
common/gpio.py
Normal file
@@ -0,0 +1,89 @@
|
||||
import os
|
||||
import fcntl
|
||||
import ctypes
|
||||
from functools import cache
|
||||
|
||||
def gpio_init(pin: int, output: bool) -> None:
|
||||
try:
|
||||
with open(f"/sys/class/gpio/gpio{pin}/direction", 'wb') as f:
|
||||
f.write(b"out" if output else b"in")
|
||||
except Exception as e:
|
||||
print(f"Failed to set gpio {pin} direction: {e}")
|
||||
|
||||
def gpio_set(pin: int, high: bool) -> None:
|
||||
try:
|
||||
with open(f"/sys/class/gpio/gpio{pin}/value", 'wb') as f:
|
||||
f.write(b"1" if high else b"0")
|
||||
except Exception as e:
|
||||
print(f"Failed to set gpio {pin} value: {e}")
|
||||
|
||||
def gpio_read(pin: int) -> bool | None:
|
||||
val = None
|
||||
try:
|
||||
with open(f"/sys/class/gpio/gpio{pin}/value", 'rb') as f:
|
||||
val = bool(int(f.read().strip()))
|
||||
except Exception as e:
|
||||
print(f"Failed to set gpio {pin} value: {e}")
|
||||
|
||||
return val
|
||||
|
||||
def gpio_export(pin: int) -> None:
|
||||
if os.path.isdir(f"/sys/class/gpio/gpio{pin}"):
|
||||
return
|
||||
|
||||
try:
|
||||
with open("/sys/class/gpio/export", 'w') as f:
|
||||
f.write(str(pin))
|
||||
except Exception:
|
||||
print(f"Failed to export gpio {pin}")
|
||||
|
||||
@cache
|
||||
def get_irq_action(irq: int) -> list[str]:
|
||||
try:
|
||||
with open(f"/sys/kernel/irq/{irq}/actions") as f:
|
||||
actions = f.read().strip().split(',')
|
||||
return actions
|
||||
except FileNotFoundError:
|
||||
return []
|
||||
|
||||
def get_irqs_for_action(action: str) -> list[str]:
|
||||
ret = []
|
||||
with open("/proc/interrupts") as f:
|
||||
for l in f.readlines():
|
||||
irq = l.split(':')[0].strip()
|
||||
if irq.isdigit() and action in get_irq_action(irq):
|
||||
ret.append(irq)
|
||||
return ret
|
||||
|
||||
# *** gpiochip ***
|
||||
|
||||
class gpioevent_data(ctypes.Structure):
|
||||
_fields_ = [
|
||||
("timestamp", ctypes.c_uint64),
|
||||
("id", ctypes.c_uint32),
|
||||
]
|
||||
|
||||
class gpioevent_request(ctypes.Structure):
|
||||
_fields_ = [
|
||||
("lineoffset", ctypes.c_uint32),
|
||||
("handleflags", ctypes.c_uint32),
|
||||
("eventflags", ctypes.c_uint32),
|
||||
("label", ctypes.c_char * 32),
|
||||
("fd", ctypes.c_int)
|
||||
]
|
||||
|
||||
def gpiochip_get_ro_value_fd(label: str, gpiochip_id: int, pin: int) -> int:
|
||||
GPIOEVENT_REQUEST_BOTH_EDGES = 0x3
|
||||
GPIOHANDLE_REQUEST_INPUT = 0x1
|
||||
GPIO_GET_LINEEVENT_IOCTL = 0xc030b404
|
||||
|
||||
rq = gpioevent_request()
|
||||
rq.lineoffset = pin
|
||||
rq.handleflags = GPIOHANDLE_REQUEST_INPUT
|
||||
rq.eventflags = GPIOEVENT_REQUEST_BOTH_EDGES
|
||||
rq.label = label.encode('utf-8')[:31] + b'\0'
|
||||
|
||||
fd = os.open(f"/dev/gpiochip{gpiochip_id}", os.O_RDONLY)
|
||||
fcntl.ioctl(fd, GPIO_GET_LINEEVENT_IOCTL, rq)
|
||||
os.close(fd)
|
||||
return int(rq.fd)
|
||||
8
common/gps.py
Normal file
8
common/gps.py
Normal file
@@ -0,0 +1,8 @@
from openpilot.common.params import Params


def get_gps_location_service(params: Params) -> str:
  if params.get_bool("UbloxAvailable"):
    return "gpsLocationExternal"
  else:
    return "gpsLocation"
250
common/logging_extra.py
Normal file
250
common/logging_extra.py
Normal file
@@ -0,0 +1,250 @@
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
import copy
|
||||
import json
|
||||
import time
|
||||
import uuid
|
||||
import socket
|
||||
import logging
|
||||
import traceback
|
||||
import numpy as np
|
||||
from threading import local
|
||||
from collections import OrderedDict
|
||||
from contextlib import contextmanager
|
||||
|
||||
LOG_TIMESTAMPS = "LOG_TIMESTAMPS" in os.environ
|
||||
|
||||
def json_handler(obj):
|
||||
if isinstance(obj, np.bool_):
|
||||
return bool(obj)
|
||||
# if isinstance(obj, (datetime.date, datetime.time)):
|
||||
# return obj.isoformat()
|
||||
return repr(obj)
|
||||
|
||||
def json_robust_dumps(obj):
|
||||
return json.dumps(obj, default=json_handler)
|
||||
|
||||
class NiceOrderedDict(OrderedDict):
|
||||
def __str__(self):
|
||||
return json_robust_dumps(self)
|
||||
|
||||
class SwagFormatter(logging.Formatter):
|
||||
def __init__(self, swaglogger):
|
||||
logging.Formatter.__init__(self, None, '%a %b %d %H:%M:%S %Z %Y')
|
||||
|
||||
self.swaglogger = swaglogger
|
||||
self.host = socket.gethostname()
|
||||
|
||||
def format_dict(self, record):
|
||||
record_dict = NiceOrderedDict()
|
||||
|
||||
if isinstance(record.msg, dict):
|
||||
record_dict['msg'] = record.msg
|
||||
else:
|
||||
try:
|
||||
record_dict['msg'] = record.getMessage()
|
||||
except (ValueError, TypeError):
|
||||
record_dict['msg'] = [record.msg] + list(record.args)
|
||||
|
||||
record_dict['ctx'] = self.swaglogger.get_ctx()
|
||||
|
||||
if record.exc_info:
|
||||
record_dict['exc_info'] = self.formatException(record.exc_info)
|
||||
|
||||
record_dict['level'] = record.levelname
|
||||
record_dict['levelnum'] = record.levelno
|
||||
record_dict['name'] = record.name
|
||||
record_dict['filename'] = record.filename
|
||||
record_dict['lineno'] = record.lineno
|
||||
record_dict['pathname'] = record.pathname
|
||||
record_dict['module'] = record.module
|
||||
record_dict['funcName'] = record.funcName
|
||||
record_dict['host'] = self.host
|
||||
record_dict['process'] = record.process
|
||||
record_dict['thread'] = record.thread
|
||||
record_dict['threadName'] = record.threadName
|
||||
record_dict['created'] = record.created
|
||||
|
||||
return record_dict
|
||||
|
||||
def format(self, record):
|
||||
if self.swaglogger is None:
|
||||
raise Exception("must set swaglogger before calling format()")
|
||||
return json_robust_dumps(self.format_dict(record))
|
||||
|
||||
class SwagLogFileFormatter(SwagFormatter):
|
||||
def fix_kv(self, k, v):
|
||||
# append type to names to preserve legacy naming in logs
|
||||
# avoids overlapping key namespaces with different types
|
||||
# e.g. log.info() creates 'msg' -> 'msg$s'
|
||||
# log.event() creates 'msg.health.logMonoTime' -> 'msg.health.logMonoTime$i'
|
||||
# because overlapping namespace 'msg' caused problems
|
||||
if isinstance(v, (str, bytes)):
|
||||
k += "$s"
|
||||
elif isinstance(v, float):
|
||||
k += "$f"
|
||||
elif isinstance(v, bool):
|
||||
k += "$b"
|
||||
elif isinstance(v, int):
|
||||
k += "$i"
|
||||
elif isinstance(v, dict):
|
||||
nv = {}
|
||||
for ik, iv in v.items():
|
||||
ik, iv = self.fix_kv(ik, iv)
|
||||
nv[ik] = iv
|
||||
v = nv
|
||||
elif isinstance(v, list):
|
||||
k += "$a"
|
||||
return k, v
|
||||
|
||||
def format(self, record):
|
||||
if isinstance(record, str):
|
||||
v = json.loads(record)
|
||||
else:
|
||||
v = self.format_dict(record)
|
||||
|
||||
mk, mv = self.fix_kv('msg', v['msg'])
|
||||
del v['msg']
|
||||
v[mk] = mv
|
||||
v['id'] = uuid.uuid4().hex
|
||||
|
||||
return json_robust_dumps(v)
|
||||
|
||||
class SwagErrorFilter(logging.Filter):
|
||||
def filter(self, record):
|
||||
return record.levelno < logging.ERROR
|
||||
|
||||
def _tmpfunc():
|
||||
return 0
|
||||
|
||||
def _srcfile():
|
||||
return os.path.normcase(_tmpfunc.__code__.co_filename)
|
||||
|
||||
class SwagLogger(logging.Logger):
|
||||
def __init__(self):
|
||||
logging.Logger.__init__(self, "swaglog")
|
||||
|
||||
self.global_ctx = {}
|
||||
|
||||
self.log_local = local()
|
||||
self.log_local.ctx = {}
|
||||
|
||||
def local_ctx(self):
|
||||
try:
|
||||
return self.log_local.ctx
|
||||
except AttributeError:
|
||||
self.log_local.ctx = {}
|
||||
return self.log_local.ctx
|
||||
|
||||
def get_ctx(self):
|
||||
return dict(self.local_ctx(), **self.global_ctx)
|
||||
|
||||
@contextmanager
|
||||
def ctx(self, **kwargs):
|
||||
old_ctx = self.local_ctx()
|
||||
self.log_local.ctx = copy.copy(old_ctx) or {}
|
||||
self.log_local.ctx.update(kwargs)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.log_local.ctx = old_ctx
|
||||
|
||||
def bind(self, **kwargs):
|
||||
self.local_ctx().update(kwargs)
|
||||
|
||||
def bind_global(self, **kwargs):
|
||||
self.global_ctx.update(kwargs)
|
||||
|
||||
def event(self, event, *args, **kwargs):
|
||||
evt = NiceOrderedDict()
|
||||
evt['event'] = event
|
||||
if args:
|
||||
evt['args'] = args
|
||||
evt.update(kwargs)
|
||||
if 'error' in kwargs:
|
||||
self.error(evt)
|
||||
elif 'debug' in kwargs:
|
||||
self.debug(evt)
|
||||
else:
|
||||
self.info(evt)
|
||||
|
||||
def timestamp(self, event_name):
|
||||
if LOG_TIMESTAMPS:
|
||||
t = time.monotonic()
|
||||
tstp = NiceOrderedDict()
|
||||
tstp['timestamp'] = NiceOrderedDict()
|
||||
tstp['timestamp']["event"] = event_name
|
||||
tstp['timestamp']["time"] = t*1e9
|
||||
self.debug(tstp)
|
||||
|
||||
def findCaller(self, stack_info=False, stacklevel=1):
|
||||
"""
|
||||
Find the stack frame of the caller so that we can note the source
|
||||
file name, line number and function name.
|
||||
"""
|
||||
f = sys._getframe(3)
|
||||
#On some versions of IronPython, currentframe() returns None if
|
||||
#IronPython isn't run with -X:Frames.
|
||||
if f is not None:
|
||||
f = f.f_back
|
||||
orig_f = f
|
||||
while f and stacklevel > 1:
|
||||
f = f.f_back
|
||||
stacklevel -= 1
|
||||
if not f:
|
||||
f = orig_f
|
||||
rv = "(unknown file)", 0, "(unknown function)", None
|
||||
while hasattr(f, "f_code"):
|
||||
co = f.f_code
|
||||
filename = os.path.normcase(co.co_filename)
|
||||
|
||||
# TODO: is this pylint exception correct?
|
||||
if filename == _srcfile():
|
||||
f = f.f_back
|
||||
continue
|
||||
sinfo = None
|
||||
if stack_info:
|
||||
sio = io.StringIO()
|
||||
sio.write('Stack (most recent call last):\n')
|
||||
traceback.print_stack(f, file=sio)
|
||||
sinfo = sio.getvalue()
|
||||
if sinfo[-1] == '\n':
|
||||
sinfo = sinfo[:-1]
|
||||
sio.close()
|
||||
rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
|
||||
break
|
||||
return rv
|
||||
|
||||
if __name__ == "__main__":
|
||||
log = SwagLogger()
|
||||
|
||||
stdout_handler = logging.StreamHandler(sys.stdout)
|
||||
stdout_handler.setLevel(logging.INFO)
|
||||
stdout_handler.addFilter(SwagErrorFilter())
|
||||
log.addHandler(stdout_handler)
|
||||
|
||||
stderr_handler = logging.StreamHandler(sys.stderr)
|
||||
stderr_handler.setLevel(logging.ERROR)
|
||||
log.addHandler(stderr_handler)
|
||||
|
||||
log.info("asdasd %s", "a")
|
||||
log.info({'wut': 1})
|
||||
log.warning("warning")
|
||||
log.error("error")
|
||||
log.critical("critical")
|
||||
log.event("test", x="y")
|
||||
|
||||
with log.ctx():
|
||||
stdout_handler.setFormatter(SwagFormatter(log))
|
||||
stderr_handler.setFormatter(SwagFormatter(log))
|
||||
log.bind(user="some user")
|
||||
log.info("in req")
|
||||
print("")
|
||||
log.warning("warning")
|
||||
print("")
|
||||
log.error("error")
|
||||
print("")
|
||||
log.critical("critical")
|
||||
print("")
|
||||
log.event("do_req", a=1, b="c")
|
||||
45
common/markdown.py
Normal file
45
common/markdown.py
Normal file
@@ -0,0 +1,45 @@
|
||||
HTML_REPLACEMENTS = [
|
||||
(r'&', r'&'),
|
||||
(r'"', r'"'),
|
||||
]
|
||||
|
||||
def parse_markdown(text: str, tab_length: int = 2) -> str:
|
||||
lines = text.split("\n")
|
||||
output: list[str] = []
|
||||
list_level = 0
|
||||
|
||||
def end_outstanding_lists(level: int, end_level: int) -> int:
|
||||
while level > end_level:
|
||||
level -= 1
|
||||
output.append("</ul>")
|
||||
if level > 0:
|
||||
output.append("</li>")
|
||||
return end_level
|
||||
|
||||
for i, line in enumerate(lines):
|
||||
if i + 1 < len(lines) and lines[i + 1].startswith("==="): # heading
|
||||
output.append(f"<h1>{line}</h1>")
|
||||
elif line.startswith("==="):
|
||||
pass
|
||||
elif line.lstrip().startswith("* "): # list
|
||||
line_level = 1 + line.count(" " * tab_length, 0, line.index("*"))
|
||||
if list_level >= line_level:
|
||||
list_level = end_outstanding_lists(list_level, line_level)
|
||||
else:
|
||||
list_level += 1
|
||||
if list_level > 1:
|
||||
output[-1] = output[-1].replace("</li>", "")
|
||||
output.append("<ul>")
|
||||
output.append(f"<li>{line.replace('*', '', 1).lstrip()}</li>")
|
||||
else:
|
||||
list_level = end_outstanding_lists(list_level, 0)
|
||||
if len(line) > 0:
|
||||
output.append(line)
|
||||
|
||||
end_outstanding_lists(list_level, 0)
|
||||
output_str = "\n".join(output) + "\n"
|
||||
|
||||
for (fr, to) in HTML_REPLACEMENTS:
|
||||
output_str = output_str.replace(fr, to)
|
||||
|
||||
return output_str
|
||||
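parse_markdown above handles only "===" underlined headings and "*" lists; a minimal example (input string is made up):

from openpilot.common.markdown import parse_markdown

text = "Release notes\n===\n* first item\n  * nested item\n* second item"
print(parse_markdown(text))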
85
common/mat.h
Normal file
85
common/mat.h
Normal file
@@ -0,0 +1,85 @@
|
||||
#pragma once
|
||||
|
||||
typedef struct vec3 {
|
||||
float v[3];
|
||||
} vec3;
|
||||
|
||||
typedef struct vec4 {
|
||||
float v[4];
|
||||
} vec4;
|
||||
|
||||
typedef struct mat3 {
|
||||
float v[3*3];
|
||||
} mat3;
|
||||
|
||||
typedef struct mat4 {
|
||||
float v[4*4];
|
||||
} mat4;
|
||||
|
||||
static inline mat3 matmul3(const mat3 &a, const mat3 &b) {
|
||||
mat3 ret = {{0.0}};
|
||||
for (int r=0; r<3; r++) {
|
||||
for (int c=0; c<3; c++) {
|
||||
float v = 0.0;
|
||||
for (int k=0; k<3; k++) {
|
||||
v += a.v[r*3+k] * b.v[k*3+c];
|
||||
}
|
||||
ret.v[r*3+c] = v;
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
static inline vec3 matvecmul3(const mat3 &a, const vec3 &b) {
|
||||
vec3 ret = {{0.0}};
|
||||
for (int r=0; r<3; r++) {
|
||||
for (int c=0; c<3; c++) {
|
||||
ret.v[r] += a.v[r*3+c] * b.v[c];
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
static inline mat4 matmul(const mat4 &a, const mat4 &b) {
|
||||
mat4 ret = {{0.0}};
|
||||
for (int r=0; r<4; r++) {
|
||||
for (int c=0; c<4; c++) {
|
||||
float v = 0.0;
|
||||
for (int k=0; k<4; k++) {
|
||||
v += a.v[r*4+k] * b.v[k*4+c];
|
||||
}
|
||||
ret.v[r*4+c] = v;
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
static inline vec4 matvecmul(const mat4 &a, const vec4 &b) {
|
||||
vec4 ret = {{0.0}};
|
||||
for (int r=0; r<4; r++) {
|
||||
for (int c=0; c<4; c++) {
|
||||
ret.v[r] += a.v[r*4+c] * b.v[c];
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
// scales the input and output space of a transformation matrix
|
||||
// that assumes pixel-center origin.
|
||||
static inline mat3 transform_scale_buffer(const mat3 &in, float s) {
|
||||
// in_pt = ( transform(out_pt/s + 0.5) - 0.5) * s
|
||||
|
||||
mat3 transform_out = {{
|
||||
1.0f/s, 0.0f, 0.5f,
|
||||
0.0f, 1.0f/s, 0.5f,
|
||||
0.0f, 0.0f, 1.0f,
|
||||
}};
|
||||
|
||||
mat3 transform_in = {{
|
||||
s, 0.0f, -0.5f*s,
|
||||
0.0f, s, -0.5f*s,
|
||||
0.0f, 0.0f, 1.0f,
|
||||
}};
|
||||
|
||||
return matmul3(transform_in, matmul3(in, transform_out));
|
||||
}
|
||||
51
common/mock/__init__.py
Normal file
51
common/mock/__init__.py
Normal file
@@ -0,0 +1,51 @@
|
||||
"""
|
||||
Utilities for generating mock messages for testing.
|
||||
example in common/tests/test_mock.py
|
||||
"""
|
||||
|
||||
|
||||
import functools
|
||||
import threading
|
||||
from cereal.messaging import PubMaster
|
||||
from cereal.services import SERVICE_LIST
|
||||
from openpilot.common.mock.generators import generate_livePose, generate_liveLocationKalman
|
||||
from openpilot.common.realtime import Ratekeeper
|
||||
|
||||
|
||||
MOCK_GENERATOR = {
|
||||
"livePose": generate_livePose,
|
||||
"liveLocationKalman": generate_liveLocationKalman
|
||||
}
|
||||
|
||||
|
||||
def generate_messages_loop(services: list[str], done: threading.Event):
|
||||
pm = PubMaster(services)
|
||||
rk = Ratekeeper(100)
|
||||
i = 0
|
||||
while not done.is_set():
|
||||
for s in services:
|
||||
should_send = i % (100/SERVICE_LIST[s].frequency) == 0
|
||||
if should_send:
|
||||
message = MOCK_GENERATOR[s]()
|
||||
pm.send(s, message)
|
||||
i += 1
|
||||
rk.keep_time()
|
||||
|
||||
|
||||
def mock_messages(services: list[str] | str):
|
||||
if isinstance(services, str):
|
||||
services = [services]
|
||||
|
||||
def decorator(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
done = threading.Event()
|
||||
t = threading.Thread(target=generate_messages_loop, args=(services, done))
|
||||
t.start()
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
finally:
|
||||
done.set()
|
||||
t.join()
|
||||
return wrapper
|
||||
return decorator
|
||||
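A hedged sketch of the mock_messages decorator above, feeding fake livePose packets to code under test. The test body is a placeholder; only "livePose" and "liveLocationKalman" have generators in MOCK_GENERATOR.

import cereal.messaging as messaging
from openpilot.common.mock import mock_messages

@mock_messages(["livePose"])
def test_consumes_live_pose():
  sm = messaging.SubMaster(["livePose"])
  sm.update(1000)
  assert sm.updated["livePose"]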
33
common/mock/generators.py
Normal file
33
common/mock/generators.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from cereal import messaging
|
||||
|
||||
|
||||
LOCATION1 = (32.7174, -117.16277)
|
||||
LOCATION2 = (32.7558, -117.2037)
|
||||
|
||||
LLK_DECIMATION = 10
|
||||
RENDER_FRAMES = 15
|
||||
DEFAULT_ITERATIONS = RENDER_FRAMES * LLK_DECIMATION
|
||||
|
||||
|
||||
def generate_liveLocationKalman(location=LOCATION1):
|
||||
msg = messaging.new_message('liveLocationKalman')
|
||||
msg.liveLocationKalman.positionGeodetic = {'value': [*location, 0], 'std': [0., 0., 0.], 'valid': True}
|
||||
msg.liveLocationKalman.positionECEF = {'value': [0., 0., 0.], 'std': [0., 0., 0.], 'valid': True}
|
||||
msg.liveLocationKalman.calibratedOrientationNED = {'value': [0., 0., 0.], 'std': [0., 0., 0.], 'valid': True}
|
||||
msg.liveLocationKalman.velocityCalibrated = {'value': [0., 0., 0.], 'std': [0., 0., 0.], 'valid': True}
|
||||
msg.liveLocationKalman.status = 'valid'
|
||||
msg.liveLocationKalman.gpsOK = True
|
||||
return msg
|
||||
|
||||
|
||||
def generate_livePose():
|
||||
msg = messaging.new_message('livePose')
|
||||
meas = {'x': 0.0, 'y': 0.0, 'z': 0.0, 'xStd': 0.0, 'yStd': 0.0, 'zStd': 0.0, 'valid': True}
|
||||
msg.livePose.orientationNED = meas
|
||||
msg.livePose.velocityDevice = meas
|
||||
msg.livePose.angularVelocityDevice = meas
|
||||
msg.livePose.accelerationDevice = meas
|
||||
msg.livePose.inputsOK = True
|
||||
msg.livePose.posenetOK = True
|
||||
msg.livePose.sensorsOK = True
|
||||
return msg
|
||||
92
common/params.h
Normal file
92
common/params.h
Normal file
@@ -0,0 +1,92 @@
|
||||
#pragma once
|
||||
|
||||
#include <future>
|
||||
#include <map>
|
||||
#include <string>
|
||||
#include <tuple>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
#include "common/queue.h"
|
||||
|
||||
enum ParamKeyType {
|
||||
PERSISTENT = 0x02,
|
||||
CLEAR_ON_MANAGER_START = 0x04,
|
||||
CLEAR_ON_ONROAD_TRANSITION = 0x08,
|
||||
CLEAR_ON_OFFROAD_TRANSITION = 0x10,
|
||||
DONT_LOG = 0x20,
|
||||
DEVELOPMENT_ONLY = 0x40,
|
||||
ALL = 0xFFFFFFFF
|
||||
};
|
||||
|
||||
class Params {
|
||||
public:
|
||||
explicit Params(const std::string &path = {});
|
||||
~Params();
|
||||
// Not copyable.
|
||||
Params(const Params&) = delete;
|
||||
Params& operator=(const Params&) = delete;
|
||||
|
||||
std::vector<std::string> allKeys() const;
|
||||
bool checkKey(const std::string &key);
|
||||
ParamKeyType getKeyType(const std::string &key);
|
||||
inline std::string getParamPath(const std::string &key = {}) {
|
||||
return params_path + params_prefix + (key.empty() ? "" : "/" + key);
|
||||
}
|
||||
|
||||
// Delete a value
|
||||
int remove(const std::string &key);
|
||||
void clearAll(ParamKeyType type);
|
||||
|
||||
// helpers for reading values
|
||||
std::string get(const std::string &key, bool block = false);
|
||||
inline bool getBool(const std::string &key, bool block = false) {
|
||||
return get(key, block) == "1";
|
||||
}
|
||||
inline int getInt(const std::string &key, bool block = false) {
|
||||
std::string value = get(key, block);
|
||||
return value.empty() ? 0 : std::stoi(value);
|
||||
}
|
||||
inline float getFloat(const std::string &key, bool block = false) {
|
||||
std::string value = get(key, block);
|
||||
return value.empty() ? 0.0 : std::stof(value);
|
||||
}
|
||||
std::map<std::string, std::string> readAll();
|
||||
|
||||
// helpers for writing values
|
||||
int put(const char *key, const char *val, size_t value_size);
|
||||
inline int put(const std::string &key, const std::string &val) {
|
||||
return put(key.c_str(), val.data(), val.size());
|
||||
}
|
||||
inline int putBool(const std::string &key, bool val) {
|
||||
return put(key.c_str(), val ? "1" : "0", 1);
|
||||
}
|
||||
inline int putInt(const std::string &key, int val) {
|
||||
return put(key.c_str(), std::to_string(val).c_str(), std::to_string(val).size());
|
||||
}
|
||||
inline int putFloat(const std::string &key, float val) {
|
||||
return put(key.c_str(), std::to_string(val).c_str(), std::to_string(val).size());
|
||||
}
|
||||
void putNonBlocking(const std::string &key, const std::string &val);
|
||||
inline void putBoolNonBlocking(const std::string &key, bool val) {
|
||||
putNonBlocking(key, val ? "1" : "0");
|
||||
}
|
||||
void putIntNonBlocking(const std::string &key, const std::string &val);
|
||||
inline void putIntNonBlocking(const std::string &key, int val) {
|
||||
putNonBlocking(key, std::to_string(val));
|
||||
}
|
||||
void putFloatNonBlocking(const std::string &key, const std::string &val);
|
||||
inline void putFloatNonBlocking(const std::string &key, float val) {
|
||||
putNonBlocking(key, std::to_string(val));
|
||||
}
|
||||
|
||||
private:
|
||||
void asyncWriteThread();
|
||||
|
||||
std::string params_path;
|
||||
std::string params_prefix;
|
||||
|
||||
// for nonblocking write
|
||||
std::future<void> future;
|
||||
SafeQueue<std::pair<std::string, std::string>> queue;
|
||||
};
|
||||
18
common/params.py
Normal file
18
common/params.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from openpilot.common.params_pyx import Params, ParamKeyType, UnknownKeyName
|
||||
assert Params
|
||||
assert ParamKeyType
|
||||
assert UnknownKeyName
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
|
||||
params = Params()
|
||||
key = sys.argv[1]
|
||||
assert params.check_key(key), f"unknown param: {key}"
|
||||
|
||||
if len(sys.argv) == 3:
|
||||
val = sys.argv[2]
|
||||
print(f"SET: {key} = {val}")
|
||||
params.put(key, val)
|
||||
elif len(sys.argv) == 2:
|
||||
print(f"GET: {key} = {params.get(key)}")
|
||||
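A short usage sketch of the Params API exposed above (and implemented in params_pyx.pyx later in this commit). The keys used here are defined in params_keys.h below; the dongle id value is made up.

from openpilot.common.params import Params

params = Params()
params.put_bool("IsMetric", True)                       # blocking write to disk
assert params.get_bool("IsMetric")
dongle_id = params.get("DongleId", encoding="utf8")     # returns None when the key is unset
params.put_nonblocking("DongleId", "0123456789abcdef")  # queued write, returns immediately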
335
common/params_keys.h
Normal file
335
common/params_keys.h
Normal file
@@ -0,0 +1,335 @@
|
||||
#pragma once
|
||||
|
||||
#include <string>
|
||||
#include <unordered_map>
|
||||
|
||||
inline static std::unordered_map<std::string, uint32_t> keys = {
|
||||
{"AccessToken", CLEAR_ON_MANAGER_START | DONT_LOG},
|
||||
{"AdbEnabled", PERSISTENT},
|
||||
{"AlwaysOnDM", PERSISTENT},
|
||||
{"ApiCache_Device", PERSISTENT},
|
||||
{"ApiCache_FirehoseStats", PERSISTENT},
|
||||
{"AssistNowToken", PERSISTENT},
|
||||
{"AthenadPid", PERSISTENT},
|
||||
{"AthenadUploadQueue", PERSISTENT},
|
||||
{"AthenadRecentlyViewedRoutes", PERSISTENT},
|
||||
{"BootCount", PERSISTENT},
|
||||
{"CalibrationParams", PERSISTENT},
|
||||
{"CameraDebugExpGain", CLEAR_ON_MANAGER_START},
|
||||
{"CameraDebugExpTime", CLEAR_ON_MANAGER_START},
|
||||
{"CarBatteryCapacity", PERSISTENT},
|
||||
{"CarParams", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"CarParamsCache", CLEAR_ON_MANAGER_START},
|
||||
{"CarParamsPersistent", PERSISTENT},
|
||||
{"CarParamsPrevRoute", PERSISTENT},
|
||||
{"CompletedTrainingVersion", PERSISTENT},
|
||||
{"ControlsReady", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"CurrentBootlog", PERSISTENT},
|
||||
{"CurrentRoute", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"DisableLogging", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"DisablePowerDown", PERSISTENT},
|
||||
{"DisableUpdates", PERSISTENT},
|
||||
{"DisengageOnAccelerator", PERSISTENT},
|
||||
{"DongleId", PERSISTENT},
|
||||
{"DoReboot", CLEAR_ON_MANAGER_START},
|
||||
{"DoShutdown", CLEAR_ON_MANAGER_START},
|
||||
{"DoUninstall", CLEAR_ON_MANAGER_START},
|
||||
{"DriverTooDistracted", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"AlphaLongitudinalEnabled", PERSISTENT | DEVELOPMENT_ONLY},
|
||||
{"ExperimentalMode", PERSISTENT},
|
||||
{"ExperimentalModeConfirmed", PERSISTENT},
|
||||
{"FirmwareQueryDone", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"ForcePowerDown", PERSISTENT},
|
||||
{"GitBranch", PERSISTENT},
|
||||
{"GitCommit", PERSISTENT},
|
||||
{"GitCommitDate", PERSISTENT},
|
||||
{"GitDiff", PERSISTENT},
|
||||
{"GithubSshKeys", PERSISTENT},
|
||||
{"GithubUsername", PERSISTENT},
|
||||
{"GitRemote", PERSISTENT},
|
||||
{"GsmApn", PERSISTENT},
|
||||
{"GsmMetered", PERSISTENT},
|
||||
{"GsmRoaming", PERSISTENT},
|
||||
{"HardwareSerial", PERSISTENT},
|
||||
{"HasAcceptedTerms", PERSISTENT},
|
||||
{"InstallDate", PERSISTENT},
|
||||
{"IsDriverViewEnabled", CLEAR_ON_MANAGER_START},
|
||||
{"IsEngaged", PERSISTENT},
|
||||
{"IsLdwEnabled", PERSISTENT},
|
||||
{"IsMetric", PERSISTENT},
|
||||
{"IsOffroad", CLEAR_ON_MANAGER_START},
|
||||
{"IsOnroad", PERSISTENT},
|
||||
{"IsRhdDetected", PERSISTENT},
|
||||
{"IsReleaseBranch", CLEAR_ON_MANAGER_START},
|
||||
{"IsTakingSnapshot", CLEAR_ON_MANAGER_START},
|
||||
{"IsTestedBranch", CLEAR_ON_MANAGER_START},
|
||||
{"JoystickDebugMode", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"LanguageSetting", PERSISTENT},
|
||||
{"LastAthenaPingTime", CLEAR_ON_MANAGER_START},
|
||||
{"LastGPSPosition", PERSISTENT},
|
||||
{"LastManagerExitReason", CLEAR_ON_MANAGER_START},
|
||||
{"LastOffroadStatusPacket", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"LastPowerDropDetected", CLEAR_ON_MANAGER_START},
|
||||
{"LastUpdateException", CLEAR_ON_MANAGER_START},
|
||||
{"LastUpdateTime", PERSISTENT},
|
||||
{"LiveDelay", PERSISTENT},
|
||||
{"LiveParameters", PERSISTENT},
|
||||
{"LiveParametersV2", PERSISTENT},
|
||||
{"LiveTorqueParameters", PERSISTENT | DONT_LOG},
|
||||
{"LocationFilterInitialState", PERSISTENT},
|
||||
{"LongitudinalManeuverMode", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"LongitudinalPersonality", PERSISTENT},
|
||||
{"NetworkMetered", PERSISTENT},
|
||||
{"ObdMultiplexingChanged", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"ObdMultiplexingEnabled", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"Offroad_BadNvme", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_CarUnrecognized", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"Offroad_ConnectivityNeeded", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_ConnectivityNeededPrompt", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_IsTakingSnapshot", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_NeosUpdate", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_NoFirmware", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"Offroad_Recalibration", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"Offroad_StorageMissing", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_TemperatureTooHigh", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_UnofficialHardware", CLEAR_ON_MANAGER_START},
|
||||
{"Offroad_UpdateFailed", CLEAR_ON_MANAGER_START},
|
||||
{"OpenpilotEnabledToggle", PERSISTENT},
|
||||
{"PandaHeartbeatLost", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"PandaSomResetTriggered", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"PandaSignatures", CLEAR_ON_MANAGER_START},
|
||||
{"PrimeType", PERSISTENT},
|
||||
{"RecordAudio", PERSISTENT},
|
||||
{"RecordFront", PERSISTENT},
|
||||
{"RecordFrontLock", PERSISTENT}, // for the internal fleet
|
||||
{"SecOCKey", PERSISTENT | DONT_LOG},
|
||||
{"RouteCount", PERSISTENT},
|
||||
{"SnoozeUpdate", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"SshEnabled", PERSISTENT},
|
||||
{"TermsVersion", PERSISTENT},
|
||||
{"TrainingVersion", PERSISTENT},
|
||||
{"UbloxAvailable", PERSISTENT},
|
||||
{"UpdateAvailable", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
|
||||
{"UpdateFailedCount", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterAvailableBranches", PERSISTENT},
|
||||
{"UpdaterCurrentDescription", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterCurrentReleaseNotes", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterFetchAvailable", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterNewDescription", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterNewReleaseNotes", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterState", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterTargetBranch", CLEAR_ON_MANAGER_START},
|
||||
{"UpdaterLastFetchTime", PERSISTENT},
|
||||
{"Version", PERSISTENT},
|
||||
|
||||
{"ForceOffroad", CLEAR_ON_MANAGER_START},
|
||||
{"BydModifiedStockLong", PERSISTENT},
|
||||
{"AlwaysOnLKAS", PERSISTENT},
|
||||
{"BydAutoTuning", PERSISTENT},
|
||||
{"BydLatUseSiglin", PERSISTENT},
|
||||
{"CameraOffset", PERSISTENT},
|
||||
{"UseRedPanda", PERSISTENT},
|
||||
{"UseSteerRateLimiter", PERSISTENT},
|
||||
{"SteerRateLimLoSpd", PERSISTENT},
|
||||
{"SteerRateLimHiSpd", PERSISTENT},
|
||||
{"KeepLkasPassive", PERSISTENT},
|
||||
{"BydMpcTsr", PERSISTENT},
|
||||
{"BydLowSpdLong", PERSISTENT},
|
||||
{"SpeedCorrect30", PERSISTENT},
|
||||
{"SpeedCorrect60", PERSISTENT},
|
||||
{"SpeedCorrect90", PERSISTENT},
|
||||
{"SpeedCorrect120", PERSISTENT},
|
||||
{"AuthExpireDate", PERSISTENT},
|
||||
{"DeviceAuthKey", PERSISTENT},
|
||||
{"BydBsdType2", PERSISTENT},
|
||||
|
||||
{"LateralAngleSpdUp0", PERSISTENT},
|
||||
{"LateralAngleSpdDn0", PERSISTENT},
|
||||
{"LateralAngleSpdBp1", PERSISTENT},
|
||||
{"LateralAngleSpdUp1", PERSISTENT},
|
||||
{"LateralAngleSpdDn1", PERSISTENT},
|
||||
{"LateralAngleSpdBp2", PERSISTENT},
|
||||
{"LateralAngleSpdUp2", PERSISTENT},
|
||||
{"LateralAngleSpdDn2", PERSISTENT},
|
||||
{"LateralAngleTorqMax", PERSISTENT},
|
||||
{"LateralAngleTorqCut", PERSISTENT},
|
||||
|
||||
// carrot
|
||||
{"LongitudinalPersonalityMax", PERSISTENT},
|
||||
{"NetworkAddress", CLEAR_ON_MANAGER_START},
|
||||
|
||||
{"ApiCache_NavDestinations", PERSISTENT},
|
||||
{"NavDestination", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"NavDestinationWaypoints", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION},
|
||||
{"NavPastDestinations", PERSISTENT},
|
||||
{"NavSettingLeftSide", PERSISTENT},
|
||||
{"NavSettingTime24h", PERSISTENT},
|
||||
{"MapboxStyle", PERSISTENT },
|
||||
{"MapboxPublicKey", PERSISTENT},
|
||||
{"MapboxSecretKey", PERSISTENT},
|
||||
{"GMapKey", PERSISTENT},
|
||||
{"SearchInput", PERSISTENT},
|
||||
|
||||
{"CarSelected3", PERSISTENT},
|
||||
{"SupportedCars", PERSISTENT},
|
||||
{"SupportedCars_gm", PERSISTENT},
|
||||
{"ShowDebugUI", PERSISTENT},
|
||||
{"ShowTpms", PERSISTENT},
|
||||
{"ShowDateTime", PERSISTENT},
|
||||
{"ShowPathEnd", PERSISTENT},
|
||||
{"ShowCustomBrightness", PERSISTENT},
|
||||
{"ShowLaneInfo", PERSISTENT},
|
||||
{"ShowRadarInfo", PERSISTENT},
|
||||
{"ShowDeviceState", PERSISTENT},
|
||||
{"ShowRouteInfo", PERSISTENT },
|
||||
{"ShowPathMode", PERSISTENT},
|
||||
{"ShowPathColor", PERSISTENT},
|
||||
{"ShowPathColorCruiseOff", PERSISTENT},
|
||||
{"ShowPathModeLane", PERSISTENT},
|
||||
{"ShowPathColorLane", PERSISTENT},
|
||||
{"ShowPlotMode", PERSISTENT},
|
||||
{"RecordRoadCam", PERSISTENT },
|
||||
{"HDPuse", PERSISTENT },
|
||||
|
||||
{"AutoCruiseControl", PERSISTENT},
|
||||
{"CruiseEcoControl", PERSISTENT},
|
||||
{"CarrotCruiseDecel", PERSISTENT},
|
||||
{"CarrotCruiseAtcDecel", PERSISTENT},
|
||||
{"CommaLongAcc", PERSISTENT},
|
||||
{"AutoGasTokSpeed", PERSISTENT},
|
||||
{"AutoGasSyncSpeed", PERSISTENT},
|
||||
{"AutoEngage", PERSISTENT},
|
||||
{"DisableMinSteerSpeed", PERSISTENT},
|
||||
{"AutoCurveSpeedLowerLimit", PERSISTENT},
|
||||
{"AutoCurveSpeedFactor", PERSISTENT},
|
||||
{"AutoCurveSpeedAggressiveness", PERSISTENT},
|
||||
{"AutoTurnControl", PERSISTENT},
|
||||
{"AutoTurnControlSpeedTurn", PERSISTENT},
|
||||
{"AutoTurnControlTurnEnd", PERSISTENT},
|
||||
{"AutoTurnMapChange", PERSISTENT },
|
||||
{"AutoNaviSpeedCtrlEnd", PERSISTENT},
|
||||
{"AutoNaviSpeedCtrlMode", PERSISTENT},
|
||||
{"AutoRoadSpeedLimitOffset", PERSISTENT},
|
||||
{"AutoNaviSpeedBumpTime", PERSISTENT},
|
||||
{"AutoNaviSpeedBumpSpeed", PERSISTENT},
|
||||
{"AutoNaviSpeedDecelRate", PERSISTENT},
|
||||
{"AutoNaviSpeedSafetyFactor", PERSISTENT},
|
||||
{"AutoNaviCountDownMode", PERSISTENT},
|
||||
{"TurnSpeedControlMode", PERSISTENT},
|
||||
{"CarrotSmartSpeedControl", PERSISTENT},
|
||||
{"MapTurnSpeedFactor", PERSISTENT},
|
||||
{"ModelTurnSpeedFactor", PERSISTENT},
|
||||
{"StoppingAccel", PERSISTENT},
|
||||
{"AutoSpeedUptoRoadSpeedLimit", PERSISTENT},
|
||||
{"AutoRoadSpeedAdjust", PERSISTENT},
|
||||
{"StopDistanceCarrot", PERSISTENT},
|
||||
{"ComfortBrake", PERSISTENT},
|
||||
{"JLeadFactor3", PERSISTENT},
|
||||
{"CruiseButtonMode", PERSISTENT},
|
||||
{"CancelButtonMode", PERSISTENT},
|
||||
{"LfaButtonMode", PERSISTENT},
|
||||
{"CruiseButtonTest1", PERSISTENT},
|
||||
{"CruiseButtonTest2", PERSISTENT},
|
||||
{"CruiseButtonTest3", PERSISTENT},
|
||||
{"CruiseSpeedUnit", PERSISTENT},
|
||||
{"CruiseSpeedUnitBasic", PERSISTENT},
|
||||
{"CruiseSpeed1", PERSISTENT},
|
||||
{"CruiseSpeed2", PERSISTENT},
|
||||
{"CruiseSpeed3", PERSISTENT},
|
||||
{"CruiseSpeed4", PERSISTENT},
|
||||
{"CruiseSpeed5", PERSISTENT},
|
||||
{"PaddleMode", PERSISTENT},
|
||||
{"MyDrivingMode", PERSISTENT},
|
||||
{"MyDrivingModeAuto", PERSISTENT},
|
||||
{"TrafficLightDetectMode", PERSISTENT},
|
||||
{"SteerActuatorDelay", PERSISTENT},
|
||||
{"LatSmoothSec", PERSISTENT},
|
||||
{"CruiseOnDist", PERSISTENT},
|
||||
{"CruiseMaxVals0", PERSISTENT},
|
||||
{"CruiseMaxVals1", PERSISTENT},
|
||||
{"CruiseMaxVals2", PERSISTENT},
|
||||
{"CruiseMaxVals3", PERSISTENT},
|
||||
{"CruiseMaxVals4", PERSISTENT},
|
||||
{"CruiseMaxVals5", PERSISTENT},
|
||||
{"CruiseMaxVals6", PERSISTENT},
|
||||
{"LongTuningKpV", PERSISTENT},
|
||||
{"LongTuningKiV", PERSISTENT},
|
||||
{"LongTuningKf", PERSISTENT},
|
||||
{"LongActuatorDelay", PERSISTENT },
|
||||
{"VEgoStopping", PERSISTENT },
|
||||
{"RadarReactionFactor", PERSISTENT},
|
||||
{"EnableRadarTracks", PERSISTENT},
|
||||
{"RadarLatFactor", PERSISTENT},
|
||||
{"EnableCornerRadar", PERSISTENT},
|
||||
{"EnableRadarTracksResult", PERSISTENT | CLEAR_ON_MANAGER_START},
|
||||
{"CanParserResult", CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION },
|
||||
{"HotspotOnBoot", PERSISTENT},
|
||||
{"SoftwareMenu", PERSISTENT},
|
||||
{"HyundaiCameraSCC", PERSISTENT},
|
||||
{"FingerPrints", PERSISTENT | CLEAR_ON_MANAGER_START},
|
||||
{"IsLdwsCar", PERSISTENT},
|
||||
{"CanfdHDA2", PERSISTENT},
|
||||
{"CanfdDebug", PERSISTENT},
|
||||
{"SoundVolumeAdjust", PERSISTENT},
|
||||
{"SoundVolumeAdjustEngage", PERSISTENT},
|
||||
{"TFollowGap1", PERSISTENT},
|
||||
{"TFollowGap2", PERSISTENT},
|
||||
{"TFollowGap3", PERSISTENT},
|
||||
{"TFollowGap4", PERSISTENT},
|
||||
{"DynamicTFollow", PERSISTENT},
|
||||
{"DynamicTFollowLC", PERSISTENT},
|
||||
{"AChangeCostStarting", PERSISTENT},
|
||||
{"TrafficStopDistanceAdjust", PERSISTENT},
|
||||
{"HapticFeedbackWhenSpeedCamera", PERSISTENT},
|
||||
{"UseLaneLineSpeed", PERSISTENT},
|
||||
{"UseLaneLineCurveSpeed", PERSISTENT},
|
||||
{"AdjustLaneOffset", PERSISTENT},
|
||||
{"LaneChangeNeedTorque", PERSISTENT},
|
||||
{"LaneChangeDelay", PERSISTENT },
|
||||
{"LaneChangeBsd", PERSISTENT},
|
||||
{"MaxAngleFrames", PERSISTENT},
|
||||
{"SoftHoldMode", PERSISTENT},
|
||||
{"LatMpcPathCost", PERSISTENT},
|
||||
{"LatMpcMotionCost", PERSISTENT},
|
||||
{"LatMpcAccelCost", PERSISTENT},
|
||||
{"LatMpcJerkCost", PERSISTENT},
|
||||
{"LatMpcSteeringRateCost", PERSISTENT },
|
||||
{"LatMpcInputOffset", PERSISTENT},
|
||||
{"PathOffset", PERSISTENT},
|
||||
{"LateralTorqueCustom", PERSISTENT},
|
||||
{"LateralTorqueAccelFactor", PERSISTENT},
|
||||
{"LateralTorqueFriction", PERSISTENT},
|
||||
{"LateralTorqueKpV", PERSISTENT},
|
||||
{"LateralTorqueKiV", PERSISTENT},
|
||||
{"LateralTorqueKf", PERSISTENT},
|
||||
{"LateralTorqueKd", PERSISTENT},
|
||||
{"CustomSteerMax", PERSISTENT},
|
||||
{"CustomSteerDeltaUp", PERSISTENT},
|
||||
{"CustomSteerDeltaDown", PERSISTENT},
|
||||
{"CustomSteerDeltaUpLC", PERSISTENT},
|
||||
{"CustomSteerDeltaDownLC", PERSISTENT},
|
||||
{"SpeedFromPCM", PERSISTENT},
|
||||
{"MaxTimeOffroadMin", PERSISTENT},
|
||||
{"DisableDM", PERSISTENT},
|
||||
{"EnableConnect", PERSISTENT},
|
||||
{"MuteDoor", PERSISTENT},
|
||||
{"MuteSeatbelt", PERSISTENT},
|
||||
{"CarrotException", CLEAR_ON_MANAGER_START},
|
||||
{"CarrotSpeed", PERSISTENT},
|
||||
{"CarrotSpeedViz", PERSISTENT},
|
||||
{"CarrotSpeedTable", PERSISTENT},
|
||||
{"CarName", PERSISTENT},
|
||||
{"EVTable", PERSISTENT},
|
||||
{"LongPitch", PERSISTENT},
|
||||
{"ActivateCruiseAfterBrake", CLEAR_ON_MANAGER_START}, // for GM autoResume
|
||||
{"CustomSR", PERSISTENT},
|
||||
{"SteerRatioRate", PERSISTENT},
|
||||
{"SoftRestartTriggered", CLEAR_ON_MANAGER_START},
|
||||
|
||||
{"DevicePosition", CLEAR_ON_MANAGER_START},
|
||||
{"NNFF", PERSISTENT},
|
||||
{"NNFFLite", PERSISTENT},
|
||||
{"NNFFModelName", CLEAR_ON_OFFROAD_TRANSITION},
|
||||
|
||||
{"HardwareC3xLite", PERSISTENT},
|
||||
};
|
||||
163
common/params_pyx.pyx
Normal file
163
common/params_pyx.pyx
Normal file
@@ -0,0 +1,163 @@
|
||||
# distutils: language = c++
|
||||
# cython: language_level = 3
|
||||
from libcpp cimport bool
|
||||
from libcpp.string cimport string
|
||||
from libcpp.vector cimport vector
|
||||
|
||||
cdef extern from "common/params.h":
|
||||
cpdef enum ParamKeyType:
|
||||
PERSISTENT
|
||||
CLEAR_ON_MANAGER_START
|
||||
CLEAR_ON_ONROAD_TRANSITION
|
||||
CLEAR_ON_OFFROAD_TRANSITION
|
||||
DEVELOPMENT_ONLY
|
||||
ALL
|
||||
|
||||
cdef cppclass c_Params "Params":
|
||||
c_Params(string) except + nogil
|
||||
string get(string, bool) nogil
|
||||
bool getBool(string, bool) nogil
|
||||
int getInt(string, bool) nogil
|
||||
float getFloat(string, bool) nogil
|
||||
int remove(string) nogil
|
||||
int put(string, string) nogil
|
||||
void putNonBlocking(string, string) nogil
|
||||
void putBoolNonBlocking(string, bool) nogil
|
||||
void putIntNonBlocking(string, int) nogil
|
||||
void putFloatNonBlocking(string, float) nogil
|
||||
int putBool(string, bool) nogil
|
||||
int putInt(string, int) nogil
|
||||
int putFloat(string, float) nogil
|
||||
bool checkKey(string) nogil
|
||||
string getParamPath(string) nogil
|
||||
void clearAll(ParamKeyType)
|
||||
vector[string] allKeys()
|
||||
|
||||
|
||||
def ensure_bytes(v):
|
||||
return v.encode() if isinstance(v, str) else v
|
||||
|
||||
class UnknownKeyName(Exception):
|
||||
pass
|
||||
|
||||
cdef class Params:
|
||||
cdef c_Params* p
|
||||
cdef str d
|
||||
|
||||
def __cinit__(self, d=""):
|
||||
cdef string path = <string>d.encode()
|
||||
with nogil:
|
||||
self.p = new c_Params(path)
|
||||
self.d = d
|
||||
|
||||
def __reduce__(self):
|
||||
return (type(self), (self.d,))
|
||||
|
||||
def __dealloc__(self):
|
||||
del self.p
|
||||
|
||||
def clear_all(self, tx_type=ParamKeyType.ALL):
|
||||
self.p.clearAll(tx_type)
|
||||
|
||||
def check_key(self, key):
|
||||
key = ensure_bytes(key)
|
||||
if not self.p.checkKey(key):
|
||||
raise UnknownKeyName(key)
|
||||
return key
|
||||
|
||||
def get(self, key, bool block=False, encoding=None):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef string val
|
||||
with nogil:
|
||||
val = self.p.get(k, block)
|
||||
|
||||
if val == b"":
|
||||
if block:
|
||||
# If we got no value while running in blocking mode,
|
||||
# it means we were interrupted while waiting
|
||||
raise KeyboardInterrupt
|
||||
else:
|
||||
return None
|
||||
|
||||
return val if encoding is None else val.decode(encoding)
|
||||
|
||||
def get_bool(self, key, bool block=False):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef bool r
|
||||
with nogil:
|
||||
r = self.p.getBool(k, block)
|
||||
return r
|
||||
|
||||
def get_int(self, key, bool block=False):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef int r
|
||||
with nogil:
|
||||
r = self.p.getInt(k, block)
|
||||
return r
|
||||
|
||||
def get_float(self, key, bool block=False):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef float r
|
||||
with nogil:
|
||||
r = self.p.getFloat(k, block)
|
||||
return r
|
||||
|
||||
def put(self, key, dat):
|
||||
"""
|
||||
Warning: This function blocks until the param is written to disk!
|
||||
In very rare cases this can take over a second, and your code will hang.
|
||||
Use put_nonblocking or put_bool_nonblocking in time-sensitive code, but
|
||||
in general try to avoid writing params as much as possible.
|
||||
"""
|
||||
cdef string k = self.check_key(key)
|
||||
cdef string dat_bytes = ensure_bytes(dat)
|
||||
with nogil:
|
||||
self.p.put(k, dat_bytes)
|
||||
|
||||
def put_bool(self, key, bool val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putBool(k, val)
|
||||
|
||||
def put_int(self, key, int val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putInt(k, val)
|
||||
|
||||
def put_float(self, key, float val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putFloat(k, val)
|
||||
|
||||
def put_nonblocking(self, key, dat):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef string dat_bytes = ensure_bytes(dat)
|
||||
with nogil:
|
||||
self.p.putNonBlocking(k, dat_bytes)
|
||||
|
||||
def put_bool_nonblocking(self, key, bool val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putBoolNonBlocking(k, val)
|
||||
|
||||
def put_int_nonblocking(self, key, int val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putIntNonBlocking(k, val)
|
||||
|
||||
def put_float_nonblocking(self, key, float val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putFloatNonBlocking(k, val)
|
||||
|
||||
def remove(self, key):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.remove(k)
|
||||
|
||||
def get_param_path(self, key=""):
|
||||
cdef string key_bytes = ensure_bytes(key)
|
||||
return self.p.getParamPath(key_bytes).decode("utf-8")
|
||||
|
||||
def all_keys(self):
|
||||
return self.p.allKeys()
|
||||
BIN
common/params_pyx.so
Executable file
BIN
common/params_pyx.so
Executable file
Binary file not shown.
73
common/pid.py
Normal file
73
common/pid.py
Normal file
@@ -0,0 +1,73 @@
|
||||
import numpy as np
|
||||
from numbers import Number
|
||||
|
||||
class PIDController:
|
||||
def __init__(self, k_p, k_i, k_f=0., k_d=0., pos_limit=1e308, neg_limit=-1e308, rate=100):
|
||||
self._k_p = k_p
|
||||
self._k_i = k_i
|
||||
self._k_d = k_d
|
||||
self.k_f = k_f # feedforward gain
|
||||
if isinstance(self._k_p, Number):
|
||||
self._k_p = [[0], [self._k_p]]
|
||||
if isinstance(self._k_i, Number):
|
||||
self._k_i = [[0], [self._k_i]]
|
||||
if isinstance(self._k_d, Number):
|
||||
self._k_d = [[0], [self._k_d]]
|
||||
|
||||
self.pos_limit = pos_limit
|
||||
self.neg_limit = neg_limit
|
||||
|
||||
self.i_unwind_rate = 0.3 / rate
|
||||
self.i_rate = 1.0 / rate
|
||||
self.speed = 0.0
|
||||
|
||||
self.reset()
|
||||
|
||||
@property
|
||||
def k_p(self):
|
||||
return np.interp(self.speed, self._k_p[0], self._k_p[1])
|
||||
|
||||
@property
|
||||
def k_i(self):
|
||||
return np.interp(self.speed, self._k_i[0], self._k_i[1])
|
||||
|
||||
@property
|
||||
def k_d(self):
|
||||
return np.interp(self.speed, self._k_d[0], self._k_d[1])
|
||||
|
||||
@property
|
||||
def error_integral(self):
|
||||
return self.i/self.k_i
|
||||
|
||||
def reset(self):
|
||||
self.p = 0.0
|
||||
self.i = 0.0
|
||||
self.d = 0.0
|
||||
self.f = 0.0
|
||||
self.control = 0
|
||||
|
||||
def update(self, error, error_rate=0.0, speed=0.0, override=False, feedforward=0., freeze_integrator=False):
|
||||
self.speed = speed
|
||||
|
||||
self.p = float(error) * self.k_p
|
||||
self.f = feedforward * self.k_f
|
||||
self.d = error_rate * self.k_d
|
||||
|
||||
if override:
|
||||
self.i -= self.i_unwind_rate * float(np.sign(self.i))
|
||||
else:
|
||||
if not freeze_integrator:
|
||||
if self.k_i != 0:
|
||||
self.i = self.i + error * self.k_i * self.i_rate
|
||||
else:
|
||||
self.i = 0
|
||||
|
||||
# Clip i to prevent exceeding control limits
|
||||
control_no_i = self.p + self.d + self.f
|
||||
control_no_i = np.clip(control_no_i, self.neg_limit, self.pos_limit)
|
||||
self.i = np.clip(self.i, self.neg_limit - control_no_i, self.pos_limit - control_no_i)
|
||||
|
||||
control = self.p + self.i + self.d + self.f
|
||||
|
||||
self.control = np.clip(control, self.neg_limit, self.pos_limit)
|
||||
return self.control
|
||||
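A brief sketch of how the PIDController above is typically driven at a fixed loop rate; the gains, limits and signals here are illustrative, not tuned values from this commit.

from openpilot.common.pid import PIDController

# speed-scheduled proportional gain, constant integral gain, 100 Hz loop
pid = PIDController(k_p=([0., 30.], [0.2, 0.1]), k_i=0.05, pos_limit=1.0, neg_limit=-1.0, rate=100)

setpoint, measurement, speed = 1.0, 0.8, 15.0
output = pid.update(setpoint - measurement, speed=speed)  # clipped to [neg_limit, pos_limit]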
39
common/prefix.h
Normal file
39
common/prefix.h
Normal file
@@ -0,0 +1,39 @@
|
||||
#pragma once
|
||||
|
||||
#include <cassert>
|
||||
#include <string>
|
||||
|
||||
#include "common/params.h"
|
||||
#include "common/util.h"
|
||||
#include "system/hardware/hw.h"
|
||||
|
||||
class OpenpilotPrefix {
|
||||
public:
|
||||
OpenpilotPrefix(std::string prefix = {}) {
|
||||
if (prefix.empty()) {
|
||||
prefix = util::random_string(15);
|
||||
}
|
||||
msgq_path = Path::shm_path() + "/" + prefix;
|
||||
bool ret = util::create_directories(msgq_path, 0777);
|
||||
assert(ret);
|
||||
setenv("OPENPILOT_PREFIX", prefix.c_str(), 1);
|
||||
}
|
||||
|
||||
~OpenpilotPrefix() {
|
||||
auto param_path = Params().getParamPath();
|
||||
if (util::file_exists(param_path)) {
|
||||
std::string real_path = util::readlink(param_path);
|
||||
system(util::string_format("rm %s -rf", real_path.c_str()).c_str());
|
||||
unlink(param_path.c_str());
|
||||
}
|
||||
if (getenv("COMMA_CACHE") == nullptr) {
|
||||
system(util::string_format("rm %s -rf", Path::download_cache_root().c_str()).c_str());
|
||||
}
|
||||
system(util::string_format("rm %s -rf", Path::comma_home().c_str()).c_str());
|
||||
system(util::string_format("rm %s -rf", msgq_path.c_str()).c_str());
|
||||
unsetenv("OPENPILOT_PREFIX");
|
||||
}
|
||||
|
||||
private:
|
||||
std::string msgq_path;
|
||||
};
|
||||
53
common/prefix.py
Normal file
53
common/prefix.py
Normal file
@@ -0,0 +1,53 @@
|
||||
import os
|
||||
import shutil
|
||||
import uuid
|
||||
|
||||
|
||||
from openpilot.common.params import Params
|
||||
from openpilot.system.hardware import PC
|
||||
from openpilot.system.hardware.hw import Paths
|
||||
from openpilot.system.hardware.hw import DEFAULT_DOWNLOAD_CACHE_ROOT
|
||||
|
||||
class OpenpilotPrefix:
|
||||
def __init__(self, prefix: str | None = None, clean_dirs_on_exit: bool = True, shared_download_cache: bool = False):
|
||||
self.prefix = prefix if prefix else str(uuid.uuid4().hex[0:15])
|
||||
self.msgq_path = os.path.join(Paths.shm_path(), self.prefix)
|
||||
self.clean_dirs_on_exit = clean_dirs_on_exit
|
||||
self.shared_download_cache = shared_download_cache
|
||||
|
||||
def __enter__(self):
|
||||
self.original_prefix = os.environ.get('OPENPILOT_PREFIX', None)
|
||||
os.environ['OPENPILOT_PREFIX'] = self.prefix
|
||||
try:
|
||||
os.mkdir(self.msgq_path)
|
||||
except FileExistsError:
|
||||
pass
|
||||
os.makedirs(Paths.log_root(), exist_ok=True)
|
||||
|
||||
if self.shared_download_cache:
|
||||
os.environ["COMMA_CACHE"] = DEFAULT_DOWNLOAD_CACHE_ROOT
|
||||
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_obj, exc_tb):
|
||||
if self.clean_dirs_on_exit:
|
||||
self.clean_dirs()
|
||||
try:
|
||||
del os.environ['OPENPILOT_PREFIX']
|
||||
if self.original_prefix is not None:
|
||||
os.environ['OPENPILOT_PREFIX'] = self.original_prefix
|
||||
except KeyError:
|
||||
pass
|
||||
return False
|
||||
|
||||
def clean_dirs(self):
|
||||
symlink_path = Params().get_param_path()
|
||||
if os.path.exists(symlink_path):
|
||||
shutil.rmtree(os.path.realpath(symlink_path), ignore_errors=True)
|
||||
os.remove(symlink_path)
|
||||
shutil.rmtree(self.msgq_path, ignore_errors=True)
|
||||
if PC:
|
||||
shutil.rmtree(Paths.log_root(), ignore_errors=True)
|
||||
if not os.environ.get("COMMA_CACHE", False):
|
||||
shutil.rmtree(Paths.download_cache_root(), ignore_errors=True)
|
||||
shutil.rmtree(Paths.comma_home(), ignore_errors=True)
|
||||
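OpenpilotPrefix above namespaces params and msgq state per process, which is mainly useful in tests; a minimal sketch:

from openpilot.common.prefix import OpenpilotPrefix
from openpilot.common.params import Params

with OpenpilotPrefix():
  # everything inside the block reads/writes a params dir and msgq path under the random prefix
  Params().put_bool("IsMetric", True)
# on exit the prefixed dirs are removed, since clean_dirs_on_exit defaults to True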
52
common/queue.h
Normal file
52
common/queue.h
Normal file
@@ -0,0 +1,52 @@
|
||||
#pragma once
|
||||
|
||||
#include <condition_variable>
|
||||
#include <mutex>
|
||||
#include <queue>
|
||||
|
||||
template <class T>
|
||||
class SafeQueue {
|
||||
public:
|
||||
SafeQueue() = default;
|
||||
|
||||
void push(const T& v) {
|
||||
{
|
||||
std::unique_lock lk(m);
|
||||
q.push(v);
|
||||
}
|
||||
cv.notify_one();
|
||||
}
|
||||
|
||||
T pop() {
|
||||
std::unique_lock lk(m);
|
||||
cv.wait(lk, [this] { return !q.empty(); });
|
||||
T v = q.front();
|
||||
q.pop();
|
||||
return v;
|
||||
}
|
||||
|
||||
bool try_pop(T& v, int timeout_ms = 0) {
|
||||
std::unique_lock lk(m);
|
||||
if (!cv.wait_for(lk, std::chrono::milliseconds(timeout_ms), [this] { return !q.empty(); })) {
|
||||
return false;
|
||||
}
|
||||
v = q.front();
|
||||
q.pop();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool empty() const {
|
||||
std::scoped_lock lk(m);
|
||||
return q.empty();
|
||||
}
|
||||
|
||||
size_t size() const {
|
||||
std::scoped_lock lk(m);
|
||||
return q.size();
|
||||
}
|
||||
|
||||
private:
|
||||
mutable std::mutex m;
|
||||
std::condition_variable cv;
|
||||
std::queue<T> q;
|
||||
};
|
||||
23
common/ratekeeper.h
Normal file
23
common/ratekeeper.h
Normal file
@@ -0,0 +1,23 @@
|
||||
#pragma once
|
||||
|
||||
#include <cstdint>
|
||||
#include <string>
|
||||
|
||||
class RateKeeper {
|
||||
public:
|
||||
RateKeeper(const std::string &name, float rate, float print_delay_threshold = 0);
|
||||
~RateKeeper() {}
|
||||
bool keepTime();
|
||||
bool monitorTime();
|
||||
inline uint64_t frame() const { return frame_; }
|
||||
inline double remaining() const { return remaining_; }
|
||||
|
||||
private:
|
||||
double interval;
|
||||
double next_frame_time;
|
||||
double last_monitor_time;
|
||||
double remaining_ = 0;
|
||||
float print_delay_threshold = 0;
|
||||
uint64_t frame_ = 0;
|
||||
std::string name;
|
||||
};
|
||||
96
common/realtime.py
Normal file
96
common/realtime.py
Normal file
@@ -0,0 +1,96 @@
|
||||
"""Utilities for reading real time clocks and keeping soft real time constraints."""
|
||||
import gc
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
from setproctitle import getproctitle
|
||||
|
||||
from openpilot.common.util import MovingAverage
|
||||
from openpilot.system.hardware import PC
|
||||
|
||||
|
||||
# time step for each process
|
||||
DT_CTRL = 0.01 # controlsd
|
||||
DT_MDL = 0.05 # model
|
||||
DT_HW = 0.5 # hardwared and manager
|
||||
DT_DMON = 0.05 # driver monitoring
|
||||
|
||||
|
||||
class Priority:
|
||||
# CORE 2
|
||||
# - modeld = 55
|
||||
# - camerad = 54
|
||||
CTRL_LOW = 51 # plannerd & radard
|
||||
|
||||
# CORE 3
|
||||
# - pandad = 55
|
||||
CTRL_HIGH = 53
|
||||
|
||||
|
||||
def set_core_affinity(cores: list[int]) -> None:
|
||||
if sys.platform == 'linux' and not PC:
|
||||
os.sched_setaffinity(0, cores)
|
||||
|
||||
|
||||
def config_realtime_process(cores: int | list[int], priority: int) -> None:
|
||||
gc.disable()
|
||||
if sys.platform == 'linux' and not PC:
|
||||
os.sched_setscheduler(0, os.SCHED_FIFO, os.sched_param(priority))
|
||||
c = cores if isinstance(cores, list) else [cores, ]
|
||||
set_core_affinity(c)
|
||||
|
||||
|
||||
class Ratekeeper:
|
||||
def __init__(self, rate: float, print_delay_threshold: float | None = 0.0) -> None:
|
||||
"""Rate in Hz for ratekeeping. print_delay_threshold must be nonnegative."""
|
||||
self._interval = 1. / rate
|
||||
self._print_delay_threshold = print_delay_threshold
|
||||
self._frame = 0
|
||||
self._remaining = 0.0
|
||||
self._process_name = getproctitle()
|
||||
self._last_monitor_time = -1.
|
||||
self._next_frame_time = -1.
|
||||
|
||||
self.avg_dt = MovingAverage(100)
|
||||
self.avg_dt.add_value(self._interval)
|
||||
|
||||
@property
|
||||
def frame(self) -> int:
|
||||
return self._frame
|
||||
|
||||
@property
|
||||
def remaining(self) -> float:
|
||||
return self._remaining
|
||||
|
||||
@property
|
||||
def lagging(self) -> bool:
|
||||
expected_dt = self._interval * (1 / 0.9)
|
||||
return self.avg_dt.get_average() > expected_dt
|
||||
|
||||
# Maintain loop rate by calling this at the end of each loop
|
||||
def keep_time(self) -> bool:
|
||||
lagged = self.monitor_time()
|
||||
if self._remaining > 0:
|
||||
time.sleep(self._remaining)
|
||||
return lagged
|
||||
|
||||
# Monitors the cumulative lag, but does not enforce a rate
|
||||
def monitor_time(self) -> bool:
|
||||
if self._last_monitor_time < 0:
|
||||
self._next_frame_time = time.monotonic() + self._interval
|
||||
self._last_monitor_time = time.monotonic()
|
||||
|
||||
prev = self._last_monitor_time
|
||||
self._last_monitor_time = time.monotonic()
|
||||
self.avg_dt.add_value(self._last_monitor_time - prev)
|
||||
|
||||
lagged = False
|
||||
remaining = self._next_frame_time - time.monotonic()
|
||||
self._next_frame_time += self._interval
|
||||
if self._print_delay_threshold is not None and remaining < -self._print_delay_threshold:
|
||||
print(f"{self._process_name} lagging by {-remaining * 1000:.2f} ms")
|
||||
lagged = True
|
||||
self._frame += 1
|
||||
self._remaining = remaining
|
||||
return lagged
|
||||
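A typical soft real-time loop built on the Ratekeeper above; the 20 Hz rate and the loop body are only illustrative.

from openpilot.common.realtime import Ratekeeper

def do_work():
  pass  # placeholder for the real loop body

rk = Ratekeeper(20, print_delay_threshold=0.05)  # 20 Hz, warn when more than 50 ms late
while True:
  do_work()
  lagged = rk.keep_time()  # sleeps off the remaining frame time, True if the loop fell behind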
30
common/retry.py
Normal file
30
common/retry.py
Normal file
@@ -0,0 +1,30 @@
|
||||
import time
|
||||
import functools
|
||||
|
||||
from openpilot.common.swaglog import cloudlog
|
||||
|
||||
|
||||
def retry(attempts=3, delay=1.0, ignore_failure=False):
|
||||
def decorator(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
for _ in range(attempts):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception:
|
||||
cloudlog.exception(f"{func.__name__} failed, trying again")
|
||||
time.sleep(delay)
|
||||
|
||||
if ignore_failure:
|
||||
cloudlog.error(f"{func.__name__} failed after retry")
|
||||
else:
|
||||
raise Exception(f"{func.__name__} failed after retry")
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
@retry(attempts=10)
|
||||
def abc():
|
||||
raise ValueError("abc failed :(")
|
||||
abc()
|
||||
13
common/run.py
Normal file
13
common/run.py
Normal file
@@ -0,0 +1,13 @@
|
||||
import subprocess
|
||||
|
||||
|
||||
def run_cmd(cmd: list[str], cwd=None, env=None) -> str:
|
||||
return subprocess.check_output(cmd, encoding='utf8', cwd=cwd, env=env).strip()
|
||||
|
||||
|
||||
def run_cmd_default(cmd: list[str], default: str = "", cwd=None, env=None) -> str:
|
||||
try:
|
||||
return run_cmd(cmd, cwd=cwd, env=env)
|
||||
except subprocess.CalledProcessError:
|
||||
return default
|
||||
|
||||
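run_cmd and run_cmd_default above are thin wrappers around subprocess.check_output; a quick sketch (the git commands are only examples):

from openpilot.common.run import run_cmd, run_cmd_default

sha = run_cmd(["git", "rev-parse", "HEAD"])  # raises subprocess.CalledProcessError on failure
branch = run_cmd_default(["git", "symbolic-ref", "--short", "HEAD"], default="unknown")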
54
common/simple_kalman.py
Normal file
54
common/simple_kalman.py
Normal file
@@ -0,0 +1,54 @@
|
||||
import numpy as np
|
||||
|
||||
|
||||
def get_kalman_gain(dt, A, C, Q, R, iterations=100):
|
||||
P = np.zeros_like(Q)
|
||||
for _ in range(iterations):
|
||||
P = A.dot(P).dot(A.T) + dt * Q
|
||||
S = C.dot(P).dot(C.T) + R
|
||||
K = P.dot(C.T).dot(np.linalg.inv(S))
|
||||
P = (np.eye(len(P)) - K.dot(C)).dot(P)
|
||||
return K
|
||||
|
||||
|
||||
class KF1D:
|
||||
# this EKF assumes constant covariance matrix, so calculations are much simpler
|
||||
# the Kalman gain also needs to be precomputed using the control module
|
||||
|
||||
def __init__(self, x0, A, C, K):
|
||||
self.x0_0 = x0[0][0]
|
||||
self.x1_0 = x0[1][0]
|
||||
self.A0_0 = A[0][0]
|
||||
self.A0_1 = A[0][1]
|
||||
self.A1_0 = A[1][0]
|
||||
self.A1_1 = A[1][1]
|
||||
self.C0_0 = C[0]
|
||||
self.C0_1 = C[1]
|
||||
self.K0_0 = K[0][0]
|
||||
self.K1_0 = K[1][0]
|
||||
|
||||
self.A_K_0 = self.A0_0 - self.K0_0 * self.C0_0
|
||||
self.A_K_1 = self.A0_1 - self.K0_0 * self.C0_1
|
||||
self.A_K_2 = self.A1_0 - self.K1_0 * self.C0_0
|
||||
self.A_K_3 = self.A1_1 - self.K1_0 * self.C0_1
|
||||
|
||||
# K matrix needs to be pre-computed as follows:
|
||||
# import control
|
||||
# (x, l, K) = control.dare(np.transpose(self.A), np.transpose(self.C), Q, R)
|
||||
# self.K = np.transpose(K)
|
||||
|
||||
def update(self, meas):
|
||||
#self.x = np.dot(self.A_K, self.x) + np.dot(self.K, meas)
|
||||
x0_0 = self.A_K_0 * self.x0_0 + self.A_K_1 * self.x1_0 + self.K0_0 * meas
|
||||
x1_0 = self.A_K_2 * self.x0_0 + self.A_K_3 * self.x1_0 + self.K1_0 * meas
|
||||
self.x0_0 = x0_0
|
||||
self.x1_0 = x1_0
|
||||
return [self.x0_0, self.x1_0]
|
||||
|
||||
@property
|
||||
def x(self):
|
||||
return [[self.x0_0], [self.x1_0]]
|
||||
|
||||
def set_x(self, x):
|
||||
self.x0_0 = x[0][0]
|
||||
self.x1_0 = x[1][0]
|
||||
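The comment in KF1D above notes that the gain must be precomputed; get_kalman_gain in the same file can do that. A sketch with illustrative noise matrices:

import numpy as np
from openpilot.common.simple_kalman import KF1D, get_kalman_gain

dt = 0.05
A = [[1.0, dt], [0.0, 1.0]]    # constant-velocity model
C = [[1.0, 0.0]]               # only position is measured
Q = np.diag([0.1, 1.0])        # illustrative process noise
R = np.array([[0.3]])          # illustrative measurement noise

K = get_kalman_gain(dt, np.array(A), np.array(C), Q, R)
kf = KF1D(x0=[[0.0], [0.0]], A=A, C=[1.0, 0.0], K=K.tolist())
x_pos, x_vel = kf.update(1.2)  # feed a new position measurement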
52
common/spinner.py
Normal file
52
common/spinner.py
Normal file
@@ -0,0 +1,52 @@
|
||||
import os
|
||||
import subprocess
|
||||
from openpilot.common.basedir import BASEDIR
|
||||
|
||||
|
||||
class Spinner:
|
||||
def __init__(self):
|
||||
try:
|
||||
self.spinner_proc = subprocess.Popen(["./spinner"],
|
||||
stdin=subprocess.PIPE,
|
||||
cwd=os.path.join(BASEDIR, "selfdrive", "ui"),
|
||||
close_fds=True)
|
||||
except OSError:
|
||||
self.spinner_proc = None
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def update(self, spinner_text: str):
|
||||
if self.spinner_proc is not None:
|
||||
self.spinner_proc.stdin.write(spinner_text.encode('utf8') + b"\n")
|
||||
try:
|
||||
self.spinner_proc.stdin.flush()
|
||||
except BrokenPipeError:
|
||||
pass
|
||||
|
||||
def update_progress(self, cur: float, total: float):
|
||||
self.update(str(round(100 * cur / total)))
|
||||
|
||||
def close(self):
|
||||
if self.spinner_proc is not None:
|
||||
self.spinner_proc.kill()
|
||||
try:
|
||||
self.spinner_proc.communicate(timeout=2.)
|
||||
except subprocess.TimeoutExpired:
|
||||
print("WARNING: failed to kill spinner")
|
||||
self.spinner_proc = None
|
||||
|
||||
def __del__(self):
|
||||
self.close()
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
self.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import time
|
||||
with Spinner() as s:
|
||||
s.update("Spinner text")
|
||||
time.sleep(5.0)
|
||||
print("gone")
|
||||
time.sleep(5.0)
|
||||
73
common/stat_live.py
Normal file
73
common/stat_live.py
Normal file
@@ -0,0 +1,73 @@
|
||||
import numpy as np
|
||||
|
||||
class RunningStat:
|
||||
# tracks realtime mean and standard deviation without storing any data
|
||||
def __init__(self, priors=None, max_trackable=-1):
|
||||
self.max_trackable = max_trackable
|
||||
if priors is not None:
|
||||
# initialize from history
|
||||
self.M = priors[0]
|
||||
self.S = priors[1]
|
||||
self.n = priors[2]
|
||||
self.M_last = self.M
|
||||
self.S_last = self.S
|
||||
|
||||
else:
|
||||
self.reset()
|
||||
|
||||
def reset(self):
|
||||
self.M = 0.
|
||||
self.S = 0.
|
||||
self.M_last = 0.
|
||||
self.S_last = 0.
|
||||
self.n = 0
|
||||
|
||||
def push_data(self, new_data):
|
||||
# short term memory hack
|
||||
if self.max_trackable < 0 or self.n < self.max_trackable:
|
||||
self.n += 1
|
||||
if self.n == 0:
|
||||
self.M_last = new_data
|
||||
self.M = self.M_last
|
||||
self.S_last = 0.
|
||||
else:
|
||||
self.M = self.M_last + (new_data - self.M_last) / self.n
|
||||
self.S = self.S_last + (new_data - self.M_last) * (new_data - self.M)
|
||||
self.M_last = self.M
|
||||
self.S_last = self.S
|
||||
|
||||
def mean(self):
|
||||
return self.M
|
||||
|
||||
def variance(self):
|
||||
if self.n >= 2:
|
||||
return self.S / (self.n - 1.)
|
||||
else:
|
||||
return 0
|
||||
|
||||
def std(self):
|
||||
return np.sqrt(self.variance())
|
||||
|
||||
def params_to_save(self):
|
||||
return [self.M, self.S, self.n]
|
||||
|
||||
class RunningStatFilter:
|
||||
def __init__(self, raw_priors=None, filtered_priors=None, max_trackable=-1):
|
||||
self.raw_stat = RunningStat(raw_priors, -1)
|
||||
self.filtered_stat = RunningStat(filtered_priors, max_trackable)
|
||||
|
||||
def reset(self):
|
||||
self.raw_stat.reset()
|
||||
self.filtered_stat.reset()
|
||||
|
||||
def push_and_update(self, new_data):
|
||||
_std_last = self.raw_stat.std()
|
||||
self.raw_stat.push_data(new_data)
|
||||
_delta_std = self.raw_stat.std() - _std_last
|
||||
if _delta_std <= 0:
|
||||
self.filtered_stat.push_data(new_data)
|
||||
else:
|
||||
pass
|
||||
# self.filtered_stat.push_data(self.filtered_stat.mean())
|
||||
|
||||
# class SequentialBayesian():
|
||||
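A small sketch of the RunningStat / RunningStatFilter classes above, using made-up samples:

from openpilot.common.stat_live import RunningStat, RunningStatFilter

stat = RunningStat()
for sample in (1.0, 1.2, 0.9, 1.1):
  stat.push_data(sample)
print(stat.mean(), stat.std())

# the filter only forwards samples that do not increase the spread of the raw stream
filt = RunningStatFilter(max_trackable=1000)
for sample in (1.0, 1.2, 0.9, 5.0):
  filt.push_and_update(sample)
print(filt.filtered_stat.mean())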
76
common/swaglog.h
Normal file
76
common/swaglog.h
Normal file
@@ -0,0 +1,76 @@
|
||||
#pragma once
|
||||
|
||||
#include "common/timing.h"
|
||||
|
||||
#define CLOUDLOG_DEBUG 10
|
||||
#define CLOUDLOG_INFO 20
|
||||
#define CLOUDLOG_WARNING 30
|
||||
#define CLOUDLOG_ERROR 40
|
||||
#define CLOUDLOG_CRITICAL 50
|
||||
|
||||
|
||||
#ifdef __GNUC__
|
||||
#define SWAG_LOG_CHECK_FMT(a, b) __attribute__ ((format (printf, a, b)))
|
||||
#else
|
||||
#define SWAG_LOG_CHECK_FMT(a, b)
|
||||
#endif
|
||||
|
||||
void cloudlog_e(int levelnum, const char* filename, int lineno, const char* func,
|
||||
const char* fmt, ...) SWAG_LOG_CHECK_FMT(5, 6);
|
||||
|
||||
void cloudlog_te(int levelnum, const char* filename, int lineno, const char* func,
|
||||
const char* fmt, ...) SWAG_LOG_CHECK_FMT(5, 6);
|
||||
|
||||
void cloudlog_te(int levelnum, const char* filename, int lineno, const char* func,
|
||||
uint32_t frame_id, const char* fmt, ...) SWAG_LOG_CHECK_FMT(6, 7);
|
||||
|
||||
|
||||
#define cloudlog(lvl, fmt, ...) cloudlog_e(lvl, __FILE__, __LINE__, \
|
||||
__func__, \
|
||||
fmt, ## __VA_ARGS__)
|
||||
|
||||
#define cloudlog_t(lvl, ...) cloudlog_te(lvl, __FILE__, __LINE__, \
|
||||
__func__, \
|
||||
__VA_ARGS__)
|
||||
|
||||
|
||||
#define cloudlog_rl(burst, millis, lvl, fmt, ...) \
|
||||
{ \
|
||||
static uint64_t __begin = 0; \
|
||||
static int __printed = 0; \
|
||||
static int __missed = 0; \
|
||||
\
|
||||
int __burst = (burst); \
|
||||
int __millis = (millis); \
|
||||
uint64_t __ts = nanos_since_boot(); \
|
||||
\
|
||||
if (!__begin) { __begin = __ts; } \
|
||||
\
|
||||
if (__begin + __millis*1000000ULL < __ts) { \
|
||||
if (__missed) { \
|
||||
cloudlog(CLOUDLOG_WARNING, "cloudlog: %d messages suppressed", __missed); \
|
||||
} \
|
||||
__begin = 0; \
|
||||
__printed = 0; \
|
||||
__missed = 0; \
|
||||
} \
|
||||
\
|
||||
if (__printed < __burst) { \
|
||||
cloudlog(lvl, fmt, ## __VA_ARGS__); \
|
||||
__printed++; \
|
||||
} else { \
|
||||
__missed++; \
|
||||
} \
|
||||
}
|
||||
|
||||
|
||||
#define LOGT(...) cloudlog_t(CLOUDLOG_DEBUG, __VA_ARGS__)
|
||||
#define LOGD(fmt, ...) cloudlog(CLOUDLOG_DEBUG, fmt, ## __VA_ARGS__)
|
||||
#define LOG(fmt, ...) cloudlog(CLOUDLOG_INFO, fmt, ## __VA_ARGS__)
|
||||
#define LOGW(fmt, ...) cloudlog(CLOUDLOG_WARNING, fmt, ## __VA_ARGS__)
|
||||
#define LOGE(fmt, ...) cloudlog(CLOUDLOG_ERROR, fmt, ## __VA_ARGS__)
|
||||
|
||||
#define LOGD_100(fmt, ...) cloudlog_rl(2, 100, CLOUDLOG_DEBUG, fmt, ## __VA_ARGS__)
|
||||
#define LOG_100(fmt, ...) cloudlog_rl(2, 100, CLOUDLOG_INFO, fmt, ## __VA_ARGS__)
|
||||
#define LOGW_100(fmt, ...) cloudlog_rl(2, 100, CLOUDLOG_WARNING, fmt, ## __VA_ARGS__)
|
||||
#define LOGE_100(fmt, ...) cloudlog_rl(2, 100, CLOUDLOG_ERROR, fmt, ## __VA_ARGS__)
|
||||
144
common/swaglog.py
Normal file
144
common/swaglog.py
Normal file
@@ -0,0 +1,144 @@
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
import warnings
|
||||
from pathlib import Path
|
||||
from logging.handlers import BaseRotatingHandler
|
||||
|
||||
import zmq
|
||||
|
||||
from openpilot.common.logging_extra import SwagLogger, SwagFormatter, SwagLogFileFormatter
|
||||
from openpilot.system.hardware.hw import Paths
|
||||
|
||||
|
||||
def get_file_handler():
|
||||
Path(Paths.swaglog_root()).mkdir(parents=True, exist_ok=True)
|
||||
base_filename = os.path.join(Paths.swaglog_root(), "swaglog")
|
||||
handler = SwaglogRotatingFileHandler(base_filename)
|
||||
return handler
|
||||
|
||||
class SwaglogRotatingFileHandler(BaseRotatingHandler):
|
||||
def __init__(self, base_filename, interval=60, max_bytes=1024*256, backup_count=2500, encoding=None):
|
||||
super().__init__(base_filename, mode="a", encoding=encoding, delay=True)
|
||||
self.base_filename = base_filename
|
||||
self.interval = interval # seconds
|
||||
self.max_bytes = max_bytes
|
||||
self.backup_count = backup_count
|
||||
self.log_files = self.get_existing_logfiles()
|
||||
log_indexes = [f.split(".")[-1] for f in self.log_files]
|
||||
self.last_file_idx = max([int(i) for i in log_indexes if i.isdigit()] or [-1])
|
||||
self.last_rollover = None
|
||||
self.doRollover()
|
||||
|
||||
def _open(self):
|
||||
self.last_rollover = time.monotonic()
|
||||
self.last_file_idx += 1
|
||||
next_filename = f"{self.base_filename}.{self.last_file_idx:010}"
|
||||
stream = open(next_filename, self.mode, encoding=self.encoding)
|
||||
self.log_files.insert(0, next_filename)
|
||||
return stream
|
||||
|
||||
def get_existing_logfiles(self):
|
||||
log_files = list()
|
||||
base_dir = os.path.dirname(self.base_filename)
|
||||
for fn in os.listdir(base_dir):
|
||||
fp = os.path.join(base_dir, fn)
|
||||
if fp.startswith(self.base_filename) and os.path.isfile(fp):
|
||||
log_files.append(fp)
|
||||
return sorted(log_files)
|
||||
|
||||
def shouldRollover(self, record):
|
||||
size_exceeded = self.max_bytes > 0 and self.stream.tell() >= self.max_bytes
|
||||
time_exceeded = self.interval > 0 and self.last_rollover + self.interval <= time.monotonic()
|
||||
return size_exceeded or time_exceeded
|
||||
|
||||
def doRollover(self):
|
||||
if self.stream:
|
||||
self.stream.close()
|
||||
self.stream = self._open()
|
||||
|
||||
if self.backup_count > 0:
|
||||
while len(self.log_files) > self.backup_count:
|
||||
to_delete = self.log_files.pop()
|
||||
if os.path.exists(to_delete): # just being safe, should always exist
|
||||
os.remove(to_delete)
|
||||
|
||||
class UnixDomainSocketHandler(logging.Handler):
|
||||
def __init__(self, formatter):
|
||||
logging.Handler.__init__(self)
|
||||
self.setFormatter(formatter)
|
||||
self.pid = None
|
||||
|
||||
self.zctx = None
|
||||
self.sock = None
|
||||
|
||||
def __del__(self):
|
||||
self.close()
|
||||
|
||||
def close(self):
|
||||
if self.sock is not None:
|
||||
self.sock.close()
|
||||
if self.zctx is not None:
|
||||
self.zctx.term()
|
||||
|
||||
def connect(self):
|
||||
self.zctx = zmq.Context()
|
||||
self.sock = self.zctx.socket(zmq.PUSH)
|
||||
self.sock.setsockopt(zmq.LINGER, 10)
|
||||
self.sock.connect(Paths.swaglog_ipc())
|
||||
self.pid = os.getpid()
|
||||
|
||||
def emit(self, record):
|
||||
if os.getpid() != self.pid:
|
||||
# TODO suppresses warning about forking proc with zmq socket, fix root cause
|
||||
warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*<zmq.*>")
|
||||
self.connect()
|
||||
|
||||
msg = self.format(record).rstrip('\n')
|
||||
# print("SEND".format(repr(msg)))
|
||||
try:
|
||||
s = chr(record.levelno)+msg
|
||||
self.sock.send(s.encode('utf8'), zmq.NOBLOCK)
|
||||
except zmq.error.Again:
|
||||
# drop :/
|
||||
pass
|
||||
|
||||
|
||||
class ForwardingHandler(logging.Handler):
|
||||
def __init__(self, target_logger):
|
||||
super().__init__()
|
||||
self.target_logger = target_logger
|
||||
|
||||
def emit(self, record):
|
||||
self.target_logger.handle(record)
|
||||
|
||||
|
||||
def add_file_handler(log):
|
||||
"""
|
||||
Function to add the file log handler to swaglog.
|
||||
This can be used to store logs when logmessaged is not running.
|
||||
"""
|
||||
handler = get_file_handler()
|
||||
handler.setFormatter(SwagLogFileFormatter(log))
|
||||
log.addHandler(handler)
|
||||
|
||||
|
||||
cloudlog = log = SwagLogger()
|
||||
log.setLevel(logging.DEBUG)
|
||||
|
||||
|
||||
outhandler = logging.StreamHandler()
|
||||
|
||||
print_level = os.environ.get('LOGPRINT', 'warning')
|
||||
if print_level == 'debug':
|
||||
outhandler.setLevel(logging.DEBUG)
|
||||
elif print_level == 'info':
|
||||
outhandler.setLevel(logging.INFO)
|
||||
elif print_level == 'warning':
|
||||
outhandler.setLevel(logging.WARNING)
|
||||
|
||||
ipchandler = UnixDomainSocketHandler(SwagFormatter(log))
|
||||
|
||||
log.addHandler(outhandler)
|
||||
# logs are sent through IPC before writing to disk to prevent disk I/O blocking
|
||||
log.addHandler(ipchandler)
|
||||
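cloudlog defined above is the project-wide logger (a SwagLogger, so the standard logging.Logger methods apply); a typical call site looks like this, with made-up messages:

from openpilot.common.swaglog import cloudlog

cloudlog.info("starting service")
cloudlog.warning("battery low: %d%%", 12)
try:
  1 / 0
except ZeroDivisionError:
  cloudlog.exception("math failed")  # includes the traceback, routed through the zmq IPC handler above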
0
common/tests/__init__.py
Normal file
0
common/tests/__init__.py
Normal file
19
common/tests/test_file_helpers.py
Normal file
19
common/tests/test_file_helpers.py
Normal file
@@ -0,0 +1,19 @@
|
||||
import os
|
||||
from uuid import uuid4
|
||||
|
||||
from openpilot.common.file_helpers import atomic_write_in_dir
|
||||
|
||||
|
||||
class TestFileHelpers:
|
||||
def run_atomic_write_func(self, atomic_write_func):
|
||||
path = f"/tmp/tmp{uuid4()}"
|
||||
with atomic_write_func(path) as f:
|
||||
f.write("test")
|
||||
assert not os.path.exists(path)
|
||||
|
||||
with open(path) as f:
|
||||
assert f.read() == "test"
|
||||
os.remove(path)
|
||||
|
||||
def test_atomic_write_in_dir(self):
|
||||
self.run_atomic_write_func(atomic_write_in_dir)
|
||||
15
common/tests/test_markdown.py
Normal file
15
common/tests/test_markdown.py
Normal file
@@ -0,0 +1,15 @@
|
||||
import os
|
||||
|
||||
from openpilot.common.basedir import BASEDIR
|
||||
from openpilot.common.markdown import parse_markdown
|
||||
|
||||
|
||||
class TestMarkdown:
|
||||
def test_all_release_notes(self):
|
||||
with open(os.path.join(BASEDIR, "RELEASES.md")) as f:
|
||||
release_notes = f.read().split("\n\n")
|
||||
assert len(release_notes) > 10
|
||||
|
||||
for rn in release_notes:
|
||||
md = parse_markdown(rn)
|
||||
assert len(md) > 0
|
||||
109
common/tests/test_params.py
Normal file
109
common/tests/test_params.py
Normal file
@@ -0,0 +1,109 @@
|
||||
import pytest
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from openpilot.common.params import Params, ParamKeyType, UnknownKeyName
|
||||
|
||||
class TestParams:
|
||||
def setup_method(self):
|
||||
self.params = Params()
|
||||
|
||||
def test_params_put_and_get(self):
|
||||
self.params.put("DongleId", "cb38263377b873ee")
|
||||
assert self.params.get("DongleId") == b"cb38263377b873ee"
|
||||
|
||||
def test_params_non_ascii(self):
|
||||
st = b"\xe1\x90\xff"
|
||||
self.params.put("CarParams", st)
|
||||
assert self.params.get("CarParams") == st
|
||||
|
||||
def test_params_get_cleared_manager_start(self):
|
||||
self.params.put("CarParams", "test")
|
||||
self.params.put("DongleId", "cb38263377b873ee")
|
||||
assert self.params.get("CarParams") == b"test"
|
||||
|
||||
undefined_param = self.params.get_param_path(uuid.uuid4().hex)
|
||||
with open(undefined_param, "w") as f:
|
||||
f.write("test")
|
||||
assert os.path.isfile(undefined_param)
|
||||
|
||||
self.params.clear_all(ParamKeyType.CLEAR_ON_MANAGER_START)
|
||||
assert self.params.get("CarParams") is None
|
||||
assert self.params.get("DongleId") is not None
|
||||
assert not os.path.isfile(undefined_param)
|
||||
|
||||
def test_params_two_things(self):
|
||||
self.params.put("DongleId", "bob")
|
||||
self.params.put("AthenadPid", "123")
|
||||
assert self.params.get("DongleId") == b"bob"
|
||||
assert self.params.get("AthenadPid") == b"123"
|
||||
|
||||
def test_params_get_block(self):
|
||||
def _delayed_writer():
|
||||
time.sleep(0.1)
|
||||
self.params.put("CarParams", "test")
|
||||
threading.Thread(target=_delayed_writer).start()
|
||||
assert self.params.get("CarParams") is None
|
||||
assert self.params.get("CarParams", True) == b"test"
|
||||
|
||||
def test_params_unknown_key_fails(self):
|
||||
with pytest.raises(UnknownKeyName):
|
||||
self.params.get("swag")
|
||||
|
||||
with pytest.raises(UnknownKeyName):
|
||||
self.params.get_bool("swag")
|
||||
|
||||
with pytest.raises(UnknownKeyName):
|
||||
self.params.put("swag", "abc")
|
||||
|
||||
with pytest.raises(UnknownKeyName):
|
||||
self.params.put_bool("swag", True)
|
||||
|
||||
def test_remove_not_there(self):
|
||||
assert self.params.get("CarParams") is None
|
||||
self.params.remove("CarParams")
|
||||
assert self.params.get("CarParams") is None
|
||||
|
||||
def test_get_bool(self):
|
||||
self.params.remove("IsMetric")
|
||||
assert not self.params.get_bool("IsMetric")
|
||||
|
||||
self.params.put_bool("IsMetric", True)
|
||||
assert self.params.get_bool("IsMetric")
|
||||
|
||||
self.params.put_bool("IsMetric", False)
|
||||
assert not self.params.get_bool("IsMetric")
|
||||
|
||||
self.params.put("IsMetric", "1")
|
||||
assert self.params.get_bool("IsMetric")
|
||||
|
||||
self.params.put("IsMetric", "0")
|
||||
assert not self.params.get_bool("IsMetric")
|
||||
|
||||
def test_put_non_blocking_with_get_block(self):
|
||||
q = Params()
|
||||
def _delayed_writer():
|
||||
time.sleep(0.1)
|
||||
Params().put_nonblocking("CarParams", "test")
|
||||
threading.Thread(target=_delayed_writer).start()
|
||||
assert q.get("CarParams") is None
|
||||
assert q.get("CarParams", True) == b"test"
|
||||
|
||||
def test_put_bool_non_blocking_with_get_block(self):
|
||||
q = Params()
|
||||
def _delayed_writer():
|
||||
time.sleep(0.1)
|
||||
Params().put_bool_nonblocking("CarParams", True)
|
||||
threading.Thread(target=_delayed_writer).start()
|
||||
assert q.get("CarParams") is None
|
||||
assert q.get("CarParams", True) == b"1"
|
||||
|
||||
def test_params_all_keys(self):
|
||||
keys = Params().all_keys()
|
||||
|
||||
# sanity checks
|
||||
assert len(keys) > 20
|
||||
assert len(keys) == len(set(keys))
|
||||
assert b"CarParams" in keys
|
||||
29
common/tests/test_simple_kalman.py
Normal file
29
common/tests/test_simple_kalman.py
Normal file
@@ -0,0 +1,29 @@
|
||||
from openpilot.common.simple_kalman import KF1D
|
||||
|
||||
|
||||
class TestSimpleKalman:
|
||||
def setup_method(self):
|
||||
dt = 0.01
|
||||
x0_0 = 0.0
|
||||
x1_0 = 0.0
|
||||
A0_0 = 1.0
|
||||
A0_1 = dt
|
||||
A1_0 = 0.0
|
||||
A1_1 = 1.0
|
||||
C0_0 = 1.0
|
||||
C0_1 = 0.0
|
||||
K0_0 = 0.12287673
|
||||
K1_0 = 0.29666309
|
||||
|
||||
self.kf = KF1D(x0=[[x0_0], [x1_0]],
|
||||
A=[[A0_0, A0_1], [A1_0, A1_1]],
|
||||
C=[C0_0, C0_1],
|
||||
K=[[K0_0], [K1_0]])
|
||||
|
||||
def test_getter_setter(self):
|
||||
self.kf.set_x([[1.0], [1.0]])
|
||||
assert self.kf.x == [[1.0], [1.0]]
|
||||
|
||||
def update_returns_state(self):
|
||||
x = self.kf.update(100)
|
||||
assert x == self.kf.x
|
||||
63
common/text_window.py
Executable file
63
common/text_window.py
Executable file
@@ -0,0 +1,63 @@
#!/usr/bin/env python3
import os
import time
import subprocess
from openpilot.common.basedir import BASEDIR


class TextWindow:
  def __init__(self, text):
    try:
      self.text_proc = subprocess.Popen(["./text", text],
                                        stdin=subprocess.PIPE,
                                        cwd=os.path.join(BASEDIR, "selfdrive", "ui"),
                                        close_fds=True)
    except OSError:
      self.text_proc = None

  def get_status(self):
    if self.text_proc is not None:
      self.text_proc.poll()
      return self.text_proc.returncode
    return None

  def __enter__(self):
    return self

  def close(self):
    if self.text_proc is not None:
      self.text_proc.terminate()
      self.text_proc = None

  def wait_for_exit(self):
    if self.text_proc is not None:
      while True:
        if self.get_status() == 1:
          return
        time.sleep(0.1)

  def __del__(self):
    self.close()

  def __exit__(self, exc_type, exc_value, traceback):
    self.close()


if __name__ == "__main__":
  text = """Traceback (most recent call last):
  File "./controlsd.py", line 608, in <module>
    main()
  File "./controlsd.py", line 604, in main
    controlsd_thread(sm, pm, logcan)
  File "./controlsd.py", line 455, in controlsd_thread
    1/0
ZeroDivisionError: division by zero"""
  print(text)

  with TextWindow(text) as s:
    for _ in range(100):
      if s.get_status() == 1:
        print("Got exit button")
        break
      time.sleep(0.1)
  print("gone")
15
common/time_helpers.py
Normal file
15
common/time_helpers.py
Normal file
@@ -0,0 +1,15 @@
import datetime
from pathlib import Path

MIN_DATE = datetime.datetime(year=2025, month=2, day=21)

def min_date():
  # on systemd systems, the default time is the systemd build time
  systemd_path = Path("/lib/systemd/systemd")
  if systemd_path.exists():
    d = datetime.datetime.fromtimestamp(systemd_path.stat().st_mtime)
    return max(MIN_DATE, d + datetime.timedelta(days=1))
  return MIN_DATE

def system_time_valid():
  return datetime.datetime.now() > min_date()
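A small usage sketch (not part of this commit) of the helpers in `common/time_helpers.py`; the certificate/time-sync wording is purely illustrative:

```python
# Sketch: gate time-dependent work on a sane system clock.
from openpilot.common.time_helpers import system_time_valid, min_date

if system_time_valid():
  print("clock looks sane, safe to validate TLS certificates")
else:
  print(f"clock is before {min_date()}, waiting for time sync")
```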
27
common/timeout.py
Normal file
27
common/timeout.py
Normal file
@@ -0,0 +1,27 @@
import signal

class TimeoutException(Exception):
  pass

class Timeout:
  """
  Timeout context manager.
  For example this code will raise a TimeoutException:
  with Timeout(seconds=5, error_msg="Sleep was too long"):
    time.sleep(10)
  """
  def __init__(self, seconds, error_msg=None):
    if error_msg is None:
      error_msg = f'Timed out after {seconds} seconds'
    self.seconds = seconds
    self.error_msg = error_msg

  def handle_timeout(self, signum, frame):
    raise TimeoutException(self.error_msg)

  def __enter__(self):
    signal.signal(signal.SIGALRM, self.handle_timeout)
    signal.alarm(self.seconds)

  def __exit__(self, exc_type, exc_val, exc_tb):
    signal.alarm(0)
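A usage sketch (not part of this commit) for the `Timeout` context manager above, catching the failure instead of letting it propagate:

```python
# Sketch: bound a blocking call and handle the timeout.
import time
from openpilot.common.timeout import Timeout, TimeoutException

try:
  with Timeout(seconds=2, error_msg="network fetch took too long"):
    time.sleep(10)  # stand-in for a slow blocking call
except TimeoutException as e:
  print(f"gave up: {e}")
```

Since this is built on `SIGALRM`, it only works in the main thread on Unix-like systems.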
51
common/timing.h
Normal file
51
common/timing.h
Normal file
@@ -0,0 +1,51 @@
#pragma once

#include <cstdint>
#include <ctime>

#ifdef __APPLE__
#define CLOCK_BOOTTIME CLOCK_MONOTONIC
#endif

static inline uint64_t nanos_since_boot() {
  struct timespec t;
  clock_gettime(CLOCK_BOOTTIME, &t);
  return t.tv_sec * 1000000000ULL + t.tv_nsec;
}

static inline double millis_since_boot() {
  struct timespec t;
  clock_gettime(CLOCK_BOOTTIME, &t);
  return t.tv_sec * 1000.0 + t.tv_nsec * 1e-6;
}

static inline double seconds_since_boot() {
  struct timespec t;
  clock_gettime(CLOCK_BOOTTIME, &t);
  return (double)t.tv_sec + t.tv_nsec * 1e-9;
}

static inline uint64_t nanos_since_epoch() {
  struct timespec t;
  clock_gettime(CLOCK_REALTIME, &t);
  return t.tv_sec * 1000000000ULL + t.tv_nsec;
}

static inline double seconds_since_epoch() {
  struct timespec t;
  clock_gettime(CLOCK_REALTIME, &t);
  return (double)t.tv_sec + t.tv_nsec * 1e-9;
}

// you probably should use nanos_since_boot instead
static inline uint64_t nanos_monotonic() {
  struct timespec t;
  clock_gettime(CLOCK_MONOTONIC, &t);
  return t.tv_sec * 1000000000ULL + t.tv_nsec;
}

static inline uint64_t nanos_monotonic_raw() {
  struct timespec t;
  clock_gettime(CLOCK_MONOTONIC_RAW, &t);
  return t.tv_sec * 1000000000ULL + t.tv_nsec;
}
70
common/transformations/README.md
Normal file
70
common/transformations/README.md
Normal file
@@ -0,0 +1,70 @@
Reference Frames
------
Many reference frames are used throughout this codebase. This folder contains all the helper functions needed to transform between them. Generally this is done by generating a rotation matrix and multiplying.

| Name | [x, y, z] | Units | Notes |
| :-------------: |:-------------:| :-----:| :----: |
| Geodetic | [Latitude, Longitude, Altitude] | geodetic coordinates | Sometimes used as [lon, lat, alt], avoid this frame. |
| ECEF | [x, y, z] | meters | We use **ITRF14 (IGS14)**, NOT NAD83. <br> This is the global Mesh3D frame. |
| NED | [North, East, Down] | meters | Relative to earth's surface, useful for visualizing. |
| Device | [Forward, Right, Down] | meters | This is the Mesh3D local frame. <br> Relative to camera, **not imu.** <br> |
| Calibrated | [Forward, Right, Down] | meters | This is the frame the model outputs are in. <br> More details below. <br>|
| Car | [Forward, Right, Down] | meters | This is useful for estimating position of points on the road. <br> More details below. <br>|
| View | [Right, Down, Forward] | meters | Like device frame, but according to camera conventions. |
| Camera | [u, v, focal] | pixels | Like view frame, but 2d on the camera image. |
| Normalized Camera | [u / focal, v / focal, 1] | / | |
| Model | [u, v, focal] | pixels | The sampled rectangle of the full camera frame the model uses. |
| Normalized Model | [u / focal, v / focal, 1] | / | |


Orientation Conventions
------
Quaternions, rotation matrices and euler angles are three equivalent representations of orientation, and all three are used throughout the code base.

For euler angles the preferred convention is [roll, pitch, yaw], which corresponds to rotations around the [x, y, z] axes. All euler angles should always be in radians or radians/s, except when used for plotting or display purposes. For quaternions the Hamilton convention is preferred, which is [q<sub>w</sub>, q<sub>x</sub>, q<sub>y</sub>, q<sub>z</sub>]. All quaternions should always be normalized with a strictly positive q<sub>w</sub>. **These quaternions are a unique representation of orientation, whereas euler angles or rotation matrices are not.**

To rotate from one frame into another with euler angles, the convention is to rotate around roll, then pitch and then yaw, while rotating around the rotated axes, not the original axes.
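The snippet below is an illustrative sketch (not part of this commit) of these conventions, using the wrappers from `common/transformations/orientation.py`; the numeric angles are arbitrary:

```python
import numpy as np
from openpilot.common.transformations.orientation import euler2quat, quat2euler, euler2rot

# [roll, pitch, yaw] in radians; rotations are applied around the rotated axes
euler = np.array([0.1, -0.05, 1.5])

q = euler2quat(euler)   # Hamilton convention: [q_w, q_x, q_y, q_z], q_w kept positive
R = euler2rot(euler)    # equivalent 3x3 rotation matrix

print(q)
print(quat2euler(q))    # round-trips back to [0.1, -0.05, 1.5]
print(R @ R.T)          # ~identity, since R is a rotation
```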

Car frame
------
Device frame is aligned with the road-facing camera used by openpilot. However, when controlling the vehicle it is helpful to think in a reference frame aligned with the vehicle. These two reference frames can be different.

The orientation of car frame is defined to be aligned with the car's direction of travel and the road plane when the vehicle is driving on a flat road and not turning. The origin of car frame is defined to be directly below device frame (in car frame), such that it is on the road plane. The position and orientation of this frame are not necessarily always aligned with the direction of travel or the road plane, due to suspension movements and other effects.


Calibrated frame
------
It is helpful for openpilot's driving model to take in images that look similar when the device is mounted differently in different cars. To achieve this we "calibrate" the images by transforming them into calibrated frame. Calibrated frame is defined to be aligned with car frame in pitch and yaw, and aligned with device frame in roll. It also has the same origin as device frame.


Example
------
To transform global Mesh3D positions and orientations (positions_ecef, quats_ecef) into the local frame described by the first position and orientation from Mesh3D, one would do:
```
ecef_from_local = rot_from_quat(quats_ecef[0])
local_from_ecef = ecef_from_local.T
positions_local = np.einsum('ij,kj->ki', local_from_ecef, positions_ecef - positions_ecef[0])
rotations_global = rot_from_quat(quats_ecef)
rotations_local = np.einsum('ij,kjl->kil', local_from_ecef, rotations_global)
eulers_local = euler_from_rot(rotations_local)
```
0
common/transformations/__init__.py
Normal file
0
common/transformations/__init__.py
Normal file
179
common/transformations/camera.py
Normal file
179
common/transformations/camera.py
Normal file
@@ -0,0 +1,179 @@
|
||||
import itertools
|
||||
import numpy as np
|
||||
from dataclasses import dataclass
|
||||
|
||||
import openpilot.common.transformations.orientation as orient
|
||||
|
||||
## -- hardcoded hardware params --
|
||||
@dataclass(frozen=True)
|
||||
class CameraConfig:
|
||||
width: int
|
||||
height: int
|
||||
focal_length: float
|
||||
|
||||
@property
|
||||
def size(self):
|
||||
return (self.width, self.height)
|
||||
|
||||
@property
|
||||
def intrinsics(self):
|
||||
# aka 'K' aka camera_frame_from_view_frame
|
||||
return np.array([
|
||||
[self.focal_length, 0.0, float(self.width)/2],
|
||||
[0.0, self.focal_length, float(self.height)/2],
|
||||
[0.0, 0.0, 1.0]
|
||||
])
|
||||
|
||||
@property
|
||||
def intrinsics_inv(self):
|
||||
# aka 'K_inv' aka view_frame_from_camera_frame
|
||||
return np.linalg.inv(self.intrinsics)
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class _NoneCameraConfig(CameraConfig):
|
||||
width: int = 0
|
||||
height: int = 0
|
||||
focal_length: float = 0
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class DeviceCameraConfig:
|
||||
fcam: CameraConfig
|
||||
dcam: CameraConfig
|
||||
ecam: CameraConfig
|
||||
|
||||
def all_cams(self):
|
||||
for cam in ['fcam', 'dcam', 'ecam']:
|
||||
if not isinstance(getattr(self, cam), _NoneCameraConfig):
|
||||
yield cam, getattr(self, cam)
|
||||
|
||||
_ar_ox_fisheye = CameraConfig(1928, 1208, 567.0) # focal length probably wrong? magnification is not consistent across frame
|
||||
_os_fisheye = CameraConfig(2688 // 2, 1520 // 2, 567.0 / 4 * 3)
|
||||
_ar_ox_config = DeviceCameraConfig(CameraConfig(1928, 1208, 2648.0), _ar_ox_fisheye, _ar_ox_fisheye)
|
||||
_os_config = DeviceCameraConfig(CameraConfig(2688 // 2, 1520 // 2, 1522.0 * 3 / 4), _os_fisheye, _os_fisheye)
|
||||
_neo_config = DeviceCameraConfig(CameraConfig(1164, 874, 910.0), CameraConfig(816, 612, 650.0), _NoneCameraConfig())
|
||||
|
||||
DEVICE_CAMERAS = {
|
||||
# A "device camera" is defined by a device type and sensor
|
||||
|
||||
# sensor type was never set on eon/neo/two
|
||||
("neo", "unknown"): _neo_config,
|
||||
# unknown here is AR0231, field was added with OX03C10 support
|
||||
("tici", "unknown"): _ar_ox_config,
|
||||
|
||||
# before deviceState.deviceType was set, assume tici AR config
|
||||
("unknown", "ar0231"): _ar_ox_config,
|
||||
("unknown", "ox03c10"): _ar_ox_config,
|
||||
|
||||
# simulator (emulates a tici)
|
||||
("pc", "unknown"): _ar_ox_config,
|
||||
}
|
||||
prods = itertools.product(('tici', 'tizi', 'mici'), (('ar0231', _ar_ox_config), ('ox03c10', _ar_ox_config), ('os04c10', _os_config)))
|
||||
DEVICE_CAMERAS.update({(d, c[0]): c[1] for d, c in prods})
|
||||
|
||||
# device/mesh : x->forward, y-> right, z->down
|
||||
# view : x->right, y->down, z->forward
|
||||
device_frame_from_view_frame = np.array([
|
||||
[ 0., 0., 1.],
|
||||
[ 1., 0., 0.],
|
||||
[ 0., 1., 0.]
|
||||
])
|
||||
view_frame_from_device_frame = device_frame_from_view_frame.T
|
||||
|
||||
|
||||
# aka 'extrinsic_matrix'
|
||||
# road : x->forward, y -> left, z->up
|
||||
def get_view_frame_from_road_frame(roll, pitch, yaw, height):
|
||||
device_from_road = orient.rot_from_euler([roll, pitch, yaw]).dot(np.diag([1, -1, -1]))
|
||||
view_from_road = view_frame_from_device_frame.dot(device_from_road)
|
||||
return np.hstack((view_from_road, [[0], [height], [0]]))
|
||||
|
||||
|
||||
|
||||
# aka 'extrinsic_matrix'
|
||||
def get_view_frame_from_calib_frame(roll, pitch, yaw, height):
|
||||
device_from_calib = orient.rot_from_euler([roll, pitch, yaw])
|
||||
view_from_calib = view_frame_from_device_frame.dot(device_from_calib)
|
||||
return np.hstack((view_from_calib, [[0], [height], [0]]))
|
||||
|
||||
|
||||
def vp_from_ke(m):
|
||||
"""
|
||||
Computes the vanishing point from the product of the intrinsic and extrinsic
|
||||
matrices C = KE.
|
||||
|
||||
The vanishing point is defined as lim x->infinity C (x, 0, 0, 1).T
|
||||
"""
|
||||
return (m[0, 0]/m[2, 0], m[1, 0]/m[2, 0])
|
||||
|
||||
|
||||
def roll_from_ke(m):
|
||||
# note: different from calibration.h/RollAnglefromKE: i think that one's just wrong
|
||||
return np.arctan2(-(m[1, 0] - m[1, 1] * m[2, 0] / m[2, 1]),
|
||||
-(m[0, 0] - m[0, 1] * m[2, 0] / m[2, 1]))
|
||||
|
||||
|
||||
def normalize(img_pts, intrinsics):
|
||||
# normalizes image coordinates
|
||||
# accepts single pt or array of pts
|
||||
intrinsics_inv = np.linalg.inv(intrinsics)
|
||||
img_pts = np.array(img_pts)
|
||||
input_shape = img_pts.shape
|
||||
img_pts = np.atleast_2d(img_pts)
|
||||
img_pts = np.hstack((img_pts, np.ones((img_pts.shape[0], 1))))
|
||||
img_pts_normalized = img_pts.dot(intrinsics_inv.T)
|
||||
img_pts_normalized[(img_pts < 0).any(axis=1)] = np.nan
|
||||
return img_pts_normalized[:, :2].reshape(input_shape)
|
||||
|
||||
|
||||
def denormalize(img_pts, intrinsics, width=np.inf, height=np.inf):
|
||||
# denormalizes image coordinates
|
||||
# accepts single pt or array of pts
|
||||
img_pts = np.array(img_pts)
|
||||
input_shape = img_pts.shape
|
||||
img_pts = np.atleast_2d(img_pts)
|
||||
img_pts = np.hstack((img_pts, np.ones((img_pts.shape[0], 1), dtype=img_pts.dtype)))
|
||||
img_pts_denormalized = img_pts.dot(intrinsics.T)
|
||||
if np.isfinite(width):
|
||||
img_pts_denormalized[img_pts_denormalized[:, 0] > width] = np.nan
|
||||
img_pts_denormalized[img_pts_denormalized[:, 0] < 0] = np.nan
|
||||
if np.isfinite(height):
|
||||
img_pts_denormalized[img_pts_denormalized[:, 1] > height] = np.nan
|
||||
img_pts_denormalized[img_pts_denormalized[:, 1] < 0] = np.nan
|
||||
return img_pts_denormalized[:, :2].reshape(input_shape)
|
||||
|
||||
|
||||
def get_calib_from_vp(vp, intrinsics):
|
||||
vp_norm = normalize(vp, intrinsics)
|
||||
yaw_calib = np.arctan(vp_norm[0])
|
||||
pitch_calib = -np.arctan(vp_norm[1]*np.cos(yaw_calib))
|
||||
roll_calib = 0
|
||||
return roll_calib, pitch_calib, yaw_calib
|
||||
|
||||
|
||||
def device_from_ecef(pos_ecef, orientation_ecef, pt_ecef):
|
||||
# device from ecef frame
|
||||
# device frame is x -> forward, y-> right, z -> down
|
||||
# accepts single pt or array of pts
|
||||
input_shape = pt_ecef.shape
|
||||
pt_ecef = np.atleast_2d(pt_ecef)
|
||||
ecef_from_device_rot = orient.rotations_from_quats(orientation_ecef)
|
||||
device_from_ecef_rot = ecef_from_device_rot.T
|
||||
pt_ecef_rel = pt_ecef - pos_ecef
|
||||
pt_device = np.einsum('jk,ik->ij', device_from_ecef_rot, pt_ecef_rel)
|
||||
return pt_device.reshape(input_shape)
|
||||
|
||||
|
||||
def img_from_device(pt_device):
|
||||
# img coordinates from pts in device frame
|
||||
# first transforms to view frame, then to img coords
|
||||
# accepts single pt or array of pts
|
||||
input_shape = pt_device.shape
|
||||
pt_device = np.atleast_2d(pt_device)
|
||||
pt_view = np.einsum('jk,ik->ij', view_frame_from_device_frame, pt_device)
|
||||
|
||||
# This function should never return negative depths
|
||||
pt_view[pt_view[:, 2] < 0] = np.nan
|
||||
|
||||
pt_img = pt_view/pt_view[:, 2:3]
|
||||
return pt_img.reshape(input_shape)[:, :2]
|
||||
|
||||
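A usage sketch (not part of this commit) for the camera definitions above: looking up a device camera config and projecting a device-frame point into image coordinates. The chosen `DEVICE_CAMERAS` key and the example 3D point are illustrative.

```python
import numpy as np
from openpilot.common.transformations.camera import DEVICE_CAMERAS, view_frame_from_device_frame

cams = DEVICE_CAMERAS[("tici", "ar0231")]
K = cams.fcam.intrinsics                  # 3x3 camera matrix of the road camera

pt_device = np.array([20.0, 1.5, 0.5])    # x forward, y right, z down (meters)
pt_view = view_frame_from_device_frame @ pt_device
u, v, _ = K @ (pt_view / pt_view[2])      # normalize by depth, then apply intrinsics
print(u, v)
```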
43
common/transformations/coordinates.hpp
Normal file
43
common/transformations/coordinates.hpp
Normal file
@@ -0,0 +1,43 @@
|
||||
#pragma once
|
||||
|
||||
#include <eigen3/Eigen/Dense>
|
||||
|
||||
#define DEG2RAD(x) ((x) * M_PI / 180.0)
|
||||
#define RAD2DEG(x) ((x) * 180.0 / M_PI)
|
||||
|
||||
struct ECEF {
|
||||
double x, y, z;
|
||||
Eigen::Vector3d to_vector() const {
|
||||
return Eigen::Vector3d(x, y, z);
|
||||
}
|
||||
};
|
||||
|
||||
struct NED {
|
||||
double n, e, d;
|
||||
Eigen::Vector3d to_vector() const {
|
||||
return Eigen::Vector3d(n, e, d);
|
||||
}
|
||||
};
|
||||
|
||||
struct Geodetic {
|
||||
double lat, lon, alt;
|
||||
bool radians=false;
|
||||
};
|
||||
|
||||
ECEF geodetic2ecef(const Geodetic &g);
|
||||
Geodetic ecef2geodetic(const ECEF &e);
|
||||
|
||||
class LocalCoord {
|
||||
public:
|
||||
Eigen::Matrix3d ned2ecef_matrix;
|
||||
Eigen::Matrix3d ecef2ned_matrix;
|
||||
Eigen::Vector3d init_ecef;
|
||||
LocalCoord(const Geodetic &g, const ECEF &e);
|
||||
LocalCoord(const Geodetic &g) : LocalCoord(g, ::geodetic2ecef(g)) {}
|
||||
LocalCoord(const ECEF &e) : LocalCoord(::ecef2geodetic(e), e) {}
|
||||
|
||||
NED ecef2ned(const ECEF &e);
|
||||
ECEF ned2ecef(const NED &n);
|
||||
NED geodetic2ned(const Geodetic &g);
|
||||
Geodetic ned2geodetic(const NED &n);
|
||||
};
|
||||
18
common/transformations/coordinates.py
Normal file
18
common/transformations/coordinates.py
Normal file
@@ -0,0 +1,18 @@
from openpilot.common.transformations.orientation import numpy_wrap
from openpilot.common.transformations.transformations import (ecef2geodetic_single,
                                                               geodetic2ecef_single)
from openpilot.common.transformations.transformations import LocalCoord as LocalCoord_single


class LocalCoord(LocalCoord_single):
  ecef2ned = numpy_wrap(LocalCoord_single.ecef2ned_single, (3,), (3,))
  ned2ecef = numpy_wrap(LocalCoord_single.ned2ecef_single, (3,), (3,))
  geodetic2ned = numpy_wrap(LocalCoord_single.geodetic2ned_single, (3,), (3,))
  ned2geodetic = numpy_wrap(LocalCoord_single.ned2geodetic_single, (3,), (3,))


geodetic2ecef = numpy_wrap(geodetic2ecef_single, (3,), (3,))
ecef2geodetic = numpy_wrap(ecef2geodetic_single, (3,), (3,))

geodetic_from_ecef = ecef2geodetic
ecef_from_geodetic = geodetic2ecef
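A usage sketch (not part of this commit) for the wrappers above: converting a geodetic position to ECEF and expressing a nearby point in a local NED frame. Coordinates are [lat, lon, alt] in degrees/meters; the values come from the test data later in this diff.

```python
import openpilot.common.transformations.coordinates as coord

home = [37.7610403, -122.4778699, 115.0]
home_ecef = coord.geodetic2ecef(home)              # meters in ECEF (ITRF14)

converter = coord.LocalCoord.from_geodetic(home)
ned = converter.geodetic2ned([37.7611, -122.4779, 115.0])
print(home_ecef, ned)                              # ned ~ [north, east, down] offsets in meters
```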
70
common/transformations/model.py
Normal file
70
common/transformations/model.py
Normal file
@@ -0,0 +1,70 @@
|
||||
import numpy as np
|
||||
|
||||
from openpilot.common.transformations.orientation import rot_from_euler
|
||||
from openpilot.common.transformations.camera import get_view_frame_from_calib_frame, view_frame_from_device_frame, _ar_ox_fisheye
|
||||
|
||||
# segnet
|
||||
SEGNET_SIZE = (512, 384)
|
||||
|
||||
# MED model
|
||||
MEDMODEL_INPUT_SIZE = (512, 256)
|
||||
MEDMODEL_YUV_SIZE = (MEDMODEL_INPUT_SIZE[0], MEDMODEL_INPUT_SIZE[1] * 3 // 2)
|
||||
MEDMODEL_CY = 47.6
|
||||
|
||||
medmodel_fl = 910.0
|
||||
medmodel_intrinsics = np.array([
|
||||
[medmodel_fl, 0.0, 0.5 * MEDMODEL_INPUT_SIZE[0]],
|
||||
[0.0, medmodel_fl, MEDMODEL_CY],
|
||||
[0.0, 0.0, 1.0]])
|
||||
|
||||
|
||||
# BIG model
|
||||
BIGMODEL_INPUT_SIZE = (1024, 512)
|
||||
BIGMODEL_YUV_SIZE = (BIGMODEL_INPUT_SIZE[0], BIGMODEL_INPUT_SIZE[1] * 3 // 2)
|
||||
|
||||
bigmodel_fl = 910.0
|
||||
bigmodel_intrinsics = np.array([
|
||||
[bigmodel_fl, 0.0, 0.5 * BIGMODEL_INPUT_SIZE[0]],
|
||||
[0.0, bigmodel_fl, 256 + MEDMODEL_CY],
|
||||
[0.0, 0.0, 1.0]])
|
||||
|
||||
|
||||
# SBIG model (big model with the size of small model)
|
||||
SBIGMODEL_INPUT_SIZE = (512, 256)
|
||||
SBIGMODEL_YUV_SIZE = (SBIGMODEL_INPUT_SIZE[0], SBIGMODEL_INPUT_SIZE[1] * 3 // 2)
|
||||
|
||||
sbigmodel_fl = 455.0
|
||||
sbigmodel_intrinsics = np.array([
|
||||
[sbigmodel_fl, 0.0, 0.5 * SBIGMODEL_INPUT_SIZE[0]],
|
||||
[0.0, sbigmodel_fl, 0.5 * (256 + MEDMODEL_CY)],
|
||||
[0.0, 0.0, 1.0]])
|
||||
|
||||
DM_INPUT_SIZE = (1440, 960)
|
||||
dmonitoringmodel_fl = _ar_ox_fisheye.focal_length
|
||||
dmonitoringmodel_intrinsics = np.array([
|
||||
[dmonitoringmodel_fl, 0.0, DM_INPUT_SIZE[0]/2],
|
||||
[0.0, dmonitoringmodel_fl, DM_INPUT_SIZE[1]/2 - (_ar_ox_fisheye.height - DM_INPUT_SIZE[1])/2],
|
||||
[0.0, 0.0, 1.0]])
|
||||
|
||||
bigmodel_frame_from_calib_frame = np.dot(bigmodel_intrinsics,
|
||||
get_view_frame_from_calib_frame(0, 0, 0, 0))
|
||||
|
||||
|
||||
sbigmodel_frame_from_calib_frame = np.dot(sbigmodel_intrinsics,
|
||||
get_view_frame_from_calib_frame(0, 0, 0, 0))
|
||||
|
||||
medmodel_frame_from_calib_frame = np.dot(medmodel_intrinsics,
|
||||
get_view_frame_from_calib_frame(0, 0, 0, 0))
|
||||
|
||||
medmodel_frame_from_bigmodel_frame = np.dot(medmodel_intrinsics, np.linalg.inv(bigmodel_intrinsics))
|
||||
|
||||
calib_from_medmodel = np.linalg.inv(medmodel_frame_from_calib_frame[:, :3])
|
||||
calib_from_sbigmodel = np.linalg.inv(sbigmodel_frame_from_calib_frame[:, :3])
|
||||
|
||||
# This function is verified to give similar results to xx.uncommon.utils.transform_img
|
||||
def get_warp_matrix(device_from_calib_euler: np.ndarray, intrinsics: np.ndarray, bigmodel_frame: bool = False) -> np.ndarray:
|
||||
calib_from_model = calib_from_sbigmodel if bigmodel_frame else calib_from_medmodel
|
||||
device_from_calib = rot_from_euler(device_from_calib_euler)
|
||||
camera_from_calib = intrinsics @ view_frame_from_device_frame @ device_from_calib
|
||||
warp_matrix: np.ndarray = camera_from_calib @ calib_from_model
|
||||
return warp_matrix
|
||||
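A usage sketch (not part of this commit) for `get_warp_matrix` above: building the model warp from a device-from-calib calibration and the road camera intrinsics. The calibration angles here are made up for illustration.

```python
import numpy as np
from openpilot.common.transformations.camera import DEVICE_CAMERAS
from openpilot.common.transformations.model import get_warp_matrix

device_from_calib_euler = np.array([0.0, 0.02, -0.01])   # [roll, pitch, yaw] in radians
K = DEVICE_CAMERAS[("tici", "ar0231")].fcam.intrinsics

warp = get_warp_matrix(device_from_calib_euler, K)                        # medmodel warp
warp_big = get_warp_matrix(device_from_calib_euler, K, bigmodel_frame=True)
print(warp.shape, warp_big.shape)
```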
17
common/transformations/orientation.hpp
Normal file
17
common/transformations/orientation.hpp
Normal file
@@ -0,0 +1,17 @@
|
||||
#pragma once
|
||||
#include <eigen3/Eigen/Dense>
|
||||
#include "common/transformations/coordinates.hpp"
|
||||
|
||||
|
||||
Eigen::Quaterniond ensure_unique(const Eigen::Quaterniond &quat);
|
||||
|
||||
Eigen::Quaterniond euler2quat(const Eigen::Vector3d &euler);
|
||||
Eigen::Vector3d quat2euler(const Eigen::Quaterniond &quat);
|
||||
Eigen::Matrix3d quat2rot(const Eigen::Quaterniond &quat);
|
||||
Eigen::Quaterniond rot2quat(const Eigen::Matrix3d &rot);
|
||||
Eigen::Matrix3d euler2rot(const Eigen::Vector3d &euler);
|
||||
Eigen::Vector3d rot2euler(const Eigen::Matrix3d &rot);
|
||||
Eigen::Matrix3d rot_matrix(double roll, double pitch, double yaw);
|
||||
Eigen::Matrix3d rot(const Eigen::Vector3d &axis, double angle);
|
||||
Eigen::Vector3d ecef_euler_from_ned(const ECEF &ecef_init, const Eigen::Vector3d &ned_pose);
|
||||
Eigen::Vector3d ned_euler_from_ecef(const ECEF &ecef_init, const Eigen::Vector3d &ecef_pose);
|
||||
52
common/transformations/orientation.py
Normal file
52
common/transformations/orientation.py
Normal file
@@ -0,0 +1,52 @@
import numpy as np
from collections.abc import Callable

from openpilot.common.transformations.transformations import (ecef_euler_from_ned_single,
                                                               euler2quat_single,
                                                               euler2rot_single,
                                                               ned_euler_from_ecef_single,
                                                               quat2euler_single,
                                                               quat2rot_single,
                                                               rot2euler_single,
                                                               rot2quat_single)


def numpy_wrap(function, input_shape, output_shape) -> Callable[..., np.ndarray]:
  """Wrap a function to take either a single input or a batch of inputs and return the correct shape"""
  def f(*inps):
    *args, inp = inps
    inp = np.array(inp)
    shape = inp.shape

    if len(shape) == len(input_shape):
      out_shape = output_shape
    else:
      out_shape = (shape[0],) + output_shape

    # Add an empty dimension if the input is a single item, not a batch
    if len(shape) == len(input_shape):
      inp.shape = (1, ) + inp.shape

    result = np.asarray([function(*args, i) for i in inp])
    result.shape = out_shape
    return result
  return f


euler2quat = numpy_wrap(euler2quat_single, (3,), (4,))
quat2euler = numpy_wrap(quat2euler_single, (4,), (3,))
quat2rot = numpy_wrap(quat2rot_single, (4,), (3, 3))
rot2quat = numpy_wrap(rot2quat_single, (3, 3), (4,))
euler2rot = numpy_wrap(euler2rot_single, (3,), (3, 3))
rot2euler = numpy_wrap(rot2euler_single, (3, 3), (3,))
ecef_euler_from_ned = numpy_wrap(ecef_euler_from_ned_single, (3,), (3,))
ned_euler_from_ecef = numpy_wrap(ned_euler_from_ecef_single, (3,), (3,))

quats_from_rotations = rot2quat
quat_from_rot = rot2quat
rotations_from_quats = quat2rot
rot_from_quat = quat2rot
euler_from_rot = rot2euler
euler_from_quat = quat2euler
rot_from_euler = euler2rot
quat_from_euler = euler2quat
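A small sketch (not part of this commit) of the batching behavior `numpy_wrap` gives the functions above: each one accepts either a single item or a batch and preserves the shape accordingly.

```python
import numpy as np
from openpilot.common.transformations.orientation import euler2quat

single = euler2quat([0.1, 0.2, 0.3])      # shape (4,)
batch = euler2quat(np.zeros((5, 3)))      # shape (5, 4)
print(single.shape, batch.shape)
```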
0
common/transformations/tests/__init__.py
Normal file
0
common/transformations/tests/__init__.py
Normal file
104
common/transformations/tests/test_coordinates.py
Normal file
104
common/transformations/tests/test_coordinates.py
Normal file
@@ -0,0 +1,104 @@
|
||||
import numpy as np
|
||||
|
||||
import openpilot.common.transformations.coordinates as coord
|
||||
|
||||
geodetic_positions = np.array([[37.7610403, -122.4778699, 115],
|
||||
[27.4840915, -68.5867592, 2380],
|
||||
[32.4916858, -113.652821, -6],
|
||||
[15.1392514, 103.6976037, 24],
|
||||
[24.2302229, 44.2835412, 1650]])
|
||||
|
||||
ecef_positions = np.array([[-2711076.55270557, -4259167.14692758, 3884579.87669935],
|
||||
[ 2068042.69652729, -5273435.40316622, 2927004.89190746],
|
||||
[-2160412.60461669, -4932588.89873832, 3406542.29652851],
|
||||
[-1458247.92550567, 5983060.87496612, 1654984.6099885 ],
|
||||
[ 4167239.10867871, 4064301.90363223, 2602234.6065749 ]])
|
||||
|
||||
ecef_positions_offset = np.array([[-2711004.46961115, -4259099.33540613, 3884605.16002147],
|
||||
[ 2068074.30639499, -5273413.78835412, 2927012.48741131],
|
||||
[-2160344.53748176, -4932586.20092211, 3406636.2962545 ],
|
||||
[-1458211.98517094, 5983151.11161276, 1655077.02698447],
|
||||
[ 4167271.20055269, 4064398.22619263, 2602238.95265847]])
|
||||
|
||||
|
||||
ned_offsets = np.array([[78.722153649976391, 24.396208657446344, 60.343017506838436],
|
||||
[10.699003365155221, 37.319278617604269, 4.1084100025050407],
|
||||
[95.282646251726959, 61.266689955574428, -25.376506058505054],
|
||||
[68.535769283630003, -56.285970011848889, -100.54840137956515],
|
||||
[-33.066609321880179, 46.549821994306861, -84.062540548335591]])
|
||||
|
||||
ecef_init_batch = np.array([2068042.69652729, -5273435.40316622, 2927004.89190746])
|
||||
ecef_positions_offset_batch = np.array([[ 2068089.41454771, -5273434.46829148, 2927074.04783672],
|
||||
[ 2068103.31628647, -5273393.92275431, 2927102.08725987],
|
||||
[ 2068108.49939636, -5273359.27047121, 2927045.07091581],
|
||||
[ 2068075.12395611, -5273381.69432566, 2927041.08207992],
|
||||
[ 2068060.72033399, -5273430.6061505, 2927094.54928305]])
|
||||
|
||||
ned_offsets_batch = np.array([[ 53.88103168, 43.83445935, -46.27488057],
|
||||
[ 93.83378995, 71.57943024, -30.23113187],
|
||||
[ 57.26725796, 89.05602684, 23.02265814],
|
||||
[ 49.71775195, 49.79767572, 17.15351015],
|
||||
[ 78.56272609, 18.53100158, -43.25290759]])
|
||||
|
||||
|
||||
class TestNED:
|
||||
def test_small_distances(self):
|
||||
start_geodetic = np.array([33.8042184, -117.888593, 0.0])
|
||||
local_coord = coord.LocalCoord.from_geodetic(start_geodetic)
|
||||
|
||||
start_ned = local_coord.geodetic2ned(start_geodetic)
|
||||
np.testing.assert_array_equal(start_ned, np.zeros(3,))
|
||||
|
||||
west_geodetic = start_geodetic + [0, -0.0005, 0]
|
||||
west_ned = local_coord.geodetic2ned(west_geodetic)
|
||||
assert np.abs(west_ned[0]) < 1e-3
|
||||
assert west_ned[1] < 0
|
||||
|
||||
southwest_geodetic = start_geodetic + [-0.0005, -0.002, 0]
|
||||
southwest_ned = local_coord.geodetic2ned(southwest_geodetic)
|
||||
assert southwest_ned[0] < 0
|
||||
assert southwest_ned[1] < 0
|
||||
|
||||
def test_ecef_geodetic(self):
|
||||
# testing single
|
||||
np.testing.assert_allclose(ecef_positions[0], coord.geodetic2ecef(geodetic_positions[0]), rtol=1e-9)
|
||||
np.testing.assert_allclose(geodetic_positions[0, :2], coord.ecef2geodetic(ecef_positions[0])[:2], rtol=1e-9)
|
||||
np.testing.assert_allclose(geodetic_positions[0, 2], coord.ecef2geodetic(ecef_positions[0])[2], rtol=1e-9, atol=1e-4)
|
||||
|
||||
np.testing.assert_allclose(geodetic_positions[:, :2], coord.ecef2geodetic(ecef_positions)[:, :2], rtol=1e-9)
|
||||
np.testing.assert_allclose(geodetic_positions[:, 2], coord.ecef2geodetic(ecef_positions)[:, 2], rtol=1e-9, atol=1e-4)
|
||||
np.testing.assert_allclose(ecef_positions, coord.geodetic2ecef(geodetic_positions), rtol=1e-9)
|
||||
|
||||
|
||||
def test_ned(self):
|
||||
for ecef_pos in ecef_positions:
|
||||
converter = coord.LocalCoord.from_ecef(ecef_pos)
|
||||
ecef_pos_moved = ecef_pos + [25, -25, 25]
|
||||
ecef_pos_moved_double_converted = converter.ned2ecef(converter.ecef2ned(ecef_pos_moved))
|
||||
np.testing.assert_allclose(ecef_pos_moved, ecef_pos_moved_double_converted, rtol=1e-9)
|
||||
|
||||
for geo_pos in geodetic_positions:
|
||||
converter = coord.LocalCoord.from_geodetic(geo_pos)
|
||||
geo_pos_moved = geo_pos + np.array([0, 0, 10])
|
||||
geo_pos_double_converted_moved = converter.ned2geodetic(converter.geodetic2ned(geo_pos) + np.array([0, 0, -10]))
|
||||
np.testing.assert_allclose(geo_pos_moved[:2], geo_pos_double_converted_moved[:2], rtol=1e-9, atol=1e-6)
|
||||
np.testing.assert_allclose(geo_pos_moved[2], geo_pos_double_converted_moved[2], rtol=1e-9, atol=1e-4)
|
||||
|
||||
def test_ned_saved_results(self):
|
||||
for i, ecef_pos in enumerate(ecef_positions):
|
||||
converter = coord.LocalCoord.from_ecef(ecef_pos)
|
||||
np.testing.assert_allclose(converter.ned2ecef(ned_offsets[i]),
|
||||
ecef_positions_offset[i],
|
||||
rtol=1e-9, atol=1e-4)
|
||||
np.testing.assert_allclose(converter.ecef2ned(ecef_positions_offset[i]),
|
||||
ned_offsets[i],
|
||||
rtol=1e-9, atol=1e-4)
|
||||
|
||||
def test_ned_batch(self):
|
||||
converter = coord.LocalCoord.from_ecef(ecef_init_batch)
|
||||
np.testing.assert_allclose(converter.ecef2ned(ecef_positions_offset_batch),
|
||||
ned_offsets_batch,
|
||||
rtol=1e-9, atol=1e-7)
|
||||
np.testing.assert_allclose(converter.ned2ecef(ned_offsets_batch),
|
||||
ecef_positions_offset_batch,
|
||||
rtol=1e-9, atol=1e-7)
|
||||
61
common/transformations/tests/test_orientation.py
Normal file
61
common/transformations/tests/test_orientation.py
Normal file
@@ -0,0 +1,61 @@
|
||||
import numpy as np
|
||||
|
||||
from openpilot.common.transformations.orientation import euler2quat, quat2euler, euler2rot, rot2euler, \
|
||||
rot2quat, quat2rot, \
|
||||
ned_euler_from_ecef
|
||||
|
||||
eulers = np.array([[ 1.46520501, 2.78688383, 2.92780854],
|
||||
[ 4.86909526, 3.60618161, 4.30648981],
|
||||
[ 3.72175965, 2.68763705, 5.43895988],
|
||||
[ 5.92306687, 5.69573614, 0.81100357],
|
||||
[ 0.67838374, 5.02402037, 2.47106426]])
|
||||
|
||||
quats = np.array([[ 0.66855182, -0.71500939, 0.19539353, 0.06017818],
|
||||
[ 0.43163717, 0.70013301, 0.28209145, 0.49389021],
|
||||
[ 0.44121991, -0.08252646, 0.34257534, 0.82532207],
|
||||
[ 0.88578382, -0.04515356, -0.32936046, 0.32383617],
|
||||
[ 0.06578165, 0.61282835, 0.07126891, 0.78424163]])
|
||||
|
||||
ecef_positions = np.array([[-2711076.55270557, -4259167.14692758, 3884579.87669935],
|
||||
[ 2068042.69652729, -5273435.40316622, 2927004.89190746],
|
||||
[-2160412.60461669, -4932588.89873832, 3406542.29652851],
|
||||
[-1458247.92550567, 5983060.87496612, 1654984.6099885 ],
|
||||
[ 4167239.10867871, 4064301.90363223, 2602234.6065749 ]])
|
||||
|
||||
ned_eulers = np.array([[ 0.46806039, -0.4881889 , 1.65697808],
|
||||
[-2.14525969, -0.36533066, 0.73813479],
|
||||
[-1.39523364, -0.58540761, -1.77376356],
|
||||
[-1.84220435, 0.61828016, -1.03310421],
|
||||
[ 2.50450101, 0.36304151, 0.33136365]])
|
||||
|
||||
|
||||
class TestOrientation:
|
||||
def test_quat_euler(self):
|
||||
for i, eul in enumerate(eulers):
|
||||
np.testing.assert_allclose(quats[i], euler2quat(eul), rtol=1e-7)
|
||||
np.testing.assert_allclose(quats[i], euler2quat(quat2euler(quats[i])), rtol=1e-6)
|
||||
for i, eul in enumerate(eulers):
|
||||
np.testing.assert_allclose(quats[i], euler2quat(list(eul)), rtol=1e-7)
|
||||
np.testing.assert_allclose(quats[i], euler2quat(quat2euler(list(quats[i]))), rtol=1e-6)
|
||||
np.testing.assert_allclose(quats, euler2quat(eulers), rtol=1e-7)
|
||||
np.testing.assert_allclose(quats, euler2quat(quat2euler(quats)), rtol=1e-6)
|
||||
|
||||
def test_rot_euler(self):
|
||||
for eul in eulers:
|
||||
np.testing.assert_allclose(euler2quat(eul), euler2quat(rot2euler(euler2rot(eul))), rtol=1e-7)
|
||||
for eul in eulers:
|
||||
np.testing.assert_allclose(euler2quat(eul), euler2quat(rot2euler(euler2rot(list(eul)))), rtol=1e-7)
|
||||
np.testing.assert_allclose(euler2quat(eulers), euler2quat(rot2euler(euler2rot(eulers))), rtol=1e-7)
|
||||
|
||||
def test_rot_quat(self):
|
||||
for quat in quats:
|
||||
np.testing.assert_allclose(quat, rot2quat(quat2rot(quat)), rtol=1e-7)
|
||||
for quat in quats:
|
||||
np.testing.assert_allclose(quat, rot2quat(quat2rot(list(quat))), rtol=1e-7)
|
||||
np.testing.assert_allclose(quats, rot2quat(quat2rot(quats)), rtol=1e-7)
|
||||
|
||||
def test_euler_ned(self):
|
||||
for i in range(len(eulers)):
|
||||
np.testing.assert_allclose(ned_eulers[i], ned_euler_from_ecef(ecef_positions[i], eulers[i]), rtol=1e-7)
|
||||
#np.testing.assert_allclose(eulers[i], ecef_euler_from_ned(ecef_positions[i], ned_eulers[i]), rtol=1e-7)
|
||||
# np.testing.assert_allclose(ned_eulers, ned_euler_from_ecef(ecef_positions, eulers), rtol=1e-7)
|
||||
72
common/transformations/transformations.pxd
Normal file
72
common/transformations/transformations.pxd
Normal file
@@ -0,0 +1,72 @@
|
||||
# cython: language_level=3
|
||||
from libcpp cimport bool
|
||||
|
||||
cdef extern from "orientation.cc":
|
||||
pass
|
||||
|
||||
cdef extern from "orientation.hpp":
|
||||
cdef cppclass Quaternion "Eigen::Quaterniond":
|
||||
Quaternion()
|
||||
Quaternion(double, double, double, double)
|
||||
double w()
|
||||
double x()
|
||||
double y()
|
||||
double z()
|
||||
|
||||
cdef cppclass Vector3 "Eigen::Vector3d":
|
||||
Vector3()
|
||||
Vector3(double, double, double)
|
||||
double operator()(int)
|
||||
|
||||
cdef cppclass Matrix3 "Eigen::Matrix3d":
|
||||
Matrix3()
|
||||
Matrix3(double*)
|
||||
|
||||
double operator()(int, int)
|
||||
|
||||
Quaternion euler2quat(const Vector3 &)
|
||||
Vector3 quat2euler(const Quaternion &)
|
||||
Matrix3 quat2rot(const Quaternion &)
|
||||
Quaternion rot2quat(const Matrix3 &)
|
||||
Vector3 rot2euler(const Matrix3 &)
|
||||
Matrix3 euler2rot(const Vector3 &)
|
||||
Matrix3 rot_matrix(double, double, double)
|
||||
Vector3 ecef_euler_from_ned(const ECEF &, const Vector3 &)
|
||||
Vector3 ned_euler_from_ecef(const ECEF &, const Vector3 &)
|
||||
|
||||
|
||||
cdef extern from "coordinates.cc":
|
||||
cdef struct ECEF:
|
||||
double x
|
||||
double y
|
||||
double z
|
||||
|
||||
cdef struct NED:
|
||||
double n
|
||||
double e
|
||||
double d
|
||||
|
||||
cdef struct Geodetic:
|
||||
double lat
|
||||
double lon
|
||||
double alt
|
||||
bool radians
|
||||
|
||||
ECEF geodetic2ecef(const Geodetic &)
|
||||
Geodetic ecef2geodetic(const ECEF &)
|
||||
|
||||
cdef cppclass LocalCoord_c "LocalCoord":
|
||||
Matrix3 ned2ecef_matrix
|
||||
Matrix3 ecef2ned_matrix
|
||||
|
||||
LocalCoord_c(const Geodetic &, const ECEF &)
|
||||
LocalCoord_c(const Geodetic &)
|
||||
LocalCoord_c(const ECEF &)
|
||||
|
||||
NED ecef2ned(const ECEF &)
|
||||
ECEF ned2ecef(const NED &)
|
||||
NED geodetic2ned(const Geodetic &)
|
||||
Geodetic ned2geodetic(const NED &)
|
||||
|
||||
cdef extern from "coordinates.hpp":
|
||||
pass
|
||||
173
common/transformations/transformations.pyx
Normal file
173
common/transformations/transformations.pyx
Normal file
@@ -0,0 +1,173 @@
|
||||
# distutils: language = c++
|
||||
# cython: language_level = 3
|
||||
from openpilot.common.transformations.transformations cimport Matrix3, Vector3, Quaternion
|
||||
from openpilot.common.transformations.transformations cimport ECEF, NED, Geodetic
|
||||
|
||||
from openpilot.common.transformations.transformations cimport euler2quat as euler2quat_c
|
||||
from openpilot.common.transformations.transformations cimport quat2euler as quat2euler_c
|
||||
from openpilot.common.transformations.transformations cimport quat2rot as quat2rot_c
|
||||
from openpilot.common.transformations.transformations cimport rot2quat as rot2quat_c
|
||||
from openpilot.common.transformations.transformations cimport euler2rot as euler2rot_c
|
||||
from openpilot.common.transformations.transformations cimport rot2euler as rot2euler_c
|
||||
from openpilot.common.transformations.transformations cimport rot_matrix as rot_matrix_c
|
||||
from openpilot.common.transformations.transformations cimport ecef_euler_from_ned as ecef_euler_from_ned_c
|
||||
from openpilot.common.transformations.transformations cimport ned_euler_from_ecef as ned_euler_from_ecef_c
|
||||
from openpilot.common.transformations.transformations cimport geodetic2ecef as geodetic2ecef_c
|
||||
from openpilot.common.transformations.transformations cimport ecef2geodetic as ecef2geodetic_c
|
||||
from openpilot.common.transformations.transformations cimport LocalCoord_c
|
||||
|
||||
|
||||
import numpy as np
|
||||
cimport numpy as np
|
||||
|
||||
cdef np.ndarray[double, ndim=2] matrix2numpy(Matrix3 m):
|
||||
return np.array([
|
||||
[m(0, 0), m(0, 1), m(0, 2)],
|
||||
[m(1, 0), m(1, 1), m(1, 2)],
|
||||
[m(2, 0), m(2, 1), m(2, 2)],
|
||||
])
|
||||
|
||||
cdef Matrix3 numpy2matrix(np.ndarray[double, ndim=2, mode="fortran"] m):
|
||||
assert m.shape[0] == 3
|
||||
assert m.shape[1] == 3
|
||||
return Matrix3(<double*>m.data)
|
||||
|
||||
cdef ECEF list2ecef(ecef):
|
||||
cdef ECEF e
|
||||
e.x = ecef[0]
|
||||
e.y = ecef[1]
|
||||
e.z = ecef[2]
|
||||
return e
|
||||
|
||||
cdef NED list2ned(ned):
|
||||
cdef NED n
|
||||
n.n = ned[0]
|
||||
n.e = ned[1]
|
||||
n.d = ned[2]
|
||||
return n
|
||||
|
||||
cdef Geodetic list2geodetic(geodetic):
|
||||
cdef Geodetic g
|
||||
g.lat = geodetic[0]
|
||||
g.lon = geodetic[1]
|
||||
g.alt = geodetic[2]
|
||||
return g
|
||||
|
||||
def euler2quat_single(euler):
|
||||
cdef Vector3 e = Vector3(euler[0], euler[1], euler[2])
|
||||
cdef Quaternion q = euler2quat_c(e)
|
||||
return [q.w(), q.x(), q.y(), q.z()]
|
||||
|
||||
def quat2euler_single(quat):
|
||||
cdef Quaternion q = Quaternion(quat[0], quat[1], quat[2], quat[3])
|
||||
cdef Vector3 e = quat2euler_c(q)
|
||||
return [e(0), e(1), e(2)]
|
||||
|
||||
def quat2rot_single(quat):
|
||||
cdef Quaternion q = Quaternion(quat[0], quat[1], quat[2], quat[3])
|
||||
cdef Matrix3 r = quat2rot_c(q)
|
||||
return matrix2numpy(r)
|
||||
|
||||
def rot2quat_single(rot):
|
||||
cdef Matrix3 r = numpy2matrix(np.asfortranarray(rot, dtype=np.double))
|
||||
cdef Quaternion q = rot2quat_c(r)
|
||||
return [q.w(), q.x(), q.y(), q.z()]
|
||||
|
||||
def euler2rot_single(euler):
|
||||
cdef Vector3 e = Vector3(euler[0], euler[1], euler[2])
|
||||
cdef Matrix3 r = euler2rot_c(e)
|
||||
return matrix2numpy(r)
|
||||
|
||||
def rot2euler_single(rot):
|
||||
cdef Matrix3 r = numpy2matrix(np.asfortranarray(rot, dtype=np.double))
|
||||
cdef Vector3 e = rot2euler_c(r)
|
||||
return [e(0), e(1), e(2)]
|
||||
|
||||
def rot_matrix(roll, pitch, yaw):
|
||||
return matrix2numpy(rot_matrix_c(roll, pitch, yaw))
|
||||
|
||||
def ecef_euler_from_ned_single(ecef_init, ned_pose):
|
||||
cdef ECEF init = list2ecef(ecef_init)
|
||||
cdef Vector3 pose = Vector3(ned_pose[0], ned_pose[1], ned_pose[2])
|
||||
|
||||
cdef Vector3 e = ecef_euler_from_ned_c(init, pose)
|
||||
return [e(0), e(1), e(2)]
|
||||
|
||||
def ned_euler_from_ecef_single(ecef_init, ecef_pose):
|
||||
cdef ECEF init = list2ecef(ecef_init)
|
||||
cdef Vector3 pose = Vector3(ecef_pose[0], ecef_pose[1], ecef_pose[2])
|
||||
|
||||
cdef Vector3 e = ned_euler_from_ecef_c(init, pose)
|
||||
return [e(0), e(1), e(2)]
|
||||
|
||||
def geodetic2ecef_single(geodetic):
|
||||
cdef Geodetic g = list2geodetic(geodetic)
|
||||
cdef ECEF e = geodetic2ecef_c(g)
|
||||
return [e.x, e.y, e.z]
|
||||
|
||||
def ecef2geodetic_single(ecef):
|
||||
cdef ECEF e = list2ecef(ecef)
|
||||
cdef Geodetic g = ecef2geodetic_c(e)
|
||||
return [g.lat, g.lon, g.alt]
|
||||
|
||||
|
||||
cdef class LocalCoord:
|
||||
cdef LocalCoord_c * lc
|
||||
|
||||
def __init__(self, geodetic=None, ecef=None):
|
||||
assert (geodetic is not None) or (ecef is not None)
|
||||
if geodetic is not None:
|
||||
self.lc = new LocalCoord_c(list2geodetic(geodetic))
|
||||
elif ecef is not None:
|
||||
self.lc = new LocalCoord_c(list2ecef(ecef))
|
||||
|
||||
@property
|
||||
def ned2ecef_matrix(self):
|
||||
return matrix2numpy(self.lc.ned2ecef_matrix)
|
||||
|
||||
@property
|
||||
def ecef2ned_matrix(self):
|
||||
return matrix2numpy(self.lc.ecef2ned_matrix)
|
||||
|
||||
@property
|
||||
def ned_from_ecef_matrix(self):
|
||||
return self.ecef2ned_matrix
|
||||
|
||||
@property
|
||||
def ecef_from_ned_matrix(self):
|
||||
return self.ned2ecef_matrix
|
||||
|
||||
@classmethod
|
||||
def from_geodetic(cls, geodetic):
|
||||
return cls(geodetic=geodetic)
|
||||
|
||||
@classmethod
|
||||
def from_ecef(cls, ecef):
|
||||
return cls(ecef=ecef)
|
||||
|
||||
def ecef2ned_single(self, ecef):
|
||||
assert self.lc
|
||||
cdef ECEF e = list2ecef(ecef)
|
||||
cdef NED n = self.lc.ecef2ned(e)
|
||||
return [n.n, n.e, n.d]
|
||||
|
||||
def ned2ecef_single(self, ned):
|
||||
assert self.lc
|
||||
cdef NED n = list2ned(ned)
|
||||
cdef ECEF e = self.lc.ned2ecef(n)
|
||||
return [e.x, e.y, e.z]
|
||||
|
||||
def geodetic2ned_single(self, geodetic):
|
||||
assert self.lc
|
||||
cdef Geodetic g = list2geodetic(geodetic)
|
||||
cdef NED n = self.lc.geodetic2ned(g)
|
||||
return [n.n, n.e, n.d]
|
||||
|
||||
def ned2geodetic_single(self, ned):
|
||||
assert self.lc
|
||||
cdef NED n = list2ned(ned)
|
||||
cdef Geodetic g = self.lc.ned2geodetic(n)
|
||||
return [g.lat, g.lon, g.alt]
|
||||
|
||||
def __dealloc__(self):
|
||||
del self.lc
|
||||
BIN
common/transformations/transformations.so
Executable file
BIN
common/transformations/transformations.so
Executable file
Binary file not shown.
187
common/util.h
Normal file
187
common/util.h
Normal file
@@ -0,0 +1,187 @@
|
||||
#pragma once
|
||||
|
||||
#include <fcntl.h>
|
||||
#include <sys/stat.h>
|
||||
#include <unistd.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <atomic>
|
||||
#include <chrono>
|
||||
#include <csignal>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <mutex>
|
||||
#include <string>
|
||||
#include <thread>
|
||||
#include <vector>
|
||||
|
||||
// keep trying if x gets interrupted by a signal
|
||||
#define HANDLE_EINTR(x) \
|
||||
({ \
|
||||
decltype(x) ret_; \
|
||||
int try_cnt = 0; \
|
||||
do { \
|
||||
ret_ = (x); \
|
||||
} while (ret_ == -1 && errno == EINTR && try_cnt++ < 100); \
|
||||
ret_; \
|
||||
})
|
||||
|
||||
#ifndef sighandler_t
|
||||
typedef void (*sighandler_t)(int sig);
|
||||
#endif
|
||||
|
||||
const double MILE_TO_KM = 1.609344;
|
||||
const double KM_TO_MILE = 1. / MILE_TO_KM;
|
||||
const double MS_TO_KPH = 3.6;
|
||||
const double MS_TO_MPH = MS_TO_KPH * KM_TO_MILE;
|
||||
const double METER_TO_MILE = KM_TO_MILE / 1000.0;
|
||||
const double METER_TO_FOOT = 3.28084;
|
||||
|
||||
#define ALIGNED_SIZE(x, align) (((x) + (align)-1) & ~((align)-1))
|
||||
|
||||
namespace util {
|
||||
|
||||
void set_thread_name(const char* name);
|
||||
int set_realtime_priority(int level);
|
||||
int set_core_affinity(std::vector<int> cores);
|
||||
int set_file_descriptor_limit(uint64_t limit);
|
||||
|
||||
// ***** math helpers *****
|
||||
|
||||
// map x from [a1, a2] to [b1, b2]
|
||||
template <typename T>
|
||||
T map_val(T x, T a1, T a2, T b1, T b2) {
|
||||
x = std::clamp(x, a1, a2);
|
||||
T ra = a2 - a1;
|
||||
T rb = b2 - b1;
|
||||
return (x - a1) * rb / ra + b1;
|
||||
}
|
||||
|
||||
// ***** string helpers *****
|
||||
|
||||
template <typename... Args>
|
||||
std::string string_format(const std::string& format, Args... args) {
|
||||
size_t size = snprintf(nullptr, 0, format.c_str(), args...) + 1;
|
||||
std::unique_ptr<char[]> buf(new char[size]);
|
||||
snprintf(buf.get(), size, format.c_str(), args...);
|
||||
return std::string(buf.get(), buf.get() + size - 1);
|
||||
}
|
||||
|
||||
std::string getenv(const char* key, std::string default_val = "");
|
||||
int getenv(const char* key, int default_val);
|
||||
float getenv(const char* key, float default_val);
|
||||
|
||||
std::string hexdump(const uint8_t* in, const size_t size);
|
||||
bool starts_with(const std::string &s1, const std::string &s2);
|
||||
bool ends_with(const std::string &s, const std::string &suffix);
|
||||
std::string strip(const std::string &str);
|
||||
|
||||
// ***** random helpers *****
|
||||
int random_int(int min, int max);
|
||||
std::string random_string(std::string::size_type length);
|
||||
|
||||
// **** file helpers *****
|
||||
std::string read_file(const std::string& fn);
|
||||
std::map<std::string, std::string> read_files_in_dir(const std::string& path);
|
||||
int write_file(const char* path, const void* data, size_t size, int flags = O_WRONLY, mode_t mode = 0664);
|
||||
|
||||
FILE* safe_fopen(const char* filename, const char* mode);
|
||||
size_t safe_fwrite(const void * ptr, size_t size, size_t count, FILE * stream);
|
||||
int safe_fflush(FILE *stream);
|
||||
int safe_ioctl(int fd, unsigned long request, void *argp);
|
||||
|
||||
std::string readlink(const std::string& path);
|
||||
bool file_exists(const std::string& fn);
|
||||
bool create_directories(const std::string &dir, mode_t mode);
|
||||
|
||||
std::string check_output(const std::string& command);
|
||||
|
||||
bool system_time_valid();
|
||||
|
||||
inline void sleep_for(const int milliseconds) {
|
||||
if (milliseconds > 0) {
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(milliseconds));
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace util
|
||||
|
||||
class ExitHandler {
|
||||
public:
|
||||
ExitHandler() {
|
||||
std::signal(SIGINT, (sighandler_t)set_do_exit);
|
||||
std::signal(SIGTERM, (sighandler_t)set_do_exit);
|
||||
|
||||
#ifndef __APPLE__
|
||||
std::signal(SIGPWR, (sighandler_t)set_do_exit);
|
||||
#endif
|
||||
}
|
||||
inline static std::atomic<bool> power_failure = false;
|
||||
inline static std::atomic<int> signal = 0;
|
||||
inline operator bool() { return do_exit; }
|
||||
inline ExitHandler& operator=(bool v) {
|
||||
signal = 0;
|
||||
do_exit = v;
|
||||
return *this;
|
||||
}
|
||||
private:
|
||||
static void set_do_exit(int sig) {
|
||||
#ifndef __APPLE__
|
||||
power_failure = (sig == SIGPWR);
|
||||
#endif
|
||||
signal = sig;
|
||||
do_exit = true;
|
||||
}
|
||||
inline static std::atomic<bool> do_exit = false;
|
||||
};
|
||||
|
||||
struct unique_fd {
|
||||
unique_fd(int fd = -1) : fd_(fd) {}
|
||||
unique_fd& operator=(unique_fd&& uf) {
|
||||
fd_ = uf.fd_;
|
||||
uf.fd_ = -1;
|
||||
return *this;
|
||||
}
|
||||
~unique_fd() {
|
||||
if (fd_ != -1) close(fd_);
|
||||
}
|
||||
operator int() const { return fd_; }
|
||||
int fd_;
|
||||
};
|
||||
|
||||
class FirstOrderFilter {
|
||||
public:
|
||||
FirstOrderFilter(float x0, float ts, float dt, bool initialized = true) {
|
||||
k_ = (dt / ts) / (1.0 + dt / ts);
|
||||
x_ = x0;
|
||||
initialized_ = initialized;
|
||||
}
|
||||
inline float update(float x) {
|
||||
if (initialized_) {
|
||||
x_ = (1. - k_) * x_ + k_ * x;
|
||||
} else {
|
||||
initialized_ = true;
|
||||
x_ = x;
|
||||
}
|
||||
return x_;
|
||||
}
|
||||
inline void reset(float x) { x_ = x; }
|
||||
inline float x(){ return x_; }
|
||||
|
||||
private:
|
||||
float x_, k_;
|
||||
bool initialized_;
|
||||
};
|
||||
|
||||
template<typename T>
|
||||
void update_max_atomic(std::atomic<T>& max, T const& value) {
|
||||
T prev = max;
|
||||
while (prev < value && !max.compare_exchange_weak(prev, value)) {}
|
||||
}
|
||||
|
||||
typedef struct Rect {
|
||||
int x;
|
||||
int y;
|
||||
int w;
|
||||
int h;
|
||||
} Rect;
|
||||
46
common/util.py
Normal file
46
common/util.py
Normal file
@@ -0,0 +1,46 @@
import os
import subprocess

def sudo_write(val: str, path: str) -> None:
  try:
    with open(path, 'w') as f:
      f.write(str(val))
  except PermissionError:
    os.system(f"sudo chmod a+w {path}")
    try:
      with open(path, 'w') as f:
        f.write(str(val))
    except PermissionError:
      # fallback for debugfs files
      os.system(f"sudo su -c 'echo {val} > {path}'")

def sudo_read(path: str) -> str:
  try:
    return subprocess.check_output(f"sudo cat {path}", shell=True, encoding='utf8').strip()
  except Exception:
    return ""

class MovingAverage:
  def __init__(self, window_size: int):
    self.window_size: int = window_size
    self.buffer: list[float] = [0.0] * window_size
    self.index: int = 0
    self.count: int = 0
    self.sum: float = 0.0

  def add_value(self, new_value: float):
    # Update the sum: subtract the value being replaced and add the new value
    self.sum -= self.buffer[self.index]
    self.buffer[self.index] = new_value
    self.sum += new_value

    # Update the index in a circular manner
    self.index = (self.index + 1) % self.window_size

    # Track the number of added values (for partial windows)
    self.count = min(self.count + 1, self.window_size)

  def get_average(self) -> float:
    if self.count == 0:
      return float('nan')
    return self.sum / self.count
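A usage sketch (not part of this commit) for the fixed-window `MovingAverage` above:

```python
from openpilot.common.util import MovingAverage

avg = MovingAverage(window_size=5)
for v in [10.0, 12.0, 11.0, 13.0, 12.5, 14.0]:
  avg.add_value(v)
print(avg.get_average())   # mean of the last 5 values: (12 + 11 + 13 + 12.5 + 14) / 5
```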
118
common/utils.py
Normal file
118
common/utils.py
Normal file
@@ -0,0 +1,118 @@
|
||||
import io
|
||||
import os
|
||||
import tempfile
|
||||
import contextlib
|
||||
import subprocess
|
||||
import time
|
||||
import functools
|
||||
from subprocess import Popen, PIPE, TimeoutExpired
|
||||
import zstandard as zstd
|
||||
from openpilot.common.swaglog import cloudlog
|
||||
|
||||
LOG_COMPRESSION_LEVEL = 10 # little benefit up to level 15. level ~17 is a small step change
|
||||
|
||||
|
||||
class CallbackReader:
|
||||
"""Wraps a file, but overrides the read method to also
|
||||
call a callback function with the number of bytes read so far."""
|
||||
def __init__(self, f, callback, *args):
|
||||
self.f = f
|
||||
self.callback = callback
|
||||
self.cb_args = args
|
||||
self.total_read = 0
|
||||
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self.f, attr)
|
||||
|
||||
def read(self, *args, **kwargs):
|
||||
chunk = self.f.read(*args, **kwargs)
|
||||
self.total_read += len(chunk)
|
||||
self.callback(*self.cb_args, self.total_read)
|
||||
return chunk
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def atomic_write_in_dir(path: str, mode: str = 'w', buffering: int = -1, encoding: str | None = None, newline: str | None = None,
|
||||
overwrite: bool = False):
|
||||
"""Write to a file atomically using a temporary file in the same directory as the destination file."""
|
||||
dir_name = os.path.dirname(path)
|
||||
|
||||
if not overwrite and os.path.exists(path):
|
||||
raise FileExistsError(f"File '{path}' already exists. To overwrite it, set 'overwrite' to True.")
|
||||
|
||||
with tempfile.NamedTemporaryFile(mode=mode, buffering=buffering, encoding=encoding, newline=newline, dir=dir_name, delete=False) as tmp_file:
|
||||
yield tmp_file
|
||||
tmp_file_name = tmp_file.name
|
||||
os.replace(tmp_file_name, path)
|
||||
|
||||
|
||||
def get_upload_stream(filepath: str, should_compress: bool) -> tuple[io.BufferedIOBase, int]:
|
||||
if not should_compress:
|
||||
file_size = os.path.getsize(filepath)
|
||||
file_stream = open(filepath, "rb")
|
||||
return file_stream, file_size
|
||||
|
||||
# Compress the file on the fly
|
||||
compressed_stream = io.BytesIO()
|
||||
compressor = zstd.ZstdCompressor(level=LOG_COMPRESSION_LEVEL)
|
||||
|
||||
with open(filepath, "rb") as f:
|
||||
compressor.copy_stream(f, compressed_stream)
|
||||
compressed_size = compressed_stream.tell()
|
||||
compressed_stream.seek(0)
|
||||
return compressed_stream, compressed_size
|
||||
|
||||
|
||||
# remove all keys that end in DEPRECATED
|
||||
def strip_deprecated_keys(d):
|
||||
for k in list(d.keys()):
|
||||
if isinstance(k, str):
|
||||
if k.endswith('DEPRECATED'):
|
||||
d.pop(k)
|
||||
elif isinstance(d[k], dict):
|
||||
strip_deprecated_keys(d[k])
|
||||
return d
|
||||
|
||||
|
||||
def run_cmd(cmd: list[str], cwd=None, env=None) -> str:
|
||||
return subprocess.check_output(cmd, encoding='utf8', cwd=cwd, env=env).strip()
|
||||
|
||||
|
||||
def run_cmd_default(cmd: list[str], default: str = "", cwd=None, env=None) -> str:
|
||||
try:
|
||||
return run_cmd(cmd, cwd=cwd, env=env)
|
||||
except subprocess.CalledProcessError:
|
||||
return default
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def managed_proc(cmd: list[str], env: dict[str, str]):
|
||||
proc = Popen(cmd, env=env, stdout=PIPE, stderr=PIPE)
|
||||
try:
|
||||
yield proc
|
||||
finally:
|
||||
if proc.poll() is None:
|
||||
proc.terminate()
|
||||
try:
|
||||
proc.wait(timeout=5)
|
||||
except TimeoutExpired:
|
||||
proc.kill()
|
||||
|
||||
|
||||
def retry(attempts=3, delay=1.0, ignore_failure=False):
|
||||
def decorator(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
for _ in range(attempts):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception:
|
||||
cloudlog.exception(f"{func.__name__} failed, trying again")
|
||||
time.sleep(delay)
|
||||
|
||||
if ignore_failure:
|
||||
cloudlog.error(f"{func.__name__} failed after retry")
|
||||
else:
|
||||
raise Exception(f"{func.__name__} failed after retry")
|
||||
return wrapper
|
||||
return decorator
|
||||
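A usage sketch (not part of this commit) for the `retry` decorator defined above; `fetch_config` is a made-up example of a transiently failing call.

```python
from openpilot.common.utils import retry

@retry(attempts=3, delay=0.5, ignore_failure=False)
def fetch_config() -> dict:
  # stand-in for a call that can fail transiently (network, USB, etc.)
  raise ConnectionError("transient failure")

try:
  fetch_config()
except Exception as e:
  print(f"still failing after retries: {e}")
```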