sunnypilot v0.10.1
version: sunnypilot v0.10.1 (staging-tici) date: 2025-10-13T01:35:37 master commit: 737a6c4236e843034680c951005b38d15815363f
This commit is contained in: commit 1f7233cb98
19 .clang-tidy Normal file
@@ -0,0 +1,19 @@
---
Checks: '
  bugprone-*,
  -bugprone-integer-division,
  -bugprone-narrowing-conversions,
  performance-*,
  clang-analyzer-*,
  misc-*,
  -misc-unused-parameters,
  modernize-*,
  -modernize-avoid-c-arrays,
  -modernize-deprecated-headers,
  -modernize-use-auto,
  -modernize-use-using,
  -modernize-use-nullptr,
  -modernize-use-trailing-return-type,
'
CheckOptions:
...
5 .codespellignore Normal file
@@ -0,0 +1,5 @@
Wen
REGIST
PullRequest
cancelled
FOF
39 .dockerignore Normal file
@@ -0,0 +1,39 @@
**/.git
.DS_Store
*.dylib
*.DSYM
*.d
*.pyc
*.pyo
.*.swp
.*.swo
.*.un~
*.tmp
*.o
*.o-*
*.os
*.os-*
*.so
*.a

venv/
.venv/

notebooks
phone
massivemap
neos
installer
chffr/app2
chffr/backend/env
selfdrive/nav
selfdrive/baseui
selfdrive/test/simulator2
**/cache_data
xx/plus
xx/community
xx/projects
!xx/projects/eon_testing_master
!xx/projects/map3d
xx/ops
xx/junk
11 .editorconfig Normal file
@@ -0,0 +1,11 @@
root = true

[*]
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

[*.{py,pyx,pxd}]
charset = utf-8
indent_style = space
indent_size = 2
112 .gitignore vendored Normal file
@@ -0,0 +1,112 @@
venv/
.venv/
.ci_cache
.env
.clang-format
.DS_Store
.tags
.ipynb_checkpoints
.idea
.overlay_init
.overlay_consistent
.sconsign.dblite
model2.png
a.out
.hypothesis
.cache/

/docs_site/

*.mp4
*.dylib
*.DSYM
*.d
*.pyc
*.pyo
.*.swp
.*.swo
.*.un~
*.tmp
*.o
*.o-*
*.os
*.os-*
*.so
*.a
*.clb
*.class
*.pyxbldc
*.vcd
*.qm
*_pyx.cpp
config.json
clcache
compile_commands.json
compare_runtime*.html

persist
selfdrive/pandad/pandad
cereal/services.h
cereal/gen
cereal/messaging/bridge
selfdrive/mapd/default_speeds_by_region.json
system/proclogd/proclogd
selfdrive/ui/translations/tmp
selfdrive/test/longitudinal_maneuvers/out
selfdrive/car/tests/cars_dump
system/camerad/camerad
system/camerad/test/ae_gray_test

notebooks
hyperthneed
provisioning

.coverage*
coverage.xml
htmlcov
pandaextra

.mypy_cache/
flycheck_*

cppcheck_report.txt
comma*.sh

selfdrive/modeld/models/*.pkl
sunnypilot/modeld*/thneed/compile
sunnypilot/modeld*/models/*.thneed
sunnypilot/modeld*/models/*.pkl

*.bz2
*.zst

build/

!**/.gitkeep

poetry.toml
Pipfile

### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets

# Local History for Visual Studio Code
.history/

# Built Visual Studio Code Extensions
*.vsix

### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide

### JetBrains ###
!.idea/customTargets.xml
!.idea/tools/*
!.run/*
8 .vscode/extensions.json vendored Normal file
@@ -0,0 +1,8 @@
{
  "recommendations": [
    "ms-python.python",
    "ms-vscode.cpptools",
    "elagil.pre-commit-helper",
    "charliermarsh.ruff",
  ]
}
46 .vscode/launch.json vendored Normal file
@@ -0,0 +1,46 @@
{
  "version": "0.2.0",
  "inputs": [
    {
      "id": "python_process",
      "type": "pickString",
      "description": "Select the process to debug",
      "options": [
        "selfdrive/controls/controlsd.py",
        "system/timed/timed.py",
        "tools/sim/run_bridge.py"
      ]
    },
    {
      "id": "cpp_process",
      "type": "pickString",
      "description": "Select the process to debug",
      "options": [
        "selfdrive/ui/ui"
      ]
    },
    {
      "id": "args",
      "description": "Arguments to pass to the process",
      "type": "promptString"
    }
  ],
  "configurations": [
    {
      "name": "Python: openpilot Process",
      "type": "debugpy",
      "request": "launch",
      "program": "${input:python_process}",
      "console": "integratedTerminal",
      "justMyCode": true,
      "args": "${input:args}"
    },
    {
      "name": "C++: openpilot Process",
      "type": "cppdbg",
      "request": "launch",
      "program": "${workspaceFolder}/${input:cpp_process}",
      "cwd": "${workspaceFolder}",
    }
  ]
}
27 .vscode/settings.json vendored Normal file
@@ -0,0 +1,27 @@
{
  "editor.tabSize": 2,
  "editor.insertSpaces": true,
  "editor.renderWhitespace": "trailing",
  "files.trimTrailingWhitespace": true,
  "search.exclude": {
    "**/.git": true,
    "**/.venv": true,
    "**/__pycache__": true
  },
  "files.exclude": {
    "**/.git": true,
    "**/.venv": true,
    "**/__pycache__": true
  },
  "python.analysis.exclude": [
    "**/.git",
    "**/.venv",
    "**/__pycache__",
    // exclude directories that should be using the symlinked version
    "common/**",
    "selfdrive/**",
    "system/**",
    "third_party/**",
    "tools/**",
  ]
}
12 Dockerfile.openpilot Normal file
@@ -0,0 +1,12 @@
FROM ghcr.io/commaai/openpilot-base:latest

ENV PYTHONUNBUFFERED=1

ENV OPENPILOT_PATH=/home/batman/openpilot

RUN mkdir -p ${OPENPILOT_PATH}
WORKDIR ${OPENPILOT_PATH}

COPY . ${OPENPILOT_PATH}/

RUN scons --cache-readonly -j$(nproc)
82 Dockerfile.openpilot_base Normal file
@@ -0,0 +1,82 @@
FROM ubuntu:24.04

ENV PYTHONUNBUFFERED=1

ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && \
    apt-get install -y --no-install-recommends sudo tzdata locales ssh pulseaudio xvfb x11-xserver-utils gnome-screenshot python3-tk python3-dev && \
    rm -rf /var/lib/apt/lists/*

RUN sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen
ENV LANG=en_US.UTF-8
ENV LANGUAGE=en_US:en
ENV LC_ALL=en_US.UTF-8

COPY tools/install_ubuntu_dependencies.sh /tmp/tools/
RUN /tmp/tools/install_ubuntu_dependencies.sh && \
    rm -rf /var/lib/apt/lists/* /tmp/* && \
    cd /usr/lib/gcc/arm-none-eabi/* && \
    rm -rf arm/ thumb/nofp thumb/v6* thumb/v8* thumb/v7+fp thumb/v7-r+fp.sp

# Add OpenCL
RUN apt-get update && apt-get install -y --no-install-recommends \
    apt-utils \
    alien \
    unzip \
    tar \
    curl \
    xz-utils \
    dbus \
    gcc-arm-none-eabi \
    tmux \
    vim \
    libx11-6 \
    wget \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir -p /tmp/opencl-driver-intel && \
    cd /tmp/opencl-driver-intel && \
    wget https://github.com/intel/llvm/releases/download/2024-WW14/oclcpuexp-2024.17.3.0.09_rel.tar.gz && \
    wget https://github.com/oneapi-src/oneTBB/releases/download/v2021.12.0/oneapi-tbb-2021.12.0-lin.tgz && \
    mkdir -p /opt/intel/oclcpuexp_2024.17.3.0.09_rel && \
    cd /opt/intel/oclcpuexp_2024.17.3.0.09_rel && \
    tar -zxvf /tmp/opencl-driver-intel/oclcpuexp-2024.17.3.0.09_rel.tar.gz && \
    mkdir -p /etc/OpenCL/vendors && \
    echo /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64/libintelocl.so > /etc/OpenCL/vendors/intel_expcpu.icd && \
    cd /opt/intel && \
    tar -zxvf /tmp/opencl-driver-intel/oneapi-tbb-2021.12.0-lin.tgz && \
    ln -s /opt/intel/oneapi-tbb-2021.12.0/lib/intel64/gcc4.8/libtbb.so /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 && \
    ln -s /opt/intel/oneapi-tbb-2021.12.0/lib/intel64/gcc4.8/libtbbmalloc.so /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 && \
    ln -s /opt/intel/oneapi-tbb-2021.12.0/lib/intel64/gcc4.8/libtbb.so.12 /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 && \
    ln -s /opt/intel/oneapi-tbb-2021.12.0/lib/intel64/gcc4.8/libtbbmalloc.so.2 /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 && \
    mkdir -p /etc/ld.so.conf.d && \
    echo /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 > /etc/ld.so.conf.d/libintelopenclexp.conf && \
    ldconfig -f /etc/ld.so.conf.d/libintelopenclexp.conf && \
    cd / && \
    rm -rf /tmp/opencl-driver-intel

ENV NVIDIA_VISIBLE_DEVICES=all
ENV NVIDIA_DRIVER_CAPABILITIES=graphics,utility,compute
ENV QTWEBENGINE_DISABLE_SANDBOX=1

RUN dbus-uuidgen > /etc/machine-id
RUN apt-get update && apt-get install -y fonts-noto-cjk fonts-noto-color-emoji

ARG USER=batman
ARG USER_UID=1001
RUN useradd -m -s /bin/bash -u $USER_UID $USER
RUN usermod -aG sudo $USER
RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
USER $USER

COPY --chown=$USER pyproject.toml uv.lock /home/$USER
COPY --chown=$USER tools/install_python_dependencies.sh /home/$USER/tools/

ENV VIRTUAL_ENV=/home/$USER/.venv
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
RUN cd /home/$USER && \
    tools/install_python_dependencies.sh && \
    rm -rf tools/ pyproject.toml uv.lock .cache

USER root
RUN sudo git config --global --add safe.directory /tmp/openpilot
12 Dockerfile.sunnypilot Normal file
@@ -0,0 +1,12 @@
FROM ghcr.io/sunnypilot/sunnypilot-tici-base:latest

ENV PYTHONUNBUFFERED=1

ENV OPENPILOT_PATH=/home/batman/openpilot

RUN mkdir -p ${OPENPILOT_PATH}
WORKDIR ${OPENPILOT_PATH}

COPY . ${OPENPILOT_PATH}/

RUN scons --cache-readonly -j$(nproc)
83 Dockerfile.sunnypilot_base Normal file
@@ -0,0 +1,83 @@
FROM ubuntu:24.04

ENV PYTHONUNBUFFERED=1

ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && \
    apt-get install -y --no-install-recommends sudo tzdata locales ssh pulseaudio xvfb x11-xserver-utils gnome-screenshot python3-tk python3-dev && \
    rm -rf /var/lib/apt/lists/*

RUN sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen
ENV LANG=en_US.UTF-8
ENV LANGUAGE=en_US:en
ENV LC_ALL=en_US.UTF-8

COPY tools/install_ubuntu_dependencies.sh /tmp/tools/
RUN /tmp/tools/install_ubuntu_dependencies.sh && \
    rm -rf /var/lib/apt/lists/* /tmp/* && \
    cd /usr/lib/gcc/arm-none-eabi/* && \
    rm -rf arm/ thumb/nofp thumb/v6* thumb/v8* thumb/v7+fp thumb/v7-r+fp.sp

# Add OpenCL
RUN apt-get update && apt-get install -y --no-install-recommends \
    apt-utils \
    alien \
    unzip \
    tar \
    curl \
    xz-utils \
    dbus \
    gcc-arm-none-eabi \
    tmux \
    vim \
    libx11-6 \
    wget \
    rsync \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir -p /tmp/opencl-driver-intel && \
    cd /tmp/opencl-driver-intel && \
    wget https://github.com/intel/llvm/releases/download/2024-WW14/oclcpuexp-2024.17.3.0.09_rel.tar.gz && \
    wget https://github.com/oneapi-src/oneTBB/releases/download/v2021.12.0/oneapi-tbb-2021.12.0-lin.tgz && \
    mkdir -p /opt/intel/oclcpuexp_2024.17.3.0.09_rel && \
    cd /opt/intel/oclcpuexp_2024.17.3.0.09_rel && \
    tar -zxvf /tmp/opencl-driver-intel/oclcpuexp-2024.17.3.0.09_rel.tar.gz && \
    mkdir -p /etc/OpenCL/vendors && \
    echo /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64/libintelocl.so > /etc/OpenCL/vendors/intel_expcpu.icd && \
    cd /opt/intel && \
    tar -zxvf /tmp/opencl-driver-intel/oneapi-tbb-2021.12.0-lin.tgz && \
    ln -s /opt/intel/oneapi-tbb-2021.12.0/lib/intel64/gcc4.8/libtbb.so /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 && \
    ln -s /opt/intel/oneapi-tbb-2021.12.0/lib/intel64/gcc4.8/libtbbmalloc.so /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 && \
    ln -s /opt/intel/oneapi-tbb-2021.12.0/lib/intel64/gcc4.8/libtbb.so.12 /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 && \
    ln -s /opt/intel/oneapi-tbb-2021.12.0/lib/intel64/gcc4.8/libtbbmalloc.so.2 /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 && \
    mkdir -p /etc/ld.so.conf.d && \
    echo /opt/intel/oclcpuexp_2024.17.3.0.09_rel/x64 > /etc/ld.so.conf.d/libintelopenclexp.conf && \
    ldconfig -f /etc/ld.so.conf.d/libintelopenclexp.conf && \
    cd / && \
    rm -rf /tmp/opencl-driver-intel

ENV NVIDIA_VISIBLE_DEVICES=all
ENV NVIDIA_DRIVER_CAPABILITIES=graphics,utility,compute
ENV QTWEBENGINE_DISABLE_SANDBOX=1

RUN dbus-uuidgen > /etc/machine-id
RUN apt-get update && apt-get install -y fonts-noto-cjk fonts-noto-color-emoji

ARG USER=batman
ARG USER_UID=1001
RUN useradd -m -s /bin/bash -u $USER_UID $USER
RUN usermod -aG sudo $USER
RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
USER $USER

COPY --chown=$USER pyproject.toml uv.lock /home/$USER
COPY --chown=$USER tools/install_python_dependencies.sh /home/$USER/tools/

ENV VIRTUAL_ENV=/home/$USER/.venv
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
RUN cd /home/$USER && \
    tools/install_python_dependencies.sh && \
    rm -rf tools/ pyproject.toml uv.lock .cache

USER root
RUN sudo git config --global --add safe.directory /tmp/openpilot
7 LICENSE Normal file
@@ -0,0 +1,7 @@
Copyright (c) 2018, Comma.ai, Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 LICENSE.md Normal file
@@ -0,0 +1,21 @@
# Custom MIT License

Copyright (c) 2024, Haibin Wen, SUNNYPILOT LLC

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to view and modify the Software, subject to the following conditions:

1. **Permission Required**: Permission Required for Commercial, For-Profit, or Closed Source Use: Use of the Software, in whole or in part, for any commercial purposes, for-profit projects, or in closed source projects requires explicit written permission from the original author(s).

2. **Redistribution**: Any redistribution of the Software, modified or unmodified, must retain this license notice and the following acknowledgment:
   "This software is licensed under a custom license requiring permission for use."

3. **Visibility**: Any project that uses the Software must visibly mention the following acknowledgment:
   "This project uses software from Haibin Wen and SUNNYPILOT LLC and is licensed under a custom license requiring permission for use."

4. **No Warranty**: THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

Contact sunnypilot Support <support@sunnypilot.ai> for permission requests.

---

Haibin Wen, SUNNYPILOT LLC
131 README.md Normal file
@@ -0,0 +1,131 @@

## 🌞 What is sunnypilot?

[sunnypilot](https://github.com/sunnyhaibin/sunnypilot) is a fork of comma.ai's openpilot, an open source driver assistance system. sunnypilot offers the user a unique driving experience for more than 300 supported car makes and models, with modified behaviors of driving assist engagements. sunnypilot complies with comma.ai's safety rules as closely as possible.

## 💭 Join our Discord

Join the official sunnypilot Discord server to stay up to date with all the latest features and be a part of shaping the future of sunnypilot!

* https://discord.gg/sunnypilot

## Documentation

https://docs.sunnypilot.ai/ is your one-stop shop for everything about sunnypilot, from features to installation to the FAQ.

## 🚘 Running on a dedicated device in a car

* A supported device to run this software
  * a [comma three](https://comma.ai/shop/products/three) or a [C3X](https://comma.ai/shop/comma-3x)
* This software
* One of [the 300+ supported cars](https://github.com/commaai/openpilot/blob/master/docs/CARS.md). We support Honda, Toyota, Hyundai, Nissan, Kia, Chrysler, Lexus, Acura, Audi, VW, Ford and more. If your car is not supported but has adaptive cruise control and lane-keeping assist, it's likely able to run sunnypilot.
* A [car harness](https://comma.ai/shop/products/car-harness) to connect to your car

Detailed instructions for [how to mount the device in a car](https://comma.ai/setup).

## Installation

Please refer to [Recommended Branches](#-recommended-branches) to find your preferred/supported branch. This guide assumes you want to install the latest `staging-tici` branch.

### If you want to use our newest branches (our rewrite)

> [!TIP]
> You can see the rewrite state on our [rewrite project board](https://github.com/orgs/sunnypilot/projects/2). To install the new branches, use the links below.

* sunnypilot not installed, or you installed a version before 0.8.17?
  1. [Factory reset/uninstall](https://github.com/commaai/openpilot/wiki/FAQ#how-can-i-reset-the-device) the previous software if you have another software/fork installed.
  2. After the factory reset/uninstall and upon reboot, select `Custom Software` when given the option.
  3. Input the installation URL per Recommended Branches. Example: `https://staging-tici.sunnypilot.ai`.
  4. Complete the rest of the installation following the onscreen instructions.

* sunnypilot already installed and you installed a version after 0.8.17?
  1. On the comma three, go to `Settings` ▶️ `Software`.
  2. At the `Download` option, press `CHECK`. This will fetch the list of latest branches from sunnypilot.
  3. At the `Target Branch` option, press `SELECT` to open the Target Branch selector.
  4. Scroll to select the desired branch per Recommended Branches (see below). Example: `staging-tici`

| Branch          | Installation URL                              |
|:---------------:|:---------------------------------------------:|
| `staging-tici`  | `https://staging-tici.sunnypilot.ai`          |
| `custom-branch` | `https://install.sunnypilot.ai/{branch_name}` |
| `release-tici`  | **Not yet available**.                        |

> [!TIP]
> You can use `sunnypilot/targetbranch` as an install URL. Example: `sunnypilot/staging-tici`.

> [!NOTE]
> Do you require further assistance with software installation? Join the [sunnypilot Discord server](https://discord.sunnypilot.com) and message us in the `#installation-help` channel.

<details>

<summary>Older legacy branches</summary>

### If you want to use our older legacy branches (*not recommended*)

> [!IMPORTANT]
> It is recommended to [re-flash AGNOS](https://flash.comma.ai/) if you intend to downgrade from the new branches.
> You can still restore the latest sunnylink backup made on the old branches.

| Branch       | Installation URL                 |
|:------------:|:--------------------------------:|
| `release-c3` | https://release-c3.sunnypilot.ai |
| `staging-c3` | https://staging-c3.sunnypilot.ai |
| `dev-c3`     | https://dev-c3.sunnypilot.ai     |

</details>

## 🎆 Pull Requests

We welcome both pull requests and issues on GitHub. Bug fixes are encouraged.

Pull requests should be against the most current `master` branch.

## 📊 User Data

By default, sunnypilot uploads the driving data to comma servers. You can also access your data through [comma connect](https://connect.comma.ai/).

sunnypilot is open source software. The user is free to disable data collection if they wish to do so.

sunnypilot logs the road-facing camera, CAN, GPS, IMU, magnetometer, thermal sensors, crashes, and operating system logs.
The driver-facing camera and microphone are only logged if you explicitly opt in in settings.

By using this software, you understand that use of this software or its related services will generate certain types of user data, which may be logged and stored at the sole discretion of comma. By accepting this agreement, you grant an irrevocable, perpetual, worldwide right to comma for the use of this data.

## Licensing

sunnypilot is released under the [MIT License](LICENSE). This repository includes original work as well as significant portions of code derived from [openpilot by comma.ai](https://github.com/commaai/openpilot), which is also released under the MIT license with additional disclaimers.

The original openpilot license notice, including comma.ai's indemnification and alpha software disclaimer, is reproduced below as required:

> openpilot is released under the MIT license. Some parts of the software are released under other licenses as specified.
>
> Any user of this software shall indemnify and hold harmless Comma.ai, Inc. and its directors, officers, employees, agents, stockholders, affiliates, subcontractors and customers from and against all allegations, claims, actions, suits, demands, damages, liabilities, obligations, losses, settlements, judgments, costs and expenses (including without limitation attorneys' fees and costs) which arise out of, relate to or result from any use of this software by user.
>
> **THIS IS ALPHA QUALITY SOFTWARE FOR RESEARCH PURPOSES ONLY. THIS IS NOT A PRODUCT.
> YOU ARE RESPONSIBLE FOR COMPLYING WITH LOCAL LAWS AND REGULATIONS.
> NO WARRANTY EXPRESSED OR IMPLIED.**

For full license terms, please see the [`LICENSE`](LICENSE) file.

## 💰 Support sunnypilot

If you find any of the features useful, consider becoming a [sponsor on GitHub](https://github.com/sponsors/sunnyhaibin) to support future feature development and improvements.

By becoming a sponsor, you will gain access to exclusive content, early access to new features, and the opportunity to directly influence the project's development.

<h3>GitHub Sponsor</h3>

<a href="https://github.com/sponsors/sunnyhaibin">
<img src="https://user-images.githubusercontent.com/47793918/244135584-9800acbd-69fd-4b2b-bec9-e5fa2d85c817.png" alt="Become a Sponsor" width="300" style="max-width: 100%; height: auto;">
</a>
<br>

<h3>PayPal</h3>

<a href="https://paypal.me/sunnyhaibin0850" target="_blank">
<img src="https://www.paypalobjects.com/en_US/i/btn/btn_donateCC_LG.gif" alt="PayPal this" title="PayPal - The safer, easier way to pay online!" border="0" />
</a>
<br><br>

Your continuous love and support are greatly appreciated! Enjoy 🥰

<span>-</span> Jason, Founder of sunnypilot
1078 RELEASES.md Normal file
File diff suppressed because it is too large

5 SECURITY.md Normal file
@@ -0,0 +1,5 @@
# Security Policy

## Reporting a Vulnerability

Suspected vulnerabilities can be reported to both `adeeb@comma.ai` and `security@comma.ai`.
95 cereal/README.md Normal file
@@ -0,0 +1,95 @@
# What is cereal?

cereal is the messaging system for openpilot. It uses [msgq](https://github.com/commaai/msgq) as a pub/sub backend, and [Cap'n proto](https://capnproto.org/capnp-tool.html) for serialization of the structs.


## Messaging Spec

You'll find the message types in [log.capnp](log.capnp). It uses [Cap'n proto](https://capnproto.org/capnp-tool.html) and defines one struct called `Event`.

All `Events` have a `logMonoTime` and a `valid`. Then a big union defines the packet type.
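
For orientation, here is a minimal sketch (not part of the spec itself) of reading those common fields and the union from Python; it assumes a `carState` publisher is already running:

```python
import cereal.messaging as messaging

sock = messaging.sub_sock('carState')
msg = messaging.recv_one(sock)     # a full Event

print(msg.logMonoTime, msg.valid)  # common fields present on every Event
print(msg.which())                 # which union member is set, e.g. 'carState'
print(msg.carState.vEgo)           # the payload struct selected by the union
```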

### Best Practices

- **All fields must describe quantities in SI units**, unless otherwise specified in the field name.
- In the context of the message they are in, field names should be completely unambiguous.
- All values should be easy to plot and be human-readable with minimal parsing.

### Maintaining backwards-compatibility

When making changes to the messaging spec you want to maintain backwards-compatibility, such that old logs can
be parsed with a new version of cereal. Adding structs and adding members to structs is generally safe, most other
things are not. Read more details [here](https://capnproto.org/language.html).

### Custom forks

Forks of [openpilot](https://github.com/commaai/openpilot) might want to add things to the messaging
spec, however this could conflict with future changes made in mainline cereal/openpilot. Rebasing against mainline openpilot
then means breaking backwards-compatibility with all old logs of your fork. So we added reserved events in
[custom.capnp](custom.capnp) that we will leave empty in mainline cereal/openpilot. **If you only modify those, you can ensure your
fork will remain backwards-compatible with all versions of mainline openpilot and your fork.**

An example of compatible changes:
```diff
diff --git a/cereal/custom.capnp b/cereal/custom.capnp
index 3348e859e..3365c7b98 100644
--- a/cereal/custom.capnp
+++ b/cereal/custom.capnp
@@ -10,7 +10,11 @@ $Cxx.namespace("cereal");
 # DO rename the structs
 # DON'T change the identifier (e.g. @0x81c2f05a394cf4af)

-struct CustomReserved0 @0x81c2f05a394cf4af {
+struct SteeringInfo @0x81c2f05a394cf4af {
+  active @0 :Bool;
+  steeringAngleDeg @1 :Float32;
+  steeringRateDeg @2 :Float32;
+  steeringAccelDeg @3 :Float32;
 }

 struct CustomReserved1 @0xaedffd8f31e7b55d {
diff --git a/cereal/log.capnp b/cereal/log.capnp
index 1209f3fd9..b189f58b6 100644
--- a/cereal/log.capnp
+++ b/cereal/log.capnp
@@ -2558,14 +2558,14 @@ struct Event {

   # DO change the name of the field
   # DON'T change anything after the "@"
-  customReservedRawData0 @124 :Data;
+  rawCanData @124 :Data;
   customReservedRawData1 @125 :Data;
   customReservedRawData2 @126 :Data;

   # DO change the name of the field and struct
   # DON'T change the ID (e.g. @107)
   # DON'T change which struct it points to
-  customReserved0 @107 :Custom.CustomReserved0;
+  steeringInfo @107 :Custom.SteeringInfo;
   customReserved1 @108 :Custom.CustomReserved1;
   customReserved2 @109 :Custom.CustomReserved2;
   customReserved3 @110 :Custom.CustomReserved3;
```
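
With a rename like the one above in place, a fork can publish and read its new event like any other service. A hypothetical sketch (the `steeringInfo` name comes from the example diff above, and the service would still need its own entry in the services list):

```python
import cereal.messaging as messaging

pm = messaging.PubMaster(['steeringInfo'])

dat = messaging.new_message('steeringInfo')   # Event with the renamed union member initialized
dat.steeringInfo.active = True
dat.steeringInfo.steeringAngleDeg = 12.5
pm.send('steeringInfo', dat)
```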

---

Example
---
```python
import cereal.messaging as messaging

# in subscriber
sm = messaging.SubMaster(['sensorEvents'])
while 1:
  sm.update()
  print(sm['sensorEvents'])
```

```python
# in publisher
pm = messaging.PubMaster(['sensorEvents'])
dat = messaging.new_message('sensorEvents', size=1)
dat.sensorEvents[0] = {"gyro": {"v": [0.1, -0.1, 0.1]}}
pm.send('sensorEvents', dat)
```
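
`SubMaster` and `PubMaster` wrap the lower-level socket helpers; a rough equivalent of the subscriber using the raw socket API might look like this (a sketch only, assuming a `sensorEvents` publisher is running):

```python
import cereal.messaging as messaging

sock = messaging.sub_sock('sensorEvents', conflate=True)
while True:
  msg = messaging.recv_one_or_none(sock)  # non-blocking; None if nothing new
  if msg is not None:
    print(msg.sensorEvents)
```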
11 cereal/__init__.py Normal file
@@ -0,0 +1,11 @@
import os
import capnp
from importlib.resources import as_file, files

capnp.remove_import_hook()

with as_file(files("cereal")) as fspath:
  CEREAL_PATH = fspath.as_posix()
  log = capnp.load(os.path.join(CEREAL_PATH, "log.capnp"))
  car = capnp.load(os.path.join(CEREAL_PATH, "car.capnp"))
  custom = capnp.load(os.path.join(CEREAL_PATH, "custom.capnp"))
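
The module above simply exposes the loaded Cap'n Proto schemas. As a quick, illustrative sketch (not part of this commit), they can be used directly to build and parse messages:

```python
from cereal import log

# build an Event and serialize it
evt = log.Event.new_message(valid=True, logMonoTime=123)
raw = evt.to_bytes()

# parse it back
with log.Event.from_bytes(raw) as parsed:
  print(parsed.logMonoTime, parsed.valid)
```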
1 cereal/car.capnp Symbolic link
@@ -0,0 +1 @@
../opendbc_repo/opendbc/car/car.capnp
314 cereal/custom.capnp Normal file
@@ -0,0 +1,314 @@
using Cxx = import "./include/c++.capnp";
$Cxx.namespace("cereal");

@0xb526ba661d550a59;

# custom.capnp: a home for empty structs reserved for custom forks
# These structs are guaranteed to remain reserved and empty in mainline
# cereal, so use these if you want custom events in your fork.

# DO rename the structs
# DON'T change the identifier (e.g. @0x81c2f05a394cf4af)

struct ModularAssistiveDrivingSystem {
  state @0 :ModularAssistiveDrivingSystemState;
  enabled @1 :Bool;
  active @2 :Bool;
  available @3 :Bool;

  enum ModularAssistiveDrivingSystemState {
    disabled @0;
    paused @1;
    enabled @2;
    softDisabling @3;
    overriding @4;
  }
}

# Same struct as Log.RadarState.LeadData
struct LeadData {
  dRel @0 :Float32;
  yRel @1 :Float32;
  vRel @2 :Float32;
  aRel @3 :Float32;
  vLead @4 :Float32;
  dPath @6 :Float32;
  vLat @7 :Float32;
  vLeadK @8 :Float32;
  aLeadK @9 :Float32;
  fcw @10 :Bool;
  status @11 :Bool;
  aLeadTau @12 :Float32;
  modelProb @13 :Float32;
  radar @14 :Bool;
  radarTrackId @15 :Int32 = -1;

  aLeadDEPRECATED @5 :Float32;
}

struct SelfdriveStateSP @0x81c2f05a394cf4af {
  mads @0 :ModularAssistiveDrivingSystem;
}

struct ModelManagerSP @0xaedffd8f31e7b55d {
  activeBundle @0 :ModelBundle;
  selectedBundle @1 :ModelBundle;
  availableBundles @2 :List(ModelBundle);

  struct DownloadUri {
    uri @0 :Text;
    sha256 @1 :Text;
  }

  enum DownloadStatus {
    notDownloading @0;
    downloading @1;
    downloaded @2;
    cached @3;
    failed @4;
  }

  struct DownloadProgress {
    status @0 :DownloadStatus;
    progress @1 :Float32;
    eta @2 :UInt32;
  }

  struct Artifact {
    fileName @0 :Text;
    downloadUri @1 :DownloadUri;
    downloadProgress @2 :DownloadProgress;
  }

  struct Model {
    type @0 :Type;
    artifact @1 :Artifact; # Main artifact
    metadata @2 :Artifact; # Metadata artifact

    enum Type {
      supercombo @0;
      navigation @1;
      vision @2;
      policy @3;
    }
  }

  enum Runner {
    snpe @0;
    tinygrad @1;
    stock @2;
  }

  struct Override {
    key @0 :Text;
    value @1 :Text;
  }

  struct ModelBundle {
    index @0 :UInt32;
    internalName @1 :Text;
    displayName @2 :Text;
    models @3 :List(Model);
    status @4 :DownloadStatus;
    generation @5 :UInt32;
    environment @6 :Text;
    runner @7 :Runner;
    is20hz @8 :Bool;
    ref @9 :Text;
    minimumSelectorVersion @10 :UInt32;
    overrides @11 :List(Override);
  }
}

struct LongitudinalPlanSP @0xf35cc4560bbf6ec2 {
  dec @0 :DynamicExperimentalControl;

  struct DynamicExperimentalControl {
    state @0 :DynamicExperimentalControlState;
    enabled @1 :Bool;
    active @2 :Bool;

    enum DynamicExperimentalControlState {
      acc @0;
      blended @1;
    }
  }
}

struct OnroadEventSP @0xda96579883444c35 {
  events @0 :List(Event);

  struct Event {
    name @0 :EventName;

    # event types
    enable @1 :Bool;
    noEntry @2 :Bool;
    warning @3 :Bool; # alerts presented only when enabled or soft disabling
    userDisable @4 :Bool;
    softDisable @5 :Bool;
    immediateDisable @6 :Bool;
    preEnable @7 :Bool;
    permanent @8 :Bool; # alerts presented regardless of openpilot state
    overrideLateral @10 :Bool;
    overrideLongitudinal @9 :Bool;
  }

  enum EventName {
    lkasEnable @0;
    lkasDisable @1;
    manualSteeringRequired @2;
    manualLongitudinalRequired @3;
    silentLkasEnable @4;
    silentLkasDisable @5;
    silentBrakeHold @6;
    silentWrongGear @7;
    silentReverseGear @8;
    silentDoorOpen @9;
    silentSeatbeltNotLatched @10;
    silentParkBrake @11;
    controlsMismatchLateral @12;
    hyundaiRadarTracksConfirmed @13;
    experimentalModeSwitched @14;
    wrongCarModeAlertOnly @15;
    pedalPressedAlertOnly @16;
    laneTurnLeft @17;
    laneTurnRight @18;
  }
}

struct CarParamsSP @0x80ae746ee2596b11 {
  flags @0 :UInt32; # flags for car specific quirks in sunnypilot
  safetyParam @1 :Int16; # flags for sunnypilot's custom safety flags

  neuralNetworkLateralControl @2 :NeuralNetworkLateralControl;

  struct NeuralNetworkLateralControl {
    model @0 :Model;
    fuzzyFingerprint @1 :Bool;

    struct Model {
      path @0 :Text;
      name @1 :Text;
    }
  }
}

struct CarControlSP @0xa5cd762cd951a455 {
  mads @0 :ModularAssistiveDrivingSystem;
  params @1 :List(Param);
  leadOne @2 :LeadData;
  leadTwo @3 :LeadData;

  struct Param {
    key @0 :Text;
    type @2 :ParamType;
    value @3 :Data;

    valueDEPRECATED @1 :Text; # The data type change may cause issues with backwards compatibility.
  }

  enum ParamType {
    string @0;
    bool @1;
    int @2;
    float @3;
    time @4;
    json @5;
    bytes @6;
  }
}

struct BackupManagerSP @0xf98d843bfd7004a3 {
  backupStatus @0 :Status;
  restoreStatus @1 :Status;
  backupProgress @2 :Float32;
  restoreProgress @3 :Float32;
  lastError @4 :Text;
  currentBackup @5 :BackupInfo;
  backupHistory @6 :List(BackupInfo);

  enum Status {
    idle @0;
    inProgress @1;
    completed @2;
    failed @3;
  }

  struct Version {
    major @0 :UInt16;
    minor @1 :UInt16;
    patch @2 :UInt16;
    build @3 :UInt16;
    branch @4 :Text;
  }

  struct MetadataEntry {
    key @0 :Text;
    value @1 :Text;
    tags @2 :List(Text);
  }

  struct BackupInfo {
    deviceId @0 :Text;
    version @1 :UInt32;
    config @2 :Text;
    isEncrypted @3 :Bool;
    createdAt @4 :Text; # ISO timestamp
    updatedAt @5 :Text; # ISO timestamp
    sunnypilotVersion @6 :Version;
    backupMetadata @7 :List(MetadataEntry);
  }
}

struct CarStateSP @0xb86e6369214c01c8 {
}

struct LiveMapDataSP @0xf416ec09499d9d19 {
  speedLimitValid @0 :Bool;
  speedLimit @1 :Float32;
  speedLimitAheadValid @2 :Bool;
  speedLimitAhead @3 :Float32;
  speedLimitAheadDistance @4 :Float32;
  roadName @5 :Text;
}

struct ModelDataV2SP @0xa1680744031fdb2d {
  laneTurnDirection @0 :TurnDirection;
}

enum TurnDirection {
  none @0;
  turnLeft @1;
  turnRight @2;
}

struct CustomReserved10 @0xcb9fd56c7057593a {
}

struct CustomReserved11 @0xc2243c65e0340384 {
}

struct CustomReserved12 @0x9ccdc8676701b412 {
}

struct CustomReserved13 @0xcd96dafb67a082d0 {
}

struct CustomReserved14 @0xb057204d7deadf3f {
}

struct CustomReserved15 @0xbd443b539493bc68 {
}

struct CustomReserved16 @0xfc6241ed8877b611 {
}

struct CustomReserved17 @0xa30662f84033036c {
}

struct CustomReserved18 @0xc86a3d38d13eb3ef {
}

struct CustomReserved19 @0xa4f1eb3323f5f582 {
}
7472 cereal/gen/cpp/car.capnp.c++ Normal file
File diff suppressed because it is too large

9584 cereal/gen/cpp/car.capnp.h Normal file
File diff suppressed because it is too large

3918 cereal/gen/cpp/custom.capnp.c++ Normal file
File diff suppressed because it is too large

6621 cereal/gen/cpp/custom.capnp.h Normal file
File diff suppressed because it is too large

6878 cereal/gen/cpp/legacy.capnp.c++ Normal file
File diff suppressed because it is too large

11415 cereal/gen/cpp/legacy.capnp.h Normal file
File diff suppressed because it is too large

34665 cereal/gen/cpp/log.capnp.c++ Normal file
File diff suppressed because it is too large

59506 cereal/gen/cpp/log.capnp.h Normal file
File diff suppressed because it is too large
26 cereal/include/c++.capnp Normal file
@@ -0,0 +1,26 @@
# Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
# Licensed under the MIT License:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

@0xbdf87d7bb8304e81;
$namespace("capnp::annotations");

annotation namespace(file): Text;
annotation name(field, enumerant, struct, enum, interface, method, param, group, union): Text;
574
cereal/legacy.capnp
Normal file
574
cereal/legacy.capnp
Normal file
@ -0,0 +1,574 @@
|
||||
using Cxx = import "./include/c++.capnp";
|
||||
$Cxx.namespace("cereal");
|
||||
|
||||
@0x80ef1ec4889c2a63;
|
||||
|
||||
# legacy.capnp: a home for deprecated structs
|
||||
|
||||
struct LogRotate @0x9811e1f38f62f2d1 {
|
||||
segmentNum @0 :Int32;
|
||||
path @1 :Text;
|
||||
}
|
||||
|
||||
struct LiveUI @0xc08240f996aefced {
|
||||
rearViewCam @0 :Bool;
|
||||
alertText1 @1 :Text;
|
||||
alertText2 @2 :Text;
|
||||
awarenessStatus @3 :Float32;
|
||||
}
|
||||
|
||||
struct UiLayoutState @0x88dcce08ad29dda0 {
|
||||
activeApp @0 :App;
|
||||
sidebarCollapsed @1 :Bool;
|
||||
mapEnabled @2 :Bool;
|
||||
mockEngaged @3 :Bool;
|
||||
|
||||
enum App @0x9917470acf94d285 {
|
||||
home @0;
|
||||
music @1;
|
||||
nav @2;
|
||||
settings @3;
|
||||
none @4;
|
||||
}
|
||||
}
|
||||
|
||||
struct OrbslamCorrection @0x8afd33dc9b35e1aa {
|
||||
correctionMonoTime @0 :UInt64;
|
||||
prePositionECEF @1 :List(Float64);
|
||||
postPositionECEF @2 :List(Float64);
|
||||
prePoseQuatECEF @3 :List(Float32);
|
||||
postPoseQuatECEF @4 :List(Float32);
|
||||
numInliers @5 :UInt32;
|
||||
}
|
||||
|
||||
struct EthernetPacket @0xa99a9d5b33cf5859 {
|
||||
pkt @0 :Data;
|
||||
ts @1 :Float32;
|
||||
}
|
||||
|
||||
struct CellInfo @0xcff7566681c277ce {
|
||||
timestamp @0 :UInt64;
|
||||
repr @1 :Text; # android toString() for now
|
||||
}
|
||||
|
||||
struct WifiScan @0xd4df5a192382ba0b {
|
||||
bssid @0 :Text;
|
||||
ssid @1 :Text;
|
||||
capabilities @2 :Text;
|
||||
frequency @3 :Int32;
|
||||
level @4 :Int32;
|
||||
timestamp @5 :Int64;
|
||||
|
||||
centerFreq0 @6 :Int32;
|
||||
centerFreq1 @7 :Int32;
|
||||
channelWidth @8 :ChannelWidth;
|
||||
operatorFriendlyName @9 :Text;
|
||||
venueName @10 :Text;
|
||||
is80211mcResponder @11 :Bool;
|
||||
passpoint @12 :Bool;
|
||||
|
||||
distanceCm @13 :Int32;
|
||||
distanceSdCm @14 :Int32;
|
||||
|
||||
enum ChannelWidth @0xcb6a279f015f6b51 {
|
||||
w20Mhz @0;
|
||||
w40Mhz @1;
|
||||
w80Mhz @2;
|
||||
w160Mhz @3;
|
||||
w80Plus80Mhz @4;
|
||||
}
|
||||
}
|
||||
|
||||
struct LiveEventData @0x94b7baa90c5c321e {
|
||||
name @0 :Text;
|
||||
value @1 :Int32;
|
||||
}
|
||||
|
||||
struct ModelData @0xb8aad62cffef28a9 {
|
||||
frameId @0 :UInt32;
|
||||
frameAge @12 :UInt32;
|
||||
frameDropPerc @13 :Float32;
|
||||
timestampEof @9 :UInt64;
|
||||
modelExecutionTime @14 :Float32;
|
||||
gpuExecutionTime @16 :Float32;
|
||||
rawPred @15 :Data;
|
||||
|
||||
path @1 :PathData;
|
||||
leftLane @2 :PathData;
|
||||
rightLane @3 :PathData;
|
||||
lead @4 :LeadData;
|
||||
freePath @6 :List(Float32);
|
||||
|
||||
settings @5 :ModelSettings;
|
||||
leadFuture @7 :LeadData;
|
||||
speed @8 :List(Float32);
|
||||
meta @10 :MetaData;
|
||||
longitudinal @11 :LongitudinalData;
|
||||
|
||||
struct PathData @0x8817eeea389e9f08 {
|
||||
points @0 :List(Float32);
|
||||
prob @1 :Float32;
|
||||
std @2 :Float32;
|
||||
stds @3 :List(Float32);
|
||||
poly @4 :List(Float32);
|
||||
validLen @5 :Float32;
|
||||
}
|
||||
|
||||
struct LeadData @0xd1c9bef96d26fa91 {
|
||||
dist @0 :Float32;
|
||||
prob @1 :Float32;
|
||||
std @2 :Float32;
|
||||
relVel @3 :Float32;
|
||||
relVelStd @4 :Float32;
|
||||
relY @5 :Float32;
|
||||
relYStd @6 :Float32;
|
||||
relA @7 :Float32;
|
||||
relAStd @8 :Float32;
|
||||
}
|
||||
|
||||
struct ModelSettings @0xa26e3710efd3e914 {
|
||||
bigBoxX @0 :UInt16;
|
||||
bigBoxY @1 :UInt16;
|
||||
bigBoxWidth @2 :UInt16;
|
||||
bigBoxHeight @3 :UInt16;
|
||||
boxProjection @4 :List(Float32);
|
||||
yuvCorrection @5 :List(Float32);
|
||||
inputTransform @6 :List(Float32);
|
||||
}
|
||||
|
||||
struct MetaData @0x9744f25fb60f2bf8 {
|
||||
engagedProb @0 :Float32;
|
||||
desirePrediction @1 :List(Float32);
|
||||
brakeDisengageProb @2 :Float32;
|
||||
gasDisengageProb @3 :Float32;
|
||||
steerOverrideProb @4 :Float32;
|
||||
desireState @5 :List(Float32);
|
||||
}
|
||||
|
||||
struct LongitudinalData @0xf98f999c6a071122 {
|
||||
distances @2 :List(Float32);
|
||||
speeds @0 :List(Float32);
|
||||
accelerations @1 :List(Float32);
|
||||
}
|
||||
}
|
||||
|
||||
struct ECEFPoint @0xc25bbbd524983447 {
|
||||
x @0 :Float64;
|
||||
y @1 :Float64;
|
||||
z @2 :Float64;
|
||||
}
|
||||
|
||||
struct ECEFPointDEPRECATED @0xe10e21168db0c7f7 {
|
||||
x @0 :Float32;
|
||||
y @1 :Float32;
|
||||
z @2 :Float32;
|
||||
}
|
||||
|
||||
struct GPSPlannerPoints @0xab54c59699f8f9f3 {
|
||||
curPosDEPRECATED @0 :ECEFPointDEPRECATED;
|
||||
pointsDEPRECATED @1 :List(ECEFPointDEPRECATED);
|
||||
curPos @6 :ECEFPoint;
|
||||
points @7 :List(ECEFPoint);
|
||||
valid @2 :Bool;
|
||||
trackName @3 :Text;
|
||||
speedLimit @4 :Float32;
|
||||
accelTarget @5 :Float32;
|
||||
}
|
||||
|
||||
struct GPSPlannerPlan @0xf5ad1d90cdc1dd6b {
|
||||
valid @0 :Bool;
|
||||
poly @1 :List(Float32);
|
||||
trackName @2 :Text;
|
||||
speed @3 :Float32;
|
||||
acceleration @4 :Float32;
|
||||
pointsDEPRECATED @5 :List(ECEFPointDEPRECATED);
|
||||
points @6 :List(ECEFPoint);
|
||||
xLookahead @7 :Float32;
|
||||
}
|
||||
|
||||
struct UiNavigationEvent @0x90c8426c3eaddd3b {
|
||||
type @0: Type;
|
||||
status @1: Status;
|
||||
distanceTo @2: Float32;
|
||||
endRoadPointDEPRECATED @3: ECEFPointDEPRECATED;
|
||||
endRoadPoint @4: ECEFPoint;
|
||||
|
||||
enum Type @0xe8db07dcf8fcea05 {
|
||||
none @0;
|
||||
laneChangeLeft @1;
|
||||
laneChangeRight @2;
|
||||
mergeLeft @3;
|
||||
mergeRight @4;
|
||||
turnLeft @5;
|
||||
turnRight @6;
|
||||
}
|
||||
|
||||
enum Status @0xb9aa88c75ef99a1f {
|
||||
none @0;
|
||||
passive @1;
|
||||
approaching @2;
|
||||
active @3;
|
||||
}
|
||||
}
|
||||
|
||||
struct LiveLocationData @0xb99b2bc7a57e8128 {
|
||||
status @0 :UInt8;
|
||||
|
||||
# 3D fix
|
||||
lat @1 :Float64;
|
||||
lon @2 :Float64;
|
||||
alt @3 :Float32; # m
|
||||
|
||||
# speed
|
||||
speed @4 :Float32; # m/s
|
||||
|
||||
# NED velocity components
|
||||
vNED @5 :List(Float32);
|
||||
|
||||
# roll, pitch, heading (x,y,z)
|
||||
roll @6 :Float32; # WRT to center of earth?
|
||||
pitch @7 :Float32; # WRT to center of earth?
|
||||
heading @8 :Float32; # WRT to north?
|
||||
|
||||
# what are these?
|
||||
wanderAngle @9 :Float32;
|
||||
trackAngle @10 :Float32;
|
||||
|
||||
# car frame -- https://upload.wikimedia.org/wikipedia/commons/f/f5/RPY_angles_of_cars.png
|
||||
|
||||
# gyro, in car frame, deg/s
|
||||
gyro @11 :List(Float32);
|
||||
|
||||
# accel, in car frame, m/s^2
|
||||
accel @12 :List(Float32);
|
||||
|
||||
accuracy @13 :Accuracy;
|
||||
|
||||
source @14 :SensorSource;
|
||||
# if we are fixing a location in the past
|
||||
fixMonoTime @15 :UInt64;
|
||||
|
||||
gpsWeek @16 :Int32;
|
||||
timeOfWeek @17 :Float64;
|
||||
|
||||
positionECEF @18 :List(Float64);
|
||||
poseQuatECEF @19 :List(Float32);
|
||||
pitchCalibration @20 :Float32;
|
||||
yawCalibration @21 :Float32;
|
||||
imuFrame @22 :List(Float32);
|
||||
|
||||
struct Accuracy @0x943dc4625473b03f {
|
||||
pNEDError @0 :List(Float32);
|
||||
vNEDError @1 :List(Float32);
|
||||
rollError @2 :Float32;
|
||||
pitchError @3 :Float32;
|
||||
headingError @4 :Float32;
|
||||
ellipsoidSemiMajorError @5 :Float32;
|
||||
ellipsoidSemiMinorError @6 :Float32;
|
||||
ellipsoidOrientationError @7 :Float32;
|
||||
}
|
||||
|
||||
enum SensorSource @0xc871d3cc252af657 {
|
||||
applanix @0;
|
||||
kalman @1;
|
||||
orbslam @2;
|
||||
timing @3;
|
||||
dummy @4;
|
||||
}
|
||||
}
|
||||
|
||||
struct OrbOdometry @0xd7700859ed1f5b76 {
|
||||
# timing first
|
||||
startMonoTime @0 :UInt64;
|
||||
endMonoTime @1 :UInt64;
|
||||
|
||||
# fundamental matrix and error
|
||||
f @2: List(Float64);
|
||||
err @3: Float64;
|
||||
|
||||
# number of inlier points
|
||||
inliers @4: Int32;
|
||||
|
||||
# for debug only
|
||||
# indexed by endMonoTime features
|
||||
# value is startMonoTime feature match
|
||||
# -1 if no match
|
||||
matches @5: List(Int16);
|
||||
}
|
||||
|
||||
struct OrbFeatures @0xcd60164a8a0159ef {
|
||||
timestampEof @0 :UInt64;
|
||||
# transposed arrays of normalized image coordinates
|
||||
# len(xs) == len(ys) == len(descriptors) * 32
|
||||
xs @1 :List(Float32);
|
||||
ys @2 :List(Float32);
|
||||
descriptors @3 :Data;
|
||||
octaves @4 :List(Int8);
|
||||
|
||||
# match index to last OrbFeatures
|
||||
# -1 if no match
|
||||
timestampLastEof @5 :UInt64;
|
||||
matches @6: List(Int16);
|
||||
}
|
||||
|
||||
struct OrbFeaturesSummary @0xd500d30c5803fa4f {
|
||||
timestampEof @0 :UInt64;
|
||||
timestampLastEof @1 :UInt64;
|
||||
|
||||
featureCount @2 :UInt16;
|
||||
matchCount @3 :UInt16;
|
||||
computeNs @4 :UInt64;
|
||||
}
|
||||
|
||||
struct OrbKeyFrame @0xc8233c0345e27e24 {
|
||||
# this is a globally unique id for the KeyFrame
|
||||
id @0: UInt64;
|
||||
|
||||
# this is the location of the KeyFrame
|
||||
pos @1: ECEFPoint;
|
||||
|
||||
# these are the features in the world
|
||||
# len(dpos) == len(descriptors) * 32
|
||||
dpos @2 :List(ECEFPoint);
|
||||
descriptors @3 :Data;
|
||||
}
|
||||
|
||||
struct KalmanOdometry @0x92e21bb7ea38793a {
|
||||
trans @0 :List(Float32); # m/s in device frame
|
||||
rot @1 :List(Float32); # rad/s in device frame
|
||||
transStd @2 :List(Float32); # std m/s in device frame
|
||||
rotStd @3 :List(Float32); # std rad/s in device frame
|
||||
}
|
||||
|
||||
struct OrbObservation @0x9b326d4e436afec7 {
|
||||
observationMonoTime @0 :UInt64;
|
||||
normalizedCoordinates @1 :List(Float32);
|
||||
locationECEF @2 :List(Float64);
|
||||
matchDistance @3: UInt32;
|
||||
}
|
||||
|
||||
struct CalibrationFeatures @0x8fdfadb254ea867a {
|
||||
frameId @0 :UInt32;
|
||||
|
||||
p0 @1 :List(Float32);
|
||||
p1 @2 :List(Float32);
|
||||
status @3 :List(Int8);
|
||||
}
|
||||
|
||||
struct NavStatus @0xbd8822120928120c {
|
||||
isNavigating @0 :Bool;
|
||||
currentAddress @1 :Address;
|
||||
|
||||
struct Address @0xce7cd672cacc7814 {
|
||||
title @0 :Text;
|
||||
lat @1 :Float64;
|
||||
lng @2 :Float64;
|
||||
house @3 :Text;
|
||||
address @4 :Text;
|
||||
street @5 :Text;
|
||||
city @6 :Text;
|
||||
state @7 :Text;
|
||||
country @8 :Text;
|
||||
}
|
||||
}
|
||||
|
||||
struct NavUpdate @0xdb98be6565516acb {
|
||||
isNavigating @0 :Bool;
|
||||
curSegment @1 :Int32;
|
||||
segments @2 :List(Segment);
|
||||
|
||||
struct LatLng @0x9eaef9187cadbb9b {
|
||||
lat @0 :Float64;
|
||||
lng @1 :Float64;
|
||||
}
|
||||
|
||||
struct Segment @0xa5b39b4fc4d7da3f {
|
||||
from @0 :LatLng;
|
||||
to @1 :LatLng;
|
||||
updateTime @2 :Int32;
|
||||
distance @3 :Int32;
|
||||
crossTime @4 :Int32;
|
||||
exitNo @5 :Int32;
|
||||
instruction @6 :Instruction;
|
||||
|
||||
parts @7 :List(LatLng);
|
||||
|
||||
enum Instruction @0xc5417a637451246f {
|
||||
turnLeft @0;
|
||||
turnRight @1;
|
||||
keepLeft @2;
|
||||
keepRight @3;
|
||||
straight @4;
|
||||
roundaboutExitNumber @5;
|
||||
roundaboutExit @6;
|
||||
roundaboutTurnLeft @7;
|
||||
unkn8 @8;
|
||||
roundaboutStraight @9;
|
||||
unkn10 @10;
|
||||
roundaboutTurnRight @11;
|
||||
unkn12 @12;
|
||||
roundaboutUturn @13;
|
||||
unkn14 @14;
|
||||
arrive @15;
|
||||
exitLeft @16;
|
||||
exitRight @17;
|
||||
unkn18 @18;
|
||||
uturn @19;
|
||||
# ...
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct TrafficEvent @0xacfa74a094e62626 {
|
||||
type @0 :Type;
|
||||
distance @1 :Float32;
|
||||
action @2 :Action;
|
||||
resuming @3 :Bool;
|
||||
|
||||
enum Type @0xd85d75253435bf4b {
|
||||
stopSign @0;
|
||||
lightRed @1;
|
||||
lightYellow @2;
|
||||
lightGreen @3;
|
||||
stopLight @4;
|
||||
}
|
||||
|
||||
enum Action @0xa6f6ce72165ccb49 {
|
||||
none @0;
|
||||
yield @1;
|
||||
stop @2;
|
||||
resumeReady @3;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
struct AndroidGnss @0xdfdf30d03fc485bd {
|
||||
union {
|
||||
measurements @0 :Measurements;
|
||||
navigationMessage @1 :NavigationMessage;
|
||||
}
|
||||
|
||||
struct Measurements @0xa20710d4f428d6cd {
|
||||
clock @0 :Clock;
|
||||
measurements @1 :List(Measurement);
|
||||
|
||||
struct Clock @0xa0e27b453a38f450 {
|
||||
timeNanos @0 :Int64;
|
||||
hardwareClockDiscontinuityCount @1 :Int32;
|
||||
|
||||
hasTimeUncertaintyNanos @2 :Bool;
|
||||
timeUncertaintyNanos @3 :Float64;
|
||||
|
||||
hasLeapSecond @4 :Bool;
|
||||
leapSecond @5 :Int32;
|
||||
|
||||
hasFullBiasNanos @6 :Bool;
|
||||
fullBiasNanos @7 :Int64;
|
||||
|
||||
hasBiasNanos @8 :Bool;
|
||||
biasNanos @9 :Float64;
|
||||
|
||||
hasBiasUncertaintyNanos @10 :Bool;
|
||||
biasUncertaintyNanos @11 :Float64;
|
||||
|
||||
hasDriftNanosPerSecond @12 :Bool;
|
||||
driftNanosPerSecond @13 :Float64;
|
||||
|
||||
hasDriftUncertaintyNanosPerSecond @14 :Bool;
|
||||
driftUncertaintyNanosPerSecond @15 :Float64;
|
||||
}
|
||||
|
||||
struct Measurement @0xd949bf717d77614d {
|
||||
svId @0 :Int32;
|
||||
constellation @1 :Constellation;
|
||||
|
||||
timeOffsetNanos @2 :Float64;
|
||||
state @3 :Int32;
|
||||
receivedSvTimeNanos @4 :Int64;
|
||||
receivedSvTimeUncertaintyNanos @5 :Int64;
|
||||
cn0DbHz @6 :Float64;
|
||||
pseudorangeRateMetersPerSecond @7 :Float64;
|
||||
pseudorangeRateUncertaintyMetersPerSecond @8 :Float64;
|
||||
accumulatedDeltaRangeState @9 :Int32;
|
||||
accumulatedDeltaRangeMeters @10 :Float64;
|
||||
accumulatedDeltaRangeUncertaintyMeters @11 :Float64;
|
||||
|
||||
hasCarrierFrequencyHz @12 :Bool;
|
||||
carrierFrequencyHz @13 :Float32;
|
||||
hasCarrierCycles @14 :Bool;
|
||||
carrierCycles @15 :Int64;
|
||||
hasCarrierPhase @16 :Bool;
|
||||
carrierPhase @17 :Float64;
|
||||
hasCarrierPhaseUncertainty @18 :Bool;
|
||||
carrierPhaseUncertainty @19 :Float64;
|
||||
hasSnrInDb @20 :Bool;
|
||||
snrInDb @21 :Float64;
|
||||
|
||||
multipathIndicator @22 :MultipathIndicator;
|
||||
|
||||
enum Constellation @0x9ef1f3ff0deb5ffb {
|
||||
unknown @0;
|
||||
gps @1;
|
||||
sbas @2;
|
||||
glonass @3;
|
||||
qzss @4;
|
||||
beidou @5;
|
||||
galileo @6;
|
||||
}
|
||||
|
||||
enum State @0xcbb9490adce12d72 {
|
||||
unknown @0;
|
||||
codeLock @1;
|
||||
bitSync @2;
|
||||
subframeSync @3;
|
||||
towDecoded @4;
|
||||
msecAmbiguous @5;
|
||||
symbolSync @6;
|
||||
gloStringSync @7;
|
||||
gloTodDecoded @8;
|
||||
bdsD2BitSync @9;
|
||||
bdsD2SubframeSync @10;
|
||||
galE1bcCodeLock @11;
|
||||
galE1c2ndCodeLock @12;
|
||||
galE1bPageSync @13;
|
||||
sbasSync @14;
|
||||
}
|
||||
|
||||
enum MultipathIndicator @0xc04e7b6231d4caa8 {
|
||||
unknown @0;
|
||||
detected @1;
|
||||
notDetected @2;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct NavigationMessage @0xe2517b083095fd4e {
|
||||
type @0 :Int32;
|
||||
svId @1 :Int32;
|
||||
messageId @2 :Int32;
|
||||
submessageId @3 :Int32;
|
||||
data @4 :Data;
|
||||
status @5 :Status;
|
||||
|
||||
enum Status @0xec1ff7996b35366f {
|
||||
unknown @0;
|
||||
parityPassed @1;
|
||||
parityRebuilt @2;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct LidarPts @0xe3d6685d4e9d8f7a {
|
||||
r @0 :List(UInt16); # uint16 m*500.0
|
||||
theta @1 :List(UInt16); # uint16 deg*100.0
|
||||
reflect @2 :List(UInt8); # uint8 0-255
|
||||
|
||||
# For storing out of file.
|
||||
idx @3 :UInt64;
|
||||
|
||||
# For storing in file
|
||||
pkt @4 :Data;
|
||||
}
|
||||
|
||||
|
||||
2691
cereal/log.capnp
Normal file
File diff suppressed because it is too large
257
cereal/messaging/__init__.py
Normal file
@ -0,0 +1,257 @@
|
||||
# must be built with scons
|
||||
from msgq.ipc_pyx import Context, Poller, SubSocket, PubSocket, SocketEventHandle, toggle_fake_events, \
|
||||
set_fake_prefix, get_fake_prefix, delete_fake_prefix, wait_for_one_event
|
||||
from msgq.ipc_pyx import MultiplePublishersError, IpcError
|
||||
from msgq import fake_event_handle, pub_sock, sub_sock, drain_sock_raw
|
||||
import msgq
|
||||
|
||||
import os
|
||||
import capnp
|
||||
import time
|
||||
|
||||
from typing import Optional, List, Union, Dict
|
||||
|
||||
from cereal import log
|
||||
from cereal.services import SERVICE_LIST
|
||||
from openpilot.common.util import MovingAverage
|
||||
|
||||
NO_TRAVERSAL_LIMIT = 2**64-1
|
||||
|
||||
|
||||
def reset_context():
|
||||
msgq.context = Context()
|
||||
|
||||
|
||||
def log_from_bytes(dat: bytes, struct: capnp.lib.capnp._StructModule = log.Event) -> capnp.lib.capnp._DynamicStructReader:
|
||||
with struct.from_bytes(dat, traversal_limit_in_words=NO_TRAVERSAL_LIMIT) as msg:
|
||||
return msg
|
||||
|
||||
|
||||
def new_message(service: Optional[str], size: Optional[int] = None, **kwargs) -> capnp.lib.capnp._DynamicStructBuilder:
|
||||
args = {
|
||||
'valid': False,
|
||||
'logMonoTime': int(time.monotonic() * 1e9),
|
||||
**kwargs
|
||||
}
|
||||
dat = log.Event.new_message(**args)
|
||||
if service is not None:
|
||||
if size is None:
|
||||
dat.init(service)
|
||||
else:
|
||||
dat.init(service, size)
|
||||
return dat
|
||||
|
||||
|
||||
def drain_sock(sock: SubSocket, wait_for_one: bool = False) -> List[capnp.lib.capnp._DynamicStructReader]:
|
||||
"""Receive all message currently available on the queue"""
|
||||
msgs = drain_sock_raw(sock, wait_for_one=wait_for_one)
|
||||
return [log_from_bytes(m) for m in msgs]
|
||||
|
||||
|
||||
# TODO: print when we drop packets?
|
||||
def recv_sock(sock: SubSocket, wait: bool = False) -> Optional[capnp.lib.capnp._DynamicStructReader]:
|
||||
"""Same as drain sock, but only returns latest message. Consider using conflate instead."""
|
||||
dat = None
|
||||
|
||||
while 1:
|
||||
if wait and dat is None:
|
||||
recv = sock.receive()
|
||||
else:
|
||||
recv = sock.receive(non_blocking=True)
|
||||
|
||||
if recv is None: # Timeout hit
|
||||
break
|
||||
|
||||
dat = recv
|
||||
|
||||
if dat is not None:
|
||||
dat = log_from_bytes(dat)
|
||||
|
||||
return dat
|
||||
|
||||
|
||||
def recv_one(sock: SubSocket) -> Optional[capnp.lib.capnp._DynamicStructReader]:
|
||||
dat = sock.receive()
|
||||
if dat is not None:
|
||||
dat = log_from_bytes(dat)
|
||||
return dat
|
||||
|
||||
|
||||
def recv_one_or_none(sock: SubSocket) -> Optional[capnp.lib.capnp._DynamicStructReader]:
|
||||
dat = sock.receive(non_blocking=True)
|
||||
if dat is not None:
|
||||
dat = log_from_bytes(dat)
|
||||
return dat
|
||||
|
||||
|
||||
def recv_one_retry(sock: SubSocket) -> capnp.lib.capnp._DynamicStructReader:
|
||||
"""Keep receiving until we get a message"""
|
||||
while True:
|
||||
dat = sock.receive()
|
||||
if dat is not None:
|
||||
return log_from_bytes(dat)
|
||||
|
||||
|
||||
class FrequencyTracker:
|
||||
def __init__(self, service_freq: float, update_freq: float, is_poll: bool):
|
||||
freq = max(min(service_freq, update_freq), 1.)
|
||||
if is_poll:
|
||||
min_freq = max_freq = freq
|
||||
else:
|
||||
max_freq = min(freq, update_freq)
|
||||
if service_freq >= 2 * update_freq:
|
||||
min_freq = update_freq
|
||||
elif update_freq >= 2 * service_freq:
|
||||
min_freq = freq
|
||||
else:
|
||||
min_freq = min(freq, freq / 2.)
|
||||
|
||||
self.min_freq = min_freq * 0.8
|
||||
self.max_freq = max_freq * 1.2
|
||||
self.avg_dt = MovingAverage(int(10 * freq))
|
||||
self.recent_avg_dt = MovingAverage(int(freq))
|
||||
self.prev_time = 0.0
|
||||
|
||||
def record_recv_time(self, cur_time: float) -> None:
|
||||
# TODO: Handle case where cur_time is less than prev_time
|
||||
if self.prev_time > 1e-5:
|
||||
dt = cur_time - self.prev_time
|
||||
|
||||
self.avg_dt.add_value(dt)
|
||||
self.recent_avg_dt.add_value(dt)
|
||||
|
||||
self.prev_time = cur_time
|
||||
|
||||
@property
|
||||
def valid(self) -> bool:
|
||||
if self.avg_dt.count == 0:
|
||||
return False
|
||||
|
||||
avg_freq = 1.0 / self.avg_dt.get_average()
|
||||
if self.min_freq <= avg_freq <= self.max_freq:
|
||||
return True
|
||||
|
||||
avg_freq_recent = 1.0 / self.recent_avg_dt.get_average()
|
||||
return self.min_freq <= avg_freq_recent <= self.max_freq
|
||||
|
||||
|
||||
class SubMaster:
|
||||
def __init__(self, services: List[str], poll: Optional[str] = None,
|
||||
ignore_alive: Optional[List[str]] = None, ignore_avg_freq: Optional[List[str]] = None,
|
||||
ignore_valid: Optional[List[str]] = None, addr: str = "127.0.0.1", frequency: Optional[float] = None):
|
||||
self.frame = -1
|
||||
self.services = services
|
||||
self.seen = {s: False for s in services}
|
||||
self.updated = {s: False for s in services}
|
||||
self.recv_time = {s: 0. for s in services}
|
||||
self.recv_frame = {s: 0 for s in services}
|
||||
self.sock = {}
|
||||
self.data = {}
|
||||
self.logMonoTime = {s: 0 for s in services}
|
||||
|
||||
# zero-frequency / on-demand services are always alive and presumed valid; all others must pass checks
|
||||
on_demand = {s: SERVICE_LIST[s].frequency <= 1e-5 for s in services}
|
||||
self.static_freq_services = set(s for s in services if not on_demand[s])
|
||||
self.alive = {s: on_demand[s] for s in services}
|
||||
self.freq_ok = {s: on_demand[s] for s in services}
|
||||
self.valid = {s: on_demand[s] for s in services}
|
||||
|
||||
self.freq_tracker: Dict[str, FrequencyTracker] = {}
|
||||
self.poller = Poller()
|
||||
polled_services = set([poll, ] if poll is not None else services)
|
||||
self.non_polled_services = set(services) - polled_services
|
||||
|
||||
self.ignore_average_freq = [] if ignore_avg_freq is None else ignore_avg_freq
|
||||
self.ignore_alive = [] if ignore_alive is None else ignore_alive
|
||||
self.ignore_valid = [] if ignore_valid is None else ignore_valid
|
||||
|
||||
self.simulation = bool(int(os.getenv("SIMULATION", "0")))
|
||||
|
||||
# if no frequency is given, fall back to the max frequency of the polled services to be conservative
|
||||
assert frequency is None or poll is None, "Do not specify 'frequency' - frequency of the polled service will be used."
|
||||
self.update_freq = frequency or max([SERVICE_LIST[s].frequency for s in polled_services])
|
||||
|
||||
for s in services:
|
||||
p = self.poller if s not in self.non_polled_services else None
|
||||
self.sock[s] = sub_sock(s, poller=p, addr=addr, conflate=True)
|
||||
|
||||
try:
|
||||
data = new_message(s)
|
||||
except capnp.lib.capnp.KjException:
|
||||
data = new_message(s, 0) # lists
|
||||
|
||||
self.data[s] = getattr(data.as_reader(), s)
|
||||
self.freq_tracker[s] = FrequencyTracker(SERVICE_LIST[s].frequency, self.update_freq, s == poll)
|
||||
|
||||
def __getitem__(self, s: str) -> capnp.lib.capnp._DynamicStructReader:
|
||||
return self.data[s]
|
||||
|
||||
def _check_avg_freq(self, s: str) -> bool:
|
||||
return SERVICE_LIST[s].frequency > 0.99 and (s not in self.ignore_average_freq) and (s not in self.ignore_alive)
|
||||
|
||||
def update(self, timeout: int = 100) -> None:
|
||||
msgs = []
|
||||
for sock in self.poller.poll(timeout):
|
||||
msgs.append(recv_one_or_none(sock))
|
||||
|
||||
# non-blocking receive for non-polled sockets
|
||||
for s in self.non_polled_services:
|
||||
msgs.append(recv_one_or_none(self.sock[s]))
|
||||
self.update_msgs(time.monotonic(), msgs)
|
||||
|
||||
def update_msgs(self, cur_time: float, msgs: List[capnp.lib.capnp._DynamicStructReader]) -> None:
|
||||
self.frame += 1
|
||||
self.updated = dict.fromkeys(self.services, False)
|
||||
for msg in msgs:
|
||||
if msg is None:
|
||||
continue
|
||||
|
||||
s = msg.which()
|
||||
self.seen[s] = True
|
||||
self.updated[s] = True
|
||||
|
||||
self.freq_tracker[s].record_recv_time(cur_time)
|
||||
self.recv_time[s] = cur_time
|
||||
self.recv_frame[s] = self.frame
|
||||
self.data[s] = getattr(msg, s)
|
||||
self.logMonoTime[s] = msg.logMonoTime
|
||||
self.valid[s] = msg.valid
|
||||
|
||||
for s in self.static_freq_services:
|
||||
# alive if the last message arrived within 10x the expected period; checks are relaxed in the simulator
|
||||
self.alive[s] = (cur_time - self.recv_time[s]) < (10. / SERVICE_LIST[s].frequency) or (self.seen[s] and self.simulation)
|
||||
self.freq_ok[s] = self.freq_tracker[s].valid or self.simulation
|
||||
|
||||
def all_alive(self, service_list: Optional[List[str]] = None) -> bool:
|
||||
return all(self.alive[s] for s in (service_list or self.services) if s not in self.ignore_alive)
|
||||
|
||||
def all_freq_ok(self, service_list: Optional[List[str]] = None) -> bool:
|
||||
return all(self.freq_ok[s] for s in (service_list or self.services) if self._check_avg_freq(s))
|
||||
|
||||
def all_valid(self, service_list: Optional[List[str]] = None) -> bool:
|
||||
return all(self.valid[s] for s in (service_list or self.services) if s not in self.ignore_valid)
|
||||
|
||||
def all_checks(self, service_list: Optional[List[str]] = None) -> bool:
|
||||
return self.all_alive(service_list) and self.all_freq_ok(service_list) and self.all_valid(service_list)
|
||||
|
||||
|
||||
class PubMaster:
|
||||
def __init__(self, services: List[str]):
|
||||
self.sock = {}
|
||||
for s in services:
|
||||
self.sock[s] = pub_sock(s)
|
||||
|
||||
def send(self, s: str, dat: Union[bytes, capnp.lib.capnp._DynamicStructBuilder]) -> None:
|
||||
if not isinstance(dat, bytes):
|
||||
dat = dat.to_bytes()
|
||||
self.sock[s].send(dat)
|
||||
|
||||
def wait_for_readers_to_update(self, s: str, timeout: int, dt: float = 0.05) -> bool:
|
||||
for _ in range(int(timeout*(1./dt))):
|
||||
if self.sock[s].all_readers_updated():
|
||||
return True
|
||||
time.sleep(dt)
|
||||
return False
|
||||
|
||||
def all_readers_updated(self, s: str) -> bool:
|
||||
return self.sock[s].all_readers_updated() # type: ignore
|
||||
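A minimal usage sketch of the Python messaging API above. The service name "carState" comes from cereal/services.py; the rest only uses the PubMaster/SubMaster/new_message interfaces defined in this file and is illustrative, not part of the commit.

import time
import cereal.messaging as messaging

pm = messaging.PubMaster(["carState"])
sm = messaging.SubMaster(["carState"])

msg = messaging.new_message("carState")
msg.valid = True
msg.carState.vEgo = 12.5
pm.send("carState", msg)

time.sleep(0.05)        # give the queue a moment, as the tests below do
sm.update(timeout=100)  # milliseconds
if sm.updated["carState"]:
  print(sm["carState"].vEgo)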
BIN
cereal/messaging/bridge
Executable file
Binary file not shown.
102
cereal/messaging/messaging.h
Normal file
@ -0,0 +1,102 @@
|
||||
#pragma once
|
||||
|
||||
#include <cstddef>
|
||||
#include <map>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <utility>
|
||||
|
||||
#include <capnp/serialize.h>
|
||||
|
||||
#include "cereal/gen/cpp/log.capnp.h"
|
||||
#include "common/timing.h"
|
||||
#include "msgq/ipc.h"
|
||||
|
||||
class SubMaster {
|
||||
public:
|
||||
SubMaster(const std::vector<const char *> &service_list, const std::vector<const char *> &poll = {},
|
||||
const char *address = nullptr, const std::vector<const char *> &ignore_alive = {});
|
||||
void update(int timeout = 1000);
|
||||
void update_msgs(uint64_t current_time, const std::vector<std::pair<std::string, cereal::Event::Reader>> &messages);
|
||||
inline bool allAlive(const std::vector<const char *> &service_list = {}) { return all_(service_list, false, true); }
|
||||
inline bool allValid(const std::vector<const char *> &service_list = {}) { return all_(service_list, true, false); }
|
||||
inline bool allAliveAndValid(const std::vector<const char *> &service_list = {}) { return all_(service_list, true, true); }
|
||||
void drain();
|
||||
~SubMaster();
|
||||
|
||||
uint64_t frame = 0;
|
||||
bool updated(const char *name) const;
|
||||
bool alive(const char *name) const;
|
||||
bool valid(const char *name) const;
|
||||
uint64_t rcv_frame(const char *name) const;
|
||||
uint64_t rcv_time(const char *name) const;
|
||||
cereal::Event::Reader &operator[](const char *name) const;
|
||||
|
||||
private:
|
||||
bool all_(const std::vector<const char *> &service_list, bool valid, bool alive);
|
||||
Poller *poller_ = nullptr;
|
||||
struct SubMessage;
|
||||
std::map<SubSocket *, SubMessage *> messages_;
|
||||
std::map<std::string, SubMessage *> services_;
|
||||
};
|
||||
|
||||
class MessageBuilder : public capnp::MallocMessageBuilder {
|
||||
public:
|
||||
MessageBuilder() = default;
|
||||
|
||||
cereal::Event::Builder initEvent(bool valid = true) {
|
||||
cereal::Event::Builder event = initRoot<cereal::Event>();
|
||||
event.setLogMonoTime(nanos_since_boot());
|
||||
event.setValid(valid);
|
||||
return event;
|
||||
}
|
||||
|
||||
kj::ArrayPtr<capnp::byte> toBytes() {
|
||||
heapArray_ = capnp::messageToFlatArray(*this);
|
||||
return heapArray_.asBytes();
|
||||
}
|
||||
|
||||
size_t getSerializedSize() {
|
||||
return capnp::computeSerializedSizeInWords(*this) * sizeof(capnp::word);
|
||||
}
|
||||
|
||||
int serializeToBuffer(unsigned char *buffer, size_t buffer_size) {
|
||||
size_t serialized_size = getSerializedSize();
|
||||
if (serialized_size > buffer_size) { return -1; }
|
||||
kj::ArrayOutputStream out(kj::ArrayPtr<capnp::byte>(buffer, buffer_size));
|
||||
capnp::writeMessage(out, *this);
|
||||
return serialized_size;
|
||||
}
|
||||
|
||||
private:
|
||||
kj::Array<capnp::word> heapArray_;
|
||||
};
|
||||
|
||||
class PubMaster {
|
||||
public:
|
||||
PubMaster(const std::vector<const char *> &service_list);
|
||||
inline int send(const char *name, capnp::byte *data, size_t size) { return sockets_.at(name)->send((char *)data, size); }
|
||||
int send(const char *name, MessageBuilder &msg);
|
||||
~PubMaster();
|
||||
|
||||
private:
|
||||
std::map<std::string, PubSocket *> sockets_;
|
||||
};
|
||||
|
||||
class AlignedBuffer {
|
||||
public:
|
||||
kj::ArrayPtr<const capnp::word> align(const char *data, const size_t size) {
|
||||
words_size = size / sizeof(capnp::word) + 1;
|
||||
if (aligned_buf.size() < words_size) {
|
||||
aligned_buf = kj::heapArray<capnp::word>(words_size < 512 ? 512 : words_size);
|
||||
}
|
||||
memcpy(aligned_buf.begin(), data, size);
|
||||
return aligned_buf.slice(0, words_size);
|
||||
}
|
||||
inline kj::ArrayPtr<const capnp::word> align(Message *m) {
|
||||
return align(m->getData(), m->getSize());
|
||||
}
|
||||
private:
|
||||
kj::Array<capnp::word> aligned_buf;
|
||||
size_t words_size;
|
||||
};
|
||||
37
cereal/messaging/msgq_to_zmq.h
Normal file
@ -0,0 +1,37 @@
|
||||
#pragma once
|
||||
|
||||
#include <condition_variable>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <mutex>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#define private public
|
||||
#include "msgq/impl_msgq.h"
|
||||
#include "msgq/impl_zmq.h"
|
||||
|
||||
class MsgqToZmq {
|
||||
public:
|
||||
MsgqToZmq() {}
|
||||
void run(const std::vector<std::string> &endpoints, const std::string &ip);
|
||||
|
||||
protected:
|
||||
void registerSockets();
|
||||
void zmqMonitorThread();
|
||||
|
||||
struct SocketPair {
|
||||
std::string endpoint;
|
||||
std::unique_ptr<ZMQPubSocket> pub_sock;
|
||||
std::unique_ptr<MSGQSubSocket> sub_sock;
|
||||
int connected_clients = 0;
|
||||
};
|
||||
|
||||
std::unique_ptr<MSGQContext> msgq_context;
|
||||
std::unique_ptr<ZMQContext> zmq_context;
|
||||
std::mutex mutex;
|
||||
std::condition_variable cv;
|
||||
std::unique_ptr<MSGQPoller> msgq_poller;
|
||||
std::map<SubSocket *, ZMQPubSocket *> sub2pub;
|
||||
std::vector<SocketPair> socket_pairs;
|
||||
};
|
||||
0
cereal/messaging/tests/__init__.py
Normal file
185
cereal/messaging/tests/test_messaging.py
Normal file
@ -0,0 +1,185 @@
|
||||
import os
|
||||
import capnp
|
||||
import multiprocessing
|
||||
import numbers
|
||||
import random
|
||||
import threading
|
||||
import time
|
||||
from parameterized import parameterized
|
||||
import pytest
|
||||
|
||||
from cereal import log, car
|
||||
import cereal.messaging as messaging
|
||||
from cereal.services import SERVICE_LIST
|
||||
|
||||
events = [evt for evt in log.Event.schema.union_fields if evt in SERVICE_LIST.keys()]
|
||||
|
||||
def random_sock():
|
||||
return random.choice(events)
|
||||
|
||||
def random_socks(num_socks=10):
|
||||
return list({random_sock() for _ in range(num_socks)})
|
||||
|
||||
def random_bytes(length=1000):
|
||||
return bytes([random.randrange(0xFF) for _ in range(length)])
|
||||
|
||||
def zmq_sleep(t=1):
|
||||
if "ZMQ" in os.environ:
|
||||
time.sleep(t)
|
||||
|
||||
|
||||
# TODO: this should take any capnp struct and return a msg with randomly populated data
|
||||
def random_carstate():
|
||||
fields = ["vEgo", "aEgo", "brake", "steeringAngleDeg"]
|
||||
msg = messaging.new_message("carState")
|
||||
cs = msg.carState
|
||||
for f in fields:
|
||||
setattr(cs, f, random.random() * 10)
|
||||
return msg
|
||||
|
||||
# TODO: this should compare any capnp structs
|
||||
def assert_carstate(cs1, cs2):
|
||||
for f in car.CarState.schema.non_union_fields:
|
||||
# TODO: check all types
|
||||
val1, val2 = getattr(cs1, f), getattr(cs2, f)
|
||||
if isinstance(val1, numbers.Number):
|
||||
assert val1 == val2, f"{f}: sent '{val1}' vs recvd '{val2}'"
|
||||
|
||||
def delayed_send(delay, sock, dat):
|
||||
def send_func():
|
||||
sock.send(dat)
|
||||
threading.Timer(delay, send_func).start()
|
||||
|
||||
|
||||
class TestMessaging:
|
||||
def setup_method(self):
|
||||
# TODO: ZMQ tests are too slow; all sleeps will need to be
|
||||
# replaced with logic to block on the necessary condition
|
||||
if "ZMQ" in os.environ:
|
||||
pytest.skip()
|
||||
|
||||
# ZMQ pub socket takes too long to die
|
||||
# sleep to prevent multiple publishers error between tests
|
||||
zmq_sleep()
|
||||
|
||||
@parameterized.expand(events)
|
||||
def test_new_message(self, evt):
|
||||
try:
|
||||
msg = messaging.new_message(evt)
|
||||
except capnp.lib.capnp.KjException:
|
||||
msg = messaging.new_message(evt, random.randrange(200))
|
||||
assert (time.monotonic() * 1e9 - msg.logMonoTime) < 0.1 * 1e9
|
||||
assert not msg.valid
|
||||
assert evt == msg.which()
|
||||
|
||||
@parameterized.expand(events)
|
||||
def test_pub_sock(self, evt):
|
||||
messaging.pub_sock(evt)
|
||||
|
||||
@parameterized.expand(events)
|
||||
def test_sub_sock(self, evt):
|
||||
messaging.sub_sock(evt)
|
||||
|
||||
@parameterized.expand([
|
||||
(messaging.drain_sock, capnp._DynamicStructReader),
|
||||
(messaging.drain_sock_raw, bytes),
|
||||
])
|
||||
def test_drain_sock(self, func, expected_type):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sub_sock = messaging.sub_sock(sock, timeout=1000)
|
||||
zmq_sleep()
|
||||
|
||||
# no wait and no msgs in queue
|
||||
msgs = func(sub_sock)
|
||||
assert isinstance(msgs, list)
|
||||
assert len(msgs) == 0
|
||||
|
||||
# no wait but msgs are queued up
|
||||
num_msgs = random.randrange(3, 10)
|
||||
for _ in range(num_msgs):
|
||||
pub_sock.send(messaging.new_message(sock).to_bytes())
|
||||
time.sleep(0.1)
|
||||
msgs = func(sub_sock)
|
||||
assert isinstance(msgs, list)
|
||||
assert all(isinstance(msg, expected_type) for msg in msgs)
|
||||
assert len(msgs) == num_msgs
|
||||
|
||||
def test_recv_sock(self):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sub_sock = messaging.sub_sock(sock, timeout=100)
|
||||
zmq_sleep()
|
||||
|
||||
# no wait and no msg in queue, socket should timeout
|
||||
recvd = messaging.recv_sock(sub_sock)
|
||||
assert recvd is None
|
||||
|
||||
# no wait and one msg in queue
|
||||
msg = random_carstate()
|
||||
pub_sock.send(msg.to_bytes())
|
||||
time.sleep(0.01)
|
||||
recvd = messaging.recv_sock(sub_sock)
|
||||
assert isinstance(recvd, capnp._DynamicStructReader)
|
||||
# https://github.com/python/mypy/issues/13038
|
||||
assert_carstate(msg.carState, recvd.carState)
|
||||
|
||||
def test_recv_one(self):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sub_sock = messaging.sub_sock(sock, timeout=1000)
|
||||
zmq_sleep()
|
||||
|
||||
# no msg in queue, socket should timeout
|
||||
recvd = messaging.recv_one(sub_sock)
|
||||
assert recvd is None
|
||||
|
||||
# one msg in queue
|
||||
msg = random_carstate()
|
||||
pub_sock.send(msg.to_bytes())
|
||||
recvd = messaging.recv_one(sub_sock)
|
||||
assert isinstance(recvd, capnp._DynamicStructReader)
|
||||
assert_carstate(msg.carState, recvd.carState)
|
||||
|
||||
@pytest.mark.xfail(condition="ZMQ" in os.environ, reason='ZMQ detected')
|
||||
def test_recv_one_or_none(self):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sub_sock = messaging.sub_sock(sock)
|
||||
zmq_sleep()
|
||||
|
||||
# no msg in queue, socket shouldn't block
|
||||
recvd = messaging.recv_one_or_none(sub_sock)
|
||||
assert recvd is None
|
||||
|
||||
# one msg in queue
|
||||
msg = random_carstate()
|
||||
pub_sock.send(msg.to_bytes())
|
||||
recvd = messaging.recv_one_or_none(sub_sock)
|
||||
assert isinstance(recvd, capnp._DynamicStructReader)
|
||||
assert_carstate(msg.carState, recvd.carState)
|
||||
|
||||
def test_recv_one_retry(self):
|
||||
sock = "carState"
|
||||
sock_timeout = 0.1
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sub_sock = messaging.sub_sock(sock, timeout=round(sock_timeout*1000))
|
||||
zmq_sleep()
|
||||
|
||||
# this test doesn't work with ZMQ since multiprocessing interrupts it
|
||||
if "ZMQ" not in os.environ:
|
||||
# wait 5 socket timeouts and make sure it's still retrying
|
||||
p = multiprocessing.Process(target=messaging.recv_one_retry, args=(sub_sock,))
|
||||
p.start()
|
||||
time.sleep(sock_timeout*5)
|
||||
assert p.is_alive()
|
||||
p.terminate()
|
||||
|
||||
# wait 5 socket timeouts before sending
|
||||
msg = random_carstate()
|
||||
start_time = time.monotonic()
|
||||
delayed_send(sock_timeout*5, pub_sock, msg.to_bytes())
|
||||
recvd = messaging.recv_one_retry(sub_sock)
|
||||
assert (time.monotonic() - start_time) >= sock_timeout*5
|
||||
assert isinstance(recvd, capnp._DynamicStructReader)
|
||||
assert_carstate(msg.carState, recvd.carState)
|
||||
160
cereal/messaging/tests/test_pub_sub_master.py
Normal file
@ -0,0 +1,160 @@
|
||||
import random
|
||||
import time
|
||||
from typing import Sized, cast
|
||||
|
||||
import cereal.messaging as messaging
|
||||
from cereal.messaging.tests.test_messaging import events, random_sock, random_socks, \
|
||||
random_bytes, random_carstate, assert_carstate, \
|
||||
zmq_sleep
|
||||
from cereal.services import SERVICE_LIST
|
||||
|
||||
|
||||
class TestSubMaster:
|
||||
|
||||
def setup_method(self):
|
||||
# ZMQ pub socket takes too long to die
|
||||
# sleep to prevent multiple publishers error between tests
|
||||
zmq_sleep(3)
|
||||
|
||||
def test_init(self):
|
||||
sm = messaging.SubMaster(events)
|
||||
for p in [sm.updated, sm.recv_time, sm.recv_frame, sm.alive,
|
||||
sm.sock, sm.data, sm.logMonoTime, sm.valid]:
|
||||
assert len(cast(Sized, p)) == len(events)
|
||||
|
||||
def test_init_state(self):
|
||||
socks = random_socks()
|
||||
sm = messaging.SubMaster(socks)
|
||||
assert sm.frame == -1
|
||||
assert not any(sm.updated.values())
|
||||
assert not any(sm.seen.values())
|
||||
on_demand = {s: SERVICE_LIST[s].frequency <= 1e-5 for s in sm.services}
|
||||
assert all(sm.alive[s] == sm.valid[s] == sm.freq_ok[s] == on_demand[s] for s in sm.services)
|
||||
assert all(t == 0. for t in sm.recv_time.values())
|
||||
assert all(f == 0 for f in sm.recv_frame.values())
|
||||
assert all(t == 0 for t in sm.logMonoTime.values())
|
||||
|
||||
for p in [sm.updated, sm.recv_time, sm.recv_frame, sm.alive,
|
||||
sm.sock, sm.data, sm.logMonoTime, sm.valid]:
|
||||
assert len(cast(Sized, p)) == len(socks)
|
||||
|
||||
def test_getitem(self):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sm = messaging.SubMaster([sock,])
|
||||
zmq_sleep()
|
||||
|
||||
msg = random_carstate()
|
||||
pub_sock.send(msg.to_bytes())
|
||||
sm.update(1000)
|
||||
assert_carstate(msg.carState, sm[sock])
|
||||
|
||||
# TODO: break this test up to individually test SubMaster.update and SubMaster.update_msgs
|
||||
def test_update(self):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sm = messaging.SubMaster([sock,])
|
||||
zmq_sleep()
|
||||
|
||||
for i in range(10):
|
||||
msg = messaging.new_message(sock)
|
||||
pub_sock.send(msg.to_bytes())
|
||||
sm.update(1000)
|
||||
assert sm.frame == i
|
||||
assert all(sm.updated.values())
|
||||
|
||||
def test_update_timeout(self):
|
||||
sock = random_sock()
|
||||
sm = messaging.SubMaster([sock,])
|
||||
timeout = random.randrange(1000, 3000)
|
||||
start_time = time.monotonic()
|
||||
sm.update(timeout)
|
||||
t = time.monotonic() - start_time
|
||||
assert t >= timeout/1000.
|
||||
assert t < 3
|
||||
assert not any(sm.updated.values())
|
||||
|
||||
def test_avg_frequency_checks(self):
|
||||
for poll in (True, False):
|
||||
sm = messaging.SubMaster(["modelV2", "carParams", "carState", "cameraOdometry", "liveCalibration"],
|
||||
poll=("modelV2" if poll else None),
|
||||
frequency=(20. if not poll else None))
|
||||
|
||||
checks = {
|
||||
"carState": (20, 20),
|
||||
"modelV2": (20, 20 if poll else 10),
|
||||
"cameraOdometry": (20, 10),
|
||||
"liveCalibration": (4, 4),
|
||||
"carParams": (None, None),
|
||||
"userBookmark": (None, None),
|
||||
}
|
||||
|
||||
for service, (max_freq, min_freq) in checks.items():
|
||||
if max_freq is not None:
|
||||
assert sm._check_avg_freq(service)
|
||||
assert sm.freq_tracker[service].max_freq == max_freq*1.2
|
||||
assert sm.freq_tracker[service].min_freq == min_freq*0.8
|
||||
else:
|
||||
assert not sm._check_avg_freq(service)
|
||||
|
||||
def test_alive(self):
|
||||
pass
|
||||
|
||||
def test_ignore_alive(self):
|
||||
pass
|
||||
|
||||
def test_valid(self):
|
||||
pass
|
||||
|
||||
# SubMaster should always conflate
|
||||
def test_conflate(self):
|
||||
sock = "carState"
|
||||
pub_sock = messaging.pub_sock(sock)
|
||||
sm = messaging.SubMaster([sock,])
|
||||
|
||||
n = 10
|
||||
for i in range(n+1):
|
||||
msg = messaging.new_message(sock)
|
||||
msg.carState.vEgo = i
|
||||
pub_sock.send(msg.to_bytes())
|
||||
time.sleep(0.01)
|
||||
sm.update(1000)
|
||||
assert sm[sock].vEgo == n
|
||||
|
||||
|
||||
class TestPubMaster:
|
||||
|
||||
def setup_method(self):
|
||||
# ZMQ pub socket takes too long to die
|
||||
# sleep to prevent multiple publishers error between tests
|
||||
zmq_sleep(3)
|
||||
|
||||
def test_init(self):
|
||||
messaging.PubMaster(events)
|
||||
|
||||
def test_send(self):
|
||||
socks = random_socks()
|
||||
pm = messaging.PubMaster(socks)
|
||||
sub_socks = {s: messaging.sub_sock(s, conflate=True, timeout=1000) for s in socks}
|
||||
zmq_sleep()
|
||||
|
||||
# PubMaster accepts either a capnp msg builder or bytes
|
||||
for capnp in [True, False]:
|
||||
for i in range(100):
|
||||
sock = socks[i % len(socks)]
|
||||
|
||||
if capnp:
|
||||
try:
|
||||
msg = messaging.new_message(sock)
|
||||
except Exception:
|
||||
msg = messaging.new_message(sock, random.randrange(50))
|
||||
else:
|
||||
msg = random_bytes()
|
||||
|
||||
pm.send(sock, msg)
|
||||
recvd = sub_socks[sock].receive()
|
||||
|
||||
if capnp:
|
||||
msg.clear_write_flag()
|
||||
msg = msg.to_bytes()
|
||||
assert msg == recvd, i
|
||||
21
cereal/messaging/tests/test_services.py
Normal file
@ -0,0 +1,21 @@
|
||||
import os
|
||||
import tempfile
|
||||
from typing import Dict
|
||||
from parameterized import parameterized
|
||||
|
||||
import cereal.services as services
|
||||
from cereal.services import SERVICE_LIST
|
||||
|
||||
|
||||
class TestServices:
|
||||
|
||||
@parameterized.expand(SERVICE_LIST.keys())
|
||||
def test_services(self, s):
|
||||
service = SERVICE_LIST[s]
|
||||
assert service.frequency <= 104
|
||||
assert service.decimation != 0
|
||||
|
||||
def test_generated_header(self):
|
||||
with tempfile.NamedTemporaryFile(suffix=".h") as f:
|
||||
ret = os.system(f"python3 {services.__file__} > {f.name} && clang++ {f.name} -std=c++11")
|
||||
assert ret == 0, "generated services header is not valid C"
|
||||
222
cereal/messaging/tests/validate_sp_cereal_upstream.py
Executable file
@ -0,0 +1,222 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import sys
|
||||
from typing import Any, List, Tuple
|
||||
|
||||
DEBUG = False
|
||||
|
||||
|
||||
def print_debug(string: str) -> None:
|
||||
if DEBUG:
|
||||
print(string)
|
||||
|
||||
|
||||
def create_schema_instance(struct: Any, prop: Tuple[str, Any]) -> Any:
|
||||
"""
|
||||
Create a new instance of a schema type, handling different field types.
|
||||
|
||||
Args:
|
||||
struct: The Cap'n Proto schema structure
|
||||
prop: A tuple containing the field name and field metadata
|
||||
|
||||
Returns:
|
||||
A new initialized schema instance
|
||||
"""
|
||||
struct_instance = struct.new_message()
|
||||
field_name, field_metadata = prop
|
||||
|
||||
try:
|
||||
field_type = field_metadata.proto.slot.type.which()
|
||||
|
||||
# Initialize different types of fields
|
||||
if field_type in ('list', 'text', 'data'):
|
||||
struct_instance.init(field_name, 1)
|
||||
print_debug(f"Initialized list/text/data field: {field_name}")
|
||||
elif field_type in ('struct', 'object'):
|
||||
struct_instance.init(field_name)
|
||||
print_debug(f"Initialized struct/object field: {field_name}")
|
||||
|
||||
return struct_instance
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error creating instance for {field_name}: {e}")
|
||||
return None
|
||||
|
||||
|
||||
def get_schema_fields(schema_struct: Any) -> List[Tuple[str, Any]]:
|
||||
"""
|
||||
Retrieve all fields from a given schema structure.
|
||||
|
||||
Args:
|
||||
schema_struct: The Cap'n Proto schema structure
|
||||
|
||||
Returns:
|
||||
A list of field names and their metadata
|
||||
"""
|
||||
try:
|
||||
# Get all fields from the schema
|
||||
schema_fields = list(schema_struct.schema.fields.items())
|
||||
|
||||
print_debug("Discovered schema fields:")
|
||||
for field_name, field_metadata in schema_fields:
|
||||
print_debug(f"- {field_name}")
|
||||
|
||||
return schema_fields
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error retrieving schema fields: {e}")
|
||||
return []
|
||||
|
||||
|
||||
def generate_schema_instances(schema_struct: Any) -> List[Any]:
|
||||
"""
|
||||
Generate instances for all fields in a given schema.
|
||||
|
||||
Args:
|
||||
schema_struct: The Cap'n Proto schema structure
|
||||
|
||||
Returns:
|
||||
A list of schema instances
|
||||
"""
|
||||
schema_fields = get_schema_fields(schema_struct)
|
||||
instances = []
|
||||
|
||||
for field_prop in schema_fields:
|
||||
try:
|
||||
instance = create_schema_instance(schema_struct, field_prop)
|
||||
if instance is not None:
|
||||
instances.append(instance)
|
||||
except Exception as e:
|
||||
print(f"Skipping field due to error: {e}")
|
||||
|
||||
print(f"Generated {len(instances)} schema instances")
|
||||
return instances
|
||||
|
||||
|
||||
def persist_instances(instances: List[Any], filename: str) -> None:
|
||||
"""
|
||||
Write schema instances to a binary file.
|
||||
|
||||
Args:
|
||||
instances: List of schema instances
|
||||
filename: Output file path
|
||||
"""
|
||||
try:
|
||||
with open(filename, 'wb') as f:
|
||||
for instance in instances:
|
||||
f.write(instance.to_bytes())
|
||||
|
||||
print(f"Successfully wrote {len(instances)} instances to {filename}")
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error persisting instances: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def read_instances(filename: str, schema_type: Any) -> List[Any]:
|
||||
"""
|
||||
Read schema instances from a binary file.
|
||||
|
||||
Args:
|
||||
filename: Input file path
|
||||
schema_type: The schema type to use for reading
|
||||
|
||||
Returns:
|
||||
A list of read schema instances
|
||||
"""
|
||||
try:
|
||||
with open(filename, 'rb') as f:
|
||||
data = f.read()
|
||||
|
||||
instances = list(schema_type.read_multiple_bytes(data))
|
||||
|
||||
print(f"Read {len(instances)} instances from {filename}")
|
||||
return instances
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error reading instances: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def compare_schemas(original_instances: List[Any], read_instances: List[Any]) -> bool:
|
||||
"""
|
||||
Compare original and read-back instances to detect potential breaking changes.
|
||||
|
||||
Args:
|
||||
original_instances: List of originally generated instances
|
||||
read_instances: List of instances read back from file
|
||||
|
||||
Returns:
|
||||
Boolean indicating whether schemas appear compatible
|
||||
"""
|
||||
if len(original_instances) != len(read_instances):
|
||||
print("❌ Schema Compatibility Warning: Instance count mismatch")
|
||||
return False
|
||||
|
||||
compatible = True
|
||||
for struct in read_instances:
|
||||
try:
|
||||
getattr(struct, struct.which()) # Attempting to access the field to validate readability
|
||||
except Exception as e:
|
||||
print(f"❌ Structural change detected: {struct.which()} is not readable.\nFull error: {e}")
|
||||
compatible = False
|
||||
|
||||
return compatible
|
||||
|
||||
|
||||
def main():
|
||||
"""
|
||||
CLI entry point for schema compatibility testing.
|
||||
"""
|
||||
# Setup argument parser
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Cap\'n Proto Schema Compatibility Testing Tool',
|
||||
epilog='Test schema compatibility by generating and reading back instances.'
|
||||
)
|
||||
|
||||
# Add mutually exclusive group for generation or reading mode
|
||||
mode_group = parser.add_mutually_exclusive_group(required=True)
|
||||
mode_group.add_argument('-g', '--generate', action='store_true',
|
||||
help='Generate schema instances')
|
||||
mode_group.add_argument('-r', '--read', action='store_true',
|
||||
help='Read and validate schema instances')
|
||||
|
||||
# Common arguments
|
||||
parser.add_argument('-f', '--file',
|
||||
default='schema_instances.bin',
|
||||
help='Output/input binary file (default: schema_instances.bin)')
|
||||
|
||||
# Parse arguments
|
||||
args = parser.parse_args()
|
||||
|
||||
# Import the schema dynamically
|
||||
try:
|
||||
from cereal import log
|
||||
schema_type = log.Event
|
||||
except ImportError:
|
||||
print("Error: Unable to import schema. Ensure 'cereal' is installed.")
|
||||
sys.exit(1)
|
||||
|
||||
# Execute based on mode
|
||||
if args.generate:
|
||||
print("🔧 Generating Schema Instances")
|
||||
instances = generate_schema_instances(schema_type)
|
||||
persist_instances(instances, args.file)
|
||||
print("✅ Instance generation complete")
|
||||
|
||||
elif args.read:
|
||||
print("🔍 Reading and Validating Schema Instances")
|
||||
generated_instances = generate_schema_instances(schema_type)
|
||||
read_back_instances = read_instances(args.file, schema_type)
|
||||
|
||||
# Compare schemas
|
||||
if compare_schemas(generated_instances, read_back_instances):
|
||||
print("✅ Schema Compatibility: No breaking changes detected")
|
||||
sys.exit(0)
|
||||
else:
|
||||
print("❌ Potential Schema Breaking Changes Detected")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
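A hedged sketch of how the compatibility tool above is meant to be driven: generate instances against a baseline schema, then read them back after editing log.capnp. The /tmp path is illustrative; -g, -r and -f are the flags defined in main().

import subprocess

tool = "cereal/messaging/tests/validate_sp_cereal_upstream.py"
out = "/tmp/schema_instances.bin"  # illustrative path
subprocess.run(["python3", tool, "-g", "-f", out], check=True)  # run on the baseline schema
# ...edit cereal/log.capnp and rebuild, then:
subprocess.run(["python3", tool, "-r", "-f", out], check=True)  # exits non-zero on breakage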
101
cereal/services.h
Normal file
@ -0,0 +1,101 @@
|
||||
/* THIS IS AN AUTOGENERATED FILE, PLEASE EDIT services.py */
|
||||
#ifndef __SERVICES_H
|
||||
#define __SERVICES_H
|
||||
#include <map>
|
||||
#include <string>
|
||||
struct service { std::string name; bool should_log; int frequency; int decimation; };
|
||||
static std::map<std::string, service> services = {
|
||||
{ "gyroscope", {"gyroscope", true, 104, 104}},
|
||||
{ "gyroscope2", {"gyroscope2", true, 100, 100}},
|
||||
{ "accelerometer", {"accelerometer", true, 104, 104}},
|
||||
{ "accelerometer2", {"accelerometer2", true, 100, 100}},
|
||||
{ "magnetometer", {"magnetometer", true, 25, -1}},
|
||||
{ "lightSensor", {"lightSensor", true, 100, 100}},
|
||||
{ "temperatureSensor", {"temperatureSensor", true, 2, 200}},
|
||||
{ "temperatureSensor2", {"temperatureSensor2", true, 2, 200}},
|
||||
{ "gpsNMEA", {"gpsNMEA", true, 9, -1}},
|
||||
{ "deviceState", {"deviceState", true, 2, 1}},
|
||||
{ "touch", {"touch", true, 20, 1}},
|
||||
{ "can", {"can", true, 100, 2053}},
|
||||
{ "controlsState", {"controlsState", true, 100, 10}},
|
||||
{ "selfdriveState", {"selfdriveState", true, 100, 10}},
|
||||
{ "pandaStates", {"pandaStates", true, 10, 1}},
|
||||
{ "peripheralState", {"peripheralState", true, 2, 1}},
|
||||
{ "radarState", {"radarState", true, 20, 5}},
|
||||
{ "roadEncodeIdx", {"roadEncodeIdx", false, 20, 1}},
|
||||
{ "liveTracks", {"liveTracks", true, 20, -1}},
|
||||
{ "sendcan", {"sendcan", true, 100, 139}},
|
||||
{ "logMessage", {"logMessage", true, 0, -1}},
|
||||
{ "errorLogMessage", {"errorLogMessage", true, 0, 1}},
|
||||
{ "liveCalibration", {"liveCalibration", true, 4, 4}},
|
||||
{ "liveTorqueParameters", {"liveTorqueParameters", true, 4, 1}},
|
||||
{ "liveDelay", {"liveDelay", true, 4, 1}},
|
||||
{ "androidLog", {"androidLog", true, 0, -1}},
|
||||
{ "carState", {"carState", true, 100, 10}},
|
||||
{ "carControl", {"carControl", true, 100, 10}},
|
||||
{ "carOutput", {"carOutput", true, 100, 10}},
|
||||
{ "longitudinalPlan", {"longitudinalPlan", true, 20, 10}},
|
||||
{ "driverAssistance", {"driverAssistance", true, 20, 20}},
|
||||
{ "procLog", {"procLog", true, 0, 15}},
|
||||
{ "gpsLocationExternal", {"gpsLocationExternal", true, 10, 10}},
|
||||
{ "gpsLocation", {"gpsLocation", true, 1, 1}},
|
||||
{ "ubloxGnss", {"ubloxGnss", true, 10, -1}},
|
||||
{ "qcomGnss", {"qcomGnss", true, 2, -1}},
|
||||
{ "gnssMeasurements", {"gnssMeasurements", true, 10, 10}},
|
||||
{ "clocks", {"clocks", true, 0, 1}},
|
||||
{ "ubloxRaw", {"ubloxRaw", true, 20, -1}},
|
||||
{ "livePose", {"livePose", true, 20, 4}},
|
||||
{ "liveParameters", {"liveParameters", true, 20, 5}},
|
||||
{ "cameraOdometry", {"cameraOdometry", true, 20, 10}},
|
||||
{ "thumbnail", {"thumbnail", true, 0, 1}},
|
||||
{ "onroadEvents", {"onroadEvents", true, 1, 1}},
|
||||
{ "carParams", {"carParams", true, 0, 1}},
|
||||
{ "roadCameraState", {"roadCameraState", true, 20, 20}},
|
||||
{ "driverCameraState", {"driverCameraState", true, 20, 20}},
|
||||
{ "driverEncodeIdx", {"driverEncodeIdx", false, 20, 1}},
|
||||
{ "driverStateV2", {"driverStateV2", true, 20, 10}},
|
||||
{ "driverMonitoringState", {"driverMonitoringState", true, 20, 10}},
|
||||
{ "wideRoadEncodeIdx", {"wideRoadEncodeIdx", false, 20, 1}},
|
||||
{ "wideRoadCameraState", {"wideRoadCameraState", true, 20, 20}},
|
||||
{ "drivingModelData", {"drivingModelData", true, 20, 10}},
|
||||
{ "modelV2", {"modelV2", true, 20, -1}},
|
||||
{ "managerState", {"managerState", true, 2, 1}},
|
||||
{ "uploaderState", {"uploaderState", true, 0, 1}},
|
||||
{ "navInstruction", {"navInstruction", true, 1, 10}},
|
||||
{ "navRoute", {"navRoute", true, 0, -1}},
|
||||
{ "navThumbnail", {"navThumbnail", true, 0, -1}},
|
||||
{ "qRoadEncodeIdx", {"qRoadEncodeIdx", false, 20, -1}},
|
||||
{ "userBookmark", {"userBookmark", true, 0, 1}},
|
||||
{ "soundPressure", {"soundPressure", true, 10, 10}},
|
||||
{ "rawAudioData", {"rawAudioData", false, 20, -1}},
|
||||
{ "bookmarkButton", {"bookmarkButton", true, 0, 1}},
|
||||
{ "audioFeedback", {"audioFeedback", true, 0, 1}},
|
||||
{ "modelManagerSP", {"modelManagerSP", false, 1, 1}},
|
||||
{ "backupManagerSP", {"backupManagerSP", false, 1, 1}},
|
||||
{ "selfdriveStateSP", {"selfdriveStateSP", true, 100, 10}},
|
||||
{ "longitudinalPlanSP", {"longitudinalPlanSP", true, 20, 10}},
|
||||
{ "onroadEventsSP", {"onroadEventsSP", true, 1, 1}},
|
||||
{ "carParamsSP", {"carParamsSP", true, 0, 1}},
|
||||
{ "carControlSP", {"carControlSP", true, 100, 10}},
|
||||
{ "carStateSP", {"carStateSP", true, 100, 10}},
|
||||
{ "liveMapDataSP", {"liveMapDataSP", true, 1, 1}},
|
||||
{ "modelDataV2SP", {"modelDataV2SP", true, 20, -1}},
|
||||
{ "uiDebug", {"uiDebug", true, 0, 1}},
|
||||
{ "testJoystick", {"testJoystick", true, 0, -1}},
|
||||
{ "alertDebug", {"alertDebug", true, 20, 5}},
|
||||
{ "roadEncodeData", {"roadEncodeData", false, 20, -1}},
|
||||
{ "driverEncodeData", {"driverEncodeData", false, 20, -1}},
|
||||
{ "wideRoadEncodeData", {"wideRoadEncodeData", false, 20, -1}},
|
||||
{ "qRoadEncodeData", {"qRoadEncodeData", false, 20, -1}},
|
||||
{ "livestreamWideRoadEncodeIdx", {"livestreamWideRoadEncodeIdx", false, 20, -1}},
|
||||
{ "livestreamRoadEncodeIdx", {"livestreamRoadEncodeIdx", false, 20, -1}},
|
||||
{ "livestreamDriverEncodeIdx", {"livestreamDriverEncodeIdx", false, 20, -1}},
|
||||
{ "livestreamWideRoadEncodeData", {"livestreamWideRoadEncodeData", false, 20, -1}},
|
||||
{ "livestreamRoadEncodeData", {"livestreamRoadEncodeData", false, 20, -1}},
|
||||
{ "livestreamDriverEncodeData", {"livestreamDriverEncodeData", false, 20, -1}},
|
||||
{ "customReservedRawData0", {"customReservedRawData0", true, 0, -1}},
|
||||
{ "customReservedRawData1", {"customReservedRawData1", true, 0, -1}},
|
||||
{ "customReservedRawData2", {"customReservedRawData2", true, 0, -1}},
|
||||
};
|
||||
#endif
|
||||
|
||||
138
cereal/services.py
Executable file
@ -0,0 +1,138 @@
|
||||
#!/usr/bin/env python3
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class Service:
|
||||
def __init__(self, should_log: bool, frequency: float, decimation: Optional[int] = None):
|
||||
self.should_log = should_log
|
||||
self.frequency = frequency
|
||||
self.decimation = decimation
|
||||
|
||||
|
||||
_services: dict[str, tuple] = {
|
||||
# service: (should_log, frequency, qlog decimation (optional))
|
||||
# note: the "EncodeIdx" packets will still be in the log
|
||||
"gyroscope": (True, 104., 104),
|
||||
"gyroscope2": (True, 100., 100),
|
||||
"accelerometer": (True, 104., 104),
|
||||
"accelerometer2": (True, 100., 100),
|
||||
"magnetometer": (True, 25.),
|
||||
"lightSensor": (True, 100., 100),
|
||||
"temperatureSensor": (True, 2., 200),
|
||||
"temperatureSensor2": (True, 2., 200),
|
||||
"gpsNMEA": (True, 9.),
|
||||
"deviceState": (True, 2., 1),
|
||||
"touch": (True, 20., 1),
|
||||
"can": (True, 100., 2053), # decimation gives ~3 msgs in a full segment
|
||||
"controlsState": (True, 100., 10),
|
||||
"selfdriveState": (True, 100., 10),
|
||||
"pandaStates": (True, 10., 1),
|
||||
"peripheralState": (True, 2., 1),
|
||||
"radarState": (True, 20., 5),
|
||||
"roadEncodeIdx": (False, 20., 1),
|
||||
"liveTracks": (True, 20.),
|
||||
"sendcan": (True, 100., 139),
|
||||
"logMessage": (True, 0.),
|
||||
"errorLogMessage": (True, 0., 1),
|
||||
"liveCalibration": (True, 4., 4),
|
||||
"liveTorqueParameters": (True, 4., 1),
|
||||
"liveDelay": (True, 4., 1),
|
||||
"androidLog": (True, 0.),
|
||||
"carState": (True, 100., 10),
|
||||
"carControl": (True, 100., 10),
|
||||
"carOutput": (True, 100., 10),
|
||||
"longitudinalPlan": (True, 20., 10),
|
||||
"driverAssistance": (True, 20., 20),
|
||||
"procLog": (True, 0.5, 15),
|
||||
"gpsLocationExternal": (True, 10., 10),
|
||||
"gpsLocation": (True, 1., 1),
|
||||
"ubloxGnss": (True, 10.),
|
||||
"qcomGnss": (True, 2.),
|
||||
"gnssMeasurements": (True, 10., 10),
|
||||
"clocks": (True, 0.1, 1),
|
||||
"ubloxRaw": (True, 20.),
|
||||
"livePose": (True, 20., 4),
|
||||
"liveParameters": (True, 20., 5),
|
||||
"cameraOdometry": (True, 20., 10),
|
||||
"thumbnail": (True, 1 / 60., 1),
|
||||
"onroadEvents": (True, 1., 1),
|
||||
"carParams": (True, 0.02, 1),
|
||||
"roadCameraState": (True, 20., 20),
|
||||
"driverCameraState": (True, 20., 20),
|
||||
"driverEncodeIdx": (False, 20., 1),
|
||||
"driverStateV2": (True, 20., 10),
|
||||
"driverMonitoringState": (True, 20., 10),
|
||||
"wideRoadEncodeIdx": (False, 20., 1),
|
||||
"wideRoadCameraState": (True, 20., 20),
|
||||
"drivingModelData": (True, 20., 10),
|
||||
"modelV2": (True, 20.),
|
||||
"managerState": (True, 2., 1),
|
||||
"uploaderState": (True, 0., 1),
|
||||
"navInstruction": (True, 1., 10),
|
||||
"navRoute": (True, 0.),
|
||||
"navThumbnail": (True, 0.),
|
||||
"qRoadEncodeIdx": (False, 20.),
|
||||
"userBookmark": (True, 0., 1),
|
||||
"soundPressure": (True, 10., 10),
|
||||
"rawAudioData": (False, 20.),
|
||||
"bookmarkButton": (True, 0., 1),
|
||||
"audioFeedback": (True, 0., 1),
|
||||
|
||||
# sunnypilot
|
||||
"modelManagerSP": (False, 1., 1),
|
||||
"backupManagerSP": (False, 1., 1),
|
||||
"selfdriveStateSP": (True, 100., 10),
|
||||
"longitudinalPlanSP": (True, 20., 10),
|
||||
"onroadEventsSP": (True, 1., 1),
|
||||
"carParamsSP": (True, 0.02, 1),
|
||||
"carControlSP": (True, 100., 10),
|
||||
"carStateSP": (True, 100., 10),
|
||||
"liveMapDataSP": (True, 1., 1),
|
||||
"modelDataV2SP": (True, 20.),
|
||||
|
||||
# debug
|
||||
"uiDebug": (True, 0., 1),
|
||||
"testJoystick": (True, 0.),
|
||||
"alertDebug": (True, 20., 5),
|
||||
"roadEncodeData": (False, 20.),
|
||||
"driverEncodeData": (False, 20.),
|
||||
"wideRoadEncodeData": (False, 20.),
|
||||
"qRoadEncodeData": (False, 20.),
|
||||
"livestreamWideRoadEncodeIdx": (False, 20.),
|
||||
"livestreamRoadEncodeIdx": (False, 20.),
|
||||
"livestreamDriverEncodeIdx": (False, 20.),
|
||||
"livestreamWideRoadEncodeData": (False, 20.),
|
||||
"livestreamRoadEncodeData": (False, 20.),
|
||||
"livestreamDriverEncodeData": (False, 20.),
|
||||
"customReservedRawData0": (True, 0.),
|
||||
"customReservedRawData1": (True, 0.),
|
||||
"customReservedRawData2": (True, 0.),
|
||||
}
|
||||
SERVICE_LIST = {name: Service(*vals) for name, vals in _services.items()}
|
||||
|
||||
|
||||
def build_header():
|
||||
h = ""
|
||||
h += "/* THIS IS AN AUTOGENERATED FILE, PLEASE EDIT services.py */\n"
|
||||
h += "#ifndef __SERVICES_H\n"
|
||||
h += "#define __SERVICES_H\n"
|
||||
|
||||
h += "#include <map>\n"
|
||||
h += "#include <string>\n"
|
||||
|
||||
h += "struct service { std::string name; bool should_log; int frequency; int decimation; };\n"
|
||||
h += "static std::map<std::string, service> services = {\n"
|
||||
for k, v in SERVICE_LIST.items():
|
||||
should_log = "true" if v.should_log else "false"
|
||||
decimation = -1 if v.decimation is None else v.decimation
|
||||
h += ' { "%s", {"%s", %s, %d, %d}},\n' % \
|
||||
(k, k, should_log, v.frequency, decimation)
|
||||
h += "};\n"
|
||||
|
||||
h += "#endif\n"
|
||||
return h
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print(build_header())
|
||||
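A short illustration of how SERVICE_LIST is consumed and how the C++ header is regenerated; the printed values mirror the carState entry above.

from cereal.services import SERVICE_LIST, build_header

svc = SERVICE_LIST["carState"]
print(svc.frequency, svc.decimation, svc.should_log)  # 100.0 10 True

# Regenerate the header, equivalent to: python3 cereal/services.py > cereal/services.h
with open("cereal/services.h", "w") as f:
  f.write(build_header())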
1
common/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
*.cpp
|
||||
0
common/__init__.py
Normal file
22
common/api/__init__.py
Normal file
@ -0,0 +1,22 @@
|
||||
from openpilot.common.api.comma_connect import CommaConnectApi
|
||||
|
||||
|
||||
class Api:
|
||||
def __init__(self, dongle_id):
|
||||
self.service = CommaConnectApi(dongle_id)
|
||||
|
||||
def request(self, method, endpoint, **params):
|
||||
return self.service.request(method, endpoint, **params)
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
return self.service.get(*args, **kwargs)
|
||||
|
||||
def post(self, *args, **kwargs):
|
||||
return self.service.post(*args, **kwargs)
|
||||
|
||||
def get_token(self, expiry_hours=1):
|
||||
return self.service.get_token(expiry_hours)
|
||||
|
||||
|
||||
def api_get(endpoint, method='GET', timeout=None, access_token=None, **params):
|
||||
return CommaConnectApi(None).api_get(endpoint, method, timeout, access_token, **params)
|
||||
56
common/api/base.py
Normal file
@ -0,0 +1,56 @@
|
||||
import jwt
|
||||
import requests
|
||||
import unicodedata
|
||||
from datetime import datetime, timedelta, UTC
|
||||
from openpilot.system.hardware.hw import Paths
|
||||
from openpilot.system.version import get_version
|
||||
|
||||
|
||||
class BaseApi:
|
||||
def __init__(self, dongle_id, api_host, user_agent="openpilot-"):
|
||||
self.dongle_id = dongle_id
|
||||
self.api_host = api_host
|
||||
self.user_agent = user_agent
|
||||
with open(f'{Paths.persist_root()}/comma/id_rsa') as f:
|
||||
self.private_key = f.read()
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
return self.request('GET', *args, **kwargs)
|
||||
|
||||
def post(self, *args, **kwargs):
|
||||
return self.request('POST', *args, **kwargs)
|
||||
|
||||
def request(self, method, endpoint, timeout=None, access_token=None, **params):
|
||||
return self.api_get(endpoint, method=method, timeout=timeout, access_token=access_token, **params)
|
||||
|
||||
def _get_token(self, expiry_hours=1, **extra_payload):
|
||||
now = datetime.now(UTC).replace(tzinfo=None)
|
||||
payload = {
|
||||
'identity': self.dongle_id,
|
||||
'nbf': now,
|
||||
'iat': now,
|
||||
'exp': now + timedelta(hours=expiry_hours),
|
||||
**extra_payload
|
||||
}
|
||||
token = jwt.encode(payload, self.private_key, algorithm='RS256')
|
||||
if isinstance(token, bytes):
|
||||
token = token.decode('utf8')
|
||||
return token
|
||||
|
||||
def get_token(self, expiry_hours=1):
|
||||
return self._get_token(expiry_hours)
|
||||
|
||||
def remove_non_ascii_chars(self, text):
|
||||
normalized_text = unicodedata.normalize('NFD', text)
|
||||
ascii_encoded_text = normalized_text.encode('ascii', 'ignore')
|
||||
return ascii_encoded_text.decode()
|
||||
|
||||
def api_get(self, endpoint, method='GET', timeout=None, access_token=None, json=None, **params):
|
||||
headers = {}
|
||||
if access_token is not None:
|
||||
headers['Authorization'] = "JWT " + access_token
|
||||
|
||||
version = self.remove_non_ascii_chars(get_version())
|
||||
headers['User-Agent'] = self.user_agent + version
|
||||
|
||||
return requests.request(method, f"{self.api_host}/{endpoint}", timeout=timeout, headers=headers, json=json, params=params)
|
||||
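A hypothetical sketch of the request flow above. It assumes a provisioned device, i.e. a real dongle ID and the RSA key that BaseApi reads from Paths.persist_root(); the dongle ID and endpoint string here are made up for illustration.

from openpilot.common.api import Api

api = Api("0123456789abcdef")          # hypothetical dongle ID
token = api.get_token(expiry_hours=1)  # RS256 JWT signed with the on-device key
resp = api.get("v1/example_endpoint", access_token=token, timeout=10)  # endpoint name is illustrative
print(resp.status_code)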
11
common/api/comma_connect.py
Normal file
@ -0,0 +1,11 @@
|
||||
import os
|
||||
|
||||
from openpilot.common.api.base import BaseApi
|
||||
|
||||
API_HOST = os.getenv('API_HOST', 'https://api.commadotai.com')
|
||||
|
||||
|
||||
class CommaConnectApi(BaseApi):
|
||||
def __init__(self, dongle_id):
|
||||
super().__init__(dongle_id, API_HOST)
|
||||
self.user_agent = "openpilot-"
|
||||
4
common/basedir.py
Normal file
@ -0,0 +1,4 @@
|
||||
import os
|
||||
|
||||
|
||||
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../"))
|
||||
28
common/clutil.h
Normal file
@ -0,0 +1,28 @@
|
||||
#pragma once
|
||||
|
||||
#ifdef __APPLE__
|
||||
#include <OpenCL/cl.h>
|
||||
#else
|
||||
#include <CL/cl.h>
|
||||
#endif
|
||||
|
||||
#include <string>
|
||||
|
||||
#define CL_CHECK(_expr) \
|
||||
do { \
|
||||
assert(CL_SUCCESS == (_expr)); \
|
||||
} while (0)
|
||||
|
||||
#define CL_CHECK_ERR(_expr) \
|
||||
({ \
|
||||
cl_int err = CL_INVALID_VALUE; \
|
||||
__typeof__(_expr) _ret = _expr; \
|
||||
assert(_ret && err == CL_SUCCESS); \
|
||||
_ret; \
|
||||
})
|
||||
|
||||
cl_device_id cl_get_device_id(cl_device_type device_type);
|
||||
cl_context cl_create_context(cl_device_id device_id);
|
||||
void cl_release_context(cl_context context);
|
||||
cl_program cl_program_from_source(cl_context ctx, cl_device_id device_id, const std::string& src, const char* args = nullptr);
|
||||
cl_program cl_program_from_file(cl_context ctx, cl_device_id device_id, const char* path, const char* args);
|
||||
23
common/constants.py
Normal file
@ -0,0 +1,23 @@
|
||||
import numpy as np
|
||||
|
||||
# conversions
|
||||
class CV:
|
||||
# Speed
|
||||
MPH_TO_KPH = 1.609344
|
||||
KPH_TO_MPH = 1. / MPH_TO_KPH
|
||||
MS_TO_KPH = 3.6
|
||||
KPH_TO_MS = 1. / MS_TO_KPH
|
||||
MS_TO_MPH = MS_TO_KPH * KPH_TO_MPH
|
||||
MPH_TO_MS = MPH_TO_KPH * KPH_TO_MS
|
||||
MS_TO_KNOTS = 1.9438
|
||||
KNOTS_TO_MS = 1. / MS_TO_KNOTS
|
||||
|
||||
# Angle
|
||||
DEG_TO_RAD = np.pi / 180.
|
||||
RAD_TO_DEG = 1. / DEG_TO_RAD
|
||||
|
||||
# Mass
|
||||
LB_TO_KG = 0.453592
|
||||
|
||||
|
||||
ACCELERATION_DUE_TO_GRAVITY = 9.81 # m/s^2
|
||||
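A quick worked example of the conversion factors above (printed values rounded):

from openpilot.common.constants import CV

print(65 * CV.MPH_TO_MS)    # ~29.06 m/s
print(30 * CV.KPH_TO_MS)    # ~8.33 m/s
print(0.5 * CV.RAD_TO_DEG)  # ~28.65 degrees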
9
common/dict_helpers.py
Normal file
@ -0,0 +1,9 @@
|
||||
# remove all keys that end in DEPRECATED
|
||||
def strip_deprecated_keys(d):
|
||||
for k in list(d.keys()):
|
||||
if isinstance(k, str):
|
||||
if k.endswith('DEPRECATED'):
|
||||
d.pop(k)
|
||||
elif isinstance(d[k], dict):
|
||||
strip_deprecated_keys(d[k])
|
||||
return d
|
||||
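An illustrative round trip through strip_deprecated_keys; the dictionary contents are made up for the example.

from openpilot.common.dict_helpers import strip_deprecated_keys

d = {"steerRatioDEPRECATED": 1.0, "tuning": {"kpDEPRECATED": 0.2, "kf": 0.00006}}
print(strip_deprecated_keys(d))  # {'tuning': {'kf': 6e-05}}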
58
common/file_helpers.py
Normal file
@ -0,0 +1,58 @@
|
||||
import io
|
||||
import os
|
||||
import tempfile
|
||||
import contextlib
|
||||
import zstandard as zstd
|
||||
|
||||
LOG_COMPRESSION_LEVEL = 10 # little benefit up to level 15. level ~17 is a small step change
|
||||
|
||||
|
||||
class CallbackReader:
|
||||
"""Wraps a file, but overrides the read method to also
|
||||
call a callback function with the number of bytes read so far."""
|
||||
def __init__(self, f, callback, *args):
|
||||
self.f = f
|
||||
self.callback = callback
|
||||
self.cb_args = args
|
||||
self.total_read = 0
|
||||
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self.f, attr)
|
||||
|
||||
def read(self, *args, **kwargs):
|
||||
chunk = self.f.read(*args, **kwargs)
|
||||
self.total_read += len(chunk)
|
||||
self.callback(*self.cb_args, self.total_read)
|
||||
return chunk
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def atomic_write_in_dir(path: str, mode: str = 'w', buffering: int = -1, encoding: str | None = None, newline: str | None = None,
|
||||
overwrite: bool = False):
|
||||
"""Write to a file atomically using a temporary file in the same directory as the destination file."""
|
||||
dir_name = os.path.dirname(path)
|
||||
|
||||
if not overwrite and os.path.exists(path):
|
||||
raise FileExistsError(f"File '{path}' already exists. To overwrite it, set 'overwrite' to True.")
|
||||
|
||||
with tempfile.NamedTemporaryFile(mode=mode, buffering=buffering, encoding=encoding, newline=newline, dir=dir_name, delete=False) as tmp_file:
|
||||
yield tmp_file
|
||||
tmp_file_name = tmp_file.name
|
||||
os.replace(tmp_file_name, path)
|
||||
|
||||
|
||||
def get_upload_stream(filepath: str, should_compress: bool) -> tuple[io.BufferedIOBase, int]:
|
||||
if not should_compress:
|
||||
file_size = os.path.getsize(filepath)
|
||||
file_stream = open(filepath, "rb")
|
||||
return file_stream, file_size
|
||||
|
||||
# Compress the file on the fly
|
||||
compressed_stream = io.BytesIO()
|
||||
compressor = zstd.ZstdCompressor(level=LOG_COMPRESSION_LEVEL)
|
||||
|
||||
with open(filepath, "rb") as f:
|
||||
compressor.copy_stream(f, compressed_stream)
|
||||
compressed_size = compressed_stream.tell()
|
||||
compressed_stream.seek(0)
|
||||
return compressed_stream, compressed_size
|
||||
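A minimal sketch of the two helpers above; the paths are illustrative.

from openpilot.common.file_helpers import atomic_write_in_dir, get_upload_stream

with atomic_write_in_dir("/tmp/example.txt", overwrite=True) as f:
  f.write("hello\n")

stream, size = get_upload_stream("/tmp/example.txt", should_compress=True)
print(size)  # compressed size in bytes
stream.close()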
17
common/filter_simple.py
Normal file
@ -0,0 +1,17 @@
|
||||
class FirstOrderFilter:
|
||||
def __init__(self, x0, rc, dt, initialized=True):
|
||||
self.x = x0
|
||||
self.dt = dt
|
||||
self.update_alpha(rc)
|
||||
self.initialized = initialized
|
||||
|
||||
def update_alpha(self, rc):
|
||||
self.alpha = self.dt / (rc + self.dt)
|
||||
|
||||
def update(self, x):
|
||||
if self.initialized:
|
||||
self.x = (1. - self.alpha) * self.x + self.alpha * x
|
||||
else:
|
||||
self.initialized = True
|
||||
self.x = x
|
||||
return self.x
|
||||
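A worked example of the filter above: with dt = 0.01 s (100 Hz) and rc = 0.5 s, alpha = 0.01 / 0.51 ≈ 0.0196, so each update moves the state roughly 2% of the way toward the new sample.

from openpilot.common.filter_simple import FirstOrderFilter

f = FirstOrderFilter(x0=0.0, rc=0.5, dt=0.01)
for _ in range(100):  # one second of a constant input of 1.0
  f.update(1.0)
print(round(f.x, 2))  # ~0.86, close to 1 - exp(-t/rc) with t = 1 s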
42
common/git.py
Normal file
@ -0,0 +1,42 @@
|
||||
from functools import cache
|
||||
import subprocess
|
||||
from openpilot.common.run import run_cmd, run_cmd_default
|
||||
|
||||
|
||||
@cache
|
||||
def get_commit(cwd: str | None = None, branch: str = "HEAD") -> str:
|
||||
return run_cmd_default(["git", "rev-parse", branch], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_commit_date(cwd: str | None = None, commit: str = "HEAD") -> str:
|
||||
return run_cmd_default(["git", "show", "--no-patch", "--format='%ct %ci'", commit], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_short_branch(cwd: str = None) -> str:
|
||||
return run_cmd_default(["git", "rev-parse", "--abbrev-ref", "HEAD"], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_branch(cwd: str = None) -> str:
|
||||
return run_cmd_default(["git", "rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{u}"], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_origin(cwd: str = None) -> str:
|
||||
try:
|
||||
local_branch = run_cmd(["git", "name-rev", "--name-only", "HEAD"], cwd=cwd)
|
||||
tracking_remote = run_cmd(["git", "config", "branch." + local_branch + ".remote"], cwd=cwd)
|
||||
return run_cmd(["git", "config", "remote." + tracking_remote + ".url"], cwd=cwd)
|
||||
except subprocess.CalledProcessError: # Not on a branch, fallback
|
||||
return run_cmd_default(["git", "config", "--get", "remote.origin.url"], cwd=cwd)
|
||||
|
||||
|
||||
@cache
|
||||
def get_normalized_origin(cwd: str = None) -> str:
|
||||
return get_origin(cwd) \
|
||||
.replace("git@", "", 1) \
|
||||
.replace(".git", "", 1) \
|
||||
.replace("https://", "", 1) \
|
||||
.replace(":", "/", 1)
|
||||
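These wrappers all go through run_cmd_default, so outside a git checkout they return an empty string instead of raising. A short sketch (the origin value shown is only an example):

from openpilot.common.git import get_commit, get_short_branch, get_normalized_origin

commit = get_commit()              # "" when not inside a git checkout
branch = get_short_branch()
origin = get_normalized_origin()   # e.g. "github.com/sunnypilot/sunnypilot" for ssh or https remotes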
89
common/gpio.py
Normal file
89
common/gpio.py
Normal file
@ -0,0 +1,89 @@
|
||||
import os
|
||||
import fcntl
|
||||
import ctypes
|
||||
from functools import cache
|
||||
|
||||
def gpio_init(pin: int, output: bool) -> None:
|
||||
try:
|
||||
with open(f"/sys/class/gpio/gpio{pin}/direction", 'wb') as f:
|
||||
f.write(b"out" if output else b"in")
|
||||
except Exception as e:
|
||||
print(f"Failed to set gpio {pin} direction: {e}")
|
||||
|
||||
def gpio_set(pin: int, high: bool) -> None:
|
||||
try:
|
||||
with open(f"/sys/class/gpio/gpio{pin}/value", 'wb') as f:
|
||||
f.write(b"1" if high else b"0")
|
||||
except Exception as e:
|
||||
print(f"Failed to set gpio {pin} value: {e}")
|
||||
|
||||
def gpio_read(pin: int) -> bool | None:
|
||||
val = None
|
||||
try:
|
||||
with open(f"/sys/class/gpio/gpio{pin}/value", 'rb') as f:
|
||||
val = bool(int(f.read().strip()))
|
||||
except Exception as e:
|
||||
print(f"Failed to set gpio {pin} value: {e}")
|
||||
|
||||
return val
|
||||
|
||||
def gpio_export(pin: int) -> None:
|
||||
if os.path.isdir(f"/sys/class/gpio/gpio{pin}"):
|
||||
return
|
||||
|
||||
try:
|
||||
with open("/sys/class/gpio/export", 'w') as f:
|
||||
f.write(str(pin))
|
||||
except Exception:
|
||||
print(f"Failed to export gpio {pin}")
|
||||
|
||||
@cache
|
||||
def get_irq_action(irq: int) -> list[str]:
|
||||
try:
|
||||
with open(f"/sys/kernel/irq/{irq}/actions") as f:
|
||||
actions = f.read().strip().split(',')
|
||||
return actions
|
||||
except FileNotFoundError:
|
||||
return []
|
||||
|
||||
def get_irqs_for_action(action: str) -> list[str]:
|
||||
ret = []
|
||||
with open("/proc/interrupts") as f:
|
||||
for l in f.readlines():
|
||||
irq = l.split(':')[0].strip()
|
||||
if irq.isdigit() and action in get_irq_action(irq):
|
||||
ret.append(irq)
|
||||
return ret
|
||||
|
||||
# *** gpiochip ***
|
||||
|
||||
class gpioevent_data(ctypes.Structure):
|
||||
_fields_ = [
|
||||
("timestamp", ctypes.c_uint64),
|
||||
("id", ctypes.c_uint32),
|
||||
]
|
||||
|
||||
class gpioevent_request(ctypes.Structure):
|
||||
_fields_ = [
|
||||
("lineoffset", ctypes.c_uint32),
|
||||
("handleflags", ctypes.c_uint32),
|
||||
("eventflags", ctypes.c_uint32),
|
||||
("label", ctypes.c_char * 32),
|
||||
("fd", ctypes.c_int)
|
||||
]
|
||||
|
||||
def gpiochip_get_ro_value_fd(label: str, gpiochip_id: int, pin: int) -> int:
|
||||
GPIOEVENT_REQUEST_BOTH_EDGES = 0x3
|
||||
GPIOHANDLE_REQUEST_INPUT = 0x1
|
||||
GPIO_GET_LINEEVENT_IOCTL = 0xc030b404
|
||||
|
||||
rq = gpioevent_request()
|
||||
rq.lineoffset = pin
|
||||
rq.handleflags = GPIOHANDLE_REQUEST_INPUT
|
||||
rq.eventflags = GPIOEVENT_REQUEST_BOTH_EDGES
|
||||
rq.label = label.encode('utf-8')[:31] + b'\0'
|
||||
|
||||
fd = os.open(f"/dev/gpiochip{gpiochip_id}", os.O_RDONLY)
|
||||
fcntl.ioctl(fd, GPIO_GET_LINEEVENT_IOCTL, rq)
|
||||
os.close(fd)
|
||||
return int(rq.fd)
|
||||
8
common/gps.py
Normal file
8
common/gps.py
Normal file
@ -0,0 +1,8 @@
|
||||
from openpilot.common.params import Params
|
||||
|
||||
|
||||
def get_gps_location_service(params: Params) -> str:
|
||||
if params.get_bool("UbloxAvailable"):
|
||||
return "gpsLocationExternal"
|
||||
else:
|
||||
return "gpsLocation"
|
||||
249
common/logging_extra.py
Normal file
249
common/logging_extra.py
Normal file
@ -0,0 +1,249 @@
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
import copy
|
||||
import json
|
||||
import time
|
||||
import uuid
|
||||
import socket
|
||||
import logging
|
||||
import traceback
|
||||
import numpy as np
|
||||
from threading import local
|
||||
from collections import OrderedDict
|
||||
from contextlib import contextmanager
|
||||
|
||||
LOG_TIMESTAMPS = "LOG_TIMESTAMPS" in os.environ
|
||||
|
||||
def json_handler(obj):
|
||||
if isinstance(obj, np.bool_):
|
||||
return bool(obj)
|
||||
# if isinstance(obj, (datetime.date, datetime.time)):
|
||||
# return obj.isoformat()
|
||||
return repr(obj)
|
||||
|
||||
def json_robust_dumps(obj):
|
||||
return json.dumps(obj, default=json_handler)
|
||||
|
||||
class NiceOrderedDict(OrderedDict):
|
||||
def __str__(self):
|
||||
return json_robust_dumps(self)
|
||||
|
||||
class SwagFormatter(logging.Formatter):
|
||||
def __init__(self, swaglogger):
|
||||
logging.Formatter.__init__(self, None, '%a %b %d %H:%M:%S %Z %Y')
|
||||
|
||||
self.swaglogger = swaglogger
|
||||
self.host = socket.gethostname()
|
||||
|
||||
def format_dict(self, record):
|
||||
record_dict = NiceOrderedDict()
|
||||
|
||||
if isinstance(record.msg, dict):
|
||||
record_dict['msg'] = record.msg
|
||||
else:
|
||||
try:
|
||||
record_dict['msg'] = record.getMessage()
|
||||
except (ValueError, TypeError):
|
||||
record_dict['msg'] = [record.msg]+record.args
|
||||
|
||||
record_dict['ctx'] = self.swaglogger.get_ctx()
|
||||
|
||||
if record.exc_info:
|
||||
record_dict['exc_info'] = self.formatException(record.exc_info)
|
||||
|
||||
record_dict['level'] = record.levelname
|
||||
record_dict['levelnum'] = record.levelno
|
||||
record_dict['name'] = record.name
|
||||
record_dict['filename'] = record.filename
|
||||
record_dict['lineno'] = record.lineno
|
||||
record_dict['pathname'] = record.pathname
|
||||
record_dict['module'] = record.module
|
||||
record_dict['funcName'] = record.funcName
|
||||
record_dict['host'] = self.host
|
||||
record_dict['process'] = record.process
|
||||
record_dict['thread'] = record.thread
|
||||
record_dict['threadName'] = record.threadName
|
||||
record_dict['created'] = record.created
|
||||
|
||||
return record_dict
|
||||
|
||||
def format(self, record):
|
||||
if self.swaglogger is None:
|
||||
raise Exception("must set swaglogger before calling format()")
|
||||
return json_robust_dumps(self.format_dict(record))
|
||||
|
||||
class SwagLogFileFormatter(SwagFormatter):
|
||||
def fix_kv(self, k, v):
|
||||
# append type to names to preserve legacy naming in logs
|
||||
# avoids overlapping key namespaces with different types
|
||||
# e.g. log.info() creates 'msg' -> 'msg$s'
|
||||
# log.event() creates 'msg.health.logMonoTime' -> 'msg.health.logMonoTime$i'
|
||||
# because overlapping namespace 'msg' caused problems
|
||||
if isinstance(v, (str, bytes)):
|
||||
k += "$s"
|
||||
elif isinstance(v, float):
|
||||
k += "$f"
|
||||
elif isinstance(v, bool):
|
||||
k += "$b"
|
||||
elif isinstance(v, int):
|
||||
k += "$i"
|
||||
elif isinstance(v, dict):
|
||||
nv = {}
|
||||
for ik, iv in v.items():
|
||||
ik, iv = self.fix_kv(ik, iv)
|
||||
nv[ik] = iv
|
||||
v = nv
|
||||
elif isinstance(v, list):
|
||||
k += "$a"
|
||||
return k, v
|
||||
|
||||
def format(self, record):
|
||||
if isinstance(record, str):
|
||||
v = json.loads(record)
|
||||
else:
|
||||
v = self.format_dict(record)
|
||||
|
||||
mk, mv = self.fix_kv('msg', v['msg'])
|
||||
del v['msg']
|
||||
v[mk] = mv
|
||||
v['id'] = uuid.uuid4().hex
|
||||
|
||||
return json_robust_dumps(v)
|
||||
|
||||
class SwagErrorFilter(logging.Filter):
|
||||
def filter(self, record):
|
||||
return record.levelno < logging.ERROR
|
||||
|
||||
def _tmpfunc():
|
||||
return 0
|
||||
|
||||
def _srcfile():
|
||||
return os.path.normcase(_tmpfunc.__code__.co_filename)
|
||||
|
||||
class SwagLogger(logging.Logger):
|
||||
def __init__(self):
|
||||
logging.Logger.__init__(self, "swaglog")
|
||||
|
||||
self.global_ctx = {}
|
||||
|
||||
self.log_local = local()
|
||||
self.log_local.ctx = {}
|
||||
|
||||
def local_ctx(self):
|
||||
try:
|
||||
return self.log_local.ctx
|
||||
except AttributeError:
|
||||
self.log_local.ctx = {}
|
||||
return self.log_local.ctx
|
||||
|
||||
def get_ctx(self):
|
||||
return dict(self.local_ctx(), **self.global_ctx)
|
||||
|
||||
@contextmanager
|
||||
def ctx(self, **kwargs):
|
||||
old_ctx = self.local_ctx()
|
||||
self.log_local.ctx = copy.copy(old_ctx) or {}
|
||||
self.log_local.ctx.update(kwargs)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.log_local.ctx = old_ctx
|
||||
|
||||
def bind(self, **kwargs):
|
||||
self.local_ctx().update(kwargs)
|
||||
|
||||
def bind_global(self, **kwargs):
|
||||
self.global_ctx.update(kwargs)
|
||||
|
||||
def event(self, event, *args, **kwargs):
|
||||
evt = NiceOrderedDict()
|
||||
evt['event'] = event
|
||||
if args:
|
||||
evt['args'] = args
|
||||
evt.update(kwargs)
|
||||
if 'error' in kwargs:
|
||||
self.error(evt)
|
||||
elif 'debug' in kwargs:
|
||||
self.debug(evt)
|
||||
else:
|
||||
self.info(evt)
|
||||
|
||||
def timestamp(self, event_name):
|
||||
if LOG_TIMESTAMPS:
|
||||
t = time.monotonic()
|
||||
tstp = NiceOrderedDict()
|
||||
tstp['timestamp'] = NiceOrderedDict()
|
||||
tstp['timestamp']["event"] = event_name
|
||||
tstp['timestamp']["time"] = t*1e9
|
||||
self.debug(tstp)
|
||||
|
||||
def findCaller(self, stack_info=False, stacklevel=1):
|
||||
"""
|
||||
Find the stack frame of the caller so that we can note the source
|
||||
file name, line number and function name.
|
||||
"""
|
||||
f = sys._getframe(3)
|
||||
#On some versions of IronPython, currentframe() returns None if
|
||||
#IronPython isn't run with -X:Frames.
|
||||
if f is not None:
|
||||
f = f.f_back
|
||||
orig_f = f
|
||||
while f and stacklevel > 1:
|
||||
f = f.f_back
|
||||
stacklevel -= 1
|
||||
if not f:
|
||||
f = orig_f
|
||||
rv = "(unknown file)", 0, "(unknown function)", None
|
||||
while hasattr(f, "f_code"):
|
||||
co = f.f_code
|
||||
filename = os.path.normcase(co.co_filename)
|
||||
|
||||
if filename == _srcfile():
|
||||
f = f.f_back
|
||||
continue
|
||||
sinfo = None
|
||||
if stack_info:
|
||||
sio = io.StringIO()
|
||||
sio.write('Stack (most recent call last):\n')
|
||||
traceback.print_stack(f, file=sio)
|
||||
sinfo = sio.getvalue()
|
||||
if sinfo[-1] == '\n':
|
||||
sinfo = sinfo[:-1]
|
||||
sio.close()
|
||||
rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
|
||||
break
|
||||
return rv
|
||||
|
||||
if __name__ == "__main__":
|
||||
log = SwagLogger()
|
||||
|
||||
stdout_handler = logging.StreamHandler(sys.stdout)
|
||||
stdout_handler.setLevel(logging.INFO)
|
||||
stdout_handler.addFilter(SwagErrorFilter())
|
||||
log.addHandler(stdout_handler)
|
||||
|
||||
stderr_handler = logging.StreamHandler(sys.stderr)
|
||||
stderr_handler.setLevel(logging.ERROR)
|
||||
log.addHandler(stderr_handler)
|
||||
|
||||
log.info("asdasd %s", "a")
|
||||
log.info({'wut': 1})
|
||||
log.warning("warning")
|
||||
log.error("error")
|
||||
log.critical("critical")
|
||||
log.event("test", x="y")
|
||||
|
||||
with log.ctx():
|
||||
stdout_handler.setFormatter(SwagFormatter(log))
|
||||
stderr_handler.setFormatter(SwagFormatter(log))
|
||||
log.bind(user="some user")
|
||||
log.info("in req")
|
||||
print("")
|
||||
log.warning("warning")
|
||||
print("")
|
||||
log.error("error")
|
||||
print("")
|
||||
log.critical("critical")
|
||||
print("")
|
||||
log.event("do_req", a=1, b="c")
|
||||
45
common/markdown.py
Normal file
45
common/markdown.py
Normal file
@ -0,0 +1,45 @@
|
||||
HTML_REPLACEMENTS = [
|
||||
(r'&', r'&'),
|
||||
(r'"', r'"'),
|
||||
]
|
||||
|
||||
def parse_markdown(text: str, tab_length: int = 2) -> str:
|
||||
lines = text.split("\n")
|
||||
output: list[str] = []
|
||||
list_level = 0
|
||||
|
||||
def end_outstanding_lists(level: int, end_level: int) -> int:
|
||||
while level > end_level:
|
||||
level -= 1
|
||||
output.append("</ul>")
|
||||
if level > 0:
|
||||
output.append("</li>")
|
||||
return end_level
|
||||
|
||||
for i, line in enumerate(lines):
|
||||
if i + 1 < len(lines) and lines[i + 1].startswith("==="): # heading
|
||||
output.append(f"<h1>{line}</h1>")
|
||||
elif line.startswith("==="):
|
||||
pass
|
||||
elif line.lstrip().startswith("* "): # list
|
||||
line_level = 1 + line.count(" " * tab_length, 0, line.index("*"))
|
||||
if list_level >= line_level:
|
||||
list_level = end_outstanding_lists(list_level, line_level)
|
||||
else:
|
||||
list_level += 1
|
||||
if list_level > 1:
|
||||
output[-1] = output[-1].replace("</li>", "")
|
||||
output.append("<ul>")
|
||||
output.append(f"<li>{line.replace('*', '', 1).lstrip()}</li>")
|
||||
else:
|
||||
list_level = end_outstanding_lists(list_level, 0)
|
||||
if len(line) > 0:
|
||||
output.append(line)
|
||||
|
||||
end_outstanding_lists(list_level, 0)
|
||||
output_str = "\n".join(output) + "\n"
|
||||
|
||||
for (fr, to) in HTML_REPLACEMENTS:
|
||||
output_str = output_str.replace(fr, to)
|
||||
|
||||
return output_str
|
||||
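parse_markdown only understands "===" underlined headings and "*" bullet lists (nesting by tab_length spaces), escaping '&' and '"' at the end. A small sketch with illustrative input:

from openpilot.common.markdown import parse_markdown

notes = "Release Notes\n===\n* first change\n  * nested detail"
html = parse_markdown(notes)   # "<h1>Release Notes</h1>" followed by nested <ul>/<li> items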
85
common/mat.h
Normal file
85
common/mat.h
Normal file
@ -0,0 +1,85 @@
|
||||
#pragma once
|
||||
|
||||
typedef struct vec3 {
|
||||
float v[3];
|
||||
} vec3;
|
||||
|
||||
typedef struct vec4 {
|
||||
float v[4];
|
||||
} vec4;
|
||||
|
||||
typedef struct mat3 {
|
||||
float v[3*3];
|
||||
} mat3;
|
||||
|
||||
typedef struct mat4 {
|
||||
float v[4*4];
|
||||
} mat4;
|
||||
|
||||
static inline mat3 matmul3(const mat3 &a, const mat3 &b) {
|
||||
mat3 ret = {{0.0}};
|
||||
for (int r=0; r<3; r++) {
|
||||
for (int c=0; c<3; c++) {
|
||||
float v = 0.0;
|
||||
for (int k=0; k<3; k++) {
|
||||
v += a.v[r*3+k] * b.v[k*3+c];
|
||||
}
|
||||
ret.v[r*3+c] = v;
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
static inline vec3 matvecmul3(const mat3 &a, const vec3 &b) {
|
||||
vec3 ret = {{0.0}};
|
||||
for (int r=0; r<3; r++) {
|
||||
for (int c=0; c<3; c++) {
|
||||
ret.v[r] += a.v[r*3+c] * b.v[c];
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
static inline mat4 matmul(const mat4 &a, const mat4 &b) {
|
||||
mat4 ret = {{0.0}};
|
||||
for (int r=0; r<4; r++) {
|
||||
for (int c=0; c<4; c++) {
|
||||
float v = 0.0;
|
||||
for (int k=0; k<4; k++) {
|
||||
v += a.v[r*4+k] * b.v[k*4+c];
|
||||
}
|
||||
ret.v[r*4+c] = v;
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
static inline vec4 matvecmul(const mat4 &a, const vec4 &b) {
|
||||
vec4 ret = {{0.0}};
|
||||
for (int r=0; r<4; r++) {
|
||||
for (int c=0; c<4; c++) {
|
||||
ret.v[r] += a.v[r*4+c] * b.v[c];
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
// scales the input and output space of a transformation matrix
|
||||
// that assumes pixel-center origin.
|
||||
static inline mat3 transform_scale_buffer(const mat3 &in, float s) {
|
||||
// in_pt = ( transform(out_pt/s + 0.5) - 0.5) * s
|
||||
|
||||
mat3 transform_out = {{
|
||||
1.0f/s, 0.0f, 0.5f,
|
||||
0.0f, 1.0f/s, 0.5f,
|
||||
0.0f, 0.0f, 1.0f,
|
||||
}};
|
||||
|
||||
mat3 transform_in = {{
|
||||
s, 0.0f, -0.5f*s,
|
||||
0.0f, s, -0.5f*s,
|
||||
0.0f, 0.0f, 1.0f,
|
||||
}};
|
||||
|
||||
return matmul3(transform_in, matmul3(in, transform_out));
|
||||
}
|
||||
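For reference, transform_scale_buffer composes in-space and out-space rescalings around a pixel-center origin, i.e. in_pt = (transform(out_pt/s + 0.5) - 0.5) * s. A numpy sketch of the same math (not part of the commit, for illustration only):

import numpy as np

def transform_scale_buffer(m: np.ndarray, s: float) -> np.ndarray:
  # mirrors common/mat.h: rescale a 3x3 pixel-center-origin transform by s
  transform_out = np.array([[1.0 / s, 0.0, 0.5],
                            [0.0, 1.0 / s, 0.5],
                            [0.0, 0.0, 1.0]])
  transform_in = np.array([[s, 0.0, -0.5 * s],
                           [0.0, s, -0.5 * s],
                           [0.0, 0.0, 1.0]])
  return transform_in @ m @ transform_out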
50
common/mock/__init__.py
Normal file
50
common/mock/__init__.py
Normal file
@ -0,0 +1,50 @@
|
||||
"""
|
||||
Utilities for generating mock messages for testing.
|
||||
example in common/tests/test_mock.py
|
||||
"""
|
||||
|
||||
|
||||
import functools
|
||||
import threading
|
||||
from cereal.messaging import PubMaster
|
||||
from cereal.services import SERVICE_LIST
|
||||
from openpilot.common.mock.generators import generate_livePose
|
||||
from openpilot.common.realtime import Ratekeeper
|
||||
|
||||
|
||||
MOCK_GENERATOR = {
|
||||
"livePose": generate_livePose
|
||||
}
|
||||
|
||||
|
||||
def generate_messages_loop(services: list[str], done: threading.Event):
|
||||
pm = PubMaster(services)
|
||||
rk = Ratekeeper(100)
|
||||
i = 0
|
||||
while not done.is_set():
|
||||
for s in services:
|
||||
should_send = i % (100/SERVICE_LIST[s].frequency) == 0
|
||||
if should_send:
|
||||
message = MOCK_GENERATOR[s]()
|
||||
pm.send(s, message)
|
||||
i += 1
|
||||
rk.keep_time()
|
||||
|
||||
|
||||
def mock_messages(services: list[str] | str):
|
||||
if isinstance(services, str):
|
||||
services = [services]
|
||||
|
||||
def decorator(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
done = threading.Event()
|
||||
t = threading.Thread(target=generate_messages_loop, args=(services, done))
|
||||
t.start()
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
finally:
|
||||
done.set()
|
||||
t.join()
|
||||
return wrapper
|
||||
return decorator
|
||||
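mock_messages spins up a background thread that publishes the requested services at their SERVICE_LIST rate for the duration of the wrapped call. A hedged sketch of how a test might consume it (the SubMaster usage is an assumption, not shown in this commit):

import cereal.messaging as messaging
from openpilot.common.mock import mock_messages

@mock_messages(["livePose"])
def test_reads_pose():
  sm = messaging.SubMaster(["livePose"])
  while not sm.updated["livePose"]:
    sm.update(100)          # poll up to 100 ms at a time for a mocked message
  assert sm["livePose"].posenetOK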
14
common/mock/generators.py
Normal file
14
common/mock/generators.py
Normal file
@ -0,0 +1,14 @@
|
||||
from cereal import messaging
|
||||
|
||||
|
||||
def generate_livePose():
|
||||
msg = messaging.new_message('livePose')
|
||||
meas = {'x': 0.0, 'y': 0.0, 'z': 0.0, 'xStd': 0.0, 'yStd': 0.0, 'zStd': 0.0, 'valid': True}
|
||||
msg.livePose.orientationNED = meas
|
||||
msg.livePose.velocityDevice = meas
|
||||
msg.livePose.angularVelocityDevice = meas
|
||||
msg.livePose.accelerationDevice = meas
|
||||
msg.livePose.inputsOK = True
|
||||
msg.livePose.posenetOK = True
|
||||
msg.livePose.sensorsOK = True
|
||||
return msg
|
||||
1
common/model.h
Normal file
1
common/model.h
Normal file
@ -0,0 +1 @@
|
||||
#define DEFAULT_MODEL "Steam Powered (Default)"
|
||||
91
common/params.h
Normal file
91
common/params.h
Normal file
@ -0,0 +1,91 @@
|
||||
#pragma once
|
||||
|
||||
#include <future>
|
||||
#include <map>
|
||||
#include <optional>
|
||||
#include <string>
|
||||
#include <tuple>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
#include "common/queue.h"
|
||||
|
||||
enum ParamKeyFlag {
|
||||
PERSISTENT = 0x02,
|
||||
CLEAR_ON_MANAGER_START = 0x04,
|
||||
CLEAR_ON_ONROAD_TRANSITION = 0x08,
|
||||
CLEAR_ON_OFFROAD_TRANSITION = 0x10,
|
||||
DONT_LOG = 0x20,
|
||||
DEVELOPMENT_ONLY = 0x40,
|
||||
CLEAR_ON_IGNITION_ON = 0x80,
|
||||
BACKUP = 0x100,
|
||||
ALL = 0xFFFFFFFF
|
||||
};
|
||||
|
||||
enum ParamKeyType {
|
||||
STRING = 0, // must be utf-8 decodable
|
||||
BOOL = 1,
|
||||
INT = 2,
|
||||
FLOAT = 3,
|
||||
TIME = 4, // ISO 8601
|
||||
JSON = 5,
|
||||
BYTES = 6
|
||||
};
|
||||
|
||||
struct ParamKeyAttributes {
|
||||
uint32_t flags;
|
||||
ParamKeyType type;
|
||||
std::optional<std::string> default_value = std::nullopt;
|
||||
};
|
||||
|
||||
class Params {
|
||||
public:
|
||||
explicit Params(const std::string &path = {});
|
||||
~Params();
|
||||
// Not copyable.
|
||||
Params(const Params&) = delete;
|
||||
Params& operator=(const Params&) = delete;
|
||||
|
||||
std::vector<std::string> allKeys(ParamKeyFlag flag = ALL) const;
|
||||
bool checkKey(const std::string &key);
|
||||
ParamKeyFlag getKeyFlag(const std::string &key);
|
||||
ParamKeyType getKeyType(const std::string &key);
|
||||
std::optional<std::string> getKeyDefaultValue(const std::string &key);
|
||||
inline std::string getParamPath(const std::string &key = {}) {
|
||||
return params_path + params_prefix + (key.empty() ? "" : "/" + key);
|
||||
}
|
||||
|
||||
// Delete a value
|
||||
int remove(const std::string &key);
|
||||
void clearAll(ParamKeyFlag flag);
|
||||
|
||||
// helpers for reading values
|
||||
std::string get(const std::string &key, bool block = false);
|
||||
inline bool getBool(const std::string &key, bool block = false) {
|
||||
return get(key, block) == "1";
|
||||
}
|
||||
std::map<std::string, std::string> readAll();
|
||||
|
||||
// helpers for writing values
|
||||
int put(const char *key, const char *val, size_t value_size);
|
||||
inline int put(const std::string &key, const std::string &val) {
|
||||
return put(key.c_str(), val.data(), val.size());
|
||||
}
|
||||
inline int putBool(const std::string &key, bool val) {
|
||||
return put(key.c_str(), val ? "1" : "0", 1);
|
||||
}
|
||||
void putNonBlocking(const std::string &key, const std::string &val);
|
||||
inline void putBoolNonBlocking(const std::string &key, bool val) {
|
||||
putNonBlocking(key, val ? "1" : "0");
|
||||
}
|
||||
|
||||
private:
|
||||
void asyncWriteThread();
|
||||
|
||||
std::string params_path;
|
||||
std::string params_prefix;
|
||||
|
||||
// for nonblocking write
|
||||
std::future<void> future;
|
||||
SafeQueue<std::pair<std::string, std::string>> queue;
|
||||
};
|
||||
19
common/params.py
Normal file
19
common/params.py
Normal file
@ -0,0 +1,19 @@
|
||||
from openpilot.common.params_pyx import Params, ParamKeyFlag, ParamKeyType, UnknownKeyName
|
||||
assert Params
|
||||
assert ParamKeyFlag
|
||||
assert ParamKeyType
|
||||
assert UnknownKeyName
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
|
||||
params = Params()
|
||||
key = sys.argv[1]
|
||||
assert params.check_key(key), f"unknown param: {key}"
|
||||
|
||||
if len(sys.argv) == 3:
|
||||
val = sys.argv[2]
|
||||
print(f"SET: {key} = {val}")
|
||||
params.put(key, val)
|
||||
elif len(sys.argv) == 2:
|
||||
print(f"GET: {key} = {params.get(key)}")
|
||||
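Typical Python-side usage, with values cast according to each key's declared type in params_keys.h (the key names below come from that table; the values are illustrative):

from openpilot.common.params import Params

params = Params()
params.put_bool("IsMetric", True)            # blocks until the value is on disk
params.put("LongitudinalPersonality", 1)     # INT key, so an int is expected
metric = params.get_bool("IsMetric")
personality = params.get("LongitudinalPersonality", return_default=True)  # falls back to the key's default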
226
common/params_keys.h
Normal file
226
common/params_keys.h
Normal file
@ -0,0 +1,226 @@
|
||||
#pragma once
|
||||
|
||||
#include <string>
|
||||
#include <unordered_map>
|
||||
|
||||
#include "cereal/gen/cpp/log.capnp.h"
|
||||
|
||||
inline static std::unordered_map<std::string, ParamKeyAttributes> keys = {
|
||||
{"AccessToken", {CLEAR_ON_MANAGER_START | DONT_LOG, STRING}},
|
||||
{"AdbEnabled", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"AlwaysOnDM", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"ApiCache_Device", {PERSISTENT, STRING}},
|
||||
{"ApiCache_FirehoseStats", {PERSISTENT, JSON}},
|
||||
{"AssistNowToken", {PERSISTENT, STRING}},
|
||||
{"AthenadPid", {PERSISTENT, INT}},
|
||||
{"AthenadUploadQueue", {PERSISTENT, JSON}},
|
||||
{"AthenadRecentlyViewedRoutes", {PERSISTENT, STRING}},
|
||||
{"BootCount", {PERSISTENT, INT}},
|
||||
{"CalibrationParams", {PERSISTENT, BYTES}},
|
||||
{"CameraDebugExpGain", {CLEAR_ON_MANAGER_START, STRING}},
|
||||
{"CameraDebugExpTime", {CLEAR_ON_MANAGER_START, STRING}},
|
||||
{"CarBatteryCapacity", {PERSISTENT, INT}},
|
||||
{"CarParams", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, BYTES}},
|
||||
{"CarParamsCache", {CLEAR_ON_MANAGER_START, BYTES}},
|
||||
{"CarParamsPersistent", {PERSISTENT, BYTES}},
|
||||
{"CarParamsPrevRoute", {PERSISTENT, BYTES}},
|
||||
{"CompletedTrainingVersion", {PERSISTENT, STRING, "0"}},
|
||||
{"ControlsReady", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, BOOL}},
|
||||
{"CurrentBootlog", {PERSISTENT, STRING}},
|
||||
{"CurrentRoute", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, STRING}},
|
||||
{"DisableLogging", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, BOOL}},
|
||||
{"DisablePowerDown", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"DisableUpdates", {PERSISTENT | BACKUP, BOOL, "0"}},
|
||||
{"DisengageOnAccelerator", {PERSISTENT | BACKUP, BOOL, "0"}},
|
||||
{"DongleId", {PERSISTENT, STRING}},
|
||||
{"DoReboot", {CLEAR_ON_MANAGER_START, BOOL}},
|
||||
{"DoShutdown", {CLEAR_ON_MANAGER_START, BOOL}},
|
||||
{"DoUninstall", {CLEAR_ON_MANAGER_START, BOOL}},
|
||||
{"DriverTooDistracted", {CLEAR_ON_MANAGER_START | CLEAR_ON_IGNITION_ON, BOOL}},
|
||||
{"AlphaLongitudinalEnabled", {PERSISTENT | DEVELOPMENT_ONLY | BACKUP, BOOL}},
|
||||
{"ExperimentalMode", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"ExperimentalModeConfirmed", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"FirmwareQueryDone", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, BOOL}},
|
||||
{"ForcePowerDown", {PERSISTENT, BOOL}},
|
||||
{"GitBranch", {PERSISTENT, STRING}},
|
||||
{"GitCommit", {PERSISTENT, STRING}},
|
||||
{"GitCommitDate", {PERSISTENT, STRING}},
|
||||
{"GitDiff", {PERSISTENT, STRING}},
|
||||
{"GithubSshKeys", {PERSISTENT | BACKUP, STRING}},
|
||||
{"GithubUsername", {PERSISTENT | BACKUP, STRING}},
|
||||
{"GitRemote", {PERSISTENT, STRING}},
|
||||
{"GsmApn", {PERSISTENT | BACKUP, STRING}},
|
||||
{"GsmMetered", {PERSISTENT | BACKUP, BOOL, "1"}},
|
||||
{"GsmRoaming", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"HardwareSerial", {PERSISTENT, STRING}},
|
||||
{"HasAcceptedTerms", {PERSISTENT, STRING, "0"}},
|
||||
{"InstallDate", {PERSISTENT, TIME}},
|
||||
{"IsDriverViewEnabled", {CLEAR_ON_MANAGER_START, BOOL}},
|
||||
{"IsEngaged", {PERSISTENT, BOOL}},
|
||||
{"IsLdwEnabled", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"IsMetric", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"IsOffroad", {CLEAR_ON_MANAGER_START, BOOL}},
|
||||
{"IsOnroad", {PERSISTENT, BOOL}},
|
||||
{"IsRhdDetected", {PERSISTENT, BOOL}},
|
||||
{"IsReleaseBranch", {CLEAR_ON_MANAGER_START, BOOL}},
|
||||
{"IsTakingSnapshot", {CLEAR_ON_MANAGER_START, BOOL}},
|
||||
{"IsTestedBranch", {CLEAR_ON_MANAGER_START, BOOL}},
|
||||
{"JoystickDebugMode", {CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION, BOOL}},
|
||||
{"LanguageSetting", {PERSISTENT | BACKUP, STRING, "main_en"}},
|
||||
{"LastAthenaPingTime", {CLEAR_ON_MANAGER_START, INT}},
|
||||
{"LastGPSPosition", {PERSISTENT, STRING}},
|
||||
{"LastManagerExitReason", {CLEAR_ON_MANAGER_START, STRING}},
|
||||
{"LastOffroadStatusPacket", {CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION, JSON}},
|
||||
{"LastPowerDropDetected", {CLEAR_ON_MANAGER_START, STRING}},
|
||||
{"LastUpdateException", {CLEAR_ON_MANAGER_START, STRING}},
|
||||
{"LastUpdateRouteCount", {PERSISTENT, INT, "0"}},
|
||||
{"LastUpdateTime", {PERSISTENT, TIME}},
|
||||
{"LastUpdateUptimeOnroad", {PERSISTENT, FLOAT, "0.0"}},
|
||||
{"LiveDelay", {PERSISTENT | BACKUP, BYTES}},
|
||||
{"LiveParameters", {PERSISTENT, JSON}},
|
||||
{"LiveParametersV2", {PERSISTENT, BYTES}},
|
||||
{"LiveTorqueParameters", {PERSISTENT | DONT_LOG, BYTES}},
|
||||
{"LocationFilterInitialState", {PERSISTENT, BYTES}},
|
||||
{"LongitudinalManeuverMode", {CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION, BOOL}},
|
||||
{"LongitudinalPersonality", {PERSISTENT | BACKUP, INT, std::to_string(static_cast<int>(cereal::LongitudinalPersonality::STANDARD))}},
|
||||
{"NetworkMetered", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"ObdMultiplexingChanged", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, BOOL}},
|
||||
{"ObdMultiplexingEnabled", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, BOOL}},
|
||||
{"Offroad_CarUnrecognized", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, JSON}},
|
||||
{"Offroad_ConnectivityNeeded", {CLEAR_ON_MANAGER_START, JSON}},
|
||||
{"Offroad_ConnectivityNeededPrompt", {CLEAR_ON_MANAGER_START, JSON}},
|
||||
{"Offroad_ExcessiveActuation", {PERSISTENT, JSON}},
|
||||
{"Offroad_IsTakingSnapshot", {CLEAR_ON_MANAGER_START, JSON}},
|
||||
{"Offroad_NeosUpdate", {CLEAR_ON_MANAGER_START, JSON}},
|
||||
{"Offroad_NoFirmware", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, JSON}},
|
||||
{"Offroad_Recalibration", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, JSON}},
|
||||
{"Offroad_StorageMissing", {CLEAR_ON_MANAGER_START, JSON}},
|
||||
{"Offroad_TemperatureTooHigh", {CLEAR_ON_MANAGER_START, JSON}},
|
||||
{"Offroad_UnregisteredHardware", {CLEAR_ON_MANAGER_START, JSON}},
|
||||
{"Offroad_UpdateFailed", {CLEAR_ON_MANAGER_START, JSON}},
|
||||
{"OnroadCycleRequested", {CLEAR_ON_MANAGER_START, BOOL}},
|
||||
{"OpenpilotEnabledToggle", {PERSISTENT | BACKUP, BOOL, "1"}},
|
||||
{"PandaHeartbeatLost", {CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION, BOOL}},
|
||||
{"PandaSomResetTriggered", {CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION, BOOL}},
|
||||
{"PandaSignatures", {CLEAR_ON_MANAGER_START, BYTES}},
|
||||
{"PrimeType", {PERSISTENT, INT}},
|
||||
{"RecordAudio", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"RecordAudioFeedback", {PERSISTENT | BACKUP, BOOL, "0"}},
|
||||
{"RecordFront", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"RecordFrontLock", {PERSISTENT, BOOL}}, // for the internal fleet
|
||||
{"SecOCKey", {PERSISTENT | DONT_LOG | BACKUP, STRING}},
|
||||
{"RouteCount", {PERSISTENT, INT, "0"}},
|
||||
{"SnoozeUpdate", {CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION, BOOL}},
|
||||
{"SshEnabled", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"TermsVersion", {PERSISTENT, STRING}},
|
||||
{"TrainingVersion", {PERSISTENT, STRING}},
|
||||
{"UbloxAvailable", {PERSISTENT, BOOL}},
|
||||
{"UpdateAvailable", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, BOOL}},
|
||||
{"UpdateFailedCount", {CLEAR_ON_MANAGER_START, INT}},
|
||||
{"UpdaterAvailableBranches", {PERSISTENT, STRING}},
|
||||
{"UpdaterCurrentDescription", {CLEAR_ON_MANAGER_START, STRING}},
|
||||
{"UpdaterCurrentReleaseNotes", {CLEAR_ON_MANAGER_START, BYTES}},
|
||||
{"UpdaterFetchAvailable", {CLEAR_ON_MANAGER_START, BOOL}},
|
||||
{"UpdaterNewDescription", {CLEAR_ON_MANAGER_START, STRING}},
|
||||
{"UpdaterNewReleaseNotes", {CLEAR_ON_MANAGER_START, BYTES}},
|
||||
{"UpdaterState", {CLEAR_ON_MANAGER_START, STRING}},
|
||||
{"UpdaterTargetBranch", {CLEAR_ON_MANAGER_START, STRING}},
|
||||
{"UpdaterLastFetchTime", {PERSISTENT, TIME}},
|
||||
{"UptimeOffroad", {PERSISTENT, FLOAT, "0.0"}},
|
||||
{"UptimeOnroad", {PERSISTENT, FLOAT, "0.0"}},
|
||||
{"Version", {PERSISTENT, STRING}},
|
||||
|
||||
// --- sunnypilot params --- //
|
||||
{"ApiCache_DriveStats", {PERSISTENT, JSON}},
|
||||
{"AutoLaneChangeBsmDelay", {PERSISTENT | BACKUP, BOOL, "0"}},
|
||||
{"AutoLaneChangeTimer", {PERSISTENT | BACKUP, INT, "0"}},
|
||||
{"BlinkerMinLateralControlSpeed", {PERSISTENT | BACKUP, INT, "20"}}, // MPH or km/h
|
||||
{"BlinkerPauseLateralControl", {PERSISTENT | BACKUP, INT, "0"}},
|
||||
{"Brightness", {PERSISTENT | BACKUP, INT, "0"}},
|
||||
{"CarParamsSP", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, BYTES}},
|
||||
{"CarParamsSPCache", {CLEAR_ON_MANAGER_START, BYTES}},
|
||||
{"CarParamsSPPersistent", {PERSISTENT, BYTES}},
|
||||
{"CarPlatformBundle", {PERSISTENT | BACKUP, JSON}},
|
||||
{"ChevronInfo", {PERSISTENT | BACKUP, INT, "4"}},
|
||||
{"CustomAccIncrementsEnabled", {PERSISTENT | BACKUP, BOOL, "0"}},
|
||||
{"CustomAccLongPressIncrement", {PERSISTENT | BACKUP, INT, "5"}},
|
||||
{"CustomAccShortPressIncrement", {PERSISTENT | BACKUP, INT, "1"}},
|
||||
{"DeviceBootMode", {PERSISTENT | BACKUP, INT, "0"}},
|
||||
{"DevUIInfo", {PERSISTENT | BACKUP, INT, "0"}},
|
||||
{"EnableCopyparty", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"EnableGithubRunner", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"GithubRunnerSufficientVoltage", {CLEAR_ON_MANAGER_START , BOOL}},
|
||||
{"InteractivityTimeout", {PERSISTENT | BACKUP, INT, "0"}},
|
||||
{"IsDevelopmentBranch", {CLEAR_ON_MANAGER_START, BOOL}},
|
||||
{"MaxTimeOffroad", {PERSISTENT | BACKUP, INT, "1800"}},
|
||||
{"ModelRunnerTypeCache", {CLEAR_ON_ONROAD_TRANSITION, INT}},
|
||||
{"OffroadMode", {CLEAR_ON_MANAGER_START, BOOL}},
|
||||
{"Offroad_TiciSupport", {CLEAR_ON_MANAGER_START, JSON}},
|
||||
{"QuickBootToggle", {PERSISTENT | BACKUP, BOOL, "0"}},
|
||||
{"QuietMode", {PERSISTENT | BACKUP, BOOL, "0"}},
|
||||
{"RainbowMode", {PERSISTENT | BACKUP, BOOL, "0"}},
|
||||
{"ShowAdvancedControls", {PERSISTENT | BACKUP, BOOL, "0"}},
|
||||
|
||||
// MADS params
|
||||
{"Mads", {PERSISTENT | BACKUP, BOOL, "1"}},
|
||||
{"MadsMainCruiseAllowed", {PERSISTENT | BACKUP, BOOL, "1"}},
|
||||
{"MadsSteeringMode", {PERSISTENT | BACKUP, INT, "0"}},
|
||||
{"MadsUnifiedEngagementMode", {PERSISTENT | BACKUP, BOOL, "1"}},
|
||||
|
||||
// Model Manager params
|
||||
{"ModelManager_ActiveBundle", {PERSISTENT, JSON}},
|
||||
{"ModelManager_ClearCache", {CLEAR_ON_MANAGER_START, BOOL}},
|
||||
{"ModelManager_DownloadIndex", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, INT, "0"}},
|
||||
{"ModelManager_Favs", {PERSISTENT | BACKUP, STRING}},
|
||||
{"ModelManager_LastSyncTime", {CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION, INT, "0"}},
|
||||
{"ModelManager_ModelsCache", {PERSISTENT | BACKUP, JSON}},
|
||||
|
||||
// Neural Network Lateral Control
|
||||
{"NeuralNetworkLateralControl", {PERSISTENT | BACKUP, BOOL, "0"}},
|
||||
|
||||
// sunnylink params
|
||||
{"EnableSunnylinkUploader", {PERSISTENT | BACKUP, BOOL}},
|
||||
{"LastSunnylinkPingTime", {CLEAR_ON_MANAGER_START, INT}},
|
||||
{"SunnylinkCache_Roles", {PERSISTENT, STRING}},
|
||||
{"SunnylinkCache_Users", {PERSISTENT, STRING}},
|
||||
{"SunnylinkDongleId", {PERSISTENT, STRING}},
|
||||
{"SunnylinkdPid", {PERSISTENT, INT}},
|
||||
{"SunnylinkEnabled", {PERSISTENT, BOOL}},
|
||||
|
||||
// Backup Manager params
|
||||
{"BackupManager_CreateBackup", {PERSISTENT, BOOL}},
|
||||
{"BackupManager_RestoreVersion", {PERSISTENT, STRING}},
|
||||
|
||||
// sunnypilot car specific params
|
||||
{"HyundaiLongitudinalTuning", {PERSISTENT | BACKUP, INT, "0"}},
|
||||
|
||||
{"DynamicExperimentalControl", {PERSISTENT | BACKUP, BOOL, "0"}},
|
||||
{"BlindSpot", {PERSISTENT | BACKUP, BOOL, "0"}},
|
||||
|
||||
// sunnypilot model params
|
||||
{"LagdToggle", {PERSISTENT | BACKUP, BOOL, "1"}},
|
||||
{"LagdToggleDelay", {PERSISTENT | BACKUP, FLOAT, "0.2"}},
|
||||
{"LagdValueCache", {PERSISTENT, FLOAT, "0.2"}},
|
||||
{"LaneTurnDesire", {PERSISTENT | BACKUP, BOOL, "0"}},
|
||||
{"LaneTurnValue", {PERSISTENT | BACKUP, FLOAT, "19.0"}},
|
||||
|
||||
// mapd
|
||||
{"MapAdvisorySpeedLimit", {CLEAR_ON_ONROAD_TRANSITION, FLOAT}},
|
||||
{"MapdVersion", {PERSISTENT, STRING}},
|
||||
{"MapSpeedLimit", {CLEAR_ON_ONROAD_TRANSITION, FLOAT, "0.0"}},
|
||||
{"NextMapSpeedLimit", {CLEAR_ON_ONROAD_TRANSITION, JSON}},
|
||||
{"Offroad_OSMUpdateRequired", {CLEAR_ON_MANAGER_START, JSON}},
|
||||
{"OsmDbUpdatesCheck", {CLEAR_ON_MANAGER_START, BOOL}}, // mapd database update happens with device ON, reset on boot
|
||||
{"OSMDownloadBounds", {PERSISTENT, STRING}},
|
||||
{"OsmDownloadedDate", {PERSISTENT, STRING, "0.0"}},
|
||||
{"OSMDownloadLocations", {PERSISTENT, JSON}},
|
||||
{"OSMDownloadProgress", {CLEAR_ON_MANAGER_START, JSON}},
|
||||
{"OsmLocal", {PERSISTENT, BOOL}},
|
||||
{"OsmLocationName", {PERSISTENT, STRING}},
|
||||
{"OsmLocationTitle", {PERSISTENT, STRING}},
|
||||
{"OsmLocationUrl", {PERSISTENT, STRING}},
|
||||
{"OsmStateName", {PERSISTENT, STRING, "All"}},
|
||||
{"OsmStateTitle", {PERSISTENT, STRING}},
|
||||
{"OsmWayTest", {PERSISTENT, STRING}},
|
||||
{"RoadName", {CLEAR_ON_ONROAD_TRANSITION, STRING}},
|
||||
};
|
||||
17837
common/params_pyx.cpp
Normal file
17837
common/params_pyx.cpp
Normal file
File diff suppressed because it is too large
197
common/params_pyx.pyx
Normal file
197
common/params_pyx.pyx
Normal file
@ -0,0 +1,197 @@
|
||||
# distutils: language = c++
|
||||
# cython: language_level = 3
|
||||
import builtins
|
||||
import datetime
|
||||
import json
|
||||
from libcpp cimport bool
|
||||
from libcpp.string cimport string
|
||||
from libcpp.vector cimport vector
|
||||
from libcpp.optional cimport optional
|
||||
|
||||
from openpilot.common.swaglog import cloudlog
|
||||
|
||||
cdef extern from "common/params.h":
|
||||
cpdef enum ParamKeyFlag:
|
||||
PERSISTENT
|
||||
CLEAR_ON_MANAGER_START
|
||||
CLEAR_ON_ONROAD_TRANSITION
|
||||
CLEAR_ON_OFFROAD_TRANSITION
|
||||
DEVELOPMENT_ONLY
|
||||
CLEAR_ON_IGNITION_ON
|
||||
BACKUP
|
||||
ALL
|
||||
|
||||
cpdef enum ParamKeyType:
|
||||
STRING
|
||||
BOOL
|
||||
INT
|
||||
FLOAT
|
||||
TIME
|
||||
JSON
|
||||
BYTES
|
||||
|
||||
cdef cppclass c_Params "Params":
|
||||
c_Params(string) except + nogil
|
||||
string get(string, bool) nogil
|
||||
bool getBool(string, bool) nogil
|
||||
int remove(string) nogil
|
||||
int put(string, string) nogil
|
||||
void putNonBlocking(string, string) nogil
|
||||
void putBoolNonBlocking(string, bool) nogil
|
||||
int putBool(string, bool) nogil
|
||||
bool checkKey(string) nogil
|
||||
ParamKeyType getKeyType(string) nogil
|
||||
optional[string] getKeyDefaultValue(string) nogil
|
||||
string getParamPath(string) nogil
|
||||
void clearAll(ParamKeyFlag)
|
||||
vector[string] allKeys(ParamKeyFlag)
|
||||
|
||||
PYTHON_2_CPP = {
|
||||
(str, STRING): lambda v: v,
|
||||
(builtins.bool, BOOL): lambda v: "1" if v else "0",
|
||||
(int, INT): str,
|
||||
(float, FLOAT): str,
|
||||
(datetime.datetime, TIME): lambda v: v.isoformat(),
|
||||
(dict, JSON): json.dumps,
|
||||
(list, JSON): json.dumps,
|
||||
(bytes, BYTES): lambda v: v,
|
||||
}
|
||||
CPP_2_PYTHON = {
|
||||
STRING: lambda v: v.decode("utf-8"),
|
||||
BOOL: lambda v: v == b"1",
|
||||
INT: int,
|
||||
FLOAT: float,
|
||||
TIME: lambda v: datetime.datetime.fromisoformat(v.decode("utf-8")),
|
||||
JSON: json.loads,
|
||||
BYTES: lambda v: v,
|
||||
}
|
||||
|
||||
def ensure_bytes(v):
|
||||
return v.encode() if isinstance(v, str) else v
|
||||
|
||||
class UnknownKeyName(Exception):
|
||||
pass
|
||||
|
||||
cdef class Params:
|
||||
cdef c_Params* p
|
||||
cdef str d
|
||||
|
||||
def __cinit__(self, d=""):
|
||||
cdef string path = <string>d.encode()
|
||||
with nogil:
|
||||
self.p = new c_Params(path)
|
||||
self.d = d
|
||||
|
||||
def __reduce__(self):
|
||||
return (type(self), (self.d,))
|
||||
|
||||
def __dealloc__(self):
|
||||
del self.p
|
||||
|
||||
def clear_all(self, tx_flag=ParamKeyFlag.ALL):
|
||||
self.p.clearAll(tx_flag)
|
||||
|
||||
def check_key(self, key):
|
||||
key = ensure_bytes(key)
|
||||
if not self.p.checkKey(key):
|
||||
raise UnknownKeyName(key)
|
||||
return key
|
||||
|
||||
def python2cpp(self, proposed_type, expected_type, value, key):
|
||||
cast = PYTHON_2_CPP.get((proposed_type, expected_type))
|
||||
if cast:
|
||||
return cast(value)
|
||||
raise TypeError(f"Type mismatch while writing param {key}: {proposed_type=} {expected_type=} {value=}")
|
||||
|
||||
def _cpp2python(self, t, value, default, key):
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
return CPP_2_PYTHON[t](value)
|
||||
except (KeyError, TypeError, ValueError):
|
||||
cloudlog.warning(f"Failed to cast param {key} with {value=} from type {t=}")
|
||||
return self._cpp2python(t, default, None, key)
|
||||
|
||||
def get(self, key, bool block=False, bool return_default=False):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef ParamKeyType t = self.p.getKeyType(k)
|
||||
cdef optional[string] default = self.p.getKeyDefaultValue(k)
|
||||
cdef string val
|
||||
with nogil:
|
||||
val = self.p.get(k, block)
|
||||
|
||||
default_val = (default.value() if default.has_value() else None) if return_default else None
|
||||
if val == b"":
|
||||
if block:
|
||||
# If we got no value while running in blocked mode
|
||||
# it means we got an interrupt while waiting
|
||||
raise KeyboardInterrupt
|
||||
else:
|
||||
return self._cpp2python(t, default_val, None, key)
|
||||
return self._cpp2python(t, val, default_val, key)
|
||||
|
||||
def get_bool(self, key, bool block=False):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef bool r
|
||||
with nogil:
|
||||
r = self.p.getBool(k, block)
|
||||
return r
|
||||
|
||||
def _put_cast(self, key, dat):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef ParamKeyType t = self.p.getKeyType(k)
|
||||
return ensure_bytes(self.python2cpp(type(dat), t, dat, key))
|
||||
|
||||
def put(self, key, dat):
|
||||
"""
|
||||
Warning: This function blocks until the param is written to disk!
|
||||
In very rare cases this can take over a second, and your code will hang.
|
||||
Use put_nonblocking or put_bool_nonblocking in time-sensitive code, but
|
||||
in general try to avoid writing params as much as possible.
|
||||
"""
|
||||
cdef string k = self.check_key(key)
|
||||
cdef string dat_bytes = self._put_cast(key, dat)
|
||||
with nogil:
|
||||
self.p.put(k, dat_bytes)
|
||||
|
||||
def put_bool(self, key, bool val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putBool(k, val)
|
||||
|
||||
def put_nonblocking(self, key, dat):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef string dat_bytes = self._put_cast(key, dat)
|
||||
with nogil:
|
||||
self.p.putNonBlocking(k, dat_bytes)
|
||||
|
||||
def put_bool_nonblocking(self, key, bool val):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.putBoolNonBlocking(k, val)
|
||||
|
||||
def remove(self, key):
|
||||
cdef string k = self.check_key(key)
|
||||
with nogil:
|
||||
self.p.remove(k)
|
||||
|
||||
def get_param_path(self, key=""):
|
||||
cdef string key_bytes = ensure_bytes(key)
|
||||
return self.p.getParamPath(key_bytes).decode("utf-8")
|
||||
|
||||
def get_type(self, key):
|
||||
return self.p.getKeyType(self.check_key(key))
|
||||
|
||||
def all_keys(self, flag=ParamKeyFlag.ALL):
|
||||
return self.p.allKeys(flag)
|
||||
|
||||
def get_default_value(self, key):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef ParamKeyType t = self.p.getKeyType(k)
|
||||
cdef optional[string] default = self.p.getKeyDefaultValue(k)
|
||||
return self._cpp2python(t, default.value(), None, key) if default.has_value() else None
|
||||
|
||||
def cpp2python(self, key, value):
|
||||
cdef string k = self.check_key(key)
|
||||
cdef ParamKeyType t = self.p.getKeyType(k)
|
||||
return self._cpp2python(t, value, None, key)
|
||||
BIN
common/params_pyx.so
Executable file
BIN
common/params_pyx.so
Executable file
Binary file not shown.
64
common/pid.py
Normal file
64
common/pid.py
Normal file
@ -0,0 +1,64 @@
|
||||
import numpy as np
|
||||
from numbers import Number
|
||||
|
||||
class PIDController:
|
||||
def __init__(self, k_p, k_i, k_f=0., k_d=0., pos_limit=1e308, neg_limit=-1e308, rate=100):
|
||||
self._k_p = k_p
|
||||
self._k_i = k_i
|
||||
self._k_d = k_d
|
||||
self.k_f = k_f # feedforward gain
|
||||
if isinstance(self._k_p, Number):
|
||||
self._k_p = [[0], [self._k_p]]
|
||||
if isinstance(self._k_i, Number):
|
||||
self._k_i = [[0], [self._k_i]]
|
||||
if isinstance(self._k_d, Number):
|
||||
self._k_d = [[0], [self._k_d]]
|
||||
|
||||
self.set_limits(pos_limit, neg_limit)
|
||||
|
||||
self.i_rate = 1.0 / rate
|
||||
self.speed = 0.0
|
||||
|
||||
self.reset()
|
||||
|
||||
@property
|
||||
def k_p(self):
|
||||
return np.interp(self.speed, self._k_p[0], self._k_p[1])
|
||||
|
||||
@property
|
||||
def k_i(self):
|
||||
return np.interp(self.speed, self._k_i[0], self._k_i[1])
|
||||
|
||||
@property
|
||||
def k_d(self):
|
||||
return np.interp(self.speed, self._k_d[0], self._k_d[1])
|
||||
|
||||
def reset(self):
|
||||
self.p = 0.0
|
||||
self.i = 0.0
|
||||
self.d = 0.0
|
||||
self.f = 0.0
|
||||
self.control = 0
|
||||
|
||||
def set_limits(self, pos_limit, neg_limit):
|
||||
self.pos_limit = pos_limit
|
||||
self.neg_limit = neg_limit
|
||||
|
||||
def update(self, error, error_rate=0.0, speed=0.0, feedforward=0., freeze_integrator=False):
|
||||
self.speed = speed
|
||||
self.p = float(error) * self.k_p
|
||||
self.f = feedforward * self.k_f
|
||||
self.d = error_rate * self.k_d
|
||||
|
||||
if not freeze_integrator:
|
||||
i = self.i + error * self.k_i * self.i_rate
|
||||
|
||||
# Don't allow windup if already clipping
|
||||
test_control = self.p + i + self.d + self.f
|
||||
i_upperbound = self.i if test_control > self.pos_limit else self.pos_limit
|
||||
i_lowerbound = self.i if test_control < self.neg_limit else self.neg_limit
|
||||
self.i = np.clip(i, i_lowerbound, i_upperbound)
|
||||
|
||||
control = self.p + self.i + self.d + self.f
|
||||
self.control = np.clip(control, self.neg_limit, self.pos_limit)
|
||||
return self.control
|
||||
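A minimal closed-loop sketch for PIDController; the gains, limits and toy plant are illustrative only. speed is passed so the gain breakpoint interpolation has something to work with:

from openpilot.common.pid import PIDController

pid = PIDController(k_p=0.2, k_i=0.05, rate=100)
pid.set_limits(1.0, -1.0)

target, actual = 30.0, 28.0
for _ in range(100):
  u = pid.update(target - actual, speed=actual)   # clipped to [-1, 1], with anti-windup on the integrator
  actual += 0.05 * u                              # stand-in plant response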
39
common/prefix.h
Normal file
39
common/prefix.h
Normal file
@ -0,0 +1,39 @@
|
||||
#pragma once
|
||||
|
||||
#include <cassert>
|
||||
#include <string>
|
||||
|
||||
#include "common/params.h"
|
||||
#include "common/util.h"
|
||||
#include "system/hardware/hw.h"
|
||||
|
||||
class OpenpilotPrefix {
|
||||
public:
|
||||
OpenpilotPrefix(std::string prefix = {}) {
|
||||
if (prefix.empty()) {
|
||||
prefix = util::random_string(15);
|
||||
}
|
||||
msgq_path = Path::shm_path() + "/" + prefix;
|
||||
bool ret = util::create_directories(msgq_path, 0777);
|
||||
assert(ret);
|
||||
setenv("OPENPILOT_PREFIX", prefix.c_str(), 1);
|
||||
}
|
||||
|
||||
~OpenpilotPrefix() {
|
||||
auto param_path = Params().getParamPath();
|
||||
if (util::file_exists(param_path)) {
|
||||
std::string real_path = util::readlink(param_path);
|
||||
system(util::string_format("rm %s -rf", real_path.c_str()).c_str());
|
||||
unlink(param_path.c_str());
|
||||
}
|
||||
if (getenv("COMMA_CACHE") == nullptr) {
|
||||
system(util::string_format("rm %s -rf", Path::download_cache_root().c_str()).c_str());
|
||||
}
|
||||
system(util::string_format("rm %s -rf", Path::comma_home().c_str()).c_str());
|
||||
system(util::string_format("rm %s -rf", msgq_path.c_str()).c_str());
|
||||
unsetenv("OPENPILOT_PREFIX");
|
||||
}
|
||||
|
||||
private:
|
||||
std::string msgq_path;
|
||||
};
|
||||
59
common/prefix.py
Normal file
59
common/prefix.py
Normal file
@ -0,0 +1,59 @@
|
||||
import os
|
||||
import shutil
|
||||
import uuid
|
||||
|
||||
|
||||
from openpilot.common.params import Params
|
||||
from openpilot.system.hardware import PC
|
||||
from openpilot.system.hardware.hw import Paths
|
||||
from openpilot.system.hardware.hw import DEFAULT_DOWNLOAD_CACHE_ROOT
|
||||
|
||||
class OpenpilotPrefix:
|
||||
def __init__(self, prefix: str = None, create_dirs_on_enter: bool = True, clean_dirs_on_exit: bool = True, shared_download_cache: bool = False):
|
||||
self.prefix = prefix if prefix else str(uuid.uuid4().hex[0:15])
|
||||
self.msgq_path = os.path.join(Paths.shm_path(), self.prefix)
|
||||
self.create_dirs_on_enter = create_dirs_on_enter
|
||||
self.clean_dirs_on_exit = clean_dirs_on_exit
|
||||
self.shared_download_cache = shared_download_cache
|
||||
|
||||
def __enter__(self):
|
||||
self.original_prefix = os.environ.get('OPENPILOT_PREFIX', None)
|
||||
os.environ['OPENPILOT_PREFIX'] = self.prefix
|
||||
|
||||
if self.create_dirs_on_enter:
|
||||
self.create_dirs()
|
||||
|
||||
if self.shared_download_cache:
|
||||
os.environ["COMMA_CACHE"] = DEFAULT_DOWNLOAD_CACHE_ROOT
|
||||
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_obj, exc_tb):
|
||||
if self.clean_dirs_on_exit:
|
||||
self.clean_dirs()
|
||||
try:
|
||||
del os.environ['OPENPILOT_PREFIX']
|
||||
if self.original_prefix is not None:
|
||||
os.environ['OPENPILOT_PREFIX'] = self.original_prefix
|
||||
except KeyError:
|
||||
pass
|
||||
return False
|
||||
|
||||
def create_dirs(self):
|
||||
try:
|
||||
os.mkdir(self.msgq_path)
|
||||
except FileExistsError:
|
||||
pass
|
||||
os.makedirs(Paths.log_root(), exist_ok=True)
|
||||
|
||||
def clean_dirs(self):
|
||||
symlink_path = Params().get_param_path()
|
||||
if os.path.exists(symlink_path):
|
||||
shutil.rmtree(os.path.realpath(symlink_path), ignore_errors=True)
|
||||
os.remove(symlink_path)
|
||||
shutil.rmtree(self.msgq_path, ignore_errors=True)
|
||||
if PC:
|
||||
shutil.rmtree(Paths.log_root(), ignore_errors=True)
|
||||
if not os.environ.get("COMMA_CACHE", False):
|
||||
shutil.rmtree(Paths.download_cache_root(), ignore_errors=True)
|
||||
shutil.rmtree(Paths.comma_home(), ignore_errors=True)
|
||||
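OpenpilotPrefix is the Python counterpart of common/prefix.h: it namespaces msgq and params under a random prefix and, by default, removes those directories on exit, which is how tests get isolated state. Sketch:

from openpilot.common.params import Params
from openpilot.common.prefix import OpenpilotPrefix

with OpenpilotPrefix() as prefix:
  Params().put_bool("IsMetric", True)   # written under this prefix only
# prefix directories are cleaned up here (clean_dirs_on_exit=True by default)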
52
common/queue.h
Normal file
52
common/queue.h
Normal file
@ -0,0 +1,52 @@
|
||||
#pragma once
|
||||
|
||||
#include <condition_variable>
|
||||
#include <mutex>
|
||||
#include <queue>
|
||||
|
||||
template <class T>
|
||||
class SafeQueue {
|
||||
public:
|
||||
SafeQueue() = default;
|
||||
|
||||
void push(const T& v) {
|
||||
{
|
||||
std::unique_lock lk(m);
|
||||
q.push(v);
|
||||
}
|
||||
cv.notify_one();
|
||||
}
|
||||
|
||||
T pop() {
|
||||
std::unique_lock lk(m);
|
||||
cv.wait(lk, [this] { return !q.empty(); });
|
||||
T v = q.front();
|
||||
q.pop();
|
||||
return v;
|
||||
}
|
||||
|
||||
bool try_pop(T& v, int timeout_ms = 0) {
|
||||
std::unique_lock lk(m);
|
||||
if (!cv.wait_for(lk, std::chrono::milliseconds(timeout_ms), [this] { return !q.empty(); })) {
|
||||
return false;
|
||||
}
|
||||
v = q.front();
|
||||
q.pop();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool empty() const {
|
||||
std::scoped_lock lk(m);
|
||||
return q.empty();
|
||||
}
|
||||
|
||||
size_t size() const {
|
||||
std::scoped_lock lk(m);
|
||||
return q.size();
|
||||
}
|
||||
|
||||
private:
|
||||
mutable std::mutex m;
|
||||
std::condition_variable cv;
|
||||
std::queue<T> q;
|
||||
};
|
||||
23
common/ratekeeper.h
Normal file
23
common/ratekeeper.h
Normal file
@ -0,0 +1,23 @@
|
||||
#pragma once
|
||||
|
||||
#include <cstdint>
|
||||
#include <string>
|
||||
|
||||
class RateKeeper {
|
||||
public:
|
||||
RateKeeper(const std::string &name, float rate, float print_delay_threshold = 0);
|
||||
~RateKeeper() {}
|
||||
bool keepTime();
|
||||
bool monitorTime();
|
||||
inline uint64_t frame() const { return frame_; }
|
||||
inline double remaining() const { return remaining_; }
|
||||
|
||||
private:
|
||||
double interval;
|
||||
double next_frame_time;
|
||||
double last_monitor_time;
|
||||
double remaining_ = 0;
|
||||
float print_delay_threshold = 0;
|
||||
uint64_t frame_ = 0;
|
||||
std::string name;
|
||||
};
|
||||
96
common/realtime.py
Normal file
96
common/realtime.py
Normal file
@ -0,0 +1,96 @@
|
||||
"""Utilities for reading real time clocks and keeping soft real time constraints."""
|
||||
import gc
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
from setproctitle import getproctitle
|
||||
|
||||
from openpilot.common.util import MovingAverage
|
||||
from openpilot.system.hardware import PC
|
||||
|
||||
|
||||
# time step for each process
|
||||
DT_CTRL = 0.01 # controlsd
|
||||
DT_MDL = 0.05 # model
|
||||
DT_HW = 0.5 # hardwared and manager
|
||||
DT_DMON = 0.05 # driver monitoring
|
||||
|
||||
|
||||
class Priority:
|
||||
# CORE 2
|
||||
# - modeld = 55
|
||||
# - camerad = 54
|
||||
CTRL_LOW = 51 # plannerd & radard
|
||||
|
||||
# CORE 3
|
||||
# - pandad = 55
|
||||
CTRL_HIGH = 53
|
||||
|
||||
|
||||
def set_core_affinity(cores: list[int]) -> None:
|
||||
if sys.platform == 'linux' and not PC:
|
||||
os.sched_setaffinity(0, cores)
|
||||
|
||||
|
||||
def config_realtime_process(cores: int | list[int], priority: int) -> None:
|
||||
gc.disable()
|
||||
if sys.platform == 'linux' and not PC:
|
||||
os.sched_setscheduler(0, os.SCHED_FIFO, os.sched_param(priority))
|
||||
c = cores if isinstance(cores, list) else [cores, ]
|
||||
set_core_affinity(c)
|
||||
|
||||
|
||||
class Ratekeeper:
|
||||
def __init__(self, rate: float, print_delay_threshold: float | None = 0.0) -> None:
|
||||
"""Rate in Hz for ratekeeping. print_delay_threshold must be nonnegative."""
|
||||
self._interval = 1. / rate
|
||||
self._print_delay_threshold = print_delay_threshold
|
||||
self._frame = 0
|
||||
self._remaining = 0.0
|
||||
self._process_name = getproctitle()
|
||||
self._last_monitor_time = -1.
|
||||
self._next_frame_time = -1.
|
||||
|
||||
self.avg_dt = MovingAverage(100)
|
||||
self.avg_dt.add_value(self._interval)
|
||||
|
||||
@property
|
||||
def frame(self) -> int:
|
||||
return self._frame
|
||||
|
||||
@property
|
||||
def remaining(self) -> float:
|
||||
return self._remaining
|
||||
|
||||
@property
|
||||
def lagging(self) -> bool:
|
||||
expected_dt = self._interval * (1 / 0.9)
|
||||
return self.avg_dt.get_average() > expected_dt
|
||||
|
||||
# Maintain loop rate by calling this at the end of each loop
|
||||
def keep_time(self) -> bool:
|
||||
lagged = self.monitor_time()
|
||||
if self._remaining > 0:
|
||||
time.sleep(self._remaining)
|
||||
return lagged
|
||||
|
||||
# Monitors the cumulative lag, but does not enforce a rate
|
||||
def monitor_time(self) -> bool:
|
||||
if self._last_monitor_time < 0:
|
||||
self._next_frame_time = time.monotonic() + self._interval
|
||||
self._last_monitor_time = time.monotonic()
|
||||
|
||||
prev = self._last_monitor_time
|
||||
self._last_monitor_time = time.monotonic()
|
||||
self.avg_dt.add_value(self._last_monitor_time - prev)
|
||||
|
||||
lagged = False
|
||||
remaining = self._next_frame_time - time.monotonic()
|
||||
self._next_frame_time += self._interval
|
||||
if self._print_delay_threshold is not None and remaining < -self._print_delay_threshold:
|
||||
print(f"{self._process_name} lagging by {-remaining * 1000:.2f} ms")
|
||||
lagged = True
|
||||
self._frame += 1
|
||||
self._remaining = remaining
|
||||
return lagged
|
||||
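Ratekeeper enforces a soft loop rate: keep_time() sleeps out the remainder of each frame and returns True when the frame overran by more than print_delay_threshold. A 20 Hz sketch:

from openpilot.common.realtime import DT_MDL, Ratekeeper

rk = Ratekeeper(1 / DT_MDL, print_delay_threshold=0.01)  # 20 Hz
for _ in range(100):
  # ... one iteration of work ...
  lagged = rk.keep_time()   # sleeps the rest of the 50 ms frame, True if it overran by >10 ms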
30
common/retry.py
Normal file
30
common/retry.py
Normal file
@ -0,0 +1,30 @@
|
||||
import time
|
||||
import functools
|
||||
|
||||
from openpilot.common.swaglog import cloudlog
|
||||
|
||||
|
||||
def retry(attempts=3, delay=1.0, ignore_failure=False):
|
||||
def decorator(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
for _ in range(attempts):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception:
|
||||
cloudlog.exception(f"{func.__name__} failed, trying again")
|
||||
time.sleep(delay)
|
||||
|
||||
if ignore_failure:
|
||||
cloudlog.error(f"{func.__name__} failed after retry")
|
||||
else:
|
||||
raise Exception(f"{func.__name__} failed after retry")
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
@retry(attempts=10)
|
||||
def abc():
|
||||
raise ValueError("abc failed :(")
|
||||
abc()
|
||||
28
common/run.py
Normal file
28
common/run.py
Normal file
@ -0,0 +1,28 @@
|
||||
import subprocess
|
||||
from contextlib import contextmanager
|
||||
from subprocess import Popen, PIPE, TimeoutExpired
|
||||
|
||||
|
||||
def run_cmd(cmd: list[str], cwd=None, env=None) -> str:
|
||||
return subprocess.check_output(cmd, encoding='utf8', cwd=cwd, env=env).strip()
|
||||
|
||||
|
||||
def run_cmd_default(cmd: list[str], default: str = "", cwd=None, env=None) -> str:
|
||||
try:
|
||||
return run_cmd(cmd, cwd=cwd, env=env)
|
||||
except subprocess.CalledProcessError:
|
||||
return default
|
||||
|
||||
|
||||
@contextmanager
|
||||
def managed_proc(cmd: list[str], env: dict[str, str]):
|
||||
proc = Popen(cmd, env=env, stdout=PIPE, stderr=PIPE)
|
||||
try:
|
||||
yield proc
|
||||
finally:
|
||||
if proc.poll() is None:
|
||||
proc.terminate()
|
||||
try:
|
||||
proc.wait(timeout=5)
|
||||
except TimeoutExpired:
|
||||
proc.kill()
|
||||
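run_cmd_default swallows CalledProcessError and returns the supplied default, while managed_proc guarantees the child is terminated (and killed after a 5 s grace period) when the block exits. Sketch with illustrative commands:

import os
from openpilot.common.run import managed_proc, run_cmd_default

branch = run_cmd_default(["git", "rev-parse", "--abbrev-ref", "HEAD"], default="unknown")

with managed_proc(["sleep", "30"], env=dict(os.environ)) as proc:
  pass  # proc.terminate()/kill() happens automatically on exit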
54
common/simple_kalman.py
Normal file
54
common/simple_kalman.py
Normal file
@ -0,0 +1,54 @@
|
||||
import numpy as np
|
||||
|
||||
|
||||
def get_kalman_gain(dt, A, C, Q, R, iterations=100):
|
||||
P = np.zeros_like(Q)
|
||||
for _ in range(iterations):
|
||||
P = A.dot(P).dot(A.T) + dt * Q
|
||||
S = C.dot(P).dot(C.T) + R
|
||||
K = P.dot(C.T).dot(np.linalg.inv(S))
|
||||
P = (np.eye(len(P)) - K.dot(C)).dot(P)
|
||||
return K
|
||||
|
||||
|
||||
class KF1D:
|
||||
# this EKF assumes constant covariance matrix, so calculations are much simpler
|
||||
# the Kalman gain also needs to be precomputed using the control module
|
||||
|
||||
def __init__(self, x0, A, C, K):
|
||||
self.x0_0 = x0[0][0]
|
||||
self.x1_0 = x0[1][0]
|
||||
self.A0_0 = A[0][0]
|
||||
self.A0_1 = A[0][1]
|
||||
self.A1_0 = A[1][0]
|
||||
self.A1_1 = A[1][1]
|
||||
self.C0_0 = C[0]
|
||||
self.C0_1 = C[1]
|
||||
self.K0_0 = K[0][0]
|
||||
self.K1_0 = K[1][0]
|
||||
|
||||
self.A_K_0 = self.A0_0 - self.K0_0 * self.C0_0
|
||||
self.A_K_1 = self.A0_1 - self.K0_0 * self.C0_1
|
||||
self.A_K_2 = self.A1_0 - self.K1_0 * self.C0_0
|
||||
self.A_K_3 = self.A1_1 - self.K1_0 * self.C0_1
|
||||
|
||||
# K matrix needs to be pre-computed as follow:
|
||||
# import control
|
||||
# (x, l, K) = control.dare(np.transpose(self.A), np.transpose(self.C), Q, R)
|
||||
# self.K = np.transpose(K)
|
||||
|
||||
def update(self, meas):
|
||||
#self.x = np.dot(self.A_K, self.x) + np.dot(self.K, meas)
|
||||
x0_0 = self.A_K_0 * self.x0_0 + self.A_K_1 * self.x1_0 + self.K0_0 * meas
|
||||
x1_0 = self.A_K_2 * self.x0_0 + self.A_K_3 * self.x1_0 + self.K1_0 * meas
|
||||
self.x0_0 = x0_0
|
||||
self.x1_0 = x1_0
|
||||
return [self.x0_0, self.x1_0]
|
||||
|
||||
@property
|
||||
def x(self):
|
||||
return [[self.x0_0], [self.x1_0]]
|
||||
|
||||
def set_x(self, x):
|
||||
self.x0_0 = x[0][0]
|
||||
self.x1_0 = x[1][0]
|
||||
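KF1D assumes a constant, precomputed gain; get_kalman_gain iterates the Riccati update to a steady-state K, which stands in for the control.dare() recipe in the comment above. A sketch with an illustrative constant-velocity model:

import numpy as np
from openpilot.common.simple_kalman import KF1D, get_kalman_gain

dt = 0.05
A = np.array([[1.0, dt], [0.0, 1.0]])     # constant-velocity state transition
C = np.array([[1.0, 0.0]])                # only position is measured
Q = np.diag([0.1, 1.0])                   # illustrative noise covariances
R = np.array([[1.0]])
K = get_kalman_gain(dt, A, C, Q, R)

kf = KF1D(x0=[[0.0], [0.0]], A=A, C=C[0], K=K)
for meas in (1.0, 1.1, 0.9):
  pos, vel = kf.update(meas)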
52
common/spinner.py
Executable file
52
common/spinner.py
Executable file
@ -0,0 +1,52 @@
import os
import subprocess
from openpilot.common.basedir import BASEDIR


class Spinner:
  def __init__(self):
    try:
      self.spinner_proc = subprocess.Popen(["./spinner.py"],
                                           stdin=subprocess.PIPE,
                                           cwd=os.path.join(BASEDIR, "system", "ui"),
                                           close_fds=True)
    except OSError:
      self.spinner_proc = None

  def __enter__(self):
    return self

  def update(self, spinner_text: str):
    if self.spinner_proc is not None:
      self.spinner_proc.stdin.write(spinner_text.encode('utf8') + b"\n")
      try:
        self.spinner_proc.stdin.flush()
      except BrokenPipeError:
        pass

  def update_progress(self, cur: float, total: float):
    self.update(str(round(100 * cur / total)))

  def close(self):
    if self.spinner_proc is not None:
      self.spinner_proc.kill()
      try:
        self.spinner_proc.communicate(timeout=2.)
      except subprocess.TimeoutExpired:
        print("WARNING: failed to kill spinner")
      self.spinner_proc = None

  def __del__(self):
    self.close()

  def __exit__(self, exc_type, exc_value, traceback):
    self.close()


if __name__ == "__main__":
  import time
  with Spinner() as s:
    s.update("Spinner text")
    time.sleep(5.0)
    print("gone")
    time.sleep(5.0)
73
common/stat_live.py
Normal file
73
common/stat_live.py
Normal file
@ -0,0 +1,73 @@
import numpy as np

class RunningStat:
  # tracks realtime mean and standard deviation without storing any data
  def __init__(self, priors=None, max_trackable=-1):
    self.max_trackable = max_trackable
    if priors is not None:
      # initialize from history
      self.M = priors[0]
      self.S = priors[1]
      self.n = priors[2]
      self.M_last = self.M
      self.S_last = self.S

    else:
      self.reset()

  def reset(self):
    self.M = 0.
    self.S = 0.
    self.M_last = 0.
    self.S_last = 0.
    self.n = 0

  def push_data(self, new_data):
    # short term memory hack
    if self.max_trackable < 0 or self.n < self.max_trackable:
      self.n += 1
    if self.n == 0:
      self.M_last = new_data
      self.M = self.M_last
      self.S_last = 0.
    else:
      self.M = self.M_last + (new_data - self.M_last) / self.n
      self.S = self.S_last + (new_data - self.M_last) * (new_data - self.M)
      self.M_last = self.M
      self.S_last = self.S

  def mean(self):
    return self.M

  def variance(self):
    if self.n >= 2:
      return self.S / (self.n - 1.)
    else:
      return 0

  def std(self):
    return np.sqrt(self.variance())

  def params_to_save(self):
    return [self.M, self.S, self.n]

class RunningStatFilter:
  def __init__(self, raw_priors=None, filtered_priors=None, max_trackable=-1):
    self.raw_stat = RunningStat(raw_priors, -1)
    self.filtered_stat = RunningStat(filtered_priors, max_trackable)

  def reset(self):
    self.raw_stat.reset()
    self.filtered_stat.reset()

  def push_and_update(self, new_data):
    _std_last = self.raw_stat.std()
    self.raw_stat.push_data(new_data)
    _delta_std = self.raw_stat.std() - _std_last
    if _delta_std <= 0:
      self.filtered_stat.push_data(new_data)
    else:
      pass
      # self.filtered_stat.push_data(self.filtered_stat.mean())

# class SequentialBayesian():
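A small self-check sketch for `RunningStat`: its running mean and standard deviation should agree with numpy's sample statistics, and the `params_to_save()` priors let a later instance resume the same state:

```
import numpy as np
from openpilot.common.stat_live import RunningStat

data = [1.0, 2.0, 3.0, 4.0, 5.0]
rs = RunningStat()
for x in data:
  rs.push_data(x)

assert np.isclose(rs.mean(), np.mean(data))
assert np.isclose(rs.std(), np.std(data, ddof=1))  # sample std, n - 1 in the denominator

# priors = [M, S, n] from params_to_save() resume the same statistics
rs2 = RunningStat(priors=rs.params_to_save())
assert np.isclose(rs2.mean(), rs.mean())
```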
76
common/swaglog.h
Normal file
76
common/swaglog.h
Normal file
@ -0,0 +1,76 @@
#pragma once

#include "common/timing.h"

#define CLOUDLOG_DEBUG 10
#define CLOUDLOG_INFO 20
#define CLOUDLOG_WARNING 30
#define CLOUDLOG_ERROR 40
#define CLOUDLOG_CRITICAL 50


#ifdef __GNUC__
#define SWAG_LOG_CHECK_FMT(a, b) __attribute__ ((format (printf, a, b)))
#else
#define SWAG_LOG_CHECK_FMT(a, b)
#endif

void cloudlog_e(int levelnum, const char* filename, int lineno, const char* func,
                const char* fmt, ...) SWAG_LOG_CHECK_FMT(5, 6);

void cloudlog_te(int levelnum, const char* filename, int lineno, const char* func,
                 const char* fmt, ...) SWAG_LOG_CHECK_FMT(5, 6);

void cloudlog_te(int levelnum, const char* filename, int lineno, const char* func,
                 uint32_t frame_id, const char* fmt, ...) SWAG_LOG_CHECK_FMT(6, 7);


#define cloudlog(lvl, fmt, ...) cloudlog_e(lvl, __FILE__, __LINE__, \
                                           __func__, \
                                           fmt, ## __VA_ARGS__)

#define cloudlog_t(lvl, ...) cloudlog_te(lvl, __FILE__, __LINE__, \
                                         __func__, \
                                         __VA_ARGS__)


#define cloudlog_rl(burst, millis, lvl, fmt, ...) \
{ \
  static uint64_t __begin = 0; \
  static int __printed = 0; \
  static int __missed = 0; \
  \
  int __burst = (burst); \
  int __millis = (millis); \
  uint64_t __ts = nanos_since_boot(); \
  \
  if (!__begin) { __begin = __ts; } \
  \
  if (__begin + __millis*1000000ULL < __ts) { \
    if (__missed) { \
      cloudlog(CLOUDLOG_WARNING, "cloudlog: %d messages suppressed", __missed); \
    } \
    __begin = 0; \
    __printed = 0; \
    __missed = 0; \
  } \
  \
  if (__printed < __burst) { \
    cloudlog(lvl, fmt, ## __VA_ARGS__); \
    __printed++; \
  } else { \
    __missed++; \
  } \
}


#define LOGT(...) cloudlog_t(CLOUDLOG_DEBUG, __VA_ARGS__)
#define LOGD(fmt, ...) cloudlog(CLOUDLOG_DEBUG, fmt, ## __VA_ARGS__)
#define LOG(fmt, ...) cloudlog(CLOUDLOG_INFO, fmt, ## __VA_ARGS__)
#define LOGW(fmt, ...) cloudlog(CLOUDLOG_WARNING, fmt, ## __VA_ARGS__)
#define LOGE(fmt, ...) cloudlog(CLOUDLOG_ERROR, fmt, ## __VA_ARGS__)

#define LOGD_100(fmt, ...) cloudlog_rl(2, 100, CLOUDLOG_DEBUG, fmt, ## __VA_ARGS__)
#define LOG_100(fmt, ...) cloudlog_rl(2, 100, CLOUDLOG_INFO, fmt, ## __VA_ARGS__)
#define LOGW_100(fmt, ...) cloudlog_rl(2, 100, CLOUDLOG_WARNING, fmt, ## __VA_ARGS__)
#define LOGE_100(fmt, ...) cloudlog_rl(2, 100, CLOUDLOG_ERROR, fmt, ## __VA_ARGS__)
144
common/swaglog.py
Normal file
144
common/swaglog.py
Normal file
@ -0,0 +1,144 @@
import logging
import os
import time
import warnings
from pathlib import Path
from logging.handlers import BaseRotatingHandler

import zmq

from openpilot.common.logging_extra import SwagLogger, SwagFormatter, SwagLogFileFormatter
from openpilot.system.hardware.hw import Paths


def get_file_handler():
  Path(Paths.swaglog_root()).mkdir(parents=True, exist_ok=True)
  base_filename = os.path.join(Paths.swaglog_root(), "swaglog")
  handler = SwaglogRotatingFileHandler(base_filename)
  return handler

class SwaglogRotatingFileHandler(BaseRotatingHandler):
  def __init__(self, base_filename, interval=60, max_bytes=1024*256, backup_count=2500, encoding=None):
    super().__init__(base_filename, mode="a", encoding=encoding, delay=True)
    self.base_filename = base_filename
    self.interval = interval  # seconds
    self.max_bytes = max_bytes
    self.backup_count = backup_count
    self.log_files = self.get_existing_logfiles()
    log_indexes = [f.split(".")[-1] for f in self.log_files]
    self.last_file_idx = max([int(i) for i in log_indexes if i.isdigit()] or [-1])
    self.last_rollover = None
    self.doRollover()

  def _open(self):
    self.last_rollover = time.monotonic()
    self.last_file_idx += 1
    next_filename = f"{self.base_filename}.{self.last_file_idx:010}"
    stream = open(next_filename, self.mode, encoding=self.encoding)
    self.log_files.insert(0, next_filename)
    return stream

  def get_existing_logfiles(self):
    log_files = list()
    base_dir = os.path.dirname(self.base_filename)
    for fn in os.listdir(base_dir):
      fp = os.path.join(base_dir, fn)
      if fp.startswith(self.base_filename) and os.path.isfile(fp):
        log_files.append(fp)
    return sorted(log_files)

  def shouldRollover(self, record):
    size_exceeded = self.max_bytes > 0 and self.stream.tell() >= self.max_bytes
    time_exceeded = self.interval > 0 and self.last_rollover + self.interval <= time.monotonic()
    return size_exceeded or time_exceeded

  def doRollover(self):
    if self.stream:
      self.stream.close()
    self.stream = self._open()

    if self.backup_count > 0:
      while len(self.log_files) > self.backup_count:
        to_delete = self.log_files.pop()
        if os.path.exists(to_delete):  # just being safe, should always exist
          os.remove(to_delete)

class UnixDomainSocketHandler(logging.Handler):
  def __init__(self, formatter):
    logging.Handler.__init__(self)
    self.setFormatter(formatter)
    self.pid = None

    self.zctx = None
    self.sock = None

  def __del__(self):
    self.close()

  def close(self):
    if self.sock is not None:
      self.sock.close()
    if self.zctx is not None:
      self.zctx.term()

  def connect(self):
    self.zctx = zmq.Context()
    self.sock = self.zctx.socket(zmq.PUSH)
    self.sock.setsockopt(zmq.LINGER, 10)
    self.sock.connect(Paths.swaglog_ipc())
    self.pid = os.getpid()

  def emit(self, record):
    if os.getpid() != self.pid:
      # TODO suppresses warning about forking proc with zmq socket, fix root cause
      warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*<zmq.*>")
      self.connect()

    msg = self.format(record).rstrip('\n')
    # print("SEND".format(repr(msg)))
    try:
      s = chr(record.levelno)+msg
      self.sock.send(s.encode('utf8'), zmq.NOBLOCK)
    except zmq.error.Again:
      # drop :/
      pass


class ForwardingHandler(logging.Handler):
  def __init__(self, target_logger):
    super().__init__()
    self.target_logger = target_logger

  def emit(self, record):
    self.target_logger.handle(record)


def add_file_handler(log):
  """
  Function to add the file log handler to swaglog.
  This can be used to store logs when logmessaged is not running.
  """
  handler = get_file_handler()
  handler.setFormatter(SwagLogFileFormatter(log))
  log.addHandler(handler)


cloudlog = log = SwagLogger()
log.setLevel(logging.DEBUG)


outhandler = logging.StreamHandler()

print_level = os.environ.get('LOGPRINT', 'warning')
if print_level == 'debug':
  outhandler.setLevel(logging.DEBUG)
elif print_level == 'info':
  outhandler.setLevel(logging.INFO)
elif print_level == 'warning':
  outhandler.setLevel(logging.WARNING)

ipchandler = UnixDomainSocketHandler(SwagFormatter(log))

log.addHandler(outhandler)
# logs are sent through IPC before writing to disk to prevent disk I/O blocking
log.addHandler(ipchandler)
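A minimal usage sketch, assuming `cloudlog` behaves like a standard `logging.Logger` (it does, being a `SwagLogger`); `add_file_handler` is only needed when logmessaged is not consuming the IPC stream, and `LOGPRINT=debug` additionally echoes debug records to the console handler configured above:

```
from openpilot.common.swaglog import cloudlog, add_file_handler

# optional: write rotating swaglog files directly when logmessaged is not running
add_file_handler(cloudlog)

cloudlog.info("hello from swaglog")
cloudlog.warning("something looks off")
try:
  raise RuntimeError("oops")
except RuntimeError:
  cloudlog.exception("caught an error")
```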
1
common/tests/.gitignore
vendored
Normal file
1
common/tests/.gitignore
vendored
Normal file
@ -0,0 +1 @@
test_common
0
common/tests/__init__.py
Normal file
0
common/tests/__init__.py
Normal file
19
common/tests/test_file_helpers.py
Normal file
19
common/tests/test_file_helpers.py
Normal file
@ -0,0 +1,19 @@
import os
from uuid import uuid4

from openpilot.common.file_helpers import atomic_write_in_dir


class TestFileHelpers:
  def run_atomic_write_func(self, atomic_write_func):
    path = f"/tmp/tmp{uuid4()}"
    with atomic_write_func(path) as f:
      f.write("test")
      assert not os.path.exists(path)

    with open(path) as f:
      assert f.read() == "test"
    os.remove(path)

  def test_atomic_write_in_dir(self):
    self.run_atomic_write_func(atomic_write_in_dir)
15
common/tests/test_markdown.py
Normal file
15
common/tests/test_markdown.py
Normal file
@ -0,0 +1,15 @@
import os

from openpilot.common.basedir import BASEDIR
from openpilot.common.markdown import parse_markdown


class TestMarkdown:
  def test_all_release_notes(self):
    with open(os.path.join(BASEDIR, "RELEASES.md")) as f:
      release_notes = f.read().split("\n\n")
    assert len(release_notes) > 10

    for rn in release_notes:
      md = parse_markdown(rn)
      assert len(md) > 0
141
common/tests/test_params.py
Normal file
141
common/tests/test_params.py
Normal file
@ -0,0 +1,141 @@
import pytest
import datetime
import os
import threading
import time
import uuid

from openpilot.common.params import Params, ParamKeyFlag, UnknownKeyName

class TestParams:
  def setup_method(self):
    self.params = Params()

  def test_params_put_and_get(self):
    self.params.put("DongleId", "cb38263377b873ee")
    assert self.params.get("DongleId") == "cb38263377b873ee"

  def test_params_non_ascii(self):
    st = b"\xe1\x90\xff"
    self.params.put("CarParams", st)
    assert self.params.get("CarParams") == st

  def test_params_get_cleared_manager_start(self):
    self.params.put("CarParams", b"test")
    self.params.put("DongleId", "cb38263377b873ee")
    assert self.params.get("CarParams") == b"test"

    undefined_param = self.params.get_param_path(uuid.uuid4().hex)
    with open(undefined_param, "w") as f:
      f.write("test")
    assert os.path.isfile(undefined_param)

    self.params.clear_all(ParamKeyFlag.CLEAR_ON_MANAGER_START)
    assert self.params.get("CarParams") is None
    assert self.params.get("DongleId") is not None
    assert not os.path.isfile(undefined_param)

  def test_params_two_things(self):
    self.params.put("DongleId", "bob")
    self.params.put("AthenadPid", 123)
    assert self.params.get("DongleId") == "bob"
    assert self.params.get("AthenadPid") == 123

  def test_params_get_block(self):
    def _delayed_writer():
      time.sleep(0.1)
      self.params.put("CarParams", b"test")
    threading.Thread(target=_delayed_writer).start()
    assert self.params.get("CarParams") is None
    assert self.params.get("CarParams", block=True) == b"test"

  def test_params_unknown_key_fails(self):
    with pytest.raises(UnknownKeyName):
      self.params.get("swag")

    with pytest.raises(UnknownKeyName):
      self.params.get_bool("swag")

    with pytest.raises(UnknownKeyName):
      self.params.put("swag", "abc")

    with pytest.raises(UnknownKeyName):
      self.params.put_bool("swag", True)

  def test_remove_not_there(self):
    assert self.params.get("CarParams") is None
    self.params.remove("CarParams")
    assert self.params.get("CarParams") is None

  def test_get_bool(self):
    self.params.remove("IsMetric")
    assert not self.params.get_bool("IsMetric")

    self.params.put_bool("IsMetric", True)
    assert self.params.get_bool("IsMetric")

    self.params.put_bool("IsMetric", False)
    assert not self.params.get_bool("IsMetric")

    self.params.put("IsMetric", True)
    assert self.params.get_bool("IsMetric")

    self.params.put("IsMetric", False)
    assert not self.params.get_bool("IsMetric")

  def test_put_non_blocking_with_get_block(self):
    q = Params()
    def _delayed_writer():
      time.sleep(0.1)
      Params().put_nonblocking("CarParams", b"test")
    threading.Thread(target=_delayed_writer).start()
    assert q.get("CarParams") is None
    assert q.get("CarParams", True) == b"test"

  def test_put_bool_non_blocking_with_get_block(self):
    q = Params()
    def _delayed_writer():
      time.sleep(0.1)
      Params().put_bool_nonblocking("CarParams", True)
    threading.Thread(target=_delayed_writer).start()
    assert q.get("CarParams") is None
    assert q.get("CarParams", True) == b"1"

  def test_params_all_keys(self):
    keys = Params().all_keys()

    # sanity checks
    assert len(keys) > 20
    assert len(keys) == len(set(keys))
    assert b"CarParams" in keys

  def test_params_default_value(self):
    self.params.remove("LanguageSetting")
    self.params.remove("LongitudinalPersonality")
    self.params.remove("LiveParameters")

    assert self.params.get("LanguageSetting") is None
    assert self.params.get("LanguageSetting", return_default=False) is None
    assert isinstance(self.params.get("LanguageSetting", return_default=True), str)
    assert isinstance(self.params.get("LongitudinalPersonality", return_default=True), int)
    assert self.params.get("LiveParameters") is None
    assert self.params.get("LiveParameters", return_default=True) is None

  def test_params_get_type(self):
    # json
    self.params.put("ApiCache_FirehoseStats", {"a": 0})
    assert self.params.get("ApiCache_FirehoseStats") == {"a": 0}

    # int
    self.params.put("BootCount", 1441)
    assert self.params.get("BootCount") == 1441

    # bool
    self.params.put("AdbEnabled", True)
    assert self.params.get("AdbEnabled")
    assert isinstance(self.params.get("AdbEnabled"), bool)

    # time
    now = datetime.datetime.now(datetime.UTC)
    self.params.put("InstallDate", now)
    assert self.params.get("InstallDate") == now
29
common/tests/test_simple_kalman.py
Normal file
29
common/tests/test_simple_kalman.py
Normal file
@ -0,0 +1,29 @@
from openpilot.common.simple_kalman import KF1D


class TestSimpleKalman:
  def setup_method(self):
    dt = 0.01
    x0_0 = 0.0
    x1_0 = 0.0
    A0_0 = 1.0
    A0_1 = dt
    A1_0 = 0.0
    A1_1 = 1.0
    C0_0 = 1.0
    C0_1 = 0.0
    K0_0 = 0.12287673
    K1_0 = 0.29666309

    self.kf = KF1D(x0=[[x0_0], [x1_0]],
                   A=[[A0_0, A0_1], [A1_0, A1_1]],
                   C=[C0_0, C0_1],
                   K=[[K0_0], [K1_0]])

  def test_getter_setter(self):
    self.kf.set_x([[1.0], [1.0]])
    assert self.kf.x == [[1.0], [1.0]]

  def test_update_returns_state(self):
    x = self.kf.update(100)
    assert x == [i[0] for i in self.kf.x]
63
common/text_window.py
Executable file
63
common/text_window.py
Executable file
@ -0,0 +1,63 @@
#!/usr/bin/env python3
import os
import time
import subprocess
from openpilot.common.basedir import BASEDIR


class TextWindow:
  def __init__(self, text):
    try:
      self.text_proc = subprocess.Popen(["./text.py", text],
                                        stdin=subprocess.PIPE,
                                        cwd=os.path.join(BASEDIR, "system", "ui"),
                                        close_fds=True)
    except OSError:
      self.text_proc = None

  def get_status(self):
    if self.text_proc is not None:
      self.text_proc.poll()
      return self.text_proc.returncode
    return None

  def __enter__(self):
    return self

  def close(self):
    if self.text_proc is not None:
      self.text_proc.terminate()
      self.text_proc = None

  def wait_for_exit(self):
    if self.text_proc is not None:
      while True:
        if self.get_status() == 1:
          return
        time.sleep(0.1)

  def __del__(self):
    self.close()

  def __exit__(self, exc_type, exc_value, traceback):
    self.close()


if __name__ == "__main__":
  text = """Traceback (most recent call last):
  File "./controlsd.py", line 608, in <module>
    main()
  File "./controlsd.py", line 604, in main
    controlsd_thread(sm, pm, logcan)
  File "./controlsd.py", line 455, in controlsd_thread
    1/0
ZeroDivisionError: division by zero"""
  print(text)

  with TextWindow(text) as s:
    for _ in range(100):
      if s.get_status() == 1:
        print("Got exit button")
        break
      time.sleep(0.1)
  print("gone")
15
common/time_helpers.py
Normal file
15
common/time_helpers.py
Normal file
@ -0,0 +1,15 @@
import datetime
from pathlib import Path

MIN_DATE = datetime.datetime(year=2025, month=2, day=21)

def min_date():
  # on systemd systems, the default time is the systemd build time
  systemd_path = Path("/lib/systemd/systemd")
  if systemd_path.exists():
    d = datetime.datetime.fromtimestamp(systemd_path.stat().st_mtime)
    return max(MIN_DATE, d + datetime.timedelta(days=1))
  return MIN_DATE

def system_time_valid():
  return datetime.datetime.now() > min_date()
27
common/timeout.py
Normal file
27
common/timeout.py
Normal file
@ -0,0 +1,27 @@
import signal

class TimeoutException(Exception):
  pass

class Timeout:
  """
  Timeout context manager.
  For example this code will raise a TimeoutException:
  with Timeout(seconds=5, error_msg="Sleep was too long"):
    time.sleep(10)
  """
  def __init__(self, seconds, error_msg=None):
    if error_msg is None:
      error_msg = f'Timed out after {seconds} seconds'
    self.seconds = seconds
    self.error_msg = error_msg

  def handle_timeout(self, signum, frame):
    raise TimeoutException(self.error_msg)

  def __enter__(self):
    signal.signal(signal.SIGALRM, self.handle_timeout)
    signal.alarm(self.seconds)

  def __exit__(self, exc_type, exc_val, exc_tb):
    signal.alarm(0)
51
common/timing.h
Normal file
51
common/timing.h
Normal file
@ -0,0 +1,51 @@
#pragma once

#include <cstdint>
#include <ctime>

#ifdef __APPLE__
#define CLOCK_BOOTTIME CLOCK_MONOTONIC
#endif

static inline uint64_t nanos_since_boot() {
  struct timespec t;
  clock_gettime(CLOCK_BOOTTIME, &t);
  return t.tv_sec * 1000000000ULL + t.tv_nsec;
}

static inline double millis_since_boot() {
  struct timespec t;
  clock_gettime(CLOCK_BOOTTIME, &t);
  return t.tv_sec * 1000.0 + t.tv_nsec * 1e-6;
}

static inline double seconds_since_boot() {
  struct timespec t;
  clock_gettime(CLOCK_BOOTTIME, &t);
  return (double)t.tv_sec + t.tv_nsec * 1e-9;
}

static inline uint64_t nanos_since_epoch() {
  struct timespec t;
  clock_gettime(CLOCK_REALTIME, &t);
  return t.tv_sec * 1000000000ULL + t.tv_nsec;
}

static inline double seconds_since_epoch() {
  struct timespec t;
  clock_gettime(CLOCK_REALTIME, &t);
  return (double)t.tv_sec + t.tv_nsec * 1e-9;
}

// you probably should use nanos_since_boot instead
static inline uint64_t nanos_monotonic() {
  struct timespec t;
  clock_gettime(CLOCK_MONOTONIC, &t);
  return t.tv_sec * 1000000000ULL + t.tv_nsec;
}

static inline uint64_t nanos_monotonic_raw() {
  struct timespec t;
  clock_gettime(CLOCK_MONOTONIC_RAW, &t);
  return t.tv_sec * 1000000000ULL + t.tv_nsec;
}
2
common/transformations/.gitignore
vendored
Normal file
2
common/transformations/.gitignore
vendored
Normal file
@ -0,0 +1,2 @@
transformations
transformations.cpp
70
common/transformations/README.md
Normal file
70
common/transformations/README.md
Normal file
@ -0,0 +1,70 @@

Reference Frames
------
Many reference frames are used throughout. This
folder contains all helper functions needed to
transform between them. Generally this is done
by generating a rotation matrix and multiplying.


| Name | [x, y, z] | Units | Notes |
| :-------------: |:-------------:| :-----:| :----: |
| Geodetic | [Latitude, Longitude, Altitude] | geodetic coordinates | Sometimes used as [lon, lat, alt], avoid this frame. |
| ECEF | [x, y, z] | meters | We use **ITRF14 (IGS14)**, NOT NAD83. <br> This is the global Mesh3D frame. |
| NED | [North, East, Down] | meters | Relative to earth's surface, useful for visualizing. |
| Device | [Forward, Right, Down] | meters | This is the Mesh3D local frame. <br> Relative to camera, **not imu.** <br> |
| Calibrated | [Forward, Right, Down] | meters | This is the frame the model outputs are in. <br> More details below. <br>|
| Car | [Forward, Right, Down] | meters | This is useful for estimating position of points on the road. <br> More details below. <br>|
| View | [Right, Down, Forward] | meters | Like device frame, but according to camera conventions. |
| Camera | [u, v, focal] | pixels | Like view frame, but 2d on the camera image.|
| Normalized Camera | [u / focal, v / focal, 1] | / | |
| Model | [u, v, focal] | pixels | The sampled rectangle of the full camera frame the model uses. |
| Normalized Model | [u / focal, v / focal, 1] | / | |




Orientation Conventions
------
Quaternions, rotation matrices and euler angles are three
equivalent representations of orientation and all three are
used throughout the code base.

For euler angles the preferred convention is [roll, pitch, yaw],
which corresponds to rotations around the [x, y, z] axes. All
euler angles should always be in radians or radians/s except
for plotting or display purposes. For quaternions the Hamilton
notation is preferred, which is [q<sub>w</sub>, q<sub>x</sub>, q<sub>y</sub>, q<sub>z</sub>]. All quaternions
should always be normalized with a strictly positive q<sub>w</sub>. **These
quaternions are a unique representation of orientation whereas euler angles
or rotation matrices are not.**

To rotate from one frame into another with euler angles the
convention is to rotate around roll, then pitch and then yaw,
while rotating around the rotated axes, not the original axes.


Car frame
------
Device frame is aligned with the road-facing camera used by openpilot. However, when controlling the vehicle it is helpful to think in a reference frame aligned with the vehicle. These two reference frames can be different.

The orientation of car frame is defined to be aligned with the car's direction of travel and the road plane when the vehicle is driving on a flat road and not turning. The origin of car frame is defined to be directly below device frame (in car frame), such that it is on the road plane. The position and orientation of this frame is not necessarily always aligned with the direction of travel or the road plane due to suspension movements and other effects.


Calibrated frame
------
It is helpful for openpilot's driving model to take in images that look similar when mounted differently in different cars. To achieve this we "calibrate" the images by transforming them into calibrated frame. Calibrated frame is defined to be aligned with car frame in pitch and yaw, and aligned with device frame in roll. It also has the same origin as device frame.


Example
------
To transform global Mesh3D positions and orientations (positions_ecef, quats_ecef) into the local frame described by the
first position and orientation from Mesh3D one would do:
```
ecef_from_local = rot_from_quat(quats_ecef[0])
local_from_ecef = ecef_from_local.T
positions_local = np.einsum('ij,kj->ki', local_from_ecef, positions_ecef - positions_ecef[0])
rotations_global = rot_from_quat(quats_ecef)
rotations_local = np.einsum('ij,kjl->kil', local_from_ecef, rotations_global)
eulers_local = euler_from_rot(rotations_local)
```
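A small sketch of the conventions above, assuming the usual aliases exported by `openpilot.common.transformations.orientation` (`rot_from_euler`, `euler_from_rot`, `quat_from_euler`); the angles are illustrative:

```
import numpy as np
from openpilot.common.transformations.orientation import rot_from_euler, euler_from_rot, quat_from_euler

eulers = np.array([0.1, -0.05, 0.3])  # [roll, pitch, yaw] in radians

R = rot_from_euler(eulers)            # 3x3 rotation matrix
q = quat_from_euler(eulers)           # [q_w, q_x, q_y, q_z] with q_w > 0

# round trip back to euler angles
assert np.allclose(euler_from_rot(R), eulers)
```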
0
common/transformations/__init__.py
Normal file
0
common/transformations/__init__.py
Normal file
179
common/transformations/camera.py
Normal file
179
common/transformations/camera.py
Normal file
@ -0,0 +1,179 @@
import itertools
import numpy as np
from dataclasses import dataclass

import openpilot.common.transformations.orientation as orient

## -- hardcoded hardware params --
@dataclass(frozen=True)
class CameraConfig:
  width: int
  height: int
  focal_length: float

  @property
  def size(self):
    return (self.width, self.height)

  @property
  def intrinsics(self):
    # aka 'K' aka camera_frame_from_view_frame
    return np.array([
      [self.focal_length, 0.0, float(self.width)/2],
      [0.0, self.focal_length, float(self.height)/2],
      [0.0, 0.0, 1.0]
    ])

  @property
  def intrinsics_inv(self):
    # aka 'K_inv' aka view_frame_from_camera_frame
    return np.linalg.inv(self.intrinsics)

@dataclass(frozen=True)
class _NoneCameraConfig(CameraConfig):
  width: int = 0
  height: int = 0
  focal_length: float = 0

@dataclass(frozen=True)
class DeviceCameraConfig:
  fcam: CameraConfig
  dcam: CameraConfig
  ecam: CameraConfig

  def all_cams(self):
    for cam in ['fcam', 'dcam', 'ecam']:
      if not isinstance(getattr(self, cam), _NoneCameraConfig):
        yield cam, getattr(self, cam)

_ar_ox_fisheye = CameraConfig(1928, 1208, 567.0)  # focal length probably wrong? magnification is not consistent across frame
_os_fisheye = CameraConfig(2688 // 2, 1520 // 2, 567.0 / 4 * 3)
_ar_ox_config = DeviceCameraConfig(CameraConfig(1928, 1208, 2648.0), _ar_ox_fisheye, _ar_ox_fisheye)
_os_config = DeviceCameraConfig(CameraConfig(2688 // 2, 1520 // 2, 1522.0 * 3 / 4), _os_fisheye, _os_fisheye)
_neo_config = DeviceCameraConfig(CameraConfig(1164, 874, 910.0), CameraConfig(816, 612, 650.0), _NoneCameraConfig())

DEVICE_CAMERAS = {
  # A "device camera" is defined by a device type and sensor

  # sensor type was never set on eon/neo/two
  ("neo", "unknown"): _neo_config,
  # unknown here is AR0231, field was added with OX03C10 support
  ("tici", "unknown"): _ar_ox_config,

  # before deviceState.deviceType was set, assume tici AR config
  ("unknown", "ar0231"): _ar_ox_config,
  ("unknown", "ox03c10"): _ar_ox_config,

  # simulator (emulates a tici)
  ("pc", "unknown"): _ar_ox_config,
}
prods = itertools.product(('tici', 'tizi', 'mici'), (('ar0231', _ar_ox_config), ('ox03c10', _ar_ox_config), ('os04c10', _os_config)))
DEVICE_CAMERAS.update({(d, c[0]): c[1] for d, c in prods})

# device/mesh : x->forward, y-> right, z->down
# view : x->right, y->down, z->forward
device_frame_from_view_frame = np.array([
  [ 0.,  0.,  1.],
  [ 1.,  0.,  0.],
  [ 0.,  1.,  0.]
])
view_frame_from_device_frame = device_frame_from_view_frame.T


# aka 'extrinsic_matrix'
# road : x->forward, y -> left, z->up
def get_view_frame_from_road_frame(roll, pitch, yaw, height):
  device_from_road = orient.rot_from_euler([roll, pitch, yaw]).dot(np.diag([1, -1, -1]))
  view_from_road = view_frame_from_device_frame.dot(device_from_road)
  return np.hstack((view_from_road, [[0], [height], [0]]))




# aka 'extrinsic_matrix'
def get_view_frame_from_calib_frame(roll, pitch, yaw, height):
  device_from_calib = orient.rot_from_euler([roll, pitch, yaw])
  view_from_calib = view_frame_from_device_frame.dot(device_from_calib)
  return np.hstack((view_from_calib, [[0], [height], [0]]))


def vp_from_ke(m):
  """
  Computes the vanishing point from the product of the intrinsic and extrinsic
  matrices C = KE.

  The vanishing point is defined as lim x->infinity C (x, 0, 0, 1).T
  """
  return (m[0, 0]/m[2, 0], m[1, 0]/m[2, 0])


def roll_from_ke(m):
  # note: different from calibration.h/RollAnglefromKE: i think that one's just wrong
  return np.arctan2(-(m[1, 0] - m[1, 1] * m[2, 0] / m[2, 1]),
                    -(m[0, 0] - m[0, 1] * m[2, 0] / m[2, 1]))


def normalize(img_pts, intrinsics):
  # normalizes image coordinates
  # accepts single pt or array of pts
  intrinsics_inv = np.linalg.inv(intrinsics)
  img_pts = np.array(img_pts)
  input_shape = img_pts.shape
  img_pts = np.atleast_2d(img_pts)
  img_pts = np.hstack((img_pts, np.ones((img_pts.shape[0], 1))))
  img_pts_normalized = img_pts.dot(intrinsics_inv.T)
  img_pts_normalized[(img_pts < 0).any(axis=1)] = np.nan
  return img_pts_normalized[:, :2].reshape(input_shape)


def denormalize(img_pts, intrinsics, width=np.inf, height=np.inf):
  # denormalizes image coordinates
  # accepts single pt or array of pts
  img_pts = np.array(img_pts)
  input_shape = img_pts.shape
  img_pts = np.atleast_2d(img_pts)
  img_pts = np.hstack((img_pts, np.ones((img_pts.shape[0], 1), dtype=img_pts.dtype)))
  img_pts_denormalized = img_pts.dot(intrinsics.T)
  if np.isfinite(width):
    img_pts_denormalized[img_pts_denormalized[:, 0] > width] = np.nan
    img_pts_denormalized[img_pts_denormalized[:, 0] < 0] = np.nan
  if np.isfinite(height):
    img_pts_denormalized[img_pts_denormalized[:, 1] > height] = np.nan
    img_pts_denormalized[img_pts_denormalized[:, 1] < 0] = np.nan
  return img_pts_denormalized[:, :2].reshape(input_shape)


def get_calib_from_vp(vp, intrinsics):
  vp_norm = normalize(vp, intrinsics)
  yaw_calib = np.arctan(vp_norm[0])
  pitch_calib = -np.arctan(vp_norm[1]*np.cos(yaw_calib))
  roll_calib = 0
  return roll_calib, pitch_calib, yaw_calib


def device_from_ecef(pos_ecef, orientation_ecef, pt_ecef):
  # device from ecef frame
  # device frame is x -> forward, y-> right, z -> down
  # accepts single pt or array of pts
  input_shape = pt_ecef.shape
  pt_ecef = np.atleast_2d(pt_ecef)
  ecef_from_device_rot = orient.rotations_from_quats(orientation_ecef)
  device_from_ecef_rot = ecef_from_device_rot.T
  pt_ecef_rel = pt_ecef - pos_ecef
  pt_device = np.einsum('jk,ik->ij', device_from_ecef_rot, pt_ecef_rel)
  return pt_device.reshape(input_shape)


def img_from_device(pt_device):
  # img coordinates from pts in device frame
  # first transforms to view frame, then to img coords
  # accepts single pt or array of pts
  input_shape = pt_device.shape
  pt_device = np.atleast_2d(pt_device)
  pt_view = np.einsum('jk,ik->ij', view_frame_from_device_frame, pt_device)

  # This function should never return negative depths
  pt_view[pt_view[:, 2] < 0] = np.nan

  pt_img = pt_view/pt_view[:, 2:3]
  return pt_img.reshape(input_shape)[:, :2]
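A usage sketch for the camera configs and the (de)normalization helpers above:

```
import numpy as np
from openpilot.common.transformations.camera import DEVICE_CAMERAS, normalize, denormalize

cam = DEVICE_CAMERAS[("tici", "ar0231")].fcam   # road-facing camera config
K = cam.intrinsics                              # 3x3 camera matrix

# a unit-depth ray straight down the optical axis lands at the image center
center_px = denormalize([0.0, 0.0], K)
assert np.allclose(center_px, [cam.width / 2, cam.height / 2])

# and normalize() is its inverse for in-frame points
assert np.allclose(normalize(center_px, K), [0.0, 0.0])
```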
43
common/transformations/coordinates.hpp
Normal file
43
common/transformations/coordinates.hpp
Normal file
@ -0,0 +1,43 @@
#pragma once

#include <eigen3/Eigen/Dense>

#define DEG2RAD(x) ((x) * M_PI / 180.0)
#define RAD2DEG(x) ((x) * 180.0 / M_PI)

struct ECEF {
  double x, y, z;
  Eigen::Vector3d to_vector() const {
    return Eigen::Vector3d(x, y, z);
  }
};

struct NED {
  double n, e, d;
  Eigen::Vector3d to_vector() const {
    return Eigen::Vector3d(n, e, d);
  }
};

struct Geodetic {
  double lat, lon, alt;
  bool radians=false;
};

ECEF geodetic2ecef(const Geodetic &g);
Geodetic ecef2geodetic(const ECEF &e);

class LocalCoord {
public:
  Eigen::Matrix3d ned2ecef_matrix;
  Eigen::Matrix3d ecef2ned_matrix;
  Eigen::Vector3d init_ecef;
  LocalCoord(const Geodetic &g, const ECEF &e);
  LocalCoord(const Geodetic &g) : LocalCoord(g, ::geodetic2ecef(g)) {}
  LocalCoord(const ECEF &e) : LocalCoord(::ecef2geodetic(e), e) {}

  NED ecef2ned(const ECEF &e);
  ECEF ned2ecef(const NED &n);
  NED geodetic2ned(const Geodetic &g);
  Geodetic ned2geodetic(const NED &n);
};
18
common/transformations/coordinates.py
Normal file
18
common/transformations/coordinates.py
Normal file
@ -0,0 +1,18 @@
from openpilot.common.transformations.orientation import numpy_wrap
from openpilot.common.transformations.transformations import (ecef2geodetic_single,
                                                               geodetic2ecef_single)
from openpilot.common.transformations.transformations import LocalCoord as LocalCoord_single


class LocalCoord(LocalCoord_single):
  ecef2ned = numpy_wrap(LocalCoord_single.ecef2ned_single, (3,), (3,))
  ned2ecef = numpy_wrap(LocalCoord_single.ned2ecef_single, (3,), (3,))
  geodetic2ned = numpy_wrap(LocalCoord_single.geodetic2ned_single, (3,), (3,))
  ned2geodetic = numpy_wrap(LocalCoord_single.ned2geodetic_single, (3,), (3,))


geodetic2ecef = numpy_wrap(geodetic2ecef_single, (3,), (3,))
ecef2geodetic = numpy_wrap(ecef2geodetic_single, (3,), (3,))

geodetic_from_ecef = ecef2geodetic
ecef_from_geodetic = geodetic2ecef
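A usage sketch for these wrappers; the coordinates are illustrative, geodetic input is assumed to be [lat, lon, alt] in degrees and meters, and the batching line assumes `numpy_wrap` vectorizes over a leading dimension:

```
import numpy as np
from openpilot.common.transformations.coordinates import geodetic2ecef, ecef2geodetic

geo = np.array([37.7749, -122.4194, 30.0])    # [lat, lon, alt] (illustrative)

ecef = geodetic2ecef(geo)                     # ECEF [x, y, z] in meters
geo_back = ecef2geodetic(ecef)
assert np.allclose(geo_back, geo, atol=1e-3)  # round trip, up to numerical error

# batches with a leading dimension (assumed numpy_wrap behavior)
ecef_batch = geodetic2ecef(np.tile(geo, (5, 1)))
```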
70
common/transformations/model.py
Normal file
70
common/transformations/model.py
Normal file
@ -0,0 +1,70 @@
import numpy as np

from openpilot.common.transformations.orientation import rot_from_euler
from openpilot.common.transformations.camera import get_view_frame_from_calib_frame, view_frame_from_device_frame, _ar_ox_fisheye

# segnet
SEGNET_SIZE = (512, 384)

# MED model
MEDMODEL_INPUT_SIZE = (512, 256)
MEDMODEL_YUV_SIZE = (MEDMODEL_INPUT_SIZE[0], MEDMODEL_INPUT_SIZE[1] * 3 // 2)
MEDMODEL_CY = 47.6

medmodel_fl = 910.0
medmodel_intrinsics = np.array([
  [medmodel_fl, 0.0, 0.5 * MEDMODEL_INPUT_SIZE[0]],
  [0.0, medmodel_fl, MEDMODEL_CY],
  [0.0, 0.0, 1.0]])


# BIG model
BIGMODEL_INPUT_SIZE = (1024, 512)
BIGMODEL_YUV_SIZE = (BIGMODEL_INPUT_SIZE[0], BIGMODEL_INPUT_SIZE[1] * 3 // 2)

bigmodel_fl = 910.0
bigmodel_intrinsics = np.array([
  [bigmodel_fl, 0.0, 0.5 * BIGMODEL_INPUT_SIZE[0]],
  [0.0, bigmodel_fl, 256 + MEDMODEL_CY],
  [0.0, 0.0, 1.0]])


# SBIG model (big model with the size of small model)
SBIGMODEL_INPUT_SIZE = (512, 256)
SBIGMODEL_YUV_SIZE = (SBIGMODEL_INPUT_SIZE[0], SBIGMODEL_INPUT_SIZE[1] * 3 // 2)

sbigmodel_fl = 455.0
sbigmodel_intrinsics = np.array([
  [sbigmodel_fl, 0.0, 0.5 * SBIGMODEL_INPUT_SIZE[0]],
  [0.0, sbigmodel_fl, 0.5 * (256 + MEDMODEL_CY)],
  [0.0, 0.0, 1.0]])

DM_INPUT_SIZE = (1440, 960)
dmonitoringmodel_fl = _ar_ox_fisheye.focal_length
dmonitoringmodel_intrinsics = np.array([
  [dmonitoringmodel_fl, 0.0, DM_INPUT_SIZE[0]/2],
  [0.0, dmonitoringmodel_fl, DM_INPUT_SIZE[1]/2 - (_ar_ox_fisheye.height - DM_INPUT_SIZE[1])/2],
  [0.0, 0.0, 1.0]])

bigmodel_frame_from_calib_frame = np.dot(bigmodel_intrinsics,
                                         get_view_frame_from_calib_frame(0, 0, 0, 0))


sbigmodel_frame_from_calib_frame = np.dot(sbigmodel_intrinsics,
                                          get_view_frame_from_calib_frame(0, 0, 0, 0))

medmodel_frame_from_calib_frame = np.dot(medmodel_intrinsics,
                                         get_view_frame_from_calib_frame(0, 0, 0, 0))

medmodel_frame_from_bigmodel_frame = np.dot(medmodel_intrinsics, np.linalg.inv(bigmodel_intrinsics))

calib_from_medmodel = np.linalg.inv(medmodel_frame_from_calib_frame[:, :3])
calib_from_sbigmodel = np.linalg.inv(sbigmodel_frame_from_calib_frame[:, :3])

# This function is verified to give similar results to xx.uncommon.utils.transform_img
def get_warp_matrix(device_from_calib_euler: np.ndarray, intrinsics: np.ndarray, bigmodel_frame: bool = False) -> np.ndarray:
  calib_from_model = calib_from_sbigmodel if bigmodel_frame else calib_from_medmodel
  device_from_calib = rot_from_euler(device_from_calib_euler)
  camera_from_calib = intrinsics @ view_frame_from_device_frame @ device_from_calib
  warp_matrix: np.ndarray = camera_from_calib @ calib_from_model
  return warp_matrix
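A usage sketch for `get_warp_matrix`; the calibration angles are illustrative, and the camera config comes from `DEVICE_CAMERAS` defined earlier in this diff:

```
import numpy as np
from openpilot.common.transformations.camera import DEVICE_CAMERAS
from openpilot.common.transformations.model import get_warp_matrix, MEDMODEL_INPUT_SIZE

device_from_calib_euler = np.array([0.0, 0.02, -0.01])  # [roll, pitch, yaw], illustrative

cam = DEVICE_CAMERAS[("tici", "ar0231")].fcam
M = get_warp_matrix(device_from_calib_euler, cam.intrinsics)  # 3x3: camera frame <- medmodel frame

# map the model-frame image center into full-camera pixel coordinates
center = np.array([MEDMODEL_INPUT_SIZE[0] / 2, MEDMODEL_INPUT_SIZE[1] / 2, 1.0])
cam_px = M @ center
cam_px = cam_px[:2] / cam_px[2]
```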
17
common/transformations/orientation.hpp
Normal file
17
common/transformations/orientation.hpp
Normal file
@ -0,0 +1,17 @@
#pragma once
#include <eigen3/Eigen/Dense>
#include "common/transformations/coordinates.hpp"


Eigen::Quaterniond ensure_unique(const Eigen::Quaterniond &quat);

Eigen::Quaterniond euler2quat(const Eigen::Vector3d &euler);
Eigen::Vector3d quat2euler(const Eigen::Quaterniond &quat);
Eigen::Matrix3d quat2rot(const Eigen::Quaterniond &quat);
Eigen::Quaterniond rot2quat(const Eigen::Matrix3d &rot);
Eigen::Matrix3d euler2rot(const Eigen::Vector3d &euler);
Eigen::Vector3d rot2euler(const Eigen::Matrix3d &rot);
Eigen::Matrix3d rot_matrix(double roll, double pitch, double yaw);
Eigen::Matrix3d rot(const Eigen::Vector3d &axis, double angle);
Eigen::Vector3d ecef_euler_from_ned(const ECEF &ecef_init, const Eigen::Vector3d &ned_pose);
Eigen::Vector3d ned_euler_from_ecef(const ECEF &ecef_init, const Eigen::Vector3d &ecef_pose);
Some files were not shown because too many files have changed in this diff.