Merge branch 'upstream/openpilot/master' into sync-20250823
# Conflicts:
#  .github/workflows/release.yaml
#  README.md
#  RELEASES.md
#  common/params_keys.h
#  docs/CARS.md
#  opendbc_repo
#  panda
#  release/build_stripped.sh
#  selfdrive/controls/lib/longitudinal_planner.py
#  selfdrive/modeld/modeld.py
#  selfdrive/ui/feedback/feedbackd.py
#  selfdrive/ui/translations/main_ar.ts
#  selfdrive/ui/translations/main_de.ts
#  selfdrive/ui/translations/main_es.ts
#  selfdrive/ui/translations/main_fr.ts
#  selfdrive/ui/translations/main_ja.ts
#  selfdrive/ui/translations/main_ko.ts
#  selfdrive/ui/translations/main_pt-BR.ts
#  selfdrive/ui/translations/main_th.ts
#  selfdrive/ui/translations/main_tr.ts
#  selfdrive/ui/translations/main_zh-CHS.ts
#  selfdrive/ui/translations/main_zh-CHT.ts
#  system/version.py
#  uv.lock
.github/workflows/release.yaml | 2
@@ -39,4 +39,4 @@ jobs:
 git config --global --add safe.directory '*'
 git lfs pull
 - name: Push __nightly
-run: BRANCH=__nightly release/build_devel.sh
+run: BRANCH=__nightly release/build_stripped.sh

.github/workflows/selfdrive_tests.yaml | 7
@@ -27,7 +27,7 @@ env:

 RUN: docker run --shm-size 2G -v $PWD:/tmp/openpilot -w /tmp/openpilot -e CI=1 -e PYTHONWARNINGS=error -e FILEREADER_CACHE=1 -e PYTHONPATH=/tmp/openpilot -e NUM_JOBS -e JOB_ID -e GITHUB_ACTION -e GITHUB_REF -e GITHUB_HEAD_REF -e GITHUB_SHA -e GITHUB_REPOSITORY -e GITHUB_RUN_ID -v $GITHUB_WORKSPACE/.ci_cache/scons_cache:/tmp/scons_cache -v $GITHUB_WORKSPACE/.ci_cache/comma_download_cache:/tmp/comma_download_cache -v $GITHUB_WORKSPACE/.ci_cache/openpilot_cache:/tmp/openpilot_cache $BASE_IMAGE /bin/bash -c

-PYTEST: pytest --continue-on-collection-errors --durations=0 --durations-min=5 -n logical
+PYTEST: pytest --continue-on-collection-errors --durations=0 -n logical

 jobs:
 build_release:
@@ -52,7 +52,7 @@ jobs:
 command: git lfs pull
 - name: Build devel
 timeout-minutes: 1
-run: TARGET_DIR=$STRIPPED_DIR release/build_devel.sh
+run: TARGET_DIR=$STRIPPED_DIR release/build_stripped.sh
 - uses: ./.github/workflows/setup-with-retry
 - name: Build openpilot and run checks
 timeout-minutes: ${{ ((steps.restore-scons-cache.outputs.cache-hit == 'true') && 10 || 30) }} # allow more time when we missed the scons cache
@@ -190,7 +190,8 @@ jobs:
 timeout-minutes: ${{ contains(runner.name, 'nsc') && ((steps.setup-step.outputs.duration < 18) && 1 || 2) || 999 }}
 run: |
 ${{ env.RUN }} "source selfdrive/test/setup_xvfb.sh && \
-$PYTEST --collect-only -m 'not slow' &> /dev/null && \
+# Pre-compile Python bytecode so each pytest worker doesn't need to
+$PYTEST --collect-only -m 'not slow' -qq && \
 MAX_EXAMPLES=1 $PYTEST -m 'not slow' && \
 ./selfdrive/ui/tests/create_test_translations.sh && \
 QT_QPA_PLATFORM=offscreen ./selfdrive/ui/tests/test_translations && \

Jenkinsfile | 2
@@ -167,7 +167,7 @@ node {
 env.GIT_COMMIT = checkout(scm).GIT_COMMIT

 def excludeBranches = ['__nightly', 'devel', 'devel-staging', 'release3', 'release3-staging',
-'testing-closet*', 'hotfix-*']
+'release-tici', 'testing-closet*', 'hotfix-*']
 def excludeRegex = excludeBranches.join('|').replaceAll('\\*', '.*')

 if (env.BRANCH_NAME != 'master' && !env.BRANCH_NAME.contains('__jenkins_loop_')) {

RELEASES.md | 14
@@ -1,18 +1,22 @@
 Version 0.10.1 (2025-09-08)
 ========================
+* Record driving feedback using LKAS button
+* Honda City 2023 support thanks to drFritz!

 Version 0.10.0 (2025-08-05)
 ========================
 * New driving model
 * New training architecture
-* Architecture outlined in CVPR paper: "Learning to Drive from a World Model"
-* Longitudinal MPC replaced by E2E planning from worldmodel in experimental mode
-* Action from lateral MPC as training objective replaced by E2E planning from worldmodel
+* Described in our CVPR paper: "Learning to Drive from a World Model"
+* Longitudinal MPC replaced by E2E planning from World Model in Experimental Mode
+* Action from lateral MPC as training objective replaced by E2E planning from World Model
 * Low-speed lead car ground-truth fixes

 * Enable live-learned steering actuation delay
-* Record driving feedback using LKAS button when MADS is disabled
 * Opt-in audio recording for dashcam video
+* Acura MDX 2025 support thanks to vanillagorillaa and MVL!
+* Honda Accord 2023-25 support thanks to vanillagorillaa and MVL!
+* Honda CR-V 2023-25 support thanks to vanillagorillaa and MVL!
+* Honda Pilot 2023-25 support thanks to vanillagorillaa and MVL!

 Version 0.9.9 (2025-05-23)
 ========================

@@ -1 +1 @@
-#define DEFAULT_MODEL "Down To Ride (Default)"
+#define DEFAULT_MODEL "Steam Powered (Default)"

@@ -73,9 +73,9 @@ inline static std::unordered_map<std::string, ParamKeyAttributes> keys = {
 {"LastOffroadStatusPacket", {CLEAR_ON_MANAGER_START | CLEAR_ON_OFFROAD_TRANSITION, JSON}},
 {"LastPowerDropDetected", {CLEAR_ON_MANAGER_START, STRING}},
 {"LastUpdateException", {CLEAR_ON_MANAGER_START, STRING}},
-{"LastUpdateRouteCount", {PERSISTENT, INT}},
+{"LastUpdateRouteCount", {PERSISTENT, INT, "0"}},
 {"LastUpdateTime", {PERSISTENT, TIME}},
-{"LastUpdateUptimeOnroad", {PERSISTENT, FLOAT}},
+{"LastUpdateUptimeOnroad", {PERSISTENT, FLOAT, "0.0"}},
 {"LiveDelay", {PERSISTENT | BACKUP, BYTES}},
 {"LiveParameters", {PERSISTENT, JSON}},
 {"LiveParametersV2", {PERSISTENT, BYTES}},
@@ -199,7 +199,7 @@ inline static std::unordered_map<std::string, ParamKeyAttributes> keys = {

 // mapd
 {"MapAdvisorySpeedLimit", {CLEAR_ON_ONROAD_TRANSITION, FLOAT}},
-{"MapdVersion", {PERSISTENT, STRING, ""}},
+{"MapdVersion", {PERSISTENT, STRING}},
 {"MapSpeedLimit", {CLEAR_ON_ONROAD_TRANSITION, FLOAT, "0.0"}},
 {"NextMapSpeedLimit", {CLEAR_ON_ONROAD_TRANSITION, JSON}},
 {"Offroad_OSMUpdateRequired", {CLEAR_ON_MANAGER_START, JSON}},
@@ -215,5 +215,5 @@ inline static std::unordered_map<std::string, ParamKeyAttributes> keys = {
 {"OsmStateName", {PERSISTENT, STRING, "All"}},
 {"OsmStateTitle", {PERSISTENT, STRING}},
 {"OsmWayTest", {PERSISTENT, STRING}},
-{"RoadName", {CLEAR_ON_ONROAD_TRANSITION, STRING, ""}},
+{"RoadName", {CLEAR_ON_ONROAD_TRANSITION, STRING}},
 };

@@ -14,8 +14,7 @@ class PIDController:
 if isinstance(self._k_d, Number):
 self._k_d = [[0], [self._k_d]]

-self.pos_limit = pos_limit
-self.neg_limit = neg_limit
+self.set_limits(pos_limit, neg_limit)

 self.i_rate = 1.0 / rate
 self.speed = 0.0
@@ -41,6 +40,10 @@ class PIDController:
 self.f = 0.0
 self.control = 0

+def set_limits(self, pos_limit, neg_limit):
+self.pos_limit = pos_limit
+self.neg_limit = neg_limit
+
 def update(self, error, error_rate=0.0, speed=0.0, feedforward=0., freeze_integrator=False):
 self.speed = speed
 self.p = float(error) * self.k_p

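A short usage sketch of what this refactor enables: output limits of a live `PIDController` can now be retuned through `set_limits` instead of only at construction time. The constructor keyword names (`k_f`, `pos_limit`, `neg_limit`, `rate`) are assumed from the hunk above and the current openpilot signature, so treat this as illustrative rather than authoritative.

```python
# Sketch only: constructor kwargs assumed from the hunk above.
from openpilot.common.pid import PIDController

pid = PIDController(1.0, 0.1, k_f=0.0, pos_limit=1.0, neg_limit=-1.0, rate=100)
out = pid.update(error=0.2, feedforward=0.0, speed=10.0)

# New in this change: limits can be adjusted on an existing controller,
# e.g. when the usable actuator range is re-estimated at runtime.
pid.set_limits(0.5, -0.5)
out = pid.update(error=0.2)
```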
@@ -1,4 +1,6 @@
 import subprocess
+from contextlib import contextmanager
+from subprocess import Popen, PIPE, TimeoutExpired


 def run_cmd(cmd: list[str], cwd=None, env=None) -> str:
@@ -11,3 +13,16 @@ def run_cmd_default(cmd: list[str], default: str = "", cwd=None, env=None) -> st
 except subprocess.CalledProcessError:
 return default

+
+@contextmanager
+def managed_proc(cmd: list[str], env: dict[str, str]):
+proc = Popen(cmd, env=env, stdout=PIPE, stderr=PIPE)
+try:
+yield proc
+finally:
+if proc.poll() is None:
+proc.terminate()
+try:
+proc.wait(timeout=5)
+except TimeoutExpired:
+proc.kill()

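A sketch of how the new `managed_proc` context manager would be used. The hunk does not show the file name, so the import path below is a placeholder assumption; the cleanup behavior (terminate, wait up to 5 s, then kill) follows directly from the added code.

```python
# Usage sketch; the import path is assumed, not shown in the hunk.
import os
from openpilot.common.run import managed_proc  # placeholder module path

env = {**os.environ, "PYTHONUNBUFFERED": "1"}
with managed_proc(["sleep", "30"], env=env) as proc:
    # do work while the child runs; on exiting the block the context manager
    # terminates the process and escalates to kill() after a 5-second wait
    print("child pid:", proc.pid)
```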
@@ -6,7 +6,7 @@ Development is coordinated through [Discord](https://discord.comma.ai) and GitHu

 ### Getting Started

-* Setup your [development environment](../tools/)
+* Set up your [development environment](/tools/)
 * Join our [Discord](https://discord.comma.ai)
 * Docs are at https://docs.comma.ai and https://blog.comma.ai

@@ -1,11 +1,11 @@
 # Turn the speed blue
 *A getting started guide for openpilot development*

-In 30 minutes, we'll get an openpilot development environment setup on your computer and make some changes to openpilot's UI.
+In 30 minutes, we'll get an openpilot development environment set up on your computer and make some changes to openpilot's UI.

 And if you have a comma 3/3X, we'll deploy the change to your device for testing.

-## 1. Setup your development environment
+## 1. Set up your development environment

 Run this to clone openpilot and install all the dependencies:
 ```bash

@@ -7,7 +7,7 @@ export OPENBLAS_NUM_THREADS=1
 export VECLIB_MAXIMUM_THREADS=1

 if [ -z "$AGNOS_VERSION" ]; then
-export AGNOS_VERSION="12.6"
+export AGNOS_VERSION="12.8"
 fi

 export STAGING_ROOT="/data/safe_staging"

Submodule opendbc_repo updated: dec0074043...aa0aa1b7aa
panda | 2
Submodule panda updated: 0e7a3fd8cf...f10ddc6a89

@@ -3,33 +3,34 @@
 ```
 ## release checklist

-**Go to `devel-staging`**
-- [ ] make issue to track release
+### Go to staging
+- [ ] make a GitHub issue to track release
+- [ ] create release master branch
 - [ ] update RELEASES.md
-- [ ] trigger new nightly build: https://github.com/commaai/openpilot/actions/workflows/release.yaml
-- [ ] update `devel-staging`: `git reset --hard origin/__nightly`
+- [ ] bump version on master: `common/version.h` and `RELEASES.md`
 - [ ] build new userdata partition from `release3-staging`
-- [ ] open a pull request from `devel-staging` to `devel`
 - [ ] post on Discord, tag `@release crew`

-**Go to `devel`**
-- [ ] bump version on master: `common/version.h` and `RELEASES.md`
-- [ ] before merging the pull request, test the following:
+Updating staging:
+1. either rebase on master or cherry-pick changes
+2. run this to update: `BRANCH=devel-staging release/build_devel.sh`
+3. build new userdata partition from `release3-staging`
+
+### Go to release
+- [ ] before going to release, test the following:
 - [ ] update from previous release -> new release
 - [ ] update from new release -> previous release
 - [ ] fresh install with `openpilot-test.comma.ai`
 - [ ] drive on fresh install
 - [ ] no submodules or LFS
 - [ ] check sentry, MTBF, etc.
-- [ ] stress test in production
+- [ ] stress test passes in production

-**Go to `release3`**
 - [ ] publish the blog post
 - [ ] `git reset --hard origin/release3-staging`
 - [ ] tag the release: `git tag v0.X.X <commit-hash> && git push origin v0.X.X`
 - [ ] create GitHub release
 - [ ] final test install on `openpilot.comma.ai`
 - [ ] update factory provisioning
-- [ ] close out milestone
+- [ ] close out milestone and issue
 - [ ] post on Discord, X, etc.
 ```

@@ -17,28 +17,23 @@ rm -rf $TARGET_DIR
 mkdir -p $TARGET_DIR
 cd $TARGET_DIR
 cp -r $SOURCE_DIR/.git $TARGET_DIR
-pre-commit uninstall || true

-echo "[-] bringing __nightly and devel in sync T=$SECONDS"
+echo "[-] setting up stripped branch sync T=$SECONDS"
 cd $TARGET_DIR

-git fetch --depth 1 origin __nightly
-git fetch --depth 1 origin devel
+# tmp branch
+git checkout --orphan tmp

-git checkout -f --track origin/__nightly
-git reset --hard __nightly
-git checkout __nightly
-git reset --hard origin/devel
-git clean -xdff
-git lfs uninstall

 # remove everything except .git
 echo "[-] erasing old sunnypilot T=$SECONDS"
+git submodule deinit -f --all
+git rm -rf --cached .
 find . -maxdepth 1 -not -path './.git' -not -name '.' -not -name '..' -exec rm -rf '{}' \;

-# reset source tree
+# cleanup before the copy
 cd $SOURCE_DIR
 git clean -xdff
+git submodule foreach --recursive git clean -xdff

 # do the files copy
 echo "[-] copying files T=$SECONDS"
@@ -47,6 +42,7 @@ cp -pR --parents $(./release/release_files.py) $TARGET_DIR/

 # in the directory
 cd $TARGET_DIR
+rm -rf .git/modules/
 rm -f panda/board/obj/panda.bin.signed

 # include source commit hash and build date in commit
@@ -85,7 +81,7 @@ fi

 if [ ! -z "$BRANCH" ]; then
 echo "[-] Pushing to $BRANCH T=$SECONDS"
-git push -f origin __nightly:$BRANCH
+git push -f origin tmp:$BRANCH
 fi

 echo "[-] done T=$SECONDS, ready at $TARGET_DIR"

@@ -3,7 +3,6 @@ import numpy as np

 from cereal import log
 from opendbc.car.lateral import FRICTION_THRESHOLD, get_friction
-from opendbc.car.interfaces import LatControlInputs
 from openpilot.common.constants import ACCELERATION_DUE_TO_GRAVITY
 from openpilot.selfdrive.controls.lib.latcontrol import LatControl
 from openpilot.common.pid import PIDController
@@ -29,9 +28,11 @@ class LatControlTorque(LatControl):
 def __init__(self, CP, CP_SP, CI):
 super().__init__(CP, CP_SP, CI)
 self.torque_params = CP.lateralTuning.torque.as_builder()
-self.pid = PIDController(self.torque_params.kp, self.torque_params.ki,
-k_f=self.torque_params.kf, pos_limit=self.steer_max, neg_limit=-self.steer_max)
 self.torque_from_lateral_accel = CI.torque_from_lateral_accel()
+self.lateral_accel_from_torque = CI.lateral_accel_from_torque()
+self.pid = PIDController(self.torque_params.kp, self.torque_params.ki,
+k_f=self.torque_params.kf)
+self.update_limits()
 self.steering_angle_deadzone_deg = self.torque_params.steeringAngleDeadzoneDeg

 self.extension = LatControlTorqueExt(self, CP, CP_SP)
@@ -40,6 +41,11 @@ class LatControlTorque(LatControl):
 self.torque_params.latAccelFactor = latAccelFactor
 self.torque_params.latAccelOffset = latAccelOffset
 self.torque_params.friction = friction
+self.update_limits()
+
+def update_limits(self):
+self.pid.set_limits(self.lateral_accel_from_torque(self.steer_max, self.torque_params),
+self.lateral_accel_from_torque(-self.steer_max, self.torque_params))

 def update(self, active, CS, VM, params, steer_limited_by_safety, desired_curvature, calibrated_pose, curvature_limited):
 pid_log = log.ControlsState.LateralTorqueState.new_message()
@@ -61,13 +67,10 @@ class LatControlTorque(LatControl):
 setpoint = desired_lateral_accel + low_speed_factor * desired_curvature
 measurement = actual_lateral_accel + low_speed_factor * actual_curvature
 gravity_adjusted_lateral_accel = desired_lateral_accel - roll_compensation
-torque_from_setpoint = self.torque_from_lateral_accel(LatControlInputs(setpoint, roll_compensation, CS.vEgo, CS.aEgo), self.torque_params,
-gravity_adjusted=False)
-torque_from_measurement = self.torque_from_lateral_accel(LatControlInputs(measurement, roll_compensation, CS.vEgo, CS.aEgo), self.torque_params,
-gravity_adjusted=False)
-pid_log.error = float(torque_from_setpoint - torque_from_measurement)
-ff = self.torque_from_lateral_accel(LatControlInputs(gravity_adjusted_lateral_accel, roll_compensation, CS.vEgo, CS.aEgo), self.torque_params,
-gravity_adjusted=True)
+# do error correction in lateral acceleration space, convert at end to handle non-linear torque responses correctly
+pid_log.error = float(setpoint - measurement)
+ff = gravity_adjusted_lateral_accel
 ff += get_friction(desired_lateral_accel - actual_lateral_accel, lateral_accel_deadzone, FRICTION_THRESHOLD, self.torque_params)

 # Lateral acceleration torque controller extension updates
@@ -77,17 +80,18 @@ class LatControlTorque(LatControl):
 desired_curvature, actual_curvature)

 freeze_integrator = steer_limited_by_safety or CS.steeringPressed or CS.vEgo < 5
-output_torque = self.pid.update(pid_log.error,
+output_lataccel = self.pid.update(pid_log.error,
 feedforward=ff,
 speed=CS.vEgo,
 freeze_integrator=freeze_integrator)
+output_torque = self.torque_from_lateral_accel(output_lataccel, self.torque_params)

 pid_log.active = True
 pid_log.p = float(self.pid.p)
 pid_log.i = float(self.pid.i)
 pid_log.d = float(self.pid.d)
 pid_log.f = float(self.pid.f)
-pid_log.output = float(-output_torque)
+pid_log.output = float(-output_torque)  # TODO: log lat accel?
 pid_log.actualLateralAccel = float(actual_lateral_accel)
 pid_log.desiredLateralAccel = float(desired_lateral_accel)
 pid_log.saturated = bool(self._check_saturation(self.steer_max - abs(output_torque) < 1e-3, CS, steer_limited_by_safety, curvature_limited))

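A rough sketch of the control-flow change in this hunk: the PID error and feedforward now stay in lateral-acceleration space and only the final command is mapped to torque. The two conversion functions below are simplified linear stand-ins for the car-specific maps referenced in the diff; the gain and test values are made up for illustration.

```python
# Illustration only: linear stand-ins for the torque <-> lateral accel maps.
LAT_ACCEL_FACTOR = 2.5  # hypothetical m/s^2 of lateral accel per unit torque

def lateral_accel_from_torque(torque: float) -> float:
    return torque * LAT_ACCEL_FACTOR

def torque_from_lateral_accel(lat_accel: float) -> float:
    return lat_accel / LAT_ACCEL_FACTOR

# Before: the error was computed in torque space.
# After (this change): the error stays in lateral-acceleration space and the
# PID output is converted to torque once, at the end.
setpoint, measurement = 1.2, 0.9       # desired vs. actual lateral accel, m/s^2
error = setpoint - measurement         # lateral-accel error fed to the PID
pid_output_lataccel = 0.8 * error      # stand-in for self.pid.update(...)
output_torque = torque_from_lateral_accel(pid_output_lataccel)
print(round(output_torque, 3))
```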
@@ -93,12 +93,12 @@ class LongitudinalPlanner(LongitudinalPlannerSP):
 return x, v, a, j, throttle_prob

 def update(self, sm):
-self.mode = 'blended' if sm['selfdriveState'].experimentalMode else 'acc'
+mode = 'blended' if sm['selfdriveState'].experimentalMode else 'acc'
 if not self.mlsim:
-self.mpc.mode = self.mode
+self.mpc.mode = mode
 LongitudinalPlannerSP.update(self, sm)
 if dec_mpc_mode := self.get_mpc_mode():
-self.mode = dec_mpc_mode
+mode = dec_mpc_mode
 if not self.mlsim:
 self.mpc.mode = dec_mpc_mode

@@ -123,7 +123,7 @@ class LongitudinalPlanner(LongitudinalPlannerSP):
 # No change cost when user is controlling the speed, or when standstill
 prev_accel_constraint = not (reset_state or sm['carState'].standstill)

-if self.mode == 'acc':
+if mode == 'acc':
 accel_clip = [ACCEL_MIN, get_max_accel(v_ego)]
 steer_angle_without_offset = sm['carState'].steeringAngleDeg - sm['liveParameters'].angleOffsetDeg
 accel_clip = limit_accel_in_turns(v_ego, steer_angle_without_offset, accel_clip, self.CP)
@@ -173,7 +173,7 @@ class LongitudinalPlanner(LongitudinalPlannerSP):
 output_a_target_e2e = sm['modelV2'].action.desiredAcceleration
 output_should_stop_e2e = sm['modelV2'].action.shouldStop

-if self.mode == 'acc' or not self.mlsim:
+if mode == 'acc' or not self.mlsim:
 output_a_target = output_a_target_mpc
 self.output_should_stop = output_should_stop_mpc
 else:

@@ -5,6 +5,7 @@ from opendbc.car.car_helpers import interfaces
 from opendbc.car.honda.values import CAR as HONDA
 from opendbc.car.toyota.values import CAR as TOYOTA
 from opendbc.car.nissan.values import CAR as NISSAN
+from opendbc.car.gm.values import CAR as GM
 from opendbc.car.vehicle_model import VehicleModel
 from openpilot.selfdrive.car.helpers import convert_to_capnp
 from openpilot.selfdrive.controls.lib.latcontrol_pid import LatControlPID
@@ -17,7 +18,8 @@ from openpilot.sunnypilot.selfdrive.car import interfaces as sunnypilot_interfac

 class TestLatControl:

-@parameterized.expand([(HONDA.HONDA_CIVIC, LatControlPID), (TOYOTA.TOYOTA_RAV4, LatControlTorque), (NISSAN.NISSAN_LEAF, LatControlAngle)])
+@parameterized.expand([(HONDA.HONDA_CIVIC, LatControlPID), (TOYOTA.TOYOTA_RAV4, LatControlTorque),
+(NISSAN.NISSAN_LEAF, LatControlAngle), (GM.CHEVROLET_BOLT_EUV, LatControlTorque)])
 def test_saturation(self, car_name, controller):
 CarInterface = interfaces[car_name]
 CP = CarInterface.get_non_essential_params(car_name)

@@ -107,15 +107,12 @@ class ModelState(ModelStateBase):

 self.full_features_buffer = np.zeros((1, ModelConstants.FULL_HISTORY_BUFFER_LEN, ModelConstants.FEATURE_LEN), dtype=np.float32)
 self.full_desire = np.zeros((1, ModelConstants.FULL_HISTORY_BUFFER_LEN, ModelConstants.DESIRE_LEN), dtype=np.float32)
-self.full_prev_desired_curv = np.zeros((1, ModelConstants.FULL_HISTORY_BUFFER_LEN, ModelConstants.PREV_DESIRED_CURV_LEN), dtype=np.float32)
 self.temporal_idxs = slice(-1-(ModelConstants.TEMPORAL_SKIP*(ModelConstants.INPUT_HISTORY_BUFFER_LEN-1)), None, ModelConstants.TEMPORAL_SKIP)

 # policy inputs
 self.numpy_inputs = {
 'desire': np.zeros((1, ModelConstants.INPUT_HISTORY_BUFFER_LEN, ModelConstants.DESIRE_LEN), dtype=np.float32),
 'traffic_convention': np.zeros((1, ModelConstants.TRAFFIC_CONVENTION_LEN), dtype=np.float32),
-'lateral_control_params': np.zeros((1, ModelConstants.LATERAL_CONTROL_PARAMS_LEN), dtype=np.float32),
-'prev_desired_curv': np.zeros((1, ModelConstants.INPUT_HISTORY_BUFFER_LEN, ModelConstants.PREV_DESIRED_CURV_LEN), dtype=np.float32),
 'features_buffer': np.zeros((1, ModelConstants.INPUT_HISTORY_BUFFER_LEN, ModelConstants.FEATURE_LEN), dtype=np.float32),
 }

@@ -148,7 +145,6 @@ class ModelState(ModelStateBase):
 self.numpy_inputs['desire'][:] = self.full_desire.reshape((1,ModelConstants.INPUT_HISTORY_BUFFER_LEN,ModelConstants.TEMPORAL_SKIP,-1)).max(axis=2)

 self.numpy_inputs['traffic_convention'][:] = inputs['traffic_convention']
-self.numpy_inputs['lateral_control_params'][:] = inputs['lateral_control_params']
 imgs_cl = {name: self.frames[name].prepare(bufs[name], transforms[name].flatten()) for name in self.vision_input_names}

 if TICI and not USBGPU:
@@ -174,11 +170,6 @@ class ModelState(ModelStateBase):
 self.policy_output = self.policy_run(**self.policy_inputs).contiguous().realize().uop.base.buffer.numpy()
 policy_outputs_dict = self.parser.parse_policy_outputs(self.slice_outputs(self.policy_output, self.policy_output_slices))

-# TODO model only uses last value now
-self.full_prev_desired_curv[0,:-1] = self.full_prev_desired_curv[0,1:]
-self.full_prev_desired_curv[0,-1,:] = policy_outputs_dict['desired_curvature'][0, :]
-self.numpy_inputs['prev_desired_curv'][:] = 0*self.full_prev_desired_curv[0, self.temporal_idxs]
-
 combined_outputs_dict = {**vision_outputs_dict, **policy_outputs_dict}
 if SEND_RAW_PRED:
 combined_outputs_dict['raw_pred'] = np.concatenate([self.vision_output.copy(), self.policy_output.copy()])
@@ -299,7 +290,6 @@ def main(demo=False):
 if sm.frame % 60 == 0:
 model.lat_delay = get_lat_delay(params, sm["liveDelay"].lateralDelay)
 lat_delay = model.lat_delay + LAT_SMOOTH_SECONDS
-lateral_control_params = np.array([v_ego, lat_delay], dtype=np.float32)
 if sm.updated["liveCalibration"] and sm.seen['roadCameraState'] and sm.seen['deviceState']:
 device_from_calib_euler = np.array(sm["liveCalibration"].rpyCalib, dtype=np.float32)
 dc = DEVICE_CAMERAS[(str(sm['deviceState'].deviceType), str(sm['roadCameraState'].sensor))]
@@ -332,7 +322,6 @@
 inputs:dict[str, np.ndarray] = {
 'desire': vec_desire,
 'traffic_convention': traffic_convention,
-'lateral_control_params': lateral_control_params,
 }

 mt1 = time.perf_counter()

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1af87c38492444521632a0e75839b5684ee46bf255b3474773784bffb9fe4f57
-size 15583374
+oid sha256:04b763fb71efe57a8a4c4168a8043ecd58939015026ded0dc755ded6905ac251
+size 12343523
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c824f68646a3b94f117f01c70dc8316fb466e05fbd42ccdba440b8a8dc86914b
-size 46265993
+oid sha256:e66bb8d53eced3786ed71a59b55ffc6810944cb217f0518621cc76303260a1ef
+size 46271991

@@ -22,9 +22,10 @@ class Parser:
 self.ignore_missing = ignore_missing

 def check_missing(self, outs, name):
-if name not in outs and not self.ignore_missing:
+missing = name not in outs
+if missing and not self.ignore_missing:
 raise ValueError(f"Missing output {name}")
-return name not in outs
+return missing

 def parse_categorical_crossentropy(self, name, outs, out_shape=None):
 if self.check_missing(outs, name):
@@ -84,6 +85,13 @@
 outs[name] = pred_mu_final.reshape(final_shape)
 outs[name + '_stds'] = pred_std_final.reshape(final_shape)

+def is_mhp(self, outs, name, shape):
+if self.check_missing(outs, name):
+return False
+if outs[name].shape[1] == 2 * shape:
+return False
+return True
+
 def parse_vision_outputs(self, outs: dict[str, np.ndarray]) -> dict[str, np.ndarray]:
 self.parse_mdn('pose', outs, in_N=0, out_N=0, out_shape=(ModelConstants.POSE_WIDTH,))
 self.parse_mdn('wide_from_device_euler', outs, in_N=0, out_N=0, out_shape=(ModelConstants.WIDE_FROM_DEVICE_WIDTH,))
@@ -94,23 +102,17 @@
 self.parse_categorical_crossentropy('desire_pred', outs, out_shape=(ModelConstants.DESIRE_PRED_LEN,ModelConstants.DESIRE_PRED_WIDTH))
 self.parse_binary_crossentropy('meta', outs)
 self.parse_binary_crossentropy('lead_prob', outs)
-if outs['lead'].shape[1] == 2 * ModelConstants.LEAD_MHP_SELECTION *ModelConstants.LEAD_TRAJ_LEN * ModelConstants.LEAD_WIDTH:
-self.parse_mdn('lead', outs, in_N=0, out_N=0,
-out_shape=(ModelConstants.LEAD_MHP_SELECTION, ModelConstants.LEAD_TRAJ_LEN,ModelConstants.LEAD_WIDTH))
-else:
-self.parse_mdn('lead', outs, in_N=ModelConstants.LEAD_MHP_N, out_N=ModelConstants.LEAD_MHP_SELECTION,
-out_shape=(ModelConstants.LEAD_TRAJ_LEN,ModelConstants.LEAD_WIDTH))
+lead_mhp = self.is_mhp(outs, 'lead', ModelConstants.LEAD_MHP_SELECTION * ModelConstants.LEAD_TRAJ_LEN * ModelConstants.LEAD_WIDTH)
+lead_in_N, lead_out_N = (ModelConstants.LEAD_MHP_N, ModelConstants.LEAD_MHP_SELECTION) if lead_mhp else (0, 0)
+lead_out_shape = (ModelConstants.LEAD_TRAJ_LEN, ModelConstants.LEAD_WIDTH) if lead_mhp else \
+(ModelConstants.LEAD_MHP_SELECTION, ModelConstants.LEAD_TRAJ_LEN, ModelConstants.LEAD_WIDTH)
+self.parse_mdn('lead', outs, in_N=lead_in_N, out_N=lead_out_N, out_shape=lead_out_shape)
 return outs

 def parse_policy_outputs(self, outs: dict[str, np.ndarray]) -> dict[str, np.ndarray]:
-if outs['plan'].shape[1] == 2 * ModelConstants.IDX_N * ModelConstants.PLAN_WIDTH:
-self.parse_mdn('plan', outs, in_N=0, out_N=0,
-out_shape=(ModelConstants.IDX_N,ModelConstants.PLAN_WIDTH))
-else:
-self.parse_mdn('plan', outs, in_N=ModelConstants.PLAN_MHP_N, out_N=ModelConstants.PLAN_MHP_SELECTION,
-out_shape=(ModelConstants.IDX_N,ModelConstants.PLAN_WIDTH))
-if 'desired_curvature' in outs:
-self.parse_mdn('desired_curvature', outs, in_N=0, out_N=0, out_shape=(ModelConstants.DESIRED_CURV_WIDTH,))
+plan_mhp = self.is_mhp(outs, 'plan', ModelConstants.IDX_N * ModelConstants.PLAN_WIDTH)
+plan_in_N, plan_out_N = (ModelConstants.PLAN_MHP_N, ModelConstants.PLAN_MHP_SELECTION) if plan_mhp else (0, 0)
+self.parse_mdn('plan', outs, in_N=plan_in_N, out_N=plan_out_N, out_shape=(ModelConstants.IDX_N, ModelConstants.PLAN_WIDTH))
 self.parse_categorical_crossentropy('desire_state', outs, out_shape=(ModelConstants.DESIRE_PRED_WIDTH,))
 return outs

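A standalone sketch of the shape test the new `is_mhp` helper encapsulates: a single-hypothesis head packs exactly mean plus standard deviation, i.e. `2 * shape` values per row, while a multi-hypothesis (MHP) head is wider because it carries several hypotheses and their weights. The sizes below are invented stand-ins for the `ModelConstants` values.

```python
import numpy as np

def is_mhp(out: np.ndarray, shape: int) -> bool:
    # Exactly mean+std for one hypothesis -> not MHP; any other width -> MHP.
    return out.shape[1] != 2 * shape

shape = 3 * 6 * 4                     # stand-in for SELECTION * TRAJ_LEN * WIDTH
single = np.zeros((1, 2 * shape))     # non-MHP output layout
mixture = np.zeros((1, 2 * shape + 10))  # wider, hypothetical MHP layout

print(is_mhp(single, shape))   # False -> parse with in_N=0, out_N=0
print(is_mhp(mixture, shape))  # True  -> parse with the MHP_N / MHP_SELECTION path
```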
@@ -93,7 +93,7 @@ def main() -> None:

 # TODO: remove this in the next AGNOS
 # wait until USB is up before counting
-if time.monotonic() < 35.:
+if time.monotonic() < 60.:
 no_internal_panda_count = 0

 # Handle missing internal panda

@@ -265,7 +265,10 @@ class SelfdriveD(CruiseHelper):
 if self.sm['driverAssistance'].leftLaneDeparture or self.sm['driverAssistance'].rightLaneDeparture:
 self.events.add(EventName.ldw)

-# Check for excessive actuation
+# ******************************************************************************************
+# NOTE: To fork maintainers.
+# Disabling or nerfing safety features will get you and your users banned from our servers.
+# We recommend that you do not change these numbers from the defaults.
 if self.sm.updated['liveCalibration']:
 self.pose_calibrator.feed_live_calib(self.sm['liveCalibration'])
 if self.sm.updated['livePose']:
@@ -280,6 +283,7 @@

 if self.excessive_actuation:
 self.events.add(EventName.excessiveActuation)
+# ******************************************************************************************

 # Handle lane change
 if self.sm['modelV2'].meta.laneChangeState == LaneChangeState.preLaneChange:

@@ -1 +1 @@
-543bd2347fa35f8300478a3893fdd0a03a7c1fe6
+6d3219bca9f66a229b38a5382d301a92b0147edb

@@ -63,7 +63,7 @@ segments = [
 ]

 # dashcamOnly makes don't need to be tested until a full port is done
-excluded_interfaces = ["mock", "body"]
+excluded_interfaces = ["mock", "body", "psa"]

 BASE_URL = "https://commadataci.blob.core.windows.net/openpilotci/"
 REF_COMMIT_FN = os.path.join(PROC_REPLAY_DIR, "ref_commit")

@@ -333,20 +333,18 @@ class TestOnroad:
 assert np.all(eof_sof_diff > 0)
 assert np.all(eof_sof_diff < 50*1e6)

-first_fid = {c: min(self.ts[c]['frameId']) for c in cams}
+first_fid = {min(self.ts[c]['frameId']) for c in cams}
+assert len(first_fid) == 1, "Cameras don't start on same frame ID"
 if cam.endswith('CameraState'):
 # camerad guarantees that all cams start on frame ID 0
 # (note loggerd also needs to start up fast enough to catch it)
-assert set(first_fid.values()) == {0, }, "Cameras don't start on frame ID 0"
-else:
-# encoder guarantees all cams start on the same frame ID
-assert len(set(first_fid.values())) == 1, "Cameras don't start on same frame ID"
+assert next(iter(first_fid)) < 100, "Cameras start on frame ID too high"

 # we don't do a full segment rotation, so these might not match exactly
-last_fid = {c: max(self.ts[c]['frameId']) for c in cams}
-assert max(last_fid.values()) - min(last_fid.values()) < 10
+last_fid = {max(self.ts[c]['frameId']) for c in cams}
+assert max(last_fid) - min(last_fid) < 10

-start, end = min(first_fid.values()), min(last_fid.values())
+start, end = min(first_fid), min(last_fid)
 for i in range(end-start):
 ts = {c: round(self.ts[c]['timestampSof'][i]/1e6, 1) for c in cams}
 diff = (max(ts.values()) - min(ts.values()))

@@ -22,8 +22,9 @@ def main():
 sm.update()
 should_send_bookmark = False

+# TODO: https://github.com/commaai/openpilot/issues/36015
 # only allow the LKAS button to record feedback when MADS is disabled
-if sm.updated['carState'] and sm['carState'].canValid and not sm['selfdriveStateSP'].mads.available:
+if False and sm.updated['carState'] and sm['carState'].canValid and not sm['selfdriveStateSP'].mads.available:
 for be in sm['carState'].buttonEvents:
 if be.type == ButtonType.lkas:
 if be.pressed:

@@ -24,11 +24,13 @@ const std::string BRANCH_STR = get_str(BRANCH "?
 #define GIT_SSH_URL "git@github.com:commaai/openpilot.git"
 #define CONTINUE_PATH "/data/continue.sh"

-const std::string CACHE_PATH = "/data/openpilot.cache";
+const std::string INSTALL_PATH = "/data/openpilot";
+const std::string VALID_CACHE_PATH = "/data/.openpilot_cache";

-#define INSTALL_PATH "/data/openpilot"
 #define TMP_INSTALL_PATH "/data/tmppilot"

+const int FONT_SIZE = 120;
+
 extern const uint8_t str_continue[] asm("_binary_selfdrive_ui_installer_continue_openpilot_sh_start");
 extern const uint8_t str_continue_end[] asm("_binary_selfdrive_ui_installer_continue_openpilot_sh_end");
 extern const uint8_t inter_ttf[] asm("_binary_selfdrive_ui_installer_inter_ascii_ttf_start");
@@ -41,6 +43,16 @@ void run(const char* cmd) {
 assert(err == 0);
 }

+void finishInstall() {
+BeginDrawing();
+ClearBackground(BLACK);
+const char *m = "Finishing install...";
+int text_width = MeasureText(m, FONT_SIZE);
+DrawTextEx(font, m, (Vector2){(float)(GetScreenWidth() - text_width)/2 + FONT_SIZE, (float)(GetScreenHeight() - FONT_SIZE)/2}, FONT_SIZE, 0, WHITE);
+EndDrawing();
+util::sleep_for(60 * 1000);
+}
+
 void renderProgress(int progress) {
 BeginDrawing();
 ClearBackground(BLACK);
@@ -62,11 +74,11 @@ int doInstall() {
 }

 // cleanup previous install attempts
-run("rm -rf " TMP_INSTALL_PATH " " INSTALL_PATH);
+run("rm -rf " TMP_INSTALL_PATH);

 // do the install
-if (util::file_exists(CACHE_PATH)) {
-return cachedFetch(CACHE_PATH);
+if (util::file_exists(INSTALL_PATH) && util::file_exists(VALID_CACHE_PATH)) {
+return cachedFetch(INSTALL_PATH);
 } else {
 return freshClone();
 }
@@ -135,7 +147,9 @@ void cloneFinished(int exitCode) {
 run("git submodule update --init");

 // move into place
-run("mv " TMP_INSTALL_PATH " " INSTALL_PATH);
+run(("rm -f " + VALID_CACHE_PATH).c_str());
+run(("rm -rf " + INSTALL_PATH).c_str());
+run(util::string_format("mv %s %s", TMP_INSTALL_PATH, INSTALL_PATH.c_str()).c_str());

 #ifdef INTERNAL
 run("mkdir -p /data/params/d/");
@@ -153,9 +167,9 @@
 param << value;
 param.close();
 }
-run("cd " INSTALL_PATH " && "
+run(("cd " + INSTALL_PATH + " && "
 "git remote set-url origin --push " GIT_SSH_URL " && "
-"git config --replace-all remote.origin.fetch \"+refs/heads/*:refs/remotes/origin/*\"");
+"git config --replace-all remote.origin.fetch \"+refs/heads/*:refs/remotes/origin/*\"").c_str());
 #endif

 // write continue.sh
@@ -171,16 +185,22 @@
 run("mv /data/continue.sh.new " CONTINUE_PATH);

 // wait for the installed software's UI to take over
-util::sleep_for(60 * 1000);
+finishInstall();
 }

 int main(int argc, char *argv[]) {
 InitWindow(2160, 1080, "Installer");
-font = LoadFontFromMemory(".ttf", inter_ttf, inter_ttf_end - inter_ttf, 120, NULL, 0);
+font = LoadFontFromMemory(".ttf", inter_ttf, inter_ttf_end - inter_ttf, FONT_SIZE, NULL, 0);
 SetTextureFilter(font.texture, TEXTURE_FILTER_BILINEAR);
-renderProgress(0);
-int result = doInstall();
-cloneFinished(result);
+if (util::file_exists(CONTINUE_PATH)) {
+finishInstall();
+} else {
+renderProgress(0);
+int result = doInstall();
+cloneFinished(result);
+}

 CloseWindow();
 UnloadFont(font);
 return 0;

@@ -23,10 +23,6 @@ DESCRIPTIONS = {
 'RecordFront': "Upload data from the driver facing camera and help improve the driver monitoring algorithm.",
 "IsMetric": "Display speed in km/h instead of mph.",
 "RecordAudio": "Record and store microphone audio while driving. The audio will be included in the dashcam video in comma connect.",
-"RecordAudioFeedback": (
-"Press the LKAS button to record audio feedback about openpilot. When this toggle is disabled, the button acts as a bookmark button. " +
-"The event will be highlighted in comma connect and the segment will be preserved on your device's storage."
-),
 }


@@ -85,12 +81,6 @@ class TogglesLayout(Widget):
 self._params.get_bool("RecordAudio"),
 icon="microphone.png",
 ),
-toggle_item(
-"Record Audio Feedback with LKAS button",
-DESCRIPTIONS["RecordAudioFeedback"],
-self._params.get_bool("RecordAudioFeedback"),
-icon="microphone.png",
-),
 toggle_item(
 "Use Metric System", DESCRIPTIONS["IsMetric"], self._params.get_bool("IsMetric"), icon="metric.png"
 ),

@@ -2,12 +2,15 @@ from enum import IntEnum
 import os
 import threading
 import time
+from functools import lru_cache

 from openpilot.common.api import Api, api_get
 from openpilot.common.params import Params
 from openpilot.common.swaglog import cloudlog
 from openpilot.system.athena.registration import UNREGISTERED_DONGLE_ID

+TOKEN_EXPIRY_HOURS = 2
+
+
 class PrimeType(IntEnum):
 UNKNOWN = -2,
@@ -20,6 +23,12 @@ class PrimeType(IntEnum):
 PURPLE = 5,


+@lru_cache(maxsize=1)
+def get_token(dongle_id: str, t: int):
+print('getting token')
+return Api(dongle_id).get_token(expiry_hours=TOKEN_EXPIRY_HOURS)
+
+
 class PrimeState:
 FETCH_INTERVAL = 5.0 # seconds between API calls
 API_TIMEOUT = 10.0 # seconds for API requests
@@ -49,13 +58,15 @@ class PrimeState:
 return

 try:
-identity_token = Api(dongle_id).get_token()
+identity_token = get_token(dongle_id, int(time.monotonic() / (TOKEN_EXPIRY_HOURS / 2 * 60 * 60)))
 response = api_get(f"v1.1/devices/{dongle_id}", timeout=self.API_TIMEOUT, access_token=identity_token)
 if response.status_code == 200:
 data = response.json()
 is_paired = data.get("is_paired", False)
 prime_type = data.get("prime_type", 0)
 self.set_type(PrimeType(prime_type) if is_paired else PrimeType.UNPAIRED)
+elif response.status_code == 401:
+get_token.cache_clear()
 except Exception as e:
 cloudlog.error(f"Failed to fetch prime status: {e}")

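A small self-contained sketch of the caching trick used above: the extra integer argument is a time bucket, so `@lru_cache` returns the same token until the bucket rolls over (half the token lifetime here), and a 401 response can force a refresh via `cache_clear()`. The token-minting function below is a stand-in, not the real `Api` client.

```python
import time
from functools import lru_cache

TOKEN_EXPIRY_HOURS = 2

@lru_cache(maxsize=1)
def get_token(dongle_id: str, bucket: int) -> str:
    # Stand-in for Api(dongle_id).get_token(expiry_hours=TOKEN_EXPIRY_HOURS)
    return f"token-for-{dongle_id}-bucket-{bucket}"

def current_bucket() -> int:
    # Bucket width is half the token lifetime, so a cached token is always
    # replaced well before it expires.
    return int(time.monotonic() / (TOKEN_EXPIRY_HOURS / 2 * 60 * 60))

tok1 = get_token("1234abcd", current_bucket())
tok2 = get_token("1234abcd", current_bucket())
assert tok1 is tok2  # served from the cache within the same bucket

# On an HTTP 401 the cache is dropped so the next call mints a fresh token.
get_token.cache_clear()
```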
@@ -187,9 +187,9 @@ class ModelRenderer(Widget):
       self._path.raw_points, 0.9, self._path_offset_z, max_idx, allow_invert=False
     )

-    self._update_experimental_gradient(self._rect.height)
+    self._update_experimental_gradient()

-  def _update_experimental_gradient(self, height):
+  def _update_experimental_gradient(self):
     """Pre-calculate experimental mode gradient colors"""
     if not self._experimental_mode:
       return
@@ -201,22 +201,21 @@ class ModelRenderer(Widget):

     i = 0
     while i < max_len:
-      track_idx = max_len - i - 1  # flip idx to start from bottom right
-      track_y = self._path.projected_points[track_idx][1]
-      if track_y < 0 or track_y > height:
+      # Some points (screen space) are out of frame (rect space)
+      track_y = self._path.projected_points[i][1]
+      if track_y < self._rect.y or track_y > (self._rect.y + self._rect.height):
         i += 1
         continue

-      # Calculate color based on acceleration
-      lin_grad_point = (height - track_y) / height
+      # Calculate color based on acceleration (0 is bottom, 1 is top)
+      lin_grad_point = 1 - (track_y - self._rect.y) / self._rect.height

       # speed up: 120, slow down: 0
-      path_hue = max(min(60 + self._acceleration_x[i] * 35, 120), 0)
-      path_hue = int(path_hue * 100 + 0.5) / 100
+      path_hue = np.clip(60 + self._acceleration_x[i] * 35, 0, 120)

       saturation = min(abs(self._acceleration_x[i] * 1.5), 1)
-      lightness = self._map_val(saturation, 0.0, 1.0, 0.95, 0.62)
-      alpha = self._map_val(lin_grad_point, 0.75 / 2.0, 0.75, 0.4, 0.0)
+      lightness = np.interp(saturation, [0.0, 1.0], [0.95, 0.62])
+      alpha = np.interp(lin_grad_point, [0.75 / 2.0, 0.75], [0.4, 0.0])

       # Use HSL to RGB conversion
       color = self._hsla_to_color(path_hue / 360.0, saturation, lightness, alpha)
@@ -280,7 +279,7 @@ class ModelRenderer(Widget):

     if self._experimental_mode:
       # Draw with acceleration coloring
-      if len(self._exp_gradient['colors']) > 2:
+      if len(self._exp_gradient['colors']) > 1:
         draw_polygon(self._rect, self._path.projected_points, gradient=self._exp_gradient)
       else:
         draw_polygon(self._rect, self._path.projected_points, rl.Color(255, 255, 255, 30))
@@ -409,13 +408,6 @@ class ModelRenderer(Widget):

     return np.vstack((left_screen.T, right_screen[:, ::-1].T)).astype(np.float32)

-  @staticmethod
-  def _map_val(x, x0, x1, y0, y1):
-    x = np.clip(x, x0, x1)
-    ra = x1 - x0
-    rb = y1 - y0
-    return (x - x0) * rb / ra + y0 if ra != 0 else y0
-
   @staticmethod
   def _hsla_to_color(h, s, l, a):
     rgb = colorsys.hls_to_rgb(h, l, s)
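The _map_val helper removed above is a clamped two-point linear remap, which is what np.interp already does, so the renderer can call NumPy directly. A quick standalone check of the equivalence for the lightness mapping used in the hunk:

import numpy as np


def map_val(x, x0, x1, y0, y1):
  # the old helper: clamp x into [x0, x1], then remap linearly onto [y0, y1]
  x = np.clip(x, x0, x1)
  ra = x1 - x0
  rb = y1 - y0
  return (x - x0) * rb / ra + y0 if ra != 0 else y0


for v in (-0.5, 0.0, 0.3, 0.75, 1.2):
  old = map_val(v, 0.0, 1.0, 0.95, 0.62)
  new = np.interp(v, [0.0, 1.0], [0.95, 0.62])
  assert abs(old - new) < 1e-12, (v, old, new)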
@@ -75,13 +75,6 @@ TogglesPanel::TogglesPanel(SettingsWindow *parent) : ListWidget(parent) {
       "../assets/icons/microphone.png",
       true,
     },
-    {
-      "RecordAudioFeedback",
-      tr("Record Audio Feedback with LKAS button"),
-      tr("Press the LKAS button to record and share driving feedback with the openpilot team. When this toggle is disabled, the button acts as a bookmark button. The event will be highlighted in comma connect and the segment will be preserved on your device's storage.\n\nNote that this feature is only compatible with select cars."),
-      "../assets/icons/microphone.png",
-      false,
-    },
     {
       "IsMetric",
       tr("Use Metric System"),
@@ -5,6 +5,7 @@ from openpilot.common.params import Params
 from openpilot.system.manager.process_config import managed_processes


+@pytest.mark.skip("tmp disabled")
 class TestFeedbackd:
   def setup_method(self):
     self.pm = messaging.PubMaster(['carState', 'rawAudioData'])
@@ -34,7 +34,7 @@ class OsmMapData(BaseMapData):
     return float(self.mem_params.get("MapSpeedLimit") or 0.0)

   def get_current_road_name(self) -> str:
-    return str(self.mem_params.get("RoadName"))
+    return str(self.mem_params.get("RoadName") or "")

   def get_next_speed_limit_and_distance(self) -> tuple[float, float]:
     next_speed_limit_section_str = self.mem_params.get("NextMapSpeedLimit")
@@ -90,7 +90,7 @@ class MapdInstallManager:
     logging.error("Failed to download file after all retries")

   def get_installed_version(self) -> str:
-    return str(self._params.get("MapdVersion"))
+    return str(self._params.get("MapdVersion") or "")

   def wait_for_internet_connection(self, return_on_failure: bool = False) -> bool:
     max_retries = 10
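Both mapd hunks add the same guard: Params.get returns None for an unset key, and str(None) is the literal string "None", so the trailing or "" keeps an empty string instead. Illustrated with a plain dict standing in for Params (the dict is only for the example):

mem_params = {}  # stand-in for Params(); get() returns None for unset keys

road_name_bad = str(mem_params.get("RoadName"))        # -> "None"
road_name_ok = str(mem_params.get("RoadName") or "")   # -> ""

assert road_name_bad == "None"
assert road_name_ok == ""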
@@ -1 +1 @@
-cee4a5f34c3c741fd67e4f130a7c21fd92258c9abfc0416c4d619d94e08a72eb
+2ff2f49176a13bc7f856645d785b3b838a5c7ecf7f6cb37699fa0459ebf12453
@@ -1,34 +0,0 @@
-#if SENSOR_ID == 1
-
-#define VIGNETTE_PROFILE_8DT0MM
-
-#define BIT_DEPTH 12
-#define PV_MAX 4096
-#define BLACK_LVL 168
-
-float4 normalize_pv(int4 parsed, float vignette_factor) {
-  float4 pv = (convert_float4(parsed) - BLACK_LVL) / (PV_MAX - BLACK_LVL);
-  return clamp(pv*vignette_factor, 0.0, 1.0);
-}
-
-float3 color_correct(float3 rgb) {
-  float3 corrected = rgb.x * (float3)(1.82717181, -0.31231438, 0.07307673);
-  corrected += rgb.y * (float3)(-0.5743977, 1.36858544, -0.53183455);
-  corrected += rgb.z * (float3)(-0.25277411, -0.05627105, 1.45875782);
-  return corrected;
-}
-
-float3 apply_gamma(float3 rgb, int expo_time) {
-  // tone mapping params
-  const float gamma_k = 0.75;
-  const float gamma_b = 0.125;
-  const float mp = 0.01; // ideally midpoint should be adaptive
-  const float rk = 9 - 100*mp;
-
-  // poly approximation for s curve
-  return (rgb > mp) ?
-    ((rk * (rgb-mp) * (1-(gamma_k*mp+gamma_b)) * (1+1/(rk*(1-mp))) / (1+rk*(rgb-mp))) + gamma_k*mp + gamma_b) :
-    ((rk * (rgb-mp) * (gamma_k*mp+gamma_b) * (1+1/(rk*mp)) / (1-rk*(rgb-mp))) + gamma_k*mp + gamma_b);
-}
-
-#endif
@@ -1,58 +0,0 @@
-#if SENSOR_ID == 3
-
-#define BGGR
-#define VIGNETTE_PROFILE_4DT6MM
-
-#define BIT_DEPTH 12
-#define PV_MAX10 1023
-#define PV_MAX12 4095
-#define PV_MAX16 65536 // gamma curve is calibrated to 16bit
-#define BLACK_LVL 48
-
-float combine_dual_pvs(float lv, float sv, int expo_time) {
-  float svc = fmax(sv * expo_time, (float)(64 * (PV_MAX10 - BLACK_LVL)));
-  float svd = sv * fmin(expo_time, 8.0) / 8;
-
-  if (expo_time > 64) {
-    if (lv < PV_MAX10 - BLACK_LVL) {
-      return lv / (PV_MAX16 - BLACK_LVL);
-    } else {
-      return (svc / 64) / (PV_MAX16 - BLACK_LVL);
-    }
-  } else {
-    if (lv > 32) {
-      return (lv * 64 / fmax(expo_time, 8.0)) / (PV_MAX16 - BLACK_LVL);
-    } else {
-      return svd / (PV_MAX16 - BLACK_LVL);
-    }
-  }
-}
-
-float4 normalize_pv_hdr(int4 parsed, int4 short_parsed, float vignette_factor, int expo_time) {
-  float4 pl = convert_float4(parsed - BLACK_LVL);
-  float4 ps = convert_float4(short_parsed - BLACK_LVL);
-  float4 pv;
-  pv.s0 = combine_dual_pvs(pl.s0, ps.s0, expo_time);
-  pv.s1 = combine_dual_pvs(pl.s1, ps.s1, expo_time);
-  pv.s2 = combine_dual_pvs(pl.s2, ps.s2, expo_time);
-  pv.s3 = combine_dual_pvs(pl.s3, ps.s3, expo_time);
-  return clamp(pv*vignette_factor, 0.0, 1.0);
-}
-
-float4 normalize_pv(int4 parsed, float vignette_factor) {
-  float4 pv = (convert_float4(parsed) - BLACK_LVL) / (PV_MAX12 - BLACK_LVL);
-  return clamp(pv*vignette_factor, 0.0, 1.0);
-}
-
-float3 color_correct(float3 rgb) {
-  float3 corrected = rgb.x * (float3)(1.55361989, -0.268894615, -0.000593219);
-  corrected += rgb.y * (float3)(-0.421217301, 1.51883144, -0.69760146);
-  corrected += rgb.z * (float3)(-0.132402589, -0.249936825, 1.69819468);
-  return corrected;
-}
-
-float3 apply_gamma(float3 rgb, int expo_time) {
-  return (10 * rgb) / (1 + 9 * rgb);
-}
-
-#endif
@@ -1,47 +0,0 @@
-#if SENSOR_ID == 2
-
-#define VIGNETTE_PROFILE_8DT0MM
-
-#define BIT_DEPTH 12
-#define BLACK_LVL 64
-
-float ox_lut_func(int x) {
-  if (x < 512) {
-    return x * 5.94873e-8;
-  } else if (512 <= x && x < 768) {
-    return 3.0458e-05 + (x-512) * 1.19913e-7;
-  } else if (768 <= x && x < 1536) {
-    return 6.1154e-05 + (x-768) * 2.38493e-7;
-  } else if (1536 <= x && x < 1792) {
-    return 0.0002448 + (x-1536) * 9.56930e-7;
-  } else if (1792 <= x && x < 2048) {
-    return 0.00048977 + (x-1792) * 1.91441e-6;
-  } else if (2048 <= x && x < 2304) {
-    return 0.00097984 + (x-2048) * 3.82937e-6;
-  } else if (2304 <= x && x < 2560) {
-    return 0.0019601 + (x-2304) * 7.659055e-6;
-  } else if (2560 <= x && x < 2816) {
-    return 0.0039207 + (x-2560) * 1.525e-5;
-  } else {
-    return 0.0078421 + (exp((x-2816)/273.0) - 1) * 0.0092421;
-  }
-}
-
-float4 normalize_pv(int4 parsed, float vignette_factor) {
-  // PWL
-  float4 pv = {ox_lut_func(parsed.s0), ox_lut_func(parsed.s1), ox_lut_func(parsed.s2), ox_lut_func(parsed.s3)};
-  return clamp(pv*vignette_factor*256.0, 0.0, 1.0);
-}
-
-float3 color_correct(float3 rgb) {
-  float3 corrected = rgb.x * (float3)(1.5664815, -0.29808738, -0.03973474);
-  corrected += rgb.y * (float3)(-0.48672447, 1.41914433, -0.40295248);
-  corrected += rgb.z * (float3)(-0.07975703, -0.12105695, 1.44268722);
-  return corrected;
-}
-
-float3 apply_gamma(float3 rgb, int expo_time) {
-  return -0.507089*exp(-12.54124638*rgb) + 0.9655*powr(rgb, 0.5) - 0.472597*rgb + 0.507089;
-}
-
-#endif
@@ -21,16 +21,16 @@ class TiciFanController(BaseFanController):
     self.controller = PIDController(k_p=0, k_i=4e-3, k_f=1, rate=(1 / DT_HW))

   def update(self, cur_temp: float, ignition: bool) -> int:
-    self.controller.neg_limit = -(100 if ignition else 30)
-    self.controller.pos_limit = -(30 if ignition else 0)
+    self.controller.pos_limit = 100 if ignition else 30
+    self.controller.neg_limit = 30 if ignition else 0

     if ignition != self.last_ignition:
       self.controller.reset()

-    error = 75 - cur_temp
-    fan_pwr_out = -int(self.controller.update(
+    error = cur_temp - 75
+    fan_pwr_out = int(self.controller.update(
       error=error,
-      feedforward=np.interp(cur_temp, [60.0, 100.0], [0, -100])
+      feedforward=np.interp(cur_temp, [60.0, 100.0], [0, 100])
     ))

     self.last_ignition = ignition
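The fan-controller hunk reads as a sign-convention flip rather than a behaviour change: error, feedforward, the output sign, and the limits are all negated together. A small numeric check of that equivalence with a bare proportional-plus-feedforward term (a sketch; the real PIDController also has an integrator and clamps its output to the limits set above):

import numpy as np


def old_style(cur_temp: float, k: float = 2.0) -> int:
  error = 75 - cur_temp
  ff = np.interp(cur_temp, [60.0, 100.0], [0, -100])
  return -int(k * error + ff)


def new_style(cur_temp: float, k: float = 2.0) -> int:
  error = cur_temp - 75
  ff = np.interp(cur_temp, [60.0, 100.0], [0, 100])
  return int(k * error + ff)


# same commanded fan power either way
for temp in (60.0, 75.0, 85.0, 100.0):
  assert old_style(temp) == new_style(temp)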
@@ -67,17 +67,17 @@
   },
   {
     "name": "system",
-    "url": "https://commadist.azureedge.net/agnosupdate/system-18100d9065bb44a315262041b9fb6bfd9e59179981876e442200cc1284d43643.img.xz",
-    "hash": "49faee0e9b084abf0ea46f87722e3366bbd0435fb6b25cce189295c1ff368da1",
-    "hash_raw": "18100d9065bb44a315262041b9fb6bfd9e59179981876e442200cc1284d43643",
+    "url": "https://commadist.azureedge.net/agnosupdate/system-e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087.img.xz",
+    "hash": "1468d50b7ad0fda0f04074755d21e786e3b1b6ca5dd5b17eb2608202025e6126",
+    "hash_raw": "e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087",
     "size": 5368709120,
     "sparse": true,
     "full_check": false,
     "has_ab": true,
-    "ondevice_hash": "db07761be0130e35a9d3ea6bec8df231260d3e767ae770850f18f10e14d0ab3f",
+    "ondevice_hash": "242aa5adad1c04e1398e00e2440d1babf962022eb12b89adf2e60ee3068946e7",
     "alt": {
-      "hash": "18100d9065bb44a315262041b9fb6bfd9e59179981876e442200cc1284d43643",
-      "url": "https://commadist.azureedge.net/agnosupdate/system-18100d9065bb44a315262041b9fb6bfd9e59179981876e442200cc1284d43643.img",
+      "hash": "e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087",
+      "url": "https://commadist.azureedge.net/agnosupdate/system-e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087.img",
       "size": 5368709120
     }
   }
@@ -350,51 +350,51 @@
   },
   {
     "name": "system",
-    "url": "https://commadist.azureedge.net/agnosupdate/system-18100d9065bb44a315262041b9fb6bfd9e59179981876e442200cc1284d43643.img.xz",
-    "hash": "49faee0e9b084abf0ea46f87722e3366bbd0435fb6b25cce189295c1ff368da1",
-    "hash_raw": "18100d9065bb44a315262041b9fb6bfd9e59179981876e442200cc1284d43643",
+    "url": "https://commadist.azureedge.net/agnosupdate/system-e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087.img.xz",
+    "hash": "1468d50b7ad0fda0f04074755d21e786e3b1b6ca5dd5b17eb2608202025e6126",
+    "hash_raw": "e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087",
     "size": 5368709120,
     "sparse": true,
     "full_check": false,
     "has_ab": true,
-    "ondevice_hash": "db07761be0130e35a9d3ea6bec8df231260d3e767ae770850f18f10e14d0ab3f",
+    "ondevice_hash": "242aa5adad1c04e1398e00e2440d1babf962022eb12b89adf2e60ee3068946e7",
     "alt": {
-      "hash": "18100d9065bb44a315262041b9fb6bfd9e59179981876e442200cc1284d43643",
-      "url": "https://commadist.azureedge.net/agnosupdate/system-18100d9065bb44a315262041b9fb6bfd9e59179981876e442200cc1284d43643.img",
+      "hash": "e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087",
+      "url": "https://commadist.azureedge.net/agnosupdate/system-e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087.img",
       "size": 5368709120
     }
   },
   {
     "name": "userdata_90",
-    "url": "https://commadist.azureedge.net/agnosupdate/userdata_90-02f7abb4b667c04043c0c6950145aaebd704851261f32256d0f7e84a52059dda.img.xz",
-    "hash": "1eda66d4e31222fc2e792a62ae8e7d322fc643f0b23785e7527bb51a9fee97c7",
-    "hash_raw": "02f7abb4b667c04043c0c6950145aaebd704851261f32256d0f7e84a52059dda",
+    "url": "https://commadist.azureedge.net/agnosupdate/userdata_90-602d5103cba97e1b07f76508d5febb47cfc4463a7e31bd20e461b55c801feb0a.img.xz",
+    "hash": "6a11d448bac50467791809339051eed2894aae971c37bf6284b3b972a99ba3ac",
+    "hash_raw": "602d5103cba97e1b07f76508d5febb47cfc4463a7e31bd20e461b55c801feb0a",
     "size": 96636764160,
     "sparse": true,
     "full_check": true,
     "has_ab": false,
-    "ondevice_hash": "679b650ee04b7b1ef610b63fde9b43569fded39ceacf88789b564de99c221ea1"
+    "ondevice_hash": "e014d92940a696bf8582807259820ab73948b950656ed83a45da738f26083705"
   },
   {
     "name": "userdata_89",
-    "url": "https://commadist.azureedge.net/agnosupdate/userdata_89-bab8399bbe3968f3c496f7bc83c2541b33acc1f47814c4ad95801bf5cb7e7588.img.xz",
-    "hash": "e63d3277285aae1f04fd7f4f48429ce35010f4843ab755f10d360c3aa788e484",
-    "hash_raw": "bab8399bbe3968f3c496f7bc83c2541b33acc1f47814c4ad95801bf5cb7e7588",
+    "url": "https://commadist.azureedge.net/agnosupdate/userdata_89-4d7f6d12a5557eb6e3cbff9a4cd595677456fdfddcc879eddcea96a43a9d8b48.img.xz",
+    "hash": "748e31a5fc01fc256c012e359c3382d1f98cce98feafe8ecc0fca3e47caef116",
+    "hash_raw": "4d7f6d12a5557eb6e3cbff9a4cd595677456fdfddcc879eddcea96a43a9d8b48",
     "size": 95563022336,
     "sparse": true,
     "full_check": true,
     "has_ab": false,
-    "ondevice_hash": "2947374fc5980ffe3c5b94b61cc1c81bc55214f494153ed234164801731f5dc0"
+    "ondevice_hash": "c181b93050787adcfef730c086bcb780f28508d84e6376d9b80d37e5dc02b55e"
   },
   {
     "name": "userdata_30",
-    "url": "https://commadist.azureedge.net/agnosupdate/userdata_30-22c874b4b66bbc000f3219abede8d62cb307f5786fd526a8473c61422765dea0.img.xz",
-    "hash": "12d9245711e8c49c51ff2c7b82d7301f2fcb1911edcddb35a105a80911859113",
-    "hash_raw": "22c874b4b66bbc000f3219abede8d62cb307f5786fd526a8473c61422765dea0",
+    "url": "https://commadist.azureedge.net/agnosupdate/userdata_30-80a76c8e56bbd7536fd5e87e8daa12984e2960db4edeb1f83229b2baeecc4668.img.xz",
+    "hash": "09ff390e639e4373d772e1688d05a5ac77a573463ed1deeff86390686fa686f9",
+    "hash_raw": "80a76c8e56bbd7536fd5e87e8daa12984e2960db4edeb1f83229b2baeecc4668",
     "size": 32212254720,
     "sparse": true,
     "full_check": true,
     "has_ab": false,
-    "ondevice_hash": "03c8b65c945207f887ed6c52d38b53d53d71c8597dcb0b63dfbb11f7cfff8d2b"
+    "ondevice_hash": "2c01ab470c02121c721ff6afc25582437e821686207f3afef659387afb69c507"
   }
 ]
@@ -44,7 +44,7 @@ def manager_init() -> None:
   # set unset params to their default value
   for k in params.all_keys():
     default_value = params.get_default_value(k)
-    if default_value and params.get(k) is None:
+    if default_value is not None and params.get(k) is None:
       params.put(k, default_value)

   # Create folders needed for msgq
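The manager_init hunk (and the matching test just below) replaces a truthiness check with is not None, so falsy defaults such as 0 or an empty string are still written for unset params. A two-assert illustration; RouteCount with a default of 0 comes from the test, the other keys are made up for the example:

defaults = {"RouteCount": 0, "SomeToggle": True, "SomeText": ""}

written_truthy = [k for k, v in defaults.items() if v]                 # silently drops the falsy defaults
written_not_none = [k for k, v in defaults.items() if v is not None]   # keeps every key that has a default

assert written_truthy == ["SomeToggle"]
assert written_not_none == ["RouteCount", "SomeToggle", "SomeText"]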
@@ -46,9 +46,10 @@ class TestManager:
     manager.main()
     for k in params.all_keys():
       default_value = params.get_default_value(k)
-      if default_value:
+      if default_value is not None:
         assert params.get(k) == default_value
     assert params.get("OpenpilotEnabledToggle")
+    assert params.get("RouteCount") == 0

   @pytest.mark.skip("this test is flaky the way it's currently written, should be moved to test_onroad")
   def test_clean_exit(self, subtests):
@@ -67,7 +67,8 @@ class MouseEvent(NamedTuple):


 class MouseState:
-  def __init__(self):
+  def __init__(self, scale: float = 1.0):
+    self._scale = scale
     self._events: deque[MouseEvent] = deque(maxlen=MOUSE_THREAD_RATE)  # bound event list
     self._prev_mouse_event: list[MouseEvent | None] = [None] * MAX_TOUCH_SLOTS

@@ -102,8 +103,10 @@ class MouseState:
   def _handle_mouse_event(self):
     for slot in range(MAX_TOUCH_SLOTS):
       mouse_pos = rl.get_touch_position(slot)
+      x = mouse_pos.x / self._scale if self._scale != 1.0 else mouse_pos.x
+      y = mouse_pos.y / self._scale if self._scale != 1.0 else mouse_pos.y
       ev = MouseEvent(
-        MousePos(mouse_pos.x, mouse_pos.y),
+        MousePos(x, y),
         slot,
         rl.is_mouse_button_pressed(slot),
         rl.is_mouse_button_released(slot),
@@ -133,7 +136,7 @@ class GuiApplication:
     self._trace_log_callback = None
     self._modal_overlay = ModalOverlay()

-    self._mouse = MouseState()
+    self._mouse = MouseState(self._scale)
     self._mouse_events: list[MouseEvent] = []

     # Debug variables
@@ -39,6 +39,10 @@ vec4 getGradientColor(vec2 pos) {
   float t = clamp(dot(pos - gradientStart, normalizedDir) / gradientLength, 0.0, 1.0);

   if (gradientColorCount <= 1) return gradientColors[0];
+
+  // handle t before first / after last stop
+  if (t <= gradientStops[0]) return gradientColors[0];
+  if (t >= gradientStops[gradientColorCount-1]) return gradientColors[gradientColorCount-1];
   for (int i = 0; i < gradientColorCount - 1; i++) {
     if (t >= gradientStops[i] && t <= gradientStops[i+1]) {
       float segmentT = (t - gradientStops[i]) / (gradientStops[i+1] - gradientStops[i]);
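The shader hunk adds explicit edge handling so a t before the first stop or past the last one returns the end colors instead of falling through the segment loop. The same lookup written out in Python for clarity (plain tuples instead of GLSL vec4s):

def gradient_color(t, stops, colors):
  # stops are ascending in [0, 1]; colors[i] pairs with stops[i]
  if len(colors) <= 1:
    return colors[0]
  if t <= stops[0]:
    return colors[0]
  if t >= stops[-1]:
    return colors[-1]
  for i in range(len(stops) - 1):
    if stops[i] <= t <= stops[i + 1]:
      f = (t - stops[i]) / (stops[i + 1] - stops[i])
      return tuple(a + (b - a) * f for a, b in zip(colors[i], colors[i + 1]))
  return colors[-1]


assert gradient_color(-0.2, [0.25, 0.75], [(1, 0, 0), (0, 0, 1)]) == (1, 0, 0)
assert gradient_color(0.5, [0.25, 0.75], [(1, 0, 0), (0, 0, 1)]) == (0.5, 0.0, 0.5)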
@@ -37,7 +37,7 @@ NM_DEVICE_IFACE = "org.freedesktop.NetworkManager.Device"
 NM_DEVICE_STATE_REASON_SUPPLICANT_DISCONNECT = 8

 TETHERING_IP_ADDRESS = "192.168.43.1"
-DEFAULT_TETHERING_PASSWORD = "12345678"
+DEFAULT_TETHERING_PASSWORD = "swagswagcomma"


 # NetworkManager device states
@@ -6,10 +6,14 @@ import time
 import urllib.request
 from urllib.parse import urlparse
 from enum import IntEnum
+import shutil

 import pyray as rl

 from cereal import log
+from openpilot.common.run import run_cmd
 from openpilot.system.hardware import HARDWARE
+from openpilot.system.ui.lib.scroll_panel import GuiScrollPanel
 from openpilot.system.ui.lib.application import gui_app, FontWeight
 from openpilot.system.ui.widgets import Widget
 from openpilot.system.ui.widgets.button import Button, ButtonStyle, ButtonRadio
@@ -30,6 +34,19 @@ BUTTON_SPACING = 50
 OPENPILOT_URL = "https://openpilot.comma.ai"
 USER_AGENT = f"AGNOSSetup-{HARDWARE.get_os_version()}"

+CONTINUE_PATH = "/data/continue.sh"
+TMP_CONTINUE_PATH = "/data/continue.sh.new"
+INSTALL_PATH = "/data/openpilot"
+VALID_CACHE_PATH = "/data/.openpilot_cache"
+INSTALLER_SOURCE_PATH = "/usr/comma/installer"
+INSTALLER_DESTINATION_PATH = "/tmp/installer"
+INSTALLER_URL_PATH = "/tmp/installer_url"
+
+CONTINUE = """#!/usr/bin/env bash
+
+cd /data/openpilot
+exec ./launch_openpilot.sh
+"""

 class SetupState(IntEnum):
   LOW_VOLTAGE = 0
@@ -93,14 +110,21 @@ class Setup(Widget):
     self._network_setup_continue_button.set_enabled(False)
     self._network_setup_title_label = Label("Connect to Wi-Fi", TITLE_FONT_SIZE, FontWeight.BOLD, TextAlignment.LEFT)

-    self._custom_software_warning_continue_button = Button("Continue", self._custom_software_warning_continue_button_callback)
+    self._custom_software_warning_continue_button = Button("Scroll to continue", self._custom_software_warning_continue_button_callback,
+                                                           button_style=ButtonStyle.PRIMARY)
+    self._custom_software_warning_continue_button.set_enabled(False)
     self._custom_software_warning_back_button = Button("Back", self._custom_software_warning_back_button_callback)
     self._custom_software_warning_title_label = Label("WARNING: Custom Software", 100, FontWeight.BOLD, TextAlignment.LEFT, text_color=rl.Color(255,89,79,255),
                                                       text_padding=60)
-    self._custom_software_warning_body_label = Label("Use caution when installing third-party software. Third-party software has not been tested by comma,"
-                                                     + " and may cause damage to your device and/or vehicle.\n\nIf you'd like to proceed, use https://flash.comma.ai "
+    self._custom_software_warning_body_label = Label("Use caution when installing third-party software.\n\n"
+                                                     + "⚠️ It has not been tested by comma.\n\n"
+                                                     + "⚠️ It may not comply with relevant safety standards.\n\n"
+                                                     + "⚠️ It may cause damage to your device and/or vehicle.\n\n"
+                                                     + "If you'd like to proceed, use https://flash.comma.ai "
                                                      + "to restore your device to a factory state later.",
                                                      85, text_alignment=TextAlignment.LEFT, text_padding=60)
+    self._custom_software_warning_body_scroll_panel = GuiScrollPanel()

     self._downloading_body_label = Label("Downloading...", TITLE_FONT_SIZE, FontWeight.MEDIUM)

     try:
@@ -136,21 +160,19 @@ class Setup(Widget):
     self.state = SetupState.SOFTWARE_SELECTION

   def _custom_software_warning_continue_button_callback(self):
-    self.state = SetupState.CUSTOM_SOFTWARE
+    self.state = SetupState.NETWORK_SETUP
+    self.stop_network_check_thread.clear()
+    self.start_network_check()

   def _getting_started_button_callback(self):
-    self.state = SetupState.NETWORK_SETUP
-    self.stop_network_check_thread.clear()
-    self.start_network_check()
+    self.state = SetupState.SOFTWARE_SELECTION

   def _software_selection_back_button_callback(self):
-    self.state = SetupState.NETWORK_SETUP
-    self.stop_network_check_thread.clear()
-    self.start_network_check()
+    self.state = SetupState.GETTING_STARTED

   def _software_selection_continue_button_callback(self):
     if self._software_selection_openpilot_button.selected:
-      self.download(OPENPILOT_URL)
+      self.use_openpilot()
     else:
       self.state = SetupState.CUSTOM_SOFTWARE_WARNING

@@ -158,11 +180,14 @@ class Setup(Widget):
     self.state = SetupState.GETTING_STARTED

   def _network_setup_back_button_callback(self):
-    self.state = SetupState.GETTING_STARTED
+    self.state = SetupState.SOFTWARE_SELECTION

   def _network_setup_continue_button_callback(self):
-    self.state = SetupState.SOFTWARE_SELECTION
     self.stop_network_check_thread.set()
+    if self._software_selection_openpilot_button.selected:
+      self.download(OPENPILOT_URL)
+    else:
+      self.state = SetupState.CUSTOM_SOFTWARE

   def render_low_voltage(self, rect: rl.Rectangle):
     rl.draw_texture(self.warning, int(rect.x + 150), int(rect.y + 110), rl.WHITE)
@@ -274,13 +299,23 @@ class Setup(Widget):
     self._download_failed_startover_button.render(rl.Rectangle(rect.x + MARGIN + button_width + BUTTON_SPACING, button_y, button_width, BUTTON_HEIGHT))

   def render_custom_software_warning(self, rect: rl.Rectangle):
-    self._custom_software_warning_title_label.render(rl.Rectangle(rect.x + 50, rect.y + 150, rect.width - 265, TITLE_FONT_SIZE))
-    self._custom_software_warning_body_label.render(rl.Rectangle(rect.x + 50, rect.y + 200 , rect.width - 50, BODY_FONT_SIZE * 3))
+    warn_rect = rl.Rectangle(rect.x, rect.y, rect.width, 1500)
+    offset = self._custom_software_warning_body_scroll_panel.handle_scroll(rect, warn_rect)

     button_width = (rect.width - MARGIN * 3) / 2
     button_y = rect.height - MARGIN - BUTTON_HEIGHT

+    rl.begin_scissor_mode(int(rect.x), int(rect.y), int(rect.width), int(button_y - BODY_FONT_SIZE))
+    y_offset = rect.y + offset.y
+    self._custom_software_warning_title_label.render(rl.Rectangle(rect.x + 50, y_offset + 150, rect.width - 265, TITLE_FONT_SIZE))
+    self._custom_software_warning_body_label.render(rl.Rectangle(rect.x + 50, y_offset + 200 , rect.width - 50, BODY_FONT_SIZE * 3))
+    rl.end_scissor_mode()

     self._custom_software_warning_back_button.render(rl.Rectangle(rect.x + MARGIN, button_y, button_width, BUTTON_HEIGHT))
     self._custom_software_warning_continue_button.render(rl.Rectangle(rect.x + MARGIN * 2 + button_width, button_y, button_width, BUTTON_HEIGHT))
+    if offset.y < (rect.height - warn_rect.height):
+      self._custom_software_warning_continue_button.set_enabled(True)
+      self._custom_software_warning_continue_button.set_text("Continue")

   def render_custom_software(self):
     def handle_keyboard_result(result):
@@ -299,6 +334,23 @@ class Setup(Widget):
     self.keyboard.set_title("Enter URL", "for Custom Software")
     gui_app.set_modal_overlay(self.keyboard, callback=handle_keyboard_result)

+  def use_openpilot(self):
+    if os.path.isdir(INSTALL_PATH) and os.path.isfile(VALID_CACHE_PATH):
+      os.remove(VALID_CACHE_PATH)
+      with open(TMP_CONTINUE_PATH, "w") as f:
+        f.write(CONTINUE)
+      run_cmd(["chmod", "+x", TMP_CONTINUE_PATH])
+      shutil.move(TMP_CONTINUE_PATH, CONTINUE_PATH)
+      shutil.copyfile(INSTALLER_SOURCE_PATH, INSTALLER_DESTINATION_PATH)
+
+      # give time for installer UI to take over
+      time.sleep(1)
+      gui_app.request_close()
+    else:
+      self.state = SetupState.NETWORK_SETUP
+      self.stop_network_check_thread.clear()
+      self.start_network_check()
+
   def download(self, url: str):
     # autocomplete incomplete URLs
     if re.match("^([^/.]+)/([^/]+)$", url):
@@ -316,7 +368,7 @@ class Setup(Widget):
     try:
       import tempfile

-      _, tmpfile = tempfile.mkstemp(prefix="installer_")
+      fd, tmpfile = tempfile.mkstemp(prefix="installer_")

       headers = {"User-Agent": USER_AGENT, "X-openpilot-serial": HARDWARE.get_serial()}
       req = urllib.request.Request(self.download_url, headers=headers)
@@ -346,12 +398,16 @@ class Setup(Widget):
         self.download_failed(self.download_url, "No custom software found at this URL.")
         return

-      os.rename(tmpfile, "/tmp/installer")
-      os.chmod("/tmp/installer", 0o755)
+      # AGNOS might try to execute the installer before this process exits.
+      # Therefore, important to close the fd before renaming the installer.
+      os.close(fd)
+      os.rename(tmpfile, INSTALLER_DESTINATION_PATH)

-      with open("/tmp/installer_url", "w") as f:
+      with open(INSTALLER_URL_PATH, "w") as f:
         f.write(self.download_url)

+      # give time for installer UI to take over
+      time.sleep(5)
       gui_app.request_close()

     except Exception:
@@ -1,17 +1,17 @@
-from dataclasses import dataclass
+from enum import IntEnum
 from functools import partial
 from threading import Lock
-from typing import Literal
+from typing import cast

 import pyray as rl
 from openpilot.system.ui.lib.application import gui_app
 from openpilot.system.ui.lib.scroll_panel import GuiScrollPanel
 from openpilot.system.ui.lib.wifi_manager import NetworkInfo, WifiManagerCallbacks, WifiManagerWrapper, SecurityType
 from openpilot.system.ui.widgets import Widget
-from openpilot.system.ui.widgets.button import ButtonStyle, Button, TextAlignment
+from openpilot.system.ui.widgets.button import ButtonStyle, Button
 from openpilot.system.ui.widgets.confirm_dialog import ConfirmDialog
 from openpilot.system.ui.widgets.keyboard import Keyboard
-from openpilot.system.ui.widgets.label import gui_label
+from openpilot.system.ui.widgets.label import TextAlignment, gui_label

 NM_DEVICE_STATE_NEED_AUTH = 60
 MIN_PASSWORD_LENGTH = 8
@@ -27,43 +27,20 @@ STRENGTH_ICONS = [
 ]


-@dataclass
-class StateIdle:
-  action: Literal["idle"] = "idle"
-
-
-@dataclass
-class StateConnecting:
-  network: NetworkInfo
-  action: Literal["connecting"] = "connecting"
-
-
-@dataclass
-class StateNeedsAuth:
-  network: NetworkInfo
-  retry: bool
-  action: Literal["needs_auth"] = "needs_auth"
-
-
-@dataclass
-class StateShowForgetConfirm:
-  network: NetworkInfo
-  action: Literal["show_forget_confirm"] = "show_forget_confirm"
-
-
-@dataclass
-class StateForgetting:
-  network: NetworkInfo
-  action: Literal["forgetting"] = "forgetting"
-
-
-UIState = StateIdle | StateConnecting | StateNeedsAuth | StateShowForgetConfirm | StateForgetting
+class UIState(IntEnum):
+  IDLE = 0
+  CONNECTING = 1
+  NEEDS_AUTH = 2
+  SHOW_FORGET_CONFIRM = 3
+  FORGETTING = 4


 class WifiManagerUI(Widget):
   def __init__(self, wifi_manager: WifiManagerWrapper):
     super().__init__()
-    self.state: UIState = StateIdle()
+    self.state: UIState = UIState.IDLE
+    self._state_network: NetworkInfo | None = None  # for CONNECTING / NEEDS_AUTH / SHOW_FORGET_CONFIRM / FORGETTING
+    self._password_retry: bool = False  # for NEEDS_AUTH
     self.btn_width: int = 200
     self.scroll_panel = GuiScrollPanel()
     self.keyboard = Keyboard(max_text_size=MAX_PASSWORD_LENGTH, min_text_size=MIN_PASSWORD_LENGTH, show_password_toggle=True)
@@ -93,17 +70,16 @@ class WifiManagerUI(Widget):
       gui_label(rect, "Scanning Wi-Fi networks...", 72, alignment=rl.GuiTextAlignment.TEXT_ALIGN_CENTER)
       return

-    match self.state:
-      case StateNeedsAuth(network, retry):
-        self.keyboard.set_title("Wrong password" if retry else "Enter password", f"for {network.ssid}")
-        self.keyboard.reset()
-        gui_app.set_modal_overlay(self.keyboard, lambda result: self._on_password_entered(network, result))
-      case StateShowForgetConfirm(network):
-        self._confirm_dialog.set_text(f'Forget Wi-Fi Network "{network.ssid}"?')
-        self._confirm_dialog.reset()
-        gui_app.set_modal_overlay(self._confirm_dialog, callback=lambda result: self.on_forgot_confirm_finished(network, result))
-      case _:
-        self._draw_network_list(rect)
+    if self.state == UIState.NEEDS_AUTH and self._state_network:
+      self.keyboard.set_title("Wrong password" if self._password_retry else "Enter password", f"for {self._state_network.ssid}")
+      self.keyboard.reset()
+      gui_app.set_modal_overlay(self.keyboard, lambda result: self._on_password_entered(cast(NetworkInfo, self._state_network), result))
+    elif self.state == UIState.SHOW_FORGET_CONFIRM and self._state_network:
+      self._confirm_dialog.set_text(f'Forget Wi-Fi Network "{self._state_network.ssid}"?')
+      self._confirm_dialog.reset()
+      gui_app.set_modal_overlay(self._confirm_dialog, callback=lambda result: self.on_forgot_confirm_finished(self._state_network, result))
+    else:
+      self._draw_network_list(rect)

   def _on_password_entered(self, network: NetworkInfo, result: int):
     if result == 1:
@@ -113,13 +89,13 @@ class WifiManagerUI(Widget):
       if len(password) >= MIN_PASSWORD_LENGTH:
         self.connect_to_network(network, password)
     elif result == 0:
-      self.state = StateIdle()
+      self.state = UIState.IDLE

   def on_forgot_confirm_finished(self, network, result: int):
     if result == 1:
       self.forget_network(network)
     elif result == 0:
-      self.state = StateIdle()
+      self.state = UIState.IDLE

   def _draw_network_list(self, rect: rl.Rectangle):
     content_rect = rl.Rectangle(rect.x, rect.y, rect.width, len(self._networks) * ITEM_HEIGHT)
@@ -147,17 +123,18 @@ class WifiManagerUI(Widget):
       security_icon_rect = rl.Rectangle(signal_icon_rect.x - spacing - ICON_SIZE, rect.y + (ITEM_HEIGHT - ICON_SIZE) / 2, ICON_SIZE, ICON_SIZE)

       status_text = ""
-      match self.state:
-        case StateConnecting(network=connecting):
-          if connecting.ssid == network.ssid:
-            self._networks_buttons[network.ssid].set_enabled(False)
-            status_text = "CONNECTING..."
-        case StateForgetting(network=forgetting):
-          if forgetting.ssid == network.ssid:
-            self._networks_buttons[network.ssid].set_enabled(False)
-            status_text = "FORGETTING..."
-        case _:
-          self._networks_buttons[network.ssid].set_enabled(True)
+      if self.state == UIState.CONNECTING and self._state_network:
+        if self._state_network.ssid == network.ssid:
+          self._networks_buttons[network.ssid].set_enabled(False)
+          status_text = "CONNECTING..."
+      elif self.state == UIState.FORGETTING and self._state_network:
+        if self._state_network.ssid == network.ssid:
+          self._networks_buttons[network.ssid].set_enabled(False)
+          status_text = "FORGETTING..."
+      elif network.security_type == SecurityType.UNSUPPORTED:
+        self._networks_buttons[network.ssid].set_enabled(False)
+      else:
+        self._networks_buttons[network.ssid].set_enabled(True)

       self._networks_buttons[network.ssid].render(ssid_rect)

@@ -181,13 +158,16 @@ class WifiManagerUI(Widget):
   def _networks_buttons_callback(self, network):
     if self.scroll_panel.is_touch_valid():
       if not network.is_saved and network.security_type != SecurityType.OPEN:
-        self.state = StateNeedsAuth(network, False)
+        self.state = UIState.NEEDS_AUTH
+        self._state_network = network
+        self._password_retry = False
       elif not network.is_connected:
         self.connect_to_network(network)

   def _forget_networks_buttons_callback(self, network):
     if self.scroll_panel.is_touch_valid():
-      self.state = StateShowForgetConfirm(network)
+      self.state = UIState.SHOW_FORGET_CONFIRM
+      self._state_network = network

   def _draw_status_icon(self, rect, network: NetworkInfo):
     """Draw the status icon based on network's connection state"""
@@ -212,14 +192,16 @@ class WifiManagerUI(Widget):
       rl.draw_texture_v(gui_app.texture(STRENGTH_ICONS[strength_level], ICON_SIZE, ICON_SIZE), rl.Vector2(rect.x, rect.y), rl.WHITE)

   def connect_to_network(self, network: NetworkInfo, password=''):
-    self.state = StateConnecting(network)
+    self.state = UIState.CONNECTING
+    self._state_network = network
     if network.is_saved and not password:
       self.wifi_manager.activate_connection(network.ssid)
     else:
       self.wifi_manager.connect_to_network(network.ssid, password)

   def forget_network(self, network: NetworkInfo):
-    self.state = StateForgetting(network)
+    self.state = UIState.FORGETTING
+    self._state_network = network
     network.is_saved = False
     self.wifi_manager.forget_connection(network.ssid)

@@ -236,22 +218,24 @@ class WifiManagerUI(Widget):
     with self._lock:
       network = next((n for n in self._networks if n.ssid == ssid), None)
       if network:
-        self.state = StateNeedsAuth(network, True)
+        self.state = UIState.NEEDS_AUTH
+        self._state_network = network
+        self._password_retry = True

   def _on_activated(self):
     with self._lock:
-      if isinstance(self.state, StateConnecting):
-        self.state = StateIdle()
+      if self.state == UIState.CONNECTING:
+        self.state = UIState.IDLE

   def _on_forgotten(self, ssid):
     with self._lock:
-      if isinstance(self.state, StateForgetting):
-        self.state = StateIdle()
+      if self.state == UIState.FORGETTING:
+        self.state = UIState.IDLE

   def _on_connection_failed(self, ssid: str, error: str):
     with self._lock:
-      if isinstance(self.state, StateConnecting):
-        self.state = StateIdle()
+      if self.state == UIState.CONNECTING:
+        self.state = UIState.IDLE


 def main():
@@ -242,6 +242,9 @@ class Updater:
     b: str | None = self.params.get("UpdaterTargetBranch")
     if b is None:
       b = self.get_branch(BASEDIR)
+      b = {
+        ("tici", "release3"): "release-tici"
+      }.get((HARDWARE.get_device_type(), b), b)
     return b

   @property
@@ -283,8 +286,8 @@ class Updater:
       self.params.put("LastUpdateUptimeOnroad", last_uptime_onroad)
       self.params.put("LastUpdateRouteCount", last_route_count)
     else:
-      last_uptime_onroad = self.params.get("LastUpdateUptimeOnroad") or last_uptime_onroad
-      last_route_count = self.params.get("LastUpdateRouteCount") or last_route_count
+      last_uptime_onroad = self.params.get("LastUpdateUptimeOnroad", return_default=True)
+      last_route_count = self.params.get("LastUpdateRouteCount", return_default=True)

     if exception is None:
       self.params.remove("LastUpdateException")
@@ -13,7 +13,7 @@ from openpilot.common.git import get_commit, get_origin, get_branch, get_short_b
 RELEASE_SP_BRANCHES = ['release-c3']
 TESTED_SP_BRANCHES = ['staging-c3', 'staging-c3-new']
 MASTER_SP_BRANCHES = ['master']
-RELEASE_BRANCHES = ['release3-staging', 'release3', 'nightly'] + RELEASE_SP_BRANCHES
+RELEASE_BRANCHES = ['release3-staging', 'release3', 'release-tici', 'nightly'] + RELEASE_SP_BRANCHES
 TESTED_BRANCHES = RELEASE_BRANCHES + ['devel', 'devel-staging', 'nightly-dev'] + TESTED_SP_BRANCHES

 BUILD_METADATA_FILENAME = "build.json"
@@ -107,7 +107,7 @@ def decoder(addr, vipc_server, vst, nvidia, W, H, debug=False):


 class CompressedVipc:
-  def __init__(self, addr, vision_streams, nvidia=False, debug=False):
+  def __init__(self, addr, vision_streams, server_name, nvidia=False, debug=False):
     print("getting frame sizes")
     os.environ["ZMQ"] = "1"
     messaging.reset_context()
@@ -117,7 +117,7 @@ class CompressedVipc:
     os.environ.pop("ZMQ")
     messaging.reset_context()

-    self.vipc_server = VisionIpcServer("camerad")
+    self.vipc_server = VisionIpcServer(server_name)
     for vst in vision_streams:
       ed = sm[ENCODE_SOCKETS[vst]]
       self.vipc_server.create_buffers(vst, 4, ed.width, ed.height)
@@ -144,6 +144,7 @@ if __name__ == "__main__":
   parser.add_argument("addr", help="Address of comma three")
   parser.add_argument("--nvidia", action="store_true", help="Use nvidia instead of ffmpeg")
   parser.add_argument("--cams", default="0,1,2", help="Cameras to decode")
+  parser.add_argument("--server", default="camerad", help="choose vipc server name")
   parser.add_argument("--silent", action="store_true", help="Suppress debug output")
   args = parser.parse_args()

@@ -154,7 +155,7 @@ if __name__ == "__main__":
   ]

   vsts = [vision_streams[int(x)] for x in args.cams.split(",")]
-  cvipc = CompressedVipc(args.addr, vsts, args.nvidia, debug=(not args.silent))
+  cvipc = CompressedVipc(args.addr, vsts, args.server, args.nvidia, debug=(not args.silent))

   # register exit handler
   signal.signal(signal.SIGINT, lambda sig, frame: cvipc.kill())
@@ -1,6 +1,5 @@
 #!/usr/bin/env python3

-import atexit
 import logging
 import os
 import platform
@@ -11,13 +10,14 @@ from argparse import ArgumentParser, ArgumentTypeError
 from collections.abc import Sequence
 from pathlib import Path
 from random import randint
-from subprocess import Popen, PIPE
+from subprocess import Popen
 from typing import Literal

 from cereal.messaging import SubMaster
 from openpilot.common.basedir import BASEDIR
 from openpilot.common.params import Params, UnknownKeyName
 from openpilot.common.prefix import OpenpilotPrefix
+from openpilot.common.run import managed_proc
 from openpilot.tools.lib.route import Route
 from openpilot.tools.lib.logreader import LogReader

@@ -38,22 +38,23 @@ UI = str(Path(BASEDIR, 'selfdrive/ui/ui').resolve())
 logger = logging.getLogger('clip.py')


-def check_for_failure(proc: Popen):
-  exit_code = proc.poll()
-  if exit_code is not None and exit_code != 0:
-    cmd = str(proc.args)
-    if isinstance(proc.args, str):
-      cmd = proc.args
-    elif isinstance(proc.args, Sequence):
-      cmd = str(proc.args[0])
-    msg = f'{cmd} failed, exit code {exit_code}'
-    logger.error(msg)
-    stdout, stderr = proc.communicate()
-    if stdout:
-      logger.error(stdout.decode())
-    if stderr:
-      logger.error(stderr.decode())
-    raise ChildProcessError(msg)
+def check_for_failure(procs: list[Popen]):
+  for proc in procs:
+    exit_code = proc.poll()
+    if exit_code is not None and exit_code != 0:
+      cmd = str(proc.args)
+      if isinstance(proc.args, str):
+        cmd = proc.args
+      elif isinstance(proc.args, Sequence):
+        cmd = str(proc.args[0])
+      msg = f'{cmd} failed, exit code {exit_code}'
+      logger.error(msg)
+      stdout, stderr = proc.communicate()
+      if stdout:
+        logger.error(stdout.decode())
+      if stderr:
+        logger.error(stderr.decode())
+      raise ChildProcessError(msg)


 def escape_ffmpeg_text(value: str):
@@ -137,10 +138,6 @@ def populate_car_params(lr: LogReader):
   logger.debug('persisted CarParams')


-def start_proc(args: list[str], env: dict[str, str]):
-  return Popen(args, env=env, stdout=PIPE, stderr=PIPE)
-
-
 def validate_env(parser: ArgumentParser):
   if platform.system() not in ['Linux']:
     parser.exit(1, f'clip.py: error: {platform.system()} is not a supported operating system\n')
@@ -176,8 +173,7 @@ def wait_for_frames(procs: list[Popen]):
   while no_frames_drawn:
     sm.update()
     no_frames_drawn = sm['uiDebug'].drawTimeMillis == 0.
-    for proc in procs:
-      check_for_failure(proc)
+    check_for_failure(procs)


 def clip(
@@ -253,35 +249,22 @@ def clip(

   with OpenpilotPrefix(prefix, shared_download_cache=True):
     populate_car_params(lr)

     env = os.environ.copy()
     env['DISPLAY'] = display

-    xvfb_proc = start_proc(xvfb_cmd, env)
-    atexit.register(lambda: xvfb_proc.terminate())
-    ui_proc = start_proc(ui_cmd, env)
-    atexit.register(lambda: ui_proc.terminate())
-    replay_proc = start_proc(replay_cmd, env)
-    atexit.register(lambda: replay_proc.terminate())
-    procs = [replay_proc, ui_proc, xvfb_proc]
-
-    logger.info('waiting for replay to begin (loading segments, may take a while)...')
-    wait_for_frames(procs)
-
-    logger.debug(f'letting UI warm up ({SECONDS_TO_WARM}s)...')
-    time.sleep(SECONDS_TO_WARM)
-    for proc in procs:
-      check_for_failure(proc)
-
-    ffmpeg_proc = start_proc(ffmpeg_cmd, env)
-    procs.append(ffmpeg_proc)
-    atexit.register(lambda: ffmpeg_proc.terminate())
-
-    logger.info(f'recording in progress ({duration}s)...')
-    ffmpeg_proc.wait(duration + PROC_WAIT_SECONDS)
-    for proc in procs:
-      check_for_failure(proc)
-    logger.info(f'recording complete: {Path(out).resolve()}')
+    with managed_proc(xvfb_cmd, env) as xvfb_proc, managed_proc(ui_cmd, env) as ui_proc, managed_proc(replay_cmd, env) as replay_proc:
+      procs = [xvfb_proc, ui_proc, replay_proc]
+      logger.info('waiting for replay to begin (loading segments, may take a while)...')
+      wait_for_frames(procs)
+      logger.debug(f'letting UI warm up ({SECONDS_TO_WARM}s)...')
+      time.sleep(SECONDS_TO_WARM)
+      check_for_failure(procs)
+      with managed_proc(ffmpeg_cmd, env) as ffmpeg_proc:
+        procs.append(ffmpeg_proc)
+        logger.info(f'recording in progress ({duration}s)...')
+        ffmpeg_proc.wait(duration + PROC_WAIT_SECONDS)
+        check_for_failure(procs)
+        logger.info(f'recording complete: {Path(out).resolve()}')


 def main():
@@ -319,9 +302,7 @@ def main():
     logger.exception('interrupted by user', exc_info=e)
   except Exception as e:
     logger.exception('encountered error', exc_info=e)
-  finally:
-    atexit._run_exitfuncs()
-    sys.exit(exit_code)
+  sys.exit(exit_code)


 if __name__ == '__main__':
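Note: the clip.py changes above replace the start_proc()/atexit cleanup pattern with managed_proc context managers from openpilot.common.run, so child processes are terminated when their with-block exits. The implementation of managed_proc is not part of this diff; a minimal sketch of the pattern the new code implies, with assumed details, could look like:

    from contextlib import contextmanager
    from subprocess import Popen, PIPE

    @contextmanager
    def managed_proc(args: list[str], env: dict[str, str]):
      # start the child with captured output, as the old start_proc() helper did
      proc = Popen(args, env=env, stdout=PIPE, stderr=PIPE)
      try:
        yield proc
      finally:
        # terminate on scope exit, replacing the old atexit.register(...) cleanup
        if proc.poll() is None:
          proc.terminate()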
tools/lib/file_sources.py (Executable file, 57 lines added)
@@ -0,0 +1,57 @@
+from collections.abc import Callable
+
+from openpilot.tools.lib.comma_car_segments import get_url as get_comma_segments_url
+from openpilot.tools.lib.openpilotci import get_url
+from openpilot.tools.lib.filereader import DATA_ENDPOINT, file_exists, internal_source_available
+from openpilot.tools.lib.route import Route, SegmentRange, FileName
+
+# When passed a tuple of file names, each source will return the first that exists (rlog.zst, rlog.bz2)
+FileNames = tuple[str, ...]
+Source = Callable[[SegmentRange, list[int], FileNames], dict[int, str]]
+
+InternalUnavailableException = Exception("Internal source not available")
+
+
+def comma_api_source(sr: SegmentRange, seg_idxs: list[int], fns: FileNames) -> dict[int, str]:
+  route = Route(sr.route_name)
+
+  # comma api will have already checked if the file exists
+  if fns == FileName.RLOG:
+    return {seg: route.log_paths()[seg] for seg in seg_idxs if route.log_paths()[seg] is not None}
+  else:
+    return {seg: route.qlog_paths()[seg] for seg in seg_idxs if route.qlog_paths()[seg] is not None}
+
+
+def internal_source(sr: SegmentRange, seg_idxs: list[int], fns: FileNames, endpoint_url: str = DATA_ENDPOINT) -> dict[int, str]:
+  if not internal_source_available(endpoint_url):
+    raise InternalUnavailableException
+
+  def get_internal_url(sr: SegmentRange, seg, file):
+    return f"{endpoint_url.rstrip('/')}/{sr.dongle_id}/{sr.log_id}/{seg}/{file}"
+
+  return eval_source({seg: [get_internal_url(sr, seg, fn) for fn in fns] for seg in seg_idxs})
+
+
+def openpilotci_source(sr: SegmentRange, seg_idxs: list[int], fns: FileNames) -> dict[int, str]:
+  return eval_source({seg: [get_url(sr.route_name, seg, fn) for fn in fns] for seg in seg_idxs})
+
+
+def comma_car_segments_source(sr: SegmentRange, seg_idxs: list[int], fns: FileNames) -> dict[int, str]:
+  return eval_source({seg: get_comma_segments_url(sr.route_name, seg) for seg in seg_idxs})
+
+
+def eval_source(files: dict[int, list[str] | str]) -> dict[int, str]:
+  # Returns valid file URLs given a list of possible file URLs for each segment (e.g. rlog.bz2, rlog.zst)
+  valid_files: dict[int, str] = {}
+
+  for seg_idx, urls in files.items():
+    if isinstance(urls, str):
+      urls = [urls]
+
+    # Add first valid file URL
+    for url in urls:
+      if file_exists(url):
+        valid_files[seg_idx] = url
+        break
+
+  return valid_files
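Note: each source in the new tools/lib/file_sources.py takes a SegmentRange, the segment indices still needed, and a tuple of candidate file names, and returns a dict mapping segment index to the first URL that exists. A hedged usage sketch (the route name below is a placeholder, and the return values are only examples):

    from openpilot.tools.lib.file_sources import openpilotci_source
    from openpilot.tools.lib.route import SegmentRange, FileName

    sr = SegmentRange("a2a0ccea32023010/2023-07-27--13-01-19")  # placeholder route
    found = openpilotci_source(sr, [0, 1, 2], FileName.RLOG)    # e.g. {0: ".../rlog.zst", 2: ".../rlog.bz2"}
    missing = [i for i in [0, 1, 2] if i not in found]          # segments with no valid URL yet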
@@ -12,16 +12,15 @@ import urllib.parse
 import warnings
 import zstandard as zstd

-from collections.abc import Callable, Iterable, Iterator
+from collections.abc import Iterable, Iterator
 from typing import cast
 from urllib.parse import parse_qs, urlparse

 from cereal import log as capnp_log
 from openpilot.common.swaglog import cloudlog
-from openpilot.tools.lib.comma_car_segments import get_url as get_comma_segments_url
-from openpilot.tools.lib.openpilotci import get_url
-from openpilot.tools.lib.filereader import DATA_ENDPOINT, FileReader, file_exists, internal_source_available
-from openpilot.tools.lib.route import Route, SegmentRange, FileName
+from openpilot.tools.lib.filereader import FileReader
+from openpilot.tools.lib.file_sources import comma_api_source, internal_source, openpilotci_source, comma_car_segments_source, Source
+from openpilot.tools.lib.route import SegmentRange, FileName
 from openpilot.tools.lib.log_time_series import msgs_to_time_series

 LogMessage = type[capnp._DynamicStructReader]
@@ -40,6 +39,7 @@ def save_log(dest, log_msgs, compress=True):
   with open(dest, "wb") as f:
     f.write(dat)

+
 def decompress_stream(data: bytes):
   dctx = zstd.ZstdDecompressor()
   decompressed_data = b""
@@ -139,73 +139,22 @@ class ReadMode(enum.StrEnum):
   AUTO_INTERACTIVE = "i" # default to rlogs, fallback to qlogs with a prompt from the user


-LogPath = str | None
-LogFileName = tuple[str, ...]
-Source = Callable[[SegmentRange, LogFileName], list[LogPath]]
-
-InternalUnavailableException = Exception("Internal source not available")
-
-
 class LogsUnavailable(Exception):
   pass


-def comma_api_source(sr: SegmentRange, fns: LogFileName) -> list[LogPath]:
-  route = Route(sr.route_name)
-
-  # comma api will have already checked if the file exists
-  if fns == FileName.RLOG:
-    return [route.log_paths()[seg] for seg in sr.seg_idxs]
-  else:
-    return [route.qlog_paths()[seg] for seg in sr.seg_idxs]
-
-
-def internal_source(sr: SegmentRange, fns: LogFileName, endpoint_url: str = DATA_ENDPOINT) -> list[LogPath]:
-  if not internal_source_available(endpoint_url):
-    raise InternalUnavailableException
-
-  def get_internal_url(sr: SegmentRange, seg, file):
-    return f"{endpoint_url.rstrip('/')}/{sr.dongle_id}/{sr.log_id}/{seg}/{file}"
-
-  return eval_source([[get_internal_url(sr, seg, fn) for fn in fns] for seg in sr.seg_idxs])
-
-
-def openpilotci_source(sr: SegmentRange, fns: LogFileName) -> list[LogPath]:
-  return eval_source([[get_url(sr.route_name, seg, fn) for fn in fns] for seg in sr.seg_idxs])
-
-
-def comma_car_segments_source(sr: SegmentRange, fns: LogFileName) -> list[LogPath]:
-  return eval_source([get_comma_segments_url(sr.route_name, seg) for seg in sr.seg_idxs])
-
-
 def direct_source(file_or_url: str) -> list[str]:
   return [file_or_url]


-def eval_source(files: list[list[str] | str]) -> list[LogPath]:
-  # Returns valid file URLs given a list of possible file URLs for each segment (e.g. rlog.bz2, rlog.zst)
-  valid_files: list[LogPath] = []
-
-  for urls in files:
-    if isinstance(urls, str):
-      urls = [urls]
-
-    for url in urls:
-      if file_exists(url):
-        valid_files.append(url)
-        break
-    else:
-      valid_files.append(None)
-
-  return valid_files
-
-
+# TODO this should apply to camera files as well
 def auto_source(identifier: str, sources: list[Source], default_mode: ReadMode) -> list[str]:
   exceptions = {}

   sr = SegmentRange(identifier)
-  mode = default_mode if sr.selector is None else ReadMode(sr.selector)
+  needed_seg_idxs = sr.seg_idxs

+  mode = default_mode if sr.selector is None else ReadMode(sr.selector)
   if mode == ReadMode.QLOG:
     try_fns = [FileName.QLOG]
   else:
@@ -217,37 +166,35 @@ def auto_source(identifier: str, sources: list[Source], default_mode: ReadMode)

   # Build a dict of valid files as we evaluate each source. May contain mix of rlogs, qlogs, and None.
   # This function only returns when we've sourced all files, or throws an exception
-  valid_files: dict[int, LogPath] = {}
+  valid_files: dict[int, str] = {}
   for fn in try_fns:
     for source in sources:
       try:
-        files = source(sr, fn)
+        files = source(sr, needed_seg_idxs, fn)

-        # Check every source returns an expected number of files
-        assert len(files) == len(valid_files) or len(valid_files) == 0, f"Source {source.__name__} returned unexpected number of files"
-
         # Build a dict of valid files
-        for idx, f in enumerate(files):
-          if valid_files.get(idx) is None:
-            valid_files[idx] = f
+        valid_files |= files
+
+        # Don't check for segment files that have already been found
+        needed_seg_idxs = [idx for idx in needed_seg_idxs if idx not in valid_files]

         # We've found all files, return them
-        if all(f is not None for f in valid_files.values()):
+        if len(needed_seg_idxs) == 0:
           return cast(list[str], list(valid_files.values()))

       except Exception as e:
         exceptions[source.__name__] = e

     if fn == try_fns[0]:
-      missing_logs = list(valid_files.values()).count(None)
+      missing_logs = len(needed_seg_idxs)
       if mode == ReadMode.AUTO:
-        cloudlog.warning(f"{missing_logs}/{len(valid_files)} rlogs were not found, falling back to qlogs for those segments...")
+        cloudlog.warning(f"{missing_logs}/{len(sr.seg_idxs)} rlogs were not found, falling back to qlogs for those segments...")
       elif mode == ReadMode.AUTO_INTERACTIVE:
-        if input(f"{missing_logs}/{len(valid_files)} rlogs were not found, would you like to fallback to qlogs for those segments? (y/N) ").lower() != "y":
+        if input(f"{missing_logs}/{len(sr.seg_idxs)} rlogs were not found, would you like to fallback to qlogs for those segments? (y/N) ").lower() != "y":
           break

-  missing_logs = list(valid_files.values()).count(None)
-  raise LogsUnavailable(f"{missing_logs}/{len(valid_files)} logs were not found, please ensure all logs " +
+  missing_logs = len(needed_seg_idxs)
+  raise LogsUnavailable(f"{missing_logs}/{len(sr.seg_idxs)} logs were not found, please ensure all logs " +
                         "are uploaded. You can fall back to qlogs with '/a' selector at the end of the route name.\n\n" +
                         "Exceptions for sources:\n - " + "\n - ".join([f"{k}: {repr(v)}" for k, v in exceptions.items()]))

@@ -298,7 +245,7 @@ class LogReader:
   def __init__(self, identifier: str | list[str], default_mode: ReadMode = ReadMode.RLOG,
                sources: list[Source] = None, sort_by_time=False, only_union_types=False):
     if sources is None:
-      sources = [internal_source, openpilotci_source, comma_api_source, comma_car_segments_source]
+      sources = [internal_source, comma_api_source, openpilotci_source, comma_car_segments_source]

     self.default_mode = default_mode
     self.sources = sources
@@ -351,6 +298,7 @@ class LogReader:
   def time_series(self):
     return msgs_to_time_series(self)

+
 if __name__ == "__main__":
   import codecs

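Note: with the refactor above, auto_source keeps a needed_seg_idxs list, merges each source's dict of results into valid_files, and narrows the list before trying the next source, so later sources only fill the remaining gaps. An illustrative stand-alone version of that merge-and-narrow step (the per-source results here are made up, not from the diff):

    valid_files: dict[int, str] = {}
    needed_seg_idxs = [0, 1, 2, 3]

    for result in ({0: "url_a", 2: "url_c"}, {1: "url_b", 3: "url_d"}):  # stand-ins for source(sr, needed_seg_idxs, fn) calls
      valid_files |= result
      needed_seg_idxs = [idx for idx in needed_seg_idxs if idx not in valid_files]
      if len(needed_seg_idxs) == 0:
        break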
@@ -231,7 +231,6 @@ class RouteName:
   def __str__(self) -> str: return self._canonical_name


-
 class SegmentName:
   # TODO: add constructor that takes dongle_id, time_str, segment_num and then create instances
   # of this class instead of manually constructing a segment name (use canonical_name prop instead)
@@ -252,7 +251,7 @@ class SegmentName:
   @property
   def canonical_name(self) -> str: return self._canonical_name

-  #TODO should only use one name
+  # TODO should only use one name
   @property
   def data_name(self) -> str: return f"{self._route_name.canonical_name}/{self._num}"

@@ -283,7 +282,7 @@ class SegmentName:
   @staticmethod
   def from_file_name(file_name):
     # ??????/xxxxxxxxxxxxxxxx|1111-11-11-11--11-11-11/1/rlog.bz2
-    dongle_id, route_name, segment_num = file_name.replace('|','/').split('/')[-4:-1]
+    dongle_id, route_name, segment_num = file_name.replace('|', '/').split('/')[-4:-1]
     return SegmentName(dongle_id + "|" + route_name + "--" + segment_num)

   @staticmethod
@@ -304,6 +303,7 @@ class SegmentName:
     dongle_id, route_name, segment_num = prefix.split("/")
     return SegmentName(dongle_id + "|" + route_name + "--" + segment_num)

+
 @cache
 def get_max_seg_number_cached(sr: 'SegmentRange') -> int:
   try:
@@ -365,4 +365,3 @@ class SegmentRange:

   def __repr__(self) -> str:
     return self.__str__()
-
@@ -10,7 +10,8 @@ import requests
 from parameterized import parameterized

 from cereal import log as capnp_log
-from openpilot.tools.lib.logreader import LogsUnavailable, LogIterable, LogReader, comma_api_source, parse_indirect, ReadMode, InternalUnavailableException
+from openpilot.tools.lib.logreader import LogsUnavailable, LogIterable, LogReader, parse_indirect, ReadMode
+from openpilot.tools.lib.file_sources import comma_api_source, InternalUnavailableException
 from openpilot.tools.lib.route import SegmentRange
 from openpilot.tools.lib.url_file import URLFileException

@@ -36,12 +37,12 @@ def setup_source_scenario(mocker, is_internal=False):
   comma_api_source_mock.__name__ = comma_api_source_mock._mock_name

   if is_internal:
-    internal_source_mock.return_value = [QLOG_FILE]
+    internal_source_mock.return_value = {3: QLOG_FILE}
   else:
     internal_source_mock.side_effect = InternalUnavailableException

-  openpilotci_source_mock.return_value = [None]
-  comma_api_source_mock.return_value = [QLOG_FILE]
+  openpilotci_source_mock.return_value = {}
+  comma_api_source_mock.return_value = {3: QLOG_FILE}

   yield

@@ -90,7 +91,7 @@ class TestLogReader:

   @pytest.mark.parametrize("cache_enabled", [True, False])
   def test_direct_parsing(self, mocker, cache_enabled):
-    file_exists_mock = mocker.patch("openpilot.tools.lib.logreader.file_exists")
+    file_exists_mock = mocker.patch("openpilot.tools.lib.filereader.file_exists")
     os.environ["FILEREADER_CACHE"] = "1" if cache_enabled else "0"
     qlog = tempfile.NamedTemporaryFile(mode='wb', delete=False)

@@ -208,13 +209,12 @@ class TestLogReader:
     assert qlog_len == log_len

   @pytest.mark.parametrize("is_internal", [True, False])
-  @pytest.mark.slow
   def test_auto_source_scenarios(self, mocker, is_internal):
     lr = LogReader(QLOG_FILE)
     qlog_len = len(list(lr))

     with setup_source_scenario(mocker, is_internal=is_internal):
-      lr = LogReader(f"{TEST_ROUTE}/0/q")
+      lr = LogReader(f"{TEST_ROUTE}/3/q")
       log_len = len(list(lr))
       assert qlog_len == log_len

@@ -9,12 +9,14 @@ from urllib3.util import Timeout

 from openpilot.common.file_helpers import atomic_write_in_dir
 from openpilot.system.hardware.hw import Paths
+
 # Cache chunk size
 K = 1000
 CHUNK_SIZE = 1000 * K

 logging.getLogger("urllib3").setLevel(logging.WARNING)

+
 def hash_256(link: str) -> str:
   return sha256((link.split("?")[0]).encode('utf-8')).hexdigest()

@@ -24,7 +26,7 @@ class URLFileException(Exception):


 class URLFile:
-  _pool_manager: PoolManager|None = None
+  _pool_manager: PoolManager | None = None

   @staticmethod
   def reset() -> None:
@@ -33,16 +35,16 @@ class URLFile:
   @staticmethod
   def pool_manager() -> PoolManager:
     if URLFile._pool_manager is None:
-      socket_options = [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),]
+      socket_options = [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)]
       retries = Retry(total=5, backoff_factor=0.5, status_forcelist=[409, 429, 503, 504])
       URLFile._pool_manager = PoolManager(num_pools=10, maxsize=100, socket_options=socket_options, retries=retries)
     return URLFile._pool_manager

-  def __init__(self, url: str, timeout: int=10, debug: bool=False, cache: bool|None=None):
+  def __init__(self, url: str, timeout: int = 10, debug: bool = False, cache: bool | None = None):
     self._url = url
     self._timeout = Timeout(connect=timeout, read=timeout)
     self._pos = 0
-    self._length: int|None = None
+    self._length: int | None = None
     self._debug = debug
     # True by default, false if FILEREADER_CACHE is defined, but can be overwritten by the cache input
     self._force_download = not int(os.environ.get("FILEREADER_CACHE", "0"))
@@ -58,7 +60,7 @@ class URLFile:
   def __exit__(self, exc_type, exc_value, traceback) -> None:
     pass

-  def _request(self, method: str, url: str, headers: dict[str, str]|None=None) -> BaseHTTPResponse:
+  def _request(self, method: str, url: str, headers: dict[str, str] | None = None) -> BaseHTTPResponse:
     return URLFile.pool_manager().request(method, url, timeout=self._timeout, headers=headers)

   def get_length_online(self) -> int:
@@ -85,7 +87,7 @@ class URLFile:
       file_length.write(str(self._length))
     return self._length

-  def read(self, ll: int|None=None) -> bytes:
+  def read(self, ll: int | None = None) -> bytes:
     if self._force_download:
       return self.read_aux(ll=ll)

@@ -117,7 +119,7 @@ class URLFile:
       self._pos = file_end
       return response

-  def read_aux(self, ll: int|None=None) -> bytes:
+  def read_aux(self, ll: int | None = None) -> bytes:
     download_range = False
     headers = {}
     if self._pos != 0 or ll is not None:
@@ -152,7 +154,7 @@ class URLFile:
     self._pos += len(ret)
     return ret

-  def seek(self, pos:int) -> None:
+  def seek(self, pos: int) -> None:
     self._pos = pos

   @property