diff --git a/.github/workflows/selfdrive_tests.yaml b/.github/workflows/selfdrive_tests.yaml index 640ef3d01a..41d8156ca8 100644 --- a/.github/workflows/selfdrive_tests.yaml +++ b/.github/workflows/selfdrive_tests.yaml @@ -107,7 +107,6 @@ jobs: build_mac: name: build macOS - if: false # temp disable since homebrew install is getting stuck runs-on: ${{ ((github.repository == 'commaai/openpilot') && ((github.event_name != 'pull_request') || (github.event.pull_request.head.repo.full_name == 'commaai/openpilot'))) && 'namespace-profile-macos-8x14' || 'macos-latest' }} steps: - uses: actions/checkout@v4 @@ -125,8 +124,8 @@ jobs: - name: Install dependencies run: ./tools/mac_setup.sh env: - # package install has DeprecationWarnings - PYTHONWARNINGS: default + PYTHONWARNINGS: default # package install has DeprecationWarnings + HOMEBREW_DISPLAY_INSTALL_TIMES: 1 - name: Save Homebrew cache uses: actions/cache/save@v4 if: github.ref == 'refs/heads/master' diff --git a/.gitignore b/.gitignore index 3c0dad521d..00a0533d86 100644 --- a/.gitignore +++ b/.gitignore @@ -50,7 +50,6 @@ cereal/services.h cereal/gen cereal/messaging/bridge selfdrive/mapd/default_speeds_by_region.json -system/proclogd/proclogd selfdrive/ui/translations/tmp selfdrive/test/longitudinal_maneuvers/out selfdrive/car/tests/cars_dump diff --git a/.vscode/launch.json b/.vscode/launch.json index a6b341d9ea..f090061c42 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -23,6 +23,11 @@ "id": "args", "description": "Arguments to pass to the process", "type": "promptString" + }, + { + "id": "replayArg", + "type": "promptString", + "description": "Enter route or segment to replay." 
} ], "configurations": [ @@ -40,7 +45,41 @@ "type": "cppdbg", "request": "launch", "program": "${workspaceFolder}/${input:cpp_process}", - "cwd": "${workspaceFolder}", + "cwd": "${workspaceFolder}" + }, + { + "name": "Attach LLDB to Replay drive", + "type": "lldb", + "request": "attach", + "pid": "${command:pickMyProcess}", + "initCommands": [ + "script import time; time.sleep(3)" + ] + }, + { + "name": "Replay drive", + "type": "debugpy", + "request": "launch", + "program": "${workspaceFolder}/opendbc/safety/tests/safety_replay/replay_drive.py", + "args": [ + "${input:replayArg}" + ], + "console": "integratedTerminal", + "justMyCode": false, + "env": { + "PYTHONPATH": "${workspaceFolder}" + }, + "subProcess": true, + "stopOnEntry": false + } + ], + "compounds": [ + { + "name": "Replay drive + Safety LLDB", + "configurations": [ + "Replay drive", + "Attach LLDB to Replay drive" + ] } ] } \ No newline at end of file diff --git a/Jenkinsfile b/Jenkinsfile index 0905abd6da..f3a63d3dec 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -178,7 +178,7 @@ node { try { if (env.BRANCH_NAME == 'devel-staging') { - deviceStage("build release3-staging", "tici-needs-can", [], [ + deviceStage("build release3-staging", "tizi-needs-can", [], [ step("build release3-staging", "RELEASE_BRANCH=release3-staging $SOURCE_DIR/release/build_release.sh"), ]) } @@ -186,12 +186,12 @@ node { if (env.BRANCH_NAME == '__nightly') { parallel ( 'nightly': { - deviceStage("build nightly", "tici-needs-can", [], [ + deviceStage("build nightly", "tizi-needs-can", [], [ step("build nightly", "RELEASE_BRANCH=nightly $SOURCE_DIR/release/build_release.sh"), ]) }, 'nightly-dev': { - deviceStage("build nightly-dev", "tici-needs-can", [], [ + deviceStage("build nightly-dev", "tizi-needs-can", [], [ step("build nightly-dev", "PANDA_DEBUG_BUILD=1 RELEASE_BRANCH=nightly-dev $SOURCE_DIR/release/build_release.sh"), ]) }, @@ -200,39 +200,30 @@ node { if (!env.BRANCH_NAME.matches(excludeRegex)) { parallel ( - // tici 
tests 'onroad tests': { - deviceStage("onroad", "tici-needs-can", ["UNSAFE=1"], [ + deviceStage("onroad", "tizi-needs-can", ["UNSAFE=1"], [ step("build openpilot", "cd system/manager && ./build.py"), step("check dirty", "release/check-dirty.sh"), step("onroad tests", "pytest selfdrive/test/test_onroad.py -s", [timeout: 60]), ]) }, 'HW + Unit Tests': { - deviceStage("tici-hardware", "tici-common", ["UNSAFE=1"], [ + deviceStage("tizi-hardware", "tizi-common", ["UNSAFE=1"], [ step("build", "cd system/manager && ./build.py"), step("test pandad", "pytest selfdrive/pandad/tests/test_pandad.py", [diffPaths: ["panda", "selfdrive/pandad/"]]), step("test power draw", "pytest -s system/hardware/tici/tests/test_power_draw.py"), step("test encoder", "LD_LIBRARY_PATH=/usr/local/lib pytest system/loggerd/tests/test_encoder.py", [diffPaths: ["system/loggerd/"]]), - step("test pigeond", "pytest system/ubloxd/tests/test_pigeond.py", [diffPaths: ["system/ubloxd/"]]), step("test manager", "pytest system/manager/test/test_manager.py"), ]) }, 'loopback': { - deviceStage("loopback", "tici-loopback", ["UNSAFE=1"], [ + deviceStage("loopback", "tizi-loopback", ["UNSAFE=1"], [ step("build openpilot", "cd system/manager && ./build.py"), step("test pandad loopback", "pytest selfdrive/pandad/tests/test_pandad_loopback.py"), ]) }, - 'camerad AR0231': { - deviceStage("AR0231", "tici-ar0231", ["UNSAFE=1"], [ - step("build", "cd system/manager && ./build.py"), - step("test camerad", "pytest system/camerad/test/test_camerad.py", [timeout: 60]), - step("test exposure", "pytest system/camerad/test/test_exposure.py"), - ]) - }, 'camerad OX03C10': { - deviceStage("OX03C10", "tici-ox03c10", ["UNSAFE=1"], [ + deviceStage("OX03C10", "tizi-ox03c10", ["UNSAFE=1"], [ step("build", "cd system/manager && ./build.py"), step("test camerad", "pytest system/camerad/test/test_camerad.py", [timeout: 60]), step("test exposure", "pytest system/camerad/test/test_exposure.py"), @@ -246,17 +237,13 @@ node { ]) }, 
'sensord': { - deviceStage("LSM + MMC", "tici-lsmc", ["UNSAFE=1"], [ - step("build", "cd system/manager && ./build.py"), - step("test sensord", "pytest system/sensord/tests/test_sensord.py"), - ]) - deviceStage("BMX + LSM", "tici-bmx-lsm", ["UNSAFE=1"], [ + deviceStage("LSM + MMC", "tizi-lsmc", ["UNSAFE=1"], [ step("build", "cd system/manager && ./build.py"), step("test sensord", "pytest system/sensord/tests/test_sensord.py"), ]) }, 'replay': { - deviceStage("model-replay", "tici-replay", ["UNSAFE=1"], [ + deviceStage("model-replay", "tizi-replay", ["UNSAFE=1"], [ step("build", "cd system/manager && ./build.py", [diffPaths: ["selfdrive/modeld/", "tinygrad_repo", "selfdrive/test/process_replay/model_replay.py"]]), step("model replay", "selfdrive/test/process_replay/model_replay.py", [diffPaths: ["selfdrive/modeld/", "tinygrad_repo", "selfdrive/test/process_replay/model_replay.py"]]), ]) @@ -266,7 +253,6 @@ node { step("build openpilot", "cd system/manager && ./build.py"), step("test pandad loopback", "SINGLE_PANDA=1 pytest selfdrive/pandad/tests/test_pandad_loopback.py"), step("test pandad spi", "pytest selfdrive/pandad/tests/test_pandad_spi.py"), - step("test pandad", "pytest selfdrive/pandad/tests/test_pandad.py", [diffPaths: ["panda", "selfdrive/pandad/"]]), step("test amp", "pytest system/hardware/tici/tests/test_amplifier.py"), // TODO: enable once new AGNOS is available // step("test esim", "pytest system/hardware/tici/tests/test_esim.py"), diff --git a/RELEASES.md b/RELEASES.md index dacf0eaa17..189aa7ad54 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -1,7 +1,13 @@ Version 0.10.1 (2025-09-08) ======================== -* Record driving feedback using LKAS button -* Honda City 2023 support thanks to drFritz! 
+* New driving model + * World Model: removed global localization inputs + * World Model: 2x the number of parameters + * World Model: trained on 4x the number of segments + * Driving Vision Model: trained on 4x the number of segments +* Honda City 2023 support thanks to vanillagorillaa and drFritz! +* Honda N-Box 2018 support thanks to miettal! +* Honda Odyssey 2021-25 support thanks to csouers and MVL! Version 0.10.0 (2025-08-05) ======================== diff --git a/SConstruct b/SConstruct index 192f83a0f6..12d879b350 100644 --- a/SConstruct +++ b/SConstruct @@ -359,11 +359,6 @@ SConscript([ 'system/ubloxd/SConscript', 'system/loggerd/SConscript', ]) -if arch != "Darwin": - SConscript([ - 'system/logcatd/SConscript', - 'system/proclogd/SConscript', - ]) if arch == "larch64": SConscript(['system/camerad/SConscript']) diff --git a/cereal/log.capnp b/cereal/log.capnp index 474b4777cb..7babd4d8b5 100644 --- a/cereal/log.capnp +++ b/cereal/log.capnp @@ -585,7 +585,6 @@ struct PandaState @0xa7649e2575e4591e { heartbeatLost @22 :Bool; interruptLoad @25 :Float32; fanPower @28 :UInt8; - fanStallCount @34 :UInt8; spiErrorCount @33 :UInt16; @@ -714,6 +713,7 @@ struct PandaState @0xa7649e2575e4591e { usbPowerModeDEPRECATED @12 :PeripheralState.UsbPowerModeDEPRECATED; safetyParamDEPRECATED @20 :Int16; safetyParam2DEPRECATED @26 :UInt32; + fanStallCountDEPRECATED @34 :UInt8; } struct PeripheralState { diff --git a/cereal/messaging/socketmaster.cc b/cereal/messaging/socketmaster.cc index 1a7a48980e..7f7e2795c4 100644 --- a/cereal/messaging/socketmaster.cc +++ b/cereal/messaging/socketmaster.cc @@ -33,7 +33,7 @@ MessageContext message_context; struct SubMaster::SubMessage { std::string name; SubSocket *socket = nullptr; - int freq = 0; + float freq = 0.0f; bool updated = false, alive = false, valid = false, ignore_alive; uint64_t rcv_time = 0, rcv_frame = 0; void *allocated_msg_reader = nullptr; diff --git a/cereal/services.py b/cereal/services.py index b48290bc77..17ac2e926d 
100755 --- a/cereal/services.py +++ b/cereal/services.py @@ -121,12 +121,12 @@ def build_header(): h += "#include \n" h += "#include \n" - h += "struct service { std::string name; bool should_log; int frequency; int decimation; };\n" + h += "struct service { std::string name; bool should_log; float frequency; int decimation; };\n" h += "static std::map services = {\n" for k, v in SERVICE_LIST.items(): should_log = "true" if v.should_log else "false" decimation = -1 if v.decimation is None else v.decimation - h += ' { "%s", {"%s", %s, %d, %d}},\n' % \ + h += ' { "%s", {"%s", %s, %f, %d}},\n' % \ (k, k, should_log, v.frequency, decimation) h += "};\n" diff --git a/common/model.h b/common/model.h index a984f55e8d..a1e034c06f 100644 --- a/common/model.h +++ b/common/model.h @@ -1 +1 @@ -#define DEFAULT_MODEL "Steam Powered (Default)" +#define DEFAULT_MODEL "Firehose (Default)" diff --git a/common/params_keys.h b/common/params_keys.h index fc7842720b..011a3161f9 100644 --- a/common/params_keys.h +++ b/common/params_keys.h @@ -94,7 +94,6 @@ inline static std::unordered_map keys = { {"Offroad_NeosUpdate", {CLEAR_ON_MANAGER_START, JSON}}, {"Offroad_NoFirmware", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, JSON}}, {"Offroad_Recalibration", {CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION, JSON}}, - {"Offroad_StorageMissing", {CLEAR_ON_MANAGER_START, JSON}}, {"Offroad_TemperatureTooHigh", {CLEAR_ON_MANAGER_START, JSON}}, {"Offroad_UnregisteredHardware", {CLEAR_ON_MANAGER_START, JSON}}, {"Offroad_UpdateFailed", {CLEAR_ON_MANAGER_START, JSON}}, diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index 154734b7fc..7583095eaf 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -39,7 +39,7 @@ All of these are examples of good PRs: ### First contribution [Projects / openpilot bounties](https://github.com/orgs/commaai/projects/26/views/1?pane=info) is the best place to get started and goes in-depth on what's expected when working on a bounty. 
-There's lot of bounties that don't require a comma 3/3X or a car. +There's lot of bounties that don't require a comma 3X or a car. ## Pull Requests diff --git a/docs/DEBUGGING_SAFETY.md b/docs/DEBUGGING_SAFETY.md new file mode 100644 index 0000000000..cd0a46b446 --- /dev/null +++ b/docs/DEBUGGING_SAFETY.md @@ -0,0 +1,30 @@ +# Debugging Panda Safety with Replay Drive + LLDB + +## 1. Start the debugger in VS Code + +* Select **Replay drive + Safety LLDB**. +* Enter the route or segment when prompted. +[](https://github.com/user-attachments/assets/b0cc320a-083e-46a7-a9f8-ca775bbe5604) + +## 2. Attach LLDB + +* When prompted, pick the running **`replay_drive` process**. +* ⚠️ Attach quickly, or `replay_drive` will start consuming messages. + +> [!TIP] +> Add a Python breakpoint at the start of `replay_drive.py` to pause execution and give yourself time to attach LLDB. + +## 3. Set breakpoints in VS Code +Breakpoints can be set directly in `modes/xxx.h` (or any C file). +No extra LLDB commands are required — just place breakpoints in the editor. + +## 4. Resume execution +Once attached, you can step through both Python (on the replay) and C safety code as CAN logs are replayed. + +> [!NOTE] +> * Use short routes for quicker iteration. +> * Pause `replay_drive` early to avoid wasting log messages. + +## Video + +View a demo of this workflow on the PR that added it: https://github.com/commaai/openpilot/pull/36055#issue-3352911578 \ No newline at end of file diff --git a/docs/SAFETY.md b/docs/SAFETY.md index 18a450a395..25815e3372 100644 --- a/docs/SAFETY.md +++ b/docs/SAFETY.md @@ -16,7 +16,7 @@ industry standards of safety for Level 2 Driver Assistance Systems. In particula ISO26262 guidelines, including those from [pertinent documents](https://www.nhtsa.gov/sites/nhtsa.dot.gov/files/documents/13498a_812_573_alcsystemreport.pdf) released by NHTSA. 
In addition, we impose strict coding guidelines (like [MISRA C : 2012](https://www.misra.org.uk/what-is-misra/)) on parts of openpilot that are safety relevant. We also perform software-in-the-loop, -hardware-in-the-loop and in-vehicle tests before each software release. +hardware-in-the-loop, and in-vehicle tests before each software release. Following Hazard and Risk Analysis and FMEA, at a very high level, we have designed openpilot ensuring two main safety requirements. @@ -29,8 +29,18 @@ ensuring two main safety requirements. For additional safety implementation details, refer to [panda safety model](https://github.com/commaai/panda#safety-model). For vehicle specific implementation of the safety concept, refer to [opendbc/safety/safety](https://github.com/commaai/opendbc/tree/master/opendbc/safety/safety). -**Extra note**: comma.ai strongly discourages the use of openpilot forks with safety code either missing or - not fully meeting the above requirements. +[^1]: For these actuator limits we observe ISO11270 and ISO15622. Lateral limits described there translate to 0.9 seconds of maximum actuation to achieve a 1m lateral deviation. -[^1]: For these actuator limits we observe ISO11270 and ISO15622. Lateral limits described there translate to 0.9 seconds of maximum actuation to achieve a 1m lateral deviation. 
+--- +### Forks of openpilot + +* Do not disable or nerf [driver monitoring](https://github.com/commaai/openpilot/tree/master/selfdrive/monitoring) +* Do not disable or nerf [excessive actuation checks](https://github.com/commaai/openpilot/tree/master/selfdrive/selfdrived/helpers.py) +* If your fork modifies any of the code in `opendbc/safety/`: + * your fork cannot use the openpilot trademark + * your fork must preserve the full [safety test suite](https://github.com/commaai/opendbc/tree/master/opendbc/safety/tests) and all tests must pass, including any new coverage required by the fork's changes + +Failure to comply with these standards will get you and your users banned from comma.ai servers. + +**comma.ai strongly discourages the use of openpilot forks with safety code either missing or not fully meeting the above requirements.** diff --git a/docs/how-to/connect-to-comma.md b/docs/how-to/connect-to-comma.md index cbaccaae6a..5f02e11599 100644 --- a/docs/how-to/connect-to-comma.md +++ b/docs/how-to/connect-to-comma.md @@ -1,11 +1,11 @@ -# connect to a comma 3/3X +# connect to a comma 3X -A comma 3/3X is a normal [Linux](https://github.com/commaai/agnos-builder) computer that exposes [SSH](https://wiki.archlinux.org/title/Secure_Shell) and a [serial console](https://wiki.archlinux.org/title/Working_with_the_serial_console). +A comma 3X is a normal [Linux](https://github.com/commaai/agnos-builder) computer that exposes [SSH](https://wiki.archlinux.org/title/Secure_Shell) and a [serial console](https://wiki.archlinux.org/title/Working_with_the_serial_console). ## Serial Console On both the comma three and 3X, the serial console is accessible from the main OBD-C port. -Connect the comma 3/3X to your computer with a normal USB C cable, or use a [comma serial](https://comma.ai/shop/comma-serial) for steady 12V power. +Connect the comma 3X to your computer with a normal USB C cable, or use a [comma serial](https://comma.ai/shop/comma-serial) for steady 12V power. 
On the comma three, the serial console is exposed through a UART-to-USB chip, and `tools/scripts/serial.sh` can be used to connect. @@ -45,7 +45,7 @@ In order to use ADB on your device, you'll need to perform the following steps u * Here's an example command for connecting to your device using its tethered connection: `adb connect 192.168.43.1:5555` > [!NOTE] -> The default port for ADB is 5555 on the comma 3/3X. +> The default port for ADB is 5555 on the comma 3X. For more info on ADB, see the [Android Debug Bridge (ADB) documentation](https://developer.android.com/tools/adb). diff --git a/docs/how-to/replay-a-drive.md b/docs/how-to/replay-a-drive.md index 084b6bf825..b0db36a46f 100644 --- a/docs/how-to/replay-a-drive.md +++ b/docs/how-to/replay-a-drive.md @@ -8,7 +8,7 @@ Replaying is a critical tool for openpilot development and debugging. Just run `tools/replay/replay --demo`. ## Replaying CAN data -*Hardware required: jungle and comma 3/3X* +*Hardware required: jungle and comma 3X* 1. Connect your PC to a jungle. 2. diff --git a/docs/how-to/turn-the-speed-blue.md b/docs/how-to/turn-the-speed-blue.md index 13b3b03e80..eb6e75afa2 100644 --- a/docs/how-to/turn-the-speed-blue.md +++ b/docs/how-to/turn-the-speed-blue.md @@ -3,7 +3,7 @@ In 30 minutes, we'll get an openpilot development environment set up on your computer and make some changes to openpilot's UI. -And if you have a comma 3/3X, we'll deploy the change to your device for testing. +And if you have a comma 3X, we'll deploy the change to your device for testing. ## 1. 
Set up your development environment diff --git a/launch_env.sh b/launch_env.sh index 4c011c6ac0..67dd5ee795 100755 --- a/launch_env.sh +++ b/launch_env.sh @@ -7,7 +7,7 @@ export OPENBLAS_NUM_THREADS=1 export VECLIB_MAXIMUM_THREADS=1 if [ -z "$AGNOS_VERSION" ]; then - export AGNOS_VERSION="12.8" + export AGNOS_VERSION="13.1" fi export STAGING_ROOT="/data/safe_staging" diff --git a/mkdocs.yml b/mkdocs.yml index a66d1c76d4..550f807aca 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -21,7 +21,7 @@ nav: - What is openpilot?: getting-started/what-is-openpilot.md - How-to: - Turn the speed blue: how-to/turn-the-speed-blue.md - - Connect to a comma 3/3X: how-to/connect-to-comma.md + - Connect to a comma 3X: how-to/connect-to-comma.md # - Make your first pull request: how-to/make-first-pr.md #- Replay a drive: how-to/replay-a-drive.md - Concepts: diff --git a/opendbc_repo b/opendbc_repo index 42bbd450b9..c705ebf987 160000 --- a/opendbc_repo +++ b/opendbc_repo @@ -1 +1 @@ -Subproject commit 42bbd450b90b01c4f7f82fef324bea06341b1a54 +Subproject commit c705ebf9872193e97f9498868cead9e9affdee15 diff --git a/panda b/panda index 7eab6fd61b..7c393d1cd5 160000 --- a/panda +++ b/panda @@ -1 +1 @@ -Subproject commit 7eab6fd61bae085e0fd44cccb79dc6451163029e +Subproject commit 7c393d1cd5cc88d3c28af2086bf25ff6f3035574 diff --git a/pyproject.toml b/pyproject.toml index 0968cd4ed3..2d4505c744 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,6 +36,9 @@ dependencies = [ "pyopenssl < 24.3.0", "pyaudio", + # ubloxd (TODO: just use struct) + "kaitaistruct", + # panda "libusb1", "spidev; platform_system == 'Linux'", @@ -101,8 +104,9 @@ dev = [ "av", "azure-identity", "azure-storage-blob", - "dbus-next", + "dbus-next", # TODO: remove once we moved everything to jeepney "dictdiffer", + "jeepney", "matplotlib", "opencv-python-headless", "parameterized >=0.8, <0.9", @@ -120,6 +124,7 @@ dev = [ tools = [ "metadrive-simulator @ 
https://github.com/commaai/metadrive/releases/download/MetaDrive-minimal-0.4.2.4/metadrive_simulator-0.4.2.4-py3-none-any.whl ; (platform_machine != 'aarch64')", + "dearpygui>=2.1.0", ] [project.urls] @@ -157,7 +162,6 @@ testpaths = [ "system/camerad", "system/hardware", "system/loggerd", - "system/proclogd", "system/tests", "system/ubloxd", "system/webrtc", @@ -173,7 +177,7 @@ quiet-level = 3 # if you've got a short variable name that's getting flagged, add it here ignore-words-list = "bu,ro,te,ue,alo,hda,ois,nam,nams,ned,som,parm,setts,inout,warmup,bumb,nd,sie,preints,whit,indexIn,ws,uint,grey,deque,stdio,amin,BA,LITE,atEnd,UIs,errorString,arange,FocusIn,od,tim,relA,hist,copyable,jupyter,thead,TGE,abl,lite" builtin = "clear,rare,informal,code,names,en-GB_to_en-US" -skip = "./third_party/*, ./tinygrad/*, ./tinygrad_repo/*, ./msgq/*, ./panda/*, ./opendbc/*, ./opendbc_repo/*, ./rednose/*, ./rednose_repo/*, ./teleoprtc/*, ./teleoprtc_repo/*, *.ts, uv.lock, *.onnx, ./cereal/gen/*, */c_generated_code/*, docs/assets/*" +skip = "./third_party/*, ./tinygrad/*, ./tinygrad_repo/*, ./msgq/*, ./panda/*, ./opendbc/*, ./opendbc_repo/*, ./rednose/*, ./rednose_repo/*, ./teleoprtc/*, ./teleoprtc_repo/*, *.ts, uv.lock, *.onnx, ./cereal/gen/*, */c_generated_code/*, docs/assets/*, tools/plotjuggler/layouts/*" [tool.mypy] python_version = "3.11" @@ -247,6 +251,7 @@ exclude = [ "teleoprtc_repo", "third_party", "*.ipynb", + "generated", ] lint.flake8-implicit-str-concat.allow-multiline = false diff --git a/selfdrive/car/tests/test_car_interfaces.py b/selfdrive/car/tests/test_car_interfaces.py index 7d136b824c..7c7a563003 100644 --- a/selfdrive/car/tests/test_car_interfaces.py +++ b/selfdrive/car/tests/test_car_interfaces.py @@ -1,15 +1,12 @@ import os -import math import hypothesis.strategies as st from hypothesis import Phase, given, settings from parameterized import parameterized from cereal import car, custom from opendbc.car import DT_CTRL -from opendbc.car.car_helpers import 
interfaces from opendbc.car.structs import CarParams -from opendbc.car.tests.test_car_interfaces import get_fuzzy_car_interface_args -from opendbc.car.fw_versions import FW_VERSIONS, FW_QUERY_CONFIGS +from opendbc.car.tests.test_car_interfaces import get_fuzzy_car_interface from opendbc.car.mock.values import CAR as MOCK from opendbc.car.values import PLATFORMS from openpilot.selfdrive.car.helpers import convert_carControlSP @@ -21,11 +18,6 @@ from openpilot.selfdrive.test.fuzzy_generation import FuzzyGenerator from openpilot.sunnypilot.selfdrive.car import interfaces as sunnypilot_interfaces -ALL_ECUS = {ecu for ecus in FW_VERSIONS.values() for ecu in ecus.keys()} -ALL_ECUS |= {ecu for config in FW_QUERY_CONFIGS.values() for ecu in config.extra_ecus} - -ALL_REQUESTS = {tuple(r.request) for config in FW_QUERY_CONFIGS.values() for r in config.requests} - MAX_EXAMPLES = int(os.environ.get('MAX_EXAMPLES', '60')) @@ -37,43 +29,10 @@ class TestCarInterfaces: phases=(Phase.reuse, Phase.generate, Phase.shrink)) @given(data=st.data()) def test_car_interfaces(self, car_name, data): - CarInterface = interfaces[car_name] - - args = get_fuzzy_car_interface_args(data.draw) - - car_params = CarInterface.get_params(car_name, args['fingerprints'], args['car_fw'], - alpha_long=args['alpha_long'], is_release=False, docs=False) - car_params_sp = CarInterface.get_params_sp(car_params, car_name, args['fingerprints'], args['car_fw'], - alpha_long=args['alpha_long'], docs=False) - car_params = car_params.as_reader() - car_interface = CarInterface(car_params, car_params_sp) + car_interface = get_fuzzy_car_interface(car_name, data.draw) + car_params = car_interface.CP.as_reader() + car_params_sp = car_interface.CP_SP sunnypilot_interfaces.setup_interfaces(car_interface) - assert car_params - assert car_params_sp - assert car_interface - - assert car_params.mass > 1 - assert car_params.wheelbase > 0 - # centerToFront is center of gravity to front wheels, assert a reasonable range - assert 
car_params.wheelbase * 0.3 < car_params.centerToFront < car_params.wheelbase * 0.7 - assert car_params.maxLateralAccel > 0 - - # Longitudinal sanity checks - assert len(car_params.longitudinalTuning.kpV) == len(car_params.longitudinalTuning.kpBP) - assert len(car_params.longitudinalTuning.kiV) == len(car_params.longitudinalTuning.kiBP) - - # Lateral sanity checks - if car_params.steerControlType != CarParams.SteerControlType.angle: - tune = car_params.lateralTuning - if tune.which() == 'pid': - if car_name != MOCK.MOCK: - assert not math.isnan(tune.pid.kf) and tune.pid.kf > 0 - assert len(tune.pid.kpV) > 0 and len(tune.pid.kpV) == len(tune.pid.kpBP) - assert len(tune.pid.kiV) > 0 and len(tune.pid.kiV) == len(tune.pid.kiBP) - - elif tune.which() == 'torque': - assert not math.isnan(tune.torque.kf) and tune.torque.kf > 0 - assert not math.isnan(tune.torque.friction) and tune.torque.friction > 0 cc_msg = FuzzyGenerator.get_random_msg(data.draw, car.CarControl, real_floats=True) cc_sp_msg = FuzzyGenerator.get_random_msg(data.draw, custom.CarControlSP, real_floats=True) diff --git a/selfdrive/controls/lib/desire_helper.py b/selfdrive/controls/lib/desire_helper.py index 93088182f9..e72d464d06 100644 --- a/selfdrive/controls/lib/desire_helper.py +++ b/selfdrive/controls/lib/desire_helper.py @@ -51,6 +51,10 @@ class DesireHelper: self.lane_turn_controller = LaneTurnController(self) self.lane_turn_direction = custom.TurnDirection.none + @staticmethod + def get_lane_change_direction(CS): + return LaneChangeDirection.left if CS.leftBlinker else LaneChangeDirection.right + def update(self, carstate, lateral_active, lane_change_prob): self.alc.update_params() self.lane_turn_controller.update_params() @@ -71,12 +75,13 @@ class DesireHelper: if self.lane_change_state == LaneChangeState.off and one_blinker and not self.prev_one_blinker and not below_lane_change_speed: self.lane_change_state = LaneChangeState.preLaneChange self.lane_change_ll_prob = 1.0 + # Initialize lane change 
direction to prevent UI alert flicker + self.lane_change_direction = self.get_lane_change_direction(carstate) # LaneChangeState.preLaneChange elif self.lane_change_state == LaneChangeState.preLaneChange: - # Set lane change direction - self.lane_change_direction = LaneChangeDirection.left if \ - carstate.leftBlinker else LaneChangeDirection.right + # Update lane change direction + self.lane_change_direction = self.get_lane_change_direction(carstate) torque_applied = carstate.steeringPressed and \ ((carstate.steeringTorque > 0 and self.lane_change_direction == LaneChangeDirection.left) or diff --git a/selfdrive/controls/lib/latcontrol_torque.py b/selfdrive/controls/lib/latcontrol_torque.py index e4554ae464..ca9c3fde66 100644 --- a/selfdrive/controls/lib/latcontrol_torque.py +++ b/selfdrive/controls/lib/latcontrol_torque.py @@ -56,10 +56,8 @@ class LatControlTorque(LatControl): actual_curvature = -VM.calc_curvature(math.radians(CS.steeringAngleDeg - params.angleOffsetDeg), CS.vEgo, params.roll) roll_compensation = params.roll * ACCELERATION_DUE_TO_GRAVITY curvature_deadzone = abs(VM.calc_curvature(math.radians(self.steering_angle_deadzone_deg), CS.vEgo, 0.0)) - desired_lateral_accel = desired_curvature * CS.vEgo ** 2 - # desired rate is the desired rate of change in the setpoint, not the absolute desired curvature - # desired_lateral_jerk = desired_curvature_rate * CS.vEgo ** 2 + desired_lateral_accel = desired_curvature * CS.vEgo ** 2 actual_lateral_accel = actual_curvature * CS.vEgo ** 2 lateral_accel_deadzone = curvature_deadzone * CS.vEgo ** 2 @@ -71,6 +69,8 @@ class LatControlTorque(LatControl): # do error correction in lateral acceleration space, convert at end to handle non-linear torque responses correctly pid_log.error = float(setpoint - measurement) ff = gravity_adjusted_lateral_accel + # latAccelOffset corrects roll compensation bias from device roll misalignment relative to car roll + ff -= self.torque_params.latAccelOffset ff += 
get_friction(desired_lateral_accel - actual_lateral_accel, lateral_accel_deadzone, FRICTION_THRESHOLD, self.torque_params) freeze_integrator = steer_limited_by_safety or CS.steeringPressed or CS.vEgo < 5 diff --git a/selfdrive/controls/tests/test_torqued_lat_accel_offset.py b/selfdrive/controls/tests/test_torqued_lat_accel_offset.py new file mode 100644 index 0000000000..84389856b6 --- /dev/null +++ b/selfdrive/controls/tests/test_torqued_lat_accel_offset.py @@ -0,0 +1,70 @@ +import numpy as np +from cereal import car, messaging +from opendbc.car import ACCELERATION_DUE_TO_GRAVITY +from opendbc.car import structs +from opendbc.car.lateral import get_friction, FRICTION_THRESHOLD +from openpilot.common.realtime import DT_MDL +from openpilot.selfdrive.locationd.torqued import TorqueEstimator, MIN_BUCKET_POINTS, POINTS_PER_BUCKET, STEER_BUCKET_BOUNDS + +np.random.seed(0) + +LA_ERR_STD = 1.0 +INPUT_NOISE_STD = 0.08 +V_EGO = 30.0 + +WARMUP_BUCKET_POINTS = (1.5*MIN_BUCKET_POINTS).astype(int) +STRAIGHT_ROAD_LA_BOUNDS = (0.02, 0.03) + +ROLL_BIAS_DEG = 2.0 +ROLL_COMPENSATION_BIAS = ACCELERATION_DUE_TO_GRAVITY*float(np.sin(np.deg2rad(ROLL_BIAS_DEG))) +TORQUE_TUNE = structs.CarParams.LateralTorqueTuning(latAccelFactor=2.0, latAccelOffset=0.0, friction=0.2) +TORQUE_TUNE_BIASED = structs.CarParams.LateralTorqueTuning(latAccelFactor=2.0, latAccelOffset=-ROLL_COMPENSATION_BIAS, friction=0.2) + +def generate_inputs(torque_tune, la_err_std, input_noise_std=None): + rng = np.random.default_rng(0) + steer_torques = np.concat([rng.uniform(bnd[0], bnd[1], pts) for bnd, pts in zip(STEER_BUCKET_BOUNDS, WARMUP_BUCKET_POINTS, strict=True)]) + la_errs = rng.normal(scale=la_err_std, size=steer_torques.size) + frictions = np.array([get_friction(la_err, 0.0, FRICTION_THRESHOLD, torque_tune) for la_err in la_errs]) + lat_accels = torque_tune.latAccelFactor*steer_torques + torque_tune.latAccelOffset + frictions + if input_noise_std is not None: + steer_torques += rng.normal(scale=input_noise_std, 
size=steer_torques.size) + lat_accels += rng.normal(scale=input_noise_std, size=steer_torques.size) + return steer_torques, lat_accels + +def get_warmed_up_estimator(steer_torques, lat_accels): + est = TorqueEstimator(car.CarParams()) + for steer_torque, lat_accel in zip(steer_torques, lat_accels, strict=True): + est.filtered_points.add_point(steer_torque, lat_accel) + return est + +def simulate_straight_road_msgs(est): + carControl = messaging.new_message('carControl').carControl + carOutput = messaging.new_message('carOutput').carOutput + carState = messaging.new_message('carState').carState + livePose = messaging.new_message('livePose').livePose + carControl.latActive = True + carState.vEgo = V_EGO + carState.steeringPressed = False + ts = DT_MDL*np.arange(2*POINTS_PER_BUCKET) + steer_torques = np.concat((np.linspace(-0.03, -0.02, POINTS_PER_BUCKET), np.linspace(0.02, 0.03, POINTS_PER_BUCKET))) + lat_accels = TORQUE_TUNE.latAccelFactor * steer_torques + for t, steer_torque, lat_accel in zip(ts, steer_torques, lat_accels, strict=True): + carOutput.actuatorsOutput.torque = float(-steer_torque) + livePose.orientationNED.x = float(np.deg2rad(ROLL_BIAS_DEG)) + livePose.angularVelocityDevice.z = float(lat_accel / V_EGO) + for which, msg in (('carControl', carControl), ('carOutput', carOutput), ('carState', carState), ('livePose', livePose)): + est.handle_log(t, which, msg) + +def test_estimated_offset(): + steer_torques, lat_accels = generate_inputs(TORQUE_TUNE_BIASED, la_err_std=LA_ERR_STD, input_noise_std=INPUT_NOISE_STD) + est = get_warmed_up_estimator(steer_torques, lat_accels) + msg = est.get_msg() + # TODO add lataccelfactor and friction check when we have more accurate estimates + assert abs(msg.liveTorqueParameters.latAccelOffsetRaw - TORQUE_TUNE_BIASED.latAccelOffset) < 0.1 + +def test_straight_road_roll_bias(): + steer_torques, lat_accels = generate_inputs(TORQUE_TUNE, la_err_std=LA_ERR_STD, input_noise_std=INPUT_NOISE_STD) + est = 
get_warmed_up_estimator(steer_torques, lat_accels) + simulate_straight_road_msgs(est) + msg = est.get_msg() + assert (msg.liveTorqueParameters.latAccelOffsetRaw < -0.05) and np.isfinite(msg.liveTorqueParameters.latAccelOffsetRaw) diff --git a/selfdrive/modeld/constants.py b/selfdrive/modeld/constants.py index 5ca0a86bc8..ff7e1d8600 100644 --- a/selfdrive/modeld/constants.py +++ b/selfdrive/modeld/constants.py @@ -13,12 +13,9 @@ class ModelConstants: META_T_IDXS = [2., 4., 6., 8., 10.] # model inputs constants - MODEL_FREQ = 20 - HISTORY_FREQ = 5 - HISTORY_LEN_SECONDS = 5 - TEMPORAL_SKIP = MODEL_FREQ // HISTORY_FREQ - FULL_HISTORY_BUFFER_LEN = MODEL_FREQ * HISTORY_LEN_SECONDS - INPUT_HISTORY_BUFFER_LEN = HISTORY_FREQ * HISTORY_LEN_SECONDS + N_FRAMES = 2 + MODEL_RUN_FREQ = 20 + MODEL_CONTEXT_FREQ = 5 # "model_trained_fps" FEATURE_LEN = 512 diff --git a/selfdrive/modeld/fill_model_msg.py b/selfdrive/modeld/fill_model_msg.py index a2b54b420e..82c4c92b1d 100644 --- a/selfdrive/modeld/fill_model_msg.py +++ b/selfdrive/modeld/fill_model_msg.py @@ -149,7 +149,7 @@ def fill_model_msg(base_msg: capnp._DynamicStructBuilder, extended_msg: capnp._D meta.hardBrakePredicted = hard_brake_predicted.item() # confidence - if vipc_frame_id % (2*ModelConstants.MODEL_FREQ) == 0: + if vipc_frame_id % (2*ModelConstants.MODEL_RUN_FREQ) == 0: # any disengage prob brake_disengage_probs = net_output_data['meta'][0,Meta.BRAKE_DISENGAGE] gas_disengage_probs = net_output_data['meta'][0,Meta.GAS_DISENGAGE] diff --git a/selfdrive/modeld/modeld.py b/selfdrive/modeld/modeld.py index 5f9ccf5072..00b49afaba 100755 --- a/selfdrive/modeld/modeld.py +++ b/selfdrive/modeld/modeld.py @@ -80,6 +80,64 @@ class FrameMeta: if vipc is not None: self.frame_id, self.timestamp_sof, self.timestamp_eof = vipc.frame_id, vipc.timestamp_sof, vipc.timestamp_eof +class InputQueues: + def __init__ (self, model_fps, env_fps, n_frames_input): + assert env_fps % model_fps == 0 + assert env_fps >= model_fps + self.model_fps = 
model_fps + self.env_fps = env_fps + self.n_frames_input = n_frames_input + + self.dtypes = {} + self.shapes = {} + self.q = {} + + def update_dtypes_and_shapes(self, input_dtypes, input_shapes) -> None: + self.dtypes.update(input_dtypes) + if self.env_fps == self.model_fps: + self.shapes.update(input_shapes) + else: + for k in input_shapes: + shape = list(input_shapes[k]) + if 'img' in k: + n_channels = shape[1] // self.n_frames_input + shape[1] = (self.env_fps // self.model_fps + (self.n_frames_input - 1)) * n_channels + else: + shape[1] = (self.env_fps // self.model_fps) * shape[1] + self.shapes[k] = tuple(shape) + + def reset(self) -> None: + self.q = {k: np.zeros(self.shapes[k], dtype=self.dtypes[k]) for k in self.dtypes.keys()} + + def enqueue(self, inputs:dict[str, np.ndarray]) -> None: + for k in inputs.keys(): + if inputs[k].dtype != self.dtypes[k]: + raise ValueError(f'supplied input <{k}({inputs[k].dtype})> has wrong dtype, expected {self.dtypes[k]}') + input_shape = list(self.shapes[k]) + input_shape[1] = -1 + single_input = inputs[k].reshape(tuple(input_shape)) + sz = single_input.shape[1] + self.q[k][:,:-sz] = self.q[k][:,sz:] + self.q[k][:,-sz:] = single_input + + def get(self, *names) -> dict[str, np.ndarray]: + if self.env_fps == self.model_fps: + return {k: self.q[k] for k in names} + else: + out = {} + for k in names: + shape = self.shapes[k] + if 'img' in k: + n_channels = shape[1] // (self.env_fps // self.model_fps + (self.n_frames_input - 1)) + out[k] = np.concatenate([self.q[k][:, s:s+n_channels] for s in np.linspace(0, shape[1] - n_channels, self.n_frames_input, dtype=int)], axis=1) + elif 'pulse' in k: + # any pulse within interval counts + out[k] = self.q[k].reshape((shape[0], shape[1] * self.model_fps // self.env_fps, self.env_fps // self.model_fps, -1)).max(axis=2) + else: + idxs = np.arange(-1, -shape[1], -self.env_fps // self.model_fps)[::-1] + out[k] = self.q[k][:, idxs] + return out + class ModelState(ModelStateBase): frames: 
dict[str, DrivingModelFrame] inputs: dict[str, np.ndarray] @@ -102,19 +160,15 @@ class ModelState(ModelStateBase): self.policy_output_slices = policy_metadata['output_slices'] policy_output_size = policy_metadata['output_shapes']['outputs'][1] - self.frames = {name: DrivingModelFrame(context, ModelConstants.TEMPORAL_SKIP) for name in self.vision_input_names} + self.frames = {name: DrivingModelFrame(context, ModelConstants.MODEL_RUN_FREQ//ModelConstants.MODEL_CONTEXT_FREQ) for name in self.vision_input_names} self.prev_desire = np.zeros(ModelConstants.DESIRE_LEN, dtype=np.float32) - self.full_features_buffer = np.zeros((1, ModelConstants.FULL_HISTORY_BUFFER_LEN, ModelConstants.FEATURE_LEN), dtype=np.float32) - self.full_desire = np.zeros((1, ModelConstants.FULL_HISTORY_BUFFER_LEN, ModelConstants.DESIRE_LEN), dtype=np.float32) - self.temporal_idxs = slice(-1-(ModelConstants.TEMPORAL_SKIP*(ModelConstants.INPUT_HISTORY_BUFFER_LEN-1)), None, ModelConstants.TEMPORAL_SKIP) - # policy inputs - self.numpy_inputs = { - 'desire': np.zeros((1, ModelConstants.INPUT_HISTORY_BUFFER_LEN, ModelConstants.DESIRE_LEN), dtype=np.float32), - 'traffic_convention': np.zeros((1, ModelConstants.TRAFFIC_CONVENTION_LEN), dtype=np.float32), - 'features_buffer': np.zeros((1, ModelConstants.INPUT_HISTORY_BUFFER_LEN, ModelConstants.FEATURE_LEN), dtype=np.float32), - } + self.numpy_inputs = {k: np.zeros(self.policy_input_shapes[k], dtype=np.float32) for k in self.policy_input_shapes} + self.full_input_queues = InputQueues(ModelConstants.MODEL_CONTEXT_FREQ, ModelConstants.MODEL_RUN_FREQ, ModelConstants.N_FRAMES) + for k in ['desire_pulse', 'features_buffer']: + self.full_input_queues.update_dtypes_and_shapes({k: self.numpy_inputs[k].dtype}, {k: self.numpy_inputs[k].shape}) + self.full_input_queues.reset() # img buffers are managed in openCL transform code self.vision_inputs: dict[str, Tensor] = {} @@ -136,15 +190,10 @@ class ModelState(ModelStateBase): def run(self, bufs: dict[str, VisionBuf], 
transforms: dict[str, np.ndarray], inputs: dict[str, np.ndarray], prepare_only: bool) -> dict[str, np.ndarray] | None: # Model decides when action is completed, so desire input is just a pulse triggered on rising edge - inputs['desire'][0] = 0 - new_desire = np.where(inputs['desire'] - self.prev_desire > .99, inputs['desire'], 0) - self.prev_desire[:] = inputs['desire'] + inputs['desire_pulse'][0] = 0 + new_desire = np.where(inputs['desire_pulse'] - self.prev_desire > .99, inputs['desire_pulse'], 0) + self.prev_desire[:] = inputs['desire_pulse'] - self.full_desire[0,:-1] = self.full_desire[0,1:] - self.full_desire[0,-1] = new_desire - self.numpy_inputs['desire'][:] = self.full_desire.reshape((1,ModelConstants.INPUT_HISTORY_BUFFER_LEN,ModelConstants.TEMPORAL_SKIP,-1)).max(axis=2) - - self.numpy_inputs['traffic_convention'][:] = inputs['traffic_convention'] imgs_cl = {name: self.frames[name].prepare(bufs[name], transforms[name].flatten()) for name in self.vision_input_names} if TICI and not USBGPU: @@ -163,9 +212,10 @@ class ModelState(ModelStateBase): self.vision_output = self.vision_run(**self.vision_inputs).contiguous().realize().uop.base.buffer.numpy() vision_outputs_dict = self.parser.parse_vision_outputs(self.slice_outputs(self.vision_output, self.vision_output_slices)) - self.full_features_buffer[0,:-1] = self.full_features_buffer[0,1:] - self.full_features_buffer[0,-1] = vision_outputs_dict['hidden_state'][0, :] - self.numpy_inputs['features_buffer'][:] = self.full_features_buffer[0, self.temporal_idxs] + self.full_input_queues.enqueue({'features_buffer': vision_outputs_dict['hidden_state'], 'desire_pulse': new_desire}) + for k in ['desire_pulse', 'features_buffer']: + self.numpy_inputs[k][:] = self.full_input_queues.get(k)[k] + self.numpy_inputs['traffic_convention'][:] = inputs['traffic_convention'] self.policy_output = self.policy_run(**self.policy_inputs).contiguous().realize().uop.base.buffer.numpy() policy_outputs_dict = 
self.parser.parse_policy_outputs(self.slice_outputs(self.policy_output, self.policy_output_slices)) @@ -223,7 +273,7 @@ def main(demo=False): params = Params() # setup filter to track dropped frames - frame_dropped_filter = FirstOrderFilter(0., 10., 1. / ModelConstants.MODEL_FREQ) + frame_dropped_filter = FirstOrderFilter(0., 10., 1. / ModelConstants.MODEL_RUN_FREQ) frame_id = 0 last_vipc_frame_id = 0 run_count = 0 @@ -320,7 +370,7 @@ def main(demo=False): bufs = {name: buf_extra if 'big' in name else buf_main for name in model.vision_input_names} transforms = {name: model_transform_extra if 'big' in name else model_transform_main for name in model.vision_input_names} inputs:dict[str, np.ndarray] = { - 'desire': vec_desire, + 'desire_pulse': vec_desire, 'traffic_convention': traffic_convention, } diff --git a/selfdrive/modeld/models/driving_policy.onnx b/selfdrive/modeld/models/driving_policy.onnx index 867a0d3b9b..7b87846748 100644 --- a/selfdrive/modeld/models/driving_policy.onnx +++ b/selfdrive/modeld/models/driving_policy.onnx @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:04b763fb71efe57a8a4c4168a8043ecd58939015026ded0dc755ded6905ac251 -size 12343523 +oid sha256:ebb38a934d6472c061cc6010f46d9720ca132d631a47e585a893bdd41ade2419 +size 12343535 diff --git a/selfdrive/modeld/models/driving_vision.onnx b/selfdrive/modeld/models/driving_vision.onnx index ce0dc927e7..4b4fa05df8 100644 --- a/selfdrive/modeld/models/driving_vision.onnx +++ b/selfdrive/modeld/models/driving_vision.onnx @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:e66bb8d53eced3786ed71a59b55ffc6810944cb217f0518621cc76303260a1ef +oid sha256:befac016a247b7ad5dc5b55d339d127774ed7bd2b848f1583f72aa4caee37781 size 46271991 diff --git a/selfdrive/pandad/pandad.cc b/selfdrive/pandad/pandad.cc index 26f42add6c..bcb39391c4 100644 --- a/selfdrive/pandad/pandad.cc +++ b/selfdrive/pandad/pandad.cc @@ -84,12 +84,10 @@ Panda *connect(std::string serial="", uint32_t 
index=0) { panda->set_can_fd_auto(i, true); } - bool is_deprecated_panda = std::find(DEPRECATED_PANDA_TYPES.begin(), - DEPRECATED_PANDA_TYPES.end(), - panda->hw_type) != DEPRECATED_PANDA_TYPES.end(); + bool is_supported_panda = std::find(SUPPORTED_PANDA_TYPES.begin(), SUPPORTED_PANDA_TYPES.end(), panda->hw_type) != SUPPORTED_PANDA_TYPES.end(); - if (is_deprecated_panda) { - LOGW("panda %s is deprecated (hw_type: %i), skipping firmware check...", panda->hw_serial().c_str(), static_cast(panda->hw_type)); + if (!is_supported_panda) { + LOGW("panda %s is not supported (hw_type: %i), skipping firmware check...", panda->hw_serial().c_str(), static_cast(panda->hw_type)); return panda.release(); } @@ -175,7 +173,6 @@ void fill_panda_state(cereal::PandaState::Builder &ps, cereal::PandaState::Panda ps.setHarnessStatus(cereal::PandaState::HarnessStatus(health.car_harness_status_pkt)); ps.setInterruptLoad(health.interrupt_load_pkt); ps.setFanPower(health.fan_power); - ps.setFanStallCount(health.fan_stall_count); ps.setSafetyRxChecksInvalid((bool)(health.safety_rx_checks_invalid_pkt)); ps.setSpiErrorCount(health.spi_error_count_pkt); ps.setSbu1Voltage(health.sbu1_voltage_mV / 1000.0f); diff --git a/selfdrive/pandad/pandad.h b/selfdrive/pandad/pandad.h index cdacdc894b..954851b613 100644 --- a/selfdrive/pandad/pandad.h +++ b/selfdrive/pandad/pandad.h @@ -9,13 +9,10 @@ void pandad_main_thread(std::vector serials); // deprecated devices -static const std::vector DEPRECATED_PANDA_TYPES = { - cereal::PandaState::PandaType::WHITE_PANDA, - cereal::PandaState::PandaType::GREY_PANDA, - cereal::PandaState::PandaType::BLACK_PANDA, - cereal::PandaState::PandaType::PEDAL, - cereal::PandaState::PandaType::UNO, - cereal::PandaState::PandaType::RED_PANDA_V2 +static const std::vector SUPPORTED_PANDA_TYPES = { + cereal::PandaState::PandaType::RED_PANDA, + cereal::PandaState::PandaType::TRES, + cereal::PandaState::PandaType::CUATRO, }; diff --git a/selfdrive/pandad/pandad.py 
b/selfdrive/pandad/pandad.py index 265f673628..361c1f2148 100755 --- a/selfdrive/pandad/pandad.py +++ b/selfdrive/pandad/pandad.py @@ -36,10 +36,10 @@ def flash_panda(panda_serial: str) -> Panda: panda_signature = b"" if panda.bootstub else panda.get_signature() cloudlog.warning(f"Panda {panda_serial} connected, version: {panda_version}, signature {panda_signature.hex()[:16]}, expected {fw_signature.hex()[:16]}") - # skip flashing if the detected device is deprecated from upstream + # skip flashing if the detected device is not supported from upstream hw_type = panda.get_type() - if hw_type in Panda.DEPRECATED_DEVICES: - cloudlog.warning(f"Panda {panda_serial} is deprecated (hw_type: {hw_type}), skipping flash...") + if hw_type not in Panda.SUPPORTED_DEVICES: + cloudlog.warning(f"Panda {panda_serial} is not supported (hw_type: {hw_type}), skipping flash...") return panda if panda.bootstub or panda_signature != fw_signature: @@ -91,11 +91,6 @@ def main() -> None: cloudlog.event("pandad.flash_and_connect", count=count) params.remove("PandaSignatures") - # TODO: remove this in the next AGNOS - # wait until USB is up before counting - if time.monotonic() < 60.: - no_internal_panda_count = 0 - # Handle missing internal panda if no_internal_panda_count > 0: if no_internal_panda_count == 3: diff --git a/selfdrive/pandad/tests/bootstub.panda.bin b/selfdrive/pandad/tests/bootstub.panda.bin deleted file mode 100755 index 43db537061..0000000000 Binary files a/selfdrive/pandad/tests/bootstub.panda.bin and /dev/null differ diff --git a/selfdrive/pandad/tests/test_pandad.py b/selfdrive/pandad/tests/test_pandad.py index 87f30e4793..6a7359fd85 100644 --- a/selfdrive/pandad/tests/test_pandad.py +++ b/selfdrive/pandad/tests/test_pandad.py @@ -22,8 +22,6 @@ class TestPandad: if len(Panda.list()) == 0: self._run_test(60) - self.spi = HARDWARE.get_device_type() != 'tici' - def teardown_method(self): managed_processes['pandad'].stop() @@ -106,11 +104,9 @@ class TestPandad: # - 0.2s 
pandad -> pandad # - plus some buffer print("startup times", ts, sum(ts) / len(ts)) - assert 0.1 < (sum(ts)/len(ts)) < (0.7 if self.spi else 5.0) + assert 0.1 < (sum(ts)/len(ts)) < 0.7 def test_protocol_version_check(self): - if not self.spi: - pytest.skip("SPI test") # flash old fw fn = os.path.join(HERE, "bootstub.panda_h7_spiv0.bin") self._flash_bootstub_and_test(fn, expect_mismatch=True) diff --git a/selfdrive/pandad/tests/test_pandad_spi.py b/selfdrive/pandad/tests/test_pandad_spi.py index 9f7cc3b029..da4b181993 100644 --- a/selfdrive/pandad/tests/test_pandad_spi.py +++ b/selfdrive/pandad/tests/test_pandad_spi.py @@ -6,7 +6,6 @@ import random import cereal.messaging as messaging from cereal.services import SERVICE_LIST -from openpilot.system.hardware import HARDWARE from openpilot.selfdrive.test.helpers import with_processes from openpilot.selfdrive.pandad.tests.test_pandad_loopback import setup_pandad, send_random_can_messages @@ -16,8 +15,6 @@ JUNGLE_SPAM = "JUNGLE_SPAM" in os.environ class TestBoarddSpi: @classmethod def setup_class(cls): - if HARDWARE.get_device_type() == 'tici': - pytest.skip("only for spi pandas") os.environ['STARTED'] = '1' os.environ['SPI_ERR_PROB'] = '0.001' if not JUNGLE_SPAM: diff --git a/selfdrive/selfdrived/alerts_offroad.json b/selfdrive/selfdrived/alerts_offroad.json index 87a007211f..6bae034766 100644 --- a/selfdrive/selfdrived/alerts_offroad.json +++ b/selfdrive/selfdrived/alerts_offroad.json @@ -29,10 +29,6 @@ "text": "Device failed to register with the comma.ai backend. It will not connect or upload to comma.ai servers, and receives no support from comma.ai. If this is a device purchased at comma.ai/shop, open a ticket at https://comma.ai/support.", "severity": 1 }, - "Offroad_StorageMissing": { - "text": "NVMe drive not mounted.", - "severity": 1 - }, "Offroad_CarUnrecognized": { "text": "sunnypilot was unable to identify your car. Your car is either unsupported or its ECUs are not recognized. 
Please submit a pull request to add the firmware versions to the proper vehicle. Need help? Join discord.comma.ai.", "severity": 0 diff --git a/selfdrive/selfdrived/events.py b/selfdrive/selfdrived/events.py index 591e54918e..b03a953a65 100755 --- a/selfdrive/selfdrived/events.py +++ b/selfdrive/selfdrived/events.py @@ -83,7 +83,7 @@ def below_steer_speed_alert(CP: car.CarParams, CS: car.CarState, sm: messaging.S f"Steer Unavailable Below {get_display_speed(CP.minSteerSpeed, metric)}", "", AlertStatus.userPrompt, AlertSize.small, - Priority.LOW, VisualAlert.steerRequired, AudibleAlert.prompt, 0.4) + Priority.LOW, VisualAlert.none, AudibleAlert.prompt, 0.4) def calibration_incomplete_alert(CP: car.CarParams, CS: car.CarState, sm: messaging.SubMaster, metric: bool, soft_disable_time: int, personality) -> Alert: diff --git a/selfdrive/selfdrived/selfdrived.py b/selfdrive/selfdrived/selfdrived.py index c56c7fe65f..749378f3a6 100755 --- a/selfdrive/selfdrived/selfdrived.py +++ b/selfdrive/selfdrived/selfdrived.py @@ -21,7 +21,6 @@ from openpilot.selfdrive.selfdrived.helpers import ExcessiveActuationCheck from openpilot.selfdrive.selfdrived.state import StateMachine from openpilot.selfdrive.selfdrived.alertmanager import AlertManager, set_offroad_alert -from openpilot.system.hardware import HARDWARE from openpilot.system.version import get_build_metadata from openpilot.sunnypilot.mads.mads import ModularAssistiveDrivingSystem @@ -135,13 +134,7 @@ class SelfdriveD(CruiseHelper): self.state_machine = StateMachine() self.rk = Ratekeeper(100, print_delay_threshold=None) - # some comma three with NVMe experience NVMe dropouts mid-drive that - # cause loggerd to crash on write, so ignore it only on that platform - self.ignored_processes = set() - nvme_expected = os.path.exists('/dev/nvme0n1') or (not os.path.isfile("/persist/comma/living-in-the-moment")) - if HARDWARE.get_device_type() == 'tici' and nvme_expected: - self.ignored_processes = {'loggerd', } - 
self.ignored_processes.update({'mapd'}) + self.ignored_processes = {'mapd', } # Determine startup event is_remote = build_metadata.openpilot.comma_remote or build_metadata.openpilot.sunnypilot_remote diff --git a/selfdrive/test/process_replay/process_replay.py b/selfdrive/test/process_replay/process_replay.py index b1190c25e9..895204538f 100755 --- a/selfdrive/test/process_replay/process_replay.py +++ b/selfdrive/test/process_replay/process_replay.py @@ -595,7 +595,7 @@ def get_custom_params_from_lr(lr: LogIterable, initial_state: str = "first") -> if len(live_calibration) > 0: custom_params["CalibrationParams"] = live_calibration[msg_index].as_builder().to_bytes() if len(live_parameters) > 0: - custom_params["LiveParameters"] = live_parameters[msg_index].as_builder().to_bytes() + custom_params["LiveParametersV2"] = live_parameters[msg_index].as_builder().to_bytes() if len(live_torque_parameters) > 0: custom_params["LiveTorqueParameters"] = live_torque_parameters[msg_index].as_builder().to_bytes() diff --git a/selfdrive/test/process_replay/ref_commit b/selfdrive/test/process_replay/ref_commit index a4297096c0..a833fadb94 100644 --- a/selfdrive/test/process_replay/ref_commit +++ b/selfdrive/test/process_replay/ref_commit @@ -1 +1 @@ -6d3219bca9f66a229b38a5382d301a92b0147edb \ No newline at end of file +afcab1abb62b9d5678342956cced4712f44e909e \ No newline at end of file diff --git a/selfdrive/test/test_onroad.py b/selfdrive/test/test_onroad.py index 0149653c84..b4b9b9dbbe 100644 --- a/selfdrive/test/test_onroad.py +++ b/selfdrive/test/test_onroad.py @@ -18,7 +18,6 @@ from openpilot.common.timeout import Timeout from openpilot.common.params import Params from openpilot.selfdrive.selfdrived.events import EVENTS, ET from openpilot.selfdrive.test.helpers import set_params_enabled, release_only -from openpilot.system.hardware import HARDWARE from openpilot.system.hardware.hw import Paths from openpilot.tools.lib.logreader import LogReader from 
openpilot.tools.lib.log_time_series import msgs_to_time_series @@ -33,7 +32,7 @@ CPU usage budget TEST_DURATION = 25 LOG_OFFSET = 8 -MAX_TOTAL_CPU = 280. # total for all 8 cores +MAX_TOTAL_CPU = 300. # total for all 8 cores PROCS = { # Baseline CPU usage by process "selfdrive.controls.controlsd": 16.0, @@ -57,30 +56,20 @@ PROCS = { "selfdrive.ui.soundd": 3.0, "selfdrive.ui.feedback.feedbackd": 1.0, "selfdrive.monitoring.dmonitoringd": 4.0, - "./proclogd": 2.0, + "system.proclogd": 3.0, "system.logmessaged": 1.0, "system.tombstoned": 0, - "./logcatd": 1.0, + "system.journald": 1.0, "system.micd": 5.0, "system.timed": 0, "selfdrive.pandad.pandad": 0, "system.statsd": 1.0, "system.loggerd.uploader": 15.0, "system.loggerd.deleter": 1.0, + "./pandad": 19.0, + "system.qcomgpsd.qcomgpsd": 1.0, } -PROCS.update({ - "tici": { - "./pandad": 5.0, - "./ubloxd": 1.0, - "system.ubloxd.pigeond": 6.0, - }, - "tizi": { - "./pandad": 19.0, - "system.qcomgpsd.qcomgpsd": 1.0, - } -}.get(HARDWARE.get_device_type(), {})) - TIMINGS = { # rtols: max/min, rsd "can": [2.5, 0.35], diff --git a/selfdrive/ui/layouts/main.py b/selfdrive/ui/layouts/main.py index 3ab8525d33..777d2f4c3f 100644 --- a/selfdrive/ui/layouts/main.py +++ b/selfdrive/ui/layouts/main.py @@ -63,14 +63,20 @@ class MainLayout(Widget): # Don't hide sidebar from interactive timeout if self._current_mode != MainState.ONROAD: self._sidebar.set_visible(False) - self._current_mode = MainState.ONROAD + self._set_current_layout(MainState.ONROAD) else: - self._current_mode = MainState.HOME + self._set_current_layout(MainState.HOME) self._sidebar.set_visible(True) + def _set_current_layout(self, layout: MainState): + if layout != self._current_mode: + self._layouts[self._current_mode].hide_event() + self._current_mode = layout + self._layouts[self._current_mode].show_event() + def open_settings(self, panel_type: PanelType): self._layouts[MainState.SETTINGS].set_current_panel(panel_type) - self._current_mode = MainState.SETTINGS + 
self._set_current_layout(MainState.SETTINGS) self._sidebar.set_visible(False) def _on_settings_clicked(self): diff --git a/selfdrive/ui/layouts/network.py b/selfdrive/ui/layouts/network.py deleted file mode 100644 index 856be26e97..0000000000 --- a/selfdrive/ui/layouts/network.py +++ /dev/null @@ -1,17 +0,0 @@ -import pyray as rl -from openpilot.system.ui.lib.wifi_manager import WifiManagerWrapper -from openpilot.system.ui.widgets import Widget -from openpilot.system.ui.widgets.network import WifiManagerUI - - -class NetworkLayout(Widget): - def __init__(self): - super().__init__() - self.wifi_manager = WifiManagerWrapper() - self.wifi_ui = WifiManagerUI(self.wifi_manager) - - def _render(self, rect: rl.Rectangle): - self.wifi_ui.render(rect) - - def shutdown(self): - self.wifi_manager.shutdown() diff --git a/selfdrive/ui/layouts/settings/firehose.py b/selfdrive/ui/layouts/settings/firehose.py index 74a8f317d5..b3db1fa5f0 100644 --- a/selfdrive/ui/layouts/settings/firehose.py +++ b/selfdrive/ui/layouts/settings/firehose.py @@ -2,7 +2,7 @@ import pyray as rl import time import threading -from openpilot.common.api import Api, api_get +from openpilot.common.api import api_get from openpilot.common.params import Params from openpilot.common.swaglog import cloudlog from openpilot.selfdrive.ui.ui_state import ui_state @@ -11,7 +11,7 @@ from openpilot.system.ui.lib.application import gui_app, FontWeight from openpilot.system.ui.lib.scroll_panel import GuiScrollPanel from openpilot.system.ui.lib.wrap_text import wrap_text from openpilot.system.ui.widgets import Widget - +from openpilot.selfdrive.ui.lib.api_helpers import get_token TITLE = "Firehose Mode" DESCRIPTION = ( @@ -163,7 +163,7 @@ class FirehoseLayout(Widget): dongle_id = self.params.get("DongleId") if not dongle_id or dongle_id == UNREGISTERED_DONGLE_ID: return - identity_token = Api(dongle_id).get_token() + identity_token = get_token(dongle_id) response = api_get(f"v1/devices/{dongle_id}/firehose_stats", 
access_token=identity_token) if response.status_code == 200: data = response.json() diff --git a/selfdrive/ui/layouts/settings/settings.py b/selfdrive/ui/layouts/settings/settings.py index 674e5005f4..a731a9158c 100644 --- a/selfdrive/ui/layouts/settings/settings.py +++ b/selfdrive/ui/layouts/settings/settings.py @@ -2,7 +2,6 @@ import pyray as rl from dataclasses import dataclass from enum import IntEnum from collections.abc import Callable -from openpilot.selfdrive.ui.layouts.network import NetworkLayout from openpilot.selfdrive.ui.layouts.settings.developer import DeveloperLayout from openpilot.selfdrive.ui.layouts.settings.device import DeviceLayout from openpilot.selfdrive.ui.layouts.settings.firehose import FirehoseLayout @@ -10,7 +9,9 @@ from openpilot.selfdrive.ui.layouts.settings.software import SoftwareLayout from openpilot.selfdrive.ui.layouts.settings.toggles import TogglesLayout from openpilot.system.ui.lib.application import gui_app, FontWeight, MousePos from openpilot.system.ui.lib.text_measure import measure_text_cached +from openpilot.system.ui.lib.wifi_manager import WifiManager from openpilot.system.ui.widgets import Widget +from openpilot.system.ui.widgets.network import WifiManagerUI # Settings close button SETTINGS_CLOSE_TEXT = "×" @@ -43,7 +44,7 @@ class PanelType(IntEnum): @dataclass class PanelInfo: name: str - instance: object + instance: Widget button_rect: rl.Rectangle = rl.Rectangle(0, 0, 0, 0) @@ -53,9 +54,12 @@ class SettingsLayout(Widget): self._current_panel = PanelType.DEVICE # Panel configuration + wifi_manager = WifiManager() + wifi_manager.set_active(False) + self._panels = { PanelType.DEVICE: PanelInfo("Device", DeviceLayout()), - PanelType.NETWORK: PanelInfo("Network", NetworkLayout()), + PanelType.NETWORK: PanelInfo("Network", WifiManagerUI(wifi_manager)), PanelType.TOGGLES: PanelInfo("Toggles", TogglesLayout()), PanelType.SOFTWARE: PanelInfo("Software", SoftwareLayout()), PanelType.FIREHOSE: PanelInfo("Firehose", 
FirehoseLayout()), @@ -149,8 +153,14 @@ class SettingsLayout(Widget): def set_current_panel(self, panel_type: PanelType): if panel_type != self._current_panel: + self._panels[self._current_panel].instance.hide_event() self._current_panel = panel_type + self._panels[self._current_panel].instance.show_event() - def close_settings(self): - if self._close_callback: - self._close_callback() + def show_event(self): + super().show_event() + self._panels[self._current_panel].instance.show_event() + + def hide_event(self): + super().hide_event() + self._panels[self._current_panel].instance.hide_event() diff --git a/selfdrive/ui/lib/api_helpers.py b/selfdrive/ui/lib/api_helpers.py new file mode 100644 index 0000000000..b83efedb60 --- /dev/null +++ b/selfdrive/ui/lib/api_helpers.py @@ -0,0 +1,14 @@ +import time +from functools import lru_cache +from openpilot.common.api import Api + +TOKEN_EXPIRY_HOURS = 2 + + +@lru_cache(maxsize=1) +def _get_token(dongle_id: str, t: int): + return Api(dongle_id).get_token(expiry_hours=TOKEN_EXPIRY_HOURS) + + +def get_token(dongle_id: str): + return _get_token(dongle_id, int(time.monotonic() / (TOKEN_EXPIRY_HOURS / 2 * 60 * 60))) diff --git a/selfdrive/ui/lib/prime_state.py b/selfdrive/ui/lib/prime_state.py index da2ff899dd..be2132c1b7 100644 --- a/selfdrive/ui/lib/prime_state.py +++ b/selfdrive/ui/lib/prime_state.py @@ -2,14 +2,12 @@ from enum import IntEnum import os import threading import time -from functools import lru_cache -from openpilot.common.api import Api, api_get +from openpilot.common.api import api_get from openpilot.common.params import Params from openpilot.common.swaglog import cloudlog from openpilot.system.athena.registration import UNREGISTERED_DONGLE_ID - -TOKEN_EXPIRY_HOURS = 2 +from openpilot.selfdrive.ui.lib.api_helpers import get_token class PrimeType(IntEnum): @@ -23,12 +21,6 @@ class PrimeType(IntEnum): PURPLE = 5, -@lru_cache(maxsize=1) -def get_token(dongle_id: str, t: int): - print('getting token') - return 
Api(dongle_id).get_token(expiry_hours=TOKEN_EXPIRY_HOURS) - - class PrimeState: FETCH_INTERVAL = 5.0 # seconds between API calls API_TIMEOUT = 10.0 # seconds for API requests @@ -58,15 +50,13 @@ class PrimeState: return try: - identity_token = get_token(dongle_id, int(time.monotonic() / (TOKEN_EXPIRY_HOURS / 2 * 60 * 60))) + identity_token = get_token(dongle_id) response = api_get(f"v1.1/devices/{dongle_id}", timeout=self.API_TIMEOUT, access_token=identity_token) if response.status_code == 200: data = response.json() is_paired = data.get("is_paired", False) prime_type = data.get("prime_type", 0) self.set_type(PrimeType(prime_type) if is_paired else PrimeType.UNPAIRED) - elif response.status_code == 401: - get_token.cache_clear() except Exception as e: cloudlog.error(f"Failed to fetch prime status: {e}") diff --git a/selfdrive/ui/tests/test_ui/run.py b/selfdrive/ui/tests/test_ui/run.py index 653395ba1d..68d1f0c185 100755 --- a/selfdrive/ui/tests/test_ui/run.py +++ b/selfdrive/ui/tests/test_ui/run.py @@ -29,7 +29,7 @@ UI_DELAY = 0.5 # may be slower on CI? TEST_ROUTE = "a2a0ccea32023010|2023-07-27--13-01-19" STREAMS: list[tuple[VisionStreamType, CameraConfig, bytes]] = [] -OFFROAD_ALERTS = ['Offroad_StorageMissing', 'Offroad_IsTakingSnapshot'] +OFFROAD_ALERTS = ['Offroad_IsTakingSnapshot', ] DATA: dict[str, capnp.lib.capnp._DynamicStructBuilder] = dict.fromkeys( ["carParams", "deviceState", "pandaStates", "controlsState", "selfdriveState", "liveCalibration", "modelV2", "radarState", "driverMonitoringState", "carState", diff --git a/selfdrive/ui/ui.cc b/selfdrive/ui/ui.cc index 4b8f79785b..4cad883f7d 100644 --- a/selfdrive/ui/ui.cc +++ b/selfdrive/ui/ui.cc @@ -55,8 +55,7 @@ void update_state(UIState *s) { } if (sm.updated("wideRoadCameraState")) { auto cam_state = sm["wideRoadCameraState"].getWideRoadCameraState(); - float scale = (cam_state.getSensor() == cereal::FrameData::ImageSensor::AR0231) ? 
6.0f : 1.0f; - scene.light_sensor = std::max(100.0f - scale * cam_state.getExposureValPercent(), 0.0f); + scene.light_sensor = std::max(100.0f - cam_state.getExposureValPercent(), 0.0f); } else if (!sm.allAliveAndValid({"wideRoadCameraState"})) { scene.light_sensor = -1; } diff --git a/selfdrive/ui/ui_state.py b/selfdrive/ui/ui_state.py index c2bb5f77a4..c4a2c0ca11 100644 --- a/selfdrive/ui/ui_state.py +++ b/selfdrive/ui/ui_state.py @@ -105,10 +105,7 @@ class UIState: # Handle wide road camera state updates if self.sm.updated["wideRoadCameraState"]: cam_state = self.sm["wideRoadCameraState"] - - # Scale factor based on sensor type - scale = 6.0 if cam_state.sensor == 'ar0231' else 1.0 - self.light_sensor = max(100.0 - scale * cam_state.exposureValPercent, 0.0) + self.light_sensor = max(100.0 - cam_state.exposureValPercent, 0.0) elif not self.sm.alive["wideRoadCameraState"] or not self.sm.valid["wideRoadCameraState"]: self.light_sensor = -1 diff --git a/sunnypilot/models/tests/model_hash b/sunnypilot/models/tests/model_hash index f66baa7e71..33d7d86e28 100644 --- a/sunnypilot/models/tests/model_hash +++ b/sunnypilot/models/tests/model_hash @@ -1 +1 @@ -2ff2f49176a13bc7f856645d785b3b838a5c7ecf7f6cb37699fa0459ebf12453 \ No newline at end of file +70406ab4dd66d0e384734a8a56632ae4a62bc9670c2e630a0f71588c4e212cd8 \ No newline at end of file diff --git a/system/camerad/SConscript b/system/camerad/SConscript index fe5cf87b78..734f748a2a 100644 --- a/system/camerad/SConscript +++ b/system/camerad/SConscript @@ -4,7 +4,7 @@ libs = [common, 'OpenCL', messaging, visionipc, gpucommon] if arch != "Darwin": camera_obj = env.Object(['cameras/camera_qcom2.cc', 'cameras/camera_common.cc', 'cameras/spectra.cc', - 'cameras/cdm.cc', 'sensors/ar0231.cc', 'sensors/ox03c10.cc', 'sensors/os04c10.cc']) + 'cameras/cdm.cc', 'sensors/ox03c10.cc', 'sensors/os04c10.cc']) env.Program('camerad', ['main.cc', camera_obj], LIBS=libs) if GetOption("extras") and arch == "x86_64": diff --git 
a/system/camerad/cameras/hw.h b/system/camerad/cameras/hw.h index d299627ce9..f20a1b3ade 100644 --- a/system/camerad/cameras/hw.h +++ b/system/camerad/cameras/hw.h @@ -13,7 +13,7 @@ typedef enum { ISP_BPS_PROCESSED, // fully processed image through the BPS } SpectraOutputType; -// For the comma 3/3X three camera platform +// For the comma 3X three camera platform struct CameraConfig { int camera_num; diff --git a/system/camerad/cameras/spectra.cc b/system/camerad/cameras/spectra.cc index 47ae9061f4..caf7871573 100644 --- a/system/camerad/cameras/spectra.cc +++ b/system/camerad/cameras/spectra.cc @@ -1004,8 +1004,7 @@ bool SpectraCamera::openSensor() { }; // Figure out which sensor we have - if (!init_sensor_lambda(new AR0231) && - !init_sensor_lambda(new OX03C10) && + if (!init_sensor_lambda(new OX03C10) && !init_sensor_lambda(new OS04C10)) { LOGE("** sensor %d FAILED bringup, disabling", cc.camera_num); enabled = false; diff --git a/system/camerad/sensors/ar0231.cc b/system/camerad/sensors/ar0231.cc deleted file mode 100644 index e4ae29f079..0000000000 --- a/system/camerad/sensors/ar0231.cc +++ /dev/null @@ -1,136 +0,0 @@ -#include -#include - -#include "system/camerad/sensors/sensor.h" - -namespace { - -const size_t AR0231_REGISTERS_HEIGHT = 2; -// TODO: this extra height is universal and doesn't apply per camera -const size_t AR0231_STATS_HEIGHT = 2 + 8; - -const float sensor_analog_gains_AR0231[] = { - 1.0 / 8.0, 2.0 / 8.0, 2.0 / 7.0, 3.0 / 7.0, // 0, 1, 2, 3 - 3.0 / 6.0, 4.0 / 6.0, 4.0 / 5.0, 5.0 / 5.0, // 4, 5, 6, 7 - 5.0 / 4.0, 6.0 / 4.0, 6.0 / 3.0, 7.0 / 3.0, // 8, 9, 10, 11 - 7.0 / 2.0, 8.0 / 2.0, 8.0 / 1.0}; // 12, 13, 14, 15 = bypass - -} // namespace - -AR0231::AR0231() { - image_sensor = cereal::FrameData::ImageSensor::AR0231; - bayer_pattern = CAM_ISP_PATTERN_BAYER_GRGRGR; - pixel_size_mm = 0.003; - data_word = true; - frame_width = 1928; - frame_height = 1208; - frame_stride = (frame_width * 12 / 8) + 4; - extra_height = AR0231_REGISTERS_HEIGHT + 
AR0231_STATS_HEIGHT; - - registers_offset = 0; - frame_offset = AR0231_REGISTERS_HEIGHT; - stats_offset = AR0231_REGISTERS_HEIGHT + frame_height; - - start_reg_array.assign(std::begin(start_reg_array_ar0231), std::end(start_reg_array_ar0231)); - init_reg_array.assign(std::begin(init_array_ar0231), std::end(init_array_ar0231)); - probe_reg_addr = 0x3000; - probe_expected_data = 0x354; - bits_per_pixel = 12; - mipi_format = CAM_FORMAT_MIPI_RAW_12; - frame_data_type = 0x12; // Changing stats to 0x2C doesn't work, so change pixels to 0x12 instead - mclk_frequency = 19200000; //Hz - - readout_time_ns = 22850000; - - dc_gain_factor = 2.5; - dc_gain_min_weight = 0; - dc_gain_max_weight = 1; - dc_gain_on_grey = 0.2; - dc_gain_off_grey = 0.3; - exposure_time_min = 2; // with HDR, fastest ss - exposure_time_max = 0x0855; // with HDR, slowest ss, 40ms - analog_gain_min_idx = 0x1; // 0.25x - analog_gain_rec_idx = 0x6; // 0.8x - analog_gain_max_idx = 0xD; // 4.0x - analog_gain_cost_delta = 0; - analog_gain_cost_low = 0.1; - analog_gain_cost_high = 5.0; - for (int i = 0; i <= analog_gain_max_idx; i++) { - sensor_analog_gains[i] = sensor_analog_gains_AR0231[i]; - } - min_ev = exposure_time_min * sensor_analog_gains[analog_gain_min_idx]; - max_ev = exposure_time_max * dc_gain_factor * sensor_analog_gains[analog_gain_max_idx]; - target_grey_factor = 1.0; - - black_level = 168; - color_correct_matrix = { - 0x000000af, 0x00000ff9, 0x00000fd8, - 0x00000fbc, 0x000000bb, 0x00000009, - 0x00000fb6, 0x00000fe0, 0x000000ea, - }; - for (int i = 0; i < 65; i++) { - float fx = i / 64.0; - const float gamma_k = 0.75; - const float gamma_b = 0.125; - const float mp = 0.01; // ideally midpoint should be adaptive - const float rk = 9 - 100*mp; - // poly approximation for s curve - fx = (fx > mp) ? 
- ((rk * (fx-mp) * (1-(gamma_k*mp+gamma_b)) * (1+1/(rk*(1-mp))) / (1+rk*(fx-mp))) + gamma_k*mp + gamma_b) : - ((rk * (fx-mp) * (gamma_k*mp+gamma_b) * (1+1/(rk*mp)) / (1-rk*(fx-mp))) + gamma_k*mp + gamma_b); - gamma_lut_rgb.push_back((uint32_t)(fx*1023.0 + 0.5)); - } - prepare_gamma_lut(); - linearization_lut = { - 0x02000000, 0x02000000, 0x02000000, 0x02000000, - 0x020007ff, 0x020007ff, 0x020007ff, 0x020007ff, - 0x02000bff, 0x02000bff, 0x02000bff, 0x02000bff, - 0x020017ff, 0x020017ff, 0x020017ff, 0x020017ff, - 0x02001bff, 0x02001bff, 0x02001bff, 0x02001bff, - 0x020023ff, 0x020023ff, 0x020023ff, 0x020023ff, - 0x00003fff, 0x00003fff, 0x00003fff, 0x00003fff, - 0x00003fff, 0x00003fff, 0x00003fff, 0x00003fff, - 0x00003fff, 0x00003fff, 0x00003fff, 0x00003fff, - }; - linearization_pts = {0x07ff0bff, 0x17ff1bff, 0x23ff3fff, 0x3fff3fff}; - vignetting_lut = { - 0x00eaa755, 0x00cf2679, 0x00bc05e0, 0x00acc566, 0x00a1450a, 0x009984cc, 0x0095a4ad, 0x009584ac, 0x009944ca, 0x00a0c506, 0x00ac0560, 0x00bb25d9, 0x00ce2671, 0x00e90748, 0x01112889, 0x014a2a51, 0x01984cc2, - 0x00db06d8, 0x00c30618, 0x00afe57f, 0x00a0a505, 0x009524a9, 0x008d646b, 0x0089844c, 0x0089644b, 0x008d2469, 0x0094a4a5, 0x009fe4ff, 0x00af0578, 0x00c20610, 0x00d986cc, 0x00fda7ed, 0x01320990, 0x017aebd7, - 0x00d1868c, 0x00baa5d5, 0x00a7853c, 0x009844c2, 0x008cc466, 0x0085a42d, 0x0083641b, 0x0083641b, 0x0085842c, 0x008c4462, 0x0097a4bd, 0x00a6c536, 0x00b9a5cd, 0x00d06683, 0x00f1678b, 0x01226913, 0x0167ab3d, - 0x00cd0668, 0x00b625b1, 0x00a30518, 0x0093c49e, 0x00884442, 0x00830418, 0x0080e407, 0x0080c406, 0x0082e417, 0x0087c43e, 0x00932499, 0x00a22511, 0x00b525a9, 0x00cbe65f, 0x00eb0758, 0x011a68d3, 0x015daaed, - 0x00cc4662, 0x00b565ab, 0x00a24512, 0x00930498, 0x0087843c, 0x0082a415, 0x00806403, 0x00806403, 0x00828414, 0x00870438, 0x00926493, 0x00a1850c, 0x00b465a3, 0x00cb2659, 0x00ea2751, 0x011928c9, 0x015c2ae1, - 0x00cf667b, 0x00b885c4, 0x00a5652b, 0x009624b1, 0x008aa455, 0x00846423, 0x00822411, 0x00822411, 
0x00844422, 0x008a2451, 0x009564ab, 0x00a48524, 0x00b785bc, 0x00ce4672, 0x00ee6773, 0x011e88f4, 0x0162eb17, - 0x00d6c6b6, 0x00bf65fb, 0x00ac4562, 0x009d04e8, 0x0091848c, 0x0089c44e, 0x00862431, 0x00860430, 0x0089844c, 0x00910488, 0x009c64e3, 0x00ab655b, 0x00be65f3, 0x00d566ab, 0x00f847c2, 0x012b2959, 0x01726b93, - 0x00e3e71f, 0x00ca0650, 0x00b705b8, 0x00a7a53d, 0x009c24e1, 0x009484a4, 0x00908484, 0x00908484, 0x009424a1, 0x009bc4de, 0x00a70538, 0x00b625b1, 0x00c90648, 0x00e26713, 0x0108e847, 0x013fe9ff, 0x018bcc5e, - 0x00f807c0, 0x00d966cb, 0x00c5862c, 0x00b625b1, 0x00aaa555, 0x00a30518, 0x009f04f8, 0x009f04f8, 0x00a2a515, 0x00aa2551, 0x00b585ac, 0x00c4a625, 0x00d846c2, 0x00f647b2, 0x0121a90d, 0x015e4af2, 0x01b8cdc6, - 0x011548aa, 0x00f1678b, 0x00d886c4, 0x00c86643, 0x00bce5e7, 0x00b545aa, 0x00b1658b, 0x00b1458a, 0x00b505a8, 0x00bc85e4, 0x00c7c63e, 0x00d786bc, 0x00efe77f, 0x0113489a, 0x0144ea27, 0x01888c44, 0x01fdcfee, - 0x013e49f2, 0x0113e89f, 0x00f5a7ad, 0x00e0c706, 0x00d30698, 0x00cb665b, 0x00c7663b, 0x00c7663b, 0x00cb0658, 0x00d2a695, 0x00dfe6ff, 0x00f467a3, 0x01122891, 0x013be9df, 0x01750ba8, 0x01cfae7d, 0x025912c8, - 0x01766bb3, 0x01446a23, 0x011fc8fe, 0x0105e82f, 0x00f467a3, 0x00e9874c, 0x00e46723, 0x00e44722, 0x00e92749, 0x00f3a79d, 0x0104c826, 0x011e48f2, 0x01424a12, 0x01738b9c, 0x01bf6dfb, 0x023611b0, 0x02ced676, - 0x01cf8e7c, 0x01866c33, 0x015aaad5, 0x013ae9d7, 0x01250928, 0x011768bb, 0x0110a885, 0x01108884, 0x0116e8b7, 0x01242921, 0x0139a9cd, 0x0158eac7, 0x01840c20, 0x01cb0e58, 0x0233719b, 0x02b9d5ce, 0x03645b22, - }; -} - -std::vector AR0231::getExposureRegisters(int exposure_time, int new_exp_g, bool dc_gain_enabled) const { - uint16_t analog_gain_reg = 0xFF00 | (new_exp_g << 4) | new_exp_g; - return { - {0x3366, analog_gain_reg}, - {0x3362, (uint16_t)(dc_gain_enabled ? 
0x1 : 0x0)}, - {0x3012, (uint16_t)exposure_time}, - }; -} - -int AR0231::getSlaveAddress(int port) const { - assert(port >= 0 && port <= 2); - return (int[]){0x20, 0x30, 0x20}[port]; -} - -float AR0231::getExposureScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain, int gain_idx) const { - // Cost of ev diff - float score = std::abs(desired_ev - (exp_t * exp_gain)) * 10; - // Cost of absolute gain - float m = exp_g_idx > analog_gain_rec_idx ? analog_gain_cost_high : analog_gain_cost_low; - score += std::abs(exp_g_idx - (int)analog_gain_rec_idx) * m; - // Cost of changing gain - score += std::abs(exp_g_idx - gain_idx) * (score + 1.0) / 10.0; - return score; -} diff --git a/system/camerad/sensors/ar0231_registers.h b/system/camerad/sensors/ar0231_registers.h deleted file mode 100644 index e0872a673a..0000000000 --- a/system/camerad/sensors/ar0231_registers.h +++ /dev/null @@ -1,121 +0,0 @@ -#pragma once - -const struct i2c_random_wr_payload start_reg_array_ar0231[] = {{0x301A, 0x91C}}; -const struct i2c_random_wr_payload stop_reg_array_ar0231[] = {{0x301A, 0x918}}; - -const struct i2c_random_wr_payload init_array_ar0231[] = { - {0x301A, 0x0018}, // RESET_REGISTER - - // **NOTE**: if this is changed, readout_time_ns must be updated in the Sensor config - - // CLOCK Settings - // input clock is 19.2 / 2 * 0x37 = 528 MHz - // pixclk is 528 / 6 = 88 MHz - // full roll time is 1000/(PIXCLK/(LINE_LENGTH_PCK*FRAME_LENGTH_LINES)) = 39.99 ms - // img roll time is 1000/(PIXCLK/(LINE_LENGTH_PCK*Y_OUTPUT_CONTROL)) = 22.85 ms - {0x302A, 0x0006}, // VT_PIX_CLK_DIV - {0x302C, 0x0001}, // VT_SYS_CLK_DIV - {0x302E, 0x0002}, // PRE_PLL_CLK_DIV - {0x3030, 0x0037}, // PLL_MULTIPLIER - {0x3036, 0x000C}, // OP_PIX_CLK_DIV - {0x3038, 0x0001}, // OP_SYS_CLK_DIV - - // FORMAT - {0x3040, 0xC000}, // READ_MODE - {0x3004, 0x0000}, // X_ADDR_START_ - {0x3008, 0x0787}, // X_ADDR_END_ - {0x3002, 0x0000}, // Y_ADDR_START_ - {0x3006, 0x04B7}, // Y_ADDR_END_ - {0x3032, 0x0000}, // 
SCALING_MODE - {0x30A2, 0x0001}, // X_ODD_INC_ - {0x30A6, 0x0001}, // Y_ODD_INC_ - {0x3402, 0x0788}, // X_OUTPUT_CONTROL - {0x3404, 0x04B8}, // Y_OUTPUT_CONTROL - {0x3064, 0x1982}, // SMIA_TEST - {0x30BA, 0x11F2}, // DIGITAL_CTRL - - // Enable external trigger and disable GPIO outputs - {0x30CE, 0x0120}, // SLAVE_SH_SYNC_MODE | FRAME_START_MODE - {0x340A, 0xE0}, // GPIO3_INPUT_DISABLE | GPIO2_INPUT_DISABLE | GPIO1_INPUT_DISABLE - {0x340C, 0x802}, // GPIO_HIDRV_EN | GPIO0_ISEL=2 - - // Readout timing - {0x300C, 0x0672}, // LINE_LENGTH_PCK (valid for 3-exposure HDR) - {0x300A, 0x0855}, // FRAME_LENGTH_LINES - {0x3042, 0x0000}, // EXTRA_DELAY - - // Readout Settings - {0x31AE, 0x0204}, // SERIAL_FORMAT, 4-lane MIPI - {0x31AC, 0x0C0C}, // DATA_FORMAT_BITS, 12 -> 12 - {0x3342, 0x1212}, // MIPI_F1_PDT_EDT - {0x3346, 0x1212}, // MIPI_F2_PDT_EDT - {0x334A, 0x1212}, // MIPI_F3_PDT_EDT - {0x334E, 0x1212}, // MIPI_F4_PDT_EDT - {0x3344, 0x0011}, // MIPI_F1_VDT_VC - {0x3348, 0x0111}, // MIPI_F2_VDT_VC - {0x334C, 0x0211}, // MIPI_F3_VDT_VC - {0x3350, 0x0311}, // MIPI_F4_VDT_VC - {0x31B0, 0x0053}, // FRAME_PREAMBLE - {0x31B2, 0x003B}, // LINE_PREAMBLE - {0x301A, 0x001C}, // RESET_REGISTER - - // Noise Corrections - {0x3092, 0x0C24}, // ROW_NOISE_CONTROL - {0x337A, 0x0C80}, // DBLC_SCALE0 - {0x3370, 0x03B1}, // DBLC - {0x3044, 0x0400}, // DARK_CONTROL - - // Enable temperature sensor - {0x30B4, 0x0007}, // TEMPSENS0_CTRL_REG - {0x30B8, 0x0007}, // TEMPSENS1_CTRL_REG - - // Enable dead pixel correction using - // the 1D line correction scheme - {0x31E0, 0x0003}, - - // HDR Settings - {0x3082, 0x0004}, // OPERATION_MODE_CTRL - {0x3238, 0x0444}, // EXPOSURE_RATIO - - {0x1008, 0x0361}, // FINE_INTEGRATION_TIME_MIN - {0x100C, 0x0589}, // FINE_INTEGRATION_TIME2_MIN - {0x100E, 0x07B1}, // FINE_INTEGRATION_TIME3_MIN - {0x1010, 0x0139}, // FINE_INTEGRATION_TIME4_MIN - - // TODO: do these have to be lower than LINE_LENGTH_PCK? 
- {0x3014, 0x08CB}, // FINE_INTEGRATION_TIME_ - {0x321E, 0x0894}, // FINE_INTEGRATION_TIME2 - - {0x31D0, 0x0000}, // COMPANDING, no good in 10 bit? - {0x33DA, 0x0000}, // COMPANDING - {0x318E, 0x0200}, // PRE_HDR_GAIN_EN - - // DLO Settings - {0x3100, 0x4000}, // DLO_CONTROL0 - {0x3280, 0x0CCC}, // T1 G1 - {0x3282, 0x0CCC}, // T1 R - {0x3284, 0x0CCC}, // T1 B - {0x3286, 0x0CCC}, // T1 G2 - {0x3288, 0x0FA0}, // T2 G1 - {0x328A, 0x0FA0}, // T2 R - {0x328C, 0x0FA0}, // T2 B - {0x328E, 0x0FA0}, // T2 G2 - - // Initial Gains - {0x3022, 0x0001}, // GROUPED_PARAMETER_HOLD_ - {0x3366, 0xFF77}, // ANALOG_GAIN (1x) - - {0x3060, 0x3333}, // ANALOG_COLOR_GAIN - - {0x3362, 0x0000}, // DC GAIN - - {0x305A, 0x00F8}, // red gain - {0x3058, 0x0122}, // blue gain - {0x3056, 0x009A}, // g1 gain - {0x305C, 0x009A}, // g2 gain - - {0x3022, 0x0000}, // GROUPED_PARAMETER_HOLD_ - - // Initial Integration Time - {0x3012, 0x0005}, -}; diff --git a/system/camerad/sensors/sensor.h b/system/camerad/sensors/sensor.h index d4be3cf036..96aa8b604f 100644 --- a/system/camerad/sensors/sensor.h +++ b/system/camerad/sensors/sensor.h @@ -10,7 +10,6 @@ #include "media/cam_sensor.h" #include "cereal/gen/cpp/log.capnp.h" -#include "system/camerad/sensors/ar0231_registers.h" #include "system/camerad/sensors/ox03c10_registers.h" #include "system/camerad/sensors/os04c10_registers.h" @@ -88,17 +87,6 @@ public: }; }; -class AR0231 : public SensorInfo { -public: - AR0231(); - std::vector getExposureRegisters(int exposure_time, int new_exp_g, bool dc_gain_enabled) const override; - float getExposureScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain, int gain_idx) const override; - int getSlaveAddress(int port) const override; - -private: - mutable std::map> ar0231_register_lut; -}; - class OX03C10 : public SensorInfo { public: OX03C10(); diff --git a/system/hardware/base.py b/system/hardware/base.py index b457ea4e17..ce97bf294d 100644 --- a/system/hardware/base.py +++ b/system/hardware/base.py @@ 
-65,6 +65,10 @@ class ThermalConfig: return ret class LPABase(ABC): + @abstractmethod + def bootstrap(self) -> None: + pass + @abstractmethod def list_profiles(self) -> list[Profile]: pass @@ -89,6 +93,9 @@ class LPABase(ABC): def switch_profile(self, iccid: str) -> None: pass + def is_comma_profile(self, iccid: str) -> bool: + return any(iccid.startswith(prefix) for prefix in ('8985235',)) + class HardwareBase(ABC): @staticmethod def get_cmdline() -> dict[str, str]: diff --git a/system/hardware/esim.py b/system/hardware/esim.py index 58ead6593f..909ad41e03 100755 --- a/system/hardware/esim.py +++ b/system/hardware/esim.py @@ -3,10 +3,32 @@ import argparse import time from openpilot.system.hardware import HARDWARE +from openpilot.system.hardware.base import LPABase + + +def bootstrap(lpa: LPABase) -> None: + print('┌──────────────────────────────────────────────────────────────────────────────┐') + print('│ WARNING, PLEASE READ BEFORE PROCEEDING │') + print('│ │') + print('│ this is an irreversible operation that will remove the comma-provisioned │') + print('│ profile. │') + print('│ │') + print('│ after this operation, you must purchase a new eSIM from comma in order to │') + print('│ use the comma prime subscription again. │') + print('└──────────────────────────────────────────────────────────────────────────────┘') + print() + for severity in ('sure', '100% sure'): + print(f'are you {severity} you want to proceed? 
(y/N) ', end='') + confirm = input() + if confirm != 'y': + print('aborting') + exit(0) + lpa.bootstrap() if __name__ == '__main__': parser = argparse.ArgumentParser(prog='esim.py', description='manage eSIM profiles on your comma device', epilog='comma.ai') + parser.add_argument('--bootstrap', action='store_true', help='bootstrap the eUICC (required before downloading profiles)') parser.add_argument('--backend', choices=['qmi', 'at'], default='qmi', help='use the specified backend, defaults to qmi') parser.add_argument('--switch', metavar='iccid', help='switch to profile') parser.add_argument('--delete', metavar='iccid', help='delete profile (warning: this cannot be undone)') @@ -16,7 +38,10 @@ if __name__ == '__main__': mutated = False lpa = HARDWARE.get_sim_lpa() - if args.switch: + if args.bootstrap: + bootstrap(lpa) + mutated = True + elif args.switch: lpa.switch_profile(args.switch) mutated = True elif args.delete: diff --git a/system/hardware/hardwared.py b/system/hardware/hardwared.py index df8ae23ab5..0144a9df03 100755 --- a/system/hardware/hardwared.py +++ b/system/hardware/hardwared.py @@ -6,7 +6,6 @@ import struct import threading import time from collections import OrderedDict, namedtuple -from pathlib import Path import psutil @@ -342,12 +341,6 @@ def hardware_thread(end_event, hw_queue) -> None: show_alert = (not onroad_conditions["device_temp_good"] or not startup_conditions["device_temp_engageable"]) and onroad_conditions["ignition"] set_offroad_alert_if_changed("Offroad_TemperatureTooHigh", show_alert, extra_text=extra_text) - # TODO: this should move to TICI.initialize_hardware, but we currently can't import params there - if TICI and HARDWARE.get_device_type() == "tici": - if not os.path.isfile("/persist/comma/living-in-the-moment"): - if not Path("/data/media").is_mount(): - set_offroad_alert_if_changed("Offroad_StorageMissing", True) - # Handle offroad/onroad transition should_start = all(onroad_conditions.values()) if started_ts is None: diff 
--git a/system/hardware/tici/agnos.json b/system/hardware/tici/agnos.json index 941a4956bf..d93963cf2c 100644 --- a/system/hardware/tici/agnos.json +++ b/system/hardware/tici/agnos.json @@ -23,14 +23,14 @@ }, { "name": "abl", - "url": "https://commadist.azureedge.net/agnosupdate/abl-32a2174b5f764e95dfc54cf358ba01752943b1b3b90e626149c3da7d5f1830b6.img.xz", - "hash": "32a2174b5f764e95dfc54cf358ba01752943b1b3b90e626149c3da7d5f1830b6", - "hash_raw": "32a2174b5f764e95dfc54cf358ba01752943b1b3b90e626149c3da7d5f1830b6", + "url": "https://commadist.azureedge.net/agnosupdate/abl-556bbb4ed1c671402b217bd2f3c07edce4f88b0bbd64e92241b82e396aa9ebee.img.xz", + "hash": "556bbb4ed1c671402b217bd2f3c07edce4f88b0bbd64e92241b82e396aa9ebee", + "hash_raw": "556bbb4ed1c671402b217bd2f3c07edce4f88b0bbd64e92241b82e396aa9ebee", "size": 274432, "sparse": false, "full_check": true, "has_ab": true, - "ondevice_hash": "32a2174b5f764e95dfc54cf358ba01752943b1b3b90e626149c3da7d5f1830b6" + "ondevice_hash": "556bbb4ed1c671402b217bd2f3c07edce4f88b0bbd64e92241b82e396aa9ebee" }, { "name": "aop", @@ -56,29 +56,29 @@ }, { "name": "boot", - "url": "https://commadist.azureedge.net/agnosupdate/boot-0191529aa97d90d1fa04b472d80230b777606459e1e1e9e2323c9519839827b4.img.xz", - "hash": "0191529aa97d90d1fa04b472d80230b777606459e1e1e9e2323c9519839827b4", - "hash_raw": "0191529aa97d90d1fa04b472d80230b777606459e1e1e9e2323c9519839827b4", - "size": 18515968, + "url": "https://commadist.azureedge.net/agnosupdate/boot-b96882012ab6cddda04f440009c798a6cff65977f984b12072e89afa592d86cb.img.xz", + "hash": "b96882012ab6cddda04f440009c798a6cff65977f984b12072e89afa592d86cb", + "hash_raw": "b96882012ab6cddda04f440009c798a6cff65977f984b12072e89afa592d86cb", + "size": 17442816, "sparse": false, "full_check": true, "has_ab": true, - "ondevice_hash": "492ae27f569e8db457c79d0e358a7a6297d1a1c685c2b1ae6deba7315d3a6cb0" + "ondevice_hash": "8ed6c2796be5c5b29d64e6413b8e878d5bd1a3981d15216d2b5e84140cc4ea2a" }, { "name": "system", - "url": 
"https://commadist.azureedge.net/agnosupdate/system-e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087.img.xz", - "hash": "1468d50b7ad0fda0f04074755d21e786e3b1b6ca5dd5b17eb2608202025e6126", - "hash_raw": "e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087", - "size": 5368709120, + "url": "https://commadist.azureedge.net/agnosupdate/system-2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc.img.xz", + "hash": "325414e5c9f7516b2bf0fedb6abe6682f717897a6d84ab70d5afe91a59f244e9", + "hash_raw": "2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc", + "size": 4718592000, "sparse": true, "full_check": false, "has_ab": true, - "ondevice_hash": "242aa5adad1c04e1398e00e2440d1babf962022eb12b89adf2e60ee3068946e7", + "ondevice_hash": "79f4f6d0b5b4a416f0f31261b430943a78e37c26d0e226e0ef412fe0eae3c727", "alt": { - "hash": "e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087", - "url": "https://commadist.azureedge.net/agnosupdate/system-e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087.img", - "size": 5368709120 + "hash": "2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc", + "url": "https://commadist.azureedge.net/agnosupdate/system-2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc.img", + "size": 4718592000 } } ] \ No newline at end of file diff --git a/system/hardware/tici/all-partitions.json b/system/hardware/tici/all-partitions.json index 5891e2748a..ebffc01dfd 100644 --- a/system/hardware/tici/all-partitions.json +++ b/system/hardware/tici/all-partitions.json @@ -152,14 +152,14 @@ }, { "name": "abl", - "url": "https://commadist.azureedge.net/agnosupdate/abl-32a2174b5f764e95dfc54cf358ba01752943b1b3b90e626149c3da7d5f1830b6.img.xz", - "hash": "32a2174b5f764e95dfc54cf358ba01752943b1b3b90e626149c3da7d5f1830b6", - "hash_raw": "32a2174b5f764e95dfc54cf358ba01752943b1b3b90e626149c3da7d5f1830b6", + "url": 
"https://commadist.azureedge.net/agnosupdate/abl-556bbb4ed1c671402b217bd2f3c07edce4f88b0bbd64e92241b82e396aa9ebee.img.xz", + "hash": "556bbb4ed1c671402b217bd2f3c07edce4f88b0bbd64e92241b82e396aa9ebee", + "hash_raw": "556bbb4ed1c671402b217bd2f3c07edce4f88b0bbd64e92241b82e396aa9ebee", "size": 274432, "sparse": false, "full_check": true, "has_ab": true, - "ondevice_hash": "32a2174b5f764e95dfc54cf358ba01752943b1b3b90e626149c3da7d5f1830b6" + "ondevice_hash": "556bbb4ed1c671402b217bd2f3c07edce4f88b0bbd64e92241b82e396aa9ebee" }, { "name": "aop", @@ -339,62 +339,62 @@ }, { "name": "boot", - "url": "https://commadist.azureedge.net/agnosupdate/boot-0191529aa97d90d1fa04b472d80230b777606459e1e1e9e2323c9519839827b4.img.xz", - "hash": "0191529aa97d90d1fa04b472d80230b777606459e1e1e9e2323c9519839827b4", - "hash_raw": "0191529aa97d90d1fa04b472d80230b777606459e1e1e9e2323c9519839827b4", - "size": 18515968, + "url": "https://commadist.azureedge.net/agnosupdate/boot-b96882012ab6cddda04f440009c798a6cff65977f984b12072e89afa592d86cb.img.xz", + "hash": "b96882012ab6cddda04f440009c798a6cff65977f984b12072e89afa592d86cb", + "hash_raw": "b96882012ab6cddda04f440009c798a6cff65977f984b12072e89afa592d86cb", + "size": 17442816, "sparse": false, "full_check": true, "has_ab": true, - "ondevice_hash": "492ae27f569e8db457c79d0e358a7a6297d1a1c685c2b1ae6deba7315d3a6cb0" + "ondevice_hash": "8ed6c2796be5c5b29d64e6413b8e878d5bd1a3981d15216d2b5e84140cc4ea2a" }, { "name": "system", - "url": "https://commadist.azureedge.net/agnosupdate/system-e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087.img.xz", - "hash": "1468d50b7ad0fda0f04074755d21e786e3b1b6ca5dd5b17eb2608202025e6126", - "hash_raw": "e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087", - "size": 5368709120, + "url": "https://commadist.azureedge.net/agnosupdate/system-2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc.img.xz", + "hash": "325414e5c9f7516b2bf0fedb6abe6682f717897a6d84ab70d5afe91a59f244e9", + 
"hash_raw": "2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc", + "size": 4718592000, "sparse": true, "full_check": false, "has_ab": true, - "ondevice_hash": "242aa5adad1c04e1398e00e2440d1babf962022eb12b89adf2e60ee3068946e7", + "ondevice_hash": "79f4f6d0b5b4a416f0f31261b430943a78e37c26d0e226e0ef412fe0eae3c727", "alt": { - "hash": "e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087", - "url": "https://commadist.azureedge.net/agnosupdate/system-e0007afa5d1026671c1943d44bb7f7ad26259f673392dd00a03073a2870df087.img", - "size": 5368709120 + "hash": "2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc", + "url": "https://commadist.azureedge.net/agnosupdate/system-2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc.img", + "size": 4718592000 } }, { "name": "userdata_90", - "url": "https://commadist.azureedge.net/agnosupdate/userdata_90-602d5103cba97e1b07f76508d5febb47cfc4463a7e31bd20e461b55c801feb0a.img.xz", - "hash": "6a11d448bac50467791809339051eed2894aae971c37bf6284b3b972a99ba3ac", - "hash_raw": "602d5103cba97e1b07f76508d5febb47cfc4463a7e31bd20e461b55c801feb0a", + "url": "https://commadist.azureedge.net/agnosupdate/userdata_90-b3112984d2a8534a83d2ce43d35efdd10c7d163d9699f611f0f72ad9e9cb5af9.img.xz", + "hash": "bea163e6fb6ac6224c7f32619affb5afb834cd859971b0cab6d8297dd0098f0a", + "hash_raw": "b3112984d2a8534a83d2ce43d35efdd10c7d163d9699f611f0f72ad9e9cb5af9", "size": 96636764160, "sparse": true, "full_check": true, "has_ab": false, - "ondevice_hash": "e014d92940a696bf8582807259820ab73948b950656ed83a45da738f26083705" + "ondevice_hash": "f4841c6ae3207197886e5efbd50f44cc24822680d7b785fa2d2743c657f23287" }, { "name": "userdata_89", - "url": "https://commadist.azureedge.net/agnosupdate/userdata_89-4d7f6d12a5557eb6e3cbff9a4cd595677456fdfddcc879eddcea96a43a9d8b48.img.xz", - "hash": "748e31a5fc01fc256c012e359c3382d1f98cce98feafe8ecc0fca3e47caef116", - "hash_raw": 
"4d7f6d12a5557eb6e3cbff9a4cd595677456fdfddcc879eddcea96a43a9d8b48", + "url": "https://commadist.azureedge.net/agnosupdate/userdata_89-3e63f670e4270474cec96f4da9250ee4e87e3106b0b043b7e82371e1c761e167.img.xz", + "hash": "b5458a29dd7d4a4c9b7ad77b8baa5f804142ac78d97c6668839bf2a650e32518", + "hash_raw": "3e63f670e4270474cec96f4da9250ee4e87e3106b0b043b7e82371e1c761e167", "size": 95563022336, "sparse": true, "full_check": true, "has_ab": false, - "ondevice_hash": "c181b93050787adcfef730c086bcb780f28508d84e6376d9b80d37e5dc02b55e" + "ondevice_hash": "1dc10c542d3b019258fc08dc7dfdb49d9abad065e46d030b89bc1a2e0197f526" }, { "name": "userdata_30", - "url": "https://commadist.azureedge.net/agnosupdate/userdata_30-80a76c8e56bbd7536fd5e87e8daa12984e2960db4edeb1f83229b2baeecc4668.img.xz", - "hash": "09ff390e639e4373d772e1688d05a5ac77a573463ed1deeff86390686fa686f9", - "hash_raw": "80a76c8e56bbd7536fd5e87e8daa12984e2960db4edeb1f83229b2baeecc4668", + "url": "https://commadist.azureedge.net/agnosupdate/userdata_30-1d3885d4370974e55f0c6f567fd0344fc5ee10db067aa5810fbaf402eadb032c.img.xz", + "hash": "687d178cfc91be5d7e8aa1333405b610fdce01775b8333bd0985b81642b94eea", + "hash_raw": "1d3885d4370974e55f0c6f567fd0344fc5ee10db067aa5810fbaf402eadb032c", "size": 32212254720, "sparse": true, "full_check": true, "has_ab": false, - "ondevice_hash": "2c01ab470c02121c721ff6afc25582437e821686207f3afef659387afb69c507" + "ondevice_hash": "9ddbd1dae6ee7dc919f018364cf2f29dad138c9203c5a49aea0cbb9bf2e137e5" } ] \ No newline at end of file diff --git a/system/hardware/tici/amplifier.py b/system/hardware/tici/amplifier.py index f6b29ec0ce..bfdcc6ddaf 100755 --- a/system/hardware/tici/amplifier.py +++ b/system/hardware/tici/amplifier.py @@ -61,18 +61,6 @@ BASE_CONFIG = [ ] CONFIGS = { - "tici": [ - AmpConfig("Right speaker output from right DAC", 0b1, 0x2C, 0, 0b11111111), - AmpConfig("Right Speaker Mixer Gain", 0b00, 0x2D, 2, 0b00001100), - AmpConfig("Right speaker output volume", 0x1c, 0x3E, 0, 0b00011111), - 
AmpConfig("DAI2 EQ enable", 0b1, 0x49, 1, 0b00000010), - - *configs_from_eq_params(0x84, EQParams(0x274F, 0xC0FF, 0x3BF9, 0x0B3C, 0x1656)), - *configs_from_eq_params(0x8E, EQParams(0x1009, 0xC6BF, 0x2952, 0x1C97, 0x30DF)), - *configs_from_eq_params(0x98, EQParams(0x0F75, 0xCBE5, 0x0ED2, 0x2528, 0x3E42)), - *configs_from_eq_params(0xA2, EQParams(0x091F, 0x3D4C, 0xCE11, 0x1266, 0x2807)), - *configs_from_eq_params(0xAC, EQParams(0x0A9E, 0x3F20, 0xE573, 0x0A8B, 0x3A3B)), - ], "tizi": [ AmpConfig("Left speaker output from left DAC", 0b1, 0x2B, 0, 0b11111111), AmpConfig("Right speaker output from right DAC", 0b1, 0x2C, 0, 0b11111111), diff --git a/system/hardware/tici/esim.py b/system/hardware/tici/esim.py index b489286f50..391ba45531 100644 --- a/system/hardware/tici/esim.py +++ b/system/hardware/tici/esim.py @@ -40,6 +40,7 @@ class TiciLPA(LPABase): self._process_notifications() def download_profile(self, qr: str, nickname: str | None = None) -> None: + self._check_bootstrapped() msgs = self._invoke('profile', 'download', '-a', qr) self._validate_successful(msgs) new_profile = next((m for m in msgs if m['payload']['message'] == 'es8p_meatadata_parse'), None) @@ -54,6 +55,7 @@ class TiciLPA(LPABase): self._validate_successful(self._invoke('profile', 'nickname', iccid, nickname)) def switch_profile(self, iccid: str) -> None: + self._check_bootstrapped() self._validate_profile_exists(iccid) latest = self.get_active_profile() if latest and latest.iccid == iccid: @@ -61,6 +63,33 @@ class TiciLPA(LPABase): self._validate_successful(self._invoke('profile', 'enable', iccid)) self._process_notifications() + def bootstrap(self) -> None: + """ + find all comma-provisioned profiles and delete them. they conflict with user-provisioned profiles + and must be deleted. + + **note**: this is a **very** destructive operation. you **must** purchase a new comma SIM in order + to use comma prime again. 
+ """ + if self._is_bootstrapped(): + return + + for p in self.list_profiles(): + if self.is_comma_profile(p.iccid): + self._disable_profile(p.iccid) + self.delete_profile(p.iccid) + + def _disable_profile(self, iccid: str) -> None: + self._validate_successful(self._invoke('profile', 'disable', iccid)) + self._process_notifications() + + def _check_bootstrapped(self) -> None: + assert self._is_bootstrapped(), 'eUICC is not bootstrapped, please bootstrap before performing this operation' + + def _is_bootstrapped(self) -> bool: + """ check if any comma provisioned profiles are on the eUICC """ + return not any(self.is_comma_profile(iccid) for iccid in (p.iccid for p in self.list_profiles())) + def _invoke(self, *cmd: str): proc = subprocess.Popen(['sudo', '-E', 'lpac'] + list(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=self.env) try: diff --git a/system/hardware/tici/hardware.py b/system/hardware/tici/hardware.py index 35f9916c31..0f50acdc38 100644 --- a/system/hardware/tici/hardware.py +++ b/system/hardware/tici/hardware.py @@ -425,9 +425,6 @@ class Tici(HardwareBase): # pandad core affine_irq(3, "spi_geni") # SPI - if "tici" in self.get_device_type(): - affine_irq(3, "xhci-hcd:usb3") # aux panda USB (or potentially anything else on USB) - affine_irq(3, "xhci-hcd:usb1") # internal panda USB (also modem) try: pid = subprocess.check_output(["pgrep", "-f", "spi0"], encoding='utf8').strip() subprocess.call(["sudo", "chrt", "-f", "-p", "1", pid]) @@ -446,22 +443,20 @@ class Tici(HardwareBase): cmds = [] - if self.get_device_type() in ("tici", "tizi"): + if self.get_device_type() in ("tizi", ): # clear out old blue prime initial APN os.system('mmcli -m any --3gpp-set-initial-eps-bearer-settings="apn="') cmds += [ + # SIM hot swap + 'AT+QSIMDET=1,0', + 'AT+QSIMSTAT=1', + # configure modem as data-centric 'AT+QNVW=5280,0,"0102000000000000"', 'AT+QNVFW="/nv/item_files/ims/IMS_enable",00', 'AT+QNVFW="/nv/item_files/modem/mmode/ue_usage_setting",01', ] - if 
self.get_device_type() == "tizi": - # SIM hot swap, not routed on tici - cmds += [ - 'AT+QSIMDET=1,0', - 'AT+QSIMSTAT=1', - ] elif manufacturer == 'Cavli Inc.': cmds += [ 'AT^SIMSWAP=1', # use SIM slot, instead of internal eSIM @@ -492,7 +487,7 @@ class Tici(HardwareBase): # eSIM prime dest = "/etc/NetworkManager/system-connections/esim.nmconnection" - if sim_id.startswith('8985235') and not os.path.exists(dest): + if self.get_sim_lpa().is_comma_profile(sim_id) and not os.path.exists(dest): with open(Path(__file__).parent/'esim.nmconnection') as f, tempfile.NamedTemporaryFile(mode='w') as tf: dat = f.read() dat = dat.replace("sim-id=", f"sim-id={sim_id}") @@ -503,6 +498,14 @@ class Tici(HardwareBase): os.system(f"sudo cp {tf.name} {dest}") os.system(f"sudo nmcli con load {dest}") + def reboot_modem(self): + modem = self.get_modem() + for state in (0, 1): + try: + modem.Command(f'AT+CFUN={state}', math.ceil(TIMEOUT), dbus_interface=MM_MODEM, timeout=TIMEOUT) + except Exception: + pass + def get_networks(self): r = {} diff --git a/system/hardware/tici/pins.py b/system/hardware/tici/pins.py index 747278d1ec..bdbea591fb 100644 --- a/system/hardware/tici/pins.py +++ b/system/hardware/tici/pins.py @@ -1,5 +1,3 @@ -# TODO: these are also defined in a header - # GPIO pin definitions class GPIO: # both GPIO_STM_RST_N and GPIO_LTE_RST_N are misnamed, they are high to reset @@ -26,7 +24,4 @@ class GPIO: CAM2_RSTN = 12 # Sensor interrupts - BMX055_ACCEL_INT = 21 - BMX055_GYRO_INT = 23 - BMX055_MAGN_INT = 87 LSM_INT = 84 diff --git a/system/hardware/tici/tests/test_power_draw.py b/system/hardware/tici/tests/test_power_draw.py index db0fab884c..4fbde81673 100644 --- a/system/hardware/tici/tests/test_power_draw.py +++ b/system/hardware/tici/tests/test_power_draw.py @@ -31,9 +31,9 @@ class Proc: PROCS = [ - Proc(['camerad'], 1.75, msgs=['roadCameraState', 'wideRoadCameraState', 'driverCameraState']), - Proc(['modeld'], 1.12, atol=0.2, msgs=['modelV2']), - 
Proc(['dmonitoringmodeld'], 0.6, msgs=['driverStateV2']), + Proc(['camerad'], 1.65, atol=0.4, msgs=['roadCameraState', 'wideRoadCameraState', 'driverCameraState']), + Proc(['modeld'], 1.24, atol=0.2, msgs=['modelV2']), + Proc(['dmonitoringmodeld'], 0.65, atol=0.35, msgs=['driverStateV2']), Proc(['encoderd'], 0.23, msgs=[]), ] diff --git a/system/journald.py b/system/journald.py new file mode 100755 index 0000000000..37158b9251 --- /dev/null +++ b/system/journald.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +import json +import subprocess + +import cereal.messaging as messaging +from openpilot.common.swaglog import cloudlog + + +def main(): + pm = messaging.PubMaster(['androidLog']) + cmd = ['journalctl', '-f', '-o', 'json'] + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, text=True) + assert proc.stdout is not None + try: + for line in proc.stdout: + line = line.strip() + if not line: + continue + try: + kv = json.loads(line) + except json.JSONDecodeError: + cloudlog.exception("failed to parse journalctl output") + continue + + msg = messaging.new_message('androidLog') + entry = msg.androidLog + entry.ts = int(kv.get('__REALTIME_TIMESTAMP', 0)) + entry.message = json.dumps(kv) + if '_PID' in kv: + entry.pid = int(kv['_PID']) + if 'PRIORITY' in kv: + entry.priority = int(kv['PRIORITY']) + if 'SYSLOG_IDENTIFIER' in kv: + entry.tag = kv['SYSLOG_IDENTIFIER'] + + pm.send('androidLog', msg) + finally: + proc.terminate() + proc.wait() + + +if __name__ == '__main__': + main() diff --git a/system/logcatd/.gitignore b/system/logcatd/.gitignore deleted file mode 100644 index c66f7622d9..0000000000 --- a/system/logcatd/.gitignore +++ /dev/null @@ -1 +0,0 @@ -logcatd diff --git a/system/logcatd/SConscript b/system/logcatd/SConscript deleted file mode 100644 index 39c45d1093..0000000000 --- a/system/logcatd/SConscript +++ /dev/null @@ -1,3 +0,0 @@ -Import('env', 'messaging', 'common') - -env.Program('logcatd', 'logcatd_systemd.cc', LIBS=[messaging, common, 'systemd']) diff 
--git a/system/logcatd/logcatd_systemd.cc b/system/logcatd/logcatd_systemd.cc deleted file mode 100644 index 54b3782132..0000000000 --- a/system/logcatd/logcatd_systemd.cc +++ /dev/null @@ -1,75 +0,0 @@ -#include - -#include -#include -#include -#include - -#include "third_party/json11/json11.hpp" - -#include "cereal/messaging/messaging.h" -#include "common/timing.h" -#include "common/util.h" - -ExitHandler do_exit; -int main(int argc, char *argv[]) { - - PubMaster pm({"androidLog"}); - - sd_journal *journal; - int err = sd_journal_open(&journal, 0); - assert(err >= 0); - err = sd_journal_get_fd(journal); // needed so sd_journal_wait() works properly if files rotate - assert(err >= 0); - err = sd_journal_seek_tail(journal); - assert(err >= 0); - - // workaround for bug https://github.com/systemd/systemd/issues/9934 - // call sd_journal_previous_skip after sd_journal_seek_tail (like journalctl -f does) to makes things work. - sd_journal_previous_skip(journal, 1); - - while (!do_exit) { - err = sd_journal_next(journal); - assert(err >= 0); - - // Wait for new message if we didn't receive anything - if (err == 0) { - err = sd_journal_wait(journal, 1000 * 1000); - assert(err >= 0); - continue; // Try again - } - - uint64_t timestamp = 0; - err = sd_journal_get_realtime_usec(journal, &timestamp); - assert(err >= 0); - - const void *data; - size_t length; - std::map<std::string, std::string> kv; - - SD_JOURNAL_FOREACH_DATA(journal, data, length) { - std::string str((char*)data, length); - - // Split "KEY=VALUE"" on "=" and put in map - std::size_t found = str.find("="); - if (found != std::string::npos) { - kv[str.substr(0, found)] = str.substr(found + 1, std::string::npos); - } - } - - MessageBuilder msg; - - // Build message - auto androidEntry = msg.initEvent().initAndroidLog(); - androidEntry.setTs(timestamp); - androidEntry.setMessage(json11::Json(kv).dump()); - if (kv.count("_PID")) androidEntry.setPid(std::atoi(kv["_PID"].c_str())); - if (kv.count("PRIORITY")) 
androidEntry.setPriority(std::atoi(kv["PRIORITY"].c_str())); - if (kv.count("SYSLOG_IDENTIFIER")) androidEntry.setTag(kv["SYSLOG_IDENTIFIER"]); - - pm.send("androidLog", msg); - } - - sd_journal_close(journal); - return 0; -} diff --git a/system/loggerd/tests/vidc_debug.sh b/system/loggerd/tests/vidc_debug.sh new file mode 100755 index 0000000000..7471f2ab08 --- /dev/null +++ b/system/loggerd/tests/vidc_debug.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash +set -e + +cd /sys/kernel/debug/tracing +echo "" > trace +echo 1 > tracing_on +echo 1 > /sys/kernel/debug/tracing/events/msm_vidc/enable + +echo 0xff > /sys/module/videobuf2_core/parameters/debug +echo 0x7fffffff > /sys/kernel/debug/msm_vidc/debug_level +echo 0xff > /sys/devices/platform/soc/aa00000.qcom,vidc/video4linux/video33/dev_debug + +cat /sys/kernel/debug/tracing/trace_pipe diff --git a/system/manager/process_config.py b/system/manager/process_config.py index 6130916887..92d5a4ef8c 100644 --- a/system/manager/process_config.py +++ b/system/manager/process_config.py @@ -111,8 +111,8 @@ procs = [ NativeProcess("camerad", "system/camerad", ["./camerad"], driverview, enabled=not WEBCAM), PythonProcess("webcamerad", "tools.webcam.camerad", driverview, enabled=WEBCAM), - NativeProcess("logcatd", "system/logcatd", ["./logcatd"], only_onroad, platform.system() != "Darwin"), - NativeProcess("proclogd", "system/proclogd", ["./proclogd"], only_onroad, platform.system() != "Darwin"), + PythonProcess("proclogd", "system.proclogd", only_onroad, enabled=platform.system() != "Darwin"), + PythonProcess("journald", "system.journald", only_onroad, platform.system() != "Darwin"), PythonProcess("micd", "system.micd", iscar), PythonProcess("timed", "system.timed", always_run, enabled=not PC), @@ -137,7 +137,7 @@ procs = [ PythonProcess("pandad", "selfdrive.pandad.pandad", always_run), PythonProcess("paramsd", "selfdrive.locationd.paramsd", only_onroad), PythonProcess("lagd", "selfdrive.locationd.lagd", only_onroad), - 
NativeProcess("ubloxd", "system/ubloxd", ["./ubloxd"], ublox, enabled=TICI), + PythonProcess("ubloxd", "system.ubloxd.ubloxd", ublox, enabled=TICI), PythonProcess("pigeond", "system.ubloxd.pigeond", ublox, enabled=TICI), PythonProcess("plannerd", "selfdrive.controls.plannerd", not_long_maneuver), PythonProcess("maneuversd", "tools.longitudinal_maneuvers.maneuversd", long_maneuver), diff --git a/system/proclogd.py b/system/proclogd.py new file mode 100755 index 0000000000..3279425b7b --- /dev/null +++ b/system/proclogd.py @@ -0,0 +1,227 @@ +#!/usr/bin/env python3 +import os +from typing import NoReturn, TypedDict + +from cereal import messaging +from openpilot.common.realtime import Ratekeeper +from openpilot.common.swaglog import cloudlog + +JIFFY = os.sysconf(os.sysconf_names['SC_CLK_TCK']) +PAGE_SIZE = os.sysconf(os.sysconf_names['SC_PAGE_SIZE']) + + +def _cpu_times() -> list[dict[str, float]]: + cpu_times: list[dict[str, float]] = [] + try: + with open('/proc/stat') as f: + lines = f.readlines()[1:] + for line in lines: + if not line.startswith('cpu') or len(line) < 4 or not line[3].isdigit(): + break + parts = line.split() + cpu_times.append({ + 'cpuNum': int(parts[0][3:]), + 'user': float(parts[1]) / JIFFY, + 'nice': float(parts[2]) / JIFFY, + 'system': float(parts[3]) / JIFFY, + 'idle': float(parts[4]) / JIFFY, + 'iowait': float(parts[5]) / JIFFY, + 'irq': float(parts[6]) / JIFFY, + 'softirq': float(parts[7]) / JIFFY, + }) + except Exception: + cloudlog.exception("failed to read /proc/stat") + return cpu_times + + +def _mem_info() -> dict[str, int]: + keys = ["MemTotal:", "MemFree:", "MemAvailable:", "Buffers:", "Cached:", "Active:", "Inactive:", "Shmem:"] + info: dict[str, int] = dict.fromkeys(keys, 0) + try: + with open('/proc/meminfo') as f: + for line in f: + parts = line.split() + if parts and parts[0] in info: + info[parts[0]] = int(parts[1]) * 1024 + except Exception: + cloudlog.exception("failed to read /proc/meminfo") + return info + + +_STAT_POS = { 
+ 'pid': 1, + 'state': 3, + 'ppid': 4, + 'utime': 14, + 'stime': 15, + 'cutime': 16, + 'cstime': 17, + 'priority': 18, + 'nice': 19, + 'num_threads': 20, + 'starttime': 22, + 'vsize': 23, + 'rss': 24, + 'processor': 39, +} + +class ProcStat(TypedDict): + name: str + pid: int + state: str + ppid: int + utime: int + stime: int + cutime: int + cstime: int + priority: int + nice: int + num_threads: int + starttime: int + vms: int + rss: int + processor: int + + +def _parse_proc_stat(stat: str) -> ProcStat | None: + open_paren = stat.find('(') + close_paren = stat.rfind(')') + if open_paren == -1 or close_paren == -1 or open_paren > close_paren: + return None + name = stat[open_paren + 1:close_paren] + stat = stat[:open_paren] + stat[open_paren:close_paren].replace(' ', '_') + stat[close_paren:] + parts = stat.split() + if len(parts) < 52: + return None + try: + return { + 'name': name, + 'pid': int(parts[_STAT_POS['pid'] - 1]), + 'state': parts[_STAT_POS['state'] - 1][0], + 'ppid': int(parts[_STAT_POS['ppid'] - 1]), + 'utime': int(parts[_STAT_POS['utime'] - 1]), + 'stime': int(parts[_STAT_POS['stime'] - 1]), + 'cutime': int(parts[_STAT_POS['cutime'] - 1]), + 'cstime': int(parts[_STAT_POS['cstime'] - 1]), + 'priority': int(parts[_STAT_POS['priority'] - 1]), + 'nice': int(parts[_STAT_POS['nice'] - 1]), + 'num_threads': int(parts[_STAT_POS['num_threads'] - 1]), + 'starttime': int(parts[_STAT_POS['starttime'] - 1]), + 'vms': int(parts[_STAT_POS['vsize'] - 1]), + 'rss': int(parts[_STAT_POS['rss'] - 1]), + 'processor': int(parts[_STAT_POS['processor'] - 1]), + } + except Exception: + cloudlog.exception("failed to parse /proc//stat") + return None + +class ProcExtra(TypedDict): + pid: int + name: str + exe: str + cmdline: list[str] + + +_proc_cache: dict[int, ProcExtra] = {} + + +def _get_proc_extra(pid: int, name: str) -> ProcExtra: + cache: ProcExtra | None = _proc_cache.get(pid) + if cache is None or cache.get('name') != name: + exe = '' + cmdline: list[str] = [] + try: + 
exe = os.readlink(f'/proc/{pid}/exe') + except OSError: + pass + try: + with open(f'/proc/{pid}/cmdline', 'rb') as f: + cmdline = [c.decode('utf-8', errors='replace') for c in f.read().split(b'\0') if c] + except OSError: + pass + cache = {'pid': pid, 'name': name, 'exe': exe, 'cmdline': cmdline} + _proc_cache[pid] = cache + return cache + + +def _procs() -> list[ProcStat]: + stats: list[ProcStat] = [] + for pid_str in os.listdir('/proc'): + if not pid_str.isdigit(): + continue + try: + with open(f'/proc/{pid_str}/stat') as f: + stat = f.read() + parsed = _parse_proc_stat(stat) + if parsed is not None: + stats.append(parsed) + except OSError: + continue + return stats + + +def build_proc_log_message(msg) -> None: + pl = msg.procLog + + procs = _procs() + l = pl.init('procs', len(procs)) + for i, r in enumerate(procs): + proc = l[i] + proc.pid = r['pid'] + proc.state = ord(r['state'][0]) + proc.ppid = r['ppid'] + proc.cpuUser = r['utime'] / JIFFY + proc.cpuSystem = r['stime'] / JIFFY + proc.cpuChildrenUser = r['cutime'] / JIFFY + proc.cpuChildrenSystem = r['cstime'] / JIFFY + proc.priority = r['priority'] + proc.nice = r['nice'] + proc.numThreads = r['num_threads'] + proc.startTime = r['starttime'] / JIFFY + proc.memVms = r['vms'] + proc.memRss = r['rss'] * PAGE_SIZE + proc.processor = r['processor'] + proc.name = r['name'] + + extra = _get_proc_extra(r['pid'], r['name']) + proc.exe = extra['exe'] + cmdline = proc.init('cmdline', len(extra['cmdline'])) + for j, arg in enumerate(extra['cmdline']): + cmdline[j] = arg + + cpu_times = _cpu_times() + cpu_list = pl.init('cpuTimes', len(cpu_times)) + for i, ct in enumerate(cpu_times): + cpu = cpu_list[i] + cpu.cpuNum = ct['cpuNum'] + cpu.user = ct['user'] + cpu.nice = ct['nice'] + cpu.system = ct['system'] + cpu.idle = ct['idle'] + cpu.iowait = ct['iowait'] + cpu.irq = ct['irq'] + cpu.softirq = ct['softirq'] + + mem_info = _mem_info() + pl.mem.total = mem_info["MemTotal:"] + pl.mem.free = mem_info["MemFree:"] + 
pl.mem.available = mem_info["MemAvailable:"] + pl.mem.buffers = mem_info["Buffers:"] + pl.mem.cached = mem_info["Cached:"] + pl.mem.active = mem_info["Active:"] + pl.mem.inactive = mem_info["Inactive:"] + pl.mem.shared = mem_info["Shmem:"] + + +def main() -> NoReturn: + pm = messaging.PubMaster(['procLog']) + rk = Ratekeeper(0.5) + while True: + msg = messaging.new_message('procLog', valid=True) + build_proc_log_message(msg) + pm.send('procLog', msg) + rk.keep_time() + + +if __name__ == '__main__': + main() diff --git a/system/proclogd/SConscript b/system/proclogd/SConscript deleted file mode 100644 index 08814d5ccb..0000000000 --- a/system/proclogd/SConscript +++ /dev/null @@ -1,6 +0,0 @@ -Import('env', 'messaging', 'common') -libs = [messaging, 'pthread', common] -env.Program('proclogd', ['main.cc', 'proclog.cc'], LIBS=libs) - -if GetOption('extras'): - env.Program('tests/test_proclog', ['tests/test_proclog.cc', 'proclog.cc'], LIBS=libs) diff --git a/system/proclogd/main.cc b/system/proclogd/main.cc deleted file mode 100644 index 3f8a889eea..0000000000 --- a/system/proclogd/main.cc +++ /dev/null @@ -1,25 +0,0 @@ - -#include - -#include "common/ratekeeper.h" -#include "common/util.h" -#include "system/proclogd/proclog.h" - -ExitHandler do_exit; - -int main(int argc, char **argv) { - setpriority(PRIO_PROCESS, 0, -15); - - RateKeeper rk("proclogd", 0.5); - PubMaster publisher({"procLog"}); - - while (!do_exit) { - MessageBuilder msg; - buildProcLogMessage(msg); - publisher.send("procLog", msg); - - rk.keepTime(); - } - - return 0; -} diff --git a/system/proclogd/proclog.cc b/system/proclogd/proclog.cc deleted file mode 100644 index 09ab4f559e..0000000000 --- a/system/proclogd/proclog.cc +++ /dev/null @@ -1,239 +0,0 @@ -#include "system/proclogd/proclog.h" - -#include - -#include -#include -#include -#include - -#include "common/swaglog.h" -#include "common/util.h" - -namespace Parser { - -// parse /proc/stat -std::vector cpuTimes(std::istream &stream) { - 
std::vector cpu_times; - std::string line; - // skip the first line for cpu total - std::getline(stream, line); - while (std::getline(stream, line)) { - if (line.compare(0, 3, "cpu") != 0) break; - - CPUTime t = {}; - std::istringstream iss(line); - if (iss.ignore(3) >> t.id >> t.utime >> t.ntime >> t.stime >> t.itime >> t.iowtime >> t.irqtime >> t.sirqtime) - cpu_times.push_back(t); - } - return cpu_times; -} - -// parse /proc/meminfo -std::unordered_map memInfo(std::istream &stream) { - std::unordered_map mem_info; - std::string line, key; - while (std::getline(stream, line)) { - uint64_t val = 0; - std::istringstream iss(line); - if (iss >> key >> val) { - mem_info[key] = val * 1024; - } - } - return mem_info; -} - -// field position (https://man7.org/linux/man-pages/man5/proc.5.html) -enum StatPos { - pid = 1, - state = 3, - ppid = 4, - utime = 14, - stime = 15, - cutime = 16, - cstime = 17, - priority = 18, - nice = 19, - num_threads = 20, - starttime = 22, - vsize = 23, - rss = 24, - processor = 39, - MAX_FIELD = 52, -}; - -// parse /proc/pid/stat -std::optional procStat(std::string stat) { - // To avoid being fooled by names containing a closing paren, scan backwards. 
- auto open_paren = stat.find('('); - auto close_paren = stat.rfind(')'); - if (open_paren == std::string::npos || close_paren == std::string::npos || open_paren > close_paren) { - return std::nullopt; - } - - std::string name = stat.substr(open_paren + 1, close_paren - open_paren - 1); - // replace space in name with _ - std::replace(&stat[open_paren], &stat[close_paren], ' ', '_'); - std::istringstream iss(stat); - std::vector v{std::istream_iterator(iss), - std::istream_iterator()}; - try { - if (v.size() != StatPos::MAX_FIELD) { - throw std::invalid_argument("stat"); - } - ProcStat p = { - .name = name, - .pid = stoi(v[StatPos::pid - 1]), - .state = v[StatPos::state - 1][0], - .ppid = stoi(v[StatPos::ppid - 1]), - .utime = stoul(v[StatPos::utime - 1]), - .stime = stoul(v[StatPos::stime - 1]), - .cutime = stol(v[StatPos::cutime - 1]), - .cstime = stol(v[StatPos::cstime - 1]), - .priority = stol(v[StatPos::priority - 1]), - .nice = stol(v[StatPos::nice - 1]), - .num_threads = stol(v[StatPos::num_threads - 1]), - .starttime = stoull(v[StatPos::starttime - 1]), - .vms = stoul(v[StatPos::vsize - 1]), - .rss = stol(v[StatPos::rss - 1]), - .processor = stoi(v[StatPos::processor - 1]), - }; - return p; - } catch (const std::invalid_argument &e) { - LOGE("failed to parse procStat (%s) :%s", e.what(), stat.c_str()); - } catch (const std::out_of_range &e) { - LOGE("failed to parse procStat (%s) :%s", e.what(), stat.c_str()); - } - return std::nullopt; -} - -// return list of PIDs from /proc -std::vector pids() { - std::vector ids; - DIR *d = opendir("/proc"); - assert(d); - char *p_end; - struct dirent *de = NULL; - while ((de = readdir(d))) { - if (de->d_type == DT_DIR) { - int pid = strtol(de->d_name, &p_end, 10); - if (p_end == (de->d_name + strlen(de->d_name))) { - ids.push_back(pid); - } - } - } - closedir(d); - return ids; -} - -// null-delimited cmdline arguments to vector -std::vector cmdline(std::istream &stream) { - std::vector ret; - std::string line; - while 
(std::getline(stream, line, '\0')) { - if (!line.empty()) { - ret.push_back(line); - } - } - return ret; -} - -const ProcCache &getProcExtraInfo(int pid, const std::string &name) { - static std::unordered_map proc_cache; - ProcCache &cache = proc_cache[pid]; - if (cache.pid != pid || cache.name != name) { - cache.pid = pid; - cache.name = name; - std::string proc_path = "/proc/" + std::to_string(pid); - cache.exe = util::readlink(proc_path + "/exe"); - std::ifstream stream(proc_path + "/cmdline"); - cache.cmdline = cmdline(stream); - } - return cache; -} - -} // namespace Parser - -const double jiffy = sysconf(_SC_CLK_TCK); -const size_t page_size = sysconf(_SC_PAGE_SIZE); - -void buildCPUTimes(cereal::ProcLog::Builder &builder) { - std::ifstream stream("/proc/stat"); - std::vector stats = Parser::cpuTimes(stream); - - auto log_cpu_times = builder.initCpuTimes(stats.size()); - for (int i = 0; i < stats.size(); ++i) { - auto l = log_cpu_times[i]; - const CPUTime &r = stats[i]; - l.setCpuNum(r.id); - l.setUser(r.utime / jiffy); - l.setNice(r.ntime / jiffy); - l.setSystem(r.stime / jiffy); - l.setIdle(r.itime / jiffy); - l.setIowait(r.iowtime / jiffy); - l.setIrq(r.irqtime / jiffy); - l.setSoftirq(r.sirqtime / jiffy); - } -} - -void buildMemInfo(cereal::ProcLog::Builder &builder) { - std::ifstream stream("/proc/meminfo"); - auto mem_info = Parser::memInfo(stream); - - auto mem = builder.initMem(); - mem.setTotal(mem_info["MemTotal:"]); - mem.setFree(mem_info["MemFree:"]); - mem.setAvailable(mem_info["MemAvailable:"]); - mem.setBuffers(mem_info["Buffers:"]); - mem.setCached(mem_info["Cached:"]); - mem.setActive(mem_info["Active:"]); - mem.setInactive(mem_info["Inactive:"]); - mem.setShared(mem_info["Shmem:"]); -} - -void buildProcs(cereal::ProcLog::Builder &builder) { - auto pids = Parser::pids(); - std::vector proc_stats; - proc_stats.reserve(pids.size()); - for (int pid : pids) { - std::string path = "/proc/" + std::to_string(pid) + "/stat"; - if (auto stat = 
Parser::procStat(util::read_file(path))) { - proc_stats.push_back(*stat); - } - } - - auto procs = builder.initProcs(proc_stats.size()); - for (size_t i = 0; i < proc_stats.size(); i++) { - auto l = procs[i]; - const ProcStat &r = proc_stats[i]; - l.setPid(r.pid); - l.setState(r.state); - l.setPpid(r.ppid); - l.setCpuUser(r.utime / jiffy); - l.setCpuSystem(r.stime / jiffy); - l.setCpuChildrenUser(r.cutime / jiffy); - l.setCpuChildrenSystem(r.cstime / jiffy); - l.setPriority(r.priority); - l.setNice(r.nice); - l.setNumThreads(r.num_threads); - l.setStartTime(r.starttime / jiffy); - l.setMemVms(r.vms); - l.setMemRss((uint64_t)r.rss * page_size); - l.setProcessor(r.processor); - l.setName(r.name); - - const ProcCache &extra_info = Parser::getProcExtraInfo(r.pid, r.name); - l.setExe(extra_info.exe); - auto lcmdline = l.initCmdline(extra_info.cmdline.size()); - for (size_t j = 0; j < lcmdline.size(); j++) { - lcmdline.set(j, extra_info.cmdline[j]); - } - } -} - -void buildProcLogMessage(MessageBuilder &msg) { - auto procLog = msg.initEvent().initProcLog(); - buildProcs(procLog); - buildCPUTimes(procLog); - buildMemInfo(procLog); -} diff --git a/system/proclogd/proclog.h b/system/proclogd/proclog.h deleted file mode 100644 index 49f97cdd36..0000000000 --- a/system/proclogd/proclog.h +++ /dev/null @@ -1,40 +0,0 @@ -#include -#include -#include -#include - -#include "cereal/messaging/messaging.h" - -struct CPUTime { - int id; - unsigned long utime, ntime, stime, itime; - unsigned long iowtime, irqtime, sirqtime; -}; - -struct ProcCache { - int pid; - std::string name, exe; - std::vector cmdline; -}; - -struct ProcStat { - int pid, ppid, processor; - char state; - long cutime, cstime, priority, nice, num_threads, rss; - unsigned long utime, stime, vms; - unsigned long long starttime; - std::string name; -}; - -namespace Parser { - -std::vector pids(); -std::optional procStat(std::string stat); -std::vector cmdline(std::istream &stream); -std::vector cpuTimes(std::istream 
&stream); -std::unordered_map memInfo(std::istream &stream); -const ProcCache &getProcExtraInfo(int pid, const std::string &name); - -}; // namespace Parser - -void buildProcLogMessage(MessageBuilder &msg); diff --git a/system/proclogd/tests/.gitignore b/system/proclogd/tests/.gitignore deleted file mode 100644 index 5230b1598d..0000000000 --- a/system/proclogd/tests/.gitignore +++ /dev/null @@ -1 +0,0 @@ -test_proclog diff --git a/system/proclogd/tests/test_proclog.cc b/system/proclogd/tests/test_proclog.cc deleted file mode 100644 index b86229a499..0000000000 --- a/system/proclogd/tests/test_proclog.cc +++ /dev/null @@ -1,142 +0,0 @@ -#define CATCH_CONFIG_MAIN -#include "catch2/catch.hpp" -#include "common/util.h" -#include "system/proclogd/proclog.h" - -const std::string allowed_states = "RSDTZtWXxKWPI"; - -TEST_CASE("Parser::procStat") { - SECTION("from string") { - const std::string stat_str = - "33012 (code )) S 32978 6620 6620 0 -1 4194368 2042377 0 144 0 24510 11627 0 " - "0 20 0 39 0 53077 830029824 62214 18446744073709551615 94257242783744 94257366235808 " - "140735738643248 0 0 0 0 4098 1073808632 0 0 0 17 2 0 0 2 0 0 94257370858656 94257371248232 " - "94257404952576 140735738648768 140735738648823 140735738648823 140735738650595 0"; - auto stat = Parser::procStat(stat_str); - REQUIRE(stat); - REQUIRE(stat->pid == 33012); - REQUIRE(stat->name == "code )"); - REQUIRE(stat->state == 'S'); - REQUIRE(stat->ppid == 32978); - REQUIRE(stat->utime == 24510); - REQUIRE(stat->stime == 11627); - REQUIRE(stat->cutime == 0); - REQUIRE(stat->cstime == 0); - REQUIRE(stat->priority == 20); - REQUIRE(stat->nice == 0); - REQUIRE(stat->num_threads == 39); - REQUIRE(stat->starttime == 53077); - REQUIRE(stat->vms == 830029824); - REQUIRE(stat->rss == 62214); - REQUIRE(stat->processor == 2); - } - SECTION("all processes") { - std::vector pids = Parser::pids(); - REQUIRE(pids.size() > 1); - for (int pid : pids) { - std::string stat_path = "/proc/" + std::to_string(pid) + 
"/stat"; - INFO(stat_path); - if (auto stat = Parser::procStat(util::read_file(stat_path))) { - REQUIRE(stat->pid == pid); - REQUIRE(allowed_states.find(stat->state) != std::string::npos); - } else { - REQUIRE(util::file_exists(stat_path) == false); - } - } - } -} - -TEST_CASE("Parser::cpuTimes") { - SECTION("from string") { - std::string stat = - "cpu 0 0 0 0 0 0 0 0 0 0\n" - "cpu0 1 2 3 4 5 6 7 8 9 10\n" - "cpu1 1 2 3 4 5 6 7 8 9 10\n"; - std::istringstream stream(stat); - auto stats = Parser::cpuTimes(stream); - REQUIRE(stats.size() == 2); - for (int i = 0; i < stats.size(); ++i) { - REQUIRE(stats[i].id == i); - REQUIRE(stats[i].utime == 1); - REQUIRE(stats[i].ntime ==2); - REQUIRE(stats[i].stime == 3); - REQUIRE(stats[i].itime == 4); - REQUIRE(stats[i].iowtime == 5); - REQUIRE(stats[i].irqtime == 6); - REQUIRE(stats[i].sirqtime == 7); - } - } - SECTION("all cpus") { - std::istringstream stream(util::read_file("/proc/stat")); - auto stats = Parser::cpuTimes(stream); - REQUIRE(stats.size() == sysconf(_SC_NPROCESSORS_ONLN)); - for (int i = 0; i < stats.size(); ++i) { - REQUIRE(stats[i].id == i); - } - } -} - -TEST_CASE("Parser::memInfo") { - SECTION("from string") { - std::istringstream stream("MemTotal: 1024 kb\nMemFree: 2048 kb\n"); - auto meminfo = Parser::memInfo(stream); - REQUIRE(meminfo["MemTotal:"] == 1024 * 1024); - REQUIRE(meminfo["MemFree:"] == 2048 * 1024); - } - SECTION("from /proc/meminfo") { - std::string require_keys[] = {"MemTotal:", "MemFree:", "MemAvailable:", "Buffers:", "Cached:", "Active:", "Inactive:", "Shmem:"}; - std::istringstream stream(util::read_file("/proc/meminfo")); - auto meminfo = Parser::memInfo(stream); - for (auto &key : require_keys) { - REQUIRE(meminfo.find(key) != meminfo.end()); - REQUIRE(meminfo[key] > 0); - } - } -} - -void test_cmdline(std::string cmdline, const std::vector requires) { - std::stringstream ss; - ss.write(&cmdline[0], cmdline.size()); - auto cmds = Parser::cmdline(ss); - REQUIRE(cmds.size() == 
requires.size()); - for (int i = 0; i < requires.size(); ++i) { - REQUIRE(cmds[i] == requires[i]); - } -} -TEST_CASE("Parser::cmdline") { - test_cmdline(std::string("a\0b\0c\0", 7), {"a", "b", "c"}); - test_cmdline(std::string("a\0\0c\0", 6), {"a", "c"}); - test_cmdline(std::string("a\0b\0c\0\0\0", 9), {"a", "b", "c"}); -} - -TEST_CASE("buildProcLoggerMessage") { - MessageBuilder msg; - buildProcLogMessage(msg); - - kj::Array buf = capnp::messageToFlatArray(msg); - capnp::FlatArrayMessageReader reader(buf); - auto log = reader.getRoot().getProcLog(); - REQUIRE(log.totalSize().wordCount > 0); - - // test cereal::ProcLog::CPUTimes - auto cpu_times = log.getCpuTimes(); - REQUIRE(cpu_times.size() == sysconf(_SC_NPROCESSORS_ONLN)); - REQUIRE(cpu_times[cpu_times.size() - 1].getCpuNum() == cpu_times.size() - 1); - - // test cereal::ProcLog::Mem - auto mem = log.getMem(); - REQUIRE(mem.getTotal() > 0); - REQUIRE(mem.getShared() > 0); - - // test cereal::ProcLog::Process - auto procs = log.getProcs(); - for (auto p : procs) { - REQUIRE(allowed_states.find(p.getState()) != std::string::npos); - if (p.getPid() == ::getpid()) { - REQUIRE(p.getName() == "test_proclog"); - REQUIRE(p.getState() == 'R'); - REQUIRE_THAT(p.getExe().cStr(), Catch::Matchers::Contains("test_proclog")); - REQUIRE_THAT(p.getCmdline()[0], Catch::Matchers::Contains("test_proclog")); - } - } -} diff --git a/system/ubloxd/.gitignore b/system/ubloxd/.gitignore deleted file mode 100644 index 05263ff67c..0000000000 --- a/system/ubloxd/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -ubloxd -tests/test_glonass_runner diff --git a/system/ubloxd/SConscript b/system/ubloxd/SConscript index ce09e235e6..9eb50760ba 100644 --- a/system/ubloxd/SConscript +++ b/system/ubloxd/SConscript @@ -1,20 +1,11 @@ -Import('env', 'common', 'messaging') - -loc_libs = [messaging, common, 'kaitai', 'pthread'] +Import('env') if GetOption('kaitai'): - generated = Dir('generated').srcnode().abspath - cmd = f"kaitai-struct-compiler --target 
cpp_stl --outdir {generated} $SOURCES" - env.Command(['generated/ubx.cpp', 'generated/ubx.h'], 'ubx.ksy', cmd) - env.Command(['generated/gps.cpp', 'generated/gps.h'], 'gps.ksy', cmd) - glonass = env.Command(['generated/glonass.cpp', 'generated/glonass.h'], 'glonass.ksy', cmd) - + current_dir = Dir('./generated/').srcnode().abspath + python_cmd = f"kaitai-struct-compiler --target python --outdir {current_dir} $SOURCES" + env.Command(File('./generated/ubx.py'), 'ubx.ksy', python_cmd) + env.Command(File('./generated/gps.py'), 'gps.ksy', python_cmd) + env.Command(File('./generated/glonass.py'), 'glonass.ksy', python_cmd) # kaitai issue: https://github.com/kaitai-io/kaitai_struct/issues/910 - patch = env.Command(None, 'glonass_fix.patch', 'git apply $SOURCES') - env.Depends(patch, glonass) - -glonass_obj = env.Object('generated/glonass.cpp') -env.Program("ubloxd", ["ubloxd.cc", "ublox_msg.cc", "generated/ubx.cpp", "generated/gps.cpp", glonass_obj], LIBS=loc_libs) - -if GetOption('extras'): - env.Program("tests/test_glonass_runner", ['tests/test_glonass_runner.cc', 'tests/test_glonass_kaitai.cc', glonass_obj], LIBS=[loc_libs]) \ No newline at end of file + py_glonass_fix = env.Command(None, File('./generated/glonass.py'), "sed -i 's/self._io.align_to_byte()/# self._io.align_to_byte()/' $SOURCES") + env.Depends(py_glonass_fix, File('./generated/glonass.py')) diff --git a/system/ubloxd/generated/glonass.cpp b/system/ubloxd/generated/glonass.cpp deleted file mode 100644 index cd0f96ab68..0000000000 --- a/system/ubloxd/generated/glonass.cpp +++ /dev/null @@ -1,353 +0,0 @@ -// This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild - -#include "glonass.h" - -glonass_t::glonass_t(kaitai::kstream* p__io, kaitai::kstruct* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = this; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void glonass_t::_read() { - m_idle_chip = m__io->read_bits_int_be(1); - m_string_number = m__io->read_bits_int_be(4); - //m__io->align_to_byte(); - switch (string_number()) { - case 4: { - m_data = new string_4_t(m__io, this, m__root); - break; - } - case 1: { - m_data = new string_1_t(m__io, this, m__root); - break; - } - case 3: { - m_data = new string_3_t(m__io, this, m__root); - break; - } - case 5: { - m_data = new string_5_t(m__io, this, m__root); - break; - } - case 2: { - m_data = new string_2_t(m__io, this, m__root); - break; - } - default: { - m_data = new string_non_immediate_t(m__io, this, m__root); - break; - } - } - m_hamming_code = m__io->read_bits_int_be(8); - m_pad_1 = m__io->read_bits_int_be(11); - m_superframe_number = m__io->read_bits_int_be(16); - m_pad_2 = m__io->read_bits_int_be(8); - m_frame_number = m__io->read_bits_int_be(8); -} - -glonass_t::~glonass_t() { - _clean_up(); -} - -void glonass_t::_clean_up() { - if (m_data) { - delete m_data; m_data = 0; - } -} - -glonass_t::string_4_t::string_4_t(kaitai::kstream* p__io, glonass_t* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - f_tau_n = false; - f_delta_tau_n = false; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void glonass_t::string_4_t::_read() { - m_tau_n_sign = m__io->read_bits_int_be(1); - m_tau_n_value = m__io->read_bits_int_be(21); - m_delta_tau_n_sign = m__io->read_bits_int_be(1); - m_delta_tau_n_value = m__io->read_bits_int_be(4); - m_e_n = m__io->read_bits_int_be(5); - m_not_used_1 = m__io->read_bits_int_be(14); - m_p4 = m__io->read_bits_int_be(1); - m_f_t = m__io->read_bits_int_be(4); - m_not_used_2 = m__io->read_bits_int_be(3); - m_n_t = m__io->read_bits_int_be(11); - m_n = m__io->read_bits_int_be(5); - m_m = m__io->read_bits_int_be(2); -} - -glonass_t::string_4_t::~string_4_t() { - _clean_up(); -} - -void glonass_t::string_4_t::_clean_up() { -} - -int32_t glonass_t::string_4_t::tau_n() { - if (f_tau_n) - return m_tau_n; - m_tau_n = ((tau_n_sign()) ? ((tau_n_value() * -1)) : (tau_n_value())); - f_tau_n = true; - return m_tau_n; -} - -int32_t glonass_t::string_4_t::delta_tau_n() { - if (f_delta_tau_n) - return m_delta_tau_n; - m_delta_tau_n = ((delta_tau_n_sign()) ? ((delta_tau_n_value() * -1)) : (delta_tau_n_value())); - f_delta_tau_n = true; - return m_delta_tau_n; -} - -glonass_t::string_non_immediate_t::string_non_immediate_t(kaitai::kstream* p__io, glonass_t* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - - try { - _read(); - } catch(...) { - _clean_up(); - throw; - } -} - -void glonass_t::string_non_immediate_t::_read() { - m_data_1 = m__io->read_bits_int_be(64); - m_data_2 = m__io->read_bits_int_be(8); -} - -glonass_t::string_non_immediate_t::~string_non_immediate_t() { - _clean_up(); -} - -void glonass_t::string_non_immediate_t::_clean_up() { -} - -glonass_t::string_5_t::string_5_t(kaitai::kstream* p__io, glonass_t* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void glonass_t::string_5_t::_read() { - m_n_a = m__io->read_bits_int_be(11); - m_tau_c = m__io->read_bits_int_be(32); - m_not_used = m__io->read_bits_int_be(1); - m_n_4 = m__io->read_bits_int_be(5); - m_tau_gps = m__io->read_bits_int_be(22); - m_l_n = m__io->read_bits_int_be(1); -} - -glonass_t::string_5_t::~string_5_t() { - _clean_up(); -} - -void glonass_t::string_5_t::_clean_up() { -} - -glonass_t::string_1_t::string_1_t(kaitai::kstream* p__io, glonass_t* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - f_x_vel = false; - f_x_accel = false; - f_x = false; - - try { - _read(); - } catch(...) { - _clean_up(); - throw; - } -} - -void glonass_t::string_1_t::_read() { - m_not_used = m__io->read_bits_int_be(2); - m_p1 = m__io->read_bits_int_be(2); - m_t_k = m__io->read_bits_int_be(12); - m_x_vel_sign = m__io->read_bits_int_be(1); - m_x_vel_value = m__io->read_bits_int_be(23); - m_x_accel_sign = m__io->read_bits_int_be(1); - m_x_accel_value = m__io->read_bits_int_be(4); - m_x_sign = m__io->read_bits_int_be(1); - m_x_value = m__io->read_bits_int_be(26); -} - -glonass_t::string_1_t::~string_1_t() { - _clean_up(); -} - -void glonass_t::string_1_t::_clean_up() { -} - -int32_t glonass_t::string_1_t::x_vel() { - if (f_x_vel) - return m_x_vel; - m_x_vel = ((x_vel_sign()) ? ((x_vel_value() * -1)) : (x_vel_value())); - f_x_vel = true; - return m_x_vel; -} - -int32_t glonass_t::string_1_t::x_accel() { - if (f_x_accel) - return m_x_accel; - m_x_accel = ((x_accel_sign()) ? ((x_accel_value() * -1)) : (x_accel_value())); - f_x_accel = true; - return m_x_accel; -} - -int32_t glonass_t::string_1_t::x() { - if (f_x) - return m_x; - m_x = ((x_sign()) ? 
((x_value() * -1)) : (x_value())); - f_x = true; - return m_x; -} - -glonass_t::string_2_t::string_2_t(kaitai::kstream* p__io, glonass_t* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - f_y_vel = false; - f_y_accel = false; - f_y = false; - - try { - _read(); - } catch(...) { - _clean_up(); - throw; - } -} - -void glonass_t::string_2_t::_read() { - m_b_n = m__io->read_bits_int_be(3); - m_p2 = m__io->read_bits_int_be(1); - m_t_b = m__io->read_bits_int_be(7); - m_not_used = m__io->read_bits_int_be(5); - m_y_vel_sign = m__io->read_bits_int_be(1); - m_y_vel_value = m__io->read_bits_int_be(23); - m_y_accel_sign = m__io->read_bits_int_be(1); - m_y_accel_value = m__io->read_bits_int_be(4); - m_y_sign = m__io->read_bits_int_be(1); - m_y_value = m__io->read_bits_int_be(26); -} - -glonass_t::string_2_t::~string_2_t() { - _clean_up(); -} - -void glonass_t::string_2_t::_clean_up() { -} - -int32_t glonass_t::string_2_t::y_vel() { - if (f_y_vel) - return m_y_vel; - m_y_vel = ((y_vel_sign()) ? ((y_vel_value() * -1)) : (y_vel_value())); - f_y_vel = true; - return m_y_vel; -} - -int32_t glonass_t::string_2_t::y_accel() { - if (f_y_accel) - return m_y_accel; - m_y_accel = ((y_accel_sign()) ? ((y_accel_value() * -1)) : (y_accel_value())); - f_y_accel = true; - return m_y_accel; -} - -int32_t glonass_t::string_2_t::y() { - if (f_y) - return m_y; - m_y = ((y_sign()) ? ((y_value() * -1)) : (y_value())); - f_y = true; - return m_y; -} - -glonass_t::string_3_t::string_3_t(kaitai::kstream* p__io, glonass_t* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - f_gamma_n = false; - f_z_vel = false; - f_z_accel = false; - f_z = false; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void glonass_t::string_3_t::_read() { - m_p3 = m__io->read_bits_int_be(1); - m_gamma_n_sign = m__io->read_bits_int_be(1); - m_gamma_n_value = m__io->read_bits_int_be(10); - m_not_used = m__io->read_bits_int_be(1); - m_p = m__io->read_bits_int_be(2); - m_l_n = m__io->read_bits_int_be(1); - m_z_vel_sign = m__io->read_bits_int_be(1); - m_z_vel_value = m__io->read_bits_int_be(23); - m_z_accel_sign = m__io->read_bits_int_be(1); - m_z_accel_value = m__io->read_bits_int_be(4); - m_z_sign = m__io->read_bits_int_be(1); - m_z_value = m__io->read_bits_int_be(26); -} - -glonass_t::string_3_t::~string_3_t() { - _clean_up(); -} - -void glonass_t::string_3_t::_clean_up() { -} - -int32_t glonass_t::string_3_t::gamma_n() { - if (f_gamma_n) - return m_gamma_n; - m_gamma_n = ((gamma_n_sign()) ? ((gamma_n_value() * -1)) : (gamma_n_value())); - f_gamma_n = true; - return m_gamma_n; -} - -int32_t glonass_t::string_3_t::z_vel() { - if (f_z_vel) - return m_z_vel; - m_z_vel = ((z_vel_sign()) ? ((z_vel_value() * -1)) : (z_vel_value())); - f_z_vel = true; - return m_z_vel; -} - -int32_t glonass_t::string_3_t::z_accel() { - if (f_z_accel) - return m_z_accel; - m_z_accel = ((z_accel_sign()) ? ((z_accel_value() * -1)) : (z_accel_value())); - f_z_accel = true; - return m_z_accel; -} - -int32_t glonass_t::string_3_t::z() { - if (f_z) - return m_z; - m_z = ((z_sign()) ? ((z_value() * -1)) : (z_value())); - f_z = true; - return m_z; -} diff --git a/system/ubloxd/generated/glonass.h b/system/ubloxd/generated/glonass.h deleted file mode 100644 index 19867ba22b..0000000000 --- a/system/ubloxd/generated/glonass.h +++ /dev/null @@ -1,375 +0,0 @@ -#ifndef GLONASS_H_ -#define GLONASS_H_ - -// This is a generated file! 
Please edit source .ksy file and use kaitai-struct-compiler to rebuild - -#include "kaitai/kaitaistruct.h" -#include - -#if KAITAI_STRUCT_VERSION < 9000L -#error "Incompatible Kaitai Struct C++/STL API: version 0.9 or later is required" -#endif - -class glonass_t : public kaitai::kstruct { - -public: - class string_4_t; - class string_non_immediate_t; - class string_5_t; - class string_1_t; - class string_2_t; - class string_3_t; - - glonass_t(kaitai::kstream* p__io, kaitai::kstruct* p__parent = 0, glonass_t* p__root = 0); - -private: - void _read(); - void _clean_up(); - -public: - ~glonass_t(); - - class string_4_t : public kaitai::kstruct { - - public: - - string_4_t(kaitai::kstream* p__io, glonass_t* p__parent = 0, glonass_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~string_4_t(); - - private: - bool f_tau_n; - int32_t m_tau_n; - - public: - int32_t tau_n(); - - private: - bool f_delta_tau_n; - int32_t m_delta_tau_n; - - public: - int32_t delta_tau_n(); - - private: - bool m_tau_n_sign; - uint64_t m_tau_n_value; - bool m_delta_tau_n_sign; - uint64_t m_delta_tau_n_value; - uint64_t m_e_n; - uint64_t m_not_used_1; - bool m_p4; - uint64_t m_f_t; - uint64_t m_not_used_2; - uint64_t m_n_t; - uint64_t m_n; - uint64_t m_m; - glonass_t* m__root; - glonass_t* m__parent; - - public: - bool tau_n_sign() const { return m_tau_n_sign; } - uint64_t tau_n_value() const { return m_tau_n_value; } - bool delta_tau_n_sign() const { return m_delta_tau_n_sign; } - uint64_t delta_tau_n_value() const { return m_delta_tau_n_value; } - uint64_t e_n() const { return m_e_n; } - uint64_t not_used_1() const { return m_not_used_1; } - bool p4() const { return m_p4; } - uint64_t f_t() const { return m_f_t; } - uint64_t not_used_2() const { return m_not_used_2; } - uint64_t n_t() const { return m_n_t; } - uint64_t n() const { return m_n; } - uint64_t m() const { return m_m; } - glonass_t* _root() const { return m__root; } - glonass_t* _parent() const { 
return m__parent; } - }; - - class string_non_immediate_t : public kaitai::kstruct { - - public: - - string_non_immediate_t(kaitai::kstream* p__io, glonass_t* p__parent = 0, glonass_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~string_non_immediate_t(); - - private: - uint64_t m_data_1; - uint64_t m_data_2; - glonass_t* m__root; - glonass_t* m__parent; - - public: - uint64_t data_1() const { return m_data_1; } - uint64_t data_2() const { return m_data_2; } - glonass_t* _root() const { return m__root; } - glonass_t* _parent() const { return m__parent; } - }; - - class string_5_t : public kaitai::kstruct { - - public: - - string_5_t(kaitai::kstream* p__io, glonass_t* p__parent = 0, glonass_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~string_5_t(); - - private: - uint64_t m_n_a; - uint64_t m_tau_c; - bool m_not_used; - uint64_t m_n_4; - uint64_t m_tau_gps; - bool m_l_n; - glonass_t* m__root; - glonass_t* m__parent; - - public: - uint64_t n_a() const { return m_n_a; } - uint64_t tau_c() const { return m_tau_c; } - bool not_used() const { return m_not_used; } - uint64_t n_4() const { return m_n_4; } - uint64_t tau_gps() const { return m_tau_gps; } - bool l_n() const { return m_l_n; } - glonass_t* _root() const { return m__root; } - glonass_t* _parent() const { return m__parent; } - }; - - class string_1_t : public kaitai::kstruct { - - public: - - string_1_t(kaitai::kstream* p__io, glonass_t* p__parent = 0, glonass_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~string_1_t(); - - private: - bool f_x_vel; - int32_t m_x_vel; - - public: - int32_t x_vel(); - - private: - bool f_x_accel; - int32_t m_x_accel; - - public: - int32_t x_accel(); - - private: - bool f_x; - int32_t m_x; - - public: - int32_t x(); - - private: - uint64_t m_not_used; - uint64_t m_p1; - uint64_t m_t_k; - bool m_x_vel_sign; - uint64_t m_x_vel_value; - bool m_x_accel_sign; - uint64_t m_x_accel_value; 
- bool m_x_sign; - uint64_t m_x_value; - glonass_t* m__root; - glonass_t* m__parent; - - public: - uint64_t not_used() const { return m_not_used; } - uint64_t p1() const { return m_p1; } - uint64_t t_k() const { return m_t_k; } - bool x_vel_sign() const { return m_x_vel_sign; } - uint64_t x_vel_value() const { return m_x_vel_value; } - bool x_accel_sign() const { return m_x_accel_sign; } - uint64_t x_accel_value() const { return m_x_accel_value; } - bool x_sign() const { return m_x_sign; } - uint64_t x_value() const { return m_x_value; } - glonass_t* _root() const { return m__root; } - glonass_t* _parent() const { return m__parent; } - }; - - class string_2_t : public kaitai::kstruct { - - public: - - string_2_t(kaitai::kstream* p__io, glonass_t* p__parent = 0, glonass_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~string_2_t(); - - private: - bool f_y_vel; - int32_t m_y_vel; - - public: - int32_t y_vel(); - - private: - bool f_y_accel; - int32_t m_y_accel; - - public: - int32_t y_accel(); - - private: - bool f_y; - int32_t m_y; - - public: - int32_t y(); - - private: - uint64_t m_b_n; - bool m_p2; - uint64_t m_t_b; - uint64_t m_not_used; - bool m_y_vel_sign; - uint64_t m_y_vel_value; - bool m_y_accel_sign; - uint64_t m_y_accel_value; - bool m_y_sign; - uint64_t m_y_value; - glonass_t* m__root; - glonass_t* m__parent; - - public: - uint64_t b_n() const { return m_b_n; } - bool p2() const { return m_p2; } - uint64_t t_b() const { return m_t_b; } - uint64_t not_used() const { return m_not_used; } - bool y_vel_sign() const { return m_y_vel_sign; } - uint64_t y_vel_value() const { return m_y_vel_value; } - bool y_accel_sign() const { return m_y_accel_sign; } - uint64_t y_accel_value() const { return m_y_accel_value; } - bool y_sign() const { return m_y_sign; } - uint64_t y_value() const { return m_y_value; } - glonass_t* _root() const { return m__root; } - glonass_t* _parent() const { return m__parent; } - }; - - class string_3_t : 
public kaitai::kstruct { - - public: - - string_3_t(kaitai::kstream* p__io, glonass_t* p__parent = 0, glonass_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~string_3_t(); - - private: - bool f_gamma_n; - int32_t m_gamma_n; - - public: - int32_t gamma_n(); - - private: - bool f_z_vel; - int32_t m_z_vel; - - public: - int32_t z_vel(); - - private: - bool f_z_accel; - int32_t m_z_accel; - - public: - int32_t z_accel(); - - private: - bool f_z; - int32_t m_z; - - public: - int32_t z(); - - private: - bool m_p3; - bool m_gamma_n_sign; - uint64_t m_gamma_n_value; - bool m_not_used; - uint64_t m_p; - bool m_l_n; - bool m_z_vel_sign; - uint64_t m_z_vel_value; - bool m_z_accel_sign; - uint64_t m_z_accel_value; - bool m_z_sign; - uint64_t m_z_value; - glonass_t* m__root; - glonass_t* m__parent; - - public: - bool p3() const { return m_p3; } - bool gamma_n_sign() const { return m_gamma_n_sign; } - uint64_t gamma_n_value() const { return m_gamma_n_value; } - bool not_used() const { return m_not_used; } - uint64_t p() const { return m_p; } - bool l_n() const { return m_l_n; } - bool z_vel_sign() const { return m_z_vel_sign; } - uint64_t z_vel_value() const { return m_z_vel_value; } - bool z_accel_sign() const { return m_z_accel_sign; } - uint64_t z_accel_value() const { return m_z_accel_value; } - bool z_sign() const { return m_z_sign; } - uint64_t z_value() const { return m_z_value; } - glonass_t* _root() const { return m__root; } - glonass_t* _parent() const { return m__parent; } - }; - -private: - bool m_idle_chip; - uint64_t m_string_number; - kaitai::kstruct* m_data; - uint64_t m_hamming_code; - uint64_t m_pad_1; - uint64_t m_superframe_number; - uint64_t m_pad_2; - uint64_t m_frame_number; - glonass_t* m__root; - kaitai::kstruct* m__parent; - -public: - bool idle_chip() const { return m_idle_chip; } - uint64_t string_number() const { return m_string_number; } - kaitai::kstruct* data() const { return m_data; } - uint64_t hamming_code() 
const { return m_hamming_code; } - uint64_t pad_1() const { return m_pad_1; } - uint64_t superframe_number() const { return m_superframe_number; } - uint64_t pad_2() const { return m_pad_2; } - uint64_t frame_number() const { return m_frame_number; } - glonass_t* _root() const { return m__root; } - kaitai::kstruct* _parent() const { return m__parent; } -}; - -#endif // GLONASS_H_ diff --git a/system/ubloxd/generated/glonass.py b/system/ubloxd/generated/glonass.py new file mode 100644 index 0000000000..40aa16bb6f --- /dev/null +++ b/system/ubloxd/generated/glonass.py @@ -0,0 +1,247 @@ +# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild + +import kaitaistruct +from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO + + +if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9): + raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__)) + +class Glonass(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.idle_chip = self._io.read_bits_int_be(1) != 0 + self.string_number = self._io.read_bits_int_be(4) + # workaround for kaitai bit alignment issue (see glonass_fix.patch for C++) + # self._io.align_to_byte() + _on = self.string_number + if _on == 4: + self.data = Glonass.String4(self._io, self, self._root) + elif _on == 1: + self.data = Glonass.String1(self._io, self, self._root) + elif _on == 3: + self.data = Glonass.String3(self._io, self, self._root) + elif _on == 5: + self.data = Glonass.String5(self._io, self, self._root) + elif _on == 2: + self.data = Glonass.String2(self._io, self, self._root) + else: + self.data = Glonass.StringNonImmediate(self._io, self, self._root) + self.hamming_code = self._io.read_bits_int_be(8) + self.pad_1 = self._io.read_bits_int_be(11) + self.superframe_number = 
self._io.read_bits_int_be(16) + self.pad_2 = self._io.read_bits_int_be(8) + self.frame_number = self._io.read_bits_int_be(8) + + class String4(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.tau_n_sign = self._io.read_bits_int_be(1) != 0 + self.tau_n_value = self._io.read_bits_int_be(21) + self.delta_tau_n_sign = self._io.read_bits_int_be(1) != 0 + self.delta_tau_n_value = self._io.read_bits_int_be(4) + self.e_n = self._io.read_bits_int_be(5) + self.not_used_1 = self._io.read_bits_int_be(14) + self.p4 = self._io.read_bits_int_be(1) != 0 + self.f_t = self._io.read_bits_int_be(4) + self.not_used_2 = self._io.read_bits_int_be(3) + self.n_t = self._io.read_bits_int_be(11) + self.n = self._io.read_bits_int_be(5) + self.m = self._io.read_bits_int_be(2) + + @property + def tau_n(self): + if hasattr(self, '_m_tau_n'): + return self._m_tau_n + + self._m_tau_n = ((self.tau_n_value * -1) if self.tau_n_sign else self.tau_n_value) + return getattr(self, '_m_tau_n', None) + + @property + def delta_tau_n(self): + if hasattr(self, '_m_delta_tau_n'): + return self._m_delta_tau_n + + self._m_delta_tau_n = ((self.delta_tau_n_value * -1) if self.delta_tau_n_sign else self.delta_tau_n_value) + return getattr(self, '_m_delta_tau_n', None) + + + class StringNonImmediate(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.data_1 = self._io.read_bits_int_be(64) + self.data_2 = self._io.read_bits_int_be(8) + + + class String5(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.n_a = self._io.read_bits_int_be(11) + self.tau_c = self._io.read_bits_int_be(32) + self.not_used 
= self._io.read_bits_int_be(1) != 0 + self.n_4 = self._io.read_bits_int_be(5) + self.tau_gps = self._io.read_bits_int_be(22) + self.l_n = self._io.read_bits_int_be(1) != 0 + + + class String1(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.not_used = self._io.read_bits_int_be(2) + self.p1 = self._io.read_bits_int_be(2) + self.t_k = self._io.read_bits_int_be(12) + self.x_vel_sign = self._io.read_bits_int_be(1) != 0 + self.x_vel_value = self._io.read_bits_int_be(23) + self.x_accel_sign = self._io.read_bits_int_be(1) != 0 + self.x_accel_value = self._io.read_bits_int_be(4) + self.x_sign = self._io.read_bits_int_be(1) != 0 + self.x_value = self._io.read_bits_int_be(26) + + @property + def x_vel(self): + if hasattr(self, '_m_x_vel'): + return self._m_x_vel + + self._m_x_vel = ((self.x_vel_value * -1) if self.x_vel_sign else self.x_vel_value) + return getattr(self, '_m_x_vel', None) + + @property + def x_accel(self): + if hasattr(self, '_m_x_accel'): + return self._m_x_accel + + self._m_x_accel = ((self.x_accel_value * -1) if self.x_accel_sign else self.x_accel_value) + return getattr(self, '_m_x_accel', None) + + @property + def x(self): + if hasattr(self, '_m_x'): + return self._m_x + + self._m_x = ((self.x_value * -1) if self.x_sign else self.x_value) + return getattr(self, '_m_x', None) + + + class String2(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.b_n = self._io.read_bits_int_be(3) + self.p2 = self._io.read_bits_int_be(1) != 0 + self.t_b = self._io.read_bits_int_be(7) + self.not_used = self._io.read_bits_int_be(5) + self.y_vel_sign = self._io.read_bits_int_be(1) != 0 + self.y_vel_value = self._io.read_bits_int_be(23) + self.y_accel_sign = self._io.read_bits_int_be(1) != 0 + 
self.y_accel_value = self._io.read_bits_int_be(4) + self.y_sign = self._io.read_bits_int_be(1) != 0 + self.y_value = self._io.read_bits_int_be(26) + + @property + def y_vel(self): + if hasattr(self, '_m_y_vel'): + return self._m_y_vel + + self._m_y_vel = ((self.y_vel_value * -1) if self.y_vel_sign else self.y_vel_value) + return getattr(self, '_m_y_vel', None) + + @property + def y_accel(self): + if hasattr(self, '_m_y_accel'): + return self._m_y_accel + + self._m_y_accel = ((self.y_accel_value * -1) if self.y_accel_sign else self.y_accel_value) + return getattr(self, '_m_y_accel', None) + + @property + def y(self): + if hasattr(self, '_m_y'): + return self._m_y + + self._m_y = ((self.y_value * -1) if self.y_sign else self.y_value) + return getattr(self, '_m_y', None) + + + class String3(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.p3 = self._io.read_bits_int_be(1) != 0 + self.gamma_n_sign = self._io.read_bits_int_be(1) != 0 + self.gamma_n_value = self._io.read_bits_int_be(10) + self.not_used = self._io.read_bits_int_be(1) != 0 + self.p = self._io.read_bits_int_be(2) + self.l_n = self._io.read_bits_int_be(1) != 0 + self.z_vel_sign = self._io.read_bits_int_be(1) != 0 + self.z_vel_value = self._io.read_bits_int_be(23) + self.z_accel_sign = self._io.read_bits_int_be(1) != 0 + self.z_accel_value = self._io.read_bits_int_be(4) + self.z_sign = self._io.read_bits_int_be(1) != 0 + self.z_value = self._io.read_bits_int_be(26) + + @property + def gamma_n(self): + if hasattr(self, '_m_gamma_n'): + return self._m_gamma_n + + self._m_gamma_n = ((self.gamma_n_value * -1) if self.gamma_n_sign else self.gamma_n_value) + return getattr(self, '_m_gamma_n', None) + + @property + def z_vel(self): + if hasattr(self, '_m_z_vel'): + return self._m_z_vel + + self._m_z_vel = ((self.z_vel_value * -1) if self.z_vel_sign else self.z_vel_value) + 
return getattr(self, '_m_z_vel', None) + + @property + def z_accel(self): + if hasattr(self, '_m_z_accel'): + return self._m_z_accel + + self._m_z_accel = ((self.z_accel_value * -1) if self.z_accel_sign else self.z_accel_value) + return getattr(self, '_m_z_accel', None) + + @property + def z(self): + if hasattr(self, '_m_z'): + return self._m_z + + self._m_z = ((self.z_value * -1) if self.z_sign else self.z_value) + return getattr(self, '_m_z', None) + + diff --git a/system/ubloxd/generated/gps.cpp b/system/ubloxd/generated/gps.cpp deleted file mode 100644 index 8e1cb85b95..0000000000 --- a/system/ubloxd/generated/gps.cpp +++ /dev/null @@ -1,325 +0,0 @@ -// This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild - -#include "gps.h" -#include "kaitai/exceptions.h" - -gps_t::gps_t(kaitai::kstream* p__io, kaitai::kstruct* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = this; - m_tlm = 0; - m_how = 0; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void gps_t::_read() { - m_tlm = new tlm_t(m__io, this, m__root); - m_how = new how_t(m__io, this, m__root); - n_body = true; - switch (how()->subframe_id()) { - case 1: { - n_body = false; - m_body = new subframe_1_t(m__io, this, m__root); - break; - } - case 2: { - n_body = false; - m_body = new subframe_2_t(m__io, this, m__root); - break; - } - case 3: { - n_body = false; - m_body = new subframe_3_t(m__io, this, m__root); - break; - } - case 4: { - n_body = false; - m_body = new subframe_4_t(m__io, this, m__root); - break; - } - } -} - -gps_t::~gps_t() { - _clean_up(); -} - -void gps_t::_clean_up() { - if (m_tlm) { - delete m_tlm; m_tlm = 0; - } - if (m_how) { - delete m_how; m_how = 0; - } - if (!n_body) { - if (m_body) { - delete m_body; m_body = 0; - } - } -} - -gps_t::subframe_1_t::subframe_1_t(kaitai::kstream* p__io, gps_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - f_af_0 = false; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void gps_t::subframe_1_t::_read() { - m_week_no = m__io->read_bits_int_be(10); - m_code = m__io->read_bits_int_be(2); - m_sv_accuracy = m__io->read_bits_int_be(4); - m_sv_health = m__io->read_bits_int_be(6); - m_iodc_msb = m__io->read_bits_int_be(2); - m_l2_p_data_flag = m__io->read_bits_int_be(1); - m_reserved1 = m__io->read_bits_int_be(23); - m_reserved2 = m__io->read_bits_int_be(24); - m_reserved3 = m__io->read_bits_int_be(24); - m_reserved4 = m__io->read_bits_int_be(16); - m__io->align_to_byte(); - m_t_gd = m__io->read_s1(); - m_iodc_lsb = m__io->read_u1(); - m_t_oc = m__io->read_u2be(); - m_af_2 = m__io->read_s1(); - m_af_1 = m__io->read_s2be(); - m_af_0_sign = m__io->read_bits_int_be(1); - m_af_0_value = m__io->read_bits_int_be(21); - m_reserved5 = m__io->read_bits_int_be(2); -} - -gps_t::subframe_1_t::~subframe_1_t() { - _clean_up(); -} - -void gps_t::subframe_1_t::_clean_up() { -} - -int32_t gps_t::subframe_1_t::af_0() { - if (f_af_0) - return m_af_0; - m_af_0 = ((af_0_sign()) ? ((af_0_value() - (1 << 21))) : (af_0_value())); - f_af_0 = true; - return m_af_0; -} - -gps_t::subframe_3_t::subframe_3_t(kaitai::kstream* p__io, gps_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - f_omega_dot = false; - f_idot = false; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void gps_t::subframe_3_t::_read() { - m_c_ic = m__io->read_s2be(); - m_omega_0 = m__io->read_s4be(); - m_c_is = m__io->read_s2be(); - m_i_0 = m__io->read_s4be(); - m_c_rc = m__io->read_s2be(); - m_omega = m__io->read_s4be(); - m_omega_dot_sign = m__io->read_bits_int_be(1); - m_omega_dot_value = m__io->read_bits_int_be(23); - m__io->align_to_byte(); - m_iode = m__io->read_u1(); - m_idot_sign = m__io->read_bits_int_be(1); - m_idot_value = m__io->read_bits_int_be(13); - m_reserved = m__io->read_bits_int_be(2); -} - -gps_t::subframe_3_t::~subframe_3_t() { - _clean_up(); -} - -void gps_t::subframe_3_t::_clean_up() { -} - -int32_t gps_t::subframe_3_t::omega_dot() { - if (f_omega_dot) - return m_omega_dot; - m_omega_dot = ((omega_dot_sign()) ? ((omega_dot_value() - (1 << 23))) : (omega_dot_value())); - f_omega_dot = true; - return m_omega_dot; -} - -int32_t gps_t::subframe_3_t::idot() { - if (f_idot) - return m_idot; - m_idot = ((idot_sign()) ? ((idot_value() - (1 << 13))) : (idot_value())); - f_idot = true; - return m_idot; -} - -gps_t::subframe_4_t::subframe_4_t(kaitai::kstream* p__io, gps_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void gps_t::subframe_4_t::_read() { - m_data_id = m__io->read_bits_int_be(2); - m_page_id = m__io->read_bits_int_be(6); - m__io->align_to_byte(); - n_body = true; - switch (page_id()) { - case 56: { - n_body = false; - m_body = new ionosphere_data_t(m__io, this, m__root); - break; - } - } -} - -gps_t::subframe_4_t::~subframe_4_t() { - _clean_up(); -} - -void gps_t::subframe_4_t::_clean_up() { - if (!n_body) { - if (m_body) { - delete m_body; m_body = 0; - } - } -} - -gps_t::subframe_4_t::ionosphere_data_t::ionosphere_data_t(kaitai::kstream* p__io, gps_t::subframe_4_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - - try { - _read(); - } catch(...) { - _clean_up(); - throw; - } -} - -void gps_t::subframe_4_t::ionosphere_data_t::_read() { - m_a0 = m__io->read_s1(); - m_a1 = m__io->read_s1(); - m_a2 = m__io->read_s1(); - m_a3 = m__io->read_s1(); - m_b0 = m__io->read_s1(); - m_b1 = m__io->read_s1(); - m_b2 = m__io->read_s1(); - m_b3 = m__io->read_s1(); -} - -gps_t::subframe_4_t::ionosphere_data_t::~ionosphere_data_t() { - _clean_up(); -} - -void gps_t::subframe_4_t::ionosphere_data_t::_clean_up() { -} - -gps_t::how_t::how_t(kaitai::kstream* p__io, gps_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - - try { - _read(); - } catch(...) { - _clean_up(); - throw; - } -} - -void gps_t::how_t::_read() { - m_tow_count = m__io->read_bits_int_be(17); - m_alert = m__io->read_bits_int_be(1); - m_anti_spoof = m__io->read_bits_int_be(1); - m_subframe_id = m__io->read_bits_int_be(3); - m_reserved = m__io->read_bits_int_be(2); -} - -gps_t::how_t::~how_t() { - _clean_up(); -} - -void gps_t::how_t::_clean_up() { -} - -gps_t::tlm_t::tlm_t(kaitai::kstream* p__io, gps_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void gps_t::tlm_t::_read() { - m_preamble = m__io->read_bytes(1); - if (!(preamble() == std::string("\x8B", 1))) { - throw kaitai::validation_not_equal_error(std::string("\x8B", 1), preamble(), _io(), std::string("/types/tlm/seq/0")); - } - m_tlm = m__io->read_bits_int_be(14); - m_integrity_status = m__io->read_bits_int_be(1); - m_reserved = m__io->read_bits_int_be(1); -} - -gps_t::tlm_t::~tlm_t() { - _clean_up(); -} - -void gps_t::tlm_t::_clean_up() { -} - -gps_t::subframe_2_t::subframe_2_t(kaitai::kstream* p__io, gps_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - - try { - _read(); - } catch(...) { - _clean_up(); - throw; - } -} - -void gps_t::subframe_2_t::_read() { - m_iode = m__io->read_u1(); - m_c_rs = m__io->read_s2be(); - m_delta_n = m__io->read_s2be(); - m_m_0 = m__io->read_s4be(); - m_c_uc = m__io->read_s2be(); - m_e = m__io->read_s4be(); - m_c_us = m__io->read_s2be(); - m_sqrt_a = m__io->read_u4be(); - m_t_oe = m__io->read_u2be(); - m_fit_interval_flag = m__io->read_bits_int_be(1); - m_aoda = m__io->read_bits_int_be(5); - m_reserved = m__io->read_bits_int_be(2); -} - -gps_t::subframe_2_t::~subframe_2_t() { - _clean_up(); -} - -void gps_t::subframe_2_t::_clean_up() { -} diff --git a/system/ubloxd/generated/gps.h b/system/ubloxd/generated/gps.h deleted file mode 100644 index 9dfc5031f5..0000000000 --- a/system/ubloxd/generated/gps.h +++ /dev/null @@ -1,359 +0,0 @@ -#ifndef GPS_H_ -#define GPS_H_ - -// This is a generated file! 
Please edit source .ksy file and use kaitai-struct-compiler to rebuild - -#include "kaitai/kaitaistruct.h" -#include - -#if KAITAI_STRUCT_VERSION < 9000L -#error "Incompatible Kaitai Struct C++/STL API: version 0.9 or later is required" -#endif - -class gps_t : public kaitai::kstruct { - -public: - class subframe_1_t; - class subframe_3_t; - class subframe_4_t; - class how_t; - class tlm_t; - class subframe_2_t; - - gps_t(kaitai::kstream* p__io, kaitai::kstruct* p__parent = 0, gps_t* p__root = 0); - -private: - void _read(); - void _clean_up(); - -public: - ~gps_t(); - - class subframe_1_t : public kaitai::kstruct { - - public: - - subframe_1_t(kaitai::kstream* p__io, gps_t* p__parent = 0, gps_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~subframe_1_t(); - - private: - bool f_af_0; - int32_t m_af_0; - - public: - int32_t af_0(); - - private: - uint64_t m_week_no; - uint64_t m_code; - uint64_t m_sv_accuracy; - uint64_t m_sv_health; - uint64_t m_iodc_msb; - bool m_l2_p_data_flag; - uint64_t m_reserved1; - uint64_t m_reserved2; - uint64_t m_reserved3; - uint64_t m_reserved4; - int8_t m_t_gd; - uint8_t m_iodc_lsb; - uint16_t m_t_oc; - int8_t m_af_2; - int16_t m_af_1; - bool m_af_0_sign; - uint64_t m_af_0_value; - uint64_t m_reserved5; - gps_t* m__root; - gps_t* m__parent; - - public: - uint64_t week_no() const { return m_week_no; } - uint64_t code() const { return m_code; } - uint64_t sv_accuracy() const { return m_sv_accuracy; } - uint64_t sv_health() const { return m_sv_health; } - uint64_t iodc_msb() const { return m_iodc_msb; } - bool l2_p_data_flag() const { return m_l2_p_data_flag; } - uint64_t reserved1() const { return m_reserved1; } - uint64_t reserved2() const { return m_reserved2; } - uint64_t reserved3() const { return m_reserved3; } - uint64_t reserved4() const { return m_reserved4; } - int8_t t_gd() const { return m_t_gd; } - uint8_t iodc_lsb() const { return m_iodc_lsb; } - uint16_t t_oc() const { return m_t_oc; } - 
int8_t af_2() const { return m_af_2; } - int16_t af_1() const { return m_af_1; } - bool af_0_sign() const { return m_af_0_sign; } - uint64_t af_0_value() const { return m_af_0_value; } - uint64_t reserved5() const { return m_reserved5; } - gps_t* _root() const { return m__root; } - gps_t* _parent() const { return m__parent; } - }; - - class subframe_3_t : public kaitai::kstruct { - - public: - - subframe_3_t(kaitai::kstream* p__io, gps_t* p__parent = 0, gps_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~subframe_3_t(); - - private: - bool f_omega_dot; - int32_t m_omega_dot; - - public: - int32_t omega_dot(); - - private: - bool f_idot; - int32_t m_idot; - - public: - int32_t idot(); - - private: - int16_t m_c_ic; - int32_t m_omega_0; - int16_t m_c_is; - int32_t m_i_0; - int16_t m_c_rc; - int32_t m_omega; - bool m_omega_dot_sign; - uint64_t m_omega_dot_value; - uint8_t m_iode; - bool m_idot_sign; - uint64_t m_idot_value; - uint64_t m_reserved; - gps_t* m__root; - gps_t* m__parent; - - public: - int16_t c_ic() const { return m_c_ic; } - int32_t omega_0() const { return m_omega_0; } - int16_t c_is() const { return m_c_is; } - int32_t i_0() const { return m_i_0; } - int16_t c_rc() const { return m_c_rc; } - int32_t omega() const { return m_omega; } - bool omega_dot_sign() const { return m_omega_dot_sign; } - uint64_t omega_dot_value() const { return m_omega_dot_value; } - uint8_t iode() const { return m_iode; } - bool idot_sign() const { return m_idot_sign; } - uint64_t idot_value() const { return m_idot_value; } - uint64_t reserved() const { return m_reserved; } - gps_t* _root() const { return m__root; } - gps_t* _parent() const { return m__parent; } - }; - - class subframe_4_t : public kaitai::kstruct { - - public: - class ionosphere_data_t; - - subframe_4_t(kaitai::kstream* p__io, gps_t* p__parent = 0, gps_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~subframe_4_t(); - - class ionosphere_data_t : 
public kaitai::kstruct { - - public: - - ionosphere_data_t(kaitai::kstream* p__io, gps_t::subframe_4_t* p__parent = 0, gps_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~ionosphere_data_t(); - - private: - int8_t m_a0; - int8_t m_a1; - int8_t m_a2; - int8_t m_a3; - int8_t m_b0; - int8_t m_b1; - int8_t m_b2; - int8_t m_b3; - gps_t* m__root; - gps_t::subframe_4_t* m__parent; - - public: - int8_t a0() const { return m_a0; } - int8_t a1() const { return m_a1; } - int8_t a2() const { return m_a2; } - int8_t a3() const { return m_a3; } - int8_t b0() const { return m_b0; } - int8_t b1() const { return m_b1; } - int8_t b2() const { return m_b2; } - int8_t b3() const { return m_b3; } - gps_t* _root() const { return m__root; } - gps_t::subframe_4_t* _parent() const { return m__parent; } - }; - - private: - uint64_t m_data_id; - uint64_t m_page_id; - ionosphere_data_t* m_body; - bool n_body; - - public: - bool _is_null_body() { body(); return n_body; }; - - private: - gps_t* m__root; - gps_t* m__parent; - - public: - uint64_t data_id() const { return m_data_id; } - uint64_t page_id() const { return m_page_id; } - ionosphere_data_t* body() const { return m_body; } - gps_t* _root() const { return m__root; } - gps_t* _parent() const { return m__parent; } - }; - - class how_t : public kaitai::kstruct { - - public: - - how_t(kaitai::kstream* p__io, gps_t* p__parent = 0, gps_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~how_t(); - - private: - uint64_t m_tow_count; - bool m_alert; - bool m_anti_spoof; - uint64_t m_subframe_id; - uint64_t m_reserved; - gps_t* m__root; - gps_t* m__parent; - - public: - uint64_t tow_count() const { return m_tow_count; } - bool alert() const { return m_alert; } - bool anti_spoof() const { return m_anti_spoof; } - uint64_t subframe_id() const { return m_subframe_id; } - uint64_t reserved() const { return m_reserved; } - gps_t* _root() const { return m__root; } - gps_t* _parent() const 
{ return m__parent; } - }; - - class tlm_t : public kaitai::kstruct { - - public: - - tlm_t(kaitai::kstream* p__io, gps_t* p__parent = 0, gps_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~tlm_t(); - - private: - std::string m_preamble; - uint64_t m_tlm; - bool m_integrity_status; - bool m_reserved; - gps_t* m__root; - gps_t* m__parent; - - public: - std::string preamble() const { return m_preamble; } - uint64_t tlm() const { return m_tlm; } - bool integrity_status() const { return m_integrity_status; } - bool reserved() const { return m_reserved; } - gps_t* _root() const { return m__root; } - gps_t* _parent() const { return m__parent; } - }; - - class subframe_2_t : public kaitai::kstruct { - - public: - - subframe_2_t(kaitai::kstream* p__io, gps_t* p__parent = 0, gps_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~subframe_2_t(); - - private: - uint8_t m_iode; - int16_t m_c_rs; - int16_t m_delta_n; - int32_t m_m_0; - int16_t m_c_uc; - int32_t m_e; - int16_t m_c_us; - uint32_t m_sqrt_a; - uint16_t m_t_oe; - bool m_fit_interval_flag; - uint64_t m_aoda; - uint64_t m_reserved; - gps_t* m__root; - gps_t* m__parent; - - public: - uint8_t iode() const { return m_iode; } - int16_t c_rs() const { return m_c_rs; } - int16_t delta_n() const { return m_delta_n; } - int32_t m_0() const { return m_m_0; } - int16_t c_uc() const { return m_c_uc; } - int32_t e() const { return m_e; } - int16_t c_us() const { return m_c_us; } - uint32_t sqrt_a() const { return m_sqrt_a; } - uint16_t t_oe() const { return m_t_oe; } - bool fit_interval_flag() const { return m_fit_interval_flag; } - uint64_t aoda() const { return m_aoda; } - uint64_t reserved() const { return m_reserved; } - gps_t* _root() const { return m__root; } - gps_t* _parent() const { return m__parent; } - }; - -private: - tlm_t* m_tlm; - how_t* m_how; - kaitai::kstruct* m_body; - bool n_body; - -public: - bool _is_null_body() { body(); return n_body; }; - 
-private: - gps_t* m__root; - kaitai::kstruct* m__parent; - -public: - tlm_t* tlm() const { return m_tlm; } - how_t* how() const { return m_how; } - kaitai::kstruct* body() const { return m_body; } - gps_t* _root() const { return m__root; } - kaitai::kstruct* _parent() const { return m__parent; } -}; - -#endif // GPS_H_ diff --git a/system/ubloxd/generated/gps.py b/system/ubloxd/generated/gps.py new file mode 100644 index 0000000000..a999016f3e --- /dev/null +++ b/system/ubloxd/generated/gps.py @@ -0,0 +1,193 @@ +# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild + +import kaitaistruct +from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO + + +if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9): + raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__)) + +class Gps(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.tlm = Gps.Tlm(self._io, self, self._root) + self.how = Gps.How(self._io, self, self._root) + _on = self.how.subframe_id + if _on == 1: + self.body = Gps.Subframe1(self._io, self, self._root) + elif _on == 2: + self.body = Gps.Subframe2(self._io, self, self._root) + elif _on == 3: + self.body = Gps.Subframe3(self._io, self, self._root) + elif _on == 4: + self.body = Gps.Subframe4(self._io, self, self._root) + + class Subframe1(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.week_no = self._io.read_bits_int_be(10) + self.code = self._io.read_bits_int_be(2) + self.sv_accuracy = self._io.read_bits_int_be(4) + self.sv_health = self._io.read_bits_int_be(6) + self.iodc_msb = self._io.read_bits_int_be(2) + self.l2_p_data_flag = 
self._io.read_bits_int_be(1) != 0 + self.reserved1 = self._io.read_bits_int_be(23) + self.reserved2 = self._io.read_bits_int_be(24) + self.reserved3 = self._io.read_bits_int_be(24) + self.reserved4 = self._io.read_bits_int_be(16) + self._io.align_to_byte() + self.t_gd = self._io.read_s1() + self.iodc_lsb = self._io.read_u1() + self.t_oc = self._io.read_u2be() + self.af_2 = self._io.read_s1() + self.af_1 = self._io.read_s2be() + self.af_0_sign = self._io.read_bits_int_be(1) != 0 + self.af_0_value = self._io.read_bits_int_be(21) + self.reserved5 = self._io.read_bits_int_be(2) + + @property + def af_0(self): + if hasattr(self, '_m_af_0'): + return self._m_af_0 + + self._m_af_0 = ((self.af_0_value - (1 << 21)) if self.af_0_sign else self.af_0_value) + return getattr(self, '_m_af_0', None) + + + class Subframe3(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.c_ic = self._io.read_s2be() + self.omega_0 = self._io.read_s4be() + self.c_is = self._io.read_s2be() + self.i_0 = self._io.read_s4be() + self.c_rc = self._io.read_s2be() + self.omega = self._io.read_s4be() + self.omega_dot_sign = self._io.read_bits_int_be(1) != 0 + self.omega_dot_value = self._io.read_bits_int_be(23) + self._io.align_to_byte() + self.iode = self._io.read_u1() + self.idot_sign = self._io.read_bits_int_be(1) != 0 + self.idot_value = self._io.read_bits_int_be(13) + self.reserved = self._io.read_bits_int_be(2) + + @property + def omega_dot(self): + if hasattr(self, '_m_omega_dot'): + return self._m_omega_dot + + self._m_omega_dot = ((self.omega_dot_value - (1 << 23)) if self.omega_dot_sign else self.omega_dot_value) + return getattr(self, '_m_omega_dot', None) + + @property + def idot(self): + if hasattr(self, '_m_idot'): + return self._m_idot + + self._m_idot = ((self.idot_value - (1 << 13)) if self.idot_sign else self.idot_value) + return getattr(self, 
'_m_idot', None) + + + class Subframe4(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.data_id = self._io.read_bits_int_be(2) + self.page_id = self._io.read_bits_int_be(6) + self._io.align_to_byte() + _on = self.page_id + if _on == 56: + self.body = Gps.Subframe4.IonosphereData(self._io, self, self._root) + + class IonosphereData(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.a0 = self._io.read_s1() + self.a1 = self._io.read_s1() + self.a2 = self._io.read_s1() + self.a3 = self._io.read_s1() + self.b0 = self._io.read_s1() + self.b1 = self._io.read_s1() + self.b2 = self._io.read_s1() + self.b3 = self._io.read_s1() + + + + class How(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.tow_count = self._io.read_bits_int_be(17) + self.alert = self._io.read_bits_int_be(1) != 0 + self.anti_spoof = self._io.read_bits_int_be(1) != 0 + self.subframe_id = self._io.read_bits_int_be(3) + self.reserved = self._io.read_bits_int_be(2) + + + class Tlm(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.preamble = self._io.read_bytes(1) + if not self.preamble == b"\x8B": + raise kaitaistruct.ValidationNotEqualError(b"\x8B", self.preamble, self._io, u"/types/tlm/seq/0") + self.tlm = self._io.read_bits_int_be(14) + self.integrity_status = self._io.read_bits_int_be(1) != 0 + self.reserved = self._io.read_bits_int_be(1) != 0 + + + class Subframe2(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = 
_io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.iode = self._io.read_u1() + self.c_rs = self._io.read_s2be() + self.delta_n = self._io.read_s2be() + self.m_0 = self._io.read_s4be() + self.c_uc = self._io.read_s2be() + self.e = self._io.read_s4be() + self.c_us = self._io.read_s2be() + self.sqrt_a = self._io.read_u4be() + self.t_oe = self._io.read_u2be() + self.fit_interval_flag = self._io.read_bits_int_be(1) != 0 + self.aoda = self._io.read_bits_int_be(5) + self.reserved = self._io.read_bits_int_be(2) + + + diff --git a/system/ubloxd/generated/ubx.cpp b/system/ubloxd/generated/ubx.cpp deleted file mode 100644 index 81b82ccafc..0000000000 --- a/system/ubloxd/generated/ubx.cpp +++ /dev/null @@ -1,424 +0,0 @@ -// This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild - -#include "ubx.h" -#include "kaitai/exceptions.h" - -ubx_t::ubx_t(kaitai::kstream* p__io, kaitai::kstruct* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = this; - f_checksum = false; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void ubx_t::_read() { - m_magic = m__io->read_bytes(2); - if (!(magic() == std::string("\xB5\x62", 2))) { - throw kaitai::validation_not_equal_error(std::string("\xB5\x62", 2), magic(), _io(), std::string("/seq/0")); - } - m_msg_type = m__io->read_u2be(); - m_length = m__io->read_u2le(); - n_body = true; - switch (msg_type()) { - case 2569: { - n_body = false; - m_body = new mon_hw_t(m__io, this, m__root); - break; - } - case 533: { - n_body = false; - m_body = new rxm_rawx_t(m__io, this, m__root); - break; - } - case 531: { - n_body = false; - m_body = new rxm_sfrbx_t(m__io, this, m__root); - break; - } - case 309: { - n_body = false; - m_body = new nav_sat_t(m__io, this, m__root); - break; - } - case 2571: { - n_body = false; - m_body = new mon_hw2_t(m__io, this, m__root); - break; - } - case 263: { - n_body = false; - m_body = new nav_pvt_t(m__io, this, m__root); - break; - } - } -} - -ubx_t::~ubx_t() { - _clean_up(); -} - -void ubx_t::_clean_up() { - if (!n_body) { - if (m_body) { - delete m_body; m_body = 0; - } - } - if (f_checksum) { - } -} - -ubx_t::rxm_rawx_t::rxm_rawx_t(kaitai::kstream* p__io, ubx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - m_meas = 0; - m__raw_meas = 0; - m__io__raw_meas = 0; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void ubx_t::rxm_rawx_t::_read() { - m_rcv_tow = m__io->read_f8le(); - m_week = m__io->read_u2le(); - m_leap_s = m__io->read_s1(); - m_num_meas = m__io->read_u1(); - m_rec_stat = m__io->read_u1(); - m_reserved1 = m__io->read_bytes(3); - m__raw_meas = new std::vector(); - m__io__raw_meas = new std::vector(); - m_meas = new std::vector(); - const int l_meas = num_meas(); - for (int i = 0; i < l_meas; i++) { - m__raw_meas->push_back(m__io->read_bytes(32)); - kaitai::kstream* io__raw_meas = new kaitai::kstream(m__raw_meas->at(m__raw_meas->size() - 1)); - m__io__raw_meas->push_back(io__raw_meas); - m_meas->push_back(new measurement_t(io__raw_meas, this, m__root)); - } -} - -ubx_t::rxm_rawx_t::~rxm_rawx_t() { - _clean_up(); -} - -void ubx_t::rxm_rawx_t::_clean_up() { - if (m__raw_meas) { - delete m__raw_meas; m__raw_meas = 0; - } - if (m__io__raw_meas) { - for (std::vector::iterator it = m__io__raw_meas->begin(); it != m__io__raw_meas->end(); ++it) { - delete *it; - } - delete m__io__raw_meas; m__io__raw_meas = 0; - } - if (m_meas) { - for (std::vector::iterator it = m_meas->begin(); it != m_meas->end(); ++it) { - delete *it; - } - delete m_meas; m_meas = 0; - } -} - -ubx_t::rxm_rawx_t::measurement_t::measurement_t(kaitai::kstream* p__io, ubx_t::rxm_rawx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void ubx_t::rxm_rawx_t::measurement_t::_read() { - m_pr_mes = m__io->read_f8le(); - m_cp_mes = m__io->read_f8le(); - m_do_mes = m__io->read_f4le(); - m_gnss_id = static_cast(m__io->read_u1()); - m_sv_id = m__io->read_u1(); - m_reserved2 = m__io->read_bytes(1); - m_freq_id = m__io->read_u1(); - m_lock_time = m__io->read_u2le(); - m_cno = m__io->read_u1(); - m_pr_stdev = m__io->read_u1(); - m_cp_stdev = m__io->read_u1(); - m_do_stdev = m__io->read_u1(); - m_trk_stat = m__io->read_u1(); - m_reserved3 = m__io->read_bytes(1); -} - -ubx_t::rxm_rawx_t::measurement_t::~measurement_t() { - _clean_up(); -} - -void ubx_t::rxm_rawx_t::measurement_t::_clean_up() { -} - -ubx_t::rxm_sfrbx_t::rxm_sfrbx_t(kaitai::kstream* p__io, ubx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - m_body = 0; - - try { - _read(); - } catch(...) { - _clean_up(); - throw; - } -} - -void ubx_t::rxm_sfrbx_t::_read() { - m_gnss_id = static_cast(m__io->read_u1()); - m_sv_id = m__io->read_u1(); - m_reserved1 = m__io->read_bytes(1); - m_freq_id = m__io->read_u1(); - m_num_words = m__io->read_u1(); - m_reserved2 = m__io->read_bytes(1); - m_version = m__io->read_u1(); - m_reserved3 = m__io->read_bytes(1); - m_body = new std::vector(); - const int l_body = num_words(); - for (int i = 0; i < l_body; i++) { - m_body->push_back(m__io->read_u4le()); - } -} - -ubx_t::rxm_sfrbx_t::~rxm_sfrbx_t() { - _clean_up(); -} - -void ubx_t::rxm_sfrbx_t::_clean_up() { - if (m_body) { - delete m_body; m_body = 0; - } -} - -ubx_t::nav_sat_t::nav_sat_t(kaitai::kstream* p__io, ubx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - m_svs = 0; - m__raw_svs = 0; - m__io__raw_svs = 0; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void ubx_t::nav_sat_t::_read() { - m_itow = m__io->read_u4le(); - m_version = m__io->read_u1(); - m_num_svs = m__io->read_u1(); - m_reserved = m__io->read_bytes(2); - m__raw_svs = new std::vector(); - m__io__raw_svs = new std::vector(); - m_svs = new std::vector(); - const int l_svs = num_svs(); - for (int i = 0; i < l_svs; i++) { - m__raw_svs->push_back(m__io->read_bytes(12)); - kaitai::kstream* io__raw_svs = new kaitai::kstream(m__raw_svs->at(m__raw_svs->size() - 1)); - m__io__raw_svs->push_back(io__raw_svs); - m_svs->push_back(new nav_t(io__raw_svs, this, m__root)); - } -} - -ubx_t::nav_sat_t::~nav_sat_t() { - _clean_up(); -} - -void ubx_t::nav_sat_t::_clean_up() { - if (m__raw_svs) { - delete m__raw_svs; m__raw_svs = 0; - } - if (m__io__raw_svs) { - for (std::vector::iterator it = m__io__raw_svs->begin(); it != m__io__raw_svs->end(); ++it) { - delete *it; - } - delete m__io__raw_svs; m__io__raw_svs = 0; - } - if (m_svs) { - for (std::vector::iterator it = m_svs->begin(); it != m_svs->end(); ++it) { - delete *it; - } - delete m_svs; m_svs = 0; - } -} - -ubx_t::nav_sat_t::nav_t::nav_t(kaitai::kstream* p__io, ubx_t::nav_sat_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - - try { - _read(); - } catch(...) { - _clean_up(); - throw; - } -} - -void ubx_t::nav_sat_t::nav_t::_read() { - m_gnss_id = static_cast(m__io->read_u1()); - m_sv_id = m__io->read_u1(); - m_cno = m__io->read_u1(); - m_elev = m__io->read_s1(); - m_azim = m__io->read_s2le(); - m_pr_res = m__io->read_s2le(); - m_flags = m__io->read_u4le(); -} - -ubx_t::nav_sat_t::nav_t::~nav_t() { - _clean_up(); -} - -void ubx_t::nav_sat_t::nav_t::_clean_up() { -} - -ubx_t::nav_pvt_t::nav_pvt_t(kaitai::kstream* p__io, ubx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void ubx_t::nav_pvt_t::_read() { - m_i_tow = m__io->read_u4le(); - m_year = m__io->read_u2le(); - m_month = m__io->read_u1(); - m_day = m__io->read_u1(); - m_hour = m__io->read_u1(); - m_min = m__io->read_u1(); - m_sec = m__io->read_u1(); - m_valid = m__io->read_u1(); - m_t_acc = m__io->read_u4le(); - m_nano = m__io->read_s4le(); - m_fix_type = m__io->read_u1(); - m_flags = m__io->read_u1(); - m_flags2 = m__io->read_u1(); - m_num_sv = m__io->read_u1(); - m_lon = m__io->read_s4le(); - m_lat = m__io->read_s4le(); - m_height = m__io->read_s4le(); - m_h_msl = m__io->read_s4le(); - m_h_acc = m__io->read_u4le(); - m_v_acc = m__io->read_u4le(); - m_vel_n = m__io->read_s4le(); - m_vel_e = m__io->read_s4le(); - m_vel_d = m__io->read_s4le(); - m_g_speed = m__io->read_s4le(); - m_head_mot = m__io->read_s4le(); - m_s_acc = m__io->read_s4le(); - m_head_acc = m__io->read_u4le(); - m_p_dop = m__io->read_u2le(); - m_flags3 = m__io->read_u1(); - m_reserved1 = m__io->read_bytes(5); - m_head_veh = m__io->read_s4le(); - m_mag_dec = m__io->read_s2le(); - m_mag_acc = m__io->read_u2le(); -} - -ubx_t::nav_pvt_t::~nav_pvt_t() { - _clean_up(); -} - -void ubx_t::nav_pvt_t::_clean_up() { -} - -ubx_t::mon_hw2_t::mon_hw2_t(kaitai::kstream* p__io, ubx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - - try { - _read(); - } catch(...) 
{ - _clean_up(); - throw; - } -} - -void ubx_t::mon_hw2_t::_read() { - m_ofs_i = m__io->read_s1(); - m_mag_i = m__io->read_u1(); - m_ofs_q = m__io->read_s1(); - m_mag_q = m__io->read_u1(); - m_cfg_source = static_cast(m__io->read_u1()); - m_reserved1 = m__io->read_bytes(3); - m_low_lev_cfg = m__io->read_u4le(); - m_reserved2 = m__io->read_bytes(8); - m_post_status = m__io->read_u4le(); - m_reserved3 = m__io->read_bytes(4); -} - -ubx_t::mon_hw2_t::~mon_hw2_t() { - _clean_up(); -} - -void ubx_t::mon_hw2_t::_clean_up() { -} - -ubx_t::mon_hw_t::mon_hw_t(kaitai::kstream* p__io, ubx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) { - m__parent = p__parent; - m__root = p__root; - - try { - _read(); - } catch(...) { - _clean_up(); - throw; - } -} - -void ubx_t::mon_hw_t::_read() { - m_pin_sel = m__io->read_u4le(); - m_pin_bank = m__io->read_u4le(); - m_pin_dir = m__io->read_u4le(); - m_pin_val = m__io->read_u4le(); - m_noise_per_ms = m__io->read_u2le(); - m_agc_cnt = m__io->read_u2le(); - m_a_status = static_cast(m__io->read_u1()); - m_a_power = static_cast(m__io->read_u1()); - m_flags = m__io->read_u1(); - m_reserved1 = m__io->read_bytes(1); - m_used_mask = m__io->read_u4le(); - m_vp = m__io->read_bytes(17); - m_jam_ind = m__io->read_u1(); - m_reserved2 = m__io->read_bytes(2); - m_pin_irq = m__io->read_u4le(); - m_pull_h = m__io->read_u4le(); - m_pull_l = m__io->read_u4le(); -} - -ubx_t::mon_hw_t::~mon_hw_t() { - _clean_up(); -} - -void ubx_t::mon_hw_t::_clean_up() { -} - -uint16_t ubx_t::checksum() { - if (f_checksum) - return m_checksum; - std::streampos _pos = m__io->pos(); - m__io->seek((length() + 6)); - m_checksum = m__io->read_u2le(); - m__io->seek(_pos); - f_checksum = true; - return m_checksum; -} diff --git a/system/ubloxd/generated/ubx.h b/system/ubloxd/generated/ubx.h deleted file mode 100644 index 022108489f..0000000000 --- a/system/ubloxd/generated/ubx.h +++ /dev/null @@ -1,484 +0,0 @@ -#ifndef UBX_H_ -#define UBX_H_ - -// This is a generated file! 
Please edit source .ksy file and use kaitai-struct-compiler to rebuild - -#include "kaitai/kaitaistruct.h" -#include -#include - -#if KAITAI_STRUCT_VERSION < 9000L -#error "Incompatible Kaitai Struct C++/STL API: version 0.9 or later is required" -#endif - -class ubx_t : public kaitai::kstruct { - -public: - class rxm_rawx_t; - class rxm_sfrbx_t; - class nav_sat_t; - class nav_pvt_t; - class mon_hw2_t; - class mon_hw_t; - - enum gnss_type_t { - GNSS_TYPE_GPS = 0, - GNSS_TYPE_SBAS = 1, - GNSS_TYPE_GALILEO = 2, - GNSS_TYPE_BEIDOU = 3, - GNSS_TYPE_IMES = 4, - GNSS_TYPE_QZSS = 5, - GNSS_TYPE_GLONASS = 6 - }; - - ubx_t(kaitai::kstream* p__io, kaitai::kstruct* p__parent = 0, ubx_t* p__root = 0); - -private: - void _read(); - void _clean_up(); - -public: - ~ubx_t(); - - class rxm_rawx_t : public kaitai::kstruct { - - public: - class measurement_t; - - rxm_rawx_t(kaitai::kstream* p__io, ubx_t* p__parent = 0, ubx_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~rxm_rawx_t(); - - class measurement_t : public kaitai::kstruct { - - public: - - measurement_t(kaitai::kstream* p__io, ubx_t::rxm_rawx_t* p__parent = 0, ubx_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~measurement_t(); - - private: - double m_pr_mes; - double m_cp_mes; - float m_do_mes; - gnss_type_t m_gnss_id; - uint8_t m_sv_id; - std::string m_reserved2; - uint8_t m_freq_id; - uint16_t m_lock_time; - uint8_t m_cno; - uint8_t m_pr_stdev; - uint8_t m_cp_stdev; - uint8_t m_do_stdev; - uint8_t m_trk_stat; - std::string m_reserved3; - ubx_t* m__root; - ubx_t::rxm_rawx_t* m__parent; - - public: - double pr_mes() const { return m_pr_mes; } - double cp_mes() const { return m_cp_mes; } - float do_mes() const { return m_do_mes; } - gnss_type_t gnss_id() const { return m_gnss_id; } - uint8_t sv_id() const { return m_sv_id; } - std::string reserved2() const { return m_reserved2; } - uint8_t freq_id() const { return m_freq_id; } - uint16_t lock_time() const { 
return m_lock_time; } - uint8_t cno() const { return m_cno; } - uint8_t pr_stdev() const { return m_pr_stdev; } - uint8_t cp_stdev() const { return m_cp_stdev; } - uint8_t do_stdev() const { return m_do_stdev; } - uint8_t trk_stat() const { return m_trk_stat; } - std::string reserved3() const { return m_reserved3; } - ubx_t* _root() const { return m__root; } - ubx_t::rxm_rawx_t* _parent() const { return m__parent; } - }; - - private: - double m_rcv_tow; - uint16_t m_week; - int8_t m_leap_s; - uint8_t m_num_meas; - uint8_t m_rec_stat; - std::string m_reserved1; - std::vector* m_meas; - ubx_t* m__root; - ubx_t* m__parent; - std::vector* m__raw_meas; - std::vector* m__io__raw_meas; - - public: - double rcv_tow() const { return m_rcv_tow; } - uint16_t week() const { return m_week; } - int8_t leap_s() const { return m_leap_s; } - uint8_t num_meas() const { return m_num_meas; } - uint8_t rec_stat() const { return m_rec_stat; } - std::string reserved1() const { return m_reserved1; } - std::vector* meas() const { return m_meas; } - ubx_t* _root() const { return m__root; } - ubx_t* _parent() const { return m__parent; } - std::vector* _raw_meas() const { return m__raw_meas; } - std::vector* _io__raw_meas() const { return m__io__raw_meas; } - }; - - class rxm_sfrbx_t : public kaitai::kstruct { - - public: - - rxm_sfrbx_t(kaitai::kstream* p__io, ubx_t* p__parent = 0, ubx_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~rxm_sfrbx_t(); - - private: - gnss_type_t m_gnss_id; - uint8_t m_sv_id; - std::string m_reserved1; - uint8_t m_freq_id; - uint8_t m_num_words; - std::string m_reserved2; - uint8_t m_version; - std::string m_reserved3; - std::vector* m_body; - ubx_t* m__root; - ubx_t* m__parent; - - public: - gnss_type_t gnss_id() const { return m_gnss_id; } - uint8_t sv_id() const { return m_sv_id; } - std::string reserved1() const { return m_reserved1; } - uint8_t freq_id() const { return m_freq_id; } - uint8_t num_words() const { return 
m_num_words; } - std::string reserved2() const { return m_reserved2; } - uint8_t version() const { return m_version; } - std::string reserved3() const { return m_reserved3; } - std::vector* body() const { return m_body; } - ubx_t* _root() const { return m__root; } - ubx_t* _parent() const { return m__parent; } - }; - - class nav_sat_t : public kaitai::kstruct { - - public: - class nav_t; - - nav_sat_t(kaitai::kstream* p__io, ubx_t* p__parent = 0, ubx_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~nav_sat_t(); - - class nav_t : public kaitai::kstruct { - - public: - - nav_t(kaitai::kstream* p__io, ubx_t::nav_sat_t* p__parent = 0, ubx_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~nav_t(); - - private: - gnss_type_t m_gnss_id; - uint8_t m_sv_id; - uint8_t m_cno; - int8_t m_elev; - int16_t m_azim; - int16_t m_pr_res; - uint32_t m_flags; - ubx_t* m__root; - ubx_t::nav_sat_t* m__parent; - - public: - gnss_type_t gnss_id() const { return m_gnss_id; } - uint8_t sv_id() const { return m_sv_id; } - uint8_t cno() const { return m_cno; } - int8_t elev() const { return m_elev; } - int16_t azim() const { return m_azim; } - int16_t pr_res() const { return m_pr_res; } - uint32_t flags() const { return m_flags; } - ubx_t* _root() const { return m__root; } - ubx_t::nav_sat_t* _parent() const { return m__parent; } - }; - - private: - uint32_t m_itow; - uint8_t m_version; - uint8_t m_num_svs; - std::string m_reserved; - std::vector* m_svs; - ubx_t* m__root; - ubx_t* m__parent; - std::vector* m__raw_svs; - std::vector* m__io__raw_svs; - - public: - uint32_t itow() const { return m_itow; } - uint8_t version() const { return m_version; } - uint8_t num_svs() const { return m_num_svs; } - std::string reserved() const { return m_reserved; } - std::vector* svs() const { return m_svs; } - ubx_t* _root() const { return m__root; } - ubx_t* _parent() const { return m__parent; } - std::vector* _raw_svs() const { return 
m__raw_svs; } - std::vector* _io__raw_svs() const { return m__io__raw_svs; } - }; - - class nav_pvt_t : public kaitai::kstruct { - - public: - - nav_pvt_t(kaitai::kstream* p__io, ubx_t* p__parent = 0, ubx_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~nav_pvt_t(); - - private: - uint32_t m_i_tow; - uint16_t m_year; - uint8_t m_month; - uint8_t m_day; - uint8_t m_hour; - uint8_t m_min; - uint8_t m_sec; - uint8_t m_valid; - uint32_t m_t_acc; - int32_t m_nano; - uint8_t m_fix_type; - uint8_t m_flags; - uint8_t m_flags2; - uint8_t m_num_sv; - int32_t m_lon; - int32_t m_lat; - int32_t m_height; - int32_t m_h_msl; - uint32_t m_h_acc; - uint32_t m_v_acc; - int32_t m_vel_n; - int32_t m_vel_e; - int32_t m_vel_d; - int32_t m_g_speed; - int32_t m_head_mot; - int32_t m_s_acc; - uint32_t m_head_acc; - uint16_t m_p_dop; - uint8_t m_flags3; - std::string m_reserved1; - int32_t m_head_veh; - int16_t m_mag_dec; - uint16_t m_mag_acc; - ubx_t* m__root; - ubx_t* m__parent; - - public: - uint32_t i_tow() const { return m_i_tow; } - uint16_t year() const { return m_year; } - uint8_t month() const { return m_month; } - uint8_t day() const { return m_day; } - uint8_t hour() const { return m_hour; } - uint8_t min() const { return m_min; } - uint8_t sec() const { return m_sec; } - uint8_t valid() const { return m_valid; } - uint32_t t_acc() const { return m_t_acc; } - int32_t nano() const { return m_nano; } - uint8_t fix_type() const { return m_fix_type; } - uint8_t flags() const { return m_flags; } - uint8_t flags2() const { return m_flags2; } - uint8_t num_sv() const { return m_num_sv; } - int32_t lon() const { return m_lon; } - int32_t lat() const { return m_lat; } - int32_t height() const { return m_height; } - int32_t h_msl() const { return m_h_msl; } - uint32_t h_acc() const { return m_h_acc; } - uint32_t v_acc() const { return m_v_acc; } - int32_t vel_n() const { return m_vel_n; } - int32_t vel_e() const { return m_vel_e; } - int32_t vel_d() const { 
return m_vel_d; } - int32_t g_speed() const { return m_g_speed; } - int32_t head_mot() const { return m_head_mot; } - int32_t s_acc() const { return m_s_acc; } - uint32_t head_acc() const { return m_head_acc; } - uint16_t p_dop() const { return m_p_dop; } - uint8_t flags3() const { return m_flags3; } - std::string reserved1() const { return m_reserved1; } - int32_t head_veh() const { return m_head_veh; } - int16_t mag_dec() const { return m_mag_dec; } - uint16_t mag_acc() const { return m_mag_acc; } - ubx_t* _root() const { return m__root; } - ubx_t* _parent() const { return m__parent; } - }; - - class mon_hw2_t : public kaitai::kstruct { - - public: - - enum config_source_t { - CONFIG_SOURCE_FLASH = 102, - CONFIG_SOURCE_OTP = 111, - CONFIG_SOURCE_CONFIG_PINS = 112, - CONFIG_SOURCE_ROM = 113 - }; - - mon_hw2_t(kaitai::kstream* p__io, ubx_t* p__parent = 0, ubx_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~mon_hw2_t(); - - private: - int8_t m_ofs_i; - uint8_t m_mag_i; - int8_t m_ofs_q; - uint8_t m_mag_q; - config_source_t m_cfg_source; - std::string m_reserved1; - uint32_t m_low_lev_cfg; - std::string m_reserved2; - uint32_t m_post_status; - std::string m_reserved3; - ubx_t* m__root; - ubx_t* m__parent; - - public: - int8_t ofs_i() const { return m_ofs_i; } - uint8_t mag_i() const { return m_mag_i; } - int8_t ofs_q() const { return m_ofs_q; } - uint8_t mag_q() const { return m_mag_q; } - config_source_t cfg_source() const { return m_cfg_source; } - std::string reserved1() const { return m_reserved1; } - uint32_t low_lev_cfg() const { return m_low_lev_cfg; } - std::string reserved2() const { return m_reserved2; } - uint32_t post_status() const { return m_post_status; } - std::string reserved3() const { return m_reserved3; } - ubx_t* _root() const { return m__root; } - ubx_t* _parent() const { return m__parent; } - }; - - class mon_hw_t : public kaitai::kstruct { - - public: - - enum antenna_status_t { - ANTENNA_STATUS_INIT = 0, - 
ANTENNA_STATUS_DONTKNOW = 1, - ANTENNA_STATUS_OK = 2, - ANTENNA_STATUS_SHORT = 3, - ANTENNA_STATUS_OPEN = 4 - }; - - enum antenna_power_t { - ANTENNA_POWER_FALSE = 0, - ANTENNA_POWER_TRUE = 1, - ANTENNA_POWER_DONTKNOW = 2 - }; - - mon_hw_t(kaitai::kstream* p__io, ubx_t* p__parent = 0, ubx_t* p__root = 0); - - private: - void _read(); - void _clean_up(); - - public: - ~mon_hw_t(); - - private: - uint32_t m_pin_sel; - uint32_t m_pin_bank; - uint32_t m_pin_dir; - uint32_t m_pin_val; - uint16_t m_noise_per_ms; - uint16_t m_agc_cnt; - antenna_status_t m_a_status; - antenna_power_t m_a_power; - uint8_t m_flags; - std::string m_reserved1; - uint32_t m_used_mask; - std::string m_vp; - uint8_t m_jam_ind; - std::string m_reserved2; - uint32_t m_pin_irq; - uint32_t m_pull_h; - uint32_t m_pull_l; - ubx_t* m__root; - ubx_t* m__parent; - - public: - uint32_t pin_sel() const { return m_pin_sel; } - uint32_t pin_bank() const { return m_pin_bank; } - uint32_t pin_dir() const { return m_pin_dir; } - uint32_t pin_val() const { return m_pin_val; } - uint16_t noise_per_ms() const { return m_noise_per_ms; } - uint16_t agc_cnt() const { return m_agc_cnt; } - antenna_status_t a_status() const { return m_a_status; } - antenna_power_t a_power() const { return m_a_power; } - uint8_t flags() const { return m_flags; } - std::string reserved1() const { return m_reserved1; } - uint32_t used_mask() const { return m_used_mask; } - std::string vp() const { return m_vp; } - uint8_t jam_ind() const { return m_jam_ind; } - std::string reserved2() const { return m_reserved2; } - uint32_t pin_irq() const { return m_pin_irq; } - uint32_t pull_h() const { return m_pull_h; } - uint32_t pull_l() const { return m_pull_l; } - ubx_t* _root() const { return m__root; } - ubx_t* _parent() const { return m__parent; } - }; - -private: - bool f_checksum; - uint16_t m_checksum; - -public: - uint16_t checksum(); - -private: - std::string m_magic; - uint16_t m_msg_type; - uint16_t m_length; - kaitai::kstruct* m_body; - 
bool n_body; - -public: - bool _is_null_body() { body(); return n_body; }; - -private: - ubx_t* m__root; - kaitai::kstruct* m__parent; - -public: - std::string magic() const { return m_magic; } - uint16_t msg_type() const { return m_msg_type; } - uint16_t length() const { return m_length; } - kaitai::kstruct* body() const { return m_body; } - ubx_t* _root() const { return m__root; } - kaitai::kstruct* _parent() const { return m__parent; } -}; - -#endif // UBX_H_ diff --git a/system/ubloxd/generated/ubx.py b/system/ubloxd/generated/ubx.py new file mode 100644 index 0000000000..9946584388 --- /dev/null +++ b/system/ubloxd/generated/ubx.py @@ -0,0 +1,273 @@ +# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild + +import kaitaistruct +from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO +from enum import Enum + + +if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9): + raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__)) + +class Ubx(KaitaiStruct): + + class GnssType(Enum): + gps = 0 + sbas = 1 + galileo = 2 + beidou = 3 + imes = 4 + qzss = 5 + glonass = 6 + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.magic = self._io.read_bytes(2) + if not self.magic == b"\xB5\x62": + raise kaitaistruct.ValidationNotEqualError(b"\xB5\x62", self.magic, self._io, u"/seq/0") + self.msg_type = self._io.read_u2be() + self.length = self._io.read_u2le() + _on = self.msg_type + if _on == 2569: + self.body = Ubx.MonHw(self._io, self, self._root) + elif _on == 533: + self.body = Ubx.RxmRawx(self._io, self, self._root) + elif _on == 531: + self.body = Ubx.RxmSfrbx(self._io, self, self._root) + elif _on == 309: + self.body = Ubx.NavSat(self._io, self, self._root) + elif _on == 2571: + self.body = Ubx.MonHw2(self._io, self, self._root) 
+ elif _on == 263: + self.body = Ubx.NavPvt(self._io, self, self._root) + + class RxmRawx(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.rcv_tow = self._io.read_f8le() + self.week = self._io.read_u2le() + self.leap_s = self._io.read_s1() + self.num_meas = self._io.read_u1() + self.rec_stat = self._io.read_u1() + self.reserved1 = self._io.read_bytes(3) + self._raw_meas = [] + self.meas = [] + for i in range(self.num_meas): + self._raw_meas.append(self._io.read_bytes(32)) + _io__raw_meas = KaitaiStream(BytesIO(self._raw_meas[i])) + self.meas.append(Ubx.RxmRawx.Measurement(_io__raw_meas, self, self._root)) + + + class Measurement(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.pr_mes = self._io.read_f8le() + self.cp_mes = self._io.read_f8le() + self.do_mes = self._io.read_f4le() + self.gnss_id = KaitaiStream.resolve_enum(Ubx.GnssType, self._io.read_u1()) + self.sv_id = self._io.read_u1() + self.reserved2 = self._io.read_bytes(1) + self.freq_id = self._io.read_u1() + self.lock_time = self._io.read_u2le() + self.cno = self._io.read_u1() + self.pr_stdev = self._io.read_u1() + self.cp_stdev = self._io.read_u1() + self.do_stdev = self._io.read_u1() + self.trk_stat = self._io.read_u1() + self.reserved3 = self._io.read_bytes(1) + + + + class RxmSfrbx(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.gnss_id = KaitaiStream.resolve_enum(Ubx.GnssType, self._io.read_u1()) + self.sv_id = self._io.read_u1() + self.reserved1 = self._io.read_bytes(1) + self.freq_id = self._io.read_u1() + self.num_words = self._io.read_u1() + self.reserved2 = 
self._io.read_bytes(1) + self.version = self._io.read_u1() + self.reserved3 = self._io.read_bytes(1) + self.body = [] + for i in range(self.num_words): + self.body.append(self._io.read_u4le()) + + + + class NavSat(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.itow = self._io.read_u4le() + self.version = self._io.read_u1() + self.num_svs = self._io.read_u1() + self.reserved = self._io.read_bytes(2) + self._raw_svs = [] + self.svs = [] + for i in range(self.num_svs): + self._raw_svs.append(self._io.read_bytes(12)) + _io__raw_svs = KaitaiStream(BytesIO(self._raw_svs[i])) + self.svs.append(Ubx.NavSat.Nav(_io__raw_svs, self, self._root)) + + + class Nav(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.gnss_id = KaitaiStream.resolve_enum(Ubx.GnssType, self._io.read_u1()) + self.sv_id = self._io.read_u1() + self.cno = self._io.read_u1() + self.elev = self._io.read_s1() + self.azim = self._io.read_s2le() + self.pr_res = self._io.read_s2le() + self.flags = self._io.read_u4le() + + + + class NavPvt(KaitaiStruct): + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.i_tow = self._io.read_u4le() + self.year = self._io.read_u2le() + self.month = self._io.read_u1() + self.day = self._io.read_u1() + self.hour = self._io.read_u1() + self.min = self._io.read_u1() + self.sec = self._io.read_u1() + self.valid = self._io.read_u1() + self.t_acc = self._io.read_u4le() + self.nano = self._io.read_s4le() + self.fix_type = self._io.read_u1() + self.flags = self._io.read_u1() + self.flags2 = self._io.read_u1() + self.num_sv = self._io.read_u1() + self.lon = self._io.read_s4le() + 
self.lat = self._io.read_s4le() + self.height = self._io.read_s4le() + self.h_msl = self._io.read_s4le() + self.h_acc = self._io.read_u4le() + self.v_acc = self._io.read_u4le() + self.vel_n = self._io.read_s4le() + self.vel_e = self._io.read_s4le() + self.vel_d = self._io.read_s4le() + self.g_speed = self._io.read_s4le() + self.head_mot = self._io.read_s4le() + self.s_acc = self._io.read_s4le() + self.head_acc = self._io.read_u4le() + self.p_dop = self._io.read_u2le() + self.flags3 = self._io.read_u1() + self.reserved1 = self._io.read_bytes(5) + self.head_veh = self._io.read_s4le() + self.mag_dec = self._io.read_s2le() + self.mag_acc = self._io.read_u2le() + + + class MonHw2(KaitaiStruct): + + class ConfigSource(Enum): + flash = 102 + otp = 111 + config_pins = 112 + rom = 113 + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.ofs_i = self._io.read_s1() + self.mag_i = self._io.read_u1() + self.ofs_q = self._io.read_s1() + self.mag_q = self._io.read_u1() + self.cfg_source = KaitaiStream.resolve_enum(Ubx.MonHw2.ConfigSource, self._io.read_u1()) + self.reserved1 = self._io.read_bytes(3) + self.low_lev_cfg = self._io.read_u4le() + self.reserved2 = self._io.read_bytes(8) + self.post_status = self._io.read_u4le() + self.reserved3 = self._io.read_bytes(4) + + + class MonHw(KaitaiStruct): + + class AntennaStatus(Enum): + init = 0 + dontknow = 1 + ok = 2 + short = 3 + open = 4 + + class AntennaPower(Enum): + false = 0 + true = 1 + dontknow = 2 + def __init__(self, _io, _parent=None, _root=None): + self._io = _io + self._parent = _parent + self._root = _root if _root else self + self._read() + + def _read(self): + self.pin_sel = self._io.read_u4le() + self.pin_bank = self._io.read_u4le() + self.pin_dir = self._io.read_u4le() + self.pin_val = self._io.read_u4le() + self.noise_per_ms = self._io.read_u2le() + self.agc_cnt = self._io.read_u2le() + 
self.a_status = KaitaiStream.resolve_enum(Ubx.MonHw.AntennaStatus, self._io.read_u1()) + self.a_power = KaitaiStream.resolve_enum(Ubx.MonHw.AntennaPower, self._io.read_u1()) + self.flags = self._io.read_u1() + self.reserved1 = self._io.read_bytes(1) + self.used_mask = self._io.read_u4le() + self.vp = self._io.read_bytes(17) + self.jam_ind = self._io.read_u1() + self.reserved2 = self._io.read_bytes(2) + self.pin_irq = self._io.read_u4le() + self.pull_h = self._io.read_u4le() + self.pull_l = self._io.read_u4le() + + + @property + def checksum(self): + if hasattr(self, '_m_checksum'): + return self._m_checksum + + _pos = self._io.pos() + self._io.seek((self.length + 6)) + self._m_checksum = self._io.read_u2le() + self._io.seek(_pos) + return getattr(self, '_m_checksum', None) + + diff --git a/system/ubloxd/glonass_fix.patch b/system/ubloxd/glonass_fix.patch deleted file mode 100644 index 7eb973a348..0000000000 --- a/system/ubloxd/glonass_fix.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/system/ubloxd/generated/glonass.cpp b/system/ubloxd/generated/glonass.cpp -index 5b17bc327..b5c6aa610 100644 ---- a/system/ubloxd/generated/glonass.cpp -+++ b/system/ubloxd/generated/glonass.cpp -@@ -17,7 +17,7 @@ glonass_t::glonass_t(kaitai::kstream* p__io, kaitai::kstruct* p__parent, glonass - void glonass_t::_read() { - m_idle_chip = m__io->read_bits_int_be(1); - m_string_number = m__io->read_bits_int_be(4); -- m__io->align_to_byte(); -+ //m__io->align_to_byte(); - switch (string_number()) { - case 4: { - m_data = new string_4_t(m__io, this, m__root); diff --git a/system/ubloxd/tests/print_gps_stats.py b/system/ubloxd/tests/print_gps_stats.py deleted file mode 100755 index 8d190f9ec1..0000000000 --- a/system/ubloxd/tests/print_gps_stats.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python3 -import time -import cereal.messaging as messaging - -if __name__ == "__main__": - sm = messaging.SubMaster(['ubloxGnss', 'gpsLocationExternal']) - - while 1: - ug = sm['ubloxGnss'] - gle 
= sm['gpsLocationExternal'] - - try: - cnos = [] - for m in ug.measurementReport.measurements: - cnos.append(m.cno) - print(f"Sats: {ug.measurementReport.numMeas} Accuracy: {gle.horizontalAccuracy:.2f} m cnos", sorted(cnos)) - except Exception: - pass - sm.update() - time.sleep(0.1) diff --git a/system/ubloxd/tests/test_glonass_kaitai.cc b/system/ubloxd/tests/test_glonass_kaitai.cc deleted file mode 100644 index 96f43742b4..0000000000 --- a/system/ubloxd/tests/test_glonass_kaitai.cc +++ /dev/null @@ -1,360 +0,0 @@ -#include -#include -#include -#include -#include -#include - -#include "catch2/catch.hpp" -#include "system/ubloxd/generated/glonass.h" - -typedef std::vector> string_data; - -#define IDLE_CHIP_IDX 0 -#define STRING_NUMBER_IDX 1 -// string data 1-5 -#define HC_IDX 0 -#define PAD1_IDX 1 -#define SUPERFRAME_IDX 2 -#define PAD2_IDX 3 -#define FRAME_IDX 4 - -// Indexes for string number 1 -#define ST1_NU_IDX 2 -#define ST1_P1_IDX 3 -#define ST1_T_K_IDX 4 -#define ST1_X_VEL_S_IDX 5 -#define ST1_X_VEL_V_IDX 6 -#define ST1_X_ACCEL_S_IDX 7 -#define ST1_X_ACCEL_V_IDX 8 -#define ST1_X_S_IDX 9 -#define ST1_X_V_IDX 10 -#define ST1_HC_OFF 11 - -// Indexes for string number 2 -#define ST2_BN_IDX 2 -#define ST2_P2_IDX 3 -#define ST2_TB_IDX 4 -#define ST2_NU_IDX 5 -#define ST2_Y_VEL_S_IDX 6 -#define ST2_Y_VEL_V_IDX 7 -#define ST2_Y_ACCEL_S_IDX 8 -#define ST2_Y_ACCEL_V_IDX 9 -#define ST2_Y_S_IDX 10 -#define ST2_Y_V_IDX 11 -#define ST2_HC_OFF 12 - -// Indexes for string number 3 -#define ST3_P3_IDX 2 -#define ST3_GAMMA_N_S_IDX 3 -#define ST3_GAMMA_N_V_IDX 4 -#define ST3_NU_1_IDX 5 -#define ST3_P_IDX 6 -#define ST3_L_N_IDX 7 -#define ST3_Z_VEL_S_IDX 8 -#define ST3_Z_VEL_V_IDX 9 -#define ST3_Z_ACCEL_S_IDX 10 -#define ST3_Z_ACCEL_V_IDX 11 -#define ST3_Z_S_IDX 12 -#define ST3_Z_V_IDX 13 -#define ST3_HC_OFF 14 - -// Indexes for string number 4 -#define ST4_TAU_N_S_IDX 2 -#define ST4_TAU_N_V_IDX 3 -#define ST4_DELTA_TAU_N_S_IDX 4 -#define ST4_DELTA_TAU_N_V_IDX 5 -#define 
ST4_E_N_IDX 6 -#define ST4_NU_1_IDX 7 -#define ST4_P4_IDX 8 -#define ST4_F_T_IDX 9 -#define ST4_NU_2_IDX 10 -#define ST4_N_T_IDX 11 -#define ST4_N_IDX 12 -#define ST4_M_IDX 13 -#define ST4_HC_OFF 14 - -// Indexes for string number 5 -#define ST5_N_A_IDX 2 -#define ST5_TAU_C_IDX 3 -#define ST5_NU_IDX 4 -#define ST5_N_4_IDX 5 -#define ST5_TAU_GPS_IDX 6 -#define ST5_L_N_IDX 7 -#define ST5_HC_OFF 8 - -// Indexes for non immediate -#define ST6_DATA_1_IDX 2 -#define ST6_DATA_2_IDX 3 -#define ST6_HC_OFF 4 - - -std::string generate_inp_data(string_data& data) { - std::string inp_data = ""; - for (auto& [b, v] : data) { - std::string tmp = std::bitset<64>(v).to_string(); - inp_data += tmp.substr(64-b, b); - } - assert(inp_data.size() == 128); - - std::string string_data; - string_data.reserve(16); - for (int i = 0; i < 128; i+=8) { - std::string substr = inp_data.substr(i, 8); - string_data.push_back((uint8_t)std::stoi(substr.c_str(), 0, 2)); - } - - return string_data; -} - -string_data generate_string_data(uint8_t string_number) { - - srand((unsigned)time(0)); - string_data data; // - data.push_back({1, 0}); // idle chip - data.push_back({4, string_number}); // string number - - if (string_number == 1) { - data.push_back({2, 3}); // not_used - data.push_back({2, 1}); // p1 - data.push_back({12, 113}); // t_k - data.push_back({1, rand() & 1}); // x_vel_sign - data.push_back({23, 7122}); // x_vel_value - data.push_back({1, rand() & 1}); // x_accel_sign - data.push_back({4, 3}); // x_accel_value - data.push_back({1, rand() & 1}); // x_sign - data.push_back({26, 33554431}); // x_value - } else if (string_number == 2) { - data.push_back({3, 3}); // b_n - data.push_back({1, 1}); // p2 - data.push_back({7, 123}); // t_b - data.push_back({5, 31}); // not_used - data.push_back({1, rand() & 1}); // y_vel_sign - data.push_back({23, 7422}); // y_vel_value - data.push_back({1, rand() & 1}); // y_accel_sign - data.push_back({4, 3}); // y_accel_value - data.push_back({1, rand() & 1}); 
// y_sign - data.push_back({26, 67108863}); // y_value - } else if (string_number == 3) { - data.push_back({1, 0}); // p3 - data.push_back({1, 1}); // gamma_n_sign - data.push_back({10, 123}); // gamma_n_value - data.push_back({1, 0}); // not_used - data.push_back({2, 2}); // p - data.push_back({1, 1}); // l_n - data.push_back({1, rand() & 1}); // z_vel_sign - data.push_back({23, 1337}); // z_vel_value - data.push_back({1, rand() & 1}); // z_accel_sign - data.push_back({4, 9}); // z_accel_value - data.push_back({1, rand() & 1}); // z_sign - data.push_back({26, 100023}); // z_value - } else if (string_number == 4) { - data.push_back({1, rand() & 1}); // tau_n_sign - data.push_back({21, 197152}); // tau_n_value - data.push_back({1, rand() & 1}); // delta_tau_n_sign - data.push_back({4, 4}); // delta_tau_n_value - data.push_back({5, 0}); // e_n - data.push_back({14, 2}); // not_used_1 - data.push_back({1, 1}); // p4 - data.push_back({4, 9}); // f_t - data.push_back({3, 3}); // not_used_2 - data.push_back({11, 2047}); // n_t - data.push_back({5, 2}); // n - data.push_back({2, 1}); // m - } else if (string_number == 5) { - data.push_back({11, 2047}); // n_a - data.push_back({32, 4294767295}); // tau_c - data.push_back({1, 0}); // not_used_1 - data.push_back({5, 2}); // n_4 - data.push_back({22, 4114304}); // tau_gps - data.push_back({1, 0}); // l_n - } else { // non-immediate data is not parsed - data.push_back({64, rand()}); // data_1 - data.push_back({8, 6}); // data_2 - } - - data.push_back({8, rand() & 0xFF}); // hamming code - data.push_back({11, rand() & 0x7FF}); // pad - data.push_back({16, rand() & 0xFFFF}); // superframe - data.push_back({8, rand() & 0xFF}); // pad - data.push_back({8, rand() & 0xFF}); // frame - return data; -} - -TEST_CASE("parse_string_number_1"){ - string_data data = generate_string_data(1); - std::string inp_data = generate_inp_data(data); - - kaitai::kstream stream(inp_data); - glonass_t gl_string(&stream); - - 
REQUIRE(gl_string.idle_chip() == data[IDLE_CHIP_IDX].second); - REQUIRE(gl_string.string_number() == data[STRING_NUMBER_IDX].second); - REQUIRE(gl_string.hamming_code() == data[ST1_HC_OFF + HC_IDX].second); - REQUIRE(gl_string.pad_1() == data[ST1_HC_OFF + PAD1_IDX].second); - REQUIRE(gl_string.superframe_number() == data[ST1_HC_OFF + SUPERFRAME_IDX].second); - REQUIRE(gl_string.pad_2() == data[ST1_HC_OFF + PAD2_IDX].second); - REQUIRE(gl_string.frame_number() == data[ST1_HC_OFF + FRAME_IDX].second); - - kaitai::kstream str1(inp_data); - glonass_t str1_data(&str1); - glonass_t::string_1_t* s1 = static_cast(str1_data.data()); - - REQUIRE(s1->not_used() == data[ST1_NU_IDX].second); - REQUIRE(s1->p1() == data[ST1_P1_IDX].second); - REQUIRE(s1->t_k() == data[ST1_T_K_IDX].second); - - int mul = s1->x_vel_sign() ? (-1) : 1; - REQUIRE(s1->x_vel() == (data[ST1_X_VEL_V_IDX].second * mul)); - mul = s1->x_accel_sign() ? (-1) : 1; - REQUIRE(s1->x_accel() == (data[ST1_X_ACCEL_V_IDX].second * mul)); - mul = s1->x_sign() ? 
(-1) : 1; - REQUIRE(s1->x() == (data[ST1_X_V_IDX].second * mul)); -} - -TEST_CASE("parse_string_number_2"){ - string_data data = generate_string_data(2); - std::string inp_data = generate_inp_data(data); - - kaitai::kstream stream(inp_data); - glonass_t gl_string(&stream); - - REQUIRE(gl_string.idle_chip() == data[IDLE_CHIP_IDX].second); - REQUIRE(gl_string.string_number() == data[STRING_NUMBER_IDX].second); - REQUIRE(gl_string.hamming_code() == data[ST2_HC_OFF + HC_IDX].second); - REQUIRE(gl_string.pad_1() == data[ST2_HC_OFF + PAD1_IDX].second); - REQUIRE(gl_string.superframe_number() == data[ST2_HC_OFF + SUPERFRAME_IDX].second); - REQUIRE(gl_string.pad_2() == data[ST2_HC_OFF + PAD2_IDX].second); - REQUIRE(gl_string.frame_number() == data[ST2_HC_OFF + FRAME_IDX].second); - - kaitai::kstream str2(inp_data); - glonass_t str2_data(&str2); - glonass_t::string_2_t* s2 = static_cast(str2_data.data()); - - REQUIRE(s2->b_n() == data[ST2_BN_IDX].second); - REQUIRE(s2->not_used() == data[ST2_NU_IDX].second); - REQUIRE(s2->p2() == data[ST2_P2_IDX].second); - REQUIRE(s2->t_b() == data[ST2_TB_IDX].second); - int mul = s2->y_vel_sign() ? (-1) : 1; - REQUIRE(s2->y_vel() == (data[ST2_Y_VEL_V_IDX].second * mul)); - mul = s2->y_accel_sign() ? (-1) : 1; - REQUIRE(s2->y_accel() == (data[ST2_Y_ACCEL_V_IDX].second * mul)); - mul = s2->y_sign() ? 
(-1) : 1; - REQUIRE(s2->y() == (data[ST2_Y_V_IDX].second * mul)); -} - -TEST_CASE("parse_string_number_3"){ - string_data data = generate_string_data(3); - std::string inp_data = generate_inp_data(data); - - kaitai::kstream stream(inp_data); - glonass_t gl_string(&stream); - - REQUIRE(gl_string.idle_chip() == data[IDLE_CHIP_IDX].second); - REQUIRE(gl_string.string_number() == data[STRING_NUMBER_IDX].second); - REQUIRE(gl_string.hamming_code() == data[ST3_HC_OFF + HC_IDX].second); - REQUIRE(gl_string.pad_1() == data[ST3_HC_OFF + PAD1_IDX].second); - REQUIRE(gl_string.superframe_number() == data[ST3_HC_OFF + SUPERFRAME_IDX].second); - REQUIRE(gl_string.pad_2() == data[ST3_HC_OFF + PAD2_IDX].second); - REQUIRE(gl_string.frame_number() == data[ST3_HC_OFF + FRAME_IDX].second); - - kaitai::kstream str3(inp_data); - glonass_t str3_data(&str3); - glonass_t::string_3_t* s3 = static_cast(str3_data.data()); - - REQUIRE(s3->p3() == data[ST3_P3_IDX].second); - int mul = s3->gamma_n_sign() ? (-1) : 1; - REQUIRE(s3->gamma_n() == (data[ST3_GAMMA_N_V_IDX].second * mul)); - REQUIRE(s3->not_used() == data[ST3_NU_1_IDX].second); - REQUIRE(s3->p() == data[ST3_P_IDX].second); - REQUIRE(s3->l_n() == data[ST3_L_N_IDX].second); - mul = s3->z_vel_sign() ? (-1) : 1; - REQUIRE(s3->z_vel() == (data[ST3_Z_VEL_V_IDX].second * mul)); - mul = s3->z_accel_sign() ? (-1) : 1; - REQUIRE(s3->z_accel() == (data[ST3_Z_ACCEL_V_IDX].second * mul)); - mul = s3->z_sign() ? 
(-1) : 1; - REQUIRE(s3->z() == (data[ST3_Z_V_IDX].second * mul)); -} - -TEST_CASE("parse_string_number_4"){ - string_data data = generate_string_data(4); - std::string inp_data = generate_inp_data(data); - - kaitai::kstream stream(inp_data); - glonass_t gl_string(&stream); - - REQUIRE(gl_string.idle_chip() == data[IDLE_CHIP_IDX].second); - REQUIRE(gl_string.string_number() == data[STRING_NUMBER_IDX].second); - REQUIRE(gl_string.hamming_code() == data[ST4_HC_OFF + HC_IDX].second); - REQUIRE(gl_string.pad_1() == data[ST4_HC_OFF + PAD1_IDX].second); - REQUIRE(gl_string.superframe_number() == data[ST4_HC_OFF + SUPERFRAME_IDX].second); - REQUIRE(gl_string.pad_2() == data[ST4_HC_OFF + PAD2_IDX].second); - REQUIRE(gl_string.frame_number() == data[ST4_HC_OFF + FRAME_IDX].second); - - kaitai::kstream str4(inp_data); - glonass_t str4_data(&str4); - glonass_t::string_4_t* s4 = static_cast(str4_data.data()); - - int mul = s4->tau_n_sign() ? (-1) : 1; - REQUIRE(s4->tau_n() == (data[ST4_TAU_N_V_IDX].second * mul)); - mul = s4->delta_tau_n_sign() ? 
(-1) : 1; - REQUIRE(s4->delta_tau_n() == (data[ST4_DELTA_TAU_N_V_IDX].second * mul)); - REQUIRE(s4->e_n() == data[ST4_E_N_IDX].second); - REQUIRE(s4->not_used_1() == data[ST4_NU_1_IDX].second); - REQUIRE(s4->p4() == data[ST4_P4_IDX].second); - REQUIRE(s4->f_t() == data[ST4_F_T_IDX].second); - REQUIRE(s4->not_used_2() == data[ST4_NU_2_IDX].second); - REQUIRE(s4->n_t() == data[ST4_N_T_IDX].second); - REQUIRE(s4->n() == data[ST4_N_IDX].second); - REQUIRE(s4->m() == data[ST4_M_IDX].second); -} - -TEST_CASE("parse_string_number_5"){ - string_data data = generate_string_data(5); - std::string inp_data = generate_inp_data(data); - - kaitai::kstream stream(inp_data); - glonass_t gl_string(&stream); - - REQUIRE(gl_string.idle_chip() == data[IDLE_CHIP_IDX].second); - REQUIRE(gl_string.string_number() == data[STRING_NUMBER_IDX].second); - REQUIRE(gl_string.hamming_code() == data[ST5_HC_OFF + HC_IDX].second); - REQUIRE(gl_string.pad_1() == data[ST5_HC_OFF + PAD1_IDX].second); - REQUIRE(gl_string.superframe_number() == data[ST5_HC_OFF + SUPERFRAME_IDX].second); - REQUIRE(gl_string.pad_2() == data[ST5_HC_OFF + PAD2_IDX].second); - REQUIRE(gl_string.frame_number() == data[ST5_HC_OFF + FRAME_IDX].second); - - kaitai::kstream str5(inp_data); - glonass_t str5_data(&str5); - glonass_t::string_5_t* s5 = static_cast(str5_data.data()); - - REQUIRE(s5->n_a() == data[ST5_N_A_IDX].second); - REQUIRE(s5->tau_c() == data[ST5_TAU_C_IDX].second); - REQUIRE(s5->not_used() == data[ST5_NU_IDX].second); - REQUIRE(s5->n_4() == data[ST5_N_4_IDX].second); - REQUIRE(s5->tau_gps() == data[ST5_TAU_GPS_IDX].second); - REQUIRE(s5->l_n() == data[ST5_L_N_IDX].second); -} - -TEST_CASE("parse_string_number_NI"){ - string_data data = generate_string_data((rand() % 10) + 6); - std::string inp_data = generate_inp_data(data); - - kaitai::kstream stream(inp_data); - glonass_t gl_string(&stream); - - REQUIRE(gl_string.idle_chip() == data[IDLE_CHIP_IDX].second); - REQUIRE(gl_string.string_number() == 
data[STRING_NUMBER_IDX].second); - REQUIRE(gl_string.hamming_code() == data[ST6_HC_OFF + HC_IDX].second); - REQUIRE(gl_string.pad_1() == data[ST6_HC_OFF + PAD1_IDX].second); - REQUIRE(gl_string.superframe_number() == data[ST6_HC_OFF + SUPERFRAME_IDX].second); - REQUIRE(gl_string.pad_2() == data[ST6_HC_OFF + PAD2_IDX].second); - REQUIRE(gl_string.frame_number() == data[ST6_HC_OFF + FRAME_IDX].second); - - kaitai::kstream strni(inp_data); - glonass_t strni_data(&strni); - glonass_t::string_non_immediate_t* sni = static_cast(strni_data.data()); - - REQUIRE(sni->data_1() == data[ST6_DATA_1_IDX].second); - REQUIRE(sni->data_2() == data[ST6_DATA_2_IDX].second); -} diff --git a/system/ubloxd/tests/test_glonass_runner.cc b/system/ubloxd/tests/test_glonass_runner.cc deleted file mode 100644 index 62bf7476a1..0000000000 --- a/system/ubloxd/tests/test_glonass_runner.cc +++ /dev/null @@ -1,2 +0,0 @@ -#define CATCH_CONFIG_MAIN -#include "catch2/catch.hpp" diff --git a/system/ubloxd/tests/ubloxd.py b/system/ubloxd/tests/ubloxd.py deleted file mode 100755 index c17387114f..0000000000 --- a/system/ubloxd/tests/ubloxd.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python3 -# type: ignore - -from openpilot.selfdrive.locationd.test import ublox -import struct - -baudrate = 460800 -rate = 100 # send new data every 100ms - - -def configure_ublox(dev): - # configure ports and solution parameters and rate - dev.configure_port(port=ublox.PORT_USB, inMask=1, outMask=1) # enable only UBX on USB - dev.configure_port(port=0, inMask=0, outMask=0) # disable DDC - - payload = struct.pack(' - -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "common/swaglog.h" - -const double gpsPi = 3.1415926535898; -#define UBLOX_MSG_SIZE(hdr) (*(uint16_t *)&hdr[4]) - -inline static bool bit_to_bool(uint8_t val, int shifts) { - return (bool)(val & (1 << shifts)); -} - -inline int UbloxMsgParser::needed_bytes() { - // Msg header incomplete? 
- if (bytes_in_parse_buf < ublox::UBLOX_HEADER_SIZE) - return ublox::UBLOX_HEADER_SIZE + ublox::UBLOX_CHECKSUM_SIZE - bytes_in_parse_buf; - uint16_t needed = UBLOX_MSG_SIZE(msg_parse_buf) + ublox::UBLOX_HEADER_SIZE + ublox::UBLOX_CHECKSUM_SIZE; - // too much data - if (needed < (uint16_t)bytes_in_parse_buf) - return -1; - return needed - (uint16_t)bytes_in_parse_buf; -} - -inline bool UbloxMsgParser::valid_cheksum() { - uint8_t ck_a = 0, ck_b = 0; - for (int i = 2; i < bytes_in_parse_buf - ublox::UBLOX_CHECKSUM_SIZE; i++) { - ck_a = (ck_a + msg_parse_buf[i]) & 0xFF; - ck_b = (ck_b + ck_a) & 0xFF; - } - if (ck_a != msg_parse_buf[bytes_in_parse_buf - 2]) { - LOGD("Checksum a mismatch: %02X, %02X", ck_a, msg_parse_buf[6]); - return false; - } - if (ck_b != msg_parse_buf[bytes_in_parse_buf - 1]) { - LOGD("Checksum b mismatch: %02X, %02X", ck_b, msg_parse_buf[7]); - return false; - } - return true; -} - -inline bool UbloxMsgParser::valid() { - return bytes_in_parse_buf >= ublox::UBLOX_HEADER_SIZE + ublox::UBLOX_CHECKSUM_SIZE && - needed_bytes() == 0 && valid_cheksum(); -} - -inline bool UbloxMsgParser::valid_so_far() { - if (bytes_in_parse_buf > 0 && msg_parse_buf[0] != ublox::PREAMBLE1) { - return false; - } - if (bytes_in_parse_buf > 1 && msg_parse_buf[1] != ublox::PREAMBLE2) { - return false; - } - if (needed_bytes() == 0 && !valid()) { - return false; - } - return true; -} - -bool UbloxMsgParser::add_data(float log_time, const uint8_t *incoming_data, uint32_t incoming_data_len, size_t &bytes_consumed) { - last_log_time = log_time; - int needed = needed_bytes(); - if (needed > 0) { - bytes_consumed = std::min((uint32_t)needed, incoming_data_len); - // Add data to buffer - memcpy(msg_parse_buf + bytes_in_parse_buf, incoming_data, bytes_consumed); - bytes_in_parse_buf += bytes_consumed; - } else { - bytes_consumed = incoming_data_len; - } - - // Validate msg format, detect invalid header and invalid checksum. 
- while (!valid_so_far() && bytes_in_parse_buf != 0) { - // Corrupted msg, drop a byte. - bytes_in_parse_buf -= 1; - if (bytes_in_parse_buf > 0) - memmove(&msg_parse_buf[0], &msg_parse_buf[1], bytes_in_parse_buf); - } - - // There is redundant data at the end of buffer, reset the buffer. - if (needed_bytes() == -1) { - bytes_in_parse_buf = 0; - } - return valid(); -} - - -std::pair> UbloxMsgParser::gen_msg() { - std::string dat = data(); - kaitai::kstream stream(dat); - - ubx_t ubx_message(&stream); - auto body = ubx_message.body(); - - switch (ubx_message.msg_type()) { - case 0x0107: - return {"gpsLocationExternal", gen_nav_pvt(static_cast(body))}; - case 0x0213: // UBX-RXM-SFRB (Broadcast Navigation Data Subframe) - return {"ubloxGnss", gen_rxm_sfrbx(static_cast(body))}; - case 0x0215: // UBX-RXM-RAW (Multi-GNSS Raw Measurement Data) - return {"ubloxGnss", gen_rxm_rawx(static_cast(body))}; - case 0x0a09: - return {"ubloxGnss", gen_mon_hw(static_cast(body))}; - case 0x0a0b: - return {"ubloxGnss", gen_mon_hw2(static_cast(body))}; - case 0x0135: - return {"ubloxGnss", gen_nav_sat(static_cast(body))}; - default: - LOGE("Unknown message type %x", ubx_message.msg_type()); - return {"ubloxGnss", kj::Array()}; - } -} - - -kj::Array UbloxMsgParser::gen_nav_pvt(ubx_t::nav_pvt_t *msg) { - MessageBuilder msg_builder; - auto gpsLoc = msg_builder.initEvent().initGpsLocationExternal(); - gpsLoc.setSource(cereal::GpsLocationData::SensorSource::UBLOX); - gpsLoc.setFlags(msg->flags()); - gpsLoc.setHasFix((msg->flags() % 2) == 1); - gpsLoc.setLatitude(msg->lat() * 1e-07); - gpsLoc.setLongitude(msg->lon() * 1e-07); - gpsLoc.setAltitude(msg->height() * 1e-03); - gpsLoc.setSpeed(msg->g_speed() * 1e-03); - gpsLoc.setBearingDeg(msg->head_mot() * 1e-5); - gpsLoc.setHorizontalAccuracy(msg->h_acc() * 1e-03); - gpsLoc.setSatelliteCount(msg->num_sv()); - std::tm timeinfo = std::tm(); - timeinfo.tm_year = msg->year() - 1900; - timeinfo.tm_mon = msg->month() - 1; - timeinfo.tm_mday = 
msg->day(); - timeinfo.tm_hour = msg->hour(); - timeinfo.tm_min = msg->min(); - timeinfo.tm_sec = msg->sec(); - - std::time_t utc_tt = timegm(&timeinfo); - gpsLoc.setUnixTimestampMillis(utc_tt * 1e+03 + msg->nano() * 1e-06); - float f[] = { msg->vel_n() * 1e-03f, msg->vel_e() * 1e-03f, msg->vel_d() * 1e-03f }; - gpsLoc.setVNED(f); - gpsLoc.setVerticalAccuracy(msg->v_acc() * 1e-03); - gpsLoc.setSpeedAccuracy(msg->s_acc() * 1e-03); - gpsLoc.setBearingAccuracyDeg(msg->head_acc() * 1e-05); - return capnp::messageToFlatArray(msg_builder); -} - -kj::Array UbloxMsgParser::parse_gps_ephemeris(ubx_t::rxm_sfrbx_t *msg) { - // GPS subframes are packed into 10x 4 bytes, each containing 3 actual bytes - // We will first need to separate the data from the padding and parity - auto body = *msg->body(); - assert(body.size() == 10); - - std::string subframe_data; - subframe_data.reserve(30); - for (uint32_t word : body) { - word = word >> 6; // TODO: Verify parity - subframe_data.push_back(word >> 16); - subframe_data.push_back(word >> 8); - subframe_data.push_back(word >> 0); - } - - // Collect subframes in map and parse when we have all the parts - { - kaitai::kstream stream(subframe_data); - gps_t subframe(&stream); - - int subframe_id = subframe.how()->subframe_id(); - if (subframe_id > 3 || subframe_id < 1) { - // don't parse almanac subframes - return kj::Array(); - } - gps_subframes[msg->sv_id()][subframe_id] = subframe_data; - } - - // publish if subframes 1-3 have been collected - if (gps_subframes[msg->sv_id()].size() == 3) { - MessageBuilder msg_builder; - auto eph = msg_builder.initEvent().initUbloxGnss().initEphemeris(); - eph.setSvId(msg->sv_id()); - - int iode_s2 = 0; - int iode_s3 = 0; - int iodc_lsb = 0; - int week; - - // Subframe 1 - { - kaitai::kstream stream(gps_subframes[msg->sv_id()][1]); - gps_t subframe(&stream); - gps_t::subframe_1_t* subframe_1 = static_cast(subframe.body()); - - // Each message is incremented to be greater or equal than week 1877 
(2015-12-27). - // To skip this use the current_time argument - week = subframe_1->week_no(); - week += 1024; - if (week < 1877) { - week += 1024; - } - //eph.setGpsWeek(subframe_1->week_no()); - eph.setTgd(subframe_1->t_gd() * pow(2, -31)); - eph.setToc(subframe_1->t_oc() * pow(2, 4)); - eph.setAf2(subframe_1->af_2() * pow(2, -55)); - eph.setAf1(subframe_1->af_1() * pow(2, -43)); - eph.setAf0(subframe_1->af_0() * pow(2, -31)); - eph.setSvHealth(subframe_1->sv_health()); - eph.setTowCount(subframe.how()->tow_count()); - iodc_lsb = subframe_1->iodc_lsb(); - } - - // Subframe 2 - { - kaitai::kstream stream(gps_subframes[msg->sv_id()][2]); - gps_t subframe(&stream); - gps_t::subframe_2_t* subframe_2 = static_cast(subframe.body()); - - // GPS week refers to current week, the ephemeris can be valid for the next - // if toe equals 0, this can be verified by the TOW count if it is within the - // last 2 hours of the week (gps ephemeris valid for 4hours) - if (subframe_2->t_oe() == 0 and subframe.how()->tow_count()*6 >= (SECS_IN_WEEK - 2*SECS_IN_HR)){ - week += 1; - } - eph.setCrs(subframe_2->c_rs() * pow(2, -5)); - eph.setDeltaN(subframe_2->delta_n() * pow(2, -43) * gpsPi); - eph.setM0(subframe_2->m_0() * pow(2, -31) * gpsPi); - eph.setCuc(subframe_2->c_uc() * pow(2, -29)); - eph.setEcc(subframe_2->e() * pow(2, -33)); - eph.setCus(subframe_2->c_us() * pow(2, -29)); - eph.setA(pow(subframe_2->sqrt_a() * pow(2, -19), 2.0)); - eph.setToe(subframe_2->t_oe() * pow(2, 4)); - iode_s2 = subframe_2->iode(); - } - - // Subframe 3 - { - kaitai::kstream stream(gps_subframes[msg->sv_id()][3]); - gps_t subframe(&stream); - gps_t::subframe_3_t* subframe_3 = static_cast(subframe.body()); - - eph.setCic(subframe_3->c_ic() * pow(2, -29)); - eph.setOmega0(subframe_3->omega_0() * pow(2, -31) * gpsPi); - eph.setCis(subframe_3->c_is() * pow(2, -29)); - eph.setI0(subframe_3->i_0() * pow(2, -31) * gpsPi); - eph.setCrc(subframe_3->c_rc() * pow(2, -5)); - eph.setOmega(subframe_3->omega() * pow(2, 
-31) * gpsPi); - eph.setOmegaDot(subframe_3->omega_dot() * pow(2, -43) * gpsPi); - eph.setIode(subframe_3->iode()); - eph.setIDot(subframe_3->idot() * pow(2, -43) * gpsPi); - iode_s3 = subframe_3->iode(); - } - - eph.setToeWeek(week); - eph.setTocWeek(week); - - gps_subframes[msg->sv_id()].clear(); - if (iodc_lsb != iode_s2 || iodc_lsb != iode_s3) { - // data set cutover, reject ephemeris - return kj::Array(); - } - return capnp::messageToFlatArray(msg_builder); - } - return kj::Array(); -} - -kj::Array UbloxMsgParser::parse_glonass_ephemeris(ubx_t::rxm_sfrbx_t *msg) { - // This parser assumes that no 2 satellites of the same frequency - // can be in view at the same time - auto body = *msg->body(); - assert(body.size() == 4); - { - std::string string_data; - string_data.reserve(16); - for (uint32_t word : body) { - for (int i = 3; i >= 0; i--) - string_data.push_back(word >> 8*i); - } - - kaitai::kstream stream(string_data); - glonass_t gl_string(&stream); - int string_number = gl_string.string_number(); - if (string_number < 1 || string_number > 5 || gl_string.idle_chip()) { - // don't parse non immediate data, idle_chip == 0 - return kj::Array(); - } - - // Check if new string either has same superframe_id or log transmission times make sense - bool superframe_unknown = false; - bool needs_clear = false; - for (int i = 1; i <= 5; i++) { - if (glonass_strings[msg->freq_id()].find(i) == glonass_strings[msg->freq_id()].end()) - continue; - if (glonass_string_superframes[msg->freq_id()][i] == 0 || gl_string.superframe_number() == 0) { - superframe_unknown = true; - } else if (glonass_string_superframes[msg->freq_id()][i] != gl_string.superframe_number()) { - needs_clear = true; - } - // Check if string times add up to being from the same frame - // If superframe is known this is redundant - // Strings are sent 2s apart and frames are 30s apart - if (superframe_unknown && - std::abs((glonass_string_times[msg->freq_id()][i] - 2.0 * i) - (last_log_time - 2.0 * 
string_number)) > 10) - needs_clear = true; - } - if (needs_clear) { - glonass_strings[msg->freq_id()].clear(); - glonass_string_superframes[msg->freq_id()].clear(); - glonass_string_times[msg->freq_id()].clear(); - } - glonass_strings[msg->freq_id()][string_number] = string_data; - glonass_string_superframes[msg->freq_id()][string_number] = gl_string.superframe_number(); - glonass_string_times[msg->freq_id()][string_number] = last_log_time; - } - if (msg->sv_id() == 255) { - // data can be decoded before identifying the SV number, in this case 255 - // is returned, which means "unknown" (ublox p32) - return kj::Array(); - } - - // publish if strings 1-5 have been collected - if (glonass_strings[msg->freq_id()].size() != 5) { - return kj::Array(); - } - - MessageBuilder msg_builder; - auto eph = msg_builder.initEvent().initUbloxGnss().initGlonassEphemeris(); - eph.setSvId(msg->sv_id()); - eph.setFreqNum(msg->freq_id() - 7); - - uint16_t current_day = 0; - uint16_t tk = 0; - - // string number 1 - { - kaitai::kstream stream(glonass_strings[msg->freq_id()][1]); - glonass_t gl_stream(&stream); - glonass_t::string_1_t* data = static_cast(gl_stream.data()); - - eph.setP1(data->p1()); - tk = data->t_k(); - eph.setTkDEPRECATED(tk); - eph.setXVel(data->x_vel() * pow(2, -20)); - eph.setXAccel(data->x_accel() * pow(2, -30)); - eph.setX(data->x() * pow(2, -11)); - } - - // string number 2 - { - kaitai::kstream stream(glonass_strings[msg->freq_id()][2]); - glonass_t gl_stream(&stream); - glonass_t::string_2_t* data = static_cast(gl_stream.data()); - - eph.setSvHealth(data->b_n()>>2); // MSB indicates health - eph.setP2(data->p2()); - eph.setTb(data->t_b()); - eph.setYVel(data->y_vel() * pow(2, -20)); - eph.setYAccel(data->y_accel() * pow(2, -30)); - eph.setY(data->y() * pow(2, -11)); - } - - // string number 3 - { - kaitai::kstream stream(glonass_strings[msg->freq_id()][3]); - glonass_t gl_stream(&stream); - glonass_t::string_3_t* data = static_cast(gl_stream.data()); - - 
eph.setP3(data->p3()); - eph.setGammaN(data->gamma_n() * pow(2, -40)); - eph.setSvHealth(eph.getSvHealth() | data->l_n()); - eph.setZVel(data->z_vel() * pow(2, -20)); - eph.setZAccel(data->z_accel() * pow(2, -30)); - eph.setZ(data->z() * pow(2, -11)); - } - - // string number 4 - { - kaitai::kstream stream(glonass_strings[msg->freq_id()][4]); - glonass_t gl_stream(&stream); - glonass_t::string_4_t* data = static_cast(gl_stream.data()); - - current_day = data->n_t(); - eph.setNt(current_day); - eph.setTauN(data->tau_n() * pow(2, -30)); - eph.setDeltaTauN(data->delta_tau_n() * pow(2, -30)); - eph.setAge(data->e_n()); - eph.setP4(data->p4()); - eph.setSvURA(glonass_URA_lookup.at(data->f_t())); - if (msg->sv_id() != data->n()) { - LOGE("SV_ID != SLOT_NUMBER: %d %" PRIu64, msg->sv_id(), data->n()); - } - eph.setSvType(data->m()); - } - - // string number 5 - { - kaitai::kstream stream(glonass_strings[msg->freq_id()][5]); - glonass_t gl_stream(&stream); - glonass_t::string_5_t* data = static_cast(gl_stream.data()); - - // string5 parsing is only needed to get the year, this can be removed and - // the year can be fetched later in laika (note rollovers and leap year) - eph.setN4(data->n_4()); - int tk_seconds = SECS_IN_HR * ((tk>>7) & 0x1F) + SECS_IN_MIN * ((tk>>1) & 0x3F) + (tk & 0x1) * 30; - eph.setTkSeconds(tk_seconds); - } - - glonass_strings[msg->freq_id()].clear(); - return capnp::messageToFlatArray(msg_builder); -} - - -kj::Array UbloxMsgParser::gen_rxm_sfrbx(ubx_t::rxm_sfrbx_t *msg) { - switch (msg->gnss_id()) { - case ubx_t::gnss_type_t::GNSS_TYPE_GPS: - return parse_gps_ephemeris(msg); - case ubx_t::gnss_type_t::GNSS_TYPE_GLONASS: - return parse_glonass_ephemeris(msg); - default: - return kj::Array(); - } -} - -kj::Array UbloxMsgParser::gen_rxm_rawx(ubx_t::rxm_rawx_t *msg) { - MessageBuilder msg_builder; - auto mr = msg_builder.initEvent().initUbloxGnss().initMeasurementReport(); - mr.setRcvTow(msg->rcv_tow()); - mr.setGpsWeek(msg->week()); - 
mr.setLeapSeconds(msg->leap_s()); - mr.setGpsWeek(msg->week()); - - auto mb = mr.initMeasurements(msg->num_meas()); - auto measurements = *msg->meas(); - for (int8_t i = 0; i < msg->num_meas(); i++) { - mb[i].setSvId(measurements[i]->sv_id()); - mb[i].setPseudorange(measurements[i]->pr_mes()); - mb[i].setCarrierCycles(measurements[i]->cp_mes()); - mb[i].setDoppler(measurements[i]->do_mes()); - mb[i].setGnssId(measurements[i]->gnss_id()); - mb[i].setGlonassFrequencyIndex(measurements[i]->freq_id()); - mb[i].setLocktime(measurements[i]->lock_time()); - mb[i].setCno(measurements[i]->cno()); - mb[i].setPseudorangeStdev(0.01 * (pow(2, (measurements[i]->pr_stdev() & 15)))); // weird scaling, might be wrong - mb[i].setCarrierPhaseStdev(0.004 * (measurements[i]->cp_stdev() & 15)); - mb[i].setDopplerStdev(0.002 * (pow(2, (measurements[i]->do_stdev() & 15)))); // weird scaling, might be wrong - - auto ts = mb[i].initTrackingStatus(); - auto trk_stat = measurements[i]->trk_stat(); - ts.setPseudorangeValid(bit_to_bool(trk_stat, 0)); - ts.setCarrierPhaseValid(bit_to_bool(trk_stat, 1)); - ts.setHalfCycleValid(bit_to_bool(trk_stat, 2)); - ts.setHalfCycleSubtracted(bit_to_bool(trk_stat, 3)); - } - - mr.setNumMeas(msg->num_meas()); - auto rs = mr.initReceiverStatus(); - rs.setLeapSecValid(bit_to_bool(msg->rec_stat(), 0)); - rs.setClkReset(bit_to_bool(msg->rec_stat(), 2)); - return capnp::messageToFlatArray(msg_builder); -} - -kj::Array UbloxMsgParser::gen_nav_sat(ubx_t::nav_sat_t *msg) { - MessageBuilder msg_builder; - auto sr = msg_builder.initEvent().initUbloxGnss().initSatReport(); - sr.setITow(msg->itow()); - - auto svs = sr.initSvs(msg->num_svs()); - auto svs_data = *msg->svs(); - for (int8_t i = 0; i < msg->num_svs(); i++) { - svs[i].setSvId(svs_data[i]->sv_id()); - svs[i].setGnssId(svs_data[i]->gnss_id()); - svs[i].setFlagsBitfield(svs_data[i]->flags()); - svs[i].setCno(svs_data[i]->cno()); - svs[i].setElevationDeg(svs_data[i]->elev()); - 
svs[i].setAzimuthDeg(svs_data[i]->azim()); - svs[i].setPseudorangeResidual(svs_data[i]->pr_res() * 0.1); - } - - return capnp::messageToFlatArray(msg_builder); -} - -kj::Array UbloxMsgParser::gen_mon_hw(ubx_t::mon_hw_t *msg) { - MessageBuilder msg_builder; - auto hwStatus = msg_builder.initEvent().initUbloxGnss().initHwStatus(); - hwStatus.setNoisePerMS(msg->noise_per_ms()); - hwStatus.setFlags(msg->flags()); - hwStatus.setAgcCnt(msg->agc_cnt()); - hwStatus.setAStatus((cereal::UbloxGnss::HwStatus::AntennaSupervisorState) msg->a_status()); - hwStatus.setAPower((cereal::UbloxGnss::HwStatus::AntennaPowerStatus) msg->a_power()); - hwStatus.setJamInd(msg->jam_ind()); - return capnp::messageToFlatArray(msg_builder); -} - -kj::Array UbloxMsgParser::gen_mon_hw2(ubx_t::mon_hw2_t *msg) { - MessageBuilder msg_builder; - auto hwStatus = msg_builder.initEvent().initUbloxGnss().initHwStatus2(); - hwStatus.setOfsI(msg->ofs_i()); - hwStatus.setMagI(msg->mag_i()); - hwStatus.setOfsQ(msg->ofs_q()); - hwStatus.setMagQ(msg->mag_q()); - - switch (msg->cfg_source()) { - case ubx_t::mon_hw2_t::config_source_t::CONFIG_SOURCE_ROM: - hwStatus.setCfgSource(cereal::UbloxGnss::HwStatus2::ConfigSource::ROM); - break; - case ubx_t::mon_hw2_t::config_source_t::CONFIG_SOURCE_OTP: - hwStatus.setCfgSource(cereal::UbloxGnss::HwStatus2::ConfigSource::OTP); - break; - case ubx_t::mon_hw2_t::config_source_t::CONFIG_SOURCE_CONFIG_PINS: - hwStatus.setCfgSource(cereal::UbloxGnss::HwStatus2::ConfigSource::CONFIGPINS); - break; - case ubx_t::mon_hw2_t::config_source_t::CONFIG_SOURCE_FLASH: - hwStatus.setCfgSource(cereal::UbloxGnss::HwStatus2::ConfigSource::FLASH); - break; - default: - hwStatus.setCfgSource(cereal::UbloxGnss::HwStatus2::ConfigSource::UNDEFINED); - break; - } - - hwStatus.setLowLevCfg(msg->low_lev_cfg()); - hwStatus.setPostStatus(msg->post_status()); - - return capnp::messageToFlatArray(msg_builder); -} diff --git a/system/ubloxd/ublox_msg.h b/system/ubloxd/ublox_msg.h deleted file mode 
100644 index d21760edc2..0000000000 --- a/system/ubloxd/ublox_msg.h +++ /dev/null @@ -1,131 +0,0 @@ -#pragma once - -#include -#include -#include -#include -#include -#include -#include - -#include "cereal/messaging/messaging.h" -#include "common/util.h" -#include "system/ubloxd/generated/gps.h" -#include "system/ubloxd/generated/glonass.h" -#include "system/ubloxd/generated/ubx.h" - -using namespace std::string_literals; - -const int SECS_IN_MIN = 60; -const int SECS_IN_HR = 60 * SECS_IN_MIN; -const int SECS_IN_DAY = 24 * SECS_IN_HR; -const int SECS_IN_WEEK = 7 * SECS_IN_DAY; - -// protocol constants -namespace ublox { - const uint8_t PREAMBLE1 = 0xb5; - const uint8_t PREAMBLE2 = 0x62; - - const int UBLOX_HEADER_SIZE = 6; - const int UBLOX_CHECKSUM_SIZE = 2; - const int UBLOX_MAX_MSG_SIZE = 65536; - - struct ubx_mga_ini_time_utc_t { - uint8_t type; - uint8_t version; - uint8_t ref; - int8_t leapSecs; - uint16_t year; - uint8_t month; - uint8_t day; - uint8_t hour; - uint8_t minute; - uint8_t second; - uint8_t reserved1; - uint32_t ns; - uint16_t tAccS; - uint16_t reserved2; - uint32_t tAccNs; - } __attribute__((packed)); - - inline std::string ubx_add_checksum(const std::string &msg) { - assert(msg.size() > 2); - - uint8_t ck_a = 0, ck_b = 0; - for (int i = 2; i < msg.size(); i++) { - ck_a = (ck_a + msg[i]) & 0xFF; - ck_b = (ck_b + ck_a) & 0xFF; - } - - std::string r = msg; - r.push_back(ck_a); - r.push_back(ck_b); - return r; - } - - inline std::string build_ubx_mga_ini_time_utc(struct tm time) { - ublox::ubx_mga_ini_time_utc_t payload = { - .type = 0x10, - .version = 0x0, - .ref = 0x0, - .leapSecs = -128, // Unknown - .year = (uint16_t)(1900 + time.tm_year), - .month = (uint8_t)(1 + time.tm_mon), - .day = (uint8_t)time.tm_mday, - .hour = (uint8_t)time.tm_hour, - .minute = (uint8_t)time.tm_min, - .second = (uint8_t)time.tm_sec, - .reserved1 = 0x0, - .ns = 0, - .tAccS = 30, - .reserved2 = 0x0, - .tAccNs = 0, - }; - assert(sizeof(payload) == 24); - - std::string 
msg = "\xb5\x62\x13\x40\x18\x00"s; - msg += std::string((char*)&payload, sizeof(payload)); - - return ubx_add_checksum(msg); - } -} - -class UbloxMsgParser { - public: - bool add_data(float log_time, const uint8_t *incoming_data, uint32_t incoming_data_len, size_t &bytes_consumed); - inline void reset() {bytes_in_parse_buf = 0;} - inline int needed_bytes(); - inline std::string data() {return std::string((const char*)msg_parse_buf, bytes_in_parse_buf);} - - std::pair> gen_msg(); - kj::Array gen_nav_pvt(ubx_t::nav_pvt_t *msg); - kj::Array gen_rxm_sfrbx(ubx_t::rxm_sfrbx_t *msg); - kj::Array gen_rxm_rawx(ubx_t::rxm_rawx_t *msg); - kj::Array gen_mon_hw(ubx_t::mon_hw_t *msg); - kj::Array gen_mon_hw2(ubx_t::mon_hw2_t *msg); - kj::Array gen_nav_sat(ubx_t::nav_sat_t *msg); - - private: - inline bool valid_cheksum(); - inline bool valid(); - inline bool valid_so_far(); - - kj::Array parse_gps_ephemeris(ubx_t::rxm_sfrbx_t *msg); - kj::Array parse_glonass_ephemeris(ubx_t::rxm_sfrbx_t *msg); - - std::unordered_map> gps_subframes; - - float last_log_time = 0.0; - size_t bytes_in_parse_buf = 0; - uint8_t msg_parse_buf[ublox::UBLOX_HEADER_SIZE + ublox::UBLOX_MAX_MSG_SIZE]; - - // user range accuracy in meters - const std::unordered_map glonass_URA_lookup = - {{ 0, 1}, { 1, 2}, { 2, 2.5}, { 3, 4}, { 4, 5}, {5, 7}, - { 6, 10}, { 7, 12}, { 8, 14}, { 9, 16}, {10, 32}, - {11, 64}, {12, 128}, {13, 256}, {14, 512}, {15, 1024}}; - - std::unordered_map> glonass_strings; - std::unordered_map> glonass_string_times; - std::unordered_map> glonass_string_superframes; -}; diff --git a/system/ubloxd/ubloxd.cc b/system/ubloxd/ubloxd.cc deleted file mode 100644 index 4e7e91f830..0000000000 --- a/system/ubloxd/ubloxd.cc +++ /dev/null @@ -1,62 +0,0 @@ -#include - -#include - -#include "cereal/messaging/messaging.h" -#include "common/swaglog.h" -#include "common/util.h" -#include "system/ubloxd/ublox_msg.h" - -ExitHandler do_exit; -using namespace ublox; - -int main() { - LOGW("starting ubloxd"); - 
AlignedBuffer aligned_buf; - UbloxMsgParser parser; - - PubMaster pm({"ubloxGnss", "gpsLocationExternal"}); - - std::unique_ptr context(Context::create()); - std::unique_ptr subscriber(SubSocket::create(context.get(), "ubloxRaw")); - assert(subscriber != NULL); - subscriber->setTimeout(100); - - - while (!do_exit) { - std::unique_ptr msg(subscriber->receive()); - if (!msg) { - continue; - } - - capnp::FlatArrayMessageReader cmsg(aligned_buf.align(msg.get())); - cereal::Event::Reader event = cmsg.getRoot(); - auto ubloxRaw = event.getUbloxRaw(); - float log_time = 1e-9 * event.getLogMonoTime(); - - const uint8_t *data = ubloxRaw.begin(); - size_t len = ubloxRaw.size(); - size_t bytes_consumed = 0; - - while (bytes_consumed < len && !do_exit) { - size_t bytes_consumed_this_time = 0U; - if (parser.add_data(log_time, data + bytes_consumed, (uint32_t)(len - bytes_consumed), bytes_consumed_this_time)) { - - try { - auto ublox_msg = parser.gen_msg(); - if (ublox_msg.second.size() > 0) { - auto bytes = ublox_msg.second.asBytes(); - pm.send(ublox_msg.first.c_str(), bytes.begin(), bytes.size()); - } - } catch (const std::exception& e) { - LOGE("Error parsing ublox message %s", e.what()); - } - - parser.reset(); - } - bytes_consumed += bytes_consumed_this_time; - } - } - - return 0; -} diff --git a/system/ubloxd/ubloxd.py b/system/ubloxd/ubloxd.py new file mode 100755 index 0000000000..84a926dd78 --- /dev/null +++ b/system/ubloxd/ubloxd.py @@ -0,0 +1,519 @@ +#!/usr/bin/env python3 +import math +import capnp +import calendar +import numpy as np +from collections import defaultdict +from dataclasses import dataclass + +from cereal import log +from cereal import messaging +from openpilot.system.ubloxd.generated.ubx import Ubx +from openpilot.system.ubloxd.generated.gps import Gps +from openpilot.system.ubloxd.generated.glonass import Glonass + + +SECS_IN_MIN = 60 +SECS_IN_HR = 60 * SECS_IN_MIN +SECS_IN_DAY = 24 * SECS_IN_HR +SECS_IN_WEEK = 7 * SECS_IN_DAY + + +class UbxFramer: + 
PREAMBLE1 = 0xB5 + PREAMBLE2 = 0x62 + HEADER_SIZE = 6 + CHECKSUM_SIZE = 2 + + def __init__(self) -> None: + self.buf = bytearray() + self.last_log_time = 0.0 + + def reset(self) -> None: + self.buf.clear() + + @staticmethod + def _checksum_ok(frame: bytes) -> bool: + ck_a = 0 + ck_b = 0 + for b in frame[2:-2]: + ck_a = (ck_a + b) & 0xFF + ck_b = (ck_b + ck_a) & 0xFF + return ck_a == frame[-2] and ck_b == frame[-1] + + def add_data(self, log_time: float, incoming: bytes) -> list[bytes]: + self.last_log_time = log_time + out: list[bytes] = [] + if not incoming: + return out + self.buf += incoming + + while True: + # find preamble + if len(self.buf) < 2: + break + start = self.buf.find(b"\xB5\x62") + if start < 0: + # no preamble in buffer + self.buf.clear() + break + if start > 0: + # drop garbage before preamble + self.buf = self.buf[start:] + + if len(self.buf) < self.HEADER_SIZE: + break + + length_le = int.from_bytes(self.buf[4:6], 'little', signed=False) + total_len = self.HEADER_SIZE + length_le + self.CHECKSUM_SIZE + if len(self.buf) < total_len: + break + + candidate = bytes(self.buf[:total_len]) + if self._checksum_ok(candidate): + out.append(candidate) + # consume this frame + self.buf = self.buf[total_len:] + else: + # drop first byte and retry + self.buf = self.buf[1:] + + return out + + +def _bit(b: int, shift: int) -> bool: + return (b & (1 << shift)) != 0 + + +@dataclass +class EphemerisCaches: + gps_subframes: defaultdict[int, dict[int, bytes]] + glonass_strings: defaultdict[int, dict[int, bytes]] + glonass_string_times: defaultdict[int, dict[int, float]] + glonass_string_superframes: defaultdict[int, dict[int, int]] + + +class UbloxMsgParser: + gpsPi = 3.1415926535898 + + # user range accuracy in meters + glonass_URA_lookup: dict[int, float] = { + 0: 1, 1: 2, 2: 2.5, 3: 4, 4: 5, 5: 7, + 6: 10, 7: 12, 8: 14, 9: 16, 10: 32, + 11: 64, 12: 128, 13: 256, 14: 512, 15: 1024, + } + + def __init__(self) -> None: + self.framer = UbxFramer() + self.caches = 
EphemerisCaches( + gps_subframes=defaultdict(dict), + glonass_strings=defaultdict(dict), + glonass_string_times=defaultdict(dict), + glonass_string_superframes=defaultdict(dict), + ) + + # Message generation entry point + def parse_frame(self, frame: bytes) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder] | None: + # Quick header parse + msg_type = int.from_bytes(frame[2:4], 'big') + payload = frame[6:-2] + if msg_type == 0x0107: + body = Ubx.NavPvt.from_bytes(payload) + return self._gen_nav_pvt(body) + if msg_type == 0x0213: + # Manually parse RXM-SFRBX to avoid Kaitai EOF on some frames + if len(payload) < 8: + return None + gnss_id = payload[0] + sv_id = payload[1] + freq_id = payload[3] + num_words = payload[4] + exp = 8 + 4 * num_words + if exp != len(payload): + return None + words: list[int] = [] + off = 8 + for _ in range(num_words): + words.append(int.from_bytes(payload[off:off+4], 'little')) + off += 4 + + class _SfrbxView: + def __init__(self, gid: int, sid: int, fid: int, body: list[int]): + self.gnss_id = Ubx.GnssType(gid) + self.sv_id = sid + self.freq_id = fid + self.body = body + view = _SfrbxView(gnss_id, sv_id, freq_id, words) + return self._gen_rxm_sfrbx(view) + if msg_type == 0x0215: + body = Ubx.RxmRawx.from_bytes(payload) + return self._gen_rxm_rawx(body) + if msg_type == 0x0A09: + body = Ubx.MonHw.from_bytes(payload) + return self._gen_mon_hw(body) + if msg_type == 0x0A0B: + body = Ubx.MonHw2.from_bytes(payload) + return self._gen_mon_hw2(body) + if msg_type == 0x0135: + body = Ubx.NavSat.from_bytes(payload) + return self._gen_nav_sat(body) + return None + + # NAV-PVT -> gpsLocationExternal + def _gen_nav_pvt(self, msg: Ubx.NavPvt) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder]: + dat = messaging.new_message('gpsLocationExternal', valid=True) + gps = dat.gpsLocationExternal + gps.source = log.GpsLocationData.SensorSource.ublox + gps.flags = msg.flags + gps.hasFix = (msg.flags % 2) == 1 + gps.latitude = msg.lat * 1e-07 + 
gps.longitude = msg.lon * 1e-07 + gps.altitude = msg.height * 1e-03 + gps.speed = msg.g_speed * 1e-03 + gps.bearingDeg = msg.head_mot * 1e-5 + gps.horizontalAccuracy = msg.h_acc * 1e-03 + gps.satelliteCount = msg.num_sv + + # build UTC timestamp millis (NAV-PVT is in UTC) + # tolerate invalid or unset date values like C++ timegm + try: + utc_tt = calendar.timegm((msg.year, msg.month, msg.day, msg.hour, msg.min, msg.sec, 0, 0, 0)) + except Exception: + utc_tt = 0 + gps.unixTimestampMillis = int(utc_tt * 1e3 + (msg.nano * 1e-6)) + + # match C++ float32 rounding semantics exactly + gps.vNED = [ + float(np.float32(msg.vel_n) * np.float32(1e-03)), + float(np.float32(msg.vel_e) * np.float32(1e-03)), + float(np.float32(msg.vel_d) * np.float32(1e-03)), + ] + gps.verticalAccuracy = msg.v_acc * 1e-03 + gps.speedAccuracy = msg.s_acc * 1e-03 + gps.bearingAccuracyDeg = msg.head_acc * 1e-05 + return ('gpsLocationExternal', dat) + + # RXM-SFRBX dispatch to GPS or GLONASS ephemeris + def _gen_rxm_sfrbx(self, msg) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder] | None: + if msg.gnss_id == Ubx.GnssType.gps: + return self._parse_gps_ephemeris(msg) + if msg.gnss_id == Ubx.GnssType.glonass: + return self._parse_glonass_ephemeris(msg) + return None + + def _parse_gps_ephemeris(self, msg: Ubx.RxmSfrbx) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder] | None: + # body is list of 10 words; convert to 30-byte subframe (strip parity/padding) + body = msg.body + if len(body) != 10: + return None + subframe_data = bytearray() + for word in body: + word >>= 6 + subframe_data.append((word >> 16) & 0xFF) + subframe_data.append((word >> 8) & 0xFF) + subframe_data.append(word & 0xFF) + + sf = Gps.from_bytes(bytes(subframe_data)) + subframe_id = sf.how.subframe_id + if subframe_id < 1 or subframe_id > 3: + return None + self.caches.gps_subframes[msg.sv_id][subframe_id] = bytes(subframe_data) + + if len(self.caches.gps_subframes[msg.sv_id]) != 3: + return None + + dat = 
messaging.new_message('ubloxGnss', valid=True) + eph = dat.ubloxGnss.init('ephemeris') + eph.svId = msg.sv_id + + iode_s2 = 0 + iode_s3 = 0 + iodc_lsb = 0 + week = 0 + + # Subframe 1 + sf1 = Gps.from_bytes(self.caches.gps_subframes[msg.sv_id][1]) + s1 = sf1.body + assert isinstance(s1, Gps.Subframe1) + week = s1.week_no + week += 1024 + if week < 1877: + week += 1024 + eph.tgd = s1.t_gd * math.pow(2, -31) + eph.toc = s1.t_oc * math.pow(2, 4) + eph.af2 = s1.af_2 * math.pow(2, -55) + eph.af1 = s1.af_1 * math.pow(2, -43) + eph.af0 = s1.af_0 * math.pow(2, -31) + eph.svHealth = s1.sv_health + eph.towCount = sf1.how.tow_count + iodc_lsb = s1.iodc_lsb + + # Subframe 2 + sf2 = Gps.from_bytes(self.caches.gps_subframes[msg.sv_id][2]) + s2 = sf2.body + assert isinstance(s2, Gps.Subframe2) + if s2.t_oe == 0 and sf2.how.tow_count * 6 >= (SECS_IN_WEEK - 2 * SECS_IN_HR): + week += 1 + eph.crs = s2.c_rs * math.pow(2, -5) + eph.deltaN = s2.delta_n * math.pow(2, -43) * self.gpsPi + eph.m0 = s2.m_0 * math.pow(2, -31) * self.gpsPi + eph.cuc = s2.c_uc * math.pow(2, -29) + eph.ecc = s2.e * math.pow(2, -33) + eph.cus = s2.c_us * math.pow(2, -29) + eph.a = math.pow(s2.sqrt_a * math.pow(2, -19), 2.0) + eph.toe = s2.t_oe * math.pow(2, 4) + iode_s2 = s2.iode + + # Subframe 3 + sf3 = Gps.from_bytes(self.caches.gps_subframes[msg.sv_id][3]) + s3 = sf3.body + assert isinstance(s3, Gps.Subframe3) + eph.cic = s3.c_ic * math.pow(2, -29) + eph.omega0 = s3.omega_0 * math.pow(2, -31) * self.gpsPi + eph.cis = s3.c_is * math.pow(2, -29) + eph.i0 = s3.i_0 * math.pow(2, -31) * self.gpsPi + eph.crc = s3.c_rc * math.pow(2, -5) + eph.omega = s3.omega * math.pow(2, -31) * self.gpsPi + eph.omegaDot = s3.omega_dot * math.pow(2, -43) * self.gpsPi + eph.iode = s3.iode + eph.iDot = s3.idot * math.pow(2, -43) * self.gpsPi + iode_s3 = s3.iode + + eph.toeWeek = week + eph.tocWeek = week + + # clear cache for this SV + self.caches.gps_subframes[msg.sv_id].clear() + if not (iodc_lsb == iode_s2 == iode_s3): + return 
None + return ('ubloxGnss', dat) + + def _parse_glonass_ephemeris(self, msg: Ubx.RxmSfrbx) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder] | None: + # words are 4 bytes each; Glonass parser expects 16 bytes (string) + body = msg.body + if len(body) != 4: + return None + string_bytes = bytearray() + for word in body: + for i in (3, 2, 1, 0): + string_bytes.append((word >> (8 * i)) & 0xFF) + + gl = Glonass.from_bytes(bytes(string_bytes)) + string_number = gl.string_number + if string_number < 1 or string_number > 5 or gl.idle_chip: + return None + + # correlate by superframe and timing, similar to C++ logic + freq_id = msg.freq_id + superframe_unknown = False + needs_clear = False + for i in range(1, 6): + if i not in self.caches.glonass_strings[freq_id]: + continue + sf_prev = self.caches.glonass_string_superframes[freq_id].get(i, 0) + if sf_prev == 0 or gl.superframe_number == 0: + superframe_unknown = True + elif sf_prev != gl.superframe_number: + needs_clear = True + if superframe_unknown: + prev_time = self.caches.glonass_string_times[freq_id].get(i, 0.0) + if abs((prev_time - 2.0 * i) - (self.framer.last_log_time - 2.0 * string_number)) > 10: + needs_clear = True + + if needs_clear: + self.caches.glonass_strings[freq_id].clear() + self.caches.glonass_string_superframes[freq_id].clear() + self.caches.glonass_string_times[freq_id].clear() + + self.caches.glonass_strings[freq_id][string_number] = bytes(string_bytes) + self.caches.glonass_string_superframes[freq_id][string_number] = gl.superframe_number + self.caches.glonass_string_times[freq_id][string_number] = self.framer.last_log_time + + if msg.sv_id == 255: + # unknown SV id + return None + if len(self.caches.glonass_strings[freq_id]) != 5: + return None + + dat = messaging.new_message('ubloxGnss', valid=True) + eph = dat.ubloxGnss.init('glonassEphemeris') + eph.svId = msg.sv_id + eph.freqNum = msg.freq_id - 7 + + current_day = 0 + tk = 0 + + # string 1 + try: + s1 = 
Glonass.from_bytes(self.caches.glonass_strings[freq_id][1]).data + except Exception: + return None + assert isinstance(s1, Glonass.String1) + eph.p1 = int(s1.p1) + tk = int(s1.t_k) + eph.tkDEPRECATED = tk + eph.xVel = float(s1.x_vel) * math.pow(2, -20) + eph.xAccel = float(s1.x_accel) * math.pow(2, -30) + eph.x = float(s1.x) * math.pow(2, -11) + + # string 2 + try: + s2 = Glonass.from_bytes(self.caches.glonass_strings[freq_id][2]).data + except Exception: + return None + assert isinstance(s2, Glonass.String2) + eph.svHealth = int(s2.b_n >> 2) + eph.p2 = int(s2.p2) + eph.tb = int(s2.t_b) + eph.yVel = float(s2.y_vel) * math.pow(2, -20) + eph.yAccel = float(s2.y_accel) * math.pow(2, -30) + eph.y = float(s2.y) * math.pow(2, -11) + + # string 3 + try: + s3 = Glonass.from_bytes(self.caches.glonass_strings[freq_id][3]).data + except Exception: + return None + assert isinstance(s3, Glonass.String3) + eph.p3 = int(s3.p3) + eph.gammaN = float(s3.gamma_n) * math.pow(2, -40) + eph.svHealth = int(eph.svHealth | (1 if s3.l_n else 0)) + eph.zVel = float(s3.z_vel) * math.pow(2, -20) + eph.zAccel = float(s3.z_accel) * math.pow(2, -30) + eph.z = float(s3.z) * math.pow(2, -11) + + # string 4 + try: + s4 = Glonass.from_bytes(self.caches.glonass_strings[freq_id][4]).data + except Exception: + return None + assert isinstance(s4, Glonass.String4) + current_day = int(s4.n_t) + eph.nt = current_day + eph.tauN = float(s4.tau_n) * math.pow(2, -30) + eph.deltaTauN = float(s4.delta_tau_n) * math.pow(2, -30) + eph.age = int(s4.e_n) + eph.p4 = int(s4.p4) + eph.svURA = float(self.glonass_URA_lookup.get(int(s4.f_t), 0.0)) + # consistency check: SV slot number + # if it doesn't match, keep going but note mismatch (no logging here) + eph.svType = int(s4.m) + + # string 5 + try: + s5 = Glonass.from_bytes(self.caches.glonass_strings[freq_id][5]).data + except Exception: + return None + assert isinstance(s5, Glonass.String5) + eph.n4 = int(s5.n_4) + tk_seconds = int(SECS_IN_HR * ((tk >> 7) & 0x1F) + 
SECS_IN_MIN * ((tk >> 1) & 0x3F) + (tk & 0x1) * 30) + eph.tkSeconds = tk_seconds + + self.caches.glonass_strings[freq_id].clear() + return ('ubloxGnss', dat) + + def _gen_rxm_rawx(self, msg: Ubx.RxmRawx) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder]: + dat = messaging.new_message('ubloxGnss', valid=True) + mr = dat.ubloxGnss.init('measurementReport') + mr.rcvTow = msg.rcv_tow + mr.gpsWeek = msg.week + mr.leapSeconds = msg.leap_s + + mb = mr.init('measurements', msg.num_meas) + for i, m in enumerate(msg.meas): + mb[i].svId = m.sv_id + mb[i].pseudorange = m.pr_mes + mb[i].carrierCycles = m.cp_mes + mb[i].doppler = m.do_mes + mb[i].gnssId = int(m.gnss_id.value) + mb[i].glonassFrequencyIndex = m.freq_id + mb[i].locktime = m.lock_time + mb[i].cno = m.cno + mb[i].pseudorangeStdev = 0.01 * (math.pow(2, (m.pr_stdev & 15))) + mb[i].carrierPhaseStdev = 0.004 * (m.cp_stdev & 15) + mb[i].dopplerStdev = 0.002 * (math.pow(2, (m.do_stdev & 15))) + + ts = mb[i].init('trackingStatus') + trk = m.trk_stat + ts.pseudorangeValid = _bit(trk, 0) + ts.carrierPhaseValid = _bit(trk, 1) + ts.halfCycleValid = _bit(trk, 2) + ts.halfCycleSubtracted = _bit(trk, 3) + + mr.numMeas = msg.num_meas + rs = mr.init('receiverStatus') + rs.leapSecValid = _bit(msg.rec_stat, 0) + rs.clkReset = _bit(msg.rec_stat, 2) + return ('ubloxGnss', dat) + + def _gen_nav_sat(self, msg: Ubx.NavSat) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder]: + dat = messaging.new_message('ubloxGnss', valid=True) + sr = dat.ubloxGnss.init('satReport') + sr.iTow = msg.itow + svs = sr.init('svs', msg.num_svs) + for i, s in enumerate(msg.svs): + svs[i].svId = s.sv_id + svs[i].gnssId = int(s.gnss_id.value) + svs[i].flagsBitfield = s.flags + svs[i].cno = s.cno + svs[i].elevationDeg = s.elev + svs[i].azimuthDeg = s.azim + svs[i].pseudorangeResidual = s.pr_res * 0.1 + return ('ubloxGnss', dat) + + def _gen_mon_hw(self, msg: Ubx.MonHw) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder]: + dat = 
messaging.new_message('ubloxGnss', valid=True) + hw = dat.ubloxGnss.init('hwStatus') + hw.noisePerMS = msg.noise_per_ms + hw.flags = msg.flags + hw.agcCnt = msg.agc_cnt + hw.aStatus = int(msg.a_status.value) + hw.aPower = int(msg.a_power.value) + hw.jamInd = msg.jam_ind + return ('ubloxGnss', dat) + + def _gen_mon_hw2(self, msg: Ubx.MonHw2) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder]: + dat = messaging.new_message('ubloxGnss', valid=True) + hw = dat.ubloxGnss.init('hwStatus2') + hw.ofsI = msg.ofs_i + hw.magI = msg.mag_i + hw.ofsQ = msg.ofs_q + hw.magQ = msg.mag_q + # Map Ubx enum to cereal enum {undefined=0, rom=1, otp=2, configpins=3, flash=4} + cfg_map = { + Ubx.MonHw2.ConfigSource.rom: 1, + Ubx.MonHw2.ConfigSource.otp: 2, + Ubx.MonHw2.ConfigSource.config_pins: 3, + Ubx.MonHw2.ConfigSource.flash: 4, + } + hw.cfgSource = cfg_map.get(msg.cfg_source, 0) + hw.lowLevCfg = msg.low_lev_cfg + hw.postStatus = msg.post_status + return ('ubloxGnss', dat) + + +def main(): + parser = UbloxMsgParser() + pm = messaging.PubMaster(['ubloxGnss', 'gpsLocationExternal']) + sock = messaging.sub_sock('ubloxRaw', timeout=100, conflate=False) + + while True: + msg = messaging.recv_one_or_none(sock) + if msg is None: + continue + + data = bytes(msg.ubloxRaw) + log_time = msg.logMonoTime * 1e-9 + frames = parser.framer.add_data(log_time, data) + for frame in frames: + try: + res = parser.parse_frame(frame) + except Exception: + continue + if not res: + continue + service, dat = res + pm.send(service, dat) + +if __name__ == '__main__': + main() diff --git a/system/ui/lib/networkmanager.py b/system/ui/lib/networkmanager.py new file mode 100644 index 0000000000..07b5d42f4b --- /dev/null +++ b/system/ui/lib/networkmanager.py @@ -0,0 +1,44 @@ +from enum import IntEnum + + +# NetworkManager device states +class NMDeviceState(IntEnum): + UNKNOWN = 0 + DISCONNECTED = 30 + PREPARE = 40 + STATE_CONFIG = 50 + NEED_AUTH = 60 + IP_CONFIG = 70 + ACTIVATED = 100 + DEACTIVATING = 110 + + +# 
NetworkManager constants +NM = "org.freedesktop.NetworkManager" +NM_PATH = '/org/freedesktop/NetworkManager' +NM_IFACE = 'org.freedesktop.NetworkManager' +NM_ACCESS_POINT_IFACE = 'org.freedesktop.NetworkManager.AccessPoint' +NM_SETTINGS_PATH = '/org/freedesktop/NetworkManager/Settings' +NM_SETTINGS_IFACE = 'org.freedesktop.NetworkManager.Settings' +NM_CONNECTION_IFACE = 'org.freedesktop.NetworkManager.Settings.Connection' +NM_WIRELESS_IFACE = 'org.freedesktop.NetworkManager.Device.Wireless' +NM_PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties' +NM_DEVICE_IFACE = "org.freedesktop.NetworkManager.Device" + +NM_DEVICE_TYPE_WIFI = 2 +NM_DEVICE_TYPE_MODEM = 8 +NM_DEVICE_STATE_REASON_SUPPLICANT_DISCONNECT = 8 +NM_DEVICE_STATE_REASON_NEW_ACTIVATION = 60 + +# https://developer.gnome.org/NetworkManager/1.26/nm-dbus-types.html#NM80211ApFlags +NM_802_11_AP_FLAGS_NONE = 0x0 +NM_802_11_AP_FLAGS_PRIVACY = 0x1 +NM_802_11_AP_FLAGS_WPS = 0x2 + +# https://developer.gnome.org/NetworkManager/1.26/nm-dbus-types.html#NM80211ApSecurityFlags +NM_802_11_AP_SEC_PAIR_WEP40 = 0x00000001 +NM_802_11_AP_SEC_PAIR_WEP104 = 0x00000002 +NM_802_11_AP_SEC_GROUP_WEP40 = 0x00000010 +NM_802_11_AP_SEC_GROUP_WEP104 = 0x00000020 +NM_802_11_AP_SEC_KEY_MGMT_PSK = 0x00000100 +NM_802_11_AP_SEC_KEY_MGMT_802_1X = 0x00000200 diff --git a/system/ui/lib/wifi_manager.py b/system/ui/lib/wifi_manager.py index 4cb741bc95..af9ae943ea 100644 --- a/system/ui/lib/wifi_manager.py +++ b/system/ui/lib/wifi_manager.py @@ -1,52 +1,35 @@ -import asyncio -import concurrent.futures -import copy +import atexit import threading import time import uuid from collections.abc import Callable from dataclasses import dataclass from enum import IntEnum -from typing import TypeVar +from typing import Any -from dbus_next.aio import MessageBus -from dbus_next import BusType, Variant, Message -from dbus_next.errors import DBusError -from dbus_next.constants import MessageType +from jeepney import DBusAddress, new_method_call +from 
jeepney.bus_messages import MatchRule, message_bus +from jeepney.io.blocking import open_dbus_connection as open_dbus_connection_blocking +from jeepney.io.threading import DBusRouter, open_dbus_connection as open_dbus_connection_threading +from jeepney.low_level import MessageType +from jeepney.wrappers import Properties -try: - from openpilot.common.params import Params -except ImportError: - # Params/Cythonized modules are not available in zipapp - Params = None from openpilot.common.swaglog import cloudlog - -T = TypeVar("T") - -# NetworkManager constants -NM = "org.freedesktop.NetworkManager" -NM_PATH = '/org/freedesktop/NetworkManager' -NM_IFACE = 'org.freedesktop.NetworkManager' -NM_SETTINGS_PATH = '/org/freedesktop/NetworkManager/Settings' -NM_SETTINGS_IFACE = 'org.freedesktop.NetworkManager.Settings' -NM_CONNECTION_IFACE = 'org.freedesktop.NetworkManager.Settings.Connection' -NM_WIRELESS_IFACE = 'org.freedesktop.NetworkManager.Device.Wireless' -NM_PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties' -NM_DEVICE_IFACE = "org.freedesktop.NetworkManager.Device" - -NM_DEVICE_STATE_REASON_SUPPLICANT_DISCONNECT = 8 +from openpilot.system.ui.lib.networkmanager import (NM, NM_WIRELESS_IFACE, NM_802_11_AP_SEC_PAIR_WEP40, + NM_802_11_AP_SEC_PAIR_WEP104, NM_802_11_AP_SEC_GROUP_WEP40, + NM_802_11_AP_SEC_GROUP_WEP104, NM_802_11_AP_SEC_KEY_MGMT_PSK, + NM_802_11_AP_SEC_KEY_MGMT_802_1X, NM_802_11_AP_FLAGS_NONE, + NM_802_11_AP_FLAGS_PRIVACY, NM_802_11_AP_FLAGS_WPS, + NM_PATH, NM_IFACE, NM_ACCESS_POINT_IFACE, NM_SETTINGS_PATH, + NM_SETTINGS_IFACE, NM_CONNECTION_IFACE, NM_DEVICE_IFACE, + NM_DEVICE_TYPE_WIFI, NM_DEVICE_STATE_REASON_SUPPLICANT_DISCONNECT, + NM_DEVICE_STATE_REASON_NEW_ACTIVATION, + NMDeviceState) TETHERING_IP_ADDRESS = "192.168.43.1" DEFAULT_TETHERING_PASSWORD = "swagswagcomma" - - -# NetworkManager device states -class NMDeviceState(IntEnum): - DISCONNECTED = 30 - PREPARE = 40 - NEED_AUTH = 60 - IP_CONFIG = 70 - ACTIVATED = 100 +SIGNAL_QUEUE_SIZE = 10 
+SCAN_PERIOD_SECONDS = 10 class SecurityType(IntEnum): @@ -57,673 +40,387 @@ class SecurityType(IntEnum): UNSUPPORTED = 4 -@dataclass -class NetworkInfo: +def get_security_type(flags: int, wpa_flags: int, rsn_flags: int) -> SecurityType: + wpa_props = wpa_flags | rsn_flags + + # obtained by looking at flags of networks in the office as reported by an Android phone + supports_wpa = (NM_802_11_AP_SEC_PAIR_WEP40 | NM_802_11_AP_SEC_PAIR_WEP104 | NM_802_11_AP_SEC_GROUP_WEP40 | + NM_802_11_AP_SEC_GROUP_WEP104 | NM_802_11_AP_SEC_KEY_MGMT_PSK) + + if (flags == NM_802_11_AP_FLAGS_NONE) or ((flags & NM_802_11_AP_FLAGS_WPS) and not (wpa_props & supports_wpa)): + return SecurityType.OPEN + elif (flags & NM_802_11_AP_FLAGS_PRIVACY) and (wpa_props & supports_wpa) and not (wpa_props & NM_802_11_AP_SEC_KEY_MGMT_802_1X): + return SecurityType.WPA + else: + cloudlog.warning(f"Unsupported network! flags: {flags}, wpa_flags: {wpa_flags}, rsn_flags: {rsn_flags}") + return SecurityType.UNSUPPORTED + + +@dataclass(frozen=True) +class Network: ssid: str strength: int is_connected: bool security_type: SecurityType - path: str + is_saved: bool + + @classmethod + def from_dbus(cls, ssid: str, aps: list["AccessPoint"], is_saved: bool) -> "Network": + # we only want to show the strongest AP for each Network/SSID + strongest_ap = max(aps, key=lambda ap: ap.strength) + is_connected = any(ap.is_connected for ap in aps) + security_type = get_security_type(strongest_ap.flags, strongest_ap.wpa_flags, strongest_ap.rsn_flags) + + return cls( + ssid=ssid, + strength=strongest_ap.strength, + is_connected=is_connected and is_saved, + security_type=security_type, + is_saved=is_saved, + ) + + +@dataclass(frozen=True) +class AccessPoint: + ssid: str bssid: str - is_saved: bool = False - # saved_path: str + strength: int + is_connected: bool + flags: int + wpa_flags: int + rsn_flags: int + ap_path: str + @classmethod + def from_dbus(cls, ap_props: dict[str, tuple[str, Any]], ap_path: str, active_ap_path: 
str) -> "AccessPoint": + ssid = bytes(ap_props['Ssid'][1]).decode("utf-8", "replace") + bssid = str(ap_props['HwAddress'][1]) + strength = int(ap_props['Strength'][1]) + flags = int(ap_props['Flags'][1]) + wpa_flags = int(ap_props['WpaFlags'][1]) + rsn_flags = int(ap_props['RsnFlags'][1]) -@dataclass -class WifiManagerCallbacks: - need_auth: Callable[[str], None] | None = None - activated: Callable[[], None] | None = None - forgotten: Callable[[str], None] | None = None - networks_updated: Callable[[list[NetworkInfo]], None] | None = None - connection_failed: Callable[[str, str], None] | None = None # Added for error feedback + return cls( + ssid=ssid, + bssid=bssid, + strength=strength, + is_connected=ap_path == active_ap_path, + flags=flags, + wpa_flags=wpa_flags, + rsn_flags=rsn_flags, + ap_path=ap_path, + ) class WifiManager: - def __init__(self, callbacks): - self.callbacks: WifiManagerCallbacks = callbacks - self.networks: list[NetworkInfo] = [] - self.bus: MessageBus = None - self.device_path: str = "" - self.device_proxy = None - self.saved_connections: dict[str, str] = {} - self.active_ap_path: str = "" - self.scan_task: asyncio.Task | None = None - # Set tethering ssid as "weedle" + first 4 characters of a dongle id - self._tethering_ssid = "weedle" - if Params is not None: - dongle_id = Params().get("DongleId") - if dongle_id: - self._tethering_ssid += "-" + dongle_id[:4] - self.running: bool = True - self._current_connection_ssid: str | None = None + def __init__(self): + self._networks = [] # a network can be comprised of multiple APs + self._active = True # used to not run when not in settings + self._exit = False - async def connect(self) -> None: - """Connect to the DBus system bus.""" + # DBus connections try: - self.bus = await MessageBus(bus_type=BusType.SYSTEM).connect() - while not await self._find_wifi_device(): - await asyncio.sleep(1) + self._router_main = DBusRouter(open_dbus_connection_threading(bus="SYSTEM")) # used by scanner / general 
method calls + self._conn_monitor = open_dbus_connection_blocking(bus="SYSTEM") # used by state monitor thread + self._nm = DBusAddress(NM_PATH, bus_name=NM, interface=NM_IFACE) + except FileNotFoundError: + cloudlog.exception("Failed to connect to system D-Bus") + self._exit = True - await self._setup_signals(self.device_path) - self.active_ap_path = await self.get_active_access_point() - await self.add_tethering_connection(self._tethering_ssid, DEFAULT_TETHERING_PASSWORD) - self.saved_connections = await self._get_saved_connections() - self.scan_task = asyncio.create_task(self._periodic_scan()) - except DBusError as e: - cloudlog.error(f"Failed to connect to DBus: {e}") - raise - except Exception as e: - cloudlog.error(f"Unexpected error during connect: {e}") - raise + # Store wifi device path + self._wifi_device: str | None = None - async def shutdown(self) -> None: - self.running = False - if self.scan_task: - self.scan_task.cancel() - try: - await self.scan_task - except asyncio.CancelledError: - pass - if self.bus: - self.bus.disconnect() + # State + self._connecting_to_ssid: str = "" + self._last_network_update: float = 0.0 + self._callback_queue: list[Callable] = [] - async def _request_scan(self) -> None: - try: - interface = self.device_proxy.get_interface(NM_WIRELESS_IFACE) - await interface.call_request_scan({}) - except DBusError as e: - cloudlog.warning(f"Scan request failed: {str(e)}") + # Callbacks + self._need_auth: Callable[[str], None] | None = None + self._activated: Callable[[], None] | None = None + self._forgotten: Callable[[], None] | None = None + self._networks_updated: Callable[[list[Network]], None] | None = None + self._disconnected: Callable[[], None] | None = None - async def get_active_access_point(self): - try: - props_iface = self.device_proxy.get_interface(NM_PROPERTIES_IFACE) - ap_path = await props_iface.call_get(NM_WIRELESS_IFACE, 'ActiveAccessPoint') - return ap_path.value - except DBusError as e: - cloudlog.error(f"Error 
fetching active access point: {str(e)}") - return '' + self._lock = threading.Lock() - async def forget_connection(self, ssid: str) -> bool: - path = self.saved_connections.get(ssid) - if not path: - return False + self._scan_thread = threading.Thread(target=self._network_scanner, daemon=True) + self._scan_thread.start() - try: - nm_iface = await self._get_interface(NM, path, NM_CONNECTION_IFACE) - await nm_iface.call_delete() + self._state_thread = threading.Thread(target=self._monitor_state, daemon=True) + self._state_thread.start() - if self._current_connection_ssid == ssid: - self._current_connection_ssid = None + atexit.register(self.stop) - if ssid in self.saved_connections: - del self.saved_connections[ssid] + def set_callbacks(self, need_auth: Callable[[str], None], + activated: Callable[[], None] | None, + forgotten: Callable[[], None], + networks_updated: Callable[[list[Network]], None], + disconnected: Callable[[], None]): + self._need_auth = need_auth + self._activated = activated + self._forgotten = forgotten + self._networks_updated = networks_updated + self._disconnected = disconnected - for network in self.networks: - if network.ssid == ssid: - network.is_saved = False - network.is_connected = False + def _enqueue_callback(self, cb: Callable, *args): + self._callback_queue.append(lambda: cb(*args)) + + def process_callbacks(self): + # Call from UI thread to run any pending callbacks + to_run, self._callback_queue = self._callback_queue, [] + for cb in to_run: + cb() + + def set_active(self, active: bool): + self._active = active + + # Scan immediately if we haven't scanned in a while + if active and time.monotonic() - self._last_network_update > SCAN_PERIOD_SECONDS / 2: + self._last_network_update = 0.0 + + def _monitor_state(self): + device_path = self._wait_for_wifi_device() + if device_path is None: + return + + rule = MatchRule( + type="signal", + interface=NM_DEVICE_IFACE, + member="StateChanged", + path=device_path, + ) + + # Filter for 
StateChanged signal + self._conn_monitor.send_and_get_reply(message_bus.AddMatch(rule)) + + with self._conn_monitor.filter(rule, bufsize=SIGNAL_QUEUE_SIZE) as q: + while not self._exit: + if not self._active: + time.sleep(1) + continue + + # Block until a matching signal arrives + try: + msg = self._conn_monitor.recv_until_filtered(q, timeout=1) + except TimeoutError: + continue + + new_state, previous_state, change_reason = msg.body + + # BAD PASSWORD + if new_state == NMDeviceState.NEED_AUTH and change_reason == NM_DEVICE_STATE_REASON_SUPPLICANT_DISCONNECT and len(self._connecting_to_ssid): + self.forget_connection(self._connecting_to_ssid, block=True) + if self._need_auth is not None: + self._enqueue_callback(self._need_auth, self._connecting_to_ssid) + self._connecting_to_ssid = "" + + elif new_state == NMDeviceState.ACTIVATED: + if self._activated is not None: + self._update_networks() + self._enqueue_callback(self._activated) + self._connecting_to_ssid = "" + + elif new_state == NMDeviceState.DISCONNECTED and change_reason != NM_DEVICE_STATE_REASON_NEW_ACTIVATION: + self._connecting_to_ssid = "" + if self._disconnected is not None: + self._enqueue_callback(self._disconnected) + + def _network_scanner(self): + self._wait_for_wifi_device() + + while not self._exit: + if self._active: + if time.monotonic() - self._last_network_update > SCAN_PERIOD_SECONDS: + # Scan for networks every 10 seconds + # TODO: should update when scan is complete (PropertiesChanged), but this is more than good enough for now + self._update_networks() + self._request_scan() + self._last_network_update = time.monotonic() + time.sleep(1 / 2.) 
+ + def _wait_for_wifi_device(self) -> str | None: + with self._lock: + device_path: str | None = None + while not self._exit: + device_path = self._get_wifi_device() + if device_path is not None: break + time.sleep(1) + return device_path - # Notify UI of forgotten connection - if self.callbacks.networks_updated: - self.callbacks.networks_updated(copy.deepcopy(self.networks)) + def _get_wifi_device(self) -> str | None: + if self._wifi_device is not None: + return self._wifi_device - return True - except DBusError as e: - cloudlog.error(f"Failed to delete connection for SSID: {ssid}. Error: {e}") - return False + device_paths = self._router_main.send_and_get_reply(new_method_call(self._nm, 'GetDevices')).body[0] + for device_path in device_paths: + dev_addr = DBusAddress(device_path, bus_name=NM, interface=NM_DEVICE_IFACE) + dev_type = self._router_main.send_and_get_reply(Properties(dev_addr).get('DeviceType')).body[0][1] - async def activate_connection(self, ssid: str) -> bool: - connection_path = self.saved_connections.get(ssid) - if not connection_path: - return False - try: - nm_iface = await self._get_interface(NM, NM_PATH, NM_IFACE) - await nm_iface.call_activate_connection(connection_path, self.device_path, "/") - return True - except DBusError as e: - cloudlog.error(f"Failed to activate connection {ssid}: {str(e)}") - return False + if dev_type == NM_DEVICE_TYPE_WIFI: + self._wifi_device = device_path + break - async def connect_to_network(self, ssid: str, password: str = None, bssid: str = None, is_hidden: bool = False) -> None: - """Connect to a selected Wi-Fi network.""" - try: - self._current_connection_ssid = ssid + return self._wifi_device - if ssid in self.saved_connections: - # Forget old connection if new password provided - if password: - await self.forget_connection(ssid) - await asyncio.sleep(0.2) # NetworkManager delay - else: - # Just activate existing connection - await self.activate_connection(ssid) - return + def _get_connections(self) -> 
dict[str, str]: + settings_addr = DBusAddress(NM_SETTINGS_PATH, bus_name=NM, interface=NM_SETTINGS_IFACE) + known_connections = self._router_main.send_and_get_reply(new_method_call(settings_addr, 'ListConnections')).body[0] + + conns: dict[str, str] = {} + for conn_path in known_connections: + conn_addr = DBusAddress(conn_path, bus_name=NM, interface=NM_CONNECTION_IFACE) + reply = self._router_main.send_and_get_reply(new_method_call(conn_addr, "GetSettings")) + + # ignore connections removed during iteration (need auth, etc.) + if reply.header.message_type == MessageType.error: + cloudlog.warning(f"Failed to get connection properties for {conn_path}") + continue + + settings = reply.body[0] + if "802-11-wireless" in settings: + ssid = settings['802-11-wireless']['ssid'][1].decode("utf-8", "replace") + if ssid != "": + conns[ssid] = conn_path + return conns + + def connect_to_network(self, ssid: str, password: str): + def worker(): + # Clear all connections that may already exist to the network we are connecting to + self._connecting_to_ssid = ssid + self.forget_connection(ssid, block=True) + + is_hidden = False connection = { 'connection': { - 'type': Variant('s', '802-11-wireless'), - 'uuid': Variant('s', str(uuid.uuid4())), - 'id': Variant('s', f'openpilot connection {ssid}'), - 'autoconnect-retries': Variant('i', 0), + 'type': ('s', '802-11-wireless'), + 'uuid': ('s', str(uuid.uuid4())), + 'id': ('s', f'openpilot connection {ssid}'), + 'autoconnect-retries': ('i', 0), }, '802-11-wireless': { - 'ssid': Variant('ay', ssid.encode('utf-8')), - 'hidden': Variant('b', is_hidden), - 'mode': Variant('s', 'infrastructure'), + 'ssid': ('ay', ssid.encode("utf-8")), + 'hidden': ('b', is_hidden), + 'mode': ('s', 'infrastructure'), }, 'ipv4': { - 'method': Variant('s', 'auto'), - 'dns-priority': Variant('i', 600), + 'method': ('s', 'auto'), + 'dns-priority': ('i', 600), }, - 'ipv6': {'method': Variant('s', 'ignore')}, + 'ipv6': {'method': ('s', 'ignore')}, } - if bssid: - 
connection['802-11-wireless']['bssid'] = Variant('ay', bssid.encode('utf-8')) - if password: connection['802-11-wireless-security'] = { - 'key-mgmt': Variant('s', 'wpa-psk'), - 'auth-alg': Variant('s', 'open'), - 'psk': Variant('s', password), + 'key-mgmt': ('s', 'wpa-psk'), + 'auth-alg': ('s', 'open'), + 'psk': ('s', password), } - nm_iface = await self._get_interface(NM, NM_PATH, NM_IFACE) - await nm_iface.call_add_and_activate_connection(connection, self.device_path, "/") - except Exception as e: - self._current_connection_ssid = None - cloudlog.error(f"Error connecting to network: {e}") - # Notify UI of failure - if self.callbacks.connection_failed: - self.callbacks.connection_failed(ssid, str(e)) + settings_addr = DBusAddress(NM_SETTINGS_PATH, bus_name=NM, interface=NM_SETTINGS_IFACE) + self._router_main.send_and_get_reply(new_method_call(settings_addr, 'AddConnection', 'a{sa{sv}}', (connection,))) + self.activate_connection(ssid, block=True) - def is_saved(self, ssid: str) -> bool: - return ssid in self.saved_connections + threading.Thread(target=worker, daemon=True).start() - async def _find_wifi_device(self) -> bool: - nm_iface = await self._get_interface(NM, NM_PATH, NM_IFACE) - devices = await nm_iface.get_devices() + def forget_connection(self, ssid: str, block: bool = False): + def worker(): + conn_path = self._get_connections().get(ssid, None) + if conn_path is not None: + conn_addr = DBusAddress(conn_path, bus_name=NM, interface=NM_CONNECTION_IFACE) + self._router_main.send_and_get_reply(new_method_call(conn_addr, 'Delete')) - for device_path in devices: - device = await self.bus.introspect(NM, device_path) - device_proxy = self.bus.get_proxy_object(NM, device_path, device) - device_interface = device_proxy.get_interface(NM_DEVICE_IFACE) - device_type = await device_interface.get_device_type() # type: ignore[attr-defined] - if device_type == 2: # Wi-Fi device - self.device_path = device_path - self.device_proxy = device_proxy - return True + if 
self._forgotten is not None: + self._update_networks() + self._enqueue_callback(self._forgotten) - return False + if block: + worker() + else: + threading.Thread(target=worker, daemon=True).start() - async def add_tethering_connection(self, ssid: str, password: str = "12345678") -> bool: - """Create a WiFi tethering connection.""" - if len(password) < 8: - print("Tethering password must be at least 8 characters") - return False + def activate_connection(self, ssid: str, block: bool = False): + def worker(): + conn_path = self._get_connections().get(ssid, None) + if conn_path is not None: + if self._wifi_device is None: + cloudlog.warning("No WiFi device found") + return - try: - # First, check if a hotspot connection already exists - settings_iface = await self._get_interface(NM, NM_SETTINGS_PATH, NM_SETTINGS_IFACE) - connection_paths = await settings_iface.call_list_connections() + self._connecting_to_ssid = ssid + self._router_main.send(new_method_call(self._nm, 'ActivateConnection', 'ooo', + (conn_path, self._wifi_device, "/"))) + + if block: + worker() + else: + threading.Thread(target=worker, daemon=True).start() + + def _request_scan(self): + if self._wifi_device is None: + cloudlog.warning("No WiFi device found") + return + + wifi_addr = DBusAddress(self._wifi_device, bus_name=NM, interface=NM_WIRELESS_IFACE) + reply = self._router_main.send_and_get_reply(new_method_call(wifi_addr, 'RequestScan', 'a{sv}', ({},))) + + if reply.header.message_type == MessageType.error: + cloudlog.warning(f"Failed to request scan: {reply}") + + def _update_networks(self): + with self._lock: + if self._wifi_device is None: + cloudlog.warning("No WiFi device found") + return + + # returns '/' if no active AP + wifi_addr = DBusAddress(self._wifi_device, NM, interface=NM_WIRELESS_IFACE) + active_ap_path = self._router_main.send_and_get_reply(Properties(wifi_addr).get('ActiveAccessPoint')).body[0][1] + ap_paths = self._router_main.send_and_get_reply(new_method_call(wifi_addr, 
'GetAllAccessPoints')).body[0] + + aps: dict[str, list[AccessPoint]] = {} + + for ap_path in ap_paths: + ap_addr = DBusAddress(ap_path, NM, interface=NM_ACCESS_POINT_IFACE) + ap_props = self._router_main.send_and_get_reply(Properties(ap_addr).get_all()) + + # some APs have been seen dropping off during iteration + if ap_props.header.message_type == MessageType.error: + cloudlog.warning(f"Failed to get AP properties for {ap_path}") + continue - # Look for an existing hotspot connection - for path in connection_paths: try: - settings = await self._get_connection_settings(path) - conn_type = settings.get('connection', {}).get('type', Variant('s', '')).value - wifi_mode = settings.get('802-11-wireless', {}).get('mode', Variant('s', '')).value + ap = AccessPoint.from_dbus(ap_props.body[0], ap_path, active_ap_path) + if ap.ssid == "": + continue - if conn_type == '802-11-wireless' and wifi_mode == 'ap': - # Extract the SSID to check - connection_ssid = self._extract_ssid(settings) - if connection_ssid == ssid: - return True - except DBusError: - continue + if ap.ssid not in aps: + aps[ap.ssid] = [] - connection = { - 'connection': { - 'id': Variant('s', 'Hotspot'), - 'uuid': Variant('s', str(uuid.uuid4())), - 'type': Variant('s', '802-11-wireless'), - 'interface-name': Variant('s', 'wlan0'), - 'autoconnect': Variant('b', False), - }, - '802-11-wireless': { - 'band': Variant('s', 'bg'), - 'mode': Variant('s', 'ap'), - 'ssid': Variant('ay', ssid.encode('utf-8')), - }, - '802-11-wireless-security': { - 'group': Variant('as', ['ccmp']), - 'key-mgmt': Variant('s', 'wpa-psk'), - 'pairwise': Variant('as', ['ccmp']), - 'proto': Variant('as', ['rsn']), - 'psk': Variant('s', password), - }, - 'ipv4': { - 'method': Variant('s', 'shared'), - 'address-data': Variant('aa{sv}', [{'address': Variant('s', TETHERING_IP_ADDRESS), 'prefix': Variant('u', 24)}]), - 'gateway': Variant('s', TETHERING_IP_ADDRESS), - 'never-default': Variant('b', True), - }, - 'ipv6': { - 'method': Variant('s', 
'ignore'), - }, - } + aps[ap.ssid].append(ap) + except Exception: + # catch all for parsing errors + cloudlog.exception(f"Failed to parse AP properties for {ap_path}") - settings_iface = await self._get_interface(NM, NM_SETTINGS_PATH, NM_SETTINGS_IFACE) - new_connection = await settings_iface.call_add_connection(connection) - print(f"Added tethering connection with path: {new_connection}") - return True - except DBusError as e: - print(f"Failed to add tethering connection: {e}") - return False - except Exception as e: - print(f"Unexpected error adding tethering connection: {e}") - return False + known_connections = self._get_connections() + networks = [Network.from_dbus(ssid, ap_list, ssid in known_connections) for ssid, ap_list in aps.items()] + networks.sort(key=lambda n: (-n.is_connected, -n.strength, n.ssid.lower())) + self._networks = networks - async def get_tethering_password(self) -> str: - """Get the current tethering password.""" - try: - hotspot_path = self.saved_connections.get(self._tethering_ssid) - if hotspot_path: - conn_iface = await self._get_interface(NM, hotspot_path, NM_CONNECTION_IFACE) - secrets = await conn_iface.call_get_secrets('802-11-wireless-security') - if secrets and '802-11-wireless-security' in secrets: - psk = secrets.get('802-11-wireless-security', {}).get('psk', Variant('s', '')).value - return str(psk) if psk is not None else "" - return "" - except DBusError as e: - print(f"Failed to get tethering password: {e}") - return "" - except Exception as e: - print(f"Unexpected error getting tethering password: {e}") - return "" + if self._networks_updated is not None: + self._enqueue_callback(self._networks_updated, self._networks) - async def set_tethering_password(self, password: str) -> bool: - """Set the tethering password.""" - if len(password) < 8: - cloudlog.error("Tethering password must be at least 8 characters") - return False + def __del__(self): + self.stop() - try: - hotspot_path = 
self.saved_connections.get(self._tethering_ssid) - if not hotspot_path: - print("No hotspot connection found") - return False + def stop(self): + if not self._exit: + self._exit = True + self._scan_thread.join() + self._state_thread.join() - # Update the connection settings with new password - settings = await self._get_connection_settings(hotspot_path) - if '802-11-wireless-security' not in settings: - settings['802-11-wireless-security'] = {} - settings['802-11-wireless-security']['psk'] = Variant('s', password) - - # Apply changes - conn_iface = await self._get_interface(NM, hotspot_path, NM_CONNECTION_IFACE) - await conn_iface.call_update(settings) - - # Check if connection is active and restart if needed - is_active = False - nm_iface = await self._get_interface(NM, NM_PATH, NM_IFACE) - active_connections = await nm_iface.get_active_connections() - - for conn_path in active_connections: - props_iface = await self._get_interface(NM, conn_path, NM_PROPERTIES_IFACE) - conn_id_path = await props_iface.call_get('org.freedesktop.NetworkManager.Connection.Active', 'Connection') - if conn_id_path.value == hotspot_path: - is_active = True - await nm_iface.call_deactivate_connection(conn_path) - break - - if is_active: - await nm_iface.call_activate_connection(hotspot_path, self.device_path, "/") - - print("Tethering password updated successfully") - return True - except DBusError as e: - print(f"Failed to set tethering password: {e}") - return False - except Exception as e: - print(f"Unexpected error setting tethering password: {e}") - return False - - async def is_tethering_active(self) -> bool: - """Check if tethering is active for the specified SSID.""" - try: - hotspot_path = self.saved_connections.get(self._tethering_ssid) - if not hotspot_path: - return False - - nm_iface = await self._get_interface(NM, NM_PATH, NM_IFACE) - active_connections = await nm_iface.get_active_connections() - - for conn_path in active_connections: - props_iface = await 
self._get_interface(NM, conn_path, NM_PROPERTIES_IFACE) - conn_id_path = await props_iface.call_get('org.freedesktop.NetworkManager.Connection.Active', 'Connection') - - if conn_id_path.value == hotspot_path: - return True - - return False - except Exception: - return False - - async def _periodic_scan(self): - while self.running: - try: - await self._request_scan() - await asyncio.sleep(30) - except asyncio.CancelledError: - break - except DBusError as e: - cloudlog.error(f"Scan failed: {e}") - await asyncio.sleep(5) - - async def _setup_signals(self, device_path: str) -> None: - rules = [ - f"type='signal',interface='{NM_PROPERTIES_IFACE}',member='PropertiesChanged',path='{device_path}'", - f"type='signal',interface='{NM_DEVICE_IFACE}',member='StateChanged',path='{device_path}'", - f"type='signal',interface='{NM_SETTINGS_IFACE}',member='NewConnection',path='{NM_SETTINGS_PATH}'", - f"type='signal',interface='{NM_SETTINGS_IFACE}',member='ConnectionRemoved',path='{NM_SETTINGS_PATH}'", - ] - for rule in rules: - await self._add_match_rule(rule) - - # Set up signal handlers - self.device_proxy.get_interface(NM_PROPERTIES_IFACE).on_properties_changed(self._on_properties_changed) - self.device_proxy.get_interface(NM_DEVICE_IFACE).on_state_changed(self._on_state_changed) - - settings_iface = await self._get_interface(NM, NM_SETTINGS_PATH, NM_SETTINGS_IFACE) - settings_iface.on_new_connection(self._on_new_connection) - settings_iface.on_connection_removed(self._on_connection_removed) - - def _on_properties_changed(self, interface: str, changed: dict, invalidated: list): - if interface == NM_WIRELESS_IFACE and 'LastScan' in changed: - asyncio.create_task(self._refresh_networks()) - elif interface == NM_WIRELESS_IFACE and "ActiveAccessPoint" in changed: - new_ap_path = changed["ActiveAccessPoint"].value - if self.active_ap_path != new_ap_path: - self.active_ap_path = new_ap_path - - def _on_state_changed(self, new_state: int, old_state: int, reason: int): - if new_state == 
NMDeviceState.ACTIVATED: - if self.callbacks.activated: - self.callbacks.activated() - self._current_connection_ssid = None - asyncio.create_task(self._refresh_networks()) - elif new_state in (NMDeviceState.DISCONNECTED, NMDeviceState.NEED_AUTH): - for network in self.networks: - network.is_connected = False - - # BAD PASSWORD - if new_state == NMDeviceState.NEED_AUTH and reason == NM_DEVICE_STATE_REASON_SUPPLICANT_DISCONNECT and self.callbacks.need_auth: - if self._current_connection_ssid: - asyncio.create_task(self.forget_connection(self._current_connection_ssid)) - self.callbacks.need_auth(self._current_connection_ssid) - else: - # Try to find the network from active_ap_path - for network in self.networks: - if network.path == self.active_ap_path: - asyncio.create_task(self.forget_connection(network.ssid)) - self.callbacks.need_auth(network.ssid) - break - else: - # Couldn't identify the network that needs auth - cloudlog.error("Network needs authentication but couldn't identify which one") - - def _on_new_connection(self, path: str) -> None: - """Callback for NewConnection signal.""" - asyncio.create_task(self._add_saved_connection(path)) - - def _on_connection_removed(self, path: str) -> None: - """Callback for ConnectionRemoved signal.""" - for ssid, p in list(self.saved_connections.items()): - if path == p: - del self.saved_connections[ssid] - - if self.callbacks.forgotten: - self.callbacks.forgotten(ssid) - break - - async def _add_saved_connection(self, path: str) -> None: - """Add a new saved connection to the dictionary.""" - try: - settings = await self._get_connection_settings(path) - if ssid := self._extract_ssid(settings): - self.saved_connections[ssid] = path - except DBusError as e: - cloudlog.error(f"Failed to add connection {path}: {e}") - - def _extract_ssid(self, settings: dict) -> str | None: - """Extract SSID from connection settings.""" - ssid_variant = settings.get('802-11-wireless', {}).get('ssid', Variant('ay', b'')).value - return 
bytes(ssid_variant).decode('utf-8') if ssid_variant else None - - async def _add_match_rule(self, rule): - """Add a match rule on the bus.""" - reply = await self.bus.call( - Message( - message_type=MessageType.METHOD_CALL, - destination='org.freedesktop.DBus', - interface="org.freedesktop.DBus", - path='/org/freedesktop/DBus', - member='AddMatch', - signature='s', - body=[rule], - ) - ) - - assert reply.message_type == MessageType.METHOD_RETURN - return reply - - async def _refresh_networks(self): - """Get a list of available networks via NetworkManager.""" - wifi_iface = self.device_proxy.get_interface(NM_WIRELESS_IFACE) - access_points = await wifi_iface.get_access_points() - self.active_ap_path = await self.get_active_access_point() - network_dict = {} - for ap_path in access_points: - try: - props_iface = await self._get_interface(NM, ap_path, NM_PROPERTIES_IFACE) - properties = await props_iface.call_get_all('org.freedesktop.NetworkManager.AccessPoint') - ssid_variant = properties['Ssid'].value - ssid = bytes(ssid_variant).decode('utf-8') - if not ssid: - continue - - bssid = properties.get('HwAddress', Variant('s', '')).value - strength = properties['Strength'].value - flags = properties['Flags'].value - wpa_flags = properties['WpaFlags'].value - rsn_flags = properties['RsnFlags'].value - - # May be multiple access points for each SSID. 
Use first for ssid - # and security type, then update the rest using all APs - if ssid not in network_dict: - network_dict[ssid] = NetworkInfo( - ssid=ssid, - strength=0, - security_type=self._get_security_type(flags, wpa_flags, rsn_flags), - path="", - bssid="", - is_connected=False, - is_saved=ssid in self.saved_connections - ) - - existing_network = network_dict.get(ssid) - if existing_network.strength < strength: - existing_network.strength = strength - existing_network.path = ap_path - existing_network.bssid = bssid - if self.active_ap_path == ap_path: - existing_network.is_connected = self._current_connection_ssid != ssid - - except DBusError as e: - cloudlog.error(f"Error fetching networks: {e}") - except Exception as e: - cloudlog.error({e}) - - self.networks = sorted( - network_dict.values(), - key=lambda network: ( - not network.is_connected, - -network.strength, # Higher signal strength first - network.ssid.lower(), - ), - ) - - if self.callbacks.networks_updated: - self.callbacks.networks_updated(copy.deepcopy(self.networks)) - - async def _get_connection_settings(self, path): - """Fetch connection settings for a specific connection path.""" - try: - settings = await self._get_interface(NM, path, NM_CONNECTION_IFACE) - return await settings.call_get_settings() - except DBusError as e: - cloudlog.error(f"Failed to get settings for {path}: {str(e)}") - return {} - - async def _process_chunk(self, paths_chunk): - """Process a chunk of connection paths.""" - tasks = [self._get_connection_settings(path) for path in paths_chunk] - return await asyncio.gather(*tasks, return_exceptions=True) - - async def _get_saved_connections(self) -> dict[str, str]: - try: - settings_iface = await self._get_interface(NM, NM_SETTINGS_PATH, NM_SETTINGS_IFACE) - connection_paths = await settings_iface.call_list_connections() - saved_ssids: dict[str, str] = {} - batch_size = 20 - for i in range(0, len(connection_paths), batch_size): - chunk = connection_paths[i : i + batch_size] 
- results = await self._process_chunk(chunk) - for path, config in zip(chunk, results, strict=True): - if isinstance(config, dict) and '802-11-wireless' in config: - if ssid := self._extract_ssid(config): - saved_ssids[ssid] = path - return saved_ssids - except DBusError as e: - cloudlog.error(f"Error fetching saved connections: {str(e)}") - return {} - - async def _get_interface(self, bus_name: str, path: str, name: str): - introspection = await self.bus.introspect(bus_name, path) - proxy = self.bus.get_proxy_object(bus_name, path, introspection) - return proxy.get_interface(name) - - def _get_security_type(self, flags: int, wpa_flags: int, rsn_flags: int) -> SecurityType: - """Determine the security type based on flags.""" - if flags == 0 and not (wpa_flags or rsn_flags): - return SecurityType.OPEN - if rsn_flags & 0x200: # SAE (WPA3 Personal) - # TODO: support WPA3 - return SecurityType.UNSUPPORTED - if rsn_flags: # RSN indicates WPA2 or higher - return SecurityType.WPA2 - if wpa_flags: # WPA flags indicate WPA - return SecurityType.WPA - return SecurityType.UNSUPPORTED - - -class WifiManagerWrapper: - def __init__(self): - self._manager: WifiManager | None = None - self._callbacks: WifiManagerCallbacks = WifiManagerCallbacks() - - self._thread = threading.Thread(target=self._run, daemon=True) - self._loop: asyncio.EventLoop | None = None - self._running = False - - def set_callbacks(self, callbacks: WifiManagerCallbacks): - self._callbacks = callbacks - - def start(self) -> None: - if not self._running: - self._thread.start() - while self._thread is not None and not self._running: - time.sleep(0.1) - - def _run(self): - self._loop = asyncio.new_event_loop() - asyncio.set_event_loop(self._loop) - - try: - self._manager = WifiManager(self._callbacks) - self._running = True - self._loop.run_forever() - except Exception as e: - cloudlog.error(f"Error in WifiManagerWrapper thread: {e}") - finally: - if self._loop.is_running(): - self._loop.stop() - self._running = 
False - - def shutdown(self) -> None: - if self._running: - if self._manager is not None and self._loop: - shutdown_future = asyncio.run_coroutine_threadsafe(self._manager.shutdown(), self._loop) - shutdown_future.result(timeout=3.0) - - if self._loop and self._loop.is_running(): - self._loop.call_soon_threadsafe(self._loop.stop) - if self._thread and self._thread.is_alive(): - self._thread.join(timeout=2.0) - self._running = False - - def is_saved(self, ssid: str) -> bool: - """Check if a network is saved.""" - return self._run_coroutine_sync(lambda manager: manager.is_saved(ssid), default=False) - - def connect(self): - """Connect to DBus and start Wi-Fi scanning.""" - if not self._manager: - return - self._run_coroutine(self._manager.connect()) - - def forget_connection(self, ssid: str): - """Forget a saved Wi-Fi connection.""" - if not self._manager: - return - self._run_coroutine(self._manager.forget_connection(ssid)) - - def activate_connection(self, ssid: str): - """Activate an existing Wi-Fi connection.""" - if not self._manager: - return - self._run_coroutine(self._manager.activate_connection(ssid)) - - def connect_to_network(self, ssid: str, password: str = None, bssid: str = None, is_hidden: bool = False): - """Connect to a Wi-Fi network.""" - if not self._manager: - return - self._run_coroutine(self._manager.connect_to_network(ssid, password, bssid, is_hidden)) - - def _run_coroutine(self, coro): - """Run a coroutine in the async thread.""" - if not self._running or not self._loop: - cloudlog.error("WifiManager thread is not running") - return - asyncio.run_coroutine_threadsafe(coro, self._loop) - - def _run_coroutine_sync(self, func: Callable[[WifiManager], T], default: T) -> T: - """Run a function synchronously in the async thread.""" - if not self._running or not self._loop or not self._manager: - return default - future = concurrent.futures.Future[T]() - - def wrapper(manager: WifiManager) -> None: - try: - future.set_result(func(manager)) - except 
Exception as e: - future.set_exception(e) - - try: - self._loop.call_soon_threadsafe(wrapper, self._manager) - return future.result(timeout=1.0) - except Exception as e: - cloudlog.error(f"WifiManagerWrapper property access failed: {e}") - return default + self._router_main.close() + self._router_main.conn.close() + self._conn_monitor.close() diff --git a/system/ui/reset.py b/system/ui/reset.py index b1bf5a6b6a..a5cf1731dc 100755 --- a/system/ui/reset.py +++ b/system/ui/reset.py @@ -13,7 +13,6 @@ from openpilot.system.ui.widgets import Widget from openpilot.system.ui.widgets.button import Button, ButtonStyle from openpilot.system.ui.widgets.label import gui_label, gui_text_box -NVME = "/dev/nvme0n1" USERDATA = "/dev/disk/by-partlabel/userdata" TIMEOUT = 3*60 @@ -49,10 +48,6 @@ class Reset(Widget): if PC: return - # Best effort to wipe NVME - os.system(f"sudo umount {NVME}") - os.system(f"yes | sudo mkfs.ext4 {NVME}") - # Removing data and formatting rm = os.system("sudo rm -rf /data/*") os.system(f"sudo umount {USERDATA}") diff --git a/system/ui/setup.py b/system/ui/setup.py index 800ca7662c..a985e783be 100755 --- a/system/ui/setup.py +++ b/system/ui/setup.py @@ -19,7 +19,7 @@ from openpilot.system.ui.widgets import Widget from openpilot.system.ui.widgets.button import Button, ButtonStyle, ButtonRadio from openpilot.system.ui.widgets.keyboard import Keyboard from openpilot.system.ui.widgets.label import Label, TextAlignment -from openpilot.system.ui.widgets.network import WifiManagerUI, WifiManagerWrapper +from openpilot.system.ui.widgets.network import WifiManagerUI, WifiManager NetworkType = log.DeviceState.NetworkType @@ -72,8 +72,7 @@ class Setup(Widget): self.download_url = "" self.download_progress = 0 self.download_thread = None - self.wifi_manager = WifiManagerWrapper() - self.wifi_ui = WifiManagerUI(self.wifi_manager) + self.wifi_ui = WifiManagerUI(WifiManager()) self.keyboard = Keyboard() self.selected_radio = None self.warning = 
gui_app.texture("icons/warning.png", 150, 150) diff --git a/system/ui/updater.py b/system/ui/updater.py index 31799d3628..b3cdc82cf5 100755 --- a/system/ui/updater.py +++ b/system/ui/updater.py @@ -7,7 +7,7 @@ from enum import IntEnum from openpilot.system.hardware import HARDWARE from openpilot.system.ui.lib.application import gui_app, FontWeight -from openpilot.system.ui.lib.wifi_manager import WifiManagerWrapper +from openpilot.system.ui.lib.wifi_manager import WifiManager from openpilot.system.ui.widgets import Widget from openpilot.system.ui.widgets.button import gui_button, ButtonStyle from openpilot.system.ui.widgets.label import gui_text_box, gui_label @@ -43,8 +43,7 @@ class Updater(Widget): self.show_reboot_button = False self.process = None self.update_thread = None - self.wifi_manager = WifiManagerWrapper() - self.wifi_manager_ui = WifiManagerUI(self.wifi_manager) + self.wifi_manager_ui = WifiManagerUI(WifiManager()) def install_update(self): self.current_screen = Screen.PROGRESS diff --git a/system/ui/widgets/__init__.py b/system/ui/widgets/__init__.py index d45f48ac38..cca2cec7ad 100644 --- a/system/ui/widgets/__init__.py +++ b/system/ui/widgets/__init__.py @@ -129,3 +129,10 @@ class Widget(abc.ABC): def _handle_mouse_release(self, mouse_pos: MousePos) -> bool: """Optionally handle mouse release events.""" return False + + def show_event(self): + """Optionally handle show event. Parent must manually call this""" + + def hide_event(self): + """Optionally handle hide event. 
Parent must manually call this""" + diff --git a/system/ui/widgets/list_view.py b/system/ui/widgets/list_view.py index e871eef0a1..a8d81a8ba2 100644 --- a/system/ui/widgets/list_view.py +++ b/system/ui/widgets/list_view.py @@ -213,6 +213,7 @@ class ListItem(Widget): self.set_rect(rl.Rectangle(0, 0, ITEM_BASE_WIDTH, ITEM_BASE_HEIGHT)) self._font = gui_app.font(FontWeight.NORMAL) + self._icon_texture = gui_app.texture(os.path.join("icons", self.icon), ICON_SIZE, ICON_SIZE) if self.icon else None # Cached properties for performance self._prev_max_width: int = 0 @@ -261,8 +262,7 @@ class ListItem(Widget): if self.title: # Draw icon if present if self.icon: - icon_texture = gui_app.texture(os.path.join("icons", self.icon), ICON_SIZE, ICON_SIZE) - rl.draw_texture(icon_texture, int(content_x), int(self._rect.y + (ITEM_BASE_HEIGHT - icon_texture.width) // 2), rl.WHITE) + rl.draw_texture(self._icon_texture, int(content_x), int(self._rect.y + (ITEM_BASE_HEIGHT - self._icon_texture.width) // 2), rl.WHITE) text_x += ICON_SIZE + ITEM_PADDING # Draw main text diff --git a/system/ui/widgets/network.py b/system/ui/widgets/network.py index 0bb759a919..3e6317a49c 100644 --- a/system/ui/widgets/network.py +++ b/system/ui/widgets/network.py @@ -1,12 +1,11 @@ from enum import IntEnum from functools import partial -from threading import Lock from typing import cast import pyray as rl from openpilot.system.ui.lib.application import gui_app from openpilot.system.ui.lib.scroll_panel import GuiScrollPanel -from openpilot.system.ui.lib.wifi_manager import NetworkInfo, WifiManagerCallbacks, WifiManagerWrapper, SecurityType +from openpilot.system.ui.lib.wifi_manager import WifiManager, SecurityType, Network from openpilot.system.ui.widgets import Widget from openpilot.system.ui.widgets.button import ButtonStyle, Button from openpilot.system.ui.widgets.confirm_dialog import ConfirmDialog @@ -36,52 +35,58 @@ class UIState(IntEnum): class WifiManagerUI(Widget): - def __init__(self, wifi_manager: 
WifiManagerWrapper): + def __init__(self, wifi_manager: WifiManager): super().__init__() + self.wifi_manager = wifi_manager self.state: UIState = UIState.IDLE - self._state_network: NetworkInfo | None = None # for CONNECTING / NEEDS_AUTH / SHOW_FORGET_CONFIRM / FORGETTING + self._state_network: Network | None = None # for CONNECTING / NEEDS_AUTH / SHOW_FORGET_CONFIRM / FORGETTING self._password_retry: bool = False # for NEEDS_AUTH self.btn_width: int = 200 self.scroll_panel = GuiScrollPanel() self.keyboard = Keyboard(max_text_size=MAX_PASSWORD_LENGTH, min_text_size=MIN_PASSWORD_LENGTH, show_password_toggle=True) + self._load_icons() - self._networks: list[NetworkInfo] = [] + self._networks: list[Network] = [] self._networks_buttons: dict[str, Button] = {} self._forget_networks_buttons: dict[str, Button] = {} - self._lock = Lock() - self.wifi_manager = wifi_manager self._confirm_dialog = ConfirmDialog("", "Forget", "Cancel") - self.wifi_manager.set_callbacks( - WifiManagerCallbacks( - need_auth=self._on_need_auth, - activated=self._on_activated, - forgotten=self._on_forgotten, - networks_updated=self._on_network_updated, - connection_failed=self._on_connection_failed - ) - ) - self.wifi_manager.start() - self.wifi_manager.connect() + self.wifi_manager.set_callbacks(need_auth=self._on_need_auth, + activated=self._on_activated, + forgotten=self._on_forgotten, + networks_updated=self._on_network_updated, + disconnected=self._on_disconnected) + + def show_event(self): + # start/stop scanning when widget is visible + self.wifi_manager.set_active(True) + + def hide_event(self): + self.wifi_manager.set_active(False) + + def _load_icons(self): + for icon in STRENGTH_ICONS + ["icons/checkmark.png", "icons/circled_slash.png", "icons/lock_closed.png"]: + gui_app.texture(icon, ICON_SIZE, ICON_SIZE) def _render(self, rect: rl.Rectangle): - with self._lock: - if not self._networks: - gui_label(rect, "Scanning Wi-Fi networks...", 72, 
alignment=rl.GuiTextAlignment.TEXT_ALIGN_CENTER) - return + self.wifi_manager.process_callbacks() - if self.state == UIState.NEEDS_AUTH and self._state_network: - self.keyboard.set_title("Wrong password" if self._password_retry else "Enter password", f"for {self._state_network.ssid}") - self.keyboard.reset() - gui_app.set_modal_overlay(self.keyboard, lambda result: self._on_password_entered(cast(NetworkInfo, self._state_network), result)) - elif self.state == UIState.SHOW_FORGET_CONFIRM and self._state_network: - self._confirm_dialog.set_text(f'Forget Wi-Fi Network "{self._state_network.ssid}"?') - self._confirm_dialog.reset() - gui_app.set_modal_overlay(self._confirm_dialog, callback=lambda result: self.on_forgot_confirm_finished(self._state_network, result)) - else: - self._draw_network_list(rect) + if not self._networks: + gui_label(rect, "Scanning Wi-Fi networks...", 72, alignment=rl.GuiTextAlignment.TEXT_ALIGN_CENTER) + return - def _on_password_entered(self, network: NetworkInfo, result: int): + if self.state == UIState.NEEDS_AUTH and self._state_network: + self.keyboard.set_title("Wrong password" if self._password_retry else "Enter password", f"for {self._state_network.ssid}") + self.keyboard.reset() + gui_app.set_modal_overlay(self.keyboard, lambda result: self._on_password_entered(cast(Network, self._state_network), result)) + elif self.state == UIState.SHOW_FORGET_CONFIRM and self._state_network: + self._confirm_dialog.set_text(f'Forget Wi-Fi Network "{self._state_network.ssid}"?') + self._confirm_dialog.reset() + gui_app.set_modal_overlay(self._confirm_dialog, callback=lambda result: self.on_forgot_confirm_finished(self._state_network, result)) + else: + self._draw_network_list(rect) + + def _on_password_entered(self, network: Network, result: int): if result == 1: password = self.keyboard.text self.keyboard.clear() @@ -116,7 +121,7 @@ class WifiManagerUI(Widget): rl.end_scissor_mode() - def _draw_network_item(self, rect, network: NetworkInfo, clicked: 
bool): + def _draw_network_item(self, rect, network: Network, clicked: bool): spacing = 50 ssid_rect = rl.Rectangle(rect.x, rect.y, rect.width - self.btn_width * 2, ITEM_HEIGHT) signal_icon_rect = rl.Rectangle(rect.x + rect.width - ICON_SIZE, rect.y + (ITEM_HEIGHT - ICON_SIZE) / 2, ICON_SIZE, ICON_SIZE) @@ -169,10 +174,10 @@ class WifiManagerUI(Widget): self.state = UIState.SHOW_FORGET_CONFIRM self._state_network = network - def _draw_status_icon(self, rect, network: NetworkInfo): + def _draw_status_icon(self, rect, network: Network): """Draw the status icon based on network's connection state""" icon_file = None - if network.is_connected: + if network.is_connected and self.state != UIState.CONNECTING: icon_file = "icons/checkmark.png" elif network.security_type == SecurityType.UNSUPPORTED: icon_file = "icons/circled_slash.png" @@ -186,12 +191,12 @@ class WifiManagerUI(Widget): icon_rect = rl.Vector2(rect.x, rect.y + (ICON_SIZE - texture.height) / 2) rl.draw_texture_v(texture, icon_rect, rl.WHITE) - def _draw_signal_strength_icon(self, rect: rl.Rectangle, network: NetworkInfo): + def _draw_signal_strength_icon(self, rect: rl.Rectangle, network: Network): """Draw the Wi-Fi signal strength icon based on network's signal strength""" strength_level = max(0, min(3, round(network.strength / 33.0))) rl.draw_texture_v(gui_app.texture(STRENGTH_ICONS[strength_level], ICON_SIZE, ICON_SIZE), rl.Vector2(rect.x, rect.y), rl.WHITE) - def connect_to_network(self, network: NetworkInfo, password=''): + def connect_to_network(self, network: Network, password=''): self.state = UIState.CONNECTING self._state_network = network if network.is_saved and not password: @@ -199,54 +204,46 @@ class WifiManagerUI(Widget): else: self.wifi_manager.connect_to_network(network.ssid, password) - def forget_network(self, network: NetworkInfo): + def forget_network(self, network: Network): self.state = UIState.FORGETTING self._state_network = network - network.is_saved = False 
self.wifi_manager.forget_connection(network.ssid) - def _on_network_updated(self, networks: list[NetworkInfo]): - with self._lock: - self._networks = networks - for n in self._networks: - self._networks_buttons[n.ssid] = Button(n.ssid, partial(self._networks_buttons_callback, n), font_size=55, text_alignment=TextAlignment.LEFT, - button_style=ButtonStyle.NO_EFFECT) - self._forget_networks_buttons[n.ssid] = Button("Forget", partial(self._forget_networks_buttons_callback, n), button_style=ButtonStyle.FORGET_WIFI, - font_size=45) + def _on_network_updated(self, networks: list[Network]): + self._networks = networks + for n in self._networks: + self._networks_buttons[n.ssid] = Button(n.ssid, partial(self._networks_buttons_callback, n), font_size=55, text_alignment=TextAlignment.LEFT, + button_style=ButtonStyle.NO_EFFECT) + self._forget_networks_buttons[n.ssid] = Button("Forget", partial(self._forget_networks_buttons_callback, n), button_style=ButtonStyle.FORGET_WIFI, + font_size=45) def _on_need_auth(self, ssid): - with self._lock: - network = next((n for n in self._networks if n.ssid == ssid), None) - if network: - self.state = UIState.NEEDS_AUTH - self._state_network = network - self._password_retry = True + network = next((n for n in self._networks if n.ssid == ssid), None) + if network: + self.state = UIState.NEEDS_AUTH + self._state_network = network + self._password_retry = True def _on_activated(self): - with self._lock: - if self.state == UIState.CONNECTING: - self.state = UIState.IDLE + if self.state == UIState.CONNECTING: + self.state = UIState.IDLE - def _on_forgotten(self, ssid): - with self._lock: - if self.state == UIState.FORGETTING: - self.state = UIState.IDLE + def _on_forgotten(self): + if self.state == UIState.FORGETTING: + self.state = UIState.IDLE - def _on_connection_failed(self, ssid: str, error: str): - with self._lock: - if self.state == UIState.CONNECTING: - self.state = UIState.IDLE + def _on_disconnected(self): + if self.state == 
UIState.CONNECTING: + self.state = UIState.IDLE def main(): gui_app.init_window("Wi-Fi Manager") - wifi_manager = WifiManagerWrapper() - wifi_ui = WifiManagerUI(wifi_manager) + wifi_ui = WifiManagerUI(WifiManager()) for _ in gui_app.render(): wifi_ui.render(rl.Rectangle(50, 50, gui_app.width - 100, gui_app.height - 100)) - wifi_manager.shutdown() gui_app.close() diff --git a/third_party/SConscript b/third_party/SConscript index 507c17c4a5..3a7497d162 100644 --- a/third_party/SConscript +++ b/third_party/SConscript @@ -1,4 +1,3 @@ Import('env') env.Library('json11', ['json11/json11.cpp'], CCFLAGS=env['CCFLAGS'] + ['-Wno-unqualified-std-cast-call']) -env.Library('kaitai', ['kaitai/kaitaistream.cpp'], CPPDEFINES=['KS_STR_ENCODING_NONE']) diff --git a/third_party/kaitai/custom_decoder.h b/third_party/kaitai/custom_decoder.h deleted file mode 100644 index 6da7f5fd23..0000000000 --- a/third_party/kaitai/custom_decoder.h +++ /dev/null @@ -1,16 +0,0 @@ -#ifndef KAITAI_CUSTOM_DECODER_H -#define KAITAI_CUSTOM_DECODER_H - -#include - -namespace kaitai { - -class custom_decoder { -public: - virtual ~custom_decoder() {}; - virtual std::string decode(std::string src) = 0; -}; - -} - -#endif diff --git a/third_party/kaitai/exceptions.h b/third_party/kaitai/exceptions.h deleted file mode 100644 index 5c09c4672b..0000000000 --- a/third_party/kaitai/exceptions.h +++ /dev/null @@ -1,189 +0,0 @@ -#ifndef KAITAI_EXCEPTIONS_H -#define KAITAI_EXCEPTIONS_H - -#include - -#include -#include - -// We need to use "noexcept" in virtual destructor of our exceptions -// subclasses. Different compilers have different ideas on how to -// achieve that: C++98 compilers prefer `throw()`, C++11 and later -// use `noexcept`. We define KS_NOEXCEPT macro for that. 
- -#if __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1900) -#define KS_NOEXCEPT noexcept -#else -#define KS_NOEXCEPT throw() -#endif - -namespace kaitai { - -/** - * Common ancestor for all error originating from Kaitai Struct usage. - * Stores KSY source path, pointing to an element supposedly guilty of - * an error. - */ -class kstruct_error: public std::runtime_error { -public: - kstruct_error(const std::string what, const std::string src_path): - std::runtime_error(src_path + ": " + what), - m_src_path(src_path) - { - } - - virtual ~kstruct_error() KS_NOEXCEPT {}; - -protected: - const std::string m_src_path; -}; - -/** - * Error that occurs when default endianness should be decided with - * a switch, but nothing matches (although using endianness expression - * implies that there should be some positive result). - */ -class undecided_endianness_error: public kstruct_error { -public: - undecided_endianness_error(const std::string src_path): - kstruct_error("unable to decide on endianness for a type", src_path) - { - } - - virtual ~undecided_endianness_error() KS_NOEXCEPT {}; -}; - -/** - * Common ancestor for all validation failures. Stores pointer to - * KaitaiStream IO object which was involved in an error. - */ -class validation_failed_error: public kstruct_error { -public: - validation_failed_error(const std::string what, kstream* io, const std::string src_path): - kstruct_error("at pos " + kstream::to_string(static_cast(io->pos())) + ": validation failed: " + what, src_path), - m_io(io) - { - } - -// "at pos #{io.pos}: validation failed: #{msg}" - - virtual ~validation_failed_error() KS_NOEXCEPT {}; - -protected: - kstream* m_io; -}; - -/** - * Signals validation failure: we required "actual" value to be equal to - * "expected", but it turned out that it's not. 
- */ -template -class validation_not_equal_error: public validation_failed_error { -public: - validation_not_equal_error(const T& expected, const T& actual, kstream* io, const std::string src_path): - validation_failed_error("not equal", io, src_path), - m_expected(expected), - m_actual(actual) - { - } - - // "not equal, expected #{expected.inspect}, but got #{actual.inspect}" - - virtual ~validation_not_equal_error() KS_NOEXCEPT {}; - -protected: - const T& m_expected; - const T& m_actual; -}; - -/** - * Signals validation failure: we required "actual" value to be greater - * than or equal to "min", but it turned out that it's not. - */ -template -class validation_less_than_error: public validation_failed_error { -public: - validation_less_than_error(const T& min, const T& actual, kstream* io, const std::string src_path): - validation_failed_error("not in range", io, src_path), - m_min(min), - m_actual(actual) - { - } - - // "not in range, min #{min.inspect}, but got #{actual.inspect}" - - virtual ~validation_less_than_error() KS_NOEXCEPT {}; - -protected: - const T& m_min; - const T& m_actual; -}; - -/** - * Signals validation failure: we required "actual" value to be less - * than or equal to "max", but it turned out that it's not. - */ -template -class validation_greater_than_error: public validation_failed_error { -public: - validation_greater_than_error(const T& max, const T& actual, kstream* io, const std::string src_path): - validation_failed_error("not in range", io, src_path), - m_max(max), - m_actual(actual) - { - } - - // "not in range, max #{max.inspect}, but got #{actual.inspect}" - - virtual ~validation_greater_than_error() KS_NOEXCEPT {}; - -protected: - const T& m_max; - const T& m_actual; -}; - -/** - * Signals validation failure: we required "actual" value to be from - * the list, but it turned out that it's not. 
- */ -template -class validation_not_any_of_error: public validation_failed_error { -public: - validation_not_any_of_error(const T& actual, kstream* io, const std::string src_path): - validation_failed_error("not any of the list", io, src_path), - m_actual(actual) - { - } - - // "not any of the list, got #{actual.inspect}" - - virtual ~validation_not_any_of_error() KS_NOEXCEPT {}; - -protected: - const T& m_actual; -}; - -/** - * Signals validation failure: we required "actual" value to match - * the expression, but it turned out that it doesn't. - */ -template -class validation_expr_error: public validation_failed_error { -public: - validation_expr_error(const T& actual, kstream* io, const std::string src_path): - validation_failed_error("not matching the expression", io, src_path), - m_actual(actual) - { - } - - // "not matching the expression, got #{actual.inspect}" - - virtual ~validation_expr_error() KS_NOEXCEPT {}; - -protected: - const T& m_actual; -}; - -} - -#endif diff --git a/third_party/kaitai/kaitaistream.cpp b/third_party/kaitai/kaitaistream.cpp deleted file mode 100644 index d82ddb7e82..0000000000 --- a/third_party/kaitai/kaitaistream.cpp +++ /dev/null @@ -1,689 +0,0 @@ -#include - -#if defined(__APPLE__) -#include -#include -#define bswap_16(x) OSSwapInt16(x) -#define bswap_32(x) OSSwapInt32(x) -#define bswap_64(x) OSSwapInt64(x) -#define __BYTE_ORDER BYTE_ORDER -#define __BIG_ENDIAN BIG_ENDIAN -#define __LITTLE_ENDIAN LITTLE_ENDIAN -#elif defined(_MSC_VER) // !__APPLE__ -#include -#define __LITTLE_ENDIAN 1234 -#define __BIG_ENDIAN 4321 -#define __BYTE_ORDER __LITTLE_ENDIAN -#define bswap_16(x) _byteswap_ushort(x) -#define bswap_32(x) _byteswap_ulong(x) -#define bswap_64(x) _byteswap_uint64(x) -#else // !__APPLE__ or !_MSC_VER -#include -#include -#endif - -#include -#include -#include - -kaitai::kstream::kstream(std::istream* io) { - m_io = io; - init(); -} - -kaitai::kstream::kstream(std::string& data): m_io_str(data) { - m_io = &m_io_str; - 
init(); -} - -void kaitai::kstream::init() { - exceptions_enable(); - align_to_byte(); -} - -void kaitai::kstream::close() { - // m_io->close(); -} - -void kaitai::kstream::exceptions_enable() const { - m_io->exceptions( - std::istream::eofbit | - std::istream::failbit | - std::istream::badbit - ); -} - -// ======================================================================== -// Stream positioning -// ======================================================================== - -bool kaitai::kstream::is_eof() const { - if (m_bits_left > 0) { - return false; - } - char t; - m_io->exceptions( - std::istream::badbit - ); - m_io->get(t); - if (m_io->eof()) { - m_io->clear(); - exceptions_enable(); - return true; - } else { - m_io->unget(); - exceptions_enable(); - return false; - } -} - -void kaitai::kstream::seek(uint64_t pos) { - m_io->seekg(pos); -} - -uint64_t kaitai::kstream::pos() { - return m_io->tellg(); -} - -uint64_t kaitai::kstream::size() { - std::iostream::pos_type cur_pos = m_io->tellg(); - m_io->seekg(0, std::ios::end); - std::iostream::pos_type len = m_io->tellg(); - m_io->seekg(cur_pos); - return len; -} - -// ======================================================================== -// Integer numbers -// ======================================================================== - -// ------------------------------------------------------------------------ -// Signed -// ------------------------------------------------------------------------ - -int8_t kaitai::kstream::read_s1() { - char t; - m_io->get(t); - return t; -} - -// ........................................................................ -// Big-endian -// ........................................................................ 
- -int16_t kaitai::kstream::read_s2be() { - int16_t t; - m_io->read(reinterpret_cast(&t), 2); -#if __BYTE_ORDER == __LITTLE_ENDIAN - t = bswap_16(t); -#endif - return t; -} - -int32_t kaitai::kstream::read_s4be() { - int32_t t; - m_io->read(reinterpret_cast(&t), 4); -#if __BYTE_ORDER == __LITTLE_ENDIAN - t = bswap_32(t); -#endif - return t; -} - -int64_t kaitai::kstream::read_s8be() { - int64_t t; - m_io->read(reinterpret_cast(&t), 8); -#if __BYTE_ORDER == __LITTLE_ENDIAN - t = bswap_64(t); -#endif - return t; -} - -// ........................................................................ -// Little-endian -// ........................................................................ - -int16_t kaitai::kstream::read_s2le() { - int16_t t; - m_io->read(reinterpret_cast(&t), 2); -#if __BYTE_ORDER == __BIG_ENDIAN - t = bswap_16(t); -#endif - return t; -} - -int32_t kaitai::kstream::read_s4le() { - int32_t t; - m_io->read(reinterpret_cast(&t), 4); -#if __BYTE_ORDER == __BIG_ENDIAN - t = bswap_32(t); -#endif - return t; -} - -int64_t kaitai::kstream::read_s8le() { - int64_t t; - m_io->read(reinterpret_cast(&t), 8); -#if __BYTE_ORDER == __BIG_ENDIAN - t = bswap_64(t); -#endif - return t; -} - -// ------------------------------------------------------------------------ -// Unsigned -// ------------------------------------------------------------------------ - -uint8_t kaitai::kstream::read_u1() { - char t; - m_io->get(t); - return t; -} - -// ........................................................................ -// Big-endian -// ........................................................................ 
- -uint16_t kaitai::kstream::read_u2be() { - uint16_t t; - m_io->read(reinterpret_cast(&t), 2); -#if __BYTE_ORDER == __LITTLE_ENDIAN - t = bswap_16(t); -#endif - return t; -} - -uint32_t kaitai::kstream::read_u4be() { - uint32_t t; - m_io->read(reinterpret_cast(&t), 4); -#if __BYTE_ORDER == __LITTLE_ENDIAN - t = bswap_32(t); -#endif - return t; -} - -uint64_t kaitai::kstream::read_u8be() { - uint64_t t; - m_io->read(reinterpret_cast(&t), 8); -#if __BYTE_ORDER == __LITTLE_ENDIAN - t = bswap_64(t); -#endif - return t; -} - -// ........................................................................ -// Little-endian -// ........................................................................ - -uint16_t kaitai::kstream::read_u2le() { - uint16_t t; - m_io->read(reinterpret_cast(&t), 2); -#if __BYTE_ORDER == __BIG_ENDIAN - t = bswap_16(t); -#endif - return t; -} - -uint32_t kaitai::kstream::read_u4le() { - uint32_t t; - m_io->read(reinterpret_cast(&t), 4); -#if __BYTE_ORDER == __BIG_ENDIAN - t = bswap_32(t); -#endif - return t; -} - -uint64_t kaitai::kstream::read_u8le() { - uint64_t t; - m_io->read(reinterpret_cast(&t), 8); -#if __BYTE_ORDER == __BIG_ENDIAN - t = bswap_64(t); -#endif - return t; -} - -// ======================================================================== -// Floating point numbers -// ======================================================================== - -// ........................................................................ -// Big-endian -// ........................................................................ 
- -float kaitai::kstream::read_f4be() { - uint32_t t; - m_io->read(reinterpret_cast(&t), 4); -#if __BYTE_ORDER == __LITTLE_ENDIAN - t = bswap_32(t); -#endif - return reinterpret_cast(t); -} - -double kaitai::kstream::read_f8be() { - uint64_t t; - m_io->read(reinterpret_cast(&t), 8); -#if __BYTE_ORDER == __LITTLE_ENDIAN - t = bswap_64(t); -#endif - return reinterpret_cast(t); -} - -// ........................................................................ -// Little-endian -// ........................................................................ - -float kaitai::kstream::read_f4le() { - uint32_t t; - m_io->read(reinterpret_cast(&t), 4); -#if __BYTE_ORDER == __BIG_ENDIAN - t = bswap_32(t); -#endif - return reinterpret_cast(t); -} - -double kaitai::kstream::read_f8le() { - uint64_t t; - m_io->read(reinterpret_cast(&t), 8); -#if __BYTE_ORDER == __BIG_ENDIAN - t = bswap_64(t); -#endif - return reinterpret_cast(t); -} - -// ======================================================================== -// Unaligned bit values -// ======================================================================== - -void kaitai::kstream::align_to_byte() { - m_bits_left = 0; - m_bits = 0; -} - -uint64_t kaitai::kstream::read_bits_int_be(int n) { - int bits_needed = n - m_bits_left; - if (bits_needed > 0) { - // 1 bit => 1 byte - // 8 bits => 1 byte - // 9 bits => 2 bytes - int bytes_needed = ((bits_needed - 1) / 8) + 1; - if (bytes_needed > 8) - throw std::runtime_error("read_bits_int: more than 8 bytes requested"); - char buf[8]; - m_io->read(buf, bytes_needed); - for (int i = 0; i < bytes_needed; i++) { - uint8_t b = buf[i]; - m_bits <<= 8; - m_bits |= b; - m_bits_left += 8; - } - } - - // raw mask with required number of 1s, starting from lowest bit - uint64_t mask = get_mask_ones(n); - // shift mask to align with highest bits available in @bits - int shift_bits = m_bits_left - n; - mask <<= shift_bits; - // derive reading result - uint64_t res = (m_bits & mask) >> shift_bits; - // 
clear top bits that we've just read => AND with 1s - m_bits_left -= n; - mask = get_mask_ones(m_bits_left); - m_bits &= mask; - - return res; -} - -// Deprecated, use read_bits_int_be() instead. -uint64_t kaitai::kstream::read_bits_int(int n) { - return read_bits_int_be(n); -} - -uint64_t kaitai::kstream::read_bits_int_le(int n) { - int bits_needed = n - m_bits_left; - if (bits_needed > 0) { - // 1 bit => 1 byte - // 8 bits => 1 byte - // 9 bits => 2 bytes - int bytes_needed = ((bits_needed - 1) / 8) + 1; - if (bytes_needed > 8) - throw std::runtime_error("read_bits_int_le: more than 8 bytes requested"); - char buf[8]; - m_io->read(buf, bytes_needed); - for (int i = 0; i < bytes_needed; i++) { - uint8_t b = buf[i]; - m_bits |= (static_cast(b) << m_bits_left); - m_bits_left += 8; - } - } - - // raw mask with required number of 1s, starting from lowest bit - uint64_t mask = get_mask_ones(n); - // derive reading result - uint64_t res = m_bits & mask; - // remove bottom bits that we've just read by shifting - m_bits >>= n; - m_bits_left -= n; - - return res; -} - -uint64_t kaitai::kstream::get_mask_ones(int n) { - if (n == 64) { - return 0xFFFFFFFFFFFFFFFF; - } else { - return ((uint64_t) 1 << n) - 1; - } -} - -// ======================================================================== -// Byte arrays -// ======================================================================== - -std::string kaitai::kstream::read_bytes(std::streamsize len) { - std::vector result(len); - - // NOTE: streamsize type is signed, negative values are only *supposed* to not be used. 
- // http://en.cppreference.com/w/cpp/io/streamsize - if (len < 0) { - throw std::runtime_error("read_bytes: requested a negative amount"); - } - - if (len > 0) { - m_io->read(&result[0], len); - } - - return std::string(result.begin(), result.end()); -} - -std::string kaitai::kstream::read_bytes_full() { - std::iostream::pos_type p1 = m_io->tellg(); - m_io->seekg(0, std::ios::end); - std::iostream::pos_type p2 = m_io->tellg(); - size_t len = p2 - p1; - - // Note: this requires a std::string to be backed with a - // contiguous buffer. Officially, it's a only requirement since - // C++11 (C++98 and C++03 didn't have this requirement), but all - // major implementations had contiguous buffers anyway. - std::string result(len, ' '); - m_io->seekg(p1); - m_io->read(&result[0], len); - - return result; -} - -std::string kaitai::kstream::read_bytes_term(char term, bool include, bool consume, bool eos_error) { - std::string result; - std::getline(*m_io, result, term); - if (m_io->eof()) { - // encountered EOF - if (eos_error) { - throw std::runtime_error("read_bytes_term: encountered EOF"); - } - } else { - // encountered terminator - if (include) - result.push_back(term); - if (!consume) - m_io->unget(); - } - return result; -} - -std::string kaitai::kstream::ensure_fixed_contents(std::string expected) { - std::string actual = read_bytes(expected.length()); - - if (actual != expected) { - // NOTE: I think printing it outright is not best idea, it could contain non-ascii charactes like backspace and beeps and whatnot. It would be better to print hexlified version, and also to redirect it to stderr. 
- throw std::runtime_error("ensure_fixed_contents: actual data does not match expected data"); - } - - return actual; -} - -std::string kaitai::kstream::bytes_strip_right(std::string src, char pad_byte) { - std::size_t new_len = src.length(); - - while (new_len > 0 && src[new_len - 1] == pad_byte) - new_len--; - - return src.substr(0, new_len); -} - -std::string kaitai::kstream::bytes_terminate(std::string src, char term, bool include) { - std::size_t new_len = 0; - std::size_t max_len = src.length(); - - while (new_len < max_len && src[new_len] != term) - new_len++; - - if (include && new_len < max_len) - new_len++; - - return src.substr(0, new_len); -} - -// ======================================================================== -// Byte array processing -// ======================================================================== - -std::string kaitai::kstream::process_xor_one(std::string data, uint8_t key) { - size_t len = data.length(); - std::string result(len, ' '); - - for (size_t i = 0; i < len; i++) - result[i] = data[i] ^ key; - - return result; -} - -std::string kaitai::kstream::process_xor_many(std::string data, std::string key) { - size_t len = data.length(); - size_t kl = key.length(); - std::string result(len, ' '); - - size_t ki = 0; - for (size_t i = 0; i < len; i++) { - result[i] = data[i] ^ key[ki]; - ki++; - if (ki >= kl) - ki = 0; - } - - return result; -} - -std::string kaitai::kstream::process_rotate_left(std::string data, int amount) { - size_t len = data.length(); - std::string result(len, ' '); - - for (size_t i = 0; i < len; i++) { - uint8_t bits = data[i]; - result[i] = (bits << amount) | (bits >> (8 - amount)); - } - - return result; -} - -#ifdef KS_ZLIB -#include - -std::string kaitai::kstream::process_zlib(std::string data) { - int ret; - - unsigned char *src_ptr = reinterpret_cast(&data[0]); - std::stringstream dst_strm; - - z_stream strm; - strm.zalloc = Z_NULL; - strm.zfree = Z_NULL; - strm.opaque = Z_NULL; - - ret = 
inflateInit(&strm); - if (ret != Z_OK) - throw std::runtime_error("process_zlib: inflateInit error"); - - strm.next_in = src_ptr; - strm.avail_in = data.length(); - - unsigned char outbuffer[ZLIB_BUF_SIZE]; - std::string outstring; - - // get the decompressed bytes blockwise using repeated calls to inflate - do { - strm.next_out = reinterpret_cast(outbuffer); - strm.avail_out = sizeof(outbuffer); - - ret = inflate(&strm, 0); - - if (outstring.size() < strm.total_out) - outstring.append(reinterpret_cast(outbuffer), strm.total_out - outstring.size()); - } while (ret == Z_OK); - - if (ret != Z_STREAM_END) { // an error occurred that was not EOF - std::ostringstream exc_msg; - exc_msg << "process_zlib: error #" << ret << "): " << strm.msg; - throw std::runtime_error(exc_msg.str()); - } - - if (inflateEnd(&strm) != Z_OK) - throw std::runtime_error("process_zlib: inflateEnd error"); - - return outstring; -} -#endif - -// ======================================================================== -// Misc utility methods -// ======================================================================== - -int kaitai::kstream::mod(int a, int b) { - if (b <= 0) - throw std::invalid_argument("mod: divisor b <= 0"); - int r = a % b; - if (r < 0) - r += b; - return r; -} - -#include -std::string kaitai::kstream::to_string(int val) { - // if int is 32 bits, "-2147483648" is the longest string representation - // => 11 chars + zero => 12 chars - // if int is 64 bits, "-9223372036854775808" is the longest - // => 20 chars + zero => 21 chars - char buf[25]; - int got_len = snprintf(buf, sizeof(buf), "%d", val); - - // should never happen, but check nonetheless - if (got_len > sizeof(buf)) - throw std::invalid_argument("to_string: integer is longer than string buffer"); - - return std::string(buf); -} - -#include -std::string kaitai::kstream::reverse(std::string val) { - std::reverse(val.begin(), val.end()); - - return val; -} - -uint8_t kaitai::kstream::byte_array_min(const std::string 
val) { - uint8_t min = 0xff; // UINT8_MAX - std::string::const_iterator end = val.end(); - for (std::string::const_iterator it = val.begin(); it != end; ++it) { - uint8_t cur = static_cast(*it); - if (cur < min) { - min = cur; - } - } - return min; -} - -uint8_t kaitai::kstream::byte_array_max(const std::string val) { - uint8_t max = 0; // UINT8_MIN - std::string::const_iterator end = val.end(); - for (std::string::const_iterator it = val.begin(); it != end; ++it) { - uint8_t cur = static_cast(*it); - if (cur > max) { - max = cur; - } - } - return max; -} - -// ======================================================================== -// Other internal methods -// ======================================================================== - -#ifndef KS_STR_DEFAULT_ENCODING -#define KS_STR_DEFAULT_ENCODING "UTF-8" -#endif - -#ifdef KS_STR_ENCODING_ICONV - -#include -#include -#include - -std::string kaitai::kstream::bytes_to_str(std::string src, std::string src_enc) { - iconv_t cd = iconv_open(KS_STR_DEFAULT_ENCODING, src_enc.c_str()); - - if (cd == (iconv_t) -1) { - if (errno == EINVAL) { - throw std::runtime_error("bytes_to_str: invalid encoding pair conversion requested"); - } else { - throw std::runtime_error("bytes_to_str: error opening iconv"); - } - } - - size_t src_len = src.length(); - size_t src_left = src_len; - - // Start with a buffer length of double the source length. 
- size_t dst_len = src_len * 2; - std::string dst(dst_len, ' '); - size_t dst_left = dst_len; - - char *src_ptr = &src[0]; - char *dst_ptr = &dst[0]; - - while (true) { - size_t res = iconv(cd, &src_ptr, &src_left, &dst_ptr, &dst_left); - - if (res == (size_t) -1) { - if (errno == E2BIG) { - // dst buffer is not enough to accomodate whole string - // enlarge the buffer and try again - size_t dst_used = dst_len - dst_left; - dst_left += dst_len; - dst_len += dst_len; - dst.resize(dst_len); - - // dst.resize might have allocated destination buffer in another area - // of memory, thus our previous pointer "dst" will be invalid; re-point - // it using "dst_used". - dst_ptr = &dst[dst_used]; - } else { - throw std::runtime_error("bytes_to_str: iconv error"); - } - } else { - // conversion successful - dst.resize(dst_len - dst_left); - break; - } - } - - if (iconv_close(cd) != 0) { - throw std::runtime_error("bytes_to_str: iconv close error"); - } - - return dst; -} -#elif defined(KS_STR_ENCODING_NONE) -std::string kaitai::kstream::bytes_to_str(std::string src, std::string src_enc) { - return src; -} -#else -#error Need to decide how to handle strings: please define one of: KS_STR_ENCODING_ICONV, KS_STR_ENCODING_NONE -#endif diff --git a/third_party/kaitai/kaitaistream.h b/third_party/kaitai/kaitaistream.h deleted file mode 100644 index e7f4c6ce34..0000000000 --- a/third_party/kaitai/kaitaistream.h +++ /dev/null @@ -1,268 +0,0 @@ -#ifndef KAITAI_STREAM_H -#define KAITAI_STREAM_H - -// Kaitai Struct runtime API version: x.y.z = 'xxxyyyzzz' decimal -#define KAITAI_STRUCT_VERSION 9000L - -#include -#include -#include -#include - -namespace kaitai { - -/** - * Kaitai Stream class (kaitai::kstream) is an implementation of - * Kaitai Struct stream API - * for C++/STL. It's implemented as a wrapper over generic STL std::istream. 
- * - * It provides a wide variety of simple methods to read (parse) binary - * representations of primitive types, such as integer and floating - * point numbers, byte arrays and strings, and also provides stream - * positioning / navigation methods with unified cross-language and - * cross-toolkit semantics. - * - * Typically, end users won't access Kaitai Stream class manually, but would - * describe a binary structure format using .ksy language and then would use - * Kaitai Struct compiler to generate source code in desired target language. - * That code, in turn, would use this class and API to do the actual parsing - * job. - */ -class kstream { -public: - /** - * Constructs new Kaitai Stream object, wrapping a given std::istream. - * \param io istream object to use for this Kaitai Stream - */ - kstream(std::istream* io); - - /** - * Constructs new Kaitai Stream object, wrapping a given in-memory data - * buffer. - * \param data data buffer to use for this Kaitai Stream - */ - kstream(std::string& data); - - void close(); - - /** @name Stream positioning */ - //@{ - /** - * Check if stream pointer is at the end of stream. Note that the semantics - * are different from traditional STL semantics: one does *not* need to do a - * read (which will fail) after the actual end of the stream to trigger EOF - * flag, which can be accessed after that read. It is sufficient to just be - * at the end of the stream for this method to return true. - * \return "true" if we are located at the end of the stream. - */ - bool is_eof() const; - - /** - * Set stream pointer to designated position. - * \param pos new position (offset in bytes from the beginning of the stream) - */ - void seek(uint64_t pos); - - /** - * Get current position of a stream pointer. - * \return pointer position, number of bytes from the beginning of the stream - */ - uint64_t pos(); - - /** - * Get total size of the stream in bytes. 
- * \return size of the stream in bytes - */ - uint64_t size(); - //@} - - /** @name Integer numbers */ - //@{ - - // ------------------------------------------------------------------------ - // Signed - // ------------------------------------------------------------------------ - - int8_t read_s1(); - - // ........................................................................ - // Big-endian - // ........................................................................ - - int16_t read_s2be(); - int32_t read_s4be(); - int64_t read_s8be(); - - // ........................................................................ - // Little-endian - // ........................................................................ - - int16_t read_s2le(); - int32_t read_s4le(); - int64_t read_s8le(); - - // ------------------------------------------------------------------------ - // Unsigned - // ------------------------------------------------------------------------ - - uint8_t read_u1(); - - // ........................................................................ - // Big-endian - // ........................................................................ - - uint16_t read_u2be(); - uint32_t read_u4be(); - uint64_t read_u8be(); - - // ........................................................................ - // Little-endian - // ........................................................................ - - uint16_t read_u2le(); - uint32_t read_u4le(); - uint64_t read_u8le(); - - //@} - - /** @name Floating point numbers */ - //@{ - - // ........................................................................ - // Big-endian - // ........................................................................ - - float read_f4be(); - double read_f8be(); - - // ........................................................................ - // Little-endian - // ........................................................................ 
- - float read_f4le(); - double read_f8le(); - - //@} - - /** @name Unaligned bit values */ - //@{ - - void align_to_byte(); - uint64_t read_bits_int_be(int n); - uint64_t read_bits_int(int n); - uint64_t read_bits_int_le(int n); - - //@} - - /** @name Byte arrays */ - //@{ - - std::string read_bytes(std::streamsize len); - std::string read_bytes_full(); - std::string read_bytes_term(char term, bool include, bool consume, bool eos_error); - std::string ensure_fixed_contents(std::string expected); - - static std::string bytes_strip_right(std::string src, char pad_byte); - static std::string bytes_terminate(std::string src, char term, bool include); - static std::string bytes_to_str(std::string src, std::string src_enc); - - //@} - - /** @name Byte array processing */ - //@{ - - /** - * Performs a XOR processing with given data, XORing every byte of input with a single - * given value. - * @param data data to process - * @param key value to XOR with - * @return processed data - */ - static std::string process_xor_one(std::string data, uint8_t key); - - /** - * Performs a XOR processing with given data, XORing every byte of input with a key - * array, repeating key array many times, if necessary (i.e. if data array is longer - * than key array). - * @param data data to process - * @param key array of bytes to XOR with - * @return processed data - */ - static std::string process_xor_many(std::string data, std::string key); - - /** - * Performs a circular left rotation shift for a given buffer by a given amount of bits, - * using groups of 1 bytes each time. Right circular rotation should be performed - * using this procedure with corrected amount. 
- * @param data source data to process - * @param amount number of bits to shift by - * @return copy of source array with requested shift applied - */ - static std::string process_rotate_left(std::string data, int amount); - -#ifdef KS_ZLIB - /** - * Performs an unpacking ("inflation") of zlib-compressed data with usual zlib headers. - * @param data data to unpack - * @return unpacked data - * @throws IOException - */ - static std::string process_zlib(std::string data); -#endif - - //@} - - /** - * Performs modulo operation between two integers: dividend `a` - * and divisor `b`. Divisor `b` is expected to be positive. The - * result is always 0 <= x <= b - 1. - */ - static int mod(int a, int b); - - /** - * Converts given integer `val` to a decimal string representation. - * Should be used in place of std::to_string() (which is available only - * since C++11) in older C++ implementations. - */ - static std::string to_string(int val); - - /** - * Reverses given string `val`, so that the first character becomes the - * last and the last one becomes the first. This should be used to avoid - * the need of local variables at the caller. - */ - static std::string reverse(std::string val); - - /** - * Finds the minimal byte in a byte array, treating bytes as - * unsigned values. - * @param val byte array to scan - * @return minimal byte in byte array as integer - */ - static uint8_t byte_array_min(const std::string val); - - /** - * Finds the maximal byte in a byte array, treating bytes as - * unsigned values. 
- * @param val byte array to scan - * @return maximal byte in byte array as integer - */ - static uint8_t byte_array_max(const std::string val); - -private: - std::istream* m_io; - std::istringstream m_io_str; - int m_bits_left; - uint64_t m_bits; - - void init(); - void exceptions_enable() const; - - static uint64_t get_mask_ones(int n); - - static const int ZLIB_BUF_SIZE = 128 * 1024; -}; - -} - -#endif diff --git a/third_party/kaitai/kaitaistruct.h b/third_party/kaitai/kaitaistruct.h deleted file mode 100644 index 8172ede6c9..0000000000 --- a/third_party/kaitai/kaitaistruct.h +++ /dev/null @@ -1,20 +0,0 @@ -#ifndef KAITAI_STRUCT_H -#define KAITAI_STRUCT_H - -#include - -namespace kaitai { - -class kstruct { -public: - kstruct(kstream *_io) { m__io = _io; } - virtual ~kstruct() {} -protected: - kstream *m__io; -public: - kstream *_io() { return m__io; } -}; - -} - -#endif diff --git a/tools/auto_source.py b/tools/auto_source.py index 401929a9ad..bef6a43e53 100755 --- a/tools/auto_source.py +++ b/tools/auto_source.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 import sys -from openpilot.tools.lib.logreader import LogReader +from openpilot.tools.lib.logreader import LogReader, ReadMode def main(): @@ -9,7 +9,7 @@ def main(): sys.exit(1) log_path = sys.argv[1] - lr = LogReader(log_path, sort_by_time=True) + lr = LogReader(log_path, default_mode=ReadMode.AUTO, sort_by_time=True) print("\n".join(lr.logreader_identifiers)) diff --git a/tools/jotpluggler/assets/pause.png b/tools/jotpluggler/assets/pause.png new file mode 100644 index 0000000000..8040099831 --- /dev/null +++ b/tools/jotpluggler/assets/pause.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3ea96d8193eb9067a5efdc5d88a3099730ecafa40efcd09d7402bb3efd723603 +size 2305 diff --git a/tools/jotpluggler/assets/play.png b/tools/jotpluggler/assets/play.png new file mode 100644 index 0000000000..b1556cf0ab --- /dev/null +++ b/tools/jotpluggler/assets/play.png @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:53097ac5403b725ff1841dfa186ea770b4bb3714205824bde36ec3c2a0fb5dba +size 2758 diff --git a/tools/jotpluggler/assets/split_h.png b/tools/jotpluggler/assets/split_h.png new file mode 100644 index 0000000000..4fd88806e1 --- /dev/null +++ b/tools/jotpluggler/assets/split_h.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:54dd035ff898d881509fa686c402a61af8ef5fb408b92414722da01f773b0d33 +size 2900 diff --git a/tools/jotpluggler/assets/split_v.png b/tools/jotpluggler/assets/split_v.png new file mode 100644 index 0000000000..752e62a4ae --- /dev/null +++ b/tools/jotpluggler/assets/split_v.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:adbd4e5df1f58694dca9dde46d1d95b4e7471684e42e6bca9f41ea5d346e67c5 +size 3669 diff --git a/tools/jotpluggler/assets/x.png b/tools/jotpluggler/assets/x.png new file mode 100644 index 0000000000..3b2eabd447 --- /dev/null +++ b/tools/jotpluggler/assets/x.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a6d9c90cb0dd906e0b15e1f7f3fd9f0dfad3c3b0b34eeed7a7882768dc5f3961 +size 2053 diff --git a/tools/jotpluggler/data.py b/tools/jotpluggler/data.py new file mode 100644 index 0000000000..100dfe544d --- /dev/null +++ b/tools/jotpluggler/data.py @@ -0,0 +1,352 @@ +import numpy as np +import threading +import multiprocessing +import bisect +from collections import defaultdict +from tqdm import tqdm +from openpilot.common.swaglog import cloudlog +from openpilot.tools.lib.logreader import _LogFileReader, LogReader + + +def flatten_dict(d: dict, sep: str = "/", prefix: str = None) -> dict: + result = {} + stack: list[tuple] = [(d, prefix)] + + while stack: + obj, current_prefix = stack.pop() + + if isinstance(obj, dict): + for key, val in obj.items(): + new_prefix = key if current_prefix is None else f"{current_prefix}{sep}{key}" + if isinstance(val, (dict, list)): + stack.append((val, new_prefix)) + else: + result[new_prefix] = val + 
elif isinstance(obj, list): + for i, item in enumerate(obj): + new_prefix = f"{current_prefix}{sep}{i}" + if isinstance(item, (dict, list)): + stack.append((item, new_prefix)) + else: + result[new_prefix] = item + else: + if current_prefix is not None: + result[current_prefix] = obj + return result + + +def extract_field_types(schema, prefix, field_types_dict): + stack = [(schema, prefix)] + + while stack: + current_schema, current_prefix = stack.pop() + + for field in current_schema.fields_list: + field_name = field.proto.name + field_path = f"{current_prefix}/{field_name}" + field_proto = field.proto + field_which = field_proto.which() + + field_type = field_proto.slot.type.which() if field_which == 'slot' else field_which + field_types_dict[field_path] = field_type + + if field_which == 'slot': + slot_type = field_proto.slot.type + type_which = slot_type.which() + + if type_which == 'list': + element_type = slot_type.list.elementType.which() + list_path = f"{field_path}/*" + field_types_dict[list_path] = element_type + + if element_type == 'struct': + stack.append((field.schema.elementType, list_path)) + + elif type_which == 'struct': + stack.append((field.schema, field_path)) + + elif field_which == 'group': + stack.append((field.schema, field_path)) + + +def _convert_to_optimal_dtype(values_list, capnp_type): + dtype_mapping = { + 'bool': np.bool_, 'int8': np.int8, 'int16': np.int16, 'int32': np.int32, 'int64': np.int64, + 'uint8': np.uint8, 'uint16': np.uint16, 'uint32': np.uint32, 'uint64': np.uint64, + 'float32': np.float32, 'float64': np.float64, 'text': object, 'data': object, + 'enum': object, 'anyPointer': object, + } + + target_dtype = dtype_mapping.get(capnp_type, object) + return np.array(values_list, dtype=target_dtype) + + +def _match_field_type(field_path, field_types): + if field_path in field_types: + return field_types[field_path] + + path_parts = field_path.split('/') + template_parts = [p if not p.isdigit() else '*' for p in path_parts] + 
template_path = '/'.join(template_parts) + return field_types.get(template_path) + + +def _get_field_times_values(segment, field_name): + if field_name not in segment: + return None, None + + field_data = segment[field_name] + segment_times = segment['t'] + + if field_data['sparse']: + if len(field_data['t_index']) == 0: + return None, None + return segment_times[field_data['t_index']], field_data['values'] + else: + return segment_times, field_data['values'] + + +def msgs_to_time_series(msgs): + """Extract scalar fields and return (time_series_data, start_time, end_time).""" + collected_data = defaultdict(lambda: {'timestamps': [], 'columns': defaultdict(list), 'sparse_fields': set()}) + field_types = {} + extracted_schemas = set() + min_time = max_time = None + + for msg in msgs: + typ = msg.which() + timestamp = msg.logMonoTime * 1e-9 + if typ != 'initData': + if min_time is None: + min_time = timestamp + max_time = timestamp + + sub_msg = getattr(msg, typ) + if not hasattr(sub_msg, 'to_dict'): + continue + + if hasattr(sub_msg, 'schema') and typ not in extracted_schemas: + extract_field_types(sub_msg.schema, typ, field_types) + extracted_schemas.add(typ) + + try: + msg_dict = sub_msg.to_dict(verbose=True) + except Exception as e: + cloudlog.warning(f"Failed to convert sub_msg.to_dict() for message of type: {typ}: {e}") + continue + + flat_dict = flatten_dict(msg_dict) + flat_dict['_valid'] = msg.valid + field_types[f"{typ}/_valid"] = 'bool' + + type_data = collected_data[typ] + columns, sparse_fields = type_data['columns'], type_data['sparse_fields'] + known_fields = set(columns.keys()) + missing_fields = known_fields - flat_dict.keys() + + for field, value in flat_dict.items(): + if field not in known_fields and type_data['timestamps']: + sparse_fields.add(field) + columns[field].append(value) + if value is None: + sparse_fields.add(field) + + for field in missing_fields: + columns[field].append(None) + sparse_fields.add(field) + + 
type_data['timestamps'].append(timestamp) + + final_result = {} + for typ, data in collected_data.items(): + if not data['timestamps']: + continue + + typ_result = {'t': np.array(data['timestamps'], dtype=np.float64)} + sparse_fields = data['sparse_fields'] + + for field_name, values in data['columns'].items(): + if len(values) < len(data['timestamps']): + values = [None] * (len(data['timestamps']) - len(values)) + values + sparse_fields.add(field_name) + + capnp_type = _match_field_type(f"{typ}/{field_name}", field_types) + + if field_name in sparse_fields: # extract non-None values and their indices + non_none_indices = [] + non_none_values = [] + for i, value in enumerate(values): + if value is not None: + non_none_indices.append(i) + non_none_values.append(value) + + if non_none_values: # check if indices > uint16 max, currently would require a 1000+ Hz signal since indices are within segments + assert max(non_none_indices) <= 65535, f"Sparse field {typ}/{field_name} has timestamp indices exceeding uint16 max. 
Max: {max(non_none_indices)}" + + typ_result[field_name] = { + 'values': _convert_to_optimal_dtype(non_none_values, capnp_type), + 'sparse': True, + 't_index': np.array(non_none_indices, dtype=np.uint16), + } + else: # dense representation + typ_result[field_name] = {'values': _convert_to_optimal_dtype(values, capnp_type), 'sparse': False} + + final_result[typ] = typ_result + + return final_result, min_time or 0.0, max_time or 0.0 + + +def _process_segment(segment_identifier: str): + try: + lr = _LogFileReader(segment_identifier, sort_by_time=True) + return msgs_to_time_series(lr) + except Exception as e: + cloudlog.warning(f"Warning: Failed to process segment {segment_identifier}: {e}") + return {}, 0.0, 0.0 + + +class DataManager: + def __init__(self): + self._segments = [] + self._segment_starts = [] + self._start_time = 0.0 + self._duration = 0.0 + self._paths = set() + self._observers = [] + self._loading = False + self._lock = threading.RLock() + + def load_route(self, route: str) -> None: + if self._loading: + return + self._reset() + threading.Thread(target=self._load_async, args=(route,), daemon=True).start() + + def get_timeseries(self, path: str): + with self._lock: + msg_type, field = path.split('/', 1) + times, values = [], [] + + for segment in self._segments: + if msg_type in segment: + field_times, field_values = _get_field_times_values(segment[msg_type], field) + if field_times is not None: + times.append(field_times) + values.append(field_values) + + if not times: + return np.array([]), np.array([]) + + combined_times = np.concatenate(times) - self._start_time + + if len(values) > 1: + first_dtype = values[0].dtype + if all(arr.dtype == first_dtype for arr in values): # check if all arrays have compatible dtypes + combined_values = np.concatenate(values) + else: + combined_values = np.concatenate([arr.astype(object) for arr in values]) + else: + combined_values = values[0] if values else np.array([]) + + return combined_times, combined_values + + 
def get_value_at(self, path: str, time: float): + with self._lock: + MAX_LOOKBACK = 5.0 # seconds + absolute_time = self._start_time + time + message_type, field = path.split('/', 1) + current_index = bisect.bisect_right(self._segment_starts, absolute_time) - 1 + for index in (current_index, current_index - 1): + if not 0 <= index < len(self._segments): + continue + segment = self._segments[index].get(message_type) + if not segment: + continue + times, values = _get_field_times_values(segment, field) + if times is None or len(times) == 0 or (index != current_index and absolute_time - times[-1] > MAX_LOOKBACK): + continue + position = np.searchsorted(times, absolute_time, 'right') - 1 + if position >= 0 and absolute_time - times[position] <= MAX_LOOKBACK: + return values[position] + return None + + def get_all_paths(self): + with self._lock: + return sorted(self._paths) + + def get_duration(self): + with self._lock: + return self._duration + + def is_plottable(self, path: str): + _, values = self.get_timeseries(path) + if len(values) == 0: + return False + return np.issubdtype(values.dtype, np.number) or np.issubdtype(values.dtype, np.bool_) + + def add_observer(self, callback): + with self._lock: + self._observers.append(callback) + + def remove_observer(self, callback): + with self._lock: + if callback in self._observers: + self._observers.remove(callback) + + def _reset(self): + with self._lock: + self._loading = True + self._segments.clear() + self._segment_starts.clear() + self._paths.clear() + self._start_time = self._duration = 0.0 + observers = self._observers.copy() + + for callback in observers: + callback({'reset': True}) + + def _load_async(self, route: str): + try: + lr = LogReader(route, sort_by_time=True) + if not lr.logreader_identifiers: + cloudlog.warning(f"Warning: No log segments found for route: {route}") + return + + num_processes = max(1, multiprocessing.cpu_count() // 2) + with multiprocessing.Pool(processes=num_processes) as pool, 
tqdm(total=len(lr.logreader_identifiers), desc="Processing Segments") as pbar: + for segment_result, start_time, end_time in pool.imap(_process_segment, lr.logreader_identifiers): + pbar.update(1) + if segment_result: + self._add_segment(segment_result, start_time, end_time) + except Exception: + cloudlog.exception(f"Error loading route {route}:") + finally: + self._finalize_loading() + + def _add_segment(self, segment_data: dict, start_time: float, end_time: float): + with self._lock: + self._segments.append(segment_data) + self._segment_starts.append(start_time) + + if len(self._segments) == 1: + self._start_time = start_time + self._duration = end_time - self._start_time + + for msg_type, data in segment_data.items(): + for field_name in data.keys(): + if field_name != 't': + self._paths.add(f"{msg_type}/{field_name}") + + observers = self._observers.copy() + + for callback in observers: + callback({'segment_added': True, 'duration': self._duration, 'segment_count': len(self._segments)}) + + def _finalize_loading(self): + with self._lock: + self._loading = False + observers = self._observers.copy() + duration = self._duration + + for callback in observers: + callback({'loading_complete': True, 'duration': duration}) diff --git a/tools/jotpluggler/datatree.py b/tools/jotpluggler/datatree.py new file mode 100644 index 0000000000..3390fed2e1 --- /dev/null +++ b/tools/jotpluggler/datatree.py @@ -0,0 +1,315 @@ +import os +import re +import threading +import numpy as np +import dearpygui.dearpygui as dpg + + +class DataTreeNode: + def __init__(self, name: str, full_path: str = "", parent=None): + self.name = name + self.full_path = full_path + self.parent = parent + self.children: dict[str, DataTreeNode] = {} + self.filtered_children: dict[str, DataTreeNode] = {} + self.created_children: dict[str, DataTreeNode] = {} + self.is_leaf = False + self.is_plottable: bool | None = None + self.ui_created = False + self.children_ui_created = False + self.ui_tag: str | None = 
None + + +class DataTree: + MAX_NODES_PER_FRAME = 50 + + def __init__(self, data_manager, playback_manager): + self.data_manager = data_manager + self.playback_manager = playback_manager + self.current_search = "" + self.data_tree = DataTreeNode(name="root") + self._build_queue: dict[str, tuple[DataTreeNode, DataTreeNode, str | int]] = {} # full_path -> (node, parent, before_tag) + self._current_created_paths: set[str] = set() + self._current_filtered_paths: set[str] = set() + self._path_to_node: dict[str, DataTreeNode] = {} # full_path -> node + self._expanded_tags: set[str] = set() + self._item_handlers: dict[str, str] = {} # ui_tag -> handler_tag + self._char_width = None + self._queued_search = None + self._new_data = False + self._ui_lock = threading.RLock() + self._handlers_to_delete = [] + self.data_manager.add_observer(self._on_data_loaded) + + def create_ui(self, parent_tag: str): + with dpg.child_window(parent=parent_tag, border=False, width=-1, height=-1): + dpg.add_text("Timeseries List") + dpg.add_separator() + dpg.add_input_text(tag="search_input", width=-1, hint="Search fields...", callback=self.search_data) + dpg.add_separator() + with dpg.child_window(border=False, width=-1, height=-1): + with dpg.group(tag="data_tree_container"): + pass + + def _on_data_loaded(self, data: dict): + with self._ui_lock: + if data.get('segment_added') or data.get('reset'): + self._new_data = True + + def update_frame(self, font): + if self._handlers_to_delete: # we need to do everything in main thread, frame callbacks are flaky + dpg.render_dearpygui_frame() # wait a frame to ensure queued callbacks are done + with self._ui_lock: + for handler in self._handlers_to_delete: + dpg.delete_item(handler) + self._handlers_to_delete.clear() + + with self._ui_lock: + if self._char_width is None: + if size := dpg.get_text_size(" ", font=font): + self._char_width = size[0] + + if self._new_data: + self._process_path_change() + self._new_data = False + return + + if 
self._queued_search is not None: + self.current_search = self._queued_search + self._process_path_change() + self._queued_search = None + return + + nodes_processed = 0 + while self._build_queue and nodes_processed < self.MAX_NODES_PER_FRAME: + child_node, parent, before_tag = self._build_queue.pop(next(iter(self._build_queue))) + parent_tag = "data_tree_container" if parent.name == "root" else parent.ui_tag + if not child_node.ui_created: + if child_node.is_leaf: + self._create_leaf_ui(child_node, parent_tag, before_tag) + else: + self._create_tree_node_ui(child_node, parent_tag, before_tag) + parent.created_children[child_node.name] = parent.children[child_node.name] + self._current_created_paths.add(child_node.full_path) + nodes_processed += 1 + + def _process_path_change(self): + self._build_queue.clear() + search_term = self.current_search.strip().lower() + all_paths = set(self.data_manager.get_all_paths()) + new_filtered_leafs = {path for path in all_paths if self._should_show_path(path, search_term)} + new_filtered_paths = set(new_filtered_leafs) + for path in new_filtered_leafs: + parts = path.split('/') + for i in range(1, len(parts)): + prefix = '/'.join(parts[:i]) + new_filtered_paths.add(prefix) + created_paths_to_remove = self._current_created_paths - new_filtered_paths + filtered_paths_to_remove = self._current_filtered_paths - new_filtered_leafs + + if created_paths_to_remove or filtered_paths_to_remove: + self._remove_paths_from_tree(created_paths_to_remove, filtered_paths_to_remove) + self._apply_expansion_to_tree(self.data_tree, search_term) + + paths_to_add = new_filtered_leafs - self._current_created_paths + if paths_to_add: + self._add_paths_to_tree(paths_to_add) + self._apply_expansion_to_tree(self.data_tree, search_term) + self._current_filtered_paths = new_filtered_paths + + def _remove_paths_from_tree(self, created_paths_to_remove, filtered_paths_to_remove): + for path in sorted(created_paths_to_remove, reverse=True): + current_node = 
self._path_to_node[path] + + if len(current_node.created_children) == 0: + self._current_created_paths.remove(current_node.full_path) + if item_handler_tag := self._item_handlers.get(current_node.ui_tag): + dpg.configure_item(item_handler_tag, show=False) + self._handlers_to_delete.append(item_handler_tag) + del self._item_handlers[current_node.ui_tag] + dpg.delete_item(current_node.ui_tag) + current_node.ui_created = False + current_node.ui_tag = None + current_node.children_ui_created = False + del current_node.parent.created_children[current_node.name] + del current_node.parent.filtered_children[current_node.name] + + for path in filtered_paths_to_remove: + parts = path.split('/') + current_node = self._path_to_node[path] + + part_array_index = -1 + while len(current_node.filtered_children) == 0 and part_array_index >= -len(parts): + current_node = current_node.parent + if parts[part_array_index] in current_node.filtered_children: + del current_node.filtered_children[parts[part_array_index]] + part_array_index -= 1 + + def _add_paths_to_tree(self, paths): + parent_nodes_to_recheck = set() + for path in sorted(paths): + parts = path.split('/') + current_node = self.data_tree + current_path_prefix = "" + + for i, part in enumerate(parts): + current_path_prefix = f"{current_path_prefix}/{part}" if current_path_prefix else part + if i < len(parts) - 1: + parent_nodes_to_recheck.add(current_node) # for incremental changes from new data + if part not in current_node.children: + current_node.children[part] = DataTreeNode(name=part, full_path=current_path_prefix, parent=current_node) + self._path_to_node[current_path_prefix] = current_node.children[part] + current_node.filtered_children[part] = current_node.children[part] + current_node = current_node.children[part] + + if not current_node.is_leaf: + current_node.is_leaf = True + + for p_node in parent_nodes_to_recheck: + p_node.children_ui_created = False + self._request_children_build(p_node) + + def 
_get_node_label_and_expand(self, node: DataTreeNode, search_term: str): + label = f"{node.name} ({len(node.filtered_children)} fields)" + expand = len(search_term) > 0 and any(search_term in path for path in self._get_descendant_paths(node)) + if expand and node.parent and len(node.parent.filtered_children) > 100 and len(node.filtered_children) > 2: + label += " (+)" # symbol for large lists which aren't fully expanded for performance (only affects procLog rn) + expand = False + return label, expand + + def _apply_expansion_to_tree(self, node: DataTreeNode, search_term: str): + if node.ui_created and not node.is_leaf and node.ui_tag and dpg.does_item_exist(node.ui_tag): + label, expand = self._get_node_label_and_expand(node, search_term) + if expand: + self._expanded_tags.add(node.ui_tag) + dpg.set_value(node.ui_tag, expand) + elif node.ui_tag in self._expanded_tags: # not expanded and was expanded + self._expanded_tags.remove(node.ui_tag) + dpg.set_value(node.ui_tag, expand) + dpg.delete_item(node.ui_tag, children_only=True) # delete children (not visible since collapsed) + self._reset_ui_state_recursive(node) + node.children_ui_created = False + dpg.set_item_label(node.ui_tag, label) + for child in node.created_children.values(): + self._apply_expansion_to_tree(child, search_term) + + def _reset_ui_state_recursive(self, node: DataTreeNode): + for child in node.created_children.values(): + if child.ui_tag is not None: + if item_handler_tag := self._item_handlers.get(child.ui_tag): + self._handlers_to_delete.append(item_handler_tag) + dpg.configure_item(item_handler_tag, show=False) + del self._item_handlers[child.ui_tag] + self._reset_ui_state_recursive(child) + child.ui_created = False + child.ui_tag = None + child.children_ui_created = False + self._current_created_paths.remove(child.full_path) + node.created_children.clear() + + def search_data(self): + with self._ui_lock: + self._queued_search = dpg.get_value("search_input") + + def _create_tree_node_ui(self, 
node: DataTreeNode, parent_tag: str, before: str | int): + node.ui_tag = f"tree_{node.full_path}" + search_term = self.current_search.strip().lower() + label, expand = self._get_node_label_and_expand(node, search_term) + if expand: + self._expanded_tags.add(node.ui_tag) + elif node.ui_tag in self._expanded_tags: + self._expanded_tags.remove(node.ui_tag) + + with dpg.tree_node( + label=label, parent=parent_tag, tag=node.ui_tag, default_open=expand, open_on_arrow=True, open_on_double_click=True, before=before, delay_search=True + ): + with dpg.item_handler_registry() as handler_tag: + dpg.add_item_toggled_open_handler(callback=lambda s, a, u: self._request_children_build(node)) + dpg.add_item_visible_handler(callback=lambda s, a, u: self._request_children_build(node)) + dpg.bind_item_handler_registry(node.ui_tag, handler_tag) + self._item_handlers[node.ui_tag] = handler_tag + node.ui_created = True + + def _create_leaf_ui(self, node: DataTreeNode, parent_tag: str, before: str | int): + node.ui_tag = f"leaf_{node.full_path}" + with dpg.group(parent=parent_tag, tag=node.ui_tag, before=before, delay_search=True): + with dpg.table(header_row=False, policy=dpg.mvTable_SizingStretchProp, delay_search=True): + dpg.add_table_column(init_width_or_weight=0.5) + dpg.add_table_column(init_width_or_weight=0.5) + with dpg.table_row(): + dpg.add_text(node.name) + dpg.add_text("N/A", tag=f"value_{node.full_path}") + + if node.is_plottable is None: + node.is_plottable = self.data_manager.is_plottable(node.full_path) + if node.is_plottable: + with dpg.drag_payload(parent=node.ui_tag, drag_data=node.full_path, payload_type="TIMESERIES_PAYLOAD"): + dpg.add_text(f"Plot: {node.full_path}") + + with dpg.item_handler_registry() as handler_tag: + dpg.add_item_visible_handler(callback=self._on_item_visible, user_data=node.full_path) + dpg.bind_item_handler_registry(node.ui_tag, handler_tag) + self._item_handlers[node.ui_tag] = handler_tag + node.ui_created = True + + def 
_on_item_visible(self, sender, app_data, user_data): + with self._ui_lock: + path = user_data + value_tag = f"value_{path}" + if not dpg.does_item_exist(value_tag): + return + value_column_width = dpg.get_item_rect_size(f"leaf_{path}")[0] // 2 + value = self.data_manager.get_value_at(path, self.playback_manager.current_time_s) + if value is not None: + formatted_value = self.format_and_truncate(value, value_column_width, self._char_width) + dpg.set_value(value_tag, formatted_value) + else: + dpg.set_value(value_tag, "N/A") + + def _request_children_build(self, node: DataTreeNode): + with self._ui_lock: + if not node.children_ui_created and (node.name == "root" or (node.ui_tag is not None and dpg.get_value(node.ui_tag))): # check root or node expanded + sorted_children = sorted(node.filtered_children.values(), key=self._natural_sort_key) + next_existing: list[int | str] = [0] * len(sorted_children) + current_before_tag: int | str = 0 + + for i in range(len(sorted_children) - 1, -1, -1): # calculate "before_tag" for correct ordering when incrementally building tree + child = sorted_children[i] + next_existing[i] = current_before_tag + if child.ui_created: + candidate_tag = f"leaf_{child.full_path}" if child.is_leaf else f"tree_{child.full_path}" + if dpg.does_item_exist(candidate_tag): + current_before_tag = candidate_tag + + for i, child_node in enumerate(sorted_children): + if not child_node.ui_created: + before_tag = next_existing[i] + self._build_queue[child_node.full_path] = (child_node, node, before_tag) + node.children_ui_created = True + + def _should_show_path(self, path: str, search_term: str) -> bool: + if 'DEPRECATED' in path and not os.environ.get('SHOW_DEPRECATED'): + return False + return not search_term or search_term in path.lower() + + def _natural_sort_key(self, node: DataTreeNode): + node_type_key = node.is_leaf + parts = [int(p) if p.isdigit() else p.lower() for p in re.split(r'(\d+)', node.name) if p] + return (node_type_key, parts) + + def 
_get_descendant_paths(self, node: DataTreeNode): + for child_name, child_node in node.filtered_children.items(): + child_name_lower = child_name.lower() + if child_node.is_leaf: + yield child_name_lower + else: + for path in self._get_descendant_paths(child_node): + yield f"{child_name_lower}/{path}" + + @staticmethod + def format_and_truncate(value, available_width: float, char_width: float) -> str: + s = f"{value:.5f}" if np.issubdtype(type(value), np.floating) else str(value) + max_chars = int(available_width / char_width) + if len(s) > max_chars: + return s[: max(0, max_chars - 3)] + "..." + return s diff --git a/tools/jotpluggler/layout.py b/tools/jotpluggler/layout.py new file mode 100644 index 0000000000..917c156f9f --- /dev/null +++ b/tools/jotpluggler/layout.py @@ -0,0 +1,272 @@ +import dearpygui.dearpygui as dpg +from openpilot.tools.jotpluggler.data import DataManager +from openpilot.tools.jotpluggler.views import TimeSeriesPanel + +GRIP_SIZE = 4 +MIN_PANE_SIZE = 60 + + +class PlotLayoutManager: + def __init__(self, data_manager: DataManager, playback_manager, worker_manager, scale: float = 1.0): + self.data_manager = data_manager + self.playback_manager = playback_manager + self.worker_manager = worker_manager + self.scale = scale + self.container_tag = "plot_layout_container" + self.active_panels: list = [] + + self.grip_size = int(GRIP_SIZE * self.scale) + self.min_pane_size = int(MIN_PANE_SIZE * self.scale) + + initial_panel = TimeSeriesPanel(data_manager, playback_manager, worker_manager) + self.layout: dict = {"type": "panel", "panel": initial_panel} + + def create_ui(self, parent_tag: str): + if dpg.does_item_exist(self.container_tag): + dpg.delete_item(self.container_tag) + + with dpg.child_window(tag=self.container_tag, parent=parent_tag, border=False, width=-1, height=-1, no_scrollbar=True, no_scroll_with_mouse=True): + container_width, container_height = dpg.get_item_rect_size(self.container_tag) + self._create_ui_recursive(self.layout, 
self.container_tag, [], container_width, container_height) + + def _create_ui_recursive(self, layout: dict, parent_tag: str, path: list[int], width: int, height: int): + if layout["type"] == "panel": + self._create_panel_ui(layout, parent_tag, path, width, height) + else: + self._create_split_ui(layout, parent_tag, path, width, height) + + def _create_panel_ui(self, layout: dict, parent_tag: str, path: list[int], width: int, height:int): + panel_tag = self._path_to_tag(path, "panel") + panel = layout["panel"] + self.active_panels.append(panel) + text_size = int(13 * self.scale) + bar_height = (text_size+24) if width < int(279 * self.scale + 80) else (text_size+8) # adjust height to allow for scrollbar + + with dpg.child_window(parent=parent_tag, border=True, width=-1, height=-1, no_scrollbar=True): + with dpg.group(horizontal=True): + with dpg.child_window(tag=panel_tag, width=-(text_size + 16), height=bar_height, horizontal_scrollbar=True, no_scroll_with_mouse=True, border=False): + with dpg.group(horizontal=True): + dpg.add_input_text(default_value=panel.title, width=int(100 * self.scale), callback=lambda s, v: setattr(panel, "title", v)) + dpg.add_combo(items=["Time Series"], default_value="Time Series", width=int(100 * self.scale)) + dpg.add_button(label="Clear", callback=lambda: self.clear_panel(panel), width=int(40 * self.scale)) + dpg.add_image_button(texture_tag="split_h_texture", callback=lambda: self.split_panel(path, 0), width=text_size, height=text_size) + dpg.add_image_button(texture_tag="split_v_texture", callback=lambda: self.split_panel(path, 1), width=text_size, height=text_size) + dpg.add_image_button(texture_tag="x_texture", callback=lambda: self.delete_panel(path), width=text_size, height=text_size) + + dpg.add_separator() + + content_tag = self._path_to_tag(path, "content") + with dpg.child_window(tag=content_tag, border=False, height=-1, width=-1, no_scrollbar=True): + panel.create_ui(content_tag) + + def _create_split_ui(self, layout: dict, 
parent_tag: str, path: list[int], width: int, height: int): + split_tag = self._path_to_tag(path, "split") + orientation, _, pane_sizes = self._get_split_geometry(layout, (width, height)) + + with dpg.group(tag=split_tag, parent=parent_tag, horizontal=orientation == 0): + for i, child_layout in enumerate(layout["children"]): + child_path = path + [i] + container_tag = self._path_to_tag(child_path, "container") + pane_width, pane_height = [(pane_sizes[i], -1), (-1, pane_sizes[i])][orientation] # fill 2nd dim up to the border + with dpg.child_window(tag=container_tag, width=pane_width, height=pane_height, border=False, no_scrollbar=True): + child_width, child_height = [(pane_sizes[i], height), (width, pane_sizes[i])][orientation] + self._create_ui_recursive(child_layout, container_tag, child_path, child_width, child_height) + if i < len(layout["children"]) - 1: + self._create_grip(split_tag, path, i, orientation) + + def clear_panel(self, panel): + panel.clear() + + def delete_panel(self, panel_path: list[int]): + if not panel_path: # Root deletion + old_panel = self.layout["panel"] + old_panel.destroy_ui() + self.active_panels.remove(old_panel) + new_panel = TimeSeriesPanel(self.data_manager, self.playback_manager, self.worker_manager) + self.layout = {"type": "panel", "panel": new_panel} + self._rebuild_ui_at_path([]) + return + + parent, child_index = self._get_parent_and_index(panel_path) + layout_to_delete = parent["children"][child_index] + self._cleanup_ui_recursive(layout_to_delete, panel_path) + + parent["children"].pop(child_index) + parent["proportions"].pop(child_index) + + if len(parent["children"]) == 1: # remove parent and collapse + remaining_child = parent["children"][0] + if len(panel_path) == 1: # parent is at root level - promote remaining child to root + self.layout = remaining_child + self._rebuild_ui_at_path([]) + else: # replace parent with remaining child in grandparent + grandparent_path = panel_path[:-2] + parent_index = panel_path[-2] + 
self._replace_layout_at_path(grandparent_path + [parent_index], remaining_child) + self._rebuild_ui_at_path(grandparent_path + [parent_index]) + else: # redistribute proportions + equal_prop = 1.0 / len(parent["children"]) + parent["proportions"] = [equal_prop] * len(parent["children"]) + self._rebuild_ui_at_path(panel_path[:-1]) + + def split_panel(self, panel_path: list[int], orientation: int): + current_layout = self._get_layout_at_path(panel_path) + existing_panel = current_layout["panel"] + new_panel = TimeSeriesPanel(self.data_manager, self.playback_manager, self.worker_manager) + parent, child_index = self._get_parent_and_index(panel_path) + + if parent is None: # Root split + self.layout = { + "type": "split", + "orientation": orientation, + "children": [{"type": "panel", "panel": existing_panel}, {"type": "panel", "panel": new_panel}], + "proportions": [0.5, 0.5], + } + self._rebuild_ui_at_path([]) + elif parent["type"] == "split" and parent["orientation"] == orientation: # Same orientation - insert into existing split + parent["children"].insert(child_index + 1, {"type": "panel", "panel": new_panel}) + parent["proportions"] = [1.0 / len(parent["children"])] * len(parent["children"]) + self._rebuild_ui_at_path(panel_path[:-1]) + else: # Different orientation - create new split level + new_split = {"type": "split", "orientation": orientation, "children": [current_layout, {"type": "panel", "panel": new_panel}], "proportions": [0.5, 0.5]} + self._replace_layout_at_path(panel_path, new_split) + self._rebuild_ui_at_path(panel_path) + + def _rebuild_ui_at_path(self, path: list[int]): + layout = self._get_layout_at_path(path) + if path: + container_tag = self._path_to_tag(path, "container") + else: # Root update + container_tag = self.container_tag + + self._cleanup_ui_recursive(layout, path) + dpg.delete_item(container_tag, children_only=True) + width, height = dpg.get_item_rect_size(container_tag) + self._create_ui_recursive(layout, container_tag, path, width, 
height) + + def _cleanup_ui_recursive(self, layout: dict, path: list[int]): + if layout["type"] == "panel": + panel = layout["panel"] + panel.destroy_ui() + if panel in self.active_panels: + self.active_panels.remove(panel) + else: + for i in range(len(layout["children"]) - 1): + handler_tag = f"{self._path_to_tag(path, f'grip_{i}')}_handler" + if dpg.does_item_exist(handler_tag): + dpg.delete_item(handler_tag) + + for i, child in enumerate(layout["children"]): + self._cleanup_ui_recursive(child, path + [i]) + + def update_all_panels(self): + for panel in self.active_panels: + panel.update() + + def on_viewport_resize(self): + self._resize_splits_recursive(self.layout, []) + + def _resize_splits_recursive(self, layout: dict, path: list[int], width: int | None = None, height: int | None = None): + if layout["type"] == "split": + split_tag = self._path_to_tag(path, "split") + if dpg.does_item_exist(split_tag): + available_sizes = (width, height) if width and height else dpg.get_item_rect_size(dpg.get_item_parent(split_tag)) + orientation, _, pane_sizes = self._get_split_geometry(layout, available_sizes) + size_properties = ("width", "height") + + for i, child_layout in enumerate(layout["children"]): + child_path = path + [i] + container_tag = self._path_to_tag(child_path, "container") + if dpg.does_item_exist(container_tag): + dpg.configure_item(container_tag, **{size_properties[orientation]: pane_sizes[i]}) + child_width, child_height = [(pane_sizes[i], available_sizes[1]), (available_sizes[0], pane_sizes[i])][orientation] + self._resize_splits_recursive(child_layout, child_path, child_width, child_height) + else: # leaf node/panel - adjust bar height to allow for scrollbar + panel_tag = self._path_to_tag(path, "panel") + if width is not None and width < int(279 * self.scale + 80): # scaled widths of the elements in top bar + fixed 8 padding on left and right of each item + dpg.configure_item(panel_tag, height=(int(13*self.scale) + 24)) + else: + 
dpg.configure_item(panel_tag, height=(int(13*self.scale) + 8)) + + def _get_split_geometry(self, layout: dict, available_size: tuple[int, int]) -> tuple[int, int, list[int]]: + orientation = layout["orientation"] + num_grips = len(layout["children"]) - 1 + usable_size = max(self.min_pane_size, available_size[orientation] - (num_grips * (self.grip_size + 8 * (2-orientation)))) # approximate, scaling is weird + pane_sizes = [max(self.min_pane_size, int(usable_size * prop)) for prop in layout["proportions"]] + return orientation, usable_size, pane_sizes + + def _get_layout_at_path(self, path: list[int]) -> dict: + current = self.layout + for index in path: + current = current["children"][index] + return current + + def _get_parent_and_index(self, path: list[int]) -> tuple: + return (None, -1) if not path else (self._get_layout_at_path(path[:-1]), path[-1]) + + def _replace_layout_at_path(self, path: list[int], new_layout: dict): + if not path: + self.layout = new_layout + else: + parent, index = self._get_parent_and_index(path) + parent["children"][index] = new_layout + + def _path_to_tag(self, path: list[int], prefix: str = "") -> str: + path_str = "_".join(map(str, path)) if path else "root" + return f"{prefix}_{path_str}" if prefix else path_str + + def _create_grip(self, parent_tag: str, path: list[int], grip_index: int, orientation: int): + grip_tag = self._path_to_tag(path, f"grip_{grip_index}") + width, height = [(self.grip_size, -1), (-1, self.grip_size)][orientation] + + with dpg.child_window(tag=grip_tag, parent=parent_tag, width=width, height=height, no_scrollbar=True, border=False): + button_tag = dpg.add_button(label="", width=-1, height=-1) + + with dpg.item_handler_registry(tag=f"{grip_tag}_handler"): + user_data = (path, grip_index, orientation) + dpg.add_item_active_handler(callback=self._on_grip_drag, user_data=user_data) + dpg.add_item_deactivated_handler(callback=self._on_grip_end, user_data=user_data) + dpg.bind_item_handler_registry(button_tag, 
f"{grip_tag}_handler") + + def _on_grip_drag(self, sender, app_data, user_data): + path, grip_index, orientation = user_data + layout = self._get_layout_at_path(path) + + if "_drag_data" not in layout: + layout["_drag_data"] = {"initial_proportions": layout["proportions"][:], "start_mouse": dpg.get_mouse_pos(local=False)[orientation]} + return + + drag_data = layout["_drag_data"] + split_tag = self._path_to_tag(path, "split") + if not dpg.does_item_exist(split_tag): + return + + _, usable_size, _ = self._get_split_geometry(layout, dpg.get_item_rect_size(split_tag)) + current_coord = dpg.get_mouse_pos(local=False)[orientation] + delta = current_coord - drag_data["start_mouse"] + delta_prop = delta / usable_size + + left_idx = grip_index + right_idx = left_idx + 1 + initial = drag_data["initial_proportions"] + min_prop = self.min_pane_size / usable_size + + new_left = max(min_prop, initial[left_idx] + delta_prop) + new_right = max(min_prop, initial[right_idx] - delta_prop) + + total_available = initial[left_idx] + initial[right_idx] + if new_left + new_right > total_available: + if new_left > new_right: + new_left = total_available - new_right + else: + new_right = total_available - new_left + + layout["proportions"] = initial[:] + layout["proportions"][left_idx] = new_left + layout["proportions"][right_idx] = new_right + + self._resize_splits_recursive(layout, path) + + def _on_grip_end(self, sender, app_data, user_data): + path, _, _ = user_data + self._get_layout_at_path(path).pop("_drag_data", None) diff --git a/tools/jotpluggler/pluggle.py b/tools/jotpluggler/pluggle.py new file mode 100755 index 0000000000..582a44454e --- /dev/null +++ b/tools/jotpluggler/pluggle.py @@ -0,0 +1,252 @@ +#!/usr/bin/env python3 +import argparse +import os +import pyautogui +import subprocess +import dearpygui.dearpygui as dpg +import multiprocessing +import uuid +import signal +from openpilot.common.basedir import BASEDIR +from openpilot.tools.jotpluggler.data import DataManager 
+from openpilot.tools.jotpluggler.datatree import DataTree +from openpilot.tools.jotpluggler.layout import PlotLayoutManager + +DEMO_ROUTE = "a2a0ccea32023010|2023-07-27--13-01-19" + + +class WorkerManager: + def __init__(self, max_workers=None): + self.pool = multiprocessing.Pool(max_workers or min(4, multiprocessing.cpu_count()), initializer=WorkerManager.worker_initializer) + self.active_tasks = {} + + def submit_task(self, func, args_list, callback=None, task_id=None): + task_id = task_id or str(uuid.uuid4()) + + if task_id in self.active_tasks: + try: + self.active_tasks[task_id].terminate() + except Exception: + pass + + def handle_success(result): + self.active_tasks.pop(task_id, None) + if callback: + try: + callback(result) + except Exception as e: + print(f"Callback for task {task_id} failed: {e}") + + def handle_error(error): + self.active_tasks.pop(task_id, None) + print(f"Task {task_id} failed: {error}") + + async_result = self.pool.starmap_async(func, args_list, callback=handle_success, error_callback=handle_error) + self.active_tasks[task_id] = async_result + return task_id + + @staticmethod + def worker_initializer(): + signal.signal(signal.SIGINT, signal.SIG_IGN) + + def shutdown(self): + for task in self.active_tasks.values(): + try: + task.terminate() + except Exception: + pass + self.pool.terminate() + self.pool.join() + + +class PlaybackManager: + def __init__(self): + self.is_playing = False + self.current_time_s = 0.0 + self.duration_s = 0.0 + + def set_route_duration(self, duration: float): + self.duration_s = duration + self.seek(min(self.current_time_s, duration)) + + def toggle_play_pause(self): + if not self.is_playing and self.current_time_s >= self.duration_s: + self.seek(0.0) + self.is_playing = not self.is_playing + texture_tag = "pause_texture" if self.is_playing else "play_texture" + dpg.configure_item("play_pause_button", texture_tag=texture_tag) + + def seek(self, time_s: float): + self.current_time_s = max(0.0, min(time_s, 
self.duration_s)) + + def update_time(self, delta_t: float): + if self.is_playing: + self.current_time_s = min(self.current_time_s + delta_t, self.duration_s) + if self.current_time_s >= self.duration_s: + self.is_playing = False + dpg.configure_item("play_pause_button", texture_tag="play_texture") + return self.current_time_s + + +class MainController: + def __init__(self, scale: float = 1.0): + self.scale = scale + self.data_manager = DataManager() + self.playback_manager = PlaybackManager() + self.worker_manager = WorkerManager() + self._create_global_themes() + self.data_tree = DataTree(self.data_manager, self.playback_manager) + self.plot_layout_manager = PlotLayoutManager(self.data_manager, self.playback_manager, self.worker_manager, scale=self.scale) + self.data_manager.add_observer(self.on_data_loaded) + + def _create_global_themes(self): + with dpg.theme(tag="global_line_theme"): + with dpg.theme_component(dpg.mvLineSeries): + scaled_thickness = max(1.0, self.scale) + dpg.add_theme_style(dpg.mvPlotStyleVar_LineWeight, scaled_thickness, category=dpg.mvThemeCat_Plots) + + with dpg.theme(tag="global_timeline_theme"): + with dpg.theme_component(dpg.mvInfLineSeries): + scaled_thickness = max(1.0, self.scale) + dpg.add_theme_style(dpg.mvPlotStyleVar_LineWeight, scaled_thickness, category=dpg.mvThemeCat_Plots) + dpg.add_theme_color(dpg.mvPlotCol_Line, (255, 0, 0, 128), category=dpg.mvThemeCat_Plots) + + def on_data_loaded(self, data: dict): + duration = data.get('duration', 0.0) + self.playback_manager.set_route_duration(duration) + + if data.get('reset'): + self.playback_manager.current_time_s = 0.0 + self.playback_manager.duration_s = 0.0 + self.playback_manager.is_playing = False + dpg.set_value("load_status", "Loading...") + dpg.set_value("timeline_slider", 0.0) + dpg.configure_item("timeline_slider", max_value=0.0) + dpg.configure_item("play_pause_button", texture_tag="play_texture") + dpg.configure_item("load_button", enabled=True) + elif 
data.get('loading_complete'): + num_paths = len(self.data_manager.get_all_paths()) + dpg.set_value("load_status", f"Loaded {num_paths} data paths") + dpg.configure_item("load_button", enabled=True) + elif data.get('segment_added'): + segment_count = data.get('segment_count', 0) + dpg.set_value("load_status", f"Loading... {segment_count} segments processed") + + dpg.configure_item("timeline_slider", max_value=duration) + + def setup_ui(self): + with dpg.texture_registry(): + script_dir = os.path.dirname(os.path.realpath(__file__)) + for image in ["play", "pause", "x", "split_h", "split_v"]: + texture = dpg.load_image(os.path.join(script_dir, "assets", f"{image}.png")) + dpg.add_static_texture(width=texture[0], height=texture[1], default_value=texture[3], tag=f"{image}_texture") + + with dpg.window(tag="Primary Window"): + with dpg.group(horizontal=True): + # Left panel - Data tree + with dpg.child_window(label="Sidebar", width=300 * self.scale, tag="sidebar_window", border=True, resizable_x=True): + with dpg.group(horizontal=True): + dpg.add_input_text(tag="route_input", width=-75 * self.scale, hint="Enter route name...") + dpg.add_button(label="Load", callback=self.load_route, tag="load_button", width=-1) + dpg.add_text("Ready to load route", tag="load_status") + dpg.add_separator() + self.data_tree.create_ui("sidebar_window") + + # Right panel - Plots and timeline + with dpg.group(tag="right_panel"): + with dpg.child_window(label="Plot Window", border=True, height=-(32 + 13 * self.scale), tag="main_plot_area"): + self.plot_layout_manager.create_ui("main_plot_area") + + with dpg.child_window(label="Timeline", border=True): + with dpg.table(header_row=False, borders_innerH=False, borders_innerV=False, borders_outerH=False, borders_outerV=False): + btn_size = int(13 * self.scale) + dpg.add_table_column(width_fixed=True, init_width_or_weight=(btn_size + 8)) # Play button + dpg.add_table_column(width_stretch=True) # Timeline slider + 
dpg.add_table_column(width_fixed=True, init_width_or_weight=int(50 * self.scale)) # FPS counter + with dpg.table_row(): + dpg.add_image_button(texture_tag="play_texture", tag="play_pause_button", callback=self.toggle_play_pause, width=btn_size, height=btn_size) + dpg.add_slider_float(tag="timeline_slider", default_value=0.0, label="", width=-1, callback=self.timeline_drag) + dpg.add_text("", tag="fps_counter") + with dpg.item_handler_registry(tag="plot_resize_handler"): + dpg.add_item_resize_handler(callback=self.on_plot_resize) + dpg.bind_item_handler_registry("right_panel", "plot_resize_handler") + + dpg.set_primary_window("Primary Window", True) + + def on_plot_resize(self, sender, app_data, user_data): + self.plot_layout_manager.on_viewport_resize() + + def load_route(self): + route_name = dpg.get_value("route_input").strip() + if route_name: + dpg.set_value("load_status", "Loading route...") + dpg.configure_item("load_button", enabled=False) + self.data_manager.load_route(route_name) + + def toggle_play_pause(self, sender): + self.playback_manager.toggle_play_pause() + + def timeline_drag(self, sender, app_data): + self.playback_manager.seek(app_data) + + def update_frame(self, font): + self.data_tree.update_frame(font) + + new_time = self.playback_manager.update_time(dpg.get_delta_time()) + if not dpg.is_item_active("timeline_slider"): + dpg.set_value("timeline_slider", new_time) + + self.plot_layout_manager.update_all_panels() + + dpg.set_value("fps_counter", f"{dpg.get_frame_rate():.1f} FPS") + + def shutdown(self): + self.worker_manager.shutdown() + + +def main(route_to_load=None): + dpg.create_context() + + # TODO: find better way of calculating display scaling + try: + w, h = next(tuple(map(int, l.split()[0].split('x'))) for l in subprocess.check_output(['xrandr']).decode().split('\n') if '*' in l) # actual resolution + scale = pyautogui.size()[0] / w # scaled resolution + except Exception: + scale = 1 + + with dpg.font_registry(): + default_font = 
dpg.add_font(os.path.join(BASEDIR, "selfdrive/assets/fonts/JetBrainsMono-Medium.ttf"), int(13 * scale)) + dpg.bind_font(default_font) + + viewport_width, viewport_height = int(1200 * scale), int(800 * scale) + mouse_x, mouse_y = pyautogui.position() # TODO: find better way of creating the window where the user is (default dpg behavior annoying on multiple displays) + dpg.create_viewport( + title='JotPluggler', width=viewport_width, height=viewport_height, x_pos=mouse_x - viewport_width // 2, y_pos=mouse_y - viewport_height // 2 + ) + dpg.setup_dearpygui() + + controller = MainController(scale=scale) + controller.setup_ui() + + if route_to_load: + dpg.set_value("route_input", route_to_load) + controller.load_route() + + dpg.show_viewport() + + # Main loop + try: + while dpg.is_dearpygui_running(): + controller.update_frame(default_font) + dpg.render_dearpygui_frame() + finally: + controller.shutdown() + dpg.destroy_context() + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="A tool for visualizing openpilot logs.") + parser.add_argument("--demo", action="store_true", help="Use the demo route instead of providing one") + parser.add_argument("route", nargs='?', default=None, help="Optional route name to load on startup.") + args = parser.parse_args() + route = DEMO_ROUTE if args.demo else args.route + main(route_to_load=route) diff --git a/tools/jotpluggler/views.py b/tools/jotpluggler/views.py new file mode 100644 index 0000000000..4af9a102ac --- /dev/null +++ b/tools/jotpluggler/views.py @@ -0,0 +1,195 @@ +import uuid +import threading +import numpy as np +from collections import deque +import dearpygui.dearpygui as dpg +from abc import ABC, abstractmethod + + +class ViewPanel(ABC): + """Abstract base class for all view panels that can be displayed in a plot container""" + + def __init__(self, panel_id: str = None): + self.panel_id = panel_id or str(uuid.uuid4()) + self.title = "Untitled Panel" + + @abstractmethod + def clear(self): + pass 
+ + @abstractmethod + def create_ui(self, parent_tag: str): + pass + + @abstractmethod + def destroy_ui(self): + pass + + @abstractmethod + def get_panel_type(self) -> str: + pass + + @abstractmethod + def update(self): + pass + + +class TimeSeriesPanel(ViewPanel): + def __init__(self, data_manager, playback_manager, worker_manager, panel_id: str | None = None): + super().__init__(panel_id) + self.data_manager = data_manager + self.playback_manager = playback_manager + self.worker_manager = worker_manager + self.title = "Time Series Plot" + self.plot_tag = f"plot_{self.panel_id}" + self.x_axis_tag = f"{self.plot_tag}_x_axis" + self.y_axis_tag = f"{self.plot_tag}_y_axis" + self.timeline_indicator_tag = f"{self.plot_tag}_timeline" + self._ui_created = False + self._series_data: dict[str, tuple[np.ndarray, np.ndarray]] = {} + self._last_plot_duration = 0 + self._update_lock = threading.RLock() + self._results_deque: deque[tuple[str, list, list]] = deque() + self._new_data = False + + def create_ui(self, parent_tag: str): + self.data_manager.add_observer(self.on_data_loaded) + with dpg.plot(height=-1, width=-1, tag=self.plot_tag, parent=parent_tag, drop_callback=self._on_series_drop, payload_type="TIMESERIES_PAYLOAD"): + dpg.add_plot_legend() + dpg.add_plot_axis(dpg.mvXAxis, no_label=True, tag=self.x_axis_tag) + dpg.add_plot_axis(dpg.mvYAxis, no_label=True, tag=self.y_axis_tag) + timeline_series_tag = dpg.add_inf_line_series(x=[0], label="Timeline", parent=self.y_axis_tag, tag=self.timeline_indicator_tag) + dpg.bind_item_theme(timeline_series_tag, "global_timeline_theme") + + for series_path in list(self._series_data.keys()): + self.add_series(series_path) + self._ui_created = True + + def update(self): + with self._update_lock: + if not self._ui_created: + return + + if self._new_data: # handle new data in main thread + self._new_data = False + for series_path in list(self._series_data.keys()): + self.add_series(series_path, update=True) + + while self._results_deque: 
# handle downsampled results in main thread + results = self._results_deque.popleft() + for series_path, downsampled_time, downsampled_values in results: + series_tag = f"series_{self.panel_id}_{series_path}" + if dpg.does_item_exist(series_tag): + dpg.set_value(series_tag, (downsampled_time, downsampled_values.astype(float))) + + # update timeline + current_time_s = self.playback_manager.current_time_s + dpg.set_value(self.timeline_indicator_tag, [[current_time_s], [0]]) + + # update timeseries legend label + for series_path, (time_array, value_array) in self._series_data.items(): + position = np.searchsorted(time_array, current_time_s, side='right') - 1 + if position >= 0 and (current_time_s - time_array[position]) <= 1.0: + value = value_array[position] + formatted_value = f"{value:.5f}" if np.issubdtype(type(value), np.floating) else str(value) + series_tag = f"series_{self.panel_id}_{series_path}" + if dpg.does_item_exist(series_tag): + dpg.configure_item(series_tag, label=f"{series_path}: {formatted_value}") + + # downsample if plot zoom changed significantly + plot_duration = dpg.get_axis_limits(self.x_axis_tag)[1] - dpg.get_axis_limits(self.x_axis_tag)[0] + if plot_duration > self._last_plot_duration * 2 or plot_duration < self._last_plot_duration * 0.5: + self._downsample_all_series(plot_duration) + + def _downsample_all_series(self, plot_duration): + plot_width = dpg.get_item_rect_size(self.plot_tag)[0] + if plot_width <= 0 or plot_duration <= 0: + return + + self._last_plot_duration = plot_duration + target_points_per_second = plot_width / plot_duration + work_items = [] + for series_path, (time_array, value_array) in self._series_data.items(): + if len(time_array) == 0: + continue + series_duration = time_array[-1] - time_array[0] if len(time_array) > 1 else 1 + points_per_second = len(time_array) / series_duration + if points_per_second > target_points_per_second * 2: + target_points = max(int(target_points_per_second * series_duration), plot_width) + 
work_items.append((series_path, time_array, value_array, target_points)) + elif dpg.does_item_exist(f"series_{self.panel_id}_{series_path}"): + dpg.set_value(f"series_{self.panel_id}_{series_path}", (time_array, value_array.astype(float))) + + if work_items: + self.worker_manager.submit_task( + TimeSeriesPanel._downsample_worker, work_items, callback=lambda results: self._results_deque.append(results), task_id=f"downsample_{self.panel_id}" + ) + + def add_series(self, series_path: str, update: bool = False): + with self._update_lock: + if update or series_path not in self._series_data: + self._series_data[series_path] = self.data_manager.get_timeseries(series_path) + + time_array, value_array = self._series_data[series_path] + series_tag = f"series_{self.panel_id}_{series_path}" + if dpg.does_item_exist(series_tag): + dpg.set_value(series_tag, (time_array, value_array.astype(float))) + else: + line_series_tag = dpg.add_line_series(x=time_array, y=value_array.astype(float), label=series_path, parent=self.y_axis_tag, tag=series_tag) + dpg.bind_item_theme(line_series_tag, "global_line_theme") + dpg.fit_axis_data(self.x_axis_tag) + dpg.fit_axis_data(self.y_axis_tag) + plot_duration = dpg.get_axis_limits(self.x_axis_tag)[1] - dpg.get_axis_limits(self.x_axis_tag)[0] + self._downsample_all_series(plot_duration) + + def destroy_ui(self): + with self._update_lock: + self.data_manager.remove_observer(self.on_data_loaded) + if dpg.does_item_exist(self.plot_tag): + dpg.delete_item(self.plot_tag) + self._ui_created = False + + def get_panel_type(self) -> str: + return "timeseries" + + def clear(self): + with self._update_lock: + for series_path in list(self._series_data.keys()): + self.remove_series(series_path) + + def remove_series(self, series_path: str): + with self._update_lock: + if series_path in self._series_data: + if dpg.does_item_exist(f"series_{self.panel_id}_{series_path}"): + dpg.delete_item(f"series_{self.panel_id}_{series_path}") + del 
self._series_data[series_path] + + def on_data_loaded(self, data: dict): + self._new_data = True + + def _on_series_drop(self, sender, app_data, user_data): + self.add_series(app_data) + + @staticmethod + def _downsample_worker(series_path, time_array, value_array, target_points): + if len(time_array) <= target_points: + return series_path, time_array, value_array + + step = len(time_array) / target_points + indices = [] + + for i in range(target_points): + start_idx = int(i * step) + end_idx = int((i + 1) * step) + if start_idx == end_idx: + indices.append(start_idx) + else: + bucket_values = value_array[start_idx:end_idx] + min_idx = start_idx + np.argmin(bucket_values) + max_idx = start_idx + np.argmax(bucket_values) + if min_idx != max_idx: + indices.extend([min(min_idx, max_idx), max(min_idx, max_idx)]) + else: + indices.append(min_idx) + indices = sorted(set(indices)) + return series_path, time_array[indices], value_array[indices] diff --git a/tools/op.sh b/tools/op.sh index e83b46b2d2..54ff8e97e9 100755 --- a/tools/op.sh +++ b/tools/op.sh @@ -365,6 +365,7 @@ function op_switch() { fi BRANCH="$1" + git config --replace-all remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" git fetch "$REMOTE" "$BRANCH" git checkout -f FETCH_HEAD git checkout -B "$BRANCH" --track "$REMOTE"/"$BRANCH" diff --git a/tools/plotjuggler/layouts/torque-controller.xml b/tools/plotjuggler/layouts/torque-controller.xml index 606df03611..8e9a1a8526 100644 --- a/tools/plotjuggler/layouts/torque-controller.xml +++ b/tools/plotjuggler/layouts/torque-controller.xml @@ -1,77 +1,45 @@ - + - + - - + + - - + + - - + + - - + + - - + + - + - + - - + + - - + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + @@ -79,115 +47,134 @@ - + - - + + - - + + - - + + - - + + - - + + - - + + - + - - - - + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - - + + - - + + - - - 
- + + + + - - + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + @@ -199,25 +186,34 @@ + + + + + + + + + + + + + - + - return (0) - /carState/canValid + return value * 3.6 + /carState/vEgo - + - return (value * v1 ^ 2) - (v2 * 9.81) - /controlsState/curvature - - /carState/vEgo - /liveParameters/roll - + return value * 2.23694 + /carState/vEgo @@ -228,15 +224,24 @@ /liveParameters/roll - + - return value * 2.23694 - /carState/vEgo + return (value * v1 ^ 2) - (v2 * 9.81) + /controlsState/curvature + + /carState/vEgo + /liveParameters/roll + - + - return value * 3.6 - /carState/vEgo + return value + 0.2 + /carParams/steerActuatorDelay + + + + return (0) + /carState/canValid diff --git a/tools/replay/SConscript b/tools/replay/SConscript index 18849407cf..136c4119f6 100644 --- a/tools/replay/SConscript +++ b/tools/replay/SConscript @@ -13,6 +13,8 @@ else: replay_lib_src = ["replay.cc", "consoleui.cc", "camera.cc", "filereader.cc", "logreader.cc", "framereader.cc", "route.cc", "util.cc", "seg_mgr.cc", "timeline.cc", "api.cc"] +if arch != "Darwin": + replay_lib_src.append("qcom_decoder.cc") replay_lib = replay_env.Library("replay", replay_lib_src, LIBS=base_libs, FRAMEWORKS=base_frameworks) Export('replay_lib') replay_libs = [replay_lib, 'avutil', 'avcodec', 'avformat', 'bz2', 'zstd', 'curl', 'yuv', 'ncurses'] + base_libs diff --git a/tools/replay/framereader.cc b/tools/replay/framereader.cc index ed88626be3..e9cd090446 100644 --- a/tools/replay/framereader.cc +++ b/tools/replay/framereader.cc @@ -8,6 +8,7 @@ #include "common/util.h" #include "third_party/libyuv/include/libyuv.h" #include "tools/replay/util.h" +#include "system/hardware/hw.h" #ifdef __APPLE__ #define HW_DEVICE_TYPE AV_HWDEVICE_TYPE_VIDEOTOOLBOX @@ -37,7 +38,16 @@ struct DecoderManager { return it->second.get(); } - auto decoder = std::make_unique(); + std::unique_ptr decoder; + #ifndef __APPLE__ + if (Hardware::TICI() && hw_decoder) { + decoder = 
std::make_unique(); + } else + #endif + { + decoder = std::make_unique(); + } + if (!decoder->open(codecpar, hw_decoder)) { decoder.reset(nullptr); } @@ -114,19 +124,19 @@ bool FrameReader::get(int idx, VisionBuf *buf) { // class VideoDecoder -VideoDecoder::VideoDecoder() { +FFmpegVideoDecoder::FFmpegVideoDecoder() { av_frame_ = av_frame_alloc(); hw_frame_ = av_frame_alloc(); } -VideoDecoder::~VideoDecoder() { +FFmpegVideoDecoder::~FFmpegVideoDecoder() { if (hw_device_ctx) av_buffer_unref(&hw_device_ctx); if (decoder_ctx) avcodec_free_context(&decoder_ctx); av_frame_free(&av_frame_); av_frame_free(&hw_frame_); } -bool VideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) { +bool FFmpegVideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) { const AVCodec *decoder = avcodec_find_decoder(codecpar->codec_id); if (!decoder) return false; @@ -149,7 +159,7 @@ bool VideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) { return true; } -bool VideoDecoder::initHardwareDecoder(AVHWDeviceType hw_device_type) { +bool FFmpegVideoDecoder::initHardwareDecoder(AVHWDeviceType hw_device_type) { const AVCodecHWConfig *config = nullptr; for (int i = 0; (config = avcodec_get_hw_config(decoder_ctx->codec, i)) != nullptr; i++) { if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX && config->device_type == hw_device_type) { @@ -175,7 +185,7 @@ bool VideoDecoder::initHardwareDecoder(AVHWDeviceType hw_device_type) { return true; } -bool VideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) { +bool FFmpegVideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) { int current_idx = idx; if (idx != reader->prev_idx + 1) { // seeking to the nearest key frame @@ -219,7 +229,7 @@ bool VideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) { return false; } -AVFrame *VideoDecoder::decodeFrame(AVPacket *pkt) { +AVFrame *FFmpegVideoDecoder::decodeFrame(AVPacket *pkt) { int ret = avcodec_send_packet(decoder_ctx, pkt); if (ret 
< 0) { rError("Error sending a packet for decoding: %d", ret); @@ -239,7 +249,7 @@ AVFrame *VideoDecoder::decodeFrame(AVPacket *pkt) { return (av_frame_->format == hw_pix_fmt) ? hw_frame_ : av_frame_; } -bool VideoDecoder::copyBuffer(AVFrame *f, VisionBuf *buf) { +bool FFmpegVideoDecoder::copyBuffer(AVFrame *f, VisionBuf *buf) { if (hw_pix_fmt == HW_PIX_FMT) { for (int i = 0; i < height/2; i++) { memcpy(buf->y + (i*2 + 0)*buf->stride, f->data[0] + (i*2 + 0)*f->linesize[0], width); @@ -256,3 +266,47 @@ bool VideoDecoder::copyBuffer(AVFrame *f, VisionBuf *buf) { } return true; } + +#ifndef __APPLE__ +bool QcomVideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) { + if (codecpar->codec_id != AV_CODEC_ID_HEVC) { + rError("Hardware decoder only supports HEVC codec"); + return false; + } + width = codecpar->width; + height = codecpar->height; + msm_vidc.init(VIDEO_DEVICE, width, height, V4L2_PIX_FMT_HEVC); + return true; +} + +bool QcomVideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) { + int from_idx = idx; + if (idx != reader->prev_idx + 1) { + // seeking to the nearest key frame + for (int i = idx; i >= 0; --i) { + if (reader->packets_info[i].flags & AV_PKT_FLAG_KEY) { + from_idx = i; + break; + } + } + + auto pos = reader->packets_info[from_idx].pos; + int ret = avformat_seek_file(reader->input_ctx, 0, pos, pos, pos, AVSEEK_FLAG_BYTE); + if (ret < 0) { + rError("Failed to seek to byte position %lld: %d", pos, AVERROR(ret)); + return false; + } + } + reader->prev_idx = idx; + bool result = false; + AVPacket pkt; + msm_vidc.avctx = reader->input_ctx; + for (int i = from_idx; i <= idx; ++i) { + if (av_read_frame(reader->input_ctx, &pkt) == 0) { + result = msm_vidc.decodeFrame(&pkt, buf) && (i == idx); + av_packet_unref(&pkt); + } + } + return result; +} +#endif diff --git a/tools/replay/framereader.h b/tools/replay/framereader.h index a15847e311..d8e86fce0f 100644 --- a/tools/replay/framereader.h +++ b/tools/replay/framereader.h @@ -7,6 
+7,10 @@ #include "tools/replay/filereader.h" #include "tools/replay/util.h" +#ifndef __APPLE__ +#include "tools/replay/qcom_decoder.h" +#endif + extern "C" { #include #include @@ -40,11 +44,18 @@ public: class VideoDecoder { public: - VideoDecoder(); - ~VideoDecoder(); - bool open(AVCodecParameters *codecpar, bool hw_decoder); - bool decode(FrameReader *reader, int idx, VisionBuf *buf); + virtual ~VideoDecoder() = default; + virtual bool open(AVCodecParameters *codecpar, bool hw_decoder) = 0; + virtual bool decode(FrameReader *reader, int idx, VisionBuf *buf) = 0; int width = 0, height = 0; +}; + +class FFmpegVideoDecoder : public VideoDecoder { +public: + FFmpegVideoDecoder(); + ~FFmpegVideoDecoder() override; + bool open(AVCodecParameters *codecpar, bool hw_decoder) override; + bool decode(FrameReader *reader, int idx, VisionBuf *buf) override; private: bool initHardwareDecoder(AVHWDeviceType hw_device_type); @@ -56,3 +67,16 @@ private: AVPixelFormat hw_pix_fmt = AV_PIX_FMT_NONE; AVBufferRef *hw_device_ctx = nullptr; }; + +#ifndef __APPLE__ +class QcomVideoDecoder : public VideoDecoder { +public: + QcomVideoDecoder() {}; + ~QcomVideoDecoder() override {}; + bool open(AVCodecParameters *codecpar, bool hw_decoder) override; + bool decode(FrameReader *reader, int idx, VisionBuf *buf) override; + +private: + MsmVidc msm_vidc = MsmVidc(); +}; +#endif diff --git a/tools/replay/qcom_decoder.cc b/tools/replay/qcom_decoder.cc new file mode 100644 index 0000000000..eb5409daa3 --- /dev/null +++ b/tools/replay/qcom_decoder.cc @@ -0,0 +1,346 @@ +#include "qcom_decoder.h" + +#include +#include "third_party/linux/include/v4l2-controls.h" +#include + + +#include "common/swaglog.h" +#include "common/util.h" + +// echo "0xFFFF" > /sys/kernel/debug/msm_vidc/debug_level + +static void copyBuffer(VisionBuf *src_buf, VisionBuf *dst_buf) { + // Copy Y plane + memcpy(dst_buf->y, src_buf->y, src_buf->height * src_buf->stride); + // Copy UV plane + memcpy(dst_buf->uv, src_buf->uv, 
src_buf->height / 2 * src_buf->stride); +} + +static void request_buffers(int fd, v4l2_buf_type buf_type, unsigned int count) { + struct v4l2_requestbuffers reqbuf = { + .count = count, + .type = buf_type, + .memory = V4L2_MEMORY_USERPTR + }; + util::safe_ioctl(fd, VIDIOC_REQBUFS, &reqbuf, "VIDIOC_REQBUFS failed"); +} + +MsmVidc::~MsmVidc() { + if (fd > 0) { + close(fd); + } +} + +bool MsmVidc::init(const char* dev, size_t width, size_t height, uint64_t codec) { + LOG("Initializing msm_vidc device %s", dev); + this->w = width; + this->h = height; + this->fd = open(dev, O_RDWR, 0); + if (fd < 0) { + LOGE("failed to open video device %s", dev); + return false; + } + subscribeEvents(); + v4l2_buf_type out_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + setPlaneFormat(out_type, V4L2_PIX_FMT_HEVC); // Also allocates the output buffer + setFPS(FPS); + request_buffers(fd, out_type, OUTPUT_BUFFER_COUNT); + util::safe_ioctl(fd, VIDIOC_STREAMON, &out_type, "VIDIOC_STREAMON OUTPUT failed"); + restartCapture(); + setupPolling(); + + this->initialized = true; + return true; +} + +VisionBuf* MsmVidc::decodeFrame(AVPacket *pkt, VisionBuf *buf) { + assert(initialized && (pkt != nullptr) && (buf != nullptr)); + + this->frame_ready = false; + this->current_output_buf = buf; + bool sent_packet = false; + + while (!this->frame_ready) { + if (!sent_packet) { + int buf_index = getBufferUnlocked(); + if (buf_index >= 0) { + assert(buf_index < out_buf_cnt); + sendPacket(buf_index, pkt); + sent_packet = true; + } + } + + if (poll(pfd, nfds, -1) < 0) { + LOGE("poll() error: %d", errno); + return nullptr; + } + + if (VisionBuf* result = processEvents()) { + return result; + } + } + + return buf; +} + +VisionBuf* MsmVidc::processEvents() { + for (int idx = 0; idx < nfds; idx++) { + short revents = pfd[idx].revents; + if (!revents) continue; + + if (idx == ev[EV_VIDEO]) { + if (revents & (POLLIN | POLLRDNORM)) { + VisionBuf *result = handleCapture(); + if (result == this->current_output_buf) { + 
this->frame_ready = true; + } + } + if (revents & (POLLOUT | POLLWRNORM)) { + handleOutput(); + } + if (revents & POLLPRI) { + handleEvent(); + } + } else { + LOGE("Unexpected event on fd %d", pfd[idx].fd); + } + } + return nullptr; +} + +VisionBuf* MsmVidc::handleCapture() { + struct v4l2_buffer buf = {0}; + struct v4l2_plane planes[1] = {0}; + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.memory = V4L2_MEMORY_USERPTR; + buf.m.planes = planes; + buf.length = 1; + util::safe_ioctl(this->fd, VIDIOC_DQBUF, &buf, "VIDIOC_DQBUF CAPTURE failed"); + + if (this->reconfigure_pending || buf.m.planes[0].bytesused == 0) { + return nullptr; + } + + copyBuffer(&cap_bufs[buf.index], this->current_output_buf); + queueCaptureBuffer(buf.index); + return this->current_output_buf; +} + +bool MsmVidc::subscribeEvents() { + for (uint32_t event : subscriptions) { + struct v4l2_event_subscription sub = { .type = event}; + util::safe_ioctl(fd, VIDIOC_SUBSCRIBE_EVENT, &sub, "VIDIOC_SUBSCRIBE_EVENT failed"); + } + return true; +} + +bool MsmVidc::setPlaneFormat(enum v4l2_buf_type type, uint32_t fourcc) { + struct v4l2_format fmt = {.type = type}; + struct v4l2_pix_format_mplane *pix = &fmt.fmt.pix_mp; + *pix = { + .width = (__u32)this->w, + .height = (__u32)this->h, + .pixelformat = fourcc + }; + util::safe_ioctl(fd, VIDIOC_S_FMT, &fmt, "VIDIOC_S_FMT failed"); + if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) { + this->out_buf_size = pix->plane_fmt[0].sizeimage; + int ion_size = this->out_buf_size * OUTPUT_BUFFER_COUNT; // Output (input) buffers are ION buffer. 
+ this->out_buf.allocate(ion_size); // mmap rw + for (int i = 0; i < OUTPUT_BUFFER_COUNT; i++) { + this->out_buf_off[i] = i * this->out_buf_size; + this->out_buf_addr[i] = (char *)this->out_buf.addr + this->out_buf_off[i]; + this->out_buf_flag[i] = false; + } + LOGD("Set output buffer size to %d, count %d, addr %p", this->out_buf_size, OUTPUT_BUFFER_COUNT, this->out_buf.addr); + } else if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) { + request_buffers(this->fd, type, CAPTURE_BUFFER_COUNT); + util::safe_ioctl(fd, VIDIOC_G_FMT, &fmt, "VIDIOC_G_FMT failed"); + const __u32 y_size = pix->plane_fmt[0].sizeimage; + const __u32 y_stride = pix->plane_fmt[0].bytesperline; + for (int i = 0; i < CAPTURE_BUFFER_COUNT; i++) { + size_t uv_offset = (size_t)y_stride * pix->height; + size_t required = uv_offset + (y_stride * pix->height / 2); // enough for Y + UV. For linear NV12, UV plane starts at y_stride * height. + size_t alloc_size = std::max(y_size, required); + this->cap_bufs[i].allocate(alloc_size); + this->cap_bufs[i].init_yuv(pix->width, pix->height, y_stride, uv_offset); + } + LOGD("Set capture buffer size to %d, count %d, addr %p, extradata size %d", + pix->plane_fmt[0].sizeimage, CAPTURE_BUFFER_COUNT, this->cap_bufs[0].addr, pix->plane_fmt[1].sizeimage); + } + return true; +} + +bool MsmVidc::setFPS(uint32_t fps) { + struct v4l2_streamparm streamparam = { + .type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, + .parm.output.timeperframe = {1, fps} + }; + util::safe_ioctl(fd, VIDIOC_S_PARM, &streamparam, "VIDIOC_S_PARM failed"); + return true; +} + +bool MsmVidc::restartCapture() { + // stop if already initialized + enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + if (this->initialized) { + LOGD("Restarting capture, flushing buffers..."); + util::safe_ioctl(this->fd, VIDIOC_STREAMOFF, &type, "VIDIOC_STREAMOFF CAPTURE failed"); + struct v4l2_requestbuffers reqbuf = {.type = type, .memory = V4L2_MEMORY_USERPTR}; + util::safe_ioctl(this->fd, VIDIOC_REQBUFS, &reqbuf, 
"VIDIOC_REQBUFS failed"); + for (size_t i = 0; i < CAPTURE_BUFFER_COUNT; ++i) { + this->cap_bufs[i].free(); + this->cap_buf_flag[i] = false; // mark as not queued + cap_bufs[i].~VisionBuf(); + new (&cap_bufs[i]) VisionBuf(); + } + } + // setup, start and queue capture buffers + setDBP(); + setPlaneFormat(type, V4L2_PIX_FMT_NV12); + util::safe_ioctl(this->fd, VIDIOC_STREAMON, &type, "VIDIOC_STREAMON CAPTURE failed"); + for (size_t i = 0; i < CAPTURE_BUFFER_COUNT; ++i) { + queueCaptureBuffer(i); + } + + return true; +} + +bool MsmVidc::queueCaptureBuffer(int i) { + struct v4l2_buffer buf = {0}; + struct v4l2_plane planes[1] = {0}; + + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.memory = V4L2_MEMORY_USERPTR; + buf.index = i; + buf.m.planes = planes; + buf.length = 1; + // decoded frame plane + planes[0].m.userptr = (unsigned long)this->cap_bufs[i].addr; // no security + planes[0].length = this->cap_bufs[i].len; + planes[0].reserved[0] = this->cap_bufs[i].fd; // ION fd + planes[0].reserved[1] = 0; + planes[0].bytesused = this->cap_bufs[i].len; + planes[0].data_offset = 0; + util::safe_ioctl(this->fd, VIDIOC_QBUF, &buf, "VIDIOC_QBUF failed"); + this->cap_buf_flag[i] = true; // mark as queued + return true; +} + +bool MsmVidc::queueOutputBuffer(int i, size_t size) { + struct v4l2_buffer buf = {0}; + struct v4l2_plane planes[1] = {0}; + + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.memory = V4L2_MEMORY_USERPTR; + buf.index = i; + buf.m.planes = planes; + buf.length = 1; + // decoded frame plane + planes[0].m.userptr = (unsigned long)this->out_buf_off[i]; // check this + planes[0].length = this->out_buf_size; + planes[0].reserved[0] = this->out_buf.fd; // ION fd + planes[0].reserved[1] = 0; + planes[0].bytesused = size; + planes[0].data_offset = 0; + assert((this->out_buf_off[i] & 0xfff) == 0); // must be 4 KiB aligned + assert(this->out_buf_size % 4096 == 0); // ditto for size + + util::safe_ioctl(this->fd, VIDIOC_QBUF, &buf, "VIDIOC_QBUF failed"); + 
this->out_buf_flag[i] = true; // mark as queued + return true; +} + +bool MsmVidc::setDBP() { + struct v4l2_ext_control control[2] = {0}; + struct v4l2_ext_controls controls = {0}; + control[0].id = V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_MODE; + control[0].value = 1; // V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_SECONDARY + control[1].id = V4L2_CID_MPEG_VIDC_VIDEO_DPB_COLOR_FORMAT; + control[1].value = 0; // V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_NONE + controls.count = 2; + controls.ctrl_class = V4L2_CTRL_CLASS_MPEG; + controls.controls = control; + util::safe_ioctl(fd, VIDIOC_S_EXT_CTRLS, &controls, "VIDIOC_S_EXT_CTRLS failed"); + return true; +} + +bool MsmVidc::setupPolling() { + // Initialize poll array + pfd[EV_VIDEO] = {fd, POLLIN | POLLOUT | POLLWRNORM | POLLRDNORM | POLLPRI, 0}; + ev[EV_VIDEO] = EV_VIDEO; + nfds = 1; + return true; +} + +bool MsmVidc::sendPacket(int buf_index, AVPacket *pkt) { + assert(buf_index >= 0 && buf_index < out_buf_cnt); + assert(pkt != nullptr && pkt->data != nullptr && pkt->size > 0); + // Prepare output buffer + memset(this->out_buf_addr[buf_index], 0, this->out_buf_size); + uint8_t * data = (uint8_t *)this->out_buf_addr[buf_index]; + memcpy(data, pkt->data, pkt->size); + queueOutputBuffer(buf_index, pkt->size); + return true; +} + +int MsmVidc::getBufferUnlocked() { + for (int i = 0; i < this->out_buf_cnt; i++) { + if (!out_buf_flag[i]) { + return i; + } + } + return -1; +} + + +bool MsmVidc::handleOutput() { + struct v4l2_buffer buf = {0}; + struct v4l2_plane planes[1]; + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.memory = V4L2_MEMORY_USERPTR; + buf.m.planes = planes; + buf.length = 1; + util::safe_ioctl(this->fd, VIDIOC_DQBUF, &buf, "VIDIOC_DQBUF OUTPUT failed"); + this->out_buf_flag[buf.index] = false; // mark as not queued + return true; +} + +bool MsmVidc::handleEvent() { + // dequeue event + struct v4l2_event event = {0}; + util::safe_ioctl(this->fd, VIDIOC_DQEVENT, &event, "VIDIOC_DQEVENT failed"); + switch (event.type) 
{ + case V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT: { + unsigned int *ptr = (unsigned int *)event.u.data; + unsigned int height = ptr[0]; + unsigned int width = ptr[1]; + this->w = width; + this->h = height; + LOGD("Port Reconfig received insufficient, new size %ux%u, flushing capture bufs...", width, height); // This is normal + struct v4l2_decoder_cmd dec; + dec.flags = V4L2_QCOM_CMD_FLUSH_CAPTURE; + dec.cmd = V4L2_QCOM_CMD_FLUSH; + util::safe_ioctl(this->fd, VIDIOC_DECODER_CMD, &dec, "VIDIOC_DECODER_CMD FLUSH_CAPTURE failed"); + this->reconfigure_pending = true; + LOGD("Waiting for flush done event to reconfigure capture queue"); + break; + } + + case V4L2_EVENT_MSM_VIDC_FLUSH_DONE: { + unsigned int *ptr = (unsigned int *)event.u.data; + unsigned int flags = ptr[0]; + if (flags & V4L2_QCOM_CMD_FLUSH_CAPTURE) { + if (this->reconfigure_pending) { + this->restartCapture(); + this->reconfigure_pending = false; + } + } + break; + } + default: + break; + } + return true; +} \ No newline at end of file diff --git a/tools/replay/qcom_decoder.h b/tools/replay/qcom_decoder.h new file mode 100644 index 0000000000..1d7522cc70 --- /dev/null +++ b/tools/replay/qcom_decoder.h @@ -0,0 +1,88 @@ +#pragma once + +#include +#include + +#include "msgq/visionipc/visionbuf.h" + +extern "C" { + #include + #include +} + +#define V4L2_EVENT_MSM_VIDC_START (V4L2_EVENT_PRIVATE_START + 0x00001000) +#define V4L2_EVENT_MSM_VIDC_FLUSH_DONE (V4L2_EVENT_MSM_VIDC_START + 1) +#define V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT (V4L2_EVENT_MSM_VIDC_START + 3) +#define V4L2_CID_MPEG_MSM_VIDC_BASE 0x00992000 +#define V4L2_CID_MPEG_VIDC_VIDEO_DPB_COLOR_FORMAT (V4L2_CID_MPEG_MSM_VIDC_BASE + 44) +#define V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_MODE (V4L2_CID_MPEG_MSM_VIDC_BASE + 22) +#define V4L2_QCOM_CMD_FLUSH_CAPTURE (1 << 1) +#define V4L2_QCOM_CMD_FLUSH (4) + +#define VIDEO_DEVICE "/dev/video32" +#define OUTPUT_BUFFER_COUNT 8 +#define CAPTURE_BUFFER_COUNT 8 +#define FPS 20 + + 
+class MsmVidc { +public: + MsmVidc() = default; + ~MsmVidc(); + + bool init(const char* dev, size_t width, size_t height, uint64_t codec); + VisionBuf* decodeFrame(AVPacket* pkt, VisionBuf* buf); + + AVFormatContext* avctx = nullptr; + int fd = 0; + +private: + bool initialized = false; + bool reconfigure_pending = false; + bool frame_ready = false; + + VisionBuf* current_output_buf = nullptr; + VisionBuf out_buf; // Single input buffer + VisionBuf cap_bufs[CAPTURE_BUFFER_COUNT]; // Capture (output) buffers + + size_t w = 1928, h = 1208; + size_t cap_height = 0, cap_width = 0; + + int cap_buf_size = 0; + int out_buf_size = 0; + + size_t cap_plane_off[CAPTURE_BUFFER_COUNT] = {0}; + size_t cap_plane_stride[CAPTURE_BUFFER_COUNT] = {0}; + bool cap_buf_flag[CAPTURE_BUFFER_COUNT] = {false}; + + size_t out_buf_off[OUTPUT_BUFFER_COUNT] = {0}; + void* out_buf_addr[OUTPUT_BUFFER_COUNT] = {0}; + bool out_buf_flag[OUTPUT_BUFFER_COUNT] = {false}; + const int out_buf_cnt = OUTPUT_BUFFER_COUNT; + + const int subscriptions[2] = { + V4L2_EVENT_MSM_VIDC_FLUSH_DONE, + V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT + }; + + enum { EV_VIDEO, EV_COUNT }; + struct pollfd pfd[EV_COUNT] = {0}; + int ev[EV_COUNT] = {-1}; + int nfds = 0; + + VisionBuf* processEvents(); + bool setupOutput(); + bool subscribeEvents(); + bool setPlaneFormat(v4l2_buf_type type, uint32_t fourcc); + bool setFPS(uint32_t fps); + bool restartCapture(); + bool queueCaptureBuffer(int i); + bool queueOutputBuffer(int i, size_t size); + bool setDBP(); + bool setupPolling(); + bool sendPacket(int buf_index, AVPacket* pkt); + int getBufferUnlocked(); + VisionBuf* handleCapture(); + bool handleOutput(); + bool handleEvent(); +}; diff --git a/tools/sim/lib/simulated_car.py b/tools/sim/lib/simulated_car.py index 2681b26904..68ff3050db 100644 --- a/tools/sim/lib/simulated_car.py +++ b/tools/sim/lib/simulated_car.py @@ -11,7 +11,7 @@ from openpilot.tools.sim.lib.common import SimulatorState class SimulatedCar: 
"""Simulates a honda civic 2022 (panda state + can messages) to OpenPilot""" - packer = CANPacker("honda_civic_ex_2022_can_generated") + packer = CANPacker("honda_bosch_radarless_generated") def __init__(self): self.pm = messaging.PubMaster(['can', 'pandaStates']) @@ -23,7 +23,7 @@ class SimulatedCar: @staticmethod def get_car_can_parser(): - dbc_f = 'honda_civic_ex_2022_can_generated' + dbc_f = 'honda_bosch_radarless_generated' checks = [] return CANParser(dbc_f, checks, 0) diff --git a/uv.lock b/uv.lock index 7010cdfb83..25d2b626cf 100644 --- a/uv.lock +++ b/uv.lock @@ -451,6 +451,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/fc/c0a3f4c4eaa5a22fbef91713474666e13d0ea2a69c84532579490a9f2cc8/dbus_next-0.2.3-py3-none-any.whl", hash = "sha256:58948f9aff9db08316734c0be2a120f6dc502124d9642f55e90ac82ffb16a18b", size = 57885, upload-time = "2021-07-25T22:11:25.466Z" }, ] +[[package]] +name = "dearpygui" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/fe/66293fc40254a29f060efd3398f2b1001ed79263ae1837db9ec42caa8f1d/dearpygui-2.1.0-cp311-cp311-macosx_10_6_x86_64.whl", hash = "sha256:03e5dc0b3dd2f7965e50bbe41f3316a814408064b582586de994d93afedb125c", size = 2100924, upload-time = "2025-07-07T14:20:00.602Z" }, + { url = "https://files.pythonhosted.org/packages/c4/4d/9fa1c3156ba7bbf4dc89e2e322998752fccfdc3575923a98dd6a4da48911/dearpygui-2.1.0-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b5b37710c3fa135c48e2347f39ecd1f415146e86db5d404707a0bf72d16bd304", size = 1874441, upload-time = "2025-07-07T14:20:09.165Z" }, + { url = "https://files.pythonhosted.org/packages/5a/3c/af5673b50699e1734296a0b5bcef39bb6989175b001ad1f9b0e7888ad90d/dearpygui-2.1.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:b0cfd7ac7eaa090fc22d6aa60fc4b527fc631cee10c348e4d8df92bb39af03d2", size = 2636574, upload-time = "2025-07-07T14:20:14.951Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/db/ed4db0bb3d88e7a8c405472641419086bef9632c4b8b0489dc0c43519c0d/dearpygui-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a9af54f96d3ef30c5db9d12cdf3266f005507396fb0da2e12e6b22b662161070", size = 1810266, upload-time = "2025-07-07T14:19:51.565Z" }, + { url = "https://files.pythonhosted.org/packages/55/9d/20a55786cc9d9266395544463d5db3be3528f7d5244bc52ba760de5dcc2d/dearpygui-2.1.0-cp312-cp312-macosx_10_6_x86_64.whl", hash = "sha256:1270ceb9cdb8ecc047c42477ccaa075b7864b314a5d09191f9280a24c8aa90a0", size = 2101499, upload-time = "2025-07-07T14:20:01.701Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b2/39d820796b7ac4d0ebf93306c1f031bf3516b159408286f1fb495c6babeb/dearpygui-2.1.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:ce9969eb62057b9d4c88a8baaed13b5fbe4058caa9faf5b19fec89da75aece3d", size = 1874385, upload-time = "2025-07-07T14:20:11.226Z" }, + { url = "https://files.pythonhosted.org/packages/fc/26/c29998ffeb5eb8d638f307851e51a81c8bd4aeaf89ad660fc67ea4d1ac1a/dearpygui-2.1.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:a3ca8cf788db63ef7e2e8d6f277631b607d548b37606f080ca1b42b1f0a9b183", size = 2635863, upload-time = "2025-07-07T14:20:17.186Z" }, + { url = "https://files.pythonhosted.org/packages/28/9c/3ab33927f1d8c839c5b7033a33d44fc9f0aeb00c264fc9772cb7555a03c4/dearpygui-2.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:43f0e4db9402f44fc3683a1f5c703564819de18cc15a042de7f1ed1c8cb5d148", size = 1810460, upload-time = "2025-07-07T14:19:53.13Z" }, +] + [[package]] name = "dictdiffer" version = "0.9.0" @@ -510,27 +525,27 @@ wheels = [ [[package]] name = "fonttools" -version = "4.59.1" +version = "4.59.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/7f/29c9c3fe4246f6ad96fee52b88d0dc3a863c7563b0afc959e36d78b965dc/fonttools-4.59.1.tar.gz", hash = "sha256:74995b402ad09822a4c8002438e54940d9f1ecda898d2bb057729d7da983e4cb", size = 3534394, 
upload-time = "2025-08-14T16:28:14.266Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/a5/fba25f9fbdab96e26dedcaeeba125e5f05a09043bf888e0305326e55685b/fonttools-4.59.2.tar.gz", hash = "sha256:e72c0749b06113f50bcb80332364c6be83a9582d6e3db3fe0b280f996dc2ef22", size = 3540889, upload-time = "2025-08-27T16:40:30.97Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/62/9667599561f623d4a523cc9eb4f66f3b94b6155464110fa9aebbf90bbec7/fonttools-4.59.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4909cce2e35706f3d18c54d3dcce0414ba5e0fb436a454dffec459c61653b513", size = 2778815, upload-time = "2025-08-14T16:26:28.484Z" }, - { url = "https://files.pythonhosted.org/packages/8f/78/cc25bcb2ce86033a9df243418d175e58f1956a35047c685ef553acae67d6/fonttools-4.59.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbec204fa9f877641747f2d9612b2b656071390d7a7ef07a9dbf0ecf9c7195c", size = 2341631, upload-time = "2025-08-14T16:26:30.396Z" }, - { url = "https://files.pythonhosted.org/packages/a4/cc/fcbb606dd6871f457ac32f281c20bcd6cc77d9fce77b5a4e2b2afab1f500/fonttools-4.59.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39dfd42cc2dc647b2c5469bc7a5b234d9a49e72565b96dd14ae6f11c2c59ef15", size = 5022222, upload-time = "2025-08-14T16:26:32.447Z" }, - { url = "https://files.pythonhosted.org/packages/61/96/c0b1cf2b74d08eb616a80dbf5564351fe4686147291a25f7dce8ace51eb3/fonttools-4.59.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b11bc177a0d428b37890825d7d025040d591aa833f85f8d8878ed183354f47df", size = 4966512, upload-time = "2025-08-14T16:26:34.621Z" }, - { url = "https://files.pythonhosted.org/packages/a4/26/51ce2e3e0835ffc2562b1b11d1fb9dafd0aca89c9041b64a9e903790a761/fonttools-4.59.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b9b4c35b3be45e5bc774d3fc9608bbf4f9a8d371103b858c80edbeed31dd5aa", size = 5001645, upload-time = "2025-08-14T16:26:36.876Z" }, - { 
url = "https://files.pythonhosted.org/packages/36/11/ef0b23f4266349b6d5ccbd1a07b7adc998d5bce925792aa5d1ec33f593e3/fonttools-4.59.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:01158376b8a418a0bae9625c476cebfcfcb5e6761e9d243b219cd58341e7afbb", size = 5113777, upload-time = "2025-08-14T16:26:39.002Z" }, - { url = "https://files.pythonhosted.org/packages/d0/da/b398fe61ef433da0a0472cdb5d4399124f7581ffe1a31b6242c91477d802/fonttools-4.59.1-cp311-cp311-win32.whl", hash = "sha256:cf7c5089d37787387123f1cb8f1793a47c5e1e3d1e4e7bfbc1cc96e0f925eabe", size = 2215076, upload-time = "2025-08-14T16:26:41.196Z" }, - { url = "https://files.pythonhosted.org/packages/94/bd/e2624d06ab94e41c7c77727b2941f1baed7edb647e63503953e6888020c9/fonttools-4.59.1-cp311-cp311-win_amd64.whl", hash = "sha256:c866eef7a0ba320486ade6c32bfc12813d1a5db8567e6904fb56d3d40acc5116", size = 2262779, upload-time = "2025-08-14T16:26:43.483Z" }, - { url = "https://files.pythonhosted.org/packages/ac/fe/6e069cc4cb8881d164a9bd956e9df555bc62d3eb36f6282e43440200009c/fonttools-4.59.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:43ab814bbba5f02a93a152ee61a04182bb5809bd2bc3609f7822e12c53ae2c91", size = 2769172, upload-time = "2025-08-14T16:26:45.729Z" }, - { url = "https://files.pythonhosted.org/packages/b9/98/ec4e03f748fefa0dd72d9d95235aff6fef16601267f4a2340f0e16b9330f/fonttools-4.59.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4f04c3ffbfa0baafcbc550657cf83657034eb63304d27b05cff1653b448ccff6", size = 2337281, upload-time = "2025-08-14T16:26:47.921Z" }, - { url = "https://files.pythonhosted.org/packages/8b/b1/890360a7e3d04a30ba50b267aca2783f4c1364363797e892e78a4f036076/fonttools-4.59.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d601b153e51a5a6221f0d4ec077b6bfc6ac35bfe6c19aeaa233d8990b2b71726", size = 4909215, upload-time = "2025-08-14T16:26:49.682Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/ec/2490599550d6c9c97a44c1e36ef4de52d6acf742359eaa385735e30c05c4/fonttools-4.59.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c735e385e30278c54f43a0d056736942023c9043f84ee1021eff9fd616d17693", size = 4951958, upload-time = "2025-08-14T16:26:51.616Z" }, - { url = "https://files.pythonhosted.org/packages/d1/40/bd053f6f7634234a9b9805ff8ae4f32df4f2168bee23cafd1271ba9915a9/fonttools-4.59.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1017413cdc8555dce7ee23720da490282ab7ec1cf022af90a241f33f9a49afc4", size = 4894738, upload-time = "2025-08-14T16:26:53.836Z" }, - { url = "https://files.pythonhosted.org/packages/ac/a1/3cd12a010d288325a7cfcf298a84825f0f9c29b01dee1baba64edfe89257/fonttools-4.59.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5c6d8d773470a5107052874341ed3c487c16ecd179976d81afed89dea5cd7406", size = 5045983, upload-time = "2025-08-14T16:26:56.153Z" }, - { url = "https://files.pythonhosted.org/packages/a2/af/8a2c3f6619cc43cf87951405337cc8460d08a4e717bb05eaa94b335d11dc/fonttools-4.59.1-cp312-cp312-win32.whl", hash = "sha256:2a2d0d33307f6ad3a2086a95dd607c202ea8852fa9fb52af9b48811154d1428a", size = 2203407, upload-time = "2025-08-14T16:26:58.165Z" }, - { url = "https://files.pythonhosted.org/packages/8e/f2/a19b874ddbd3ebcf11d7e25188ef9ac3f68b9219c62263acb34aca8cde05/fonttools-4.59.1-cp312-cp312-win_amd64.whl", hash = "sha256:0b9e4fa7eaf046ed6ac470f6033d52c052481ff7a6e0a92373d14f556f298dc0", size = 2251561, upload-time = "2025-08-14T16:27:00.646Z" }, - { url = "https://files.pythonhosted.org/packages/0f/64/9d606e66d498917cd7a2ff24f558010d42d6fd4576d9dd57f0bd98333f5a/fonttools-4.59.1-py3-none-any.whl", hash = "sha256:647db657073672a8330608970a984d51573557f328030566521bc03415535042", size = 1130094, upload-time = "2025-08-14T16:28:12.048Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/53/742fcd750ae0bdc74de4c0ff923111199cc2f90a4ee87aaddad505b6f477/fonttools-4.59.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:511946e8d7ea5c0d6c7a53c4cb3ee48eda9ab9797cd9bf5d95829a398400354f", size = 2774961, upload-time = "2025-08-27T16:38:47.536Z" }, + { url = "https://files.pythonhosted.org/packages/57/2a/976f5f9fa3b4dd911dc58d07358467bec20e813d933bc5d3db1a955dd456/fonttools-4.59.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e5e2682cf7be766d84f462ba8828d01e00c8751a8e8e7ce12d7784ccb69a30d", size = 2344690, upload-time = "2025-08-27T16:38:49.723Z" }, + { url = "https://files.pythonhosted.org/packages/c1/8f/b7eefc274fcf370911e292e95565c8253b0b87c82a53919ab3c795a4f50e/fonttools-4.59.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5729e12a982dba3eeae650de48b06f3b9ddb51e9aee2fcaf195b7d09a96250e2", size = 5026910, upload-time = "2025-08-27T16:38:51.904Z" }, + { url = "https://files.pythonhosted.org/packages/69/95/864726eaa8f9d4e053d0c462e64d5830ec7c599cbdf1db9e40f25ca3972e/fonttools-4.59.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c52694eae5d652361d59ecdb5a2246bff7cff13b6367a12da8499e9df56d148d", size = 4971031, upload-time = "2025-08-27T16:38:53.676Z" }, + { url = "https://files.pythonhosted.org/packages/24/4c/b8c4735ebdea20696277c70c79e0de615dbe477834e5a7c2569aa1db4033/fonttools-4.59.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f1f1bbc23ba1312bd8959896f46f667753b90216852d2a8cfa2d07e0cb234144", size = 5006112, upload-time = "2025-08-27T16:38:55.69Z" }, + { url = "https://files.pythonhosted.org/packages/3b/23/f9ea29c292aa2fc1ea381b2e5621ac436d5e3e0a5dee24ffe5404e58eae8/fonttools-4.59.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1a1bfe5378962825dabe741720885e8b9ae9745ec7ecc4a5ec1f1ce59a6062bf", size = 5117671, upload-time = "2025-08-27T16:38:58.984Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/07/cfea304c555bf06e86071ff2a3916bc90f7c07ec85b23bab758d4908c33d/fonttools-4.59.2-cp311-cp311-win32.whl", hash = "sha256:e937790f3c2c18a1cbc7da101550a84319eb48023a715914477d2e7faeaba570", size = 2218157, upload-time = "2025-08-27T16:39:00.75Z" }, + { url = "https://files.pythonhosted.org/packages/d7/de/35d839aa69db737a3f9f3a45000ca24721834d40118652a5775d5eca8ebb/fonttools-4.59.2-cp311-cp311-win_amd64.whl", hash = "sha256:9836394e2f4ce5f9c0a7690ee93bd90aa1adc6b054f1a57b562c5d242c903104", size = 2265846, upload-time = "2025-08-27T16:39:02.453Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3d/1f45db2df51e7bfa55492e8f23f383d372200be3a0ded4bf56a92753dd1f/fonttools-4.59.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:82906d002c349cad647a7634b004825a7335f8159d0d035ae89253b4abf6f3ea", size = 2769711, upload-time = "2025-08-27T16:39:04.423Z" }, + { url = "https://files.pythonhosted.org/packages/29/df/cd236ab32a8abfd11558f296e064424258db5edefd1279ffdbcfd4fd8b76/fonttools-4.59.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a10c1bd7644dc58f8862d8ba0cf9fb7fef0af01ea184ba6ce3f50ab7dfe74d5a", size = 2340225, upload-time = "2025-08-27T16:39:06.143Z" }, + { url = "https://files.pythonhosted.org/packages/98/12/b6f9f964fe6d4b4dd4406bcbd3328821c3de1f909ffc3ffa558fe72af48c/fonttools-4.59.2-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:738f31f23e0339785fd67652a94bc69ea49e413dfdb14dcb8c8ff383d249464e", size = 4912766, upload-time = "2025-08-27T16:39:08.138Z" }, + { url = "https://files.pythonhosted.org/packages/73/78/82bde2f2d2c306ef3909b927363170b83df96171f74e0ccb47ad344563cd/fonttools-4.59.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ec99f9bdfee9cdb4a9172f9e8fd578cce5feb231f598909e0aecf5418da4f25", size = 4955178, upload-time = "2025-08-27T16:39:10.094Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/77/7de766afe2d31dda8ee46d7e479f35c7d48747e558961489a2d6e3a02bd4/fonttools-4.59.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0476ea74161322e08c7a982f83558a2b81b491509984523a1a540baf8611cc31", size = 4897898, upload-time = "2025-08-27T16:39:12.087Z" }, + { url = "https://files.pythonhosted.org/packages/c5/77/ce0e0b905d62a06415fda9f2b2e109a24a5db54a59502b769e9e297d2242/fonttools-4.59.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:95922a922daa1f77cc72611747c156cfb38030ead72436a2c551d30ecef519b9", size = 5049144, upload-time = "2025-08-27T16:39:13.84Z" }, + { url = "https://files.pythonhosted.org/packages/d9/ea/870d93aefd23fff2e07cbeebdc332527868422a433c64062c09d4d5e7fe6/fonttools-4.59.2-cp312-cp312-win32.whl", hash = "sha256:39ad9612c6a622726a6a130e8ab15794558591f999673f1ee7d2f3d30f6a3e1c", size = 2206473, upload-time = "2025-08-27T16:39:15.854Z" }, + { url = "https://files.pythonhosted.org/packages/61/c4/e44bad000c4a4bb2e9ca11491d266e857df98ab6d7428441b173f0fe2517/fonttools-4.59.2-cp312-cp312-win_amd64.whl", hash = "sha256:980fd7388e461b19a881d35013fec32c713ffea1fc37aef2f77d11f332dfd7da", size = 2254706, upload-time = "2025-08-27T16:39:17.893Z" }, + { url = "https://files.pythonhosted.org/packages/65/a4/d2f7be3c86708912c02571db0b550121caab8cd88a3c0aacb9cfa15ea66e/fonttools-4.59.2-py3-none-any.whl", hash = "sha256:8bd0f759020e87bb5d323e6283914d9bf4ae35a7307dafb2cbd1e379e720ad37", size = 1132315, upload-time = "2025-08-27T16:40:28.984Z" }, ] [[package]] @@ -622,10 +637,10 @@ name = "gymnasium" version = "1.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cloudpickle" }, - { name = "farama-notifications" }, - { name = "numpy" }, - { name = "typing-extensions" }, + { name = "cloudpickle", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "farama-notifications", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = 
"numpy", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "typing-extensions", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fd/17/c2a0e15c2cd5a8e788389b280996db927b923410de676ec5c7b2695e9261/gymnasium-1.2.0.tar.gz", hash = "sha256:344e87561012558f603880baf264ebc97f8a5c997a957b0c9f910281145534b0", size = 821142, upload-time = "2025-06-27T08:21:20.262Z" } wheels = [ @@ -690,6 +705,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" }, ] +[[package]] +name = "jeepney" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/6f/357efd7602486741aa73ffc0617fb310a29b588ed0fd69c2399acbb85b0c/jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732", size = 106758, upload-time = "2025-02-27T18:51:01.684Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/a3/e137168c9c44d18eff0376253da9f1e9234d0239e0ee230d2fee6cea8e55/jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683", size = 49010, upload-time = "2025-02-27T18:51:00.104Z" }, +] + [[package]] name = "jinja2" version = "3.1.6" @@ -711,6 +735,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/94/9e/820c4b086ad01ba7d77369fb8b11470a01fac9b4977f02e18659cf378b6b/json_rpc-1.15.0-py2.py3-none-any.whl", hash = "sha256:4a4668bbbe7116feb4abbd0f54e64a4adcf4b8f648f19ffa0848ad0f6606a9bf", size = 39450, upload-time = "2023-06-11T09:45:47.136Z" }, ] +[[package]] +name = "kaitaistruct" +version = "0.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url 
= "https://files.pythonhosted.org/packages/54/04/dd60b9cb65d580ef6cb6eaee975ad1bdd22d46a3f51b07a1e0606710ea88/kaitaistruct-0.10.tar.gz", hash = "sha256:a044dee29173d6afbacf27bcac39daf89b654dd418cfa009ab82d9178a9ae52a", size = 7061, upload-time = "2022-07-09T00:34:06.729Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/bf/88ad23efc08708bda9a2647169828e3553bb2093a473801db61f75356395/kaitaistruct-0.10-py2.py3-none-any.whl", hash = "sha256:a97350919adbf37fda881f75e9365e2fb88d04832b7a4e57106ec70119efb235", size = 7013, upload-time = "2022-07-09T00:34:03.905Z" }, +] + [[package]] name = "kiwisolver" version = "1.4.9" @@ -846,7 +879,7 @@ wheels = [ [[package]] name = "matplotlib" -version = "3.10.5" +version = "3.10.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "contourpy" }, @@ -859,25 +892,25 @@ dependencies = [ { name = "pyparsing" }, { name = "python-dateutil" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/43/91/f2939bb60b7ebf12478b030e0d7f340247390f402b3b189616aad790c366/matplotlib-3.10.5.tar.gz", hash = "sha256:352ed6ccfb7998a00881692f38b4ca083c691d3e275b4145423704c34c909076", size = 34804044, upload-time = "2025-07-31T18:09:33.805Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a0/59/c3e6453a9676ffba145309a73c462bb407f4400de7de3f2b41af70720a3c/matplotlib-3.10.6.tar.gz", hash = "sha256:ec01b645840dd1996df21ee37f208cd8ba57644779fa20464010638013d3203c", size = 34804264, upload-time = "2025-08-30T00:14:25.137Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/c7/1f2db90a1d43710478bb1e9b57b162852f79234d28e4f48a28cc415aa583/matplotlib-3.10.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:dcfc39c452c6a9f9028d3e44d2d721484f665304857188124b505b2c95e1eecf", size = 8239216, upload-time = "2025-07-31T18:07:51.947Z" }, - { url = 
"https://files.pythonhosted.org/packages/82/6d/ca6844c77a4f89b1c9e4d481c412e1d1dbabf2aae2cbc5aa2da4a1d6683e/matplotlib-3.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:903352681b59f3efbf4546985142a9686ea1d616bb054b09a537a06e4b892ccf", size = 8102130, upload-time = "2025-07-31T18:07:53.65Z" }, - { url = "https://files.pythonhosted.org/packages/1d/1e/5e187a30cc673a3e384f3723e5f3c416033c1d8d5da414f82e4e731128ea/matplotlib-3.10.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:080c3676a56b8ee1c762bcf8fca3fe709daa1ee23e6ef06ad9f3fc17332f2d2a", size = 8666471, upload-time = "2025-07-31T18:07:55.304Z" }, - { url = "https://files.pythonhosted.org/packages/03/c0/95540d584d7d645324db99a845ac194e915ef75011a0d5e19e1b5cee7e69/matplotlib-3.10.5-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b4984d5064a35b6f66d2c11d668565f4389b1119cc64db7a4c1725bc11adffc", size = 9500518, upload-time = "2025-07-31T18:07:57.199Z" }, - { url = "https://files.pythonhosted.org/packages/ba/2e/e019352099ea58b4169adb9c6e1a2ad0c568c6377c2b677ee1f06de2adc7/matplotlib-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3967424121d3a46705c9fa9bdb0931de3228f13f73d7bb03c999c88343a89d89", size = 9552372, upload-time = "2025-07-31T18:07:59.41Z" }, - { url = "https://files.pythonhosted.org/packages/b7/81/3200b792a5e8b354f31f4101ad7834743ad07b6d620259f2059317b25e4d/matplotlib-3.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:33775bbeb75528555a15ac29396940128ef5613cf9a2d31fb1bfd18b3c0c0903", size = 8100634, upload-time = "2025-07-31T18:08:01.801Z" }, - { url = "https://files.pythonhosted.org/packages/52/46/a944f6f0c1f5476a0adfa501969d229ce5ae60cf9a663be0e70361381f89/matplotlib-3.10.5-cp311-cp311-win_arm64.whl", hash = "sha256:c61333a8e5e6240e73769d5826b9a31d8b22df76c0778f8480baf1b4b01c9420", size = 7978880, upload-time = "2025-07-31T18:08:03.407Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/1e/c6f6bcd882d589410b475ca1fc22e34e34c82adff519caf18f3e6dd9d682/matplotlib-3.10.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:00b6feadc28a08bd3c65b2894f56cf3c94fc8f7adcbc6ab4516ae1e8ed8f62e2", size = 8253056, upload-time = "2025-07-31T18:08:05.385Z" }, - { url = "https://files.pythonhosted.org/packages/53/e6/d6f7d1b59413f233793dda14419776f5f443bcccb2dfc84b09f09fe05dbe/matplotlib-3.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee98a5c5344dc7f48dc261b6ba5d9900c008fc12beb3fa6ebda81273602cc389", size = 8110131, upload-time = "2025-07-31T18:08:07.293Z" }, - { url = "https://files.pythonhosted.org/packages/66/2b/bed8a45e74957549197a2ac2e1259671cd80b55ed9e1fe2b5c94d88a9202/matplotlib-3.10.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a17e57e33de901d221a07af32c08870ed4528db0b6059dce7d7e65c1122d4bea", size = 8669603, upload-time = "2025-07-31T18:08:09.064Z" }, - { url = "https://files.pythonhosted.org/packages/7e/a7/315e9435b10d057f5e52dfc603cd353167ae28bb1a4e033d41540c0067a4/matplotlib-3.10.5-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97b9d6443419085950ee4a5b1ee08c363e5c43d7176e55513479e53669e88468", size = 9508127, upload-time = "2025-07-31T18:08:10.845Z" }, - { url = "https://files.pythonhosted.org/packages/7f/d9/edcbb1f02ca99165365d2768d517898c22c6040187e2ae2ce7294437c413/matplotlib-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ceefe5d40807d29a66ae916c6a3915d60ef9f028ce1927b84e727be91d884369", size = 9566926, upload-time = "2025-07-31T18:08:13.186Z" }, - { url = "https://files.pythonhosted.org/packages/3b/d9/6dd924ad5616c97b7308e6320cf392c466237a82a2040381163b7500510a/matplotlib-3.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:c04cba0f93d40e45b3c187c6c52c17f24535b27d545f757a2fffebc06c12b98b", size = 8107599, upload-time = "2025-07-31T18:08:15.116Z" }, - { url = 
"https://files.pythonhosted.org/packages/0e/f3/522dc319a50f7b0279fbe74f86f7a3506ce414bc23172098e8d2bdf21894/matplotlib-3.10.5-cp312-cp312-win_arm64.whl", hash = "sha256:a41bcb6e2c8e79dc99c5511ae6f7787d2fb52efd3d805fff06d5d4f667db16b2", size = 7978173, upload-time = "2025-07-31T18:08:21.518Z" }, - { url = "https://files.pythonhosted.org/packages/dc/d6/e921be4e1a5f7aca5194e1f016cb67ec294548e530013251f630713e456d/matplotlib-3.10.5-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:160e125da27a749481eaddc0627962990f6029811dbeae23881833a011a0907f", size = 8233224, upload-time = "2025-07-31T18:09:27.512Z" }, - { url = "https://files.pythonhosted.org/packages/ec/74/a2b9b04824b9c349c8f1b2d21d5af43fa7010039427f2b133a034cb09e59/matplotlib-3.10.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac3d50760394d78a3c9be6b28318fe22b494c4fcf6407e8fd4794b538251899b", size = 8098539, upload-time = "2025-07-31T18:09:29.629Z" }, - { url = "https://files.pythonhosted.org/packages/fc/66/cd29ebc7f6c0d2a15d216fb572573e8fc38bd5d6dec3bd9d7d904c0949f7/matplotlib-3.10.5-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6c49465bf689c4d59d174d0c7795fb42a21d4244d11d70e52b8011987367ac61", size = 8672192, upload-time = "2025-07-31T18:09:31.407Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/5d3665aa44c49005aaacaa68ddea6fcb27345961cd538a98bb0177934ede/matplotlib-3.10.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:905b60d1cb0ee604ce65b297b61cf8be9f4e6cfecf95a3fe1c388b5266bc8f4f", size = 8257527, upload-time = "2025-08-30T00:12:45.31Z" }, + { url = "https://files.pythonhosted.org/packages/8c/af/30ddefe19ca67eebd70047dabf50f899eaff6f3c5e6a1a7edaecaf63f794/matplotlib-3.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bac38d816637343e53d7185d0c66677ff30ffb131044a81898b5792c956ba76", size = 8119583, upload-time = "2025-08-30T00:12:47.236Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/29/4a8650a3dcae97fa4f375d46efcb25920d67b512186f8a6788b896062a81/matplotlib-3.10.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:942a8de2b5bfff1de31d95722f702e2966b8a7e31f4e68f7cd963c7cd8861cf6", size = 8692682, upload-time = "2025-08-30T00:12:48.781Z" }, + { url = "https://files.pythonhosted.org/packages/aa/d3/b793b9cb061cfd5d42ff0f69d1822f8d5dbc94e004618e48a97a8373179a/matplotlib-3.10.6-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3276c85370bc0dfca051ec65c5817d1e0f8f5ce1b7787528ec8ed2d524bbc2f", size = 9521065, upload-time = "2025-08-30T00:12:50.602Z" }, + { url = "https://files.pythonhosted.org/packages/f7/c5/53de5629f223c1c66668d46ac2621961970d21916a4bc3862b174eb2a88f/matplotlib-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9df5851b219225731f564e4b9e7f2ac1e13c9e6481f941b5631a0f8e2d9387ce", size = 9576888, upload-time = "2025-08-30T00:12:52.92Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8e/0a18d6d7d2d0a2e66585032a760d13662e5250c784d53ad50434e9560991/matplotlib-3.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:abb5d9478625dd9c9eb51a06d39aae71eda749ae9b3138afb23eb38824026c7e", size = 8115158, upload-time = "2025-08-30T00:12:54.863Z" }, + { url = "https://files.pythonhosted.org/packages/07/b3/1a5107bb66c261e23b9338070702597a2d374e5aa7004b7adfc754fbed02/matplotlib-3.10.6-cp311-cp311-win_arm64.whl", hash = "sha256:886f989ccfae63659183173bb3fced7fd65e9eb793c3cc21c273add368536951", size = 7992444, upload-time = "2025-08-30T00:12:57.067Z" }, + { url = "https://files.pythonhosted.org/packages/ea/1a/7042f7430055d567cc3257ac409fcf608599ab27459457f13772c2d9778b/matplotlib-3.10.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:31ca662df6a80bd426f871105fdd69db7543e28e73a9f2afe80de7e531eb2347", size = 8272404, upload-time = "2025-08-30T00:12:59.112Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/5d/1d5f33f5b43f4f9e69e6a5fe1fb9090936ae7bc8e2ff6158e7a76542633b/matplotlib-3.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1678bb61d897bb4ac4757b5ecfb02bfb3fddf7f808000fb81e09c510712fda75", size = 8128262, upload-time = "2025-08-30T00:13:01.141Z" }, + { url = "https://files.pythonhosted.org/packages/67/c3/135fdbbbf84e0979712df58e5e22b4f257b3f5e52a3c4aacf1b8abec0d09/matplotlib-3.10.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:56cd2d20842f58c03d2d6e6c1f1cf5548ad6f66b91e1e48f814e4fb5abd1cb95", size = 8697008, upload-time = "2025-08-30T00:13:03.24Z" }, + { url = "https://files.pythonhosted.org/packages/9c/be/c443ea428fb2488a3ea7608714b1bd85a82738c45da21b447dc49e2f8e5d/matplotlib-3.10.6-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:662df55604a2f9a45435566d6e2660e41efe83cd94f4288dfbf1e6d1eae4b0bb", size = 9530166, upload-time = "2025-08-30T00:13:05.951Z" }, + { url = "https://files.pythonhosted.org/packages/a9/35/48441422b044d74034aea2a3e0d1a49023f12150ebc58f16600132b9bbaf/matplotlib-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:08f141d55148cd1fc870c3387d70ca4df16dee10e909b3b038782bd4bda6ea07", size = 9593105, upload-time = "2025-08-30T00:13:08.356Z" }, + { url = "https://files.pythonhosted.org/packages/45/c3/994ef20eb4154ab84cc08d033834555319e4af970165e6c8894050af0b3c/matplotlib-3.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:590f5925c2d650b5c9d813c5b3b5fc53f2929c3f8ef463e4ecfa7e052044fb2b", size = 8122784, upload-time = "2025-08-30T00:13:10.367Z" }, + { url = "https://files.pythonhosted.org/packages/57/b8/5c85d9ae0e40f04e71bedb053aada5d6bab1f9b5399a0937afb5d6b02d98/matplotlib-3.10.6-cp312-cp312-win_arm64.whl", hash = "sha256:f44c8d264a71609c79a78d50349e724f5d5fc3684ead7c2a473665ee63d868aa", size = 7992823, upload-time = "2025-08-30T00:13:12.24Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/bb/02c35a51484aae5f49bd29f091286e7af5f3f677a9736c58a92b3c78baeb/matplotlib-3.10.6-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f2d684c3204fa62421bbf770ddfebc6b50130f9cad65531eeba19236d73bb488", size = 8252296, upload-time = "2025-08-30T00:14:19.49Z" }, + { url = "https://files.pythonhosted.org/packages/7d/85/41701e3092005aee9a2445f5ee3904d9dbd4a7df7a45905ffef29b7ef098/matplotlib-3.10.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:6f4a69196e663a41d12a728fab8751177215357906436804217d6d9cf0d4d6cf", size = 8116749, upload-time = "2025-08-30T00:14:21.344Z" }, + { url = "https://files.pythonhosted.org/packages/16/53/8d8fa0ea32a8c8239e04d022f6c059ee5e1b77517769feccd50f1df43d6d/matplotlib-3.10.6-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d6ca6ef03dfd269f4ead566ec6f3fb9becf8dab146fb999022ed85ee9f6b3eb", size = 8693933, upload-time = "2025-08-30T00:14:22.942Z" }, ] [[package]] @@ -894,22 +927,22 @@ name = "metadrive-simulator" version = "0.4.2.4" source = { url = "https://github.com/commaai/metadrive/releases/download/MetaDrive-minimal-0.4.2.4/metadrive_simulator-0.4.2.4-py3-none-any.whl" } dependencies = [ - { name = "filelock" }, - { name = "gymnasium" }, - { name = "lxml" }, - { name = "matplotlib" }, - { name = "numpy" }, - { name = "opencv-python-headless" }, - { name = "panda3d" }, - { name = "panda3d-gltf" }, - { name = "pillow" }, - { name = "progressbar" }, - { name = "psutil" }, - { name = "pygments" }, - { name = "requests" }, - { name = "shapely" }, - { name = "tqdm" }, - { name = "yapf" }, + { name = "filelock", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "gymnasium", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "lxml", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "matplotlib", marker = "platform_machine != 'aarch64' or sys_platform != 
'linux'" }, + { name = "numpy", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "opencv-python-headless", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "panda3d", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "panda3d-gltf", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "pillow", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "progressbar", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "psutil", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "pygments", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "requests", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "shapely", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "tqdm", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "yapf", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, ] wheels = [ { url = "https://github.com/commaai/metadrive/releases/download/MetaDrive-minimal-0.4.2.4/metadrive_simulator-0.4.2.4-py3-none-any.whl", hash = "sha256:fbf0ea9be67e65cd45d38ff930e3d49f705dd76c9ddbd1e1482e3f87b61efcef" }, @@ -981,6 +1014,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9f/d4/029f984e8d3f3b6b726bd33cafc473b75e9e44c0f7e80a5b29abc466bdea/mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134", size = 9521, upload-time = "2023-11-20T17:51:08.587Z" }, ] +[[package]] +name = "ml-dtypes" +version = "0.5.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/78/a7/aad060393123cfb383956dca68402aff3db1e1caffd5764887ed5153f41b/ml_dtypes-0.5.3.tar.gz", hash = "sha256:95ce33057ba4d05df50b1f3cfefab22e351868a843b3b15a46c65836283670c9", size = 692316, upload-time = "2025-07-29T18:39:19.454Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/f1/720cb1409b5d0c05cff9040c0e9fba73fa4c67897d33babf905d5d46a070/ml_dtypes-0.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4a177b882667c69422402df6ed5c3428ce07ac2c1f844d8a1314944651439458", size = 667412, upload-time = "2025-07-29T18:38:25.275Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d5/05861ede5d299f6599f86e6bc1291714e2116d96df003cfe23cc54bcc568/ml_dtypes-0.5.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9849ce7267444c0a717c80c6900997de4f36e2815ce34ac560a3edb2d9a64cd2", size = 4964606, upload-time = "2025-07-29T18:38:27.045Z" }, + { url = "https://files.pythonhosted.org/packages/db/dc/72992b68de367741bfab8df3b3fe7c29f982b7279d341aa5bf3e7ef737ea/ml_dtypes-0.5.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c3f5ae0309d9f888fd825c2e9d0241102fadaca81d888f26f845bc8c13c1e4ee", size = 4938435, upload-time = "2025-07-29T18:38:29.193Z" }, + { url = "https://files.pythonhosted.org/packages/81/1c/d27a930bca31fb07d975a2d7eaf3404f9388114463b9f15032813c98f893/ml_dtypes-0.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:58e39349d820b5702bb6f94ea0cb2dc8ec62ee81c0267d9622067d8333596a46", size = 206334, upload-time = "2025-07-29T18:38:30.687Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d8/6922499effa616012cb8dc445280f66d100a7ff39b35c864cfca019b3f89/ml_dtypes-0.5.3-cp311-cp311-win_arm64.whl", hash = "sha256:66c2756ae6cfd7f5224e355c893cfd617fa2f747b8bbd8996152cbdebad9a184", size = 157584, upload-time = "2025-07-29T18:38:32.187Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/eb/bc07c88a6ab002b4635e44585d80fa0b350603f11a2097c9d1bfacc03357/ml_dtypes-0.5.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:156418abeeda48ea4797db6776db3c5bdab9ac7be197c1233771e0880c304057", size = 663864, upload-time = "2025-07-29T18:38:33.777Z" }, + { url = "https://files.pythonhosted.org/packages/cf/89/11af9b0f21b99e6386b6581ab40fb38d03225f9de5f55cf52097047e2826/ml_dtypes-0.5.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1db60c154989af253f6c4a34e8a540c2c9dce4d770784d426945e09908fbb177", size = 4951313, upload-time = "2025-07-29T18:38:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/d8/a9/b98b86426c24900b0c754aad006dce2863df7ce0bb2bcc2c02f9cc7e8489/ml_dtypes-0.5.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1b255acada256d1fa8c35ed07b5f6d18bc21d1556f842fbc2d5718aea2cd9e55", size = 4928805, upload-time = "2025-07-29T18:38:38.29Z" }, + { url = "https://files.pythonhosted.org/packages/50/c1/85e6be4fc09c6175f36fb05a45917837f30af9a5146a5151cb3a3f0f9e09/ml_dtypes-0.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:da65e5fd3eea434ccb8984c3624bc234ddcc0d9f4c81864af611aaebcc08a50e", size = 208182, upload-time = "2025-07-29T18:38:39.72Z" }, + { url = "https://files.pythonhosted.org/packages/9e/17/cf5326d6867be057f232d0610de1458f70a8ce7b6290e4b4a277ea62b4cd/ml_dtypes-0.5.3-cp312-cp312-win_arm64.whl", hash = "sha256:8bb9cd1ce63096567f5f42851f5843b5a0ea11511e50039a7649619abfb4ba6d", size = 161560, upload-time = "2025-07-29T18:38:41.072Z" }, +] + [[package]] name = "mouseinfo" version = "0.1.3" @@ -1155,27 +1209,28 @@ wheels = [ [[package]] name = "onnx" -version = "1.18.0" +version = "1.19.0" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "ml-dtypes" }, { name = "numpy" }, { name = "protobuf" }, { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/3d/60/e56e8ec44ed34006e6d4a73c92a04d9eea6163cc12440e35045aec069175/onnx-1.18.0.tar.gz", hash = "sha256:3d8dbf9e996629131ba3aa1afd1d8239b660d1f830c6688dd7e03157cccd6b9c", size = 12563009, upload-time = "2025-05-12T22:03:09.626Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/bf/b0a63ee9f3759dcd177b28c6f2cb22f2aecc6d9b3efecaabc298883caa5f/onnx-1.19.0.tar.gz", hash = "sha256:aa3f70b60f54a29015e41639298ace06adf1dd6b023b9b30f1bca91bb0db9473", size = 11949859, upload-time = "2025-08-27T02:34:27.107Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/3a/a336dac4db1eddba2bf577191e5b7d3e4c26fcee5ec518a5a5b11d13540d/onnx-1.18.0-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:735e06d8d0cf250dc498f54038831401063c655a8d6e5975b2527a4e7d24be3e", size = 18281831, upload-time = "2025-05-12T22:02:06.429Z" }, - { url = "https://files.pythonhosted.org/packages/02/3a/56475a111120d1e5d11939acbcbb17c92198c8e64a205cd68e00bdfd8a1f/onnx-1.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73160799472e1a86083f786fecdf864cf43d55325492a9b5a1cfa64d8a523ecc", size = 17424359, upload-time = "2025-05-12T22:02:09.866Z" }, - { url = "https://files.pythonhosted.org/packages/cf/03/5eb5e9ef446ed9e78c4627faf3c1bc25e0f707116dd00e9811de232a8df5/onnx-1.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6acafb3823238bbe8f4340c7ac32fb218689442e074d797bee1c5c9a02fdae75", size = 17586006, upload-time = "2025-05-12T22:02:13.217Z" }, - { url = "https://files.pythonhosted.org/packages/b0/4e/70943125729ce453271a6e46bb847b4a612496f64db6cbc6cb1f49f41ce1/onnx-1.18.0-cp311-cp311-win32.whl", hash = "sha256:4c8c4bbda760c654e65eaffddb1a7de71ec02e60092d33f9000521f897c99be9", size = 15734988, upload-time = "2025-05-12T22:02:16.561Z" }, - { url = 
"https://files.pythonhosted.org/packages/44/b0/435fd764011911e8f599e3361f0f33425b1004662c1ea33a0ad22e43db2d/onnx-1.18.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5810194f0f6be2e58c8d6dedc6119510df7a14280dd07ed5f0f0a85bd74816a", size = 15849576, upload-time = "2025-05-12T22:02:19.569Z" }, - { url = "https://files.pythonhosted.org/packages/6c/f0/9e31f4b4626d60f1c034f71b411810bc9fafe31f4e7dd3598effd1b50e05/onnx-1.18.0-cp311-cp311-win_arm64.whl", hash = "sha256:aa1b7483fac6cdec26922174fc4433f8f5c2f239b1133c5625063bb3b35957d0", size = 15822961, upload-time = "2025-05-12T22:02:22.735Z" }, - { url = "https://files.pythonhosted.org/packages/a7/fe/16228aca685392a7114625b89aae98b2dc4058a47f0f467a376745efe8d0/onnx-1.18.0-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:521bac578448667cbb37c50bf05b53c301243ede8233029555239930996a625b", size = 18285770, upload-time = "2025-05-12T22:02:26.116Z" }, - { url = "https://files.pythonhosted.org/packages/1e/77/ba50a903a9b5e6f9be0fa50f59eb2fca4a26ee653375408fbc72c3acbf9f/onnx-1.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4da451bf1c5ae381f32d430004a89f0405bc57a8471b0bddb6325a5b334aa40", size = 17421291, upload-time = "2025-05-12T22:02:29.645Z" }, - { url = "https://files.pythonhosted.org/packages/11/23/25ec2ba723ac62b99e8fed6d7b59094dadb15e38d4c007331cc9ae3dfa5f/onnx-1.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99afac90b4cdb1471432203c3c1f74e16549c526df27056d39f41a9a47cfb4af", size = 17584084, upload-time = "2025-05-12T22:02:32.789Z" }, - { url = "https://files.pythonhosted.org/packages/6a/4d/2c253a36070fb43f340ff1d2c450df6a9ef50b938adcd105693fee43c4ee/onnx-1.18.0-cp312-cp312-win32.whl", hash = "sha256:ee159b41a3ae58d9c7341cf432fc74b96aaf50bd7bb1160029f657b40dc69715", size = 15734892, upload-time = "2025-05-12T22:02:35.527Z" }, - { url = 
"https://files.pythonhosted.org/packages/e8/92/048ba8fafe6b2b9a268ec2fb80def7e66c0b32ab2cae74de886981f05a27/onnx-1.18.0-cp312-cp312-win_amd64.whl", hash = "sha256:102c04edc76b16e9dfeda5a64c1fccd7d3d2913b1544750c01d38f1ac3c04e05", size = 15850336, upload-time = "2025-05-12T22:02:38.545Z" }, - { url = "https://files.pythonhosted.org/packages/a1/66/bbc4ffedd44165dcc407a51ea4c592802a5391ce3dc94aa5045350f64635/onnx-1.18.0-cp312-cp312-win_arm64.whl", hash = "sha256:911b37d724a5d97396f3c2ef9ea25361c55cbc9aa18d75b12a52b620b67145af", size = 15823802, upload-time = "2025-05-12T22:02:42.037Z" }, + { url = "https://files.pythonhosted.org/packages/db/5c/b959b17608cfb6ccf6359b39fe56a5b0b7d965b3d6e6a3c0add90812c36e/onnx-1.19.0-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:206f00c47b85b5c7af79671e3307147407991a17994c26974565aadc9e96e4e4", size = 18312580, upload-time = "2025-08-27T02:33:03.081Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ee/ac052bbbc832abe0debb784c2c57f9582444fb5f51d63c2967fd04432444/onnx-1.19.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4d7bee94abaac28988b50da675ae99ef8dd3ce16210d591fbd0b214a5930beb3", size = 18029165, upload-time = "2025-08-27T02:33:05.771Z" }, + { url = "https://files.pythonhosted.org/packages/5c/c9/8687ba0948d46fd61b04e3952af9237883bbf8f16d716e7ed27e688d73b8/onnx-1.19.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7730b96b68c0c354bbc7857961bb4909b9aaa171360a8e3708d0a4c749aaadeb", size = 18202125, upload-time = "2025-08-27T02:33:09.325Z" }, + { url = "https://files.pythonhosted.org/packages/e2/16/6249c013e81bd689f46f96c7236d7677f1af5dd9ef22746716b48f10e506/onnx-1.19.0-cp311-cp311-win32.whl", hash = "sha256:7cb7a3ad8059d1a0dfdc5e0a98f71837d82002e441f112825403b137227c2c97", size = 16332738, upload-time = "2025-08-27T02:33:12.448Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/28/34a1e2166e418c6a78e5c82e66f409d9da9317832f11c647f7d4e23846a6/onnx-1.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:d75452a9be868bd30c3ef6aa5991df89bbfe53d0d90b2325c5e730fbd91fff85", size = 16452303, upload-time = "2025-08-27T02:33:15.176Z" }, + { url = "https://files.pythonhosted.org/packages/e6/b7/639664626e5ba8027860c4d2a639ee02b37e9c322215c921e9222513c3aa/onnx-1.19.0-cp311-cp311-win_arm64.whl", hash = "sha256:23c7959370d7b3236f821e609b0af7763cff7672a758e6c1fc877bac099e786b", size = 16425340, upload-time = "2025-08-27T02:33:17.78Z" }, + { url = "https://files.pythonhosted.org/packages/0d/94/f56f6ca5e2f921b28c0f0476705eab56486b279f04e1d568ed64c14e7764/onnx-1.19.0-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:61d94e6498ca636756f8f4ee2135708434601b2892b7c09536befb19bc8ca007", size = 18322331, upload-time = "2025-08-27T02:33:20.373Z" }, + { url = "https://files.pythonhosted.org/packages/c8/00/8cc3f3c40b54b28f96923380f57c9176872e475face726f7d7a78bd74098/onnx-1.19.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:224473354462f005bae985c72028aaa5c85ab11de1b71d55b06fdadd64a667dd", size = 18027513, upload-time = "2025-08-27T02:33:23.44Z" }, + { url = "https://files.pythonhosted.org/packages/61/90/17c4d2566fd0117a5e412688c9525f8950d467f477fbd574e6b32bc9cb8d/onnx-1.19.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ae475c85c89bc4d1f16571006fd21a3e7c0e258dd2c091f6e8aafb083d1ed9b", size = 18202278, upload-time = "2025-08-27T02:33:26.103Z" }, + { url = "https://files.pythonhosted.org/packages/bc/6e/a9383d9cf6db4ac761a129b081e9fa5d0cd89aad43cf1e3fc6285b915c7d/onnx-1.19.0-cp312-cp312-win32.whl", hash = "sha256:323f6a96383a9cdb3960396cffea0a922593d221f3929b17312781e9f9b7fb9f", size = 16333080, upload-time = "2025-08-27T02:33:28.559Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/2e/3ff480a8c1fa7939662bdc973e41914add2d4a1f2b8572a3c39c2e4982e5/onnx-1.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:50220f3499a499b1a15e19451a678a58e22ad21b34edf2c844c6ef1d9febddc2", size = 16453927, upload-time = "2025-08-27T02:33:31.177Z" }, + { url = "https://files.pythonhosted.org/packages/57/37/ad500945b1b5c154fe9d7b826b30816ebd629d10211ea82071b5bcc30aa4/onnx-1.19.0-cp312-cp312-win_arm64.whl", hash = "sha256:efb768299580b786e21abe504e1652ae6189f0beed02ab087cd841cb4bb37e43", size = 16426022, upload-time = "2025-08-27T02:33:33.515Z" }, ] [[package]] @@ -1206,9 +1261,11 @@ dependencies = [ { name = "cffi" }, { name = "crcmod" }, { name = "cython" }, + { name = "dearpygui" }, { name = "future-fstrings" }, { name = "inputs" }, { name = "json-rpc" }, + { name = "kaitaistruct" }, { name = "libusb1" }, { name = "numpy" }, { name = "onnx" }, @@ -1243,6 +1300,7 @@ dev = [ { name = "azure-storage-blob" }, { name = "dbus-next" }, { name = "dictdiffer" }, + { name = "jeepney" }, { name = "matplotlib" }, { name = "opencv-python-headless" }, { name = "parameterized" }, @@ -1295,12 +1353,15 @@ requires-dist = [ { name = "crcmod" }, { name = "cython" }, { name = "dbus-next", marker = "extra == 'dev'" }, + { name = "dearpygui", specifier = ">=2.1.0" }, { name = "dictdiffer", marker = "extra == 'dev'" }, { name = "future-fstrings" }, { name = "hypothesis", marker = "extra == 'testing'", specifier = "==6.47.*" }, { name = "inputs" }, + { name = "jeepney", marker = "extra == 'dev'" }, { name = "jinja2", marker = "extra == 'docs'" }, { name = "json-rpc" }, + { name = "kaitaistruct" }, { name = "libusb1" }, { name = "matplotlib", marker = "extra == 'dev'" }, { name = "metadrive-simulator", marker = "platform_machine != 'aarch64' and extra == 'tools'", url = "https://github.com/commaai/metadrive/releases/download/MetaDrive-minimal-0.4.2.4/metadrive_simulator-0.4.2.4-py3-none-any.whl" }, @@ -1389,8 +1450,8 @@ name = "panda3d-gltf" version 
= "0.13" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "panda3d" }, - { name = "panda3d-simplepbr" }, + { name = "panda3d", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "panda3d-simplepbr", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/07/7f/9f18fc3fa843a080acb891af6bcc12262e7bdf1d194a530f7042bebfc81f/panda3d-gltf-0.13.tar.gz", hash = "sha256:d06d373bdd91cf530909b669f43080e599463bbf6d3ef00c3558bad6c6b19675", size = 25573, upload-time = "2021-05-21T05:46:32.738Z" } wheels = [ @@ -1402,8 +1463,8 @@ name = "panda3d-simplepbr" version = "0.13.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "panda3d" }, - { name = "typing-extensions" }, + { name = "panda3d", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "typing-extensions", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0d/be/c4d1ded04c22b357277cf6e6a44c1ab4abb285a700bd1991460460e05b99/panda3d_simplepbr-0.13.1.tar.gz", hash = "sha256:c83766d7c8f47499f365a07fe1dff078fc8b3054c2689bdc8dceabddfe7f1a35", size = 6216055, upload-time = "2025-03-30T16:57:41.087Z" } wheels = [ @@ -1467,11 +1528,11 @@ wheels = [ [[package]] name = "platformdirs" -version = "4.3.8" +version = "4.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } +sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = 
"sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, + { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, ] [[package]] @@ -4140,9 +4201,9 @@ name = "pyopencl" version = "2025.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy" }, - { name = "platformdirs" }, - { name = "pytools" }, + { name = "numpy", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "platformdirs", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "pytools", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/28/88/0ac460d3e2def08b2ad6345db6a13613815f616bbbd60c6f4bdf774f4c41/pyopencl-2025.1.tar.gz", hash = "sha256:0116736d7f7920f87b8db4b66a03f27b1d930d2e37ddd14518407cc22dd24779", size = 422510, upload-time = "2025-01-22T00:16:58.421Z" } wheels = [ @@ -4318,7 +4379,7 @@ wheels = [ [[package]] name = "pytest-xdist" -version = "3.7.1.dev24+g2b4372bd6" +version = "3.7.1.dev24+g2b4372b" source = { git = "https://github.com/sshane/pytest-xdist?rev=2b4372bd62699fb412c4fe2f95bf9f01bd2018da#2b4372bd62699fb412c4fe2f95bf9f01bd2018da" } dependencies = [ { name = "execnet" }, @@ -4360,9 +4421,9 @@ name = "pytools" version = "2024.1.10" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = 
"platformdirs" }, - { name = "siphash24" }, - { name = "typing-extensions" }, + { name = "platformdirs", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "siphash24", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "typing-extensions", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ee/0f/56e109c0307f831b5d598ad73976aaaa84b4d0e98da29a642e797eaa940c/pytools-2024.1.10.tar.gz", hash = "sha256:9af6f4b045212c49be32bb31fe19606c478ee4b09631886d05a32459f4ce0a12", size = 81741, upload-time = "2024-07-17T18:47:38.287Z" } wheels = [ @@ -4594,28 +4655,28 @@ wheels = [ [[package]] name = "ruff" -version = "0.12.10" +version = "0.12.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3b/eb/8c073deb376e46ae767f4961390d17545e8535921d2f65101720ed8bd434/ruff-0.12.10.tar.gz", hash = "sha256:189ab65149d11ea69a2d775343adf5f49bb2426fc4780f65ee33b423ad2e47f9", size = 5310076, upload-time = "2025-08-21T18:23:22.595Z" } +sdist = { url = "https://files.pythonhosted.org/packages/de/55/16ab6a7d88d93001e1ae4c34cbdcfb376652d761799459ff27c1dc20f6fa/ruff-0.12.11.tar.gz", hash = "sha256:c6b09ae8426a65bbee5425b9d0b82796dbb07cb1af045743c79bfb163001165d", size = 5347103, upload-time = "2025-08-28T13:59:08.87Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/24/e7/560d049d15585d6c201f9eeacd2fd130def3741323e5ccf123786e0e3c95/ruff-0.12.10-py3-none-linux_armv6l.whl", hash = "sha256:8b593cb0fb55cc8692dac7b06deb29afda78c721c7ccfed22db941201b7b8f7b", size = 11935161, upload-time = "2025-08-21T18:22:26.965Z" }, - { url = "https://files.pythonhosted.org/packages/d1/b0/ad2464922a1113c365d12b8f80ed70fcfb39764288ac77c995156080488d/ruff-0.12.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ebb7333a45d56efc7c110a46a69a1b32365d5c5161e7244aaf3aa20ce62399c1", size = 12660884, upload-time = 
"2025-08-21T18:22:30.925Z" }, - { url = "https://files.pythonhosted.org/packages/d7/f1/97f509b4108d7bae16c48389f54f005b62ce86712120fd8b2d8e88a7cb49/ruff-0.12.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d59e58586829f8e4a9920788f6efba97a13d1fa320b047814e8afede381c6839", size = 11872754, upload-time = "2025-08-21T18:22:34.035Z" }, - { url = "https://files.pythonhosted.org/packages/12/ad/44f606d243f744a75adc432275217296095101f83f966842063d78eee2d3/ruff-0.12.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:822d9677b560f1fdeab69b89d1f444bf5459da4aa04e06e766cf0121771ab844", size = 12092276, upload-time = "2025-08-21T18:22:36.764Z" }, - { url = "https://files.pythonhosted.org/packages/06/1f/ed6c265e199568010197909b25c896d66e4ef2c5e1c3808caf461f6f3579/ruff-0.12.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b4a64f4062a50c75019c61c7017ff598cb444984b638511f48539d3a1c98db", size = 11734700, upload-time = "2025-08-21T18:22:39.822Z" }, - { url = "https://files.pythonhosted.org/packages/63/c5/b21cde720f54a1d1db71538c0bc9b73dee4b563a7dd7d2e404914904d7f5/ruff-0.12.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c6f4064c69d2542029b2a61d39920c85240c39837599d7f2e32e80d36401d6e", size = 13468783, upload-time = "2025-08-21T18:22:42.559Z" }, - { url = "https://files.pythonhosted.org/packages/02/9e/39369e6ac7f2a1848f22fb0b00b690492f20811a1ac5c1fd1d2798329263/ruff-0.12.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:059e863ea3a9ade41407ad71c1de2badfbe01539117f38f763ba42a1206f7559", size = 14436642, upload-time = "2025-08-21T18:22:45.612Z" }, - { url = "https://files.pythonhosted.org/packages/e3/03/5da8cad4b0d5242a936eb203b58318016db44f5c5d351b07e3f5e211bb89/ruff-0.12.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1bef6161e297c68908b7218fa6e0e93e99a286e5ed9653d4be71e687dff101cf", size = 13859107, upload-time = "2025-08-21T18:22:48.886Z" }, - { 
url = "https://files.pythonhosted.org/packages/19/19/dd7273b69bf7f93a070c9cec9494a94048325ad18fdcf50114f07e6bf417/ruff-0.12.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4f1345fbf8fb0531cd722285b5f15af49b2932742fc96b633e883da8d841896b", size = 12886521, upload-time = "2025-08-21T18:22:51.567Z" }, - { url = "https://files.pythonhosted.org/packages/c0/1d/b4207ec35e7babaee62c462769e77457e26eb853fbdc877af29417033333/ruff-0.12.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f68433c4fbc63efbfa3ba5db31727db229fa4e61000f452c540474b03de52a9", size = 13097528, upload-time = "2025-08-21T18:22:54.609Z" }, - { url = "https://files.pythonhosted.org/packages/ff/00/58f7b873b21114456e880b75176af3490d7a2836033779ca42f50de3b47a/ruff-0.12.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:141ce3d88803c625257b8a6debf4a0473eb6eed9643a6189b68838b43e78165a", size = 13080443, upload-time = "2025-08-21T18:22:57.413Z" }, - { url = "https://files.pythonhosted.org/packages/12/8c/9e6660007fb10189ccb78a02b41691288038e51e4788bf49b0a60f740604/ruff-0.12.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f3fc21178cd44c98142ae7590f42ddcb587b8e09a3b849cbc84edb62ee95de60", size = 11896759, upload-time = "2025-08-21T18:23:00.473Z" }, - { url = "https://files.pythonhosted.org/packages/67/4c/6d092bb99ea9ea6ebda817a0e7ad886f42a58b4501a7e27cd97371d0ba54/ruff-0.12.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7d1a4e0bdfafcd2e3e235ecf50bf0176f74dd37902f241588ae1f6c827a36c56", size = 11701463, upload-time = "2025-08-21T18:23:03.211Z" }, - { url = "https://files.pythonhosted.org/packages/59/80/d982c55e91df981f3ab62559371380616c57ffd0172d96850280c2b04fa8/ruff-0.12.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e67d96827854f50b9e3e8327b031647e7bcc090dbe7bb11101a81a3a2cbf1cc9", size = 12691603, upload-time = "2025-08-21T18:23:06.935Z" }, - { url = 
"https://files.pythonhosted.org/packages/ad/37/63a9c788bbe0b0850611669ec6b8589838faf2f4f959647f2d3e320383ae/ruff-0.12.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ae479e1a18b439c59138f066ae79cc0f3ee250712a873d00dbafadaad9481e5b", size = 13164356, upload-time = "2025-08-21T18:23:10.225Z" }, - { url = "https://files.pythonhosted.org/packages/47/d4/1aaa7fb201a74181989970ebccd12f88c0fc074777027e2a21de5a90657e/ruff-0.12.10-py3-none-win32.whl", hash = "sha256:9de785e95dc2f09846c5e6e1d3a3d32ecd0b283a979898ad427a9be7be22b266", size = 11896089, upload-time = "2025-08-21T18:23:14.232Z" }, - { url = "https://files.pythonhosted.org/packages/ad/14/2ad38fd4037daab9e023456a4a40ed0154e9971f8d6aed41bdea390aabd9/ruff-0.12.10-py3-none-win_amd64.whl", hash = "sha256:7837eca8787f076f67aba2ca559cefd9c5cbc3a9852fd66186f4201b87c1563e", size = 13004616, upload-time = "2025-08-21T18:23:17.422Z" }, - { url = "https://files.pythonhosted.org/packages/24/3c/21cf283d67af33a8e6ed242396863af195a8a6134ec581524fd22b9811b6/ruff-0.12.10-py3-none-win_arm64.whl", hash = "sha256:cc138cc06ed9d4bfa9d667a65af7172b47840e1a98b02ce7011c391e54635ffc", size = 12074225, upload-time = "2025-08-21T18:23:20.137Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a2/3b3573e474de39a7a475f3fbaf36a25600bfeb238e1a90392799163b64a0/ruff-0.12.11-py3-none-linux_armv6l.whl", hash = "sha256:93fce71e1cac3a8bf9200e63a38ac5c078f3b6baebffb74ba5274fb2ab276065", size = 11979885, upload-time = "2025-08-28T13:58:26.654Z" }, + { url = "https://files.pythonhosted.org/packages/76/e4/235ad6d1785a2012d3ded2350fd9bc5c5af8c6f56820e696b0118dfe7d24/ruff-0.12.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8e33ac7b28c772440afa80cebb972ffd823621ded90404f29e5ab6d1e2d4b93", size = 12742364, upload-time = "2025-08-28T13:58:30.256Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0d/15b72c5fe6b1e402a543aa9d8960e0a7e19dfb079f5b0b424db48b7febab/ruff-0.12.11-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:d69fb9d4937aa19adb2e9f058bc4fbfe986c2040acb1a4a9747734834eaa0bfd", size = 11920111, upload-time = "2025-08-28T13:58:33.677Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c0/f66339d7893798ad3e17fa5a1e587d6fd9806f7c1c062b63f8b09dda6702/ruff-0.12.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:411954eca8464595077a93e580e2918d0a01a19317af0a72132283e28ae21bee", size = 12160060, upload-time = "2025-08-28T13:58:35.74Z" }, + { url = "https://files.pythonhosted.org/packages/03/69/9870368326db26f20c946205fb2d0008988aea552dbaec35fbacbb46efaa/ruff-0.12.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a2c0a2e1a450f387bf2c6237c727dd22191ae8c00e448e0672d624b2bbd7fb0", size = 11799848, upload-time = "2025-08-28T13:58:38.051Z" }, + { url = "https://files.pythonhosted.org/packages/25/8c/dd2c7f990e9b3a8a55eee09d4e675027d31727ce33cdb29eab32d025bdc9/ruff-0.12.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ca4c3a7f937725fd2413c0e884b5248a19369ab9bdd850b5781348ba283f644", size = 13536288, upload-time = "2025-08-28T13:58:40.046Z" }, + { url = "https://files.pythonhosted.org/packages/7a/30/d5496fa09aba59b5e01ea76775a4c8897b13055884f56f1c35a4194c2297/ruff-0.12.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4d1df0098124006f6a66ecf3581a7f7e754c4df7644b2e6704cd7ca80ff95211", size = 14490633, upload-time = "2025-08-28T13:58:42.285Z" }, + { url = "https://files.pythonhosted.org/packages/9b/2f/81f998180ad53445d403c386549d6946d0748e536d58fce5b5e173511183/ruff-0.12.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a8dd5f230efc99a24ace3b77e3555d3fbc0343aeed3fc84c8d89e75ab2ff793", size = 13888430, upload-time = "2025-08-28T13:58:44.641Z" }, + { url = "https://files.pythonhosted.org/packages/87/71/23a0d1d5892a377478c61dbbcffe82a3476b050f38b5162171942a029ef3/ruff-0.12.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:4dc75533039d0ed04cd33fb8ca9ac9620b99672fe7ff1533b6402206901c34ee", size = 12913133, upload-time = "2025-08-28T13:58:47.039Z" }, + { url = "https://files.pythonhosted.org/packages/80/22/3c6cef96627f89b344c933781ed38329bfb87737aa438f15da95907cbfd5/ruff-0.12.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fc58f9266d62c6eccc75261a665f26b4ef64840887fc6cbc552ce5b29f96cc8", size = 13169082, upload-time = "2025-08-28T13:58:49.157Z" }, + { url = "https://files.pythonhosted.org/packages/05/b5/68b3ff96160d8b49e8dd10785ff3186be18fd650d356036a3770386e6c7f/ruff-0.12.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5a0113bd6eafd545146440225fe60b4e9489f59eb5f5f107acd715ba5f0b3d2f", size = 13139490, upload-time = "2025-08-28T13:58:51.593Z" }, + { url = "https://files.pythonhosted.org/packages/59/b9/050a3278ecd558f74f7ee016fbdf10591d50119df8d5f5da45a22c6afafc/ruff-0.12.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0d737b4059d66295c3ea5720e6efc152623bb83fde5444209b69cd33a53e2000", size = 11958928, upload-time = "2025-08-28T13:58:53.943Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bc/93be37347db854806904a43b0493af8d6873472dfb4b4b8cbb27786eb651/ruff-0.12.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:916fc5defee32dbc1fc1650b576a8fed68f5e8256e2180d4d9855aea43d6aab2", size = 11764513, upload-time = "2025-08-28T13:58:55.976Z" }, + { url = "https://files.pythonhosted.org/packages/7a/a1/1471751e2015a81fd8e166cd311456c11df74c7e8769d4aabfbc7584c7ac/ruff-0.12.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c984f07d7adb42d3ded5be894fb4007f30f82c87559438b4879fe7aa08c62b39", size = 12745154, upload-time = "2025-08-28T13:58:58.16Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/2542b14890d0f4872dd81b7b2a6aed3ac1786fae1ce9b17e11e6df9e31e3/ruff-0.12.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e07fbb89f2e9249f219d88331c833860489b49cdf4b032b8e4432e9b13e8a4b9", size = 13227653, upload-time = 
"2025-08-28T13:59:00.276Z" }, + { url = "https://files.pythonhosted.org/packages/22/16/2fbfc61047dbfd009c58a28369a693a1484ad15441723be1cd7fe69bb679/ruff-0.12.11-py3-none-win32.whl", hash = "sha256:c792e8f597c9c756e9bcd4d87cf407a00b60af77078c96f7b6366ea2ce9ba9d3", size = 11944270, upload-time = "2025-08-28T13:59:02.347Z" }, + { url = "https://files.pythonhosted.org/packages/08/a5/34276984705bfe069cd383101c45077ee029c3fe3b28225bf67aa35f0647/ruff-0.12.11-py3-none-win_amd64.whl", hash = "sha256:a3283325960307915b6deb3576b96919ee89432ebd9c48771ca12ee8afe4a0fd", size = 13046600, upload-time = "2025-08-28T13:59:04.751Z" }, + { url = "https://files.pythonhosted.org/packages/84/a8/001d4a7c2b37623a3fd7463208267fb906df40ff31db496157549cfd6e72/ruff-0.12.11-py3-none-win_arm64.whl", hash = "sha256:bae4d6e6a2676f8fb0f98b74594a048bae1b944aab17e9f5d504062303c6dbea", size = 12135290, upload-time = "2025-08-28T13:59:06.933Z" }, ] [[package]] @@ -4629,15 +4690,15 @@ wheels = [ [[package]] name = "sentry-sdk" -version = "2.35.0" +version = "2.35.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/31/83/055dc157b719651ef13db569bb8cf2103df11174478649735c1b2bf3f6bc/sentry_sdk-2.35.0.tar.gz", hash = "sha256:5ea58d352779ce45d17bc2fa71ec7185205295b83a9dbb5707273deb64720092", size = 343014, upload-time = "2025-08-14T17:11:20.223Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/79/0ecb942f3f1ad26c40c27f81ff82392d85c01d26a45e3c72c2b37807e680/sentry_sdk-2.35.2.tar.gz", hash = "sha256:e9e8f3c795044beb59f2c8f4c6b9b0f9779e5e604099882df05eec525e782cc6", size = 343377, upload-time = "2025-09-01T11:00:58.633Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/3d/742617a7c644deb0c1628dcf6bb2d2165ab7c6aab56fe5222758994007f8/sentry_sdk-2.35.0-py2.py3-none-any.whl", hash = "sha256:6e0c29b9a5d34de8575ffb04d289a987ff3053cf2c98ede445bea995e3830263", size = 
363806, upload-time = "2025-08-14T17:11:18.29Z" }, + { url = "https://files.pythonhosted.org/packages/c0/91/a43308dc82a0e32d80cd0dfdcfca401ecbd0f431ab45f24e48bb97b7800d/sentry_sdk-2.35.2-py2.py3-none-any.whl", hash = "sha256:38c98e3cbb620dd3dd80a8d6e39c753d453dd41f8a9df581b0584c19a52bc926", size = 363975, upload-time = "2025-09-01T11:00:56.574Z" }, ] [[package]] @@ -4686,7 +4747,7 @@ name = "shapely" version = "2.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy" }, + { name = "numpy", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ca/3c/2da625233f4e605155926566c0e7ea8dda361877f48e8b1655e53456f252/shapely-2.1.1.tar.gz", hash = "sha256:500621967f2ffe9642454808009044c21e5b35db89ce69f8a2042c2ffd0e2772", size = 315422, upload-time = "2025-05-19T11:04:41.265Z" } wheels = [ @@ -4915,7 +4976,7 @@ name = "yapf" version = "0.43.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "platformdirs" }, + { name = "platformdirs", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/23/97/b6f296d1e9cc1ec25c7604178b48532fa5901f721bcf1b8d8148b13e5588/yapf-0.43.0.tar.gz", hash = "sha256:00d3aa24bfedff9420b2e0d5d9f5ab6d9d4268e72afbf59bb3fa542781d5218e", size = 254907, upload-time = "2024-11-14T00:11:41.584Z" } wheels = [