simplify HITL tests (#2229)

* lil fixups

* revert that for now

* no n0

* also 1.8V

* global

Author: Adeeb Shihadeh
Committed by: GitHub
Date: 2025-07-19 16:13:51 -07:00
Commit: edd4146635 (parent 76bd9e93d7)
11 changed files with 58 additions and 124 deletions


@@ -80,4 +80,4 @@ jobs:
- name: tests/misra/install.sh
run: ${{ env.RUN }} "cd tests/misra && ./install.sh"
- name: MISRA mutation tests
-run: ${{ env.RUN }} "cd tests/misra && pytest -n2 test_mutation.py"
+run: ${{ env.RUN }} "cd tests/misra && pytest test_mutation.py"

Jenkinsfile

@@ -109,7 +109,7 @@ pipeline {
["build", "scons -j4"],
["flash", "cd scripts/ && ./reflash_internal_panda.py"],
["flash jungle", "cd board/jungle && ./flash.py --all"],
["test", "cd tests/hitl && HW_TYPES=10 pytest -n0 --durations=0 2*.py [5-9]*.py"],
["test", "cd tests/hitl && HW_TYPES=10 pytest --durations=0 2*.py [5-9]*.py"],
])
}
}
@@ -121,7 +121,7 @@ pipeline {
["build", "scons -j4"],
["flash", "cd scripts/ && ./reflash_internal_panda.py"],
["flash jungle", "cd board/jungle && ./flash.py --all"],
["test", "cd tests/hitl && HW_TYPES=9 pytest -n0 --durations=0 2*.py [5-9]*.py"],
["test", "cd tests/hitl && HW_TYPES=9 pytest --durations=0 2*.py [5-9]*.py"],
])
}
}
@@ -133,7 +133,7 @@ pipeline {
["build", "scons -j4"],
["flash", "cd scripts/ && ./reflash_internal_panda.py"],
["flash jungle", "cd board/jungle && ./flash.py --all"],
["test", "cd tests/hitl && HW_TYPES=6 pytest -n0 --durations=0 [2-9]*.py -k 'not test_send_recv'"],
["test", "cd tests/hitl && HW_TYPES=6 pytest --durations=0 [2-9]*.py -k 'not test_send_recv'"],
])
}
}
@@ -141,7 +141,7 @@ pipeline {
stage('bootkick tests') {
steps {
script {
docker_run("test", 10, "pytest -n0 ./tests/som/test_bootkick.py")
docker_run("test", 10, "pytest ./tests/som/test_bootkick.py")
}
}
}


@@ -3,7 +3,7 @@ name = "pandacan"
version = "0.0.10"
description = "Code powering the comma.ai panda"
readme = "README.md"
requires-python = ">=3.11,<3.13"
requires-python = ">=3.11,<3.13" # macOS doesn't work with 3.13 due to pycapnp from opendbc
license = {text = "MIT"}
authors = [{name = "comma.ai"}]
classifiers = [
@@ -24,7 +24,6 @@ dev = [
"flaky",
"pytest",
"pytest-mock",
"pytest-xdist",
"pytest-timeout",
"pytest-randomly",
"ruff",
@@ -69,7 +68,7 @@ flake8-implicit-str-concat.allow-multiline=false
"pytest.main".msg = "pytest.main requires special handling that is easy to mess up!"
[tool.pytest.ini_options]
addopts = "-n0 -Werror --strict-config --strict-markers --durations=10 --ignore-glob='*.sh' --ignore=tests/misra --ignore=tests/som --ignore=tests/hitl"
addopts = "-Werror --strict-config --strict-markers --durations=10 --ignore-glob='*.sh' --ignore=tests/misra --ignore=tests/som --ignore=tests/hitl"
python_files = "test_*.py"
testpaths = [
"tests/"


@@ -12,11 +12,17 @@ if [[ $PLATFORM == "Darwin" ]]; then
brew install --cask gcc-arm-embedded
brew install python3 gcc@13
elif [[ $PLATFORM == "Linux" ]]; then
+# for AGNOS since we clear the apt lists
+if [[ ! -d /"var/lib/apt/" ]]; then
+sudo apt update
+fi
sudo apt-get install -y --no-install-recommends \
-curl \
+curl ca-certificates \
make g++ git libnewlib-arm-none-eabi \
libusb-1.0-0 \
-gcc-arm-none-eabi python3-pip python3-venv python3-dev
+gcc-arm-none-eabi \
+python3-dev python3-pip python3-venv
else
echo "WARNING: unsupported platform. skipping apt/brew install."
fi


@@ -17,5 +17,5 @@ mypy python/
# *** test ***
-# TODO: make xdist and randomly work
-pytest -n0 --randomly-dont-reorganize tests/
+# TODO: make randomly work
+pytest --randomly-dont-reorganize tests/


@@ -1,10 +1,15 @@
import time
import pytest
from flaky import flaky
from opendbc.car.structs import CarParams
from panda import Panda
from panda.tests.hitl.helpers import time_many_sends
pytestmark = [
pytest.mark.test_panda_types((Panda.HW_TYPE_DOS, Panda.HW_TYPE_RED_PANDA))
]
def test_can_loopback(p):
p.set_safety_mode(CarParams.SafetyModel.allOutput)
p.set_can_loopback(True)
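
For readers skimming the diff: module-level pytestmark blocks like the one above are how hardware selection works now that per-panda parametrization is gone. A minimal, self-contained illustration of the mechanism (stand-in values, not repo code; the real mark handling is in the conftest hunk later in this commit):

    import pytest

    _panda_type = "tres"  # stand-in for bytes(p.get_type()) from the real conftest

    # the whole module only applies to these hardware types
    pytestmark = [pytest.mark.test_panda_types(("dos", "red_panda"))]

    @pytest.fixture
    def p(request):
        # mirrors the conftest: skip when the connected panda isn't in the mark
        mark = request.node.get_closest_marker('test_panda_types')
        if mark and _panda_type not in mark.args[0]:
            pytest.skip(f"Not applicable, {mark.args[0]} pandas only")
        yield object()  # stand-in for the real Panda handle

    def test_example(p):
        assert p is not None  # skipped here, since "tres" isn't in the allowed set

(Run standalone, this sketch just warns about the unregistered custom mark; the repo's own config registers its marks.)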


@@ -7,7 +7,6 @@ from flaky import flaky
from collections import defaultdict
from opendbc.car.structs import CarParams
from panda.tests.hitl.conftest import PandaGroup
from panda.tests.hitl.helpers import time_many_sends, get_random_can_messages, clear_can_buffers
@flaky(max_runs=3, min_passes=1)
@@ -90,7 +89,6 @@ def test_latency(p, panda_jungle):
@pytest.mark.panda_expect_can_error
@pytest.mark.test_panda_types(PandaGroup.GEN2)
def test_gen2_loopback(p, panda_jungle):
def test(p_send, p_recv, address=None):
for bus in range(4):


@@ -4,12 +4,10 @@ import itertools
from opendbc.car.structs import CarParams
from panda import Panda
from panda.tests.hitl.conftest import PandaGroup
# TODO: test relay
@pytest.mark.panda_expect_can_error
@pytest.mark.test_panda_types(PandaGroup.GEN2)
def test_harness_status(p, panda_jungle):
# map from jungle orientations to panda orientations
orientation_map = {
@@ -61,7 +59,7 @@ def test_harness_status(p, panda_jungle):
assert buses[2] == (0 if flipped else 2)
# SBU voltages
-supply_voltage_mV = 1800 if p.get_type() in [Panda.HW_TYPE_TRES, ] else 3300
+supply_voltage_mV = 1800 if p.get_type() in [Panda.HW_TYPE_TRES, Panda.HW_TYPE_CUATRO] else 3300
if orientation == Panda.HARNESS_STATUS_NC:
assert health['sbu1_voltage_mV'] > 0.9 * supply_voltage_mV
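
The likely motivation, reading this hunk together with the "also 1.8V" line in the commit message: the cuatro, like the tres, runs its SBU lines from a 1.8V rail, so the not-connected check against a 3.3V assumption could never pass on it. Illustrative arithmetic (not repo code):

    # The NC case asserts sbu voltage > 0.9 * supply rail (see the hunk above).
    for rail_mV in (1800, 3300):
        print(f"{rail_mV} mV rail -> NC threshold {0.9 * rail_mV:.0f} mV")
    # 1800 mV rail -> NC threshold 1620 mV
    # 3300 mV rail -> NC threshold 2970 mV  (out of reach for a 1.8V part)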


@@ -1,62 +1,32 @@
import os
import pytest
-import concurrent.futures
from panda import Panda, PandaDFU, PandaJungle
from panda.tests.hitl.helpers import clear_can_buffers
-# needed to get output when using xdist
-if "DEBUG" in os.environ:
-import sys
-sys.stdout = sys.stderr
SPEED_NORMAL = 500
BUS_SPEEDS = [(0, SPEED_NORMAL), (1, SPEED_NORMAL), (2, SPEED_NORMAL)]
-JUNGLE_SERIAL = os.getenv("PANDAS_JUNGLE")
# test options
NO_JUNGLE = os.environ.get("NO_JUNGLE", "0") == "1"
-PANDAS_EXCLUDE = os.getenv("PANDAS_EXCLUDE", "").strip().split(" ")
HW_TYPES = os.environ.get("HW_TYPES", None)
PARALLEL = "PARALLEL" in os.environ
NON_PARALLEL = "NON_PARALLEL" in os.environ
if PARALLEL:
NO_JUNGLE = True
class PandaGroup:
H7 = (Panda.HW_TYPE_RED_PANDA, Panda.HW_TYPE_RED_PANDA_V2, Panda.HW_TYPE_TRES)
GEN2 = (Panda.HW_TYPE_UNO, Panda.HW_TYPE_DOS) + H7
TESTED = (Panda.HW_TYPE_RED_PANDA, Panda.HW_TYPE_RED_PANDA_V2, Panda.HW_TYPE_UNO)
if HW_TYPES is not None:
PandaGroup.TESTED = [bytes([int(x), ]) for x in HW_TYPES.strip().split(",")] # type: ignore
-# Find all pandas connected
-_all_pandas = {}
_panda_jungle = None
-def init_all_pandas():
+_panda_type = None
+_panda_serial = None
+def init_devices():
if not NO_JUNGLE:
global _panda_jungle
-_panda_jungle = PandaJungle(JUNGLE_SERIAL)
+_panda_jungle = PandaJungle()
_panda_jungle.set_panda_power(True)
-for serial in Panda.list():
-if serial not in PANDAS_EXCLUDE:
-with Panda(serial=serial, claim=False) as p:
-ptype = bytes(p.get_type())
-if ptype in PandaGroup.TESTED:
-_all_pandas[serial] = ptype
-# ensure we have all tested panda types
-missing_types = set(PandaGroup.TESTED) - set(_all_pandas.values())
-assert len(missing_types) == 0, f"Missing panda types: {missing_types}"
-print(f"{len(_all_pandas)} total pandas")
-init_all_pandas()
-_all_panda_serials = sorted(_all_pandas.keys())
+with Panda(serial=None, claim=False) as p:
+global _panda_type
+global _panda_serial
+_panda_serial = p.get_usb_serial()
+_panda_type = bytes(p.get_type())
+assert _panda_serial is not None, "No panda found!"
+init_devices()
def init_jungle():
if _panda_jungle is None:
@@ -90,24 +60,9 @@ def pytest_collection_modifyitems(items):
if item.get_closest_marker('timeout') is None:
item.add_marker(pytest.mark.timeout(60))
-# xdist grouping by panda
-serial = item.name.split("serial=")[1].split(",")[0]
-assert len(serial) == 24
-item.add_marker(pytest.mark.xdist_group(serial))
needs_jungle = "panda_jungle" in item.fixturenames
-if PARALLEL and needs_jungle:
-item.add_marker(pytest.mark.skip(reason="no jungle tests in PARALLEL mode"))
-elif NON_PARALLEL and not needs_jungle:
-item.add_marker(pytest.mark.skip(reason="only running jungle tests"))
-def pytest_make_parametrize_id(config, val, argname):
-if val in _all_pandas:
-# TODO: get nice string instead of int
-hw_type = _all_pandas[val][0]
-return f"serial={val}, hw_type={hw_type}"
-return None
+if needs_jungle and NO_JUNGLE:
+item.add_marker(pytest.mark.skip(reason="skipping tests that requires a jungle"))
@pytest.fixture(name='panda_jungle', scope='function')
def fixture_panda_jungle(request):
@@ -116,6 +71,8 @@ def fixture_panda_jungle(request):
@pytest.fixture(name='p', scope='function')
def func_fixture_panda(request, module_panda):
+# *** Setup ***
p = module_panda
# Check if test is applicable to this panda
@@ -123,14 +80,14 @@ def func_fixture_panda(request, module_panda):
if mark:
assert len(mark.args) > 0, "Missing panda types argument in mark"
test_types = mark.args[0]
-if _all_pandas[p.get_usb_serial()] not in test_types:
+if _panda_type not in test_types:
pytest.skip(f"Not applicable, {test_types} pandas only")
mark = request.node.get_closest_marker('skip_panda_types')
if mark:
assert len(mark.args) > 0, "Missing panda types argument in mark"
skip_types = mark.args[0]
-if _all_pandas[p.get_usb_serial()] in skip_types:
+if _panda_type in skip_types:
pytest.skip(f"Not applicable to {skip_types}")
# this is 2+ seconds on USB pandas due to slow
@@ -140,10 +97,10 @@ def func_fixture_panda(request, module_panda):
# ensure FW hasn't changed
assert p.up_to_date()
-# Run test
+# *** Run test ***
yield p
-# Teardown
+# *** Teardown ***
# reconnect
if p.get_dfu_serial() in PandaDFU.list():
@@ -182,41 +139,27 @@ def func_fixture_panda(request, module_panda):
assert can_health['total_error_cnt'] == 0
assert can_health['total_tx_checksum_error_cnt'] == 0
-@pytest.fixture(name='module_panda', params=_all_panda_serials, scope='module')
+@pytest.fixture(name='module_panda', scope='module')
def fixture_panda_setup(request):
"""
-Clean up all pandas + jungle and return the panda under test.
+Clean up panda + jungle and return the panda under test.
"""
-panda_serial = request.param
-# Initialize jungle
+# init jungle
init_jungle()
-# Connect to pandas
-def cnnct(s):
-if s == panda_serial:
-p = Panda(serial=s)
-p.reset(reconnect=True)
+# init panda
+p = Panda(serial=_panda_serial)
+p.reset(reconnect=True)
-p.set_can_loopback(False)
-p.set_power_save(False)
-for bus, speed in BUS_SPEEDS:
-p.set_can_speed_kbps(bus, speed)
-clear_can_buffers(p)
-p.set_power_save(False)
-return p
-elif not PARALLEL:
-with Panda(serial=s) as p:
-p.reset(reconnect=False)
-return None
-with concurrent.futures.ThreadPoolExecutor() as exc:
-ps = list(exc.map(cnnct, _all_panda_serials, timeout=20))
-pandas = [p for p in ps if p is not None]
+p.set_can_loopback(False)
+p.set_power_save(False)
+for bus, speed in BUS_SPEEDS:
+p.set_can_speed_kbps(bus, speed)
+clear_can_buffers(p)
+p.set_power_save(False)
# run test
-yield pandas[0]
+yield p
-# Teardown
-for p in pandas:
-p.close()
+# teardown
+p.close()
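
Taken together, the conftest changes drop the parametrized, xdist-grouped multi-panda setup in favour of exactly one panda under test. A condensed sketch of the resulting flow, assembled from the added lines above (the real file keeps the jungle re-init, FW/DFU recovery, health checks, and marker handling shown in the other hunks):

    import os
    import pytest
    from panda import Panda, PandaJungle
    from panda.tests.hitl.helpers import clear_can_buffers

    SPEED_NORMAL = 500
    BUS_SPEEDS = [(0, SPEED_NORMAL), (1, SPEED_NORMAL), (2, SPEED_NORMAL)]
    NO_JUNGLE = os.environ.get("NO_JUNGLE", "0") == "1"

    _panda_jungle = None
    _panda_type = None
    _panda_serial = None

    def init_devices():
        # one jungle plus exactly one panda; no more enumerating every serial
        global _panda_jungle, _panda_type, _panda_serial
        if not NO_JUNGLE:
            _panda_jungle = PandaJungle()
            _panda_jungle.set_panda_power(True)
        with Panda(serial=None, claim=False) as p:
            _panda_serial = p.get_usb_serial()
            _panda_type = bytes(p.get_type())
        assert _panda_serial is not None, "No panda found!"

    init_devices()

    @pytest.fixture(name='module_panda', scope='module')
    def fixture_panda_setup(request):
        # reset and configure the single panda, then hand it to the tests
        p = Panda(serial=_panda_serial)
        p.reset(reconnect=True)
        p.set_can_loopback(False)
        p.set_power_save(False)
        for bus, speed in BUS_SPEEDS:
            p.set_can_speed_kbps(bus, speed)
        clear_can_buffers(p)
        yield p
        p.close()

With a single panda and no xdist groups, the PARALLEL/NON_PARALLEL wrapper scripts deleted below no longer have anything to drive.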


@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-set -e
-DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)"
-cd $DIR
-# n = number of pandas tested
-PARALLEL=1 pytest --durations=0 *.py -n 5 --dist loadgroup -x


@@ -1,7 +0,0 @@
-#!/usr/bin/env bash
-set -e
-DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)"
-cd $DIR
-NON_PARALLEL=1 pytest --durations=0 *.py -x