HITL tests: add timeouts (#1352)

* set timeout

* closest

* more timeout

* fan test is slow

* retry

* break

* bump
This commit is contained in:
Adeeb Shihadeh 2023-06-04 01:00:17 -07:00 committed by GitHub
parent 17ca4171df
commit 92ed48ae5c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 16 additions and 0 deletions

View File

@@ -4,6 +4,7 @@ hexdump>=3.3
pycryptodome==3.9.8
tqdm>=4.14.0
pytest
pytest-timeouts
parameterized
requests
flake8==3.7.9

View File

@@ -1,5 +1,6 @@
import os
import time
import pytest
from panda import Panda, PandaDFU, McuType, BASEDIR
@@ -10,6 +11,7 @@ def check_signature(p):
# TODO: make more comprehensive bootstub tests and run on a few production ones + current
# TODO: also test release-signed app
@pytest.mark.execution_timeout(30)
def test_a_known_bootstub(p):
  """
  Test that compiled app can work with known production bootstub
@@ -55,10 +57,12 @@ def test_a_known_bootstub(p):
check_signature(p)
assert not p.bootstub
@pytest.mark.execution_timeout(15)
def test_b_recover(p):
assert p.recover(timeout=30)
check_signature(p)
@pytest.mark.execution_timeout(25)
def test_c_flash(p):
# test flash from bootstub
serial = p._serial

View File

@@ -11,6 +11,7 @@ from panda.tests.hitl.conftest import PandaGroup, PARTIAL_TESTS
from panda.tests.hitl.helpers import time_many_sends, clear_can_buffers
@flaky(max_runs=3, min_passes=1)
@pytest.mark.execution_timeout(35)
def test_send_recv(p, panda_jungle):
def test(p_send, p_recv):
p_send.set_can_loopback(False)
@@ -45,6 +46,7 @@ def test_send_recv(p, panda_jungle):
@flaky(max_runs=6, min_passes=1)
@pytest.mark.execution_timeout(30)
def test_latency(p, panda_jungle):
def test(p_send, p_recv):
p_send.set_can_loopback(False)

View File

@@ -8,6 +8,7 @@ pytestmark = [
pytest.mark.test_panda_types(Panda.INTERNAL_DEVICES)
]
@pytest.mark.execution_timeout(50)
def test_fan_controller(p):
start_health = p.health()

View File

@@ -87,6 +87,14 @@ def pytest_configure(config):
"markers", "panda_expect_can_error: mark test to ignore CAN health errors"
)
def pytest_collection_modifyitems(items):
    """Pytest collection hook: enforce default timeouts on every HITL test.

    Tests that already declare an ``execution_timeout`` marker keep their own
    limit; everything else gets a 10s cap. Setup/teardown limits are applied
    to all tests regardless.
    """
    default_exec_limit = pytest.mark.execution_timeout(10)
    for test_item in items:
        # Only cap execution time when the test didn't set its own limit.
        if test_item.get_closest_marker('execution_timeout') is None:
            test_item.add_marker(default_exec_limit)
        # Fixture setup/teardown budgets apply unconditionally.
        test_item.add_marker(pytest.mark.setup_timeout(18))
        test_item.add_marker(pytest.mark.teardown_timeout(12))
def pytest_make_parametrize_id(config, val, argname):
if val in _all_pandas: