2021-12-07 11:25:40 +01:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
|
import os
|
2020-01-17 11:20:17 -08:00
|
|
|
import time
|
|
|
|
|
import threading
|
|
|
|
|
import logging
|
|
|
|
|
import json
|
2023-07-14 15:12:11 +01:00
|
|
|
from pathlib import Path
|
2023-09-07 11:32:47 -07:00
|
|
|
from openpilot.system.hardware.hw import Paths
|
2020-01-17 11:20:17 -08:00
|
|
|
|
2023-12-06 17:27:51 -08:00
|
|
|
from openpilot.common.swaglog import cloudlog
|
2024-01-17 14:24:09 -08:00
|
|
|
from openpilot.system.loggerd.uploader import main, UPLOAD_ATTR_NAME, UPLOAD_ATTR_VALUE
|
2020-01-17 11:20:17 -08:00
|
|
|
|
2023-08-20 20:49:55 -07:00
|
|
|
from openpilot.system.loggerd.tests.loggerd_tests_common import UploaderTestCase
|
2020-01-17 11:20:17 -08:00
|
|
|
|
2021-11-18 16:05:06 +01:00
|
|
|
|
2023-08-28 10:26:19 -07:00
|
|
|
class FakeLogHandler(logging.Handler):
  """Log handler that records uploader events emitted through cloudlog.

  The uploader logs JSON-formatted messages; this handler collects the keys
  of "upload_success" and "upload_ignored" events so tests can assert on the
  exact set and order of processed files.
  """

  def __init__(self):
    super().__init__()
    self.reset()

  def reset(self):
    # keys of successfully uploaded files, in the order they were uploaded
    self.upload_order = []
    # keys of files the uploader deliberately skipped, in order
    self.upload_ignored = []

  def emit(self, record):
    # Messages that aren't JSON, or lack the expected fields, are ignored —
    # cloudlog carries plenty of unrelated log lines.
    try:
      msg = json.loads(record.getMessage())
      event = msg["event"]
      if event == "upload_success":
        self.upload_order.append(msg["key"])
      elif event == "upload_ignored":
        self.upload_ignored.append(msg["key"])
    except Exception:
      pass
|
|
|
|
|
|
2023-08-28 10:26:19 -07:00
|
|
|
# Single module-level handler attached to cloudlog: all tests share it and
# call log_handler.reset() in setup_method to start from a clean slate.
log_handler = FakeLogHandler()
cloudlog.addHandler(log_handler)
|
|
|
|
|
|
2021-11-18 16:05:06 +01:00
|
|
|
|
2020-01-17 11:20:17 -08:00
|
|
|
class TestUploader(UploaderTestCase):
  """End-to-end tests for the uploader daemon.

  Each test creates fake log files, runs the uploader main loop in a
  background thread for a few seconds, then inspects the events recorded by
  the module-level log_handler and the upload xattrs on disk.
  """

  def setup_method(self):
    super().setup_method()
    # start every test with a clean record of upload events
    log_handler.reset()

  def start_thread(self):
    """Start the uploader main loop in a background daemon thread."""
    self.end_event = threading.Event()
    self.up_thread = threading.Thread(target=main, args=[self.end_event])
    self.up_thread.daemon = True
    self.up_thread.start()

  def join_thread(self):
    """Signal the uploader loop to stop and wait for the thread to exit."""
    self.end_event.set()
    self.up_thread.join()

  def gen_files(self, lock=False, xattr: bytes | None = None, boot=True) -> list[Path]:
    """Create one segment's worth of log/camera files, plus an optional boot log.

    Args:
      lock: also create a .lock file next to each file.
      xattr: pre-set this value as the upload xattr on each file.
      boot: also create a boot log named after the segment dir.

    Returns the paths of all files created.
    """
    f_paths = []
    for t in ["qlog", "rlog", "dcamera.hevc", "fcamera.hevc"]:
      f_paths.append(self.make_file_with_data(self.seg_dir, t, 1, lock=lock, upload_xattr=xattr))

    if boot:
      f_paths.append(self.make_file_with_data("boot", f"{self.seg_dir}", 1, lock=lock, upload_xattr=xattr))
    return f_paths

  def gen_order(self, seg1: list[int], seg2: list[int], boot=True) -> list[str]:
    """Return the expected upload order for the given segment numbers.

    Boot logs come first, then qlogs; within each group, the first route's
    segments precede the second route's.
    """
    keys = []
    if boot:
      keys += [f"boot/{self.seg_format.format(i)}.bz2" for i in seg1]
      keys += [f"boot/{self.seg_format2.format(i)}.bz2" for i in seg2]
    keys += [f"{self.seg_format.format(i)}/qlog.bz2" for i in seg1]
    keys += [f"{self.seg_format2.format(i)}/qlog.bz2" for i in seg2]
    return keys

  def _assert_uploads(self, exp_order: list[str]) -> None:
    """Assert every expected file was uploaded exactly once, in order, with
    the upload xattr set on disk, and that nothing was ignored."""
    assert len(log_handler.upload_ignored) == 0, "Some files were ignored"
    assert not len(log_handler.upload_order) < len(exp_order), "Some files failed to upload"
    assert not len(log_handler.upload_order) > len(exp_order), "Some files were uploaded twice"
    for f_path in exp_order:
      assert os.getxattr((Path(Paths.log_root()) / f_path).with_suffix(""), UPLOAD_ATTR_NAME) == UPLOAD_ATTR_VALUE, "All files not uploaded"

    assert log_handler.upload_order == exp_order, "Files uploaded in wrong order"

  def test_upload(self):
    self.gen_files(lock=False)

    self.start_thread()
    # allow enough time that files could upload twice if there is a bug in the logic
    time.sleep(5)
    self.join_thread()

    exp_order = self.gen_order([self.seg_num], [])
    self._assert_uploads(exp_order)

  def test_upload_with_wrong_xattr(self):
    # files pre-tagged with an xattr value other than UPLOAD_ATTR_VALUE
    # must still be uploaded
    self.gen_files(lock=False, xattr=b'0')

    self.start_thread()
    # allow enough time that files could upload twice if there is a bug in the logic
    time.sleep(5)
    self.join_thread()

    exp_order = self.gen_order([self.seg_num], [])
    self._assert_uploads(exp_order)

  def test_upload_ignored(self):
    # with ignore set, files are marked as handled but never uploaded
    self.set_ignore()
    self.gen_files(lock=False)

    self.start_thread()
    # allow enough time that files could upload twice if there is a bug in the logic
    time.sleep(5)
    self.join_thread()

    exp_order = self.gen_order([self.seg_num], [])

    assert len(log_handler.upload_order) == 0, "Some files were not ignored"
    assert not len(log_handler.upload_ignored) < len(exp_order), "Some files failed to ignore"
    assert not len(log_handler.upload_ignored) > len(exp_order), "Some files were ignored twice"
    for f_path in exp_order:
      # ignored files still get the upload xattr so they aren't revisited
      assert os.getxattr((Path(Paths.log_root()) / f_path).with_suffix(""), UPLOAD_ATTR_NAME) == UPLOAD_ATTR_VALUE, "All files not ignored"

    assert log_handler.upload_ignored == exp_order, "Files ignored in wrong order"

  def test_upload_files_in_create_order(self):
    # segments from two routes, with non-contiguous segment numbers
    seg1_nums = [0, 1, 2, 10, 20]
    for i in seg1_nums:
      self.seg_dir = self.seg_format.format(i)
      self.gen_files(boot=False)
    seg2_nums = [5, 50, 51]
    for i in seg2_nums:
      self.seg_dir = self.seg_format2.format(i)
      self.gen_files(boot=False)

    exp_order = self.gen_order(seg1_nums, seg2_nums, boot=False)

    self.start_thread()
    # allow enough time that files could upload twice if there is a bug in the logic
    time.sleep(5)
    self.join_thread()

    self._assert_uploads(exp_order)

  def test_no_upload_with_lock_file(self):
    self.start_thread()

    # let the uploader finish its startup lock-clearing pass before creating
    # the locked files
    time.sleep(0.25)
    f_paths = self.gen_files(lock=True, boot=False)

    # allow enough time that files should have been uploaded if they would be uploaded
    time.sleep(5)
    self.join_thread()

    for f_path in f_paths:
      fn = f_path.with_suffix(f_path.suffix.replace(".bz2", ""))
      uploaded = UPLOAD_ATTR_NAME in os.listxattr(fn) and os.getxattr(fn, UPLOAD_ATTR_NAME) == UPLOAD_ATTR_VALUE
      assert not uploaded, "File upload when locked"

  def test_no_upload_with_xattr(self):
    # files already tagged as uploaded must not be uploaded again
    self.gen_files(lock=False, xattr=UPLOAD_ATTR_VALUE)

    self.start_thread()
    # allow enough time that files could upload twice if there is a bug in the logic
    time.sleep(5)
    self.join_thread()

    assert len(log_handler.upload_order) == 0, "File uploaded again"

  def test_clear_locks_on_startup(self):
    # lock files created before startup (e.g. left over from a crash) are
    # removed by the uploader when it starts
    f_paths = self.gen_files(lock=True, boot=False)
    self.start_thread()
    time.sleep(1)
    self.join_thread()

    for f_path in f_paths:
      lock_path = f_path.with_suffix(f_path.suffix + ".lock")
      assert not lock_path.is_file(), "File lock not cleared on startup"
|