Replace ui.py with a Rerun visualizer (#32850)
* Replace ui.py with rerun
* Visualizing radar points
* Visualizing all points
* Code clean-up
* Merging matrices into one
* Removing pygame dependency
* Replacing ui.py with rp_visualization.py
* Minor fix, changing color names
* Update README.md
@@ -19,8 +19,10 @@ tools/replay/replay --demo
 # watch the replay with the normal openpilot UI
 cd selfdrive/ui && ./ui
 
-# or try out a debug visualizer:
-python replay/ui.py
+# or try out radar point visualization in Rerun:
+python replay/rp_visualization.py
+
+# NOTE: To visualize radar points, make sure tools/replay/replay is running.
 ```
 
 ## usage
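For orientation, here is a condensed sketch (not part of the commit) of the Rerun pattern the new `rp_visualization.py` below relies on: register the color palette once as an `AnnotationContext`, then log the uint8 top-down overlay each frame as a `SegmentationImage`. The palette entries and array shape are taken from the new helpers; the overlay contents here are synthetic, whereas the real script fills them from `modelV2`, `radarState`, and `liveTracks`.

```python
import numpy as np
import rerun as rr

# Palette entries are (class id, label, rgb); any pixel holding a class id
# is drawn in the matching color by the Rerun viewer.
palette = [(96, "red", (255, 0, 0)),
           (124, "yellow", (255, 255, 0)),
           (255, "white", (255, 255, 255))]

rr.init("RadarPoints", spawn=True)                     # spawn a viewer window
rr.log("tracks", rr.AnnotationContext(palette), static=True)

overlay = np.zeros((960, 384), dtype=np.uint8)         # blank top-down view (synthetic)
overlay[430:434, 180:184] = 96                         # a made-up "radar point" class id
rr.set_time_nanos("TIMELINE", 0)
rr.log("tracks", rr.SegmentationImage(overlay))
```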
tools/replay/lib/rp_helpers.py (new file, 108 lines)
@@ -0,0 +1,108 @@
import numpy as np

from openpilot.selfdrive.controls.radard import RADAR_TO_CAMERA

# Color palette used for rerun AnnotationContext
rerunColorPalette = [(96, "red", (255, 0, 0)),
                     (100, "pink", (255, 36, 0)),
                     (124, "yellow", (255, 255, 0)),
                     (230, "vibrantpink", (255, 36, 170)),
                     (240, "orange", (255, 146, 0)),
                     (255, "white", (255, 255, 255)),
                     (110, "carColor", (255, 0, 127))]


class UIParams:
  lidar_x, lidar_y, lidar_zoom = 384, 960, 6
  lidar_car_x, lidar_car_y = lidar_x / 2., lidar_y / 1.1
  car_hwidth = 1.7272 / 2 * lidar_zoom
  car_front = 2.6924 * lidar_zoom
  car_back = 1.8796 * lidar_zoom
  car_color = rerunColorPalette[6][0]
UP = UIParams


def to_topdown_pt(y, x):
  px, py = x * UP.lidar_zoom + UP.lidar_car_x, -y * UP.lidar_zoom + UP.lidar_car_y
  if px > 0 and py > 0 and px < UP.lidar_x and py < UP.lidar_y:
    return int(px), int(py)
  return -1, -1


def draw_path(path, lid_overlay, lid_color=None):
  x, y = np.asarray(path.x), np.asarray(path.y)
  # draw lidar path point on lidar
  if lid_color is not None and lid_overlay is not None:
    for i in range(len(x)):
      px, py = to_topdown_pt(x[i], y[i])
      if px != -1:
        lid_overlay[px, py] = lid_color


def plot_model(m, lid_overlay):
  if lid_overlay is None:
    return
  for lead in m.leadsV3:
    if lead.prob < 0.5:
      continue
    x, y = lead.x[0], lead.y[0]
    x_std = lead.xStd[0]
    x -= RADAR_TO_CAMERA
    _, py_top = to_topdown_pt(x + x_std, y)
    px, py_bottom = to_topdown_pt(x - x_std, y)
    lid_overlay[int(round(px - 4)):int(round(px + 4)), py_top:py_bottom] = rerunColorPalette[2][0]

  for path in m.laneLines:
    draw_path(path, lid_overlay, rerunColorPalette[2][0])
  for edge in m.roadEdges:
    draw_path(edge, lid_overlay, rerunColorPalette[0][0])
  draw_path(m.position, lid_overlay, rerunColorPalette[0][0])


def plot_lead(rs, lid_overlay):
  for lead in [rs.leadOne, rs.leadTwo]:
    if not lead.status:
      continue
    x = lead.dRel
    px_left, py = to_topdown_pt(x, -10)
    px_right, _ = to_topdown_pt(x, 10)
    lid_overlay[px_left:px_right, py] = rerunColorPalette[0][0]


def maybe_update_radar_points(lt, lid_overlay):
  ar_pts = []
  if lt is not None:
    ar_pts = {}
    for track in lt:
      ar_pts[track.trackId] = [track.dRel, track.yRel, track.vRel, track.aRel, track.oncoming, track.stationary]
  for ids, pt in ar_pts.items():
    # negative here since radar is left positive
    px, py = to_topdown_pt(pt[0], -pt[1])
    if px != -1:
      if pt[-1]:
        color = rerunColorPalette[4][0]
      elif pt[-2]:
        color = rerunColorPalette[3][0]
      else:
        color = rerunColorPalette[5][0]
      if int(ids) == 1:
        lid_overlay[px - 2:px + 2, py - 10:py + 10] = rerunColorPalette[1][0]
      else:
        lid_overlay[px - 2:px + 2, py - 2:py + 2] = color


def get_blank_lid_overlay(UP):
  lid_overlay = np.zeros((UP.lidar_x, UP.lidar_y), 'uint8')
  # Draw the car.
  lid_overlay[int(round(UP.lidar_car_x - UP.car_hwidth)):int(
    round(UP.lidar_car_x + UP.car_hwidth)), int(round(UP.lidar_car_y -
                                                      UP.car_front))] = UP.car_color
  lid_overlay[int(round(UP.lidar_car_x - UP.car_hwidth)):int(
    round(UP.lidar_car_x + UP.car_hwidth)), int(round(UP.lidar_car_y +
                                                      UP.car_back))] = UP.car_color
  lid_overlay[int(round(UP.lidar_car_x - UP.car_hwidth)), int(
    round(UP.lidar_car_y - UP.car_front)):int(round(
      UP.lidar_car_y + UP.car_back))] = UP.car_color
  lid_overlay[int(round(UP.lidar_car_x + UP.car_hwidth)), int(
    round(UP.lidar_car_y - UP.car_front)):int(round(
      UP.lidar_car_y + UP.car_back))] = UP.car_color
  return lid_overlay
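A minimal sketch (not part of the commit) of how these helpers compose by hand: start from the blank overlay with the ego car drawn in, project one hypothetical radar track into the top-down frame with `to_topdown_pt`, and stamp it with a palette class id, mirroring what `maybe_update_radar_points` does per track. The track values below are made up.

```python
from openpilot.tools.replay.lib.rp_helpers import (UP, rerunColorPalette,
                                                   get_blank_lid_overlay, to_topdown_pt)

lid_overlay = get_blank_lid_overlay(UP)   # uint8 canvas with the ego-car box pre-drawn
d_rel, y_rel = 40.0, -2.5                 # hypothetical track: 40 m ahead, 2.5 m to the side
px, py = to_topdown_pt(d_rel, -y_rel)     # same sign convention as maybe_update_radar_points
if px != -1:                              # (-1, -1) means the point falls outside the view
  lid_overlay[px - 2:px + 2, py - 2:py + 2] = rerunColorPalette[5][0]  # "white" class id
```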
tools/replay/lib/ui_helpers.py (deleted, 231 lines)
@@ -1,231 +0,0 @@
import itertools
from typing import Any

import matplotlib.pyplot as plt
import numpy as np
import pygame

from matplotlib.backends.backend_agg import FigureCanvasAgg

from openpilot.common.transformations.camera import get_view_frame_from_calib_frame
from openpilot.selfdrive.controls.radard import RADAR_TO_CAMERA


RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)

class UIParams:
  lidar_x, lidar_y, lidar_zoom = 384, 960, 6
  lidar_car_x, lidar_car_y = lidar_x / 2., lidar_y / 1.1
  car_hwidth = 1.7272 / 2 * lidar_zoom
  car_front = 2.6924 * lidar_zoom
  car_back = 1.8796 * lidar_zoom
  car_color = 110
UP = UIParams

METER_WIDTH = 20

class Calibration:
  def __init__(self, num_px, rpy, intrinsic, calib_scale):
    self.intrinsic = intrinsic
    self.extrinsics_matrix = get_view_frame_from_calib_frame(rpy[0], rpy[1], rpy[2], 0.0)[:,:3]
    self.zoom = calib_scale

  def car_space_to_ff(self, x, y, z):
    car_space_projective = np.column_stack((x, y, z)).T

    ep = self.extrinsics_matrix.dot(car_space_projective)
    kep = self.intrinsic.dot(ep)
    return (kep[:-1, :] / kep[-1, :]).T

  def car_space_to_bb(self, x, y, z):
    pts = self.car_space_to_ff(x, y, z)
    return pts / self.zoom


_COLOR_CACHE : dict[tuple[int, int, int], Any] = {}
def find_color(lidar_surface, color):
  if color in _COLOR_CACHE:
    return _COLOR_CACHE[color]
  tcolor = 0
  ret = 255
  for x in lidar_surface.get_palette():
    if x[0:3] == color:
      ret = tcolor
      break
    tcolor += 1
  _COLOR_CACHE[color] = ret
  return ret


def to_topdown_pt(y, x):
  px, py = x * UP.lidar_zoom + UP.lidar_car_x, -y * UP.lidar_zoom + UP.lidar_car_y
  if px > 0 and py > 0 and px < UP.lidar_x and py < UP.lidar_y:
    return int(px), int(py)
  return -1, -1


def draw_path(path, color, img, calibration, top_down, lid_color=None, z_off=0):
  x, y, z = np.asarray(path.x), np.asarray(path.y), np.asarray(path.z) + z_off
  pts = calibration.car_space_to_bb(x, y, z)
  pts = np.round(pts).astype(int)

  # draw lidar path point on lidar
  # find color in 8 bit
  if lid_color is not None and top_down is not None:
    tcolor = find_color(top_down[0], lid_color)
    for i in range(len(x)):
      px, py = to_topdown_pt(x[i], y[i])
      if px != -1:
        top_down[1][px, py] = tcolor

  height, width = img.shape[:2]
  for x, y in pts:
    if 1 < x < width - 1 and 1 < y < height - 1:
      for a, b in itertools.permutations([-1, 0, -1], 2):
        img[y + a, x + b] = color


def init_plots(arr, name_to_arr_idx, plot_xlims, plot_ylims, plot_names, plot_colors, plot_styles):
  color_palette = { "r": (1, 0, 0),
                    "g": (0, 1, 0),
                    "b": (0, 0, 1),
                    "k": (0, 0, 0),
                    "y": (1, 1, 0),
                    "p": (0, 1, 1),
                    "m": (1, 0, 1)}

  dpi = 90
  fig = plt.figure(figsize=(575 / dpi, 600 / dpi), dpi=dpi)
  canvas = FigureCanvasAgg(fig)

  fig.set_facecolor((0.2, 0.2, 0.2))

  axs = []
  for pn in range(len(plot_ylims)):
    ax = fig.add_subplot(len(plot_ylims), 1, len(axs)+1)
    ax.set_xlim(plot_xlims[pn][0], plot_xlims[pn][1])
    ax.set_ylim(plot_ylims[pn][0], plot_ylims[pn][1])
    ax.patch.set_facecolor((0.4, 0.4, 0.4))
    axs.append(ax)

  plots, idxs, plot_select = [], [], []
  for i, pl_list in enumerate(plot_names):
    for j, item in enumerate(pl_list):
      plot, = axs[i].plot(arr[:, name_to_arr_idx[item]],
                          label=item,
                          color=color_palette[plot_colors[i][j]],
                          linestyle=plot_styles[i][j])
      plots.append(plot)
      idxs.append(name_to_arr_idx[item])
      plot_select.append(i)
    axs[i].set_title(", ".join(f"{nm} ({cl})"
                               for (nm, cl) in zip(pl_list, plot_colors[i], strict=False)), fontsize=10)
    axs[i].tick_params(axis="x", colors="white")
    axs[i].tick_params(axis="y", colors="white")
    axs[i].title.set_color("white")

    if i < len(plot_ylims) - 1:
      axs[i].set_xticks([])

  canvas.draw()

  def draw_plots(arr):
    for ax in axs:
      ax.draw_artist(ax.patch)
    for i in range(len(plots)):
      plots[i].set_ydata(arr[:, idxs[i]])
      axs[plot_select[i]].draw_artist(plots[i])

    raw_data = canvas.buffer_rgba()
    plot_surface = pygame.image.frombuffer(raw_data, canvas.get_width_height(), "RGBA").convert()
    return plot_surface

  return draw_plots


def pygame_modules_have_loaded():
  return pygame.display.get_init() and pygame.font.get_init()


def plot_model(m, img, calibration, top_down):
  if calibration is None or top_down is None:
    return

  for lead in m.leadsV3:
    if lead.prob < 0.5:
      continue

    x, y = lead.x[0], lead.y[0]
    x_std = lead.xStd[0]
    x -= RADAR_TO_CAMERA

    _, py_top = to_topdown_pt(x + x_std, y)
    px, py_bottom = to_topdown_pt(x - x_std, y)
    top_down[1][int(round(px - 4)):int(round(px + 4)), py_top:py_bottom] = find_color(top_down[0], YELLOW)

  for path, prob, _ in zip(m.laneLines, m.laneLineProbs, m.laneLineStds, strict=True):
    color = (0, int(255 * prob), 0)
    draw_path(path, color, img, calibration, top_down, YELLOW)

  for edge, std in zip(m.roadEdges, m.roadEdgeStds, strict=True):
    prob = max(1 - std, 0)
    color = (int(255 * prob), 0, 0)
    draw_path(edge, color, img, calibration, top_down, RED)

  color = (255, 0, 0)
  draw_path(m.position, color, img, calibration, top_down, RED, 1.22)


def plot_lead(rs, top_down):
  for lead in [rs.leadOne, rs.leadTwo]:
    if not lead.status:
      continue

    x = lead.dRel
    px_left, py = to_topdown_pt(x, -10)
    px_right, _ = to_topdown_pt(x, 10)
    top_down[1][px_left:px_right, py] = find_color(top_down[0], RED)


def maybe_update_radar_points(lt, lid_overlay):
  ar_pts = []
  if lt is not None:
    ar_pts = {}
    for track in lt:
      ar_pts[track.trackId] = [track.dRel, track.yRel, track.vRel, track.aRel, track.oncoming, track.stationary]
  for ids, pt in ar_pts.items():
    # negative here since radar is left positive
    px, py = to_topdown_pt(pt[0], -pt[1])
    if px != -1:
      if pt[-1]:
        color = 240
      elif pt[-2]:
        color = 230
      else:
        color = 255
      if int(ids) == 1:
        lid_overlay[px - 2:px + 2, py - 10:py + 10] = 100
      else:
        lid_overlay[px - 2:px + 2, py - 2:py + 2] = color

def get_blank_lid_overlay(UP):
  lid_overlay = np.zeros((UP.lidar_x, UP.lidar_y), 'uint8')
  # Draw the car.
  lid_overlay[int(round(UP.lidar_car_x - UP.car_hwidth)):int(
    round(UP.lidar_car_x + UP.car_hwidth)), int(round(UP.lidar_car_y -
                                                      UP.car_front))] = UP.car_color
  lid_overlay[int(round(UP.lidar_car_x - UP.car_hwidth)):int(
    round(UP.lidar_car_x + UP.car_hwidth)), int(round(UP.lidar_car_y +
                                                      UP.car_back))] = UP.car_color
  lid_overlay[int(round(UP.lidar_car_x - UP.car_hwidth)), int(
    round(UP.lidar_car_y - UP.car_front)):int(round(
      UP.lidar_car_y + UP.car_back))] = UP.car_color
  lid_overlay[int(round(UP.lidar_car_x + UP.car_hwidth)), int(
    round(UP.lidar_car_y - UP.car_front)):int(round(
      UP.lidar_car_y + UP.car_back))] = UP.car_color
  return lid_overlay
tools/replay/rp_visualization.py (new executable file, 59 lines)
@@ -0,0 +1,59 @@
#!/usr/bin/env python3
import argparse
import os
import sys
import numpy as np
import rerun as rr
import cereal.messaging as messaging
from openpilot.common.basedir import BASEDIR
from openpilot.tools.replay.lib.rp_helpers import (UP, rerunColorPalette,
                                                   get_blank_lid_overlay,
                                                   maybe_update_radar_points, plot_lead,
                                                   plot_model)
from msgq.visionipc import VisionIpcClient, VisionStreamType

os.environ['BASEDIR'] = BASEDIR

UP.lidar_zoom = 6

def visualize(addr):
  sm = messaging.SubMaster(['radarState', 'liveTracks', 'modelV2'], addr=addr)
  vipc_client = VisionIpcClient("camerad", VisionStreamType.VISION_STREAM_ROAD, True)
  while True:
    if not vipc_client.is_connected():
      vipc_client.connect(True)
    new_data = vipc_client.recv()
    if new_data is None or not new_data.data.any():
      continue

    sm.update(0)
    lid_overlay = get_blank_lid_overlay(UP)
    if sm.recv_frame['modelV2']:
      plot_model(sm['modelV2'], lid_overlay)
    if sm.recv_frame['radarState']:
      plot_lead(sm['radarState'], lid_overlay)
    liveTracksTime = sm.logMonoTime['liveTracks']
    maybe_update_radar_points(sm['liveTracks'], lid_overlay)
    rr.set_time_nanos("TIMELINE", liveTracksTime)
    rr.log("tracks", rr.SegmentationImage(np.flip(np.rot90(lid_overlay, k=-1), axis=1)))


def get_arg_parser():
  parser = argparse.ArgumentParser(
    description="Show replay data in a UI.",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
  parser.add_argument("ip_address", nargs="?", default="127.0.0.1",
                      help="The ip address on which to receive zmq messages.")
  parser.add_argument("--frame-address", default=None,
                      help="The frame address (fully qualified ZMQ endpoint for frames) on which to receive zmq messages.")
  return parser


if __name__ == "__main__":
  args = get_arg_parser().parse_args(sys.argv[1:])
  if args.ip_address != "127.0.0.1":
    os.environ["ZMQ"] = "1"
    messaging.context = messaging.Context()
  rr.init("RadarPoints", spawn=True)
  rr.log("tracks", rr.AnnotationContext(rerunColorPalette), static=True)
  visualize(args.ip_address)
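One detail worth calling out in the final `rr.log` call: for a 2-D array, `np.flip(np.rot90(a, k=-1), axis=1)` is exactly a transpose, so the overlay's `[lateral, longitudinal]` indexing becomes image `[row, column]` order and the forward direction points up in the viewer. A quick check:

```python
import numpy as np

a = np.arange(6, dtype=np.uint8).reshape(2, 3)   # stand-in for lid_overlay
logged = np.flip(np.rot90(a, k=-1), axis=1)      # what rp_visualization.py logs
assert np.array_equal(logged, a.T)               # clockwise rotate + horizontal flip == transpose
print(logged.shape)                              # (3, 2): the second axis now runs down the rows
```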
tools/replay/ui.py (deleted, 238 lines)
@@ -1,238 +0,0 @@
#!/usr/bin/env python3
import argparse
import os
import sys

import cv2
import numpy as np
import pygame

import cereal.messaging as messaging
from openpilot.common.numpy_fast import clip
from openpilot.common.basedir import BASEDIR
from openpilot.common.transformations.camera import DEVICE_CAMERAS
from openpilot.tools.replay.lib.ui_helpers import (UP,
                                                   BLACK, GREEN,
                                                   YELLOW, Calibration,
                                                   get_blank_lid_overlay, init_plots,
                                                   maybe_update_radar_points, plot_lead,
                                                   plot_model,
                                                   pygame_modules_have_loaded)
from msgq.visionipc import VisionIpcClient, VisionStreamType

os.environ['BASEDIR'] = BASEDIR

ANGLE_SCALE = 5.0

def ui_thread(addr):
  cv2.setNumThreads(1)
  pygame.init()
  pygame.font.init()
  assert pygame_modules_have_loaded()

  disp_info = pygame.display.Info()
  max_height = disp_info.current_h

  hor_mode = os.getenv("HORIZONTAL") is not None
  hor_mode = True if max_height < 960+300 else hor_mode

  if hor_mode:
    size = (640+384+640, 960)
    write_x = 5
    write_y = 680
  else:
    size = (640+384, 960+300)
    write_x = 645
    write_y = 970

  pygame.display.set_caption("openpilot debug UI")
  screen = pygame.display.set_mode(size, pygame.DOUBLEBUF)

  alert1_font = pygame.font.SysFont("arial", 30)
  alert2_font = pygame.font.SysFont("arial", 20)
  info_font = pygame.font.SysFont("arial", 15)

  camera_surface = pygame.surface.Surface((640, 480), 0, 24).convert()
  top_down_surface = pygame.surface.Surface((UP.lidar_x, UP.lidar_y), 0, 8)

  sm = messaging.SubMaster(['carState', 'longitudinalPlan', 'carControl', 'radarState', 'liveCalibration', 'controlsState',
                            'liveTracks', 'modelV2', 'liveParameters', 'roadCameraState'], addr=addr)

  img = np.zeros((480, 640, 3), dtype='uint8')
  imgff = None
  num_px = 0
  calibration = None

  lid_overlay_blank = get_blank_lid_overlay(UP)

  # plots
  name_to_arr_idx = { "gas": 0,
                      "computer_gas": 1,
                      "user_brake": 2,
                      "computer_brake": 3,
                      "v_ego": 4,
                      "v_pid": 5,
                      "angle_steers_des": 6,
                      "angle_steers": 7,
                      "angle_steers_k": 8,
                      "steer_torque": 9,
                      "v_override": 10,
                      "v_cruise": 11,
                      "a_ego": 12,
                      "a_target": 13}

  plot_arr = np.zeros((100, len(name_to_arr_idx.values())))

  plot_xlims = [(0, plot_arr.shape[0]), (0, plot_arr.shape[0]), (0, plot_arr.shape[0]), (0, plot_arr.shape[0])]
  plot_ylims = [(-0.1, 1.1), (-ANGLE_SCALE, ANGLE_SCALE), (0., 75.), (-3.0, 2.0)]
  plot_names = [["gas", "computer_gas", "user_brake", "computer_brake"],
                ["angle_steers", "angle_steers_des", "angle_steers_k", "steer_torque"],
                ["v_ego", "v_override", "v_pid", "v_cruise"],
                ["a_ego", "a_target"]]
  plot_colors = [["b", "b", "g", "r", "y"],
                 ["b", "g", "y", "r"],
                 ["b", "g", "r", "y"],
                 ["b", "r"]]
  plot_styles = [["-", "-", "-", "-", "-"],
                 ["-", "-", "-", "-"],
                 ["-", "-", "-", "-"],
                 ["-", "-"]]

  draw_plots = init_plots(plot_arr, name_to_arr_idx, plot_xlims, plot_ylims, plot_names, plot_colors, plot_styles)

  vipc_client = VisionIpcClient("camerad", VisionStreamType.VISION_STREAM_ROAD, True)
  while True:
    for event in pygame.event.get():
      if event.type == pygame.QUIT:
        pygame.quit()
        sys.exit()

    screen.fill((64, 64, 64))
    lid_overlay = lid_overlay_blank.copy()
    top_down = top_down_surface, lid_overlay

    # ***** frame *****
    if not vipc_client.is_connected():
      vipc_client.connect(True)

    yuv_img_raw = vipc_client.recv()
    if yuv_img_raw is None or not yuv_img_raw.data.any():
      continue

    sm.update(0)

    camera = DEVICE_CAMERAS[("tici", str(sm['roadCameraState'].sensor))]

    imgff = np.frombuffer(yuv_img_raw.data, dtype=np.uint8).reshape((len(yuv_img_raw.data) // vipc_client.stride, vipc_client.stride))
    num_px = vipc_client.width * vipc_client.height
    rgb = cv2.cvtColor(imgff[:vipc_client.height * 3 // 2, :vipc_client.width], cv2.COLOR_YUV2RGB_NV12)

    qcam = "QCAM" in os.environ
    bb_scale = (528 if qcam else camera.fcam.width) / 640.
    calib_scale = camera.fcam.width / 640.
    zoom_matrix = np.asarray([
      [bb_scale, 0., 0.],
      [0., bb_scale, 0.],
      [0., 0., 1.]])
    cv2.warpAffine(rgb, zoom_matrix[:2], (img.shape[1], img.shape[0]), dst=img, flags=cv2.WARP_INVERSE_MAP)

    intrinsic_matrix = camera.fcam.intrinsics

    w = sm['controlsState'].lateralControlState.which()
    if w == 'lqrStateDEPRECATED':
      angle_steers_k = sm['controlsState'].lateralControlState.lqrStateDEPRECATED.steeringAngleDeg
    elif w == 'indiState':
      angle_steers_k = sm['controlsState'].lateralControlState.indiState.steeringAngleDeg
    else:
      angle_steers_k = np.inf

    plot_arr[:-1] = plot_arr[1:]
    plot_arr[-1, name_to_arr_idx['angle_steers']] = sm['carState'].steeringAngleDeg
    plot_arr[-1, name_to_arr_idx['angle_steers_des']] = sm['carControl'].actuators.steeringAngleDeg
    plot_arr[-1, name_to_arr_idx['angle_steers_k']] = angle_steers_k
    plot_arr[-1, name_to_arr_idx['gas']] = sm['carState'].gas
    # TODO gas is deprecated
    plot_arr[-1, name_to_arr_idx['computer_gas']] = clip(sm['carControl'].actuators.accel/4.0, 0.0, 1.0)
    plot_arr[-1, name_to_arr_idx['user_brake']] = sm['carState'].brake
    plot_arr[-1, name_to_arr_idx['steer_torque']] = sm['carControl'].actuators.steer * ANGLE_SCALE
    # TODO brake is deprecated
    plot_arr[-1, name_to_arr_idx['computer_brake']] = clip(-sm['carControl'].actuators.accel/4.0, 0.0, 1.0)
    plot_arr[-1, name_to_arr_idx['v_ego']] = sm['carState'].vEgo
    plot_arr[-1, name_to_arr_idx['v_cruise']] = sm['carState'].cruiseState.speed
    plot_arr[-1, name_to_arr_idx['a_ego']] = sm['carState'].aEgo

    if len(sm['longitudinalPlan'].accels):
      plot_arr[-1, name_to_arr_idx['a_target']] = sm['longitudinalPlan'].accels[0]

    if sm.recv_frame['modelV2']:
      plot_model(sm['modelV2'], img, calibration, top_down)

    if sm.recv_frame['radarState']:
      plot_lead(sm['radarState'], top_down)

    # draw all radar points
    maybe_update_radar_points(sm['liveTracks'], top_down[1])

    if sm.updated['liveCalibration'] and num_px:
      rpyCalib = np.asarray(sm['liveCalibration'].rpyCalib)
      calibration = Calibration(num_px, rpyCalib, intrinsic_matrix, calib_scale)

    # *** blits ***
    pygame.surfarray.blit_array(camera_surface, img.swapaxes(0, 1))
    screen.blit(camera_surface, (0, 0))

    # display alerts
    alert_line1 = alert1_font.render(sm['controlsState'].alertText1, True, (255, 0, 0))
    alert_line2 = alert2_font.render(sm['controlsState'].alertText2, True, (255, 0, 0))
    screen.blit(alert_line1, (180, 150))
    screen.blit(alert_line2, (180, 190))

    if hor_mode:
      screen.blit(draw_plots(plot_arr), (640+384, 0))
    else:
      screen.blit(draw_plots(plot_arr), (0, 600))

    pygame.surfarray.blit_array(*top_down)
    screen.blit(top_down[0], (640, 0))

    SPACING = 25

    lines = [
      info_font.render("ENABLED", True, GREEN if sm['controlsState'].enabled else BLACK),
      info_font.render("SPEED: " + str(round(sm['carState'].vEgo, 1)) + " m/s", True, YELLOW),
      info_font.render("LONG CONTROL STATE: " + str(sm['controlsState'].longControlState), True, YELLOW),
      info_font.render("LONG MPC SOURCE: " + str(sm['longitudinalPlan'].longitudinalPlanSource), True, YELLOW),
      None,
      info_font.render("ANGLE OFFSET (AVG): " + str(round(sm['liveParameters'].angleOffsetAverageDeg, 2)) + " deg", True, YELLOW),
      info_font.render("ANGLE OFFSET (INSTANT): " + str(round(sm['liveParameters'].angleOffsetDeg, 2)) + " deg", True, YELLOW),
      info_font.render("STIFFNESS: " + str(round(sm['liveParameters'].stiffnessFactor * 100., 2)) + " %", True, YELLOW),
      info_font.render("STEER RATIO: " + str(round(sm['liveParameters'].steerRatio, 2)), True, YELLOW)
    ]

    for i, line in enumerate(lines):
      if line is not None:
        screen.blit(line, (write_x, write_y + i * SPACING))

    # this takes time...vsync or something
    pygame.display.flip()


def get_arg_parser():
  parser = argparse.ArgumentParser(
    description="Show replay data in a UI.",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)

  parser.add_argument("ip_address", nargs="?", default="127.0.0.1",
                      help="The ip address on which to receive zmq messages.")

  parser.add_argument("--frame-address", default=None,
                      help="The frame address (fully qualified ZMQ endpoint for frames) on which to receive zmq messages.")
  return parser


if __name__ == "__main__":
  args = get_arg_parser().parse_args(sys.argv[1:])

  if args.ip_address != "127.0.0.1":
    os.environ["ZMQ"] = "1"
    messaging.context = messaging.Context()

  ui_thread(args.ip_address)