mirror of
https://github.com/sunnypilot/sunnypilot.git
synced 2026-02-18 18:53:55 +08:00
pre-process fonts for raylib (#36489)
* pre-process fonts for raylib
* it's fast!
* raylib processing
* build with scons
* padding
* happy ruff
* all exported
* cleanup
* more pad
This commit is contained in:
2
selfdrive/assets/.gitignore
vendored
2
selfdrive/assets/.gitignore
vendored
@@ -1,2 +1,4 @@
|
||||
*.cc
|
||||
fonts/*.fnt
|
||||
fonts/*.png
|
||||
translations_assets.qrc
|
||||
|
||||
124
selfdrive/assets/fonts/process.py
Executable file
124
selfdrive/assets/fonts/process.py
Executable file
@@ -0,0 +1,124 @@
|
||||
#!/usr/bin/env python3
|
||||
from pathlib import Path
|
||||
import json
|
||||
|
||||
import pyray as rl
|
||||
|
||||
# Directory layout: this script lives in selfdrive/assets/fonts/.
FONT_DIR = Path(__file__).resolve().parent
SELFDRIVE_DIR = FONT_DIR.parents[1]
TRANSLATIONS_DIR = SELFDRIVE_DIR / "ui" / "translations"
LANGUAGES_FILE = TRANSLATIONS_DIR / "languages.json"

FONT_SIZE = 200     # pixel size glyphs are rasterized at for the atlas
GLYPH_PADDING = 6   # padding (px) raylib packs around each glyph
# UI symbols that appear in neither printable ASCII nor the .po files.
EXTRA_CHARS = "–‑✓×°§•€£¥"
# Language codes whose translations are rendered with unifont instead of Inter.
UNIFONT_LANGUAGES = {"ar", "th", "zh-CHT", "zh-CHS", "ko", "ja"}
|
||||
|
||||
|
||||
def _languages() -> dict:
  """Load the language-name -> locale-code mapping from languages.json.

  Returns an empty dict when the translations file is missing, so font
  processing still works in a checkout without translations.
  """
  # EAFP: try to open directly instead of exists()-then-open, which is
  # racy and costs an extra stat().
  try:
    with LANGUAGES_FILE.open(encoding="utf-8") as f:
      return json.load(f)
  except FileNotFoundError:
    return {}
|
||||
|
||||
|
||||
def _char_sets():
  """Build the sorted codepoint tuples for the base (Inter) and unifont atlases.

  Base covers printable ASCII, EXTRA_CHARS and every character used in the
  translations of non-unifont languages; unifont starts from the same seed and
  additionally covers the language display names and the translations of the
  UNIFONT_LANGUAGES.
  """
  ascii_printable = {chr(cp) for cp in range(32, 127)}
  base = ascii_printable | set(EXTRA_CHARS)
  unifont = set(base)

  for name, code in _languages().items():
    # Language display names are rendered in the picker with unifont.
    unifont |= set(name)
    po_file = TRANSLATIONS_DIR / f"app_{code}.po"
    try:
      translated = po_file.read_text(encoding="utf-8")
    except FileNotFoundError:
      continue  # no translation checked in for this language
    target = unifont if code in UNIFONT_LANGUAGES else base
    target |= set(translated)

  return tuple(sorted(map(ord, base))), tuple(sorted(map(ord, unifont)))
|
||||
|
||||
|
||||
def _glyph_metrics(glyphs, rects, codepoints):
|
||||
entries = []
|
||||
min_offset_y, max_extent = None, 0
|
||||
for idx, codepoint in enumerate(codepoints):
|
||||
glyph = glyphs[idx]
|
||||
rect = rects[idx]
|
||||
width = int(round(rect.width))
|
||||
height = int(round(rect.height))
|
||||
offset_y = int(round(glyph.offsetY))
|
||||
min_offset_y = offset_y if min_offset_y is None else min(min_offset_y, offset_y)
|
||||
max_extent = max(max_extent, offset_y + height)
|
||||
entries.append({
|
||||
"id": codepoint,
|
||||
"x": int(round(rect.x)),
|
||||
"y": int(round(rect.y)),
|
||||
"width": width,
|
||||
"height": height,
|
||||
"xoffset": int(round(glyph.offsetX)),
|
||||
"yoffset": offset_y,
|
||||
"xadvance": int(round(glyph.advanceX)),
|
||||
})
|
||||
|
||||
if min_offset_y is None:
|
||||
raise RuntimeError("No glyphs were generated")
|
||||
|
||||
line_height = int(round(max_extent - min_offset_y))
|
||||
base = int(round(max_extent))
|
||||
return entries, line_height, base
|
||||
|
||||
|
||||
def _write_bmfont(path: Path, face: str, atlas_name: str, line_height: int, base: int, atlas_size, entries):
  """Serialize glyph metrics into a text-format BMFont (.fnt) descriptor at path."""
  char_fmt = ("char id={id:<4} x={x:<5} y={y:<5} width={width:<5} height={height:<5} "
              "xoffset={xoffset:<5} yoffset={yoffset:<5} xadvance={xadvance:<5} page=0 chnl=15")

  # Header: negative size marks "match char height" per the BMFont convention.
  header = [
    f'info face="{face}" size=-{FONT_SIZE} bold=0 italic=0 charset="" unicode=1 stretchH=100 smooth=0 aa=1 padding=0,0,0,0 spacing=0,0 outline=0',
    f"common lineHeight={line_height} base={base} scaleW={atlas_size[0]} scaleH={atlas_size[1]} pages=1 packed=0 alphaChnl=0 redChnl=4 greenChnl=4 blueChnl=4",
    f'page id=0 file="{atlas_name}"',
    f"chars count={len(entries)}",
  ]
  body = [char_fmt.format(**entry) for entry in entries]
  path.write_text("\n".join(header + body) + "\n")
|
||||
|
||||
|
||||
def _process_font(font_path: Path, codepoints: tuple[int, ...]):
  """Rasterize one font file into a .png atlas plus a .fnt BMFont descriptor.

  Raises RuntimeError when raylib cannot load the font data, produces an
  empty atlas, or fails to export the atlas image.
  """
  print(f"Processing {font_path.name}...")
  raw = font_path.read_bytes()

  # Hand the raw font bytes and the requested codepoints to raylib via cffi.
  font_buf = rl.ffi.new("unsigned char[]", raw)
  codepoint_buf = rl.ffi.new("int[]", codepoints)
  glyphs = rl.load_font_data(
    rl.ffi.cast("unsigned char *", font_buf),
    len(raw),
    FONT_SIZE,
    rl.ffi.cast("int *", codepoint_buf),
    len(codepoints),
    rl.FontType.FONT_DEFAULT,
  )
  if glyphs == rl.ffi.NULL:
    raise RuntimeError("raylib failed to load font data")

  # Pack all glyphs into one atlas; the per-glyph rectangles come back
  # through the Rectangle** out-parameter.
  rects_out = rl.ffi.new("Rectangle **")
  image = rl.gen_image_font_atlas(glyphs, rects_out, len(codepoints), FONT_SIZE, GLYPH_PADDING, 0)
  if image.width == 0 or image.height == 0:
    raise RuntimeError("raylib returned an empty atlas")

  entries, line_height, base = _glyph_metrics(glyphs, rects_out[0], codepoints)

  atlas_name = f"{font_path.stem}.png"
  if not rl.export_image(image, (FONT_DIR / atlas_name).as_posix()):
    raise RuntimeError("Failed to export atlas image")

  _write_bmfont(FONT_DIR / f"{font_path.stem}.fnt", font_path.stem, atlas_name,
                line_height, base, (image.width, image.height), entries)
|
||||
|
||||
|
||||
def main():
  """Bake every .ttf/.otf in FONT_DIR (except emoji fonts) into .fnt/.png pairs."""
  base_cp, unifont_cp = _char_sets()

  font_paths = sorted(FONT_DIR.glob("*.ttf")) + sorted(FONT_DIR.glob("*.otf"))
  for path in font_paths:
    if "emoji" in path.name.lower():
      continue  # color emoji fonts aren't baked into monochrome atlases

    # unifont carries the extended CJK/Arabic/Thai set; all others use the base set.
    codepoints = unifont_cp if path.stem.lower().startswith("unifont") else base_cp
    _process_font(path, codepoints)
  return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
  # Propagate main()'s return value as the process exit status.
  raise SystemExit(main())
|
||||
@@ -1,7 +1,23 @@
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
from pathlib import Path
|
||||
Import('env', 'arch', 'common')
|
||||
|
||||
# Pre-process fonts for raylib: bake each source font into a BMFont
# descriptor (.fnt) plus its atlas image (.png) via process.py.
generator = File("#selfdrive/assets/fonts/process.py")
source_files = Glob("#selfdrive/assets/fonts/*.ttf") + Glob("#selfdrive/assets/fonts/*.otf")
output_files = [
  # splitext (not split('.')) so a dot anywhere else in the absolute path
  # (e.g. a dotted directory name) doesn't truncate the target path.
  (os.path.splitext(f.abspath)[0] + ".fnt", os.path.splitext(f.abspath)[0] + ".png")
  for f in source_files
  if "NotoColor" not in f.name  # color emoji font is skipped by process.py too
]
env.Command(
  target=output_files,
  source=[generator, source_files],
  action=f"python3 {generator}",
)
|
||||
|
||||
# compile gettext .po -> .mo translations
|
||||
with open(File("translations/languages.json").abspath) as f:
|
||||
languages = json.loads(f.read())
|
||||
|
||||
@@ -15,7 +15,7 @@ from typing import NamedTuple
|
||||
from importlib.resources import as_file, files
|
||||
from openpilot.common.swaglog import cloudlog
|
||||
from openpilot.system.hardware import HARDWARE, PC, TICI
|
||||
from openpilot.system.ui.lib.multilang import TRANSLATIONS_DIR, UNIFONT_LANGUAGES, multilang
|
||||
from openpilot.system.ui.lib.multilang import multilang
|
||||
from openpilot.common.realtime import Ratekeeper
|
||||
|
||||
_DEFAULT_FPS = int(os.getenv("FPS", 20 if TICI else 60))
|
||||
@@ -43,16 +43,16 @@ FONT_DIR = ASSETS_DIR.joinpath("fonts")
|
||||
|
||||
|
||||
class FontWeight(StrEnum):
|
||||
THIN = "Inter-Thin.ttf"
|
||||
EXTRA_LIGHT = "Inter-ExtraLight.ttf"
|
||||
LIGHT = "Inter-Light.ttf"
|
||||
NORMAL = "Inter-Regular.ttf"
|
||||
MEDIUM = "Inter-Medium.ttf"
|
||||
SEMI_BOLD = "Inter-SemiBold.ttf"
|
||||
BOLD = "Inter-Bold.ttf"
|
||||
EXTRA_BOLD = "Inter-ExtraBold.ttf"
|
||||
BLACK = "Inter-Black.ttf"
|
||||
UNIFONT = "unifont.otf"
|
||||
THIN = "Inter-Thin.fnt"
|
||||
EXTRA_LIGHT = "Inter-ExtraLight.fnt"
|
||||
LIGHT = "Inter-Light.fnt"
|
||||
NORMAL = "Inter-Regular.fnt"
|
||||
MEDIUM = "Inter-Medium.fnt"
|
||||
SEMI_BOLD = "Inter-SemiBold.fnt"
|
||||
BOLD = "Inter-Bold.fnt"
|
||||
EXTRA_BOLD = "Inter-ExtraBold.fnt"
|
||||
BLACK = "Inter-Black.fnt"
|
||||
UNIFONT = "unifont.fnt"
|
||||
|
||||
|
||||
def font_fallback(font: rl.Font) -> rl.Font:
|
||||
@@ -392,51 +392,11 @@ class GuiApplication:
|
||||
return self._height
|
||||
|
||||
def _load_fonts(self):
  """Load the pre-baked BMFont for every FontWeight and select the default.

  Fonts are pre-processed offline (selfdrive/assets/fonts/process.py), so the
  runtime keyboard/translation codepoint collection and load_font_ex glyph
  rasterization are gone — rl.load_font reads each .fnt atlas directly.
  NOTE(review): reconstructed from a diff view that interleaved the removed
  and added lines; confirm against the upstream commit.
  """
  for font_weight_file in FontWeight:
    with as_file(FONT_DIR.joinpath(font_weight_file)) as fspath:
      font = rl.load_font(fspath.as_posix())
      # Bilinear filtering keeps scaled text smooth.
      rl.set_texture_filter(font.texture, rl.TextureFilter.TEXTURE_FILTER_BILINEAR)
      self._fonts[font_weight_file] = font

  rl.gui_set_font(self._fonts[FontWeight.NORMAL])
|
||||
|
||||
def _set_styles(self):
|
||||
|
||||
Reference in New Issue
Block a user