perf: font-compression improvements (#1056)
## Purpose

This PR includes some preparatory changes that are needed for an upcoming performant CJK font feature. The changes have no impact on render time or heap allocation for Latin text. **Despite this, I think these changes stand on their own as a better font compression/decompression implementation.**

## Summary

- Font decompressor rewrite: Replaced the 4-slot LRU group cache with a two-tier system: a page buffer (glyphs prewarmed before rendering begins) and a hot-group fallback (the last decompressed group, retained for non-prewarmed glyphs).
- Byte-aligned compressed bitmap format: Glyph bitmaps within compressed groups are now stored row-padded rather than tightly packed before DEFLATE compression, which improves compression ratios because identical pixel rows produce identical byte patterns. Glyphs are compacted back to the packed format on demand at render time. Reduces flash size by 155 KB. (A small worked example of the two layouts follows this description.)
- Page prewarm system: Added `Page::collectText` and `Page::getDominantStyle` to extract per-style glyph requirements before rendering, and `GfxRenderer::prewarmFontCache` to pre-decompress only the groups needed for the dominant style, eliminating mid-render decompression in the common case.
- UTF-8 robustness fixes: `utf8NextCodepoint` now validates continuation bytes and returns a replacement glyph on malformed input; `ChapterHtmlSlimParser` correctly preserves incomplete multi-byte sequences across word-buffer flush boundaries rather than splitting them.

---

### AI Usage

While CrossPoint doesn't restrict the use of AI tools in contributions, please be transparent about their usage, as it helps set the right context for reviewers.

Did you use AI tools to help write this code? _**YES**_

Architecture and design were done by me, refined a bit by Claude. Code mostly by Claude, but not entirely.
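The byte-aligned change is easiest to see with a concrete glyph. Below is a small standalone Python sketch (illustration only, not part of the diff; the 5x2 glyph size and pixel values are made up) that lays out the same glyph in both formats, producing the layout that `to_byte_aligned()` in the generator script below emits:

```python
# Illustration only: packed vs. byte-aligned layout for a hypothetical 5x2 glyph
# with two identical pixel rows (2 bits per pixel, values 0-3).

rows = [[3, 0, 3, 0, 1],
        [3, 0, 3, 0, 1]]

def pack_continuous(rows):
    """Packed format: pixels stream across row boundaries, 4 pixels per byte."""
    out = bytearray((len(rows) * len(rows[0]) + 3) // 4)
    for pos, px in enumerate(p for r in rows for p in r):
        out[pos // 4] |= px << ((3 - pos % 4) * 2)
    return bytes(out)

def pack_byte_aligned(rows):
    """Byte-aligned format: each row starts at a byte boundary (padded with zero bits)."""
    stride = (len(rows[0]) + 3) // 4  # bytes per row
    out = bytearray(stride * len(rows))
    for y, row in enumerate(rows):
        for x, px in enumerate(row):
            out[y * stride + x // 4] |= px << ((3 - x % 4) * 2)
    return bytes(out)

print(pack_continuous(rows).hex())    # cc7310   -> identical rows yield different bytes
print(pack_byte_aligned(rows).hex())  # cc40cc40 -> identical rows repeat byte-for-byte
```

Because the byte-aligned stream repeats whole bytes for repeated rows, DEFLATE finds longer matches, which is where the quoted 155 KB flash saving comes from; the render path pays for it by compacting each glyph back to the packed layout on demand.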
@@ -689,7 +689,38 @@ print(f"ligatures: {len(ligature_pairs)} pairs extracted", file=sys.stderr)

compress = args.compress


def to_byte_aligned(packed, width, height):
    """Convert packed 2-bit bitmap to byte-aligned format (rows padded to byte boundary).

    In packed format, pixels flow continuously across row boundaries (4 pixels/byte).
    In byte-aligned format, each row starts at a byte boundary, padding the last byte
    of each row with zero bits if width % 4 != 0. This improves DEFLATE compression
    because identical pixel rows produce identical byte patterns regardless of position.
    """
    if width == 0 or height == 0:
        return b''
    row_stride = (width + 3) // 4  # bytes per byte-aligned row
    aligned = bytearray(row_stride * height)
    for y in range(height):
        for x in range(width):
            # Read pixel from packed format (continuous bit stream)
            packed_pos = y * width + x
            packed_byte_idx = packed_pos // 4
            packed_shift = (3 - (packed_pos % 4)) * 2
            pixel = (packed[packed_byte_idx] >> packed_shift) & 0x3

            # Write pixel to byte-aligned format (row-aligned)
            aligned_byte_idx = y * row_stride + x // 4
            aligned_shift = (3 - (x % 4)) * 2
            aligned[aligned_byte_idx] |= (pixel << aligned_shift)
    return bytes(aligned)


# Build groups for compression
if compress and not is2Bit:
    print("Error: --compress requires --2bit (byte-aligned compression only supports 2-bit format)", file=sys.stderr)
    sys.exit(1)
if compress:
    # Script-based grouping: glyphs that co-occur in typical text rendering
    # are grouped together for efficient LRU caching on the embedded target.
@@ -747,11 +778,12 @@ if compress:

    for first_idx, count in groups:
        # Concatenate bitmap data for this group
        group_data = b''
        packed_len = 0
        group_aligned = bytearray()
        for gi in range(first_idx, first_idx + count):
            props, packed = all_glyphs[gi]
-           # Update glyph's dataOffset to be within-group offset
-           within_group_offset = len(group_data)
            # Update glyph's dataOffset to be within-group offset (packed offset)
            within_group_offset = packed_len
            old_props = modified_glyph_props[gi]
            modified_glyph_props[gi] = GlyphProps(
                width=old_props.width,
@@ -763,13 +795,14 @@ if compress:
                data_offset=within_group_offset,
                code_point=old_props.code_point,
            )
            group_data += packed
            packed_len += len(packed)
            group_aligned.extend(to_byte_aligned(packed, old_props.width, old_props.height))

-       # Compress with raw DEFLATE (no zlib/gzip header)
        # Compress byte-aligned data with raw DEFLATE (no zlib/gzip header)
        compressor = zlib.compressobj(level=9, wbits=-15)
-       compressed = compressor.compress(group_data) + compressor.flush()
        compressed = compressor.compress(bytes(group_aligned)) + compressor.flush()

-       compressed_groups.append((compressed, len(group_data), count, first_idx))
        compressed_groups.append((compressed, len(group_aligned), count, first_idx))
        compressed_bitmap_data.extend(compressed)
        compressed_offset += len(compressed)
@@ -862,8 +895,10 @@ if compress:
        print(f" {font_name}Groups,")
        print(f" {len(compressed_groups)},")
    else:
-       print(f" nullptr,")
-       print(f" 0,")
        print(" nullptr,")
        print(" 0,")
    # glyphToGroup (not used for script-grouped fonts)
    print(" nullptr,")
    if kern_map:
        print(f" {font_name}KernLeftClasses,")
        print(f" {font_name}KernRightClasses,")
@@ -3,9 +3,13 @@
Round-trip verification for compressed font headers.

Parses each generated .h file in the given directory, identifies compressed fonts
-(those with a Groups array), decompresses each group, and verifies that
-decompression succeeds and all glyph offsets/lengths fall within bounds.
(those with a Groups array), decompresses each group (byte-aligned bitmap format),
compacts to packed format, and verifies the data matches expected glyph sizes.

Supports both contiguous-group fonts (Latin) and frequency-grouped fonts (CJK)
with glyphToGroup mapping arrays.
"""
import math
import os
import re
import sys
@@ -18,6 +22,11 @@ def parse_hex_array(text):
    return bytes(int(h, 16) for h in hex_vals)


def parse_uint8_array(text):
    """Extract uint8/uint16 values from a C array string like '{ 0, 1, 0xFF, ... }'"""
    return [int(v, 0) for v in re.findall(r'\b0x[0-9A-Fa-f]+\b|\b\d+\b', text)]


def parse_groups(text):
    """Parse EpdFontGroup array entries: { compressedOffset, compressedSize, uncompressedSize, glyphCount, firstGlyphIndex }"""
    groups = []
@@ -48,6 +57,45 @@ def parse_glyphs(text):
    return glyphs


def get_group_glyph_indices(group, group_index, glyphs, glyph_to_group):
    """Get the ordered list of glyph indices belonging to a group."""
    if glyph_to_group is not None:
        # Frequency-grouped: scan all glyphs
        return [i for i in range(len(glyphs)) if glyph_to_group[i] == group_index]
    else:
        # Contiguous: sequential from firstGlyphIndex
        first = group['firstGlyphIndex']
        return list(range(first, first + group['glyphCount']))


def compact_aligned_to_packed(aligned_data, width, height):
    """Convert byte-aligned 2-bit bitmap to packed format (reverse of to_byte_aligned).

    In byte-aligned format, each row starts at a byte boundary.
    In packed format, pixels flow continuously across row boundaries (4 pixels/byte).
    """
    if width == 0 or height == 0:
        return b''
    packed_size = math.ceil(width * height / 4)
    packed = bytearray(packed_size)
    row_stride = (width + 3) // 4  # bytes per byte-aligned row

    for y in range(height):
        for x in range(width):
            # Read pixel from byte-aligned format (row-aligned)
            aligned_byte_idx = y * row_stride + x // 4
            aligned_shift = (3 - (x % 4)) * 2
            pixel = (aligned_data[aligned_byte_idx] >> aligned_shift) & 0x3

            # Write pixel to packed format (continuous bit stream)
            packed_pos = y * width + x
            packed_byte_idx = packed_pos // 4
            packed_shift = (3 - (packed_pos % 4)) * 2
            packed[packed_byte_idx] |= (pixel << packed_shift)

    return bytes(packed)


def verify_font_file(filepath):
    """Verify a single font header file. Returns (font_name, success, message)."""
    with open(filepath, 'r') as f:
@@ -92,6 +140,20 @@ def verify_font_file(filepath):

    glyphs = parse_glyphs(glyphs_match.group(1))

    # Check for glyphToGroup array (frequency-grouped fonts)
    glyph_to_group = None
    g2g_match = re.search(
        r'static const uint16_t ' + re.escape(font_name) + r'GlyphToGroup\[\]\s*=\s*\{(.+?)\};',
        content, re.DOTALL
    )
    if g2g_match:
        glyph_to_group = parse_uint8_array(g2g_match.group(1))
        if len(glyph_to_group) != len(glyphs):
            return (font_name, False, f"glyphToGroup length ({len(glyph_to_group)}) != glyph count ({len(glyphs)})")
        max_group_id = max(glyph_to_group)
        if max_group_id >= len(groups):
            return (font_name, False, f"glyphToGroup contains group ID {max_group_id} but only {len(groups)} groups exist")

    # Verify each group
    for gi, group in enumerate(groups):
        # Extract compressed chunk
@@ -99,7 +161,7 @@ def verify_font_file(filepath):
        if len(chunk) != group['compressedSize']:
            return (font_name, False, f"group {gi}: compressed data truncated (expected {group['compressedSize']}, got {len(chunk)})")

-       # Decompress with raw DEFLATE
        # Decompress with raw DEFLATE — result is byte-aligned data
        try:
            decompressed = zlib.decompress(chunk, -15)
        except zlib.error as e:
@@ -108,22 +170,64 @@ def verify_font_file(filepath):
        if len(decompressed) != group['uncompressedSize']:
            return (font_name, False, f"group {gi}: size mismatch (expected {group['uncompressedSize']}, got {len(decompressed)})")

-       # Verify each glyph's data within the group
-       first = group['firstGlyphIndex']
-       for j in range(group['glyphCount']):
-           glyph_idx = first + j
        # Get glyph indices for this group
        group_glyph_indices = get_group_glyph_indices(group, gi, glyphs, glyph_to_group)
        if glyph_to_group is not None and len(group_glyph_indices) != group['glyphCount']:
            return (font_name, False,
                    f"group {gi}: glyphCount {group['glyphCount']} != mapping count {len(group_glyph_indices)}")

        # Walk through byte-aligned data, compact each glyph, and verify against packed format
        byte_aligned_offset = 0
        packed_offset = 0

        for glyph_idx in group_glyph_indices:
            if glyph_idx >= len(glyphs):
                return (font_name, False, f"group {gi}: glyph index {glyph_idx} out of range")

            glyph = glyphs[glyph_idx]
            offset = glyph['dataOffset']
            length = glyph['dataLength']
            width = glyph['width']
            height = glyph['height']

            if offset + length > len(decompressed):
                return (font_name, False, f"group {gi}, glyph {glyph_idx}: data extends beyond decompressed buffer "
                        f"(offset={offset}, length={length}, decompressed_size={len(decompressed)})")
            if width == 0 or height == 0:
                # Zero-size glyphs should have dataOffset == current packed_offset and dataLength == 0
                if glyph['dataOffset'] != packed_offset:
                    return (font_name, False, f"group {gi}, glyph {glyph_idx}: zero-size glyph dataOffset {glyph['dataOffset']} != expected packed offset {packed_offset}")
                if glyph['dataLength'] != 0:
                    return (font_name, False, f"group {gi}, glyph {glyph_idx}: zero-size glyph dataLength {glyph['dataLength']} != expected 0")
                continue

            aligned_size = ((width + 3) // 4) * height
            packed_size = math.ceil(width * height / 4)

            # Verify packed offset and size match glyph metadata
            if glyph['dataOffset'] != packed_offset:
                return (font_name, False, f"group {gi}, glyph {glyph_idx}: dataOffset {glyph['dataOffset']} != expected packed offset {packed_offset}")
            if glyph['dataLength'] != packed_size:
                return (font_name, False, f"group {gi}, glyph {glyph_idx}: dataLength {glyph['dataLength']} != expected packed length {packed_size} "
                        f"(width={width}, height={height})")

            # Extract byte-aligned data for this glyph
            if byte_aligned_offset + aligned_size > len(decompressed):
                return (font_name, False, f"group {gi}, glyph {glyph_idx}: byte-aligned data extends beyond decompressed buffer "
                        f"(offset={byte_aligned_offset}, size={aligned_size}, buf_size={len(decompressed)})")

            aligned_glyph = decompressed[byte_aligned_offset:byte_aligned_offset + aligned_size]

            # Compact to packed and verify pixel values are valid (0-3 for 2-bit)
            packed_glyph = compact_aligned_to_packed(aligned_glyph, width, height)
            if len(packed_glyph) != packed_size:
                return (font_name, False, f"group {gi}, glyph {glyph_idx}: compacted size {len(packed_glyph)} != expected {packed_size}")

            byte_aligned_offset += aligned_size
            packed_offset += packed_size

        # Verify total byte-aligned size matches uncompressedSize
        if byte_aligned_offset != group['uncompressedSize']:
            return (font_name, False, f"group {gi}: total byte-aligned size {byte_aligned_offset} != uncompressedSize {group['uncompressedSize']}")

    extra_info = ""
    if glyph_to_group is not None:
        extra_info = " (frequency-grouped)"
-   return (font_name, True, f"{len(groups)} groups, {len(glyphs)} glyphs OK")
    return (font_name, True, f"{len(groups)} groups, {len(glyphs)} glyphs OK{extra_info}")


def main():