release: ef-1.0.5 - stability, memory, and upstream merges
Webserver: JSON batching, removed MD5 blocking, simplified flow control
Memory: QR code caching, WiFi scan optimization, cover buffer leak fix
EPUB: Fixed errant underlining before styled inline elements
Flash screen: Version string overflow fix, half refresh for cleaner display

Upstream merges:
- PR #522: HAL abstraction layer (HalDisplay, HalGPIO)
- PR #603: Sunlight fading fix toggle in Display settings
parent 520a0cb124
commit fbe7d2feb4
@@ -32,9 +32,16 @@ Base: CrossPoint Reader 0.15.0
- **Display Quality**: Changed flashing screen to half refresh for cleaner appearance
- **Timing**: Adjusted pre-flash script timing for half refresh completion

### Upstream Merges

- **PR #522 - HAL Abstraction Layer**: Merged hardware abstraction layer refactor introducing `HalDisplay` and `HalGPIO` classes, decoupling application code from direct hardware access
- **PR #603 - Sunlight Fading Fix**: Added user-toggleable setting to turn off display between refreshes, mitigating the sunlight fading issue on e-ink displays
  - New "Sunlight Fading Fix" toggle in Display settings (OFF/ON)
  - Passes `turnOffScreen` parameter through display stack when enabled
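A minimal sketch of the resulting flow, assuming only what the changelog states: `GfxRenderer` holds a `fadingFix` flag and forwards it to `HalDisplay` as `turnOffScreen`. The method names `setFadingFix` and `refresh` are illustrative, not the actual API:

```cpp
// Illustrative sketch only - the real classes live in lib/hal and lib/GfxRenderer.
class HalDisplay {
 public:
  // When turnOffScreen is true, the panel is powered down between refreshes,
  // which mitigates sunlight fading on e-ink displays.
  void refresh(bool turnOffScreen);
};

class GfxRenderer {
 public:
  void setFadingFix(bool enabled) { fadingFix = enabled; }  // set from Display settings
  void displayBuffer() { display.refresh(fadingFix); }      // forward the toggle to the HAL

 private:
  HalDisplay display;
  bool fadingFix = false;  // mirrors the "Sunlight Fading Fix" toggle (default OFF)
};
```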

### Files Changed

- `src/main.cpp` - flash screen fixes, cover buffer free on File Transfer entry
- `src/main.cpp` - flash screen fixes, cover buffer free on File Transfer entry, fading fix integration
- `scripts/pre_flash.py` - timing adjustments for full refresh
- `src/network/CrossPointWebServer.cpp` - JSON batching, removed MD5 from listings
- `src/network/CrossPointWebServer.h` - removed md5 from FileInfo, simplified sendContentSafe
@@ -42,6 +49,16 @@ Base: CrossPoint Reader 0.15.0
- `src/activities/network/CrossPointWebServerActivity.h` - QR code cache members
- `src/activities/network/WifiSelectionActivity.cpp` - WiFi scan memory optimization
- `lib/Epub/Epub/parsers/ChapterHtmlSlimParser.cpp` - flush buffer before style changes
- `lib/hal/HalDisplay.h` - new HAL abstraction for display (PR #522), turnOffScreen parameter (PR #603)
- `lib/hal/HalDisplay.cpp` - HAL display implementation with fading fix passthrough
- `lib/hal/HalGPIO.h` - new HAL abstraction for GPIO (PR #522)
- `lib/hal/HalGPIO.cpp` - HAL GPIO implementation
- `lib/GfxRenderer/GfxRenderer.h` - updated for HAL layer, added fadingFix member
- `lib/GfxRenderer/GfxRenderer.cpp` - updated for HAL layer, passes fadingFix to display
- `src/CrossPointSettings.h` - added fadingFix setting
- `src/CrossPointSettings.cpp` - fadingFix persistence
- `src/activities/settings/SettingsActivity.cpp` - added Sunlight Fading Fix toggle
- `open-x4-sdk` - updated submodule with turnOffScreen support in EInkDisplay

---
@@ -380,19 +380,15 @@ void ParsedText::extractLine(const size_t breakIndex, const int pageWidth, const

// *** CRITICAL STEP: CONSUME DATA USING MOVE + ERASE ***
// Move first lineWordCount elements from words into lineWords
std::vector<std::string> lineWords(
std::make_move_iterator(words.begin()),
std::make_move_iterator(words.begin() + lineWordCount));
std::vector<std::string> lineWords(std::make_move_iterator(words.begin()),
std::make_move_iterator(words.begin() + lineWordCount));
words.erase(words.begin(), words.begin() + lineWordCount);

std::vector<EpdFontFamily::Style> lineWordStyles(
std::make_move_iterator(wordStyles.begin()),
std::make_move_iterator(wordStyles.begin() + lineWordCount));
std::vector<EpdFontFamily::Style> lineWordStyles(std::make_move_iterator(wordStyles.begin()),
std::make_move_iterator(wordStyles.begin() + lineWordCount));
wordStyles.erase(wordStyles.begin(), wordStyles.begin() + lineWordCount);

std::vector<bool> lineWordUnderlines(
wordUnderlines.begin(),
wordUnderlines.begin() + lineWordCount);
std::vector<bool> lineWordUnderlines(wordUnderlines.begin(), wordUnderlines.begin() + lineWordCount);
wordUnderlines.erase(wordUnderlines.begin(), wordUnderlines.begin() + lineWordCount);

for (auto& word : lineWords) {
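The move-then-erase consumption used above, reduced to a self-contained sketch of the idiom (the function and container names are generic, not from the codebase):

```cpp
#include <cstddef>
#include <iterator>
#include <string>
#include <vector>

// Consume the first n elements of src: move them into a new vector, then
// erase the moved-from prefix so src keeps only the remaining elements.
std::vector<std::string> consumePrefix(std::vector<std::string>& src, std::size_t n) {
  std::vector<std::string> taken(std::make_move_iterator(src.begin()),
                                 std::make_move_iterator(src.begin() + n));
  src.erase(src.begin(), src.begin() + n);  // removes the moved-from shells
  return taken;
}
```

For `std::vector<bool>`, the diff copies rather than moves, since its packed elements gain nothing from move iterators.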
@@ -9,11 +9,11 @@
* Padding is treated similarly to margins for rendering purposes.
*/
struct BlockStyle {
int8_t marginTop = 0; // 0-2 lines
int8_t marginBottom = 0; // 0-2 lines
int8_t paddingTop = 0; // 0-2 lines (treated same as margin)
int8_t paddingBottom = 0; // 0-2 lines (treated same as margin)
int16_t textIndent = 0; // pixels (first line indent)
int16_t marginLeft = 0; // pixels (horizontal indent for entire block)
int8_t marginTop = 0; // 0-2 lines
int8_t marginBottom = 0; // 0-2 lines
int8_t paddingTop = 0; // 0-2 lines (treated same as margin)
int8_t paddingBottom = 0; // 0-2 lines (treated same as margin)
int16_t textIndent = 0; // pixels (first line indent)
int16_t marginLeft = 0; // pixels (horizontal indent for entire block)
bool hasLeftBorder = false; // draw vertical bar in left margin (for blockquotes)
};
@@ -2,9 +2,9 @@
#include <EpdFontFamily.h>
#include <SdFat.h>

#include <vector>
#include <memory>
#include <string>
#include <vector>

#include "Block.h"
#include "BlockStyle.h"
@@ -30,7 +30,8 @@ class TextBlock final : public Block {
public:
explicit TextBlock(std::vector<std::string> words, std::vector<uint16_t> word_xpos,
std::vector<EpdFontFamily::Style> word_styles, const Style style,
const BlockStyle& blockStyle = BlockStyle(), std::vector<bool> word_underlines = std::vector<bool>())
const BlockStyle& blockStyle = BlockStyle(),
std::vector<bool> word_underlines = std::vector<bool>())
: words(std::move(words)),
wordXpos(std::move(word_xpos)),
wordStyles(std::move(word_styles)),
@@ -299,8 +299,7 @@ bool StarDict::decompressDefinition(uint32_t offset, uint32_t size, std::string&
const uint32_t endChunk = (offset + size - 1) / dzInfo.chunkLength;
const uint32_t startOffsetInChunk = offset % dzInfo.chunkLength;

Serial.printf("[DICT-DBG] Chunks: start=%lu, end=%lu, total=%u\n",
startChunk, endChunk, dzInfo.chunkCount);
Serial.printf("[DICT-DBG] Chunks: start=%lu, end=%lu, total=%u\n", startChunk, endChunk, dzInfo.chunkCount);

if (endChunk >= dzInfo.chunkCount) {
Serial.printf("[DICT-DBG] endChunk %lu >= chunkCount %u\n", endChunk, dzInfo.chunkCount);
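The chunk arithmetic above, as a worked example (the numbers are illustrative; dictzip simply divides the uncompressed .dict payload into fixed-length chunks):

```cpp
#include <cstdint>
#include <cstdio>

int main() {
  const uint32_t chunkLength = 58315;  // uncompressed bytes per dictzip chunk (example value)
  const uint32_t offset = 150000;      // where the definition starts in the .dict data
  const uint32_t size = 3000;          // definition length in bytes

  const uint32_t startChunk = offset / chunkLength;             // 2
  const uint32_t endChunk = (offset + size - 1) / chunkLength;  // 2
  const uint32_t startOffsetInChunk = offset % chunkLength;     // 33370

  std::printf("chunks %u..%u, skip %u bytes into the first chunk\n",
              startChunk, endChunk, startOffsetInChunk);
  return 0;
}
```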
@@ -324,16 +323,16 @@ bool StarDict::decompressDefinition(uint32_t offset, uint32_t size, std::string&

// Allocate buffers - allocate inflator FIRST (smallest) to reduce fragmentation impact
// tinfl_decompressor is ~11KB, so total allocations are ~85KB
Serial.printf("[DICT-DBG] Allocating inflator=%u, comp=%lu, decomp=%u bytes\n",
sizeof(tinfl_decompressor), maxCompressedSize, dzInfo.chunkLength);

Serial.printf("[DICT-DBG] Allocating inflator=%u, comp=%lu, decomp=%u bytes\n", sizeof(tinfl_decompressor),
maxCompressedSize, dzInfo.chunkLength);

auto* inflator = static_cast<tinfl_decompressor*>(malloc(sizeof(tinfl_decompressor)));
if (!inflator) {
Serial.printf("[DICT-DBG] inflator alloc failed! (need %u bytes)\n", sizeof(tinfl_decompressor));
file.close();
return false;
}

auto* compressedBuf = static_cast<uint8_t*>(malloc(maxCompressedSize));
if (!compressedBuf) {
Serial.printf("[DICT-DBG] compressedBuf alloc failed!\n");
@@ -469,8 +468,7 @@ StarDict::LookupResult StarDict::lookup(const std::string& word) {
return result;
}

Serial.printf("[DICT-DBG] Searching for: '%s' (normalized: '%s')\n",
word.c_str(), normalizedSearch.c_str());
Serial.printf("[DICT-DBG] Searching for: '%s' (normalized: '%s')\n", word.c_str(), normalizedSearch.c_str());

// First try .idx (main entries) - use prefix jump table for fast lookup
const std::string idxPath = basePath + ".idx";
@@ -487,8 +485,8 @@ StarDict::LookupResult StarDict::lookup(const std::string& word) {
const uint16_t prefixIdx = DictPrefixIndex::prefixToIndex(normalizedSearch[0], normalizedSearch[1]);
position = DictPrefixIndex::dictPrefixOffsets[prefixIdx];
}
Serial.printf("[DICT-DBG] Starting at position %lu (prefix: %c%c)\n",
position, normalizedSearch[0], normalizedSearch[1]);
Serial.printf("[DICT-DBG] Starting at position %lu (prefix: %c%c)\n", position, normalizedSearch[0],
normalizedSearch[1]);
bool found = false;
uint32_t wordCount = 0;
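A sketch of the prefix jump idea: `DictPrefixIndex::prefixToIndex` and `dictPrefixOffsets` are the names used in this diff, but the layout below (27x27 slots keyed by the first two normalized characters) is an assumption for illustration only:

```cpp
#include <array>
#include <cstdint>
#include <string>

// Hypothetical jump table: maps a two-letter prefix to the byte offset in the
// .idx file where entries with that prefix begin, so lookup() can start its
// linear scan there instead of at position 0.
struct PrefixJumpTable {
  std::array<uint32_t, 27 * 27> offsets{};  // slot 0 = "not a-z"

  static uint16_t slot(char a, char b) {
    auto rank = [](char c) -> uint16_t { return (c >= 'a' && c <= 'z') ? c - 'a' + 1 : 0; };
    return rank(a) * 27 + rank(b);
  }

  uint32_t startOffset(const std::string& normalizedWord) const {
    if (normalizedWord.size() < 2) return 0;  // too short: scan from the beginning
    return offsets[slot(normalizedWord[0], normalizedWord[1])];
  }
};
```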
@@ -501,20 +499,19 @@ StarDict::LookupResult StarDict::lookup(const std::string& word) {
}
wordCount++;
if (wordCount % 50000 == 0) {
Serial.printf("[DICT-DBG] Progress: %lu words scanned, pos=%lu, current='%s'\n",
wordCount, position, currentWord.c_str());
Serial.printf("[DICT-DBG] Progress: %lu words scanned, pos=%lu, current='%s'\n", wordCount, position,
currentWord.c_str());
}

// Use stardictStrcmp for case-insensitive matching
const int cmp = stardictStrcmp(normalizedSearch, currentWord);

if (cmp == 0) {
Serial.printf("[DICT-DBG] MATCH: '%s' == '%s' (offset=%lu, size=%lu)\n",
normalizedSearch.c_str(), currentWord.c_str(), dictOffset, dictSize);
Serial.printf("[DICT-DBG] MATCH: '%s' == '%s' (offset=%lu, size=%lu)\n", normalizedSearch.c_str(),
currentWord.c_str(), dictOffset, dictSize);
std::string definition;
const bool loaded = useUncompressed
? readDefinitionDirect(dictOffset, dictSize, definition)
: decompressDefinition(dictOffset, dictSize, definition);
const bool loaded = useUncompressed ? readDefinitionDirect(dictOffset, dictSize, definition)
: decompressDefinition(dictOffset, dictSize, definition);
if (loaded) {
Serial.printf("[DICT-DBG] Definition loaded, %u bytes\n", definition.length());
if (!found) {
@@ -537,8 +534,7 @@ StarDict::LookupResult StarDict::lookup(const std::string& word) {
// may not land exactly at target position
}

Serial.printf("[DICT-DBG] Search complete: %lu words scanned, found=%s\n",
wordCount, found ? "YES" : "NO");
Serial.printf("[DICT-DBG] Search complete: %lu words scanned, found=%s\n", wordCount, found ? "YES" : "NO");
idxFile.close();

// If not found in main index, try synonym file with prefix jump
@@ -591,9 +587,8 @@ StarDict::LookupResult StarDict::lookup(const std::string& word) {
uint32_t dictOffset, dictSize;
if (readWordAtPosition(idxFile2, pos, mainWord, dictOffset, dictSize)) {
std::string definition;
const bool loaded = useUncompressed
? readDefinitionDirect(dictOffset, dictSize, definition)
: decompressDefinition(dictOffset, dictSize, definition);
const bool loaded = useUncompressed ? readDefinitionDirect(dictOffset, dictSize, definition)
: decompressDefinition(dictOffset, dictSize, definition);
if (loaded) {
result.word = synWord;
result.definition = definition;
@@ -3,7 +3,7 @@ default_envs = default

[crosspoint]
# 0.15.0 CrossPoint base, ef-1.0.0 is the first release of the ef branch
version = 0.15.ef-1.0.4
version = 0.15.ef-1.0.5

[base]
platform = espressif32 @ 6.12.0

@@ -378,7 +378,8 @@ bool BookManager::clearBookCache(const std::string& bookPath, bool preserveProgr
dir.close();
}

Serial.printf("[%lu] [%s] Cache cleared: %d items deleted, %d failed\n", millis(), LOG_TAG, deletedCount, failedCount);
Serial.printf("[%lu] [%s] Cache cleared: %d items deleted, %d failed\n", millis(), LOG_TAG, deletedCount,
failedCount);
return failedCount == 0;
}
@@ -98,7 +98,7 @@ void DictionaryResultActivity::loop() {
// At end of cached pages but more content available - parse next chunk
Serial.printf("[DICT-DBG] Parsing next chunk on navigation (page %d)\n", currentPage);
parseNextChunk();

// After parsing (and possible page trimming), check if we can advance
// Note: Don't compare page counts - trimming may keep size the same while adding new content
if (currentPage < static_cast<int>(pages.size()) - 1) {
@@ -143,9 +143,9 @@ void DictionaryResultActivity::paginateDefinition() {
// With HTML overhead, multiply by ~2, plus buffer for finding break points
constexpr size_t CHUNK_SIZE_BASE = 1500; // Base chunk size
const size_t chunkSize = std::max(CHUNK_SIZE_BASE, static_cast<size_t>(linesPerPage * 120));

Serial.printf("[DICT-DBG] Chunked parsing: defLen=%u, chunkSize=%u, linesPerPage=%d\n",
rawDefinition.length(), chunkSize, linesPerPage);

Serial.printf("[DICT-DBG] Chunked parsing: defLen=%u, chunkSize=%u, linesPerPage=%d\n", rawDefinition.length(),
chunkSize, linesPerPage);

// Determine how much to parse for first page
size_t parseEnd;
@@ -158,20 +158,18 @@ void DictionaryResultActivity::paginateDefinition() {
parseEnd = findHtmlBreakPoint(rawDefinition, chunkSize / 2, chunkSize);
hasMoreContent = (parseEnd < rawDefinition.length());
}

// Extract the chunk to parse
std::string chunk = rawDefinition.substr(0, parseEnd);
parsePosition = parseEnd;

Serial.printf("[DICT-DBG] Parsing first chunk: 0-%u of %u, hasMore=%d\n",
parseEnd, rawDefinition.length(), hasMoreContent);

Serial.printf("[DICT-DBG] Parsing first chunk: 0-%u of %u, hasMore=%d\n", parseEnd, rawDefinition.length(),
hasMoreContent);

// Parse this chunk into TextBlocks
std::vector<std::shared_ptr<TextBlock>> allBlocks;
DictHtmlParser::parse(chunk, UI_10_FONT_ID, renderer, textWidth,
[&allBlocks](std::shared_ptr<TextBlock> block) {
allBlocks.push_back(block);
});
[&allBlocks](std::shared_ptr<TextBlock> block) { allBlocks.push_back(block); });
Serial.printf("[DICT-DBG] First chunk parsed, %u TextBlocks\n", allBlocks.size());

if (allBlocks.empty()) {
@@ -209,27 +207,27 @@ void DictionaryResultActivity::paginateDefinition() {
if (!currentPageBlocks.empty()) {
pages.push_back(currentPageBlocks);
}

Serial.printf("[DICT-DBG] Initial pagination: %u pages\n", pages.size());
}

size_t DictionaryResultActivity::findHtmlBreakPoint(const std::string& html, size_t searchStart, size_t maxPos) {
// Search backwards from maxPos for good HTML break points
// Priority: </li>, </p>, </ol>, </ul>, </div> then any '>' then whitespace

if (maxPos >= html.length()) {
return html.length();
}

// Clamp searchStart to not exceed maxPos
if (searchStart > maxPos) {
searchStart = maxPos;
}

// Search for closing block tags (best break points)
const char* closingTags[] = {"</li>", "</p>", "</ol>", "</ul>", "</div>", "</dd>", "</dt>"};
size_t bestBreak = std::string::npos;

for (const char* tag : closingTags) {
size_t pos = html.rfind(tag, maxPos);
if (pos != std::string::npos && pos >= searchStart) {
@@ -240,17 +238,17 @@ size_t DictionaryResultActivity::findHtmlBreakPoint(const std::string& html, siz
}
}
}

if (bestBreak != std::string::npos) {
return bestBreak;
}

// Fallback: search for any '>' (end of tag)
size_t tagEnd = html.rfind('>', maxPos);
if (tagEnd != std::string::npos && tagEnd >= searchStart) {
return tagEnd + 1;
}

// Last resort: search for whitespace
for (size_t i = maxPos; i >= searchStart && i != std::string::npos; i--) {
if (std::isspace(static_cast<unsigned char>(html[i]))) {
@@ -258,7 +256,7 @@ size_t DictionaryResultActivity::findHtmlBreakPoint(const std::string& html, siz
}
if (i == 0) break;
}

// No good break point found - use maxPos
return maxPos;
}
@@ -269,8 +267,7 @@ void DictionaryResultActivity::parseNextChunk() {
return;
}

Serial.printf("[DICT-DBG] parseNextChunk starting at position %u of %u\n",
parsePosition, rawDefinition.length());
Serial.printf("[DICT-DBG] parseNextChunk starting at position %u of %u\n", parsePosition, rawDefinition.length());

// Get margins with button hint space for all orientations
int marginTop, marginRight, marginBottom, marginLeft;
@@ -295,7 +292,7 @@ void DictionaryResultActivity::parseNextChunk() {
// Determine parse range for this chunk
size_t parseStart = parsePosition;
size_t parseEnd;

if (parsePosition + chunkSize >= rawDefinition.length()) {
// This will be the last chunk
parseEnd = rawDefinition.length();
@@ -315,9 +312,7 @@ void DictionaryResultActivity::parseNextChunk() {
// Parse this chunk into TextBlocks
std::vector<std::shared_ptr<TextBlock>> allBlocks;
DictHtmlParser::parse(chunk, UI_10_FONT_ID, renderer, textWidth,
[&allBlocks](std::shared_ptr<TextBlock> block) {
allBlocks.push_back(block);
});
[&allBlocks](std::shared_ptr<TextBlock> block) { allBlocks.push_back(block); });

Serial.printf("[DICT-DBG] Chunk parsed, %u TextBlocks\n", allBlocks.size());

@@ -359,39 +354,38 @@ void DictionaryResultActivity::parseNextChunk() {
Serial.printf("[DICT-DBG] Trimmed old page, firstPageNumber now %d\n", firstPageNumber);
}

Serial.printf("[DICT-DBG] After chunk: %u cached pages (pages %d-%d)\n",
pages.size(), firstPageNumber, firstPageNumber + static_cast<int>(pages.size()) - 1);
Serial.printf("[DICT-DBG] After chunk: %u cached pages (pages %d-%d)\n", pages.size(), firstPageNumber,
firstPageNumber + static_cast<int>(pages.size()) - 1);
}

void DictionaryResultActivity::reparseToPage(int targetPageNumber) {
// Re-parse from the beginning to reach an earlier page that was trimmed
// This allows backward navigation through the entire definition

Serial.printf("[DICT-DBG] reparseToPage: target=%d, clearing and re-parsing\n", targetPageNumber);

// Clear current state and start fresh
pages.clear();
parsePosition = 0;
firstPageNumber = 1;
hasMoreContent = !rawDefinition.empty();

// Parse chunks until we have the target page
while (hasMoreContent && firstPageNumber + static_cast<int>(pages.size()) - 1 < targetPageNumber) {
parseNextChunk();
}

// Now position currentPage to show the target page
if (targetPageNumber >= firstPageNumber &&
targetPageNumber < firstPageNumber + static_cast<int>(pages.size())) {
if (targetPageNumber >= firstPageNumber && targetPageNumber < firstPageNumber + static_cast<int>(pages.size())) {
currentPage = targetPageNumber - firstPageNumber;
} else {
// Target page doesn't exist (definition is shorter than expected)
currentPage = static_cast<int>(pages.size()) - 1;
if (currentPage < 0) currentPage = 0;
}

Serial.printf("[DICT-DBG] reparseToPage done: currentPage=%d, firstPageNumber=%d, pages=%u\n",
currentPage, firstPageNumber, pages.size());

Serial.printf("[DICT-DBG] reparseToPage done: currentPage=%d, firstPageNumber=%d, pages=%u\n", currentPage,
firstPageNumber, pages.size());
}

void DictionaryResultActivity::displayTaskLoop() {
@@ -425,8 +419,8 @@ void DictionaryResultActivity::render() const {
renderer.drawCenteredText(UI_10_FONT_ID, centerY, "Word not found");
} else if (!pages.empty()) {
// Draw definition text using TextBlocks with rich formatting
constexpr int headerHeight = 55; // Space for "Dictionary" + lookup word
constexpr int footerHeight = 20; // Space for page indicator
constexpr int headerHeight = 55; // Space for "Dictionary" + lookup word
constexpr int footerHeight = 20; // Space for page indicator
const int textStartY = marginTop + headerHeight;
const int textMargin = marginLeft + 10;
const int lineHeight = renderer.getLineHeight(UI_10_FONT_ID);

@@ -29,8 +29,8 @@ class DictionaryResultActivity final : public Activity {
// We limit cached pages to prevent memory exhaustion on long definitions
static constexpr int MAX_CACHED_PAGES = 4;
std::vector<std::vector<std::shared_ptr<TextBlock>>> pages;
int currentPage = 0; // Index into pages vector
int firstPageNumber = 1; // The page number of pages[0] (1-based for display)
int currentPage = 0; // Index into pages vector
int firstPageNumber = 1; // The page number of pages[0] (1-based for display)
bool notFound = false;

// Chunked parsing state - parse definition on-demand as user navigates
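The page window described above, in a simplified sketch (`Page` stands in for the real `std::vector<std::shared_ptr<TextBlock>>`; the actual trimming happens inside `parseNextChunk()`):

```cpp
#include <cstddef>
#include <string>
#include <utility>
#include <vector>

using Page = std::string;  // placeholder for a fully laid-out page of TextBlocks

// Keep at most MAX_CACHED_PAGES parsed pages in memory. firstPageNumber tracks
// which logical page pages[0] currently represents, so older pages can be
// recreated later by re-parsing from the start (see reparseToPage above).
void appendPage(std::vector<Page>& pages, Page page, int& firstPageNumber) {
  constexpr std::size_t MAX_CACHED_PAGES = 4;  // matches the constant in the header
  pages.push_back(std::move(page));
  if (pages.size() > MAX_CACHED_PAGES) {
    pages.erase(pages.begin());  // trim the oldest cached page
    ++firstPageNumber;           // pages[0] now holds the next logical page
  }
}
```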
@@ -1,7 +1,7 @@
#include "EpubWordSelectionActivity.h"

#include <HalDisplay.h>
#include <GfxRenderer.h>
#include <HalDisplay.h>

#include <algorithm>
#include <cctype>
@@ -63,9 +63,9 @@ class MyLibraryActivity final : public Activity {
ActionType selectedAction = ActionType::Archive;
std::string actionTargetPath;
std::string actionTargetName;
int menuSelection = 0; // 0 = Archive, 1 = Delete
bool ignoreNextConfirmRelease = false; // Prevents immediate selection after long-press opens menu
bool clearCachePreserveProgress = true; // For Clear Cache: whether to preserve reading progress
int menuSelection = 0; // 0 = Archive, 1 = Delete
bool ignoreNextConfirmRelease = false; // Prevents immediate selection after long-press opens menu
bool clearCachePreserveProgress = true; // For Clear Cache: whether to preserve reading progress

// Recent tab state
std::vector<RecentBook> recentBooks;

@@ -71,14 +71,14 @@ class CrossPointWebServerActivity final : public ActivityWithSubactivity {
// Avoids recomputing QR data on every render (every 30s stats refresh)
// Marked mutable since QR drawing doesn't modify logical state but qrcode_getModule takes non-const
bool qrCacheValid = false;
mutable QRCode qrWebBrowser;
mutable QRCode qrCompanionApp;
mutable QRCode qrCompanionAppLibrary;
mutable QRCode qrWifiConfig; // For AP mode WiFi connection QR
uint8_t qrWebBrowserBuffer[QR_BUFFER_SIZE];
uint8_t qrCompanionAppBuffer[QR_BUFFER_SIZE];
uint8_t qrCompanionAppLibraryBuffer[QR_BUFFER_SIZE];
uint8_t qrWifiConfigBuffer[QR_BUFFER_SIZE];
mutable QRCode qrWebBrowser = {};
mutable QRCode qrCompanionApp = {};
mutable QRCode qrCompanionAppLibrary = {};
mutable QRCode qrWifiConfig = {}; // For AP mode WiFi connection QR
uint8_t qrWebBrowserBuffer[QR_BUFFER_SIZE] = {};
uint8_t qrCompanionAppBuffer[QR_BUFFER_SIZE] = {};
uint8_t qrCompanionAppLibraryBuffer[QR_BUFFER_SIZE] = {};
uint8_t qrWifiConfigBuffer[QR_BUFFER_SIZE] = {};

static void taskTrampoline(void* param);
[[noreturn]] void displayTaskLoop();
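The caching intent behind the zero-initialised QR members above, sketched under assumptions: it uses `qrcode_initText` from the Arduino QRCode library (which this header appears to rely on, given `QRCode` and `qrcode_getModule`), and the version/ECC constants are illustrative:

```cpp
#include <qrcode.h>  // assumed library header

// Illustrative sketch - not the activity's real method.
struct QrCacheEntry {
  QRCode code = {};
  uint8_t buffer[QR_BUFFER_SIZE] = {};  // zero-initialised, like the new members above
  bool valid = false;

  void ensure(const char* text) {
    if (valid) return;  // already generated: reuse on every 30s stats refresh
    qrcode_initText(&code, buffer, /*version=*/6, /*ecc=*/ECC_LOW, text);
    valid = true;       // reset to false whenever the encoded URL changes
  }
};
```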
@@ -138,7 +138,7 @@ void EpubReaderActivity::onEnter() {
nextPageNumber = data[2] + (data[3] << 8);
hasContentOffset = false;
Serial.printf("[%lu] [ERS] Loaded legacy progress (unknown version %d): spine %d, page %d\n", millis(),
version, currentSpineIndex, nextPageNumber);
version, currentSpineIndex, nextPageNumber);
}
}
} else if (fileSize >= 4) {
@@ -761,8 +761,7 @@ void EpubReaderActivity::renderScreen() {
}

if (section->currentPage < 0 || section->currentPage >= section->pageCount) {
Serial.printf("[%lu] [ERS] Page out of bounds: %d (max %d)\n", millis(), section->currentPage,
section->pageCount);
Serial.printf("[%lu] [ERS] Page out of bounds: %d (max %d)\n", millis(), section->currentPage, section->pageCount);
renderer.drawCenteredText(UI_12_FONT_ID, 300, "Out of bounds", true, EpdFontFamily::BOLD);
renderStatusBar(orientedMarginRight, orientedMarginBottom, orientedMarginLeft);
renderer.displayBuffer();

@@ -237,7 +237,8 @@ void ClearCacheActivity::clearCache() {
HomeActivity::freeCoverBufferIfAllocated();
MyLibraryActivity::clearThumbExistsCache();

Serial.printf("[%lu] [CLEAR_CACHE] Cache cleared: %d items removed, %d failed\n", millis(), clearedCount, failedCount);
Serial.printf("[%lu] [CLEAR_CACHE] Cache cleared: %d items removed, %d failed\n", millis(), clearedCount,
failedCount);

state = SUCCESS;
updateRequired = true;

@@ -1,7 +1,7 @@
#include "KeyboardEntryActivity.h"

#include "activities/dictionary/DictionaryMargins.h"
#include "MappedInputManager.h"
#include "activities/dictionary/DictionaryMargins.h"
#include "fontIds.h"

// Keyboard layouts - lowercase

@@ -29,9 +29,9 @@ class QuickMenuActivity final : public Activity {
const bool isPageBookmarked; // True if current page already has a bookmark

// Edit mode state
bool editMode = false; // True when in edit mode
int movingIndex = -1; // Index of item being moved (-1 if none)
uint8_t localOrder[5] = {0}; // Local copy of order for editing
bool editMode = false; // True when in edit mode
int movingIndex = -1; // Index of item being moved (-1 if none)
uint8_t localOrder[5] = {0}; // Local copy of order for editing

static void taskTrampoline(void* param);
[[noreturn]] void displayTaskLoop();

@@ -1269,9 +1269,7 @@ bool CrossPointWebServer::sendContentSafe(const char* content) const {
return server->client().connected();
}

bool CrossPointWebServer::sendContentSafe(const String& content) const {
return sendContentSafe(content.c_str());
}
bool CrossPointWebServer::sendContentSafe(const String& content) const { return sendContentSafe(content.c_str()); }

bool CrossPointWebServer::copyFile(const String& srcPath, const String& destPath) const {
FsFile srcFile;