Add MD5 hash API for companion app sync and improve upload reliability
- Add /api/hash endpoint to compute and cache MD5 hashes on demand
- Extend /api/files response with md5 field for EPUBs (null if not cached)
- Compute and cache MD5 automatically after EPUB uploads
- Add flush() before close() in WebSocket and HTTP upload handlers
- New Md5Utils module using ESP32's mbedtls for chunked hash computation

The MD5 hashes enable the companion app to detect file changes without downloading content. Hashes are cached in each book's .crosspoint cache directory and invalidated when the file size changes.
parent: a707cc6da2
commit: f739869519
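
For reference, a minimal sketch of how the companion app is expected to use the new endpoints (the path and hash value below are illustrative, not taken from this commit):

    GET /api/hash?path=/Books/example.epub
    200 {"md5":"d41d8cd98f00b204e9800998ecf8427e","size":123456}

    GET /api/files
    Each EPUB entry now carries "md5": "<32-char lowercase hex>" when a cached hash exists, or "md5": null otherwise.

Error cases for /api/hash: 400 for a missing path parameter or a directory, 403 for directory traversal attempts and hidden/system files, 404 if the file does not exist, 500 if the hash cannot be computed.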
@@ -14,6 +14,7 @@
#include "CrossPointSettings.h"
#include "html/FilesPageHtml.generated.h"
#include "html/HomePageHtml.generated.h"
#include "util/Md5Utils.h"
#include "util/StringUtils.h"

namespace {
@@ -42,6 +43,20 @@ void clearEpubCacheIfNeeded(const String& filePath) {
    Serial.printf("[%lu] [WEB] Cleared epub cache for: %s\n", millis(), filePath.c_str());
  }
}

// Helper function to compute and cache MD5 hash after upload
void computeMd5AfterUpload(const String& filePath) {
  // Only compute hash for EPUB files (companion app uses this for sync)
  if (StringUtils::checkFileExtension(filePath, ".epub")) {
    Serial.printf("[%lu] [WEB] Computing MD5 hash after upload for: %s\n", millis(), filePath.c_str());
    const std::string md5 = Md5Utils::computeAndCacheMd5(filePath.c_str(), BookManager::CROSSPOINT_DIR);
    if (!md5.empty()) {
      Serial.printf("[%lu] [WEB] MD5 hash cached: %s\n", millis(), md5.c_str());
    } else {
      Serial.printf("[%lu] [WEB] Failed to compute MD5 hash\n", millis());
    }
  }
}
} // namespace

// File listing page template - now using generated headers:
@@ -99,6 +114,7 @@ void CrossPointWebServer::begin() {

  server->on("/api/status", HTTP_GET, [this] { handleStatus(); });
  server->on("/api/files", HTTP_GET, [this] { handleFileListData(); });
  server->on("/api/hash", HTTP_GET, [this] { handleHash(); });

  // Upload endpoint with special handling for multipart form data
  server->on("/upload", HTTP_POST, [this] { handleUploadPost(); }, [this] { handleUpload(); });
@@ -307,9 +323,27 @@ void CrossPointWebServer::scanFiles(const char* path, const std::function<void(F
    if (info.isDirectory) {
      info.size = 0;
      info.isEpub = false;
      // md5 remains empty for directories
    } else {
      info.size = file.size();
      info.isEpub = isEpubFile(info.name);

      // For EPUBs, try to get cached MD5 hash
      if (info.isEpub) {
        // Build full file path
        String fullPath = String(path);
        if (!fullPath.endsWith("/")) {
          fullPath += "/";
        }
        fullPath += fileName;

        const std::string cachedMd5 =
            Md5Utils::getCachedMd5(fullPath.c_str(), BookManager::CROSSPOINT_DIR, info.size);
        if (!cachedMd5.empty()) {
          info.md5 = String(cachedMd5.c_str());
        }
        // If not cached, md5 remains empty (companion app can request via /api/hash)
      }
    }

    callback(info);
@@ -376,6 +410,15 @@ void CrossPointWebServer::handleFileListData() const {
    doc["isDirectory"] = info.isDirectory;
    doc["isEpub"] = info.isEpub;

    // Include md5 field for EPUBs (null if not cached, hash string if available)
    if (info.isEpub) {
      if (info.md5.isEmpty()) {
        doc["md5"] = nullptr; // JSON null
      } else {
        doc["md5"] = info.md5;
      }
    }

    const size_t written = serializeJson(doc, output, outputSize);
    if (written >= outputSize) {
      // JSON output truncated; skip this entry to avoid sending malformed JSON
@@ -552,6 +595,7 @@ void CrossPointWebServer::handleUpload() const {
    if (!flushUploadBuffer()) {
      uploadError = "Failed to write final data to SD card";
    }
    uploadFile.flush(); // Ensure FsFile internal buffer is written to SD card
    uploadFile.close();

    if (uploadError.isEmpty()) {
@@ -573,6 +617,9 @@ void CrossPointWebServer::handleUpload() const {
        if (!filePath.endsWith("/")) filePath += "/";
        filePath += uploadFileName;
        clearEpubCacheIfNeeded(filePath);

        // Compute and cache MD5 hash for uploaded EPUB files
        computeMd5AfterUpload(filePath);
      }
    }
  } else if (upload.status == UPLOAD_FILE_ABORTED) {
@@ -942,6 +989,7 @@ void CrossPointWebServer::onWebSocketEvent(uint8_t num, WStype_t type, uint8_t*

      // Check if upload complete
      if (wsUploadReceived >= wsUploadSize) {
        wsUploadFile.flush(); // Ensure all buffered data is written to SD card
        wsUploadFile.close();
        wsUploadInProgress = false;

@@ -961,6 +1009,9 @@ void CrossPointWebServer::onWebSocketEvent(uint8_t num, WStype_t type, uint8_t*
        filePath += wsUploadFileName;
        clearEpubCacheIfNeeded(filePath);

        // Compute and cache MD5 hash for uploaded EPUB files
        computeMd5AfterUpload(filePath);

        wsServer->sendTXT(num, "DONE");
        lastProgressSent = 0;
      }
@@ -1626,3 +1677,85 @@ void CrossPointWebServer::handleListPost() const {
    server->send(400, "application/json", "{\"error\":\"Invalid action. Use 'upload' or 'delete'\"}");
  }
}

void CrossPointWebServer::handleHash() const {
  Serial.printf("[%lu] [WEB] GET /api/hash request\n", millis());

  // Validate path parameter
  if (!server->hasArg("path")) {
    server->send(400, "application/json", "{\"error\":\"Missing path parameter\"}");
    return;
  }

  String filePath = server->arg("path");

  // Ensure path starts with /
  if (!filePath.startsWith("/")) {
    filePath = "/" + filePath;
  }

  // Security check: prevent directory traversal
  if (filePath.indexOf("..") >= 0) {
    Serial.printf("[%lu] [WEB] Hash rejected - directory traversal attempt: %s\n", millis(), filePath.c_str());
    server->send(403, "application/json", "{\"error\":\"Invalid path\"}");
    return;
  }

  // Extract filename for security checks
  const String filename = filePath.substring(filePath.lastIndexOf('/') + 1);

  // Security check: reject hidden/system files
  if (filename.startsWith(".")) {
    Serial.printf("[%lu] [WEB] Hash rejected - hidden/system file: %s\n", millis(), filePath.c_str());
    server->send(403, "application/json", "{\"error\":\"Cannot hash system files\"}");
    return;
  }

  // Check if file exists
  if (!SdMan.exists(filePath.c_str())) {
    Serial.printf("[%lu] [WEB] Hash failed - file not found: %s\n", millis(), filePath.c_str());
    server->send(404, "application/json", "{\"error\":\"File not found\"}");
    return;
  }

  // Get file size for cache validation and response
  FsFile file;
  if (!SdMan.openFileForRead("WEB", filePath, file)) {
    server->send(500, "application/json", "{\"error\":\"Failed to open file\"}");
    return;
  }

  if (file.isDirectory()) {
    file.close();
    server->send(400, "application/json", "{\"error\":\"Cannot hash a directory\"}");
    return;
  }

  const size_t fileSize = file.size();
  file.close();

  Serial.printf("[%lu] [WEB] Computing hash for: %s (%zu bytes)\n", millis(), filePath.c_str(), fileSize);

  // Try to get cached hash first
  std::string md5 = Md5Utils::getCachedMd5(filePath.c_str(), BookManager::CROSSPOINT_DIR, fileSize);

  // If not cached or invalid, compute and cache it
  if (md5.empty()) {
    md5 = Md5Utils::computeAndCacheMd5(filePath.c_str(), BookManager::CROSSPOINT_DIR);
    if (md5.empty()) {
      server->send(500, "application/json", "{\"error\":\"Failed to compute hash\"}");
      return;
    }
  }

  // Build JSON response
  JsonDocument doc;
  doc["md5"] = md5;
  doc["size"] = fileSize;

  String response;
  serializeJson(doc, response);
  server->send(200, "application/json", response);

  Serial.printf("[%lu] [WEB] Hash computed: %s = %s\n", millis(), filePath.c_str(), md5.c_str());
}

@@ -11,6 +11,7 @@ struct FileInfo {
  size_t size;
  bool isEpub;
  bool isDirectory;
  String md5; // MD5 hash for EPUBs (empty if not cached/available)
};

class CrossPointWebServer {
@@ -84,6 +85,7 @@ class CrossPointWebServer {
  void handleRename() const;
  void handleCopy() const;
  void handleMove() const;
  void handleHash() const;

  // Helper for copy operations
  bool copyFile(const String& srcPath, const String& destPath) const;
src/util/Md5Utils.cpp (new file, 249 lines)
@@ -0,0 +1,249 @@
#include "Md5Utils.h"

#include <ArduinoJson.h>
#include <HardwareSerial.h>
#include <SDCardManager.h>
#include <esp_task_wdt.h>
#include <mbedtls/md5.h>

#include <algorithm>
#include <functional>

namespace {
constexpr const char* LOG_TAG = "MD5";
constexpr size_t HASH_BUFFER_SIZE = 4096; // Read in 4KB chunks
constexpr const char* MD5_CACHE_FILENAME = "/content_md5.json";

// Convert 16-byte MD5 hash to 32-character lowercase hex string
std::string hashToHexString(const uint8_t hash[16]) {
  static const char hexChars[] = "0123456789abcdef";
  std::string result;
  result.reserve(32);
  for (int i = 0; i < 16; i++) {
    result += hexChars[(hash[i] >> 4) & 0x0F];
    result += hexChars[hash[i] & 0x0F];
  }
  return result;
}

// Compute cache directory path for a book file (mirrors BookManager logic)
std::string getCacheDirForBook(const std::string& bookPath, const std::string& cacheDir) {
  // Get file extension
  const size_t lastDot = bookPath.find_last_of('.');
  if (lastDot == std::string::npos) {
    return "";
  }
  std::string ext = bookPath.substr(lastDot);
  std::transform(ext.begin(), ext.end(), ext.begin(), ::tolower);

  // Determine prefix based on extension
  std::string prefix;
  if (ext == ".epub") {
    prefix = "epub_";
  } else if (ext == ".txt") {
    prefix = "txt_";
  } else if (ext == ".xtc" || ext == ".xtch") {
    prefix = "xtc_";
  } else {
    return "";
  }

  // Compute hash of path
  const size_t hash = std::hash<std::string>{}(bookPath);
  return cacheDir + "/" + prefix + std::to_string(hash);
}
} // namespace

std::string Md5Utils::computeFileMd5(const std::string& filePath) {
  FsFile file;
  if (!SdMan.openFileForRead(LOG_TAG, filePath, file)) {
    Serial.printf("[%lu] [%s] Failed to open file for MD5: %s\n", millis(), LOG_TAG, filePath.c_str());
    return "";
  }

  const size_t fileSize = file.size();
  Serial.printf("[%lu] [%s] Computing MD5 for %s (%zu bytes)\n", millis(), LOG_TAG, filePath.c_str(), fileSize);

  // Initialize MD5 context
  mbedtls_md5_context ctx;
  mbedtls_md5_init(&ctx);
  if (mbedtls_md5_starts_ret(&ctx) != 0) {
    Serial.printf("[%lu] [%s] Failed to initialize MD5 context\n", millis(), LOG_TAG);
    mbedtls_md5_free(&ctx);
    file.close();
    return "";
  }

  // Read file in chunks and update hash
  uint8_t buffer[HASH_BUFFER_SIZE];
  size_t totalRead = 0;
  const unsigned long startTime = millis();
  bool hashError = false;

  while (file.available()) {
    esp_task_wdt_reset(); // Reset watchdog to prevent timeout on large files

    const size_t bytesRead = file.read(buffer, HASH_BUFFER_SIZE);
    if (bytesRead == 0) {
      break;
    }

    if (mbedtls_md5_update_ret(&ctx, buffer, bytesRead) != 0) {
      hashError = true;
      break;
    }
    totalRead += bytesRead;

    // Yield periodically to allow other tasks
    yield();
  }

  file.close();

  if (hashError) {
    Serial.printf("[%lu] [%s] Error during MD5 computation\n", millis(), LOG_TAG);
    mbedtls_md5_free(&ctx);
    return "";
  }

  // Finalize hash
  uint8_t hash[16];
  if (mbedtls_md5_finish_ret(&ctx, hash) != 0) {
    Serial.printf("[%lu] [%s] Failed to finalize MD5\n", millis(), LOG_TAG);
    mbedtls_md5_free(&ctx);
    return "";
  }
  mbedtls_md5_free(&ctx);

  const std::string hexHash = hashToHexString(hash);
  const unsigned long elapsed = millis() - startTime;
  const float kbps = (elapsed > 0) ? (totalRead / 1024.0) / (elapsed / 1000.0) : 0;

  Serial.printf("[%lu] [%s] MD5 computed: %s (%zu bytes in %lu ms, %.1f KB/s)\n", millis(), LOG_TAG, hexHash.c_str(),
                totalRead, elapsed, kbps);

  return hexHash;
}

std::string Md5Utils::getCachedMd5(const std::string& bookPath, const std::string& cacheDir, size_t currentFileSize) {
  const std::string bookCacheDir = getCacheDirForBook(bookPath, cacheDir);
  if (bookCacheDir.empty()) {
    return "";
  }

  const std::string cachePath = bookCacheDir + MD5_CACHE_FILENAME;

  FsFile file;
  if (!SdMan.openFileForRead(LOG_TAG, cachePath, file)) {
    // Cache file doesn't exist - this is normal, not an error
    return "";
  }

  // Read the JSON content
  const size_t fileLen = file.size();
  if (fileLen > 256) { // Sanity check - cache file should be small
    Serial.printf("[%lu] [%s] Cache file too large, ignoring: %s\n", millis(), LOG_TAG, cachePath.c_str());
    file.close();
    return "";
  }

  char buffer[257];
  const size_t bytesRead = file.read(reinterpret_cast<uint8_t*>(buffer), fileLen);
  file.close();
  buffer[bytesRead] = '\0';

  // Parse JSON
  JsonDocument doc;
  const DeserializationError error = deserializeJson(doc, buffer);
  if (error) {
    Serial.printf("[%lu] [%s] Failed to parse cache JSON: %s\n", millis(), LOG_TAG, error.c_str());
    return "";
  }

  // Validate file size matches
  const size_t cachedSize = doc["size"] | 0;
  if (cachedSize != currentFileSize) {
    Serial.printf("[%lu] [%s] Cache size mismatch (cached=%zu, current=%zu), invalidating\n", millis(), LOG_TAG,
                  cachedSize, currentFileSize);
    return "";
  }

  // Return cached MD5
  const char* md5 = doc["md5"] | "";
  if (strlen(md5) != 32) {
    Serial.printf("[%lu] [%s] Invalid cached MD5 length: %zu\n", millis(), LOG_TAG, strlen(md5));
    return "";
  }

  return std::string(md5);
}

bool Md5Utils::cacheMd5(const std::string& bookPath, const std::string& cacheDir, const std::string& md5,
                        size_t fileSize) {
  if (md5.length() != 32) {
    Serial.printf("[%lu] [%s] Invalid MD5 length for caching: %zu\n", millis(), LOG_TAG, md5.length());
    return false;
  }

  const std::string bookCacheDir = getCacheDirForBook(bookPath, cacheDir);
  if (bookCacheDir.empty()) {
    return false;
  }

  // Ensure cache directory exists
  if (!SdMan.exists(bookCacheDir.c_str())) {
    if (!SdMan.mkdir(bookCacheDir.c_str())) {
      Serial.printf("[%lu] [%s] Failed to create cache directory: %s\n", millis(), LOG_TAG, bookCacheDir.c_str());
      return false;
    }
  }

  const std::string cachePath = bookCacheDir + MD5_CACHE_FILENAME;

  // Build JSON
  JsonDocument doc;
  doc["md5"] = md5;
  doc["size"] = fileSize;

  char buffer[128];
  const size_t jsonLen = serializeJson(doc, buffer, sizeof(buffer));

  // Write to file
  FsFile file;
  if (!SdMan.openFileForWrite(LOG_TAG, cachePath, file)) {
    Serial.printf("[%lu] [%s] Failed to create cache file: %s\n", millis(), LOG_TAG, cachePath.c_str());
    return false;
  }

  const size_t written = file.write(reinterpret_cast<const uint8_t*>(buffer), jsonLen);
  file.close();

  if (written != jsonLen) {
    Serial.printf("[%lu] [%s] Failed to write cache file\n", millis(), LOG_TAG);
    return false;
  }

  Serial.printf("[%lu] [%s] Cached MD5 for %s\n", millis(), LOG_TAG, bookPath.c_str());
  return true;
}

std::string Md5Utils::computeAndCacheMd5(const std::string& bookPath, const std::string& cacheDir) {
  // Get file size first
  FsFile file;
  if (!SdMan.openFileForRead(LOG_TAG, bookPath, file)) {
    return "";
  }
  const size_t fileSize = file.size();
  file.close();

  // Compute MD5
  const std::string md5 = computeFileMd5(bookPath);
  if (md5.empty()) {
    return "";
  }

  // Cache the result
  cacheMd5(bookPath, cacheDir, md5, fileSize);

  return md5;
}
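
As a sketch of the on-disk layout these functions produce (the numeric directory suffix comes from std::hash of the book path, so the value below is illustrative): for a book at /Books/example.epub with cache directory /.crosspoint, the hash ends up in

    /.crosspoint/epub_1234567890/content_md5.json
    containing {"md5":"<32-char lowercase hex>","size":<file size in bytes>}

getCachedMd5() treats an entry as stale whenever the stored size no longer matches the current file size, which is how the cache is invalidated after a file changes.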
src/util/Md5Utils.h (new file, 44 lines)
@@ -0,0 +1,44 @@
#pragma once

#include <string>

namespace Md5Utils {

/**
 * Compute MD5 hash of a file, reading in chunks to avoid memory issues.
 * @param filePath Path to the file on SD card
 * @return MD5 hash as lowercase hex string, or empty string on error
 */
std::string computeFileMd5(const std::string& filePath);

/**
 * Get cached MD5 for a book file.
 * Validates that the cached hash was computed for the current file size.
 * @param bookPath Full path to the book file (e.g., "/Books/mybook.epub")
 * @param cacheDir The .crosspoint cache directory (e.g., "/.crosspoint")
 * @param currentFileSize Current size of the book file for validation
 * @return Cached MD5 hash, or empty string if not cached or invalid
 */
std::string getCachedMd5(const std::string& bookPath, const std::string& cacheDir, size_t currentFileSize);

/**
 * Cache MD5 hash for a book file.
 * Stores the hash along with the file size for later validation.
 * @param bookPath Full path to the book file
 * @param cacheDir The .crosspoint cache directory
 * @param md5 The MD5 hash to cache
 * @param fileSize The file size when hash was computed
 * @return true if successfully cached
 */
bool cacheMd5(const std::string& bookPath, const std::string& cacheDir, const std::string& md5, size_t fileSize);

/**
 * Compute and cache MD5 hash for a book file.
 * Combines computeFileMd5 and cacheMd5 into a single operation.
 * @param bookPath Full path to the book file
 * @param cacheDir The .crosspoint cache directory
 * @return MD5 hash as lowercase hex string, or empty string on error
 */
std::string computeAndCacheMd5(const std::string& bookPath, const std::string& cacheDir);

} // namespace Md5Utils
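
For completeness, a minimal caller-side sketch of the cached-or-compute flow that CrossPointWebServer::handleHash() follows, assuming a BookManager.h header that exposes BookManager::CROSSPOINT_DIR (the include path and helper name are illustrative, not part of this commit):

#include <string>

#include "BookManager.h" // assumed header exposing BookManager::CROSSPOINT_DIR
#include "util/Md5Utils.h"

// Return the MD5 for a book file, preferring the cached value when it is still
// valid for the current file size; recompute (and re-cache) otherwise.
std::string getOrComputeMd5(const std::string& bookPath, size_t currentSize) {
  std::string md5 = Md5Utils::getCachedMd5(bookPath, BookManager::CROSSPOINT_DIR, currentSize);
  if (md5.empty()) {
    md5 = Md5Utils::computeAndCacheMd5(bookPath, BookManager::CROSSPOINT_DIR);
  }
  return md5; // empty string if the file could not be opened or hashed
}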