initial commit

This commit is contained in:
cottongin
2026-02-05 00:15:54 -05:00
commit 016c9d9e33
16 changed files with 7329 additions and 0 deletions

7
.gitignore vendored Normal file
View File

@@ -0,0 +1,7 @@
chat-summaries/
target/
.DS_Store
*.mp4
*.gif
.cursor/

5637
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

43
Cargo.toml Normal file
View File

@@ -0,0 +1,43 @@
[package]
name = "avif-maker"
version = "0.1.0"
edition = "2021"
description = "Convert images and videos to AVIF format with a native GUI"
license = "MIT"
[dependencies]
# GUI
eframe = "0.30"
rfd = "0.15"
egui_extras = { version = "0.30", features = ["image"] }
# AVIF encoding (links to libavif C library)
libavif = "0.14"
# Image decoding (pure Rust)
image = "0.25"
gif = "0.14"
# Video decoding (runs ffmpeg as subprocess - no FFI issues)
ffmpeg-sidecar = "2"
# Async & utilities
tokio = { version = "1", features = ["rt-multi-thread", "sync", "macros"] }
directories = "5"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
# Error handling
thiserror = "2"
anyhow = "1"
# Logging
tracing = "0.1"
tracing-subscriber = "0.3"
# Utilities
open = "5"
[profile.release]
opt-level = 3
lto = true

113
README.md Normal file
View File

@@ -0,0 +1,113 @@
# AVIF Maker
A native GUI application to convert images and videos to AVIF format.
## Features
- **Drag and drop** files or use the file picker
- **Batch conversion** - queue multiple files
- **Animated AVIF** - convert GIFs and videos to animated AVIF
- **Alpha channel support** - preserves transparency from GIF, PNG, and video sources
- **Configurable quality** - separate quality controls for color and alpha
- **Speed/quality tradeoff** - encoder speed setting from 0 (best) to 10 (fastest)
- **Lossless mode** - perfect quality output
## Supported Input Formats
| Format | Decoder | Alpha Support | Notes |
| ------------ | ------------------- | ------------- | -------------------------- |
| GIF | `gif` crate | 1-bit | Animated, frame delays |
| PNG/APNG | `image` crate | Full | Static images |
| JPEG | `image` crate | N/A | No alpha |
| WebP | `image` crate | Full | Static only |
| MP4/MOV/WebM | `ffmpeg-sidecar` | Codec-dependent | Requires ffmpeg in PATH |
## Build Requirements
### macOS
```bash
# Install dependencies
brew install libavif ffmpeg
# Build and run
cargo build --release
./target/release/avif-maker
```
### Linux (Ubuntu/Debian)
```bash
# Install dependencies
sudo apt install libavif-dev ffmpeg
# Build and run
cargo build --release
./target/release/avif-maker
```
### Windows
Use vcpkg or pre-built binaries for libavif and FFmpeg.
```powershell
# With vcpkg
vcpkg install libavif ffmpeg
# Set environment variables
$env:VCPKG_ROOT = "C:\path\to\vcpkg"
# Build
cargo build --release
```
## Usage
1. **Add files** - Drag and drop files onto the window, or click to browse
2. **Configure settings** - Adjust quality, speed, and output options in the left panel
3. **Convert** - Click "Convert All" to process the queue
4. **Access outputs** - Click "Open" or "Show in Finder" on completed files
## Settings
- **Quality (0-100)**: Higher = better quality, larger file. Default: 75
- **Alpha Quality (0-100)**: Separate quality for transparency. Default: 90
- **Speed (0-10)**: 0 = slowest/best quality, 10 = fastest. Default: 6
- **Lossless**: Perfect quality at the cost of file size
- **FPS Override**: Override detected framerate for video inputs
- **Output Directory**: Save to same folder as source, or a custom folder
## Test Files
Three test files are included:
- `Highlander-IMustFlip.mp4` - Video (tests ffmpeg decoder)
- `Highlander-IMustFlipSmaller-OG.gif` - Animated GIF without transparency
- `BTS-240-art.gif` - Animated GIF with transparency (1-bit alpha)
## Architecture
```
avif-maker/
├── src/
│ ├── main.rs # Application entry point
│ ├── app.rs # Main application state
│ ├── decoder/
│ │ ├── mod.rs # Decoder trait and dispatch
│ │ ├── gif.rs # GIF decoder (pure Rust)
│ │ ├── image.rs # PNG/JPEG/WebP decoder
│ │ └── video.rs # FFmpeg-based video decoder
│ ├── encoder/
│ │ ├── mod.rs
│ │ └── avif.rs # libavif encoder wrapper
│ └── ui/
│ ├── mod.rs
│ ├── drop_zone.rs # Drag-and-drop area
│ ├── settings.rs # Settings panel
│ └── queue.rs # Job queue display
└── Cargo.toml
```
## License
MIT

462
src/app.rs Normal file
View File

@@ -0,0 +1,462 @@
use crate::decoder::{Decoder, DecoderTrait, ProgressCallback, VideoDecoder};
use crate::encoder::{AvifEncoder, EncodeProgressCallback};
use crate::ui::{DropZone, Queue, Settings};
use eframe::egui;
use std::path::{Path, PathBuf};
use std::sync::mpsc::{self, Receiver, Sender};
use std::sync::Arc;
use std::thread;
/// Represents a single conversion job
#[derive(Clone)]
pub struct ConversionJob {
    /// Stable identifier assigned at queue time; used to match worker
    /// messages back to this entry.
    pub id: usize,
    /// Source file selected by the user.
    pub input_path: PathBuf,
    /// Destination `.avif` path; populated once the job completes.
    pub output_path: Option<PathBuf>,
    /// Current lifecycle stage of the job.
    pub status: JobStatus,
    /// Overall fraction complete in [0.0, 1.0] (decoding maps to 0-0.5,
    /// encoding to 0.5-1.0 — see `process_job`).
    pub progress: f32,
    /// Human-readable progress detail, e.g. a "frame 3/10" counter.
    pub progress_detail: Option<String>,
    /// Error message when the job failed.
    pub error: Option<String>,
}
/// Lifecycle states for a `ConversionJob`.
#[derive(Clone, PartialEq)]
pub enum JobStatus {
    /// Waiting to be picked up by `start_conversion`.
    Queued,
    /// Worker thread is decoding the source into RGBA frames.
    Decoding,
    /// Worker thread is encoding the decoded frames to AVIF.
    Encoding,
    /// Finished successfully; `output_path` is set.
    Complete,
    /// Finished with an error; `error` is set.
    Failed,
}
impl std::fmt::Display for JobStatus {
    /// Render the status as the short label shown in the queue UI.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            JobStatus::Queued => "Queued",
            JobStatus::Decoding => "Decoding",
            JobStatus::Encoding => "Encoding",
            JobStatus::Complete => "Complete",
            JobStatus::Failed => "Failed",
        };
        f.write_str(label)
    }
}
/// Messages sent from worker threads back to the UI
pub enum WorkerMessage {
    /// Periodic progress report for a running job.
    ProgressUpdate {
        job_id: usize,
        /// New stage (Decoding or Encoding) to display.
        status: JobStatus,
        /// Overall fraction complete in [0.0, 1.0].
        progress: f32,
        /// Optional human-readable detail (e.g. "frame 3/10").
        detail: Option<String>,
    },
    /// Job finished successfully and wrote `output_path`.
    JobComplete {
        job_id: usize,
        output_path: PathBuf,
    },
    /// Job aborted with a decode or encode error.
    JobFailed {
        job_id: usize,
        error: String,
    },
}
/// Application settings for AVIF encoding
#[derive(Clone, serde::Serialize, serde::Deserialize)]
pub struct EncodingSettings {
    /// Color quality, 0-100 (higher = better quality, larger file).
    pub quality: u8,
    /// Separate quality for the alpha plane, 0-100.
    pub alpha_quality: u8,
    /// Encoder speed, 0 (slowest/best) to 10 (fastest).
    pub speed: u8,
    /// Overrides the detected framerate for video inputs when set.
    pub fps_override: Option<f64>,
    /// Custom output folder; consulted only when `use_same_directory` is false.
    pub output_directory: Option<PathBuf>,
    /// Save next to the source file instead of `output_directory`.
    pub use_same_directory: bool,
    /// Force maximum quality (AvifEncoder::new maps this to quality 100/100).
    pub lossless: bool,
}
impl Default for EncodingSettings {
    /// Defaults matching the README: quality 75, alpha 90, speed 6,
    /// lossy, saving next to the source file.
    fn default() -> Self {
        Self {
            quality: 75,
            alpha_quality: 90,
            speed: 6,
            fps_override: None,
            output_directory: None,
            use_same_directory: true,
            lossless: false,
        }
    }
}
pub struct AvifMakerApp {
    /// Encoding settings
    pub settings: EncodingSettings,
    /// Queue of conversion jobs
    pub jobs: Vec<ConversionJob>,
    /// Next job ID (monotonically increasing; never reused)
    next_job_id: usize,
    /// Channel for receiving worker updates
    worker_rx: Receiver<WorkerMessage>,
    /// Channel for sending to workers (kept for spawning new workers)
    worker_tx: Sender<WorkerMessage>,
    /// Whether conversion is currently running (jobs run one at a time)
    is_converting: bool,
    /// UI components
    drop_zone: DropZone,
    settings_ui: Settings,
    queue_ui: Queue,
}
impl AvifMakerApp {
pub fn new(_cc: &eframe::CreationContext<'_>) -> Self {
let (worker_tx, worker_rx) = mpsc::channel();
// Try to load saved settings
let settings = Self::load_settings().unwrap_or_default();
Self {
settings,
jobs: Vec::new(),
next_job_id: 0,
worker_rx,
worker_tx,
is_converting: false,
drop_zone: DropZone::new(),
settings_ui: Settings::new(),
queue_ui: Queue::new(),
}
}
fn load_settings() -> Option<EncodingSettings> {
let dirs = directories::ProjectDirs::from("", "", "avif-maker")?;
let config_path = dirs.config_dir().join("settings.json");
let data = std::fs::read_to_string(config_path).ok()?;
serde_json::from_str(&data).ok()
}
fn save_settings(&self) {
if let Some(dirs) = directories::ProjectDirs::from("", "", "avif-maker") {
let config_dir = dirs.config_dir();
let _ = std::fs::create_dir_all(config_dir);
let config_path = config_dir.join("settings.json");
if let Ok(data) = serde_json::to_string_pretty(&self.settings) {
let _ = std::fs::write(config_path, data);
}
}
}
pub fn add_files(&mut self, paths: Vec<PathBuf>) {
for path in paths {
if Self::is_supported_format(&path) {
let job = ConversionJob {
id: self.next_job_id,
input_path: path,
output_path: None,
status: JobStatus::Queued,
progress: 0.0,
progress_detail: None,
error: None,
};
self.jobs.push(job);
self.next_job_id += 1;
}
}
}
fn is_supported_format(path: &PathBuf) -> bool {
let ext = path
.extension()
.and_then(|e| e.to_str())
.map(|e| e.to_lowercase());
matches!(
ext.as_deref(),
Some("gif")
| Some("png")
| Some("jpg")
| Some("jpeg")
| Some("webp")
| Some("mp4")
| Some("mov")
| Some("webm")
| Some("mkv")
| Some("avi")
| Some("m4v")
)
}
pub fn remove_job(&mut self, job_id: usize) {
self.jobs.retain(|j| j.id != job_id);
}
pub fn clear_completed(&mut self) {
self.jobs
.retain(|j| j.status != JobStatus::Complete && j.status != JobStatus::Failed);
}
pub fn start_conversion(&mut self) {
if self.is_converting {
return;
}
// Find next queued job
let next_job = self.jobs.iter_mut().find(|j| j.status == JobStatus::Queued);
if let Some(job) = next_job {
self.is_converting = true;
job.status = JobStatus::Decoding;
let job_id = job.id;
let input_path = job.input_path.clone();
let settings = self.settings.clone();
let tx = self.worker_tx.clone();
// Determine output path
let output_path = if self.settings.use_same_directory {
input_path.with_extension("avif")
} else if let Some(ref dir) = self.settings.output_directory {
dir.join(input_path.file_stem().unwrap_or_default())
.with_extension("avif")
} else {
input_path.with_extension("avif")
};
// Spawn worker thread
thread::spawn(move || {
Self::process_job(job_id, input_path, output_path, settings, tx);
});
}
}
fn process_job(
job_id: usize,
input_path: PathBuf,
output_path: PathBuf,
settings: EncodingSettings,
tx: Sender<WorkerMessage>,
) {
// Create decode progress callback
let tx_decode = tx.clone();
let decode_progress: ProgressCallback = Arc::new(move |current, total| {
let (progress, detail) = if let Some(total) = total {
let pct = current as f32 / total as f32;
(pct * 0.5, format!("frame {}/{}", current, total)) // Decoding is 0-50%
} else {
// Unknown total - just show frame count
(0.25, format!("frame {}", current))
};
let _ = tx_decode.send(WorkerMessage::ProgressUpdate {
job_id,
status: JobStatus::Decoding,
progress,
detail: Some(detail),
});
});
// Update status: decoding started
let _ = tx.send(WorkerMessage::ProgressUpdate {
job_id,
status: JobStatus::Decoding,
progress: 0.0,
detail: Some("starting...".to_string()),
});
// Decode input with progress
let decoder = Decoder::for_path(&input_path);
let frames = match decoder.decode_with_progress(&input_path, Some(decode_progress)) {
Ok(frames) => frames,
Err(e) => {
let _ = tx.send(WorkerMessage::JobFailed {
job_id,
error: format!("Decode error: {}", e),
});
return;
}
};
let total_frames = frames.frames.len();
// Create encode progress callback
let tx_encode = tx.clone();
let encode_progress: EncodeProgressCallback = Arc::new(move |current, total| {
let pct = 0.5 + (current as f32 / total as f32) * 0.5; // Encoding is 50-100%
let detail = format!("frame {}/{}", current, total);
let _ = tx_encode.send(WorkerMessage::ProgressUpdate {
job_id,
status: JobStatus::Encoding,
progress: pct,
detail: Some(detail),
});
});
// Update status: encoding started
let _ = tx.send(WorkerMessage::ProgressUpdate {
job_id,
status: JobStatus::Encoding,
progress: 0.5,
detail: Some(format!("0/{} frames", total_frames)),
});
// Encode to AVIF with progress
let encoder = AvifEncoder::new(&settings);
match encoder.encode_with_progress(&frames, &output_path, Some(encode_progress)) {
Ok(()) => {
let _ = tx.send(WorkerMessage::JobComplete {
job_id,
output_path,
});
}
Err(e) => {
let _ = tx.send(WorkerMessage::JobFailed {
job_id,
error: format!("Encode error: {}", e),
});
}
}
}
fn process_worker_messages(&mut self) {
while let Ok(msg) = self.worker_rx.try_recv() {
match msg {
WorkerMessage::ProgressUpdate {
job_id,
status,
progress,
detail,
} => {
if let Some(job) = self.jobs.iter_mut().find(|j| j.id == job_id) {
job.status = status;
job.progress = progress;
job.progress_detail = detail;
}
}
WorkerMessage::JobComplete { job_id, output_path } => {
if let Some(job) = self.jobs.iter_mut().find(|j| j.id == job_id) {
job.status = JobStatus::Complete;
job.output_path = Some(output_path);
job.progress = 1.0;
job.progress_detail = None;
}
self.is_converting = false;
// Start next job if any
self.start_conversion();
}
WorkerMessage::JobFailed { job_id, error } => {
if let Some(job) = self.jobs.iter_mut().find(|j| j.id == job_id) {
job.status = JobStatus::Failed;
job.error = Some(error);
job.progress_detail = None;
}
self.is_converting = false;
// Start next job if any
self.start_conversion();
}
}
}
}
}
impl eframe::App for AvifMakerApp {
    /// Per-frame UI update: drain worker messages, ingest dropped files,
    /// then lay out the top bar, settings panel, drop zone and job queue.
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        // Process any pending worker messages
        self.process_worker_messages();
        // Handle dropped files
        ctx.input(|i| {
            if !i.raw.dropped_files.is_empty() {
                let paths: Vec<PathBuf> = i
                    .raw
                    .dropped_files
                    .iter()
                    .filter_map(|f| f.path.clone())
                    .collect();
                self.add_files(paths);
            }
        });
        // Top panel with title
        egui::TopBottomPanel::top("top_panel").show(ctx, |ui| {
            ui.add_space(8.0);
            ui.horizontal(|ui| {
                ui.heading("AVIF Maker");
                ui.with_layout(egui::Layout::right_to_left(egui::Align::Center), |ui| {
                    ui.label(format!("{} files in queue", self.jobs.len()));
                });
            });
            ui.add_space(4.0);
        });
        // Left panel with settings
        egui::SidePanel::left("settings_panel")
            .resizable(true)
            .default_width(280.0)
            .show(ctx, |ui| {
                self.settings_ui.show(ui, &mut self.settings);
                ui.add_space(16.0);
                // Save settings when changed
                if ui.button("Save Settings").clicked() {
                    self.save_settings();
                }
            });
        // Collect files from drop zone (to avoid borrow issues: the closure
        // below cannot borrow `self` mutably while `drop_zone` is borrowed)
        let mut dropped_paths: Vec<PathBuf> = Vec::new();
        // Central panel with drop zone and queue
        egui::CentralPanel::default().show(ctx, |ui| {
            // Drop zone at the top
            self.drop_zone.show(ui, |paths| {
                dropped_paths.extend(paths);
            });
            ui.add_space(16.0);
            ui.separator();
            ui.add_space(8.0);
            // Action buttons
            ui.horizontal(|ui| {
                let has_queued = self.jobs.iter().any(|j| j.status == JobStatus::Queued);
                if ui
                    .add_enabled(
                        has_queued && !self.is_converting,
                        egui::Button::new("Convert All"),
                    )
                    .clicked()
                {
                    self.start_conversion();
                }
                if ui
                    .add_enabled(self.is_converting, egui::Button::new("Cancel"))
                    .clicked()
                {
                    // Cancel is a no-op for now - could implement with cancellation tokens
                }
                ui.with_layout(egui::Layout::right_to_left(egui::Align::Center), |ui| {
                    if ui.button("Clear Completed").clicked() {
                        self.clear_completed();
                    }
                });
            });
            ui.add_space(8.0);
            ui.separator();
            ui.add_space(8.0);
            // Queue display: returns ids whose remove button was clicked
            let jobs_to_remove = self.queue_ui.show(ui, &self.jobs);
            for job_id in jobs_to_remove {
                self.remove_job(job_id);
            }
        });
        // Add dropped files after the UI closure
        if !dropped_paths.is_empty() {
            self.add_files(dropped_paths);
        }
        // Request repaint if we're converting (to update progress)
        if self.is_converting {
            ctx.request_repaint();
        }
    }
}

121
src/decoder/gif.rs Normal file
View File

@@ -0,0 +1,121 @@
use super::{DecodeError, DecodedFrames, DecoderTrait, Frame, ProgressCallback};
use std::fs::File;
use std::path::Path;
/// Pure-Rust GIF decoder built on the `gif` crate.
pub struct GifDecoder;

impl DecoderTrait for GifDecoder {
    /// Decode every frame of a GIF, compositing onto a persistent canvas so
    /// that inter-frame disposal methods produce correct full frames.
    /// `progress` is called with `(frame_index, None)` — a GIF's total frame
    /// count is not known until the whole stream has been read.
    fn decode_with_progress(
        &self,
        path: &Path,
        progress: Option<ProgressCallback>,
    ) -> Result<DecodedFrames, DecodeError> {
        let file = File::open(path)?;
        let mut decoder = gif::DecodeOptions::new();
        // Ask the gif crate to expand indexed pixels to RGBA for us.
        decoder.set_color_output(gif::ColorOutput::RGBA);
        let mut decoder = decoder
            .read_info(file)
            .map_err(|e| DecodeError::Gif(e.to_string()))?;
        let width = decoder.width() as u32;
        let height = decoder.height() as u32;
        let mut frames = Vec::new();
        let mut has_alpha = false;
        let mut frame_count = 0usize;
        // Canvas to accumulate frames (GIF disposal methods require this)
        let mut canvas = vec![0u8; (width * height * 4) as usize];
        while let Some(frame) = decoder
            .read_next_frame()
            .map_err(|e| DecodeError::Gif(e.to_string()))?
        {
            // Sub-frame placement within the logical screen.
            let frame_x = frame.left as u32;
            let frame_y = frame.top as u32;
            let frame_w = frame.width as u32;
            let frame_h = frame.height as u32;
            // Get frame delay in milliseconds (GIF stores in centiseconds)
            let delay_ms = (frame.delay as u64) * 10;
            // Minimum delay of 20ms (browsers treat 0 or very small delays as ~100ms)
            let delay_ms = if delay_ms < 20 { 100 } else { delay_ms };
            // Handle disposal method: "Previous" needs a snapshot taken
            // before drawing so it can be restored after this frame is emitted.
            let canvas_backup = match frame.dispose {
                gif::DisposalMethod::Previous => Some(canvas.clone()),
                _ => None,
            };
            // Check for transparency
            if frame.transparent.is_some() {
                has_alpha = true;
            }
            // Composite frame onto canvas (sub-frame may be smaller than the
            // canvas and offset; out-of-bounds pixels are clipped)
            for y in 0..frame_h {
                for x in 0..frame_w {
                    let src_idx = ((y * frame_w + x) * 4) as usize;
                    let dst_x = frame_x + x;
                    let dst_y = frame_y + y;
                    if dst_x < width && dst_y < height {
                        let dst_idx = ((dst_y * width + dst_x) * 4) as usize;
                        let alpha = frame.buffer[src_idx + 3];
                        // Only copy if not fully transparent
                        if alpha > 0 {
                            canvas[dst_idx..dst_idx + 4]
                                .copy_from_slice(&frame.buffer[src_idx..src_idx + 4]);
                        }
                    }
                }
            }
            // Save the current canvas state as a frame
            frames.push(Frame {
                rgba: canvas.clone(),
                width,
                height,
                duration_ms: delay_ms,
            });
            // Report progress (we don't know total frames upfront for GIF)
            frame_count += 1;
            if let Some(ref cb) = progress {
                cb(frame_count, None);
            }
            // Handle disposal
            match frame.dispose {
                gif::DisposalMethod::Background => {
                    // Clear the frame area to background (transparent)
                    for y in 0..frame_h {
                        for x in 0..frame_w {
                            let dst_x = frame_x + x;
                            let dst_y = frame_y + y;
                            if dst_x < width && dst_y < height {
                                let dst_idx = ((dst_y * width + dst_x) * 4) as usize;
                                canvas[dst_idx..dst_idx + 4].copy_from_slice(&[0, 0, 0, 0]);
                            }
                        }
                    }
                }
                gif::DisposalMethod::Previous => {
                    // Restore the snapshot taken before this frame was drawn.
                    if let Some(backup) = canvas_backup {
                        canvas = backup;
                    }
                }
                _ => {
                    // Keep the canvas as-is
                }
            }
        }
        if frames.is_empty() {
            return Err(DecodeError::Gif("No frames found in GIF".to_string()));
        }
        Ok(DecodedFrames::new(frames, width, height, has_alpha))
    }
}

52
src/decoder/image.rs Normal file
View File

@@ -0,0 +1,52 @@
use super::{DecodeError, DecodedFrames, DecoderTrait, Frame, ProgressCallback};
use std::path::Path;
/// Static-image decoder (PNG/JPEG/WebP/…) built on the `image` crate.
pub struct ImageDecoder;

impl DecoderTrait for ImageDecoder {
    /// Decode a static image as a single zero-duration RGBA frame.
    /// Progress is reported as a trivial 0-of-1 / 1-of-1 pair.
    fn decode_with_progress(
        &self,
        path: &Path,
        progress: Option<ProgressCallback>,
    ) -> Result<DecodedFrames, DecodeError> {
        // Signal that decoding has begun.
        if let Some(cb) = progress.as_ref() {
            cb(0, Some(1));
        }
        // Try to open as animated first (for APNG support)
        let decoded = image::open(path).map_err(|e| DecodeError::Image(e.to_string()))?;
        let pixels = decoded.to_rgba8();
        let (w, h) = (pixels.width(), pixels.height());
        // Alpha only counts as present when at least one pixel is not fully opaque.
        let alpha_present = pixels.pixels().any(|px| px[3] < 255);
        let raw = pixels.into_raw();
        // Signal completion.
        if let Some(cb) = progress.as_ref() {
            cb(1, Some(1));
        }
        let frame = Frame {
            rgba: raw,
            width: w,
            height: h,
            duration_ms: 0, // Static image
        };
        Ok(DecodedFrames::new(vec![frame], w, h, alpha_present))
    }
}
impl ImageDecoder {
    /// Decode animated PNG (APNG)
    ///
    /// The image crate doesn't fully support APNG animation extraction yet,
    /// so for now this falls back to static image decoding (first frame only).
    /// In a future version, the `apng` crate could provide full APNG support.
    #[allow(dead_code)]
    pub fn decode_apng(path: &Path) -> Result<DecodedFrames, DecodeError> {
        let decoder = ImageDecoder;
        decoder.decode(path)
    }
}

107
src/decoder/mod.rs Normal file
View File

@@ -0,0 +1,107 @@
mod gif;
mod image;
mod video;
use std::path::Path;
use std::sync::Arc;
use thiserror::Error;
pub use self::gif::GifDecoder;
pub use self::image::ImageDecoder;
pub use self::video::VideoDecoder;
/// Errors that can occur while decoding an input file into RGBA frames.
#[derive(Error, Debug)]
pub enum DecodeError {
    /// Underlying filesystem/read failure (auto-converted via `?`).
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    /// Failure reported by the `gif` crate (stringified).
    #[error("GIF decode error: {0}")]
    Gif(String),
    /// Failure reported by the `image` crate (stringified).
    #[error("Image decode error: {0}")]
    Image(String),
    /// ffmpeg/ffprobe could not be run or produced no frames.
    #[error("Video decode error: {0}")]
    Video(String),
    /// Extension not recognized by any decoder.
    #[error("Unsupported format: {0}")]
    UnsupportedFormat(String),
}
/// Progress callback type - receives (current, total) or (current, None) if total unknown
pub type ProgressCallback = Arc<dyn Fn(usize, Option<usize>) + Send + Sync>;

/// A single decoded frame with RGBA data
#[derive(Clone)]
pub struct Frame {
    /// RGBA pixel data (width * height * 4 bytes)
    pub rgba: Vec<u8>,
    /// Frame width in pixels
    pub width: u32,
    /// Frame height in pixels
    pub height: u32,
    /// Frame duration in milliseconds (0 for static images)
    pub duration_ms: u64,
}

/// Collection of decoded frames
pub struct DecodedFrames {
    /// Frames in presentation order.
    pub frames: Vec<Frame>,
    /// Canvas width in pixels.
    pub width: u32,
    /// Canvas height in pixels.
    pub height: u32,
    /// Whether the source had alpha channel
    pub has_alpha: bool,
}
impl DecodedFrames {
pub fn new(frames: Vec<Frame>, width: u32, height: u32, has_alpha: bool) -> Self {
Self {
frames,
width,
height,
has_alpha,
}
}
pub fn is_animated(&self) -> bool {
self.frames.len() > 1
}
pub fn frame_count(&self) -> usize {
self.frames.len()
}
}
/// Decoder dispatcher - selects the appropriate decoder based on file extension
pub struct Decoder;

impl Decoder {
    /// Pick a decoder implementation from the lowercased file extension.
    /// Anything that is not a GIF or a known video container is handed to
    /// the generic image decoder (which will error on unreadable input).
    pub fn for_path(path: &Path) -> Box<dyn DecoderTrait> {
        let ext = path
            .extension()
            .and_then(|e| e.to_str())
            .map(str::to_lowercase);
        match ext.as_deref() {
            Some("gif") => Box::new(GifDecoder),
            Some("mp4" | "mov" | "webm" | "mkv" | "avi" | "m4v") => {
                Box::new(VideoDecoder::new())
            }
            _ => Box::new(ImageDecoder),
        }
    }

    /// Convenience wrapper: dispatch on extension and decode without
    /// progress reporting.
    pub fn decode(path: &Path) -> Result<DecodedFrames, DecodeError> {
        Self::for_path(path).decode(path)
    }
}
/// Trait for all decoders
pub trait DecoderTrait: Send {
    /// Decode without progress reporting (default forwards to
    /// `decode_with_progress` with no callback).
    fn decode(&self, path: &Path) -> Result<DecodedFrames, DecodeError> {
        self.decode_with_progress(path, None)
    }
    /// Decode `path` into RGBA frames, invoking `progress` with
    /// `(frames_done, total_if_known)` as frames are produced.
    fn decode_with_progress(
        &self,
        path: &Path,
        progress: Option<ProgressCallback>,
    ) -> Result<DecodedFrames, DecodeError>;
}

141
src/decoder/video.rs Normal file
View File

@@ -0,0 +1,141 @@
use super::{DecodeError, DecodedFrames, DecoderTrait, Frame, ProgressCallback};
use ffmpeg_sidecar::command::FfmpegCommand;
use ffmpeg_sidecar::event::{FfmpegEvent, OutputVideoFrame};
use std::path::Path;
/// Decodes video files by piping raw RGBA frames out of an `ffmpeg` subprocess.
pub struct VideoDecoder {
    // When set, replaces the framerate detected by ffprobe.
    fps_override: Option<f64>,
}

impl VideoDecoder {
    /// Decoder that uses the framerate detected by ffprobe.
    pub fn new() -> Self {
        Self { fps_override: None }
    }

    /// Decoder with a fixed framerate, ignoring what ffprobe reports.
    #[allow(dead_code)]
    pub fn with_fps(fps: f64) -> Self {
        Self {
            fps_override: Some(fps),
        }
    }

    /// Get video metadata (fps, duration, frame_count) using ffprobe
    ///
    /// Runs `ffprobe` on the first video stream and parses its CSV output
    /// (`r_frame_rate,duration,nb_frames`). Falls back to 30 fps when the
    /// rate cannot be parsed, and estimates the frame count from
    /// duration * fps when the container does not record `nb_frames`.
    fn get_video_info(path: &Path) -> Result<(f64, Option<f64>, Option<usize>), DecodeError> {
        use std::process::Command;
        let output = Command::new("ffprobe")
            .args([
                "-v", "quiet",
                "-select_streams", "v:0",
                "-show_entries", "stream=r_frame_rate,duration,nb_frames",
                "-of", "csv=p=0",
            ])
            .arg(path)
            .output()
            .map_err(|e| DecodeError::Video(format!("Failed to run ffprobe: {}", e)))?;
        let stdout = String::from_utf8_lossy(&output.stdout);
        let parts: Vec<&str> = stdout.trim().split(',').collect();
        // Parse frame rate (format: "num/den")
        let fps = if let Some(rate_str) = parts.first() {
            if let Some((num, den)) = rate_str.split_once('/') {
                let num: f64 = num.parse().unwrap_or(30.0);
                let den: f64 = den.parse().unwrap_or(1.0);
                if den > 0.0 { num / den } else { 30.0 }
            } else {
                rate_str.parse().unwrap_or(30.0)
            }
        } else {
            30.0
        };
        // Parse duration in seconds (non-numeric values like "N/A" become None)
        let duration = parts.get(1).and_then(|s| s.parse().ok());
        // Parse frame count (if available)
        let frame_count = parts.get(2).and_then(|s| s.parse().ok());
        // Estimate frame count from duration if not available
        let frame_count: Option<usize> = frame_count.or_else(|| {
            duration.map(|d: f64| (d * fps).ceil() as usize)
        });
        Ok((fps, duration, frame_count))
    }
}
impl DecoderTrait for VideoDecoder {
    /// Decode a video by spawning ffmpeg and collecting rawvideo RGBA frames
    /// from its stdout. Every frame is assigned the same duration, derived
    /// from the (possibly overridden) framerate.
    fn decode_with_progress(
        &self,
        path: &Path,
        progress: Option<ProgressCallback>,
    ) -> Result<DecodedFrames, DecodeError> {
        // Get video info for frame timing and progress; any ffprobe failure
        // degrades to 30 fps with an unknown frame total.
        let (detected_fps, _duration, estimated_frames) = Self::get_video_info(path).unwrap_or((30.0, None, None));
        let fps = self.fps_override.unwrap_or(detected_fps);
        // NOTE(review): integer truncation here, e.g. 30 fps -> 33 ms/frame,
        // so long clips drift slightly from their real duration.
        let frame_duration_ms = (1000.0 / fps) as u64;
        // Run ffmpeg to extract raw RGBA frames
        let mut ffmpeg = FfmpegCommand::new()
            .input(path.to_string_lossy().to_string())
            .args(["-f", "rawvideo"])
            .args(["-pix_fmt", "rgba"])
            .args(["-"]) // Output to stdout
            .spawn()
            .map_err(|e| DecodeError::Video(format!("Failed to spawn ffmpeg: {}. Is ffmpeg installed?", e)))?;
        let mut frames = Vec::new();
        let mut width = 0u32;
        let mut height = 0u32;
        let mut has_alpha = false;
        // Process ffmpeg output events
        let iter = ffmpeg.iter()
            .map_err(|e| DecodeError::Video(format!("Failed to iterate ffmpeg output: {}", e)))?;
        for event in iter {
            match event {
                FfmpegEvent::OutputFrame(OutputVideoFrame { data, width: w, height: h, .. }) => {
                    // Record the output dimensions from the first frame.
                    if width == 0 {
                        width = w;
                        height = h;
                    }
                    // Check for alpha (any pixel with alpha < 255); once found,
                    // skip the scan for all remaining frames.
                    if !has_alpha {
                        for chunk in data.chunks(4) {
                            if chunk.len() == 4 && chunk[3] < 255 {
                                has_alpha = true;
                                break;
                            }
                        }
                    }
                    frames.push(Frame {
                        rgba: data,
                        width: w,
                        height: h,
                        duration_ms: frame_duration_ms,
                    });
                    // Report progress
                    if let Some(ref cb) = progress {
                        cb(frames.len(), estimated_frames);
                    }
                }
                FfmpegEvent::Error(e) | FfmpegEvent::ParsedStreamMapping(e) => {
                    // Log but don't fail on warnings.
                    // NOTE(review): stream-mapping lines are grouped with
                    // errors here — presumably just to ignore both; confirm.
                    tracing::debug!("ffmpeg: {}", e);
                }
                _ => {}
            }
        }
        if frames.is_empty() {
            return Err(DecodeError::Video("No frames decoded from video".to_string()));
        }
        Ok(DecodedFrames::new(frames, width, height, has_alpha))
    }
}

178
src/encoder/avif.rs Normal file
View File

@@ -0,0 +1,178 @@
use crate::app::EncodingSettings;
use crate::decoder::DecodedFrames;
use libavif::{AvifImage, Encoder, RgbPixels};
use std::fs::File;
use std::io::Write;
use std::path::Path;
use std::sync::Arc;
use thiserror::Error;
/// Errors produced while encoding frames to an AVIF file.
#[derive(Error, Debug)]
pub enum EncodeError {
    /// Failure creating or writing the output file.
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    /// Failure reported by libavif (stringified).
    #[error("AVIF encoding error: {0}")]
    Avif(String),
}

/// Progress callback type - receives (current, total)
pub type EncodeProgressCallback = Arc<dyn Fn(usize, usize) + Send + Sync>;

/// Wrapper around the libavif `Encoder`, capturing the subset of
/// `EncodingSettings` that affects encoding.
pub struct AvifEncoder {
    // Color quality 0-100 (forced to 100 when lossless).
    quality: u8,
    // Alpha-plane quality 0-100 (forced to 100 when lossless).
    alpha_quality: u8,
    // libavif speed 0 (slowest/best) to 10 (fastest).
    speed: u8,
    // Whether lossless mode was requested.
    lossless: bool,
}
impl AvifEncoder {
    /// Capture the encode-relevant settings. Lossless forces both quality
    /// values to 100 regardless of the sliders.
    pub fn new(settings: &EncodingSettings) -> Self {
        Self {
            quality: if settings.lossless {
                100
            } else {
                settings.quality
            },
            alpha_quality: if settings.lossless {
                100
            } else {
                settings.alpha_quality
            },
            speed: settings.speed,
            lossless: settings.lossless,
        }
    }

    /// Encode to `output_path` without progress reporting.
    pub fn encode(&self, frames: &DecodedFrames, output_path: &Path) -> Result<(), EncodeError> {
        self.encode_with_progress(frames, output_path, None)
    }

    /// Encode `frames` to an AVIF file at `output_path`, reporting
    /// `(frames_done, total)` via `progress`. Single-frame inputs produce a
    /// still AVIF; multi-frame inputs produce an animated AVIF.
    pub fn encode_with_progress(
        &self,
        frames: &DecodedFrames,
        output_path: &Path,
        progress: Option<EncodeProgressCallback>,
    ) -> Result<(), EncodeError> {
        if frames.frames.is_empty() {
            return Err(EncodeError::Avif("No frames to encode".to_string()));
        }
        let data = if frames.is_animated() {
            self.encode_animated_with_progress(frames, progress)?
        } else {
            // Report progress for single frame
            if let Some(ref cb) = progress {
                cb(0, 1);
            }
            let result = self.encode_single(frames)?;
            if let Some(ref cb) = progress {
                cb(1, 1);
            }
            result
        };
        let mut file = File::create(output_path)?;
        file.write_all(&data)?;
        Ok(())
    }

    /// Encode a single still image (first frame only).
    fn encode_single(&self, frames: &DecodedFrames) -> Result<Vec<u8>, EncodeError> {
        let frame = &frames.frames[0];
        // Create AvifImage from RGBA data
        let image = self.create_avif_image(
            frame.width,
            frame.height,
            &frame.rgba,
            frames.has_alpha,
        )?;
        // Configure encoder
        let mut encoder = Encoder::new();
        self.configure_encoder(&mut encoder);
        // Encode single image
        let data = encoder
            .encode(&image)
            .map_err(|e| EncodeError::Avif(format!("Encoding failed: {:?}", e)))?;
        Ok(data.to_vec())
    }

    /// Non-progress variant of animated encoding. Currently unused (callers
    /// go through `encode_with_progress`); kept for API symmetry, marked per
    /// this crate's `#[allow(dead_code)]` convention.
    #[allow(dead_code)]
    fn encode_animated(&self, frames: &DecodedFrames) -> Result<Vec<u8>, EncodeError> {
        self.encode_animated_with_progress(frames, None)
    }

    /// Encode all frames as an animated AVIF, invoking `progress` after each
    /// frame is submitted to the encoder.
    fn encode_animated_with_progress(
        &self,
        frames: &DecodedFrames,
        progress: Option<EncodeProgressCallback>,
    ) -> Result<Vec<u8>, EncodeError> {
        let mut encoder = Encoder::new();
        self.configure_encoder(&mut encoder);
        // Set timescale to milliseconds, so per-frame durations (which are
        // stored in ms) can be passed through unchanged.
        encoder.set_timescale(1000);
        let total_frames = frames.frames.len();
        for (i, frame) in frames.frames.iter().enumerate() {
            let image = self.create_avif_image(
                frame.width,
                frame.height,
                &frame.rgba,
                frames.has_alpha,
            )?;
            // Add frame with duration
            encoder
                .add_image(&image, frame.duration_ms, libavif::AddImageFlags::NONE)
                .map_err(|e| EncodeError::Avif(format!("Failed to add frame: {:?}", e)))?;
            // Report progress after each frame is added
            if let Some(ref cb) = progress {
                cb(i + 1, total_frames);
            }
        }
        let data = encoder
            .finish()
            .map_err(|e| EncodeError::Avif(format!("Failed to finish encoding: {:?}", e)))?;
        Ok(data.to_vec())
    }

    /// Convert one RGBA buffer into a libavif image. Sources with alpha use
    /// 4:4:4 chroma; fully-opaque sources use the smaller 4:2:0.
    fn create_avif_image(
        &self,
        width: u32,
        height: u32,
        rgba: &[u8],
        has_alpha: bool,
    ) -> Result<AvifImage, EncodeError> {
        // Create RGB pixels from RGBA data
        let rgb = RgbPixels::new(width, height, rgba)
            .map_err(|e| EncodeError::Avif(format!("Failed to create RGB pixels: {:?}", e)))?;
        // Convert to AvifImage
        let image = rgb.to_image(if has_alpha {
            libavif::YuvFormat::Yuv444
        } else {
            libavif::YuvFormat::Yuv420
        });
        Ok(image)
    }

    /// Apply quality/speed settings to a libavif encoder instance.
    fn configure_encoder(&self, encoder: &mut Encoder) {
        // Quality 0-100 maps directly to libavif quality
        encoder.set_quality(self.quality);
        encoder.set_alpha_quality(self.alpha_quality);
        encoder.set_speed(self.speed);
        // Defensive: `new` already forces 100/100 when lossless, but keeping
        // the invariant local guards against future constructor changes.
        if self.lossless {
            encoder.set_quality(100);
            encoder.set_alpha_quality(100);
        }
    }
}

3
src/encoder/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
mod avif;
pub use avif::{AvifEncoder, EncodeProgressCallback};

28
src/main.rs Normal file
View File

@@ -0,0 +1,28 @@
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
mod app;
mod decoder;
mod encoder;
mod ui;
use app::AvifMakerApp;
use eframe::egui;
/// Application entry point: set up logging, configure the native window,
/// and hand control to eframe's event loop.
fn main() -> eframe::Result<()> {
    // Initialize logging
    tracing_subscriber::fmt::init();
    // Window configuration: resizable with a sensible minimum size, and
    // native drag-and-drop enabled so files can be dropped onto the app.
    let viewport = egui::ViewportBuilder::default()
        .with_inner_size([900.0, 700.0])
        .with_min_inner_size([600.0, 400.0])
        .with_drag_and_drop(true);
    let options = eframe::NativeOptions {
        viewport,
        ..Default::default()
    };
    eframe::run_native(
        "AVIF Maker",
        options,
        Box::new(|cc| Ok(Box::new(AvifMakerApp::new(cc)))),
    )
}

127
src/ui/drop_zone.rs Normal file
View File

@@ -0,0 +1,127 @@
use eframe::egui::{self, Color32, Stroke, Vec2};
use std::path::PathBuf;
/// Drag-and-drop target area that also opens a file dialog when clicked.
pub struct DropZone {
    // True while the OS reports files being dragged over the window.
    is_hovering: bool,
}

impl DropZone {
    pub fn new() -> Self {
        Self { is_hovering: false }
    }

    /// Paint the drop zone and invoke `on_files_dropped` with any files the
    /// user picks from the click-to-browse dialog. (Actual drag-and-drop
    /// delivery is handled by the app's `update` via `dropped_files`.)
    pub fn show<F>(&mut self, ui: &mut egui::Ui, mut on_files_dropped: F)
    where
        F: FnMut(Vec<PathBuf>),
    {
        let available_width = ui.available_width();
        let height = 120.0;
        // Check for drag-and-drop hover state (window-wide, not zone-local)
        let is_hovering = ui.ctx().input(|i| !i.raw.hovered_files.is_empty());
        self.is_hovering = is_hovering;
        // Style based on hover state
        let (bg_color, stroke_color, stroke_width) = if self.is_hovering {
            (
                Color32::from_rgba_unmultiplied(100, 149, 237, 30), // Cornflower blue, semi-transparent
                Color32::from_rgb(100, 149, 237),
                2.0,
            )
        } else {
            (
                Color32::from_gray(40),
                Color32::from_gray(80),
                1.0,
            )
        };
        let (rect, response) = ui.allocate_exact_size(Vec2::new(available_width, height), egui::Sense::click());
        // Draw background
        ui.painter().rect(
            rect,
            8.0,
            bg_color,
            Stroke::new(stroke_width, stroke_color),
        );
        // Draw dashed border effect (using multiple small rects)
        if !self.is_hovering {
            let dash_length = 10.0;
            let gap = 5.0;
            let inset = 4.0;
            let inner_rect = rect.shrink(inset);
            // Top and bottom
            let mut x = inner_rect.min.x;
            while x < inner_rect.max.x - dash_length {
                ui.painter().line_segment(
                    [egui::pos2(x, inner_rect.min.y), egui::pos2(x + dash_length, inner_rect.min.y)],
                    Stroke::new(1.0, Color32::from_gray(60)),
                );
                ui.painter().line_segment(
                    [egui::pos2(x, inner_rect.max.y), egui::pos2(x + dash_length, inner_rect.max.y)],
                    Stroke::new(1.0, Color32::from_gray(60)),
                );
                x += dash_length + gap;
            }
            // Left and right
            let mut y = inner_rect.min.y;
            while y < inner_rect.max.y - dash_length {
                ui.painter().line_segment(
                    [egui::pos2(inner_rect.min.x, y), egui::pos2(inner_rect.min.x, y + dash_length)],
                    Stroke::new(1.0, Color32::from_gray(60)),
                );
                ui.painter().line_segment(
                    [egui::pos2(inner_rect.max.x, y), egui::pos2(inner_rect.max.x, y + dash_length)],
                    Stroke::new(1.0, Color32::from_gray(60)),
                );
                y += dash_length + gap;
            }
        }
        // Draw text (centered prompt, brighter while hovering)
        let text = if self.is_hovering {
            "Drop files here"
        } else {
            "Drag and drop files here\nor click to browse"
        };
        let text_color = if self.is_hovering {
            Color32::WHITE
        } else {
            Color32::from_gray(180)
        };
        ui.painter().text(
            rect.center(),
            egui::Align2::CENTER_CENTER,
            text,
            egui::FontId::proportional(16.0),
            text_color,
        );
        // Supported formats hint
        ui.painter().text(
            egui::pos2(rect.center().x, rect.max.y - 20.0),
            egui::Align2::CENTER_CENTER,
            "GIF, PNG, JPEG, WebP, MP4, MOV, WebM",
            egui::FontId::proportional(11.0),
            Color32::from_gray(120),
        );
        // Handle click to open file dialog; extension filters mirror
        // AvifMakerApp's supported-format list
        if response.clicked() {
            let dialog = rfd::FileDialog::new()
                .add_filter("All Supported", &["gif", "png", "jpg", "jpeg", "webp", "mp4", "mov", "webm", "mkv", "avi", "m4v"])
                .add_filter("Images", &["gif", "png", "jpg", "jpeg", "webp"])
                .add_filter("Videos", &["mp4", "mov", "webm", "mkv", "avi", "m4v"]);
            if let Some(paths) = dialog.pick_files() {
                on_files_dropped(paths);
            }
        }
    }
}

7
src/ui/mod.rs Normal file
View File

@@ -0,0 +1,7 @@
//! UI components: the drag-and-drop zone, the conversion job queue, and the
//! encoding settings panel.
mod drop_zone;
mod queue;
mod settings;
// Re-exported so callers can write `crate::ui::DropZone` instead of reaching
// into the private submodules.
pub use drop_zone::DropZone;
pub use queue::Queue;
pub use settings::Settings;

142
src/ui/queue.rs Normal file
View File

@@ -0,0 +1,142 @@
use crate::app::{ConversionJob, JobStatus};
use eframe::egui::{self, Color32, RichText, Vec2};
pub struct Queue;
impl Queue {
pub fn new() -> Self {
Self
}
/// Shows the queue and returns a list of job IDs to remove
pub fn show(&mut self, ui: &mut egui::Ui, jobs: &[ConversionJob]) -> Vec<usize> {
let mut jobs_to_remove = Vec::new();
if jobs.is_empty() {
ui.centered_and_justified(|ui| {
ui.label(
RichText::new("No files in queue")
.size(14.0)
.color(Color32::from_gray(120)),
);
});
return jobs_to_remove;
}
egui::ScrollArea::vertical()
.auto_shrink([false, false])
.show(ui, |ui| {
for job in jobs {
let remove = self.show_job_item(ui, job);
if remove {
jobs_to_remove.push(job.id);
}
ui.add_space(4.0);
}
});
jobs_to_remove
}
fn show_job_item(&self, ui: &mut egui::Ui, job: &ConversionJob) -> bool {
let mut should_remove = false;
let frame = egui::Frame::none()
.fill(Color32::from_gray(35))
.rounding(4.0)
.inner_margin(8.0);
frame.show(ui, |ui| {
ui.horizontal(|ui| {
// Status indicator
let status_color = match job.status {
JobStatus::Queued => Color32::from_gray(100),
JobStatus::Decoding | JobStatus::Encoding => Color32::from_rgb(100, 149, 237),
JobStatus::Complete => Color32::from_rgb(50, 205, 50),
JobStatus::Failed => Color32::from_rgb(220, 20, 60),
};
let (rect, _) = ui.allocate_exact_size(Vec2::new(8.0, 8.0), egui::Sense::hover());
ui.painter()
.circle_filled(rect.center(), 4.0, status_color);
ui.add_space(8.0);
// File info
ui.vertical(|ui| {
// Filename
let filename = job
.input_path
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("Unknown");
ui.label(RichText::new(filename).strong());
// Status text and progress
ui.horizontal(|ui| {
// Build status label with detail
let status_text = if let Some(ref detail) = job.progress_detail {
format!("{} ({})", job.status, detail)
} else {
job.status.to_string()
};
ui.label(
RichText::new(&status_text)
.small()
.color(status_color),
);
if job.status == JobStatus::Decoding || job.status == JobStatus::Encoding {
ui.add_space(8.0);
let progress_bar = egui::ProgressBar::new(job.progress)
.desired_width(120.0)
.show_percentage();
ui.add(progress_bar);
}
if let Some(ref error) = job.error {
ui.add_space(8.0);
ui.label(RichText::new(error).small().color(Color32::from_rgb(220, 20, 60)));
}
if job.status == JobStatus::Complete {
if let Some(ref output) = job.output_path {
ui.add_space(8.0);
if ui.small_button("Open").clicked() {
let _ = open::that(output);
}
if ui.small_button("Show in Finder").clicked() {
let _ = open::that(output.parent().unwrap_or(output));
}
}
}
});
// Path (smaller, dimmed)
let path_str = job.input_path.display().to_string();
ui.label(
RichText::new(&path_str)
.small()
.color(Color32::from_gray(100)),
);
});
// Remove button (right side)
ui.with_layout(egui::Layout::right_to_left(egui::Align::Center), |ui| {
if job.status == JobStatus::Queued
|| job.status == JobStatus::Complete
|| job.status == JobStatus::Failed
{
if ui.small_button("").clicked() {
should_remove = true;
}
}
});
});
});
should_remove
}
}

161
src/ui/settings.rs Normal file
View File

@@ -0,0 +1,161 @@
use crate::app::EncodingSettings;
use eframe::egui;
/// Stateless widget that edits the shared `EncodingSettings`.
///
/// All persisted state lives in the `EncodingSettings` passed to [`Settings::show`];
/// this type only renders the controls.
pub struct Settings;

impl Default for Settings {
    fn default() -> Self {
        Self::new()
    }
}

impl Settings {
    /// Creates the settings panel widget (carries no state of its own).
    pub fn new() -> Self {
        Self
    }

    /// Renders one labelled slider group: a label with the current value
    /// right-aligned, the slider itself, and a dimmed hint line underneath.
    fn slider_section<Num>(
        ui: &mut egui::Ui,
        label: &str,
        value: &mut Num,
        range: std::ops::RangeInclusive<Num>,
        hint: &str,
    ) where
        // `Numeric` is the same bound `egui::Slider::new` requires, so type
        // inference for the range literals is unchanged from inline code.
        Num: egui::emath::Numeric + std::fmt::Display,
    {
        ui.horizontal(|ui| {
            ui.label(label);
            ui.with_layout(egui::Layout::right_to_left(egui::Align::Center), |ui| {
                ui.label(format!("{}", value));
            });
        });
        ui.add(egui::Slider::new(value, range).show_value(false));
        ui.add_space(4.0);
        ui.label(egui::RichText::new(hint).small().weak());
    }

    /// Visual divider between settings groups.
    fn section_break(ui: &mut egui::Ui) {
        ui.add_space(16.0);
        ui.separator();
        ui.add_space(8.0);
    }

    /// Draws the full encoding-settings panel, mutating `settings` in place
    /// as the user interacts with the controls.
    pub fn show(&mut self, ui: &mut egui::Ui, settings: &mut EncodingSettings) {
        ui.heading("Encoding Settings");
        ui.add_space(12.0);
        // Color quality.
        Self::slider_section(
            ui,
            "Quality:",
            &mut settings.quality,
            0..=100,
            "0 = smallest file, 100 = best quality",
        );
        ui.add_space(12.0);
        // Alpha-channel quality (independent of color quality).
        Self::slider_section(
            ui,
            "Alpha Quality:",
            &mut settings.alpha_quality,
            0..=100,
            "Separate quality for transparency",
        );
        ui.add_space(12.0);
        // Encoder speed/quality tradeoff.
        Self::slider_section(
            ui,
            "Speed:",
            &mut settings.speed,
            0..=10,
            "0 = slowest/best, 10 = fastest",
        );
        Self::section_break(ui);
        // Lossless checkbox. Enabling it pins both quality values to 100;
        // disabling it leaves the last values in place.
        let mut lossless = settings.lossless;
        if ui.checkbox(&mut lossless, "Lossless mode").changed() {
            settings.lossless = lossless;
            if lossless {
                settings.quality = 100;
                settings.alpha_quality = 100;
            }
        }
        ui.add_space(4.0);
        ui.label(
            egui::RichText::new("Perfect quality, larger files")
                .small()
                .weak(),
        );
        Self::section_break(ui);
        // FPS override: `None` means auto-detect from the source video.
        ui.label("FPS Override (for videos):");
        ui.add_space(4.0);
        let has_override = settings.fps_override.is_some();
        let mut fps_value = settings.fps_override.unwrap_or(30.0);
        ui.horizontal(|ui| {
            let mut use_override = has_override;
            if ui.checkbox(&mut use_override, "").changed() {
                // Seed the override with the current drag value when enabling.
                settings.fps_override = use_override.then_some(fps_value);
            }
            // NOTE: `has_override` is this frame's snapshot, so the drag value
            // becomes editable on the frame *after* the checkbox is ticked —
            // standard immediate-mode one-frame lag.
            ui.add_enabled_ui(has_override, |ui| {
                let fps_edit = egui::DragValue::new(&mut fps_value)
                    .speed(0.5)
                    .range(1.0..=120.0)
                    .suffix(" fps");
                if ui.add(fps_edit).changed() && has_override {
                    settings.fps_override = Some(fps_value);
                }
            });
            if !has_override {
                ui.label(egui::RichText::new("Auto-detect").weak());
            }
        });
        Self::section_break(ui);
        // Output directory: either "same as source" or an explicit folder.
        ui.label("Output Directory:");
        ui.add_space(4.0);
        if ui
            .checkbox(&mut settings.use_same_directory, "Same as source")
            .changed()
            && settings.use_same_directory
        {
            // Explicit directory is meaningless when mirroring the source.
            settings.output_directory = None;
        }
        ui.add_enabled_ui(!settings.use_same_directory, |ui| {
            ui.horizontal(|ui| {
                let dir_text = settings
                    .output_directory
                    .as_ref()
                    .map(|p| p.display().to_string())
                    .unwrap_or_else(|| "Not set".to_string());
                ui.label(egui::RichText::new(&dir_text).small());
            });
            if ui.button("Browse...").clicked() {
                if let Some(path) = rfd::FileDialog::new().pick_folder() {
                    settings.output_directory = Some(path);
                }
            }
        });
    }
}