diff --git a/Cargo.lock b/Cargo.lock
index 178b729..5903249 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -298,9 +298,9 @@ dependencies = [

 [[package]]
 name = "gif-dispose"
-version = "5.0.0-beta.2"
+version = "5.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0d20a3802e15ff705c260e39152ff1987145a1c5ae016bc3d510abceb45b9ed"
+checksum = "781005a5985b4c723fd3e6586df79d823151846ebcbcf2fcc7e3d3fba18c2d51"
 dependencies = [
  "gif",
  "imgref",
diff --git a/Cargo.toml b/Cargo.toml
index 1ebb0ef..52cb9fc 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -24,7 +24,7 @@ required-features = ["binary"]
 clap = { version = "4.5.4", features = ["cargo"], optional = true }
 imgref = "1.10.1"
 gif = { version = "0.13.1", default-features = false, features = ["std", "raii_no_panic"] }
-gif-dispose = "5.0.0-beta.2"
+gif-dispose = "5.0.0"
 imagequant = "4.3.0"
 lodepng = { version = "3.10.1", optional = true }
 pbr = { version = "1.1.1", optional = true }
@@ -40,6 +40,8 @@ loop9 = "0.1.5"
 num-traits = { version = "0.2.17", features = ["i128", "std"] }
 crossbeam-utils = "0.8.19"
 ordered-channel = { version = "1.0.0", features = ["crossbeam-channel"] }
+y4m = { version = "0.8.0", optional = true }
+yuv = { version = "0.1.5", optional = true }

 [dependencies.ffmpeg]
 package = "ffmpeg-next"
@@ -54,9 +56,9 @@ lodepng = "3.10.1"
 [features]
 # `cargo build` will skip the binaries with missing `required-features`
 # so all CLI dependencies have to be enabled by default.
-default = ["gifsicle", "binary"]
+default = ["gifsicle", "video", "binary"]
 # You can disable this feture when using gifski as a library.
-binary = ["dep:clap", "png", "pbr", "dep:wild", "dep:natord", "dep:dunce"]
+binary = ["dep:clap", "dep:yuv", "dep:y4m", "png", "pbr", "dep:wild", "dep:natord", "dep:dunce"]
 capi = [] # internal for cargo-c only
 png = ["dep:lodepng"]
 # Links dynamically to ffmpeg. Needs ffmpeg devel package installed on the system.
diff --git a/src/bin/gifski.rs b/src/bin/gifski.rs
index 55bbd5a..1deb40b 100644
--- a/src/bin/gifski.rs
+++ b/src/bin/gifski.rs
@@ -8,6 +8,7 @@
 #![allow(clippy::redundant_closure_for_method_calls)]
 #![allow(clippy::wildcard_imports)]

+use clap::error::ErrorKind::MissingRequiredArgument;
 use clap::builder::NonEmptyStringValueParser;
 use std::io::stdin;
 use std::io::BufRead;
@@ -23,6 +24,7 @@ use clap::value_parser;
 mod ffmpeg_source;
 mod png;
 mod gif_source;
+mod y4m_source;
 mod source;

 use crate::source::Source;
@@ -43,7 +45,7 @@ use std::time::Duration;
 #[cfg(feature = "video")]
 const VIDEO_FRAMES_ARG_HELP: &str = "one video file supported by FFmpeg, or multiple PNG image files";
 #[cfg(not(feature = "video"))]
-const VIDEO_FRAMES_ARG_HELP: &str = "PNG image files for the animation frames";
+const VIDEO_FRAMES_ARG_HELP: &str = "PNG image files for the animation frames, or a .y4m file";

 fn main() {
     if let Err(e) = bin_main() {
@@ -171,7 +173,13 @@ fn bin_main() -> BinResult<()> {
         .num_args(1)
         .value_parser(parse_color)
         .value_name("RGBHEX"))
-        .get_matches_from(wild::args_os());
+        .try_get_matches_from(wild::args_os())
+        .unwrap_or_else(|e| {
+            if e.kind() == MissingRequiredArgument && !stdin().is_terminal() {
+                eprintln!("If you're trying to pipe a file, use \"-\" as the input file name");
+            }
+            e.exit()
+        });

     let mut frames: Vec<&str> = matches.get_many::<String>("FILES").ok_or("?")?.map(|s| s.as_str()).collect();
     if !matches.get_flag("nosort") && frames.len() > 1 {
@@ -280,7 +288,7 @@ fn bin_main() -> BinResult<()> {
             _ if path.is_dir() => {
                 return Err(format!("{} is a directory, not a PNG file", path.display()).into());
             },
-            FileType::Other => get_video_decoder(src, rate, settings)?,
+            other_type => get_video_decoder(other_type, src, rate, settings)?,
         }
     } else {
         if speed != 1.0 {
@@ -294,9 +302,8 @@ fn bin_main() -> BinResult<()> {
                 compression. Please don't use JPEG for making GIF animations. Please re-export\n\
                 your animation using the PNG format.".into())
             },
-            FileType::GIF => {
-                return Err("Too many arguments. Unexpectedly got a GIF as an input frame. Only PNG format is supported for individual frames.".into());
-            },
+            FileType::GIF => return unexpected("GIF"),
+            FileType::Y4M => return unexpected("Y4M"),
             _ => Box::new(png::Lodecoder::new(frames, rate)),
         }
     };
@@ -362,6 +369,11 @@ fn check_errors(err1: Result<(), gifski::Error>, err2: BinResult<()>) -> BinResult<()> {
     }
 }

+#[cold]
+fn unexpected(ftype: &'static str) -> BinResult<()> {
+    Err(format!("Too many arguments. Unexpectedly got a {ftype} as an input frame. Only PNG format is supported for individual frames.").into())
+}
+
 #[cold]
 fn panic_err(err: Box<dyn std::any::Any + Send>) -> String {
     err.downcast::<String>().map(|s| *s)
@@ -398,16 +410,21 @@ fn color_parser() {
 }

 #[allow(clippy::upper_case_acronyms)]
+#[derive(PartialEq)]
 enum FileType {
-    PNG, GIF, JPEG, Other,
+    PNG, GIF, JPEG, Y4M, Other,
 }

 fn file_type(src: &mut SrcPath) -> BinResult<FileType> {
     let mut buf = [0; 4];
     match src {
-        SrcPath::Path(path) => {
-            let mut file = std::fs::File::open(path)?;
-            file.read_exact(&mut buf)?;
+        SrcPath::Path(path) => match path.extension() {
+            Some(e) if e.eq_ignore_ascii_case("y4m") => return Ok(FileType::Y4M),
+            Some(e) if e.eq_ignore_ascii_case("png") => return Ok(FileType::PNG),
+            _ => {
+                let mut file = std::fs::File::open(path)?;
+                file.read_exact(&mut buf)?;
+            }
         },
         SrcPath::Stdin(stdin) => {
             buf.copy_from_slice(&stdin.fill_buf()?[..4]);
@@ -421,6 +438,9 @@ fn file_type(src: &mut SrcPath) -> BinResult<FileType> {
     if &buf == b"GIF8" {
         return Ok(FileType::GIF);
     }
+    if &buf == b"YUV4" {
+        return Ok(FileType::Y4M);
+    }
     if buf[..2] == [0xFF, 0xD8] {
         return Ok(FileType::JPEG);
     }
@@ -483,23 +503,37 @@ impl fmt::Display for DestPath<'_> {
 }

 #[cfg(feature = "video")]
-fn get_video_decoder(path: SrcPath, fps: source::Fps, settings: Settings) -> BinResult<Box<dyn Source>> {
-    Ok(Box::new(ffmpeg_source::FfmpegDecoder::new(path, fps, settings)?))
+fn get_video_decoder(ftype: FileType, src: SrcPath, fps: source::Fps, settings: Settings) -> BinResult<Box<dyn Source>> {
+    Ok(if ftype == FileType::Y4M {
+        Box::new(y4m_source::Y4MDecoder::new(src, fps)?)
+    } else {
+        Box::new(ffmpeg_source::FfmpegDecoder::new(src, fps, settings)?)
+    })
 }

 #[cfg(not(feature = "video"))]
 #[cold]
-fn get_video_decoder(_: SrcPath<'_>, _: source::Fps, _: Settings) -> BinResult<Box<dyn Source>> {
-    Err(r"Video support is permanently disabled in this executable.
+fn get_video_decoder(ftype: FileType, path: SrcPath, fps: source::Fps, _: Settings) -> BinResult<Box<dyn Source>> {
+    if ftype == FileType::Y4M {
+        Ok(Box::new(y4m_source::Y4MDecoder::new(path, fps)?))
+    } else {
+        let rel_path = path.file_name().map(Path::new).unwrap_or(path);
+        Err(format!(r#"Video support is permanently disabled in this distribution of gifski.

-To enable video decoding you need to recompile gifski from source with:
-cargo build --release --features=video
-or
-cargo install gifski --features=video
+The only 'video' format supported at this time is YUV4MPEG2, which can be piped from ffmpeg:

-Alternatively, use ffmpeg command to export PNG frames, and then specify
+    ffmpeg -i "{src}" -f yuv4mpegpipe - | gifski -o "{gif}" -
+
+To enable full video decoding you need to recompile gifski from source.
+https://github.com/imageoptim/gifski
+
+Alternatively, use ffmpeg or another tool to export PNG frames, and then specify
 the PNG files as input for this executable. Instructions on https://gif.ski
-".into())
+"#,
+src = path.display(),
+gif = rel_path.with_extension("gif").display()
+).into())
+    }
 }

 struct ProgressBar {
diff --git a/src/bin/y4m_source.rs b/src/bin/y4m_source.rs
new file mode 100644
index 0000000..74ca621
--- /dev/null
+++ b/src/bin/y4m_source.rs
@@ -0,0 +1,215 @@
+//! Reads YUV4MPEG2 (.y4m) video as input frames for encoding into a GIF
+
+use std::io::BufReader;
+use std::io::Read;
+use imgref::ImgVec;
+use y4m::Colorspace;
+use y4m::Decoder;
+use gifski::Collector;
+use yuv::color::MatrixCoefficients;
+use yuv::color::Range;
+use yuv::convert::RGBConvert;
+use yuv::YUV;
+use crate::{SrcPath, BinResult};
+use crate::source::{Fps, Source};
+
+pub struct Y4MDecoder {
+    fps: Fps,
+    decoder: Decoder<Box<dyn Read>>,
+    file_size: Option<u64>,
+}
+
+impl Y4MDecoder {
+    pub fn new(src: SrcPath, fps: Fps) -> BinResult<Self> {
+        let mut file_size = None;
+        let reader = match src {
+            SrcPath::Path(path) => {
+                let f = std::fs::File::open(path)?;
+                let m = f.metadata()?;
+                #[cfg(unix)] {
+                    use std::os::unix::fs::MetadataExt;
+                    file_size = Some(m.size());
+                }
+                #[cfg(windows)] {
+                    use std::os::windows::fs::MetadataExt;
+                    file_size = Some(m.file_size());
+                }
+                Box::new(BufReader::new(f)) as Box<dyn Read>
+            },
+            SrcPath::Stdin(buf) => Box::new(buf) as _,
+        };
+
+        Ok(Self {
+            file_size,
+            fps,
+            decoder: Decoder::new(reader)?,
+        })
+    }
+}
+
+enum Samp {
+    Mono,
+    S1x1,
+    S2x1,
+    S2x2,
+}
+
+impl Source for Y4MDecoder {
+    fn total_frames(&self) -> Option<u64> {
+        self.file_size.map(|file_size| {
+            let w = self.decoder.get_width();
+            let h = self.decoder.get_height();
+            let d = self.decoder.get_bytes_per_sample();
+            let s = match self.decoder.get_colorspace() {
+                Colorspace::Cmono => 4,
+                Colorspace::Cmono12 => 4,
+                Colorspace::C420 => 6,
+                Colorspace::C420p10 => 6,
+                Colorspace::C420p12 => 6,
+                Colorspace::C420jpeg => 6,
+                Colorspace::C420paldv => 6,
+                Colorspace::C420mpeg2 => 6,
+                Colorspace::C422 => 8,
+                Colorspace::C422p10 => 8,
+                Colorspace::C422p12 => 8,
+                Colorspace::C444 => 12,
+                Colorspace::C444p10 => 12,
+                Colorspace::C444p12 => 12,
+                _ => 12,
+            };
+            file_size.saturating_sub(self.decoder.get_raw_params().len() as _) / (w * h * d * s / 4 + 6) as u64
+        })
+    }
+
+    fn collect(&mut self, c: &mut Collector) -> BinResult<()> {
+        let fps = self.decoder.get_framerate();
+        let frame_time = 1. / (fps.num as f64 / fps.den as f64);
+        let wanted_frame_time = 1. / f64::from(self.fps.fps);
+        let width = self.decoder.get_width();
+        let height = self.decoder.get_height();
+        let raw_params_str = &String::from_utf8_lossy(self.decoder.get_raw_params()).into_owned();
+        let range = raw_params_str.split_once("COLORRANGE=").map(|(_, r)| {
+            if r.starts_with("LIMIT") { Range::Limited } else { Range::Full }
+        });
+
+        let sd_or_hd = if height <= 480 && width <= 720 { MatrixCoefficients::BT601 } else { MatrixCoefficients::BT709 };
+
+        let (samp, conv) = match self.decoder.get_colorspace() {
+            Colorspace::Cmono => (Samp::Mono, RGBConvert::<u8>::new(range.unwrap_or(Range::Full), MatrixCoefficients::Identity)),
+            Colorspace::Cmono12 => return Err("Y4M with Cmono12 is not supported yet".into()),
+            Colorspace::C420 => (Samp::S2x2, RGBConvert::<u8>::new(range.unwrap_or(Range::Limited), MatrixCoefficients::BT601)),
+            Colorspace::C420p10 => return Err("Y4M with C420p10 is not supported yet".into()),
+            Colorspace::C420p12 => return Err("Y4M with C420p12 is not supported yet".into()),
+            Colorspace::C420jpeg => (Samp::S2x2, RGBConvert::<u8>::new(range.unwrap_or(Range::Full), MatrixCoefficients::BT601)),
+            Colorspace::C420paldv => (Samp::S2x2, RGBConvert::<u8>::new(range.unwrap_or(Range::Limited), MatrixCoefficients::BT601)),
+            Colorspace::C420mpeg2 => (Samp::S2x2, RGBConvert::<u8>::new(range.unwrap_or(Range::Limited), sd_or_hd)),
+            Colorspace::C422 => (Samp::S2x1, RGBConvert::<u8>::new(range.unwrap_or(Range::Limited), sd_or_hd)),
+            Colorspace::C422p10 => return Err("Y4M with C422p10 is not supported yet".into()),
+            Colorspace::C422p12 => return Err("Y4M with C422p12 is not supported yet".into()),
+            Colorspace::C444 => (Samp::S1x1, RGBConvert::<u8>::new(range.unwrap_or(Range::Full), MatrixCoefficients::BT709)),
+            Colorspace::C444p10 => return Err("Y4M with C444p10 is not supported yet".into()),
+            Colorspace::C444p12 => return Err("Y4M with C444p12 is not supported yet".into()),
+            _ => return Err(format!("Y4M uses unsupported color mode {raw_params_str}").into()),
+        };
+        let conv = conv?;
+        if width == 0 || width > u16::MAX as _ || height == 0 || height > u16::MAX as _ {
+            return Err("Video too large".into());
+        }
+
+        #[cold]
+        fn bad_frame(mode: &str) -> BinResult<()> {
+            Err(format!("Bad Y4M frame (using {mode})").into())
+        }
+
+        let mut idx = 0;
+        let mut presentation_timestamp = 0.0;
+        let mut wanted_pts = 0.0;
+        loop {
+            match self.decoder.read_frame() {
+                Ok(frame) => {
+                    let this_frame_pts = presentation_timestamp / f64::from(self.fps.speed);
+                    presentation_timestamp += frame_time;
+                    if presentation_timestamp < wanted_pts {
+                        continue; // skip a frame
+                    }
+                    wanted_pts += wanted_frame_time;
+
+                    let y = frame.get_y_plane();
+                    if y.is_empty() {
+                        return bad_frame(raw_params_str);
+                    }
+                    let u = frame.get_u_plane();
+                    let v = frame.get_v_plane();
+                    if v.len() != u.len() {
+                        return bad_frame(raw_params_str);
+                    }
+
+                    let mut out = Vec::new();
+                    out.try_reserve(width * height)?;
+                    match samp {
+                        Samp::Mono => todo!(),
+                        Samp::S1x1 => {
+                            if v.len() != y.len() {
+                                return bad_frame(raw_params_str);
+                            }
+
+                            let y = y.chunks_exact(width);
+                            let u = u.chunks_exact(width);
+                            let v = v.chunks_exact(width);
+                            if y.len() != v.len() {
+                                return bad_frame(raw_params_str);
+                            }
+                            for (y, (u, v)) in y.zip(u.zip(v)) {
+                                out.extend(
+                                    y.iter().copied().zip(u.iter().copied().zip(v.iter().copied()))
+                                    .map(|(y, (u, v))| {
+                                        conv.to_rgb(YUV {y, u, v}).alpha(255)
+                                    }));
+                            }
+                        },
+                        Samp::S2x1 => {
+                            let y = y.chunks_exact(width);
+                            let u = u.chunks_exact((width+1)/2);
+                            let v = v.chunks_exact((width+1)/2);
+                            if y.len() != v.len() {
+                                return bad_frame(raw_params_str);
+                            }
+                            for (y, (u, v)) in y.zip(u.zip(v)) {
+                                let u = u.iter().copied().flat_map(|x| [x, x]);
+                                let v = v.iter().copied().flat_map(|x| [x, x]);
+                                out.extend(
+                                    y.iter().copied().zip(u.zip(v))
+                                    .map(|(y, (u, v))| {
+                                        conv.to_rgb(YUV {y, u, v}).alpha(255)
+                                    }));
+                            }
+                        },
+                        Samp::S2x2 => {
+                            let y = y.chunks_exact(width);
+                            let u = u.chunks_exact((width+1)/2).flat_map(|r| [r, r]);
+                            let v = v.chunks_exact((width+1)/2).flat_map(|r| [r, r]);
+                            for (y, (u, v)) in y.zip(u.zip(v)) {
+                                let u = u.iter().copied().flat_map(|x| [x, x]);
+                                let v = v.iter().copied().flat_map(|x| [x, x]);
+                                out.extend(
+                                    y.iter().copied().zip(u.zip(v))
+                                    .map(|(y, (u, v))| {
+                                        conv.to_rgb(YUV {y, u, v}).alpha(255)
+                                    }));
+                            }
+                        },
+                    };
+                    if out.len() != width * height {
+                        return bad_frame(raw_params_str);
+                    }
+                    let pixels = ImgVec::new(out, width, height);
+
+                    c.add_frame_rgba(idx, pixels, this_frame_pts)?;
+                    idx += 1;
+                },
+                Err(y4m::Error::EOF) => break,
+                Err(e) => return Err(e.into()),
+            }
+        }
+        Ok(())
+    }
+}
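Note for reviewers: the sketch below shows, in isolation, the same conversion path that Y4MDecoder::collect takes for 4:2:0 input: build one RGBConvert per stream, repeat each chroma row for two luma rows and each chroma sample for two luma columns, then map every YUV triple to an RGBA pixel. It is a minimal illustration, not part of the patch; the function name yuv420_to_rgba, the boxed error type, the direct use of the rgb crate, and the hard-coded limited-range BT.601 choice are assumptions made for this note (the patch derives range and matrix coefficients from the Y4M header instead).

    use imgref::ImgVec;
    use rgb::RGBA8;
    use yuv::color::{MatrixCoefficients, Range};
    use yuv::convert::RGBConvert;
    use yuv::YUV;

    /// Convert one 4:2:0 frame (full-size Y plane, half-size U/V planes)
    /// into an RGBA image, mirroring the Samp::S2x2 arm of the decoder above.
    fn yuv420_to_rgba(y: &[u8], u: &[u8], v: &[u8], width: usize, height: usize)
        -> Result<ImgVec<RGBA8>, Box<dyn std::error::Error>>
    {
        // One converter per stream; the patch picks Range/MatrixCoefficients
        // from the Y4M header parameters instead of hard-coding them here.
        let conv = RGBConvert::<u8>::new(Range::Limited, MatrixCoefficients::BT601)?;

        let chroma_w = (width + 1) / 2;
        let mut out = Vec::with_capacity(width * height);

        // Each chroma row covers two luma rows, each chroma sample two luma columns.
        let u_rows = u.chunks_exact(chroma_w).flat_map(|r| [r, r]);
        let v_rows = v.chunks_exact(chroma_w).flat_map(|r| [r, r]);
        for ((y_row, u_row), v_row) in y.chunks_exact(width).zip(u_rows).zip(v_rows) {
            let u_px = u_row.iter().copied().flat_map(|x| [x, x]);
            let v_px = v_row.iter().copied().flat_map(|x| [x, x]);
            out.extend(y_row.iter().copied().zip(u_px.zip(v_px))
                .map(|(y, (u, v))| conv.to_rgb(YUV { y, u, v }).alpha(255)));
        }
        Ok(ImgVec::new(out, width, height))
    }

The decoder in the patch keeps this loop shape and only changes how the chroma iterators are expanded for 4:2:2 and 4:4:4, plus plane-size validation before conversion.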