oscilloscope-video-gen/src/render.rs

//! Frame rendering: draws waveform, XY (Lissajous), and spectrometer views as raw RGB frames.
use crate::audio::AudioData;
use crate::video::VideoEncoder;
use anyhow::{anyhow, Result};
use image::ImageBuffer;
use rustfft::{num_complex::Complex, FftPlanner};
use std::cell::RefCell;
// --- Constants ---
const FFT_SIZE: usize = 2048;
const MIN_FREQ: f32 = 20.0;
const MAX_FREQ: f32 = 20000.0;
const FREQ_BOOST_FACTOR: f32 = 5.0;
const DYNAMIC_RANGE_SCALE: f32 = 20.0;
const NOISE_FLOOR: f32 = 0.05;
const SMOOTH_RISE: f32 = 0.6; // How quickly bars rise
const SMOOTH_FALL: f32 = 0.3; // How quickly bars fall (gravity)
thread_local! {
    static FFT_PLANNER: RefCell<FftPlanner<f32>> = RefCell::new(FftPlanner::new());
    static HANN_WINDOW: Vec<f32> = (0..FFT_SIZE)
        .map(|i| 0.5 * (1.0 - (2.0 * std::f32::consts::PI * i as f32 / (FFT_SIZE - 1) as f32).cos()))
        .collect();
}
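/// Visualization layout selected for the output video.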
#[derive(Debug, Clone, Copy, clap::ValueEnum, PartialEq, Eq)]
pub enum RenderMode {
    Combined,
    Separate,
    All,
    Spectrometer,
}
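/// Rendering parameters shared by all frame-drawing functions.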
#[derive(Debug, Clone)]
pub struct RenderOptions {
    pub width: u32,
    pub height: u32,
    pub fps: u32,
    pub mode: RenderMode,
    pub left_color: image::Rgb<u8>,
    pub right_color: image::Rgb<u8>,
    pub xy_color: image::Rgb<u8>,
    pub background: image::Rgb<u8>,
    pub show_grid: bool,
    pub line_thickness: u32,
}
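/// Folds an FFT magnitude spectrum into `num_bars` logarithmically spaced bands
/// between MIN_FREQ and MAX_FREQ, returning per-bar levels in the range 0.0..=1.0.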
fn compute_raw_bars(spectrum: &[f32], num_bars: usize, sample_rate: u32) -> Vec<f32> {
    let nyquist = sample_rate as f32 / 2.0;
    let mut bars = vec![0.0; num_bars];
    for i in 0..num_bars {
        // Logarithmically spaced band edges between MIN_FREQ and MAX_FREQ.
        let f_start = MIN_FREQ * (MAX_FREQ / MIN_FREQ).powf(i as f32 / num_bars as f32);
        let f_end = MIN_FREQ * (MAX_FREQ / MIN_FREQ).powf((i + 1) as f32 / num_bars as f32);
        let bin_start = (f_start / nyquist * spectrum.len() as f32).floor() as usize;
        let bin_end = (f_end / nyquist * spectrum.len() as f32).ceil() as usize;
        let bin_end = bin_end.max(bin_start + 1).min(spectrum.len());
        let mut magnitude = 0.0f32;
        if bin_start < spectrum.len() {
            for k in bin_start..bin_end {
                magnitude = magnitude.max(spectrum[k]);
            }
        }
        // Boost higher bands, scale into a usable range, and gate the noise floor.
        let freq_factor = 1.0 + (f_start / MAX_FREQ) * FREQ_BOOST_FACTOR;
        let mut val = (magnitude * freq_factor * DYNAMIC_RANGE_SCALE).sqrt().min(1.0);
        if val < NOISE_FLOOR {
            val = 0.0;
        }
        bars[i] = val;
    }
    bars
}
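/// Applies frame-to-frame exponential smoothing to the raw bars, using a faster
/// rise (SMOOTH_RISE) than fall (SMOOTH_FALL) so bars jump up and decay gently.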
fn smooth_bars(raw_bars: &mut [Vec<f32>]) {
    if raw_bars.is_empty() {
        return;
    }
    let num_bars = raw_bars[0].len();
    let mut prev_bars = vec![0.0; num_bars];
    for frame_bars in raw_bars.iter_mut() {
        for i in 0..num_bars {
            let current_val = frame_bars[i];
            let prev_val = prev_bars[i];
            // Rise quickly, fall slowly (gravity-style decay).
            let factor = if current_val > prev_val { SMOOTH_RISE } else { SMOOTH_FALL };
            let smoothed_val = prev_val * (1.0 - factor) + current_val * factor;
            frame_bars[i] = smoothed_val;
            prev_bars[i] = smoothed_val;
        }
    }
}
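/// Draws the spectrum bars bottom-up inside the rectangle at (x_offset, y_offset)
/// with the given width and height, clipping to the frame buffer.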
fn render_smoothed_bars(
    buffer: &mut ImageBuffer<image::Rgb<u8>, Vec<u8>>,
    bars: &[f32],
    x_offset: u32,
    y_offset: u32,
    width: u32,
    height: u32,
    color: image::Rgb<u8>,
) {
    const BAR_SPACING: u32 = 1;
    let num_bars = bars.len();
    if num_bars == 0 {
        return; // Nothing to draw; also avoids a divide-by-zero below.
    }
    let bar_width = width.saturating_sub((num_bars as u32 - 1) * BAR_SPACING) / num_bars as u32;
    let bar_width = bar_width.max(1);
    for (i, &val) in bars.iter().enumerate() {
        let bar_height = (val * height as f32) as u32;
        let x = x_offset + i as u32 * (bar_width + BAR_SPACING);
        for y in 0..bar_height {
            let pixel_y = y_offset + height - 1 - y;
            for dx in 0..bar_width {
                let pixel_x = x + dx;
                if pixel_x < buffer.width() && pixel_y < buffer.height() {
                    buffer.put_pixel(pixel_x, pixel_y, color);
                }
            }
        }
    }
}
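/// Renders the whole video in three passes: per-frame FFT analysis, temporal
/// smoothing of the bars, then parallel frame rendering with in-order encoding.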
pub fn stream_frames(
    audio_data: &AudioData,
    options: &RenderOptions,
    encoder: &mut VideoEncoder,
    progress_callback: &(impl Fn(f64, usize, usize) + Send + Sync),
) -> Result<()> {
    let total_samples = audio_data.left_channel.len();
    let samples_per_frame = (audio_data.sample_rate / options.fps) as usize;
    let total_frames = ((audio_data.duration * options.fps as f64) as usize).max(1);
    use rayon::prelude::*;
    println!("Pass 1/3: Analyzing spectrum...");
    let num_bars = if options.mode == RenderMode::Spectrometer { 64 } else { 32 };
    let mut all_raw_bars = if matches!(options.mode, RenderMode::Spectrometer | RenderMode::All) {
        (0..total_frames)
            .into_par_iter()
            .map(|frame_idx| {
                let start_sample = (frame_idx * samples_per_frame).min(total_samples.saturating_sub(1));
                // Mix left and right into a mono FFT input; samples past either channel's end count as silence.
                let mut buffer: Vec<Complex<f32>> = (0..FFT_SIZE)
                    .map(|i| {
                        let idx = start_sample + i;
                        let l = audio_data.left_channel.get(idx).copied().unwrap_or(0.0);
                        let r = audio_data.right_channel.get(idx).copied().unwrap_or(0.0);
                        Complex::new(l + r, 0.0)
                    })
                    .collect();
                HANN_WINDOW.with(|win| {
                    for (sample, &w) in buffer.iter_mut().zip(win.iter()) {
                        sample.re *= w;
                    }
                });
                let fft = FFT_PLANNER.with(|p| p.borrow_mut().plan_fft_forward(FFT_SIZE));
                fft.process(&mut buffer);
                // Skip the DC bin and keep the positive-frequency half of the spectrum.
                let spectrum: Vec<f32> = buffer[1..FFT_SIZE / 2]
                    .iter()
                    .map(|c| c.norm() / FFT_SIZE as f32)
                    .collect();
                compute_raw_bars(&spectrum, num_bars, audio_data.sample_rate)
            })
            .collect()
    } else {
        Vec::new()
    };
    println!("Pass 2/3: Smoothing data...");
    if !all_raw_bars.is_empty() {
        smooth_bars(&mut all_raw_bars);
    }
    println!("Pass 3/3: Rendering and encoding frames...");
    let chunk_size = rayon::current_num_threads() * 2;
    for chunk_start in (0..total_frames).step_by(chunk_size) {
        let chunk_end = (chunk_start + chunk_size).min(total_frames);
        // Render a chunk of frames in parallel, then write them to the encoder in order.
        let frames: Vec<Result<Vec<u8>>> = (chunk_start..chunk_end)
            .into_par_iter()
            .map(|frame_idx| {
                let start_sample = (frame_idx * samples_per_frame).min(total_samples.saturating_sub(1));
                let smoothed_bars = if all_raw_bars.is_empty() {
                    None
                } else {
                    Some(all_raw_bars[frame_idx].as_slice())
                };
                Ok(draw_frame(audio_data, start_sample, samples_per_frame, options, smoothed_bars).into_raw())
            })
            .collect();
        for frame in frames {
            encoder.write_frame(&frame?)?;
        }
        progress_callback(chunk_end as f64 / total_frames as f64 * 100.0, chunk_end, total_frames);
    }
    Ok(())
}
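/// Draws a single frame for the selected render mode onto a fresh RGB buffer.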
pub fn draw_frame(
    audio_data: &AudioData,
    start_sample: usize,
    samples_per_frame: usize,
    options: &RenderOptions,
    smoothed_bars: Option<&[f32]>,
) -> ImageBuffer<image::Rgb<u8>, Vec<u8>> {
    let (width, height) = (options.width, options.height);
    let mut buffer = ImageBuffer::new(width, height);
    for p in buffer.pixels_mut() {
        *p = options.background;
    }
    if options.show_grid {
        draw_graticule(&mut buffer, options.left_color);
    }
    let end_sample = (start_sample + samples_per_frame).min(audio_data.left_channel.len());
    match options.mode {
        RenderMode::All => {
            // Quadrant layout: left/right waveforms on top, XY (Lissajous) bottom-left,
            // spectrum bars bottom-right.
            let (hh, hw) = (height / 2, width / 2);
            let samples_per_pixel = samples_per_frame as f32 / hw as f32;
            let mut pl = (hh / 2) as i32;
            let mut pr = (hh / 2) as i32;
            for x in 0..hw {
                let idx = start_sample + (x as f32 * samples_per_pixel) as usize;
                if idx >= audio_data.left_channel.len() {
                    break;
                }
                let yl = (hh / 2) as i32 - (audio_data.left_channel[idx] * (hh as f32 * 0.35)) as i32;
                let yr = (hh / 2) as i32 - (audio_data.right_channel[idx] * (hh as f32 * 0.35)) as i32;
                draw_line(&mut buffer, x as i32, pl, x as i32, yl, options.left_color);
                draw_line(&mut buffer, (hw + x) as i32, pr, (hw + x) as i32, yr, options.right_color);
                pl = yl;
                pr = yr;
            }
            let (cx, cy) = (hw / 2, hh + hh / 2);
            let scale = hw.min(hh) as f32 * 0.35;
            if start_sample < audio_data.left_channel.len() {
                let mut px = cx as i32 + (audio_data.left_channel[start_sample] * scale) as i32;
                let mut py = cy as i32 - (audio_data.right_channel[start_sample] * scale) as i32;
                for i in 1..(end_sample - start_sample).min(samples_per_frame) {
                    let idx = start_sample + i;
                    if idx >= audio_data.left_channel.len() {
                        break;
                    }
                    let x = cx as i32 + (audio_data.left_channel[idx] * scale) as i32;
                    let y = cy as i32 - (audio_data.right_channel[idx] * scale) as i32;
                    draw_line(&mut buffer, px, py, x, y, options.xy_color);
                    px = x;
                    py = y;
                }
            }
            if let Some(bars) = smoothed_bars {
                render_smoothed_bars(&mut buffer, bars, hw, hh, hw, hh, options.left_color);
            }
            // Dim dividers between the four quadrants.
            for x in 0..width {
                buffer.put_pixel(x, hh, image::Rgb([40, 40, 40]));
            }
            for y in 0..height {
                buffer.put_pixel(hw, y, image::Rgb([40, 40, 40]));
            }
        }
        RenderMode::Spectrometer => {
            if let Some(bars) = smoothed_bars {
                render_smoothed_bars(&mut buffer, bars, 0, 0, width, height, options.left_color);
            }
        }
        _ => { /* Simple waveform rendering for Combined/Separate is omitted here. */ }
    }
    buffer
}
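/// Draws a line with Bresenham's algorithm, skipping pixels outside the buffer.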
pub fn draw_line(
    buffer: &mut ImageBuffer<image::Rgb<u8>, Vec<u8>>,
    x0: i32, y0: i32, x1: i32, y1: i32,
    color: image::Rgb<u8>,
) {
    let dx = (x1 - x0).abs();
    let dy = -(y1 - y0).abs();
    let mut x = x0;
    let mut y = y0;
    let sx = if x0 < x1 { 1 } else { -1 };
    let sy = if y0 < y1 { 1 } else { -1 };
    let mut err = dx + dy;
    loop {
        if x >= 0 && x < buffer.width() as i32 && y >= 0 && y < buffer.height() as i32 {
            buffer.put_pixel(x as u32, y as u32, color);
        }
        if x == x1 && y == y1 { break; }
        let e2 = 2 * err;
        if e2 >= dy { err += dy; x += sx; }
        if e2 <= dx { err += dx; y += sy; }
    }
}
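/// Draws a simple centered crosshair graticule across the full frame.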
fn draw_graticule(buffer: &mut ImageBuffer<image::Rgb<u8>, Vec<u8>>, color: image::Rgb<u8>) {
    let (w, h) = buffer.dimensions();
    for x in 0..w { buffer.put_pixel(x, h / 2, color); }
    for y in 0..h { buffer.put_pixel(w / 2, y, color); }
}
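/// Parses a `#RRGGBB` (or `RRGGBB`) hex string into an RGB color.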
pub fn parse_rgb_hex(hex: &str) -> Result<image::Rgb<u8>> {
    let hex = hex.trim_start_matches('#');
    // Validate up front so the byte slicing below cannot panic on non-ASCII input.
    if hex.len() != 6 || !hex.chars().all(|c| c.is_ascii_hexdigit()) {
        return Err(anyhow!("Invalid RGB hex"));
    }
    let r = u8::from_str_radix(&hex[0..2], 16)?;
    let g = u8::from_str_radix(&hex[2..4], 16)?;
    let b = u8::from_str_radix(&hex[4..6], 16)?;
    Ok(image::Rgb([r, g, b]))
}