Before attempting to integrate oscilloscope properly. STILL BROKEN NOW

JorySeverijnse 2025-12-20 15:34:07 +01:00
parent 6fa754a1eb
commit 26584ea848
5 changed files with 1420 additions and 0 deletions


@@ -0,0 +1,308 @@
import { useRef, useState } from 'react';
import { Button } from '@/components/ui/button';
import { Slider } from '@/components/ui/slider';
import { Mic, Radio, Move, Upload, Play, Pause, Square, Music, Video, Download, X } from 'lucide-react';
import {
Dialog,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
} from '@/components/ui/dialog';
import { Progress } from '@/components/ui/progress';
import type { ExportStage } from '@/hooks/useOfflineVideoExport';
interface ControlPanelProps {
mode: 'normal' | 'xy';
onModeChange: (mode: 'normal' | 'xy') => void;
isActive: boolean;
isPlaying: boolean;
source: 'microphone' | 'file' | null;
fileName: string | null;
onStartMicrophone: () => void;
onLoadAudioFile: (file: File) => void;
onTogglePlayPause: () => void;
onStop: () => void;
onGainChange: (value: number) => void;
error: string | null;
isExporting: boolean;
exportProgress: number;
exportStage: ExportStage;
exportFps: number;
onExportVideo: (format: 'webm' | 'mp4') => void;
onCancelExport: () => void;
}
export const ControlPanel = ({
mode,
onModeChange,
isActive,
isPlaying,
source,
fileName,
onStartMicrophone,
onLoadAudioFile,
onTogglePlayPause,
onStop,
onGainChange,
error,
isExporting,
exportProgress,
exportStage,
exportFps,
onExportVideo,
onCancelExport,
}: ControlPanelProps) => {
const fileInputRef = useRef<HTMLInputElement>(null);
const [showExportDialog, setShowExportDialog] = useState(false);
const handleFileChange = (e: React.ChangeEvent<HTMLInputElement>) => {
const file = e.target.files?.[0];
if (file) {
onLoadAudioFile(file);
}
};
const handleExportClick = () => {
if (isExporting) {
onCancelExport();
} else {
setShowExportDialog(true);
}
};
const handleFormatSelect = (format: 'webm' | 'mp4') => {
setShowExportDialog(false);
onExportVideo(format);
};
return (
<>
<div className="flex flex-col gap-5 p-5 bg-bezel rounded-lg border border-border">
{/* Status indicator */}
<div className="flex items-center gap-3">
<div
className={`w-3 h-3 rounded-full transition-all duration-300 ${
isActive
? 'bg-primary shadow-[0_0_10px_hsl(var(--primary))]'
: 'bg-muted-foreground'
}`}
/>
<span className="text-sm text-muted-foreground uppercase tracking-wider">
{isExporting ? 'Exporting' : isActive ? (source === 'microphone' ? 'Mic Active' : 'Playing') : 'Standby'}
</span>
{isExporting && (
<div className="w-2 h-2 rounded-full bg-destructive animate-pulse" />
)}
</div>
{/* Input Source */}
<div className="space-y-2">
<label className="text-xs text-muted-foreground uppercase tracking-wider">
Input Source
</label>
<div className="flex flex-col gap-2">
<Button
variant="oscilloscope"
className={`w-full justify-start ${source === 'microphone' ? 'border-primary shadow-[0_0_15px_hsl(var(--primary)/0.4)]' : ''}`}
onClick={onStartMicrophone}
disabled={isExporting}
>
<Mic className="w-4 h-4" />
Microphone
</Button>
<input
ref={fileInputRef}
type="file"
accept="audio/*"
onChange={handleFileChange}
className="hidden"
/>
<Button
variant="oscilloscope"
className={`w-full justify-start ${source === 'file' ? 'border-primary shadow-[0_0_15px_hsl(var(--primary)/0.4)]' : ''}`}
onClick={() => fileInputRef.current?.click()}
disabled={isExporting}
>
<Upload className="w-4 h-4" />
Load File
</Button>
</div>
</div>
{/* File name display */}
{fileName && (
<div className="flex items-center gap-2 p-2 bg-secondary/50 rounded border border-border/50">
<Music className="w-4 h-4 text-primary shrink-0" />
<span className="text-xs text-foreground truncate">{fileName}</span>
</div>
)}
{/* Playback controls */}
{isActive && !isExporting && (
<div className="flex gap-2">
{source === 'file' && (
<Button
variant="oscilloscope"
size="icon"
onClick={onTogglePlayPause}
>
{isPlaying ? <Pause className="w-4 h-4" /> : <Play className="w-4 h-4" />}
</Button>
)}
<Button
variant="oscilloscope"
className="flex-1"
onClick={onStop}
>
<Square className="w-4 h-4" />
Stop
</Button>
</div>
)}
{/* Video Export */}
{source === 'file' && (
<div className="space-y-2">
<label className="text-xs text-muted-foreground uppercase tracking-wider">
Video Export
</label>
<Button
variant="oscilloscope"
className={`w-full justify-start ${isExporting ? 'border-destructive shadow-[0_0_15px_hsl(var(--destructive)/0.4)]' : ''}`}
onClick={handleExportClick}
>
{isExporting ? (
<>
<X className="w-4 h-4" />
Cancel Export
</>
) : (
<>
<Video className="w-4 h-4" />
Export Video
</>
)}
</Button>
{isExporting && (
<div className="space-y-2">
<Progress value={exportProgress} className="h-2" />
<p className="text-xs text-muted-foreground/60 text-center">
{exportStage === 'preparing' && 'Preparing audio...'}
{exportStage === 'rendering' && `Rendering: ${exportProgress}% ${exportFps > 0 ? `(${exportFps} fps)` : ''}`}
{exportStage === 'encoding' && 'Encoding final video...'}
{exportStage === 'complete' && 'Finalizing...'}
</p>
</div>
)}
{!isExporting && (
<p className="text-xs text-muted-foreground/60">
Generates video from the entire audio file offline.
</p>
)}
</div>
)}
{/* Sensitivity / Gain control */}
<div className="space-y-3">
<label className="text-xs text-muted-foreground uppercase tracking-wider">
Sensitivity
</label>
<Slider
defaultValue={[3]}
min={0.5}
max={10}
step={0.5}
onValueChange={(value) => onGainChange(value[0])}
className="w-full"
disabled={isExporting}
/>
<p className="text-xs text-muted-foreground/60">
Increase for quiet audio sources
</p>
</div>
{/* Mode selector */}
<div className="space-y-2">
<label className="text-xs text-muted-foreground uppercase tracking-wider">
Display Mode
</label>
<div className="flex gap-2">
<Button
variant="oscilloscope"
className={`flex-1 ${mode === 'normal' ? 'border-primary shadow-[0_0_15px_hsl(var(--primary)/0.4)]' : ''}`}
onClick={() => onModeChange('normal')}
disabled={isExporting}
>
<Radio className="w-4 h-4" />
Normal
</Button>
<Button
variant="oscilloscope"
className={`flex-1 ${mode === 'xy' ? 'border-primary shadow-[0_0_15px_hsl(var(--primary)/0.4)]' : ''}`}
onClick={() => onModeChange('xy')}
disabled={isExporting}
>
<Move className="w-4 h-4" />
X-Y
</Button>
</div>
</div>
{/* Mode description */}
<div className="p-3 bg-secondary/50 rounded border border-border/50">
<p className="text-xs text-muted-foreground leading-relaxed">
{mode === 'normal'
? 'Time-domain waveform display. Shows amplitude over time.'
: 'Lissajous (X-Y) mode. Left channel controls X, Right controls Y. Creates patterns from stereo audio.'}
</p>
</div>
{/* Error display */}
{error && (
<div className="p-3 bg-destructive/10 border border-destructive/50 rounded">
<p className="text-xs text-destructive">{error}</p>
</div>
)}
{/* Info */}
<div className="mt-auto pt-4 border-t border-border/50">
<p className="text-xs text-muted-foreground/60 text-center">
Audio Oscilloscope v1.3
</p>
</div>
</div>
{/* Export Format Dialog */}
<Dialog open={showExportDialog} onOpenChange={setShowExportDialog}>
<DialogContent className="bg-bezel border-border">
<DialogHeader>
<DialogTitle className="text-foreground">Choose Export Format</DialogTitle>
<DialogDescription className="text-muted-foreground">
The video will be generated from the entire audio file. This works offline and supports large files.
</DialogDescription>
</DialogHeader>
<div className="flex gap-3 mt-4">
<Button
variant="oscilloscope"
className="flex-1"
onClick={() => handleFormatSelect('webm')}
>
<Download className="w-4 h-4 mr-2" />
WebM (recommended)
</Button>
<Button
variant="oscilloscope"
className="flex-1"
onClick={() => handleFormatSelect('mp4')}
>
<Download className="w-4 h-4 mr-2" />
MP4
</Button>
</div>
</DialogContent>
</Dialog>
</>
);
};


@@ -0,0 +1,119 @@
import { useState, useRef, useCallback } from 'react';
import { OscilloscopeScreen, OscilloscopeScreenHandle } from './OscilloscopeScreen';
import { ControlPanel } from './ControlPanel';
import { useAudioAnalyzer } from '@/hooks/useAudioAnalyzer';
import { useOfflineVideoExport } from '@/hooks/useOfflineVideoExport';
import { toast } from 'sonner';
export const Oscilloscope = () => {
const [mode, setMode] = useState<'normal' | 'xy'>('normal');
const screenRef = useRef<OscilloscopeScreenHandle>(null);
const audioFileRef = useRef<File | null>(null);
const {
isActive,
isPlaying,
source,
fileName,
error,
startMicrophone,
loadAudioFile,
togglePlayPause,
stop,
setGain,
getTimeDomainData,
getStereoData,
} = useAudioAnalyzer();
const {
isExporting,
progress,
stage,
fps: exportFps,
generateVideoWithAudio,
cancelExport,
downloadBlob,
} = useOfflineVideoExport();
const handleLoadAudioFile = useCallback((file: File) => {
audioFileRef.current = file;
loadAudioFile(file);
}, [loadAudioFile]);
const handleExportVideo = useCallback(async (format: 'webm' | 'mp4') => {
if (!audioFileRef.current) {
toast.error('Please load an audio file first');
return;
}
const drawFrame = screenRef.current?.drawFrameWithData;
if (!drawFrame) {
toast.error('Canvas not ready');
return;
}
toast.info('Starting video export... This may take a while for large files.');
const blob = await generateVideoWithAudio(
audioFileRef.current,
drawFrame,
{
fps: 60,
format,
width: 1920,
height: 1080,
}
);
if (blob) {
const baseName = fileName?.replace(/\.[^/.]+$/, '') || 'oscilloscope';
const extension = format === 'mp4' ? 'mp4' : 'webm';
downloadBlob(blob, `${baseName}.${extension}`);
toast.success('Video exported successfully!');
}
}, [fileName, generateVideoWithAudio, downloadBlob]);
return (
<div className="flex flex-col lg:flex-row gap-6 w-full max-w-7xl mx-auto p-4 lg:p-8">
{/* Main oscilloscope display */}
<div className="flex-1 min-h-[400px] lg:min-h-[600px]">
<div className="h-full bg-bezel p-4 lg:p-6 rounded-xl border border-border box-glow">
{/* Screen bezel */}
<div className="h-full rounded-lg overflow-hidden border-4 border-secondary">
<OscilloscopeScreen
ref={screenRef}
mode={mode}
getTimeDomainData={getTimeDomainData}
getStereoData={getStereoData}
isActive={isActive}
/>
</div>
</div>
</div>
{/* Control panel */}
<div className="w-full lg:w-72 shrink-0">
<ControlPanel
mode={mode}
onModeChange={setMode}
isActive={isActive}
isPlaying={isPlaying}
source={source}
fileName={fileName}
onStartMicrophone={startMicrophone}
onLoadAudioFile={handleLoadAudioFile}
onTogglePlayPause={togglePlayPause}
onStop={stop}
onGainChange={setGain}
error={error}
isExporting={isExporting}
exportProgress={progress}
exportStage={stage}
exportFps={exportFps}
onExportVideo={handleExportVideo}
onCancelExport={cancelExport}
/>
</div>
</div>
);
};


@@ -0,0 +1,295 @@
import { useRef, useEffect, useCallback, forwardRef, useImperativeHandle } from 'react';
interface OscilloscopeScreenProps {
mode: 'normal' | 'xy';
getTimeDomainData: () => Uint8Array | null;
getStereoData: () => { left: Uint8Array; right: Uint8Array } | null;
isActive: boolean;
}
export interface OscilloscopeScreenHandle {
getCanvas: () => HTMLCanvasElement | null;
drawFrameWithData: (ctx: CanvasRenderingContext2D, width: number, height: number, leftData: Uint8Array, rightData: Uint8Array) => void;
}
export const OscilloscopeScreen = forwardRef<OscilloscopeScreenHandle, OscilloscopeScreenProps>(({
mode,
getTimeDomainData,
getStereoData,
isActive,
}, ref) => {
const canvasRef = useRef<HTMLCanvasElement>(null);
const animationRef = useRef<number>();
const lastTimeRef = useRef<number>(0);
const targetFPS = 120;
const frameInterval = 1000 / targetFPS;
const drawGrid = useCallback((ctx: CanvasRenderingContext2D, width: number, height: number) => {
ctx.strokeStyle = '#1a3a1a';
ctx.lineWidth = 1;
const vDivisions = 10;
for (let i = 0; i <= vDivisions; i++) {
const x = Math.round((width / vDivisions) * i) + 0.5;
ctx.beginPath();
ctx.moveTo(x, 0);
ctx.lineTo(x, height);
ctx.stroke();
}
const hDivisions = 8;
for (let i = 0; i <= hDivisions; i++) {
const y = Math.round((height / hDivisions) * i) + 0.5;
ctx.beginPath();
ctx.moveTo(0, y);
ctx.lineTo(width, y);
ctx.stroke();
}
ctx.strokeStyle = '#2a5a2a';
ctx.lineWidth = 1;
const centerX = Math.round(width / 2) + 0.5;
const centerY = Math.round(height / 2) + 0.5;
const tickLength = 6;
const tickSpacing = width / 50;
ctx.beginPath();
ctx.moveTo(centerX, 0);
ctx.lineTo(centerX, height);
ctx.stroke();
ctx.beginPath();
ctx.moveTo(0, centerY);
ctx.lineTo(width, centerY);
ctx.stroke();
ctx.strokeStyle = '#2a5a2a';
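// Tick marks along both center axes; the vertical-axis spacing is scaled by height/width
// so each axis receives the same number of ticks.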
for (let i = 0; i < 50; i++) {
const x = Math.round(i * tickSpacing) + 0.5;
const y = Math.round(i * tickSpacing * (height / width)) + 0.5;
ctx.beginPath();
ctx.moveTo(x, centerY - tickLength / 2);
ctx.lineTo(x, centerY + tickLength / 2);
ctx.stroke();
if (y < height) {
ctx.beginPath();
ctx.moveTo(centerX - tickLength / 2, y);
ctx.lineTo(centerX + tickLength / 2, y);
ctx.stroke();
}
}
}, []);
const drawNormalMode = useCallback((ctx: CanvasRenderingContext2D, width: number, height: number, data: Uint8Array) => {
const centerY = height / 2;
const points: { x: number; y: number }[] = [];
const step = Math.max(1, Math.floor(data.length / (width * 2)));
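// Downsample to roughly two samples per horizontal pixel; byte values are centered at 128
// (silence), so (value - 128) / 128 maps each sample to a signed amplitude in [-1, 1].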
for (let i = 0; i < data.length; i += step) {
const x = (i / data.length) * width;
const normalizedValue = (data[i] - 128) / 128;
const y = centerY - (normalizedValue * (height / 2) * 0.85);
points.push({ x, y });
}
if (points.length < 2) return;
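// Stroke the same path three times: a wide faint pass, a medium pass, and a thin bright core,
// approximating a CRT phosphor glow.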
ctx.strokeStyle = 'rgba(0, 255, 0, 0.15)';
ctx.lineWidth = 6;
ctx.lineCap = 'round';
ctx.lineJoin = 'round';
ctx.beginPath();
ctx.moveTo(points[0].x, points[0].y);
for (let i = 1; i < points.length - 1; i++) {
const xc = (points[i].x + points[i + 1].x) / 2;
const yc = (points[i].y + points[i + 1].y) / 2;
ctx.quadraticCurveTo(points[i].x, points[i].y, xc, yc);
}
ctx.lineTo(points[points.length - 1].x, points[points.length - 1].y);
ctx.stroke();
ctx.strokeStyle = 'rgba(0, 255, 0, 0.3)';
ctx.lineWidth = 3;
ctx.stroke();
ctx.strokeStyle = '#00ff00';
ctx.lineWidth = 1.5;
ctx.stroke();
}, []);
const drawXYMode = useCallback((ctx: CanvasRenderingContext2D, width: number, height: number, leftData: Uint8Array, rightData: Uint8Array) => {
const centerX = width / 2;
const centerY = height / 2;
const scale = Math.min(width, height) / 2 * 0.85;
const points: { x: number; y: number }[] = [];
const step = Math.max(1, Math.floor(leftData.length / 2048));
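// Cap the trace at roughly 2048 points; the left channel drives X and the right channel drives Y
// (Y is subtracted from centerY so positive samples move up).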
for (let i = 0; i < leftData.length; i += step) {
const xNorm = (leftData[i] - 128) / 128;
const yNorm = (rightData[i] - 128) / 128;
const x = centerX + xNorm * scale;
const y = centerY - yNorm * scale;
points.push({ x, y });
}
if (points.length < 2) return;
ctx.strokeStyle = 'rgba(0, 255, 0, 0.15)';
ctx.lineWidth = 6;
ctx.lineCap = 'round';
ctx.lineJoin = 'round';
ctx.beginPath();
ctx.moveTo(points[0].x, points[0].y);
for (let i = 1; i < points.length - 1; i++) {
const xc = (points[i].x + points[i + 1].x) / 2;
const yc = (points[i].y + points[i + 1].y) / 2;
ctx.quadraticCurveTo(points[i].x, points[i].y, xc, yc);
}
ctx.lineTo(points[points.length - 1].x, points[points.length - 1].y);
ctx.stroke();
ctx.strokeStyle = 'rgba(0, 255, 0, 0.3)';
ctx.lineWidth = 3;
ctx.stroke();
ctx.strokeStyle = '#00ff00';
ctx.lineWidth = 1.5;
ctx.stroke();
}, []);
const drawIdleWave = useCallback((ctx: CanvasRenderingContext2D, width: number, height: number) => {
const centerY = height / 2;
ctx.strokeStyle = 'rgba(0, 255, 0, 0.15)';
ctx.lineWidth = 6;
ctx.lineCap = 'round';
ctx.beginPath();
ctx.moveTo(0, centerY);
ctx.lineTo(width, centerY);
ctx.stroke();
ctx.strokeStyle = 'rgba(0, 255, 0, 0.3)';
ctx.lineWidth = 3;
ctx.stroke();
ctx.strokeStyle = '#00ff00';
ctx.lineWidth = 1.5;
ctx.stroke();
}, []);
useImperativeHandle(ref, () => ({
getCanvas: () => canvasRef.current,
drawFrameWithData: (ctx: CanvasRenderingContext2D, width: number, height: number, leftData: Uint8Array, rightData: Uint8Array) => {
ctx.fillStyle = '#0a0f0a';
ctx.fillRect(0, 0, width, height);
drawGrid(ctx, width, height);
if (mode === 'normal') {
drawNormalMode(ctx, width, height, leftData);
} else {
drawXYMode(ctx, width, height, leftData, rightData);
}
},
}), [mode, drawGrid, drawNormalMode, drawXYMode]);
useEffect(() => {
const canvas = canvasRef.current;
if (!canvas) return;
const ctx = canvas.getContext('2d', { alpha: false });
if (!ctx) return;
const render = (currentTime: number) => {
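// Throttle to targetFPS: redraw only once frameInterval has elapsed, and carry the remainder
// forward so the effective frame rate does not drift.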
const deltaTime = currentTime - lastTimeRef.current;
if (deltaTime >= frameInterval) {
lastTimeRef.current = currentTime - (deltaTime % frameInterval);
const dpr = window.devicePixelRatio || 1;
const width = canvas.width / dpr;
const height = canvas.height / dpr;
ctx.fillStyle = '#0a0f0a';
ctx.fillRect(0, 0, width, height);
drawGrid(ctx, width, height);
if (isActive) {
if (mode === 'normal') {
const data = getTimeDomainData();
if (data) {
drawNormalMode(ctx, width, height, data);
}
} else {
const stereoData = getStereoData();
if (stereoData) {
drawXYMode(ctx, width, height, stereoData.left, stereoData.right);
}
}
} else {
drawIdleWave(ctx, width, height);
}
}
animationRef.current = requestAnimationFrame(render);
};
animationRef.current = requestAnimationFrame(render);
return () => {
if (animationRef.current) {
cancelAnimationFrame(animationRef.current);
}
};
}, [mode, isActive, getTimeDomainData, getStereoData, drawGrid, drawNormalMode, drawXYMode, drawIdleWave, frameInterval]);
useEffect(() => {
const canvas = canvasRef.current;
if (!canvas) return;
const resizeCanvas = () => {
const container = canvas.parentElement;
if (!container) return;
const rect = container.getBoundingClientRect();
const dpr = window.devicePixelRatio || 1;
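// Size the backing store in device pixels and scale the context so drawing code can keep
// using CSS-pixel coordinates.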
canvas.width = rect.width * dpr;
canvas.height = rect.height * dpr;
const ctx = canvas.getContext('2d');
if (ctx) {
ctx.scale(dpr, dpr);
}
canvas.style.width = `${rect.width}px`;
canvas.style.height = `${rect.height}px`;
};
resizeCanvas();
window.addEventListener('resize', resizeCanvas);
return () => {
window.removeEventListener('resize', resizeCanvas);
};
}, []);
return (
<div className="relative w-full h-full overflow-hidden rounded-lg" style={{ backgroundColor: '#0a0f0a' }}>
<canvas
ref={canvasRef}
className="w-full h-full"
/>
</div>
);
});


@@ -0,0 +1,246 @@
import { useState, useRef, useCallback, useEffect } from 'react';
interface AudioAnalyzerState {
isActive: boolean;
error: string | null;
source: 'microphone' | 'file' | null;
fileName: string | null;
isPlaying: boolean;
}
export const useAudioAnalyzer = () => {
const [state, setState] = useState<AudioAnalyzerState>({
isActive: false,
error: null,
source: null,
fileName: null,
isPlaying: false,
});
const audioContextRef = useRef<AudioContext | null>(null);
const analyzerLeftRef = useRef<AnalyserNode | null>(null);
const analyzerRightRef = useRef<AnalyserNode | null>(null);
const sourceRef = useRef<MediaStreamAudioSourceNode | MediaElementAudioSourceNode | null>(null);
const splitterRef = useRef<ChannelSplitterNode | null>(null);
const streamRef = useRef<MediaStream | null>(null);
const analysisGainNodeRef = useRef<GainNode | null>(null);
const audioElementRef = useRef<HTMLAudioElement | null>(null);
const gainValueRef = useRef<number>(3); // Default gain; boosts the analysis path only, not the audible output
const getTimeDomainData = useCallback(() => {
if (!analyzerLeftRef.current) return null;
const bufferLength = analyzerLeftRef.current.fftSize;
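// getByteTimeDomainData fills the buffer with unsigned bytes in [0, 255], centered at 128 for silence.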
const dataArray = new Uint8Array(bufferLength);
analyzerLeftRef.current.getByteTimeDomainData(dataArray);
return dataArray;
}, []);
const getStereoData = useCallback(() => {
if (!analyzerLeftRef.current || !analyzerRightRef.current) return null;
const bufferLength = analyzerLeftRef.current.fftSize;
const leftData = new Uint8Array(bufferLength);
const rightData = new Uint8Array(bufferLength);
analyzerLeftRef.current.getByteTimeDomainData(leftData);
analyzerRightRef.current.getByteTimeDomainData(rightData);
return { left: leftData, right: rightData };
}, []);
const setGain = useCallback((value: number) => {
gainValueRef.current = value;
if (analysisGainNodeRef.current) {
analysisGainNodeRef.current.gain.value = value;
}
}, []);
const setupAnalyzers = useCallback((audioContext: AudioContext) => {
// Create gain node for analysis sensitivity (does NOT affect audio output)
analysisGainNodeRef.current = audioContext.createGain();
analysisGainNodeRef.current.gain.value = gainValueRef.current;
// Create channel splitter for stereo
splitterRef.current = audioContext.createChannelSplitter(2);
// Create analyzers for each channel
analyzerLeftRef.current = audioContext.createAnalyser();
analyzerRightRef.current = audioContext.createAnalyser();
// Configure analyzers for higher sensitivity
const fftSize = 2048;
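// 2048 time-domain samples per read is roughly 46 ms of audio at a 44.1 kHz sample rate.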
analyzerLeftRef.current.fftSize = fftSize;
analyzerRightRef.current.fftSize = fftSize;
analyzerLeftRef.current.smoothingTimeConstant = 0.5;
analyzerRightRef.current.smoothingTimeConstant = 0.5;
analyzerLeftRef.current.minDecibels = -90;
analyzerRightRef.current.minDecibels = -90;
analyzerLeftRef.current.maxDecibels = -10;
analyzerRightRef.current.maxDecibels = -10;
}, []);
const startMicrophone = useCallback(async () => {
try {
setState(prev => ({ ...prev, isActive: false, error: null }));
const stream = await navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: false,
noiseSuppression: false,
autoGainControl: false,
},
});
streamRef.current = stream;
audioContextRef.current = new AudioContext();
setupAnalyzers(audioContextRef.current);
// Create source from microphone
const micSource = audioContextRef.current.createMediaStreamSource(stream);
sourceRef.current = micSource;
// Connect: source -> analysisGain -> splitter -> analyzers
// (microphone doesn't need output, just analysis)
micSource.connect(analysisGainNodeRef.current!);
analysisGainNodeRef.current!.connect(splitterRef.current!);
splitterRef.current!.connect(analyzerLeftRef.current!, 0);
splitterRef.current!.connect(analyzerRightRef.current!, 1);
setState({
isActive: true,
error: null,
source: 'microphone',
fileName: null,
isPlaying: true
});
} catch (err) {
const message = err instanceof Error ? err.message : 'Failed to access microphone';
setState(prev => ({ ...prev, isActive: false, error: message }));
}
}, [setupAnalyzers]);
const loadAudioFile = useCallback(async (file: File) => {
try {
// Stop any existing audio
stop();
setState(prev => ({ ...prev, isActive: false, error: null }));
// Create audio element
const audioElement = new Audio();
audioElement.src = URL.createObjectURL(file);
audioElement.loop = true;
audioElementRef.current = audioElement;
audioContextRef.current = new AudioContext();
setupAnalyzers(audioContextRef.current);
// Create source from audio element
const audioSource = audioContextRef.current.createMediaElementSource(audioElement);
sourceRef.current = audioSource;
// For files: source -> destination (clean audio output)
// source -> analysisGain -> splitter -> analyzers (boosted for visualization)
audioSource.connect(audioContextRef.current.destination);
audioSource.connect(analysisGainNodeRef.current!);
analysisGainNodeRef.current!.connect(splitterRef.current!);
splitterRef.current!.connect(analyzerLeftRef.current!, 0);
splitterRef.current!.connect(analyzerRightRef.current!, 1);
// Start playing
await audioElement.play();
setState({
isActive: true,
error: null,
source: 'file',
fileName: file.name,
isPlaying: true
});
} catch (err) {
const message = err instanceof Error ? err.message : 'Failed to load audio file';
setState(prev => ({ ...prev, isActive: false, error: message }));
}
}, [setupAnalyzers]);
const togglePlayPause = useCallback(() => {
if (!audioElementRef.current) return;
if (audioElementRef.current.paused) {
audioElementRef.current.play();
setState(prev => ({ ...prev, isPlaying: true }));
} else {
audioElementRef.current.pause();
setState(prev => ({ ...prev, isPlaying: false }));
}
}, []);
const stop = useCallback(() => {
if (streamRef.current) {
streamRef.current.getTracks().forEach(track => track.stop());
streamRef.current = null;
}
if (audioElementRef.current) {
audioElementRef.current.pause();
audioElementRef.current.src = '';
audioElementRef.current = null;
}
if (sourceRef.current) {
sourceRef.current.disconnect();
sourceRef.current = null;
}
if (analysisGainNodeRef.current) {
analysisGainNodeRef.current.disconnect();
analysisGainNodeRef.current = null;
}
if (splitterRef.current) {
splitterRef.current.disconnect();
splitterRef.current = null;
}
if (audioContextRef.current) {
audioContextRef.current.close();
audioContextRef.current = null;
}
analyzerLeftRef.current = null;
analyzerRightRef.current = null;
setState({
isActive: false,
error: null,
source: null,
fileName: null,
isPlaying: false
});
}, []);
useEffect(() => {
return () => {
stop();
};
}, [stop]);
const getAudioElement = useCallback(() => {
return audioElementRef.current;
}, []);
return {
...state,
startMicrophone,
loadAudioFile,
togglePlayPause,
stop,
setGain,
getTimeDomainData,
getStereoData,
getAudioElement,
};
};


@@ -0,0 +1,452 @@
import { useState, useCallback, useRef } from 'react';
export type ExportStage = 'idle' | 'preparing' | 'rendering' | 'encoding' | 'complete';
interface ExportState {
isExporting: boolean;
progress: number;
error: string | null;
stage: ExportStage;
fps: number;
}
interface ExportOptions {
fps: number;
format: 'webm' | 'mp4';
width: number;
height: number;
}
interface WavHeader {
sampleRate: number;
numChannels: number;
bitsPerSample: number;
dataOffset: number;
dataSize: number;
}
// Parse WAV header without loading entire file
async function parseWavHeader(file: File): Promise<WavHeader> {
const headerBuffer = await file.slice(0, 44).arrayBuffer();
const view = new DataView(headerBuffer);
// Verify RIFF header
const riff = String.fromCharCode(view.getUint8(0), view.getUint8(1), view.getUint8(2), view.getUint8(3));
if (riff !== 'RIFF') throw new Error('Not a valid WAV file');
const wave = String.fromCharCode(view.getUint8(8), view.getUint8(9), view.getUint8(10), view.getUint8(11));
if (wave !== 'WAVE') throw new Error('Not a valid WAV file');
// Read fmt fields, assuming the canonical layout with the fmt chunk immediately after the RIFF header
const numChannels = view.getUint16(22, true);
const sampleRate = view.getUint32(24, true);
const bitsPerSample = view.getUint16(34, true);
// Find data chunk - scan for 'data' marker
let dataOffset = 36;
let dataSize = 0;
// Read more bytes to find data chunk
const extendedBuffer = await file.slice(0, Math.min(1024, file.size)).arrayBuffer();
const extendedView = new DataView(extendedBuffer);
for (let i = 36; i < extendedBuffer.byteLength - 8; i++) {
const marker = String.fromCharCode(
extendedView.getUint8(i),
extendedView.getUint8(i + 1),
extendedView.getUint8(i + 2),
extendedView.getUint8(i + 3)
);
if (marker === 'data') {
dataOffset = i + 8;
dataSize = extendedView.getUint32(i + 4, true);
break;
}
}
if (dataSize === 0) {
// Estimate from file size
dataSize = file.size - dataOffset;
}
return { sampleRate, numChannels, bitsPerSample, dataOffset, dataSize };
}
// Read a chunk of samples from WAV file
async function readWavChunk(
file: File,
header: WavHeader,
startSample: number,
numSamples: number
): Promise<{ left: Float32Array; right: Float32Array }> {
const bytesPerSample = header.bitsPerSample / 8;
const bytesPerFrame = bytesPerSample * header.numChannels;
const startByte = header.dataOffset + (startSample * bytesPerFrame);
const endByte = Math.min(startByte + (numSamples * bytesPerFrame), file.size);
const chunk = await file.slice(startByte, endByte).arrayBuffer();
const view = new DataView(chunk);
const actualSamples = Math.floor(chunk.byteLength / bytesPerFrame);
const left = new Float32Array(actualSamples);
const right = new Float32Array(actualSamples);
for (let i = 0; i < actualSamples; i++) {
const offset = i * bytesPerFrame;
if (header.bitsPerSample === 16) {
left[i] = view.getInt16(offset, true) / 32768;
right[i] = header.numChannels > 1
? view.getInt16(offset + 2, true) / 32768
: left[i];
} else if (header.bitsPerSample === 24) {
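// Assemble a signed little-endian 24-bit sample; reading the high byte with getInt8 sign-extends it before the shift.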
const l = (view.getUint8(offset) | (view.getUint8(offset + 1) << 8) | (view.getInt8(offset + 2) << 16));
left[i] = l / 8388608;
if (header.numChannels > 1) {
const r = (view.getUint8(offset + 3) | (view.getUint8(offset + 4) << 8) | (view.getInt8(offset + 5) << 16));
right[i] = r / 8388608;
} else {
right[i] = left[i];
}
} else if (header.bitsPerSample === 32) {
left[i] = view.getFloat32(offset, true);
right[i] = header.numChannels > 1
? view.getFloat32(offset + 4, true)
: left[i];
} else {
// 8-bit
left[i] = (view.getUint8(offset) - 128) / 128;
right[i] = header.numChannels > 1
? (view.getUint8(offset + 1) - 128) / 128
: left[i];
}
}
return { left, right };
}
export const useOfflineVideoExport = () => {
const [state, setState] = useState<ExportState>({
isExporting: false,
progress: 0,
error: null,
stage: 'idle',
fps: 0,
});
const cancelledRef = useRef(false);
const generateVideoWithAudio = useCallback(async (
audioFile: File,
drawFrame: (ctx: CanvasRenderingContext2D, width: number, height: number, leftData: Uint8Array, rightData: Uint8Array) => void,
options: ExportOptions
): Promise<Blob | null> => {
cancelledRef.current = false;
setState({ isExporting: true, progress: 0, error: null, stage: 'preparing', fps: 0 });
try {
const { fps, width, height } = options;
const isWav = audioFile.name.toLowerCase().endsWith('.wav');
console.log(`Starting memory-efficient export: ${audioFile.name} (${(audioFile.size / 1024 / 1024).toFixed(2)} MB)`);
let sampleRate: number;
let totalSamples: number;
let getChunk: (startSample: number, numSamples: number) => Promise<{ left: Float32Array; right: Float32Array }>;
if (isWav) {
// Memory-efficient WAV streaming
console.log('Using streaming WAV parser (memory efficient)');
const header = await parseWavHeader(audioFile);
sampleRate = header.sampleRate;
const bytesPerFrame = (header.bitsPerSample / 8) * header.numChannels;
totalSamples = Math.floor(header.dataSize / bytesPerFrame);
getChunk = (startSample, numSamples) => readWavChunk(audioFile, header, startSample, numSamples);
console.log(`WAV: ${header.numChannels}ch, ${sampleRate}Hz, ${header.bitsPerSample}bit, ${totalSamples} samples`);
} else {
// For non-WAV files, we need to decode (uses more memory)
console.log('Non-WAV file, using AudioContext decode (higher memory)');
const arrayBuffer = await audioFile.arrayBuffer();
const audioContext = new AudioContext();
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
sampleRate = audioBuffer.sampleRate;
totalSamples = audioBuffer.length;
const leftChannel = audioBuffer.getChannelData(0);
const rightChannel = audioBuffer.numberOfChannels > 1 ? audioBuffer.getChannelData(1) : leftChannel;
await audioContext.close();
getChunk = async (startSample, numSamples) => {
const end = Math.min(startSample + numSamples, totalSamples);
return {
left: leftChannel.slice(startSample, end),
right: rightChannel.slice(startSample, end),
};
};
}
if (cancelledRef.current) {
setState({ isExporting: false, progress: 0, error: 'Cancelled', stage: 'idle', fps: 0 });
return null;
}
const duration = totalSamples / sampleRate;
const totalFrames = Math.ceil(duration * fps);
const samplesPerFrame = Math.floor(sampleRate / fps);
const fftSize = 2048;
console.log(`Duration: ${duration.toFixed(2)}s, ${totalFrames} frames @ ${fps}fps`);
setState(prev => ({ ...prev, stage: 'rendering', progress: 5 }));
// Create canvas
const canvas = document.createElement('canvas');
canvas.width = width;
canvas.height = height;
const ctx = canvas.getContext('2d', { alpha: false, desynchronized: true });
if (!ctx) throw new Error('Could not create canvas context');
// Setup video recording
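// captureStream(0) disables automatic capture; frames are pushed explicitly via requestFrame() on the video track after each draw.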
const stream = canvas.captureStream(0);
const videoTrack = stream.getVideoTracks()[0];
const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp9')
? 'video/webm;codecs=vp9'
: 'video/webm;codecs=vp8';
const videoChunks: Blob[] = [];
const recorder = new MediaRecorder(stream, {
mimeType,
videoBitsPerSecond: 20_000_000,
});
recorder.ondataavailable = (e) => {
if (e.data.size > 0) videoChunks.push(e.data);
};
// Start recording
recorder.start(1000);
const startTime = performance.now();
let framesProcessed = 0;
// Process frames in batches, loading audio chunks as needed
const chunkSizeFrames = 120; // Process 2 seconds at a time (at 60fps)
const samplesPerChunk = chunkSizeFrames * samplesPerFrame + fftSize;
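// Pad each chunk with fftSize extra samples of headroom so the final frame in a batch never reads past the end of the chunk.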
for (let frameIndex = 0; frameIndex < totalFrames; frameIndex += chunkSizeFrames) {
if (cancelledRef.current) {
recorder.stop();
setState({ isExporting: false, progress: 0, error: 'Cancelled', stage: 'idle', fps: 0 });
return null;
}
// Load audio chunk for this batch
const startSample = frameIndex * samplesPerFrame;
const { left: leftChunk, right: rightChunk } = await getChunk(startSample, samplesPerChunk);
// Process frames in this chunk
const endFrame = Math.min(frameIndex + chunkSizeFrames, totalFrames);
for (let f = frameIndex; f < endFrame; f++) {
const localOffset = (f - frameIndex) * samplesPerFrame;
// Extract waveform data for this frame
const leftData = new Uint8Array(fftSize);
const rightData = new Uint8Array(fftSize);
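// Convert float samples in [-1, 1] to unsigned bytes centered at 128, matching the AnalyserNode byte format the draw functions expect.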
for (let i = 0; i < fftSize; i++) {
const sampleIndex = localOffset + Math.floor((i / fftSize) * samplesPerFrame);
if (sampleIndex >= 0 && sampleIndex < leftChunk.length) {
leftData[i] = Math.round((leftChunk[sampleIndex] * 128) + 128);
rightData[i] = Math.round((rightChunk[sampleIndex] * 128) + 128);
} else {
leftData[i] = 128;
rightData[i] = 128;
}
}
// Draw frame
drawFrame(ctx, width, height, leftData, rightData);
// Capture frame
const track = videoTrack as unknown as { requestFrame?: () => void };
if (track.requestFrame) track.requestFrame();
framesProcessed++;
}
// Update progress
const elapsed = (performance.now() - startTime) / 1000;
const currentFps = Math.round(framesProcessed / elapsed);
const progress = 5 + Math.round((framesProcessed / totalFrames) * 85);
setState(prev => ({ ...prev, progress, fps: currentFps }));
// Yield to main thread
await new Promise(r => setTimeout(r, 0));
}
// Brief pause so the last requested frame can be captured, then stop recording
await new Promise(r => setTimeout(r, 200));
recorder.stop();
// Wait for recorder to finish
await new Promise<void>(resolve => {
const checkInterval = setInterval(() => {
if (recorder.state === 'inactive') {
clearInterval(checkInterval);
resolve();
}
}, 100);
});
const videoBlob = new Blob(videoChunks, { type: mimeType });
console.log(`Video rendered: ${(videoBlob.size / 1024 / 1024).toFixed(2)} MB`);
setState(prev => ({ ...prev, stage: 'encoding', progress: 92 }));
// Mux audio with video (streaming approach)
const finalBlob = await muxAudioVideo(videoBlob, audioFile, duration, fps);
setState({ isExporting: false, progress: 100, error: null, stage: 'complete', fps: 0 });
console.log(`Export complete: ${(finalBlob.size / 1024 / 1024).toFixed(2)} MB`);
return finalBlob;
} catch (err) {
console.error('Export error:', err);
const message = err instanceof Error ? err.message : 'Export failed';
setState({ isExporting: false, progress: 0, error: message, stage: 'idle', fps: 0 });
return null;
}
}, []);
const cancelExport = useCallback(() => {
cancelledRef.current = true;
}, []);
const downloadBlob = useCallback((blob: Blob, filename: string) => {
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = filename;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
URL.revokeObjectURL(url);
}, []);
return {
...state,
generateVideoWithAudio,
cancelExport,
downloadBlob,
};
};
// Memory-efficient muxing: replay the rendered video alongside the audio file and re-record the combined stream (playback accelerated to 4x)
async function muxAudioVideo(
videoBlob: Blob,
audioFile: File,
duration: number,
fps: number
): Promise<Blob> {
return new Promise((resolve, reject) => {
const videoUrl = URL.createObjectURL(videoBlob);
const audioUrl = URL.createObjectURL(audioFile);
const video = document.createElement('video');
const audio = document.createElement('audio');
video.src = videoUrl;
video.muted = true;
video.playbackRate = 4; // Speed up playback
audio.src = audioUrl;
audio.playbackRate = 4;
const cleanup = () => {
URL.revokeObjectURL(videoUrl);
URL.revokeObjectURL(audioUrl);
};
Promise.all([
new Promise<void>((res, rej) => {
video.onloadedmetadata = () => res();
video.onerror = () => rej(new Error('Failed to load video'));
}),
new Promise<void>((res, rej) => {
audio.onloadedmetadata = () => res();
audio.onerror = () => rej(new Error('Failed to load audio'));
}),
]).then(() => {
const audioContext = new AudioContext();
const audioSource = audioContext.createMediaElementSource(audio);
const audioDestination = audioContext.createMediaStreamDestination();
audioSource.connect(audioDestination);
const canvas = document.createElement('canvas');
canvas.width = video.videoWidth || 1920;
canvas.height = video.videoHeight || 1080;
const ctx = canvas.getContext('2d')!;
const canvasStream = canvas.captureStream(fps);
const combinedStream = new MediaStream([
...canvasStream.getVideoTracks(),
...audioDestination.stream.getAudioTracks(),
]);
const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp9,opus')
? 'video/webm;codecs=vp9,opus'
: 'video/webm;codecs=vp8,opus';
const chunks: Blob[] = [];
const recorder = new MediaRecorder(combinedStream, {
mimeType,
videoBitsPerSecond: 20_000_000,
audioBitsPerSecond: 320_000,
});
recorder.ondataavailable = (e) => {
if (e.data.size > 0) chunks.push(e.data);
};
recorder.onstop = () => {
cleanup();
audioContext.close();
resolve(new Blob(chunks, { type: mimeType }));
};
recorder.onerror = () => {
cleanup();
reject(new Error('Muxing failed'));
};
const drawLoop = () => {
if (video.ended || audio.ended) {
setTimeout(() => recorder.stop(), 100);
return;
}
ctx.drawImage(video, 0, 0);
requestAnimationFrame(drawLoop);
};
recorder.start(100);
video.currentTime = 0;
audio.currentTime = 0;
video.play();
audio.play();
drawLoop();
}).catch(err => {
cleanup();
console.warn('Muxing failed, returning video only:', err);
resolve(videoBlob);
});
});
}