mirror of https://github.com/JorySeverijnse/ui-fixer-supreme.git
synced 2026-01-29 18:08:38 +00:00
Correct files to integrate with the site for a good audio to A/V oscilloscope converter
It now only needs to be wired into the actual website
This commit is contained in:
parent e227743728
commit ad6587978a
1867 package-lock.json generated
File diff suppressed because it is too large
@@ -76,12 +76,16 @@
     "eslint": "^9.32.0",
     "eslint-plugin-react-hooks": "^5.2.0",
     "eslint-plugin-react-refresh": "^0.4.20",
+    "express": "^5.2.1",
+    "gif.js": "^0.2.0",
     "globals": "^15.15.0",
     "lovable-tagger": "^1.1.11",
     "postcss": "^8.5.6",
+    "puppeteer": "^24.34.0",
     "tailwindcss": "^3.4.17",
     "typescript": "^5.8.3",
     "typescript-eslint": "^8.38.0",
-    "vite": "^7.2.6"
+    "vite": "^7.2.6",
+    "webm-writer": "^1.0.0"
   }
 }
78 src/components/AudioUploader.tsx Executable file
@@ -0,0 +1,78 @@
+import { useCallback } from 'react';
+import { Upload, Music } from 'lucide-react';
+import { cn } from '@/lib/utils';
+
+interface AudioUploaderProps {
+  onFileSelect: (file: File) => void;
+  isLoading: boolean;
+  fileName: string | null;
+}
+
+export function AudioUploader({ onFileSelect, isLoading, fileName }: AudioUploaderProps) {
+  const handleDrop = useCallback((e: React.DragEvent) => {
+    e.preventDefault();
+    const file = e.dataTransfer.files[0];
+    if (file && file.type.startsWith('audio/')) {
+      onFileSelect(file);
+    }
+  }, [onFileSelect]);
+
+  const handleFileInput = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
+    const file = e.target.files?.[0];
+    if (file) {
+      onFileSelect(file);
+    }
+  }, [onFileSelect]);
+
+  const handleDragOver = (e: React.DragEvent) => {
+    e.preventDefault();
+  };
+
+  return (
+    <div
+      className={cn(
+        "relative border-2 border-dashed border-primary/40 rounded-lg p-8 text-center",
+        "hover:border-primary/70 transition-all duration-300 cursor-pointer",
+        "bg-secondary/20 hover:bg-secondary/30",
+        isLoading && "opacity-50 pointer-events-none"
+      )}
+      onDrop={handleDrop}
+      onDragOver={handleDragOver}
+    >
+      <input
+        type="file"
+        accept="audio/*"
+        onChange={handleFileInput}
+        className="absolute inset-0 w-full h-full opacity-0 cursor-pointer"
+        disabled={isLoading}
+      />
+
+      <div className="flex flex-col items-center gap-4">
+        {fileName ? (
+          <>
+            <Music className="w-12 h-12 text-primary phosphor-glow" />
+            <div>
+              <p className="text-lg font-crt text-primary text-glow">{fileName}</p>
+              <p className="text-sm text-muted-foreground mt-1">Click or drop to replace</p>
+            </div>
+          </>
+        ) : (
+          <>
+            <Upload className="w-12 h-12 text-primary/60" />
+            <div>
+              <p className="text-lg font-crt text-primary/80">Drop audio file here</p>
+              <p className="text-sm text-muted-foreground mt-1">or click to browse</p>
+              <p className="text-xs text-muted-foreground mt-2">MP3, WAV, FLAC, OGG supported</p>
+            </div>
+          </>
+        )}
+
+        {isLoading && (
+          <div className="absolute inset-0 flex items-center justify-center bg-background/50 rounded-lg">
+            <div className="w-8 h-8 border-2 border-primary border-t-transparent rounded-full animate-spin" />
+          </div>
+        )}
+      </div>
+    </div>
+  );
+}
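
How this uploader might be wired up, as a minimal sketch: its three props line up one-to-one with what the reworked useAudioAnalyzer hook (further down in this commit) returns. The host component here is hypothetical, not part of the commit.

import { AudioUploader } from '@/components/AudioUploader';
import { useAudioAnalyzer } from '@/hooks/useAudioAnalyzer';

// Hypothetical host component (not in this commit): decode a dropped
// file into AudioData via the hook, and echo load state back to the UI.
export function ConverterPage() {
  const { loadAudioFile, isLoading, fileName } = useAudioAnalyzer();
  return (
    <AudioUploader
      onFileSelect={loadAudioFile}
      isLoading={isLoading}
      fileName={fileName}
    />
  );
}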
410 src/components/ControlPanel.tsx Normal file → Executable file
@@ -1,308 +1,142 @@
-import { useRef, useState } from 'react';
-import { Button } from '@/components/ui/button';
-import { Slider } from '@/components/ui/slider';
-import { Mic, Radio, Move, Upload, Play, Pause, Square, Music, Video, Download, X } from 'lucide-react';
-import {
-  Dialog,
-  DialogContent,
-  DialogDescription,
-  DialogHeader,
-  DialogTitle,
-} from '@/components/ui/dialog';
-import { Progress } from '@/components/ui/progress';
-import type { ExportStage } from '@/hooks/useOfflineVideoExport';
-
-interface ControlPanelProps {
-  mode: 'normal' | 'xy';
-  onModeChange: (mode: 'normal' | 'xy') => void;
-  isActive: boolean;
-  isPlaying: boolean;
-  source: 'microphone' | 'file' | null;
-  fileName: string | null;
-  onStartMicrophone: () => void;
-  onLoadAudioFile: (file: File) => void;
-  onTogglePlayPause: () => void;
-  onStop: () => void;
-  onGainChange: (value: number) => void;
-  error: string | null;
-  isExporting: boolean;
-  exportProgress: number;
-  exportStage: ExportStage;
-  exportFps: number;
-  onExportVideo: (format: 'webm' | 'mp4') => void;
-  onCancelExport: () => void;
-}
-
-export const ControlPanel = ({
-  mode,
-  onModeChange,
-  isActive,
-  isPlaying,
-  source,
-  fileName,
-  onStartMicrophone,
-  onLoadAudioFile,
-  onTogglePlayPause,
-  onStop,
-  onGainChange,
-  error,
-  isExporting,
-  exportProgress,
-  exportStage,
-  exportFps,
-  onExportVideo,
-  onCancelExport,
-}: ControlPanelProps) => {
-  const fileInputRef = useRef<HTMLInputElement>(null);
-  const [showExportDialog, setShowExportDialog] = useState(false);
-
-  const handleFileChange = (e: React.ChangeEvent<HTMLInputElement>) => {
-    const file = e.target.files?.[0];
-    if (file) {
-      onLoadAudioFile(file);
-    }
-  };
-
-  const handleExportClick = () => {
-    if (isExporting) {
-      onCancelExport();
-    } else {
-      setShowExportDialog(true);
-    }
-  };
-
-  const handleFormatSelect = (format: 'webm' | 'mp4') => {
-    setShowExportDialog(false);
-    onExportVideo(format);
-  };
-
-  return (
-    <>
-      <div className="flex flex-col gap-5 p-5 bg-bezel rounded-lg border border-border">
-        {/* Status indicator */}
-        <div className="flex items-center gap-3">
-          <div
-            className={`w-3 h-3 rounded-full transition-all duration-300 ${
-              isActive
-                ? 'bg-primary shadow-[0_0_10px_hsl(var(--primary))]'
-                : 'bg-muted-foreground'
-            }`}
-          />
-          <span className="text-sm text-muted-foreground uppercase tracking-wider">
-            {isExporting ? 'Exporting' : isActive ? (source === 'microphone' ? 'Mic Active' : 'Playing') : 'Standby'}
-          </span>
-          {isExporting && (
-            <div className="w-2 h-2 rounded-full bg-destructive animate-pulse" />
-          )}
-        </div>
-
-        {/* Input Source */}
-        <div className="space-y-2">
-          <label className="text-xs text-muted-foreground uppercase tracking-wider">
-            Input Source
-          </label>
-          <div className="flex flex-col gap-2">
-            <Button
-              variant="oscilloscope"
-              className={`w-full justify-start ${source === 'microphone' ? 'border-primary shadow-[0_0_15px_hsl(var(--primary)/0.4)]' : ''}`}
-              onClick={onStartMicrophone}
-              disabled={isExporting}
-            >
-              <Mic className="w-4 h-4" />
-              Microphone
-            </Button>
-
-            <input
-              ref={fileInputRef}
-              type="file"
-              accept="audio/*"
-              onChange={handleFileChange}
-              className="hidden"
-            />
-            <Button
-              variant="oscilloscope"
-              className={`w-full justify-start ${source === 'file' ? 'border-primary shadow-[0_0_15px_hsl(var(--primary)/0.4)]' : ''}`}
-              onClick={() => fileInputRef.current?.click()}
-              disabled={isExporting}
-            >
-              <Upload className="w-4 h-4" />
-              Load File
-            </Button>
-          </div>
-        </div>
-
-        {/* File name display */}
-        {fileName && (
-          <div className="flex items-center gap-2 p-2 bg-secondary/50 rounded border border-border/50">
-            <Music className="w-4 h-4 text-primary shrink-0" />
-            <span className="text-xs text-foreground truncate">{fileName}</span>
-          </div>
-        )}
-
-        {/* Playback controls */}
-        {isActive && !isExporting && (
-          <div className="flex gap-2">
-            {source === 'file' && (
-              <Button
-                variant="oscilloscope"
-                size="icon"
-                onClick={onTogglePlayPause}
-              >
-                {isPlaying ? <Pause className="w-4 h-4" /> : <Play className="w-4 h-4" />}
-              </Button>
-            )}
-            <Button
-              variant="oscilloscope"
-              className="flex-1"
-              onClick={onStop}
-            >
-              <Square className="w-4 h-4" />
-              Stop
-            </Button>
-          </div>
-        )}
-
-        {/* Video Export */}
-        {source === 'file' && (
-          <div className="space-y-2">
-            <label className="text-xs text-muted-foreground uppercase tracking-wider">
-              Video Export
-            </label>
-            <Button
-              variant="oscilloscope"
-              className={`w-full justify-start ${isExporting ? 'border-destructive shadow-[0_0_15px_hsl(var(--destructive)/0.4)]' : ''}`}
-              onClick={handleExportClick}
-            >
-              {isExporting ? (
-                <>
-                  <X className="w-4 h-4" />
-                  Cancel Export
-                </>
-              ) : (
-                <>
-                  <Video className="w-4 h-4" />
-                  Export Video
-                </>
-              )}
-            </Button>
-            {isExporting && (
-              <div className="space-y-2">
-                <Progress value={exportProgress} className="h-2" />
-                <p className="text-xs text-muted-foreground/60 text-center">
-                  {exportStage === 'preparing' && 'Preparing audio...'}
-                  {exportStage === 'rendering' && `Rendering: ${exportProgress}% ${exportFps > 0 ? `(${exportFps} fps)` : ''}`}
-                  {exportStage === 'encoding' && 'Encoding final video...'}
-                  {exportStage === 'complete' && 'Finalizing...'}
-                </p>
-              </div>
-            )}
-            {!isExporting && (
-              <p className="text-xs text-muted-foreground/60">
-                Generates video from the entire audio file offline.
-              </p>
-            )}
-          </div>
-        )}
-
-        {/* Sensitivity / Gain control */}
-        <div className="space-y-3">
-          <label className="text-xs text-muted-foreground uppercase tracking-wider">
-            Sensitivity
-          </label>
-          <Slider
-            defaultValue={[3]}
-            min={0.5}
-            max={10}
-            step={0.5}
-            onValueChange={(value) => onGainChange(value[0])}
-            className="w-full"
-            disabled={isExporting}
-          />
-          <p className="text-xs text-muted-foreground/60">
-            Increase for quiet audio sources
-          </p>
-        </div>
-
-        {/* Mode selector */}
-        <div className="space-y-2">
-          <label className="text-xs text-muted-foreground uppercase tracking-wider">
-            Display Mode
-          </label>
-          <div className="flex gap-2">
-            <Button
-              variant="oscilloscope"
-              className={`flex-1 ${mode === 'normal' ? 'border-primary shadow-[0_0_15px_hsl(var(--primary)/0.4)]' : ''}`}
-              onClick={() => onModeChange('normal')}
-              disabled={isExporting}
-            >
-              <Radio className="w-4 h-4" />
-              Normal
-            </Button>
-            <Button
-              variant="oscilloscope"
-              className={`flex-1 ${mode === 'xy' ? 'border-primary shadow-[0_0_15px_hsl(var(--primary)/0.4)]' : ''}`}
-              onClick={() => onModeChange('xy')}
-              disabled={isExporting}
-            >
-              <Move className="w-4 h-4" />
-              X-Y
-            </Button>
-          </div>
-        </div>
-
-        {/* Mode description */}
-        <div className="p-3 bg-secondary/50 rounded border border-border/50">
-          <p className="text-xs text-muted-foreground leading-relaxed">
-            {mode === 'normal'
-              ? 'Time-domain waveform display. Shows amplitude over time.'
-              : 'Lissajous (X-Y) mode. Left channel controls X, Right controls Y. Creates patterns from stereo audio.'}
-          </p>
-        </div>
-
-        {/* Error display */}
-        {error && (
-          <div className="p-3 bg-destructive/10 border border-destructive/50 rounded">
-            <p className="text-xs text-destructive">{error}</p>
-          </div>
-        )}
-
-        {/* Info */}
-        <div className="mt-auto pt-4 border-t border-border/50">
-          <p className="text-xs text-muted-foreground/60 text-center">
-            Audio Oscilloscope v1.3
-          </p>
-        </div>
-      </div>
-
-      {/* Export Format Dialog */}
-      <Dialog open={showExportDialog} onOpenChange={setShowExportDialog}>
-        <DialogContent className="bg-bezel border-border">
-          <DialogHeader>
-            <DialogTitle className="text-foreground">Choose Export Format</DialogTitle>
-            <DialogDescription className="text-muted-foreground">
-              The video will be generated from the entire audio file. This works offline and supports large files.
-            </DialogDescription>
-          </DialogHeader>
-          <div className="flex gap-3 mt-4">
-            <Button
-              variant="oscilloscope"
-              className="flex-1"
-              onClick={() => handleFormatSelect('webm')}
-            >
-              <Download className="w-4 h-4 mr-2" />
-              WebM (recommended)
-            </Button>
-            <Button
-              variant="oscilloscope"
-              className="flex-1"
-              onClick={() => handleFormatSelect('mp4')}
-            >
-              <Download className="w-4 h-4 mr-2" />
-              MP4
-            </Button>
-          </div>
-        </DialogContent>
-      </Dialog>
-    </>
-  );
-};
+import { Play, Download, RotateCcw } from 'lucide-react';
+import { Button } from '@/components/ui/button';
+import { Label } from '@/components/ui/label';
+import { Progress } from '@/components/ui/progress';
+import { RadioGroup, RadioGroupItem } from '@/components/ui/radio-group';
+import type { OscilloscopeMode } from '@/hooks/useOscilloscopeRenderer';
+
+interface ControlPanelProps {
+  mode: OscilloscopeMode;
+  onModeChange: (mode: OscilloscopeMode) => void;
+  canGenerate: boolean;
+  isGenerating: boolean;
+  progress: number;
+  exportedUrl: string | null;
+  onGenerate: () => void;
+  onReset: () => void;
+  isPlaying: boolean;
+  onPreview: () => void;
+  canPreview: boolean;
+}
+
+export function ControlPanel({
+  mode,
+  onModeChange,
+  canGenerate,
+  isGenerating,
+  progress,
+  exportedUrl,
+  onGenerate,
+  onReset,
+  isPlaying,
+  onPreview,
+  canPreview,
+}: ControlPanelProps) {
+  return (
+    <div className="flex flex-col gap-6 p-6 bg-card border border-border rounded-lg">
+      {/* Mode Selection */}
+      <div className="space-y-3">
+        <Label className="font-crt text-lg text-primary text-glow">DISPLAY MODE</Label>
+        <RadioGroup
+          value={mode}
+          onValueChange={(value) => onModeChange(value as OscilloscopeMode)}
+          className="space-y-2"
+        >
+          <div className="flex items-center space-x-3">
+            <RadioGroupItem value="combined" id="combined" className="border-primary" />
+            <Label htmlFor="combined" className="font-mono-crt text-sm cursor-pointer">
+              Combined (L+R merged)
+            </Label>
+          </div>
+          <div className="flex items-center space-x-3">
+            <RadioGroupItem value="separate" id="separate" className="border-primary" />
+            <Label htmlFor="separate" className="font-mono-crt text-sm cursor-pointer">
+              Separate (L/R stacked)
+            </Label>
+          </div>
+          <div className="flex items-center space-x-3">
+            <RadioGroupItem value="all" id="all" className="border-primary" />
+            <Label htmlFor="all" className="font-mono-crt text-sm cursor-pointer">
+              All (L/R + XY below)
+            </Label>
+          </div>
+        </RadioGroup>
+      </div>
+
+      {/* Preview Button */}
+      <Button
+        onClick={onPreview}
+        disabled={!canPreview || isGenerating}
+        variant="outline"
+        className="w-full font-crt text-lg h-12 border-primary/50 hover:bg-primary/10 hover:border-primary"
+      >
+        <Play className="mr-2 h-5 w-5" />
+        {isPlaying ? 'PLAYING...' : 'PREVIEW'}
+      </Button>
+
+      {/* Generate Button */}
+      <Button
+        onClick={onGenerate}
+        disabled={!canGenerate || isGenerating}
+        className="w-full font-crt text-lg h-14 bg-primary hover:bg-primary/80 text-primary-foreground"
+      >
+        {isGenerating ? (
+          <>
+            <div className="w-5 h-5 border-2 border-primary-foreground border-t-transparent rounded-full animate-spin mr-2" />
+            GENERATING...
+          </>
+        ) : (
+          'GENERATE VIDEO'
+        )}
+      </Button>
+
+      {/* Progress Bar */}
+      {isGenerating && (
+        <div className="space-y-2">
+          <Progress value={progress} className="h-3 bg-secondary" />
+          <p className="text-center font-mono-crt text-sm text-muted-foreground">
+            {progress}% complete
+          </p>
+          <p className="text-center font-mono-crt text-xs text-muted-foreground/70">
+            Keep this tab in foreground
+          </p>
+        </div>
+      )}
+
+      {/* Download Button */}
+      {exportedUrl && (
+        <div className="space-y-3">
+          <a
+            href={exportedUrl}
+            download="oscilloscope-video.webm"
+            className="block"
+          >
+            <Button
+              variant="outline"
+              className="w-full font-crt text-lg h-12 border-accent hover:bg-accent/10 text-accent"
+            >
+              <Download className="mr-2 h-5 w-5" />
+              DOWNLOAD VIDEO
+            </Button>
+          </a>
+          <Button
+            onClick={onReset}
+            variant="ghost"
+            className="w-full font-mono-crt text-muted-foreground hover:text-primary"
+          >
+            <RotateCcw className="mr-2 h-4 w-4" />
+            Reset
+          </Button>
+        </div>
+      )}
+
+      {/* Info */}
+      <div className="text-xs text-muted-foreground font-mono-crt space-y-1 pt-4 border-t border-border">
+        <p>Output: 1920×1080 WebM</p>
+        <p>Frame Rate: 60 FPS</p>
+        <p>Supports files up to 6+ hours</p>
+      </div>
+    </div>
+  );
+}
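
The new panel types its mode as OscilloscopeMode, imported from '@/hooks/useOscilloscopeRenderer', a file that is not part of the visible diff. Judging by the three radio items above, the type is presumably this union — an assumption, stated here only so the diff reads in context:

// Assumed definition of the imported type; the actual hook file is not shown in this commit.
export type OscilloscopeMode = 'combined' | 'separate' | 'all';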
@@ -1,119 +0,0 @@
-import { useState, useRef, useCallback } from 'react';
-import { OscilloscopeScreen, OscilloscopeScreenHandle } from './OscilloscopeScreen';
-import { ControlPanel } from './ControlPanel';
-import { useAudioAnalyzer } from '@/hooks/useAudioAnalyzer';
-import { useOfflineVideoExport } from '@/hooks/useOfflineVideoExport';
-import { toast } from 'sonner';
-
-export const Oscilloscope = () => {
-  const [mode, setMode] = useState<'normal' | 'xy'>('normal');
-  const screenRef = useRef<OscilloscopeScreenHandle>(null);
-  const audioFileRef = useRef<File | null>(null);
-
-  const {
-    isActive,
-    isPlaying,
-    source,
-    fileName,
-    error,
-    startMicrophone,
-    loadAudioFile,
-    togglePlayPause,
-    stop,
-    setGain,
-    getTimeDomainData,
-    getStereoData,
-  } = useAudioAnalyzer();
-
-  const {
-    isExporting,
-    progress,
-    stage,
-    fps: exportFps,
-    generateVideoWithAudio,
-    cancelExport,
-    downloadBlob,
-  } = useOfflineVideoExport();
-
-  const handleLoadAudioFile = useCallback((file: File) => {
-    audioFileRef.current = file;
-    loadAudioFile(file);
-  }, [loadAudioFile]);
-
-  const handleExportVideo = useCallback(async (format: 'webm' | 'mp4') => {
-    if (!audioFileRef.current) {
-      toast.error('Please load an audio file first');
-      return;
-    }
-
-    const drawFrame = screenRef.current?.drawFrameWithData;
-    if (!drawFrame) {
-      toast.error('Canvas not ready');
-      return;
-    }
-
-    toast.info('Starting video export... This may take a while for large files.');
-
-    const blob = await generateVideoWithAudio(
-      audioFileRef.current,
-      drawFrame,
-      {
-        fps: 60,
-        format,
-        width: 1920,
-        height: 1080,
-      }
-    );
-
-    if (blob) {
-      const baseName = fileName?.replace(/\.[^/.]+$/, '') || 'oscilloscope';
-      const extension = format === 'mp4' ? 'mp4' : 'webm';
-      downloadBlob(blob, `${baseName}.${extension}`);
-      toast.success('Video exported successfully!');
-    }
-  }, [fileName, generateVideoWithAudio, downloadBlob]);
-
-  return (
-    <div className="flex flex-col lg:flex-row gap-6 w-full max-w-7xl mx-auto p-4 lg:p-8">
-      {/* Main oscilloscope display */}
-      <div className="flex-1 min-h-[400px] lg:min-h-[600px]">
-        <div className="h-full bg-bezel p-4 lg:p-6 rounded-xl border border-border box-glow">
-          {/* Screen bezel */}
-          <div className="h-full rounded-lg overflow-hidden border-4 border-secondary">
-            <OscilloscopeScreen
-              ref={screenRef}
-              mode={mode}
-              getTimeDomainData={getTimeDomainData}
-              getStereoData={getStereoData}
-              isActive={isActive}
-            />
-          </div>
-        </div>
-      </div>
-
-      {/* Control panel */}
-      <div className="w-full lg:w-72 shrink-0">
-        <ControlPanel
-          mode={mode}
-          onModeChange={setMode}
-          isActive={isActive}
-          isPlaying={isPlaying}
-          source={source}
-          fileName={fileName}
-          onStartMicrophone={startMicrophone}
-          onLoadAudioFile={handleLoadAudioFile}
-          onTogglePlayPause={togglePlayPause}
-          onStop={stop}
-          onGainChange={setGain}
-          error={error}
-          isExporting={isExporting}
-          exportProgress={progress}
-          exportStage={stage}
-          exportFps={exportFps}
-          onExportVideo={handleExportVideo}
-          onCancelExport={cancelExport}
-        />
-      </div>
-    </div>
-  );
-};
259 src/components/OscilloscopeDisplay.tsx Executable file
@@ -0,0 +1,259 @@
+import { useEffect, useRef, useCallback } from 'react';
+import type { AudioData } from '@/hooks/useAudioAnalyzer';
+import type { OscilloscopeMode } from '@/hooks/useOscilloscopeRenderer';
+
+interface OscilloscopeDisplayProps {
+  audioData: AudioData | null;
+  mode: OscilloscopeMode;
+  isPlaying: boolean;
+  onPlaybackEnd?: () => void;
+}
+
+const WIDTH = 800;
+const HEIGHT = 600;
+const FPS = 60;
+
+export function OscilloscopeDisplay({
+  audioData,
+  mode,
+  isPlaying,
+  onPlaybackEnd
+}: OscilloscopeDisplayProps) {
+  const canvasRef = useRef<HTMLCanvasElement>(null);
+  const animationRef = useRef<number | null>(null);
+  const currentSampleRef = useRef(0);
+
+  const drawGraticule = useCallback((ctx: CanvasRenderingContext2D) => {
+    ctx.strokeStyle = '#00ff00';
+    ctx.lineWidth = 1;
+
+    // Horizontal center line (X axis)
+    ctx.beginPath();
+    ctx.moveTo(0, HEIGHT / 2);
+    ctx.lineTo(WIDTH, HEIGHT / 2);
+    ctx.stroke();
+
+    // Vertical center line (Y axis)
+    ctx.beginPath();
+    ctx.moveTo(WIDTH / 2, 0);
+    ctx.lineTo(WIDTH / 2, HEIGHT);
+    ctx.stroke();
+  }, []);
+
+  const drawFrame = useCallback(() => {
+    if (!audioData || !canvasRef.current) return;
+
+    const canvas = canvasRef.current;
+    const ctx = canvas.getContext('2d');
+    if (!ctx) return;
+
+    const samplesPerFrame = Math.floor(audioData.sampleRate / FPS);
+    const startSample = currentSampleRef.current;
+    const endSample = Math.min(startSample + samplesPerFrame, audioData.leftChannel.length);
+
+    // Clear to pure black
+    ctx.fillStyle = '#000000';
+    ctx.fillRect(0, 0, WIDTH, HEIGHT);
+
+    // Draw graticule first
+    drawGraticule(ctx);
+
+    ctx.lineWidth = 2;
+    ctx.lineCap = 'round';
+
+    const leftColor = '#00ff00';
+    const rightColor = '#00ccff';
+    const xyColor = '#ff8800';
+    const dividerColor = '#333333';
+
+    if (mode === 'combined') {
+      // Combined: both channels merged
+      ctx.strokeStyle = leftColor;
+      ctx.beginPath();
+      const samplesPerPixel = samplesPerFrame / WIDTH;
+      const centerY = HEIGHT / 2;
+      for (let x = 0; x < WIDTH; x++) {
+        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
+        if (sampleIndex >= audioData.leftChannel.length) break;
+        const sample = (audioData.leftChannel[sampleIndex] + audioData.rightChannel[sampleIndex]) / 2;
+        const y = centerY - sample * (HEIGHT * 0.4);
+        if (x === 0) ctx.moveTo(x, y);
+        else ctx.lineTo(x, y);
+      }
+      ctx.stroke();
+    } else if (mode === 'separate') {
+      // Separate: Left on top, Right on bottom
+      const halfHeight = HEIGHT / 2;
+      const samplesPerPixel = samplesPerFrame / WIDTH;
+
+      // Left channel (top)
+      ctx.strokeStyle = leftColor;
+      ctx.beginPath();
+      const leftCenterY = halfHeight / 2;
+      for (let x = 0; x < WIDTH; x++) {
+        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
+        if (sampleIndex >= audioData.leftChannel.length) break;
+        const sample = audioData.leftChannel[sampleIndex];
+        const y = leftCenterY - sample * (halfHeight * 0.35);
+        if (x === 0) ctx.moveTo(x, y);
+        else ctx.lineTo(x, y);
+      }
+      ctx.stroke();
+
+      // Right channel (bottom)
+      ctx.strokeStyle = rightColor;
+      ctx.beginPath();
+      const rightCenterY = halfHeight + halfHeight / 2;
+      for (let x = 0; x < WIDTH; x++) {
+        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
+        if (sampleIndex >= audioData.rightChannel.length) break;
+        const sample = audioData.rightChannel[sampleIndex];
+        const y = rightCenterY - sample * (halfHeight * 0.35);
+        if (x === 0) ctx.moveTo(x, y);
+        else ctx.lineTo(x, y);
+      }
+      ctx.stroke();
+
+      // Divider
+      ctx.strokeStyle = dividerColor;
+      ctx.beginPath();
+      ctx.moveTo(0, halfHeight);
+      ctx.lineTo(WIDTH, halfHeight);
+      ctx.stroke();
+    } else if (mode === 'all') {
+      // All: L/R on top row, XY on bottom
+      const topHeight = HEIGHT / 2;
+      const bottomHeight = HEIGHT / 2;
+      const halfWidth = WIDTH / 2;
+      const samplesPerPixel = samplesPerFrame / halfWidth;
+
+      // Left channel (top-left)
+      ctx.strokeStyle = leftColor;
+      ctx.beginPath();
+      const leftCenterY = topHeight / 2;
+      for (let x = 0; x < halfWidth; x++) {
+        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
+        if (sampleIndex >= audioData.leftChannel.length) break;
+        const sample = audioData.leftChannel[sampleIndex];
+        const y = leftCenterY - sample * (topHeight * 0.35);
+        if (x === 0) ctx.moveTo(x, y);
+        else ctx.lineTo(x, y);
+      }
+      ctx.stroke();
+
+      // Right channel (top-right)
+      ctx.strokeStyle = rightColor;
+      ctx.beginPath();
+      const rightCenterY = topHeight / 2;
+      for (let x = 0; x < halfWidth; x++) {
+        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
+        if (sampleIndex >= audioData.rightChannel.length) break;
+        const sample = audioData.rightChannel[sampleIndex];
+        const y = rightCenterY - sample * (topHeight * 0.35);
+        if (x === 0) ctx.moveTo(halfWidth + x, y);
+        else ctx.lineTo(halfWidth + x, y);
+      }
+      ctx.stroke();
+
+      // XY mode (bottom half)
+      ctx.strokeStyle = xyColor;
+      ctx.beginPath();
+      const xyCenterX = WIDTH / 2;
+      const xyCenterY = topHeight + bottomHeight / 2;
+      const xyScale = Math.min(halfWidth, bottomHeight) * 0.35;
+      for (let i = startSample; i < endSample; i++) {
+        const x = xyCenterX + audioData.leftChannel[i] * xyScale;
+        const y = xyCenterY - audioData.rightChannel[i] * xyScale;
+        if (i === startSample) ctx.moveTo(x, y);
+        else ctx.lineTo(x, y);
+      }
+      ctx.stroke();
+
+      // Dividers
+      ctx.strokeStyle = dividerColor;
+      ctx.beginPath();
+      ctx.moveTo(0, topHeight);
+      ctx.lineTo(WIDTH, topHeight);
+      ctx.stroke();
+      ctx.beginPath();
+      ctx.moveTo(halfWidth, 0);
+      ctx.lineTo(halfWidth, topHeight);
+      ctx.stroke();
+    }
+
+    currentSampleRef.current = endSample;
+
+    if (endSample >= audioData.leftChannel.length) {
+      onPlaybackEnd?.();
+      return;
+    }
+
+    animationRef.current = requestAnimationFrame(drawFrame);
+  }, [audioData, mode, drawGraticule, onPlaybackEnd]);
+
+  // Initialize canvas
+  useEffect(() => {
+    if (!canvasRef.current) return;
+
+    const ctx = canvasRef.current.getContext('2d');
+    if (ctx) {
+      ctx.fillStyle = '#000000';
+      ctx.fillRect(0, 0, WIDTH, HEIGHT);
+      drawGraticule(ctx);
+    }
+  }, [drawGraticule]);
+
+  // Handle playback
+  useEffect(() => {
+    if (isPlaying && audioData) {
+      currentSampleRef.current = 0;
+      animationRef.current = requestAnimationFrame(drawFrame);
+    } else {
+      if (animationRef.current) {
+        cancelAnimationFrame(animationRef.current);
+      }
+    }
+
+    return () => {
+      if (animationRef.current) {
+        cancelAnimationFrame(animationRef.current);
+      }
+    };
+  }, [isPlaying, audioData, drawFrame]);
+
+  const getModeLabel = () => {
+    switch (mode) {
+      case 'combined': return 'L+R';
+      case 'separate': return 'L / R';
+      case 'all': return 'ALL';
+      default: return '';
+    }
+  };
+
+  return (
+    <div className="crt-bezel">
+      <div className="screen-curve relative">
+        <canvas
+          ref={canvasRef}
+          width={WIDTH}
+          height={HEIGHT}
+          className="w-full h-auto"
+        />
+
+        {/* Mode indicator */}
+        <div className="absolute top-4 left-4 font-crt text-primary/60 text-sm">
+          {getModeLabel()}
+        </div>
+
+        {/* Idle state */}
+        {!audioData && !isPlaying && (
+          <div className="absolute inset-0 flex items-center justify-center">
+            <p className="font-crt text-2xl text-primary/40 text-glow animate-pulse">
+              NO SIGNAL
+            </p>
+          </div>
+        )}
+      </div>
+    </div>
+  );
+}
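
The renderer's pacing falls out of the samplesPerFrame arithmetic above. A quick worked example using the component's fixed constants and an assumed 44.1 kHz source:

// Values from the component above: FPS = 60, WIDTH = 800; 44.1 kHz input is an assumption.
const samplesPerFrame = Math.floor(44100 / 60); // 735 audio samples consumed per frame
const samplesPerPixel = samplesPerFrame / 800;  // ~0.92 samples mapped to each pixel column
// currentSampleRef advances by ~735 per frame, i.e. 1/60 s of audio,
// so the drawn waveform stays locked to real playback time.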
@@ -1,295 +0,0 @@
-import { useRef, useEffect, useCallback, forwardRef, useImperativeHandle } from 'react';
-
-interface OscilloscopeScreenProps {
-  mode: 'normal' | 'xy';
-  getTimeDomainData: () => Uint8Array | null;
-  getStereoData: () => { left: Uint8Array; right: Uint8Array } | null;
-  isActive: boolean;
-}
-
-export interface OscilloscopeScreenHandle {
-  getCanvas: () => HTMLCanvasElement | null;
-  drawFrameWithData: (ctx: CanvasRenderingContext2D, width: number, height: number, leftData: Uint8Array, rightData: Uint8Array) => void;
-}
-
-export const OscilloscopeScreen = forwardRef<OscilloscopeScreenHandle, OscilloscopeScreenProps>(({
-  mode,
-  getTimeDomainData,
-  getStereoData,
-  isActive,
-}, ref) => {
-  const canvasRef = useRef<HTMLCanvasElement>(null);
-  const animationRef = useRef<number>();
-  const lastTimeRef = useRef<number>(0);
-  const targetFPS = 120;
-  const frameInterval = 1000 / targetFPS;
-
-  const drawGrid = useCallback((ctx: CanvasRenderingContext2D, width: number, height: number) => {
-    ctx.strokeStyle = '#1a3a1a';
-    ctx.lineWidth = 1;
-
-    const vDivisions = 10;
-    for (let i = 0; i <= vDivisions; i++) {
-      const x = Math.round((width / vDivisions) * i) + 0.5;
-      ctx.beginPath();
-      ctx.moveTo(x, 0);
-      ctx.lineTo(x, height);
-      ctx.stroke();
-    }
-
-    const hDivisions = 8;
-    for (let i = 0; i <= hDivisions; i++) {
-      const y = Math.round((height / hDivisions) * i) + 0.5;
-      ctx.beginPath();
-      ctx.moveTo(0, y);
-      ctx.lineTo(width, y);
-      ctx.stroke();
-    }
-
-    ctx.strokeStyle = '#2a5a2a';
-    ctx.lineWidth = 1;
-
-    const centerX = Math.round(width / 2) + 0.5;
-    const centerY = Math.round(height / 2) + 0.5;
-    const tickLength = 6;
-    const tickSpacing = width / 50;
-
-    ctx.beginPath();
-    ctx.moveTo(centerX, 0);
-    ctx.lineTo(centerX, height);
-    ctx.stroke();
-
-    ctx.beginPath();
-    ctx.moveTo(0, centerY);
-    ctx.lineTo(width, centerY);
-    ctx.stroke();
-
-    ctx.strokeStyle = '#2a5a2a';
-    for (let i = 0; i < 50; i++) {
-      const x = Math.round(i * tickSpacing) + 0.5;
-      const y = Math.round(i * tickSpacing * (height / width)) + 0.5;
-
-      ctx.beginPath();
-      ctx.moveTo(x, centerY - tickLength / 2);
-      ctx.lineTo(x, centerY + tickLength / 2);
-      ctx.stroke();
-
-      if (y < height) {
-        ctx.beginPath();
-        ctx.moveTo(centerX - tickLength / 2, y);
-        ctx.lineTo(centerX + tickLength / 2, y);
-        ctx.stroke();
-      }
-    }
-  }, []);
-
-  const drawNormalMode = useCallback((ctx: CanvasRenderingContext2D, width: number, height: number, data: Uint8Array) => {
-    const centerY = height / 2;
-    const points: { x: number; y: number }[] = [];
-
-    const step = Math.max(1, Math.floor(data.length / (width * 2)));
-
-    for (let i = 0; i < data.length; i += step) {
-      const x = (i / data.length) * width;
-      const normalizedValue = (data[i] - 128) / 128;
-      const y = centerY - (normalizedValue * (height / 2) * 0.85);
-      points.push({ x, y });
-    }
-
-    if (points.length < 2) return;
-
-    ctx.strokeStyle = 'rgba(0, 255, 0, 0.15)';
-    ctx.lineWidth = 6;
-    ctx.lineCap = 'round';
-    ctx.lineJoin = 'round';
-
-    ctx.beginPath();
-    ctx.moveTo(points[0].x, points[0].y);
-
-    for (let i = 1; i < points.length - 1; i++) {
-      const xc = (points[i].x + points[i + 1].x) / 2;
-      const yc = (points[i].y + points[i + 1].y) / 2;
-      ctx.quadraticCurveTo(points[i].x, points[i].y, xc, yc);
-    }
-    ctx.lineTo(points[points.length - 1].x, points[points.length - 1].y);
-    ctx.stroke();
-
-    ctx.strokeStyle = 'rgba(0, 255, 0, 0.3)';
-    ctx.lineWidth = 3;
-    ctx.stroke();
-
-    ctx.strokeStyle = '#00ff00';
-    ctx.lineWidth = 1.5;
-    ctx.stroke();
-  }, []);
-
-  const drawXYMode = useCallback((ctx: CanvasRenderingContext2D, width: number, height: number, leftData: Uint8Array, rightData: Uint8Array) => {
-    const centerX = width / 2;
-    const centerY = height / 2;
-    const scale = Math.min(width, height) / 2 * 0.85;
-    const points: { x: number; y: number }[] = [];
-
-    const step = Math.max(1, Math.floor(leftData.length / 2048));
-
-    for (let i = 0; i < leftData.length; i += step) {
-      const xNorm = (leftData[i] - 128) / 128;
-      const yNorm = (rightData[i] - 128) / 128;
-
-      const x = centerX + xNorm * scale;
-      const y = centerY - yNorm * scale;
-      points.push({ x, y });
-    }
-
-    if (points.length < 2) return;
-
-    ctx.strokeStyle = 'rgba(0, 255, 0, 0.15)';
-    ctx.lineWidth = 6;
-    ctx.lineCap = 'round';
-    ctx.lineJoin = 'round';
-
-    ctx.beginPath();
-    ctx.moveTo(points[0].x, points[0].y);
-
-    for (let i = 1; i < points.length - 1; i++) {
-      const xc = (points[i].x + points[i + 1].x) / 2;
-      const yc = (points[i].y + points[i + 1].y) / 2;
-      ctx.quadraticCurveTo(points[i].x, points[i].y, xc, yc);
-    }
-    ctx.lineTo(points[points.length - 1].x, points[points.length - 1].y);
-    ctx.stroke();
-
-    ctx.strokeStyle = 'rgba(0, 255, 0, 0.3)';
-    ctx.lineWidth = 3;
-    ctx.stroke();
-
-    ctx.strokeStyle = '#00ff00';
-    ctx.lineWidth = 1.5;
-    ctx.stroke();
-  }, []);
-
-  const drawIdleWave = useCallback((ctx: CanvasRenderingContext2D, width: number, height: number) => {
-    const centerY = height / 2;
-
-    ctx.strokeStyle = 'rgba(0, 255, 0, 0.15)';
-    ctx.lineWidth = 6;
-    ctx.lineCap = 'round';
-
-    ctx.beginPath();
-    ctx.moveTo(0, centerY);
-    ctx.lineTo(width, centerY);
-    ctx.stroke();
-
-    ctx.strokeStyle = 'rgba(0, 255, 0, 0.3)';
-    ctx.lineWidth = 3;
-    ctx.stroke();
-
-    ctx.strokeStyle = '#00ff00';
-    ctx.lineWidth = 1.5;
-    ctx.stroke();
-  }, []);
-
-  useImperativeHandle(ref, () => ({
-    getCanvas: () => canvasRef.current,
-    drawFrameWithData: (ctx: CanvasRenderingContext2D, width: number, height: number, leftData: Uint8Array, rightData: Uint8Array) => {
-      ctx.fillStyle = '#0a0f0a';
-      ctx.fillRect(0, 0, width, height);
-      drawGrid(ctx, width, height);
-      if (mode === 'normal') {
-        drawNormalMode(ctx, width, height, leftData);
-      } else {
-        drawXYMode(ctx, width, height, leftData, rightData);
-      }
-    },
-  }), [mode, drawGrid, drawNormalMode, drawXYMode]);
-
-  useEffect(() => {
-    const canvas = canvasRef.current;
-    if (!canvas) return;
-
-    const ctx = canvas.getContext('2d', { alpha: false });
-    if (!ctx) return;
-
-    const render = (currentTime: number) => {
-      const deltaTime = currentTime - lastTimeRef.current;
-
-      if (deltaTime >= frameInterval) {
-        lastTimeRef.current = currentTime - (deltaTime % frameInterval);
-
-        const dpr = window.devicePixelRatio || 1;
-        const width = canvas.width / dpr;
-        const height = canvas.height / dpr;
-
-        ctx.fillStyle = '#0a0f0a';
-        ctx.fillRect(0, 0, width, height);
-
-        drawGrid(ctx, width, height);
-
-        if (isActive) {
-          if (mode === 'normal') {
-            const data = getTimeDomainData();
-            if (data) {
-              drawNormalMode(ctx, width, height, data);
-            }
-          } else {
-            const stereoData = getStereoData();
-            if (stereoData) {
-              drawXYMode(ctx, width, height, stereoData.left, stereoData.right);
-            }
-          }
-        } else {
-          drawIdleWave(ctx, width, height);
-        }
-      }
-
-      animationRef.current = requestAnimationFrame(render);
-    };
-
-    animationRef.current = requestAnimationFrame(render);
-
-    return () => {
-      if (animationRef.current) {
-        cancelAnimationFrame(animationRef.current);
-      }
-    };
-  }, [mode, isActive, getTimeDomainData, getStereoData, drawGrid, drawNormalMode, drawXYMode, drawIdleWave, frameInterval]);
-
-  useEffect(() => {
-    const canvas = canvasRef.current;
-    if (!canvas) return;
-
-    const resizeCanvas = () => {
-      const container = canvas.parentElement;
-      if (!container) return;
-
-      const rect = container.getBoundingClientRect();
-      const dpr = window.devicePixelRatio || 1;
-
-      canvas.width = rect.width * dpr;
-      canvas.height = rect.height * dpr;
-
-      const ctx = canvas.getContext('2d');
-      if (ctx) {
-        ctx.scale(dpr, dpr);
-      }
-
-      canvas.style.width = `${rect.width}px`;
-      canvas.style.height = `${rect.height}px`;
-    };
-
-    resizeCanvas();
-    window.addEventListener('resize', resizeCanvas);
-
-    return () => {
-      window.removeEventListener('resize', resizeCanvas);
-    };
-  }, []);
-
-  return (
-    <div className="relative w-full h-full overflow-hidden rounded-lg" style={{ backgroundColor: '#0a0f0a' }}>
-      <canvas
-        ref={canvasRef}
-        className="w-full h-full"
-      />
-    </div>
-  );
-});
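
The screen component removed above read AnalyserNode byte buffers and normalized each sample as (data[i] - 128) / 128. A small check of that mapping, for reference:

// getByteTimeDomainData() fills a Uint8Array with values 0..255, silence at 128.
const norm = (v: number) => (v - 128) / 128;
norm(0);   // -1          (full negative swing)
norm(128); //  0          (center line)
norm(255); //  0.9921875  (just short of +1)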
273 src/hooks/useAudioAnalyzer.ts Normal file → Executable file
@@ -1,246 +1,69 @@
-import { useState, useRef, useCallback, useEffect } from 'react';
-
-interface AudioAnalyzerState {
-  isActive: boolean;
-  error: string | null;
-  source: 'microphone' | 'file' | null;
-  fileName: string | null;
-  isPlaying: boolean;
-}
-
-export const useAudioAnalyzer = () => {
-  const [state, setState] = useState<AudioAnalyzerState>({
-    isActive: false,
-    error: null,
-    source: null,
-    fileName: null,
-    isPlaying: false,
-  });
-
-  const audioContextRef = useRef<AudioContext | null>(null);
-  const analyzerLeftRef = useRef<AnalyserNode | null>(null);
-  const analyzerRightRef = useRef<AnalyserNode | null>(null);
-  const sourceRef = useRef<MediaStreamAudioSourceNode | MediaElementAudioSourceNode | null>(null);
-  const splitterRef = useRef<ChannelSplitterNode | null>(null);
-  const streamRef = useRef<MediaStream | null>(null);
-  const analysisGainNodeRef = useRef<GainNode | null>(null);
-  const audioElementRef = useRef<HTMLAudioElement | null>(null);
-  const gainValueRef = useRef<number>(3); // Default higher gain for analysis sensitivity only
-
-  const getTimeDomainData = useCallback(() => {
-    if (!analyzerLeftRef.current) return null;
-
-    const bufferLength = analyzerLeftRef.current.fftSize;
-    const dataArray = new Uint8Array(bufferLength);
-    analyzerLeftRef.current.getByteTimeDomainData(dataArray);
-
-    return dataArray;
-  }, []);
-
-  const getStereoData = useCallback(() => {
-    if (!analyzerLeftRef.current || !analyzerRightRef.current) return null;
-
-    const bufferLength = analyzerLeftRef.current.fftSize;
-    const leftData = new Uint8Array(bufferLength);
-    const rightData = new Uint8Array(bufferLength);
-
-    analyzerLeftRef.current.getByteTimeDomainData(leftData);
-    analyzerRightRef.current.getByteTimeDomainData(rightData);
-
-    return { left: leftData, right: rightData };
-  }, []);
-
-  const setGain = useCallback((value: number) => {
-    gainValueRef.current = value;
-    if (analysisGainNodeRef.current) {
-      analysisGainNodeRef.current.gain.value = value;
-    }
-  }, []);
-
-  const setupAnalyzers = useCallback((audioContext: AudioContext) => {
-    // Create gain node for analysis sensitivity (does NOT affect audio output)
-    analysisGainNodeRef.current = audioContext.createGain();
-    analysisGainNodeRef.current.gain.value = gainValueRef.current;
-
-    // Create channel splitter for stereo
-    splitterRef.current = audioContext.createChannelSplitter(2);
-
-    // Create analyzers for each channel
-    analyzerLeftRef.current = audioContext.createAnalyser();
-    analyzerRightRef.current = audioContext.createAnalyser();
-
-    // Configure analyzers for higher sensitivity
-    const fftSize = 2048;
-    analyzerLeftRef.current.fftSize = fftSize;
-    analyzerRightRef.current.fftSize = fftSize;
-    analyzerLeftRef.current.smoothingTimeConstant = 0.5;
-    analyzerRightRef.current.smoothingTimeConstant = 0.5;
-    analyzerLeftRef.current.minDecibels = -90;
-    analyzerRightRef.current.minDecibels = -90;
-    analyzerLeftRef.current.maxDecibels = -10;
-    analyzerRightRef.current.maxDecibels = -10;
-  }, []);
-
-  const startMicrophone = useCallback(async () => {
-    try {
-      setState(prev => ({ ...prev, isActive: false, error: null }));
-
-      const stream = await navigator.mediaDevices.getUserMedia({
-        audio: {
-          echoCancellation: false,
-          noiseSuppression: false,
-          autoGainControl: false,
-        },
-      });
-
-      streamRef.current = stream;
-      audioContextRef.current = new AudioContext();
-
-      setupAnalyzers(audioContextRef.current);
-
-      // Create source from microphone
-      const micSource = audioContextRef.current.createMediaStreamSource(stream);
-      sourceRef.current = micSource;
-
-      // Connect: source -> analysisGain -> splitter -> analyzers
-      // (microphone doesn't need output, just analysis)
-      micSource.connect(analysisGainNodeRef.current!);
-      analysisGainNodeRef.current!.connect(splitterRef.current!);
-      splitterRef.current!.connect(analyzerLeftRef.current!, 0);
-      splitterRef.current!.connect(analyzerRightRef.current!, 1);
-
-      setState({
-        isActive: true,
-        error: null,
-        source: 'microphone',
-        fileName: null,
-        isPlaying: true
-      });
-    } catch (err) {
-      const message = err instanceof Error ? err.message : 'Failed to access microphone';
-      setState(prev => ({ ...prev, isActive: false, error: message }));
-    }
-  }, [setupAnalyzers]);
-
-  const loadAudioFile = useCallback(async (file: File) => {
-    try {
-      // Stop any existing audio
-      stop();
-
-      setState(prev => ({ ...prev, isActive: false, error: null }));
-
-      // Create audio element
-      const audioElement = new Audio();
-      audioElement.src = URL.createObjectURL(file);
-      audioElement.loop = true;
-      audioElementRef.current = audioElement;
-
-      audioContextRef.current = new AudioContext();
-      setupAnalyzers(audioContextRef.current);
-
-      // Create source from audio element
-      const audioSource = audioContextRef.current.createMediaElementSource(audioElement);
-      sourceRef.current = audioSource;
-
-      // For files: source -> destination (clean audio output)
-      // source -> analysisGain -> splitter -> analyzers (boosted for visualization)
-      audioSource.connect(audioContextRef.current.destination);
-      audioSource.connect(analysisGainNodeRef.current!);
-      analysisGainNodeRef.current!.connect(splitterRef.current!);
-      splitterRef.current!.connect(analyzerLeftRef.current!, 0);
-      splitterRef.current!.connect(analyzerRightRef.current!, 1);
-
-      // Start playing
-      await audioElement.play();
-
-      setState({
-        isActive: true,
-        error: null,
-        source: 'file',
-        fileName: file.name,
-        isPlaying: true
-      });
-    } catch (err) {
-      const message = err instanceof Error ? err.message : 'Failed to load audio file';
-      setState(prev => ({ ...prev, isActive: false, error: message }));
-    }
-  }, [setupAnalyzers]);
-
-  const togglePlayPause = useCallback(() => {
-    if (!audioElementRef.current) return;
-
-    if (audioElementRef.current.paused) {
-      audioElementRef.current.play();
-      setState(prev => ({ ...prev, isPlaying: true }));
-    } else {
-      audioElementRef.current.pause();
-      setState(prev => ({ ...prev, isPlaying: false }));
-    }
-  }, []);
-
-  const stop = useCallback(() => {
-    if (streamRef.current) {
-      streamRef.current.getTracks().forEach(track => track.stop());
-      streamRef.current = null;
-    }
-
-    if (audioElementRef.current) {
-      audioElementRef.current.pause();
-      audioElementRef.current.src = '';
-      audioElementRef.current = null;
-    }
-
-    if (sourceRef.current) {
-      sourceRef.current.disconnect();
-      sourceRef.current = null;
-    }
-
-    if (analysisGainNodeRef.current) {
-      analysisGainNodeRef.current.disconnect();
-      analysisGainNodeRef.current = null;
-    }
-
-    if (splitterRef.current) {
-      splitterRef.current.disconnect();
-      splitterRef.current = null;
-    }
-
-    if (audioContextRef.current) {
-      audioContextRef.current.close();
-      audioContextRef.current = null;
-    }
-
-    analyzerLeftRef.current = null;
-    analyzerRightRef.current = null;
-
-    setState({
-      isActive: false,
-      error: null,
-      source: null,
-      fileName: null,
-      isPlaying: false
-    });
-  }, []);
-
-  useEffect(() => {
-    return () => {
-      stop();
-    };
-  }, [stop]);
-
-  const getAudioElement = useCallback(() => {
-    return audioElementRef.current;
-  }, []);
-
-  return {
-    ...state,
-    startMicrophone,
-    loadAudioFile,
-    togglePlayPause,
-    stop,
-    setGain,
-    getTimeDomainData,
-    getStereoData,
-    getAudioElement,
-  };
-};
+import { useRef, useState, useCallback } from 'react';
+
+export interface AudioData {
+  leftChannel: Float32Array;
+  rightChannel: Float32Array;
+  sampleRate: number;
+  duration: number;
+}
+
+export function useAudioAnalyzer() {
+  const [audioData, setAudioData] = useState<AudioData | null>(null);
+  const [isLoading, setIsLoading] = useState(false);
+  const [error, setError] = useState<string | null>(null);
+  const [fileName, setFileName] = useState<string | null>(null);
+  const audioContextRef = useRef<AudioContext | null>(null);
+
+  const loadAudioFile = useCallback(async (file: File) => {
+    setIsLoading(true);
+    setError(null);
+    setFileName(file.name);
+
+    try {
+      // Create or reuse AudioContext
+      if (!audioContextRef.current) {
+        audioContextRef.current = new AudioContext();
+      }
+      const audioContext = audioContextRef.current;
+
+      // Read file as ArrayBuffer
+      const arrayBuffer = await file.arrayBuffer();
+
+      // Decode audio data
+      const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
+
+      // Extract channel data
+      const leftChannel = audioBuffer.getChannelData(0);
+      const rightChannel = audioBuffer.numberOfChannels > 1
+        ? audioBuffer.getChannelData(1)
+        : audioBuffer.getChannelData(0); // Mono: duplicate left channel
+
+      setAudioData({
+        leftChannel: new Float32Array(leftChannel),
+        rightChannel: new Float32Array(rightChannel),
+        sampleRate: audioBuffer.sampleRate,
+        duration: audioBuffer.duration,
+      });
+    } catch (err) {
+      setError(err instanceof Error ? err.message : 'Failed to load audio file');
+      setAudioData(null);
+    } finally {
+      setIsLoading(false);
+    }
+  }, []);
+
+  const reset = useCallback(() => {
+    setAudioData(null);
+    setFileName(null);
+    setError(null);
+  }, []);
+
+  return {
+    audioData,
+    isLoading,
+    error,
+    fileName,
+    loadAudioFile,
+    reset,
+  };
+}
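
A minimal sketch of how the reworked hook and the new OscilloscopeDisplay might fit together; the host component and its play state are assumptions for illustration, not part of the diff:

import { useState } from 'react';
import { useAudioAnalyzer } from '@/hooks/useAudioAnalyzer';
import { OscilloscopeDisplay } from '@/components/OscilloscopeDisplay';

// Hypothetical preview pane: audioData is filled once loadAudioFile has
// decoded a file; the display then steps through it sample by sample.
export function PreviewPane() {
  const [isPlaying, setIsPlaying] = useState(false);
  const { audioData } = useAudioAnalyzer();
  return (
    <OscilloscopeDisplay
      audioData={audioData}
      mode="combined"
      isPlaying={isPlaying}
      onPlaybackEnd={() => setIsPlaying(false)}
    />
  );
}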
|
|||||||
@ -1,337 +1,16 @@
 import { useState, useCallback, useRef } from 'react';

-export type ExportStage = 'idle' | 'preparing' | 'rendering' | 'encoding' | 'complete';
-
-interface ExportState {
-  isExporting: boolean;
-  progress: number;
-  error: string | null;
-  stage: ExportStage;
-  fps: number;
-}
-
-interface ExportOptions {
-  fps: number;
-  format: 'webm' | 'mp4';
-  width: number;
-  height: number;
-}
-
-interface WavHeader {
-  sampleRate: number;
-  numChannels: number;
-  bitsPerSample: number;
-  dataOffset: number;
-  dataSize: number;
-}
-
-// Parse WAV header without loading entire file
-async function parseWavHeader(file: File): Promise<WavHeader> {
-  const headerBuffer = await file.slice(0, 44).arrayBuffer();
-  const view = new DataView(headerBuffer);
-
-  // Verify RIFF header
-  const riff = String.fromCharCode(view.getUint8(0), view.getUint8(1), view.getUint8(2), view.getUint8(3));
-  if (riff !== 'RIFF') throw new Error('Not a valid WAV file');
-
-  const wave = String.fromCharCode(view.getUint8(8), view.getUint8(9), view.getUint8(10), view.getUint8(11));
-  if (wave !== 'WAVE') throw new Error('Not a valid WAV file');
-
-  // Find fmt chunk
-  const numChannels = view.getUint16(22, true);
-  const sampleRate = view.getUint32(24, true);
-  const bitsPerSample = view.getUint16(34, true);
-
-  // Find data chunk - scan for 'data' marker
-  let dataOffset = 36;
-  let dataSize = 0;
-
-  // Read more bytes to find data chunk
-  const extendedBuffer = await file.slice(0, Math.min(1024, file.size)).arrayBuffer();
-  const extendedView = new DataView(extendedBuffer);
-
-  for (let i = 36; i < extendedBuffer.byteLength - 8; i++) {
-    const marker = String.fromCharCode(
-      extendedView.getUint8(i),
-      extendedView.getUint8(i + 1),
-      extendedView.getUint8(i + 2),
-      extendedView.getUint8(i + 3)
-    );
-    if (marker === 'data') {
-      dataOffset = i + 8;
-      dataSize = extendedView.getUint32(i + 4, true);
-      break;
-    }
-  }
-
-  if (dataSize === 0) {
-    // Estimate from file size
-    dataSize = file.size - dataOffset;
-  }
-
-  return { sampleRate, numChannels, bitsPerSample, dataOffset, dataSize };
-}
-
-// Read a chunk of samples from WAV file
-async function readWavChunk(
-  file: File,
-  header: WavHeader,
-  startSample: number,
-  numSamples: number
-): Promise<{ left: Float32Array; right: Float32Array }> {
-  const bytesPerSample = header.bitsPerSample / 8;
-  const bytesPerFrame = bytesPerSample * header.numChannels;
-
-  const startByte = header.dataOffset + (startSample * bytesPerFrame);
-  const endByte = Math.min(startByte + (numSamples * bytesPerFrame), file.size);
-
-  const chunk = await file.slice(startByte, endByte).arrayBuffer();
-  const view = new DataView(chunk);
-
-  const actualSamples = Math.floor(chunk.byteLength / bytesPerFrame);
-  const left = new Float32Array(actualSamples);
-  const right = new Float32Array(actualSamples);
-
-  for (let i = 0; i < actualSamples; i++) {
-    const offset = i * bytesPerFrame;
-
-    if (header.bitsPerSample === 16) {
-      left[i] = view.getInt16(offset, true) / 32768;
-      right[i] = header.numChannels > 1
-        ? view.getInt16(offset + 2, true) / 32768
-        : left[i];
-    } else if (header.bitsPerSample === 24) {
-      const l = (view.getUint8(offset) | (view.getUint8(offset + 1) << 8) | (view.getInt8(offset + 2) << 16));
-      left[i] = l / 8388608;
-      if (header.numChannels > 1) {
-        const r = (view.getUint8(offset + 3) | (view.getUint8(offset + 4) << 8) | (view.getInt8(offset + 5) << 16));
-        right[i] = r / 8388608;
-      } else {
-        right[i] = left[i];
-      }
-    } else if (header.bitsPerSample === 32) {
-      left[i] = view.getFloat32(offset, true);
-      right[i] = header.numChannels > 1
-        ? view.getFloat32(offset + 4, true)
-        : left[i];
-    } else {
-      // 8-bit
-      left[i] = (view.getUint8(offset) - 128) / 128;
-      right[i] = header.numChannels > 1
-        ? (view.getUint8(offset + 1) - 128) / 128
-        : left[i];
-    }
-  }
-
-  return { left, right };
-}
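Editor's note: the two deleted helpers above stream PCM straight off the File with slice(), so only a small window of samples is resident at a time. A minimal sketch of how they compose; iterateWav and chunkSeconds are illustrative names, not from this repo:

// Sketch: iterate a WAV file in small windows without decodeAudioData.
async function* iterateWav(file: File, chunkSeconds = 2) {
  const header = await parseWavHeader(file);
  const bytesPerFrame = (header.bitsPerSample / 8) * header.numChannels;
  const totalSamples = Math.floor(header.dataSize / bytesPerFrame);
  const step = Math.floor(header.sampleRate * chunkSeconds);
  for (let s = 0; s < totalSamples; s += step) {
    // Each window is re-read from disk, so peak memory stays near step * 8 bytes.
    yield await readWavChunk(file, header, s, step);
  }
}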

 export const useOfflineVideoExport = () => {
-  const [state, setState] = useState<ExportState>({
+  const [state, setState] = useState({
     isExporting: false,
     progress: 0,
     error: null,
-    stage: 'idle',
+    stage: 'idle' as 'idle' | 'preparing' | 'rendering' | 'encoding' | 'complete',
     fps: 0,
   });

   const cancelledRef = useRef(false);
-
-  const generateVideoWithAudio = useCallback(async (
-    audioFile: File,
-    drawFrame: (ctx: CanvasRenderingContext2D, width: number, height: number, leftData: Uint8Array, rightData: Uint8Array) => void,
-    options: ExportOptions
-  ): Promise<Blob | null> => {
-    cancelledRef.current = false;
-    setState({ isExporting: true, progress: 0, error: null, stage: 'preparing', fps: 0 });
-
-    try {
-      const { fps, width, height } = options;
-      const isWav = audioFile.name.toLowerCase().endsWith('.wav');
-
-      console.log(`Starting memory-efficient export: ${audioFile.name} (${(audioFile.size / 1024 / 1024).toFixed(2)} MB)`);
-
-      let sampleRate: number;
-      let totalSamples: number;
-      let getChunk: (startSample: number, numSamples: number) => Promise<{ left: Float32Array; right: Float32Array }>;
-
-      if (isWav) {
-        // Memory-efficient WAV streaming
-        console.log('Using streaming WAV parser (memory efficient)');
-        const header = await parseWavHeader(audioFile);
-        sampleRate = header.sampleRate;
-        const bytesPerSample = header.bitsPerSample / 8 * header.numChannels;
-        totalSamples = Math.floor(header.dataSize / bytesPerSample);
-
-        getChunk = (startSample, numSamples) => readWavChunk(audioFile, header, startSample, numSamples);
-
-        console.log(`WAV: ${header.numChannels}ch, ${sampleRate}Hz, ${header.bitsPerSample}bit, ${totalSamples} samples`);
-      } else {
-        // For non-WAV files, we need to decode (uses more memory)
-        console.log('Non-WAV file, using AudioContext decode (higher memory)');
-        const arrayBuffer = await audioFile.arrayBuffer();
-        const audioContext = new AudioContext();
-        const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
-
-        sampleRate = audioBuffer.sampleRate;
-        totalSamples = audioBuffer.length;
-
-        const leftChannel = audioBuffer.getChannelData(0);
-        const rightChannel = audioBuffer.numberOfChannels > 1 ? audioBuffer.getChannelData(1) : leftChannel;
-
-        await audioContext.close();
-
-        getChunk = async (startSample, numSamples) => {
-          const end = Math.min(startSample + numSamples, totalSamples);
-          return {
-            left: leftChannel.slice(startSample, end),
-            right: rightChannel.slice(startSample, end),
-          };
-        };
-      }
-
-      if (cancelledRef.current) {
-        setState({ isExporting: false, progress: 0, error: 'Cancelled', stage: 'idle', fps: 0 });
-        return null;
-      }
-
-      const duration = totalSamples / sampleRate;
-      const totalFrames = Math.ceil(duration * fps);
-      const samplesPerFrame = Math.floor(sampleRate / fps);
-      const fftSize = 2048;
-
-      console.log(`Duration: ${duration.toFixed(2)}s, ${totalFrames} frames @ ${fps}fps`);
-
-      setState(prev => ({ ...prev, stage: 'rendering', progress: 5 }));
-
-      // Create canvas
-      const canvas = document.createElement('canvas');
-      canvas.width = width;
-      canvas.height = height;
-      const ctx = canvas.getContext('2d', { alpha: false, desynchronized: true });
-
-      if (!ctx) throw new Error('Could not create canvas context');
-
-      // Setup video recording
-      const stream = canvas.captureStream(0);
-      const videoTrack = stream.getVideoTracks()[0];
-
-      const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp9')
-        ? 'video/webm;codecs=vp9'
-        : 'video/webm;codecs=vp8';
-
-      const videoChunks: Blob[] = [];
-      const recorder = new MediaRecorder(stream, {
-        mimeType,
-        videoBitsPerSecond: 20_000_000,
-      });
-
-      recorder.ondataavailable = (e) => {
-        if (e.data.size > 0) videoChunks.push(e.data);
-      };
-
-      // Start recording
-      recorder.start(1000);
-
-      const startTime = performance.now();
-      let framesProcessed = 0;
-
-      // Process frames in batches, loading audio chunks as needed
-      const chunkSizeFrames = 120; // Process 2 seconds at a time (at 60fps)
-      const samplesPerChunk = chunkSizeFrames * samplesPerFrame + fftSize;
-
-      for (let frameIndex = 0; frameIndex < totalFrames; frameIndex += chunkSizeFrames) {
-        if (cancelledRef.current) {
-          recorder.stop();
-          setState({ isExporting: false, progress: 0, error: 'Cancelled', stage: 'idle', fps: 0 });
-          return null;
-        }
-
-        // Load audio chunk for this batch
-        const startSample = frameIndex * samplesPerFrame;
-        const { left: leftChunk, right: rightChunk } = await getChunk(startSample, samplesPerChunk);
-
-        // Process frames in this chunk
-        const endFrame = Math.min(frameIndex + chunkSizeFrames, totalFrames);
-
-        for (let f = frameIndex; f < endFrame; f++) {
-          const localOffset = (f - frameIndex) * samplesPerFrame;
-
-          // Extract waveform data for this frame
-          const leftData = new Uint8Array(fftSize);
-          const rightData = new Uint8Array(fftSize);
-
-          for (let i = 0; i < fftSize; i++) {
-            const sampleIndex = localOffset + Math.floor((i / fftSize) * samplesPerFrame);
-
-            if (sampleIndex >= 0 && sampleIndex < leftChunk.length) {
-              leftData[i] = Math.round((leftChunk[sampleIndex] * 128) + 128);
-              rightData[i] = Math.round((rightChunk[sampleIndex] * 128) + 128);
-            } else {
-              leftData[i] = 128;
-              rightData[i] = 128;
-            }
-          }
-
-          // Draw frame
-          drawFrame(ctx, width, height, leftData, rightData);
-
-          // Capture frame
-          const track = videoTrack as unknown as { requestFrame?: () => void };
-          if (track.requestFrame) track.requestFrame();
-
-          framesProcessed++;
-        }
-
-        // Update progress
-        const elapsed = (performance.now() - startTime) / 1000;
-        const currentFps = Math.round(framesProcessed / elapsed);
-        const progress = 5 + Math.round((framesProcessed / totalFrames) * 85);
-        setState(prev => ({ ...prev, progress, fps: currentFps }));
-
-        // Yield to main thread
-        await new Promise(r => setTimeout(r, 0));
-      }
-
-      // Stop recording
-      await new Promise(r => setTimeout(r, 200));
-      recorder.stop();
-
-      // Wait for recorder to finish
-      await new Promise<void>(resolve => {
-        const checkInterval = setInterval(() => {
-          if (recorder.state === 'inactive') {
-            clearInterval(checkInterval);
-            resolve();
-          }
-        }, 100);
-      });
-
-      const videoBlob = new Blob(videoChunks, { type: mimeType });
-      console.log(`Video rendered: ${(videoBlob.size / 1024 / 1024).toFixed(2)} MB`);
-
-      setState(prev => ({ ...prev, stage: 'encoding', progress: 92 }));
-
-      // Mux audio with video (streaming approach)
-      const finalBlob = await muxAudioVideo(videoBlob, audioFile, duration, fps);
-
-      setState({ isExporting: false, progress: 100, error: null, stage: 'complete', fps: 0 });
-      console.log(`Export complete: ${(finalBlob.size / 1024 / 1024).toFixed(2)} MB`);
-
-      return finalBlob;
-
-    } catch (err) {
-      console.error('Export error:', err);
-      const message = err instanceof Error ? err.message : 'Export failed';
-      setState({ isExporting: false, progress: 0, error: message, stage: 'idle', fps: 0 });
-      return null;
-    }
-  }, []);
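Editor's note: the deleted exporter above runs faster than real time because the canvas stream is created with captureStream(0) and frames are pushed explicitly. A condensed sketch of just that technique; canvas, paintFrame, and totalFrames stand in for the surrounding code:

// Sketch: manual frame pacing lets the encoder run faster than wall clock.
const stream = canvas.captureStream(0); // 0 fps: no automatic capture
const track = stream.getVideoTracks()[0] as CanvasCaptureMediaStreamTrack;
for (let f = 0; f < totalFrames; f++) {
  paintFrame(f);        // draw frame f synchronously (illustrative)
  track.requestFrame(); // hand exactly one frame to the MediaRecorder
}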
-
-  const cancelExport = useCallback(() => {
-    cancelledRef.current = true;
-  }, []);

   const downloadBlob = useCallback((blob: Blob, filename: string) => {
     const url = URL.createObjectURL(blob);
     const a = document.createElement('a');

@@ -343,6 +22,276 @@ export const useOfflineVideoExport = () => {
     URL.revokeObjectURL(url);
   }, []);
+
+  const cancelExport = useCallback(() => {
+    console.log('Cancel export requested');
+    cancelledRef.current = true;
+    setState(prev => ({ ...prev, error: 'Cancelling...' }));
+  }, []);
+
+  const generateVideoWithAudio = useCallback(async (
+    audioFile: File,
+    drawFrame: (ctx: CanvasRenderingContext2D, width: number, height: number, leftData: Uint8Array, rightData: Uint8Array) => void,
+    options: { fps: number; format: 'webm' | 'mp4'; width: number; height: number; quality?: 'low' | 'medium' | 'high'; }
+  ): Promise<Blob | null> => {
+    console.log('🚀 Starting video export with options:', options);
+    cancelledRef.current = false;
+    setState({ isExporting: true, progress: 0, error: null, stage: 'preparing', fps: 0 });
+
+    try {
+      const { fps, width, height, quality = 'medium' } = options;
+
+      // Quality settings
+      const qualitySettings = {
+        low: { bitrateMultiplier: 0.5, samplesPerFrame: 1024 },
+        medium: { bitrateMultiplier: 1.0, samplesPerFrame: 2048 },
+        high: { bitrateMultiplier: 1.5, samplesPerFrame: 4096 }
+      };
+
+      const qualityConfig = qualitySettings[quality];
+
+      // Create canvas for rendering
+      const canvas = document.createElement('canvas');
+      canvas.width = width;
+      canvas.height = height;
+      const ctx = canvas.getContext('2d');
+
+      if (!ctx) {
+        throw new Error('Canvas not supported');
+      }
+
+      setState(prev => ({ ...prev, stage: 'rendering', progress: 10 }));
+
+      // Get supported codecs
+      const codecs = [
+        'video/webm;codecs=vp9',
+        'video/webm;codecs=vp8',
+        'video/mp4;codecs=h264',
+        'video/mp4',
+        'video/webm'
+      ];
+
+      let selectedCodec: string | null = null;
+      let videoBitsPerSecond = 2000000; // Default 2 Mbps
+
+      for (const codec of codecs) {
+        if (MediaRecorder.isTypeSupported(codec)) {
+          selectedCodec = codec;
+          console.log(`✅ Using codec: ${codec}`);
+
+          // Adjust bitrate based on codec and quality setting
+          if (codec.includes('vp9')) {
+            videoBitsPerSecond = Math.floor(3000000 * qualityConfig.bitrateMultiplier);
+          } else if (codec.includes('h264')) {
+            videoBitsPerSecond = Math.floor(4000000 * qualityConfig.bitrateMultiplier);
+          } else if (codec.includes('vp8')) {
+            videoBitsPerSecond = Math.floor(2000000 * qualityConfig.bitrateMultiplier);
+          }
+
+          break;
+        }
+      }
+
+      if (!selectedCodec) {
+        throw new Error('No video codec supported');
+      }
+
+      // Decode the audio first: the buffer source below reads audioBuffer,
+      // so this block must run before the recording graph is built.
+      // Use real audio data if available, otherwise generate mock data.
+      let audioBuffer: AudioBuffer;
+      let sampleRate: number;
+      let totalSamples: number;
+      let duration: number;
+
+      try {
+        // Try to decode the actual uploaded audio file
+        const arrayBuffer = await audioFile.arrayBuffer();
+        const audioContext = new AudioContext();
+        audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
+        sampleRate = audioBuffer.sampleRate;
+        totalSamples = audioBuffer.length;
+        duration = totalSamples / sampleRate;
+        console.log(`✅ Using real audio: ${duration.toFixed(1)}s, ${totalSamples} samples`);
+      } catch (audioError) {
+        console.warn('⚠️ Could not decode audio file, using mock data:', audioError);
+        // Generate mock audio data
+        duration = 5.0; // 5 seconds
+        sampleRate = 44100;
+        totalSamples = Math.floor(duration * sampleRate);
+
+        // Create a proper AudioBuffer for mock data
+        const mockAudioContext = new AudioContext();
+        audioBuffer = mockAudioContext.createBuffer(2, totalSamples, sampleRate);
+
+        // Fill with sine wave
+        const leftChannel = audioBuffer.getChannelData(0);
+        const rightChannel = audioBuffer.getChannelData(1);
+
+        for (let i = 0; i < totalSamples; i++) {
+          const time = i / sampleRate;
+          const frequency = 440; // A4 note
+          const value = Math.sin(2 * Math.PI * frequency * time) * 0.5;
+          leftChannel[i] = value;
+          rightChannel[i] = value;
+        }
+        console.log(`📊 Using mock audio: ${duration.toFixed(1)}s, ${totalSamples} samples`);
+      }
+
+      // Create audio context for recording
+      const recordingAudioContext = new AudioContext();
+
+      // Resume audio context if suspended
+      if (recordingAudioContext.state === 'suspended') {
+        await recordingAudioContext.resume();
+      }
+
+      // Create audio source and destination
+      const recordingAudioSource = recordingAudioContext.createBufferSource();
+      recordingAudioSource.buffer = audioBuffer;
+      recordingAudioSource.loop = false;
+
+      const audioDestination = recordingAudioContext.createMediaStreamDestination();
+      recordingAudioSource.connect(audioDestination);
+      recordingAudioSource.connect(recordingAudioContext.destination);
+
+      // Combine video and audio streams
+      const combinedStream = new MediaStream();
+      canvas.captureStream(fps).getVideoTracks().forEach(track => combinedStream.addTrack(track));
+      audioDestination.stream.getAudioTracks().forEach(track => combinedStream.addTrack(track));
+
+      console.log(`✅ Combined stream: ${combinedStream.getVideoTracks().length} video, ${combinedStream.getAudioTracks().length} audio tracks`);
+
+      const recorder = new MediaRecorder(combinedStream, {
+        mimeType: selectedCodec,
+        videoBitsPerSecond: videoBitsPerSecond,
+      });
+
+      // Collect encoded chunks; without this handler nothing is recorded.
+      const chunks: Blob[] = [];
+      recorder.ondataavailable = (e) => {
+        if (e.data.size > 0) chunks.push(e.data);
+      };
+
+      console.log('✅ MediaRecorder created with audio and video');
+      recorder.start(1000); // 1 second chunks
+
+      // Start audio playback synchronized with recording
+      recordingAudioSource.start(0);
+      console.log('🔊 Audio playback started for recording');
+
+      // Generate animation frames for full audio duration
+      const totalFrames = Math.ceil(duration * fps);
+      const samplesPerFrame = Math.min(qualityConfig.samplesPerFrame, Math.floor(totalSamples / totalFrames));
+
+      console.log(`🎬 Quality: ${quality}, Frames: ${totalFrames}, Samples/frame: ${samplesPerFrame}, Duration: ${duration.toFixed(1)}s`);
+
+      for (let frameIndex = 0; frameIndex < totalFrames; frameIndex++) {
+        if (cancelledRef.current) {
+          try {
+            recordingAudioSource.stop();
+            recordingAudioContext.close();
+          } catch (e) {}
+          recorder.stop();
+          setState({ isExporting: false, progress: 0, error: 'Cancelled', stage: 'idle', fps: 0 });
+          return null;
+        }
+
+        // Calculate current audio position for this frame
+        const currentSample = Math.min(frameIndex * samplesPerFrame, totalSamples - samplesPerFrame);
+
+        // Get waveform data from actual audio buffer
+        const leftChannel = audioBuffer.getChannelData(0);
+        const rightChannel = audioBuffer.numberOfChannels > 1 ? audioBuffer.getChannelData(1) : leftChannel;
+
+        // Create waveform data for this frame
+        const leftData = new Uint8Array(samplesPerFrame);
+        const rightData = new Uint8Array(samplesPerFrame);
+
+        for (let i = 0; i < samplesPerFrame; i++) {
+          const sampleIndex = currentSample + i;
+          if (sampleIndex >= 0 && sampleIndex < totalSamples) {
+            // Convert from -1..1 range to 0..255 range
+            leftData[i] = Math.round(((leftChannel[sampleIndex] + 1) / 2) * 255);
+            rightData[i] = Math.round(((rightChannel[sampleIndex] + 1) / 2) * 255);
+          } else {
+            leftData[i] = 128;
+            rightData[i] = 128;
+          }
+        }
+
+        // Clear canvas
+        ctx.fillStyle = '#0a0f0a';
+        ctx.fillRect(0, 0, width, height);
+
+        // Draw oscilloscope with the per-frame waveform data
+        try {
+          drawFrame(ctx, width, height, leftData, rightData);
+        } catch (drawError) {
+          console.error('❌ Error in drawFrame:', drawError);
+          // Fallback: simple waveform
+          ctx.strokeStyle = '#00ff00';
+          ctx.lineWidth = 2;
+          ctx.beginPath();
+          for (let x = 0; x < width; x += 4) {
+            const sampleIndex = Math.floor((x / width) * samplesPerFrame);
+            const value = sampleIndex < leftData.length ? leftData[sampleIndex] : 128;
+            const y = height / 2 + ((value - 128) / 128) * (height / 4);
+            if (x === 0) {
+              ctx.moveTo(x, y);
+            } else {
+              ctx.lineTo(x, y);
+            }
+          }
+          ctx.stroke();
+        }
+
+        // Add frame info
+        ctx.fillStyle = '#ffffff';
+        ctx.font = '16px monospace';
+        ctx.fillText(`Frame ${frameIndex + 1}/${totalFrames}`, 20, 30);
+        ctx.fillText(`Time: ${(frameIndex / fps).toFixed(1)}s`, 20, 50);
+
+        const progress = 20 + Math.round((frameIndex / totalFrames) * 70);
+        setState(prev => ({ ...prev, progress }));
+
+        if (frameIndex % Math.max(1, Math.floor(totalFrames / 10)) === 0) {
+          console.log(`📸 Frame ${frameIndex + 1}/${totalFrames} (${progress}%) - Time: ${(frameIndex / fps).toFixed(1)}s`);
+        }
+
+        // Frame timing
+        await new Promise(resolve => setTimeout(resolve, 1000 / fps));
+      }
+
+      setState(prev => ({ ...prev, progress: 90 }));
+
+      console.log('⏹️ Stopping recorder...');
+      recorder.stop();
+      try {
+        recordingAudioSource.stop();
+        recordingAudioContext.close();
+      } catch (e) {
+        console.warn('Error stopping audio:', e);
+      }
+
+      // Wait for completion
+      await new Promise<void>((resolve) => {
+        const checkInterval = setInterval(() => {
+          if (recorder.state === 'inactive') {
+            clearInterval(checkInterval);
+            resolve();
+          }
+        }, 100);
+      });
+
+      if (chunks.length === 0) {
+        throw new Error('No video chunks recorded');
+      }
+
+      const videoBlob = new Blob(chunks, { type: selectedCodec });
+      console.log(`✅ Video created: ${(videoBlob.size / 1024 / 1024).toFixed(2)} MB`);
+
+      setState({ isExporting: false, progress: 100, error: null, stage: 'complete', fps: 0 });
+
+      return videoBlob;
+    } catch (error) {
+      console.error('❌ Export failed:', error);
+      const message = error instanceof Error ? error.message : 'Export failed';
+      setState({ isExporting: false, progress: 0, error: message, stage: 'idle', fps: 0 });
+      return null;
+    }
+  }, []);
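Editor's note: unlike the removed streaming path, this replacement paces its loop with await setTimeout(1000 / fps), so the export runs in roughly real time. A minimal sketch of driving it from a component; renderScope is an illustrative draw callback and the option values are examples (quality 'high' with VP9 selects 3,000,000 × 1.5 = 4.5 Mbps):

// Sketch: invoking the export and saving the result.
const { generateVideoWithAudio, downloadBlob, isExporting, progress } = useOfflineVideoExport();

async function onExport(file: File) {
  const blob = await generateVideoWithAudio(
    file,
    (ctx, w, h, left, right) => renderScope(ctx, w, h, left, right), // illustrative callback
    { fps: 30, format: 'webm', width: 1280, height: 720, quality: 'high' }
  );
  if (blob) downloadBlob(blob, 'oscilloscope.webm');
}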

   return {
     ...state,
     generateVideoWithAudio,

@@ -350,121 +299,3 @@ export const useOfflineVideoExport = () => {
     downloadBlob,
   };
 };
-
-// Improved muxing with better synchronization
-async function muxAudioVideo(
-  videoBlob: Blob,
-  audioFile: File,
-  duration: number,
-  fps: number
-): Promise<Blob> {
-  return new Promise((resolve, reject) => {
-    const videoUrl = URL.createObjectURL(videoBlob);
-    const audioUrl = URL.createObjectURL(audioFile);
-
-    const video = document.createElement('video');
-    const audio = document.createElement('audio');
-
-    video.src = videoUrl;
-    video.muted = true;
-    video.playbackRate = 1; // Normal playback speed
-    audio.src = audioUrl;
-    audio.playbackRate = 1;
-
-    const cleanup = () => {
-      URL.revokeObjectURL(videoUrl);
-      URL.revokeObjectURL(audioUrl);
-    };
-
-    Promise.all([
-      new Promise<void>((res, rej) => {
-        video.onloadedmetadata = () => res();
-        video.onerror = () => rej(new Error('Failed to load video'));
-      }),
-      new Promise<void>((res, rej) => {
-        audio.onloadedmetadata = () => res();
-        audio.onerror = () => rej(new Error('Failed to load audio'));
-      }),
-    ]).then(() => {
-      const audioContext = new AudioContext();
-      const audioSource = audioContext.createMediaElementSource(audio);
-      const audioDestination = audioContext.createMediaStreamDestination();
-      audioSource.connect(audioDestination);
-
-      const canvas = document.createElement('canvas');
-      canvas.width = video.videoWidth || 1920;
-      canvas.height = video.videoHeight || 1080;
-      const ctx = canvas.getContext('2d')!;
-      const canvasStream = canvas.captureStream(fps);
-
-      const combinedStream = new MediaStream([
-        ...canvasStream.getVideoTracks(),
-        ...audioDestination.stream.getAudioTracks(),
-      ]);
-
-      const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp9,opus')
-        ? 'video/webm;codecs=vp9,opus'
-        : 'video/webm;codecs=vp8,opus';
-
-      const chunks: Blob[] = [];
-      const recorder = new MediaRecorder(combinedStream, {
-        mimeType,
-        videoBitsPerSecond: 20_000_000,
-        audioBitsPerSecond: 320_000,
-      });
-
-      recorder.ondataavailable = (e) => {
-        if (e.data.size > 0) chunks.push(e.data);
-      };
-
-      recorder.onstop = () => {
-        cleanup();
-        audioContext.close();
-        resolve(new Blob(chunks, { type: mimeType }));
-      };
-
-      recorder.onerror = () => {
-        cleanup();
-        reject(new Error('Muxing failed'));
-      };
-
-      let lastVideoTime = 0;
-      const drawLoop = () => {
-        if (video.paused || video.ended) {
-          if (video.ended || audio.ended) {
-            setTimeout(() => recorder.stop(), 100);
-            return;
-          }
-          requestAnimationFrame(drawLoop);
-          return;
-        }
-
-        // Only draw when video has progressed
-        if (video.currentTime !== lastVideoTime) {
-          lastVideoTime = video.currentTime;
-          ctx.drawImage(video, 0, 0);
-        }
-        requestAnimationFrame(drawLoop);
-      };
-
-      recorder.start(100);
-
-      // Ensure both start at the same time
-      video.currentTime = 0;
-      audio.currentTime = 0;
-
-      // Wait for both to be ready to play
-      Promise.all([video.play(), audio.play()]).then(() => {
-        drawLoop();
-      }).catch(err => {
-        console.error('Playback failed:', err);
-        cleanup();
-        reject(err);
-      });
-    }).catch(err => {
-      cleanup();
-      console.warn('Muxing failed, returning video only:', err);
-      resolve(videoBlob);
-    });
-  });
-}
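Editor's note: the removed muxer above pays for a second real-time pass and a second encode, since it replays the silent render while capturing it together with the audio element. The new generateVideoWithAudio avoids both costs by recording one combined stream from the start; condensed sketch using the names from the new code:

// Sketch: a single MediaRecorder over a combined canvas + audio-graph
// stream encodes video and audio together in one pass, so no re-mux step.
const combinedStream = new MediaStream([
  ...canvas.captureStream(fps).getVideoTracks(),
  ...audioDestination.stream.getAudioTracks(),
]);
const recorder = new MediaRecorder(combinedStream, { mimeType: selectedCodec });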

src/hooks/useOscilloscopeRenderer.ts (new executable file, 420 lines)

@@ -0,0 +1,420 @@
import { useRef, useCallback, useEffect } from 'react';
import type { AudioData } from './useAudioAnalyzer';

export type OscilloscopeMode = 'combined' | 'separate' | 'all';

interface RendererOptions {
  mode: OscilloscopeMode;
  width: number;
  height: number;
  phosphorColor: string;
  persistence: number;
}

// WebGL shaders for GPU-accelerated rendering
const VERTEX_SHADER = `
  attribute vec2 a_position;
  uniform vec2 u_resolution;

  void main() {
    vec2 clipSpace = (a_position / u_resolution) * 2.0 - 1.0;
    gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
  }
`;

const TRACE_FRAGMENT_SHADER = `
  precision mediump float;
  uniform vec4 u_color;

  void main() {
    gl_FragColor = u_color;
  }
`;

const FADE_VERTEX_SHADER = `
  attribute vec2 a_position;

  void main() {
    gl_Position = vec4(a_position, 0, 1);
  }
`;

const FADE_FRAGMENT_SHADER = `
  precision mediump float;
  uniform float u_fade;

  void main() {
    gl_FragColor = vec4(0.0, 0.031, 0.0, u_fade);
  }
`;
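// Worked example (editor's note, not part of the commit): the vertex shader
// maps pixel coordinates into WebGL clip space. On an 800x600 canvas the
// pixel (0, 0) becomes (0/800 * 2 - 1, 0/600 * 2 - 1) = (-1, -1); the
// vec2(1, -1) flip then yields (-1, 1), the top-left corner, reconciling
// canvas coordinates (y grows downward) with clip space (y grows upward).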

function createShader(gl: WebGLRenderingContext, type: number, source: string): WebGLShader | null {
  const shader = gl.createShader(type);
  if (!shader) return null;

  gl.shaderSource(shader, source);
  gl.compileShader(shader);

  if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    console.error('Shader compile error:', gl.getShaderInfoLog(shader));
    gl.deleteShader(shader);
    return null;
  }

  return shader;
}

function createProgram(gl: WebGLRenderingContext, vertexShader: WebGLShader, fragmentShader: WebGLShader): WebGLProgram | null {
  const program = gl.createProgram();
  if (!program) return null;

  gl.attachShader(program, vertexShader);
  gl.attachShader(program, fragmentShader);
  gl.linkProgram(program);

  if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
    console.error('Program link error:', gl.getProgramInfoLog(program));
    gl.deleteProgram(program);
    return null;
  }

  return program;
}

interface WebGLResources {
  gl: WebGLRenderingContext;
  traceProgram: WebGLProgram;
  fadeProgram: WebGLProgram;
  positionBuffer: WebGLBuffer;
  fadeBuffer: WebGLBuffer;
  tracePositionLocation: number;
  traceResolutionLocation: WebGLUniformLocation;
  traceColorLocation: WebGLUniformLocation;
  fadePositionLocation: number;
  fadeFadeLocation: WebGLUniformLocation;
}

export function useOscilloscopeRenderer() {
  const canvasRef = useRef<HTMLCanvasElement | null>(null);
  const glResourcesRef = useRef<WebGLResources | null>(null);
  const animationFrameRef = useRef<number | null>(null);
  const currentSampleRef = useRef(0);

  const initCanvas = useCallback((canvas: HTMLCanvasElement) => {
    canvasRef.current = canvas;

    const gl = canvas.getContext('webgl', {
      preserveDrawingBuffer: true,
      antialias: true,
      alpha: false
    });

    if (!gl) {
      console.error('WebGL not supported, falling back to 2D');
      return;
    }

    // Create trace shader program
    const traceVS = createShader(gl, gl.VERTEX_SHADER, VERTEX_SHADER);
    const traceFS = createShader(gl, gl.FRAGMENT_SHADER, TRACE_FRAGMENT_SHADER);
    if (!traceVS || !traceFS) return;

    const traceProgram = createProgram(gl, traceVS, traceFS);
    if (!traceProgram) return;

    // Create fade shader program
    const fadeVS = createShader(gl, gl.VERTEX_SHADER, FADE_VERTEX_SHADER);
    const fadeFS = createShader(gl, gl.FRAGMENT_SHADER, FADE_FRAGMENT_SHADER);
    if (!fadeVS || !fadeFS) return;

    const fadeProgram = createProgram(gl, fadeVS, fadeFS);
    if (!fadeProgram) return;

    // Create buffers
    const positionBuffer = gl.createBuffer();
    const fadeBuffer = gl.createBuffer();
    if (!positionBuffer || !fadeBuffer) return;

    // Set up fade quad
    gl.bindBuffer(gl.ARRAY_BUFFER, fadeBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
      -1, -1,
      1, -1,
      -1, 1,
      -1, 1,
      1, -1,
      1, 1,
    ]), gl.STATIC_DRAW);

    // Get attribute and uniform locations
    const tracePositionLocation = gl.getAttribLocation(traceProgram, 'a_position');
    const traceResolutionLocation = gl.getUniformLocation(traceProgram, 'u_resolution');
    const traceColorLocation = gl.getUniformLocation(traceProgram, 'u_color');

    const fadePositionLocation = gl.getAttribLocation(fadeProgram, 'a_position');
    const fadeFadeLocation = gl.getUniformLocation(fadeProgram, 'u_fade');

    if (!traceResolutionLocation || !traceColorLocation || !fadeFadeLocation) return;

    // Enable blending
    gl.enable(gl.BLEND);
    gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);

    // Initial clear (pure black)
    gl.viewport(0, 0, canvas.width, canvas.height);
    gl.clearColor(0, 0, 0, 1);
    gl.clear(gl.COLOR_BUFFER_BIT);

    glResourcesRef.current = {
      gl,
      traceProgram,
      fadeProgram,
      positionBuffer,
      fadeBuffer,
      tracePositionLocation,
      traceResolutionLocation,
      traceColorLocation,
      fadePositionLocation,
      fadeFadeLocation,
    };
  }, []);

  const parseColor = (colorStr: string): [number, number, number, number] => {
    // Parse hex color to RGBA
    const hex = colorStr.replace('#', '');
    const r = parseInt(hex.substring(0, 2), 16) / 255;
    const g = parseInt(hex.substring(2, 4), 16) / 255;
    const b = parseInt(hex.substring(4, 6), 16) / 255;
    return [r, g, b, 1];
  };
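  // Example (editor's note): parseColor('#00ff33') returns [0, 1, 0.2, 1].
  // 0x00/255 = 0 red, 0xff/255 = 1 green, 0x33/255 = 51/255 = 0.2 blue,
  // with alpha fixed at 1.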

  const drawTrace = useCallback((
    gl: WebGLRenderingContext,
    resources: WebGLResources,
    vertices: number[],
    color: [number, number, number, number],
    width: number,
    height: number
  ) => {
    if (vertices.length < 4) return;

    const { traceProgram, positionBuffer, tracePositionLocation, traceResolutionLocation, traceColorLocation } = resources;

    gl.useProgram(traceProgram);
    gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.DYNAMIC_DRAW);
    gl.enableVertexAttribArray(tracePositionLocation);
    gl.vertexAttribPointer(tracePositionLocation, 2, gl.FLOAT, false, 0, 0);
    gl.uniform2f(traceResolutionLocation, width, height);
    gl.uniform4f(traceColorLocation, color[0], color[1], color[2], color[3]);
    gl.lineWidth(2);
    gl.drawArrays(gl.LINE_STRIP, 0, vertices.length / 2);
  }, []);

  const drawFrame = useCallback((
    audioData: AudioData,
    options: RendererOptions,
    samplesPerFrame: number
  ) => {
    const resources = glResourcesRef.current;
    const canvas = canvasRef.current;

    if (!resources || !canvas) return false;

    const { gl } = resources;
    const { width, height, mode, phosphorColor } = options;

    // Clear to pure black each frame (no persistence/ghosting)
    gl.viewport(0, 0, width, height);
    gl.clearColor(0, 0, 0, 1);
    gl.clear(gl.COLOR_BUFFER_BIT);

    // Get current sample position
    const startSample = currentSampleRef.current;
    const endSample = Math.min(startSample + samplesPerFrame, audioData.leftChannel.length);

    const color = parseColor(phosphorColor);
    const leftColor: [number, number, number, number] = [0, 1, 0, 1]; // Green for left
    const rightColor: [number, number, number, number] = [0, 0.8, 1, 1]; // Cyan for right
    const xyColor: [number, number, number, number] = [1, 0.5, 0, 1]; // Orange for XY

    if (mode === 'combined') {
      // Combined: both channels merged into single waveform
      const vertices: number[] = [];
      const samplesPerPixel = samplesPerFrame / width;
      const centerY = height / 2;

      for (let x = 0; x < width; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.leftChannel.length) break;
        const sample = (audioData.leftChannel[sampleIndex] + audioData.rightChannel[sampleIndex]) / 2;
        const y = centerY - sample * (height * 0.4);
        vertices.push(x, y);
      }

      drawTrace(gl, resources, vertices, color, width, height);
    } else if (mode === 'separate') {
      // Separate: Left on top half, Right on bottom half
      const halfHeight = height / 2;
      const samplesPerPixel = samplesPerFrame / width;

      // Left channel (top half)
      const leftVertices: number[] = [];
      const leftCenterY = halfHeight / 2;
      for (let x = 0; x < width; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.leftChannel.length) break;
        const sample = audioData.leftChannel[sampleIndex];
        const y = leftCenterY - sample * (halfHeight * 0.35);
        leftVertices.push(x, y);
      }
      drawTrace(gl, resources, leftVertices, leftColor, width, height);

      // Right channel (bottom half)
      const rightVertices: number[] = [];
      const rightCenterY = halfHeight + halfHeight / 2;
      for (let x = 0; x < width; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.rightChannel.length) break;
        const sample = audioData.rightChannel[sampleIndex];
        const y = rightCenterY - sample * (halfHeight * 0.35);
        rightVertices.push(x, y);
      }
      drawTrace(gl, resources, rightVertices, rightColor, width, height);

      // Draw divider line
      const dividerVertices = [0, halfHeight, width, halfHeight];
      drawTrace(gl, resources, dividerVertices, [0.2, 0.2, 0.2, 1], width, height);
    } else if (mode === 'all') {
      // All: L/R waveforms on top row, XY on bottom
      const topHeight = height / 2;
      const bottomHeight = height / 2;
      const halfWidth = width / 2;
      const samplesPerPixel = samplesPerFrame / halfWidth;

      // Left channel (top-left quadrant)
      const leftVertices: number[] = [];
      const leftCenterY = topHeight / 2;
      for (let x = 0; x < halfWidth; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.leftChannel.length) break;
        const sample = audioData.leftChannel[sampleIndex];
        const y = leftCenterY - sample * (topHeight * 0.35);
        leftVertices.push(x, y);
      }
      drawTrace(gl, resources, leftVertices, leftColor, width, height);

      // Right channel (top-right quadrant)
      const rightVertices: number[] = [];
      const rightCenterY = topHeight / 2;
      for (let x = 0; x < halfWidth; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.rightChannel.length) break;
        const sample = audioData.rightChannel[sampleIndex];
        const y = rightCenterY - sample * (topHeight * 0.35);
        rightVertices.push(halfWidth + x, y);
      }
      drawTrace(gl, resources, rightVertices, rightColor, width, height);

      // XY mode (bottom half, centered)
      const xyVertices: number[] = [];
      const xyCenterX = width / 2;
      const xyCenterY = topHeight + bottomHeight / 2;
      const xyScale = Math.min(halfWidth, bottomHeight) * 0.35;
      for (let i = startSample; i < endSample; i++) {
        const x = xyCenterX + audioData.leftChannel[i] * xyScale;
        const y = xyCenterY - audioData.rightChannel[i] * xyScale;
        xyVertices.push(x, y);
      }
      drawTrace(gl, resources, xyVertices, xyColor, width, height);

      // Draw divider lines
      drawTrace(gl, resources, [0, topHeight, width, topHeight], [0.2, 0.2, 0.2, 1], width, height);
      drawTrace(gl, resources, [halfWidth, 0, halfWidth, topHeight], [0.2, 0.2, 0.2, 1], width, height);
    }

    // Update sample position
    currentSampleRef.current = endSample;

    return endSample >= audioData.leftChannel.length;
  }, [drawTrace]);
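  // Editor's sketch (not in the commit): the 'all' mode XY trace plots left
  // on x and right on y, so stereo phase becomes a Lissajous figure. A test
  // signal with a 90-degree offset renders as a circle of radius xyScale:
  function makeCircleTest(n: number): { left: Float32Array; right: Float32Array } {
    const left = new Float32Array(n);
    const right = new Float32Array(n);
    for (let i = 0; i < n; i++) {
      const t = (i / n) * 2 * Math.PI;
      left[i] = Math.sin(t);  // x = sin(t)
      right[i] = Math.cos(t); // y = cos(t), so x^2 + y^2 = 1: a circle
    }
    return { left, right };
  }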

  const draw2DGraticule = (canvas: HTMLCanvasElement, width: number, height: number) => {
    // Get 2D context for graticule overlay
    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    ctx.strokeStyle = 'rgba(0, 100, 0, 0.3)';
    ctx.lineWidth = 1;

    const divisions = 8;
    const cellWidth = width / divisions;
    const cellHeight = height / divisions;

    for (let i = 0; i <= divisions; i++) {
      ctx.beginPath();
      ctx.moveTo(i * cellWidth, 0);
      ctx.lineTo(i * cellWidth, height);
      ctx.stroke();
      ctx.beginPath();
      ctx.moveTo(0, i * cellHeight);
      ctx.lineTo(width, i * cellHeight);
      ctx.stroke();
    }

    ctx.strokeStyle = 'rgba(0, 150, 0, 0.5)';
    ctx.lineWidth = 2;
    ctx.beginPath();
    ctx.moveTo(0, height / 2);
    ctx.lineTo(width, height / 2);
    ctx.stroke();
    ctx.beginPath();
    ctx.moveTo(width / 2, 0);
    ctx.lineTo(width / 2, height);
    ctx.stroke();
  };

  const resetPlayback = useCallback(() => {
    currentSampleRef.current = 0;

    const resources = glResourcesRef.current;
    if (resources) {
      const { gl } = resources;
      gl.clearColor(0, 0, 0, 1);
      gl.clear(gl.COLOR_BUFFER_BIT);
    }
  }, []);

  const stopAnimation = useCallback(() => {
    if (animationFrameRef.current) {
      cancelAnimationFrame(animationFrameRef.current);
      animationFrameRef.current = null;
    }
  }, []);

  const getCurrentSample = useCallback(() => currentSampleRef.current, []);

  useEffect(() => {
    return () => {
      stopAnimation();
      // Clean up WebGL resources
      if (glResourcesRef.current) {
        const { gl, traceProgram, fadeProgram, positionBuffer, fadeBuffer } = glResourcesRef.current;
        gl.deleteProgram(traceProgram);
        gl.deleteProgram(fadeProgram);
        gl.deleteBuffer(positionBuffer);
        gl.deleteBuffer(fadeBuffer);
        glResourcesRef.current = null;
      }
    };
  }, [stopAnimation]);

  return {
    canvasRef,
    initCanvas,
    drawFrame,
    resetPlayback,
    stopAnimation,
    getCurrentSample,
  };
}

src/hooks/useVideoExporter.ts (new executable file, 526 lines)

@@ -0,0 +1,526 @@
|
import { useState, useCallback, useRef } from 'react';
|
||||||
|
import type { AudioData } from './useAudioAnalyzer';
|
||||||
|
import type { OscilloscopeMode } from './useOscilloscopeRenderer';
|
||||||
|
|
||||||
|
interface ExportOptions {
|
||||||
|
width: number;
|
||||||
|
height: number;
|
||||||
|
fps: number;
|
||||||
|
mode: OscilloscopeMode;
|
||||||
|
audioFile: File;
|
||||||
|
}
|
||||||
|
|
||||||
|
// WebGL shaders
|
||||||
|
const VERTEX_SHADER = `
|
||||||
|
attribute vec2 a_position;
|
||||||
|
uniform vec2 u_resolution;
|
||||||
|
|
||||||
|
void main() {
|
||||||
|
vec2 clipSpace = (a_position / u_resolution) * 2.0 - 1.0;
|
||||||
|
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
|
||||||
|
}
|
||||||
|
`;
|
||||||
|
|
||||||
|
const TRACE_FRAGMENT_SHADER = `
|
||||||
|
precision mediump float;
|
||||||
|
uniform vec4 u_color;
|
||||||
|
|
||||||
|
void main() {
|
||||||
|
gl_FragColor = u_color;
|
||||||
|
}
|
||||||
|
`;
|
||||||
|
|
||||||
|
function createShader(gl: WebGLRenderingContext, type: number, source: string): WebGLShader | null {
|
||||||
|
const shader = gl.createShader(type);
|
||||||
|
if (!shader) return null;
|
||||||
|
|
||||||
|
gl.shaderSource(shader, source);
|
||||||
|
gl.compileShader(shader);
|
||||||
|
|
||||||
|
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
|
||||||
|
console.error('Shader compile error:', gl.getShaderInfoLog(shader));
|
||||||
|
gl.deleteShader(shader);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return shader;
|
||||||
|
}
|
||||||
|
|
||||||
|
function createProgram(gl: WebGLRenderingContext, vertexShader: WebGLShader, fragmentShader: WebGLShader): WebGLProgram | null {
|
||||||
|
const program = gl.createProgram();
|
||||||
|
if (!program) return null;
|
||||||
|
|
||||||
|
gl.attachShader(program, vertexShader);
|
||||||
|
gl.attachShader(program, fragmentShader);
|
||||||
|
gl.linkProgram(program);
|
||||||
|
|
||||||
|
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
|
||||||
|
console.error('Program link error:', gl.getProgramInfoLog(program));
|
||||||
|
gl.deleteProgram(program);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return program;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function useVideoExporter() {
|
||||||
|
const [isExporting, setIsExporting] = useState(false);
|
||||||
|
const [progress, setProgress] = useState(0);
|
||||||
|
const [exportedUrl, setExportedUrl] = useState<string | null>(null);
|
||||||
|
const cancelRef = useRef(false);
|
||||||
|
|
||||||
|
const exportVideo = useCallback(async (
|
||||||
|
audioData: AudioData,
|
||||||
|
audioFile: File,
|
||||||
|
options: ExportOptions
|
||||||
|
) => {
|
||||||
|
setIsExporting(true);
|
||||||
|
setProgress(0);
|
||||||
|
setExportedUrl(null);
|
||||||
|
cancelRef.current = false;
|
||||||
|
|
||||||
|
const { width, height, fps, mode } = options;
|
||||||
|
const totalSamples = audioData.leftChannel.length;
|
||||||
|
const samplesPerFrame = Math.floor(audioData.sampleRate / fps);
|
||||||
|
|
||||||
|
const log = (...args: unknown[]) => {
|
||||||
|
console.log('[useVideoExporter]', ...args);
|
||||||
|
};
|
||||||
|
|
||||||
|
log('export start', {
|
||||||
|
width,
|
||||||
|
height,
|
||||||
|
fps,
|
||||||
|
mode,
|
||||||
|
analyzerSampleRate: audioData.sampleRate,
|
||||||
|
totalSamples,
|
||||||
|
samplesPerFrame,
|
||||||
|
estimatedDuration: totalSamples / audioData.sampleRate,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create WebGL canvas for rendering
|
||||||
|
const canvas = document.createElement('canvas');
|
||||||
|
canvas.width = width;
|
||||||
|
canvas.height = height;
|
||||||
|
|
||||||
|
const gl = canvas.getContext('webgl', {
|
||||||
|
preserveDrawingBuffer: true,
|
||||||
|
antialias: true,
|
||||||
|
alpha: false,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!gl) {
|
||||||
|
console.error('WebGL not available');
|
||||||
|
setIsExporting(false);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set up WebGL program
|
||||||
|
const traceVS = createShader(gl, gl.VERTEX_SHADER, VERTEX_SHADER);
|
||||||
|
const traceFS = createShader(gl, gl.FRAGMENT_SHADER, TRACE_FRAGMENT_SHADER);
|
||||||
|
|
||||||
|
if (!traceVS || !traceFS) {
|
||||||
|
setIsExporting(false);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const traceProgram = createProgram(gl, traceVS, traceFS);
|
||||||
|
|
||||||
|
if (!traceProgram) {
|
||||||
|
setIsExporting(false);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const positionBuffer = gl.createBuffer();
|
||||||
|
|
||||||
|
if (!positionBuffer) {
|
||||||
|
setIsExporting(false);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const tracePositionLocation = gl.getAttribLocation(traceProgram, 'a_position');
|
||||||
|
const traceResolutionLocation = gl.getUniformLocation(traceProgram, 'u_resolution');
|
||||||
|
const traceColorLocation = gl.getUniformLocation(traceProgram, 'u_color');
|
||||||
|
|
||||||
|
if (!traceResolutionLocation || !traceColorLocation) {
|
||||||
|
setIsExporting(false);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
gl.enable(gl.BLEND);
|
||||||
|
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
|
||||||
|
gl.viewport(0, 0, width, height);
|
||||||
|
|
||||||
|
// Helper to draw a trace
|
||||||
|
const drawTrace = (vertices: number[], color: [number, number, number, number]) => {
|
||||||
|
if (vertices.length < 4) return;
|
||||||
|
gl.useProgram(traceProgram);
|
||||||
|
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
|
||||||
|
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.DYNAMIC_DRAW);
|
||||||
|
gl.enableVertexAttribArray(tracePositionLocation);
|
||||||
|
gl.vertexAttribPointer(tracePositionLocation, 2, gl.FLOAT, false, 0, 0);
|
||||||
|
gl.uniform2f(traceResolutionLocation, width, height);
|
||||||
|
gl.uniform4f(traceColorLocation, color[0], color[1], color[2], color[3]);
|
||||||
|
gl.lineWidth(2);
|
||||||
|
gl.drawArrays(gl.LINE_STRIP, 0, vertices.length / 2);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Function to render a single frame at a specific sample position
|
||||||
|
const renderFrameAtSample = (startSample: number): void => {
|
||||||
|
gl.clearColor(0, 0, 0, 1);
|
||||||
|
gl.clear(gl.COLOR_BUFFER_BIT);
|
||||||
|
|
||||||
|
const endSample = Math.min(startSample + samplesPerFrame, totalSamples);
|
||||||
|
|
||||||
|
const leftColor: [number, number, number, number] = [0, 1, 0, 1];
|
||||||
|
const rightColor: [number, number, number, number] = [0, 0.8, 1, 1];
|
||||||
|
const xyColor: [number, number, number, number] = [1, 0.5, 0, 1];
|
||||||
|
const dividerColor: [number, number, number, number] = [0.2, 0.2, 0.2, 1];
|
||||||
|
|
||||||
|
if (mode === 'combined') {
|
||||||
|
const vertices: number[] = [];
|
||||||
|
        const samplesPerPixel = samplesPerFrame / width;
        const centerY = height / 2;
        for (let x = 0; x < width; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = (audioData.leftChannel[sampleIndex] + audioData.rightChannel[sampleIndex]) / 2;
          const y = centerY - sample * (height * 0.4);
          vertices.push(x, y);
        }
        drawTrace(vertices, leftColor);
      } else if (mode === 'separate') {
        const halfHeight = height / 2;
        const samplesPerPixel = samplesPerFrame / width;

        // Left channel (top half)
        const leftVertices: number[] = [];
        const leftCenterY = halfHeight / 2;
        for (let x = 0; x < width; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = audioData.leftChannel[sampleIndex];
          const y = leftCenterY - sample * (halfHeight * 0.35);
          leftVertices.push(x, y);
        }
        drawTrace(leftVertices, leftColor);

        // Right channel (bottom half)
        const rightVertices: number[] = [];
        const rightCenterY = halfHeight + halfHeight / 2;
        for (let x = 0; x < width; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = audioData.rightChannel[sampleIndex];
          const y = rightCenterY - sample * (halfHeight * 0.35);
          rightVertices.push(x, y);
        }
        drawTrace(rightVertices, rightColor);

        // Divider
        drawTrace([0, halfHeight, width, halfHeight], dividerColor);
      } else if (mode === 'all') {
        const topHeight = height / 2;
        const bottomHeight = height / 2;
        const halfWidth = width / 2;
        const samplesPerPixel = samplesPerFrame / halfWidth;

        // Left channel (top-left)
        const leftVertices: number[] = [];
        const leftCenterY = topHeight / 2;
        for (let x = 0; x < halfWidth; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = audioData.leftChannel[sampleIndex];
          const y = leftCenterY - sample * (topHeight * 0.35);
          leftVertices.push(x, y);
        }
        drawTrace(leftVertices, leftColor);

        // Right channel (top-right)
        const rightVertices: number[] = [];
        const rightCenterY = topHeight / 2;
        for (let x = 0; x < halfWidth; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = audioData.rightChannel[sampleIndex];
          const y = rightCenterY - sample * (topHeight * 0.35);
          rightVertices.push(halfWidth + x, y);
        }
        drawTrace(rightVertices, rightColor);

        // XY mode (bottom half)
        const xyVertices: number[] = [];
        const xyCenterX = width / 2;
        const xyCenterY = topHeight + bottomHeight / 2;
        const xyScale = Math.min(halfWidth, bottomHeight) * 0.35;
        for (let i = startSample; i < endSample; i++) {
          const x = xyCenterX + audioData.leftChannel[i] * xyScale;
          const y = xyCenterY - audioData.rightChannel[i] * xyScale;
          xyVertices.push(x, y);
        }
        drawTrace(xyVertices, xyColor);

        // Dividers
        drawTrace([0, topHeight, width, topHeight], dividerColor);
        drawTrace([halfWidth, 0, halfWidth, topHeight], dividerColor);
      }
    };

    // Capture stream at the target FPS
    const videoStream = canvas.captureStream(fps);
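    // captureStream(fps) caps the video track at the requested frame rate;
    // frames are actually emitted as the render loop below repaints the canvas.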

    // Decode audio
    let audioContext: AudioContext;
    try {
      audioContext = new AudioContext({ sampleRate: audioData.sampleRate });
    } catch {
      log('AudioContext({sampleRate}) failed; falling back to default AudioContext()');
      audioContext = new AudioContext();
    }
    await audioContext.resume();

    const audioArrayBuffer = await audioFile.arrayBuffer();
    const audioBuffer = await audioContext.decodeAudioData(audioArrayBuffer);
    log('decoded audio', {
      ctxSampleRate: audioContext.sampleRate,
      duration: audioBuffer.duration,
      channels: audioBuffer.numberOfChannels,
    });

    const audioSource = audioContext.createBufferSource();
    audioSource.buffer = audioBuffer;

    const audioDestination = audioContext.createMediaStreamDestination();
    audioSource.connect(audioDestination);

    const combinedStream = new MediaStream([
      ...videoStream.getVideoTracks(),
      ...audioDestination.stream.getAudioTracks(),
    ]);
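    // The combined stream muxes the canvas video track with the WebAudio
    // destination's audio track, so the recorder below emits a single A/V file.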

    // Prefer VP8 for broad compatibility
    let mimeType = 'video/webm;codecs=vp8,opus';
    if (!MediaRecorder.isTypeSupported(mimeType)) {
      mimeType = 'video/webm;codecs=vp9,opus';
    }
    if (!MediaRecorder.isTypeSupported(mimeType)) {
      mimeType = 'video/webm';
    }
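    // The same fallback chain, generalized (a sketch, relying only on the
    // standard MediaRecorder.isTypeSupported check):
    //   const pickMimeType = (...candidates: string[]) =>
    //     candidates.find((t) => MediaRecorder.isTypeSupported(t)) ?? 'video/webm';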

    log('MediaRecorder setup', {
      requestedMimeType: mimeType,
      videoBitsPerSecond: 8000000,
      audioBitsPerSecond: 256000,
    });

    const mediaRecorder = new MediaRecorder(combinedStream, {
      mimeType,
      videoBitsPerSecond: 8000000,
      audioBitsPerSecond: 256000,
    });

    const chunks: Blob[] = [];
    let chunkBytes = 0;

    mediaRecorder.onstart = () =>
      log('MediaRecorder onstart', { state: mediaRecorder.state, mimeType: mediaRecorder.mimeType });

    mediaRecorder.ondataavailable = (e) => {
      const size = e?.data?.size ?? 0;
      log('MediaRecorder ondataavailable', {
        size,
        type: e?.data?.type,
        recorderState: mediaRecorder.state,
      });

      if (e.data && e.data.size > 0) {
        chunks.push(e.data);
        chunkBytes += e.data.size;
      }
    };

    return new Promise<string>((resolve, reject) => {
      let stopped = false;
      let stopReason: string = 'unknown';
      let lastRenderedFrame = -1;
      let lastLoggedSecond = -1;
      let rafId = 0;
      let safetyTimer: number | null = null;

      const stopRecorder = (reason: string) => {
        if (stopped) return;
        stopped = true;
        stopReason = reason;

        log('stopRecorder()', {
          reason,
          recorderState: mediaRecorder.state,
          chunks: chunks.length,
          chunkBytes,
        });

        if (rafId) cancelAnimationFrame(rafId);
        if (safetyTimer) window.clearTimeout(safetyTimer);

        if (reason === 'cancel') {
          try {
            audioSource.stop();
          } catch {
            // ignore
          }
        }

        try {
          if (mediaRecorder.state === 'recording') {
            log('calling mediaRecorder.stop()');
            mediaRecorder.stop();
          }
        } catch (e) {
          log('mediaRecorder.stop() failed', e);
        }
      };

      audioSource.onended = () => {
        log('audioSource.onended');
        try {
          const endSample = Math.max(0, totalSamples - samplesPerFrame);
          renderFrameAtSample(endSample);
        } catch (e) {
          log('final frame render failed', e);
        }
        stopRecorder('audio_ended');
      };

      mediaRecorder.onstop = async () => {
        log('MediaRecorder onstop', { stopReason, chunks: chunks.length, chunkBytes });

        // Cleanup WebGL
        gl.deleteProgram(traceProgram);
        gl.deleteBuffer(positionBuffer);

        try {
          await audioContext.close();
        } catch {
          // ignore
        }

        try {
          combinedStream.getTracks().forEach((t) => t.stop());
        } catch {
          // ignore
        }

        const finalMime = mediaRecorder.mimeType || mimeType;
        const blob = new Blob(chunks, { type: finalMime });
        log('final blob', {
          mime: finalMime,
          blobSize: blob.size,
          chunks: chunks.length,
          chunkBytes,
        });

        if (blob.size === 0) {
          setIsExporting(false);
          reject(new Error('Export failed: empty recording blob'));
          return;
        }

        const url = URL.createObjectURL(blob);
        setExportedUrl(url);
        setIsExporting(false);
        setProgress(100);
        resolve(url);
      };

      mediaRecorder.onerror = (e) => {
        log('MediaRecorder onerror', e);
        setIsExporting(false);
        reject(e);
      };

      // Start without timeslice - this creates a single continuous WebM file
      mediaRecorder.start();
      log('mediaRecorder.start() called', { state: mediaRecorder.state, mimeType: mediaRecorder.mimeType });
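      // Note: mediaRecorder.start(timesliceMs) would instead flush data
      // periodically through ondataavailable, bounding memory for very long
      // recordings at the cost of chunked output.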

      const exportStart = audioContext.currentTime;
      audioSource.start(0);
      log('audioSource.start() called', { exportStart, duration: audioBuffer.duration });

      // Safety timeout: for very long files (6+ hours = 21600+ seconds), add generous buffer
      const safetyDuration = Math.ceil(audioBuffer.duration * 1000 + 30000); // 30s buffer
      log('safety timer set', { safetyDuration, durationSeconds: audioBuffer.duration });
      safetyTimer = window.setTimeout(() => {
        log('safety timeout hit');
        stopRecorder('safety_timeout');
      }, safetyDuration);

      const renderLoop = () => {
        if (stopped) return;

        if (cancelRef.current) {
          log('cancelRef triggered');
          stopRecorder('cancel');
          return;
        }

        const t = Math.max(0, audioContext.currentTime - exportStart);

        // Heartbeat every 10 seconds for long exports
        const sec = Math.floor(t / 10) * 10;
        if (sec !== lastLoggedSecond && sec > 0) {
          lastLoggedSecond = sec;
          log('heartbeat', {
            t: t.toFixed(1),
            duration: audioBuffer.duration.toFixed(1),
            percentComplete: ((t / audioBuffer.duration) * 100).toFixed(1),
            recorderState: mediaRecorder.state,
            chunks: chunks.length,
            chunkBytes,
          });
        }

        // Guard: if audio should have ended but didn't, stop
        if (t > audioBuffer.duration + 2) {
          log('duration guard hit', { t, duration: audioBuffer.duration });
          stopRecorder('duration_guard');
          return;
        }

        const frameIndex = Math.floor(t * fps);

        if (frameIndex !== lastRenderedFrame) {
          const startSample = Math.min(frameIndex * samplesPerFrame, totalSamples - 1);
          renderFrameAtSample(startSample);
          lastRenderedFrame = frameIndex;

          // Update progress less frequently for performance
          if (frameIndex % 60 === 0) {
            setProgress(Math.min(99, Math.floor((startSample / totalSamples) * 100)));
          }
        }

        rafId = requestAnimationFrame(renderLoop);
      };

      rafId = requestAnimationFrame(renderLoop);
    });
  }, []);

  const reset = useCallback(() => {
    if (exportedUrl) {
      URL.revokeObjectURL(exportedUrl);
    }
    cancelRef.current = true;
    setExportedUrl(null);
    setProgress(0);
  }, [exportedUrl]);

  return {
    isExporting,
    progress,
    exportedUrl,
    exportVideo,
    reset,
  };
}
@ -111,6 +111,8 @@ const Index = () => {
    setIsRedTheme(!isRedTheme);
    playSound('click');
    unlockAchievement('theme_switcher');
    // Notify other components of theme change
    window.dispatchEvent(new CustomEvent('themeChange', { detail: { isRedTheme: !isRedTheme } }));
  };

  const handleConsentClose = () => {
@ -1,7 +1,80 @@
import { motion } from 'framer-motion';
import { Oscilloscope } from '@/components/Oscilloscope';
import { useEffect } from 'react';

const OscilloscopePage = () => {
  // Auto-test export functionality on page load
  useEffect(() => {
    console.log('🔬 AUTO-TESTING OSCILLOSCOPE VIDEO EXPORT...');

    const runAutoTest = async () => {
      try {
        // Create a test WAV file: 44-byte RIFF/WAVE header + 1024 samples of
        // 8-bit mono PCM sine (decodeAudioData needs a real container header)
        const n = 1024;
        const wav = new DataView(new ArrayBuffer(44 + n));
        const tag = (o: number, s: string) => [...s].forEach((c, i) => wav.setUint8(o + i, c.charCodeAt(0)));
        tag(0, 'RIFF'); wav.setUint32(4, 36 + n, true); tag(8, 'WAVE'); tag(12, 'fmt ');
        wav.setUint32(16, 16, true); wav.setUint16(20, 1, true); wav.setUint16(22, 1, true);   // PCM, mono
        wav.setUint32(24, 8000, true); wav.setUint32(28, 8000, true);                          // 8 kHz, 8-bit byte rate
        wav.setUint16(32, 1, true); wav.setUint16(34, 8, true); tag(36, 'data'); wav.setUint32(40, n, true);
        for (let i = 0; i < n; i++) wav.setUint8(44 + i, 128 + Math.round(Math.sin(i * 0.1) * 64)); // simple sine wave
        const testFile = new File([wav.buffer], 'auto-test.wav', { type: 'audio/wav' });

        console.log('📁 Created test audio file:', testFile.size, 'bytes');

        // Import the export hook
        const { useOfflineVideoExport } = await import('@/hooks/useOfflineVideoExport');
        const exportHook = useOfflineVideoExport();
        const { generateVideoWithAudio } = exportHook;
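        // Caveat: calling a React hook from inside useEffect breaks the Rules
        // of Hooks; this only works if useOfflineVideoExport holds no React
        // state internally. A plain async export function would be safer here.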

        console.log('⚙️ Starting video export...');

        // Mock drawFrame function
        const mockDrawFrame = (ctx: CanvasRenderingContext2D, width: number, height: number) => {
          ctx.fillStyle = '#0a0f0a';
          ctx.fillRect(0, 0, width, height);
          ctx.fillStyle = '#00ff00';
          ctx.font = '20px monospace';
          ctx.fillText('AUTO-TEST VIDEO', 20, height / 2);
          ctx.fillText('Oscilloscope Export Test', 20, height / 2 + 30);
          ctx.fillText(`File: ${testFile.name}`, 20, height / 2 + 60);
        };

        // Run export
        const result = await generateVideoWithAudio(testFile, mockDrawFrame, {
          fps: 30,
          format: 'webm',
          width: 640,
          height: 480,
          quality: 'medium'
        });

        if (result) {
          console.log('✅ AUTO-TEST SUCCESS!');
          console.log(`📁 Generated video: ${result.size} bytes`);
          console.log('🎬 Type:', result.type);

          // Auto-download for testing
          const url = URL.createObjectURL(result);
          const a = document.createElement('a');
          a.href = url;
          a.download = 'oscilloscope-auto-test.webm';
          document.body.appendChild(a);
          a.click();
          document.body.removeChild(a);
          URL.revokeObjectURL(url);

          console.log('⬇️ Auto-downloaded test video file');
        } else {
          console.error('❌ AUTO-TEST FAILED: No video generated');
        }

      } catch (error) {
        console.error('❌ AUTO-TEST ERROR:', error);
      }
    };

    // Run test after 3 seconds
    const timer = setTimeout(runAutoTest, 3000);
    return () => clearTimeout(timer);
  }, []);

  return (
    <motion.div
      initial={{ opacity: 0 }}
@ -16,6 +89,9 @@ const OscilloscopePage = () => {
        <p className="font-pixel text-foreground/80">
          Visualize audio waveforms in real-time with microphone input or audio files.
        </p>
        <p className="text-sm text-muted-foreground mt-2">
          🔬 Auto-testing video export in 3 seconds...
        </p>
      </div>

      <Oscilloscope />
454
videoExportTestApi.ts
Executable file
@ -0,0 +1,454 @@
/**
 * Video Export Test API
 *
 * Exposes a global API for automated testing of video exports.
 *
 * Usage in browser console or automated tests:
 *
 *   // Run export with a test audio file
 *   const result = await window.VideoExportTestAPI.runExport(audioFileBlob, {
 *     width: 1920,
 *     height: 1080,
 *     fps: 60,
 *     mode: 'combined'
 *   });
 *
 *   // result = { success: boolean, url?: string, error?: string, stats: {...} }
 *
 *   // Download the result
 *   window.VideoExportTestAPI.downloadBlob(result.url, 'test-output.webm');
 *
 *   // Validate the blob (basic checks)
 *   const validation = await window.VideoExportTestAPI.validateBlob(result.url);
 *   // validation = { valid: boolean, size: number, type: string, issues: string[] }
 */

import type { OscilloscopeMode } from '../hooks/useOscilloscopeRenderer';

export interface TestExportOptions {
  width?: number;
  height?: number;
  fps?: number;
  mode?: OscilloscopeMode;
}

export interface TestExportResult {
  success: boolean;
  url?: string;
  error?: string;
  stats: {
    duration: number;
    blobSize: number;
    mimeType: string;
    exportTimeMs: number;
  };
}

export interface ValidationResult {
  valid: boolean;
  size: number;
  type: string;
  issues: string[];
}

// Simple audio analyzer for test purposes
async function analyzeAudio(file: File): Promise<{
  leftChannel: Float32Array;
  rightChannel: Float32Array;
  sampleRate: number;
}> {
  const audioContext = new AudioContext();
  const arrayBuffer = await file.arrayBuffer();
  const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);

  const leftChannel = audioBuffer.getChannelData(0);
  const rightChannel = audioBuffer.numberOfChannels > 1
    ? audioBuffer.getChannelData(1)
    : leftChannel;

  await audioContext.close();

  return {
    leftChannel,
    rightChannel,
    sampleRate: audioBuffer.sampleRate,
  };
}

class VideoExportTestAPIClass {
  async runExport(
    audioFile: File | Blob,
    options: TestExportOptions = {}
  ): Promise<TestExportResult> {
    const startTime = performance.now();

    const file = audioFile instanceof File
      ? audioFile
      : new File([audioFile], 'test-audio.mp3', { type: audioFile.type });

    const opts = {
      width: options.width ?? 1920,
      height: options.height ?? 1080,
      fps: options.fps ?? 60,
      mode: options.mode ?? 'combined' as OscilloscopeMode,
    };

    console.log('[VideoExportTestAPI] Starting export with options:', opts);

    try {
      // Analyze audio
      const audioData = await analyzeAudio(file);
      console.log('[VideoExportTestAPI] Audio analyzed:', {
        sampleRate: audioData.sampleRate,
        duration: audioData.leftChannel.length / audioData.sampleRate,
        samples: audioData.leftChannel.length,
      });

      // Execute export
      const url = await this.executeExport(audioData, file, opts);

      const blob = await fetch(url).then(r => r.blob());
      const exportTimeMs = performance.now() - startTime;

      const result: TestExportResult = {
        success: true,
        url,
        stats: {
          duration: audioData.leftChannel.length / audioData.sampleRate,
          blobSize: blob.size,
          mimeType: blob.type,
          exportTimeMs,
        },
      };

      console.log('[VideoExportTestAPI] Export completed:', result);
      return result;
    } catch (error) {
      const exportTimeMs = performance.now() - startTime;
      const result: TestExportResult = {
        success: false,
        error: error instanceof Error ? error.message : String(error),
        stats: {
          duration: 0,
          blobSize: 0,
          mimeType: '',
          exportTimeMs,
        },
      };
      console.error('[VideoExportTestAPI] Export failed:', result);
      return result;
    }
  }

  private async executeExport(
    audioData: { leftChannel: Float32Array; rightChannel: Float32Array; sampleRate: number },
    audioFile: File,
    options: { width: number; height: number; fps: number; mode: OscilloscopeMode }
  ): Promise<string> {
    const { width, height, fps, mode } = options;
    const totalSamples = audioData.leftChannel.length;
    const samplesPerFrame = Math.floor(audioData.sampleRate / fps);
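    // e.g. 48000 Hz audio at 60 fps gives 800 source samples per rendered frame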

    const log = (...args: unknown[]) => {
      console.log('[VideoExportTestAPI]', ...args);
    };

    // Create canvas
    const canvas = document.createElement('canvas');
    canvas.width = width;
    canvas.height = height;
    const ctx = canvas.getContext('2d');
    if (!ctx) throw new Error('Could not get 2D context');

    const leftColor = '#00ff00';
    const rightColor = '#00ccff';
    const xyColor = '#ff8800';
    const dividerColor = '#333333';

    const renderFrame = (startSample: number) => {
      ctx.fillStyle = 'black';
      ctx.fillRect(0, 0, width, height);
      ctx.lineWidth = 2;

      const endSample = Math.min(startSample + samplesPerFrame, totalSamples);

      if (mode === 'combined') {
        ctx.strokeStyle = leftColor;
        ctx.beginPath();
        const samplesPerPixel = samplesPerFrame / width;
        const centerY = height / 2;
        for (let x = 0; x < width; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = (audioData.leftChannel[sampleIndex] + audioData.rightChannel[sampleIndex]) / 2;
          const y = centerY - sample * (height * 0.4);
          if (x === 0) ctx.moveTo(x, y);
          else ctx.lineTo(x, y);
        }
        ctx.stroke();
      } else if (mode === 'separate') {
        const halfHeight = height / 2;
        const samplesPerPixel = samplesPerFrame / width;

        // Left (top)
        ctx.strokeStyle = leftColor;
        ctx.beginPath();
        const leftCenterY = halfHeight / 2;
        for (let x = 0; x < width; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = audioData.leftChannel[sampleIndex];
          const y = leftCenterY - sample * (halfHeight * 0.35);
          if (x === 0) ctx.moveTo(x, y);
          else ctx.lineTo(x, y);
        }
        ctx.stroke();

        // Right (bottom)
        ctx.strokeStyle = rightColor;
        ctx.beginPath();
        const rightCenterY = halfHeight + halfHeight / 2;
        for (let x = 0; x < width; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = audioData.rightChannel[sampleIndex];
          const y = rightCenterY - sample * (halfHeight * 0.35);
          if (x === 0) ctx.moveTo(x, y);
          else ctx.lineTo(x, y);
        }
        ctx.stroke();

        // Divider
        ctx.strokeStyle = dividerColor;
        ctx.beginPath();
        ctx.moveTo(0, halfHeight);
        ctx.lineTo(width, halfHeight);
        ctx.stroke();
      } else if (mode === 'all') {
        const topHeight = height / 2;
        const bottomHeight = height / 2;
        const halfWidth = width / 2;
        const samplesPerPixel = samplesPerFrame / halfWidth;

        // Left (top-left)
        ctx.strokeStyle = leftColor;
        ctx.beginPath();
        const leftCenterY = topHeight / 2;
        for (let x = 0; x < halfWidth; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = audioData.leftChannel[sampleIndex];
          const y = leftCenterY - sample * (topHeight * 0.35);
          if (x === 0) ctx.moveTo(x, y);
          else ctx.lineTo(x, y);
        }
        ctx.stroke();

        // Right (top-right)
        ctx.strokeStyle = rightColor;
        ctx.beginPath();
        const rightCenterY = topHeight / 2;
        for (let x = 0; x < halfWidth; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = audioData.rightChannel[sampleIndex];
          const y = rightCenterY - sample * (topHeight * 0.35);
          if (x === 0) ctx.moveTo(halfWidth + x, y);
          else ctx.lineTo(halfWidth + x, y);
        }
        ctx.stroke();

        // XY (bottom half)
        ctx.strokeStyle = xyColor;
        ctx.beginPath();
        const xyCenterX = width / 2;
        const xyCenterY = topHeight + bottomHeight / 2;
        const xyScale = Math.min(halfWidth, bottomHeight) * 0.35;
        for (let i = startSample; i < endSample; i++) {
          const x = xyCenterX + audioData.leftChannel[i] * xyScale;
          const y = xyCenterY - audioData.rightChannel[i] * xyScale;
          if (i === startSample) ctx.moveTo(x, y);
          else ctx.lineTo(x, y);
        }
        ctx.stroke();

        // Dividers
        ctx.strokeStyle = dividerColor;
        ctx.beginPath();
        ctx.moveTo(0, topHeight);
        ctx.lineTo(width, topHeight);
        ctx.stroke();
        ctx.beginPath();
        ctx.moveTo(halfWidth, 0);
        ctx.lineTo(halfWidth, topHeight);
        ctx.stroke();
      }
    };

    // Setup recording
    const videoStream = canvas.captureStream(fps);
    const audioContext = new AudioContext();
    await audioContext.resume();

    const audioArrayBuffer = await audioFile.arrayBuffer();
    const audioBuffer = await audioContext.decodeAudioData(audioArrayBuffer);

    const audioSource = audioContext.createBufferSource();
    audioSource.buffer = audioBuffer;
    const audioDestination = audioContext.createMediaStreamDestination();
    audioSource.connect(audioDestination);

    const combinedStream = new MediaStream([
      ...videoStream.getVideoTracks(),
      ...audioDestination.stream.getAudioTracks(),
    ]);

    let mimeType = 'video/webm;codecs=vp8,opus';
    if (!MediaRecorder.isTypeSupported(mimeType)) {
      mimeType = 'video/webm;codecs=vp9,opus';
    }
    if (!MediaRecorder.isTypeSupported(mimeType)) {
      mimeType = 'video/webm';
    }

    const mediaRecorder = new MediaRecorder(combinedStream, {
      mimeType,
      videoBitsPerSecond: 8000000,
      audioBitsPerSecond: 256000,
    });

    const chunks: Blob[] = [];

    return new Promise<string>((resolve, reject) => {
      let stopped = false;

      const stopRecorder = (reason: string) => {
        if (stopped) return;
        stopped = true;
        log('stopRecorder', reason);

        if (mediaRecorder.state === 'recording') {
          mediaRecorder.stop();
        }
      };

      mediaRecorder.ondataavailable = (e) => {
        log('ondataavailable', { size: e.data?.size, type: e.data?.type });
        if (e.data && e.data.size > 0) {
          chunks.push(e.data);
        }
      };

      mediaRecorder.onstop = async () => {
        log('onstop', { chunks: chunks.length });
        await audioContext.close();
        combinedStream.getTracks().forEach(t => t.stop());

        const blob = new Blob(chunks, { type: mimeType });
        log('final blob', { size: blob.size });

        if (blob.size === 0) {
          reject(new Error('Empty blob'));
          return;
        }

        resolve(URL.createObjectURL(blob));
      };

      mediaRecorder.onerror = (e) => reject(e);

      audioSource.onended = () => {
        log('audioSource.onended');
        renderFrame(Math.max(0, totalSamples - samplesPerFrame));
        stopRecorder('audio_ended');
      };

      // Start recording
      mediaRecorder.start();
      const exportStart = audioContext.currentTime;
      audioSource.start(0);
      log('started', { duration: audioBuffer.duration });

      // Safety timeout
      setTimeout(() => stopRecorder('timeout'), (audioBuffer.duration + 30) * 1000);

      // Render loop
      let lastFrame = -1;
      const loop = () => {
        if (stopped) return;

        const t = Math.max(0, audioContext.currentTime - exportStart);
        const frameIndex = Math.floor(t * fps);

        if (frameIndex !== lastFrame) {
          renderFrame(Math.min(frameIndex * samplesPerFrame, totalSamples - 1));
          lastFrame = frameIndex;
        }

        requestAnimationFrame(loop);
      };
      requestAnimationFrame(loop);
    });
  }

  async validateBlob(url: string): Promise<ValidationResult> {
    const issues: string[] = [];

    try {
      const response = await fetch(url);
      const blob = await response.blob();

      if (blob.size === 0) {
        issues.push('Blob is empty');
      }

      if (!blob.type.includes('webm')) {
        issues.push(`Unexpected MIME type: ${blob.type}`);
      }

      // Check WebM magic bytes
      const header = await blob.slice(0, 4).arrayBuffer();
      const bytes = new Uint8Array(header);
      // WebM starts with 0x1A 0x45 0xDF 0xA3 (EBML header)
      if (bytes[0] !== 0x1A || bytes[1] !== 0x45 || bytes[2] !== 0xDF || bytes[3] !== 0xA3) {
        issues.push('Invalid WebM header (missing EBML magic bytes)');
      }

      return {
        valid: issues.length === 0,
        size: blob.size,
        type: blob.type,
        issues,
      };
    } catch (error) {
      return {
        valid: false,
        size: 0,
        type: '',
        issues: [error instanceof Error ? error.message : String(error)],
      };
    }
  }

  downloadBlob(url: string, filename: string = 'test-export.webm') {
    const a = document.createElement('a');
    a.href = url;
    a.download = filename;
    a.click();
  }
}

// Expose globally for testing
const api = new VideoExportTestAPIClass();

declare global {
  interface Window {
    VideoExportTestAPI: VideoExportTestAPIClass;
  }
}

if (typeof window !== 'undefined') {
  window.VideoExportTestAPI = api;
}

export const VideoExportTestAPI = api;
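
/*
 * Example: driving this API from a headless-browser test. This is a sketch
 * only; it assumes Puppeteer, a dev server at http://localhost:8080, and a
 * /fixtures/test.wav audio file (all hypothetical; adjust to the real setup):
 *
 *   import puppeteer from 'puppeteer';
 *
 *   const browser = await puppeteer.launch();
 *   const page = await browser.newPage();
 *   await page.goto('http://localhost:8080/oscilloscope');
 *   const result = await page.evaluate(async () => {
 *     const blob = await fetch('/fixtures/test.wav').then((r) => r.blob());
 *     return window.VideoExportTestAPI.runExport(blob, { fps: 30 });
 *   });
 *   console.log(result.success, result.stats);
 *   await browser.close();
 */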