// personal_website/videoExportTestApi.ts
/**
 * Video Export Test API
 *
 * Exposes a global API for automated testing of video exports.
 *
 * Usage in browser console or automated tests:
 *
 *   // Run export with a test audio file
 *   const result = await window.VideoExportTestAPI.runExport(audioFileBlob, {
 *     width: 1920,
 *     height: 1080,
 *     fps: 60,
 *     mode: 'combined',
 *   });
 *   // result = { success: boolean, url?: string, error?: string, stats: {...} }
 *
 *   // Download the result
 *   window.VideoExportTestAPI.downloadBlob(result.url, 'test-output.webm');
 *
 *   // Validate the blob (basic checks)
 *   const validation = await window.VideoExportTestAPI.validateBlob(result.url);
 *   // validation = { valid: boolean, size: number, type: string, issues: string[] }
 */
import type { OscilloscopeMode } from '../hooks/useOscilloscopeRenderer';
export interface TestExportOptions {
  width?: number;
  height?: number;
  fps?: number;
  mode?: OscilloscopeMode;
}

export interface TestExportResult {
  success: boolean;
  url?: string;
  error?: string;
  stats: {
    duration: number;
    blobSize: number;
    mimeType: string;
    exportTimeMs: number;
  };
}

export interface ValidationResult {
  valid: boolean;
  size: number;
  type: string;
  issues: string[];
}
// Simple audio analyzer for test purposes
async function analyzeAudio(file: File): Promise<{
  leftChannel: Float32Array;
  rightChannel: Float32Array;
  sampleRate: number;
}> {
  const audioContext = new AudioContext();
  const arrayBuffer = await file.arrayBuffer();
  const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
  const leftChannel = audioBuffer.getChannelData(0);
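  // Mono files only have one channel; reuse the left channel so the rest
  // of the pipeline can always assume stereo data.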
  const rightChannel = audioBuffer.numberOfChannels > 1
    ? audioBuffer.getChannelData(1)
    : leftChannel;
  await audioContext.close();
  return {
    leftChannel,
    rightChannel,
    sampleRate: audioBuffer.sampleRate,
  };
}
class VideoExportTestAPIClass {
  async runExport(
    audioFile: File | Blob,
    options: TestExportOptions = {}
  ): Promise<TestExportResult> {
    const startTime = performance.now();
    const file = audioFile instanceof File
      ? audioFile
      : new File([audioFile], 'test-audio.mp3', { type: audioFile.type });
    const opts = {
      width: options.width ?? 1920,
      height: options.height ?? 1080,
      fps: options.fps ?? 60,
      mode: options.mode ?? ('combined' as OscilloscopeMode),
    };
    console.log('[VideoExportTestAPI] Starting export with options:', opts);

    try {
      // Analyze audio
      const audioData = await analyzeAudio(file);
      console.log('[VideoExportTestAPI] Audio analyzed:', {
        sampleRate: audioData.sampleRate,
        duration: audioData.leftChannel.length / audioData.sampleRate,
        samples: audioData.leftChannel.length,
      });

      // Execute export
      const url = await this.executeExport(audioData, file, opts);
      const blob = await fetch(url).then(r => r.blob());
      const exportTimeMs = performance.now() - startTime;
      const result: TestExportResult = {
        success: true,
        url,
        stats: {
          duration: audioData.leftChannel.length / audioData.sampleRate,
          blobSize: blob.size,
          mimeType: blob.type,
          exportTimeMs,
        },
      };
      console.log('[VideoExportTestAPI] Export completed:', result);
      return result;
    } catch (error) {
      const exportTimeMs = performance.now() - startTime;
      const result: TestExportResult = {
        success: false,
        error: error instanceof Error ? error.message : String(error),
        stats: {
          duration: 0,
          blobSize: 0,
          mimeType: '',
          exportTimeMs,
        },
      };
      console.error('[VideoExportTestAPI] Export failed:', result);
      return result;
    }
  }
  private async executeExport(
    audioData: { leftChannel: Float32Array; rightChannel: Float32Array; sampleRate: number },
    audioFile: File,
    options: { width: number; height: number; fps: number; mode: OscilloscopeMode }
  ): Promise<string> {
    const { width, height, fps, mode } = options;
    const totalSamples = audioData.leftChannel.length;
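    // Each video frame visualizes one frame's worth of audio samples,
    // e.g. 48000 Hz / 60 fps = 800 samples per video frame.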
    const samplesPerFrame = Math.floor(audioData.sampleRate / fps);
    const log = (...args: unknown[]) => {
      console.log('[VideoExportTestAPI]', ...args);
    };

    // Create canvas
    const canvas = document.createElement('canvas');
    canvas.width = width;
    canvas.height = height;
    const ctx = canvas.getContext('2d');
    if (!ctx) throw new Error('Could not get 2D context');

    const leftColor = '#00ff00';
    const rightColor = '#00ccff';
    const xyColor = '#ff8800';
    const dividerColor = '#333333';

    const renderFrame = (startSample: number) => {
      ctx.fillStyle = 'black';
      ctx.fillRect(0, 0, width, height);
      ctx.lineWidth = 2;
      const endSample = Math.min(startSample + samplesPerFrame, totalSamples);
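      // 'combined' averages the two channels into a single trace drawn
      // across the full canvas width.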
      if (mode === 'combined') {
        ctx.strokeStyle = leftColor;
        ctx.beginPath();
        const samplesPerPixel = samplesPerFrame / width;
        const centerY = height / 2;
        for (let x = 0; x < width; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = (audioData.leftChannel[sampleIndex] + audioData.rightChannel[sampleIndex]) / 2;
          const y = centerY - sample * (height * 0.4);
          if (x === 0) ctx.moveTo(x, y);
          else ctx.lineTo(x, y);
        }
        ctx.stroke();
      } else if (mode === 'separate') {
        const halfHeight = height / 2;
        const samplesPerPixel = samplesPerFrame / width;
        // Left (top)
        ctx.strokeStyle = leftColor;
        ctx.beginPath();
        const leftCenterY = halfHeight / 2;
        for (let x = 0; x < width; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = audioData.leftChannel[sampleIndex];
          const y = leftCenterY - sample * (halfHeight * 0.35);
          if (x === 0) ctx.moveTo(x, y);
          else ctx.lineTo(x, y);
        }
        ctx.stroke();
        // Right (bottom)
        ctx.strokeStyle = rightColor;
        ctx.beginPath();
        const rightCenterY = halfHeight + halfHeight / 2;
        for (let x = 0; x < width; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = audioData.rightChannel[sampleIndex];
          const y = rightCenterY - sample * (halfHeight * 0.35);
          if (x === 0) ctx.moveTo(x, y);
          else ctx.lineTo(x, y);
        }
        ctx.stroke();
        // Divider
        ctx.strokeStyle = dividerColor;
        ctx.beginPath();
        ctx.moveTo(0, halfHeight);
        ctx.lineTo(width, halfHeight);
        ctx.stroke();
      } else if (mode === 'all') {
        const topHeight = height / 2;
        const bottomHeight = height / 2;
        const halfWidth = width / 2;
        const samplesPerPixel = samplesPerFrame / halfWidth;
        // Left (top-left)
        ctx.strokeStyle = leftColor;
        ctx.beginPath();
        const leftCenterY = topHeight / 2;
        for (let x = 0; x < halfWidth; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = audioData.leftChannel[sampleIndex];
          const y = leftCenterY - sample * (topHeight * 0.35);
          if (x === 0) ctx.moveTo(x, y);
          else ctx.lineTo(x, y);
        }
        ctx.stroke();
        // Right (top-right)
        ctx.strokeStyle = rightColor;
        ctx.beginPath();
        const rightCenterY = topHeight / 2;
        for (let x = 0; x < halfWidth; x++) {
          const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
          if (sampleIndex >= totalSamples) break;
          const sample = audioData.rightChannel[sampleIndex];
          const y = rightCenterY - sample * (topHeight * 0.35);
          if (x === 0) ctx.moveTo(halfWidth + x, y);
          else ctx.lineTo(halfWidth + x, y);
        }
        ctx.stroke();
        // XY (bottom half)
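        // Plots left vs. right sample values as X/Y coordinates
        // (a Lissajous-style phase scope).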
        ctx.strokeStyle = xyColor;
        ctx.beginPath();
        const xyCenterX = width / 2;
        const xyCenterY = topHeight + bottomHeight / 2;
        const xyScale = Math.min(halfWidth, bottomHeight) * 0.35;
        for (let i = startSample; i < endSample; i++) {
          const x = xyCenterX + audioData.leftChannel[i] * xyScale;
          const y = xyCenterY - audioData.rightChannel[i] * xyScale;
          if (i === startSample) ctx.moveTo(x, y);
          else ctx.lineTo(x, y);
        }
        ctx.stroke();
        // Dividers
        ctx.strokeStyle = dividerColor;
        ctx.beginPath();
        ctx.moveTo(0, topHeight);
        ctx.lineTo(width, topHeight);
        ctx.stroke();
        ctx.beginPath();
        ctx.moveTo(halfWidth, 0);
        ctx.lineTo(halfWidth, topHeight);
        ctx.stroke();
      }
    };
    // Setup recording
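    // captureStream(fps) returns a live video track that picks up whatever
    // is drawn onto the canvas, capped at `fps` frames per second.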
    const videoStream = canvas.captureStream(fps);
    const audioContext = new AudioContext();
    await audioContext.resume();
    const audioArrayBuffer = await audioFile.arrayBuffer();
    const audioBuffer = await audioContext.decodeAudioData(audioArrayBuffer);
    const audioSource = audioContext.createBufferSource();
    audioSource.buffer = audioBuffer;
    const audioDestination = audioContext.createMediaStreamDestination();
    audioSource.connect(audioDestination);

    const combinedStream = new MediaStream([
      ...videoStream.getVideoTracks(),
      ...audioDestination.stream.getAudioTracks(),
    ]);
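    // Pick a supported container/codec: try VP8+Opus, then VP9+Opus, then
    // plain WebM and let the browser choose its default codecs.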
    let mimeType = 'video/webm;codecs=vp8,opus';
    if (!MediaRecorder.isTypeSupported(mimeType)) {
      mimeType = 'video/webm;codecs=vp9,opus';
    }
    if (!MediaRecorder.isTypeSupported(mimeType)) {
      mimeType = 'video/webm';
    }
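    // Generous bitrates (8 Mbps video, 256 kbps audio) so encoder quality
    // is unlikely to be the limiting factor in test output.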
    const mediaRecorder = new MediaRecorder(combinedStream, {
      mimeType,
      videoBitsPerSecond: 8000000,
      audioBitsPerSecond: 256000,
    });
    const chunks: Blob[] = [];

    return new Promise<string>((resolve, reject) => {
      let stopped = false;
      const stopRecorder = (reason: string) => {
        if (stopped) return;
        stopped = true;
        log('stopRecorder', reason);
        if (mediaRecorder.state === 'recording') {
          mediaRecorder.stop();
        }
      };

      mediaRecorder.ondataavailable = (e) => {
        log('ondataavailable', { size: e.data?.size, type: e.data?.type });
        if (e.data && e.data.size > 0) {
          chunks.push(e.data);
        }
      };

      mediaRecorder.onstop = async () => {
        log('onstop', { chunks: chunks.length });
        await audioContext.close();
        combinedStream.getTracks().forEach(t => t.stop());
        const blob = new Blob(chunks, { type: mimeType });
        log('final blob', { size: blob.size });
        if (blob.size === 0) {
          reject(new Error('Empty blob'));
          return;
        }
        resolve(URL.createObjectURL(blob));
      };

      // Reject with a real Error so callers get a usable message instead of
      // a raw event object.
      mediaRecorder.onerror = () => reject(new Error('MediaRecorder error'));
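
      // When playback finishes, paint the final audio window once more so
      // the last encoded frame isn't stale, then stop the recorder.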
      audioSource.onended = () => {
        log('audioSource.onended');
        renderFrame(Math.max(0, totalSamples - samplesPerFrame));
        stopRecorder('audio_ended');
      };

      // Start recording
      mediaRecorder.start();
      const exportStart = audioContext.currentTime;
      audioSource.start(0);
      log('started', { duration: audioBuffer.duration });

      // Safety timeout
      setTimeout(() => stopRecorder('timeout'), (audioBuffer.duration + 30) * 1000);

      // Render loop
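      // Drive rendering from the audio clock rather than wall time so the
      // drawn waveform stays in sync with the recorded audio track.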
      let lastFrame = -1;
      const loop = () => {
        if (stopped) return;
        const t = Math.max(0, audioContext.currentTime - exportStart);
        const frameIndex = Math.floor(t * fps);
        if (frameIndex !== lastFrame) {
          renderFrame(Math.min(frameIndex * samplesPerFrame, totalSamples - 1));
          lastFrame = frameIndex;
        }
        requestAnimationFrame(loop);
      };
      requestAnimationFrame(loop);
    });
  }
  async validateBlob(url: string): Promise<ValidationResult> {
    const issues: string[] = [];
    try {
      const response = await fetch(url);
      const blob = await response.blob();
      if (blob.size === 0) {
        issues.push('Blob is empty');
      }
      if (!blob.type.includes('webm')) {
        issues.push(`Unexpected MIME type: ${blob.type}`);
      }
      // Check WebM magic bytes: a WebM file starts with the EBML header
      // 0x1A 0x45 0xDF 0xA3
      const header = await blob.slice(0, 4).arrayBuffer();
      const bytes = new Uint8Array(header);
      if (bytes[0] !== 0x1A || bytes[1] !== 0x45 || bytes[2] !== 0xDF || bytes[3] !== 0xA3) {
        issues.push('Invalid WebM header (missing EBML magic bytes)');
      }
      return {
        valid: issues.length === 0,
        size: blob.size,
        type: blob.type,
        issues,
      };
    } catch (error) {
      return {
        valid: false,
        size: 0,
        type: '',
        issues: [error instanceof Error ? error.message : String(error)],
      };
    }
  }

  downloadBlob(url: string, filename: string = 'test-export.webm') {
    const a = document.createElement('a');
    a.href = url;
    a.download = filename;
    a.click();
  }
}
// Expose globally for testing
const api = new VideoExportTestAPIClass();
declare global {
  interface Window {
    VideoExportTestAPI: VideoExportTestAPIClass;
  }
}

if (typeof window !== 'undefined') {
  window.VideoExportTestAPI = api;
}
export const VideoExportTestAPI = api;
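
// A sketch of driving this API from a Playwright test; `page`, `expect`, and
// the '/fixtures/test-tone.mp3' fixture URL are assumptions for illustration,
// not part of this module:
//
//   const result = await page.evaluate(async () => {
//     const blob = await fetch('/fixtures/test-tone.mp3').then(r => r.blob());
//     return window.VideoExportTestAPI.runExport(blob, { width: 1280, height: 720, fps: 30 });
//   });
//   expect(result.success).toBe(true);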