import { useEffect, useRef, useCallback } from 'react';
import type { AudioData } from '@/hooks/useAudioAnalyzer';
import type { OscilloscopeMode } from '@/hooks/useOscilloscopeRenderer';
import { useAudioAnalyzer as useSharedAudioAnalyzer } from '@/contexts/AudioAnalyzerContext';
import type { LiveDisplaySettings } from './ControlPanel';

interface OscilloscopeDisplayProps {
  audioData: AudioData | null;
  micAnalyzer: AnalyserNode | null;
  mode: OscilloscopeMode;
  isPlaying: boolean;
  playbackSpeed: number;
  isLooping: boolean;
  seekPosition: number;
  onPlaybackEnd?: () => void;
  onSeek?: (position: number) => void;
  liveSettings?: LiveDisplaySettings;
}

const WIDTH = 800;
const HEIGHT = 600;
const FPS = 60;

// Get computed CSS color from theme
const getThemeColor = (cssVar: string, fallback: string): string => {
  if (typeof window === 'undefined') return fallback;
  const root = document.documentElement;
  const value = getComputedStyle(root).getPropertyValue(cssVar).trim();
  if (value) {
    return `hsl(${value})`;
  }
  return fallback;
};
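// Assumes theme variables hold bare HSL component triples (the Tailwind/
// shadcn convention, e.g. `--primary: 120 100% 50%`), which is why the value
// is wrapped in hsl() above; a variable holding a complete color string would
// produce an invalid color like `hsl(#00ff00)`.
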
export function OscilloscopeDisplay({
  audioData,
  micAnalyzer,
  mode,
  isPlaying,
  playbackSpeed,
  isLooping,
  seekPosition,
  onPlaybackEnd,
  onSeek,
  liveSettings
}: OscilloscopeDisplayProps) {
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const animationRef = useRef<number | null>(null);
  const currentSampleRef = useRef(0);
  const lastSeekPositionRef = useRef(0);
  const { analyzerNode: sharedAnalyzer } = useSharedAudioAnalyzer();

  // Use shared analyzer for live audio (music player, sound effects)
  const liveAnalyzer = sharedAnalyzer || micAnalyzer;

  // Get settings with defaults
  const lineThickness = liveSettings?.lineThickness ?? 2;
  const showGrid = liveSettings?.showGrid ?? true;
  const glowIntensity = liveSettings?.glowIntensity ?? 1;
  const liveDisplayMode = liveSettings?.displayMode ?? 'combined';

  const drawGraticule = useCallback((ctx: CanvasRenderingContext2D) => {
    if (!showGrid) return;

    const primaryColor = getThemeColor('--primary', '#00ff00');
    ctx.strokeStyle = primaryColor;
    ctx.globalAlpha = 0.3;
    ctx.lineWidth = 1;

    // Horizontal center line (X axis)
    ctx.beginPath();
    ctx.moveTo(0, HEIGHT / 2);
    ctx.lineTo(WIDTH, HEIGHT / 2);
    ctx.stroke();

    // Vertical center line (Y axis)
    ctx.beginPath();
    ctx.moveTo(WIDTH / 2, 0);
    ctx.lineTo(WIDTH / 2, HEIGHT);
    ctx.stroke();

    ctx.globalAlpha = 1;
  }, [showGrid]);

  const drawFrame = useCallback(() => {
    if (!canvasRef.current) return;

    // Always allow drawing if we have a live analyzer, even without audioData
    const hasLiveSource = liveAnalyzer || micAnalyzer;
    if (!audioData && !hasLiveSource) return;

    const canvas = canvasRef.current;
    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    const primaryColor = getThemeColor('--primary', '#00ff00');
    const backgroundColor = getThemeColor('--background', '#000000');

    let samplesPerFrame: number;
    let startSample: number;
    let endSample: number;

    // Priority: micAnalyzer > liveAnalyzer (shared) > audioData (file)
    const activeAnalyzer = micAnalyzer || liveAnalyzer;

    if (activeAnalyzer && !audioData) {
      // Real-time audio data (mic or music player)
      const bufferLength = activeAnalyzer.frequencyBinCount;
      const dataArray = new Uint8Array(bufferLength);
      activeAnalyzer.getByteTimeDomainData(dataArray);

      // Clear to background color
      ctx.fillStyle = backgroundColor;
      ctx.fillRect(0, 0, WIDTH, HEIGHT);

      // Draw graticule first
      drawGraticule(ctx);

      // Convert to Float32Array for consistency with file-based data
      const liveData = new Float32Array(dataArray.length);
      for (let i = 0; i < dataArray.length; i++) {
        liveData[i] = (dataArray[i] - 128) / 128; // Normalize to -1 to 1
      }

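      // getByteTimeDomainData() yields unsigned bytes in 0-255 with silence
      // at 128, so (value - 128) / 128 maps the buffer onto roughly [-1, 1):
      // 0 -> -1.0, 128 -> 0.0, 255 -> ~0.992.
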
      // Apply glow effect
      if (glowIntensity > 0) {
        ctx.shadowColor = primaryColor;
        ctx.shadowBlur = glowIntensity * 8;
      } else {
        ctx.shadowBlur = 0;
      }

      ctx.strokeStyle = primaryColor;
      ctx.lineWidth = lineThickness;

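      // Glow via canvas shadows: with shadowColor set to the trace color and
      // the default zero shadow offset, every stroke also paints a blurred
      // halo of itself, approximating CRT phosphor glow. The user-facing
      // intensity setting scales the blur radius (x8 px).
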
      // Draw based on live display mode
      if (liveDisplayMode === 'all') {
        // XY mode - plot consecutive sample pairs against each other. The
        // analyser's time-domain buffer is down-mixed to mono, so this is a
        // lag plot rather than a true stereo L/R trace.
        ctx.beginPath();
        const centerX = WIDTH / 2;
        const centerY = HEIGHT / 2;
        const scale = Math.min(WIDTH, HEIGHT) * 0.4;

        for (let i = 0; i < liveData.length - 1; i += 2) {
          const x = centerX + liveData[i] * scale;
          const y = centerY - liveData[i + 1] * scale;

          if (i === 0) {
            ctx.moveTo(x, y);
          } else {
            ctx.lineTo(x, y);
          }
        }
        ctx.stroke();
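        // For a pure sine input, plotting sample i against sample i+1 traces
        // an ellipse (the lag plot of a sinusoid), which gives this mode its
        // Lissajous-like look even without separate L/R channels.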
      } else {
        // Combined waveform mode (default)
        ctx.beginPath();
        const sliceWidth = WIDTH / liveData.length;
        let x = 0;

        for (let i = 0; i < liveData.length; i++) {
          const v = liveData[i];
          const y = (v * HEIGHT) / 2 + HEIGHT / 2;

          if (i === 0) {
            ctx.moveTo(x, y);
          } else {
            ctx.lineTo(x, y);
          }

          x += sliceWidth;
        }
        ctx.stroke();
      }

      ctx.shadowBlur = 0;

      // Request next frame for real-time rendering
      animationRef.current = requestAnimationFrame(drawFrame);
      return;
    }

    // File playback mode - requires decoded audioData
    if (!audioData) return;

    const baseSamplesPerFrame = Math.floor(audioData.sampleRate / FPS);
    samplesPerFrame = Math.floor(baseSamplesPerFrame * playbackSpeed);

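    // Example: at a 44100 Hz sample rate and FPS = 60 this reads
    // floor(44100 / 60) = 735 samples per frame; at playbackSpeed 1.5 it
    // reads floor(735 * 1.5) = 1102, scrolling the waveform 1.5x faster.
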
    // Handle seeking
    if (seekPosition > 0 && seekPosition !== lastSeekPositionRef.current) {
      startSample = Math.floor(seekPosition * audioData.leftChannel.length);
      currentSampleRef.current = startSample;
      lastSeekPositionRef.current = seekPosition;
      // Reset after one frame
      setTimeout(() => {
        lastSeekPositionRef.current = 0;
      }, 1000 / FPS);
    } else {
      startSample = currentSampleRef.current;
    }

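    // Note: the `seekPosition > 0` guard treats a seek to exactly 0 as
    // "no seek"; jumping back to the start instead happens when the playback
    // effect resets currentSampleRef on a fresh play.
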
    endSample = Math.min(startSample + samplesPerFrame, audioData.leftChannel.length);

    // Clear to background color
    ctx.fillStyle = backgroundColor;
    ctx.fillRect(0, 0, WIDTH, HEIGHT);

    // Draw graticule first
    drawGraticule(ctx);

    // Apply glow effect
    if (glowIntensity > 0) {
      ctx.shadowColor = primaryColor;
      ctx.shadowBlur = glowIntensity * 8;
    } else {
      ctx.shadowBlur = 0;
    }

    ctx.lineWidth = lineThickness;
    ctx.lineCap = 'round';

    const leftColor = primaryColor;
    const rightColor = getThemeColor('--accent', '#00ccff');
    const xyColor = getThemeColor('--secondary', '#ff8800');
    const dividerColor = 'rgba(255,255,255,0.1)';

    if (mode === 'combined') {
      // Combined: both channels merged
      ctx.strokeStyle = leftColor;
      ctx.beginPath();
      const samplesPerPixel = samplesPerFrame / WIDTH;
      const centerY = HEIGHT / 2;
      for (let x = 0; x < WIDTH; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.leftChannel.length) break;
        const sample = (audioData.leftChannel[sampleIndex] + audioData.rightChannel[sampleIndex]) / 2;
        const y = centerY - sample * (HEIGHT * 0.4);
        if (x === 0) ctx.moveTo(x, y);
        else ctx.lineTo(x, y);
      }
      ctx.stroke();
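      // This picks one sample per pixel column (plain decimation). When
      // samplesPerFrame exceeds WIDTH, intermediate samples are skipped, so
      // dense high-frequency content can alias visually; a min/max scan per
      // column would avoid that at extra cost.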
    } else if (mode === 'separate') {
      // Separate: Left on top, Right on bottom
      const halfHeight = HEIGHT / 2;
      const samplesPerPixel = samplesPerFrame / WIDTH;

      // Left channel (top)
      ctx.strokeStyle = leftColor;
      ctx.beginPath();
      const leftCenterY = halfHeight / 2;
      for (let x = 0; x < WIDTH; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.leftChannel.length) break;
        const sample = audioData.leftChannel[sampleIndex];
        const y = leftCenterY - sample * (halfHeight * 0.35);
        if (x === 0) ctx.moveTo(x, y);
        else ctx.lineTo(x, y);
      }
      ctx.stroke();

      // Right channel (bottom)
      ctx.strokeStyle = rightColor;
      ctx.beginPath();
      const rightCenterY = halfHeight + halfHeight / 2;
      for (let x = 0; x < WIDTH; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.rightChannel.length) break;
        const sample = audioData.rightChannel[sampleIndex];
        const y = rightCenterY - sample * (halfHeight * 0.35);
        if (x === 0) ctx.moveTo(x, y);
        else ctx.lineTo(x, y);
      }
      ctx.stroke();

      // Divider
      ctx.strokeStyle = dividerColor;
      ctx.beginPath();
      ctx.moveTo(0, halfHeight);
      ctx.lineTo(WIDTH, halfHeight);
      ctx.stroke();
    } else if (mode === 'all') {
      // All: L/R on top row, XY on bottom
      const topHeight = HEIGHT / 2;
      const bottomHeight = HEIGHT / 2;
      const halfWidth = WIDTH / 2;
      const samplesPerPixel = samplesPerFrame / halfWidth;

      // Left channel (top-left)
      ctx.strokeStyle = leftColor;
      ctx.beginPath();
      const leftCenterY = topHeight / 2;
      for (let x = 0; x < halfWidth; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.leftChannel.length) break;
        const sample = audioData.leftChannel[sampleIndex];
        const y = leftCenterY - sample * (topHeight * 0.35);
        if (x === 0) ctx.moveTo(x, y);
        else ctx.lineTo(x, y);
      }
      ctx.stroke();

      // Right channel (top-right)
      ctx.strokeStyle = rightColor;
      ctx.beginPath();
      const rightCenterY = topHeight / 2;
      for (let x = 0; x < halfWidth; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.rightChannel.length) break;
        const sample = audioData.rightChannel[sampleIndex];
        const y = rightCenterY - sample * (topHeight * 0.35);
        if (x === 0) ctx.moveTo(halfWidth + x, y);
        else ctx.lineTo(halfWidth + x, y);
      }
      ctx.stroke();

      // XY plot (bottom half): left channel drives X, right channel drives Y
      ctx.strokeStyle = xyColor;
      ctx.beginPath();
      const xyCenterX = WIDTH / 2;
      const xyCenterY = topHeight + bottomHeight / 2;
      const xyScale = Math.min(halfWidth, bottomHeight) * 0.35;
      for (let i = startSample; i < endSample; i++) {
        const x = xyCenterX + audioData.leftChannel[i] * xyScale;
        const y = xyCenterY - audioData.rightChannel[i] * xyScale;
        if (i === startSample) ctx.moveTo(x, y);
        else ctx.lineTo(x, y);
      }
      ctx.stroke();

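      // With left driving X and right driving Y, a mono signal (identical
      // channels) collapses to a diagonal line, while sine tones at related
      // L/R frequencies trace Lissajous figures, the basis of
      // "oscilloscope music".
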
      // Dividers
      ctx.strokeStyle = dividerColor;
      ctx.beginPath();
      ctx.moveTo(0, topHeight);
      ctx.lineTo(WIDTH, topHeight);
      ctx.stroke();
      ctx.beginPath();
      ctx.moveTo(halfWidth, 0);
      ctx.lineTo(halfWidth, topHeight);
      ctx.stroke();
    }

    currentSampleRef.current = endSample;
    ctx.shadowBlur = 0;

    if (endSample >= audioData.leftChannel.length) {
      if (isLooping) {
        currentSampleRef.current = 0; // Loop back to start
      } else {
        onPlaybackEnd?.();
        return;
      }
    }

    animationRef.current = requestAnimationFrame(drawFrame);
  }, [audioData, micAnalyzer, liveAnalyzer, mode, drawGraticule, onPlaybackEnd, isPlaying, playbackSpeed, isLooping, seekPosition, lineThickness, glowIntensity, liveDisplayMode]);

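  // Note: samplesPerFrame assumes requestAnimationFrame fires at ~FPS
  // (60 Hz). On higher-refresh displays the callback runs more often, so
  // file playback advances proportionally faster; scaling by the measured
  // frame delta time would make the scroll rate refresh-independent.
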
  // Initialize canvas
  useEffect(() => {
    if (!canvasRef.current) return;

    const ctx = canvasRef.current.getContext('2d');
    if (ctx) {
      ctx.fillStyle = '#000000';
      ctx.fillRect(0, 0, WIDTH, HEIGHT);
      drawGraticule(ctx);
    }
  }, [drawGraticule]);

  // Handle playback - start animation for file playback or live audio
  useEffect(() => {
    const hasLiveSource = liveAnalyzer || micAnalyzer;

    if (isPlaying && audioData) {
      // File playback
      currentSampleRef.current = 0;
      animationRef.current = requestAnimationFrame(drawFrame);
    } else if (hasLiveSource && !audioData) {
      // Live audio visualization (music player, sound effects)
      animationRef.current = requestAnimationFrame(drawFrame);
    } else {
      if (animationRef.current) {
        cancelAnimationFrame(animationRef.current);
      }
    }

    return () => {
      if (animationRef.current) {
        cancelAnimationFrame(animationRef.current);
      }
    };
  }, [isPlaying, audioData, liveAnalyzer, micAnalyzer, drawFrame]);

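  // The cleanup cancels any pending frame whenever the dependencies change,
  // so at most one requestAnimationFrame loop is ever active. Because
  // drawFrame is a dependency, file playback also restarts from sample 0
  // whenever its identity changes (e.g. after a settings change).
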
  const getModeLabel = () => {
    switch (mode) {
      case 'combined': return 'L+R';
      case 'separate': return 'L / R';
      case 'all': return 'ALL';
      default: return '';
    }
  };

  return (
    <div className="crt-bezel">
      <div className="screen-curve relative">
        <canvas
          ref={canvasRef}
          width={WIDTH}
          height={HEIGHT}
          className="w-full h-auto cursor-pointer"
          onClick={(e) => {
            if (!audioData) return;
            const rect = canvasRef.current?.getBoundingClientRect();
            if (!rect) return;
            const x = e.clientX - rect.left;
            const clickPosition = x / rect.width;
            onSeek?.(Math.max(0, Math.min(1, clickPosition)));
          }}
        />

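        {/* Click-to-seek: the click's horizontal offset, normalized by the
            rendered width, maps to a 0-1 playback position (clamped) */}
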
        {/* Mode indicator */}
        <div className="absolute top-4 left-4 font-crt text-primary/60 text-sm">
          {getModeLabel()}
        </div>

        {/* Idle state - only show if no live audio and no file */}
        {!audioData && !liveAnalyzer && !micAnalyzer && (
          <div className="absolute inset-0 flex items-center justify-center">
            <p className="font-crt text-2xl text-primary/40 text-glow animate-pulse">
              NO SIGNAL
            </p>
          </div>
        )}
      </div>
    </div>
  );
}