Mirror of https://github.com/JorySeverijnse/ui-fixer-supreme.git, synced 2026-01-29 17:58:38 +00:00
import { useRef, useCallback, useEffect } from 'react';
import type { AudioData } from './useAudioAnalyzer';

export type OscilloscopeMode = 'combined' | 'separate' | 'all';

interface RendererOptions {
  mode: OscilloscopeMode;
  width: number;
  height: number;
  phosphorColor: string;
  persistence: number; // 0-1 phosphor decay (currently unused: drawFrame fully clears each frame)
}

// WebGL shaders for GPU-accelerated rendering
const VERTEX_SHADER = `
  attribute vec2 a_position;
  uniform vec2 u_resolution;

  void main() {
    // Map pixel coordinates to clip space; the Y flip keeps the canvas
    // convention of y increasing downward.
    vec2 clipSpace = (a_position / u_resolution) * 2.0 - 1.0;
    gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
  }
`;

const TRACE_FRAGMENT_SHADER = `
  precision mediump float;
  uniform vec4 u_color;

  void main() {
    gl_FragColor = u_color;
  }
`;

// Fade pass for phosphor persistence: dims the previous frame toward a dark
// green base color. Compiled below, but not used by drawFrame in its current form.
const FADE_VERTEX_SHADER = `
  attribute vec2 a_position;

  void main() {
    gl_Position = vec4(a_position, 0, 1);
  }
`;

const FADE_FRAGMENT_SHADER = `
  precision mediump float;
  uniform float u_fade;

  void main() {
    gl_FragColor = vec4(0.0, 0.031, 0.0, u_fade);
  }
`;

function createShader(gl: WebGLRenderingContext, type: number, source: string): WebGLShader | null {
  const shader = gl.createShader(type);
  if (!shader) return null;

  gl.shaderSource(shader, source);
  gl.compileShader(shader);

  if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    console.error('Shader compile error:', gl.getShaderInfoLog(shader));
    gl.deleteShader(shader);
    return null;
  }

  return shader;
}

function createProgram(gl: WebGLRenderingContext, vertexShader: WebGLShader, fragmentShader: WebGLShader): WebGLProgram | null {
  const program = gl.createProgram();
  if (!program) return null;

  gl.attachShader(program, vertexShader);
  gl.attachShader(program, fragmentShader);
  gl.linkProgram(program);

  if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
    console.error('Program link error:', gl.getProgramInfoLog(program));
    gl.deleteProgram(program);
    return null;
  }

  return program;
}

interface WebGLResources {
  gl: WebGLRenderingContext;
  traceProgram: WebGLProgram;
  fadeProgram: WebGLProgram;
  positionBuffer: WebGLBuffer;
  fadeBuffer: WebGLBuffer;
  tracePositionLocation: number;
  traceResolutionLocation: WebGLUniformLocation;
  traceColorLocation: WebGLUniformLocation;
  fadePositionLocation: number;
  fadeFadeLocation: WebGLUniformLocation;
}

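/**
 * WebGL-backed oscilloscope renderer hook.
 *
 * A minimal usage sketch, assuming an 800x400 canvas and an AudioData coming
 * from useAudioAnalyzer (the sizes and wiring here are illustrative, not from
 * this repo):
 *
 *   const { initCanvas, drawFrame, resetPlayback } = useOscilloscopeRenderer();
 *   // <canvas width={800} height={400} ref={(el) => el && initCanvas(el)} />
 *   // Then call drawFrame(audioData, options, samplesPerFrame) once per
 *   // animation frame until it returns true (end of the audio buffer).
 */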
export function useOscilloscopeRenderer() {
  const canvasRef = useRef<HTMLCanvasElement | null>(null);
  const glResourcesRef = useRef<WebGLResources | null>(null);
  const animationFrameRef = useRef<number | null>(null);
  const currentSampleRef = useRef(0);

  const initCanvas = useCallback((canvas: HTMLCanvasElement) => {
    canvasRef.current = canvas;

    const gl = canvas.getContext('webgl', {
      preserveDrawingBuffer: true,
      antialias: true,
      alpha: false
    });

    if (!gl) {
      // No 2D fallback is implemented; rendering is simply skipped.
      console.error('WebGL not supported');
      return;
    }

    // Create trace shader program
    const traceVS = createShader(gl, gl.VERTEX_SHADER, VERTEX_SHADER);
    const traceFS = createShader(gl, gl.FRAGMENT_SHADER, TRACE_FRAGMENT_SHADER);
    if (!traceVS || !traceFS) return;

    const traceProgram = createProgram(gl, traceVS, traceFS);
    if (!traceProgram) return;

    // Create fade shader program
    const fadeVS = createShader(gl, gl.VERTEX_SHADER, FADE_VERTEX_SHADER);
    const fadeFS = createShader(gl, gl.FRAGMENT_SHADER, FADE_FRAGMENT_SHADER);
    if (!fadeVS || !fadeFS) return;

    const fadeProgram = createProgram(gl, fadeVS, fadeFS);
    if (!fadeProgram) return;

    // Create buffers
    const positionBuffer = gl.createBuffer();
    const fadeBuffer = gl.createBuffer();
    if (!positionBuffer || !fadeBuffer) return;

    // Set up fade quad: two triangles covering the full clip-space viewport
    gl.bindBuffer(gl.ARRAY_BUFFER, fadeBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
      -1, -1,
       1, -1,
      -1,  1,
      -1,  1,
       1, -1,
       1,  1,
    ]), gl.STATIC_DRAW);

    // Get attribute and uniform locations
    const tracePositionLocation = gl.getAttribLocation(traceProgram, 'a_position');
    const traceResolutionLocation = gl.getUniformLocation(traceProgram, 'u_resolution');
    const traceColorLocation = gl.getUniformLocation(traceProgram, 'u_color');

    const fadePositionLocation = gl.getAttribLocation(fadeProgram, 'a_position');
    const fadeFadeLocation = gl.getUniformLocation(fadeProgram, 'u_fade');

    // getUniformLocation returns null on failure, getAttribLocation returns -1
    if (!traceResolutionLocation || !traceColorLocation || !fadeFadeLocation) return;
    if (tracePositionLocation < 0 || fadePositionLocation < 0) return;

    // Enable blending
    gl.enable(gl.BLEND);
    gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);

    // Initial clear (pure black)
    gl.viewport(0, 0, canvas.width, canvas.height);
    gl.clearColor(0, 0, 0, 1);
    gl.clear(gl.COLOR_BUFFER_BIT);

    glResourcesRef.current = {
      gl,
      traceProgram,
      fadeProgram,
      positionBuffer,
      fadeBuffer,
      tracePositionLocation,
      traceResolutionLocation,
      traceColorLocation,
      fadePositionLocation,
      fadeFadeLocation,
    };
  }, []);

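  // The fade program and full-screen quad set up above are never used by
  // drawFrame, which clears to black instead. A sketch of the persistence
  // pass they appear intended for (an assumption, since nothing in this file
  // calls it): with blending enabled, drawing the quad at a low alpha dims
  // the previous frame toward the phosphor base color on each tick.
  //
  //   gl.useProgram(fadeProgram);
  //   gl.bindBuffer(gl.ARRAY_BUFFER, fadeBuffer);
  //   gl.enableVertexAttribArray(fadePositionLocation);
  //   gl.vertexAttribPointer(fadePositionLocation, 2, gl.FLOAT, false, 0, 0);
  //   gl.uniform1f(fadeFadeLocation, 1 - options.persistence); // higher persistence = slower fade
  //   gl.drawArrays(gl.TRIANGLES, 0, 6);
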
  // Parse a '#rrggbb' hex color to normalized RGBA, e.g. '#00ff00' -> [0, 1, 0, 1]
  const parseColor = (colorStr: string): [number, number, number, number] => {
    const hex = colorStr.replace('#', '');
    const r = parseInt(hex.substring(0, 2), 16) / 255;
    const g = parseInt(hex.substring(2, 4), 16) / 255;
    const b = parseInt(hex.substring(4, 6), 16) / 255;
    return [r, g, b, 1];
  };

  const drawTrace = useCallback((
    gl: WebGLRenderingContext,
    resources: WebGLResources,
    vertices: number[],
    color: [number, number, number, number],
    width: number,
    height: number
  ) => {
    if (vertices.length < 4) return; // need at least two (x, y) points for a line

    const { traceProgram, positionBuffer, tracePositionLocation, traceResolutionLocation, traceColorLocation } = resources;

    gl.useProgram(traceProgram);
    gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.DYNAMIC_DRAW);
    gl.enableVertexAttribArray(tracePositionLocation);
    gl.vertexAttribPointer(tracePositionLocation, 2, gl.FLOAT, false, 0, 0);
    gl.uniform2f(traceResolutionLocation, width, height);
    gl.uniform4f(traceColorLocation, color[0], color[1], color[2], color[3]);
    // Note: most WebGL implementations clamp line width to 1, so this is often a no-op
    gl.lineWidth(2);
    gl.drawArrays(gl.LINE_STRIP, 0, vertices.length / 2);
  }, []);

  const drawFrame = useCallback((
    audioData: AudioData,
    options: RendererOptions,
    samplesPerFrame: number
  ) => {
    const resources = glResourcesRef.current;
    const canvas = canvasRef.current;

    if (!resources || !canvas) return false;

    const { gl } = resources;
    const { width, height, mode, phosphorColor } = options;

    // Clear to pure black each frame (no persistence/ghosting)
    gl.viewport(0, 0, width, height);
    gl.clearColor(0, 0, 0, 1);
    gl.clear(gl.COLOR_BUFFER_BIT);

    // Get current sample position
    const startSample = currentSampleRef.current;
    const endSample = Math.min(startSample + samplesPerFrame, audioData.leftChannel.length);

    const color = parseColor(phosphorColor);
    const leftColor: [number, number, number, number] = [0, 1, 0, 1]; // Green for left
    const rightColor: [number, number, number, number] = [0, 0.8, 1, 1]; // Cyan for right
    const xyColor: [number, number, number, number] = [1, 0.5, 0, 1]; // Orange for XY

    if (mode === 'combined') {
      // Combined: both channels merged into a single waveform
      const vertices: number[] = [];
      const samplesPerPixel = samplesPerFrame / width;
      const centerY = height / 2;

      for (let x = 0; x < width; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.leftChannel.length) break;
        const sample = (audioData.leftChannel[sampleIndex] + audioData.rightChannel[sampleIndex]) / 2;
        const y = centerY - sample * (height * 0.4);
        vertices.push(x, y);
      }

      drawTrace(gl, resources, vertices, color, width, height);
    } else if (mode === 'separate') {
      // Separate: left on top half, right on bottom half
      const halfHeight = height / 2;
      const samplesPerPixel = samplesPerFrame / width;

      // Left channel (top half)
      const leftVertices: number[] = [];
      const leftCenterY = halfHeight / 2;
      for (let x = 0; x < width; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.leftChannel.length) break;
        const sample = audioData.leftChannel[sampleIndex];
        const y = leftCenterY - sample * (halfHeight * 0.35);
        leftVertices.push(x, y);
      }
      drawTrace(gl, resources, leftVertices, leftColor, width, height);

      // Right channel (bottom half)
      const rightVertices: number[] = [];
      const rightCenterY = halfHeight + halfHeight / 2;
      for (let x = 0; x < width; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.rightChannel.length) break;
        const sample = audioData.rightChannel[sampleIndex];
        const y = rightCenterY - sample * (halfHeight * 0.35);
        rightVertices.push(x, y);
      }
      drawTrace(gl, resources, rightVertices, rightColor, width, height);

      // Draw divider line
      const dividerVertices = [0, halfHeight, width, halfHeight];
      drawTrace(gl, resources, dividerVertices, [0.2, 0.2, 0.2, 1], width, height);
    } else if (mode === 'all') {
      // All: L/R waveforms on the top row, XY (Lissajous) on the bottom
      const topHeight = height / 2;
      const bottomHeight = height / 2;
      const halfWidth = width / 2;
      const samplesPerPixel = samplesPerFrame / halfWidth;

      // Left channel (top-left quadrant)
      const leftVertices: number[] = [];
      const leftCenterY = topHeight / 2;
      for (let x = 0; x < halfWidth; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.leftChannel.length) break;
        const sample = audioData.leftChannel[sampleIndex];
        const y = leftCenterY - sample * (topHeight * 0.35);
        leftVertices.push(x, y);
      }
      drawTrace(gl, resources, leftVertices, leftColor, width, height);

      // Right channel (top-right quadrant)
      const rightVertices: number[] = [];
      const rightCenterY = topHeight / 2;
      for (let x = 0; x < halfWidth; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.rightChannel.length) break;
        const sample = audioData.rightChannel[sampleIndex];
        const y = rightCenterY - sample * (topHeight * 0.35);
        rightVertices.push(halfWidth + x, y);
      }
      drawTrace(gl, resources, rightVertices, rightColor, width, height);

      // XY mode (bottom half, centered): left channel drives X, right drives Y
      const xyVertices: number[] = [];
      const xyCenterX = width / 2;
      const xyCenterY = topHeight + bottomHeight / 2;
      const xyScale = Math.min(halfWidth, bottomHeight) * 0.35;
      for (let i = startSample; i < endSample; i++) {
        const x = xyCenterX + audioData.leftChannel[i] * xyScale;
        const y = xyCenterY - audioData.rightChannel[i] * xyScale;
        xyVertices.push(x, y);
      }
      drawTrace(gl, resources, xyVertices, xyColor, width, height);

      // Draw divider lines
      drawTrace(gl, resources, [0, topHeight, width, topHeight], [0.2, 0.2, 0.2, 1], width, height);
      drawTrace(gl, resources, [halfWidth, 0, halfWidth, topHeight], [0.2, 0.2, 0.2, 1], width, height);
    }

    // Advance the playback position; report true once the buffer is exhausted
    currentSampleRef.current = endSample;

    return endSample >= audioData.leftChannel.length;
  }, [drawTrace]);

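  // A sketch of the playback loop drawFrame is built for (an assumption: the
  // loop itself lives outside this file; sampleRate is presumed available
  // from the audio source, and 60 Hz is an illustrative frame rate):
  //
  //   const samplesPerFrame = Math.floor(sampleRate / 60);
  //   const tick = () => {
  //     const done = drawFrame(audioData, options, samplesPerFrame);
  //     if (!done) animationFrameRef.current = requestAnimationFrame(tick);
  //   };
  //   animationFrameRef.current = requestAnimationFrame(tick);
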
  // Draws an 8x8 graticule with emphasized center axes.
  // Note: a canvas that already has a WebGL context returns null from
  // getContext('2d'), so this must be given a separate overlay canvas (see
  // the sketch below); it is also not currently returned by the hook.
  const draw2DGraticule = (canvas: HTMLCanvasElement, width: number, height: number) => {
    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    ctx.strokeStyle = 'rgba(0, 100, 0, 0.3)';
    ctx.lineWidth = 1;

    const divisions = 8;
    const cellWidth = width / divisions;
    const cellHeight = height / divisions;

    for (let i = 0; i <= divisions; i++) {
      ctx.beginPath();
      ctx.moveTo(i * cellWidth, 0);
      ctx.lineTo(i * cellWidth, height);
      ctx.stroke();
      ctx.beginPath();
      ctx.moveTo(0, i * cellHeight);
      ctx.lineTo(width, i * cellHeight);
      ctx.stroke();
    }

    // Emphasized center axes
    ctx.strokeStyle = 'rgba(0, 150, 0, 0.5)';
    ctx.lineWidth = 2;
    ctx.beginPath();
    ctx.moveTo(0, height / 2);
    ctx.lineTo(width, height / 2);
    ctx.stroke();
    ctx.beginPath();
    ctx.moveTo(width / 2, 0);
    ctx.lineTo(width / 2, height);
    ctx.stroke();
  };

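  // Overlay sketch (illustrative, not from this file; using it from a
  // component would also require returning draw2DGraticule from the hook):
  // stack a 2D canvas over the WebGL one so the static grid is drawn once
  // and never cleared by the trace rendering.
  //
  //   <div style={{ position: 'relative' }}>
  //     <canvas width={800} height={400} ref={(el) => el && initCanvas(el)} />
  //     <canvas width={800} height={400} ref={(el) => el && draw2DGraticule(el, 800, 400)}
  //             style={{ position: 'absolute', inset: 0, pointerEvents: 'none' }} />
  //   </div>
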
  const resetPlayback = useCallback(() => {
    currentSampleRef.current = 0;

    const resources = glResourcesRef.current;
    if (resources) {
      const { gl } = resources;
      gl.clearColor(0, 0, 0, 1);
      gl.clear(gl.COLOR_BUFFER_BIT);
    }
  }, []);

  const stopAnimation = useCallback(() => {
    if (animationFrameRef.current) {
      cancelAnimationFrame(animationFrameRef.current);
      animationFrameRef.current = null;
    }
  }, []);

  const getCurrentSample = useCallback(() => currentSampleRef.current, []);

  useEffect(() => {
    return () => {
      stopAnimation();
      // Clean up WebGL resources on unmount
      if (glResourcesRef.current) {
        const { gl, traceProgram, fadeProgram, positionBuffer, fadeBuffer } = glResourcesRef.current;
        gl.deleteProgram(traceProgram);
        gl.deleteProgram(fadeProgram);
        gl.deleteBuffer(positionBuffer);
        gl.deleteBuffer(fadeBuffer);
        glResourcesRef.current = null;
      }
    };
  }, [stopAnimation]);

  return {
    canvasRef,
    initCanvas,
    drawFrame,
    resetPlayback,
    stopAnimation,
    getCurrentSample,
  };
}