Mirror of https://github.com/JorySeverijnse/ui-fixer-supreme.git, synced 2026-01-29 16:18:38 +00:00
Changes
commit d4f544168d
parent cdc0f6d45e
@@ -3,6 +3,11 @@ import { useNavigate, useLocation } from 'react-router-dom';
 import { useSettings } from '@/contexts/SettingsContext';
+import { useAudioAnalyzer } from '@/contexts/AudioAnalyzerContext';
 
+// Get CSS variable value
+function getCSSVar(name: string): string {
+  return getComputedStyle(document.documentElement).getPropertyValue(name).trim();
+}
 
 export function MiniOscilloscope() {
   const canvasRef = useRef<HTMLCanvasElement>(null);
   const animationRef = useRef<number>();
@@ -23,15 +28,23 @@ export function MiniOscilloscope() {
     const width = canvas.width;
     const height = canvas.height;
 
+    // Get theme colors
+    const primaryHsl = getCSSVar('--primary');
+    const primaryColor = primaryHsl ? `hsl(${primaryHsl})` : 'hsl(120, 100%, 50%)';
+    const primaryColorDim = primaryHsl ? `hsl(${primaryHsl} / 0.3)` : 'hsl(120, 100%, 50%, 0.3)';
+    const primaryColorFaint = primaryHsl ? `hsl(${primaryHsl} / 0.1)` : 'hsl(120, 100%, 50%, 0.1)';
+    const bgHsl = getCSSVar('--background');
+    const bgColor = bgHsl ? `hsl(${bgHsl} / 0.8)` : 'rgba(0, 0, 0, 0.6)';
+
     // Clear with transparent background
     ctx.clearRect(0, 0, width, height);
 
-    // Draw background
-    ctx.fillStyle = 'rgba(0, 0, 0, 0.6)';
+    // Draw background with theme color
+    ctx.fillStyle = bgColor;
     ctx.fillRect(0, 0, width, height);
 
-    // Draw grid lines
-    ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.1)';
+    // Draw grid lines with theme color
+    ctx.strokeStyle = primaryColorFaint;
     ctx.lineWidth = 1;
 
     // Vertical grid lines
@@ -42,31 +55,29 @@ export function MiniOscilloscope() {
       ctx.stroke();
     }
 
-    // Center line
-    ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.3)';
+    // Center line with theme color
+    ctx.strokeStyle = primaryColorDim;
     ctx.beginPath();
     ctx.moveTo(0, height / 2);
     ctx.lineTo(width, height / 2);
     ctx.stroke();
 
-    // Draw waveform from analyzer
-    let hasAudio = false;
-
+    // Draw waveform from analyzer or flat line
     if (analyzerNode) {
       const bufferLength = analyzerNode.frequencyBinCount;
       const dataArray = new Uint8Array(bufferLength);
       analyzerNode.getByteTimeDomainData(dataArray);
 
       // Check if there's actual audio (not just silence)
-      hasAudio = dataArray.some(v => Math.abs(v - 128) > 2);
+      const hasAudio = dataArray.some(v => Math.abs(v - 128) > 2);
+
+      ctx.strokeStyle = primaryColor;
+      ctx.lineWidth = 2;
+      ctx.shadowColor = primaryColor;
+      ctx.shadowBlur = hasAudio ? 10 : 0;
+      ctx.beginPath();
 
       if (hasAudio) {
-        ctx.strokeStyle = 'hsl(120, 100%, 50%)';
-        ctx.lineWidth = 2;
-        ctx.shadowColor = 'hsl(120, 100%, 50%)';
-        ctx.shadowBlur = 10;
-        ctx.beginPath();
-
         const sliceWidth = width / bufferLength;
         let x = 0;
 
@@ -82,28 +93,21 @@ export function MiniOscilloscope() {
           x += sliceWidth;
         }
+      } else {
+        // Flat line when no audio
+        ctx.moveTo(0, height / 2);
+        ctx.lineTo(width, height / 2);
       }
 
       ctx.stroke();
       ctx.shadowBlur = 0;
-    }
-
-    // Draw idle animation when no audio
-    if (!hasAudio) {
-      const time = Date.now() / 1000;
-      ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.5)';
-      ctx.lineWidth = 1;
+    } else {
+      // Flat line when no analyzer
+      ctx.strokeStyle = primaryColor;
+      ctx.lineWidth = 2;
       ctx.beginPath();
-
-      for (let i = 0; i < width; i++) {
-        const y = height / 2 + Math.sin(i * 0.05 + time * 2) * 3;
-        if (i === 0) {
-          ctx.moveTo(i, y);
-        } else {
-          ctx.lineTo(i, y);
-        }
-      }
+      ctx.moveTo(0, height / 2);
+      ctx.lineTo(width, height / 2);
       ctx.stroke();
     }
@@ -153,7 +157,7 @@ export function MiniOscilloscope() {
   return (
     <div
       onClick={handleClick}
-      className="fixed bottom-4 left-1/2 -translate-x-1/2 w-[350px] md:w-[500px] h-[70px] z-50 cursor-pointer group"
+      className="fixed bottom-6 left-1/2 -translate-x-1/2 w-[400px] md:w-[600px] h-[80px] z-50 cursor-pointer group"
      title="Open Oscilloscope"
    >
      <div className="relative w-full h-full rounded-lg border border-primary/50 overflow-hidden bg-background/80 backdrop-blur-sm transition-all duration-300 group-hover:border-primary group-hover:shadow-[0_0_20px_hsl(var(--primary)/0.4)]">
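Note: the getCSSVar helper added above reads shadcn/Tailwind-style theme tokens, which are stored as bare HSL component lists (for example "142 76% 36%"), and the call sites wrap them in hsl() before handing them to the canvas API. The sketch below shows that pattern in isolation; themeColor is a hypothetical helper for illustration and is not part of this commit.

// Sketch: resolving theme tokens for canvas drawing (assumes "H S% L%" CSS variables).
function getCSSVar(name: string): string {
  return getComputedStyle(document.documentElement).getPropertyValue(name).trim();
}

// Hypothetical convenience wrapper, not in the commit.
function themeColor(token: string, alpha?: number, fallback = 'hsl(120, 100%, 50%)'): string {
  const hsl = getCSSVar(token); // e.g. "142 76% 36%"
  if (!hsl) return fallback;
  return alpha !== undefined ? `hsl(${hsl} / ${alpha})` : `hsl(${hsl})`;
}

// Usage mirroring the commit: full, dim, and faint primary plus a translucent background.
// const primaryColor = themeColor('--primary');
// const primaryColorDim = themeColor('--primary', 0.3);
// const bgColor = themeColor('--background', 0.8, 'rgba(0, 0, 0, 0.6)');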
@@ -1,6 +1,7 @@
 import { useEffect, useRef, useCallback } from 'react';
 import type { AudioData } from '@/hooks/useAudioAnalyzer';
 import type { OscilloscopeMode } from '@/hooks/useOscilloscopeRenderer';
+import { useAudioAnalyzer as useSharedAudioAnalyzer } from '@/contexts/AudioAnalyzerContext';
 
 interface OscilloscopeDisplayProps {
   audioData: AudioData | null;
@@ -33,6 +34,10 @@ export function OscilloscopeDisplay({
   const animationRef = useRef<number | null>(null);
   const currentSampleRef = useRef(0);
   const lastSeekPositionRef = useRef(0);
+  const { analyzerNode: sharedAnalyzer } = useSharedAudioAnalyzer();
+
+  // Use shared analyzer for live audio (music player, sound effects)
+  const liveAnalyzer = sharedAnalyzer || micAnalyzer;
 
   const drawGraticule = useCallback((ctx: CanvasRenderingContext2D) => {
     ctx.strokeStyle = '#00ff00';
@@ -52,7 +57,11 @@ export function OscilloscopeDisplay({
   }, []);
 
   const drawFrame = useCallback(() => {
-    if ((!audioData && !micAnalyzer) || !canvasRef.current) return;
+    if (!canvasRef.current) return;
+
+    // Always allow drawing if we have live analyzer, even without audioData
+    const hasLiveSource = liveAnalyzer || micAnalyzer;
+    if (!audioData && !hasLiveSource) return;
 
     const canvas = canvasRef.current;
     const ctx = canvas.getContext('2d');
@@ -63,23 +72,33 @@ export function OscilloscopeDisplay({
     let endSample: number;
     let samplesToAdvance: number = samplesPerFrame;
 
-    if (micAnalyzer) {
-      // Real-time microphone data
-      const bufferLength = micAnalyzer.frequencyBinCount;
+    // Priority: micAnalyzer > liveAnalyzer (shared) > audioData (file)
+    const activeAnalyzer = micAnalyzer || liveAnalyzer;
+
+    if (activeAnalyzer && !audioData) {
+      // Real-time audio data (mic or music player)
+      const bufferLength = activeAnalyzer.frequencyBinCount;
       const dataArray = new Uint8Array(bufferLength);
-      micAnalyzer.getByteTimeDomainData(dataArray);
+      activeAnalyzer.getByteTimeDomainData(dataArray);
 
+      // Clear to pure black
+      ctx.fillStyle = '#000000';
+      ctx.fillRect(0, 0, WIDTH, HEIGHT);
+
+      // Draw graticule first
+      drawGraticule(ctx);
+
       // Convert to Float32Array-like for consistency
-      const micData = new Float32Array(dataArray.length);
+      const liveData = new Float32Array(dataArray.length);
       for (let i = 0; i < dataArray.length; i++) {
-        micData[i] = (dataArray[i] - 128) / 128; // Normalize to -1 to 1
+        liveData[i] = (dataArray[i] - 128) / 128; // Normalize to -1 to 1
       }
 
-      samplesPerFrame = micData.length;
+      samplesPerFrame = liveData.length;
       startSample = 0;
-      endSample = micData.length;
+      endSample = liveData.length;
 
-      // Draw mic data directly
+      // Draw live data directly
      ctx.strokeStyle = '#00ff00';
      ctx.lineWidth = 2;
      ctx.beginPath();
@@ -88,7 +107,7 @@ export function OscilloscopeDisplay({
       let x = 0;
 
       for (let i = 0; i < samplesPerFrame; i++) {
-        const v = micData[i];
+        const v = liveData[i];
         const y = (v * HEIGHT) / 2 + HEIGHT / 2;
 
         if (i === 0) {
@@ -102,15 +121,13 @@ export function OscilloscopeDisplay({
       ctx.stroke();
 
-      // Draw graticule
-      drawGraticule(ctx);
 
       // Request next frame for real-time
-      if (isPlaying) {
-        animationRef.current = requestAnimationFrame(drawFrame);
-      }
+      animationRef.current = requestAnimationFrame(drawFrame);
       return;
     }
 
+    // File playback mode - need audioData
+    if (!audioData) return;
 
     // File playback mode
     const baseSamplesPerFrame = Math.floor(audioData.sampleRate / FPS);
@@ -274,7 +291,7 @@ export function OscilloscopeDisplay({
     }
 
     animationRef.current = requestAnimationFrame(drawFrame);
-  }, [audioData, micAnalyzer, mode, drawGraticule, onPlaybackEnd, isPlaying, playbackSpeed, isLooping, seekPosition]);
+  }, [audioData, micAnalyzer, liveAnalyzer, mode, drawGraticule, onPlaybackEnd, isPlaying, playbackSpeed, isLooping, seekPosition]);
 
   // Initialize canvas
   useEffect(() => {
@@ -288,11 +305,17 @@ export function OscilloscopeDisplay({
     }
   }, [drawGraticule]);
 
-  // Handle playback
+  // Handle playback - start animation for file playback or live audio
   useEffect(() => {
+    const hasLiveSource = liveAnalyzer || micAnalyzer;
+
     if (isPlaying && audioData) {
+      // File playback
       currentSampleRef.current = 0;
       animationRef.current = requestAnimationFrame(drawFrame);
+    } else if (hasLiveSource && !audioData) {
+      // Live audio visualization (music player, sound effects)
+      animationRef.current = requestAnimationFrame(drawFrame);
     } else {
       if (animationRef.current) {
         cancelAnimationFrame(animationRef.current);
@@ -304,7 +327,7 @@ export function OscilloscopeDisplay({
         cancelAnimationFrame(animationRef.current);
       }
     };
-  }, [isPlaying, audioData, drawFrame]);
+  }, [isPlaying, audioData, liveAnalyzer, micAnalyzer, drawFrame]);
 
   const getModeLabel = () => {
     switch (mode) {
@@ -338,8 +361,8 @@ export function OscilloscopeDisplay({
           {getModeLabel()}
         </div>
 
-        {/* Idle state */}
-        {!audioData && !isPlaying && (
+        {/* Idle state - only show if no live audio and no file */}
+        {!audioData && !liveAnalyzer && !micAnalyzer && (
           <div className="absolute inset-0 flex items-center justify-center">
             <p className="font-crt text-2xl text-primary/40 text-glow animate-pulse">
               NO SIGNAL
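Note: both files import useAudioAnalyzer from '@/contexts/AudioAnalyzerContext', which is not part of this diff. The consumers only rely on it returning an object with an analyzerNode (an AnalyserNode or null). Below is a minimal sketch of a provider with that shape, assuming the actual file may differ; AudioAnalyzerProvider and setAnalyzerNode are illustrative names, not confirmed by this commit.

// Sketch: minimal shared analyzer context (assumed shape, TypeScript/TSX).
import { createContext, useContext, useState, type ReactNode } from 'react';

interface AudioAnalyzerContextValue {
  analyzerNode: AnalyserNode | null;
  setAnalyzerNode: (node: AnalyserNode | null) => void;
}

const AudioAnalyzerContext = createContext<AudioAnalyzerContextValue>({
  analyzerNode: null,
  setAnalyzerNode: () => {},
});

export function AudioAnalyzerProvider({ children }: { children: ReactNode }) {
  // Whatever plays audio (music player, sound effects) registers its AnalyserNode here,
  // and visualizers such as MiniOscilloscope read it back out.
  const [analyzerNode, setAnalyzerNode] = useState<AnalyserNode | null>(null);
  return (
    <AudioAnalyzerContext.Provider value={{ analyzerNode, setAnalyzerNode }}>
      {children}
    </AudioAnalyzerContext.Provider>
  );
}

export function useAudioAnalyzer() {
  return useContext(AudioAnalyzerContext);
}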