gpt-engineer-app[bot] 2025-12-21 13:47:32 +00:00
parent fd8f1671ca
commit 56862114ec
2 changed files with 67 additions and 83 deletions

View File

@@ -1,6 +1,7 @@
 import { useEffect, useRef, useCallback } from 'react';
-import { useNavigate } from 'react-router-dom';
+import { useNavigate, useLocation } from 'react-router-dom';
 import { useSettings } from '@/contexts/SettingsContext';
+import { useMusic } from '@/contexts/MusicContext';
 
 export function MiniOscilloscope() {
   const canvasRef = useRef<HTMLCanvasElement>(null);
@@ -8,76 +9,60 @@ export function MiniOscilloscope() {
   const analyzerRef = useRef<AnalyserNode | null>(null);
   const audioContextRef = useRef<AudioContext | null>(null);
   const sourceNodeRef = useRef<MediaElementAudioSourceNode | null>(null);
-  const connectedElementsRef = useRef<Set<HTMLMediaElement>>(new Set());
+  const connectedElementRef = useRef<HTMLAudioElement | null>(null);
   const navigate = useNavigate();
+  const location = useLocation();
   const { playSound } = useSettings();
+  const { audioElement, isPlaying } = useMusic();
 
-  // Find and connect to all audio elements on the page
-  const connectToAudioElements = useCallback(() => {
-    if (!audioContextRef.current || !analyzerRef.current) return;
-    const audioElements = document.querySelectorAll('audio, video');
-    audioElements.forEach((element) => {
-      const mediaElement = element as HTMLMediaElement;
-      // Skip if already connected
-      if (connectedElementsRef.current.has(mediaElement)) return;
-      try {
-        // Create a source node for this element
-        const source = audioContextRef.current!.createMediaElementSource(mediaElement);
-        source.connect(analyzerRef.current!);
-        source.connect(audioContextRef.current!.destination);
-        connectedElementsRef.current.add(mediaElement);
-      } catch (e) {
-        // Element might already be connected to a different context
-        console.log('Could not connect audio element:', e);
-      }
-    });
-  }, []);
-
-  // Initialize audio context and analyzer
+  // Connect to music player's audio element
   useEffect(() => {
-    const initAudio = async () => {
+    if (!audioElement) return;
+    // Skip if already connected to this element
+    if (connectedElementRef.current === audioElement) return;
+    const connectToAudio = async () => {
       try {
-        audioContextRef.current = new AudioContext();
-        analyzerRef.current = audioContextRef.current.createAnalyser();
-        analyzerRef.current.fftSize = 256;
-        analyzerRef.current.smoothingTimeConstant = 0.8;
-        // Connect analyzer to destination for pass-through
-        // We'll connect sources as we find them
+        // Create or resume audio context
+        if (!audioContextRef.current) {
+          audioContextRef.current = new AudioContext();
+        }
+        if (audioContextRef.current.state === 'suspended') {
+          await audioContextRef.current.resume();
+        }
+        // Create analyzer if needed
+        if (!analyzerRef.current) {
+          analyzerRef.current = audioContextRef.current.createAnalyser();
+          analyzerRef.current.fftSize = 256;
+          analyzerRef.current.smoothingTimeConstant = 0.8;
+        }
+        // Disconnect old source if exists
+        if (sourceNodeRef.current) {
+          try {
+            sourceNodeRef.current.disconnect();
+          } catch (e) {
+            // Ignore disconnect errors
+          }
+        }
+        // Create new source from the audio element
+        sourceNodeRef.current = audioContextRef.current.createMediaElementSource(audioElement);
+        sourceNodeRef.current.connect(analyzerRef.current);
+        sourceNodeRef.current.connect(audioContextRef.current.destination);
+        connectedElementRef.current = audioElement;
+        console.log('MiniOscilloscope connected to audio element');
       } catch (e) {
-        console.log('Could not initialize audio context:', e);
+        console.log('Could not connect to audio element:', e);
       }
     };
-    initAudio();
-
-    // Observe DOM for new audio elements
-    const observer = new MutationObserver(() => {
-      connectToAudioElements();
-    });
-    observer.observe(document.body, {
-      childList: true,
-      subtree: true,
-    });
-
-    // Initial connection attempt
-    connectToAudioElements();
-
-    return () => {
-      observer.disconnect();
-      if (animationRef.current) {
-        cancelAnimationFrame(animationRef.current);
-      }
-      if (audioContextRef.current) {
-        audioContextRef.current.close();
-      }
-    };
-  }, [connectToAudioElements]);
+    connectToAudio();
+  }, [audioElement]);
 
   // Draw waveform
   useEffect(() => {
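Note on the wiring above: a media element can be captured by `createMediaElementSource` only once, and browsers typically throw an `InvalidStateError` on a second attempt, which is why the hook keeps a single `AudioContext`, reuses one source node, and remembers the already-connected element in `connectedElementRef`. A minimal standalone sketch of the same audio graph (illustrative only; `attachAnalyser` is not part of this commit):

// audio element -> MediaElementAudioSourceNode -> AnalyserNode -> destination
function attachAnalyser(ctx: AudioContext, el: HTMLAudioElement): AnalyserNode {
  const analyser = ctx.createAnalyser();
  analyser.fftSize = 256;                           // frequencyBinCount becomes 128
  analyser.smoothingTimeConstant = 0.8;
  const source = ctx.createMediaElementSource(el);  // only valid once per element
  source.connect(analyser);                         // feed the visualizer
  source.connect(ctx.destination);                  // keep the music audible
  return analyser;
}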
@@ -118,7 +103,7 @@ export function MiniOscilloscope() {
       ctx.stroke();
 
       // Draw waveform
-      if (analyzerRef.current) {
+      if (analyzerRef.current && isPlaying) {
         const bufferLength = analyzerRef.current.frequencyBinCount;
         const dataArray = new Uint8Array(bufferLength);
         analyzerRef.current.getByteTimeDomainData(dataArray);
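For reference, `getByteTimeDomainData` fills the buffer with unsigned bytes where 128 represents silence. A sketch of turning one frame into canvas points (illustrative; `analyser`, `ctx`, `width`, and `height` are assumed to be in scope and are not the component's exact draw loop):

const data = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteTimeDomainData(data);   // values 0-255, centred on 128
const step = width / data.length;
ctx.beginPath();
for (let i = 0; i < data.length; i++) {
  const y = (data[i] / 255) * height;   // map byte amplitude to canvas height
  if (i === 0) ctx.moveTo(0, y);
  else ctx.lineTo(i * step, y);
}
ctx.stroke();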
@@ -151,26 +136,8 @@ export function MiniOscilloscope() {
         ctx.lineTo(width, height / 2);
         ctx.stroke();
         ctx.shadowBlur = 0;
-
-        // If no audio, draw a subtle idle animation
-        if (!hasAudio) {
-          const time = Date.now() / 1000;
-          ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.5)';
-          ctx.lineWidth = 1;
-          ctx.beginPath();
-          for (let i = 0; i < width; i++) {
-            const y = height / 2 + Math.sin(i * 0.05 + time * 2) * 3;
-            if (i === 0) {
-              ctx.moveTo(i, y);
-            } else {
-              ctx.lineTo(i, y);
-            }
-          }
-          ctx.stroke();
-        }
       } else {
-        // No analyzer - draw idle animation
+        // No audio playing - draw idle animation
         const time = Date.now() / 1000;
         ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.5)';
         ctx.lineWidth = 1;
@@ -197,7 +164,7 @@ export function MiniOscilloscope() {
         cancelAnimationFrame(animationRef.current);
       }
     };
-  }, []);
+  }, [isPlaying]);
 
   // Handle resize
   useEffect(() => {
@@ -220,11 +187,26 @@ export function MiniOscilloscope() {
     };
   }, []);
 
+  // Cleanup on unmount
+  useEffect(() => {
+    return () => {
+      if (animationRef.current) {
+        cancelAnimationFrame(animationRef.current);
+      }
+      // Don't close audio context as it would break the music player
+    };
+  }, []);
+
   const handleClick = () => {
     playSound('click');
     navigate('/oscilloscope');
   };
 
+  // Hide on oscilloscope page
+  if (location.pathname === '/oscilloscope') {
+    return null;
+  }
+
   return (
     <div
       onClick={handleClick}
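Design note on the new unmount cleanup: once an element has been routed through a `MediaElementAudioSourceNode`, its audio reaches the speakers only through the Web Audio graph, so closing the shared `AudioContext` here would silence the music player itself; the cleanup therefore only cancels the pending animation frame, as the in-code comment states.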

View File

@@ -18,6 +18,7 @@ interface MusicContextType {
   currentIndex: number;
   selectedStation: Station | null;
   hasFetched: boolean;
+  audioElement: HTMLAudioElement | null;
   setVolume: (volume: number) => void;
   playStation: (station: Station, index: number) => void;
   togglePlay: () => void;
@@ -188,6 +189,7 @@ export const MusicProvider = ({ children }: { children: ReactNode }) => {
     currentIndex,
     selectedStation,
     hasFetched,
+    audioElement: audioRef.current,
     setVolume,
     playStation,
     togglePlay,
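
With `audioElement` exposed on the context value, any consumer rendered under `MusicProvider` can reach the shared `<audio>` element and playback state. A hypothetical consumer (the component name is illustrative and not part of the commit; note the value is `audioRef.current`, so it may be null on the first render):

import { useMusic } from '@/contexts/MusicContext';

export function NowPlayingIndicator() {
  const { audioElement, isPlaying } = useMusic();
  if (!audioElement || !isPlaying) return null;
  return <span>{Math.round(audioElement.currentTime)}s elapsed</span>;
}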