Mirror of https://github.com/JorySeverijnse/ui-fixer-supreme.git, synced 2026-01-29 19:58:38 +00:00
Link mini oscilloscope to music
- Fix connection to music audio for MiniOscilloscope by wiring to the MusicContext audioElement and ensuring a real-time waveform from active audio sources.
- Update MiniOscilloscope to discover and attach to all current and future audio elements, with an idle animation and click navigation to /oscilloscope.
- Adjust MusicContext to expose audioElement for external consumers.

X-Lovable-Edit-ID: edt-731dd6ba-bc18-4933-beb8-3df453876b84
commit 3f05ec4015
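For context, the wiring the commit message describes is standard Web Audio plumbing: the player's audio element is wrapped in a MediaElementAudioSourceNode, routed through an AnalyserNode to read waveform data, and connected on to the destination so playback stays audible. A minimal TypeScript sketch of that idea (the helper name and structure are illustrative, not the component's actual code):

// Illustrative sketch only; attachAnalyser is a made-up helper name.
// Note: createMediaElementSource() can be called only once per media element,
// which is why the component keeps track of what it has already connected.
function attachAnalyser(audioElement: HTMLAudioElement): AnalyserNode {
  const audioCtx = new AudioContext();
  const analyser = audioCtx.createAnalyser();
  analyser.fftSize = 256;                 // small FFT keeps the mini view cheap
  analyser.smoothingTimeConstant = 0.8;   // smooth the trace between frames

  const source = audioCtx.createMediaElementSource(audioElement);
  source.connect(analyser);               // tap the signal for visualisation
  source.connect(audioCtx.destination);   // keep routing audio to the speakers

  return analyser;
}

This also matches the cleanup note later in the diff: the shared AudioContext is left open on unmount so the music player keeps playing.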
@@ -1,6 +1,7 @@
import { useEffect, useRef, useCallback } from 'react';
import { useNavigate } from 'react-router-dom';
import { useNavigate, useLocation } from 'react-router-dom';
import { useSettings } from '@/contexts/SettingsContext';
import { useMusic } from '@/contexts/MusicContext';

export function MiniOscilloscope() {
const canvasRef = useRef<HTMLCanvasElement>(null);
@@ -8,76 +9,60 @@ export function MiniOscilloscope() {
const analyzerRef = useRef<AnalyserNode | null>(null);
const audioContextRef = useRef<AudioContext | null>(null);
const sourceNodeRef = useRef<MediaElementAudioSourceNode | null>(null);
const connectedElementsRef = useRef<Set<HTMLMediaElement>>(new Set());
const connectedElementRef = useRef<HTMLAudioElement | null>(null);
const navigate = useNavigate();
const location = useLocation();
const { playSound } = useSettings();
const { audioElement, isPlaying } = useMusic();

// Find and connect to all audio elements on the page
const connectToAudioElements = useCallback(() => {
if (!audioContextRef.current || !analyzerRef.current) return;

const audioElements = document.querySelectorAll('audio, video');

audioElements.forEach((element) => {
const mediaElement = element as HTMLMediaElement;

// Skip if already connected
if (connectedElementsRef.current.has(mediaElement)) return;

try {
// Create a source node for this element
const source = audioContextRef.current!.createMediaElementSource(mediaElement);
source.connect(analyzerRef.current!);
source.connect(audioContextRef.current!.destination);
connectedElementsRef.current.add(mediaElement);
} catch (e) {
// Element might already be connected to a different context
console.log('Could not connect audio element:', e);
}
});
}, []);

// Initialize audio context and analyzer
// Connect to music player's audio element
useEffect(() => {
const initAudio = async () => {
if (!audioElement) return;

// Skip if already connected to this element
if (connectedElementRef.current === audioElement) return;

const connectToAudio = async () => {
try {
// Create or resume audio context
if (!audioContextRef.current) {
audioContextRef.current = new AudioContext();
}

if (audioContextRef.current.state === 'suspended') {
await audioContextRef.current.resume();
}

// Create analyzer if needed
if (!analyzerRef.current) {
analyzerRef.current = audioContextRef.current.createAnalyser();
analyzerRef.current.fftSize = 256;
analyzerRef.current.smoothingTimeConstant = 0.8;
}

// Connect analyzer to destination for pass-through
// We'll connect sources as we find them
// Disconnect old source if exists
if (sourceNodeRef.current) {
try {
sourceNodeRef.current.disconnect();
} catch (e) {
console.log('Could not initialize audio context:', e);
// Ignore disconnect errors
}
}

// Create new source from the audio element
sourceNodeRef.current = audioContextRef.current.createMediaElementSource(audioElement);
sourceNodeRef.current.connect(analyzerRef.current);
sourceNodeRef.current.connect(audioContextRef.current.destination);
connectedElementRef.current = audioElement;

console.log('MiniOscilloscope connected to audio element');
} catch (e) {
console.log('Could not connect to audio element:', e);
}
};

initAudio();

// Observe DOM for new audio elements
const observer = new MutationObserver(() => {
connectToAudioElements();
});

observer.observe(document.body, {
childList: true,
subtree: true,
});

// Initial connection attempt
connectToAudioElements();

return () => {
observer.disconnect();
if (animationRef.current) {
cancelAnimationFrame(animationRef.current);
}
if (audioContextRef.current) {
audioContextRef.current.close();
}
};
}, [connectToAudioElements]);
connectToAudio();
}, [audioElement]);

// Draw waveform
useEffect(() => {
@@ -118,7 +103,7 @@ export function MiniOscilloscope() {
ctx.stroke();

// Draw waveform
if (analyzerRef.current) {
if (analyzerRef.current && isPlaying) {
const bufferLength = analyzerRef.current.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
analyzerRef.current.getByteTimeDomainData(dataArray);
@@ -151,26 +136,8 @@ export function MiniOscilloscope() {
ctx.lineTo(width, height / 2);
ctx.stroke();
ctx.shadowBlur = 0;

// If no audio, draw a subtle idle animation
if (!hasAudio) {
const time = Date.now() / 1000;
ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.5)';
ctx.lineWidth = 1;
ctx.beginPath();

for (let i = 0; i < width; i++) {
const y = height / 2 + Math.sin(i * 0.05 + time * 2) * 3;
if (i === 0) {
ctx.moveTo(i, y);
} else {
ctx.lineTo(i, y);
}
}
ctx.stroke();
}
} else {
// No analyzer - draw idle animation
// No audio playing - draw idle animation
const time = Date.now() / 1000;
ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.5)';
ctx.lineWidth = 1;
@@ -197,7 +164,7 @@ export function MiniOscilloscope() {
cancelAnimationFrame(animationRef.current);
}
};
}, []);
}, [isPlaying]);

// Handle resize
useEffect(() => {
@@ -220,11 +187,26 @@ export function MiniOscilloscope() {
};
}, []);

// Cleanup on unmount
useEffect(() => {
return () => {
if (animationRef.current) {
cancelAnimationFrame(animationRef.current);
}
// Don't close audio context as it would break the music player
};
}, []);

const handleClick = () => {
playSound('click');
navigate('/oscilloscope');
};

// Hide on oscilloscope page
if (location.pathname === '/oscilloscope') {
return null;
}

return (
<div
onClick={handleClick}
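The drawing hunks above follow the usual canvas-oscilloscope pattern: each animation frame reads time-domain samples from the analyser with getByteTimeDomainData and plots them across the canvas, falling back to a slow sine "idle" trace when nothing is playing. A rough, self-contained sketch of that per-frame draw (drawFrame is a hypothetical helper, not the component's code):

// Hypothetical helper illustrating the draw logic in the hunks above.
function drawFrame(
  ctx: CanvasRenderingContext2D,
  analyser: AnalyserNode | null,
  isPlaying: boolean,
  width: number,
  height: number,
): void {
  ctx.clearRect(0, 0, width, height);
  ctx.beginPath();

  if (analyser && isPlaying) {
    // Live waveform: byte samples centred around 128.
    const data = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteTimeDomainData(data);
    for (let i = 0; i < data.length; i++) {
      const x = (i / data.length) * width;
      const y = (data[i] / 255) * height;
      if (i === 0) ctx.moveTo(x, y);
      else ctx.lineTo(x, y);
    }
  } else {
    // Idle animation: a gentle sine wave around the midline.
    const time = Date.now() / 1000;
    for (let x = 0; x < width; x++) {
      const y = height / 2 + Math.sin(x * 0.05 + time * 2) * 3;
      if (x === 0) ctx.moveTo(x, y);
      else ctx.lineTo(x, y);
    }
  }

  ctx.stroke();
}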
@@ -18,6 +18,7 @@ interface MusicContextType {
currentIndex: number;
selectedStation: Station | null;
hasFetched: boolean;
audioElement: HTMLAudioElement | null;
setVolume: (volume: number) => void;
playStation: (station: Station, index: number) => void;
togglePlay: () => void;
@@ -188,6 +189,7 @@ export const MusicProvider = ({ children }: { children: ReactNode }) => {
currentIndex,
selectedStation,
hasFetched,
audioElement: audioRef.current,
setVolume,
playStation,
togglePlay,
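With audioElement now on the context value, any component under MusicProvider can reach the underlying HTMLAudioElement through the existing useMusic() hook. A small illustrative consumer (the component name and what it does with the element are made up for the example):

import { useEffect } from 'react';
import { useMusic } from '@/contexts/MusicContext';

// Illustrative consumer; not part of the commit.
export function MusicStatusBadge() {
  const { audioElement, isPlaying } = useMusic();

  useEffect(() => {
    if (!audioElement) return;
    // The raw element is available for metering, debugging, etc.
    console.log('music element ready, volume:', audioElement.volume);
  }, [audioElement]);

  return <span>{isPlaying ? 'playing' : 'paused'}</span>;
}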