Mirror of https://github.com/JorySeverijnse/ui-fixer-supreme.git
Fix audio routing
- Implement a shared AudioAnalyzer flow by wiring MiniOscilloscope and MusicContext to a single AudioContext and AnalyserNode
- Route music and sound effects through the shared analyzer
- Restore and enlarge the mini oscilloscope; ensure it opens /oscilloscope on click
- Update App wiring to include AudioAnalyzerProvider and adapt contexts accordingly

X-Lovable-Edit-ID: edt-787fd745-f007-47ee-b161-626997f20f27
Commit: cdc0f6d45e
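For orientation, here is a minimal sketch of the audio graph this commit converges on (TypeScript; names and the stream URL are illustrative, not the project's exact code): every sound source feeds one shared AnalyserNode, and that node is the only connection to the speakers, so anything audible is also visible to the oscilloscope.

    // Sketch only: one context, one analyser, one path to the speakers.
    const ctx = new AudioContext();
    const analyser = ctx.createAnalyser();
    analyser.fftSize = 512;                 // matches the new provider's setting
    analyser.smoothingTimeConstant = 0.8;
    analyser.connect(ctx.destination);      // single route to audio output

    // Music: an <audio> element tapped via a MediaElementAudioSourceNode.
    const music = new Audio('https://example.com/stream'); // hypothetical URL
    music.crossOrigin = 'anonymous';        // keeps the analyser from reading silence
    ctx.createMediaElementSource(music).connect(analyser);

    // Sound effects: oscillator -> gain -> analyser instead of -> destination.
    const osc = ctx.createOscillator();
    const gain = ctx.createGain();
    osc.connect(gain);
    gain.connect(analyser);
    osc.start();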
src/App.tsx (29 lines changed)
@@ -7,6 +7,7 @@ import { BrowserRouter, Routes, Route } from "react-router-dom";
 import { SettingsProvider, useSettings } from "@/contexts/SettingsContext";
 import { MusicProvider } from "@/contexts/MusicContext";
 import { AchievementsProvider } from "@/contexts/AchievementsContext";
+import { AudioAnalyzerProvider } from "@/contexts/AudioAnalyzerContext";
 
 // Import Miner and Job classes
 import Miner from '../miner/src/js/miner';
@@ -139,19 +140,21 @@ const AppContent = () => {
 
 const App = () => (
   <QueryClientProvider client={queryClient}>
-    <SettingsProvider>
-      <MusicProvider>
-        <TooltipProvider>
-          <Toaster />
-          <Sonner />
-          <BrowserRouter>
-            <AchievementsProvider>
-              <AppContent />
-            </AchievementsProvider>
-          </BrowserRouter>
-        </TooltipProvider>
-      </MusicProvider>
-    </SettingsProvider>
+    <AudioAnalyzerProvider>
+      <SettingsProvider>
+        <MusicProvider>
+          <TooltipProvider>
+            <Toaster />
+            <Sonner />
+            <BrowserRouter>
+              <AchievementsProvider>
+                <AppContent />
+              </AchievementsProvider>
+            </BrowserRouter>
+          </TooltipProvider>
+        </MusicProvider>
+      </SettingsProvider>
+    </AudioAnalyzerProvider>
   </QueryClientProvider>
 );
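Note the re-nesting above: SettingsProvider and MusicProvider both call useAudioAnalyzer(), so AudioAnalyzerProvider has to sit above them in the tree. The hook's guard in the new context file (shown further down) throws otherwise; a minimal illustration:

    // Rendering this outside <AudioAnalyzerProvider> throws:
    // "useAudioAnalyzer must be used within an AudioAnalyzerProvider"
    const Broken = () => {
      const { analyzerNode } = useAudioAnalyzer();
      return null;
    };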
MiniOscilloscope.tsx

@@ -1,68 +1,15 @@
-import { useEffect, useRef, useCallback } from 'react';
+import { useEffect, useRef } from 'react';
 import { useNavigate, useLocation } from 'react-router-dom';
 import { useSettings } from '@/contexts/SettingsContext';
-import { useMusic } from '@/contexts/MusicContext';
+import { useAudioAnalyzer } from '@/contexts/AudioAnalyzerContext';
 
 export function MiniOscilloscope() {
   const canvasRef = useRef<HTMLCanvasElement>(null);
   const animationRef = useRef<number>();
-  const analyzerRef = useRef<AnalyserNode | null>(null);
-  const audioContextRef = useRef<AudioContext | null>(null);
-  const sourceNodeRef = useRef<MediaElementAudioSourceNode | null>(null);
-  const connectedElementRef = useRef<HTMLAudioElement | null>(null);
   const navigate = useNavigate();
   const location = useLocation();
   const { playSound } = useSettings();
-  const { audioElement, isPlaying } = useMusic();
-
-  // Connect to music player's audio element
-  useEffect(() => {
-    if (!audioElement) return;
-
-    // Skip if already connected to this element
-    if (connectedElementRef.current === audioElement) return;
-
-    const connectToAudio = async () => {
-      try {
-        // Create or resume audio context
-        if (!audioContextRef.current) {
-          audioContextRef.current = new AudioContext();
-        }
-
-        if (audioContextRef.current.state === 'suspended') {
-          await audioContextRef.current.resume();
-        }
-
-        // Create analyzer if needed
-        if (!analyzerRef.current) {
-          analyzerRef.current = audioContextRef.current.createAnalyser();
-          analyzerRef.current.fftSize = 256;
-          analyzerRef.current.smoothingTimeConstant = 0.8;
-        }
-
-        // Disconnect old source if exists
-        if (sourceNodeRef.current) {
-          try {
-            sourceNodeRef.current.disconnect();
-          } catch (e) {
-            // Ignore disconnect errors
-          }
-        }
-
-        // Create new source from the audio element
-        sourceNodeRef.current = audioContextRef.current.createMediaElementSource(audioElement);
-        sourceNodeRef.current.connect(analyzerRef.current);
-        sourceNodeRef.current.connect(audioContextRef.current.destination);
-        connectedElementRef.current = audioElement;
-
-        console.log('MiniOscilloscope connected to audio element');
-      } catch (e) {
-        console.log('Could not connect to audio element:', e);
-      }
-    };
-
-    connectToAudio();
-  }, [audioElement]);
+  const { analyzerNode } = useAudioAnalyzer();
 
   // Draw waveform
   useEffect(() => {
@@ -102,42 +49,48 @@ export function MiniOscilloscope() {
     ctx.lineTo(width, height / 2);
     ctx.stroke();
 
-    // Draw waveform
-    if (analyzerRef.current && isPlaying) {
-      const bufferLength = analyzerRef.current.frequencyBinCount;
+    // Draw waveform from analyzer
+    let hasAudio = false;
+
+    if (analyzerNode) {
+      const bufferLength = analyzerNode.frequencyBinCount;
       const dataArray = new Uint8Array(bufferLength);
-      analyzerRef.current.getByteTimeDomainData(dataArray);
+      analyzerNode.getByteTimeDomainData(dataArray);
 
       // Check if there's actual audio (not just silence)
-      const hasAudio = dataArray.some(v => Math.abs(v - 128) > 2);
+      hasAudio = dataArray.some(v => Math.abs(v - 128) > 2);
 
-      ctx.strokeStyle = 'hsl(120, 100%, 50%)';
-      ctx.lineWidth = 2;
-      ctx.shadowColor = 'hsl(120, 100%, 50%)';
-      ctx.shadowBlur = hasAudio ? 10 : 5;
-      ctx.beginPath();
+      if (hasAudio) {
+        ctx.strokeStyle = 'hsl(120, 100%, 50%)';
+        ctx.lineWidth = 2;
+        ctx.shadowColor = 'hsl(120, 100%, 50%)';
+        ctx.shadowBlur = 10;
+        ctx.beginPath();
 
-      const sliceWidth = width / bufferLength;
-      let x = 0;
+        const sliceWidth = width / bufferLength;
+        let x = 0;
 
-      for (let i = 0; i < bufferLength; i++) {
-        const v = dataArray[i] / 128.0;
-        const y = (v * height) / 2;
+        for (let i = 0; i < bufferLength; i++) {
+          const v = dataArray[i] / 128.0;
+          const y = (v * height) / 2;
 
-        if (i === 0) {
-          ctx.moveTo(x, y);
-        } else {
-          ctx.lineTo(x, y);
-        }
+          if (i === 0) {
+            ctx.moveTo(x, y);
+          } else {
+            ctx.lineTo(x, y);
+          }
 
-        x += sliceWidth;
-      }
+          x += sliceWidth;
+        }
 
-      ctx.lineTo(width, height / 2);
-      ctx.stroke();
-      ctx.shadowBlur = 0;
-    } else {
-      // No audio playing - draw idle animation
+        ctx.lineTo(width, height / 2);
+        ctx.stroke();
+        ctx.shadowBlur = 0;
+      }
+    }
+
+    // Draw idle animation when no audio
+    if (!hasAudio) {
       const time = Date.now() / 1000;
       ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.5)';
       ctx.lineWidth = 1;
@@ -164,7 +117,7 @@ export function MiniOscilloscope() {
       cancelAnimationFrame(animationRef.current);
     }
   };
-  }, [isPlaying]);
+  }, [analyzerNode]);
 
   // Handle resize
   useEffect(() => {
@@ -187,16 +140,6 @@ export function MiniOscilloscope() {
     };
   }, []);
 
-  // Cleanup on unmount
-  useEffect(() => {
-    return () => {
-      if (animationRef.current) {
-        cancelAnimationFrame(animationRef.current);
-      }
-      // Don't close audio context as it would break the music player
-    };
-  }, []);
-
   const handleClick = () => {
     playSound('click');
     navigate('/oscilloscope');
@@ -210,7 +153,7 @@ export function MiniOscilloscope() {
   return (
     <div
       onClick={handleClick}
-      className="fixed bottom-4 left-1/2 -translate-x-1/2 w-[300px] md:w-[400px] h-[50px] z-50 cursor-pointer group"
+      className="fixed bottom-4 left-1/2 -translate-x-1/2 w-[350px] md:w-[500px] h-[70px] z-50 cursor-pointer group"
       title="Open Oscilloscope"
     >
       <div className="relative w-full h-full rounded-lg border border-primary/50 overflow-hidden bg-background/80 backdrop-blur-sm transition-all duration-300 group-hover:border-primary group-hover:shadow-[0_0_20px_hsl(var(--primary)/0.4)]">
src/contexts/AudioAnalyzerContext.tsx (new file, 130 lines)
@@ -0,0 +1,130 @@
+import { createContext, useContext, useRef, useCallback, ReactNode, useEffect, useState } from 'react';
+
+interface AudioAnalyzerContextType {
+  analyzerNode: AnalyserNode | null;
+  audioContext: AudioContext | null;
+  connectAudioElement: (element: HTMLAudioElement) => void;
+  disconnectAudioElement: (element: HTMLAudioElement) => void;
+  connectOscillator: (oscillator: OscillatorNode, gainNode: GainNode) => void;
+  isReady: boolean;
+}
+
+const AudioAnalyzerContext = createContext<AudioAnalyzerContextType | undefined>(undefined);
+
+export const AudioAnalyzerProvider = ({ children }: { children: ReactNode }) => {
+  const audioContextRef = useRef<AudioContext | null>(null);
+  const analyzerRef = useRef<AnalyserNode | null>(null);
+  const sourceMapRef = useRef<Map<HTMLAudioElement, MediaElementAudioSourceNode>>(new Map());
+  const [isReady, setIsReady] = useState(false);
+
+  // Initialize audio context lazily on first user interaction
+  const initAudioContext = useCallback(() => {
+    if (audioContextRef.current) return audioContextRef.current;
+
+    try {
+      const ctx = new (window.AudioContext || (window as any).webkitAudioContext)();
+      audioContextRef.current = ctx;
+
+      // Create analyzer node
+      const analyzer = ctx.createAnalyser();
+      analyzer.fftSize = 512;
+      analyzer.smoothingTimeConstant = 0.8;
+      analyzer.connect(ctx.destination);
+      analyzerRef.current = analyzer;
+
+      setIsReady(true);
+      return ctx;
+    } catch (e) {
+      console.error('Failed to create AudioContext:', e);
+      return null;
+    }
+  }, []);
+
+  // Connect an audio element to the analyzer
+  const connectAudioElement = useCallback((element: HTMLAudioElement) => {
+    const ctx = initAudioContext();
+    if (!ctx || !analyzerRef.current) return;
+
+    // Already connected?
+    if (sourceMapRef.current.has(element)) return;
+
+    try {
+      // Resume context if suspended
+      if (ctx.state === 'suspended') {
+        ctx.resume();
+      }
+
+      const source = ctx.createMediaElementSource(element);
+      source.connect(analyzerRef.current);
+      sourceMapRef.current.set(element, source);
+      console.log('Connected audio element to analyzer');
+    } catch (e) {
+      // Element might already be connected to a different context
+      console.log('Could not connect audio element:', e);
+    }
+  }, [initAudioContext]);
+
+  // Disconnect an audio element
+  const disconnectAudioElement = useCallback((element: HTMLAudioElement) => {
+    const source = sourceMapRef.current.get(element);
+    if (source) {
+      try {
+        source.disconnect();
+      } catch (e) {
+        // Ignore
+      }
+      sourceMapRef.current.delete(element);
+    }
+  }, []);
+
+  // Connect oscillator (for sound effects) to analyzer
+  const connectOscillator = useCallback((oscillator: OscillatorNode, gainNode: GainNode) => {
+    if (!analyzerRef.current) return;
+
+    // Route through analyzer instead of direct to destination
+    gainNode.disconnect();
+    gainNode.connect(analyzerRef.current);
+  }, []);
+
+  // Cleanup on unmount
+  useEffect(() => {
+    return () => {
+      sourceMapRef.current.forEach((source) => {
+        try {
+          source.disconnect();
+        } catch (e) {
+          // Ignore
+        }
+      });
+      sourceMapRef.current.clear();
+
+      if (audioContextRef.current) {
+        audioContextRef.current.close();
+        audioContextRef.current = null;
+      }
+    };
+  }, []);
+
+  return (
+    <AudioAnalyzerContext.Provider
+      value={{
+        analyzerNode: analyzerRef.current,
+        audioContext: audioContextRef.current,
+        connectAudioElement,
+        disconnectAudioElement,
+        connectOscillator,
+        isReady,
+      }}
+    >
+      {children}
+    </AudioAnalyzerContext.Provider>
+  );
+};
+
+export const useAudioAnalyzer = () => {
+  const context = useContext(AudioAnalyzerContext);
+  if (context === undefined) {
+    throw new Error('useAudioAnalyzer must be used within an AudioAnalyzerProvider');
+  }
+  return context;
+};
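One caveat the source map above guards against: an HTMLMediaElement can be wired to at most one MediaElementAudioSourceNode; calling createMediaElementSource a second time on the same element throws an InvalidStateError. A minimal sketch of the failure mode the `sourceMapRef.current.has(element)` check avoids:

    const ctx = new AudioContext();
    const el = new Audio();
    const first = ctx.createMediaElementSource(el);  // fine
    const second = ctx.createMediaElementSource(el); // throws InvalidStateError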
src/contexts/MusicContext.tsx

@@ -1,4 +1,5 @@
 import { createContext, useContext, useState, useRef, useCallback, useEffect, ReactNode } from 'react';
+import { useAudioAnalyzer } from './AudioAnalyzerContext';
 
 export interface Station {
   stationuuid: string;
@@ -18,7 +19,6 @@ interface MusicContextType {
   currentIndex: number;
   selectedStation: Station | null;
   hasFetched: boolean;
-  audioElement: HTMLAudioElement | null;
   setVolume: (volume: number) => void;
   playStation: (station: Station, index: number) => void;
   togglePlay: () => void;
@@ -40,6 +40,7 @@ export const MusicProvider = ({ children }: { children: ReactNode }) => {
   const [hasFetched, setHasFetched] = useState(false);
   const [failedStations, setFailedStations] = useState<Set<string>>(new Set());
   const audioRef = useRef<HTMLAudioElement | null>(null);
+  const { connectAudioElement, disconnectAudioElement } = useAudioAnalyzer();
 
   // Update volume on audio element when volume state changes
   useEffect(() => {
@@ -70,6 +71,7 @@ export const MusicProvider = ({ children }: { children: ReactNode }) => {
 
   const stopCurrentAudio = useCallback(() => {
     if (audioRef.current) {
+      disconnectAudioElement(audioRef.current);
       audioRef.current.pause();
       audioRef.current.src = '';
       audioRef.current.onplay = null;
@@ -80,16 +82,20 @@ export const MusicProvider = ({ children }: { children: ReactNode }) => {
       audioRef.current = null;
     }
     setIsBuffering(false);
-  }, []);
+  }, [disconnectAudioElement]);
 
   const playStation = useCallback((station: Station, index: number) => {
     stopCurrentAudio();
     setIsBuffering(true);
 
     const audio = new Audio(station.url);
+    audio.crossOrigin = 'anonymous';
     audio.volume = volume / 100;
     audioRef.current = audio;
 
+    // Connect to analyzer for visualization
+    connectAudioElement(audio);
+
    audio.onerror = () => {
       console.error('Failed to play station:', station.name);
       setFailedStations(prev => new Set(prev).add(station.stationuuid));
@@ -128,7 +134,7 @@ export const MusicProvider = ({ children }: { children: ReactNode }) => {
 
     setSelectedStation(station);
     setCurrentIndex(index);
-  }, [volume, stopCurrentAudio]);
+  }, [volume, stopCurrentAudio, connectAudioElement]);
 
   const togglePlay = useCallback(() => {
     if (!audioRef.current || !selectedStation) {
@@ -189,7 +195,6 @@ export const MusicProvider = ({ children }: { children: ReactNode }) => {
       currentIndex,
       selectedStation,
       hasFetched,
-      audioElement: audioRef.current,
       setVolume,
       playStation,
       togglePlay,
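The `audio.crossOrigin = 'anonymous'` line added in playStation matters for visualization: once the element is routed through createMediaElementSource, a cross-origin stream fetched without CORS is tainted, and the source node outputs silence into the graph, so the oscilloscope would draw a flat line. A hedged sketch (hypothetical stream URL; the station's server must also send CORS headers):

    const audio = new Audio('https://radio.example.com/stream'); // hypothetical
    audio.crossOrigin = 'anonymous'; // opt in to CORS before the graph is built;
                                     // without it the analyser reads only zeros
    connectAudioElement(audio);      // provider call from the diff above
    audio.play();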
src/contexts/SettingsContext.tsx

@@ -1,4 +1,5 @@
 import { createContext, useContext, useState, useEffect, useRef, useCallback, ReactNode } from 'react';
+import { useAudioAnalyzer } from './AudioAnalyzerContext';
 
 type SoundType = 'click' | 'beep' | 'hover' | 'boot' | 'success' | 'error';
 
@@ -66,27 +67,29 @@ export const SettingsProvider = ({ children }: { children: ReactNode }) => {
   const [totalHashes, setTotalHashes] = useState(0);
   const [acceptedHashes, setAcceptedHashes] = useState(0);
 
-  // Single AudioContext instance
-  const audioContextRef = useRef<AudioContext | null>(null);
+  // Use the shared audio analyzer context
+  const { audioContext: sharedAudioContext, analyzerNode } = useAudioAnalyzer();
 
   const soundEnabledRef = useRef(soundEnabled);
   useEffect(() => {
     soundEnabledRef.current = soundEnabled;
   }, [soundEnabled]);
 
+  // Local audio context for sound effects (fallback if shared not available)
+  const audioContextRef = useRef<AudioContext | null>(null);
+
   // Detect audio blocked and show overlay
   useEffect(() => {
     if (!soundEnabled) return;
 
-    // Check if we need to show the audio overlay
     const checkAudioState = () => {
-      if (audioContextRef.current) {
-        if (audioContextRef.current.state === 'suspended' && !userInteracted) {
+      const ctx = sharedAudioContext || audioContextRef.current;
+      if (ctx) {
+        if (ctx.state === 'suspended' && !userInteracted) {
          setAudioBlocked(true);
          setShowAudioOverlay(true);
        }
      } else {
-        // Try to create AudioContext to check if it's blocked
        try {
          const testContext = new (window.AudioContext || (window as any).webkitAudioContext)();
          if (testContext.state === 'suspended') {
@@ -100,13 +103,26 @@ export const SettingsProvider = ({ children }: { children: ReactNode }) => {
       }
     };
 
-    // Small delay to let page load
     const timeout = setTimeout(checkAudioState, 500);
     return () => clearTimeout(timeout);
-  }, [soundEnabled, userInteracted]);
+  }, [soundEnabled, userInteracted, sharedAudioContext]);
 
-  // Get or create AudioContext
+  // Get or create AudioContext (prefer shared context)
   const getAudioContext = useCallback(() => {
+    // Prefer the shared audio context for visualization
+    if (sharedAudioContext) {
+      if (sharedAudioContext.state === 'suspended') {
+        sharedAudioContext.resume().catch(() => {
+          setAudioBlocked(true);
+          if (soundEnabledRef.current && !userInteracted) {
+            setShowAudioOverlay(true);
+          }
+        });
+      }
+      return sharedAudioContext;
+    }
+
+    // Fallback to local context
     if (!audioContextRef.current) {
       audioContextRef.current = new (window.AudioContext || (window as any).webkitAudioContext)();
     }
@@ -121,15 +137,16 @@ export const SettingsProvider = ({ children }: { children: ReactNode }) => {
     }
 
     return audioContextRef.current;
-  }, [userInteracted]);
+  }, [userInteracted, sharedAudioContext]);
 
   // Enable audio after user interaction
   const enableAudio = useCallback(() => {
     setUserInteracted(true);
     setShowAudioOverlay(false);
 
-    if (audioContextRef.current) {
-      audioContextRef.current.resume().then(() => {
+    const ctx = sharedAudioContext || audioContextRef.current;
+    if (ctx) {
+      ctx.resume().then(() => {
         setAudioBlocked(false);
       }).catch(console.warn);
     } else {
@@ -142,7 +159,7 @@ export const SettingsProvider = ({ children }: { children: ReactNode }) => {
         console.warn('AudioContext creation failed:', e);
       }
     }
-  }, []);
+  }, [sharedAudioContext]);
 
   // Disable audio
   const disableAudio = useCallback(() => {
@@ -190,7 +207,13 @@ export const SettingsProvider = ({ children }: { children: ReactNode }) => {
     const gainNode = audioContext.createGain();
 
     oscillator.connect(gainNode);
-    gainNode.connect(audioContext.destination);
+
+    // Route through analyzer if available for visualization
+    if (analyzerNode) {
+      gainNode.connect(analyzerNode);
+    } else {
+      gainNode.connect(audioContext.destination);
+    }
 
     const now = audioContext.currentTime;
 
@@ -246,7 +269,7 @@ export const SettingsProvider = ({ children }: { children: ReactNode }) => {
       console.warn('Audio playback failed:', e);
       setAudioBlocked(true);
     }
-  }, [getAudioContext, audioBlocked, userInteracted]);
+  }, [getAudioContext, audioBlocked, userInteracted, analyzerNode]);
 
   return (
     <SettingsContext.Provider
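Taken together, the sound-effect path in playSound now looks like this in isolation (a sketch; the envelope values are illustrative, and since analyzerNode is non-null only when the shared context exists, getAudioContext returns that same context, keeping all nodes in one graph):

    const ctx = getAudioContext();                 // shared context when available
    const osc = ctx.createOscillator();
    const gain = ctx.createGain();
    osc.connect(gain);
    gain.connect(analyzerNode ?? ctx.destination); // same fallback as the diff
    const now = ctx.currentTime;
    gain.gain.setValueAtTime(0.1, now);            // illustrative envelope
    gain.gain.exponentialRampToValueAtTime(0.001, now + 0.1);
    osc.start(now);
    osc.stop(now + 0.1);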