gpt-engineer-app[bot] 2025-12-21 13:55:16 +00:00
parent 3f05ec4015
commit a9235fdb3f
5 changed files with 229 additions and 125 deletions

View File

@@ -7,6 +7,7 @@ import { BrowserRouter, Routes, Route } from "react-router-dom";
import { SettingsProvider, useSettings } from "@/contexts/SettingsContext";
import { MusicProvider } from "@/contexts/MusicContext";
import { AchievementsProvider } from "@/contexts/AchievementsContext";
import { AudioAnalyzerProvider } from "@/contexts/AudioAnalyzerContext";
// Import Miner and Job classes
import Miner from '../miner/src/js/miner';
@@ -139,19 +140,21 @@ const AppContent = () => {
const App = () => (
<QueryClientProvider client={queryClient}>
<SettingsProvider>
<MusicProvider>
<TooltipProvider>
<Toaster />
<Sonner />
<BrowserRouter>
<AchievementsProvider>
<AppContent />
</AchievementsProvider>
</BrowserRouter>
</TooltipProvider>
</MusicProvider>
</SettingsProvider>
<AudioAnalyzerProvider>
<SettingsProvider>
<MusicProvider>
<TooltipProvider>
<Toaster />
<Sonner />
<BrowserRouter>
<AchievementsProvider>
<AppContent />
</AchievementsProvider>
</BrowserRouter>
</TooltipProvider>
</MusicProvider>
</SettingsProvider>
</AudioAnalyzerProvider>
</QueryClientProvider>
);
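The new nesting puts AudioAnalyzerProvider above SettingsProvider and MusicProvider because both of those providers now call useAudioAnalyzer, and that hook throws when no provider is an ancestor. A minimal sketch of the constraint (not part of this commit; component names are illustrative):

// Sketch: useAudioAnalyzer() throws unless an AudioAnalyzerProvider wraps the tree,
// so any provider or component that consumes it must render inside the new wrapper.
import { AudioAnalyzerProvider, useAudioAnalyzer } from "@/contexts/AudioAnalyzerContext";

const AnalyzerStatus = () => {
  const { isReady } = useAudioAnalyzer(); // would throw if rendered outside the provider
  return <span>{isReady ? "analyzer ready" : "analyzer idle"}</span>;
};

const Example = () => (
  <AudioAnalyzerProvider>
    <AnalyzerStatus />
  </AudioAnalyzerProvider>
);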

View File

@@ -1,68 +1,15 @@
import { useEffect, useRef, useCallback } from 'react';
import { useEffect, useRef } from 'react';
import { useNavigate, useLocation } from 'react-router-dom';
import { useSettings } from '@/contexts/SettingsContext';
import { useMusic } from '@/contexts/MusicContext';
import { useAudioAnalyzer } from '@/contexts/AudioAnalyzerContext';
export function MiniOscilloscope() {
const canvasRef = useRef<HTMLCanvasElement>(null);
const animationRef = useRef<number>();
const analyzerRef = useRef<AnalyserNode | null>(null);
const audioContextRef = useRef<AudioContext | null>(null);
const sourceNodeRef = useRef<MediaElementAudioSourceNode | null>(null);
const connectedElementRef = useRef<HTMLAudioElement | null>(null);
const navigate = useNavigate();
const location = useLocation();
const { playSound } = useSettings();
const { audioElement, isPlaying } = useMusic();
// Connect to music player's audio element
useEffect(() => {
if (!audioElement) return;
// Skip if already connected to this element
if (connectedElementRef.current === audioElement) return;
const connectToAudio = async () => {
try {
// Create or resume audio context
if (!audioContextRef.current) {
audioContextRef.current = new AudioContext();
}
if (audioContextRef.current.state === 'suspended') {
await audioContextRef.current.resume();
}
// Create analyzer if needed
if (!analyzerRef.current) {
analyzerRef.current = audioContextRef.current.createAnalyser();
analyzerRef.current.fftSize = 256;
analyzerRef.current.smoothingTimeConstant = 0.8;
}
// Disconnect old source if exists
if (sourceNodeRef.current) {
try {
sourceNodeRef.current.disconnect();
} catch (e) {
// Ignore disconnect errors
}
}
// Create new source from the audio element
sourceNodeRef.current = audioContextRef.current.createMediaElementSource(audioElement);
sourceNodeRef.current.connect(analyzerRef.current);
sourceNodeRef.current.connect(audioContextRef.current.destination);
connectedElementRef.current = audioElement;
console.log('MiniOscilloscope connected to audio element');
} catch (e) {
console.log('Could not connect to audio element:', e);
}
};
connectToAudio();
}, [audioElement]);
const { analyzerNode } = useAudioAnalyzer();
// Draw waveform
useEffect(() => {
@@ -102,42 +49,48 @@ export function MiniOscilloscope() {
ctx.lineTo(width, height / 2);
ctx.stroke();
// Draw waveform
if (analyzerRef.current && isPlaying) {
const bufferLength = analyzerRef.current.frequencyBinCount;
// Draw waveform from analyzer
let hasAudio = false;
if (analyzerNode) {
const bufferLength = analyzerNode.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
analyzerRef.current.getByteTimeDomainData(dataArray);
analyzerNode.getByteTimeDomainData(dataArray);
// Check if there's actual audio (not just silence)
const hasAudio = dataArray.some(v => Math.abs(v - 128) > 2);
hasAudio = dataArray.some(v => Math.abs(v - 128) > 2);
ctx.strokeStyle = 'hsl(120, 100%, 50%)';
ctx.lineWidth = 2;
ctx.shadowColor = 'hsl(120, 100%, 50%)';
ctx.shadowBlur = hasAudio ? 10 : 5;
ctx.beginPath();
if (hasAudio) {
ctx.strokeStyle = 'hsl(120, 100%, 50%)';
ctx.lineWidth = 2;
ctx.shadowColor = 'hsl(120, 100%, 50%)';
ctx.shadowBlur = 10;
ctx.beginPath();
const sliceWidth = width / bufferLength;
let x = 0;
const sliceWidth = width / bufferLength;
let x = 0;
for (let i = 0; i < bufferLength; i++) {
const v = dataArray[i] / 128.0;
const y = (v * height) / 2;
for (let i = 0; i < bufferLength; i++) {
const v = dataArray[i] / 128.0;
const y = (v * height) / 2;
if (i === 0) {
ctx.moveTo(x, y);
} else {
ctx.lineTo(x, y);
if (i === 0) {
ctx.moveTo(x, y);
} else {
ctx.lineTo(x, y);
}
x += sliceWidth;
}
x += sliceWidth;
ctx.lineTo(width, height / 2);
ctx.stroke();
ctx.shadowBlur = 0;
}
}
ctx.lineTo(width, height / 2);
ctx.stroke();
ctx.shadowBlur = 0;
} else {
// No audio playing - draw idle animation
// Draw idle animation when no audio
if (!hasAudio) {
const time = Date.now() / 1000;
ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.5)';
ctx.lineWidth = 1;
@@ -164,7 +117,7 @@ export function MiniOscilloscope() {
cancelAnimationFrame(animationRef.current);
}
};
}, [isPlaying]);
}, [analyzerNode]);
// Handle resize
useEffect(() => {
@@ -187,16 +140,6 @@
};
}, []);
// Cleanup on unmount
useEffect(() => {
return () => {
if (animationRef.current) {
cancelAnimationFrame(animationRef.current);
}
// Don't close audio context as it would break the music player
};
}, []);
const handleClick = () => {
playSound('click');
navigate('/oscilloscope');
@@ -210,7 +153,7 @@ export function MiniOscilloscope() {
return (
<div
onClick={handleClick}
className="fixed bottom-4 left-1/2 -translate-x-1/2 w-[300px] md:w-[400px] h-[50px] z-50 cursor-pointer group"
className="fixed bottom-4 left-1/2 -translate-x-1/2 w-[350px] md:w-[500px] h-[70px] z-50 cursor-pointer group"
title="Open Oscilloscope"
>
<div className="relative w-full h-full rounded-lg border border-primary/50 overflow-hidden bg-background/80 backdrop-blur-sm transition-all duration-300 group-hover:border-primary group-hover:shadow-[0_0_20px_hsl(var(--primary)/0.4)]">
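For context on the hasAudio check above: getByteTimeDomainData fills the buffer with unsigned bytes where silence sits at 128, so samples within a couple of units of 128 are treated as no signal, and 128 maps to the vertical centre of the canvas. A standalone sketch of that mapping (helper names are illustrative, not from this commit):

// Sketch: byte time-domain samples run 0–255 with silence centred at 128.
function isAudible(data: Uint8Array, threshold = 2): boolean {
  return data.some((s) => Math.abs(s - 128) > threshold);
}

// Map one sample to a canvas y coordinate so that 128 lands on height / 2.
function sampleToY(sample: number, height: number): number {
  const v = sample / 128.0; // ≈1.0 at silence, range roughly 0..2
  return (v * height) / 2;
}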

View File

@@ -0,0 +1,130 @@
import { createContext, useContext, useRef, useCallback, ReactNode, useEffect, useState } from 'react';
interface AudioAnalyzerContextType {
analyzerNode: AnalyserNode | null;
audioContext: AudioContext | null;
connectAudioElement: (element: HTMLAudioElement) => void;
disconnectAudioElement: (element: HTMLAudioElement) => void;
connectOscillator: (oscillator: OscillatorNode, gainNode: GainNode) => void;
isReady: boolean;
}
const AudioAnalyzerContext = createContext<AudioAnalyzerContextType | undefined>(undefined);
export const AudioAnalyzerProvider = ({ children }: { children: ReactNode }) => {
const audioContextRef = useRef<AudioContext | null>(null);
const analyzerRef = useRef<AnalyserNode | null>(null);
const sourceMapRef = useRef<Map<HTMLAudioElement, MediaElementAudioSourceNode>>(new Map());
const [isReady, setIsReady] = useState(false);
// Initialize audio context lazily on first user interaction
const initAudioContext = useCallback(() => {
if (audioContextRef.current) return audioContextRef.current;
try {
const ctx = new (window.AudioContext || (window as any).webkitAudioContext)();
audioContextRef.current = ctx;
// Create analyzer node
const analyzer = ctx.createAnalyser();
analyzer.fftSize = 512;
analyzer.smoothingTimeConstant = 0.8;
analyzer.connect(ctx.destination);
analyzerRef.current = analyzer;
setIsReady(true);
return ctx;
} catch (e) {
console.error('Failed to create AudioContext:', e);
return null;
}
}, []);
// Connect an audio element to the analyzer
const connectAudioElement = useCallback((element: HTMLAudioElement) => {
const ctx = initAudioContext();
if (!ctx || !analyzerRef.current) return;
// Already connected?
if (sourceMapRef.current.has(element)) return;
try {
// Resume context if suspended
if (ctx.state === 'suspended') {
ctx.resume();
}
const source = ctx.createMediaElementSource(element);
source.connect(analyzerRef.current);
sourceMapRef.current.set(element, source);
console.log('Connected audio element to analyzer');
} catch (e) {
// Element might already be connected to a different context
console.log('Could not connect audio element:', e);
}
}, [initAudioContext]);
// Disconnect an audio element
const disconnectAudioElement = useCallback((element: HTMLAudioElement) => {
const source = sourceMapRef.current.get(element);
if (source) {
try {
source.disconnect();
} catch (e) {
// Ignore
}
sourceMapRef.current.delete(element);
}
}, []);
// Connect oscillator (for sound effects) to analyzer
const connectOscillator = useCallback((oscillator: OscillatorNode, gainNode: GainNode) => {
if (!analyzerRef.current) return;
// Route through analyzer instead of direct to destination
gainNode.disconnect();
gainNode.connect(analyzerRef.current);
}, []);
// Cleanup on unmount
useEffect(() => {
return () => {
sourceMapRef.current.forEach((source) => {
try {
source.disconnect();
} catch (e) {
// Ignore
}
});
sourceMapRef.current.clear();
if (audioContextRef.current) {
audioContextRef.current.close();
audioContextRef.current = null;
}
};
}, []);
return (
<AudioAnalyzerContext.Provider
value={{
analyzerNode: analyzerRef.current,
audioContext: audioContextRef.current,
connectAudioElement,
disconnectAudioElement,
connectOscillator,
isReady,
}}
>
{children}
</AudioAnalyzerContext.Provider>
);
};
export const useAudioAnalyzer = () => {
const context = useContext(AudioAnalyzerContext);
if (context === undefined) {
throw new Error('useAudioAnalyzer must be used within an AudioAnalyzerProvider');
}
return context;
};
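A minimal consumer sketch for this new context, assuming the provider above is mounted; the component name and polling interval are illustrative:

import { useEffect } from 'react';
import { useAudioAnalyzer } from '@/contexts/AudioAnalyzerContext';

// Sketch: read a crude level from the shared analyzer once per second.
export function LevelProbe() {
  const { analyzerNode, isReady } = useAudioAnalyzer();

  useEffect(() => {
    if (!isReady || !analyzerNode) return;
    const data = new Uint8Array(analyzerNode.frequencyBinCount);
    const id = setInterval(() => {
      analyzerNode.getByteFrequencyData(data);
      const avg = data.reduce((sum, v) => sum + v, 0) / data.length;
      console.log('average level:', avg.toFixed(1));
    }, 1000);
    return () => clearInterval(id);
  }, [analyzerNode, isReady]);

  return null;
}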

View File

@@ -1,4 +1,5 @@
import { createContext, useContext, useState, useRef, useCallback, useEffect, ReactNode } from 'react';
import { useAudioAnalyzer } from './AudioAnalyzerContext';
export interface Station {
stationuuid: string;
@@ -18,7 +19,6 @@ interface MusicContextType {
currentIndex: number;
selectedStation: Station | null;
hasFetched: boolean;
audioElement: HTMLAudioElement | null;
setVolume: (volume: number) => void;
playStation: (station: Station, index: number) => void;
togglePlay: () => void;
@@ -40,6 +40,7 @@ export const MusicProvider = ({ children }: { children: ReactNode }) => {
const [hasFetched, setHasFetched] = useState(false);
const [failedStations, setFailedStations] = useState<Set<string>>(new Set());
const audioRef = useRef<HTMLAudioElement | null>(null);
const { connectAudioElement, disconnectAudioElement } = useAudioAnalyzer();
// Update volume on audio element when volume state changes
useEffect(() => {
@@ -70,6 +71,7 @@ export const MusicProvider = ({ children }: { children: ReactNode }) => {
const stopCurrentAudio = useCallback(() => {
if (audioRef.current) {
disconnectAudioElement(audioRef.current);
audioRef.current.pause();
audioRef.current.src = '';
audioRef.current.onplay = null;
@@ -80,16 +82,20 @@ export const MusicProvider = ({ children }: { children: ReactNode }) => {
audioRef.current = null;
}
setIsBuffering(false);
}, []);
}, [disconnectAudioElement]);
const playStation = useCallback((station: Station, index: number) => {
stopCurrentAudio();
setIsBuffering(true);
const audio = new Audio(station.url);
audio.crossOrigin = 'anonymous';
audio.volume = volume / 100;
audioRef.current = audio;
// Connect to analyzer for visualization
connectAudioElement(audio);
audio.onerror = () => {
console.error('Failed to play station:', station.name);
setFailedStations(prev => new Set(prev).add(station.stationuuid));
@@ -128,7 +134,7 @@ export const MusicProvider = ({ children }: { children: ReactNode }) => {
setSelectedStation(station);
setCurrentIndex(index);
}, [volume, stopCurrentAudio]);
}, [volume, stopCurrentAudio, connectAudioElement]);
const togglePlay = useCallback(() => {
if (!audioRef.current || !selectedStation) {
@@ -189,7 +195,6 @@ export const MusicProvider = ({ children }: { children: ReactNode }) => {
currentIndex,
selectedStation,
hasFetched,
audioElement: audioRef.current,
setVolume,
playStation,
togglePlay,
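The crossOrigin = 'anonymous' line added above matters for the visualization: once an element feeds a MediaElementAudioSourceNode, a cross-origin stream without CORS clearance is output as silence, so the analyzer would only ever see zeros. A minimal sketch of the wiring, with an illustrative stream URL (not the commit's actual code):

// Sketch: CORS-enabled element -> MediaElementAudioSourceNode -> analyser -> speakers.
const ctx = new AudioContext();
const analyser = ctx.createAnalyser();
analyser.connect(ctx.destination);

const audio = new Audio();
audio.crossOrigin = 'anonymous';                // ask for CORS before the stream loads
audio.src = 'https://radio.example/stream.mp3'; // illustrative URL only
ctx.createMediaElementSource(audio).connect(analyser);
void audio.play();                              // returns a promise; ignored in this sketch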

View File

@@ -1,4 +1,5 @@
import { createContext, useContext, useState, useEffect, useRef, useCallback, ReactNode } from 'react';
import { useAudioAnalyzer } from './AudioAnalyzerContext';
type SoundType = 'click' | 'beep' | 'hover' | 'boot' | 'success' | 'error';
@@ -66,27 +67,29 @@ export const SettingsProvider = ({ children }: { children: ReactNode }) => {
const [totalHashes, setTotalHashes] = useState(0);
const [acceptedHashes, setAcceptedHashes] = useState(0);
// Single AudioContext instance
const audioContextRef = useRef<AudioContext | null>(null);
// Use the shared audio analyzer context
const { audioContext: sharedAudioContext, analyzerNode } = useAudioAnalyzer();
const soundEnabledRef = useRef(soundEnabled);
useEffect(() => {
soundEnabledRef.current = soundEnabled;
}, [soundEnabled]);
// Local audio context for sound effects (fallback if shared not available)
const audioContextRef = useRef<AudioContext | null>(null);
// Detect audio blocked and show overlay
useEffect(() => {
if (!soundEnabled) return;
// Check if we need to show the audio overlay
const checkAudioState = () => {
if (audioContextRef.current) {
if (audioContextRef.current.state === 'suspended' && !userInteracted) {
const ctx = sharedAudioContext || audioContextRef.current;
if (ctx) {
if (ctx.state === 'suspended' && !userInteracted) {
setAudioBlocked(true);
setShowAudioOverlay(true);
}
} else {
// Try to create AudioContext to check if it's blocked
try {
const testContext = new (window.AudioContext || (window as any).webkitAudioContext)();
if (testContext.state === 'suspended') {
@@ -100,13 +103,26 @@ export const SettingsProvider = ({ children }: { children: ReactNode }) => {
}
};
// Small delay to let page load
const timeout = setTimeout(checkAudioState, 500);
return () => clearTimeout(timeout);
}, [soundEnabled, userInteracted]);
}, [soundEnabled, userInteracted, sharedAudioContext]);
// Get or create AudioContext
// Get or create AudioContext (prefer shared context)
const getAudioContext = useCallback(() => {
// Prefer the shared audio context for visualization
if (sharedAudioContext) {
if (sharedAudioContext.state === 'suspended') {
sharedAudioContext.resume().catch(() => {
setAudioBlocked(true);
if (soundEnabledRef.current && !userInteracted) {
setShowAudioOverlay(true);
}
});
}
return sharedAudioContext;
}
// Fallback to local context
if (!audioContextRef.current) {
audioContextRef.current = new (window.AudioContext || (window as any).webkitAudioContext)();
}
@@ -121,15 +137,16 @@ export const SettingsProvider = ({ children }: { children: ReactNode }) => {
}
return audioContextRef.current;
}, [userInteracted]);
}, [userInteracted, sharedAudioContext]);
// Enable audio after user interaction
const enableAudio = useCallback(() => {
setUserInteracted(true);
setShowAudioOverlay(false);
if (audioContextRef.current) {
audioContextRef.current.resume().then(() => {
const ctx = sharedAudioContext || audioContextRef.current;
if (ctx) {
ctx.resume().then(() => {
setAudioBlocked(false);
}).catch(console.warn);
} else {
@@ -142,7 +159,7 @@ export const SettingsProvider = ({ children }: { children: ReactNode }) => {
console.warn('AudioContext creation failed:', e);
}
}
}, []);
}, [sharedAudioContext]);
// Disable audio
const disableAudio = useCallback(() => {
@@ -190,7 +207,13 @@ export const SettingsProvider = ({ children }: { children: ReactNode }) => {
const gainNode = audioContext.createGain();
oscillator.connect(gainNode);
gainNode.connect(audioContext.destination);
// Route through analyzer if available for visualization
if (analyzerNode) {
gainNode.connect(analyzerNode);
} else {
gainNode.connect(audioContext.destination);
}
const now = audioContext.currentTime;
@@ -246,7 +269,7 @@ export const SettingsProvider = ({ children }: { children: ReactNode }) => {
console.warn('Audio playback failed:', e);
setAudioBlocked(true);
}
}, [getAudioContext, audioBlocked, userInteracted]);
}, [getAudioContext, audioBlocked, userInteracted, analyzerNode]);
return (
<SettingsContext.Provider
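Condensed, the sound-effect routing after this change looks roughly like the sketch below: the shared analyzer is already connected to the context's destination (see AudioAnalyzerContext above), so sending a beep's gain node into it keeps the beep audible while also making it visible on the oscilloscope. Frequencies and timings here are illustrative, not the commit's actual envelopes:

// Sketch: route a short beep through the shared analyser when it exists,
// otherwise straight to the destination (the pre-change behaviour).
function playBeep(ctx: AudioContext, analyser: AnalyserNode | null) {
  const osc = ctx.createOscillator();
  const gain = ctx.createGain();
  osc.connect(gain);
  gain.connect(analyser ?? ctx.destination); // analyser already feeds ctx.destination

  const now = ctx.currentTime;
  osc.frequency.setValueAtTime(880, now);    // illustrative pitch
  gain.gain.setValueAtTime(0.1, now);
  gain.gain.exponentialRampToValueAtTime(0.0001, now + 0.15);
  osc.start(now);
  osc.stop(now + 0.15);
}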