diff --git a/src/components/MusicPlayer.tsx b/src/components/MusicPlayer.tsx
index db3acd0..7e3db5d 100644
--- a/src/components/MusicPlayer.tsx
+++ b/src/components/MusicPlayer.tsx
@@ -1,4 +1,4 @@
-import { useEffect } from 'react';
+import { useEffect, useRef } from 'react';
 import { Play, Pause, Volume2, Music2, SkipBack, SkipForward, Loader2 } from 'lucide-react';
 import { useSettings } from '@/contexts/SettingsContext';
 import { useMusic } from '@/contexts/MusicContext';
@@ -8,6 +8,7 @@ import { useState } from 'react';
 
 const MusicPlayer = () => {
   const [isExpanded, setIsExpanded] = useState(false);
+  const containerRef = useRef<HTMLDivElement>(null);
   const { playSound, soundEnabled } = useSettings();
   const {
     isPlaying,
@@ -29,6 +30,25 @@ const MusicPlayer = () => {
     }
   }, [isExpanded, fetchStations]);
 
+  // Close on click outside (for mobile)
+  useEffect(() => {
+    const handleClickOutside = (event: MouseEvent | TouchEvent) => {
+      if (containerRef.current && !containerRef.current.contains(event.target as Node)) {
+        setIsExpanded(false);
+      }
+    };
+
+    if (isExpanded) {
+      document.addEventListener('mousedown', handleClickOutside);
+      document.addEventListener('touchstart', handleClickOutside);
+    }
+
+    return () => {
+      document.removeEventListener('mousedown', handleClickOutside);
+      document.removeEventListener('touchstart', handleClickOutside);
+    };
+  }, [isExpanded]);
+
   const handleButtonClick = (action: () => void) => {
     if (soundEnabled) {
       playSound('click');
@@ -36,8 +56,13 @@ const MusicPlayer = () => {
     action();
   };
 
+  const handleToggleExpand = () => {
+    setIsExpanded(!isExpanded);
+  };
+
   return (
     <div
+      ref={containerRef}
       …
       onMouseEnter={() => setIsExpanded(true)}
       onMouseLeave={() => setIsExpanded(false)}
@@ -156,6 +181,11 @@ const MusicPlayer = () => {
           animate={{ opacity: 1 }}
           exit={{ opacity: 0 }}
           className="p-3 bg-background/90 border border-primary box-glow cursor-pointer"
+          onClick={handleToggleExpand}
+          onTouchEnd={(e) => {
+            e.preventDefault();
+            handleToggleExpand();
+          }}
         >
           …
           {isPlaying && (
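Review note: the click-outside effect added above is self-contained and could be extracted for reuse. A minimal sketch of the same pattern as a standalone hook (the useClickOutside name and signature are hypothetical, not part of this patch):

import { useEffect, type RefObject } from 'react';

// Runs onOutside when a mousedown/touchstart begins outside the referenced
// element, mirroring the effect added in MusicPlayer.tsx.
export function useClickOutside(
  ref: RefObject<HTMLElement | null>,
  onOutside: () => void,
  enabled = true,
) {
  useEffect(() => {
    if (!enabled) return;
    const handle = (event: MouseEvent | TouchEvent) => {
      if (ref.current && !ref.current.contains(event.target as Node)) {
        onOutside();
      }
    };
    document.addEventListener('mousedown', handle);
    document.addEventListener('touchstart', handle);
    return () => {
      document.removeEventListener('mousedown', handle);
      document.removeEventListener('touchstart', handle);
    };
  }, [ref, onOutside, enabled]);
}

MusicPlayer could then call useClickOutside(containerRef, () => setIsExpanded(false), isExpanded) instead of inlining the effect. Separately, the onTouchEnd handler calls e.preventDefault() so a tap does not also fire the synthetic click event that follows touch input, which would toggle the player twice.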

diff --git a/src/components/Oscilloscope.tsx b/src/components/Oscilloscope.tsx
index 3783fc5..2833cc2 100644
--- a/src/components/Oscilloscope.tsx
+++ b/src/components/Oscilloscope.tsx
@@ -258,49 +258,6 @@ export function Oscilloscope() {
 
-      {/* Audio Input Row */}
-      …
-      {/* Microphone Toggle */}
-      …
-      {isMicActive && (
-        …
-        Real-time input active
-        …
-      )}
-      …
 
       {/* Main Content: Display + Controls Side by Side */}
       …
       {/* Oscilloscope Display */}
       …
 
@@ -386,6 +343,49 @@ export function Oscilloscope() {
       …
 
+      {/* Audio Input Row - Now at bottom */}
+      …
+      {/* Microphone Toggle */}
+      …
+      {isMicActive && (
+        …
+        Real-time input active
+        …
+      )}
+      …
 
       {/* Microphone Calibration */}
       {showMicCalibration && isMicActive && (
diff --git a/src/hooks/useOfflineVideoExport.ts b/src/hooks/useOfflineVideoExport.ts
index 84da3fb..f8f3254 100644
--- a/src/hooks/useOfflineVideoExport.ts
+++ b/src/hooks/useOfflineVideoExport.ts
@@ -59,7 +59,45 @@ export const useOfflineVideoExport = () => {
       throw new Error('Canvas not supported');
     }
 
-    setState(prev => ({ ...prev, stage: 'rendering', progress: 10 }));
+    setState(prev => ({ ...prev, stage: 'rendering', progress: 5 }));
+
+    // Load intro video
+    console.log('📹 Loading intro video...');
+    const introVideo = document.createElement('video');
+    introVideo.muted = true;
+    introVideo.playsInline = true;
+
+    // Try webm first, fallback to mp4
+    let introLoaded = false;
+    let introDuration = 0;
+
+    try {
+      await new Promise<void>((resolve, reject) => {
+        introVideo.onloadedmetadata = () => {
+          introDuration = introVideo.duration;
+          console.log(`✅ Intro video loaded: ${introDuration.toFixed(2)}s`);
+          introLoaded = true;
+          resolve();
+        };
+        introVideo.onerror = () => {
+          console.warn('⚠️ Could not load intro.webm, trying intro.mp4');
+          introVideo.src = '/intro.mp4';
+        };
+        introVideo.src = '/intro.webm';
+
+        // Timeout after 5 seconds
+        setTimeout(() => {
+          if (!introLoaded) {
+            console.warn('⚠️ Intro video loading timed out');
+            resolve();
+          }
+        }, 5000);
+      });
+    } catch (introError) {
+      console.warn('⚠️ Could not load intro video:', introError);
+    }
+
+    setState(prev => ({ ...prev, progress: 10 }));
 
     // Get supported codecs
     const codecs = [
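Review note: the webm-to-mp4 fallback above works because reassigning video.src inside onerror restarts loading, and the 5-second timeout guarantees the awaited promise settles even if both sources fail (introLoaded simply stays false). The same idea as a reusable helper, sketched under the assumption that trying each source in order is acceptable (loadVideoWithFallback is a hypothetical name, not part of this patch):

// Resolves with a video element whose metadata loaded from the first working
// source, or null if every source errors or the timeout elapses first.
function loadVideoWithFallback(
  sources: string[],
  timeoutMs = 5000,
): Promise<HTMLVideoElement | null> {
  return new Promise(resolve => {
    const video = document.createElement('video');
    video.muted = true;
    video.playsInline = true;

    let index = 0;
    const timer = setTimeout(() => resolve(null), timeoutMs);

    video.onloadedmetadata = () => {
      clearTimeout(timer);
      resolve(video);
    };
    video.onerror = () => {
      index += 1;
      if (index < sources.length) {
        video.src = sources[index]; // try the next candidate
      } else {
        clearTimeout(timer);
        resolve(null);
      }
    };

    video.src = sources[index];
  });
}

// Usage: const intro = await loadVideoWithFallback(['/intro.webm', '/intro.mp4']);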
@@ -176,17 +214,70 @@ export const useOfflineVideoExport = () => {
     console.log('✅ MediaRecorder created with audio and video');
     recorder.start(1000); // 1 second chunks
 
-    // Start audio playback synchronized with recording
+    // Calculate total frames including intro
+    const introFrames = introLoaded ? Math.ceil(introDuration * fps) : 0;
+    const mainFrames = Math.ceil(duration * fps);
+    const fadeFrames = Math.ceil(fps * 0.5); // 0.5 second fade
+    const totalFrames = introFrames + mainFrames;
+    const samplesPerFrame = Math.min(qualityConfig.samplesPerFrame, Math.floor(totalSamples / mainFrames));
+
+    console.log(`🎬 Total frames: ${totalFrames} (intro: ${introFrames}, main: ${mainFrames}, fade: ${fadeFrames})`);
+
+    // Render intro frames first (if loaded)
+    if (introLoaded && introFrames > 0) {
+      console.log('📹 Rendering intro frames...');
+      introVideo.currentTime = 0;
+      await introVideo.play();
+
+      for (let frameIndex = 0; frameIndex < introFrames; frameIndex++) {
+        if (cancelledRef.current) {
+          introVideo.pause();
+          recorder.stop();
+          setState({ isExporting: false, progress: 0, error: 'Cancelled', stage: 'idle', fps: 0 });
+          return null;
+        }
+
+        // Seek to correct time
+        introVideo.currentTime = frameIndex / fps;
+
+        // Draw intro video frame scaled to canvas
+        ctx.fillStyle = '#0a0f0a';
+        ctx.fillRect(0, 0, width, height);
+
+        // Calculate aspect-ratio-correct scaling
+        const videoAspect = introVideo.videoWidth / introVideo.videoHeight;
+        const canvasAspect = width / height;
+        let drawWidth = width;
+        let drawHeight = height;
+        let drawX = 0;
+        let drawY = 0;
+
+        if (videoAspect > canvasAspect) {
+          drawHeight = width / videoAspect;
+          drawY = (height - drawHeight) / 2;
+        } else {
+          drawWidth = height * videoAspect;
+          drawX = (width - drawWidth) / 2;
+        }
+
+        ctx.drawImage(introVideo, drawX, drawY, drawWidth, drawHeight);
+
+        const progress = 10 + Math.round((frameIndex / introFrames) * 20);
+        setState(prev => ({ ...prev, progress }));
+
+        await new Promise(resolve => setTimeout(resolve, 1000 / fps));
+      }
+
+      introVideo.pause();
+      console.log('✅ Intro frames complete');
+    }
+
+    // Start audio playback for main content
     recordingAudioSource.start(0);
     console.log('🔊 Audio playback started for recording');
 
-    // Generate animation frames for full audio duration
-    const totalFrames = Math.ceil(duration * fps);
-    const samplesPerFrame = Math.min(qualityConfig.samplesPerFrame, Math.floor(totalSamples / totalFrames));
-
-    console.log(`🎬 Quality: ${quality}, Frames: ${totalFrames}, Samples/frame: ${samplesPerFrame}, Duration: ${duration.toFixed(1)}s`);
-
-    for (let frameIndex = 0; frameIndex < totalFrames; frameIndex++) {
+    // Render main oscilloscope frames with fade-in from intro
+    for (let frameIndex = 0; frameIndex < mainFrames; frameIndex++) {
       if (cancelledRef.current) {
         try {
           recordingAudioSource.stop();
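Review note: assignments to currentTime seek asynchronously, so the drawImage call that follows can capture the previous frame; the 1000/fps setTimeout pacing mostly hides this, but the captured intro can stutter. If exact per-frame seeking matters, one option is to wait for the seeked event before drawing. A sketch, not part of this patch (seekTo is a hypothetical helper):

// Seek, then resolve only once the target frame is actually decoded.
function seekTo(video: HTMLVideoElement, time: number): Promise<void> {
  return new Promise(resolve => {
    video.addEventListener('seeked', () => resolve(), { once: true });
    video.currentTime = time;
  });
}

// Inside the intro loop, in place of the plain assignment:
//   await seekTo(introVideo, frameIndex / fps);

With per-frame seeking, the await introVideo.play() call would likely become unnecessary, since the loop would no longer depend on real-time playback.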
@@ -224,7 +315,7 @@ export const useOfflineVideoExport = () => {
       ctx.fillStyle = '#0a0f0a';
       ctx.fillRect(0, 0, width, height);
 
-      // Draw oscilloscope with mock audio data
+      // Draw oscilloscope with audio data
       try {
         drawFrame(ctx, width, height, leftData, rightData);
       } catch (drawError) {
@@ -246,17 +337,25 @@ export const useOfflineVideoExport = () => {
         ctx.stroke();
       }
 
+      // Apply fade-in effect from intro (first fadeFrames of main content)
+      if (introLoaded && frameIndex < fadeFrames) {
+        const fadeProgress = frameIndex / fadeFrames;
+        // Draw a semi-transparent black overlay that fades out
+        ctx.fillStyle = `rgba(10, 15, 10, ${1 - fadeProgress})`;
+        ctx.fillRect(0, 0, width, height);
+      }
+
      // Add frame info
       ctx.fillStyle = '#ffffff';
       ctx.font = '16px monospace';
-      ctx.fillText(`Frame ${frameIndex + 1}/${totalFrames}`, 20, 30);
+      ctx.fillText(`Frame ${introFrames + frameIndex + 1}/${totalFrames}`, 20, 30);
       ctx.fillText(`Time: ${(frameIndex / fps).toFixed(1)}s`, 20, 50);
 
-      const progress = 20 + Math.round((frameIndex / totalFrames) * 70);
+      const progress = 30 + Math.round((frameIndex / mainFrames) * 60);
       setState(prev => ({ ...prev, progress }));
 
-      if (frameIndex % Math.max(1, Math.floor(totalFrames / 10)) === 0) {
-        console.log(`📸 Frame ${frameIndex + 1}/${totalFrames} (${progress}%) - Time: ${(frameIndex / fps).toFixed(1)}s`);
+      if (frameIndex % Math.max(1, Math.floor(mainFrames / 10)) === 0) {
+        console.log(`📸 Frame ${frameIndex + 1}/${mainFrames} (${progress}%) - Time: ${(frameIndex / fps).toFixed(1)}s`);
       }
 
       // Frame timing
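Review note: the progress bar is now partitioned as 0-5% setup, 5-10% intro load, 10-30% intro frames, and 30-90% main frames, with the remainder presumably reserved for encoding and finalization steps outside these hunks. A worked check of the two mappings (the 2 s intro, 60 s main duration, and fps = 30 are assumed values, not from the patch):

const fps = 30;
const introFrames = Math.ceil(2 * fps);  // 60 intro frames
const mainFrames = Math.ceil(60 * fps);  // 1800 main frames

// Intro frames map onto 10-30%:
const introProgress = (i: number) => 10 + Math.round((i / introFrames) * 20);
// Main frames map onto 30-90%:
const mainProgress = (i: number) => 30 + Math.round((i / mainFrames) * 60);

console.log(introProgress(0), introProgress(introFrames - 1)); // 10 30
console.log(mainProgress(0), mainProgress(mainFrames - 1));    // 30 90

The two ranges meet at 30%, so the bar never moves backwards at the intro/main boundary.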