Mirror of https://github.com/JorySeverijnse/ui-fixer-supreme.git, synced 2026-01-29 19:58:38 +00:00
Add intro to export
Integrate intro.mp4/intro.webm at the start of exports with a fade-in, fix the mobile music player's hover/expand behavior, and move the upload and microphone controls beneath the oscilloscope display for a mobile-friendly layout.

X-Lovable-Edit-ID: edt-d9fc8a84-3564-4af4-b79d-75465466c78c
This commit is contained in: commit 043b06d6ea
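Before the diff itself, a quick orientation on the new export timeline: the exporter now renders the intro clip first, then the oscilloscope frames, with a 0.5 s fade bridging the two. Below is a minimal sketch of that frame accounting, using the fps, audio duration, and intro duration values that appear in the hook further down; the function name exportFramePlan and its return shape are illustrative only, not an API in this repository.

// Sketch: how the exporter splits the timeline into intro frames, main frames,
// and the fade window. Illustrative helper, not code from the repo.
interface FramePlan {
  introFrames: number;  // frames drawn from intro.webm / intro.mp4
  mainFrames: number;   // frames rendered from the audio waveform
  fadeFrames: number;   // initial main-content frames still covered by the fading overlay
  totalFrames: number;  // introFrames + mainFrames, used for the frame counter
}

function exportFramePlan(fps: number, audioDuration: number, introDuration: number): FramePlan {
  const introFrames = introDuration > 0 ? Math.ceil(introDuration * fps) : 0;
  const mainFrames = Math.ceil(audioDuration * fps);
  const fadeFrames = Math.ceil(fps * 0.5); // 0.5 second fade, as in the hook
  return { introFrames, mainFrames, fadeFrames, totalFrames: introFrames + mainFrames };
}

// Example: a 3 s intro in front of a 90 s track at 30 fps
// -> { introFrames: 90, mainFrames: 2700, fadeFrames: 15, totalFrames: 2790 }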
@@ -1,4 +1,4 @@
-import { useEffect } from 'react';
+import { useEffect, useRef } from 'react';
 import { Play, Pause, Volume2, Music2, SkipBack, SkipForward, Loader2 } from 'lucide-react';
 import { useSettings } from '@/contexts/SettingsContext';
 import { useMusic } from '@/contexts/MusicContext';
@@ -8,6 +8,7 @@ import { useState } from 'react';
 
 const MusicPlayer = () => {
   const [isExpanded, setIsExpanded] = useState(false);
+  const containerRef = useRef<HTMLDivElement>(null);
   const { playSound, soundEnabled } = useSettings();
   const {
     isPlaying,
@@ -29,6 +30,25 @@ const MusicPlayer = () => {
     }
   }, [isExpanded, fetchStations]);
 
+  // Close on click outside (for mobile)
+  useEffect(() => {
+    const handleClickOutside = (event: MouseEvent | TouchEvent) => {
+      if (containerRef.current && !containerRef.current.contains(event.target as Node)) {
+        setIsExpanded(false);
+      }
+    };
+
+    if (isExpanded) {
+      document.addEventListener('mousedown', handleClickOutside);
+      document.addEventListener('touchstart', handleClickOutside);
+    }
+
+    return () => {
+      document.removeEventListener('mousedown', handleClickOutside);
+      document.removeEventListener('touchstart', handleClickOutside);
+    };
+  }, [isExpanded]);
+
   const handleButtonClick = (action: () => void) => {
     if (soundEnabled) {
       playSound('click');
@@ -36,8 +56,13 @@ const MusicPlayer = () => {
     action();
   };
 
+  const handleToggleExpand = () => {
+    setIsExpanded(!isExpanded);
+  };
+
   return (
     <div
+      ref={containerRef}
      className="fixed bottom-4 left-4 z-50"
      onMouseEnter={() => setIsExpanded(true)}
      onMouseLeave={() => setIsExpanded(false)}
@@ -156,6 +181,11 @@ const MusicPlayer = () => {
          animate={{ opacity: 1 }}
          exit={{ opacity: 0 }}
          className="p-3 bg-background/90 border border-primary box-glow cursor-pointer"
+          onClick={handleToggleExpand}
+          onTouchEnd={(e) => {
+            e.preventDefault();
+            handleToggleExpand();
+          }}
        >
          <Music2 className="w-5 h-5 text-primary text-glow" />
          {isPlaying && (
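The click-outside handling above is written inline in the MusicPlayer component; the same behaviour could be lifted into a small reusable hook. A sketch under that assumption follows — useClickOutside is an illustrative name, not a hook that exists in this repo:

import { useEffect, type RefObject } from 'react';

// Illustrative extraction of the pattern added above: run a callback when the
// user clicks or taps outside the referenced element while it is active.
function useClickOutside(ref: RefObject<HTMLElement>, active: boolean, onOutside: () => void) {
  useEffect(() => {
    if (!active) return;

    const handle = (event: MouseEvent | TouchEvent) => {
      if (ref.current && !ref.current.contains(event.target as Node)) {
        onOutside();
      }
    };

    document.addEventListener('mousedown', handle);
    document.addEventListener('touchstart', handle);
    return () => {
      document.removeEventListener('mousedown', handle);
      document.removeEventListener('touchstart', handle);
    };
  }, [ref, active, onOutside]);
}

// Usage in the player would then reduce to:
//   useClickOutside(containerRef, isExpanded, () => setIsExpanded(false));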
@@ -258,49 +258,6 @@ export function Oscilloscope() {
         </p>
       </div>
 
-      {/* Audio Input Row */}
-      <div className="flex flex-col sm:flex-row items-start sm:items-center gap-4">
-        <div className="flex-1 w-full sm:w-auto">
-          <AudioUploader
-            onFileSelect={handleFileSelect}
-            isLoading={isLoading}
-            fileName={fileName}
-          />
-        </div>
-
-        {/* Microphone Toggle */}
-        <div className="flex items-center gap-4">
-          <Button
-            onClick={toggleMic}
-            variant={isMicActive ? "default" : "outline"}
-            className={`flex items-center gap-2 font-crt ${
-              isMicActive
-                ? 'bg-primary text-primary-foreground'
-                : 'border-primary/50 hover:bg-primary/10'
-            }`}
-          >
-            {isMicActive ? <MicOff size={16} /> : <Mic size={16} />}
-            {isMicActive ? 'STOP MIC' : 'USE MICROPHONE'}
-          </Button>
-
-          {isMicActive && (
-            <div className="flex items-center gap-4">
-              <div className="text-sm text-muted-foreground font-mono-crt">
-                Real-time input active
-              </div>
-              <Button
-                onClick={() => setShowMicCalibration(!showMicCalibration)}
-                variant="outline"
-                size="sm"
-                className="font-mono-crt text-xs"
-              >
-                Calibrate
-              </Button>
-            </div>
-          )}
-        </div>
-      </div>
-
       {/* Main Content: Display + Controls Side by Side */}
       <div className="grid grid-cols-1 xl:grid-cols-[1fr_320px] gap-6">
         {/* Oscilloscope Display */}
@@ -386,6 +343,49 @@ export function Oscilloscope() {
         </div>
       </div>
 
+      {/* Audio Input Row - Now at bottom */}
+      <div className="flex flex-col sm:flex-row items-start sm:items-center gap-4">
+        <div className="flex-1 w-full sm:w-auto">
+          <AudioUploader
+            onFileSelect={handleFileSelect}
+            isLoading={isLoading}
+            fileName={fileName}
+          />
+        </div>
+
+        {/* Microphone Toggle */}
+        <div className="flex items-center gap-4">
+          <Button
+            onClick={toggleMic}
+            variant={isMicActive ? "default" : "outline"}
+            className={`flex items-center gap-2 font-crt ${
+              isMicActive
+                ? 'bg-primary text-primary-foreground'
+                : 'border-primary/50 hover:bg-primary/10'
+            }`}
+          >
+            {isMicActive ? <MicOff size={16} /> : <Mic size={16} />}
+            {isMicActive ? 'STOP MIC' : 'USE MICROPHONE'}
+          </Button>
+
+          {isMicActive && (
+            <div className="flex items-center gap-4">
+              <div className="text-sm text-muted-foreground font-mono-crt">
+                Real-time input active
+              </div>
+              <Button
+                onClick={() => setShowMicCalibration(!showMicCalibration)}
+                variant="outline"
+                size="sm"
+                className="font-mono-crt text-xs"
+              >
+                Calibrate
+              </Button>
+            </div>
+          )}
+        </div>
+      </div>
+
 
       {/* Microphone Calibration */}
       {showMicCalibration && isMicActive && (
@@ -59,7 +59,45 @@ export const useOfflineVideoExport = () => {
       throw new Error('Canvas not supported');
     }
 
-    setState(prev => ({ ...prev, stage: 'rendering', progress: 10 }));
+    setState(prev => ({ ...prev, stage: 'rendering', progress: 5 }));
+
+    // Load intro video
+    console.log('📹 Loading intro video...');
+    const introVideo = document.createElement('video');
+    introVideo.muted = true;
+    introVideo.playsInline = true;
+
+    // Try webm first, fallback to mp4
+    let introLoaded = false;
+    let introDuration = 0;
+
+    try {
+      await new Promise<void>((resolve, reject) => {
+        introVideo.onloadedmetadata = () => {
+          introDuration = introVideo.duration;
+          console.log(`✅ Intro video loaded: ${introDuration.toFixed(2)}s`);
+          introLoaded = true;
+          resolve();
+        };
+        introVideo.onerror = () => {
+          console.warn('⚠️ Could not load intro.webm, trying intro.mp4');
+          introVideo.src = '/intro.mp4';
+        };
+        introVideo.src = '/intro.webm';
+
+        // Timeout after 5 seconds
+        setTimeout(() => {
+          if (!introLoaded) {
+            console.warn('⚠️ Intro video loading timed out');
+            resolve();
+          }
+        }, 5000);
+      });
+    } catch (introError) {
+      console.warn('⚠️ Could not load intro video:', introError);
+    }
+
+    setState(prev => ({ ...prev, progress: 10 }));
 
     // Get supported codecs
     const codecs = [
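Two design choices in the hunk above are easy to miss: the onerror handler retries by swapping the source to /intro.mp4 and reuses the same onloadedmetadata callback, and the 5-second timeout resolves rather than rejects, so a missing intro clip never blocks the export. A condensed sketch of that loading pattern as a standalone helper; loadIntroClip and its null-on-failure return are illustrative, not part of the hook's API:

// Sketch: resolve with the intro <video> element if /intro.webm or /intro.mp4
// loads within the timeout, otherwise resolve with null so the caller can skip
// the intro segment entirely.
function loadIntroClip(timeoutMs = 5000): Promise<HTMLVideoElement | null> {
  return new Promise((resolve) => {
    const video = document.createElement('video');
    video.muted = true;
    video.playsInline = true;

    let settled = false;
    const done = (result: HTMLVideoElement | null) => {
      if (!settled) {
        settled = true;
        resolve(result);
      }
    };

    video.onloadedmetadata = () => done(video);  // duration is now available
    video.onerror = () => {
      if (video.src.endsWith('.webm')) {
        video.src = '/intro.mp4';                // fall back to mp4
      } else {
        done(null);                              // both formats failed
      }
    };

    video.src = '/intro.webm';                   // try webm first
    setTimeout(() => done(null), timeoutMs);     // never block the export
  });
}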
@@ -176,17 +214,70 @@ export const useOfflineVideoExport = () => {
     console.log('✅ MediaRecorder created with audio and video');
     recorder.start(1000); // 1 second chunks
 
-    // Start audio playback synchronized with recording
+    // Calculate total frames including intro
+    const introFrames = introLoaded ? Math.ceil(introDuration * fps) : 0;
+    const mainFrames = Math.ceil(duration * fps);
+    const fadeFrames = Math.ceil(fps * 0.5); // 0.5 second fade
+    const totalFrames = introFrames + mainFrames;
+    const samplesPerFrame = Math.min(qualityConfig.samplesPerFrame, Math.floor(totalSamples / mainFrames));
+
+    console.log(`🎬 Total frames: ${totalFrames} (intro: ${introFrames}, main: ${mainFrames}, fade: ${fadeFrames})`);
+
+    // Render intro frames first (if loaded)
+    if (introLoaded && introFrames > 0) {
+      console.log('📹 Rendering intro frames...');
+      introVideo.currentTime = 0;
+      await introVideo.play();
+
+      for (let frameIndex = 0; frameIndex < introFrames; frameIndex++) {
+        if (cancelledRef.current) {
+          introVideo.pause();
+          recorder.stop();
+          setState({ isExporting: false, progress: 0, error: 'Cancelled', stage: 'idle', fps: 0 });
+          return null;
+        }
+
+        // Seek to correct time
+        introVideo.currentTime = frameIndex / fps;
+
+        // Draw intro video frame scaled to canvas
+        ctx.fillStyle = '#0a0f0a';
+        ctx.fillRect(0, 0, width, height);
+
+        // Calculate aspect-ratio-correct scaling
+        const videoAspect = introVideo.videoWidth / introVideo.videoHeight;
+        const canvasAspect = width / height;
+        let drawWidth = width;
+        let drawHeight = height;
+        let drawX = 0;
+        let drawY = 0;
+
+        if (videoAspect > canvasAspect) {
+          drawHeight = width / videoAspect;
+          drawY = (height - drawHeight) / 2;
+        } else {
+          drawWidth = height * videoAspect;
+          drawX = (width - drawWidth) / 2;
+        }
+
+        ctx.drawImage(introVideo, drawX, drawY, drawWidth, drawHeight);
+
+        const progress = 10 + Math.round((frameIndex / introFrames) * 20);
+        setState(prev => ({ ...prev, progress }));
+
+        await new Promise(resolve => setTimeout(resolve, 1000 / fps));
+      }
+
+      introVideo.pause();
+      console.log('✅ Intro frames complete');
+    }
+
+    // Start audio playback for main content
     recordingAudioSource.start(0);
     console.log('🔊 Audio playback started for recording');
 
-    // Generate animation frames for full audio duration
-    const totalFrames = Math.ceil(duration * fps);
-    const samplesPerFrame = Math.min(qualityConfig.samplesPerFrame, Math.floor(totalSamples / totalFrames));
-
-    console.log(`🎬 Quality: ${quality}, Frames: ${totalFrames}, Samples/frame: ${samplesPerFrame}, Duration: ${duration.toFixed(1)}s`);
-
-    for (let frameIndex = 0; frameIndex < totalFrames; frameIndex++) {
+    // Render main oscilloscope frames with fade-in from intro
+    for (let frameIndex = 0; frameIndex < mainFrames; frameIndex++) {
       if (cancelledRef.current) {
         try {
           recordingAudioSource.stop();
@@ -224,7 +315,7 @@
       ctx.fillStyle = '#0a0f0a';
       ctx.fillRect(0, 0, width, height);
 
-      // Draw oscilloscope with mock audio data
+      // Draw oscilloscope with audio data
       try {
         drawFrame(ctx, width, height, leftData, rightData);
       } catch (drawError) {
@@ -246,17 +337,25 @@
        ctx.stroke();
      }
 
+      // Apply fade-in effect from intro (first fadeFrames of main content)
+      if (introLoaded && frameIndex < fadeFrames) {
+        const fadeProgress = frameIndex / fadeFrames;
+        // Draw a semi-transparent black overlay that fades out
+        ctx.fillStyle = `rgba(10, 15, 10, ${1 - fadeProgress})`;
+        ctx.fillRect(0, 0, width, height);
+      }
+
       // Add frame info
       ctx.fillStyle = '#ffffff';
       ctx.font = '16px monospace';
-      ctx.fillText(`Frame ${frameIndex + 1}/${totalFrames}`, 20, 30);
+      ctx.fillText(`Frame ${introFrames + frameIndex + 1}/${totalFrames}`, 20, 30);
       ctx.fillText(`Time: ${(frameIndex / fps).toFixed(1)}s`, 20, 50);
 
-      const progress = 20 + Math.round((frameIndex / totalFrames) * 70);
+      const progress = 30 + Math.round((frameIndex / mainFrames) * 60);
       setState(prev => ({ ...prev, progress }));
 
-      if (frameIndex % Math.max(1, Math.floor(totalFrames / 10)) === 0) {
-        console.log(`📸 Frame ${frameIndex + 1}/${totalFrames} (${progress}%) - Time: ${(frameIndex / fps).toFixed(1)}s`);
+      if (frameIndex % Math.max(1, Math.floor(mainFrames / 10)) === 0) {
+        console.log(`📸 Frame ${frameIndex + 1}/${mainFrames} (${progress}%) - Time: ${(frameIndex / fps).toFixed(1)}s`);
       }
 
       // Frame timing
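The intro frames above are letterboxed onto the export canvas by comparing aspect ratios before drawImage. A standalone sketch of that "contain" fit calculation; containFit is an illustrative name, not a helper in the hook:

// Sketch of the aspect-ratio-preserving fit used when drawing the intro video
// frame onto the export canvas. Returns the rectangle to pass to drawImage.
function containFit(
  srcWidth: number, srcHeight: number,
  dstWidth: number, dstHeight: number,
): { x: number; y: number; width: number; height: number } {
  const srcAspect = srcWidth / srcHeight;
  const dstAspect = dstWidth / dstHeight;

  if (srcAspect > dstAspect) {
    // Source is wider than the canvas: fill the width, letterbox top/bottom.
    const height = dstWidth / srcAspect;
    return { x: 0, y: (dstHeight - height) / 2, width: dstWidth, height };
  }
  // Source is taller (or equal): fill the height, pillarbox left/right.
  const width = dstHeight * srcAspect;
  return { x: (dstWidth - width) / 2, y: 0, width, height: dstHeight };
}

// Example: a 1920x1080 intro on a 1080x1080 canvas
// -> { x: 0, y: 236.25, width: 1080, height: 607.5 }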