Mirror of https://github.com/JorySeverijnse/ui-fixer-supreme.git, synced 2026-01-29 19:58:38 +00:00
Add live oscilloscope bar

Introduce a new MiniOscilloscope bar at the bottom of the viewport that links to /oscilloscope, update MainLayout to render it on every page except the oscilloscope page itself, and fix the offline video export scaffolding so recorded audio chunks are actually collected and the audio buffer is created before it is used. Add bottom padding to the layout so the bar does not cover page content or block navigation.

X-Lovable-Edit-ID: edt-b2174f84-2f48-457a-857a-e5f0e9c64c83

Commit: fd8f1671ca
src/components/MainLayout.tsx:

@@ -1,27 +1,27 @@
-import { Outlet } from 'react-router-dom';
+import { Outlet, useLocation } from 'react-router-dom';
 import { motion } from 'framer-motion';
 import Sidebar from './Sidebar';
+import { MiniOscilloscope } from './MiniOscilloscope';
 
 const MainLayout = () => {
-  return <motion.div initial={{
-    opacity: 0,
-    scale: 0.95
-  }} animate={{
-    opacity: 1,
-    scale: 1
-  }} transition={{
-    duration: 0.5
-  }} className="relative z-10 flex flex-col items-center pt-8 md:pt-12 px-4 w-full">
+  const location = useLocation();
+  // Don't show mini oscilloscope on the oscilloscope page itself
+  const showMiniOscilloscope = location.pathname !== '/oscilloscope';
+
+  return (
+    <motion.div
+      initial={{ opacity: 0, scale: 0.95 }}
+      animate={{ opacity: 1, scale: 1 }}
+      transition={{ duration: 0.5 }}
+      className="relative z-10 flex flex-col items-center pt-8 md:pt-12 px-4 w-full pb-20"
+    >
       {/* Branding */}
-      <motion.h1 initial={{
-        opacity: 0,
-        y: -20
-      }} animate={{
-        opacity: 1,
-        y: 0
-      }} transition={{
-        delay: 0.3,
-        duration: 0.5
-      }} className="font-minecraft text-4xl md:text-5xl lg:text-6xl text-primary text-glow-strong mb-6">
+      <motion.h1
+        initial={{ opacity: 0, y: -20 }}
+        animate={{ opacity: 1, y: 0 }}
+        transition={{ delay: 0.3, duration: 0.5 }}
+        className="font-minecraft text-4xl md:text-5xl lg:text-6xl text-primary text-glow-strong mb-6"
+      >
         <span className="inline-block translate-y-[0.35em]">~</span>$ whoami Jory
       </motion.h1>

@@ -34,6 +34,11 @@ const MainLayout = () => {
           <Outlet />
         </main>
       </div>
-  </motion.div>;
+
+      {/* Mini Oscilloscope Bar */}
+      {showMiniOscilloscope && <MiniOscilloscope />}
+    </motion.div>
+  );
 };
 
 export default MainLayout;
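For context, MainLayout renders child routes through <Outlet />, so the fixed-position bar overlays every routed page, and the showMiniOscilloscope check hides it on /oscilloscope itself. A minimal sketch of the route wiring this assumes — the router setup and the Oscilloscope import path below are assumptions, not part of the commit:

import { createBrowserRouter } from 'react-router-dom';
import MainLayout from './components/MainLayout';
import Oscilloscope from './pages/Oscilloscope'; // path is an assumption

// Hypothetical route config: MainLayout wraps children via <Outlet />, so
// MiniOscilloscope appears on every child route except /oscilloscope.
// <RouterProvider router={router} /> would then be rendered at the app root.
const router = createBrowserRouter([
  {
    element: <MainLayout />,
    children: [
      { path: '/oscilloscope', element: <Oscilloscope /> },
      // ...the app's other routes render here, each with the mini bar
    ],
  },
]);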
src/components/MiniOscilloscope.tsx (new file, 248 lines):

@@ -0,0 +1,248 @@
import { useEffect, useRef, useCallback } from 'react';
import { useNavigate } from 'react-router-dom';
import { useSettings } from '@/contexts/SettingsContext';

export function MiniOscilloscope() {
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const animationRef = useRef<number>();
  const analyzerRef = useRef<AnalyserNode | null>(null);
  const audioContextRef = useRef<AudioContext | null>(null);
  const sourceNodeRef = useRef<MediaElementAudioSourceNode | null>(null);
  const connectedElementsRef = useRef<Set<HTMLMediaElement>>(new Set());
  const navigate = useNavigate();
  const { playSound } = useSettings();

  // Find and connect to all audio elements on the page
  const connectToAudioElements = useCallback(() => {
    if (!audioContextRef.current || !analyzerRef.current) return;

    const audioElements = document.querySelectorAll('audio, video');

    audioElements.forEach((element) => {
      const mediaElement = element as HTMLMediaElement;

      // Skip if already connected
      if (connectedElementsRef.current.has(mediaElement)) return;

      try {
        // Create a source node for this element
        const source = audioContextRef.current!.createMediaElementSource(mediaElement);
        source.connect(analyzerRef.current!);
        source.connect(audioContextRef.current!.destination);
        connectedElementsRef.current.add(mediaElement);
      } catch (e) {
        // Element might already be connected to a different context
        console.log('Could not connect audio element:', e);
      }
    });
  }, []);

  // Initialize audio context and analyzer
  useEffect(() => {
    const initAudio = async () => {
      try {
        audioContextRef.current = new AudioContext();
        analyzerRef.current = audioContextRef.current.createAnalyser();
        analyzerRef.current.fftSize = 256;
        analyzerRef.current.smoothingTimeConstant = 0.8;
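        // frequencyBinCount is fftSize / 2, so each animation frame below
        // reads 128 time-domain samples from this analyser.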

        // Connect analyzer to destination for pass-through
        // We'll connect sources as we find them
      } catch (e) {
        console.log('Could not initialize audio context:', e);
      }
    };

    initAudio();

    // Observe DOM for new audio elements
    const observer = new MutationObserver(() => {
      connectToAudioElements();
    });

    observer.observe(document.body, {
      childList: true,
      subtree: true,
    });

    // Initial connection attempt
    connectToAudioElements();

    return () => {
      observer.disconnect();
      if (animationRef.current) {
        cancelAnimationFrame(animationRef.current);
      }
      if (audioContextRef.current) {
        audioContextRef.current.close();
      }
    };
  }, [connectToAudioElements]);

  // Draw waveform
  useEffect(() => {
    const canvas = canvasRef.current;
    if (!canvas) return;

    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    const draw = () => {
      const width = canvas.width;
      const height = canvas.height;

      // Clear with transparent background
      ctx.clearRect(0, 0, width, height);

      // Draw background
      ctx.fillStyle = 'rgba(0, 0, 0, 0.6)';
      ctx.fillRect(0, 0, width, height);

      // Draw grid lines
      ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.1)';
      ctx.lineWidth = 1;

      // Vertical grid lines
      for (let x = 0; x < width; x += 20) {
        ctx.beginPath();
        ctx.moveTo(x, 0);
        ctx.lineTo(x, height);
        ctx.stroke();
      }

      // Center line
      ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.3)';
      ctx.beginPath();
      ctx.moveTo(0, height / 2);
      ctx.lineTo(width, height / 2);
      ctx.stroke();

      // Draw waveform
      if (analyzerRef.current) {
        const bufferLength = analyzerRef.current.frequencyBinCount;
        const dataArray = new Uint8Array(bufferLength);
        analyzerRef.current.getByteTimeDomainData(dataArray);

        // Check if there's actual audio (not just silence)
        const hasAudio = dataArray.some(v => Math.abs(v - 128) > 2);
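        // 128 is the zero-signal byte value; deviations larger than 2
        // (about 1.6% of full scale) are treated as real signal.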

        ctx.strokeStyle = 'hsl(120, 100%, 50%)';
        ctx.lineWidth = 2;
        ctx.shadowColor = 'hsl(120, 100%, 50%)';
        ctx.shadowBlur = hasAudio ? 10 : 5;
        ctx.beginPath();

        const sliceWidth = width / bufferLength;
        let x = 0;

        for (let i = 0; i < bufferLength; i++) {
          const v = dataArray[i] / 128.0;
          const y = (v * height) / 2;
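          // Bytes run 0–255 centered on 128, so v falls in [0, 2) and a
          // silent signal maps to the canvas midline at height / 2.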

          if (i === 0) {
            ctx.moveTo(x, y);
          } else {
            ctx.lineTo(x, y);
          }

          x += sliceWidth;
        }

        ctx.lineTo(width, height / 2);
        ctx.stroke();
        ctx.shadowBlur = 0;

        // If no audio, draw a subtle idle animation
        if (!hasAudio) {
          const time = Date.now() / 1000;
          ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.5)';
          ctx.lineWidth = 1;
          ctx.beginPath();

          for (let i = 0; i < width; i++) {
            const y = height / 2 + Math.sin(i * 0.05 + time * 2) * 3;
            if (i === 0) {
              ctx.moveTo(i, y);
            } else {
              ctx.lineTo(i, y);
            }
          }
          ctx.stroke();
        }
      } else {
        // No analyzer - draw idle animation
        const time = Date.now() / 1000;
        ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.5)';
        ctx.lineWidth = 1;
        ctx.beginPath();

        for (let i = 0; i < width; i++) {
          const y = height / 2 + Math.sin(i * 0.05 + time * 2) * 3;
          if (i === 0) {
            ctx.moveTo(i, y);
          } else {
            ctx.lineTo(i, y);
          }
        }
        ctx.stroke();
      }

      animationRef.current = requestAnimationFrame(draw);
    };

    draw();

    return () => {
      if (animationRef.current) {
        cancelAnimationFrame(animationRef.current);
      }
    };
  }, []);

  // Handle resize
  useEffect(() => {
    const canvas = canvasRef.current;
    if (!canvas) return;

    const resizeCanvas = () => {
      const container = canvas.parentElement;
      if (container) {
        canvas.width = container.clientWidth;
        canvas.height = container.clientHeight;
      }
    };
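    // Sizing uses CSS pixels (no devicePixelRatio scaling), so the bitmap
    // matches the element's on-screen size one-to-one.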

    resizeCanvas();
    window.addEventListener('resize', resizeCanvas);

    return () => {
      window.removeEventListener('resize', resizeCanvas);
    };
  }, []);

  const handleClick = () => {
    playSound('click');
    navigate('/oscilloscope');
  };

  return (
    <div
      onClick={handleClick}
      className="fixed bottom-4 left-1/2 -translate-x-1/2 w-[300px] md:w-[400px] h-[50px] z-50 cursor-pointer group"
      title="Open Oscilloscope"
    >
      <div className="relative w-full h-full rounded-lg border border-primary/50 overflow-hidden bg-background/80 backdrop-blur-sm transition-all duration-300 group-hover:border-primary group-hover:shadow-[0_0_20px_hsl(var(--primary)/0.4)]">
        <canvas
          ref={canvasRef}
          className="w-full h-full"
        />
        {/* Hover overlay */}
        <div className="absolute inset-0 flex items-center justify-center bg-primary/0 group-hover:bg-primary/10 transition-colors duration-300">
          <span className="opacity-0 group-hover:opacity-100 transition-opacity duration-300 font-crt text-xs text-primary text-glow">
            OPEN OSCILLOSCOPE
          </span>
        </div>
      </div>
    </div>
  );
}
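One caveat the try/catch above guards against: a media element can be captured by createMediaElementSource only once, so elements already routed through another AudioContext cannot be re-connected here. A minimal standalone illustration of that behavior, assuming a browser Web Audio implementation — not part of the committed file:

// A second createMediaElementSource call on the same element throws
// (InvalidStateError in browsers), which is why the component logs and
// skips the element rather than crashing.
const el = document.createElement('audio');
new AudioContext().createMediaElementSource(el);   // first capture: ok
try {
  new AudioContext().createMediaElementSource(el); // second capture: throws
} catch (err) {
  console.log('element already captured:', err);
}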
useOfflineVideoExport hook:

@@ -95,42 +95,6 @@ export const useOfflineVideoExport = () => {
       throw new Error('No video codec supported');
     }
-
-    // Create audio context for recording
-    const recordingAudioContext = new AudioContext();
-
-    // Resume audio context if suspended
-    if (recordingAudioContext.state === 'suspended') {
-      await recordingAudioContext.resume();
-    }
-
-    // Create audio source and destination
-    const recordingAudioSource = recordingAudioContext.createBufferSource();
-    recordingAudioSource.buffer = audioBuffer;
-    recordingAudioSource.loop = false;
-
-    const audioDestination = recordingAudioContext.createMediaStreamDestination();
-    recordingAudioSource.connect(audioDestination);
-    recordingAudioSource.connect(recordingAudioContext.destination);
-
-    // Combine video and audio streams
-    const combinedStream = new MediaStream();
-    canvas.captureStream(fps).getVideoTracks().forEach(track => combinedStream.addTrack(track));
-    audioDestination.stream.getAudioTracks().forEach(track => combinedStream.addTrack(track));
-
-    console.log(`✅ Combined stream: ${combinedStream.getVideoTracks().length} video, ${combinedStream.getAudioTracks().length} audio tracks`);
-
-    const recorder = new MediaRecorder(combinedStream, {
-      mimeType: selectedCodec,
-      videoBitsPerSecond: videoBitsPerSecond,
-    });
-
-    console.log('✅ MediaRecorder created with audio and video');
-    recorder.start(1000); // 1 second chunks
-
-    // Start audio playback synchronized with recording
-    recordingAudioSource.start(0);
-    console.log('🔊 Audio playback started for recording');
-
     // Use real audio data if available, otherwise generate mock data
     let audioBuffer: AudioBuffer;
     let sampleRate: number;
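Judging by the two hunks, this relocation is the substance of the export fix: in the old order, recordingAudioSource.buffer = audioBuffer read audioBuffer before its let declaration further down the same function (a temporal-dead-zone ReferenceError at runtime), and no ondataavailable handler ever collected the recorder's output. The block is re-added below, after the audio buffer exists, with chunk collection wired in: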
@@ -171,6 +135,51 @@ export const useOfflineVideoExport = () => {
       console.log(`📊 Using mock audio: ${duration.toFixed(1)}s, ${totalSamples} samples`);
     }
+
+    // Create audio context for recording
+    const recordingAudioContext = new AudioContext();
+
+    // Resume audio context if suspended
+    if (recordingAudioContext.state === 'suspended') {
+      await recordingAudioContext.resume();
+    }
+
+    // Create audio source and destination
+    const recordingAudioSource = recordingAudioContext.createBufferSource();
+    recordingAudioSource.buffer = audioBuffer;
+    recordingAudioSource.loop = false;
+
+    const audioDestination = recordingAudioContext.createMediaStreamDestination();
+    recordingAudioSource.connect(audioDestination);
+    recordingAudioSource.connect(recordingAudioContext.destination);
+
+    // Combine video and audio streams
+    const combinedStream = new MediaStream();
+    canvas.captureStream(fps).getVideoTracks().forEach(track => combinedStream.addTrack(track));
+    audioDestination.stream.getAudioTracks().forEach(track => combinedStream.addTrack(track));
+
+    console.log(`✅ Combined stream: ${combinedStream.getVideoTracks().length} video, ${combinedStream.getAudioTracks().length} audio tracks`);
+
+    // Chunks array to collect recorded data
+    const chunks: Blob[] = [];
+
+    const recorder = new MediaRecorder(combinedStream, {
+      mimeType: selectedCodec,
+      videoBitsPerSecond: videoBitsPerSecond,
+    });
+
+    recorder.ondataavailable = (e) => {
+      if (e.data.size > 0) {
+        chunks.push(e.data);
+      }
+    };
+
+    console.log('✅ MediaRecorder created with audio and video');
+    recorder.start(1000); // 1 second chunks
+
+    // Start audio playback synchronized with recording
+    recordingAudioSource.start(0);
+    console.log('🔊 Audio playback started for recording');
+
     // Generate animation frames for full audio duration
     const totalFrames = Math.ceil(duration * fps);
     const samplesPerFrame = Math.min(qualityConfig.samplesPerFrame, Math.floor(totalSamples / totalFrames));
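The hunks add chunk collection but not the finalization step. A minimal sketch of how the chunks would typically be assembled once recording stops — recorder, chunks, and selectedCodec as above; the onstop body and the download filename are assumptions, not shown in this diff:

// Hypothetical finalization: merge the recorded chunks into one Blob and
// expose it as a downloadable object URL.
recorder.onstop = () => {
  const blob = new Blob(chunks, { type: selectedCodec });
  const url = URL.createObjectURL(blob);
  const a = document.createElement('a');
  a.href = url;
  a.download = 'oscilloscope-export.webm'; // assumed filename/container
  a.click();
  URL.revokeObjectURL(url);
};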