From e23f6b55fb230448e205c094bcb78c02807bafcf Mon Sep 17 00:00:00 2001
From: "gpt-engineer-app[bot]" <159125892+gpt-engineer-app[bot]@users.noreply.github.com>
Date: Sun, 21 Dec 2025 13:39:53 +0000
Subject: [PATCH] Add mini oscilloscope bar and fix export audio setup order

Add a MiniOscilloscope component that renders a live waveform for any
audio/video element on the page, show it from MainLayout on every route
except /oscilloscope, and move the recording audio graph setup in
useOfflineVideoExport after audioBuffer is created so it is no longer
referenced before it is defined; recorded chunks are now collected via
ondataavailable.

---
 src/components/MainLayout.tsx       |  47 +++---
 src/components/MiniOscilloscope.tsx | 248 ++++++++++++++++++++++++++++
 src/hooks/useOfflineVideoExport.ts  |  81 +++++----
 3 files changed, 319 insertions(+), 57 deletions(-)
 create mode 100644 src/components/MiniOscilloscope.tsx

diff --git a/src/components/MainLayout.tsx b/src/components/MainLayout.tsx
index c63cf00..1033748 100644
--- a/src/components/MainLayout.tsx
+++ b/src/components/MainLayout.tsx
@@ -1,27 +1,27 @@
-import { Outlet } from 'react-router-dom';
+import { Outlet, useLocation } from 'react-router-dom';
 import { motion } from 'framer-motion';
 import Sidebar from './Sidebar';
+import { MiniOscilloscope } from './MiniOscilloscope';
+
 const MainLayout = () => {
-  return
+  const location = useLocation();
+  // Don't show mini oscilloscope on the oscilloscope page itself
+  const showMiniOscilloscope = location.pathname !== '/oscilloscope';
+
+  return (
+
       {/* Branding */}
-
+
         ~$ whoami
         Jory
@@ -34,6 +34,11 @@ const MainLayout = () => {
-  ;
+
+      {/* Mini Oscilloscope Bar */}
+      {showMiniOscilloscope && <MiniOscilloscope />}
+
+  );
 };
+
 export default MainLayout;
\ No newline at end of file
diff --git a/src/components/MiniOscilloscope.tsx b/src/components/MiniOscilloscope.tsx
new file mode 100644
index 0000000..744444d
--- /dev/null
+++ b/src/components/MiniOscilloscope.tsx
@@ -0,0 +1,248 @@
+import { useEffect, useRef, useCallback } from 'react';
+import { useNavigate } from 'react-router-dom';
+import { useSettings } from '@/contexts/SettingsContext';
+
+export function MiniOscilloscope() {
+  const canvasRef = useRef<HTMLCanvasElement>(null);
+  const animationRef = useRef<number>();
+  const analyzerRef = useRef<AnalyserNode | null>(null);
+  const audioContextRef = useRef<AudioContext | null>(null);
+  const sourceNodeRef = useRef<MediaElementAudioSourceNode | null>(null);
+  const connectedElementsRef = useRef<Set<HTMLMediaElement>>(new Set());
+  const navigate = useNavigate();
+  const { playSound } = useSettings();
+
+  // Find and connect to all audio elements on the page
+  const connectToAudioElements = useCallback(() => {
+    if (!audioContextRef.current || !analyzerRef.current) return;
+
+    const audioElements = document.querySelectorAll('audio, video');
+
+    audioElements.forEach((element) => {
+      const mediaElement = element as HTMLMediaElement;
+
+      // Skip if already connected
+      if (connectedElementsRef.current.has(mediaElement)) return;
+
+      try {
+        // Create a source node for this element
+        const source = audioContextRef.current!.createMediaElementSource(mediaElement);
+        source.connect(analyzerRef.current!);
+        source.connect(audioContextRef.current!.destination);
+        connectedElementsRef.current.add(mediaElement);
+      } catch (e) {
+        // Element might already be connected to a different context
+        console.log('Could not connect audio element:', e);
+      }
+    });
+  }, []);
+
+  // Initialize audio context and analyzer
+  useEffect(() => {
+    const initAudio = async () => {
+      try {
+        audioContextRef.current = new AudioContext();
+        analyzerRef.current = audioContextRef.current.createAnalyser();
+        analyzerRef.current.fftSize = 256;
+        analyzerRef.current.smoothingTimeConstant = 0.8;
+
+        // Sources get wired to the analyzer (and to the destination
+        // for pass-through) as connectToAudioElements finds them
+      } catch (e) {
+        console.log('Could not initialize audio context:', e);
+      }
+    };
+
+    initAudio();
+
+    // Observe DOM for new audio elements
+    const observer = new MutationObserver(() => {
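+      // Any DOM mutation may have introduced a new audio/video
+      // element, so re-scan and connect anything not yet seen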
+      connectToAudioElements();
+    });
+
+    observer.observe(document.body, {
+      childList: true,
+      subtree: true,
+    });
+
+    // Initial connection attempt
+    connectToAudioElements();
+
+    return () => {
+      observer.disconnect();
+      if (animationRef.current) {
+        cancelAnimationFrame(animationRef.current);
+      }
+      if (audioContextRef.current) {
+        audioContextRef.current.close();
+      }
+    };
+  }, [connectToAudioElements]);
+
+  // Draw waveform
+  useEffect(() => {
+    const canvas = canvasRef.current;
+    if (!canvas) return;
+
+    const ctx = canvas.getContext('2d');
+    if (!ctx) return;
+
+    const draw = () => {
+      const width = canvas.width;
+      const height = canvas.height;
+
+      // Clear with transparent background
+      ctx.clearRect(0, 0, width, height);
+
+      // Draw background
+      ctx.fillStyle = 'rgba(0, 0, 0, 0.6)';
+      ctx.fillRect(0, 0, width, height);
+
+      // Draw grid lines
+      ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.1)';
+      ctx.lineWidth = 1;
+
+      // Vertical grid lines
+      for (let x = 0; x < width; x += 20) {
+        ctx.beginPath();
+        ctx.moveTo(x, 0);
+        ctx.lineTo(x, height);
+        ctx.stroke();
+      }
+
+      // Center line
+      ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.3)';
+      ctx.beginPath();
+      ctx.moveTo(0, height / 2);
+      ctx.lineTo(width, height / 2);
+      ctx.stroke();
+
+      // Draw waveform
+      if (analyzerRef.current) {
+        const bufferLength = analyzerRef.current.frequencyBinCount;
+        const dataArray = new Uint8Array(bufferLength);
+        analyzerRef.current.getByteTimeDomainData(dataArray);
+
+        // Check if there's actual audio (not just silence)
+        const hasAudio = dataArray.some(v => Math.abs(v - 128) > 2);
+
+        ctx.strokeStyle = 'hsl(120, 100%, 50%)';
+        ctx.lineWidth = 2;
+        ctx.shadowColor = 'hsl(120, 100%, 50%)';
+        ctx.shadowBlur = hasAudio ? 10 : 5;
+        ctx.beginPath();
+
+        const sliceWidth = width / bufferLength;
+        let x = 0;
+
+        for (let i = 0; i < bufferLength; i++) {
+          const v = dataArray[i] / 128.0;
+          const y = (v * height) / 2;
+
+          if (i === 0) {
+            ctx.moveTo(x, y);
+          } else {
+            ctx.lineTo(x, y);
+          }
+
+          x += sliceWidth;
+        }
+
+        ctx.lineTo(width, height / 2);
+        ctx.stroke();
+        ctx.shadowBlur = 0;
+
+        // If no audio, draw a subtle idle animation
+        if (!hasAudio) {
+          const time = Date.now() / 1000;
+          ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.5)';
+          ctx.lineWidth = 1;
+          ctx.beginPath();
+
+          for (let i = 0; i < width; i++) {
+            const y = height / 2 + Math.sin(i * 0.05 + time * 2) * 3;
+            if (i === 0) {
+              ctx.moveTo(i, y);
+            } else {
+              ctx.lineTo(i, y);
+            }
+          }
+          ctx.stroke();
+        }
+      } else {
+        // No analyzer - draw idle animation
+        const time = Date.now() / 1000;
+        ctx.strokeStyle = 'hsl(120, 100%, 50%, 0.5)';
+        ctx.lineWidth = 1;
+        ctx.beginPath();
+
+        for (let i = 0; i < width; i++) {
+          const y = height / 2 + Math.sin(i * 0.05 + time * 2) * 3;
+          if (i === 0) {
+            ctx.moveTo(i, y);
+          } else {
+            ctx.lineTo(i, y);
+          }
+        }
+        ctx.stroke();
+      }
+
+      animationRef.current = requestAnimationFrame(draw);
+    };
+
+    draw();
+
+    return () => {
+      if (animationRef.current) {
+        cancelAnimationFrame(animationRef.current);
+      }
+    };
+  }, []);
+
+  // Handle resize
+  useEffect(() => {
+    const canvas = canvasRef.current;
+    if (!canvas) return;
+
+    const resizeCanvas = () => {
+      const container = canvas.parentElement;
+      if (container) {
+        canvas.width = container.clientWidth;
+        canvas.height = container.clientHeight;
+      }
+    };
+
+    resizeCanvas();
+    window.addEventListener('resize', resizeCanvas);
+
+    return () => {
+      window.removeEventListener('resize', resizeCanvas);
+    };
+  }, []);
+
+  const handleClick = () => {
+    playSound('click');
+    navigate('/oscilloscope');
+  };
+
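+  // The whole bar is clickable and deep-links to the full oscilloscope page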
+  return (
+    <div onClick={handleClick}>
+      <canvas ref={canvasRef} />
+      {/* Hover overlay */}
+      <div>
+        <span>OPEN OSCILLOSCOPE</span>
+      </div>
+    </div>
+  );
+}
\ No newline at end of file
diff --git a/src/hooks/useOfflineVideoExport.ts b/src/hooks/useOfflineVideoExport.ts
index a3fcf12..84da3fb 100644
--- a/src/hooks/useOfflineVideoExport.ts
+++ b/src/hooks/useOfflineVideoExport.ts
@@ -95,42 +95,6 @@ export const useOfflineVideoExport = () => {
       throw new Error('No video codec supported');
     }
 
-    // Create audio context for recording
-    const recordingAudioContext = new AudioContext();
-
-    // Resume audio context if suspended
-    if (recordingAudioContext.state === 'suspended') {
-      await recordingAudioContext.resume();
-    }
-
-    // Create audio source and destination
-    const recordingAudioSource = recordingAudioContext.createBufferSource();
-    recordingAudioSource.buffer = audioBuffer;
-    recordingAudioSource.loop = false;
-
-    const audioDestination = recordingAudioContext.createMediaStreamDestination();
-    recordingAudioSource.connect(audioDestination);
-    recordingAudioSource.connect(recordingAudioContext.destination);
-
-    // Combine video and audio streams
-    const combinedStream = new MediaStream();
-    canvas.captureStream(fps).getVideoTracks().forEach(track => combinedStream.addTrack(track));
-    audioDestination.stream.getAudioTracks().forEach(track => combinedStream.addTrack(track));
-
-    console.log(`✅ Combined stream: ${combinedStream.getVideoTracks().length} video, ${combinedStream.getAudioTracks().length} audio tracks`);
-
-    const recorder = new MediaRecorder(combinedStream, {
-      mimeType: selectedCodec,
-      videoBitsPerSecond: videoBitsPerSecond,
-    });
-
-    console.log('✅ MediaRecorder created with audio and video');
-    recorder.start(1000); // 1 second chunks
-
-    // Start audio playback synchronized with recording
-    recordingAudioSource.start(0);
-    console.log('🔊 Audio playback started for recording');
-
     // Use real audio data if available, otherwise generate mock data
     let audioBuffer: AudioBuffer;
     let sampleRate: number;
@@ -171,6 +135,51 @@ export const useOfflineVideoExport = () => {
       console.log(`📊 Using mock audio: ${duration.toFixed(1)}s, ${totalSamples} samples`);
     }
 
+    // Create audio context for recording
+    const recordingAudioContext = new AudioContext();
+
+    // Resume audio context if suspended
+    if (recordingAudioContext.state === 'suspended') {
+      await recordingAudioContext.resume();
+    }
+
+    // Create audio source and destination
+    const recordingAudioSource = recordingAudioContext.createBufferSource();
+    recordingAudioSource.buffer = audioBuffer;
+    recordingAudioSource.loop = false;
+
+    const audioDestination = recordingAudioContext.createMediaStreamDestination();
+    recordingAudioSource.connect(audioDestination);
+    recordingAudioSource.connect(recordingAudioContext.destination);
+
+    // Combine video and audio streams
+    const combinedStream = new MediaStream();
+    canvas.captureStream(fps).getVideoTracks().forEach(track => combinedStream.addTrack(track));
+    audioDestination.stream.getAudioTracks().forEach(track => combinedStream.addTrack(track));
+
+    console.log(`✅ Combined stream: ${combinedStream.getVideoTracks().length} video, ${combinedStream.getAudioTracks().length} audio tracks`);
+
+    // Chunks array to collect recorded data
+    const chunks: Blob[] = [];
+
+    const recorder = new MediaRecorder(combinedStream, {
+      mimeType: selectedCodec,
+      videoBitsPerSecond: videoBitsPerSecond,
+    });
+
+    recorder.ondataavailable = (e) => {
+      if (e.data.size > 0) {
+        chunks.push(e.data);
+      }
+    };
+
+    console.log('✅ MediaRecorder created with audio and video');
+    recorder.start(1000); // 1 second chunks
+
+    // Start audio playback synchronized with recording
+    recordingAudioSource.start(0);
+    console.log('🔊 Audio playback started for recording');
+
     // Generate animation frames for full audio duration
     const totalFrames = Math.ceil(duration * fps);
     const samplesPerFrame = Math.min(qualityConfig.samplesPerFrame, Math.floor(totalSamples / totalFrames));