let audioContext; let mediaStream; let recognition; let isCapturing = false; let overlayInitialized = false; let activeCaptureMode = 'tab'; let overlayListening = false; let overlayHidden = false; let analyserNode = null; let meterSource = null; let meterRaf = null; chrome.runtime.onMessage.addListener((request, sender, sendResponse) => { if (request.action === 'startCapture') { activeCaptureMode = 'tab'; startCapture(request.streamId); sendResponse({ success: true }); return false; } if (request.action === 'startMicCapture') { activeCaptureMode = 'mic'; startMicCapture(); sendResponse({ success: true }); return false; } if (request.action === 'startMixedCapture') { activeCaptureMode = 'mixed'; startMixedCapture(request.streamId); sendResponse({ success: true }); return false; } if (request.action === 'stopCapture') { stopCapture(); sendResponse({ success: true }); return false; } if (request.action === 'requestMicPermission') { requestMicPermission().then(sendResponse); return true; } if (request.action === 'updateTranscript') { updateOverlay('transcript', request.transcript); return false; } if (request.action === 'updateAIResponse') { updateOverlay('response', request.response); return false; } if (request.action === 'showOverlay') { setOverlayHidden(false); return false; } if (request.action === 'hideOverlay') { setOverlayHidden(true); return false; } return false; }); async function requestMicPermission() { try { const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); stream.getTracks().forEach(track => track.stop()); return { success: true }; } catch (error) { let errorMessage = 'Microphone permission denied.'; if (error.name === 'NotAllowedError') { errorMessage = 'Microphone permission denied.'; } else if (error.name === 'NotFoundError') { errorMessage = 'No microphone found.'; } else { errorMessage = error.message || 'Unknown error occurred.'; } return { success: false, error: errorMessage }; } } function startCapture(streamId) { 
isCapturing = true; overlayListening = true; ensureOverlay(); updateOverlayIndicator(); updateOverlay( 'response', 'Tab audio is captured, but speech recognition uses the microphone. Use mic or mixed mode if you want transcription.' ); navigator.mediaDevices.getUserMedia({ audio: { chromeMediaSource: 'tab', chromeMediaSourceId: streamId } }).then((stream) => { mediaStream = stream; audioContext = new AudioContext(); createAudioMeter(stream); if (ensureSpeechRecognitionAvailable()) { startRecognition(); } }).catch((error) => { console.error('Error starting capture:', error); let errorMessage = 'Failed to start audio capture. '; if (error.name === 'NotAllowedError') { errorMessage += 'Please allow microphone access and try again.'; } else if (error.name === 'NotFoundError') { errorMessage += 'No microphone found.'; } else { errorMessage += error.message || 'Unknown error occurred.'; } chrome.runtime.sendMessage({action: 'updateAIResponse', response: errorMessage}); updateOverlay('response', errorMessage); overlayListening = false; updateOverlayIndicator(); }); } function startMicCapture() { isCapturing = true; overlayListening = true; ensureOverlay(); updateOverlayIndicator(); navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => { mediaStream = stream; audioContext = new AudioContext(); createAudioMeter(stream); if (ensureSpeechRecognitionAvailable()) { startRecognition(); } }).catch((error) => { console.error('Error starting mic capture:', error); let errorMessage = 'Failed to start microphone capture. 
'; if (error.name === 'NotAllowedError') { errorMessage += 'Please allow microphone access and try again.'; } else if (error.name === 'NotFoundError') { errorMessage += 'No microphone found.'; } else { errorMessage += error.message || 'Unknown error occurred.'; } chrome.runtime.sendMessage({action: 'updateAIResponse', response: errorMessage}); updateOverlay('response', errorMessage); overlayListening = false; updateOverlayIndicator(); }); } function startMixedCapture(streamId) { isCapturing = true; overlayListening = true; ensureOverlay(); updateOverlayIndicator(); navigator.mediaDevices.getUserMedia({ audio: { chromeMediaSource: 'tab', chromeMediaSourceId: streamId } }).then((stream) => { mediaStream = stream; audioContext = new AudioContext(); createAudioMeter(stream); if (ensureSpeechRecognitionAvailable()) { startRecognition(); } }).catch((error) => { console.error('Error starting mixed capture:', error); chrome.runtime.sendMessage({action: 'updateAIResponse', response: 'Failed to start mixed capture.'}); updateOverlay('response', 'Failed to start mixed capture.'); overlayListening = false; updateOverlayIndicator(); }); } function startRecognition() { if (recognition) { try { recognition.stop(); } catch (error) { console.warn('Failed to stop previous recognition:', error); } } recognition = new webkitSpeechRecognition(); recognition.continuous = true; recognition.interimResults = true; recognition.onresult = function(event) { let finalTranscript = ''; for (let i = event.resultIndex; i < event.results.length; ++i) { if (event.results[i].isFinal) { finalTranscript += event.results[i][0].transcript; } } if (finalTranscript.trim() !== '') { chrome.runtime.sendMessage({action: 'updateTranscript', transcript: finalTranscript}); updateOverlay('transcript', finalTranscript); chrome.runtime.sendMessage({action: 'getAIResponse', question: finalTranscript}); } }; recognition.onerror = function(event) { console.error('Speech recognition error:', event.error); if 
(event.error === 'no-speech' && isCapturing) {
      // 'no-speech' is just a silence timeout — quietly restart the session
      // instead of surfacing an error while capture is still active.
      try {
        recognition.start();
      } catch (error) {
        console.warn('Failed to restart recognition after no-speech:', error);
      }
      return;
    }
    // Any other recognition error is shown to the user via both channels.
    chrome.runtime.sendMessage({action: 'updateAIResponse', response: `Speech recognition error: ${event.error}. Please try again.`});
    updateOverlay('response', `Speech recognition error: ${event.error}. Please try again.`);
  };
  recognition.onend = function() {
    // Chrome ends recognition sessions periodically; keep restarting for as
    // long as capture is active.
    if (!isCapturing) return;
    try {
      recognition.start();
    } catch (error) {
      console.warn('Failed to restart recognition:', error);
    }
  };
  recognition.start();
}

/**
 * Check that the Web Speech API exists in this page context.
 * On failure, reports a user-facing message and clears the listening state.
 * @returns {boolean} true when a SpeechRecognition constructor is available.
 */
function ensureSpeechRecognitionAvailable() {
  const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
  if (!SpeechRecognition) {
    const message = 'Speech recognition is not available in this browser context. Use mic mode in Chrome or enable speech recognition.';
    chrome.runtime.sendMessage({ action: 'updateAIResponse', response: message });
    updateOverlay('response', message);
    overlayListening = false;
    updateOverlayIndicator();
    return false;
  }
  return true;
}

/**
 * Tear down capture: stop the meter, media tracks, audio context, and
 * the recognition session, and reset the overlay indicator.
 * NOTE(review): mediaStream and recognition are not nulled out here, and the
 * promise returned by audioContext.close() is ignored — consider cleaning up.
 */
function stopCapture() {
  isCapturing = false;
  overlayListening = false;
  updateOverlayIndicator();
  stopAudioMeter();
  if (mediaStream) {
    mediaStream.getTracks().forEach(track => track.stop());
  }
  if (audioContext) {
    audioContext.close();
    audioContext = null;
  }
  if (recognition) {
    recognition.stop();
  }
}

// Build the floating overlay (stylesheet + DOM) once per page.
// NOTE: this definition continues beyond the visible excerpt.
function ensureOverlay() {
  if (overlayInitialized) return;
  overlayInitialized = true;
  // A previous script instance may already have injected the overlay.
  if (document.getElementById('ai-interview-overlay')) {
    return;
  }
  const style = document.createElement('style');
  // Overlay stylesheet (template literal preserved verbatim; it continues on
  // the following source line).
  style.textContent = ` #ai-interview-overlay { position: fixed; top: 24px; right: 24px; width: 420px; min-width: 280px; min-height: 240px; background: rgba(20, 20, 20, 0.35); color: #f5f5f5; border: 1px solid rgba(255, 255, 255, 0.15); border-radius: 12px; backdrop-filter: blur(10px); z-index: 2147483647; font-family: "Helvetica Neue", Arial, sans-serif; box-shadow: 0 10px 30px rgba(0, 0, 0, 0.35); user-select: none; 
resize: both; overflow: auto; } #ai-interview-resize { position: absolute; right: 6px; bottom: 6px; width: 14px; height: 14px; cursor: se-resize; background: radial-gradient(circle at center, rgba(255, 255, 255, 0.8) 0 2px, transparent 2px); opacity: 0.6; } #ai-interview-overlay.minimized #ai-interview-body { display: none; } #ai-interview-header { display: flex; align-items: center; justify-content: space-between; padding: 10px 12px; cursor: move; font-weight: 600; font-size: 13px; letter-spacing: 0.02em; border-bottom: 1px solid rgba(255, 255, 255, 0.1); } #ai-interview-title { display: flex; align-items: center; gap: 8px; } #ai-interview-indicator { width: 10px; height: 10px; border-radius: 50%; background: rgba(255, 255, 255, 0.25); box-shadow: 0 0 0 rgba(255, 255, 255, 0.3); } #ai-interview-indicator.active { background: #41f59a; animation: aiPulse 1.2s ease-in-out infinite; box-shadow: 0 0 8px rgba(65, 245, 154, 0.7); } @keyframes aiPulse { 0% { transform: scale(0.9); opacity: 0.6; } 50% { transform: scale(1.3); opacity: 1; } 100% { transform: scale(0.9); opacity: 0.6; } } #ai-interview-controls { display: flex; gap: 6px; } .ai-interview-btn { background: rgba(255, 255, 255, 0.12); border: none; color: #f5f5f5; font-size: 12px; padding: 4px 8px; border-radius: 6px; cursor: pointer; } .ai-interview-btn:hover { background: rgba(255, 255, 255, 0.22); } #ai-interview-body { padding: 12px; font-size: 12px; line-height: 1.4; } #ai-interview-mode { font-size: 11px; opacity: 0.8; margin-bottom: 6px; } #ai-interview-meter { height: 6px; background: rgba(255, 255, 255, 0.12); border-radius: 999px; overflow: hidden; margin-bottom: 10px; } #ai-interview-meter-bar { height: 100%; width: 0%; background: linear-gradient(90deg, #41f59a, #48c5ff); transition: width 80ms linear; } #ai-interview-transcript, #ai-interview-response { background: rgba(0, 0, 0, 0.35); border-radius: 8px; padding: 8px; margin-bottom: 8px; max-height: 200px; overflow: auto; user-select: text; } `; 
document.head.appendChild(style); const overlay = document.createElement('div'); overlay.id = 'ai-interview-overlay'; overlay.innerHTML = `