Files
Ai-Interview-Assistant-Chro…/sidepanel.js
Ahmed Galadima 56d56395ee feat: Enhance audio capture and monitoring features
- Added "audioCapture" permission to manifest for microphone access.
- Introduced DeepSeek as a new AI provider option in the side panel.
- Implemented a capture mode selection (tab-only, mic-only, mixed) in the side panel.
- Added options to enable/disable the extension and auto-open the assistant window.
- Integrated a mic monitor feature with live input level visualization.
- Included buttons for requesting microphone permission and granting tab access.
- Updated styles for new sections and mic level visualization.
- Enhanced model fetching logic to support DeepSeek and improved error handling.
2026-01-31 21:55:09 +01:00

916 lines
37 KiB
JavaScript

// sidepanel.js — controller for the extension's side panel UI.
// Everything runs inside a single DOMContentLoaded handler so that all of the
// getElementById lookups below are guaranteed to resolve, and so that every
// helper/listener shares one closure of element references and state.
document.addEventListener('DOMContentLoaded', function() {
  // --- Listening / transcript UI ---
  const toggleButton = document.getElementById('toggleListening');
  const transcriptDiv = document.getElementById('transcript');
  const aiResponseDiv = document.getElementById('aiResponse');
  // --- AI provider / API key configuration ---
  const apiKeyInput = document.getElementById('apiKeyInput');
  const saveApiKeyButton = document.getElementById('saveApiKey');
  const aiProviderSelect = document.getElementById('aiProvider');
  const modelSelect = document.getElementById('modelSelect');
  const apiKeyStatus = document.getElementById('apiKeyStatus');
  // --- Microphone / overlay / tab-capture permission controls ---
  const requestMicPermissionBtn = document.getElementById('requestMicPermission');
  const showOverlayBtn = document.getElementById('showOverlay');
  const micPermissionStatus = document.getElementById('micPermissionStatus');
  const grantTabAccessBtn = document.getElementById('grantTabAccess');
  const tabAccessStatus = document.getElementById('tabAccessStatus');
  // --- Behaviour toggles (several are optional in older markup, hence the
  // null checks wherever they are used) ---
  const speedModeToggle = document.getElementById('speedModeToggle');
  const captureModeSelect = document.getElementById('captureModeSelect');
  const autoOpenAssistantWindowToggle = document.getElementById('autoOpenAssistantWindow');
  const extensionActiveToggle = document.getElementById('extensionActiveToggle');
  // --- Input device selection + live mic level monitor ---
  const inputDeviceSelect = document.getElementById('inputDeviceSelect');
  const inputDeviceStatus = document.getElementById('inputDeviceStatus');
  const micLevelBar = document.getElementById('micLevelBar');
  const startMicMonitorBtn = document.getElementById('startMicMonitor');
  // Context management elements
  const contextFileInput = document.getElementById('contextFileInput');
  const uploadContextBtn = document.getElementById('uploadContextBtn');
  const contextTextInput = document.getElementById('contextTextInput');
  const contextTypeSelect = document.getElementById('contextTypeSelect');
  const contextTitleInput = document.getElementById('contextTitleInput');
  const addContextBtn = document.getElementById('addContextBtn');
  const contextList = document.getElementById('contextList');
  const clearAllContextBtn = document.getElementById('clearAllContextBtn');
  // Multi-device elements
  const enableRemoteListening = document.getElementById('enableRemoteListening');
  const remoteStatus = document.getElementById('remoteStatus');
  const deviceInfo = document.getElementById('deviceInfo');
  const accessUrl = document.getElementById('accessUrl');
  const copyUrlBtn = document.getElementById('copyUrlBtn');
  const qrCode = document.getElementById('qrCode');
  // --- Mutable UI state ---
  let isListening = false;        // mirrors the Start/Stop Listening button
  let remoteServerActive = false; // whether the remote-access server is up
  // Live objects backing the mic level monitor; all null while it is off.
  let micMonitorStream = null;    // MediaStream from getUserMedia
  let micMonitorCtx = null;       // AudioContext driving the analyser
  let micMonitorSource = null;    // MediaStreamAudioSourceNode
  let micMonitorAnalyser = null;  // AnalyserNode feeding the level bar
  let micMonitorRaf = null;       // requestAnimationFrame handle for the meter loop
// AI Provider configurations
// Static catalog of supported providers. `models` is the fallback list shown
// until a live list is fetched (see refreshModelOptions), `defaultModel`
// seeds the selection, and `requiresKey` gates the API-key UI — Ollama runs
// locally and needs no key.
const aiProviders = {
  openai: {
    name: 'OpenAI',
    models: ['gpt-4o', 'gpt-4o-mini', 'gpt-4-turbo', 'gpt-3.5-turbo'],
    defaultModel: 'gpt-4o-mini',
    apiKeyPlaceholder: 'Enter your OpenAI API Key',
    requiresKey: true
  },
  anthropic: {
    name: 'Anthropic',
    models: ['claude-3-5-sonnet-20241022', 'claude-3-5-haiku-20241022', 'claude-3-opus-20240229'],
    defaultModel: 'claude-3-5-sonnet-20241022',
    apiKeyPlaceholder: 'Enter your Anthropic API Key',
    requiresKey: true
  },
  google: {
    name: 'Google',
    models: ['gemini-1.5-pro', 'gemini-1.5-flash', 'gemini-pro'],
    defaultModel: 'gemini-1.5-flash',
    apiKeyPlaceholder: 'Enter your Google AI API Key',
    requiresKey: true
  },
  deepseek: {
    name: 'DeepSeek',
    models: ['deepseek-chat', 'deepseek-reasoner'],
    defaultModel: 'deepseek-chat',
    apiKeyPlaceholder: 'Enter your DeepSeek API Key',
    requiresKey: true
  },
  ollama: {
    name: 'Ollama',
    models: ['llama3.2', 'llama3.1', 'mistral', 'codellama', 'phi3'],
    defaultModel: 'llama3.2',
    apiKeyPlaceholder: 'No API key required (Local)',
    requiresKey: false
  }
};
// Runtime caches: live-fetched model lists keyed by provider, and an
// in-flight flag per provider so duplicate fetches are never issued.
const modelCache = {};
const modelFetchState = {};
// Load saved settings from sync storage and hydrate every control with them.
chrome.storage.sync.get(['aiProvider', 'selectedModel', 'apiKeys', 'speedMode', 'captureMode', 'autoOpenAssistantWindow', 'inputDeviceId', 'extensionActive'], (result) => {
  const savedProvider = result.aiProvider || 'openai';
  const savedModel = result.selectedModel || aiProviders[savedProvider].defaultModel;
  const savedApiKeys = result.apiKeys || {}; // map of provider -> API key
  const speedMode = Boolean(result.speedMode);
  const captureMode = result.captureMode || 'tab';
  const autoOpenAssistantWindow = Boolean(result.autoOpenAssistantWindow);
  const savedInputDeviceId = result.inputDeviceId || '';
  // The extension defaults to active unless it was explicitly disabled.
  const extensionActive = result.extensionActive !== false;
  aiProviderSelect.value = savedProvider;
  // Optional controls are null-checked: the markup may not include them.
  if (captureModeSelect) captureModeSelect.value = captureMode;
  if (speedModeToggle) speedModeToggle.checked = speedMode;
  if (autoOpenAssistantWindowToggle) autoOpenAssistantWindowToggle.checked = autoOpenAssistantWindow;
  if (extensionActiveToggle) extensionActiveToggle.checked = extensionActive;
  // Populate the model list (may kick off a remote fetch) and the key field.
  refreshModelOptions(savedProvider, savedModel, savedApiKeys[savedProvider]);
  updateApiKeyInput(savedProvider);
  if (savedApiKeys[savedProvider] && aiProviders[savedProvider].requiresKey) {
    apiKeyInput.value = savedApiKeys[savedProvider];
    updateApiKeyStatus('API Key Saved', 'success');
    saveApiKeyButton.textContent = 'API Key Saved';
    saveApiKeyButton.disabled = true;
  }
  if (inputDeviceSelect) {
    loadInputDevices(savedInputDeviceId);
  }
});
// Load and display saved contexts
loadContexts();
// Helper functions
// Rebuild the model <select> for the given provider. The model list is
// chosen by priority: explicit override, then the fetched cache, then the
// provider's static fallback list. `selectedModel` wins the selection;
// otherwise the provider default is pre-selected.
function updateModelOptions(provider, selectedModel = null, modelsOverride = null) {
  const available = modelsOverride || modelCache[provider] || aiProviders[provider].models;
  const fallback = aiProviders[provider].defaultModel;
  modelSelect.innerHTML = '';
  for (const id of available) {
    const entry = document.createElement('option');
    entry.value = id;
    entry.textContent = id;
    const isChosen = selectedModel === id || (!selectedModel && id === fallback);
    if (isChosen) {
      entry.selected = true;
    }
    modelSelect.appendChild(entry);
  }
}
// Sync the API-key field and save button with the selected provider's
// requirements. Keyless (local) providers get a cleared, disabled field.
function updateApiKeyInput(provider) {
  const { requiresKey, apiKeyPlaceholder } = aiProviders[provider];
  apiKeyInput.placeholder = apiKeyPlaceholder;
  apiKeyInput.disabled = !requiresKey;
  saveApiKeyButton.disabled = !requiresKey;
  if (requiresKey) {
    updateApiKeyStatus('', '');
  } else {
    // Local providers (e.g. Ollama) need no key; clear the field entirely.
    apiKeyInput.value = '';
    updateApiKeyStatus('No API key required', 'success');
  }
}
// Write `message` into a status element and tag it with the CSS status type
// ('success', 'error', or '' for neutral). No-op when the element is absent.
// Shared by the four wrappers below, which previously duplicated this body;
// the API-key variant also gains the same null guard as its siblings.
function setStatusMessage(element, message, type) {
  if (!element) return;
  element.textContent = message;
  element.className = `status-message ${type}`;
}
function updateApiKeyStatus(message, type) {
  setStatusMessage(apiKeyStatus, message, type);
}
function updateMicPermissionStatus(message, type) {
  setStatusMessage(micPermissionStatus, message, type);
}
function updateInputDeviceStatus(message, type) {
  setStatusMessage(inputDeviceStatus, message, type);
}
function updateTabAccessStatus(message, type) {
  setStatusMessage(tabAccessStatus, message, type);
}
// Choose which model to select for `provider`:
//   1. the caller's preferred model, when it is actually in `models`;
//   2. otherwise the provider's configured default, when available;
//   3. otherwise the first entry of `models`.
// Fix: an empty/missing `models` list previously yielded `undefined`
// (models[0] on []); now it falls back to the provider default so callers
// never persist an undefined selection.
function pickModel(provider, preferredModel, models) {
  if (!models || models.length === 0) {
    return aiProviders[provider].defaultModel;
  }
  if (preferredModel && models.includes(preferredModel)) {
    return preferredModel;
  }
  const fallback = aiProviders[provider].defaultModel;
  if (fallback && models.includes(fallback)) {
    return fallback;
  }
  return models[0];
}
// Refresh the model dropdown for `provider`, fetching a live model list when
// possible (Ollama locally, or any keyed provider once a key is supplied).
// Fetch failures are non-fatal: the static fallback list is used instead.
// The chosen model is persisted to sync storage either way.
async function refreshModelOptions(provider, preferredModel, apiKey) {
  // Re-entrancy guard: skip if a fetch for this provider is already running.
  if (modelFetchState[provider]) return;
  modelFetchState[provider] = true;
  modelSelect.disabled = true;
  modelSelect.innerHTML = '<option>Loading models...</option>';
  try {
    let fetched = null;
    if (provider === 'ollama') {
      // Ollama is local and keyless; query its tag list directly.
      fetched = await fetchOllamaModels();
    } else if (apiKey && aiProviders[provider].requiresKey) {
      fetched = await fetchRemoteModels(provider, apiKey);
    }
    if (fetched && fetched.length) {
      modelCache[provider] = fetched;
    }
  } catch (error) {
    console.warn(`Failed to fetch models for ${provider}:`, error);
  } finally {
    modelFetchState[provider] = false;
    const available = modelCache[provider] || aiProviders[provider].models;
    const chosen = pickModel(provider, preferredModel, available);
    updateModelOptions(provider, chosen, available);
    chrome.storage.sync.set({ selectedModel: chosen });
    modelSelect.disabled = false;
  }
}
// Populate the microphone <select> from enumerateDevices(), pre-selecting
// `preferredDeviceId` when present. Device labels are only exposed by the
// browser once mic permission has been granted, so an unlabeled list gets a
// hint message instead of the selected-device name.
async function loadInputDevices(preferredDeviceId = '') {
  if (!navigator.mediaDevices || !navigator.mediaDevices.enumerateDevices) {
    updateInputDeviceStatus('Device enumeration is not supported in this browser.', 'error');
    return;
  }
  try {
    const allDevices = await navigator.mediaDevices.enumerateDevices();
    const microphones = allDevices.filter((d) => d.kind === 'audioinput');
    const anyLabeled = microphones.some((d) => d.label);
    inputDeviceSelect.innerHTML = '';
    if (microphones.length === 0) {
      const placeholder = document.createElement('option');
      placeholder.value = '';
      placeholder.textContent = 'No input devices found';
      inputDeviceSelect.appendChild(placeholder);
      inputDeviceSelect.disabled = true;
      updateInputDeviceStatus('No microphone devices detected.', 'error');
      return;
    }
    microphones.forEach((device, index) => {
      const choice = document.createElement('option');
      choice.value = device.deviceId;
      choice.textContent = device.label || `Microphone ${index + 1}`;
      if (device.deviceId === preferredDeviceId) {
        choice.selected = true;
      }
      inputDeviceSelect.appendChild(choice);
    });
    inputDeviceSelect.disabled = false;
    const current = inputDeviceSelect.options[inputDeviceSelect.selectedIndex];
    if (anyLabeled) {
      updateInputDeviceStatus(`Selected: ${current ? current.textContent : 'Unknown'}`, '');
    } else {
      updateInputDeviceStatus('Grant mic permission to see device names.', '');
    }
  } catch (error) {
    console.warn('Failed to enumerate devices:', error);
    updateInputDeviceStatus('Failed to list input devices.', 'error');
  }
}
// Tear down the live mic monitor: cancel the animation loop, disconnect the
// audio graph, close the AudioContext, stop the media stream tracks, and
// reset the level bar. Safe to call when the monitor is not running.
function stopMicMonitor() {
  if (micMonitorRaf) {
    cancelAnimationFrame(micMonitorRaf);
    micMonitorRaf = null;
  }
  if (micMonitorSource) {
    try {
      micMonitorSource.disconnect();
    } catch (error) {
      console.warn('Failed to disconnect mic monitor source:', error);
    }
    micMonitorSource = null;
  }
  if (micMonitorAnalyser) {
    try {
      micMonitorAnalyser.disconnect();
    } catch (error) {
      console.warn('Failed to disconnect mic monitor analyser:', error);
    }
    micMonitorAnalyser = null;
  }
  if (micMonitorCtx) {
    // Fix: close() throws/rejects with InvalidStateError on an already-closed
    // context; guard it like the disconnect calls above so teardown always
    // completes, and handle the returned promise to avoid an unhandled
    // rejection.
    try {
      const closing = micMonitorCtx.close();
      if (closing && typeof closing.catch === 'function') {
        closing.catch((error) => console.warn('Failed to close mic monitor context:', error));
      }
    } catch (error) {
      console.warn('Failed to close mic monitor context:', error);
    }
    micMonitorCtx = null;
  }
  if (micMonitorStream) {
    micMonitorStream.getTracks().forEach(track => track.stop());
    micMonitorStream = null;
  }
  if (micLevelBar) {
    micLevelBar.style.width = '0%';
  }
}
// Open the selected microphone and drive the level bar with a live RMS
// meter rendered via requestAnimationFrame. Any previous monitor is torn
// down first; errors are reported in the device status line, never thrown.
async function startMicMonitor() {
  if (!micLevelBar || !inputDeviceSelect) return;
  stopMicMonitor();
  updateInputDeviceStatus('Requesting microphone access...', '');
  const deviceId = inputDeviceSelect.value;
  // Pin capture to the chosen device when one is selected; otherwise any mic.
  const constraints = deviceId ? { audio: { deviceId: { exact: deviceId } } } : { audio: true };
  try {
    micMonitorStream = await navigator.mediaDevices.getUserMedia(constraints);
    micMonitorCtx = new AudioContext();
    micMonitorAnalyser = micMonitorCtx.createAnalyser();
    micMonitorAnalyser.fftSize = 512;
    micMonitorAnalyser.smoothingTimeConstant = 0.8;
    micMonitorSource = micMonitorCtx.createMediaStreamSource(micMonitorStream);
    micMonitorSource.connect(micMonitorAnalyser);
    const data = new Uint8Array(micMonitorAnalyser.fftSize);
    const tick = () => {
      if (!micMonitorAnalyser) return; // monitor was stopped mid-frame
      micMonitorAnalyser.getByteTimeDomainData(data);
      // RMS of the waveform: samples are unsigned bytes centered on 128.
      let sum = 0;
      for (let i = 0; i < data.length; i++) {
        const v = (data[i] - 128) / 128;
        sum += v * v;
      }
      const rms = Math.sqrt(sum / data.length);
      // The 2.5x gain makes normal speech visibly move the bar; clamp at 1.
      const normalized = Math.min(1, rms * 2.5);
      micLevelBar.style.width = `${Math.round(normalized * 100)}%`;
      micMonitorRaf = requestAnimationFrame(tick);
    };
    micMonitorRaf = requestAnimationFrame(tick);
    const selectedOption = inputDeviceSelect.options[inputDeviceSelect.selectedIndex];
    updateInputDeviceStatus(`Mic monitor active: ${selectedOption ? selectedOption.textContent : 'Unknown'}`, 'success');
  } catch (error) {
    console.warn('Failed to start mic monitor:', error);
    // Map the common getUserMedia failures to actionable messages.
    if (error && error.name === 'NotAllowedError') {
      updateInputDeviceStatus('Microphone permission denied. Click "Request Microphone Permission".', 'error');
    } else if (error && error.name === 'NotFoundError') {
      updateInputDeviceStatus('No microphone found for the selected device.', 'error');
    } else {
      updateInputDeviceStatus('Microphone permission denied or unavailable.', 'error');
    }
  }
}
// Dispatch to the provider-specific model-list fetcher. Unknown providers
// resolve to an empty list instead of throwing.
async function fetchRemoteModels(provider, apiKey) {
  switch (provider) {
    case 'openai':
      return fetchOpenAIModels(apiKey);
    case 'anthropic':
      return fetchAnthropicModels(apiKey);
    case 'google':
      return fetchGoogleModels(apiKey);
    case 'deepseek':
      return fetchDeepSeekModels(apiKey);
    default:
      return [];
  }
}
// List chat-capable OpenAI model ids (gpt-* plus the o-series), deduplicated
// and sorted. If the chat filter matches nothing, every returned id is used
// as a fallback. Throws on a non-OK HTTP response.
async function fetchOpenAIModels(apiKey) {
  const response = await fetch('https://api.openai.com/v1/models', {
    headers: {
      'Authorization': `Bearer ${apiKey}`
    }
  });
  if (!response.ok) {
    throw new Error(`OpenAI models request failed: ${response.status}`);
  }
  const payload = await response.json();
  const allIds = (payload.data || []).map((entry) => entry.id).filter(Boolean);
  const chatPrefixes = ['gpt-', 'o1', 'o3', 'o4', 'o5'];
  const chatIds = allIds.filter((id) => chatPrefixes.some((prefix) => id.startsWith(prefix)));
  const candidates = chatIds.length ? chatIds : allIds;
  return [...new Set(candidates)].sort();
}
// List Anthropic model ids, deduplicated and sorted. The response collection
// has appeared under both `data` and `models`, and entries under both `id`
// and `name`, so accept either shape.
async function fetchAnthropicModels(apiKey) {
  const response = await fetch('https://api.anthropic.com/v1/models', {
    headers: {
      'Content-Type': 'application/json',
      'x-api-key': apiKey,
      'anthropic-version': '2023-06-01'
    }
  });
  if (!response.ok) {
    throw new Error(`Anthropic models request failed: ${response.status}`);
  }
  const payload = await response.json();
  const entries = payload.data || payload.models || [];
  const names = entries
    .map((entry) => entry.id || entry.name)
    .filter(Boolean);
  return [...new Set(names)].sort();
}
// List Gemini models that support generateContent, with the `models/` path
// prefix stripped, deduplicated and sorted.
// Fix: the API key was previously concatenated straight into the URL string;
// it is now attached via URL/searchParams so it is always properly encoded.
async function fetchGoogleModels(apiKey) {
  const url = new URL('https://generativelanguage.googleapis.com/v1beta/models');
  url.searchParams.set('key', apiKey);
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(`Google models request failed: ${response.status}`);
  }
  const data = await response.json();
  const models = (data.models || [])
    .filter((model) => (model.supportedGenerationMethods || []).includes('generateContent'))
    .map((model) => model.name || '')
    .map((name) => name.replace(/^models\//, ''))
    .filter(Boolean);
  return Array.from(new Set(models)).sort();
}
// List DeepSeek model ids, deduplicated and sorted. DeepSeek exposes an
// OpenAI-compatible /v1/models endpoint with bearer-token auth.
async function fetchDeepSeekModels(apiKey) {
  const response = await fetch('https://api.deepseek.com/v1/models', {
    headers: {
      'Authorization': `Bearer ${apiKey}`
    }
  });
  if (!response.ok) {
    throw new Error(`DeepSeek models request failed: ${response.status}`);
  }
  const body = await response.json();
  const names = [];
  for (const entry of body.data || []) {
    if (entry.id) names.push(entry.id);
  }
  return [...new Set(names)].sort();
}
// Query the local Ollama daemon for its installed model tags, deduplicated
// and sorted. Requires no API key.
async function fetchOllamaModels() {
  const response = await fetch('http://localhost:11434/api/tags');
  if (!response.ok) {
    throw new Error(`Ollama models request failed: ${response.status}`);
  }
  const body = await response.json();
  const tags = (body.models || []).map((m) => m.name).filter(Boolean);
  return [...new Set(tags)].sort();
}
// Context Management Functions
// Pull saved contexts from chrome.storage.local and refresh both the list
// UI and the count badge on the "Manage" tab.
async function loadContexts() {
  const { contexts = [] } = await chrome.storage.local.get('contexts');
  displayContexts(contexts);
  updateManageTabCount(contexts.length);
}
// Render the saved-context list.
// Fix: titles and content are user-supplied, so they are now rendered via
// textContent (never interpolated into innerHTML) to prevent HTML injection,
// and the Edit/Delete buttons get real listeners — inline onclick attributes
// are blocked by the MV3 extension CSP, so the old buttons never fired.
function displayContexts(contexts) {
  contextList.innerHTML = '';
  if (contexts.length === 0) {
    contextList.innerHTML = '<div class="no-contexts">No context added yet. Add your CV or job description to get better responses!</div>';
    return;
  }
  contexts.forEach((context, index) => {
    const item = document.createElement('div');
    item.className = 'context-item';

    const info = document.createElement('div');
    info.className = 'context-item-info';

    const title = document.createElement('div');
    title.className = 'context-item-title';
    title.textContent = context.title;
    if (context.type) {
      const typeTag = document.createElement('span');
      typeTag.style.fontWeight = '400';
      typeTag.style.color = '#666';
      typeTag.textContent = ` • ${context.type}`;
      title.appendChild(typeTag);
    }

    const preview = document.createElement('div');
    preview.className = 'context-item-preview';
    preview.textContent = context.content.substring(0, 100) + (context.content.length > 100 ? '...' : '');

    info.appendChild(title);
    info.appendChild(preview);

    const actions = document.createElement('div');
    actions.className = 'context-item-actions';

    const editBtn = document.createElement('button');
    editBtn.className = 'edit-btn';
    editBtn.textContent = '✏️ Edit';
    editBtn.addEventListener('click', () => window.editContext(index));

    const deleteBtn = document.createElement('button');
    deleteBtn.className = 'delete-btn danger-btn';
    deleteBtn.textContent = '🗑️ Delete';
    deleteBtn.addEventListener('click', () => window.deleteContext(index));

    actions.appendChild(editBtn);
    actions.appendChild(deleteBtn);
    item.appendChild(info);
    item.appendChild(actions);
    contextList.appendChild(item);
  });
}
// Update the "Manage (N)" tab label with the current context count.
// Fix: guard the querySelector result — every other DOM-touching helper in
// this file null-checks its element, and a missing tab previously threw.
function updateManageTabCount(count) {
  const manageTab = document.querySelector('[data-tab="manage"]');
  if (!manageTab) return;
  manageTab.textContent = `Manage (${count})`;
}
// Persist one context entry to chrome.storage.local, then reset the input
// form and jump to the Manage tab. Rejects blank input and entries over the
// ~4MB local-storage comfort limit via alert().
async function saveContext(title, content) {
  const cleanTitle = title.trim();
  const cleanContent = content.trim();
  if (!cleanTitle || !cleanContent) {
    alert('Please provide both title and content');
    return;
  }
  // Optional basic guard for extremely large items (>4MB)
  const approxBytes = new Blob([content]).size;
  if (approxBytes > 4 * 1024 * 1024) {
    alert('This context is too large to store locally. Please split it into smaller parts.');
    return;
  }
  const stored = await chrome.storage.local.get('contexts');
  const contexts = stored.contexts || [];
  const entry = {
    id: Date.now(),
    title: cleanTitle,
    content: cleanContent,
    type: (contextTypeSelect && contextTypeSelect.value) || 'general',
    createdAt: new Date().toISOString()
  };
  contexts.push(entry);
  await chrome.storage.local.set({ contexts: contexts });
  loadContexts();
  // Clear inputs
  contextTitleInput.value = '';
  contextTextInput.value = '';
  if (contextTypeSelect) contextTypeSelect.value = 'general';
  // Switch to manage tab
  switchTab('manage');
}
// Remove one saved context by list position, after user confirmation, then
// re-render the list.
async function deleteContext(index) {
  const confirmed = confirm('Are you sure you want to delete this context?');
  if (!confirmed) return;
  const stored = await chrome.storage.local.get('contexts');
  const remaining = stored.contexts || [];
  remaining.splice(index, 1);
  await chrome.storage.local.set({ contexts: remaining });
  loadContexts();
}
// Wipe every saved context (irreversible), guarded by a confirm dialog,
// then refresh the list UI.
async function clearAllContexts() {
  const proceed = confirm('Are you sure you want to delete all contexts? This cannot be undone.');
  if (!proceed) return;
  await chrome.storage.local.set({ contexts: [] });
  loadContexts();
}
// Activate the tab button and content pane matching `tabName` ('upload',
// 'text', or 'manage'), deactivating all the others.
function switchTab(tabName) {
  for (const button of document.querySelectorAll('.tab-button')) {
    button.classList.remove('active');
  }
  document.querySelector(`[data-tab="${tabName}"]`).classList.add('active');
  for (const pane of document.querySelectorAll('.tab-content')) {
    pane.classList.remove('active');
  }
  document.getElementById(`${tabName}Tab`).classList.add('active');
}
async function processFile(file) {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = function(e) {
const content = e.target.result;
resolve({
title: file.name,
content: content
});
};
reader.onerror = function() {
reject(new Error('Failed to read file'));
};
if (file.type === 'text/plain') {
reader.readAsText(file);
} else {
// For other file types, we'll need to extract text
// This is a simplified version - in production, you'd want proper PDF/DOC parsing
reader.readAsText(file);
}
});
}
// Multi-device functions
async function enableRemoteAccess() {
try {
remoteStatus.textContent = 'Starting server...';
remoteStatus.className = 'status-message';
// Generate a unique session ID
const sessionId = Math.random().toString(36).substring(2, 15);
const port = 8765;
const accessURL = `http://localhost:${port}?session=${sessionId}`;
// Start WebSocket server (we'll implement this)
chrome.runtime.sendMessage({
action: 'startRemoteServer',
sessionId: sessionId,
port: port
}, (response) => {
if (response.success) {
remoteServerActive = true;
accessUrl.textContent = accessURL;
deviceInfo.style.display = 'block';
remoteStatus.textContent = 'Remote access enabled!';
remoteStatus.className = 'status-message success';
enableRemoteListening.textContent = '🛑 Disable Remote Access';
// Generate QR code (simplified)
generateQRCode(accessURL);
} else {
remoteStatus.textContent = 'Failed to start server: ' + response.error;
remoteStatus.className = 'status-message error';
}
});
} catch (error) {
remoteStatus.textContent = 'Error: ' + error.message;
remoteStatus.className = 'status-message error';
}
}
// Ask the background worker to stop the remote server and reset the panel
// UI. The UI is reset even if nothing answered.
// Fix: the callback now reads chrome.runtime.lastError — leaving it
// unchecked makes Chrome log an "Unchecked runtime.lastError" warning when
// no receiver exists.
function disableRemoteAccess() {
  chrome.runtime.sendMessage({ action: 'stopRemoteServer' }, () => {
    void chrome.runtime.lastError; // acknowledged; UI resets regardless
    remoteServerActive = false;
    deviceInfo.style.display = 'none';
    remoteStatus.textContent = '';
    enableRemoteListening.textContent = '🌐 Enable Remote Access';
  });
}
// Render a static placeholder where a real QR code would go. The `url`
// argument is currently unused by this placeholder — a real QR library
// would encode it. TODO: integrate an actual QR code generator.
function generateQRCode(url) {
  // Simple QR code placeholder - in production, use a QR code library
  qrCode.innerHTML = `
<div style="border: 2px solid #333; padding: 10px; display: inline-block;">
<div style="font-size: 8px; font-family: monospace;">QR Code</div>
<div style="font-size: 6px;">Scan to access</div>
</div>
`;
}
// Make functions available globally for onclick handlers
// Load a context into the edit form, switch to the text tab, and remove the
// original entry so saving re-adds the edited version.
// Fix: this previously routed removal through deleteContext(), which popped
// a misleading "Are you sure you want to delete this context?" confirm in
// the middle of an edit (and raced an unawaited async delete). The entry is
// now removed directly, without a prompt.
window.editContext = function(index) {
  chrome.storage.local.get('contexts', (result) => {
    const contexts = result.contexts || [];
    const context = contexts[index];
    if (!context) return;
    contextTitleInput.value = context.title;
    contextTextInput.value = context.content;
    if (contextTypeSelect) contextTypeSelect.value = context.type || 'general';
    switchTab('text');
    // Drop the entry being edited; saveContext() will store the new version.
    contexts.splice(index, 1);
    chrome.storage.local.set({ contexts: contexts }, () => loadContexts());
  });
};
window.deleteContext = deleteContext;
// Event listeners
// Provider switch: swap the key field, restore any saved key, refetch the
// model list, and persist the choice.
aiProviderSelect.addEventListener('change', function() {
  const selectedProvider = this.value;
  updateApiKeyInput(selectedProvider);
  // Load saved API key for this provider
  chrome.storage.sync.get('apiKeys', (result) => {
    const apiKeys = result.apiKeys || {};
    if (apiKeys[selectedProvider] && aiProviders[selectedProvider].requiresKey) {
      apiKeyInput.value = apiKeys[selectedProvider];
      updateApiKeyStatus('API Key Saved', 'success');
      saveApiKeyButton.textContent = 'API Key Saved';
      saveApiKeyButton.disabled = true;
    } else {
      apiKeyInput.value = '';
      saveApiKeyButton.textContent = 'Save API Key';
      saveApiKeyButton.disabled = !aiProviders[selectedProvider].requiresKey;
    }
    refreshModelOptions(selectedProvider, aiProviders[selectedProvider].defaultModel, apiKeys[selectedProvider]);
  });
  // Save provider selection
  chrome.storage.sync.set({
    aiProvider: selectedProvider
  });
});
// Persist the model choice as soon as it changes.
modelSelect.addEventListener('change', function() {
  chrome.storage.sync.set({ selectedModel: this.value });
});
// Capture mode: tab-only / mic-only / mixed.
if (captureModeSelect) {
  captureModeSelect.addEventListener('change', function() {
    chrome.storage.sync.set({ captureMode: this.value });
  });
}
if (autoOpenAssistantWindowToggle) {
  autoOpenAssistantWindowToggle.addEventListener('change', function() {
    chrome.storage.sync.set({ autoOpenAssistantWindow: this.checked });
  });
}
// Master on/off switch: the background worker owns the real state, so the
// checkbox is re-synced with whatever it reports back.
if (extensionActiveToggle) {
  extensionActiveToggle.addEventListener('change', function() {
    const isActive = this.checked;
    chrome.runtime.sendMessage({ action: 'setActiveState', isActive }, (response) => {
      if (chrome.runtime.lastError) {
        return; // background not reachable; keep the local checkbox state
      }
      if (response && response.success) {
        extensionActiveToggle.checked = response.isActive;
      }
    });
  });
}
// Microphone selection: persist it, show the label, and restart the monitor
// if one is running so it follows the newly chosen device.
if (inputDeviceSelect) {
  inputDeviceSelect.addEventListener('change', function() {
    const deviceId = this.value;
    chrome.storage.sync.set({ inputDeviceId: deviceId });
    const selectedOption = inputDeviceSelect.options[inputDeviceSelect.selectedIndex];
    updateInputDeviceStatus(`Selected: ${selectedOption ? selectedOption.textContent : 'Unknown'}`, '');
    if (micMonitorStream) {
      startMicMonitor();
    }
  });
}
if (startMicMonitorBtn) {
  startMicMonitorBtn.addEventListener('click', function() {
    startMicMonitor();
  });
  updateInputDeviceStatus('Click \"Enable Mic Monitor\" to see live input level.', '');
}
if (speedModeToggle) {
  speedModeToggle.addEventListener('change', function() {
    chrome.storage.sync.set({ speedMode: this.checked });
  });
}
// Any edit to the key field re-enables the save button.
apiKeyInput.addEventListener('input', function() {
  if (aiProviders[aiProviderSelect.value].requiresKey) {
    saveApiKeyButton.textContent = 'Save API Key';
    saveApiKeyButton.disabled = false;
    updateApiKeyStatus('', '');
  }
});
// Persist the entered key for the current provider, then refresh its model
// list using the new credentials.
saveApiKeyButton.addEventListener('click', function() {
  const apiKey = apiKeyInput.value.trim();
  const provider = aiProviderSelect.value;
  if (!aiProviders[provider].requiresKey) {
    return;
  }
  if (apiKey) {
    // Save API key for the current provider
    chrome.storage.sync.get('apiKeys', (result) => {
      const apiKeys = result.apiKeys || {};
      apiKeys[provider] = apiKey;
      chrome.storage.sync.set({ apiKeys: apiKeys }, () => {
        saveApiKeyButton.textContent = 'API Key Saved';
        saveApiKeyButton.disabled = true;
        updateApiKeyStatus('API Key Saved', 'success');
        refreshModelOptions(provider, modelSelect.value, apiKey);
      });
    });
  } else {
    updateApiKeyStatus('Please enter a valid API key', 'error');
  }
});
// Context management event listeners
// Tab strip for the context panel (upload / text / manage).
document.querySelectorAll('.tab-button').forEach(button => {
  button.addEventListener('click', function() {
    const tabName = this.getAttribute('data-tab');
    switchTab(tabName);
  });
});
// Proxy the styled upload button to the hidden file input.
uploadContextBtn.addEventListener('click', function() {
  contextFileInput.click();
});
// Read each chosen file sequentially and store it as a context; failures
// are reported per file without aborting the rest.
contextFileInput.addEventListener('change', async function() {
  const files = Array.from(this.files);
  for (const file of files) {
    try {
      const result = await processFile(file);
      await saveContext(result.title, result.content);
    } catch (error) {
      alert('Error processing file: ' + error.message);
    }
  }
  this.value = ''; // Clear file input
});
// Manual "paste text" context entry.
addContextBtn.addEventListener('click', function() {
  const title = contextTitleInput.value.trim();
  const content = contextTextInput.value.trim();
  saveContext(title, content);
});
clearAllContextBtn.addEventListener('click', clearAllContexts);
// Multi-device event listeners
enableRemoteListening.addEventListener('click', function() {
  if (remoteServerActive) {
    disableRemoteAccess();
  } else {
    enableRemoteAccess();
  }
});
// Copy the remote-access URL with brief visual feedback on the button.
copyUrlBtn.addEventListener('click', function() {
  navigator.clipboard.writeText(accessUrl.textContent).then(() => {
    const originalText = copyUrlBtn.textContent;
    copyUrlBtn.textContent = '✅ Copied!';
    setTimeout(() => {
      copyUrlBtn.textContent = originalText;
    }, 2000);
  });
});
// Start/stop listening. Refuses to start while the extension is toggled off,
// and forwards the current provider/model/capture configuration to the
// background worker.
toggleButton.addEventListener('click', function() {
  isListening = !isListening;
  toggleButton.textContent = isListening ? 'Stop Listening' : 'Start Listening';
  if (isListening) {
    if (extensionActiveToggle && !extensionActiveToggle.checked) {
      // Roll back the optimistic toggle above.
      isListening = false;
      toggleButton.textContent = 'Start Listening';
      aiResponseDiv.textContent = 'Extension is inactive. Turn it on to start listening.';
      return;
    }
    // Send current AI configuration with start listening
    const currentProvider = aiProviderSelect.value;
    const currentModel = modelSelect.value;
    const captureMode = captureModeSelect ? captureModeSelect.value : 'tab';
    chrome.runtime.sendMessage({
      action: 'startListening',
      aiProvider: currentProvider,
      model: currentModel,
      captureMode: captureMode
    });
    transcriptDiv.textContent = 'Listening for questions...';
    aiResponseDiv.textContent = `Using ${aiProviders[currentProvider].name} (${currentModel}). The answer will appear here.`;
    // Optionally pop the floating assistant window.
    chrome.storage.sync.get(['autoOpenAssistantWindow'], (result) => {
      if (result.autoOpenAssistantWindow) {
        chrome.runtime.sendMessage({ action: 'openAssistantWindow' });
      }
    });
  } else {
    chrome.runtime.sendMessage({action: 'stopListening'});
    transcriptDiv.textContent = '';
    aiResponseDiv.textContent = '';
  }
});
// Ask the content script of the active tab to show the on-page overlay.
if (showOverlayBtn) {
  showOverlayBtn.addEventListener('click', function() {
    chrome.tabs.query({ active: true, currentWindow: true }, (tabs) => {
      if (chrome.runtime.lastError || !tabs.length) {
        return;
      }
      chrome.tabs.sendMessage(tabs[0].id, { action: 'showOverlay' });
    });
  });
}
// Trigger the browser's mic permission prompt. The granted stream is stopped
// immediately — only the permission grant matters here.
if (requestMicPermissionBtn) {
  requestMicPermissionBtn.addEventListener('click', function() {
    updateMicPermissionStatus('Requesting microphone permission...', '');
    navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
      stream.getTracks().forEach(track => track.stop());
      updateMicPermissionStatus('Microphone permission granted.', 'success');
      if (inputDeviceSelect) {
        // Re-enumerate so device labels become visible.
        loadInputDevices(inputDeviceSelect.value);
      }
    }).catch((error) => {
      // Map common getUserMedia failures to actionable messages.
      if (error && error.name === 'NotAllowedError') {
        updateMicPermissionStatus('Microphone permission denied. Please allow access for the extension.', 'error');
      } else if (error && error.name === 'NotFoundError') {
        updateMicPermissionStatus('No microphone found.', 'error');
      } else {
        updateMicPermissionStatus(error && error.message ? error.message : 'Failed to request microphone permission.', 'error');
      }
    });
  });
}
// Ask the background worker for tab-capture access to the current tab.
if (grantTabAccessBtn) {
  grantTabAccessBtn.addEventListener('click', function() {
    updateTabAccessStatus('Requesting tab access...', '');
    chrome.runtime.sendMessage({ action: 'grantTabAccess' }, (response) => {
      if (chrome.runtime.lastError) {
        updateTabAccessStatus('Failed to request tab access. Click the extension icon on the target tab.', 'error');
        return;
      }
      if (response && response.success) {
        updateTabAccessStatus('Tab access granted. You can start listening now.', 'success');
      } else {
        updateTabAccessStatus(response && response.error ? response.error : 'Click the extension icon on the target tab to grant access.', 'error');
      }
    });
  });
}
// Keep the device list fresh when microphones are plugged in or removed.
if (navigator.mediaDevices && navigator.mediaDevices.addEventListener) {
  navigator.mediaDevices.addEventListener('devicechange', () => {
    if (inputDeviceSelect) {
      loadInputDevices(inputDeviceSelect.value);
    }
  });
}
// Live transcript / AI-answer updates pushed from the background worker.
chrome.runtime.onMessage.addListener(function(request, sender, sendResponse) {
  if (request.action === 'updateTranscript') {
    transcriptDiv.textContent = request.transcript;
  } else if (request.action === 'updateAIResponse') {
    aiResponseDiv.textContent = request.response;
  }
});
});