feat: Enhance audio capture and monitoring features

- Added "audioCapture" permission to manifest for microphone access.
- Introduced DeepSeek as a new AI provider option in the side panel.
- Implemented a capture mode selection (tab-only, mic-only, mixed) in the side panel.
- Added options to enable/disable the extension and auto-open the assistant window.
- Integrated a mic monitor feature with live input level visualization.
- Included buttons for requesting microphone permission and granting tab access.
- Updated styles for new sections and mic level visualization.
- Enhanced model fetching logic to support DeepSeek and improved error handling.
Commit 56d56395ee (parent 246506b177), authored 2026-01-31 21:55:09 +01:00.
11 changed files with 1651 additions and 276 deletions

View File

@@ -1,50 +1,6 @@
// NOTE(review): this is a unified-diff excerpt of the extension's popup
// script; a hunk header remains embedded below and the span between the
// two hunks is elided, so this block is incomplete as shown.
// Wires up the popup UI once the DOM is ready: OpenAI API-key
// persistence, a start/stop listening toggle, and the transcript /
// AI-answer display panes.
document.addEventListener('DOMContentLoaded', function() {
// Cached references to the popup's controls.
const toggleButton = document.getElementById('toggleListening');
const transcriptDiv = document.getElementById('transcript');
const aiResponseDiv = document.getElementById('aiResponse');
const apiKeyInput = document.getElementById('apiKeyInput');
const saveApiKeyButton = document.getElementById('saveApiKey');
let isListening = false;
// Restore a previously saved API key from synced storage and lock the
// save button so the user can see the key is already persisted.
chrome.storage.sync.get('openaiApiKey', (result) => {
if (result.openaiApiKey) {
apiKeyInput.value = result.openaiApiKey;
saveApiKeyButton.textContent = 'API Key Saved';
saveApiKeyButton.disabled = true;
}
});
// Re-enable saving as soon as the user edits the key field.
apiKeyInput.addEventListener('input', function() {
saveApiKeyButton.textContent = 'Save API Key';
saveApiKeyButton.disabled = false;
});
// Forward the key to the background script for storage; reject empty
// input with an alert rather than sending a blank key.
saveApiKeyButton.addEventListener('click', function() {
const apiKey = apiKeyInput.value.trim();
if (apiKey) {
chrome.runtime.sendMessage({action: 'setApiKey', apiKey: apiKey});
saveApiKeyButton.textContent = 'API Key Saved';
saveApiKeyButton.disabled = true;
} else {
alert('Please enter a valid API key');
}
});
// Flip the listening state: notify the background script to start or
// stop capture, and reset the transcript/answer panes to match.
toggleButton.addEventListener('click', function() {
isListening = !isListening;
toggleButton.textContent = isListening ? 'Stop Listening' : 'Start Listening';
if (isListening) {
chrome.runtime.sendMessage({action: 'startListening'});
transcriptDiv.textContent = 'Listening for questions...';
aiResponseDiv.textContent = 'The answer will appear here.';
} else {
chrome.runtime.sendMessage({action: 'stopListening'});
transcriptDiv.textContent = '';
aiResponseDiv.textContent = '';
}
});
// Receive transcript/answer updates pushed from the background script.
// NOTE(review): the diff elides the 'updateTranscript' branch body here
// (hunk header below); only the tail of the listener is visible.
chrome.runtime.onMessage.addListener(function(request, sender, sendResponse) {
if (request.action === 'updateTranscript') {
@@ -53,4 +9,4 @@ document.addEventListener('DOMContentLoaded', function() {
aiResponseDiv.textContent = request.response;
}
});
});
});