Initial setup
remote-access.html · 335 lines · new file
@@ -0,0 +1,335 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>AI Interview Assistant - Remote Access</title>
    <style>
        body {
            font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
            background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
            margin: 0;
            padding: 20px;
            min-height: 100vh;
            display: flex;
            justify-content: center;
            align-items: center;
        }

        .container {
            background: white;
            border-radius: 15px;
            box-shadow: 0 20px 40px rgba(0,0,0,0.1);
            padding: 30px;
            max-width: 500px;
            width: 100%;
            text-align: center;
        }

        .logo {
            font-size: 48px;
            margin-bottom: 10px;
        }

        h1 {
            color: #2c3e50;
            margin-bottom: 10px;
            font-size: 24px;
        }

        .subtitle {
            color: #666;
            margin-bottom: 30px;
            font-size: 16px;
        }

        .status {
            padding: 15px;
            border-radius: 8px;
            margin-bottom: 20px;
            font-weight: 600;
        }

        .status.connected {
            background: #d5f4e6;
            color: #27ae60;
        }

        .status.disconnected {
            background: #fdf2f2;
            color: #e74c3c;
        }

        .status.connecting {
            background: #e8f4fd;
            color: #3498db;
        }

        .controls {
            display: flex;
            gap: 15px;
            justify-content: center;
            margin-bottom: 30px;
            flex-wrap: wrap;
        }

        button {
            padding: 12px 24px;
            border: none;
            border-radius: 8px;
            cursor: pointer;
            font-size: 16px;
            font-weight: 600;
            transition: all 0.3s ease;
        }

        .primary-btn {
            background: #3498db;
            color: white;
        }

        .primary-btn:hover {
            background: #2980b9;
            transform: translateY(-2px);
        }

        .danger-btn {
            background: #e74c3c;
            color: white;
        }

        .danger-btn:hover {
            background: #c0392b;
        }

        .transcript-section, .response-section {
            margin-bottom: 20px;
            text-align: left;
        }

        .section-title {
            font-weight: 600;
            color: #2c3e50;
            margin-bottom: 10px;
            display: flex;
            align-items: center;
            gap: 8px;
        }

        .content-box {
            background: #f8fafc;
            border: 1px solid #e0e6ed;
            border-radius: 8px;
            padding: 15px;
            min-height: 60px;
            max-height: 150px;
            overflow-y: auto;
            font-size: 14px;
            line-height: 1.5;
        }

        .response-box {
            background: #e8f6fd;
        }

        .device-info {
            background: #f0f4f8;
            padding: 15px;
            border-radius: 8px;
            margin-top: 20px;
            font-size: 14px;
            color: #666;
        }

        @media (max-width: 600px) {
            .container {
                padding: 20px;
                margin: 10px;
            }

            .controls {
                flex-direction: column;
            }

            button {
                width: 100%;
            }
        }
    </style>
</head>
<body>
    <div class="container">
        <div class="logo">🤖</div>
        <h1>AI Interview Assistant</h1>
        <div class="subtitle">Remote Access Portal</div>

        <div id="status" class="status disconnected">
            🔴 Disconnected from main device
        </div>

        <div class="controls">
            <button id="connectBtn" class="primary-btn">🔗 Connect</button>
            <button id="listenBtn" class="primary-btn" disabled>🎤 Start Listening</button>
            <button id="stopBtn" class="danger-btn" disabled>⏹️ Stop</button>
        </div>

        <div class="transcript-section">
            <div class="section-title">
                <span>🎯</span>
                <span>Live Transcript</span>
            </div>
            <div id="transcript" class="content-box">
                Waiting for audio input...
            </div>
        </div>

        <div class="response-section">
            <div class="section-title">
                <span>🧠</span>
                <span>AI Response</span>
            </div>
            <div id="aiResponse" class="content-box response-box">
                AI responses will appear here...
            </div>
        </div>

        <div class="device-info">
            <strong>📱 How to use:</strong><br>
            1. Make sure the main Chrome extension is running<br>
            2. Click "Connect" to establish a connection<br>
            3. Start listening to capture audio from this device<br>
            4. Questions will be sent to the main device for AI processing
        </div>
    </div>

    <script>
        const statusEl = document.getElementById('status');
        const connectBtn = document.getElementById('connectBtn');
        const listenBtn = document.getElementById('listenBtn');
        const stopBtn = document.getElementById('stopBtn');
        const transcriptEl = document.getElementById('transcript');
        const aiResponseEl = document.getElementById('aiResponse');

        let isConnected = false;
        let isListening = false;
        let recognition = null;
        let websocket = null;

        // Get session ID from URL
        const urlParams = new URLSearchParams(window.location.search);
        const sessionId = urlParams.get('session');

        if (!sessionId) {
            statusEl.textContent = '❌ Invalid session. Please use the link from the main extension.';
            statusEl.className = 'status disconnected';
        }

        connectBtn.addEventListener('click', connect);
        listenBtn.addEventListener('click', toggleListening);
        stopBtn.addEventListener('click', stopListening);

        function connect() {
            statusEl.textContent = '🔄 Connecting...';
            statusEl.className = 'status connecting';

            // In a real implementation, this would connect to the WebSocket server
            // For demo purposes, we'll simulate the connection
            setTimeout(() => {
                isConnected = true;
                statusEl.textContent = '🟢 Connected to main device';
                statusEl.className = 'status connected';
                connectBtn.textContent = '✅ Connected';
                connectBtn.disabled = true;
                listenBtn.disabled = false;
            }, 1500);
        }
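
        // --- Hedged sketch: what a real connection might look like ---
        // Not wired into the demo above; the wss:// URL and the message shape
        // below are illustrative assumptions, not the extension's actual protocol.
        function connectWebSocketSketch() {
            const ws = new WebSocket(`wss://example.invalid/remote?session=${encodeURIComponent(sessionId)}`);
            ws.onopen = () => {
                websocket = ws;
                isConnected = true;
                statusEl.textContent = '🟢 Connected to main device';
                statusEl.className = 'status connected';
                connectBtn.disabled = true;
                listenBtn.disabled = false;
            };
            ws.onclose = () => {
                isConnected = false;
                statusEl.textContent = '🔴 Disconnected from main device';
                statusEl.className = 'status disconnected';
            };
            ws.onmessage = (event) => {
                // Assumed message shape: { type: 'ai-response', text: '...' }
                const msg = JSON.parse(event.data);
                if (msg.type === 'ai-response') {
                    aiResponseEl.textContent = msg.text;
                }
            };
        }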

        function toggleListening() {
            if (!isListening) {
                startListening();
            } else {
                stopListening();
            }
        }

        function startListening() {
            if (!('webkitSpeechRecognition' in window)) {
                alert('Speech recognition not supported in this browser');
                return;
            }

            recognition = new webkitSpeechRecognition();
            recognition.continuous = true;
            recognition.interimResults = true;

            recognition.onstart = function() {
                isListening = true;
                listenBtn.textContent = '🔴 Listening...';
                listenBtn.classList.remove('primary-btn');
                listenBtn.classList.add('danger-btn');
                stopBtn.disabled = false;
                transcriptEl.textContent = 'Listening for questions...';
            };

            recognition.onresult = function(event) {
                let finalTranscript = '';
                for (let i = event.resultIndex; i < event.results.length; ++i) {
                    if (event.results[i].isFinal) {
                        finalTranscript += event.results[i][0].transcript;
                    }
                }

                if (finalTranscript.trim() !== '') {
                    transcriptEl.textContent = finalTranscript;

                    // Check if it's a question and send to main device
                    if (isQuestion(finalTranscript)) {
                        sendQuestionToMainDevice(finalTranscript);
                    }
                }
            };

            recognition.onerror = function(event) {
                console.error('Speech recognition error:', event.error);
                aiResponseEl.textContent = `Speech recognition error: ${event.error}`;
            };

            recognition.start();
        }
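
        // --- Hedged sketch: cross-browser constructor lookup ---
        // startListening() above checks only the webkit-prefixed constructor;
        // a more tolerant lookup could also accept the unprefixed
        // SpeechRecognition where a browser exposes it. Illustrative only,
        // not called by the demo.
        function getSpeechRecognitionCtor() {
            return window.SpeechRecognition || window.webkitSpeechRecognition || null;
        }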

        function stopListening() {
            if (recognition) {
                recognition.stop();
            }
            isListening = false;
            listenBtn.textContent = '🎤 Start Listening';
            listenBtn.classList.remove('danger-btn');
            listenBtn.classList.add('primary-btn');
            stopBtn.disabled = true;
            transcriptEl.textContent = 'Stopped listening.';
        }

        function isQuestion(text) {
            const questionWords = ['what', 'when', 'where', 'who', 'why', 'how'];
            const lowerText = text.toLowerCase();
            return questionWords.some(word => lowerText.includes(word)) || text.includes('?');
        }

        function sendQuestionToMainDevice(question) {
            // In a real implementation, this would send the question via WebSocket
            // For demo purposes, we'll just show a processing message
            aiResponseEl.textContent = '🤔 Processing your question...';

            // Simulate AI response after a delay
            setTimeout(() => {
                aiResponseEl.textContent = `Demo response: This is where the AI would respond to "${question.substring(0, 50)}${question.length > 50 ? '...' : ''}"`;
            }, 2000);
        }
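
        // --- Hedged sketch: forwarding the question over WebSocket ---
        // Not used by the demo above; the message fields are assumptions for
        // illustration only.
        function sendQuestionSketch(question) {
            if (websocket && websocket.readyState === WebSocket.OPEN) {
                websocket.send(JSON.stringify({
                    type: 'question',
                    session: sessionId,
                    text: question
                }));
            }
        }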

        // Auto-connect if session ID is present
        if (sessionId) {
            setTimeout(connect, 1000);
        }
    </script>
</body>
</html>