Spaces:
Running
Running
Update index.html
Browse files- index.html +156 -225
index.html
CHANGED
|
@@ -3,15 +3,16 @@
|
|
| 3 |
<head>
|
| 4 |
<meta charset="UTF-8">
|
| 5 |
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
|
| 6 |
-
<title>AI Assistant (
|
| 7 |
<style>
|
| 8 |
@import url('https://fonts.googleapis.com/css2?family=Roboto:wght@300;400;500;700&display=swap');
|
| 9 |
:root {
|
| 10 |
--primary-color: #007bff; --secondary-color: #6c757d; --text-color: #212529;
|
| 11 |
--bg-color: #f8f9fa; --user-msg-bg: #e7f5ff; --user-msg-text: #004085;
|
| 12 |
--bot-msg-bg: #ffffff; --bot-msg-border: #dee2e6; --system-msg-color: #6c757d;
|
| 13 |
-
--error-color: #
|
| 14 |
-
--
|
|
|
|
| 15 |
--border-color: #dee2e6; --input-bg: #ffffff; --input-border: #ced4da;
|
| 16 |
--button-bg: var(--primary-color); --button-hover-bg: #0056b3; --button-disabled-bg: #adb5bd;
|
| 17 |
--scrollbar-thumb: var(--primary-color); --scrollbar-track: #e9ecef;
|
|
@@ -21,80 +22,58 @@
|
|
| 21 |
* { box-sizing: border-box; margin: 0; padding: 0; }
|
| 22 |
html { height: 100%; }
|
| 23 |
body {
|
| 24 |
-
font-family: 'Roboto', sans-serif; display: flex; flex-direction: column; align-items: center; justify-content: flex-start;
|
| 25 |
min-height: 100vh; background-color: var(--bg-color); color: var(--text-color); padding: 10px; overscroll-behavior: none;
|
| 26 |
}
|
| 27 |
-
#control-panel {
|
| 28 |
background: var(--header-bg); padding: 15px; border-radius: 8px; margin-bottom: 10px;
|
| 29 |
box-shadow: var(--header-shadow); width: 100%; max-width: 600px; border: 1px solid var(--border-color);
|
|
|
|
| 30 |
}
|
| 31 |
-
#control-panel h2 { font-size: 1.1em; margin-bottom: 10px; color: var(--primary-color); font-weight: 500; }
|
| 32 |
-
.model-options label { margin-right: 15px; cursor: pointer; }
|
| 33 |
-
.model-options input[type="radio"] { margin-right: 5px; }
|
| 34 |
#loadModelButton {
|
| 35 |
-
|
| 36 |
-
|
| 37 |
}
|
| 38 |
-
#loadModelButton:hover:not(:disabled) { background-color:
|
| 39 |
#loadModelButton:disabled { background-color: var(--button-disabled-bg); cursor: not-allowed; }
|
| 40 |
-
#model-status {
|
| 41 |
-
#model-status.
|
| 42 |
-
#model-status.
|
| 43 |
-
#model-status.
|
|
|
|
|
|
|
| 44 |
|
| 45 |
#chat-container {
|
| 46 |
-
width: 100%; max-width: 600px; height: 75vh;
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
}
|
| 50 |
-
h1 { /* Header ์คํ์ผ ์ ์ง */
|
| 51 |
-
text-align: center; color: var(--primary-color); padding: 15px; background-color: var(--header-bg);
|
| 52 |
-
border-bottom: 1px solid var(--border-color); font-size: 1.2em; font-weight: 500; flex-shrink: 0;
|
| 53 |
-
box-shadow: var(--header-shadow); position: relative; z-index: 10;
|
| 54 |
-
}
|
| 55 |
-
#chatbox { /* ์ฑํ
์ฐฝ ์คํ์ผ ์ ์ง */
|
| 56 |
-
flex-grow: 1; overflow-y: auto; padding: 15px; display: flex; flex-direction: column; gap: 12px;
|
| 57 |
-
scrollbar-width: thin; scrollbar-color: var(--scrollbar-thumb) var(--scrollbar-track); background-color: var(--bg-color);
|
| 58 |
}
|
|
|
|
|
|
|
| 59 |
#chatbox::-webkit-scrollbar { width: 6px; } #chatbox::-webkit-scrollbar-track { background: var(--scrollbar-track); border-radius: 3px; } #chatbox::-webkit-scrollbar-thumb { background-color: var(--scrollbar-thumb); border-radius: 3px; }
|
| 60 |
-
#messages div {
|
| 61 |
-
padding: 10px 15px; border-radius: 16px; max-width: 85%; word-wrap: break-word; line-height: 1.5;
|
| 62 |
-
font-size: 1em; box-shadow: 0 1px 2px rgba(0,0,0,0.05); position: relative; animation: fadeIn 0.25s ease-out;
|
| 63 |
-
}
|
| 64 |
@keyframes fadeIn { from { opacity: 0; transform: translateY(5px); } to { opacity: 1; transform: translateY(0); } }
|
| 65 |
.user-message { background: var(--user-msg-bg); color: var(--user-msg-text); align-self: flex-end; border-bottom-right-radius: 4px; margin-left: auto; }
|
| 66 |
.bot-message { background-color: var(--bot-msg-bg); border: 1px solid var(--bot-msg-border); align-self: flex-start; border-bottom-left-radius: 4px; margin-right: auto; }
|
| 67 |
.bot-message a { color: var(--primary-color); text-decoration: none; } .bot-message a:hover { text-decoration: underline; }
|
| 68 |
.system-message { font-style: italic; color: var(--system-msg-color); text-align: center; font-size: 0.85em; background-color: transparent; box-shadow: none; align-self: center; max-width: 100%; padding: 5px 0; animation: none; }
|
| 69 |
-
|
| 70 |
-
color: var(--error-color); font-weight: 500; background-color: var(--error-bg); border: 1px solid var(--error-border);
|
| 71 |
-
padding: 10px 15px; border-radius: 8px; align-self: stretch; text-align: left;
|
| 72 |
-
}
|
| 73 |
-
.status-indicator { /* ์ฌ๊ธฐ์๋ ์ฌ์ฉ ์ํจ, #model-status ๋ก ๋์ฒด */
|
| 74 |
-
display: none;
|
| 75 |
-
}
|
| 76 |
-
#input-area { /* ์
๋ ฅ ์์ญ ์คํ์ผ ์ ์ง */
|
| 77 |
-
display: flex; padding: 10px 12px; border-top: 1px solid var(--border-color); background-color: var(--header-bg); align-items: center; gap: 8px; flex-shrink: 0;
|
| 78 |
-
}
|
| 79 |
#userInput { flex-grow: 1; padding: 10px 15px; border: 1px solid var(--input-border); border-radius: 20px; outline: none; font-size: 1em; font-family: 'Roboto', sans-serif; background-color: var(--input-bg); transition: border-color 0.2s ease; min-height: 42px; resize: none; overflow-y: auto; }
|
| 80 |
#userInput:focus { border-color: var(--primary-color); }
|
| 81 |
-
.control-button {
|
| 82 |
-
padding: 0; border: none; border-radius: 50%; cursor: pointer; background-color: var(--button-bg); color: white; width: 42px; height: 42px; font-size: 1.3em; display: flex; align-items: center; justify-content: center; flex-shrink: 0; transition: background-color 0.2s ease, transform 0.1s ease; box-shadow: 0 1px 2px rgba(0,0,0,0.08);
|
| 83 |
-
}
|
| 84 |
.control-button:hover:not(:disabled) { background-color: var(--button-hover-bg); transform: translateY(-1px); }
|
| 85 |
.control-button:active:not(:disabled) { transform: scale(0.95); }
|
| 86 |
.control-button:disabled { background-color: var(--button-disabled-bg); cursor: not-allowed; transform: none; box-shadow: none; }
|
| 87 |
#toggleSpeakerButton.muted { background-color: #aaa; }
|
| 88 |
-
@media (max-width: 600px) {
|
| 89 |
-
|
| 90 |
-
|
| 91 |
-
|
| 92 |
-
|
| 93 |
-
|
| 94 |
-
|
| 95 |
-
|
| 96 |
-
|
| 97 |
-
}
|
| 98 |
</style>
|
| 99 |
<script type="importmap">
|
| 100 |
{ "imports": { "@xenova/transformers": "https://cdn.jsdelivr.net/npm/@xenova/[email protected]" } }
|
|
@@ -102,17 +81,9 @@
|
|
| 102 |
</head>
|
| 103 |
<body>
|
| 104 |
<div id="control-panel">
|
| 105 |
-
<h2>
|
| 106 |
-
<
|
| 107 |
-
|
| 108 |
-
<input type="radio" name="modelSelection" value="Xenova/gemma-2b-it" checked> Gemma 2B (Recommended, Stable)
|
| 109 |
-
</label>
|
| 110 |
-
<label>
|
| 111 |
-
<input type="radio" name="modelSelection" value="onnx-community/gemma-3-1b-it-ONNX-GQA"> Gemma 3 1B (Experimental, Likely Fails)
|
| 112 |
-
</label>
|
| 113 |
-
</div>
|
| 114 |
-
<button id="loadModelButton">Load Selected Model</button>
|
| 115 |
-
<div id="model-status">No model loaded. Select a model and click load.</div>
|
| 116 |
</div>
|
| 117 |
|
| 118 |
<div id="chat-container">
|
|
@@ -123,7 +94,7 @@
|
|
| 123 |
</div>
|
| 124 |
</div>
|
| 125 |
<div id="input-area">
|
| 126 |
-
<textarea id="userInput" placeholder="Please load
|
| 127 |
<button id="speechButton" class="control-button" title="Speak message" disabled>๐ค</button>
|
| 128 |
<button id="toggleSpeakerButton" class="control-button" title="Toggle AI speech output" disabled>๐</button>
|
| 129 |
<button id="sendButton" class="control-button" title="Send message" disabled>โค</button>
|
|
@@ -131,221 +102,188 @@
|
|
| 131 |
</div>
|
| 132 |
|
| 133 |
<script type="module">
|
| 134 |
-
import { pipeline, env } from '@xenova/transformers';
|
| 135 |
|
| 136 |
-
|
| 137 |
const TASK = 'text-generation';
|
| 138 |
-
const
|
| 139 |
-
const EXPERIMENTAL_MODEL = 'onnx-community/gemma-3-1b-it-ONNX-GQA'; // The problematic one
|
| 140 |
|
| 141 |
-
// ONNX Runtime & WebGPU config
|
| 142 |
env.allowLocalModels = false;
|
| 143 |
env.useBrowserCache = true;
|
| 144 |
env.backends.onnx.executionProviders = ['webgpu', 'wasm'];
|
| 145 |
console.log('Using Execution Providers:', env.backends.onnx.executionProviders);
|
| 146 |
|
| 147 |
-
// DOM Elements
|
| 148 |
const chatbox = document.getElementById('messages');
|
| 149 |
const userInput = document.getElementById('userInput');
|
| 150 |
const sendButton = document.getElementById('sendButton');
|
| 151 |
const chatbotNameElement = document.getElementById('chatbot-name');
|
| 152 |
const speechButton = document.getElementById('speechButton');
|
| 153 |
const toggleSpeakerButton = document.getElementById('toggleSpeakerButton');
|
| 154 |
-
const modelStatus = document.getElementById('model-status');
|
| 155 |
-
const loadModelButton = document.getElementById('loadModelButton');
|
| 156 |
-
|
| 157 |
-
|
| 158 |
-
//
|
| 159 |
-
let
|
| 160 |
-
let
|
| 161 |
-
let isLoadingModel = false; // Flag to prevent multiple loads
|
| 162 |
let conversationHistory = [];
|
| 163 |
let botState = { botName: "AI Assistant", userName: "User", botSettings: { useSpeechOutput: true } };
|
| 164 |
-
const stateKey = '
|
| 165 |
-
const historyKey = '
|
| 166 |
|
| 167 |
-
// Web Speech API
|
| 168 |
let recognition = null;
|
| 169 |
let synthesis = window.speechSynthesis;
|
| 170 |
let targetVoice = null;
|
| 171 |
let isListening = false;
|
| 172 |
|
| 173 |
-
// --- Initialization ---
|
| 174 |
window.addEventListener('load', () => {
|
| 175 |
-
loadState();
|
| 176 |
chatbotNameElement.textContent = botState.botName;
|
| 177 |
updateSpeakerButtonUI();
|
| 178 |
initializeSpeechAPI();
|
| 179 |
setupInputAutosize();
|
| 180 |
-
updateChatUIState(); // Initial
|
|
|
|
| 181 |
setTimeout(loadVoices, 500);
|
| 182 |
-
|
| 183 |
-
// Add event listener for the load button
|
| 184 |
loadModelButton.addEventListener('click', handleLoadModelClick);
|
| 185 |
-
|
| 186 |
-
// Restore potentially saved history (display requires chatbox ready)
|
| 187 |
-
if (conversationHistory.length > 0) displayHistory();
|
| 188 |
});
|
| 189 |
|
| 190 |
-
// --- State Persistence ---
|
| 191 |
function loadState() {
|
| 192 |
-
const savedState = localStorage.getItem(stateKey);
|
| 193 |
-
|
| 194 |
-
const savedHistory = localStorage.getItem(historyKey);
|
| 195 |
-
// Load history but don't display yet, wait for chatbox element
|
| 196 |
-
if (savedHistory) { try { conversationHistory = JSON.parse(savedHistory); } catch(e) { console.error("History parse error", e); conversationHistory = []; } }
|
| 197 |
}
|
| 198 |
function saveState() {
|
| 199 |
localStorage.setItem(stateKey, JSON.stringify(botState));
|
| 200 |
localStorage.setItem(historyKey, JSON.stringify(conversationHistory));
|
| 201 |
}
|
| 202 |
function displayHistory() {
|
| 203 |
-
chatbox.innerHTML = '';
|
| 204 |
-
conversationHistory.forEach(msg => displayMessage(msg.sender, msg.text, false));
|
| 205 |
}
|
| 206 |
|
| 207 |
-
// --- UI Update Functions ---
|
| 208 |
function displayMessage(sender, text, animate = true, isError = false) {
|
| 209 |
const messageDiv = document.createElement('div');
|
| 210 |
let messageClass = sender === 'user' ? 'user-message' : sender === 'bot' ? 'bot-message' : 'system-message';
|
| 211 |
-
if (isError) messageClass = 'error-message';
|
| 212 |
-
messageDiv.classList.add(messageClass);
|
| 213 |
-
|
| 214 |
-
|
| 215 |
-
text = text.replace(/\[(.*?)\]\((.*?)\)/g, '<a href="$2" target="_blank" rel="noopener noreferrer">$1</a>');
|
| 216 |
-
text = text.replace(/\*\*(.*?)\*\*/g, '<strong>$1</strong>').replace(/\*(.*?)\*/g, '<em>$1</em>');
|
| 217 |
-
text = text.replace(/\n/g, '<br>');
|
| 218 |
-
messageDiv.innerHTML = text;
|
| 219 |
-
chatbox.appendChild(messageDiv);
|
| 220 |
-
chatbox.scrollTo({ top: chatbox.scrollHeight, behavior: animate ? 'smooth' : 'auto' });
|
| 221 |
}
|
| 222 |
|
| 223 |
-
|
| 224 |
-
|
| 225 |
-
modelStatus.textContent = message;
|
| 226 |
-
modelStatus.className = 'model-status'; // Reset classes
|
| 227 |
-
if (type) {
|
| 228 |
-
modelStatus.classList.add(type);
|
| 229 |
-
}
|
| 230 |
-
console.log(`Model Status (${type}): ${message}`); // Also log status
|
| 231 |
}
|
| 232 |
|
| 233 |
-
|
| 234 |
-
|
| 235 |
-
|
| 236 |
-
|
| 237 |
-
|
| 238 |
-
|
| 239 |
-
|
| 240 |
-
|
| 241 |
-
|
| 242 |
-
if (
|
| 243 |
-
|
| 244 |
-
|
| 245 |
-
userInput.placeholder = "Model loading...";
|
| 246 |
-
} else {
|
| 247 |
-
userInput.placeholder = "Please load a model first...";
|
| 248 |
-
}
|
| 249 |
}
|
| 250 |
|
| 251 |
-
|
| 252 |
-
|
| 253 |
-
toggleSpeakerButton.textContent = botState.botSettings.useSpeechOutput ? '๐' : '๐';
|
| 254 |
-
toggleSpeakerButton.title = botState.botSettings.useSpeechOutput ? 'Turn off AI speech' : 'Turn on AI speech';
|
| 255 |
-
toggleSpeakerButton.classList.toggle('muted', !botState.botSettings.useSpeechOutput);
|
| 256 |
-
}
|
| 257 |
-
function showSpeechStatus(message) {
|
| 258 |
-
// Simplified: log to console, or could update a small icon later
|
| 259 |
-
console.log("Speech Status:", message);
|
| 260 |
-
// Optionally update a dedicated small status area if needed
|
| 261 |
}
|
| 262 |
-
function
|
| 263 |
-
|
| 264 |
-
userInput.style.height = 'auto';
|
| 265 |
-
userInput.style.height = userInput.scrollHeight + 'px';
|
| 266 |
-
updateChatUIState(); // Update button state based on input
|
| 267 |
-
});
|
| 268 |
-
}
|
| 269 |
|
| 270 |
-
// --- Model & AI Logic ---
|
| 271 |
async function handleLoadModelClick() {
|
| 272 |
-
if (isLoadingModel || generator) return;
|
| 273 |
-
|
| 274 |
-
const selectedModel = document.querySelector('input[name="modelSelection"]:checked').value;
|
| 275 |
-
if (!selectedModel) {
|
| 276 |
-
updateModelStatus("Please select a model first.", "error");
|
| 277 |
-
return;
|
| 278 |
-
}
|
| 279 |
-
|
| 280 |
isLoadingModel = true;
|
| 281 |
-
currentModelName =
|
| 282 |
-
|
| 283 |
-
|
|
|
|
|
|
|
| 284 |
isLoadingModel = false;
|
| 285 |
-
updateChatUIState(); // Update UI based on
|
| 286 |
}
|
| 287 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 288 |
|
| 289 |
-
|
| 290 |
-
updateModelStatus(`Loading ${modelId}... This may take time.`, 'loading');
|
| 291 |
-
let pipelineOptions = {};
|
| 292 |
-
|
| 293 |
-
// *** IMPORTANT: Only add dtype for the experimental model ***
|
| 294 |
-
if (modelId === EXPERIMENTAL_MODEL) {
|
| 295 |
-
pipelineOptions.dtype = 'q4'; // Use q4 only for Gemma 3 1B as per its (potentially faulty) example
|
| 296 |
-
updateModelStatus(`Loading ${modelId} with Q4 quantization... (Experimental, might fail)`, 'loading');
|
| 297 |
-
}
|
| 298 |
-
|
| 299 |
try {
|
| 300 |
-
|
| 301 |
-
|
| 302 |
-
|
| 303 |
-
|
| 304 |
-
|
| 305 |
-
|
| 306 |
-
|
| 307 |
-
|
| 308 |
-
|
| 309 |
-
|
| 310 |
-
|
|
|
|
| 311 |
|
| 312 |
} catch (error) {
|
| 313 |
-
console.error(`
|
| 314 |
-
|
| 315 |
-
|
| 316 |
-
|
| 317 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 318 |
}
|
| 319 |
}
|
| 320 |
|
| 321 |
function buildPrompt() {
|
| 322 |
-
|
| 323 |
-
|
| 324 |
-
|
| 325 |
-
|
| 326 |
-
|
| 327 |
-
|
| 328 |
-
|
| 329 |
|
| 330 |
-
|
| 331 |
-
|
| 332 |
-
|
| 333 |
-
|
| 334 |
-
|
| 335 |
-
|
| 336 |
|
| 337 |
-
// --- Main Interaction Logic ---
|
| 338 |
async function handleUserMessage() {
|
| 339 |
const userText = userInput.value.trim();
|
| 340 |
-
// Check if
|
| 341 |
if (!userText || !generator || isLoadingModel) return;
|
| 342 |
|
| 343 |
userInput.value = ''; userInput.style.height = 'auto';
|
| 344 |
-
updateChatUIState(); // Disable send button immediately
|
|
|
|
| 345 |
displayMessage('user', userText);
|
| 346 |
conversationHistory.push({ sender: 'user', text: userText });
|
| 347 |
|
| 348 |
-
updateModelStatus("AI thinking...", "loading");
|
| 349 |
|
| 350 |
const prompt = buildPrompt();
|
| 351 |
try {
|
|
@@ -364,34 +302,27 @@
|
|
| 364 |
displayMessage('bot', errorReply);
|
| 365 |
conversationHistory.push({ sender: 'bot', text: errorReply });
|
| 366 |
} finally {
|
| 367 |
-
updateModelStatus(`${currentModelName} ready.`, "success");
|
| 368 |
-
updateChatUIState();
|
| 369 |
userInput.focus();
|
| 370 |
}
|
| 371 |
}
|
| 372 |
|
| 373 |
-
// --- Speech API Functions
|
| 374 |
function initializeSpeechAPI() {
|
| 375 |
-
|
| 376 |
-
|
| 377 |
-
|
| 378 |
-
|
| 379 |
-
}
|
| 380 |
-
function loadVoices() {
|
| 381 |
-
if (!synthesis) return; let voices = synthesis.getVoices(); if (voices.length === 0) { synthesis.onvoiceschanged = () => { voices = synthesis.getVoices(); findAndSetVoice(voices); }; } else { findAndSetVoice(voices); }
|
| 382 |
-
}
|
| 383 |
-
function findAndSetVoice(voices) {
|
| 384 |
-
targetVoice = voices.find(v => v.lang === 'en-US') || voices.find(v => v.lang.startsWith('en-'));
|
| 385 |
-
if (targetVoice) { console.log("Using English voice:", targetVoice.name, targetVoice.lang); } else { console.warn("No suitable English voice found."); }
|
| 386 |
}
|
| 387 |
-
|
| 388 |
-
|
| 389 |
-
|
| 390 |
|
| 391 |
// --- Event Listeners ---
|
| 392 |
sendButton.addEventListener('click', handleUserMessage);
|
| 393 |
userInput.addEventListener('keypress', (e) => { if (e.key === 'Enter' && !e.shiftKey) { e.preventDefault(); handleUserMessage(); } });
|
| 394 |
-
speechButton.addEventListener('click', () => { if (recognition && !isListening && generator && !isLoadingModel) { try { recognition.start(); } catch (error) { console.error("Rec start fail:", error); updateModelStatus(`Failed to start recognition`, 'error'); setTimeout(() => updateModelStatus(generator ? `${currentModelName} ready.` : 'No model loaded.', generator ? 'success' : 'info'), 2000); isListening = false; updateChatUIState(); } } });
|
| 395 |
|
| 396 |
</script>
|
| 397 |
</body>
|
|
|
|
| 3 |
<head>
|
| 4 |
<meta charset="UTF-8">
|
| 5 |
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
|
| 6 |
+
<title>AI Assistant (Gemma 3 1B Attempt)</title>
|
| 7 |
<style>
|
| 8 |
@import url('https://fonts.googleapis.com/css2?family=Roboto:wght@300;400;500;700&display=swap');
|
| 9 |
:root {
|
| 10 |
--primary-color: #007bff; --secondary-color: #6c757d; --text-color: #212529;
|
| 11 |
--bg-color: #f8f9fa; --user-msg-bg: #e7f5ff; --user-msg-text: #004085;
|
| 12 |
--bot-msg-bg: #ffffff; --bot-msg-border: #dee2e6; --system-msg-color: #6c757d;
|
| 13 |
+
--error-color: #721c24; --error-bg: #f8d7da; --error-border: #f5c6cb;
|
| 14 |
+
--warning-color: #856404; --warning-bg: #fff3cd; --warning-border: #ffeeba;
|
| 15 |
+
--success-color: #155724; --success-bg: #d4edda; --success-border: #c3e6cb;
|
| 16 |
--border-color: #dee2e6; --input-bg: #ffffff; --input-border: #ced4da;
|
| 17 |
--button-bg: var(--primary-color); --button-hover-bg: #0056b3; --button-disabled-bg: #adb5bd;
|
| 18 |
--scrollbar-thumb: var(--primary-color); --scrollbar-track: #e9ecef;
|
|
|
|
| 22 |
* { box-sizing: border-box; margin: 0; padding: 0; }
|
| 23 |
html { height: 100%; }
|
| 24 |
body {
|
| 25 |
+
font-family: 'Roboto', sans-serif; display: flex; flex-direction: column; align-items: center; justify-content: flex-start;
|
| 26 |
min-height: 100vh; background-color: var(--bg-color); color: var(--text-color); padding: 10px; overscroll-behavior: none;
|
| 27 |
}
|
| 28 |
+
#control-panel {
|
| 29 |
background: var(--header-bg); padding: 15px; border-radius: 8px; margin-bottom: 10px;
|
| 30 |
box-shadow: var(--header-shadow); width: 100%; max-width: 600px; border: 1px solid var(--border-color);
|
| 31 |
+
text-align: center; /* Center button and status */
|
| 32 |
}
|
|
|
|
|
|
|
|
|
|
| 33 |
#loadModelButton {
|
| 34 |
+
padding: 10px 20px; font-size: 1em; background-color: var(--primary-color); /* Use primary color */
|
| 35 |
+
color: white; border: none; border-radius: 5px; cursor: pointer; transition: background-color 0.2s; margin-bottom: 10px;
|
| 36 |
}
|
| 37 |
+
#loadModelButton:hover:not(:disabled) { background-color: var(--button-hover-bg); }
|
| 38 |
#loadModelButton:disabled { background-color: var(--button-disabled-bg); cursor: not-allowed; }
|
| 39 |
+
#model-status { font-size: 0.9em; padding: 10px; border-radius: 4px; text-align: center; min-height: 40px; line-height: 1.4; }
|
| 40 |
+
#model-status.info { background-color: #e2e3e5; border: 1px solid #d6d8db; color: #383d41; }
|
| 41 |
+
#model-status.loading { background-color: var(--warning-bg); border: 1px solid var(--warning-border); color: var(--warning-color); }
|
| 42 |
+
#model-status.success { background-color: var(--success-bg); border: 1px solid var(--success-border); color: var(--success-color); }
|
| 43 |
+
#model-status.error { background-color: var(--error-bg); border: 1px solid var(--error-border); color: var(--error-color); }
|
| 44 |
+
#model-status.tokenizer-only { background-color: var(--warning-bg); border: 1px solid var(--warning-border); color: var(--warning-color); } /* Style for tokenizer only */
|
| 45 |
|
| 46 |
#chat-container {
|
| 47 |
+
width: 100%; max-width: 600px; height: 75vh; max-height: 700px; background-color: #ffffff;
|
| 48 |
+
border-radius: 12px; box-shadow: var(--container-shadow); display: flex; flex-direction: column;
|
| 49 |
+
overflow: hidden; border: 1px solid var(--border-color);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 50 |
}
|
| 51 |
+
h1 { text-align: center; color: var(--primary-color); padding: 15px; background-color: var(--header-bg); border-bottom: 1px solid var(--border-color); font-size: 1.2em; font-weight: 500; flex-shrink: 0; box-shadow: var(--header-shadow); position: relative; z-index: 10; }
|
| 52 |
+
#chatbox { flex-grow: 1; overflow-y: auto; padding: 15px; display: flex; flex-direction: column; gap: 12px; scrollbar-width: thin; scrollbar-color: var(--scrollbar-thumb) var(--scrollbar-track); background-color: var(--bg-color); }
|
| 53 |
#chatbox::-webkit-scrollbar { width: 6px; } #chatbox::-webkit-scrollbar-track { background: var(--scrollbar-track); border-radius: 3px; } #chatbox::-webkit-scrollbar-thumb { background-color: var(--scrollbar-thumb); border-radius: 3px; }
|
| 54 |
+
#messages div { padding: 10px 15px; border-radius: 16px; max-width: 85%; word-wrap: break-word; line-height: 1.5; font-size: 1em; box-shadow: 0 1px 2px rgba(0,0,0,0.05); position: relative; animation: fadeIn 0.25s ease-out; }
|
|
|
|
|
|
|
|
|
|
| 55 |
@keyframes fadeIn { from { opacity: 0; transform: translateY(5px); } to { opacity: 1; transform: translateY(0); } }
|
| 56 |
.user-message { background: var(--user-msg-bg); color: var(--user-msg-text); align-self: flex-end; border-bottom-right-radius: 4px; margin-left: auto; }
|
| 57 |
.bot-message { background-color: var(--bot-msg-bg); border: 1px solid var(--bot-msg-border); align-self: flex-start; border-bottom-left-radius: 4px; margin-right: auto; }
|
| 58 |
.bot-message a { color: var(--primary-color); text-decoration: none; } .bot-message a:hover { text-decoration: underline; }
|
| 59 |
.system-message { font-style: italic; color: var(--system-msg-color); text-align: center; font-size: 0.85em; background-color: transparent; box-shadow: none; align-self: center; max-width: 100%; padding: 5px 0; animation: none; }
|
| 60 |
+
#input-area { display: flex; padding: 10px 12px; border-top: 1px solid var(--border-color); background-color: var(--header-bg); align-items: center; gap: 8px; flex-shrink: 0; }
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 61 |
#userInput { flex-grow: 1; padding: 10px 15px; border: 1px solid var(--input-border); border-radius: 20px; outline: none; font-size: 1em; font-family: 'Roboto', sans-serif; background-color: var(--input-bg); transition: border-color 0.2s ease; min-height: 42px; resize: none; overflow-y: auto; }
|
| 62 |
#userInput:focus { border-color: var(--primary-color); }
|
| 63 |
+
.control-button { padding: 0; border: none; border-radius: 50%; cursor: pointer; background-color: var(--button-bg); color: white; width: 42px; height: 42px; font-size: 1.3em; display: flex; align-items: center; justify-content: center; flex-shrink: 0; transition: background-color 0.2s ease, transform 0.1s ease; box-shadow: 0 1px 2px rgba(0,0,0,0.08); }
|
|
|
|
|
|
|
| 64 |
.control-button:hover:not(:disabled) { background-color: var(--button-hover-bg); transform: translateY(-1px); }
|
| 65 |
.control-button:active:not(:disabled) { transform: scale(0.95); }
|
| 66 |
.control-button:disabled { background-color: var(--button-disabled-bg); cursor: not-allowed; transform: none; box-shadow: none; }
|
| 67 |
#toggleSpeakerButton.muted { background-color: #aaa; }
|
| 68 |
+
@media (max-width: 600px) {
|
| 69 |
+
body { padding: 5px; justify-content: flex-start; }
|
| 70 |
+
#control-panel { margin-bottom: 5px; padding: 12px; }
|
| 71 |
+
#chat-container { width: 100%; height: auto; flex-grow: 1; border-radius: 12px; max-height: none; margin-bottom: 5px; }
|
| 72 |
+
h1 { font-size: 1.1em; padding: 12px; } #chatbox { padding: 12px 8px; gap: 10px; }
|
| 73 |
+
#messages div { max-width: 90%; font-size: 0.95em; padding: 9px 14px;}
|
| 74 |
+
#input-area { padding: 8px; gap: 5px; } #userInput { padding: 9px 14px; min-height: 40px; }
|
| 75 |
+
.control-button { width: 40px; height: 40px; font-size: 1.2em; }
|
| 76 |
+
}
|
|
|
|
| 77 |
</style>
|
| 78 |
<script type="importmap">
|
| 79 |
{ "imports": { "@xenova/transformers": "https://cdn.jsdelivr.net/npm/@xenova/[email protected]" } }
|
|
|
|
| 81 |
</head>
|
| 82 |
<body>
|
| 83 |
<div id="control-panel">
|
| 84 |
+
<h2>Model Loader</h2>
|
| 85 |
+
<button id="loadModelButton">Load Gemma 3 1B Model</button>
|
| 86 |
+
<div id="model-status" class="info">Click the button to load the Gemma 3 1B model. Warning: Loading may fail due to incompatibility.</div>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 87 |
</div>
|
| 88 |
|
| 89 |
<div id="chat-container">
|
|
|
|
| 94 |
</div>
|
| 95 |
</div>
|
| 96 |
<div id="input-area">
|
| 97 |
+
<textarea id="userInput" placeholder="Please load the model first..." rows="1" disabled></textarea>
|
| 98 |
<button id="speechButton" class="control-button" title="Speak message" disabled>๐ค</button>
|
| 99 |
<button id="toggleSpeakerButton" class="control-button" title="Toggle AI speech output" disabled>๐</button>
|
| 100 |
<button id="sendButton" class="control-button" title="Send message" disabled>โค</button>
|
|
|
|
| 102 |
</div>
|
| 103 |
|
| 104 |
<script type="module">
|
| 105 |
+
import { pipeline, AutoTokenizer, env } from '@xenova/transformers'; // Added AutoTokenizer
|
| 106 |
|
| 107 |
+
const MODEL_NAME = 'onnx-community/gemma-3-1b-it-ONNX-GQA'; // Target model
|
| 108 |
const TASK = 'text-generation';
|
| 109 |
+
const QUANTIZATION = 'q4'; // Attempting Q4 as per model card
|
|
|
|
| 110 |
|
|
|
|
| 111 |
env.allowLocalModels = false;
|
| 112 |
env.useBrowserCache = true;
|
| 113 |
env.backends.onnx.executionProviders = ['webgpu', 'wasm'];
|
| 114 |
console.log('Using Execution Providers:', env.backends.onnx.executionProviders);
|
| 115 |
|
|
|
|
| 116 |
const chatbox = document.getElementById('messages');
|
| 117 |
const userInput = document.getElementById('userInput');
|
| 118 |
const sendButton = document.getElementById('sendButton');
|
| 119 |
const chatbotNameElement = document.getElementById('chatbot-name');
|
| 120 |
const speechButton = document.getElementById('speechButton');
|
| 121 |
const toggleSpeakerButton = document.getElementById('toggleSpeakerButton');
|
| 122 |
+
const modelStatus = document.getElementById('model-status');
|
| 123 |
+
const loadModelButton = document.getElementById('loadModelButton');
|
| 124 |
+
|
| 125 |
+
let generator = null; // Full pipeline
|
| 126 |
+
let tokenizer = null; // Separate tokenizer instance
|
| 127 |
+
let currentModelName = null;
|
| 128 |
+
let isLoadingModel = false;
|
|
|
|
| 129 |
let conversationHistory = [];
|
| 130 |
let botState = { botName: "AI Assistant", userName: "User", botSettings: { useSpeechOutput: true } };
|
| 131 |
+
const stateKey = 'gemma3_1b_only_state_v1';
|
| 132 |
+
const historyKey = 'gemma3_1b_only_history_v1';
|
| 133 |
|
|
|
|
| 134 |
let recognition = null;
|
| 135 |
let synthesis = window.speechSynthesis;
|
| 136 |
let targetVoice = null;
|
| 137 |
let isListening = false;
|
| 138 |
|
|
|
|
| 139 |
window.addEventListener('load', () => {
|
| 140 |
+
loadState();
|
| 141 |
chatbotNameElement.textContent = botState.botName;
|
| 142 |
updateSpeakerButtonUI();
|
| 143 |
initializeSpeechAPI();
|
| 144 |
setupInputAutosize();
|
| 145 |
+
updateChatUIState(false); // Initial state: model not loaded
|
| 146 |
+
if (conversationHistory.length > 0) displayHistory();
|
| 147 |
setTimeout(loadVoices, 500);
|
|
|
|
|
|
|
| 148 |
loadModelButton.addEventListener('click', handleLoadModelClick);
|
|
|
|
|
|
|
|
|
|
| 149 |
});
|
| 150 |
|
|
|
|
| 151 |
function loadState() {
|
| 152 |
+
const savedState = localStorage.getItem(stateKey); if (savedState) { try { const loaded = JSON.parse(savedState); botState = { ...botState, ...loaded, botSettings: { ...botState.botSettings, ...(loaded.botSettings || {}) } }; } catch(e) {} }
|
| 153 |
+
const savedHistory = localStorage.getItem(historyKey); if (savedHistory) { try { conversationHistory = JSON.parse(savedHistory); } catch(e) { conversationHistory = []; } }
|
|
|
|
|
|
|
|
|
|
| 154 |
}
|
| 155 |
function saveState() {
|
| 156 |
localStorage.setItem(stateKey, JSON.stringify(botState));
|
| 157 |
localStorage.setItem(historyKey, JSON.stringify(conversationHistory));
|
| 158 |
}
|
| 159 |
function displayHistory() {
|
| 160 |
+
chatbox.innerHTML = ''; conversationHistory.forEach(msg => displayMessage(msg.sender, msg.text, false));
|
|
|
|
| 161 |
}
|
| 162 |
|
|
|
|
| 163 |
function displayMessage(sender, text, animate = true, isError = false) {
|
| 164 |
const messageDiv = document.createElement('div');
|
| 165 |
let messageClass = sender === 'user' ? 'user-message' : sender === 'bot' ? 'bot-message' : 'system-message';
|
| 166 |
+
if (isError) messageClass = 'error-message';
|
| 167 |
+
messageDiv.classList.add(messageClass); if (!animate) messageDiv.style.animation = 'none';
|
| 168 |
+
text = text.replace(/</g, "<").replace(/>/g, ">"); text = text.replace(/\[(.*?)\]\((.*?)\)/g, '<a href="$2" target="_blank" rel="noopener noreferrer">$1</a>'); text = text.replace(/\*\*(.*?)\*\*/g, '<strong>$1</strong>').replace(/\*(.*?)\*/g, '<em>$1</em>'); text = text.replace(/\n/g, '<br>');
|
| 169 |
+
messageDiv.innerHTML = text; chatbox.appendChild(messageDiv); chatbox.scrollTo({ top: chatbox.scrollHeight, behavior: animate ? 'smooth' : 'auto' });
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 170 |
}
|
| 171 |
|
| 172 |
+
function updateModelStatus(message, type = 'info') {
|
| 173 |
+
modelStatus.textContent = message; modelStatus.className = 'model-status ' + type; console.log(`Model Status (${type}): ${message}`);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 174 |
}
|
| 175 |
|
| 176 |
+
function updateChatUIState(isModelLoadedSuccessfully, isTokenizerLoaded = false) {
|
| 177 |
+
const isReadyForChat = isModelLoadedSuccessfully; // Only enable full chat if pipeline loaded
|
| 178 |
+
userInput.disabled = !isReadyForChat || isLoadingModel;
|
| 179 |
+
sendButton.disabled = !isReadyForChat || isLoadingModel || userInput.value.trim() === '';
|
| 180 |
+
speechButton.disabled = !isReadyForChat || isLoadingModel || isListening || !recognition;
|
| 181 |
+
toggleSpeakerButton.disabled = !isReadyForChat || isLoadingModel || !synthesis;
|
| 182 |
+
loadModelButton.disabled = isLoadingModel || isModelLoadedSuccessfully; // Disable load button if loading or successful
|
| 183 |
+
|
| 184 |
+
if (isReadyForChat) { userInput.placeholder = "How can I help you today?"; }
|
| 185 |
+
else if (isLoadingModel) { userInput.placeholder = "Model loading..."; }
|
| 186 |
+
else if (isTokenizerLoaded) { userInput.placeholder = "Tokenizer loaded, but chat unavailable."; } // Tokenizer only state
|
| 187 |
+
else { userInput.placeholder = "Please load the model first..."; }
|
|
|
|
|
|
|
|
|
|
|
|
|
| 188 |
}
|
| 189 |
|
| 190 |
+
// Sync the speaker toggle button's icon, tooltip, and 'muted' class with the
// current speech-output setting.
function updateSpeakerButtonUI() {
    const speechOn = botState.botSettings.useSpeechOutput;
    toggleSpeakerButton.textContent = speechOn ? '๐' : '๐';
    toggleSpeakerButton.title = speechOn ? 'Turn off AI speech' : 'Turn on AI speech';
    toggleSpeakerButton.classList.toggle('muted', !speechOn);
}
|
| 193 |
+
// Report speech-related status; currently console-only (no dedicated UI element).
function showSpeechStatus(message) {
    console.log("Speech Status:", message);
}
|
| 194 |
+
// Grow the input textarea to fit its content and refresh the button state
// on every keystroke.
function setupInputAutosize() {
    userInput.addEventListener('input', () => {
        userInput.style.height = 'auto';
        userInput.style.height = `${userInput.scrollHeight}px`;
        updateChatUIState(generator !== null, tokenizer !== null);
    });
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 195 |
|
|
|
|
| 196 |
// Click handler for the "Load Model" button. Loads the single supported
// model pipeline, keeping the UI locked while the load is in flight.
// Fix: the original had no try/finally, so an unexpected rejection from
// initializeModelAndTokenizer would leave isLoadingModel === true and the
// whole UI permanently disabled.
async function handleLoadModelClick() {
    if (isLoadingModel || generator) return; // already loading or loaded

    isLoadingModel = true;
    currentModelName = MODEL_NAME; // only one model is offered
    generator = null;              // reset any previous pipeline
    tokenizer = null;              // reset any previous tokenizer
    updateChatUIState(false);      // disable UI while loading

    try {
        await initializeModelAndTokenizer(currentModelName);
    } catch (error) {
        // initializeModelAndTokenizer reports its own failures; this guard
        // only covers an unexpected rejection escaping it.
        console.error("Unexpected error while loading model:", error);
        updateModelStatus(`Unexpected error: ${error.message}`, 'error');
    } finally {
        isLoadingModel = false;
        updateChatUIState(generator !== null, tokenizer !== null); // update UI based on final state
    }
}
|
| 207 |
|
| 208 |
+
// Load the text-generation pipeline for `modelId` (quantized per QUANTIZATION).
// On success, sets the module-level `generator` and `tokenizer` and reports
// readiness. If the pipeline fails, falls back to loading only the tokenizer
// so the UI can report a partial (chat-disabled) state; if that also fails,
// reports a fatal double failure.
// Fix: removed the write-only `pipelineLoaded`/`tokenizerLoaded` flags and the
// redundant `loadError` local (the catch binding already holds the error);
// de-duplicated the two hand-rolled progress callbacks into one factory.
async function initializeModelAndTokenizer(modelId) {
    updateModelStatus(`Loading ${modelId} (Q4)...`, 'loading');

    // Builds a progress_callback that prefixes status lines with `label`.
    const progressReporter = (label) => (progress) => {
        const file = progress.file ? progress.file.split('/').pop() : '';
        const pct = Math.round(progress.progress || 0);
        updateModelStatus(`[${label}: ${progress.status}] ${file} (${pct}%)`, 'loading');
    };

    // --- Try loading the full pipeline first ---
    try {
        generator = await pipeline(TASK, modelId, {
            dtype: QUANTIZATION, // Q4, per the model card
            progress_callback: progressReporter('Loading Pipeline'),
        });
        tokenizer = generator.tokenizer; // reuse the tokenizer bundled with the pipeline
        updateModelStatus(`${modelId} pipeline loaded successfully!`, 'success');
        displayMessage('system', `[SUCCESS] ${modelId} is ready for chat.`, false);
    } catch (error) {
        console.error(`Pipeline loading failed for ${modelId}:`, error);
        generator = null; // never leave a half-initialized pipeline behind
        updateModelStatus(`Pipeline loading failed for ${modelId}. Attempting tokenizer only...`, 'warning');

        // --- If the pipeline failed, try loading only the tokenizer ---
        try {
            updateModelStatus(`Loading Tokenizer for ${modelId}...`, 'loading');
            tokenizer = await AutoTokenizer.from_pretrained(modelId, {
                progress_callback: progressReporter('Loading Tokenizer'),
            });
            // Pipeline failed but the tokenizer is usable: chat stays disabled.
            updateModelStatus(`Model Pipeline failed: ${error.message}. Tokenizer loaded successfully. Chat is disabled.`, 'tokenizer-only');
            displayMessage('system', `[WARNING] ${modelId} Model Pipeline failed to load. Tokenizer is available, but chat generation will not work.`, true, true);
        } catch (tokenizerError) {
            console.error(`Tokenizer loading also failed for ${modelId}:`, tokenizerError);
            tokenizer = null; // double failure: nothing usable was loaded
            updateModelStatus(`FATAL: Failed to load both Pipeline and Tokenizer for ${modelId}. Pipeline Error: ${error.message}. Tokenizer Error: ${tokenizerError.message}`, 'error');
            displayMessage('system', `[FATAL ERROR] Could not load essential components for ${modelId}. Please check browser console and model compatibility.`, true, true);
        }
    }
}
|
| 258 |
|
| 259 |
// Assemble a Gemma-style chat prompt: a fixed system turn followed by the
// most recent conversation turns, ending with an open model turn.
function buildPrompt() {
    const historyLimit = 5; // keep the context window short for the 1B model
    const turns = conversationHistory
        .slice(-historyLimit)
        .map((msg) => {
            const role = msg.sender === 'user' ? 'user' : 'model';
            return `<start_of_turn>${role}\n${msg.text}\n<end_of_turn>\n`;
        })
        .join('');
    const prompt =
        "<start_of_turn>system\nYou are 'AI Assistant', a helpful AI assistant. Answer the user's questions clearly and concisely in English.\n<end_of_turn>\n" +
        turns +
        "<start_of_turn>model\n";
    console.log("Generated Prompt:", prompt);
    return prompt;
}
|
| 267 |
|
| 268 |
+
// Strip the echoed prompt and Gemma turn markers from raw model output,
// then unwrap single leading/trailing quotes. Falls back to a canned reply
// when the cleaned text is empty or shorter than two characters.
function cleanupResponse(responseText, prompt) {
    let cleaned;
    if (responseText.startsWith(prompt)) {
        cleaned = responseText.substring(prompt.length);
    } else {
        // Prompt was not echoed verbatim; drop a stray leading "model" tag.
        cleaned = responseText.replace(/^model\n?/, '').trim();
    }
    cleaned = cleaned.replace(/<end_of_turn>/g, '').trim();
    cleaned = cleaned.replace(/<start_of_turn>/g, '').trim();
    cleaned = cleaned.replace(/^['"]/, '').replace(/['"]$/, '');
    if (!cleaned || cleaned.length < 2) {
        console.warn("Generated reply empty/short:", cleaned);
        const fallbacks = [
            "Sorry, I didn't quite understand.",
            "Could you please rephrase that?",
            "I'm not sure how to respond.",
        ];
        return fallbacks[Math.floor(Math.random() * fallbacks.length)];
    }
    return cleaned;
}
|
| 274 |
|
|
|
|
| 275 |
async function handleUserMessage() {
|
| 276 |
const userText = userInput.value.trim();
|
| 277 |
+
// Check if GENERATOR (full pipeline) is loaded
|
| 278 |
if (!userText || !generator || isLoadingModel) return;
|
| 279 |
|
| 280 |
userInput.value = ''; userInput.style.height = 'auto';
|
| 281 |
+
updateChatUIState(true); // Disable send button immediately
|
| 282 |
+
|
| 283 |
displayMessage('user', userText);
|
| 284 |
conversationHistory.push({ sender: 'user', text: userText });
|
| 285 |
|
| 286 |
+
updateModelStatus("AI thinking...", "loading");
|
| 287 |
|
| 288 |
const prompt = buildPrompt();
|
| 289 |
try {
|
|
|
|
| 302 |
displayMessage('bot', errorReply);
|
| 303 |
conversationHistory.push({ sender: 'bot', text: errorReply });
|
| 304 |
} finally {
|
| 305 |
+
updateModelStatus(`${currentModelName} ready.`, "success");
|
| 306 |
+
updateChatUIState(true);
|
| 307 |
userInput.focus();
|
| 308 |
}
|
| 309 |
}
|
| 310 |
|
| 311 |
+
// --- Speech API Functions ---
|
| 312 |
// Wire up Web Speech API input (SpeechRecognition) and output (synthesis
// toggle). Missing browser support is logged but non-fatal; the UI starts
// fully disabled until a model is loaded.
function initializeSpeechAPI() {
    const SpeechRecognitionImpl = window.SpeechRecognition || window.webkitSpeechRecognition;

    if (SpeechRecognitionImpl) {
        recognition = new SpeechRecognitionImpl();
        recognition.lang = 'en-US';
        recognition.continuous = false;
        recognition.interimResults = false;

        recognition.onstart = () => {
            isListening = true;
            updateChatUIState(generator !== null, tokenizer !== null);
            console.log('Listening...');
        };

        recognition.onresult = (event) => {
            userInput.value = event.results[0][0].transcript;
            userInput.dispatchEvent(new Event('input')); // trigger autosize + button state
            handleUserMessage();
        };

        recognition.onerror = (event) => {
            console.error("Speech error:", event.error);
            updateModelStatus(`Speech recognition error (${event.error})`, 'error');
            // After a delay, restore the status line matching the load state.
            setTimeout(() => updateModelStatus(
                generator ? `${currentModelName} ready.` : (tokenizer ? 'Tokenizer loaded.' : 'No model loaded.'),
                generator ? 'success' : (tokenizer ? 'tokenizer-only' : 'info')
            ), 3000);
        };

        recognition.onend = () => {
            isListening = false;
            updateChatUIState(generator !== null, tokenizer !== null);
            console.log('Stopped listening.');
        };
    } else {
        console.warn("Speech Recognition not supported.");
    }

    if (!synthesis) {
        console.warn("Speech Synthesis not supported.");
    } else {
        toggleSpeakerButton.addEventListener('click', () => {
            botState.botSettings.useSpeechOutput = !botState.botSettings.useSpeechOutput;
            updateSpeakerButtonUI();
            saveState();
            if (!botState.botSettings.useSpeechOutput) synthesis.cancel();
        });
    }

    updateChatUIState(false); // initial state: everything disabled
}
|
| 318 |
+
// Fetch the available synthesis voices; when the list is still empty,
// defer voice selection until the `voiceschanged` event fires.
function loadVoices() {
    if (!synthesis) return;
    const available = synthesis.getVoices();
    if (available.length === 0) {
        synthesis.onvoiceschanged = () => {
            findAndSetVoice(synthesis.getVoices());
        };
    } else {
        findAndSetVoice(available);
    }
}
|
| 319 |
+
// Pick an English voice (exact 'en-US' first, then any 'en-*') and store it
// in the module-level `targetVoice`; logs the outcome either way.
function findAndSetVoice(voices) {
    const exactMatch = voices.find((v) => v.lang === 'en-US');
    targetVoice = exactMatch || voices.find((v) => v.lang.startsWith('en-'));
    if (targetVoice) {
        console.log("Using English voice:", targetVoice.name, targetVoice.lang);
    } else {
        console.warn("No suitable English voice found.");
    }
}
|
| 320 |
+
// Speak `text` aloud with the selected voice; no-op when synthesis is
// unavailable, speech output is toggled off, or no voice was found.
function speakText(text) {
    const canSpeak = synthesis && botState.botSettings.useSpeechOutput && targetVoice;
    if (!canSpeak) return;
    synthesis.cancel(); // stop any utterance still in progress
    const utterance = new SpeechSynthesisUtterance(text);
    utterance.voice = targetVoice;
    utterance.lang = targetVoice.lang;
    utterance.rate = 1.0;
    utterance.pitch = 1.0;
    synthesis.speak(utterance);
}
|
| 321 |
|
| 322 |
// --- Event Listeners ---
sendButton.addEventListener('click', handleUserMessage);

// Enter sends the message; Shift+Enter falls through to insert a newline.
userInput.addEventListener('keypress', (e) => {
    if (e.key === 'Enter' && !e.shiftKey) {
        e.preventDefault();
        handleUserMessage();
    }
});

// Start speech recognition only when it exists, is idle, and chat is usable.
speechButton.addEventListener('click', () => {
    if (!(recognition && !isListening && generator && !isLoadingModel)) return;
    try {
        recognition.start();
    } catch (error) {
        console.error("Rec start fail:", error);
        updateModelStatus(`Failed to start recognition`, 'error');
        // After a delay, restore the status line matching the load state.
        setTimeout(() => updateModelStatus(
            generator ? `${currentModelName} ready.` : (tokenizer ? 'Tokenizer loaded.' : 'No model loaded.'),
            generator ? 'success' : (tokenizer ? 'tokenizer-only' : 'info')
        ), 2000);
        isListening = false;
        updateChatUIState(generator !== null, tokenizer !== null);
    }
});
|
| 326 |
|
| 327 |
</script>
|
| 328 |
</body>
|