// MobileLLM-R1-140M / index.js
// Pin an explicit @huggingface/transformers version here for reproducible builds.
import { pipeline, TextStreamer } from 'https://cdn.jsdelivr.net/npm/@huggingface/transformers';
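/**
 * Browser chat UI around an on-device MobileLLM-R1 text-generation pipeline.
 * Loads the model with transformers.js, streams tokens into the chat as they
 * are generated, and lets the user switch between the WASM (CPU) and WebGPU backends.
 */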
class StreamingChatbot {
constructor() {
this.generator = null;
this.messages = [];
this.isProcessing = false;
this.useWebGPU = false;
this.currentStreamer = null;
this.initElements();
this.initEventListeners();
this.initModel();
}
initElements() {
this.chatMessages = document.getElementById('chatMessages');
this.userInput = document.getElementById('userInput');
this.sendButton = document.getElementById('sendButton');
this.clearButton = document.getElementById('clearButton');
this.loadingIndicator = document.getElementById('loadingIndicator');
this.initLoader = document.getElementById('initLoader');
this.deviceToggle = document.getElementById('deviceToggle');
this.deviceLabel = document.getElementById('deviceLabel');
this.charCount = document.getElementById('charCount');
}
initEventListeners() {
this.sendButton.addEventListener('click', () => this.sendMessage());
this.clearButton.addEventListener('click', () => this.clearChat());
this.userInput.addEventListener('keydown', (e) => {
if (e.key === 'Enter' && !e.shiftKey) {
e.preventDefault();
this.sendMessage();
}
});
this.userInput.addEventListener('input', () => {
this.updateCharCount();
this.autoResize();
});
this.deviceToggle.addEventListener('change', async (e) => {
if (!this.isProcessing) {
this.useWebGPU = e.target.checked;
this.deviceLabel.textContent = this.useWebGPU ? 'GPU' : 'CPU';
await this.reinitModel();
} else {
// The change event fires after the checkbox has already flipped;
// revert it to the device currently in use.
e.target.checked = this.useWebGPU;
this.showNotification('Please wait for the current process to complete');
}
});
this.checkWebGPUSupport();
}
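// Detect WebGPU support up front: navigator.gpu must exist and requestAdapter()
// must return an adapter; otherwise the GPU toggle is disabled and WASM is used.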
async checkWebGPUSupport() {
try {
if (!navigator.gpu) {
this.deviceToggle.disabled = true;
this.deviceLabel.textContent = 'CPU (GPU not available)';
return;
}
const adapter = await navigator.gpu.requestAdapter();
if (!adapter) {
this.deviceToggle.disabled = true;
this.deviceLabel.textContent = 'CPU (GPU not available)';
}
} catch (error) {
console.error('WebGPU check failed:', error);
this.deviceToggle.disabled = true;
this.deviceLabel.textContent = 'CPU (GPU not available)';
}
}
updateCharCount() {
if (!this.charCount) return;
const count = this.userInput.value.length;
this.charCount.textContent = `${count} / 1000`;
// Turn the counter red as the input approaches the 1000-character limit.
this.charCount.style.color = count > 900 ? '#ef4444' : '#64748b';
}
autoResize() {
this.userInput.style.height = 'auto';
const maxHeight = 120;
const newHeight = Math.min(this.userInput.scrollHeight, maxHeight);
this.userInput.style.height = newHeight + 'px';
}
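// Load the text-generation pipeline for onnx-community/MobileLLM-R1-140M-ONNX.
// On WebGPU a 4-bit quantized model ("q4") is used; on the WASM (CPU) backend
// the full-precision "fp32" weights are loaded instead.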
async initModel() {
try {
this.initLoader.style.display = 'flex';
const options = {
dtype: this.useWebGPU ? "q4" : "fp32",
device: this.useWebGPU ? 'webgpu' : 'wasm'
};
this.generator = await pipeline(
"text-generation",
"onnx-community/MobileLLM-R1-140M-ONNX",
options
);
this.sendButton.disabled = false;
this.showNotification('Model loaded successfully!', 'success');
} catch (error) {
console.error('Error loading model:', error);
this.showNotification('Failed to load model. Please refresh and try again.', 'error');
} finally {
this.initLoader.style.display = 'none';
}
}
async reinitModel() {
this.initLoader.style.display = 'flex';
this.sendButton.disabled = true;
// Release the previous pipeline's resources (if this version exposes dispose)
// before reloading the model on the newly selected device.
await this.generator?.dispose?.();
this.generator = null;
await this.initModel();
}
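// Handle one user turn: append the message to the history, run generation over
// the full conversation, and stream the assistant's reply into the chat as it
// is produced.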
async sendMessage() {
const message = this.userInput.value.trim();
if (!message || this.isProcessing || !this.generator) return;
this.isProcessing = true;
this.sendButton.disabled = true;
// Add user message to chat
this.addMessage(message, 'user');
this.messages.push({ role: "user", content: message });
// Clear input
this.userInput.value = '';
this.updateCharCount();
this.autoResize();
// Show loading
this.loadingIndicator.classList.remove('hidden');
try {
// Create container for streaming response
const responseContainer = this.addMessage('', 'assistant', true);
const messageContent = responseContainer.querySelector('.message-content');
// Add streaming indicator
const streamingIndicator = document.createElement('span');
streamingIndicator.className = 'streaming-indicator';
streamingIndicator.textContent = '●';
streamingIndicator.style.animation = 'pulse 1.5s infinite';
streamingIndicator.style.color = '#10b981';
streamingIndicator.style.marginLeft = '4px';
messageContent.appendChild(streamingIndicator);
// Stream decoded tokens into the DOM as they arrive; skip_prompt and
// skip_special_tokens keep the echoed prompt and control tokens out of the output
this.currentStreamer = new TextStreamer(this.generator.tokenizer, {
skip_prompt: true,
skip_special_tokens: true,
callback_function: (text) => {
// Drop the placeholder indicator once tokens start arriving (no-op on later calls)
if (streamingIndicator.parentNode) {
streamingIndicator.remove();
}
// Append new text and scroll
messageContent.textContent += text;
this.scrollToBottom();
}
});
// Run generation over the full conversation (messages array) and stream tokens through the TextStreamer
const output = await this.generator(this.messages, {
max_new_tokens: 500,
do_sample: false,
streamer: this.currentStreamer,
});
// Clean up streaming indicator if still present
if (streamingIndicator.parentNode) {
streamingIndicator.remove();
}
// With a messages-array input, generated_text is the full conversation and the
// last entry is the assistant's new reply. Fall back to whatever was streamed.
const rawOutput = output[0].generated_text;
const generatedText = Array.isArray(rawOutput) ? rawOutput.at(-1)?.content : rawOutput;
if (generatedText && !messageContent.textContent) {
messageContent.textContent = generatedText;
}
// Add assistant's response to message history
const finalResponse = messageContent.textContent.trim();
if (finalResponse) {
this.messages.push({ role: "assistant", content: finalResponse });
}
} catch (error) {
console.error('Error generating response:', error);
// Remove any existing content and show error
const responseContainer = this.chatMessages.lastElementChild;
if (responseContainer && responseContainer.classList.contains('assistant')) {
responseContainer.remove();
}
this.addMessage('Sorry, I encountered an error while generating the response. Please try again.', 'error');
} finally {
this.loadingIndicator.classList.add('hidden');
this.isProcessing = false;
this.sendButton.disabled = false;
this.currentStreamer = null;
this.userInput.focus();
this.scrollToBottom();
}
}
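// Build and append a chat bubble (avatar + content) for the given role
// ('user', 'assistant', or 'error') and return the message element.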
addMessage(content, type, isStreaming = false) {
// Remove welcome message if it exists
const welcomeMessage = this.chatMessages.querySelector('.welcome-message');
if (welcomeMessage) {
welcomeMessage.remove();
}
const messageDiv = document.createElement('div');
messageDiv.className = `message ${type}`;
const avatar = document.createElement('div');
avatar.className = 'avatar';
avatar.textContent = type === 'user' ? '👤' : type === 'error' ? '⚠️' : '🤖';
const messageContent = document.createElement('div');
messageContent.className = 'message-content';
messageContent.textContent = content;
messageDiv.appendChild(avatar);
messageDiv.appendChild(messageContent);
this.chatMessages.appendChild(messageDiv);
this.scrollToBottom();
return messageDiv;
}
scrollToBottom() {
requestAnimationFrame(() => {
this.chatMessages.scrollTop = this.chatMessages.scrollHeight;
});
}
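// Reset the conversation history and restore the welcome screen.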
clearChat() {
if (this.isProcessing) {
this.showNotification('Please wait for the current response to complete');
return;
}
this.messages = [];
this.chatMessages.innerHTML = `
<div class="welcome-message">
<div class="welcome-icon">🤖</div>
<h2>Welcome to AI Assistant</h2>
<p>I'm powered by MobileLLM with real-time streaming. Ask me anything!</p>
</div>
`;
this.showNotification('Chat cleared!', 'info');
}
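// Show a transient toast in the top-right corner; it slides in, then slides
// out and removes itself after about 3 seconds.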
showNotification(message, type = 'info') {
const notification = document.createElement('div');
notification.className = `notification ${type}`;
notification.textContent = message;
notification.style.cssText = `
position: fixed;
top: 20px;
right: 20px;
padding: 12px 20px;
border-radius: 8px;
color: white;
font-weight: 500;
transform: translateX(100%);
transition: all 0.3s;
z-index: 1001;
background: ${type === 'success' ? '#10b981' : type === 'error' ? '#ef4444' : '#3b82f6'};
`;
document.body.appendChild(notification);
setTimeout(() => {
notification.style.transform = 'translateX(0)';
}, 10);
setTimeout(() => {
notification.style.transform = 'translateX(100%)';
setTimeout(() => notification.remove(), 300);
}, 3000);
}
}
// Initialize chatbot when DOM is ready
document.addEventListener('DOMContentLoaded', () => {
new StreamingChatbot();
});