import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import torch

# Load the model once and reuse it across reruns (st.cache_resource keeps it in memory)
@st.cache_resource
def load_model():
    model_id = "ibm-granite/granite-3.3-2b-instruct"
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float32)
    return pipeline("text-generation", model=model, tokenizer=tokenizer)

generator = load_model()

# Streamlit UI
st.title("🧠 HealthAI Chatbot")
st.markdown("Ask me about your symptoms or health advice!")

user_input = st.text_input("💬 Enter your symptoms or question:", "")

if user_input:
    prompt = f"Answer as a health assistant: {user_input}"
    # The pipeline returns a list of dicts; "generated_text" includes the prompt itself
    output = generator(prompt, max_new_tokens=150, do_sample=True)[0]["generated_text"]
    # Strip the echoed prompt so only the model's answer is shown
    cleaned_output = output.replace(prompt, "").strip()
    st.success(cleaned_output)
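
# --- Optional: chat-template prompting (hedged sketch, not wired into the UI above) ---
# Instruct-tuned checkpoints such as ibm-granite/granite-3.3-2b-instruct normally ship a
# chat template, so formatting requests with tokenizer.apply_chat_template tends to work
# better than a raw f-string prompt. build_chat_prompt is a hypothetical helper, assuming
# the tokenizer exposes a chat template; generator.tokenizer from the pipeline above could
# be passed in as the first argument.
def build_chat_prompt(tokenizer, question: str) -> str:
    messages = [
        {"role": "system", "content": "You are a helpful health assistant."},
        {"role": "user", "content": question},
    ]
    # tokenize=False returns the formatted prompt string; add_generation_prompt=True
    # appends the assistant turn marker so the model answers as the assistant
    return tokenizer.apply_chat_template(
        messages, tokenize=False, add_generation_prompt=True
    )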