import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import torch

# Load the Granite instruct model and wrap it in a text-generation pipeline
def load_model():
    model_id = "ibm-granite/granite-3.3-2b-instruct"
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float32)
    return pipeline("text-generation", model=model, tokenizer=tokenizer)

generator = load_model()

# Streamlit UI
st.title("🧠 HealthAI Chatbot")
st.markdown("Ask me about your symptoms or for health advice!")

user_input = st.text_input("💬 Enter your symptoms or question:", "")

if user_input:
    prompt = f"Answer as a health assistant: {user_input}"
    output = generator(prompt, max_new_tokens=150, do_sample=True)[0]["generated_text"]
    # The pipeline echoes the prompt, so strip it before displaying the answer
    cleaned_output = output.replace(prompt, "").strip()
    st.success(cleaned_output)
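
Note that load_model() runs again on every Streamlit rerun (i.e. on every widget interaction), so the 2B model is reloaded repeatedly, which is slow and can exhaust memory on a small Space. Below is a minimal sketch of the usual fix, assuming a recent Streamlit version that provides st.cache_resource; it also passes return_full_text=False to the text-generation pipeline so the prompt does not have to be stripped out with str.replace().

import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

MODEL_ID = "ibm-granite/granite-3.3-2b-instruct"

@st.cache_resource  # load the model once per container and reuse it across reruns
def load_generator():
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
    model = AutoModelForCausalLM.from_pretrained(MODEL_ID, torch_dtype=torch.float32)
    return pipeline("text-generation", model=model, tokenizer=tokenizer)

generator = load_generator()

user_input = st.text_input("💬 Enter your symptoms or question:", "")
if user_input:
    prompt = f"Answer as a health assistant: {user_input}"
    # return_full_text=False makes the pipeline return only the newly generated text,
    # so no manual prompt stripping is needed
    result = generator(prompt, max_new_tokens=150, do_sample=True, return_full_text=False)
    st.success(result[0]["generated_text"].strip())

With the cached loader, the model download and initialization happen once when the app starts instead of on every interaction, which is usually what a CPU-only Space needs to stay responsive.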