import requests
from langchain.chat_models import init_chat_model

# Single source of truth for the endpoint and model, so the LangChain
# client and the raw HTTP call below cannot drift apart.
BASE_URL = "https://szymskul-bielik-space.hf.space"  # your HF Space (Ollama API)
MODEL_NAME = "SpeakLeash/bielik-11b-v2.2-instruct:Q5_K_M"  # or another from /api/tags

llm = init_chat_model(
    model=MODEL_NAME,
    model_provider="ollama",
    base_url=BASE_URL,
    temperature=0.4,
    streaming=False,
)


def modelLanguage(systemPrompt, chat_history=None, timeout=120):
    """Send a chat request to the Ollama ``/api/chat`` endpoint and return the reply text.

    Any system messages already present in ``chat_history`` are stripped and
    replaced by a single leading system message built from ``systemPrompt``.
    The caller's ``chat_history`` list is never mutated.

    Args:
        systemPrompt: Text to use as the sole "system" message.
        chat_history: Optional list of ``{"role": ..., "content": ...}`` dicts.
        timeout: Seconds to wait for the HTTP response (default 120) — the
            original call had no timeout and could block indefinitely on a
            stalled Space.

    Returns:
        The assistant reply string (``message.content`` from the JSON response).

    Raises:
        requests.HTTPError: If the endpoint returns a non-2xx status.
        requests.Timeout: If no response arrives within ``timeout`` seconds.
        KeyError: If the response JSON lacks the expected "message" shape.
    """
    if chat_history is None:
        messages = []
    else:
        # Drop pre-existing system messages; we inject our own below.
        # Building a new list also avoids mutating the caller's history.
        messages = [msg for msg in chat_history if msg.get("role") != "system"]
    messages.insert(0, {"role": "system", "content": systemPrompt})

    response = requests.post(
        f"{BASE_URL}/api/chat",
        json={
            "model": MODEL_NAME,
            "messages": messages,
            "stream": False,
            "options": {
                "temperature": 0.4,
                "top_p": 0.9,
            },
        },
        timeout=timeout,
    )
    # Fail loudly on HTTP errors instead of a cryptic KeyError on the JSON body.
    response.raise_for_status()
    return response.json()["message"]["content"]