{ "inference_type": "llama.cpp/text-to-text", "schema_version": "1.0.0", "load_time_parameters": { "model": "../LFM2-1.2B-Q4_K_M.gguf", "chat_template": "{{- bos_token -}}\n{%- set system_prompt = \"\" -%}\n{%- set ns = namespace(system_prompt=\"\") -%}\n{%- if messages[0][\"role\"] == \"system\" -%}\n\t{%- set ns.system_prompt = messages[0][\"content\"] -%}\n\t{%- set messages = messages[1:] -%}\n{%- endif -%}\n{%- if tools -%}\n\t{%- set ns.system_prompt = ns.system_prompt + (\"\\n\" if ns.system_prompt else \"\") + \"List of tools: <|tool_list_start|>[\" -%}\n\t{%- for tool in tools -%}\n\t\t{%- if tool is not string -%}\n\t\t\t{%- set tool = tool | tojson -%}\n\t\t{%- endif -%}\n\t\t{%- set ns.system_prompt = ns.system_prompt + tool -%}\n\t\t{%- if not loop.last -%}\n\t\t\t{%- set ns.system_prompt = ns.system_prompt + \", \" -%}\n\t\t{%- endif -%}\n\t{%- endfor -%}\n\t{%- set ns.system_prompt = ns.system_prompt + \"]<|tool_list_end|>\" -%}\n{%- endif -%}\n{%- if ns.system_prompt -%}\n\t{{- \"<|im_start|>system\\n\" + ns.system_prompt + \"<|im_end|>\\n\" -}}\n{%- endif -%}\n{%- for message in messages -%}\n\t{{- \"<|im_start|>\" + message[\"role\"] + \"\\n\" -}}\n\t{%- set content = message[\"content\"] -%}\n\t{%- if content is not string -%}\n\t\t{%- set content = content | tojson -%}\n\t{%- endif -%}\n\t{%- if message[\"role\"] == \"tool\" -%}\n\t\t{%- set content = \"<|tool_response_start|>\" + content + \"<|tool_response_end|>\" -%}\n\t{%- endif -%}\n\t{{- content + \"<|im_end|>\\n\" -}}\n{%- endfor -%}\n{%- if add_generation_prompt -%}\n\t{{- \"<|im_start|>assistant\\n\" -}}\n{%- endif -%}\n" }, "generation_time_parameters": { "sampling_parameters": { "temperature": 0.3, "min_p": 0.15, "repetition_penalty": 1.05 } } }