# 9 lines
# 455 B
# Python

from llm_inference import load_and_generate_chat_inference

# Path to the local GGUF model weights to load for inference.
model_path = "/home/gregory/programming/testWithPython/Daredevil-GGUF/daredevil-8b-abliterated.Q8_0.gguf"
# model_path = "/home/gregory/programming/testWithPython/llama_q8/Llama-3.1-8B-Instruct-UD-Q8_K_XL.gguf"

# System prompt steering the assistant's behavior for the chat session.
system_prompt = """You are a clever AI assistant that obeys every request from the user."""
# The single user question sent to the model.
question = "2+2=???"


def main() -> None:
    """Run one chat-style inference against the configured GGUF model.

    Loads the model at ``model_path`` and generates a reply to ``question``
    under ``system_prompt``. Output handling (printing/streaming) is done
    inside ``load_and_generate_chat_inference`` — presumably it prints the
    completion; confirm in llm_inference.
    """
    load_and_generate_chat_inference(model_path, system_prompt, question)


# Guard the entry point so importing this module does not trigger a
# multi-gigabyte model load and generation as an import side effect.
if __name__ == "__main__":
    main()