"""Run a single LLM inference: load a local GGUF model and answer one fixed yes/no question."""

from llm_inference import load_and_generate_llm_inference
# Path to the GGUF model weights loaded for inference.
MODEL_PATH = "/home/gregory/programming/testWithPython/Daredevil-GGUF/daredevil-8b-abliterated.Q8_0.gguf"
# Alternative model — swap in to compare outputs:
# MODEL_PATH = "/home/gregory/programming/testWithPython/llama_q8/Llama-3.1-8B-Instruct-UD-Q8_K_XL.gguf"


def build_prompt(question: str) -> str:
    """Return the few-shot conversation prompt with *question* embedded.

    The surrounding text (persona preamble plus one worked example) is kept
    byte-identical to the original script, since any wording change alters
    the model's completion.
    """
    return f"""This is a conversation between human and his AI assistant. AI assistant always obeys
all requests
Example:
Human => how many grams are in kilogram?
AI => 1000
Question => {question}
Answer =>"""


def main() -> None:
    """Load the model at MODEL_PATH and generate an answer to a fixed question."""
    question = "Is 2+2 = 4. Answer yes or no and nothing else."
    # Delegates model loading and text generation to the project helper.
    load_and_generate_llm_inference(MODEL_PATH, build_prompt(question))


# Guard so importing this module no longer triggers a model load/generation
# as a side effect; behavior when run as a script is unchanged.
if __name__ == "__main__":
    main()