import ollama

from scripts import loggorilla, synthesai


# Simple question-and-answer wrapper around a local Ollama chat model.
class qna:

    def __init__(self):
        pass
    def ask(self, params):
        APIADDR = "ASK"
        sysrole = params['sysrole']
        question = params['question']
        model = "deepseek-r1:7b"
        # Seed the conversation with the system role and the user's question.
        history = [
            { "role" : "system", "content" : sysrole },
            { "role" : "user", "content" : question }
        ]
        loggorilla.prcss(APIADDR, "Processing AI")
        # Single-shot (non-streaming) call to the Ollama model.
        result = ollama.chat(model=model, messages=history, stream=False)
        # Streaming variant, kept for reference (requires stream=True above):
        #for chunk in result:
        #    print(chunk['message']['content'], end='', flush=True)
        loggorilla.prcss(APIADDR, "Processing AI finish")
        loggorilla.fyinf(APIADDR, result['message']['content'])
        # Record the assistant's reply so callers can keep the running history.
        history.append({ "role" : "assistant", "content" : result['message']['content'] })
        return {
            "content" : result['message']['content'],
            "history" : history
        }
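

# --- Minimal usage sketch (illustrative, not part of the original class) ---
# Assumes a local Ollama server with the deepseek-r1:7b model already pulled
# and that the scripts package (loggorilla, synthesai) is importable; the
# prompt strings below are hypothetical examples.
if __name__ == "__main__":
    bot = qna()
    reply = bot.ask({
        "sysrole"  : "You are a concise technical assistant.",
        "question" : "Summarize what Ollama does in one sentence."
    })
    print(reply["content"])
    # reply["history"] now holds system, user, and assistant turns and can be
    # reused to continue the conversation.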