Feature for QnA with AI
parent 2fd74192ff
commit ad26252269

handler.py (18 changed lines)
@@ -5,11 +5,14 @@
 # This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 # You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
 
-from bottle import Bottle, route
+from bottle import Bottle, route, request, response
 from config import directory
 
+import json
+
 import templates.plain.main as template_public
 import modules.public.home as public_home
+import modules.api.qna as api_qna
 
 app = Bottle()
 
@@ -21,3 +24,16 @@ def index():
         }
     }
     return public_home.main().html(params)
+
+@app.route('/api/synthesisgen/ask', method=['OPTIONS', 'POST'])
+def index():
+    try:
+        if request.method == 'OPTIONS':
+            return None
+        else:
+            response.content_type = 'application/json'
+            params = request.json
+            return json.dumps(api_qna.qna().ask(params), indent = 2).encode()
+    except Exception as e:
+        print(str(e), flush=True)
+        return json.dumps({}, indent = 2).encode()
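For reference, a call to the new /api/synthesisgen/ask route would look roughly like the sketch below. The host and port are assumptions (whatever address the Bottle app is actually served on; the commit does not say), and the payload keys sysrole and question are the ones modules/api/qna.py reads out of request.json.

# Example client call (sketch; localhost:8080 is an assumed address).
import json
import urllib.request

payload = {
    "sysrole":  "You are a helpful assistant.",
    "question": "What does this service do?"
}
req = urllib.request.Request(
    "http://localhost:8080/api/synthesisgen/ask",
    data=json.dumps(payload).encode(),
    headers={"Content-Type": "application/json"},  # needed so Bottle's request.json parses the body
    method="POST"
)
with urllib.request.urlopen(req) as resp:
    answer = json.loads(resp.read())
print(answer["content"])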
modules/api/qna.py (new executable file, 35 lines)

@@ -0,0 +1,35 @@
+import ollama
+
+from scripts import loggorilla, synthesai
+
+class qna:
+
+    def __init__(self):
+        pass
+
+    def ask(self, params):
+        APIADDR = "ASK"
+        sysrole = params['sysrole' ]
+        question = params['question' ]
+        model = "deepseek-r1:7b"
+        history = [
+            { "role" : "system", "content" : sysrole },
+            { "role" : "user", "content" : question }
+        ]
+        loggorilla.prcss(APIADDR, "Processing AI")
+        result = ollama.chat(model=model, messages=history, stream=False)
+        #for chunk in result:
+        #    print(chunk['message']['content'], end='', flush=True)
+        loggorilla.prcss(APIADDR, "Processing AI finish")
+        loggorilla.fyinf(APIADDR, result['message']['content'])
+        history.append({ "role" : "assistant", "content" : result['message']['content'] })
+        return {
+            "content" : result['message']['content'],
+            "history" : history
+        }
+
+
+
+
+
+
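As a usage note, ask() returns both the model's answer and the full message history, so a caller could build a follow-up turn by appending to that history. A minimal sketch, assuming the module is imported the same way handler.py does; the follow-up step is hypothetical, since the committed ask() always rebuilds the history from sysrole and question.

# Sketch of reusing the returned history for a follow-up turn.
import modules.api.qna as api_qna

first = api_qna.qna().ask({
    "sysrole":  "You are a helpful assistant.",
    "question": "Summarise what synthesisgen does."
})
print(first["content"])

history = first["history"]
history.append({"role": "user", "content": "Can you shorten that to one sentence?"})
# Passing this history onward would require extending ask(), e.g. with an
# optional history parameter -- not part of this commit.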