Feature for QnA with AI

Dita Aji Pratama 2025-02-18 16:44:03 +07:00
parent 2fd74192ff
commit ad26252269
2 changed files with 52 additions and 1 deletion


@@ -5,11 +5,14 @@
 # This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 # You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
-from bottle import Bottle, route
+from bottle import Bottle, route, request, response
 from config import directory
+import json
 import templates.plain.main as template_public
 import modules.public.home as public_home
+import modules.api.qna as api_qna
 
 app = Bottle()

@@ -21,3 +24,16 @@ def index():
         }
     }
     return public_home.main().html(params)
+@app.route('/api/synthesisgen/ask', method=['OPTIONS', 'POST'])
+def qna_ask():
+    try:
+        if request.method == 'OPTIONS':
+            # CORS preflight: reply with an empty body.
+            return None
+        else:
+            response.content_type = 'application/json'
+            params = request.json
+            return json.dumps(api_qna.qna().ask(params), indent=2).encode()
+    except Exception as e:
+        # Log the error and return an empty JSON object instead of a traceback.
+        print(str(e), flush=True)
+        return json.dumps({}, indent=2).encode()
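
For reference, a client call against this endpoint could look like the minimal sketch below. The host and port are assumptions, since the commit does not show how the Bottle app is served; because the handler reads request.json, the Content-Type header must be application/json.

# Hypothetical client for POST /api/synthesisgen/ask; localhost:8080 is an assumed host/port.
import json
import urllib.request

payload = json.dumps({
    "sysrole" : "You are a helpful assistant.",
    "question": "What does this service do?"
}).encode()

req = urllib.request.Request(
    "http://localhost:8080/api/synthesisgen/ask",
    data    = payload,
    headers = {"Content-Type": "application/json"},
    method  = "POST"
)
with urllib.request.urlopen(req) as resp:
    print(json.loads(resp.read())["content"])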

modules/api/qna.py Executable file

@@ -0,0 +1,35 @@
+import ollama
+
+from scripts import loggorilla, synthesai
+
+class qna:
+
+    def __init__(self):
+        pass
+
+    def ask(self, params):
+        APIADDR  = "ASK"
+        sysrole  = params['sysrole' ]
+        question = params['question']
+        model    = "deepseek-r1:7b"
+        # Chat transcript: the system prompt followed by the user's question.
+        history  = [
+            { "role" : "system", "content" : sysrole  },
+            { "role" : "user"  , "content" : question }
+        ]
+        loggorilla.prcss(APIADDR, "Processing AI")
+        result = ollama.chat(model=model, messages=history, stream=False)
+        #for chunk in result:
+        #    print(chunk['message']['content'], end='', flush=True)
+        loggorilla.prcss(APIADDR, "Processing AI finish")
+        loggorilla.fyinf(APIADDR, result['message']['content'])
+        # Append the assistant's reply so the caller receives the full transcript.
+        history.append({ "role" : "assistant", "content" : result['message']['content'] })
+        return {
+            "content" : result['message']['content'],
+            "history" : history
+        }