Unified API response json schema (#3170)
Browse files### What problem does this PR solve?
Unified API response json schema
### Type of change
- [x] Refactoring
This view is limited to 50 files because it contains too many changes.
See raw diff
- api/apps/api_app.py +47 -47
- api/apps/canvas_app.py +15 -15
- api/apps/chunk_app.py +22 -23
- api/apps/conversation_app.py +26 -26
- api/apps/dialog_app.py +10 -10
- api/apps/document_app.py +48 -48
- api/apps/file2document_app.py +11 -15
- api/apps/file_app.py +29 -32
- api/apps/kb_app.py +15 -15
- api/apps/llm_app.py +2 -2
- api/apps/sdk/chat.py +20 -20
- api/apps/sdk/dataset.py +535 -535
- api/apps/sdk/dify_retrieval.py +5 -5
- api/apps/sdk/doc.py +48 -48
- api/apps/sdk/session.py +394 -394
- api/apps/system_app.py +3 -5
- api/apps/tenant_app.py +3 -3
- api/apps/user_app.py +24 -24
- api/utils/api_utils.py +36 -36
- docs/references/faq.md +1 -1
- intergrations/chatgpt-on-wechat/plugins/ragflow_chat.py +7 -9
- sdk/python/test/conftest.py +6 -6
- sdk/python/test/ragflow.txt +1 -1
- web/src/components/message-input/index.tsx +4 -4
- web/src/components/message-item/hooks.ts +2 -2
- web/src/components/pdf-previewer/hooks.ts +2 -2
- web/src/hooks/chat-hooks.ts +15 -15
- web/src/hooks/chunk-hooks.ts +7 -7
- web/src/hooks/document-hooks.ts +22 -22
- web/src/hooks/file-manager-hooks.ts +12 -12
- web/src/hooks/flow-hooks.ts +4 -4
- web/src/hooks/knowledge-hooks.ts +4 -4
- web/src/hooks/llm-hooks.tsx +10 -10
- web/src/hooks/logic-hooks.ts +2 -2
- web/src/hooks/login-hooks.ts +6 -6
- web/src/hooks/user-setting-hooks.tsx +12 -12
- web/src/interfaces/database/base.ts +2 -2
- web/src/pages/add-knowledge/components/knowledge-chunk/components/chunk-creating-modal/index.tsx +1 -1
- web/src/pages/add-knowledge/components/knowledge-chunk/hooks.ts +2 -2
- web/src/pages/add-knowledge/components/knowledge-file/hooks.ts +7 -7
- web/src/pages/chat/hooks.ts +5 -5
- web/src/pages/chat/shared-hooks.ts +2 -2
- web/src/pages/document-viewer/hooks.ts +2 -2
- web/src/pages/file-manager/hooks.ts +2 -2
- web/src/pages/file-manager/move-file-modal/async-tree-select.tsx +1 -1
- web/src/pages/flow/chat/hooks.ts +2 -2
- web/src/pages/flow/hooks.ts +3 -3
- web/src/pages/flow/list/hooks.ts +1 -1
- web/src/pages/flow/utils.ts +1 -1
- web/src/pages/knowledge/hooks.ts +1 -1
api/apps/api_app.py
CHANGED
|
@@ -52,7 +52,7 @@ def new_token():
|
|
| 52 |
try:
|
| 53 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 54 |
if not tenants:
|
| 55 |
-
return get_data_error_result(
|
| 56 |
|
| 57 |
tenant_id = tenants[0].tenant_id
|
| 58 |
obj = {"tenant_id": tenant_id, "token": generate_confirmation_token(tenant_id),
|
|
@@ -68,7 +68,7 @@ def new_token():
|
|
| 68 |
obj["dialog_id"] = req["dialog_id"]
|
| 69 |
|
| 70 |
if not APITokenService.save(**obj):
|
| 71 |
-
return get_data_error_result(
|
| 72 |
|
| 73 |
return get_json_result(data=obj)
|
| 74 |
except Exception as e:
|
|
@@ -81,7 +81,7 @@ def token_list():
|
|
| 81 |
try:
|
| 82 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 83 |
if not tenants:
|
| 84 |
-
return get_data_error_result(
|
| 85 |
|
| 86 |
id = request.args["dialog_id"] if "dialog_id" in request.args else request.args["canvas_id"]
|
| 87 |
objs = APITokenService.query(tenant_id=tenants[0].tenant_id, dialog_id=id)
|
|
@@ -110,7 +110,7 @@ def stats():
|
|
| 110 |
try:
|
| 111 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 112 |
if not tenants:
|
| 113 |
-
return get_data_error_result(
|
| 114 |
objs = API4ConversationService.stats(
|
| 115 |
tenants[0].tenant_id,
|
| 116 |
request.args.get(
|
|
@@ -141,7 +141,7 @@ def set_conversation():
|
|
| 141 |
objs = APIToken.query(token=token)
|
| 142 |
if not objs:
|
| 143 |
return get_json_result(
|
| 144 |
-
data=False,
|
| 145 |
req = request.json
|
| 146 |
try:
|
| 147 |
if objs[0].source == "agent":
|
|
@@ -163,7 +163,7 @@ def set_conversation():
|
|
| 163 |
else:
|
| 164 |
e, dia = DialogService.get_by_id(objs[0].dialog_id)
|
| 165 |
if not e:
|
| 166 |
-
return get_data_error_result(
|
| 167 |
conv = {
|
| 168 |
"id": get_uuid(),
|
| 169 |
"dialog_id": dia.id,
|
|
@@ -183,11 +183,11 @@ def completion():
|
|
| 183 |
objs = APIToken.query(token=token)
|
| 184 |
if not objs:
|
| 185 |
return get_json_result(
|
| 186 |
-
data=False,
|
| 187 |
req = request.json
|
| 188 |
e, conv = API4ConversationService.get_by_id(req["conversation_id"])
|
| 189 |
if not e:
|
| 190 |
-
return get_data_error_result(
|
| 191 |
if "quote" not in req: req["quote"] = False
|
| 192 |
|
| 193 |
msg = []
|
|
@@ -257,7 +257,7 @@ def completion():
|
|
| 257 |
ans = {"answer": ans["content"], "reference": ans.get("reference", [])}
|
| 258 |
fillin_conv(ans)
|
| 259 |
rename_field(ans)
|
| 260 |
-
yield "data:" + json.dumps({"
|
| 261 |
ensure_ascii=False) + "\n\n"
|
| 262 |
|
| 263 |
canvas.messages.append({"role": "assistant", "content": final_ans["content"], "id": message_id})
|
|
@@ -267,10 +267,10 @@ def completion():
|
|
| 267 |
cvs.dsl = json.loads(str(canvas))
|
| 268 |
API4ConversationService.append_message(conv.id, conv.to_dict())
|
| 269 |
except Exception as e:
|
| 270 |
-
yield "data:" + json.dumps({"
|
| 271 |
"data": {"answer": "**ERROR**: " + str(e), "reference": []}},
|
| 272 |
ensure_ascii=False) + "\n\n"
|
| 273 |
-
yield "data:" + json.dumps({"
|
| 274 |
|
| 275 |
resp = Response(sse(), mimetype="text/event-stream")
|
| 276 |
resp.headers.add_header("Cache-control", "no-cache")
|
|
@@ -295,7 +295,7 @@ def completion():
|
|
| 295 |
conv.message.append(msg[-1])
|
| 296 |
e, dia = DialogService.get_by_id(conv.dialog_id)
|
| 297 |
if not e:
|
| 298 |
-
return get_data_error_result(
|
| 299 |
del req["conversation_id"]
|
| 300 |
del req["messages"]
|
| 301 |
|
|
@@ -310,14 +310,14 @@ def completion():
|
|
| 310 |
for ans in chat(dia, msg, True, **req):
|
| 311 |
fillin_conv(ans)
|
| 312 |
rename_field(ans)
|
| 313 |
-
yield "data:" + json.dumps({"
|
| 314 |
ensure_ascii=False) + "\n\n"
|
| 315 |
API4ConversationService.append_message(conv.id, conv.to_dict())
|
| 316 |
except Exception as e:
|
| 317 |
-
yield "data:" + json.dumps({"
|
| 318 |
"data": {"answer": "**ERROR**: " + str(e), "reference": []}},
|
| 319 |
ensure_ascii=False) + "\n\n"
|
| 320 |
-
yield "data:" + json.dumps({"
|
| 321 |
|
| 322 |
if req.get("stream", True):
|
| 323 |
resp = Response(stream(), mimetype="text/event-stream")
|
|
@@ -347,17 +347,17 @@ def get(conversation_id):
|
|
| 347 |
objs = APIToken.query(token=token)
|
| 348 |
if not objs:
|
| 349 |
return get_json_result(
|
| 350 |
-
data=False,
|
| 351 |
|
| 352 |
try:
|
| 353 |
e, conv = API4ConversationService.get_by_id(conversation_id)
|
| 354 |
if not e:
|
| 355 |
-
return get_data_error_result(
|
| 356 |
|
| 357 |
conv = conv.to_dict()
|
| 358 |
if token != APIToken.query(dialog_id=conv['dialog_id'])[0].token:
|
| 359 |
-
return get_json_result(data=False,
|
| 360 |
-
|
| 361 |
|
| 362 |
for referenct_i in conv['reference']:
|
| 363 |
if referenct_i is None or len(referenct_i) == 0:
|
|
@@ -378,7 +378,7 @@ def upload():
|
|
| 378 |
objs = APIToken.query(token=token)
|
| 379 |
if not objs:
|
| 380 |
return get_json_result(
|
| 381 |
-
data=False,
|
| 382 |
|
| 383 |
kb_name = request.form.get("kb_name").strip()
|
| 384 |
tenant_id = objs[0].tenant_id
|
|
@@ -387,19 +387,19 @@ def upload():
|
|
| 387 |
e, kb = KnowledgebaseService.get_by_name(kb_name, tenant_id)
|
| 388 |
if not e:
|
| 389 |
return get_data_error_result(
|
| 390 |
-
|
| 391 |
kb_id = kb.id
|
| 392 |
except Exception as e:
|
| 393 |
return server_error_response(e)
|
| 394 |
|
| 395 |
if 'file' not in request.files:
|
| 396 |
return get_json_result(
|
| 397 |
-
data=False,
|
| 398 |
|
| 399 |
file = request.files['file']
|
| 400 |
if file.filename == '':
|
| 401 |
return get_json_result(
|
| 402 |
-
data=False,
|
| 403 |
|
| 404 |
root_folder = FileService.get_root_folder(tenant_id)
|
| 405 |
pf_id = root_folder["id"]
|
|
@@ -410,7 +410,7 @@ def upload():
|
|
| 410 |
try:
|
| 411 |
if DocumentService.get_doc_count(kb.tenant_id) >= int(os.environ.get('MAX_FILE_NUM_PER_USER', 8192)):
|
| 412 |
return get_data_error_result(
|
| 413 |
-
|
| 414 |
|
| 415 |
filename = duplicate_name(
|
| 416 |
DocumentService.query,
|
|
@@ -419,7 +419,7 @@ def upload():
|
|
| 419 |
filetype = filename_type(filename)
|
| 420 |
if not filetype:
|
| 421 |
return get_data_error_result(
|
| 422 |
-
|
| 423 |
|
| 424 |
location = filename
|
| 425 |
while STORAGE_IMPL.obj_exist(kb_id, location):
|
|
@@ -468,7 +468,7 @@ def upload():
|
|
| 468 |
# if str(req["run"]) == TaskStatus.CANCEL.value:
|
| 469 |
tenant_id = DocumentService.get_tenant_id(doc["id"])
|
| 470 |
if not tenant_id:
|
| 471 |
-
return get_data_error_result(
|
| 472 |
|
| 473 |
# e, doc = DocumentService.get_by_id(doc["id"])
|
| 474 |
TaskService.filter_delete([Task.doc_id == doc["id"]])
|
|
@@ -490,17 +490,17 @@ def upload_parse():
|
|
| 490 |
objs = APIToken.query(token=token)
|
| 491 |
if not objs:
|
| 492 |
return get_json_result(
|
| 493 |
-
data=False,
|
| 494 |
|
| 495 |
if 'file' not in request.files:
|
| 496 |
return get_json_result(
|
| 497 |
-
data=False,
|
| 498 |
|
| 499 |
file_objs = request.files.getlist('file')
|
| 500 |
for file_obj in file_objs:
|
| 501 |
if file_obj.filename == '':
|
| 502 |
return get_json_result(
|
| 503 |
-
data=False,
|
| 504 |
|
| 505 |
doc_ids = doc_upload_and_parse(request.form.get("conversation_id"), file_objs, objs[0].tenant_id)
|
| 506 |
return get_json_result(data=doc_ids)
|
|
@@ -513,7 +513,7 @@ def list_chunks():
|
|
| 513 |
objs = APIToken.query(token=token)
|
| 514 |
if not objs:
|
| 515 |
return get_json_result(
|
| 516 |
-
data=False,
|
| 517 |
|
| 518 |
req = request.json
|
| 519 |
|
|
@@ -527,7 +527,7 @@ def list_chunks():
|
|
| 527 |
doc_id = req['doc_id']
|
| 528 |
else:
|
| 529 |
return get_json_result(
|
| 530 |
-
data=False,
|
| 531 |
)
|
| 532 |
|
| 533 |
res = retrievaler.chunk_list(doc_id=doc_id, tenant_id=tenant_id)
|
|
@@ -552,7 +552,7 @@ def list_kb_docs():
|
|
| 552 |
objs = APIToken.query(token=token)
|
| 553 |
if not objs:
|
| 554 |
return get_json_result(
|
| 555 |
-
data=False,
|
| 556 |
|
| 557 |
req = request.json
|
| 558 |
tenant_id = objs[0].tenant_id
|
|
@@ -562,7 +562,7 @@ def list_kb_docs():
|
|
| 562 |
e, kb = KnowledgebaseService.get_by_name(kb_name, tenant_id)
|
| 563 |
if not e:
|
| 564 |
return get_data_error_result(
|
| 565 |
-
|
| 566 |
kb_id = kb.id
|
| 567 |
|
| 568 |
except Exception as e:
|
|
@@ -591,7 +591,7 @@ def docinfos():
|
|
| 591 |
objs = APIToken.query(token=token)
|
| 592 |
if not objs:
|
| 593 |
return get_json_result(
|
| 594 |
-
data=False,
|
| 595 |
req = request.json
|
| 596 |
doc_ids = req["doc_ids"]
|
| 597 |
docs = DocumentService.get_by_ids(doc_ids)
|
|
@@ -605,7 +605,7 @@ def document_rm():
|
|
| 605 |
objs = APIToken.query(token=token)
|
| 606 |
if not objs:
|
| 607 |
return get_json_result(
|
| 608 |
-
data=False,
|
| 609 |
|
| 610 |
tenant_id = objs[0].tenant_id
|
| 611 |
req = request.json
|
|
@@ -617,7 +617,7 @@ def document_rm():
|
|
| 617 |
|
| 618 |
if not doc_ids:
|
| 619 |
return get_json_result(
|
| 620 |
-
data=False,
|
| 621 |
)
|
| 622 |
|
| 623 |
except Exception as e:
|
|
@@ -632,16 +632,16 @@ def document_rm():
|
|
| 632 |
try:
|
| 633 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 634 |
if not e:
|
| 635 |
-
return get_data_error_result(
|
| 636 |
tenant_id = DocumentService.get_tenant_id(doc_id)
|
| 637 |
if not tenant_id:
|
| 638 |
-
return get_data_error_result(
|
| 639 |
|
| 640 |
b, n = File2DocumentService.get_storage_address(doc_id=doc_id)
|
| 641 |
|
| 642 |
if not DocumentService.remove_document(doc, tenant_id):
|
| 643 |
return get_data_error_result(
|
| 644 |
-
|
| 645 |
|
| 646 |
f2d = File2DocumentService.get_by_document_id(doc_id)
|
| 647 |
FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.id == f2d[0].file_id])
|
|
@@ -652,7 +652,7 @@ def document_rm():
|
|
| 652 |
errors += str(e)
|
| 653 |
|
| 654 |
if errors:
|
| 655 |
-
return get_json_result(data=False,
|
| 656 |
|
| 657 |
return get_json_result(data=True)
|
| 658 |
|
|
@@ -667,11 +667,11 @@ def completion_faq():
|
|
| 667 |
objs = APIToken.query(token=token)
|
| 668 |
if not objs:
|
| 669 |
return get_json_result(
|
| 670 |
-
data=False,
|
| 671 |
|
| 672 |
e, conv = API4ConversationService.get_by_id(req["conversation_id"])
|
| 673 |
if not e:
|
| 674 |
-
return get_data_error_result(
|
| 675 |
if "quote" not in req: req["quote"] = True
|
| 676 |
|
| 677 |
msg = []
|
|
@@ -752,7 +752,7 @@ def completion_faq():
|
|
| 752 |
conv.message.append(msg[-1])
|
| 753 |
e, dia = DialogService.get_by_id(conv.dialog_id)
|
| 754 |
if not e:
|
| 755 |
-
return get_data_error_result(
|
| 756 |
del req["conversation_id"]
|
| 757 |
|
| 758 |
if not conv.reference:
|
|
@@ -804,7 +804,7 @@ def retrieval():
|
|
| 804 |
objs = APIToken.query(token=token)
|
| 805 |
if not objs:
|
| 806 |
return get_json_result(
|
| 807 |
-
data=False,
|
| 808 |
|
| 809 |
req = request.json
|
| 810 |
kb_ids = req.get("kb_id",[])
|
|
@@ -821,7 +821,7 @@ def retrieval():
|
|
| 821 |
embd_nms = list(set([kb.embd_id for kb in kbs]))
|
| 822 |
if len(embd_nms) != 1:
|
| 823 |
return get_json_result(
|
| 824 |
-
data=False,
|
| 825 |
|
| 826 |
embd_mdl = TenantLLMService.model_instance(
|
| 827 |
kbs[0].tenant_id, LLMType.EMBEDDING.value, llm_name=kbs[0].embd_id)
|
|
@@ -841,6 +841,6 @@ def retrieval():
|
|
| 841 |
return get_json_result(data=ranks)
|
| 842 |
except Exception as e:
|
| 843 |
if str(e).find("not_found") > 0:
|
| 844 |
-
return get_json_result(data=False,
|
| 845 |
-
|
| 846 |
return server_error_response(e)
|
|
|
|
| 52 |
try:
|
| 53 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 54 |
if not tenants:
|
| 55 |
+
return get_data_error_result(message="Tenant not found!")
|
| 56 |
|
| 57 |
tenant_id = tenants[0].tenant_id
|
| 58 |
obj = {"tenant_id": tenant_id, "token": generate_confirmation_token(tenant_id),
|
|
|
|
| 68 |
obj["dialog_id"] = req["dialog_id"]
|
| 69 |
|
| 70 |
if not APITokenService.save(**obj):
|
| 71 |
+
return get_data_error_result(message="Fail to new a dialog!")
|
| 72 |
|
| 73 |
return get_json_result(data=obj)
|
| 74 |
except Exception as e:
|
|
|
|
| 81 |
try:
|
| 82 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 83 |
if not tenants:
|
| 84 |
+
return get_data_error_result(message="Tenant not found!")
|
| 85 |
|
| 86 |
id = request.args["dialog_id"] if "dialog_id" in request.args else request.args["canvas_id"]
|
| 87 |
objs = APITokenService.query(tenant_id=tenants[0].tenant_id, dialog_id=id)
|
|
|
|
| 110 |
try:
|
| 111 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 112 |
if not tenants:
|
| 113 |
+
return get_data_error_result(message="Tenant not found!")
|
| 114 |
objs = API4ConversationService.stats(
|
| 115 |
tenants[0].tenant_id,
|
| 116 |
request.args.get(
|
|
|
|
| 141 |
objs = APIToken.query(token=token)
|
| 142 |
if not objs:
|
| 143 |
return get_json_result(
|
| 144 |
+
data=False, message='Token is not valid!"', code=RetCode.AUTHENTICATION_ERROR)
|
| 145 |
req = request.json
|
| 146 |
try:
|
| 147 |
if objs[0].source == "agent":
|
|
|
|
| 163 |
else:
|
| 164 |
e, dia = DialogService.get_by_id(objs[0].dialog_id)
|
| 165 |
if not e:
|
| 166 |
+
return get_data_error_result(message="Dialog not found")
|
| 167 |
conv = {
|
| 168 |
"id": get_uuid(),
|
| 169 |
"dialog_id": dia.id,
|
|
|
|
| 183 |
objs = APIToken.query(token=token)
|
| 184 |
if not objs:
|
| 185 |
return get_json_result(
|
| 186 |
+
data=False, message='Token is not valid!"', code=RetCode.AUTHENTICATION_ERROR)
|
| 187 |
req = request.json
|
| 188 |
e, conv = API4ConversationService.get_by_id(req["conversation_id"])
|
| 189 |
if not e:
|
| 190 |
+
return get_data_error_result(message="Conversation not found!")
|
| 191 |
if "quote" not in req: req["quote"] = False
|
| 192 |
|
| 193 |
msg = []
|
|
|
|
| 257 |
ans = {"answer": ans["content"], "reference": ans.get("reference", [])}
|
| 258 |
fillin_conv(ans)
|
| 259 |
rename_field(ans)
|
| 260 |
+
yield "data:" + json.dumps({"code": 0, "message": "", "data": ans},
|
| 261 |
ensure_ascii=False) + "\n\n"
|
| 262 |
|
| 263 |
canvas.messages.append({"role": "assistant", "content": final_ans["content"], "id": message_id})
|
|
|
|
| 267 |
cvs.dsl = json.loads(str(canvas))
|
| 268 |
API4ConversationService.append_message(conv.id, conv.to_dict())
|
| 269 |
except Exception as e:
|
| 270 |
+
yield "data:" + json.dumps({"code": 500, "message": str(e),
|
| 271 |
"data": {"answer": "**ERROR**: " + str(e), "reference": []}},
|
| 272 |
ensure_ascii=False) + "\n\n"
|
| 273 |
+
yield "data:" + json.dumps({"code": 0, "message": "", "data": True}, ensure_ascii=False) + "\n\n"
|
| 274 |
|
| 275 |
resp = Response(sse(), mimetype="text/event-stream")
|
| 276 |
resp.headers.add_header("Cache-control", "no-cache")
|
|
|
|
| 295 |
conv.message.append(msg[-1])
|
| 296 |
e, dia = DialogService.get_by_id(conv.dialog_id)
|
| 297 |
if not e:
|
| 298 |
+
return get_data_error_result(message="Dialog not found!")
|
| 299 |
del req["conversation_id"]
|
| 300 |
del req["messages"]
|
| 301 |
|
|
|
|
| 310 |
for ans in chat(dia, msg, True, **req):
|
| 311 |
fillin_conv(ans)
|
| 312 |
rename_field(ans)
|
| 313 |
+
yield "data:" + json.dumps({"code": 0, "message": "", "data": ans},
|
| 314 |
ensure_ascii=False) + "\n\n"
|
| 315 |
API4ConversationService.append_message(conv.id, conv.to_dict())
|
| 316 |
except Exception as e:
|
| 317 |
+
yield "data:" + json.dumps({"code": 500, "message": str(e),
|
| 318 |
"data": {"answer": "**ERROR**: " + str(e), "reference": []}},
|
| 319 |
ensure_ascii=False) + "\n\n"
|
| 320 |
+
yield "data:" + json.dumps({"code": 0, "message": "", "data": True}, ensure_ascii=False) + "\n\n"
|
| 321 |
|
| 322 |
if req.get("stream", True):
|
| 323 |
resp = Response(stream(), mimetype="text/event-stream")
|
|
|
|
| 347 |
objs = APIToken.query(token=token)
|
| 348 |
if not objs:
|
| 349 |
return get_json_result(
|
| 350 |
+
data=False, message='Token is not valid!"', code=RetCode.AUTHENTICATION_ERROR)
|
| 351 |
|
| 352 |
try:
|
| 353 |
e, conv = API4ConversationService.get_by_id(conversation_id)
|
| 354 |
if not e:
|
| 355 |
+
return get_data_error_result(message="Conversation not found!")
|
| 356 |
|
| 357 |
conv = conv.to_dict()
|
| 358 |
if token != APIToken.query(dialog_id=conv['dialog_id'])[0].token:
|
| 359 |
+
return get_json_result(data=False, message='Token is not valid for this conversation_id!"',
|
| 360 |
+
code=RetCode.AUTHENTICATION_ERROR)
|
| 361 |
|
| 362 |
for referenct_i in conv['reference']:
|
| 363 |
if referenct_i is None or len(referenct_i) == 0:
|
|
|
|
| 378 |
objs = APIToken.query(token=token)
|
| 379 |
if not objs:
|
| 380 |
return get_json_result(
|
| 381 |
+
data=False, message='Token is not valid!"', code=RetCode.AUTHENTICATION_ERROR)
|
| 382 |
|
| 383 |
kb_name = request.form.get("kb_name").strip()
|
| 384 |
tenant_id = objs[0].tenant_id
|
|
|
|
| 387 |
e, kb = KnowledgebaseService.get_by_name(kb_name, tenant_id)
|
| 388 |
if not e:
|
| 389 |
return get_data_error_result(
|
| 390 |
+
message="Can't find this knowledgebase!")
|
| 391 |
kb_id = kb.id
|
| 392 |
except Exception as e:
|
| 393 |
return server_error_response(e)
|
| 394 |
|
| 395 |
if 'file' not in request.files:
|
| 396 |
return get_json_result(
|
| 397 |
+
data=False, message='No file part!', code=RetCode.ARGUMENT_ERROR)
|
| 398 |
|
| 399 |
file = request.files['file']
|
| 400 |
if file.filename == '':
|
| 401 |
return get_json_result(
|
| 402 |
+
data=False, message='No file selected!', code=RetCode.ARGUMENT_ERROR)
|
| 403 |
|
| 404 |
root_folder = FileService.get_root_folder(tenant_id)
|
| 405 |
pf_id = root_folder["id"]
|
|
|
|
| 410 |
try:
|
| 411 |
if DocumentService.get_doc_count(kb.tenant_id) >= int(os.environ.get('MAX_FILE_NUM_PER_USER', 8192)):
|
| 412 |
return get_data_error_result(
|
| 413 |
+
message="Exceed the maximum file number of a free user!")
|
| 414 |
|
| 415 |
filename = duplicate_name(
|
| 416 |
DocumentService.query,
|
|
|
|
| 419 |
filetype = filename_type(filename)
|
| 420 |
if not filetype:
|
| 421 |
return get_data_error_result(
|
| 422 |
+
message="This type of file has not been supported yet!")
|
| 423 |
|
| 424 |
location = filename
|
| 425 |
while STORAGE_IMPL.obj_exist(kb_id, location):
|
|
|
|
| 468 |
# if str(req["run"]) == TaskStatus.CANCEL.value:
|
| 469 |
tenant_id = DocumentService.get_tenant_id(doc["id"])
|
| 470 |
if not tenant_id:
|
| 471 |
+
return get_data_error_result(message="Tenant not found!")
|
| 472 |
|
| 473 |
# e, doc = DocumentService.get_by_id(doc["id"])
|
| 474 |
TaskService.filter_delete([Task.doc_id == doc["id"]])
|
|
|
|
| 490 |
objs = APIToken.query(token=token)
|
| 491 |
if not objs:
|
| 492 |
return get_json_result(
|
| 493 |
+
data=False, message='Token is not valid!"', code=RetCode.AUTHENTICATION_ERROR)
|
| 494 |
|
| 495 |
if 'file' not in request.files:
|
| 496 |
return get_json_result(
|
| 497 |
+
data=False, message='No file part!', code=RetCode.ARGUMENT_ERROR)
|
| 498 |
|
| 499 |
file_objs = request.files.getlist('file')
|
| 500 |
for file_obj in file_objs:
|
| 501 |
if file_obj.filename == '':
|
| 502 |
return get_json_result(
|
| 503 |
+
data=False, message='No file selected!', code=RetCode.ARGUMENT_ERROR)
|
| 504 |
|
| 505 |
doc_ids = doc_upload_and_parse(request.form.get("conversation_id"), file_objs, objs[0].tenant_id)
|
| 506 |
return get_json_result(data=doc_ids)
|
|
|
|
| 513 |
objs = APIToken.query(token=token)
|
| 514 |
if not objs:
|
| 515 |
return get_json_result(
|
| 516 |
+
data=False, message='Token is not valid!"', code=RetCode.AUTHENTICATION_ERROR)
|
| 517 |
|
| 518 |
req = request.json
|
| 519 |
|
|
|
|
| 527 |
doc_id = req['doc_id']
|
| 528 |
else:
|
| 529 |
return get_json_result(
|
| 530 |
+
data=False, message="Can't find doc_name or doc_id"
|
| 531 |
)
|
| 532 |
|
| 533 |
res = retrievaler.chunk_list(doc_id=doc_id, tenant_id=tenant_id)
|
|
|
|
| 552 |
objs = APIToken.query(token=token)
|
| 553 |
if not objs:
|
| 554 |
return get_json_result(
|
| 555 |
+
data=False, message='Token is not valid!"', code=RetCode.AUTHENTICATION_ERROR)
|
| 556 |
|
| 557 |
req = request.json
|
| 558 |
tenant_id = objs[0].tenant_id
|
|
|
|
| 562 |
e, kb = KnowledgebaseService.get_by_name(kb_name, tenant_id)
|
| 563 |
if not e:
|
| 564 |
return get_data_error_result(
|
| 565 |
+
message="Can't find this knowledgebase!")
|
| 566 |
kb_id = kb.id
|
| 567 |
|
| 568 |
except Exception as e:
|
|
|
|
| 591 |
objs = APIToken.query(token=token)
|
| 592 |
if not objs:
|
| 593 |
return get_json_result(
|
| 594 |
+
data=False, message='Token is not valid!"', code=RetCode.AUTHENTICATION_ERROR)
|
| 595 |
req = request.json
|
| 596 |
doc_ids = req["doc_ids"]
|
| 597 |
docs = DocumentService.get_by_ids(doc_ids)
|
|
|
|
| 605 |
objs = APIToken.query(token=token)
|
| 606 |
if not objs:
|
| 607 |
return get_json_result(
|
| 608 |
+
data=False, message='Token is not valid!"', code=RetCode.AUTHENTICATION_ERROR)
|
| 609 |
|
| 610 |
tenant_id = objs[0].tenant_id
|
| 611 |
req = request.json
|
|
|
|
| 617 |
|
| 618 |
if not doc_ids:
|
| 619 |
return get_json_result(
|
| 620 |
+
data=False, message="Can't find doc_names or doc_ids"
|
| 621 |
)
|
| 622 |
|
| 623 |
except Exception as e:
|
|
|
|
| 632 |
try:
|
| 633 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 634 |
if not e:
|
| 635 |
+
return get_data_error_result(message="Document not found!")
|
| 636 |
tenant_id = DocumentService.get_tenant_id(doc_id)
|
| 637 |
if not tenant_id:
|
| 638 |
+
return get_data_error_result(message="Tenant not found!")
|
| 639 |
|
| 640 |
b, n = File2DocumentService.get_storage_address(doc_id=doc_id)
|
| 641 |
|
| 642 |
if not DocumentService.remove_document(doc, tenant_id):
|
| 643 |
return get_data_error_result(
|
| 644 |
+
message="Database error (Document removal)!")
|
| 645 |
|
| 646 |
f2d = File2DocumentService.get_by_document_id(doc_id)
|
| 647 |
FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.id == f2d[0].file_id])
|
|
|
|
| 652 |
errors += str(e)
|
| 653 |
|
| 654 |
if errors:
|
| 655 |
+
return get_json_result(data=False, message=errors, code=RetCode.SERVER_ERROR)
|
| 656 |
|
| 657 |
return get_json_result(data=True)
|
| 658 |
|
|
|
|
| 667 |
objs = APIToken.query(token=token)
|
| 668 |
if not objs:
|
| 669 |
return get_json_result(
|
| 670 |
+
data=False, message='Token is not valid!"', code=RetCode.AUTHENTICATION_ERROR)
|
| 671 |
|
| 672 |
e, conv = API4ConversationService.get_by_id(req["conversation_id"])
|
| 673 |
if not e:
|
| 674 |
+
return get_data_error_result(message="Conversation not found!")
|
| 675 |
if "quote" not in req: req["quote"] = True
|
| 676 |
|
| 677 |
msg = []
|
|
|
|
| 752 |
conv.message.append(msg[-1])
|
| 753 |
e, dia = DialogService.get_by_id(conv.dialog_id)
|
| 754 |
if not e:
|
| 755 |
+
return get_data_error_result(message="Dialog not found!")
|
| 756 |
del req["conversation_id"]
|
| 757 |
|
| 758 |
if not conv.reference:
|
|
|
|
| 804 |
objs = APIToken.query(token=token)
|
| 805 |
if not objs:
|
| 806 |
return get_json_result(
|
| 807 |
+
data=False, message='Token is not valid!"', code=RetCode.AUTHENTICATION_ERROR)
|
| 808 |
|
| 809 |
req = request.json
|
| 810 |
kb_ids = req.get("kb_id",[])
|
|
|
|
| 821 |
embd_nms = list(set([kb.embd_id for kb in kbs]))
|
| 822 |
if len(embd_nms) != 1:
|
| 823 |
return get_json_result(
|
| 824 |
+
data=False, message='Knowledge bases use different embedding models or does not exist."', code=RetCode.AUTHENTICATION_ERROR)
|
| 825 |
|
| 826 |
embd_mdl = TenantLLMService.model_instance(
|
| 827 |
kbs[0].tenant_id, LLMType.EMBEDDING.value, llm_name=kbs[0].embd_id)
|
|
|
|
| 841 |
return get_json_result(data=ranks)
|
| 842 |
except Exception as e:
|
| 843 |
if str(e).find("not_found") > 0:
|
| 844 |
+
return get_json_result(data=False, message='No chunk found! Check the chunk status please!',
|
| 845 |
+
code=RetCode.DATA_ERROR)
|
| 846 |
return server_error_response(e)
|
api/apps/canvas_app.py
CHANGED
|
@@ -46,8 +46,8 @@ def rm():
|
|
| 46 |
for i in request.json["canvas_ids"]:
|
| 47 |
if not UserCanvasService.query(user_id=current_user.id,id=i):
|
| 48 |
return get_json_result(
|
| 49 |
-
data=False,
|
| 50 |
-
|
| 51 |
UserCanvasService.delete_by_id(i)
|
| 52 |
return get_json_result(data=True)
|
| 53 |
|
|
@@ -66,12 +66,12 @@ def save():
|
|
| 66 |
return server_error_response(ValueError("Duplicated title."))
|
| 67 |
req["id"] = get_uuid()
|
| 68 |
if not UserCanvasService.save(**req):
|
| 69 |
-
return get_data_error_result(
|
| 70 |
else:
|
| 71 |
if not UserCanvasService.query(user_id=current_user.id, id=req["id"]):
|
| 72 |
return get_json_result(
|
| 73 |
-
data=False,
|
| 74 |
-
|
| 75 |
UserCanvasService.update_by_id(req["id"], req)
|
| 76 |
return get_json_result(data=req)
|
| 77 |
|
|
@@ -81,7 +81,7 @@ def save():
|
|
| 81 |
def get(canvas_id):
|
| 82 |
e, c = UserCanvasService.get_by_id(canvas_id)
|
| 83 |
if not e:
|
| 84 |
-
return get_data_error_result(
|
| 85 |
return get_json_result(data=c.to_dict())
|
| 86 |
|
| 87 |
|
|
@@ -93,11 +93,11 @@ def run():
|
|
| 93 |
stream = req.get("stream", True)
|
| 94 |
e, cvs = UserCanvasService.get_by_id(req["id"])
|
| 95 |
if not e:
|
| 96 |
-
return get_data_error_result(
|
| 97 |
if not UserCanvasService.query(user_id=current_user.id, id=req["id"]):
|
| 98 |
return get_json_result(
|
| 99 |
-
data=False,
|
| 100 |
-
|
| 101 |
|
| 102 |
if not isinstance(cvs.dsl, str):
|
| 103 |
cvs.dsl = json.dumps(cvs.dsl, ensure_ascii=False)
|
|
@@ -130,7 +130,7 @@ def run():
|
|
| 130 |
for k in ans.keys():
|
| 131 |
final_ans[k] = ans[k]
|
| 132 |
ans = {"answer": ans["content"], "reference": ans.get("reference", [])}
|
| 133 |
-
yield "data:" + json.dumps({"
|
| 134 |
|
| 135 |
canvas.messages.append({"role": "assistant", "content": final_ans["content"], "id": message_id})
|
| 136 |
canvas.history.append(("assistant", final_ans["content"]))
|
|
@@ -139,10 +139,10 @@ def run():
|
|
| 139 |
cvs.dsl = json.loads(str(canvas))
|
| 140 |
UserCanvasService.update_by_id(req["id"], cvs.to_dict())
|
| 141 |
except Exception as e:
|
| 142 |
-
yield "data:" + json.dumps({"
|
| 143 |
"data": {"answer": "**ERROR**: " + str(e), "reference": []}},
|
| 144 |
ensure_ascii=False) + "\n\n"
|
| 145 |
-
yield "data:" + json.dumps({"
|
| 146 |
|
| 147 |
resp = Response(sse(), mimetype="text/event-stream")
|
| 148 |
resp.headers.add_header("Cache-control", "no-cache")
|
|
@@ -168,11 +168,11 @@ def reset():
|
|
| 168 |
try:
|
| 169 |
e, user_canvas = UserCanvasService.get_by_id(req["id"])
|
| 170 |
if not e:
|
| 171 |
-
return get_data_error_result(
|
| 172 |
if not UserCanvasService.query(user_id=current_user.id, id=req["id"]):
|
| 173 |
return get_json_result(
|
| 174 |
-
data=False,
|
| 175 |
-
|
| 176 |
|
| 177 |
canvas = Canvas(json.dumps(user_canvas.dsl), current_user.id)
|
| 178 |
canvas.reset()
|
|
|
|
| 46 |
for i in request.json["canvas_ids"]:
|
| 47 |
if not UserCanvasService.query(user_id=current_user.id,id=i):
|
| 48 |
return get_json_result(
|
| 49 |
+
data=False, message='Only owner of canvas authorized for this operation.',
|
| 50 |
+
code=RetCode.OPERATING_ERROR)
|
| 51 |
UserCanvasService.delete_by_id(i)
|
| 52 |
return get_json_result(data=True)
|
| 53 |
|
|
|
|
| 66 |
return server_error_response(ValueError("Duplicated title."))
|
| 67 |
req["id"] = get_uuid()
|
| 68 |
if not UserCanvasService.save(**req):
|
| 69 |
+
return get_data_error_result(message="Fail to save canvas.")
|
| 70 |
else:
|
| 71 |
if not UserCanvasService.query(user_id=current_user.id, id=req["id"]):
|
| 72 |
return get_json_result(
|
| 73 |
+
data=False, message='Only owner of canvas authorized for this operation.',
|
| 74 |
+
code=RetCode.OPERATING_ERROR)
|
| 75 |
UserCanvasService.update_by_id(req["id"], req)
|
| 76 |
return get_json_result(data=req)
|
| 77 |
|
|
|
|
| 81 |
def get(canvas_id):
|
| 82 |
e, c = UserCanvasService.get_by_id(canvas_id)
|
| 83 |
if not e:
|
| 84 |
+
return get_data_error_result(message="canvas not found.")
|
| 85 |
return get_json_result(data=c.to_dict())
|
| 86 |
|
| 87 |
|
|
|
|
| 93 |
stream = req.get("stream", True)
|
| 94 |
e, cvs = UserCanvasService.get_by_id(req["id"])
|
| 95 |
if not e:
|
| 96 |
+
return get_data_error_result(message="canvas not found.")
|
| 97 |
if not UserCanvasService.query(user_id=current_user.id, id=req["id"]):
|
| 98 |
return get_json_result(
|
| 99 |
+
data=False, message='Only owner of canvas authorized for this operation.',
|
| 100 |
+
code=RetCode.OPERATING_ERROR)
|
| 101 |
|
| 102 |
if not isinstance(cvs.dsl, str):
|
| 103 |
cvs.dsl = json.dumps(cvs.dsl, ensure_ascii=False)
|
|
|
|
| 130 |
for k in ans.keys():
|
| 131 |
final_ans[k] = ans[k]
|
| 132 |
ans = {"answer": ans["content"], "reference": ans.get("reference", [])}
|
| 133 |
+
yield "data:" + json.dumps({"code": 0, "message": "", "data": ans}, ensure_ascii=False) + "\n\n"
|
| 134 |
|
| 135 |
canvas.messages.append({"role": "assistant", "content": final_ans["content"], "id": message_id})
|
| 136 |
canvas.history.append(("assistant", final_ans["content"]))
|
|
|
|
| 139 |
cvs.dsl = json.loads(str(canvas))
|
| 140 |
UserCanvasService.update_by_id(req["id"], cvs.to_dict())
|
| 141 |
except Exception as e:
|
| 142 |
+
yield "data:" + json.dumps({"code": 500, "message": str(e),
|
| 143 |
"data": {"answer": "**ERROR**: " + str(e), "reference": []}},
|
| 144 |
ensure_ascii=False) + "\n\n"
|
| 145 |
+
yield "data:" + json.dumps({"code": 0, "message": "", "data": True}, ensure_ascii=False) + "\n\n"
|
| 146 |
|
| 147 |
resp = Response(sse(), mimetype="text/event-stream")
|
| 148 |
resp.headers.add_header("Cache-control", "no-cache")
|
|
|
|
| 168 |
try:
|
| 169 |
e, user_canvas = UserCanvasService.get_by_id(req["id"])
|
| 170 |
if not e:
|
| 171 |
+
return get_data_error_result(message="canvas not found.")
|
| 172 |
if not UserCanvasService.query(user_id=current_user.id, id=req["id"]):
|
| 173 |
return get_json_result(
|
| 174 |
+
data=False, message='Only owner of canvas authorized for this operation.',
|
| 175 |
+
code=RetCode.OPERATING_ERROR)
|
| 176 |
|
| 177 |
canvas = Canvas(json.dumps(user_canvas.dsl), current_user.id)
|
| 178 |
canvas.reset()
|
api/apps/chunk_app.py
CHANGED
|
@@ -15,7 +15,6 @@
|
|
| 15 |
#
|
| 16 |
import datetime
|
| 17 |
import json
|
| 18 |
-
import traceback
|
| 19 |
|
| 20 |
from flask import request
|
| 21 |
from flask_login import login_required, current_user
|
|
@@ -50,10 +49,10 @@ def list_chunk():
|
|
| 50 |
try:
|
| 51 |
tenant_id = DocumentService.get_tenant_id(req["doc_id"])
|
| 52 |
if not tenant_id:
|
| 53 |
-
return get_data_error_result(
|
| 54 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 55 |
if not e:
|
| 56 |
-
return get_data_error_result(
|
| 57 |
query = {
|
| 58 |
"doc_ids": [doc_id], "page": page, "size": size, "question": question, "sort": True
|
| 59 |
}
|
|
@@ -84,8 +83,8 @@ def list_chunk():
|
|
| 84 |
return get_json_result(data=res)
|
| 85 |
except Exception as e:
|
| 86 |
if str(e).find("not_found") > 0:
|
| 87 |
-
return get_json_result(data=False,
|
| 88 |
-
|
| 89 |
return server_error_response(e)
|
| 90 |
|
| 91 |
|
|
@@ -96,7 +95,7 @@ def get():
|
|
| 96 |
try:
|
| 97 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 98 |
if not tenants:
|
| 99 |
-
return get_data_error_result(
|
| 100 |
res = ELASTICSEARCH.get(
|
| 101 |
chunk_id, search.index_name(
|
| 102 |
tenants[0].tenant_id))
|
|
@@ -115,8 +114,8 @@ def get():
|
|
| 115 |
return get_json_result(data=res)
|
| 116 |
except Exception as e:
|
| 117 |
if str(e).find("NotFoundError") >= 0:
|
| 118 |
-
return get_json_result(data=False,
|
| 119 |
-
|
| 120 |
return server_error_response(e)
|
| 121 |
|
| 122 |
|
|
@@ -139,14 +138,14 @@ def set():
|
|
| 139 |
try:
|
| 140 |
tenant_id = DocumentService.get_tenant_id(req["doc_id"])
|
| 141 |
if not tenant_id:
|
| 142 |
-
return get_data_error_result(
|
| 143 |
|
| 144 |
embd_id = DocumentService.get_embd_id(req["doc_id"])
|
| 145 |
embd_mdl = LLMBundle(tenant_id, LLMType.EMBEDDING, embd_id)
|
| 146 |
|
| 147 |
e, doc = DocumentService.get_by_id(req["doc_id"])
|
| 148 |
if not e:
|
| 149 |
-
return get_data_error_result(
|
| 150 |
|
| 151 |
if doc.parser_id == ParserType.QA:
|
| 152 |
arr = [
|
|
@@ -155,7 +154,7 @@ def set():
|
|
| 155 |
req["content_with_weight"]) if len(t) > 1]
|
| 156 |
if len(arr) != 2:
|
| 157 |
return get_data_error_result(
|
| 158 |
-
|
| 159 |
q, a = rmPrefix(arr[0]), rmPrefix(arr[1])
|
| 160 |
d = beAdoc(d, arr[0], arr[1], not any(
|
| 161 |
[rag_tokenizer.is_chinese(t) for t in q + a]))
|
|
@@ -177,10 +176,10 @@ def switch():
|
|
| 177 |
try:
|
| 178 |
tenant_id = DocumentService.get_tenant_id(req["doc_id"])
|
| 179 |
if not tenant_id:
|
| 180 |
-
return get_data_error_result(
|
| 181 |
if not ELASTICSEARCH.upsert([{"id": i, "available_int": int(req["available_int"])} for i in req["chunk_ids"]],
|
| 182 |
search.index_name(tenant_id)):
|
| 183 |
-
return get_data_error_result(
|
| 184 |
return get_json_result(data=True)
|
| 185 |
except Exception as e:
|
| 186 |
return server_error_response(e)
|
|
@@ -194,10 +193,10 @@ def rm():
|
|
| 194 |
try:
|
| 195 |
if not ELASTICSEARCH.deleteByQuery(
|
| 196 |
Q("ids", values=req["chunk_ids"]), search.index_name(current_user.id)):
|
| 197 |
-
return get_data_error_result(
|
| 198 |
e, doc = DocumentService.get_by_id(req["doc_id"])
|
| 199 |
if not e:
|
| 200 |
-
return get_data_error_result(
|
| 201 |
deleted_chunk_ids = req["chunk_ids"]
|
| 202 |
chunk_number = len(deleted_chunk_ids)
|
| 203 |
DocumentService.decrement_chunk_num(doc.id, doc.kb_id, 1, chunk_number, 0)
|
|
@@ -225,14 +224,14 @@ def create():
|
|
| 225 |
try:
|
| 226 |
e, doc = DocumentService.get_by_id(req["doc_id"])
|
| 227 |
if not e:
|
| 228 |
-
return get_data_error_result(
|
| 229 |
d["kb_id"] = [doc.kb_id]
|
| 230 |
d["docnm_kwd"] = doc.name
|
| 231 |
d["doc_id"] = doc.id
|
| 232 |
|
| 233 |
tenant_id = DocumentService.get_tenant_id(req["doc_id"])
|
| 234 |
if not tenant_id:
|
| 235 |
-
return get_data_error_result(
|
| 236 |
|
| 237 |
embd_id = DocumentService.get_embd_id(req["doc_id"])
|
| 238 |
embd_mdl = LLMBundle(tenant_id, LLMType.EMBEDDING.value, embd_id)
|
|
@@ -273,12 +272,12 @@ def retrieval_test():
|
|
| 273 |
break
|
| 274 |
else:
|
| 275 |
return get_json_result(
|
| 276 |
-
data=False,
|
| 277 |
-
|
| 278 |
|
| 279 |
e, kb = KnowledgebaseService.get_by_id(kb_id[0])
|
| 280 |
if not e:
|
| 281 |
-
return get_data_error_result(
|
| 282 |
|
| 283 |
embd_mdl = LLMBundle(kb.tenant_id, LLMType.EMBEDDING.value, llm_name=kb.embd_id)
|
| 284 |
|
|
@@ -301,8 +300,8 @@ def retrieval_test():
|
|
| 301 |
return get_json_result(data=ranks)
|
| 302 |
except Exception as e:
|
| 303 |
if str(e).find("not_found") > 0:
|
| 304 |
-
return get_json_result(data=False,
|
| 305 |
-
|
| 306 |
return server_error_response(e)
|
| 307 |
|
| 308 |
|
|
@@ -321,7 +320,7 @@ def knowledge_graph():
|
|
| 321 |
ty = sres.field[id]["knowledge_graph_kwd"]
|
| 322 |
try:
|
| 323 |
content_json = json.loads(sres.field[id]["content_with_weight"])
|
| 324 |
-
except Exception
|
| 325 |
continue
|
| 326 |
|
| 327 |
if ty == 'mind_map':
|
|
|
|
| 15 |
#
|
| 16 |
import datetime
|
| 17 |
import json
|
|
|
|
| 18 |
|
| 19 |
from flask import request
|
| 20 |
from flask_login import login_required, current_user
|
|
|
|
| 49 |
try:
|
| 50 |
tenant_id = DocumentService.get_tenant_id(req["doc_id"])
|
| 51 |
if not tenant_id:
|
| 52 |
+
return get_data_error_result(message="Tenant not found!")
|
| 53 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 54 |
if not e:
|
| 55 |
+
return get_data_error_result(message="Document not found!")
|
| 56 |
query = {
|
| 57 |
"doc_ids": [doc_id], "page": page, "size": size, "question": question, "sort": True
|
| 58 |
}
|
|
|
|
| 83 |
return get_json_result(data=res)
|
| 84 |
except Exception as e:
|
| 85 |
if str(e).find("not_found") > 0:
|
| 86 |
+
return get_json_result(data=False, message='No chunk found!',
|
| 87 |
+
code=RetCode.DATA_ERROR)
|
| 88 |
return server_error_response(e)
|
| 89 |
|
| 90 |
|
|
|
|
| 95 |
try:
|
| 96 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 97 |
if not tenants:
|
| 98 |
+
return get_data_error_result(message="Tenant not found!")
|
| 99 |
res = ELASTICSEARCH.get(
|
| 100 |
chunk_id, search.index_name(
|
| 101 |
tenants[0].tenant_id))
|
|
|
|
| 114 |
return get_json_result(data=res)
|
| 115 |
except Exception as e:
|
| 116 |
if str(e).find("NotFoundError") >= 0:
|
| 117 |
+
return get_json_result(data=False, message='Chunk not found!',
|
| 118 |
+
code=RetCode.DATA_ERROR)
|
| 119 |
return server_error_response(e)
|
| 120 |
|
| 121 |
|
|
|
|
| 138 |
try:
|
| 139 |
tenant_id = DocumentService.get_tenant_id(req["doc_id"])
|
| 140 |
if not tenant_id:
|
| 141 |
+
return get_data_error_result(message="Tenant not found!")
|
| 142 |
|
| 143 |
embd_id = DocumentService.get_embd_id(req["doc_id"])
|
| 144 |
embd_mdl = LLMBundle(tenant_id, LLMType.EMBEDDING, embd_id)
|
| 145 |
|
| 146 |
e, doc = DocumentService.get_by_id(req["doc_id"])
|
| 147 |
if not e:
|
| 148 |
+
return get_data_error_result(message="Document not found!")
|
| 149 |
|
| 150 |
if doc.parser_id == ParserType.QA:
|
| 151 |
arr = [
|
|
|
|
| 154 |
req["content_with_weight"]) if len(t) > 1]
|
| 155 |
if len(arr) != 2:
|
| 156 |
return get_data_error_result(
|
| 157 |
+
message="Q&A must be separated by TAB/ENTER key.")
|
| 158 |
q, a = rmPrefix(arr[0]), rmPrefix(arr[1])
|
| 159 |
d = beAdoc(d, arr[0], arr[1], not any(
|
| 160 |
[rag_tokenizer.is_chinese(t) for t in q + a]))
|
|
|
|
| 176 |
try:
|
| 177 |
tenant_id = DocumentService.get_tenant_id(req["doc_id"])
|
| 178 |
if not tenant_id:
|
| 179 |
+
return get_data_error_result(message="Tenant not found!")
|
| 180 |
if not ELASTICSEARCH.upsert([{"id": i, "available_int": int(req["available_int"])} for i in req["chunk_ids"]],
|
| 181 |
search.index_name(tenant_id)):
|
| 182 |
+
return get_data_error_result(message="Index updating failure")
|
| 183 |
return get_json_result(data=True)
|
| 184 |
except Exception as e:
|
| 185 |
return server_error_response(e)
|
|
|
|
| 193 |
try:
|
| 194 |
if not ELASTICSEARCH.deleteByQuery(
|
| 195 |
Q("ids", values=req["chunk_ids"]), search.index_name(current_user.id)):
|
| 196 |
+
return get_data_error_result(message="Index updating failure")
|
| 197 |
e, doc = DocumentService.get_by_id(req["doc_id"])
|
| 198 |
if not e:
|
| 199 |
+
return get_data_error_result(message="Document not found!")
|
| 200 |
deleted_chunk_ids = req["chunk_ids"]
|
| 201 |
chunk_number = len(deleted_chunk_ids)
|
| 202 |
DocumentService.decrement_chunk_num(doc.id, doc.kb_id, 1, chunk_number, 0)
|
|
|
|
| 224 |
try:
|
| 225 |
e, doc = DocumentService.get_by_id(req["doc_id"])
|
| 226 |
if not e:
|
| 227 |
+
return get_data_error_result(message="Document not found!")
|
| 228 |
d["kb_id"] = [doc.kb_id]
|
| 229 |
d["docnm_kwd"] = doc.name
|
| 230 |
d["doc_id"] = doc.id
|
| 231 |
|
| 232 |
tenant_id = DocumentService.get_tenant_id(req["doc_id"])
|
| 233 |
if not tenant_id:
|
| 234 |
+
return get_data_error_result(message="Tenant not found!")
|
| 235 |
|
| 236 |
embd_id = DocumentService.get_embd_id(req["doc_id"])
|
| 237 |
embd_mdl = LLMBundle(tenant_id, LLMType.EMBEDDING.value, embd_id)
|
|
|
|
| 272 |
break
|
| 273 |
else:
|
| 274 |
return get_json_result(
|
| 275 |
+
data=False, message='Only owner of knowledgebase authorized for this operation.',
|
| 276 |
+
code=RetCode.OPERATING_ERROR)
|
| 277 |
|
| 278 |
e, kb = KnowledgebaseService.get_by_id(kb_id[0])
|
| 279 |
if not e:
|
| 280 |
+
return get_data_error_result(message="Knowledgebase not found!")
|
| 281 |
|
| 282 |
embd_mdl = LLMBundle(kb.tenant_id, LLMType.EMBEDDING.value, llm_name=kb.embd_id)
|
| 283 |
|
|
|
|
| 300 |
return get_json_result(data=ranks)
|
| 301 |
except Exception as e:
|
| 302 |
if str(e).find("not_found") > 0:
|
| 303 |
+
return get_json_result(data=False, message='No chunk found! Check the chunk status please!',
|
| 304 |
+
code=RetCode.DATA_ERROR)
|
| 305 |
return server_error_response(e)
|
| 306 |
|
| 307 |
|
|
|
|
| 320 |
ty = sres.field[id]["knowledge_graph_kwd"]
|
| 321 |
try:
|
| 322 |
content_json = json.loads(sres.field[id]["content_with_weight"])
|
| 323 |
+
except Exception:
|
| 324 |
continue
|
| 325 |
|
| 326 |
if ty == 'mind_map':
|
api/apps/conversation_app.py
CHANGED
|
@@ -42,11 +42,11 @@ def set_conversation():
|
|
| 42 |
del req["conversation_id"]
|
| 43 |
try:
|
| 44 |
if not ConversationService.update_by_id(conv_id, req):
|
| 45 |
-
return get_data_error_result(
|
| 46 |
e, conv = ConversationService.get_by_id(conv_id)
|
| 47 |
if not e:
|
| 48 |
return get_data_error_result(
|
| 49 |
-
|
| 50 |
conv = conv.to_dict()
|
| 51 |
return get_json_result(data=conv)
|
| 52 |
except Exception as e:
|
|
@@ -55,7 +55,7 @@ def set_conversation():
|
|
| 55 |
try:
|
| 56 |
e, dia = DialogService.get_by_id(req["dialog_id"])
|
| 57 |
if not e:
|
| 58 |
-
return get_data_error_result(
|
| 59 |
conv = {
|
| 60 |
"id": conv_id,
|
| 61 |
"dialog_id": req["dialog_id"],
|
|
@@ -65,7 +65,7 @@ def set_conversation():
|
|
| 65 |
ConversationService.save(**conv)
|
| 66 |
e, conv = ConversationService.get_by_id(conv["id"])
|
| 67 |
if not e:
|
| 68 |
-
return get_data_error_result(
|
| 69 |
conv = conv.to_dict()
|
| 70 |
return get_json_result(data=conv)
|
| 71 |
except Exception as e:
|
|
@@ -79,15 +79,15 @@ def get():
|
|
| 79 |
try:
|
| 80 |
e, conv = ConversationService.get_by_id(conv_id)
|
| 81 |
if not e:
|
| 82 |
-
return get_data_error_result(
|
| 83 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 84 |
for tenant in tenants:
|
| 85 |
if DialogService.query(tenant_id=tenant.tenant_id, id=conv.dialog_id):
|
| 86 |
break
|
| 87 |
else:
|
| 88 |
return get_json_result(
|
| 89 |
-
data=False,
|
| 90 |
-
|
| 91 |
conv = conv.to_dict()
|
| 92 |
return get_json_result(data=conv)
|
| 93 |
except Exception as e:
|
|
@@ -102,15 +102,15 @@ def rm():
|
|
| 102 |
for cid in conv_ids:
|
| 103 |
exist, conv = ConversationService.get_by_id(cid)
|
| 104 |
if not exist:
|
| 105 |
-
return get_data_error_result(
|
| 106 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 107 |
for tenant in tenants:
|
| 108 |
if DialogService.query(tenant_id=tenant.tenant_id, id=conv.dialog_id):
|
| 109 |
break
|
| 110 |
else:
|
| 111 |
return get_json_result(
|
| 112 |
-
data=False,
|
| 113 |
-
|
| 114 |
ConversationService.delete_by_id(cid)
|
| 115 |
return get_json_result(data=True)
|
| 116 |
except Exception as e:
|
|
@@ -124,8 +124,8 @@ def list_convsersation():
|
|
| 124 |
try:
|
| 125 |
if not DialogService.query(tenant_id=current_user.id, id=dialog_id):
|
| 126 |
return get_json_result(
|
| 127 |
-
data=False,
|
| 128 |
-
|
| 129 |
convs = ConversationService.query(
|
| 130 |
dialog_id=dialog_id,
|
| 131 |
order_by=ConversationService.model.create_time,
|
|
@@ -152,11 +152,11 @@ def completion():
|
|
| 152 |
try:
|
| 153 |
e, conv = ConversationService.get_by_id(req["conversation_id"])
|
| 154 |
if not e:
|
| 155 |
-
return get_data_error_result(
|
| 156 |
conv.message = deepcopy(req["messages"])
|
| 157 |
e, dia = DialogService.get_by_id(conv.dialog_id)
|
| 158 |
if not e:
|
| 159 |
-
return get_data_error_result(
|
| 160 |
del req["conversation_id"]
|
| 161 |
del req["messages"]
|
| 162 |
|
|
@@ -180,14 +180,14 @@ def completion():
|
|
| 180 |
try:
|
| 181 |
for ans in chat(dia, msg, True, **req):
|
| 182 |
fillin_conv(ans)
|
| 183 |
-
yield "data:" + json.dumps({"
|
| 184 |
ConversationService.update_by_id(conv.id, conv.to_dict())
|
| 185 |
except Exception as e:
|
| 186 |
traceback.print_exc()
|
| 187 |
-
yield "data:" + json.dumps({"
|
| 188 |
"data": {"answer": "**ERROR**: " + str(e), "reference": []}},
|
| 189 |
ensure_ascii=False) + "\n\n"
|
| 190 |
-
yield "data:" + json.dumps({"
|
| 191 |
|
| 192 |
if req.get("stream", True):
|
| 193 |
resp = Response(stream(), mimetype="text/event-stream")
|
|
@@ -217,11 +217,11 @@ def tts():
|
|
| 217 |
|
| 218 |
tenants = TenantService.get_info_by(current_user.id)
|
| 219 |
if not tenants:
|
| 220 |
-
return get_data_error_result(
|
| 221 |
|
| 222 |
tts_id = tenants[0]["tts_id"]
|
| 223 |
if not tts_id:
|
| 224 |
-
return get_data_error_result(
|
| 225 |
|
| 226 |
tts_mdl = LLMBundle(tenants[0]["tenant_id"], LLMType.TTS, tts_id)
|
| 227 |
|
|
@@ -231,7 +231,7 @@ def tts():
|
|
| 231 |
for chunk in tts_mdl.tts(txt):
|
| 232 |
yield chunk
|
| 233 |
except Exception as e:
|
| 234 |
-
yield ("data:" + json.dumps({"
|
| 235 |
"data": {"answer": "**ERROR**: " + str(e)}},
|
| 236 |
ensure_ascii=False)).encode('utf-8')
|
| 237 |
|
|
@@ -250,7 +250,7 @@ def delete_msg():
|
|
| 250 |
req = request.json
|
| 251 |
e, conv = ConversationService.get_by_id(req["conversation_id"])
|
| 252 |
if not e:
|
| 253 |
-
return get_data_error_result(
|
| 254 |
|
| 255 |
conv = conv.to_dict()
|
| 256 |
for i, msg in enumerate(conv["message"]):
|
|
@@ -273,7 +273,7 @@ def thumbup():
|
|
| 273 |
req = request.json
|
| 274 |
e, conv = ConversationService.get_by_id(req["conversation_id"])
|
| 275 |
if not e:
|
| 276 |
-
return get_data_error_result(
|
| 277 |
up_down = req.get("set")
|
| 278 |
feedback = req.get("feedback", "")
|
| 279 |
conv = conv.to_dict()
|
|
@@ -301,12 +301,12 @@ def ask_about():
|
|
| 301 |
nonlocal req, uid
|
| 302 |
try:
|
| 303 |
for ans in ask(req["question"], req["kb_ids"], uid):
|
| 304 |
-
yield "data:" + json.dumps({"
|
| 305 |
except Exception as e:
|
| 306 |
-
yield "data:" + json.dumps({"
|
| 307 |
"data": {"answer": "**ERROR**: " + str(e), "reference": []}},
|
| 308 |
ensure_ascii=False) + "\n\n"
|
| 309 |
-
yield "data:" + json.dumps({"
|
| 310 |
|
| 311 |
resp = Response(stream(), mimetype="text/event-stream")
|
| 312 |
resp.headers.add_header("Cache-control", "no-cache")
|
|
@@ -324,7 +324,7 @@ def mindmap():
|
|
| 324 |
kb_ids = req["kb_ids"]
|
| 325 |
e, kb = KnowledgebaseService.get_by_id(kb_ids[0])
|
| 326 |
if not e:
|
| 327 |
-
return get_data_error_result(
|
| 328 |
|
| 329 |
embd_mdl = TenantLLMService.model_instance(
|
| 330 |
kb.tenant_id, LLMType.EMBEDDING.value, llm_name=kb.embd_id)
|
|
|
|
| 42 |
del req["conversation_id"]
|
| 43 |
try:
|
| 44 |
if not ConversationService.update_by_id(conv_id, req):
|
| 45 |
+
return get_data_error_result(message="Conversation not found!")
|
| 46 |
e, conv = ConversationService.get_by_id(conv_id)
|
| 47 |
if not e:
|
| 48 |
return get_data_error_result(
|
| 49 |
+
message="Fail to update a conversation!")
|
| 50 |
conv = conv.to_dict()
|
| 51 |
return get_json_result(data=conv)
|
| 52 |
except Exception as e:
|
|
|
|
| 55 |
try:
|
| 56 |
e, dia = DialogService.get_by_id(req["dialog_id"])
|
| 57 |
if not e:
|
| 58 |
+
return get_data_error_result(message="Dialog not found")
|
| 59 |
conv = {
|
| 60 |
"id": conv_id,
|
| 61 |
"dialog_id": req["dialog_id"],
|
|
|
|
| 65 |
ConversationService.save(**conv)
|
| 66 |
e, conv = ConversationService.get_by_id(conv["id"])
|
| 67 |
if not e:
|
| 68 |
+
return get_data_error_result(message="Fail to new a conversation!")
|
| 69 |
conv = conv.to_dict()
|
| 70 |
return get_json_result(data=conv)
|
| 71 |
except Exception as e:
|
|
|
|
| 79 |
try:
|
| 80 |
e, conv = ConversationService.get_by_id(conv_id)
|
| 81 |
if not e:
|
| 82 |
+
return get_data_error_result(message="Conversation not found!")
|
| 83 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 84 |
for tenant in tenants:
|
| 85 |
if DialogService.query(tenant_id=tenant.tenant_id, id=conv.dialog_id):
|
| 86 |
break
|
| 87 |
else:
|
| 88 |
return get_json_result(
|
| 89 |
+
data=False, message='Only owner of conversation authorized for this operation.',
|
| 90 |
+
code=RetCode.OPERATING_ERROR)
|
| 91 |
conv = conv.to_dict()
|
| 92 |
return get_json_result(data=conv)
|
| 93 |
except Exception as e:
|
|
|
|
| 102 |
for cid in conv_ids:
|
| 103 |
exist, conv = ConversationService.get_by_id(cid)
|
| 104 |
if not exist:
|
| 105 |
+
return get_data_error_result(message="Conversation not found!")
|
| 106 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 107 |
for tenant in tenants:
|
| 108 |
if DialogService.query(tenant_id=tenant.tenant_id, id=conv.dialog_id):
|
| 109 |
break
|
| 110 |
else:
|
| 111 |
return get_json_result(
|
| 112 |
+
data=False, message='Only owner of conversation authorized for this operation.',
|
| 113 |
+
code=RetCode.OPERATING_ERROR)
|
| 114 |
ConversationService.delete_by_id(cid)
|
| 115 |
return get_json_result(data=True)
|
| 116 |
except Exception as e:
|
|
|
|
| 124 |
try:
|
| 125 |
if not DialogService.query(tenant_id=current_user.id, id=dialog_id):
|
| 126 |
return get_json_result(
|
| 127 |
+
data=False, message='Only owner of dialog authorized for this operation.',
|
| 128 |
+
code=RetCode.OPERATING_ERROR)
|
| 129 |
convs = ConversationService.query(
|
| 130 |
dialog_id=dialog_id,
|
| 131 |
order_by=ConversationService.model.create_time,
|
|
|
|
| 152 |
try:
|
| 153 |
e, conv = ConversationService.get_by_id(req["conversation_id"])
|
| 154 |
if not e:
|
| 155 |
+
return get_data_error_result(message="Conversation not found!")
|
| 156 |
conv.message = deepcopy(req["messages"])
|
| 157 |
e, dia = DialogService.get_by_id(conv.dialog_id)
|
| 158 |
if not e:
|
| 159 |
+
return get_data_error_result(message="Dialog not found!")
|
| 160 |
del req["conversation_id"]
|
| 161 |
del req["messages"]
|
| 162 |
|
|
|
|
| 180 |
try:
|
| 181 |
for ans in chat(dia, msg, True, **req):
|
| 182 |
fillin_conv(ans)
|
| 183 |
+
yield "data:" + json.dumps({"code": 0, "message": "", "data": ans}, ensure_ascii=False) + "\n\n"
|
| 184 |
ConversationService.update_by_id(conv.id, conv.to_dict())
|
| 185 |
except Exception as e:
|
| 186 |
traceback.print_exc()
|
| 187 |
+
yield "data:" + json.dumps({"code": 500, "message": str(e),
|
| 188 |
"data": {"answer": "**ERROR**: " + str(e), "reference": []}},
|
| 189 |
ensure_ascii=False) + "\n\n"
|
| 190 |
+
yield "data:" + json.dumps({"code": 0, "message": "", "data": True}, ensure_ascii=False) + "\n\n"
|
| 191 |
|
| 192 |
if req.get("stream", True):
|
| 193 |
resp = Response(stream(), mimetype="text/event-stream")
|
|
|
|
| 217 |
|
| 218 |
tenants = TenantService.get_info_by(current_user.id)
|
| 219 |
if not tenants:
|
| 220 |
+
return get_data_error_result(message="Tenant not found!")
|
| 221 |
|
| 222 |
tts_id = tenants[0]["tts_id"]
|
| 223 |
if not tts_id:
|
| 224 |
+
return get_data_error_result(message="No default TTS model is set")
|
| 225 |
|
| 226 |
tts_mdl = LLMBundle(tenants[0]["tenant_id"], LLMType.TTS, tts_id)
|
| 227 |
|
|
|
|
| 231 |
for chunk in tts_mdl.tts(txt):
|
| 232 |
yield chunk
|
| 233 |
except Exception as e:
|
| 234 |
+
yield ("data:" + json.dumps({"code": 500, "message": str(e),
|
| 235 |
"data": {"answer": "**ERROR**: " + str(e)}},
|
| 236 |
ensure_ascii=False)).encode('utf-8')
|
| 237 |
|
|
|
|
| 250 |
req = request.json
|
| 251 |
e, conv = ConversationService.get_by_id(req["conversation_id"])
|
| 252 |
if not e:
|
| 253 |
+
return get_data_error_result(message="Conversation not found!")
|
| 254 |
|
| 255 |
conv = conv.to_dict()
|
| 256 |
for i, msg in enumerate(conv["message"]):
|
|
|
|
| 273 |
req = request.json
|
| 274 |
e, conv = ConversationService.get_by_id(req["conversation_id"])
|
| 275 |
if not e:
|
| 276 |
+
return get_data_error_result(message="Conversation not found!")
|
| 277 |
up_down = req.get("set")
|
| 278 |
feedback = req.get("feedback", "")
|
| 279 |
conv = conv.to_dict()
|
|
|
|
| 301 |
nonlocal req, uid
|
| 302 |
try:
|
| 303 |
for ans in ask(req["question"], req["kb_ids"], uid):
|
| 304 |
+
yield "data:" + json.dumps({"code": 0, "message": "", "data": ans}, ensure_ascii=False) + "\n\n"
|
| 305 |
except Exception as e:
|
| 306 |
+
yield "data:" + json.dumps({"code": 500, "message": str(e),
|
| 307 |
"data": {"answer": "**ERROR**: " + str(e), "reference": []}},
|
| 308 |
ensure_ascii=False) + "\n\n"
|
| 309 |
+
yield "data:" + json.dumps({"code": 0, "message": "", "data": True}, ensure_ascii=False) + "\n\n"
|
| 310 |
|
| 311 |
resp = Response(stream(), mimetype="text/event-stream")
|
| 312 |
resp.headers.add_header("Cache-control", "no-cache")
|
|
|
|
| 324 |
kb_ids = req["kb_ids"]
|
| 325 |
e, kb = KnowledgebaseService.get_by_id(kb_ids[0])
|
| 326 |
if not e:
|
| 327 |
+
return get_data_error_result(message="Knowledgebase not found!")
|
| 328 |
|
| 329 |
embd_mdl = TenantLLMService.model_instance(
|
| 330 |
kb.tenant_id, LLMType.EMBEDDING.value, llm_name=kb.embd_id)
|
api/apps/dialog_app.py
CHANGED
|
@@ -68,17 +68,17 @@ def set_dialog():
|
|
| 68 |
continue
|
| 69 |
if prompt_config["system"].find("{%s}" % p["key"]) < 0:
|
| 70 |
return get_data_error_result(
|
| 71 |
-
|
| 72 |
|
| 73 |
try:
|
| 74 |
e, tenant = TenantService.get_by_id(current_user.id)
|
| 75 |
if not e:
|
| 76 |
-
return get_data_error_result(
|
| 77 |
llm_id = req.get("llm_id", tenant.llm_id)
|
| 78 |
if not dialog_id:
|
| 79 |
if not req.get("kb_ids"):
|
| 80 |
return get_data_error_result(
|
| 81 |
-
|
| 82 |
dia = {
|
| 83 |
"id": get_uuid(),
|
| 84 |
"tenant_id": current_user.id,
|
|
@@ -96,20 +96,20 @@ def set_dialog():
|
|
| 96 |
"icon": icon
|
| 97 |
}
|
| 98 |
if not DialogService.save(**dia):
|
| 99 |
-
return get_data_error_result(
|
| 100 |
e, dia = DialogService.get_by_id(dia["id"])
|
| 101 |
if not e:
|
| 102 |
-
return get_data_error_result(
|
| 103 |
return get_json_result(data=dia.to_json())
|
| 104 |
else:
|
| 105 |
del req["dialog_id"]
|
| 106 |
if "kb_names" in req:
|
| 107 |
del req["kb_names"]
|
| 108 |
if not DialogService.update_by_id(dialog_id, req):
|
| 109 |
-
return get_data_error_result(
|
| 110 |
e, dia = DialogService.get_by_id(dialog_id)
|
| 111 |
if not e:
|
| 112 |
-
return get_data_error_result(
|
| 113 |
dia = dia.to_dict()
|
| 114 |
dia["kb_ids"], dia["kb_names"] = get_kb_names(dia["kb_ids"])
|
| 115 |
return get_json_result(data=dia)
|
|
@@ -124,7 +124,7 @@ def get():
|
|
| 124 |
try:
|
| 125 |
e, dia = DialogService.get_by_id(dialog_id)
|
| 126 |
if not e:
|
| 127 |
-
return get_data_error_result(
|
| 128 |
dia = dia.to_dict()
|
| 129 |
dia["kb_ids"], dia["kb_names"] = get_kb_names(dia["kb_ids"])
|
| 130 |
return get_json_result(data=dia)
|
|
@@ -174,8 +174,8 @@ def rm():
|
|
| 174 |
break
|
| 175 |
else:
|
| 176 |
return get_json_result(
|
| 177 |
-
data=False,
|
| 178 |
-
|
| 179 |
dialog_list.append({"id": id,"status":StatusEnum.INVALID.value})
|
| 180 |
DialogService.update_many_by_id(dialog_list)
|
| 181 |
return get_json_result(data=True)
|
|
|
|
| 68 |
continue
|
| 69 |
if prompt_config["system"].find("{%s}" % p["key"]) < 0:
|
| 70 |
return get_data_error_result(
|
| 71 |
+
message="Parameter '{}' is not used".format(p["key"]))
|
| 72 |
|
| 73 |
try:
|
| 74 |
e, tenant = TenantService.get_by_id(current_user.id)
|
| 75 |
if not e:
|
| 76 |
+
return get_data_error_result(message="Tenant not found!")
|
| 77 |
llm_id = req.get("llm_id", tenant.llm_id)
|
| 78 |
if not dialog_id:
|
| 79 |
if not req.get("kb_ids"):
|
| 80 |
return get_data_error_result(
|
| 81 |
+
message="Fail! Please select knowledgebase!")
|
| 82 |
dia = {
|
| 83 |
"id": get_uuid(),
|
| 84 |
"tenant_id": current_user.id,
|
|
|
|
| 96 |
"icon": icon
|
| 97 |
}
|
| 98 |
if not DialogService.save(**dia):
|
| 99 |
+
return get_data_error_result(message="Fail to new a dialog!")
|
| 100 |
e, dia = DialogService.get_by_id(dia["id"])
|
| 101 |
if not e:
|
| 102 |
+
return get_data_error_result(message="Fail to new a dialog!")
|
| 103 |
return get_json_result(data=dia.to_json())
|
| 104 |
else:
|
| 105 |
del req["dialog_id"]
|
| 106 |
if "kb_names" in req:
|
| 107 |
del req["kb_names"]
|
| 108 |
if not DialogService.update_by_id(dialog_id, req):
|
| 109 |
+
return get_data_error_result(message="Dialog not found!")
|
| 110 |
e, dia = DialogService.get_by_id(dialog_id)
|
| 111 |
if not e:
|
| 112 |
+
return get_data_error_result(message="Fail to update a dialog!")
|
| 113 |
dia = dia.to_dict()
|
| 114 |
dia["kb_ids"], dia["kb_names"] = get_kb_names(dia["kb_ids"])
|
| 115 |
return get_json_result(data=dia)
|
|
|
|
| 124 |
try:
|
| 125 |
e, dia = DialogService.get_by_id(dialog_id)
|
| 126 |
if not e:
|
| 127 |
+
return get_data_error_result(message="Dialog not found!")
|
| 128 |
dia = dia.to_dict()
|
| 129 |
dia["kb_ids"], dia["kb_names"] = get_kb_names(dia["kb_ids"])
|
| 130 |
return get_json_result(data=dia)
|
|
|
|
| 174 |
break
|
| 175 |
else:
|
| 176 |
return get_json_result(
|
| 177 |
+
data=False, message='Only owner of dialog authorized for this operation.',
|
| 178 |
+
code=RetCode.OPERATING_ERROR)
|
| 179 |
dialog_list.append({"id": id,"status":StatusEnum.INVALID.value})
|
| 180 |
DialogService.update_many_by_id(dialog_list)
|
| 181 |
return get_json_result(data=True)
|
api/apps/document_app.py
CHANGED
|
@@ -49,16 +49,16 @@ def upload():
|
|
| 49 |
kb_id = request.form.get("kb_id")
|
| 50 |
if not kb_id:
|
| 51 |
return get_json_result(
|
| 52 |
-
data=False,
|
| 53 |
if 'file' not in request.files:
|
| 54 |
return get_json_result(
|
| 55 |
-
data=False,
|
| 56 |
|
| 57 |
file_objs = request.files.getlist('file')
|
| 58 |
for file_obj in file_objs:
|
| 59 |
if file_obj.filename == '':
|
| 60 |
return get_json_result(
|
| 61 |
-
data=False,
|
| 62 |
|
| 63 |
e, kb = KnowledgebaseService.get_by_id(kb_id)
|
| 64 |
if not e:
|
|
@@ -67,7 +67,7 @@ def upload():
|
|
| 67 |
err, _ = FileService.upload_document(kb, file_objs, current_user.id)
|
| 68 |
if err:
|
| 69 |
return get_json_result(
|
| 70 |
-
data=False,
|
| 71 |
return get_json_result(data=True)
|
| 72 |
|
| 73 |
|
|
@@ -78,12 +78,12 @@ def web_crawl():
|
|
| 78 |
kb_id = request.form.get("kb_id")
|
| 79 |
if not kb_id:
|
| 80 |
return get_json_result(
|
| 81 |
-
data=False,
|
| 82 |
name = request.form.get("name")
|
| 83 |
url = request.form.get("url")
|
| 84 |
if not is_valid_url(url):
|
| 85 |
return get_json_result(
|
| 86 |
-
data=False,
|
| 87 |
e, kb = KnowledgebaseService.get_by_id(kb_id)
|
| 88 |
if not e:
|
| 89 |
raise LookupError("Can't find this knowledgebase!")
|
|
@@ -145,17 +145,17 @@ def create():
|
|
| 145 |
kb_id = req["kb_id"]
|
| 146 |
if not kb_id:
|
| 147 |
return get_json_result(
|
| 148 |
-
data=False,
|
| 149 |
|
| 150 |
try:
|
| 151 |
e, kb = KnowledgebaseService.get_by_id(kb_id)
|
| 152 |
if not e:
|
| 153 |
return get_data_error_result(
|
| 154 |
-
|
| 155 |
|
| 156 |
if DocumentService.query(name=req["name"], kb_id=kb_id):
|
| 157 |
return get_data_error_result(
|
| 158 |
-
|
| 159 |
|
| 160 |
doc = DocumentService.insert({
|
| 161 |
"id": get_uuid(),
|
|
@@ -179,7 +179,7 @@ def list_docs():
|
|
| 179 |
kb_id = request.args.get("kb_id")
|
| 180 |
if not kb_id:
|
| 181 |
return get_json_result(
|
| 182 |
-
data=False,
|
| 183 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 184 |
for tenant in tenants:
|
| 185 |
if KnowledgebaseService.query(
|
|
@@ -187,8 +187,8 @@ def list_docs():
|
|
| 187 |
break
|
| 188 |
else:
|
| 189 |
return get_json_result(
|
| 190 |
-
data=False,
|
| 191 |
-
|
| 192 |
keywords = request.args.get("keywords", "")
|
| 193 |
|
| 194 |
page_number = int(request.args.get("page", 1))
|
|
@@ -217,8 +217,8 @@ def docinfos():
|
|
| 217 |
if not DocumentService.accessible(doc_id, current_user.id):
|
| 218 |
return get_json_result(
|
| 219 |
data=False,
|
| 220 |
-
|
| 221 |
-
|
| 222 |
)
|
| 223 |
docs = DocumentService.get_by_ids(doc_ids)
|
| 224 |
return get_json_result(data=list(docs.dicts()))
|
|
@@ -230,7 +230,7 @@ def thumbnails():
|
|
| 230 |
doc_ids = request.args.get("doc_ids").split(",")
|
| 231 |
if not doc_ids:
|
| 232 |
return get_json_result(
|
| 233 |
-
data=False,
|
| 234 |
|
| 235 |
try:
|
| 236 |
docs = DocumentService.get_thumbnails(doc_ids)
|
|
@@ -252,28 +252,28 @@ def change_status():
|
|
| 252 |
if str(req["status"]) not in ["0", "1"]:
|
| 253 |
return get_json_result(
|
| 254 |
data=False,
|
| 255 |
-
|
| 256 |
-
|
| 257 |
|
| 258 |
if not DocumentService.accessible(req["doc_id"], current_user.id):
|
| 259 |
return get_json_result(
|
| 260 |
data=False,
|
| 261 |
-
|
| 262 |
-
|
| 263 |
|
| 264 |
try:
|
| 265 |
e, doc = DocumentService.get_by_id(req["doc_id"])
|
| 266 |
if not e:
|
| 267 |
-
return get_data_error_result(
|
| 268 |
e, kb = KnowledgebaseService.get_by_id(doc.kb_id)
|
| 269 |
if not e:
|
| 270 |
return get_data_error_result(
|
| 271 |
-
|
| 272 |
|
| 273 |
if not DocumentService.update_by_id(
|
| 274 |
req["doc_id"], {"status": str(req["status"])}):
|
| 275 |
return get_data_error_result(
|
| 276 |
-
|
| 277 |
|
| 278 |
if str(req["status"]) == "0":
|
| 279 |
ELASTICSEARCH.updateScriptByQuery(Q("term", doc_id=req["doc_id"]),
|
|
@@ -304,8 +304,8 @@ def rm():
|
|
| 304 |
if not DocumentService.accessible4deletion(doc_id, current_user.id):
|
| 305 |
return get_json_result(
|
| 306 |
data=False,
|
| 307 |
-
|
| 308 |
-
|
| 309 |
)
|
| 310 |
|
| 311 |
root_folder = FileService.get_root_folder(current_user.id)
|
|
@@ -316,16 +316,16 @@ def rm():
|
|
| 316 |
try:
|
| 317 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 318 |
if not e:
|
| 319 |
-
return get_data_error_result(
|
| 320 |
tenant_id = DocumentService.get_tenant_id(doc_id)
|
| 321 |
if not tenant_id:
|
| 322 |
-
return get_data_error_result(
|
| 323 |
|
| 324 |
b, n = File2DocumentService.get_storage_address(doc_id=doc_id)
|
| 325 |
|
| 326 |
if not DocumentService.remove_document(doc, tenant_id):
|
| 327 |
return get_data_error_result(
|
| 328 |
-
|
| 329 |
|
| 330 |
f2d = File2DocumentService.get_by_document_id(doc_id)
|
| 331 |
FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.id == f2d[0].file_id])
|
|
@@ -336,7 +336,7 @@ def rm():
|
|
| 336 |
errors += str(e)
|
| 337 |
|
| 338 |
if errors:
|
| 339 |
-
return get_json_result(data=False,
|
| 340 |
|
| 341 |
return get_json_result(data=True)
|
| 342 |
|
|
@@ -350,8 +350,8 @@ def run():
|
|
| 350 |
if not DocumentService.accessible(doc_id, current_user.id):
|
| 351 |
return get_json_result(
|
| 352 |
data=False,
|
| 353 |
-
|
| 354 |
-
|
| 355 |
)
|
| 356 |
try:
|
| 357 |
for id in req["doc_ids"]:
|
|
@@ -364,7 +364,7 @@ def run():
|
|
| 364 |
# if str(req["run"]) == TaskStatus.CANCEL.value:
|
| 365 |
tenant_id = DocumentService.get_tenant_id(id)
|
| 366 |
if not tenant_id:
|
| 367 |
-
return get_data_error_result(
|
| 368 |
ELASTICSEARCH.deleteByQuery(
|
| 369 |
Q("match", doc_id=id), idxnm=search.index_name(tenant_id))
|
| 370 |
|
|
@@ -389,28 +389,28 @@ def rename():
|
|
| 389 |
if not DocumentService.accessible(req["doc_id"], current_user.id):
|
| 390 |
return get_json_result(
|
| 391 |
data=False,
|
| 392 |
-
|
| 393 |
-
|
| 394 |
)
|
| 395 |
try:
|
| 396 |
e, doc = DocumentService.get_by_id(req["doc_id"])
|
| 397 |
if not e:
|
| 398 |
-
return get_data_error_result(
|
| 399 |
if pathlib.Path(req["name"].lower()).suffix != pathlib.Path(
|
| 400 |
doc.name.lower()).suffix:
|
| 401 |
return get_json_result(
|
| 402 |
data=False,
|
| 403 |
-
|
| 404 |
-
|
| 405 |
for d in DocumentService.query(name=req["name"], kb_id=doc.kb_id):
|
| 406 |
if d.name == req["name"]:
|
| 407 |
return get_data_error_result(
|
| 408 |
-
|
| 409 |
|
| 410 |
if not DocumentService.update_by_id(
|
| 411 |
req["doc_id"], {"name": req["name"]}):
|
| 412 |
return get_data_error_result(
|
| 413 |
-
|
| 414 |
|
| 415 |
informs = File2DocumentService.get_by_document_id(req["doc_id"])
|
| 416 |
if informs:
|
|
@@ -428,7 +428,7 @@ def get(doc_id):
|
|
| 428 |
try:
|
| 429 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 430 |
if not e:
|
| 431 |
-
return get_data_error_result(
|
| 432 |
|
| 433 |
b, n = File2DocumentService.get_storage_address(doc_id=doc_id)
|
| 434 |
response = flask.make_response(STORAGE_IMPL.get(b, n))
|
|
@@ -456,13 +456,13 @@ def change_parser():
|
|
| 456 |
if not DocumentService.accessible(req["doc_id"], current_user.id):
|
| 457 |
return get_json_result(
|
| 458 |
data=False,
|
| 459 |
-
|
| 460 |
-
|
| 461 |
)
|
| 462 |
try:
|
| 463 |
e, doc = DocumentService.get_by_id(req["doc_id"])
|
| 464 |
if not e:
|
| 465 |
-
return get_data_error_result(
|
| 466 |
if doc.parser_id.lower() == req["parser_id"].lower():
|
| 467 |
if "parser_config" in req:
|
| 468 |
if req["parser_config"] == doc.parser_config:
|
|
@@ -473,23 +473,23 @@ def change_parser():
|
|
| 473 |
if ((doc.type == FileType.VISUAL and req["parser_id"] != "picture")
|
| 474 |
or (re.search(
|
| 475 |
r"\.(ppt|pptx|pages)$", doc.name) and req["parser_id"] != "presentation")):
|
| 476 |
-
return get_data_error_result(
|
| 477 |
|
| 478 |
e = DocumentService.update_by_id(doc.id,
|
| 479 |
{"parser_id": req["parser_id"], "progress": 0, "progress_msg": "",
|
| 480 |
"run": TaskStatus.UNSTART.value})
|
| 481 |
if not e:
|
| 482 |
-
return get_data_error_result(
|
| 483 |
if "parser_config" in req:
|
| 484 |
DocumentService.update_parser_config(doc.id, req["parser_config"])
|
| 485 |
if doc.token_num > 0:
|
| 486 |
e = DocumentService.increment_chunk_num(doc.id, doc.kb_id, doc.token_num * -1, doc.chunk_num * -1,
|
| 487 |
doc.process_duation * -1)
|
| 488 |
if not e:
|
| 489 |
-
return get_data_error_result(
|
| 490 |
tenant_id = DocumentService.get_tenant_id(req["doc_id"])
|
| 491 |
if not tenant_id:
|
| 492 |
-
return get_data_error_result(
|
| 493 |
ELASTICSEARCH.deleteByQuery(
|
| 494 |
Q("match", doc_id=doc.id), idxnm=search.index_name(tenant_id))
|
| 495 |
|
|
@@ -516,13 +516,13 @@ def get_image(image_id):
|
|
| 516 |
def upload_and_parse():
|
| 517 |
if 'file' not in request.files:
|
| 518 |
return get_json_result(
|
| 519 |
-
data=False,
|
| 520 |
|
| 521 |
file_objs = request.files.getlist('file')
|
| 522 |
for file_obj in file_objs:
|
| 523 |
if file_obj.filename == '':
|
| 524 |
return get_json_result(
|
| 525 |
-
data=False,
|
| 526 |
|
| 527 |
doc_ids = doc_upload_and_parse(request.form.get("conversation_id"), file_objs, current_user.id)
|
| 528 |
|
|
|
|
| 49 |
kb_id = request.form.get("kb_id")
|
| 50 |
if not kb_id:
|
| 51 |
return get_json_result(
|
| 52 |
+
data=False, message='Lack of "KB ID"', code=RetCode.ARGUMENT_ERROR)
|
| 53 |
if 'file' not in request.files:
|
| 54 |
return get_json_result(
|
| 55 |
+
data=False, message='No file part!', code=RetCode.ARGUMENT_ERROR)
|
| 56 |
|
| 57 |
file_objs = request.files.getlist('file')
|
| 58 |
for file_obj in file_objs:
|
| 59 |
if file_obj.filename == '':
|
| 60 |
return get_json_result(
|
| 61 |
+
data=False, message='No file selected!', code=RetCode.ARGUMENT_ERROR)
|
| 62 |
|
| 63 |
e, kb = KnowledgebaseService.get_by_id(kb_id)
|
| 64 |
if not e:
|
|
|
|
| 67 |
err, _ = FileService.upload_document(kb, file_objs, current_user.id)
|
| 68 |
if err:
|
| 69 |
return get_json_result(
|
| 70 |
+
data=False, message="\n".join(err), code=RetCode.SERVER_ERROR)
|
| 71 |
return get_json_result(data=True)
|
| 72 |
|
| 73 |
|
|
|
|
| 78 |
kb_id = request.form.get("kb_id")
|
| 79 |
if not kb_id:
|
| 80 |
return get_json_result(
|
| 81 |
+
data=False, message='Lack of "KB ID"', code=RetCode.ARGUMENT_ERROR)
|
| 82 |
name = request.form.get("name")
|
| 83 |
url = request.form.get("url")
|
| 84 |
if not is_valid_url(url):
|
| 85 |
return get_json_result(
|
| 86 |
+
data=False, message='The URL format is invalid', code=RetCode.ARGUMENT_ERROR)
|
| 87 |
e, kb = KnowledgebaseService.get_by_id(kb_id)
|
| 88 |
if not e:
|
| 89 |
raise LookupError("Can't find this knowledgebase!")
|
|
|
|
| 145 |
kb_id = req["kb_id"]
|
| 146 |
if not kb_id:
|
| 147 |
return get_json_result(
|
| 148 |
+
data=False, message='Lack of "KB ID"', code=RetCode.ARGUMENT_ERROR)
|
| 149 |
|
| 150 |
try:
|
| 151 |
e, kb = KnowledgebaseService.get_by_id(kb_id)
|
| 152 |
if not e:
|
| 153 |
return get_data_error_result(
|
| 154 |
+
message="Can't find this knowledgebase!")
|
| 155 |
|
| 156 |
if DocumentService.query(name=req["name"], kb_id=kb_id):
|
| 157 |
return get_data_error_result(
|
| 158 |
+
message="Duplicated document name in the same knowledgebase.")
|
| 159 |
|
| 160 |
doc = DocumentService.insert({
|
| 161 |
"id": get_uuid(),
|
|
|
|
| 179 |
kb_id = request.args.get("kb_id")
|
| 180 |
if not kb_id:
|
| 181 |
return get_json_result(
|
| 182 |
+
data=False, message='Lack of "KB ID"', code=RetCode.ARGUMENT_ERROR)
|
| 183 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 184 |
for tenant in tenants:
|
| 185 |
if KnowledgebaseService.query(
|
|
|
|
| 187 |
break
|
| 188 |
else:
|
| 189 |
return get_json_result(
|
| 190 |
+
data=False, message='Only owner of knowledgebase authorized for this operation.',
|
| 191 |
+
code=RetCode.OPERATING_ERROR)
|
| 192 |
keywords = request.args.get("keywords", "")
|
| 193 |
|
| 194 |
page_number = int(request.args.get("page", 1))
|
|
|
|
| 217 |
if not DocumentService.accessible(doc_id, current_user.id):
|
| 218 |
return get_json_result(
|
| 219 |
data=False,
|
| 220 |
+
message='No authorization.',
|
| 221 |
+
code=RetCode.AUTHENTICATION_ERROR
|
| 222 |
)
|
| 223 |
docs = DocumentService.get_by_ids(doc_ids)
|
| 224 |
return get_json_result(data=list(docs.dicts()))
|
|
|
|
| 230 |
doc_ids = request.args.get("doc_ids").split(",")
|
| 231 |
if not doc_ids:
|
| 232 |
return get_json_result(
|
| 233 |
+
data=False, message='Lack of "Document ID"', code=RetCode.ARGUMENT_ERROR)
|
| 234 |
|
| 235 |
try:
|
| 236 |
docs = DocumentService.get_thumbnails(doc_ids)
|
|
|
|
| 252 |
if str(req["status"]) not in ["0", "1"]:
|
| 253 |
return get_json_result(
|
| 254 |
data=False,
|
| 255 |
+
message='"Status" must be either 0 or 1!',
|
| 256 |
+
code=RetCode.ARGUMENT_ERROR)
|
| 257 |
|
| 258 |
if not DocumentService.accessible(req["doc_id"], current_user.id):
|
| 259 |
return get_json_result(
|
| 260 |
data=False,
|
| 261 |
+
message='No authorization.',
|
| 262 |
+
code=RetCode.AUTHENTICATION_ERROR)
|
| 263 |
|
| 264 |
try:
|
| 265 |
e, doc = DocumentService.get_by_id(req["doc_id"])
|
| 266 |
if not e:
|
| 267 |
+
return get_data_error_result(message="Document not found!")
|
| 268 |
e, kb = KnowledgebaseService.get_by_id(doc.kb_id)
|
| 269 |
if not e:
|
| 270 |
return get_data_error_result(
|
| 271 |
+
message="Can't find this knowledgebase!")
|
| 272 |
|
| 273 |
if not DocumentService.update_by_id(
|
| 274 |
req["doc_id"], {"status": str(req["status"])}):
|
| 275 |
return get_data_error_result(
|
| 276 |
+
message="Database error (Document update)!")
|
| 277 |
|
| 278 |
if str(req["status"]) == "0":
|
| 279 |
ELASTICSEARCH.updateScriptByQuery(Q("term", doc_id=req["doc_id"]),
|
|
|
|
| 304 |
if not DocumentService.accessible4deletion(doc_id, current_user.id):
|
| 305 |
return get_json_result(
|
| 306 |
data=False,
|
| 307 |
+
message='No authorization.',
|
| 308 |
+
code=RetCode.AUTHENTICATION_ERROR
|
| 309 |
)
|
| 310 |
|
| 311 |
root_folder = FileService.get_root_folder(current_user.id)
|
|
|
|
| 316 |
try:
|
| 317 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 318 |
if not e:
|
| 319 |
+
return get_data_error_result(message="Document not found!")
|
| 320 |
tenant_id = DocumentService.get_tenant_id(doc_id)
|
| 321 |
if not tenant_id:
|
| 322 |
+
return get_data_error_result(message="Tenant not found!")
|
| 323 |
|
| 324 |
b, n = File2DocumentService.get_storage_address(doc_id=doc_id)
|
| 325 |
|
| 326 |
if not DocumentService.remove_document(doc, tenant_id):
|
| 327 |
return get_data_error_result(
|
| 328 |
+
message="Database error (Document removal)!")
|
| 329 |
|
| 330 |
f2d = File2DocumentService.get_by_document_id(doc_id)
|
| 331 |
FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.id == f2d[0].file_id])
|
|
|
|
| 336 |
errors += str(e)
|
| 337 |
|
| 338 |
if errors:
|
| 339 |
+
return get_json_result(data=False, message=errors, code=RetCode.SERVER_ERROR)
|
| 340 |
|
| 341 |
return get_json_result(data=True)
|
| 342 |
|
|
|
|
| 350 |
if not DocumentService.accessible(doc_id, current_user.id):
|
| 351 |
return get_json_result(
|
| 352 |
data=False,
|
| 353 |
+
message='No authorization.',
|
| 354 |
+
code=RetCode.AUTHENTICATION_ERROR
|
| 355 |
)
|
| 356 |
try:
|
| 357 |
for id in req["doc_ids"]:
|
|
|
|
| 364 |
# if str(req["run"]) == TaskStatus.CANCEL.value:
|
| 365 |
tenant_id = DocumentService.get_tenant_id(id)
|
| 366 |
if not tenant_id:
|
| 367 |
+
return get_data_error_result(message="Tenant not found!")
|
| 368 |
ELASTICSEARCH.deleteByQuery(
|
| 369 |
Q("match", doc_id=id), idxnm=search.index_name(tenant_id))
|
| 370 |
|
|
|
|
| 389 |
if not DocumentService.accessible(req["doc_id"], current_user.id):
|
| 390 |
return get_json_result(
|
| 391 |
data=False,
|
| 392 |
+
message='No authorization.',
|
| 393 |
+
code=RetCode.AUTHENTICATION_ERROR
|
| 394 |
)
|
| 395 |
try:
|
| 396 |
e, doc = DocumentService.get_by_id(req["doc_id"])
|
| 397 |
if not e:
|
| 398 |
+
return get_data_error_result(message="Document not found!")
|
| 399 |
if pathlib.Path(req["name"].lower()).suffix != pathlib.Path(
|
| 400 |
doc.name.lower()).suffix:
|
| 401 |
return get_json_result(
|
| 402 |
data=False,
|
| 403 |
+
message="The extension of file can't be changed",
|
| 404 |
+
code=RetCode.ARGUMENT_ERROR)
|
| 405 |
for d in DocumentService.query(name=req["name"], kb_id=doc.kb_id):
|
| 406 |
if d.name == req["name"]:
|
| 407 |
return get_data_error_result(
|
| 408 |
+
message="Duplicated document name in the same knowledgebase.")
|
| 409 |
|
| 410 |
if not DocumentService.update_by_id(
|
| 411 |
req["doc_id"], {"name": req["name"]}):
|
| 412 |
return get_data_error_result(
|
| 413 |
+
message="Database error (Document rename)!")
|
| 414 |
|
| 415 |
informs = File2DocumentService.get_by_document_id(req["doc_id"])
|
| 416 |
if informs:
|
|
|
|
| 428 |
try:
|
| 429 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 430 |
if not e:
|
| 431 |
+
return get_data_error_result(message="Document not found!")
|
| 432 |
|
| 433 |
b, n = File2DocumentService.get_storage_address(doc_id=doc_id)
|
| 434 |
response = flask.make_response(STORAGE_IMPL.get(b, n))
|
|
|
|
| 456 |
if not DocumentService.accessible(req["doc_id"], current_user.id):
|
| 457 |
return get_json_result(
|
| 458 |
data=False,
|
| 459 |
+
message='No authorization.',
|
| 460 |
+
code=RetCode.AUTHENTICATION_ERROR
|
| 461 |
)
|
| 462 |
try:
|
| 463 |
e, doc = DocumentService.get_by_id(req["doc_id"])
|
| 464 |
if not e:
|
| 465 |
+
return get_data_error_result(message="Document not found!")
|
| 466 |
if doc.parser_id.lower() == req["parser_id"].lower():
|
| 467 |
if "parser_config" in req:
|
| 468 |
if req["parser_config"] == doc.parser_config:
|
|
|
|
| 473 |
if ((doc.type == FileType.VISUAL and req["parser_id"] != "picture")
|
| 474 |
or (re.search(
|
| 475 |
r"\.(ppt|pptx|pages)$", doc.name) and req["parser_id"] != "presentation")):
|
| 476 |
+
return get_data_error_result(message="Not supported yet!")
|
| 477 |
|
| 478 |
e = DocumentService.update_by_id(doc.id,
|
| 479 |
{"parser_id": req["parser_id"], "progress": 0, "progress_msg": "",
|
| 480 |
"run": TaskStatus.UNSTART.value})
|
| 481 |
if not e:
|
| 482 |
+
return get_data_error_result(message="Document not found!")
|
| 483 |
if "parser_config" in req:
|
| 484 |
DocumentService.update_parser_config(doc.id, req["parser_config"])
|
| 485 |
if doc.token_num > 0:
|
| 486 |
e = DocumentService.increment_chunk_num(doc.id, doc.kb_id, doc.token_num * -1, doc.chunk_num * -1,
|
| 487 |
doc.process_duation * -1)
|
| 488 |
if not e:
|
| 489 |
+
return get_data_error_result(message="Document not found!")
|
| 490 |
tenant_id = DocumentService.get_tenant_id(req["doc_id"])
|
| 491 |
if not tenant_id:
|
| 492 |
+
return get_data_error_result(message="Tenant not found!")
|
| 493 |
ELASTICSEARCH.deleteByQuery(
|
| 494 |
Q("match", doc_id=doc.id), idxnm=search.index_name(tenant_id))
|
| 495 |
|
|
|
|
| 516 |
def upload_and_parse():
|
| 517 |
if 'file' not in request.files:
|
| 518 |
return get_json_result(
|
| 519 |
+
data=False, message='No file part!', code=RetCode.ARGUMENT_ERROR)
|
| 520 |
|
| 521 |
file_objs = request.files.getlist('file')
|
| 522 |
for file_obj in file_objs:
|
| 523 |
if file_obj.filename == '':
|
| 524 |
return get_json_result(
|
| 525 |
+
data=False, message='No file selected!', code=RetCode.ARGUMENT_ERROR)
|
| 526 |
|
| 527 |
doc_ids = doc_upload_and_parse(request.form.get("conversation_id"), file_objs, current_user.id)
|
| 528 |
|
api/apps/file2document_app.py
CHANGED
|
@@ -13,9 +13,7 @@
|
|
| 13 |
# See the License for the specific language governing permissions and
|
| 14 |
# limitations under the License
|
| 15 |
#
|
| 16 |
-
from elasticsearch_dsl import Q
|
| 17 |
|
| 18 |
-
from api.db.db_models import File2Document
|
| 19 |
from api.db.services.file2document_service import File2DocumentService
|
| 20 |
from api.db.services.file_service import FileService
|
| 21 |
|
|
@@ -28,8 +26,6 @@ from api.db import FileType
|
|
| 28 |
from api.db.services.document_service import DocumentService
|
| 29 |
from api.settings import RetCode
|
| 30 |
from api.utils.api_utils import get_json_result
|
| 31 |
-
from rag.nlp import search
|
| 32 |
-
from rag.utils.es_conn import ELASTICSEARCH
|
| 33 |
|
| 34 |
|
| 35 |
@manager.route('/convert', methods=['POST'])
|
|
@@ -54,13 +50,13 @@ def convert():
|
|
| 54 |
doc_id = inform.document_id
|
| 55 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 56 |
if not e:
|
| 57 |
-
return get_data_error_result(
|
| 58 |
tenant_id = DocumentService.get_tenant_id(doc_id)
|
| 59 |
if not tenant_id:
|
| 60 |
-
return get_data_error_result(
|
| 61 |
if not DocumentService.remove_document(doc, tenant_id):
|
| 62 |
return get_data_error_result(
|
| 63 |
-
|
| 64 |
File2DocumentService.delete_by_file_id(id)
|
| 65 |
|
| 66 |
# insert
|
|
@@ -68,11 +64,11 @@ def convert():
|
|
| 68 |
e, kb = KnowledgebaseService.get_by_id(kb_id)
|
| 69 |
if not e:
|
| 70 |
return get_data_error_result(
|
| 71 |
-
|
| 72 |
e, file = FileService.get_by_id(id)
|
| 73 |
if not e:
|
| 74 |
return get_data_error_result(
|
| 75 |
-
|
| 76 |
|
| 77 |
doc = DocumentService.insert({
|
| 78 |
"id": get_uuid(),
|
|
@@ -104,26 +100,26 @@ def rm():
|
|
| 104 |
file_ids = req["file_ids"]
|
| 105 |
if not file_ids:
|
| 106 |
return get_json_result(
|
| 107 |
-
data=False,
|
| 108 |
try:
|
| 109 |
for file_id in file_ids:
|
| 110 |
informs = File2DocumentService.get_by_file_id(file_id)
|
| 111 |
if not informs:
|
| 112 |
-
return get_data_error_result(
|
| 113 |
for inform in informs:
|
| 114 |
if not inform:
|
| 115 |
-
return get_data_error_result(
|
| 116 |
File2DocumentService.delete_by_file_id(file_id)
|
| 117 |
doc_id = inform.document_id
|
| 118 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 119 |
if not e:
|
| 120 |
-
return get_data_error_result(
|
| 121 |
tenant_id = DocumentService.get_tenant_id(doc_id)
|
| 122 |
if not tenant_id:
|
| 123 |
-
return get_data_error_result(
|
| 124 |
if not DocumentService.remove_document(doc, tenant_id):
|
| 125 |
return get_data_error_result(
|
| 126 |
-
|
| 127 |
return get_json_result(data=True)
|
| 128 |
except Exception as e:
|
| 129 |
return server_error_response(e)
|
|
|
|
| 13 |
# See the License for the specific language governing permissions and
|
| 14 |
# limitations under the License
|
| 15 |
#
|
|
|
|
| 16 |
|
|
|
|
| 17 |
from api.db.services.file2document_service import File2DocumentService
|
| 18 |
from api.db.services.file_service import FileService
|
| 19 |
|
|
|
|
| 26 |
from api.db.services.document_service import DocumentService
|
| 27 |
from api.settings import RetCode
|
| 28 |
from api.utils.api_utils import get_json_result
|
|
|
|
|
|
|
| 29 |
|
| 30 |
|
| 31 |
@manager.route('/convert', methods=['POST'])
|
|
|
|
| 50 |
doc_id = inform.document_id
|
| 51 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 52 |
if not e:
|
| 53 |
+
return get_data_error_result(message="Document not found!")
|
| 54 |
tenant_id = DocumentService.get_tenant_id(doc_id)
|
| 55 |
if not tenant_id:
|
| 56 |
+
return get_data_error_result(message="Tenant not found!")
|
| 57 |
if not DocumentService.remove_document(doc, tenant_id):
|
| 58 |
return get_data_error_result(
|
| 59 |
+
message="Database error (Document removal)!")
|
| 60 |
File2DocumentService.delete_by_file_id(id)
|
| 61 |
|
| 62 |
# insert
|
|
|
|
| 64 |
e, kb = KnowledgebaseService.get_by_id(kb_id)
|
| 65 |
if not e:
|
| 66 |
return get_data_error_result(
|
| 67 |
+
message="Can't find this knowledgebase!")
|
| 68 |
e, file = FileService.get_by_id(id)
|
| 69 |
if not e:
|
| 70 |
return get_data_error_result(
|
| 71 |
+
message="Can't find this file!")
|
| 72 |
|
| 73 |
doc = DocumentService.insert({
|
| 74 |
"id": get_uuid(),
|
|
|
|
| 100 |
file_ids = req["file_ids"]
|
| 101 |
if not file_ids:
|
| 102 |
return get_json_result(
|
| 103 |
+
data=False, message='Lack of "Files ID"', code=RetCode.ARGUMENT_ERROR)
|
| 104 |
try:
|
| 105 |
for file_id in file_ids:
|
| 106 |
informs = File2DocumentService.get_by_file_id(file_id)
|
| 107 |
if not informs:
|
| 108 |
+
return get_data_error_result(message="Inform not found!")
|
| 109 |
for inform in informs:
|
| 110 |
if not inform:
|
| 111 |
+
return get_data_error_result(message="Inform not found!")
|
| 112 |
File2DocumentService.delete_by_file_id(file_id)
|
| 113 |
doc_id = inform.document_id
|
| 114 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 115 |
if not e:
|
| 116 |
+
return get_data_error_result(message="Document not found!")
|
| 117 |
tenant_id = DocumentService.get_tenant_id(doc_id)
|
| 118 |
if not tenant_id:
|
| 119 |
+
return get_data_error_result(message="Tenant not found!")
|
| 120 |
if not DocumentService.remove_document(doc, tenant_id):
|
| 121 |
return get_data_error_result(
|
| 122 |
+
message="Database error (Document removal)!")
|
| 123 |
return get_json_result(data=True)
|
| 124 |
except Exception as e:
|
| 125 |
return server_error_response(e)
|
api/apps/file_app.py
CHANGED
|
@@ -18,7 +18,6 @@ import pathlib
|
|
| 18 |
import re
|
| 19 |
|
| 20 |
import flask
|
| 21 |
-
from elasticsearch_dsl import Q
|
| 22 |
from flask import request
|
| 23 |
from flask_login import login_required, current_user
|
| 24 |
|
|
@@ -32,8 +31,6 @@ from api.db.services.file_service import FileService
|
|
| 32 |
from api.settings import RetCode
|
| 33 |
from api.utils.api_utils import get_json_result
|
| 34 |
from api.utils.file_utils import filename_type
|
| 35 |
-
from rag.nlp import search
|
| 36 |
-
from rag.utils.es_conn import ELASTICSEARCH
|
| 37 |
from rag.utils.storage_factory import STORAGE_IMPL
|
| 38 |
|
| 39 |
|
|
@@ -49,24 +46,24 @@ def upload():
|
|
| 49 |
|
| 50 |
if 'file' not in request.files:
|
| 51 |
return get_json_result(
|
| 52 |
-
data=False,
|
| 53 |
file_objs = request.files.getlist('file')
|
| 54 |
|
| 55 |
for file_obj in file_objs:
|
| 56 |
if file_obj.filename == '':
|
| 57 |
return get_json_result(
|
| 58 |
-
data=False,
|
| 59 |
file_res = []
|
| 60 |
try:
|
| 61 |
for file_obj in file_objs:
|
| 62 |
e, file = FileService.get_by_id(pf_id)
|
| 63 |
if not e:
|
| 64 |
return get_data_error_result(
|
| 65 |
-
|
| 66 |
MAX_FILE_NUM_PER_USER = int(os.environ.get('MAX_FILE_NUM_PER_USER', 0))
|
| 67 |
if MAX_FILE_NUM_PER_USER > 0 and DocumentService.get_doc_count(current_user.id) >= MAX_FILE_NUM_PER_USER:
|
| 68 |
return get_data_error_result(
|
| 69 |
-
|
| 70 |
|
| 71 |
# split file name path
|
| 72 |
if not file_obj.filename:
|
|
@@ -85,13 +82,13 @@ def upload():
|
|
| 85 |
if file_len != len_id_list:
|
| 86 |
e, file = FileService.get_by_id(file_id_list[len_id_list - 1])
|
| 87 |
if not e:
|
| 88 |
-
return get_data_error_result(
|
| 89 |
last_folder = FileService.create_folder(file, file_id_list[len_id_list - 1], file_obj_names,
|
| 90 |
len_id_list)
|
| 91 |
else:
|
| 92 |
e, file = FileService.get_by_id(file_id_list[len_id_list - 2])
|
| 93 |
if not e:
|
| 94 |
-
return get_data_error_result(
|
| 95 |
last_folder = FileService.create_folder(file, file_id_list[len_id_list - 2], file_obj_names,
|
| 96 |
len_id_list)
|
| 97 |
|
|
@@ -137,10 +134,10 @@ def create():
|
|
| 137 |
try:
|
| 138 |
if not FileService.is_parent_folder_exist(pf_id):
|
| 139 |
return get_json_result(
|
| 140 |
-
data=False,
|
| 141 |
if FileService.query(name=req["name"], parent_id=pf_id):
|
| 142 |
return get_data_error_result(
|
| 143 |
-
|
| 144 |
|
| 145 |
if input_file_type == FileType.FOLDER.value:
|
| 146 |
file_type = FileType.FOLDER.value
|
|
@@ -181,14 +178,14 @@ def list_files():
|
|
| 181 |
try:
|
| 182 |
e, file = FileService.get_by_id(pf_id)
|
| 183 |
if not e:
|
| 184 |
-
return get_data_error_result(
|
| 185 |
|
| 186 |
files, total = FileService.get_by_pf_id(
|
| 187 |
current_user.id, pf_id, page_number, items_per_page, orderby, desc, keywords)
|
| 188 |
|
| 189 |
parent_folder = FileService.get_parent_folder(pf_id)
|
| 190 |
if not FileService.get_parent_folder(pf_id):
|
| 191 |
-
return get_json_result(
|
| 192 |
|
| 193 |
return get_json_result(data={"total": total, "files": files, "parent_folder": parent_folder.to_json()})
|
| 194 |
except Exception as e:
|
|
@@ -212,7 +209,7 @@ def get_parent_folder():
|
|
| 212 |
try:
|
| 213 |
e, file = FileService.get_by_id(file_id)
|
| 214 |
if not e:
|
| 215 |
-
return get_data_error_result(
|
| 216 |
|
| 217 |
parent_folder = FileService.get_parent_folder(file_id)
|
| 218 |
return get_json_result(data={"parent_folder": parent_folder.to_json()})
|
|
@@ -227,7 +224,7 @@ def get_all_parent_folders():
|
|
| 227 |
try:
|
| 228 |
e, file = FileService.get_by_id(file_id)
|
| 229 |
if not e:
|
| 230 |
-
return get_data_error_result(
|
| 231 |
|
| 232 |
parent_folders = FileService.get_all_parent_folders(file_id)
|
| 233 |
parent_folders_res = []
|
|
@@ -248,9 +245,9 @@ def rm():
|
|
| 248 |
for file_id in file_ids:
|
| 249 |
e, file = FileService.get_by_id(file_id)
|
| 250 |
if not e:
|
| 251 |
-
return get_data_error_result(
|
| 252 |
if not file.tenant_id:
|
| 253 |
-
return get_data_error_result(
|
| 254 |
if file.source_type == FileSource.KNOWLEDGEBASE:
|
| 255 |
continue
|
| 256 |
|
|
@@ -259,13 +256,13 @@ def rm():
|
|
| 259 |
for inner_file_id in file_id_list:
|
| 260 |
e, file = FileService.get_by_id(inner_file_id)
|
| 261 |
if not e:
|
| 262 |
-
return get_data_error_result(
|
| 263 |
STORAGE_IMPL.rm(file.parent_id, file.location)
|
| 264 |
FileService.delete_folder_by_pf_id(current_user.id, file_id)
|
| 265 |
else:
|
| 266 |
if not FileService.delete(file):
|
| 267 |
return get_data_error_result(
|
| 268 |
-
|
| 269 |
|
| 270 |
# delete file2document
|
| 271 |
informs = File2DocumentService.get_by_file_id(file_id)
|
|
@@ -273,13 +270,13 @@ def rm():
|
|
| 273 |
doc_id = inform.document_id
|
| 274 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 275 |
if not e:
|
| 276 |
-
return get_data_error_result(
|
| 277 |
tenant_id = DocumentService.get_tenant_id(doc_id)
|
| 278 |
if not tenant_id:
|
| 279 |
-
return get_data_error_result(
|
| 280 |
if not DocumentService.remove_document(doc, tenant_id):
|
| 281 |
return get_data_error_result(
|
| 282 |
-
|
| 283 |
File2DocumentService.delete_by_file_id(file_id)
|
| 284 |
|
| 285 |
return get_json_result(data=True)
|
|
@@ -295,30 +292,30 @@ def rename():
|
|
| 295 |
try:
|
| 296 |
e, file = FileService.get_by_id(req["file_id"])
|
| 297 |
if not e:
|
| 298 |
-
return get_data_error_result(
|
| 299 |
if file.type != FileType.FOLDER.value \
|
| 300 |
and pathlib.Path(req["name"].lower()).suffix != pathlib.Path(
|
| 301 |
file.name.lower()).suffix:
|
| 302 |
return get_json_result(
|
| 303 |
data=False,
|
| 304 |
-
|
| 305 |
-
|
| 306 |
for file in FileService.query(name=req["name"], pf_id=file.parent_id):
|
| 307 |
if file.name == req["name"]:
|
| 308 |
return get_data_error_result(
|
| 309 |
-
|
| 310 |
|
| 311 |
if not FileService.update_by_id(
|
| 312 |
req["file_id"], {"name": req["name"]}):
|
| 313 |
return get_data_error_result(
|
| 314 |
-
|
| 315 |
|
| 316 |
informs = File2DocumentService.get_by_file_id(req["file_id"])
|
| 317 |
if informs:
|
| 318 |
if not DocumentService.update_by_id(
|
| 319 |
informs[0].document_id, {"name": req["name"]}):
|
| 320 |
return get_data_error_result(
|
| 321 |
-
|
| 322 |
|
| 323 |
return get_json_result(data=True)
|
| 324 |
except Exception as e:
|
|
@@ -331,7 +328,7 @@ def get(file_id):
|
|
| 331 |
try:
|
| 332 |
e, file = FileService.get_by_id(file_id)
|
| 333 |
if not e:
|
| 334 |
-
return get_data_error_result(
|
| 335 |
b, n = File2DocumentService.get_storage_address(file_id=file_id)
|
| 336 |
response = flask.make_response(STORAGE_IMPL.get(b, n))
|
| 337 |
ext = re.search(r"\.([^.]+)$", file.name)
|
|
@@ -359,12 +356,12 @@ def move():
|
|
| 359 |
for file_id in file_ids:
|
| 360 |
e, file = FileService.get_by_id(file_id)
|
| 361 |
if not e:
|
| 362 |
-
return get_data_error_result(
|
| 363 |
if not file.tenant_id:
|
| 364 |
-
return get_data_error_result(
|
| 365 |
fe, _ = FileService.get_by_id(parent_id)
|
| 366 |
if not fe:
|
| 367 |
-
return get_data_error_result(
|
| 368 |
FileService.move_file(file_ids, parent_id)
|
| 369 |
return get_json_result(data=True)
|
| 370 |
except Exception as e:
|
|
|
|
| 18 |
import re
|
| 19 |
|
| 20 |
import flask
|
|
|
|
| 21 |
from flask import request
|
| 22 |
from flask_login import login_required, current_user
|
| 23 |
|
|
|
|
| 31 |
from api.settings import RetCode
|
| 32 |
from api.utils.api_utils import get_json_result
|
| 33 |
from api.utils.file_utils import filename_type
|
|
|
|
|
|
|
| 34 |
from rag.utils.storage_factory import STORAGE_IMPL
|
| 35 |
|
| 36 |
|
|
|
|
| 46 |
|
| 47 |
if 'file' not in request.files:
|
| 48 |
return get_json_result(
|
| 49 |
+
data=False, message='No file part!', code=RetCode.ARGUMENT_ERROR)
|
| 50 |
file_objs = request.files.getlist('file')
|
| 51 |
|
| 52 |
for file_obj in file_objs:
|
| 53 |
if file_obj.filename == '':
|
| 54 |
return get_json_result(
|
| 55 |
+
data=False, message='No file selected!', code=RetCode.ARGUMENT_ERROR)
|
| 56 |
file_res = []
|
| 57 |
try:
|
| 58 |
for file_obj in file_objs:
|
| 59 |
e, file = FileService.get_by_id(pf_id)
|
| 60 |
if not e:
|
| 61 |
return get_data_error_result(
|
| 62 |
+
message="Can't find this folder!")
|
| 63 |
MAX_FILE_NUM_PER_USER = int(os.environ.get('MAX_FILE_NUM_PER_USER', 0))
|
| 64 |
if MAX_FILE_NUM_PER_USER > 0 and DocumentService.get_doc_count(current_user.id) >= MAX_FILE_NUM_PER_USER:
|
| 65 |
return get_data_error_result(
|
| 66 |
+
message="Exceed the maximum file number of a free user!")
|
| 67 |
|
| 68 |
# split file name path
|
| 69 |
if not file_obj.filename:
|
|
|
|
| 82 |
if file_len != len_id_list:
|
| 83 |
e, file = FileService.get_by_id(file_id_list[len_id_list - 1])
|
| 84 |
if not e:
|
| 85 |
+
return get_data_error_result(message="Folder not found!")
|
| 86 |
last_folder = FileService.create_folder(file, file_id_list[len_id_list - 1], file_obj_names,
|
| 87 |
len_id_list)
|
| 88 |
else:
|
| 89 |
e, file = FileService.get_by_id(file_id_list[len_id_list - 2])
|
| 90 |
if not e:
|
| 91 |
+
return get_data_error_result(message="Folder not found!")
|
| 92 |
last_folder = FileService.create_folder(file, file_id_list[len_id_list - 2], file_obj_names,
|
| 93 |
len_id_list)
|
| 94 |
|
|
|
|
| 134 |
try:
|
| 135 |
if not FileService.is_parent_folder_exist(pf_id):
|
| 136 |
return get_json_result(
|
| 137 |
+
data=False, message="Parent Folder Doesn't Exist!", code=RetCode.OPERATING_ERROR)
|
| 138 |
if FileService.query(name=req["name"], parent_id=pf_id):
|
| 139 |
return get_data_error_result(
|
| 140 |
+
message="Duplicated folder name in the same folder.")
|
| 141 |
|
| 142 |
if input_file_type == FileType.FOLDER.value:
|
| 143 |
file_type = FileType.FOLDER.value
|
|
|
|
| 178 |
try:
|
| 179 |
e, file = FileService.get_by_id(pf_id)
|
| 180 |
if not e:
|
| 181 |
+
return get_data_error_result(message="Folder not found!")
|
| 182 |
|
| 183 |
files, total = FileService.get_by_pf_id(
|
| 184 |
current_user.id, pf_id, page_number, items_per_page, orderby, desc, keywords)
|
| 185 |
|
| 186 |
parent_folder = FileService.get_parent_folder(pf_id)
|
| 187 |
if not FileService.get_parent_folder(pf_id):
|
| 188 |
+
return get_json_result(message="File not found!")
|
| 189 |
|
| 190 |
return get_json_result(data={"total": total, "files": files, "parent_folder": parent_folder.to_json()})
|
| 191 |
except Exception as e:
|
|
|
|
| 209 |
try:
|
| 210 |
e, file = FileService.get_by_id(file_id)
|
| 211 |
if not e:
|
| 212 |
+
return get_data_error_result(message="Folder not found!")
|
| 213 |
|
| 214 |
parent_folder = FileService.get_parent_folder(file_id)
|
| 215 |
return get_json_result(data={"parent_folder": parent_folder.to_json()})
|
|
|
|
| 224 |
try:
|
| 225 |
e, file = FileService.get_by_id(file_id)
|
| 226 |
if not e:
|
| 227 |
+
return get_data_error_result(message="Folder not found!")
|
| 228 |
|
| 229 |
parent_folders = FileService.get_all_parent_folders(file_id)
|
| 230 |
parent_folders_res = []
|
|
|
|
| 245 |
for file_id in file_ids:
|
| 246 |
e, file = FileService.get_by_id(file_id)
|
| 247 |
if not e:
|
| 248 |
+
return get_data_error_result(message="File or Folder not found!")
|
| 249 |
if not file.tenant_id:
|
| 250 |
+
return get_data_error_result(message="Tenant not found!")
|
| 251 |
if file.source_type == FileSource.KNOWLEDGEBASE:
|
| 252 |
continue
|
| 253 |
|
|
|
|
| 256 |
for inner_file_id in file_id_list:
|
| 257 |
e, file = FileService.get_by_id(inner_file_id)
|
| 258 |
if not e:
|
| 259 |
+
return get_data_error_result(message="File not found!")
|
| 260 |
STORAGE_IMPL.rm(file.parent_id, file.location)
|
| 261 |
FileService.delete_folder_by_pf_id(current_user.id, file_id)
|
| 262 |
else:
|
| 263 |
if not FileService.delete(file):
|
| 264 |
return get_data_error_result(
|
| 265 |
+
message="Database error (File removal)!")
|
| 266 |
|
| 267 |
# delete file2document
|
| 268 |
informs = File2DocumentService.get_by_file_id(file_id)
|
|
|
|
| 270 |
doc_id = inform.document_id
|
| 271 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 272 |
if not e:
|
| 273 |
+
return get_data_error_result(message="Document not found!")
|
| 274 |
tenant_id = DocumentService.get_tenant_id(doc_id)
|
| 275 |
if not tenant_id:
|
| 276 |
+
return get_data_error_result(message="Tenant not found!")
|
| 277 |
if not DocumentService.remove_document(doc, tenant_id):
|
| 278 |
return get_data_error_result(
|
| 279 |
+
message="Database error (Document removal)!")
|
| 280 |
File2DocumentService.delete_by_file_id(file_id)
|
| 281 |
|
| 282 |
return get_json_result(data=True)
|
|
|
|
| 292 |
try:
|
| 293 |
e, file = FileService.get_by_id(req["file_id"])
|
| 294 |
if not e:
|
| 295 |
+
return get_data_error_result(message="File not found!")
|
| 296 |
if file.type != FileType.FOLDER.value \
|
| 297 |
and pathlib.Path(req["name"].lower()).suffix != pathlib.Path(
|
| 298 |
file.name.lower()).suffix:
|
| 299 |
return get_json_result(
|
| 300 |
data=False,
|
| 301 |
+
message="The extension of file can't be changed",
|
| 302 |
+
code=RetCode.ARGUMENT_ERROR)
|
| 303 |
for file in FileService.query(name=req["name"], pf_id=file.parent_id):
|
| 304 |
if file.name == req["name"]:
|
| 305 |
return get_data_error_result(
|
| 306 |
+
message="Duplicated file name in the same folder.")
|
| 307 |
|
| 308 |
if not FileService.update_by_id(
|
| 309 |
req["file_id"], {"name": req["name"]}):
|
| 310 |
return get_data_error_result(
|
| 311 |
+
message="Database error (File rename)!")
|
| 312 |
|
| 313 |
informs = File2DocumentService.get_by_file_id(req["file_id"])
|
| 314 |
if informs:
|
| 315 |
if not DocumentService.update_by_id(
|
| 316 |
informs[0].document_id, {"name": req["name"]}):
|
| 317 |
return get_data_error_result(
|
| 318 |
+
message="Database error (Document rename)!")
|
| 319 |
|
| 320 |
return get_json_result(data=True)
|
| 321 |
except Exception as e:
|
|
|
|
| 328 |
try:
|
| 329 |
e, file = FileService.get_by_id(file_id)
|
| 330 |
if not e:
|
| 331 |
+
return get_data_error_result(message="Document not found!")
|
| 332 |
b, n = File2DocumentService.get_storage_address(file_id=file_id)
|
| 333 |
response = flask.make_response(STORAGE_IMPL.get(b, n))
|
| 334 |
ext = re.search(r"\.([^.]+)$", file.name)
|
|
|
|
| 356 |
for file_id in file_ids:
|
| 357 |
e, file = FileService.get_by_id(file_id)
|
| 358 |
if not e:
|
| 359 |
+
return get_data_error_result(message="File or Folder not found!")
|
| 360 |
if not file.tenant_id:
|
| 361 |
+
return get_data_error_result(message="Tenant not found!")
|
| 362 |
fe, _ = FileService.get_by_id(parent_id)
|
| 363 |
if not fe:
|
| 364 |
+
return get_data_error_result(message="Parent Folder not found!")
|
| 365 |
FileService.move_file(file_ids, parent_id)
|
| 366 |
return get_json_result(data=True)
|
| 367 |
except Exception as e:
|
api/apps/kb_app.py
CHANGED
|
@@ -47,7 +47,7 @@ def create():
|
|
| 47 |
req["created_by"] = current_user.id
|
| 48 |
e, t = TenantService.get_by_id(current_user.id)
|
| 49 |
if not e:
|
| 50 |
-
return get_data_error_result(
|
| 51 |
req["embd_id"] = t.embd_id
|
| 52 |
if not KnowledgebaseService.save(**req):
|
| 53 |
return get_data_error_result()
|
|
@@ -65,24 +65,24 @@ def update():
|
|
| 65 |
if not KnowledgebaseService.accessible4deletion(req["kb_id"], current_user.id):
|
| 66 |
return get_json_result(
|
| 67 |
data=False,
|
| 68 |
-
|
| 69 |
-
|
| 70 |
)
|
| 71 |
try:
|
| 72 |
if not KnowledgebaseService.query(
|
| 73 |
created_by=current_user.id, id=req["kb_id"]):
|
| 74 |
return get_json_result(
|
| 75 |
-
data=False,
|
| 76 |
|
| 77 |
e, kb = KnowledgebaseService.get_by_id(req["kb_id"])
|
| 78 |
if not e:
|
| 79 |
return get_data_error_result(
|
| 80 |
-
|
| 81 |
|
| 82 |
if req["name"].lower() != kb.name.lower() \
|
| 83 |
and len(KnowledgebaseService.query(name=req["name"], tenant_id=current_user.id, status=StatusEnum.VALID.value)) > 1:
|
| 84 |
return get_data_error_result(
|
| 85 |
-
|
| 86 |
|
| 87 |
del req["kb_id"]
|
| 88 |
if not KnowledgebaseService.update_by_id(kb.id, req):
|
|
@@ -91,7 +91,7 @@ def update():
|
|
| 91 |
e, kb = KnowledgebaseService.get_by_id(kb.id)
|
| 92 |
if not e:
|
| 93 |
return get_data_error_result(
|
| 94 |
-
|
| 95 |
|
| 96 |
return get_json_result(data=kb.to_json())
|
| 97 |
except Exception as e:
|
|
@@ -110,12 +110,12 @@ def detail():
|
|
| 110 |
break
|
| 111 |
else:
|
| 112 |
return get_json_result(
|
| 113 |
-
data=False,
|
| 114 |
-
|
| 115 |
kb = KnowledgebaseService.get_detail(kb_id)
|
| 116 |
if not kb:
|
| 117 |
return get_data_error_result(
|
| 118 |
-
|
| 119 |
return get_json_result(data=kb)
|
| 120 |
except Exception as e:
|
| 121 |
return server_error_response(e)
|
|
@@ -145,27 +145,27 @@ def rm():
|
|
| 145 |
if not KnowledgebaseService.accessible4deletion(req["kb_id"], current_user.id):
|
| 146 |
return get_json_result(
|
| 147 |
data=False,
|
| 148 |
-
|
| 149 |
-
|
| 150 |
)
|
| 151 |
try:
|
| 152 |
kbs = KnowledgebaseService.query(
|
| 153 |
created_by=current_user.id, id=req["kb_id"])
|
| 154 |
if not kbs:
|
| 155 |
return get_json_result(
|
| 156 |
-
data=False,
|
| 157 |
|
| 158 |
for doc in DocumentService.query(kb_id=req["kb_id"]):
|
| 159 |
if not DocumentService.remove_document(doc, kbs[0].tenant_id):
|
| 160 |
return get_data_error_result(
|
| 161 |
-
|
| 162 |
f2d = File2DocumentService.get_by_document_id(doc.id)
|
| 163 |
FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.id == f2d[0].file_id])
|
| 164 |
File2DocumentService.delete_by_document_id(doc.id)
|
| 165 |
|
| 166 |
if not KnowledgebaseService.delete_by_id(req["kb_id"]):
|
| 167 |
return get_data_error_result(
|
| 168 |
-
|
| 169 |
return get_json_result(data=True)
|
| 170 |
except Exception as e:
|
| 171 |
return server_error_response(e)
|
|
|
|
| 47 |
req["created_by"] = current_user.id
|
| 48 |
e, t = TenantService.get_by_id(current_user.id)
|
| 49 |
if not e:
|
| 50 |
+
return get_data_error_result(message="Tenant not found.")
|
| 51 |
req["embd_id"] = t.embd_id
|
| 52 |
if not KnowledgebaseService.save(**req):
|
| 53 |
return get_data_error_result()
|
|
|
|
| 65 |
if not KnowledgebaseService.accessible4deletion(req["kb_id"], current_user.id):
|
| 66 |
return get_json_result(
|
| 67 |
data=False,
|
| 68 |
+
message='No authorization.',
|
| 69 |
+
code=RetCode.AUTHENTICATION_ERROR
|
| 70 |
)
|
| 71 |
try:
|
| 72 |
if not KnowledgebaseService.query(
|
| 73 |
created_by=current_user.id, id=req["kb_id"]):
|
| 74 |
return get_json_result(
|
| 75 |
+
data=False, message='Only owner of knowledgebase authorized for this operation.', code=RetCode.OPERATING_ERROR)
|
| 76 |
|
| 77 |
e, kb = KnowledgebaseService.get_by_id(req["kb_id"])
|
| 78 |
if not e:
|
| 79 |
return get_data_error_result(
|
| 80 |
+
message="Can't find this knowledgebase!")
|
| 81 |
|
| 82 |
if req["name"].lower() != kb.name.lower() \
|
| 83 |
and len(KnowledgebaseService.query(name=req["name"], tenant_id=current_user.id, status=StatusEnum.VALID.value)) > 1:
|
| 84 |
return get_data_error_result(
|
| 85 |
+
message="Duplicated knowledgebase name.")
|
| 86 |
|
| 87 |
del req["kb_id"]
|
| 88 |
if not KnowledgebaseService.update_by_id(kb.id, req):
|
|
|
|
| 91 |
e, kb = KnowledgebaseService.get_by_id(kb.id)
|
| 92 |
if not e:
|
| 93 |
return get_data_error_result(
|
| 94 |
+
message="Database error (Knowledgebase rename)!")
|
| 95 |
|
| 96 |
return get_json_result(data=kb.to_json())
|
| 97 |
except Exception as e:
|
|
|
|
| 110 |
break
|
| 111 |
else:
|
| 112 |
return get_json_result(
|
| 113 |
+
data=False, message='Only owner of knowledgebase authorized for this operation.',
|
| 114 |
+
code=RetCode.OPERATING_ERROR)
|
| 115 |
kb = KnowledgebaseService.get_detail(kb_id)
|
| 116 |
if not kb:
|
| 117 |
return get_data_error_result(
|
| 118 |
+
message="Can't find this knowledgebase!")
|
| 119 |
return get_json_result(data=kb)
|
| 120 |
except Exception as e:
|
| 121 |
return server_error_response(e)
|
|
|
|
| 145 |
if not KnowledgebaseService.accessible4deletion(req["kb_id"], current_user.id):
|
| 146 |
return get_json_result(
|
| 147 |
data=False,
|
| 148 |
+
message='No authorization.',
|
| 149 |
+
code=RetCode.AUTHENTICATION_ERROR
|
| 150 |
)
|
| 151 |
try:
|
| 152 |
kbs = KnowledgebaseService.query(
|
| 153 |
created_by=current_user.id, id=req["kb_id"])
|
| 154 |
if not kbs:
|
| 155 |
return get_json_result(
|
| 156 |
+
data=False, message='Only owner of knowledgebase authorized for this operation.', code=RetCode.OPERATING_ERROR)
|
| 157 |
|
| 158 |
for doc in DocumentService.query(kb_id=req["kb_id"]):
|
| 159 |
if not DocumentService.remove_document(doc, kbs[0].tenant_id):
|
| 160 |
return get_data_error_result(
|
| 161 |
+
message="Database error (Document removal)!")
|
| 162 |
f2d = File2DocumentService.get_by_document_id(doc.id)
|
| 163 |
FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.id == f2d[0].file_id])
|
| 164 |
File2DocumentService.delete_by_document_id(doc.id)
|
| 165 |
|
| 166 |
if not KnowledgebaseService.delete_by_id(req["kb_id"]):
|
| 167 |
return get_data_error_result(
|
| 168 |
+
message="Database error (Knowledgebase removal)!")
|
| 169 |
return get_json_result(data=True)
|
| 170 |
except Exception as e:
|
| 171 |
return server_error_response(e)
|
api/apps/llm_app.py
CHANGED
|
@@ -98,7 +98,7 @@ def set_api_key():
|
|
| 98 |
break
|
| 99 |
|
| 100 |
if msg:
|
| 101 |
-
return get_data_error_result(
|
| 102 |
|
| 103 |
llm_config = {
|
| 104 |
"api_key": req["api_key"],
|
|
@@ -278,7 +278,7 @@ def add_llm():
|
|
| 278 |
pass
|
| 279 |
|
| 280 |
if msg:
|
| 281 |
-
return get_data_error_result(
|
| 282 |
|
| 283 |
if not TenantLLMService.filter_update(
|
| 284 |
[TenantLLM.tenant_id == current_user.id, TenantLLM.llm_factory == factory, TenantLLM.llm_name == llm["llm_name"]], llm):
|
|
|
|
| 98 |
break
|
| 99 |
|
| 100 |
if msg:
|
| 101 |
+
return get_data_error_result(message=msg)
|
| 102 |
|
| 103 |
llm_config = {
|
| 104 |
"api_key": req["api_key"],
|
|
|
|
| 278 |
pass
|
| 279 |
|
| 280 |
if msg:
|
| 281 |
+
return get_data_error_result(message=msg)
|
| 282 |
|
| 283 |
if not TenantLLMService.filter_update(
|
| 284 |
[TenantLLM.tenant_id == current_user.id, TenantLLM.llm_factory == factory, TenantLLM.llm_name == llm["llm_name"]], llm):
|
api/apps/sdk/chat.py
CHANGED
|
@@ -32,7 +32,7 @@ def create(tenant_id):
|
|
| 32 |
req=request.json
|
| 33 |
ids= req.get("dataset_ids")
|
| 34 |
if not ids:
|
| 35 |
-
return get_error_data_result(
|
| 36 |
for kb_id in ids:
|
| 37 |
kbs = KnowledgebaseService.query(id=kb_id,tenant_id=tenant_id)
|
| 38 |
if not kbs:
|
|
@@ -43,7 +43,7 @@ def create(tenant_id):
|
|
| 43 |
kbs = KnowledgebaseService.get_by_ids(ids)
|
| 44 |
embd_count = list(set([kb.embd_id for kb in kbs]))
|
| 45 |
if len(embd_count) != 1:
|
| 46 |
-
return get_result(
|
| 47 |
req["kb_ids"] = ids
|
| 48 |
# llm
|
| 49 |
llm = req.get("llm")
|
|
@@ -55,7 +55,7 @@ def create(tenant_id):
|
|
| 55 |
req["llm_setting"] = req.pop("llm")
|
| 56 |
e, tenant = TenantService.get_by_id(tenant_id)
|
| 57 |
if not e:
|
| 58 |
-
return get_error_data_result(
|
| 59 |
# prompt
|
| 60 |
prompt = req.get("prompt")
|
| 61 |
key_mapping = {"parameters": "variables",
|
|
@@ -86,12 +86,12 @@ def create(tenant_id):
|
|
| 86 |
if not req.get("llm_id"):
|
| 87 |
req["llm_id"] = tenant.llm_id
|
| 88 |
if not req.get("name"):
|
| 89 |
-
return get_error_data_result(
|
| 90 |
if DialogService.query(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value):
|
| 91 |
-
return get_error_data_result(
|
| 92 |
# tenant_id
|
| 93 |
if req.get("tenant_id"):
|
| 94 |
-
return get_error_data_result(
|
| 95 |
req["tenant_id"] = tenant_id
|
| 96 |
# prompt more parameter
|
| 97 |
default_prompt = {
|
|
@@ -117,14 +117,14 @@ def create(tenant_id):
|
|
| 117 |
continue
|
| 118 |
if req['prompt_config']["system"].find("{%s}" % p["key"]) < 0:
|
| 119 |
return get_error_data_result(
|
| 120 |
-
|
| 121 |
# save
|
| 122 |
if not DialogService.save(**req):
|
| 123 |
-
return get_error_data_result(
|
| 124 |
# response
|
| 125 |
e, res = DialogService.get_by_id(req["id"])
|
| 126 |
if not e:
|
| 127 |
-
return get_error_data_result(
|
| 128 |
res = res.to_json()
|
| 129 |
renamed_dict = {}
|
| 130 |
for key, value in res["prompt_config"].items():
|
|
@@ -150,7 +150,7 @@ def create(tenant_id):
|
|
| 150 |
@token_required
|
| 151 |
def update(tenant_id,chat_id):
|
| 152 |
if not DialogService.query(tenant_id=tenant_id, id=chat_id, status=StatusEnum.VALID.value):
|
| 153 |
-
return get_error_data_result(
|
| 154 |
req =request.json
|
| 155 |
ids = req.get("dataset_ids")
|
| 156 |
if "show_quotation" in req:
|
|
@@ -170,8 +170,8 @@ def update(tenant_id,chat_id):
|
|
| 170 |
embd_count=list(set([kb.embd_id for kb in kbs]))
|
| 171 |
if len(embd_count) != 1 :
|
| 172 |
return get_result(
|
| 173 |
-
|
| 174 |
-
|
| 175 |
req["kb_ids"] = ids
|
| 176 |
llm = req.get("llm")
|
| 177 |
if llm:
|
|
@@ -182,7 +182,7 @@ def update(tenant_id,chat_id):
|
|
| 182 |
req["llm_setting"] = req.pop("llm")
|
| 183 |
e, tenant = TenantService.get_by_id(tenant_id)
|
| 184 |
if not e:
|
| 185 |
-
return get_error_data_result(
|
| 186 |
if req.get("rerank_model"):
|
| 187 |
if not TenantLLMService.query(tenant_id=tenant_id,llm_name=req.get("rerank_model"),model_type="rerank"):
|
| 188 |
return get_error_data_result(f"`rerank_model` {req.get('rerank_model')} doesn't exist")
|
|
@@ -207,18 +207,18 @@ def update(tenant_id,chat_id):
|
|
| 207 |
res = res.to_json()
|
| 208 |
if "name" in req:
|
| 209 |
if not req.get("name"):
|
| 210 |
-
return get_error_data_result(
|
| 211 |
if req["name"].lower() != res["name"].lower() \
|
| 212 |
and len(
|
| 213 |
DialogService.query(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value)) > 0:
|
| 214 |
-
return get_error_data_result(
|
| 215 |
if "prompt_config" in req:
|
| 216 |
res["prompt_config"].update(req["prompt_config"])
|
| 217 |
for p in res["prompt_config"]["parameters"]:
|
| 218 |
if p["optional"]:
|
| 219 |
continue
|
| 220 |
if res["prompt_config"]["system"].find("{%s}" % p["key"]) < 0:
|
| 221 |
-
return get_error_data_result(
|
| 222 |
if "llm_setting" in req:
|
| 223 |
res["llm_setting"].update(req["llm_setting"])
|
| 224 |
req["prompt_config"] = res["prompt_config"]
|
|
@@ -229,7 +229,7 @@ def update(tenant_id,chat_id):
|
|
| 229 |
if "dataset_ids" in req:
|
| 230 |
req.pop("dataset_ids")
|
| 231 |
if not DialogService.update_by_id(chat_id, req):
|
| 232 |
-
return get_error_data_result(
|
| 233 |
return get_result()
|
| 234 |
|
| 235 |
|
|
@@ -250,7 +250,7 @@ def delete(tenant_id):
|
|
| 250 |
id_list=ids
|
| 251 |
for id in id_list:
|
| 252 |
if not DialogService.query(tenant_id=tenant_id, id=id, status=StatusEnum.VALID.value):
|
| 253 |
-
return get_error_data_result(
|
| 254 |
temp_dict = {"status": StatusEnum.INVALID.value}
|
| 255 |
DialogService.update_by_id(id, temp_dict)
|
| 256 |
return get_result()
|
|
@@ -262,7 +262,7 @@ def list_chat(tenant_id):
|
|
| 262 |
name = request.args.get("name")
|
| 263 |
chat = DialogService.query(id=id,name=name,status=StatusEnum.VALID.value)
|
| 264 |
if not chat:
|
| 265 |
-
return get_error_data_result(
|
| 266 |
page_number = int(request.args.get("page", 1))
|
| 267 |
items_per_page = int(request.args.get("page_size", 1024))
|
| 268 |
orderby = request.args.get("orderby", "create_time")
|
|
@@ -302,7 +302,7 @@ def list_chat(tenant_id):
|
|
| 302 |
for kb_id in res["kb_ids"]:
|
| 303 |
kb = KnowledgebaseService.query(id=kb_id)
|
| 304 |
if not kb :
|
| 305 |
-
return get_error_data_result(
|
| 306 |
kb_list.append(kb[0].to_json())
|
| 307 |
del res["kb_ids"]
|
| 308 |
res["datasets"] = kb_list
|
|
|
|
| 32 |
req=request.json
|
| 33 |
ids= req.get("dataset_ids")
|
| 34 |
if not ids:
|
| 35 |
+
return get_error_data_result(message="`dataset_ids` is required")
|
| 36 |
for kb_id in ids:
|
| 37 |
kbs = KnowledgebaseService.query(id=kb_id,tenant_id=tenant_id)
|
| 38 |
if not kbs:
|
|
|
|
| 43 |
kbs = KnowledgebaseService.get_by_ids(ids)
|
| 44 |
embd_count = list(set([kb.embd_id for kb in kbs]))
|
| 45 |
if len(embd_count) != 1:
|
| 46 |
+
return get_result(message='Datasets use different embedding models."',code=RetCode.AUTHENTICATION_ERROR)
|
| 47 |
req["kb_ids"] = ids
|
| 48 |
# llm
|
| 49 |
llm = req.get("llm")
|
|
|
|
| 55 |
req["llm_setting"] = req.pop("llm")
|
| 56 |
e, tenant = TenantService.get_by_id(tenant_id)
|
| 57 |
if not e:
|
| 58 |
+
return get_error_data_result(message="Tenant not found!")
|
| 59 |
# prompt
|
| 60 |
prompt = req.get("prompt")
|
| 61 |
key_mapping = {"parameters": "variables",
|
|
|
|
| 86 |
if not req.get("llm_id"):
|
| 87 |
req["llm_id"] = tenant.llm_id
|
| 88 |
if not req.get("name"):
|
| 89 |
+
return get_error_data_result(message="`name` is required.")
|
| 90 |
if DialogService.query(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value):
|
| 91 |
+
return get_error_data_result(message="Duplicated chat name in creating chat.")
|
| 92 |
# tenant_id
|
| 93 |
if req.get("tenant_id"):
|
| 94 |
+
return get_error_data_result(message="`tenant_id` must not be provided.")
|
| 95 |
req["tenant_id"] = tenant_id
|
| 96 |
# prompt more parameter
|
| 97 |
default_prompt = {
|
|
|
|
| 117 |
continue
|
| 118 |
if req['prompt_config']["system"].find("{%s}" % p["key"]) < 0:
|
| 119 |
return get_error_data_result(
|
| 120 |
+
message="Parameter '{}' is not used".format(p["key"]))
|
| 121 |
# save
|
| 122 |
if not DialogService.save(**req):
|
| 123 |
+
return get_error_data_result(message="Fail to new a chat!")
|
| 124 |
# response
|
| 125 |
e, res = DialogService.get_by_id(req["id"])
|
| 126 |
if not e:
|
| 127 |
+
return get_error_data_result(message="Fail to new a chat!")
|
| 128 |
res = res.to_json()
|
| 129 |
renamed_dict = {}
|
| 130 |
for key, value in res["prompt_config"].items():
|
|
|
|
| 150 |
@token_required
|
| 151 |
def update(tenant_id,chat_id):
|
| 152 |
if not DialogService.query(tenant_id=tenant_id, id=chat_id, status=StatusEnum.VALID.value):
|
| 153 |
+
return get_error_data_result(message='You do not own the chat')
|
| 154 |
req =request.json
|
| 155 |
ids = req.get("dataset_ids")
|
| 156 |
if "show_quotation" in req:
|
|
|
|
| 170 |
embd_count=list(set([kb.embd_id for kb in kbs]))
|
| 171 |
if len(embd_count) != 1 :
|
| 172 |
return get_result(
|
| 173 |
+
message='Datasets use different embedding models."',
|
| 174 |
+
code=RetCode.AUTHENTICATION_ERROR)
|
| 175 |
req["kb_ids"] = ids
|
| 176 |
llm = req.get("llm")
|
| 177 |
if llm:
|
|
|
|
| 182 |
req["llm_setting"] = req.pop("llm")
|
| 183 |
e, tenant = TenantService.get_by_id(tenant_id)
|
| 184 |
if not e:
|
| 185 |
+
return get_error_data_result(message="Tenant not found!")
|
| 186 |
if req.get("rerank_model"):
|
| 187 |
if not TenantLLMService.query(tenant_id=tenant_id,llm_name=req.get("rerank_model"),model_type="rerank"):
|
| 188 |
return get_error_data_result(f"`rerank_model` {req.get('rerank_model')} doesn't exist")
|
|
|
|
| 207 |
res = res.to_json()
|
| 208 |
if "name" in req:
|
| 209 |
if not req.get("name"):
|
| 210 |
+
return get_error_data_result(message="`name` is not empty.")
|
| 211 |
if req["name"].lower() != res["name"].lower() \
|
| 212 |
and len(
|
| 213 |
DialogService.query(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value)) > 0:
|
| 214 |
+
return get_error_data_result(message="Duplicated chat name in updating dataset.")
|
| 215 |
if "prompt_config" in req:
|
| 216 |
res["prompt_config"].update(req["prompt_config"])
|
| 217 |
for p in res["prompt_config"]["parameters"]:
|
| 218 |
if p["optional"]:
|
| 219 |
continue
|
| 220 |
if res["prompt_config"]["system"].find("{%s}" % p["key"]) < 0:
|
| 221 |
+
return get_error_data_result(message="Parameter '{}' is not used".format(p["key"]))
|
| 222 |
if "llm_setting" in req:
|
| 223 |
res["llm_setting"].update(req["llm_setting"])
|
| 224 |
req["prompt_config"] = res["prompt_config"]
|
|
|
|
| 229 |
if "dataset_ids" in req:
|
| 230 |
req.pop("dataset_ids")
|
| 231 |
if not DialogService.update_by_id(chat_id, req):
|
| 232 |
+
return get_error_data_result(message="Chat not found!")
|
| 233 |
return get_result()
|
| 234 |
|
| 235 |
|
|
|
|
| 250 |
id_list=ids
|
| 251 |
for id in id_list:
|
| 252 |
if not DialogService.query(tenant_id=tenant_id, id=id, status=StatusEnum.VALID.value):
|
| 253 |
+
return get_error_data_result(message=f"You don't own the chat {id}")
|
| 254 |
temp_dict = {"status": StatusEnum.INVALID.value}
|
| 255 |
DialogService.update_by_id(id, temp_dict)
|
| 256 |
return get_result()
|
|
|
|
| 262 |
name = request.args.get("name")
|
| 263 |
chat = DialogService.query(id=id,name=name,status=StatusEnum.VALID.value)
|
| 264 |
if not chat:
|
| 265 |
+
return get_error_data_result(message="The chat doesn't exist")
|
| 266 |
page_number = int(request.args.get("page", 1))
|
| 267 |
items_per_page = int(request.args.get("page_size", 1024))
|
| 268 |
orderby = request.args.get("orderby", "create_time")
|
|
|
|
| 302 |
for kb_id in res["kb_ids"]:
|
| 303 |
kb = KnowledgebaseService.query(id=kb_id)
|
| 304 |
if not kb :
|
| 305 |
+
return get_error_data_result(message=f"Don't exist the kb {kb_id}")
|
| 306 |
kb_list.append(kb[0].to_json())
|
| 307 |
del res["kb_ids"]
|
| 308 |
res["datasets"] = kb_list
|
api/apps/sdk/dataset.py
CHANGED
|
@@ -1,535 +1,535 @@
|
|
| 1 |
-
#
|
| 2 |
-
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
|
| 3 |
-
#
|
| 4 |
-
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
-
# you may not use this file except in compliance with the License.
|
| 6 |
-
# You may obtain a copy of the License at
|
| 7 |
-
#
|
| 8 |
-
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
-
#
|
| 10 |
-
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
-
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
-
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
-
# See the License for the specific language governing permissions and
|
| 14 |
-
# limitations under the License.
|
| 15 |
-
#
|
| 16 |
-
|
| 17 |
-
from flask import request
|
| 18 |
-
from api.db import StatusEnum, FileSource
|
| 19 |
-
from api.db.db_models import File
|
| 20 |
-
from api.db.services.document_service import DocumentService
|
| 21 |
-
from api.db.services.file2document_service import File2DocumentService
|
| 22 |
-
from api.db.services.file_service import FileService
|
| 23 |
-
from api.db.services.knowledgebase_service import KnowledgebaseService
|
| 24 |
-
from api.db.services.llm_service import TenantLLMService, LLMService
|
| 25 |
-
from api.db.services.user_service import TenantService
|
| 26 |
-
from api.settings import RetCode
|
| 27 |
-
from api.utils import get_uuid
|
| 28 |
-
from api.utils.api_utils import (
|
| 29 |
-
get_result,
|
| 30 |
-
token_required,
|
| 31 |
-
get_error_data_result,
|
| 32 |
-
valid,
|
| 33 |
-
get_parser_config,
|
| 34 |
-
)
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
@manager.route("/datasets", methods=["POST"])
|
| 38 |
-
@token_required
|
| 39 |
-
def create(tenant_id):
|
| 40 |
-
"""
|
| 41 |
-
Create a new dataset.
|
| 42 |
-
---
|
| 43 |
-
tags:
|
| 44 |
-
- Datasets
|
| 45 |
-
security:
|
| 46 |
-
- ApiKeyAuth: []
|
| 47 |
-
parameters:
|
| 48 |
-
- in: header
|
| 49 |
-
name: Authorization
|
| 50 |
-
type: string
|
| 51 |
-
required: true
|
| 52 |
-
description: Bearer token for authentication.
|
| 53 |
-
- in: body
|
| 54 |
-
name: body
|
| 55 |
-
description: Dataset creation parameters.
|
| 56 |
-
required: true
|
| 57 |
-
schema:
|
| 58 |
-
type: object
|
| 59 |
-
required:
|
| 60 |
-
- name
|
| 61 |
-
properties:
|
| 62 |
-
name:
|
| 63 |
-
type: string
|
| 64 |
-
description: Name of the dataset.
|
| 65 |
-
permission:
|
| 66 |
-
type: string
|
| 67 |
-
enum: ['me', 'team']
|
| 68 |
-
description: Dataset permission.
|
| 69 |
-
language:
|
| 70 |
-
type: string
|
| 71 |
-
enum: ['Chinese', 'English']
|
| 72 |
-
description: Language of the dataset.
|
| 73 |
-
chunk_method:
|
| 74 |
-
type: string
|
| 75 |
-
enum: ["naive", "manual", "qa", "table", "paper", "book", "laws",
|
| 76 |
-
"presentation", "picture", "one", "knowledge_graph", "email"]
|
| 77 |
-
description: Chunking method.
|
| 78 |
-
parser_config:
|
| 79 |
-
type: object
|
| 80 |
-
description: Parser configuration.
|
| 81 |
-
responses:
|
| 82 |
-
200:
|
| 83 |
-
description: Successful operation.
|
| 84 |
-
schema:
|
| 85 |
-
type: object
|
| 86 |
-
properties:
|
| 87 |
-
data:
|
| 88 |
-
type: object
|
| 89 |
-
"""
|
| 90 |
-
req = request.json
|
| 91 |
-
e, t = TenantService.get_by_id(tenant_id)
|
| 92 |
-
permission = req.get("permission")
|
| 93 |
-
language = req.get("language")
|
| 94 |
-
chunk_method = req.get("chunk_method")
|
| 95 |
-
parser_config = req.get("parser_config")
|
| 96 |
-
valid_permission = ["me", "team"]
|
| 97 |
-
valid_language = ["Chinese", "English"]
|
| 98 |
-
valid_chunk_method = [
|
| 99 |
-
"naive",
|
| 100 |
-
"manual",
|
| 101 |
-
"qa",
|
| 102 |
-
"table",
|
| 103 |
-
"paper",
|
| 104 |
-
"book",
|
| 105 |
-
"laws",
|
| 106 |
-
"presentation",
|
| 107 |
-
"picture",
|
| 108 |
-
"one",
|
| 109 |
-
"knowledge_graph",
|
| 110 |
-
"email",
|
| 111 |
-
]
|
| 112 |
-
check_validation = valid(
|
| 113 |
-
permission,
|
| 114 |
-
valid_permission,
|
| 115 |
-
language,
|
| 116 |
-
valid_language,
|
| 117 |
-
chunk_method,
|
| 118 |
-
valid_chunk_method,
|
| 119 |
-
)
|
| 120 |
-
if check_validation:
|
| 121 |
-
return check_validation
|
| 122 |
-
req["parser_config"] = get_parser_config(chunk_method, parser_config)
|
| 123 |
-
if "tenant_id" in req:
|
| 124 |
-
return get_error_data_result(
|
| 125 |
-
if "chunk_count" in req or "document_count" in req:
|
| 126 |
-
return get_error_data_result(
|
| 127 |
-
|
| 128 |
-
)
|
| 129 |
-
if "name" not in req:
|
| 130 |
-
return get_error_data_result(
|
| 131 |
-
req["id"] = get_uuid()
|
| 132 |
-
req["name"] = req["name"].strip()
|
| 133 |
-
if req["name"] == "":
|
| 134 |
-
return get_error_data_result(
|
| 135 |
-
if KnowledgebaseService.query(
|
| 136 |
-
name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value
|
| 137 |
-
):
|
| 138 |
-
return get_error_data_result(
|
| 139 |
-
|
| 140 |
-
)
|
| 141 |
-
req["tenant_id"] = req["created_by"] = tenant_id
|
| 142 |
-
if not req.get("embedding_model"):
|
| 143 |
-
req["embedding_model"] = t.embd_id
|
| 144 |
-
else:
|
| 145 |
-
valid_embedding_models = [
|
| 146 |
-
"BAAI/bge-large-zh-v1.5",
|
| 147 |
-
"BAAI/bge-base-en-v1.5",
|
| 148 |
-
"BAAI/bge-large-en-v1.5",
|
| 149 |
-
"BAAI/bge-small-en-v1.5",
|
| 150 |
-
"BAAI/bge-small-zh-v1.5",
|
| 151 |
-
"jinaai/jina-embeddings-v2-base-en",
|
| 152 |
-
"jinaai/jina-embeddings-v2-small-en",
|
| 153 |
-
"nomic-ai/nomic-embed-text-v1.5",
|
| 154 |
-
"sentence-transformers/all-MiniLM-L6-v2",
|
| 155 |
-
"text-embedding-v2",
|
| 156 |
-
"text-embedding-v3",
|
| 157 |
-
"maidalun1020/bce-embedding-base_v1",
|
| 158 |
-
]
|
| 159 |
-
embd_model = LLMService.query(
|
| 160 |
-
llm_name=req["embedding_model"], model_type="embedding"
|
| 161 |
-
)
|
| 162 |
-
if not embd_model:
|
| 163 |
-
return get_error_data_result(
|
| 164 |
-
f"`embedding_model` {req.get('embedding_model')} doesn't exist"
|
| 165 |
-
)
|
| 166 |
-
if embd_model:
|
| 167 |
-
if req[
|
| 168 |
-
"embedding_model"
|
| 169 |
-
] not in valid_embedding_models and not TenantLLMService.query(
|
| 170 |
-
tenant_id=tenant_id,
|
| 171 |
-
model_type="embedding",
|
| 172 |
-
llm_name=req.get("embedding_model"),
|
| 173 |
-
):
|
| 174 |
-
return get_error_data_result(
|
| 175 |
-
f"`embedding_model` {req.get('embedding_model')} doesn't exist"
|
| 176 |
-
)
|
| 177 |
-
key_mapping = {
|
| 178 |
-
"chunk_num": "chunk_count",
|
| 179 |
-
"doc_num": "document_count",
|
| 180 |
-
"parser_id": "chunk_method",
|
| 181 |
-
"embd_id": "embedding_model",
|
| 182 |
-
}
|
| 183 |
-
mapped_keys = {
|
| 184 |
-
new_key: req[old_key]
|
| 185 |
-
for new_key, old_key in key_mapping.items()
|
| 186 |
-
if old_key in req
|
| 187 |
-
}
|
| 188 |
-
req.update(mapped_keys)
|
| 189 |
-
if not KnowledgebaseService.save(**req):
|
| 190 |
-
return get_error_data_result(
|
| 191 |
-
renamed_data = {}
|
| 192 |
-
e, k = KnowledgebaseService.get_by_id(req["id"])
|
| 193 |
-
for key, value in k.to_dict().items():
|
| 194 |
-
new_key = key_mapping.get(key, key)
|
| 195 |
-
renamed_data[new_key] = value
|
| 196 |
-
return get_result(data=renamed_data)
|
| 197 |
-
|
| 198 |
-
|
| 199 |
-
@manager.route("/datasets", methods=["DELETE"])
|
| 200 |
-
@token_required
|
| 201 |
-
def delete(tenant_id):
|
| 202 |
-
"""
|
| 203 |
-
Delete datasets.
|
| 204 |
-
---
|
| 205 |
-
tags:
|
| 206 |
-
- Datasets
|
| 207 |
-
security:
|
| 208 |
-
- ApiKeyAuth: []
|
| 209 |
-
parameters:
|
| 210 |
-
- in: header
|
| 211 |
-
name: Authorization
|
| 212 |
-
type: string
|
| 213 |
-
required: true
|
| 214 |
-
description: Bearer token for authentication.
|
| 215 |
-
- in: body
|
| 216 |
-
name: body
|
| 217 |
-
description: Dataset deletion parameters.
|
| 218 |
-
required: true
|
| 219 |
-
schema:
|
| 220 |
-
type: object
|
| 221 |
-
properties:
|
| 222 |
-
ids:
|
| 223 |
-
type: array
|
| 224 |
-
items:
|
| 225 |
-
type: string
|
| 226 |
-
description: List of dataset IDs to delete.
|
| 227 |
-
responses:
|
| 228 |
-
200:
|
| 229 |
-
description: Successful operation.
|
| 230 |
-
schema:
|
| 231 |
-
type: object
|
| 232 |
-
"""
|
| 233 |
-
req = request.json
|
| 234 |
-
if not req:
|
| 235 |
-
ids = None
|
| 236 |
-
else:
|
| 237 |
-
ids = req.get("ids")
|
| 238 |
-
if not ids:
|
| 239 |
-
id_list = []
|
| 240 |
-
kbs = KnowledgebaseService.query(tenant_id=tenant_id)
|
| 241 |
-
for kb in kbs:
|
| 242 |
-
id_list.append(kb.id)
|
| 243 |
-
else:
|
| 244 |
-
id_list = ids
|
| 245 |
-
for id in id_list:
|
| 246 |
-
kbs = KnowledgebaseService.query(id=id, tenant_id=tenant_id)
|
| 247 |
-
if not kbs:
|
| 248 |
-
return get_error_data_result(
|
| 249 |
-
for doc in DocumentService.query(kb_id=id):
|
| 250 |
-
if not DocumentService.remove_document(doc, tenant_id):
|
| 251 |
-
return get_error_data_result(
|
| 252 |
-
|
| 253 |
-
)
|
| 254 |
-
f2d = File2DocumentService.get_by_document_id(doc.id)
|
| 255 |
-
FileService.filter_delete(
|
| 256 |
-
[
|
| 257 |
-
File.source_type == FileSource.KNOWLEDGEBASE,
|
| 258 |
-
File.id == f2d[0].file_id,
|
| 259 |
-
]
|
| 260 |
-
)
|
| 261 |
-
File2DocumentService.delete_by_document_id(doc.id)
|
| 262 |
-
if not KnowledgebaseService.delete_by_id(id):
|
| 263 |
-
return get_error_data_result(
|
| 264 |
-
return get_result(
|
| 265 |
-
|
| 266 |
-
|
| 267 |
-
@manager.route("/datasets/<dataset_id>", methods=["PUT"])
|
| 268 |
-
@token_required
|
| 269 |
-
def update(tenant_id, dataset_id):
|
| 270 |
-
"""
|
| 271 |
-
Update a dataset.
|
| 272 |
-
---
|
| 273 |
-
tags:
|
| 274 |
-
- Datasets
|
| 275 |
-
security:
|
| 276 |
-
- ApiKeyAuth: []
|
| 277 |
-
parameters:
|
| 278 |
-
- in: path
|
| 279 |
-
name: dataset_id
|
| 280 |
-
type: string
|
| 281 |
-
required: true
|
| 282 |
-
description: ID of the dataset to update.
|
| 283 |
-
- in: header
|
| 284 |
-
name: Authorization
|
| 285 |
-
type: string
|
| 286 |
-
required: true
|
| 287 |
-
description: Bearer token for authentication.
|
| 288 |
-
- in: body
|
| 289 |
-
name: body
|
| 290 |
-
description: Dataset update parameters.
|
| 291 |
-
required: true
|
| 292 |
-
schema:
|
| 293 |
-
type: object
|
| 294 |
-
properties:
|
| 295 |
-
name:
|
| 296 |
-
type: string
|
| 297 |
-
description: New name of the dataset.
|
| 298 |
-
permission:
|
| 299 |
-
type: string
|
| 300 |
-
enum: ['me', 'team']
|
| 301 |
-
description: Updated permission.
|
| 302 |
-
language:
|
| 303 |
-
type: string
|
| 304 |
-
enum: ['Chinese', 'English']
|
| 305 |
-
description: Updated language.
|
| 306 |
-
chunk_method:
|
| 307 |
-
type: string
|
| 308 |
-
enum: ["naive", "manual", "qa", "table", "paper", "book", "laws",
|
| 309 |
-
"presentation", "picture", "one", "knowledge_graph", "email"]
|
| 310 |
-
description: Updated chunking method.
|
| 311 |
-
parser_config:
|
| 312 |
-
type: object
|
| 313 |
-
description: Updated parser configuration.
|
| 314 |
-
responses:
|
| 315 |
-
200:
|
| 316 |
-
description: Successful operation.
|
| 317 |
-
schema:
|
| 318 |
-
type: object
|
| 319 |
-
"""
|
| 320 |
-
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 321 |
-
return get_error_data_result(
|
| 322 |
-
req = request.json
|
| 323 |
-
e, t = TenantService.get_by_id(tenant_id)
|
| 324 |
-
invalid_keys = {"id", "embd_id", "chunk_num", "doc_num", "parser_id"}
|
| 325 |
-
if any(key in req for key in invalid_keys):
|
| 326 |
-
return get_error_data_result(
|
| 327 |
-
permission = req.get("permission")
|
| 328 |
-
language = req.get("language")
|
| 329 |
-
chunk_method = req.get("chunk_method")
|
| 330 |
-
parser_config = req.get("parser_config")
|
| 331 |
-
valid_permission = ["me", "team"]
|
| 332 |
-
valid_language = ["Chinese", "English"]
|
| 333 |
-
valid_chunk_method = [
|
| 334 |
-
"naive",
|
| 335 |
-
"manual",
|
| 336 |
-
"qa",
|
| 337 |
-
"table",
|
| 338 |
-
"paper",
|
| 339 |
-
"book",
|
| 340 |
-
"laws",
|
| 341 |
-
"presentation",
|
| 342 |
-
"picture",
|
| 343 |
-
"one",
|
| 344 |
-
"knowledge_graph",
|
| 345 |
-
"email",
|
| 346 |
-
]
|
| 347 |
-
check_validation = valid(
|
| 348 |
-
permission,
|
| 349 |
-
valid_permission,
|
| 350 |
-
language,
|
| 351 |
-
valid_language,
|
| 352 |
-
chunk_method,
|
| 353 |
-
valid_chunk_method,
|
| 354 |
-
)
|
| 355 |
-
if check_validation:
|
| 356 |
-
return check_validation
|
| 357 |
-
if "tenant_id" in req:
|
| 358 |
-
if req["tenant_id"] != tenant_id:
|
| 359 |
-
return get_error_data_result(
|
| 360 |
-
e, kb = KnowledgebaseService.get_by_id(dataset_id)
|
| 361 |
-
if "parser_config" in req:
|
| 362 |
-
temp_dict = kb.parser_config
|
| 363 |
-
temp_dict.update(req["parser_config"])
|
| 364 |
-
req["parser_config"] = temp_dict
|
| 365 |
-
if "chunk_count" in req:
|
| 366 |
-
if req["chunk_count"] != kb.chunk_num:
|
| 367 |
-
return get_error_data_result(
|
| 368 |
-
req.pop("chunk_count")
|
| 369 |
-
if "document_count" in req:
|
| 370 |
-
if req["document_count"] != kb.doc_num:
|
| 371 |
-
return get_error_data_result(
|
| 372 |
-
req.pop("document_count")
|
| 373 |
-
if "chunk_method" in req:
|
| 374 |
-
if kb.chunk_num != 0 and req["chunk_method"] != kb.parser_id:
|
| 375 |
-
return get_error_data_result(
|
| 376 |
-
|
| 377 |
-
)
|
| 378 |
-
req["parser_id"] = req.pop("chunk_method")
|
| 379 |
-
if req["parser_id"] != kb.parser_id:
|
| 380 |
-
if not req.get("parser_config"):
|
| 381 |
-
req["parser_config"] = get_parser_config(chunk_method, parser_config)
|
| 382 |
-
if "embedding_model" in req:
|
| 383 |
-
if kb.chunk_num != 0 and req["embedding_model"] != kb.embd_id:
|
| 384 |
-
return get_error_data_result(
|
| 385 |
-
|
| 386 |
-
)
|
| 387 |
-
if not req.get("embedding_model"):
|
| 388 |
-
return get_error_data_result("`embedding_model` can't be empty")
|
| 389 |
-
valid_embedding_models = [
|
| 390 |
-
"BAAI/bge-large-zh-v1.5",
|
| 391 |
-
"BAAI/bge-base-en-v1.5",
|
| 392 |
-
"BAAI/bge-large-en-v1.5",
|
| 393 |
-
"BAAI/bge-small-en-v1.5",
|
| 394 |
-
"BAAI/bge-small-zh-v1.5",
|
| 395 |
-
"jinaai/jina-embeddings-v2-base-en",
|
| 396 |
-
"jinaai/jina-embeddings-v2-small-en",
|
| 397 |
-
"nomic-ai/nomic-embed-text-v1.5",
|
| 398 |
-
"sentence-transformers/all-MiniLM-L6-v2",
|
| 399 |
-
"text-embedding-v2",
|
| 400 |
-
"text-embedding-v3",
|
| 401 |
-
"maidalun1020/bce-embedding-base_v1",
|
| 402 |
-
]
|
| 403 |
-
embd_model = LLMService.query(
|
| 404 |
-
llm_name=req["embedding_model"], model_type="embedding"
|
| 405 |
-
)
|
| 406 |
-
if not embd_model:
|
| 407 |
-
return get_error_data_result(
|
| 408 |
-
f"`embedding_model` {req.get('embedding_model')} doesn't exist"
|
| 409 |
-
)
|
| 410 |
-
if embd_model:
|
| 411 |
-
if req[
|
| 412 |
-
"embedding_model"
|
| 413 |
-
] not in valid_embedding_models and not TenantLLMService.query(
|
| 414 |
-
tenant_id=tenant_id,
|
| 415 |
-
model_type="embedding",
|
| 416 |
-
llm_name=req.get("embedding_model"),
|
| 417 |
-
):
|
| 418 |
-
return get_error_data_result(
|
| 419 |
-
f"`embedding_model` {req.get('embedding_model')} doesn't exist"
|
| 420 |
-
)
|
| 421 |
-
req["embd_id"] = req.pop("embedding_model")
|
| 422 |
-
if "name" in req:
|
| 423 |
-
req["name"] = req["name"].strip()
|
| 424 |
-
if (
|
| 425 |
-
req["name"].lower() != kb.name.lower()
|
| 426 |
-
and len(
|
| 427 |
-
KnowledgebaseService.query(
|
| 428 |
-
name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value
|
| 429 |
-
)
|
| 430 |
-
)
|
| 431 |
-
> 0
|
| 432 |
-
):
|
| 433 |
-
return get_error_data_result(
|
| 434 |
-
|
| 435 |
-
)
|
| 436 |
-
if not KnowledgebaseService.update_by_id(kb.id, req):
|
| 437 |
-
return get_error_data_result(
|
| 438 |
-
return get_result(
|
| 439 |
-
|
| 440 |
-
|
| 441 |
-
@manager.route("/datasets", methods=["GET"])
|
| 442 |
-
@token_required
|
| 443 |
-
def list(tenant_id):
|
| 444 |
-
"""
|
| 445 |
-
List datasets.
|
| 446 |
-
---
|
| 447 |
-
tags:
|
| 448 |
-
- Datasets
|
| 449 |
-
security:
|
| 450 |
-
- ApiKeyAuth: []
|
| 451 |
-
parameters:
|
| 452 |
-
- in: query
|
| 453 |
-
name: id
|
| 454 |
-
type: string
|
| 455 |
-
required: false
|
| 456 |
-
description: Dataset ID to filter.
|
| 457 |
-
- in: query
|
| 458 |
-
name: name
|
| 459 |
-
type: string
|
| 460 |
-
required: false
|
| 461 |
-
description: Dataset name to filter.
|
| 462 |
-
- in: query
|
| 463 |
-
name: page
|
| 464 |
-
type: integer
|
| 465 |
-
required: false
|
| 466 |
-
default: 1
|
| 467 |
-
description: Page number.
|
| 468 |
-
- in: query
|
| 469 |
-
name: page_size
|
| 470 |
-
type: integer
|
| 471 |
-
required: false
|
| 472 |
-
default: 1024
|
| 473 |
-
description: Number of items per page.
|
| 474 |
-
- in: query
|
| 475 |
-
name: orderby
|
| 476 |
-
type: string
|
| 477 |
-
required: false
|
| 478 |
-
default: "create_time"
|
| 479 |
-
description: Field to order by.
|
| 480 |
-
- in: query
|
| 481 |
-
name: desc
|
| 482 |
-
type: boolean
|
| 483 |
-
required: false
|
| 484 |
-
default: true
|
| 485 |
-
description: Order in descending.
|
| 486 |
-
- in: header
|
| 487 |
-
name: Authorization
|
| 488 |
-
type: string
|
| 489 |
-
required: true
|
| 490 |
-
description: Bearer token for authentication.
|
| 491 |
-
responses:
|
| 492 |
-
200:
|
| 493 |
-
description: Successful operation.
|
| 494 |
-
schema:
|
| 495 |
-
type: array
|
| 496 |
-
items:
|
| 497 |
-
type: object
|
| 498 |
-
"""
|
| 499 |
-
id = request.args.get("id")
|
| 500 |
-
name = request.args.get("name")
|
| 501 |
-
kbs = KnowledgebaseService.query(id=id, name=name, status=1)
|
| 502 |
-
if not kbs:
|
| 503 |
-
return get_error_data_result(
|
| 504 |
-
page_number = int(request.args.get("page", 1))
|
| 505 |
-
items_per_page = int(request.args.get("page_size", 1024))
|
| 506 |
-
orderby = request.args.get("orderby", "create_time")
|
| 507 |
-
if request.args.get("desc") == "False" or request.args.get("desc") == "false":
|
| 508 |
-
desc = False
|
| 509 |
-
else:
|
| 510 |
-
desc = True
|
| 511 |
-
tenants = TenantService.get_joined_tenants_by_user_id(tenant_id)
|
| 512 |
-
kbs = KnowledgebaseService.get_list(
|
| 513 |
-
[m["tenant_id"] for m in tenants],
|
| 514 |
-
tenant_id,
|
| 515 |
-
page_number,
|
| 516 |
-
items_per_page,
|
| 517 |
-
orderby,
|
| 518 |
-
desc,
|
| 519 |
-
id,
|
| 520 |
-
name,
|
| 521 |
-
)
|
| 522 |
-
renamed_list = []
|
| 523 |
-
for kb in kbs:
|
| 524 |
-
key_mapping = {
|
| 525 |
-
"chunk_num": "chunk_count",
|
| 526 |
-
"doc_num": "document_count",
|
| 527 |
-
"parser_id": "chunk_method",
|
| 528 |
-
"embd_id": "embedding_model",
|
| 529 |
-
}
|
| 530 |
-
renamed_data = {}
|
| 531 |
-
for key, value in kb.items():
|
| 532 |
-
new_key = key_mapping.get(key, key)
|
| 533 |
-
renamed_data[new_key] = value
|
| 534 |
-
renamed_list.append(renamed_data)
|
| 535 |
-
return get_result(data=renamed_list)
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
|
| 17 |
+
from flask import request
|
| 18 |
+
from api.db import StatusEnum, FileSource
|
| 19 |
+
from api.db.db_models import File
|
| 20 |
+
from api.db.services.document_service import DocumentService
|
| 21 |
+
from api.db.services.file2document_service import File2DocumentService
|
| 22 |
+
from api.db.services.file_service import FileService
|
| 23 |
+
from api.db.services.knowledgebase_service import KnowledgebaseService
|
| 24 |
+
from api.db.services.llm_service import TenantLLMService, LLMService
|
| 25 |
+
from api.db.services.user_service import TenantService
|
| 26 |
+
from api.settings import RetCode
|
| 27 |
+
from api.utils import get_uuid
|
| 28 |
+
from api.utils.api_utils import (
|
| 29 |
+
get_result,
|
| 30 |
+
token_required,
|
| 31 |
+
get_error_data_result,
|
| 32 |
+
valid,
|
| 33 |
+
get_parser_config,
|
| 34 |
+
)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
@manager.route("/datasets", methods=["POST"])
|
| 38 |
+
@token_required
|
| 39 |
+
def create(tenant_id):
|
| 40 |
+
"""
|
| 41 |
+
Create a new dataset.
|
| 42 |
+
---
|
| 43 |
+
tags:
|
| 44 |
+
- Datasets
|
| 45 |
+
security:
|
| 46 |
+
- ApiKeyAuth: []
|
| 47 |
+
parameters:
|
| 48 |
+
- in: header
|
| 49 |
+
name: Authorization
|
| 50 |
+
type: string
|
| 51 |
+
required: true
|
| 52 |
+
description: Bearer token for authentication.
|
| 53 |
+
- in: body
|
| 54 |
+
name: body
|
| 55 |
+
description: Dataset creation parameters.
|
| 56 |
+
required: true
|
| 57 |
+
schema:
|
| 58 |
+
type: object
|
| 59 |
+
required:
|
| 60 |
+
- name
|
| 61 |
+
properties:
|
| 62 |
+
name:
|
| 63 |
+
type: string
|
| 64 |
+
description: Name of the dataset.
|
| 65 |
+
permission:
|
| 66 |
+
type: string
|
| 67 |
+
enum: ['me', 'team']
|
| 68 |
+
description: Dataset permission.
|
| 69 |
+
language:
|
| 70 |
+
type: string
|
| 71 |
+
enum: ['Chinese', 'English']
|
| 72 |
+
description: Language of the dataset.
|
| 73 |
+
chunk_method:
|
| 74 |
+
type: string
|
| 75 |
+
enum: ["naive", "manual", "qa", "table", "paper", "book", "laws",
|
| 76 |
+
"presentation", "picture", "one", "knowledge_graph", "email"]
|
| 77 |
+
description: Chunking method.
|
| 78 |
+
parser_config:
|
| 79 |
+
type: object
|
| 80 |
+
description: Parser configuration.
|
| 81 |
+
responses:
|
| 82 |
+
200:
|
| 83 |
+
description: Successful operation.
|
| 84 |
+
schema:
|
| 85 |
+
type: object
|
| 86 |
+
properties:
|
| 87 |
+
data:
|
| 88 |
+
type: object
|
| 89 |
+
"""
|
| 90 |
+
req = request.json
|
| 91 |
+
e, t = TenantService.get_by_id(tenant_id)
|
| 92 |
+
permission = req.get("permission")
|
| 93 |
+
language = req.get("language")
|
| 94 |
+
chunk_method = req.get("chunk_method")
|
| 95 |
+
parser_config = req.get("parser_config")
|
| 96 |
+
valid_permission = ["me", "team"]
|
| 97 |
+
valid_language = ["Chinese", "English"]
|
| 98 |
+
valid_chunk_method = [
|
| 99 |
+
"naive",
|
| 100 |
+
"manual",
|
| 101 |
+
"qa",
|
| 102 |
+
"table",
|
| 103 |
+
"paper",
|
| 104 |
+
"book",
|
| 105 |
+
"laws",
|
| 106 |
+
"presentation",
|
| 107 |
+
"picture",
|
| 108 |
+
"one",
|
| 109 |
+
"knowledge_graph",
|
| 110 |
+
"email",
|
| 111 |
+
]
|
| 112 |
+
check_validation = valid(
|
| 113 |
+
permission,
|
| 114 |
+
valid_permission,
|
| 115 |
+
language,
|
| 116 |
+
valid_language,
|
| 117 |
+
chunk_method,
|
| 118 |
+
valid_chunk_method,
|
| 119 |
+
)
|
| 120 |
+
if check_validation:
|
| 121 |
+
return check_validation
|
| 122 |
+
req["parser_config"] = get_parser_config(chunk_method, parser_config)
|
| 123 |
+
if "tenant_id" in req:
|
| 124 |
+
return get_error_data_result(message="`tenant_id` must not be provided")
|
| 125 |
+
if "chunk_count" in req or "document_count" in req:
|
| 126 |
+
return get_error_data_result(
|
| 127 |
+
message="`chunk_count` or `document_count` must not be provided"
|
| 128 |
+
)
|
| 129 |
+
if "name" not in req:
|
| 130 |
+
return get_error_data_result(message="`name` is not empty!")
|
| 131 |
+
req["id"] = get_uuid()
|
| 132 |
+
req["name"] = req["name"].strip()
|
| 133 |
+
if req["name"] == "":
|
| 134 |
+
return get_error_data_result(message="`name` is not empty string!")
|
| 135 |
+
if KnowledgebaseService.query(
|
| 136 |
+
name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value
|
| 137 |
+
):
|
| 138 |
+
return get_error_data_result(
|
| 139 |
+
message="Duplicated dataset name in creating dataset."
|
| 140 |
+
)
|
| 141 |
+
req["tenant_id"] = req["created_by"] = tenant_id
|
| 142 |
+
if not req.get("embedding_model"):
|
| 143 |
+
req["embedding_model"] = t.embd_id
|
| 144 |
+
else:
|
| 145 |
+
valid_embedding_models = [
|
| 146 |
+
"BAAI/bge-large-zh-v1.5",
|
| 147 |
+
"BAAI/bge-base-en-v1.5",
|
| 148 |
+
"BAAI/bge-large-en-v1.5",
|
| 149 |
+
"BAAI/bge-small-en-v1.5",
|
| 150 |
+
"BAAI/bge-small-zh-v1.5",
|
| 151 |
+
"jinaai/jina-embeddings-v2-base-en",
|
| 152 |
+
"jinaai/jina-embeddings-v2-small-en",
|
| 153 |
+
"nomic-ai/nomic-embed-text-v1.5",
|
| 154 |
+
"sentence-transformers/all-MiniLM-L6-v2",
|
| 155 |
+
"text-embedding-v2",
|
| 156 |
+
"text-embedding-v3",
|
| 157 |
+
"maidalun1020/bce-embedding-base_v1",
|
| 158 |
+
]
|
| 159 |
+
embd_model = LLMService.query(
|
| 160 |
+
llm_name=req["embedding_model"], model_type="embedding"
|
| 161 |
+
)
|
| 162 |
+
if not embd_model:
|
| 163 |
+
return get_error_data_result(
|
| 164 |
+
f"`embedding_model` {req.get('embedding_model')} doesn't exist"
|
| 165 |
+
)
|
| 166 |
+
if embd_model:
|
| 167 |
+
if req[
|
| 168 |
+
"embedding_model"
|
| 169 |
+
] not in valid_embedding_models and not TenantLLMService.query(
|
| 170 |
+
tenant_id=tenant_id,
|
| 171 |
+
model_type="embedding",
|
| 172 |
+
llm_name=req.get("embedding_model"),
|
| 173 |
+
):
|
| 174 |
+
return get_error_data_result(
|
| 175 |
+
f"`embedding_model` {req.get('embedding_model')} doesn't exist"
|
| 176 |
+
)
|
| 177 |
+
key_mapping = {
|
| 178 |
+
"chunk_num": "chunk_count",
|
| 179 |
+
"doc_num": "document_count",
|
| 180 |
+
"parser_id": "chunk_method",
|
| 181 |
+
"embd_id": "embedding_model",
|
| 182 |
+
}
|
| 183 |
+
mapped_keys = {
|
| 184 |
+
new_key: req[old_key]
|
| 185 |
+
for new_key, old_key in key_mapping.items()
|
| 186 |
+
if old_key in req
|
| 187 |
+
}
|
| 188 |
+
req.update(mapped_keys)
|
| 189 |
+
if not KnowledgebaseService.save(**req):
|
| 190 |
+
return get_error_data_result(message="Create dataset error.(Database error)")
|
| 191 |
+
renamed_data = {}
|
| 192 |
+
e, k = KnowledgebaseService.get_by_id(req["id"])
|
| 193 |
+
for key, value in k.to_dict().items():
|
| 194 |
+
new_key = key_mapping.get(key, key)
|
| 195 |
+
renamed_data[new_key] = value
|
| 196 |
+
return get_result(data=renamed_data)
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
@manager.route("/datasets", methods=["DELETE"])
|
| 200 |
+
@token_required
|
| 201 |
+
def delete(tenant_id):
|
| 202 |
+
"""
|
| 203 |
+
Delete datasets.
|
| 204 |
+
---
|
| 205 |
+
tags:
|
| 206 |
+
- Datasets
|
| 207 |
+
security:
|
| 208 |
+
- ApiKeyAuth: []
|
| 209 |
+
parameters:
|
| 210 |
+
- in: header
|
| 211 |
+
name: Authorization
|
| 212 |
+
type: string
|
| 213 |
+
required: true
|
| 214 |
+
description: Bearer token for authentication.
|
| 215 |
+
- in: body
|
| 216 |
+
name: body
|
| 217 |
+
description: Dataset deletion parameters.
|
| 218 |
+
required: true
|
| 219 |
+
schema:
|
| 220 |
+
type: object
|
| 221 |
+
properties:
|
| 222 |
+
ids:
|
| 223 |
+
type: array
|
| 224 |
+
items:
|
| 225 |
+
type: string
|
| 226 |
+
description: List of dataset IDs to delete.
|
| 227 |
+
responses:
|
| 228 |
+
200:
|
| 229 |
+
description: Successful operation.
|
| 230 |
+
schema:
|
| 231 |
+
type: object
|
| 232 |
+
"""
|
| 233 |
+
req = request.json
|
| 234 |
+
if not req:
|
| 235 |
+
ids = None
|
| 236 |
+
else:
|
| 237 |
+
ids = req.get("ids")
|
| 238 |
+
if not ids:
|
| 239 |
+
id_list = []
|
| 240 |
+
kbs = KnowledgebaseService.query(tenant_id=tenant_id)
|
| 241 |
+
for kb in kbs:
|
| 242 |
+
id_list.append(kb.id)
|
| 243 |
+
else:
|
| 244 |
+
id_list = ids
|
| 245 |
+
for id in id_list:
|
| 246 |
+
kbs = KnowledgebaseService.query(id=id, tenant_id=tenant_id)
|
| 247 |
+
if not kbs:
|
| 248 |
+
return get_error_data_result(message=f"You don't own the dataset {id}")
|
| 249 |
+
for doc in DocumentService.query(kb_id=id):
|
| 250 |
+
if not DocumentService.remove_document(doc, tenant_id):
|
| 251 |
+
return get_error_data_result(
|
| 252 |
+
message="Remove document error.(Database error)"
|
| 253 |
+
)
|
| 254 |
+
f2d = File2DocumentService.get_by_document_id(doc.id)
|
| 255 |
+
FileService.filter_delete(
|
| 256 |
+
[
|
| 257 |
+
File.source_type == FileSource.KNOWLEDGEBASE,
|
| 258 |
+
File.id == f2d[0].file_id,
|
| 259 |
+
]
|
| 260 |
+
)
|
| 261 |
+
File2DocumentService.delete_by_document_id(doc.id)
|
| 262 |
+
if not KnowledgebaseService.delete_by_id(id):
|
| 263 |
+
return get_error_data_result(message="Delete dataset error.(Database error)")
|
| 264 |
+
return get_result(code=RetCode.SUCCESS)
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
@manager.route("/datasets/<dataset_id>", methods=["PUT"])
|
| 268 |
+
@token_required
|
| 269 |
+
def update(tenant_id, dataset_id):
|
| 270 |
+
"""
|
| 271 |
+
Update a dataset.
|
| 272 |
+
---
|
| 273 |
+
tags:
|
| 274 |
+
- Datasets
|
| 275 |
+
security:
|
| 276 |
+
- ApiKeyAuth: []
|
| 277 |
+
parameters:
|
| 278 |
+
- in: path
|
| 279 |
+
name: dataset_id
|
| 280 |
+
type: string
|
| 281 |
+
required: true
|
| 282 |
+
description: ID of the dataset to update.
|
| 283 |
+
- in: header
|
| 284 |
+
name: Authorization
|
| 285 |
+
type: string
|
| 286 |
+
required: true
|
| 287 |
+
description: Bearer token for authentication.
|
| 288 |
+
- in: body
|
| 289 |
+
name: body
|
| 290 |
+
description: Dataset update parameters.
|
| 291 |
+
required: true
|
| 292 |
+
schema:
|
| 293 |
+
type: object
|
| 294 |
+
properties:
|
| 295 |
+
name:
|
| 296 |
+
type: string
|
| 297 |
+
description: New name of the dataset.
|
| 298 |
+
permission:
|
| 299 |
+
type: string
|
| 300 |
+
enum: ['me', 'team']
|
| 301 |
+
description: Updated permission.
|
| 302 |
+
language:
|
| 303 |
+
type: string
|
| 304 |
+
enum: ['Chinese', 'English']
|
| 305 |
+
description: Updated language.
|
| 306 |
+
chunk_method:
|
| 307 |
+
type: string
|
| 308 |
+
enum: ["naive", "manual", "qa", "table", "paper", "book", "laws",
|
| 309 |
+
"presentation", "picture", "one", "knowledge_graph", "email"]
|
| 310 |
+
description: Updated chunking method.
|
| 311 |
+
parser_config:
|
| 312 |
+
type: object
|
| 313 |
+
description: Updated parser configuration.
|
| 314 |
+
responses:
|
| 315 |
+
200:
|
| 316 |
+
description: Successful operation.
|
| 317 |
+
schema:
|
| 318 |
+
type: object
|
| 319 |
+
"""
|
| 320 |
+
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 321 |
+
return get_error_data_result(message="You don't own the dataset")
|
| 322 |
+
req = request.json
|
| 323 |
+
e, t = TenantService.get_by_id(tenant_id)
|
| 324 |
+
invalid_keys = {"id", "embd_id", "chunk_num", "doc_num", "parser_id"}
|
| 325 |
+
if any(key in req for key in invalid_keys):
|
| 326 |
+
return get_error_data_result(message="The input parameters are invalid.")
|
| 327 |
+
permission = req.get("permission")
|
| 328 |
+
language = req.get("language")
|
| 329 |
+
chunk_method = req.get("chunk_method")
|
| 330 |
+
parser_config = req.get("parser_config")
|
| 331 |
+
valid_permission = ["me", "team"]
|
| 332 |
+
valid_language = ["Chinese", "English"]
|
| 333 |
+
valid_chunk_method = [
|
| 334 |
+
"naive",
|
| 335 |
+
"manual",
|
| 336 |
+
"qa",
|
| 337 |
+
"table",
|
| 338 |
+
"paper",
|
| 339 |
+
"book",
|
| 340 |
+
"laws",
|
| 341 |
+
"presentation",
|
| 342 |
+
"picture",
|
| 343 |
+
"one",
|
| 344 |
+
"knowledge_graph",
|
| 345 |
+
"email",
|
| 346 |
+
]
|
| 347 |
+
check_validation = valid(
|
| 348 |
+
permission,
|
| 349 |
+
valid_permission,
|
| 350 |
+
language,
|
| 351 |
+
valid_language,
|
| 352 |
+
chunk_method,
|
| 353 |
+
valid_chunk_method,
|
| 354 |
+
)
|
| 355 |
+
if check_validation:
|
| 356 |
+
return check_validation
|
| 357 |
+
if "tenant_id" in req:
|
| 358 |
+
if req["tenant_id"] != tenant_id:
|
| 359 |
+
return get_error_data_result(message="Can't change `tenant_id`.")
|
| 360 |
+
e, kb = KnowledgebaseService.get_by_id(dataset_id)
|
| 361 |
+
if "parser_config" in req:
|
| 362 |
+
temp_dict = kb.parser_config
|
| 363 |
+
temp_dict.update(req["parser_config"])
|
| 364 |
+
req["parser_config"] = temp_dict
|
| 365 |
+
if "chunk_count" in req:
|
| 366 |
+
if req["chunk_count"] != kb.chunk_num:
|
| 367 |
+
return get_error_data_result(message="Can't change `chunk_count`.")
|
| 368 |
+
req.pop("chunk_count")
|
| 369 |
+
if "document_count" in req:
|
| 370 |
+
if req["document_count"] != kb.doc_num:
|
| 371 |
+
return get_error_data_result(message="Can't change `document_count`.")
|
| 372 |
+
req.pop("document_count")
|
| 373 |
+
if "chunk_method" in req:
|
| 374 |
+
if kb.chunk_num != 0 and req["chunk_method"] != kb.parser_id:
|
| 375 |
+
return get_error_data_result(
|
| 376 |
+
message="If `chunk_count` is not 0, `chunk_method` is not changeable."
|
| 377 |
+
)
|
| 378 |
+
req["parser_id"] = req.pop("chunk_method")
|
| 379 |
+
if req["parser_id"] != kb.parser_id:
|
| 380 |
+
if not req.get("parser_config"):
|
| 381 |
+
req["parser_config"] = get_parser_config(chunk_method, parser_config)
|
| 382 |
+
if "embedding_model" in req:
|
| 383 |
+
if kb.chunk_num != 0 and req["embedding_model"] != kb.embd_id:
|
| 384 |
+
return get_error_data_result(
|
| 385 |
+
message="If `chunk_count` is not 0, `embedding_model` is not changeable."
|
| 386 |
+
)
|
| 387 |
+
if not req.get("embedding_model"):
|
| 388 |
+
return get_error_data_result("`embedding_model` can't be empty")
|
| 389 |
+
valid_embedding_models = [
|
| 390 |
+
"BAAI/bge-large-zh-v1.5",
|
| 391 |
+
"BAAI/bge-base-en-v1.5",
|
| 392 |
+
"BAAI/bge-large-en-v1.5",
|
| 393 |
+
"BAAI/bge-small-en-v1.5",
|
| 394 |
+
"BAAI/bge-small-zh-v1.5",
|
| 395 |
+
"jinaai/jina-embeddings-v2-base-en",
|
| 396 |
+
"jinaai/jina-embeddings-v2-small-en",
|
| 397 |
+
"nomic-ai/nomic-embed-text-v1.5",
|
| 398 |
+
"sentence-transformers/all-MiniLM-L6-v2",
|
| 399 |
+
"text-embedding-v2",
|
| 400 |
+
"text-embedding-v3",
|
| 401 |
+
"maidalun1020/bce-embedding-base_v1",
|
| 402 |
+
]
|
| 403 |
+
embd_model = LLMService.query(
|
| 404 |
+
llm_name=req["embedding_model"], model_type="embedding"
|
| 405 |
+
)
|
| 406 |
+
if not embd_model:
|
| 407 |
+
return get_error_data_result(
|
| 408 |
+
f"`embedding_model` {req.get('embedding_model')} doesn't exist"
|
| 409 |
+
)
|
| 410 |
+
if embd_model:
|
| 411 |
+
if req[
|
| 412 |
+
"embedding_model"
|
| 413 |
+
] not in valid_embedding_models and not TenantLLMService.query(
|
| 414 |
+
tenant_id=tenant_id,
|
| 415 |
+
model_type="embedding",
|
| 416 |
+
llm_name=req.get("embedding_model"),
|
| 417 |
+
):
|
| 418 |
+
return get_error_data_result(
|
| 419 |
+
f"`embedding_model` {req.get('embedding_model')} doesn't exist"
|
| 420 |
+
)
|
| 421 |
+
req["embd_id"] = req.pop("embedding_model")
|
| 422 |
+
if "name" in req:
|
| 423 |
+
req["name"] = req["name"].strip()
|
| 424 |
+
if (
|
| 425 |
+
req["name"].lower() != kb.name.lower()
|
| 426 |
+
and len(
|
| 427 |
+
KnowledgebaseService.query(
|
| 428 |
+
name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value
|
| 429 |
+
)
|
| 430 |
+
)
|
| 431 |
+
> 0
|
| 432 |
+
):
|
| 433 |
+
return get_error_data_result(
|
| 434 |
+
message="Duplicated dataset name in updating dataset."
|
| 435 |
+
)
|
| 436 |
+
if not KnowledgebaseService.update_by_id(kb.id, req):
|
| 437 |
+
return get_error_data_result(message="Update dataset error.(Database error)")
|
| 438 |
+
return get_result(code=RetCode.SUCCESS)
|
| 439 |
+
|
| 440 |
+
|
| 441 |
+
@manager.route("/datasets", methods=["GET"])
|
| 442 |
+
@token_required
|
| 443 |
+
def list(tenant_id):
|
| 444 |
+
"""
|
| 445 |
+
List datasets.
|
| 446 |
+
---
|
| 447 |
+
tags:
|
| 448 |
+
- Datasets
|
| 449 |
+
security:
|
| 450 |
+
- ApiKeyAuth: []
|
| 451 |
+
parameters:
|
| 452 |
+
- in: query
|
| 453 |
+
name: id
|
| 454 |
+
type: string
|
| 455 |
+
required: false
|
| 456 |
+
description: Dataset ID to filter.
|
| 457 |
+
- in: query
|
| 458 |
+
name: name
|
| 459 |
+
type: string
|
| 460 |
+
required: false
|
| 461 |
+
description: Dataset name to filter.
|
| 462 |
+
- in: query
|
| 463 |
+
name: page
|
| 464 |
+
type: integer
|
| 465 |
+
required: false
|
| 466 |
+
default: 1
|
| 467 |
+
description: Page number.
|
| 468 |
+
- in: query
|
| 469 |
+
name: page_size
|
| 470 |
+
type: integer
|
| 471 |
+
required: false
|
| 472 |
+
default: 1024
|
| 473 |
+
description: Number of items per page.
|
| 474 |
+
- in: query
|
| 475 |
+
name: orderby
|
| 476 |
+
type: string
|
| 477 |
+
required: false
|
| 478 |
+
default: "create_time"
|
| 479 |
+
description: Field to order by.
|
| 480 |
+
- in: query
|
| 481 |
+
name: desc
|
| 482 |
+
type: boolean
|
| 483 |
+
required: false
|
| 484 |
+
default: true
|
| 485 |
+
description: Order in descending.
|
| 486 |
+
- in: header
|
| 487 |
+
name: Authorization
|
| 488 |
+
type: string
|
| 489 |
+
required: true
|
| 490 |
+
description: Bearer token for authentication.
|
| 491 |
+
responses:
|
| 492 |
+
200:
|
| 493 |
+
description: Successful operation.
|
| 494 |
+
schema:
|
| 495 |
+
type: array
|
| 496 |
+
items:
|
| 497 |
+
type: object
|
| 498 |
+
"""
|
| 499 |
+
id = request.args.get("id")
|
| 500 |
+
name = request.args.get("name")
|
| 501 |
+
kbs = KnowledgebaseService.query(id=id, name=name, status=1)
|
| 502 |
+
if not kbs:
|
| 503 |
+
return get_error_data_result(message="The dataset doesn't exist")
|
| 504 |
+
page_number = int(request.args.get("page", 1))
|
| 505 |
+
items_per_page = int(request.args.get("page_size", 1024))
|
| 506 |
+
orderby = request.args.get("orderby", "create_time")
|
| 507 |
+
if request.args.get("desc") == "False" or request.args.get("desc") == "false":
|
| 508 |
+
desc = False
|
| 509 |
+
else:
|
| 510 |
+
desc = True
|
| 511 |
+
tenants = TenantService.get_joined_tenants_by_user_id(tenant_id)
|
| 512 |
+
kbs = KnowledgebaseService.get_list(
|
| 513 |
+
[m["tenant_id"] for m in tenants],
|
| 514 |
+
tenant_id,
|
| 515 |
+
page_number,
|
| 516 |
+
items_per_page,
|
| 517 |
+
orderby,
|
| 518 |
+
desc,
|
| 519 |
+
id,
|
| 520 |
+
name,
|
| 521 |
+
)
|
| 522 |
+
renamed_list = []
|
| 523 |
+
for kb in kbs:
|
| 524 |
+
key_mapping = {
|
| 525 |
+
"chunk_num": "chunk_count",
|
| 526 |
+
"doc_num": "document_count",
|
| 527 |
+
"parser_id": "chunk_method",
|
| 528 |
+
"embd_id": "embedding_model",
|
| 529 |
+
}
|
| 530 |
+
renamed_data = {}
|
| 531 |
+
for key, value in kb.items():
|
| 532 |
+
new_key = key_mapping.get(key, key)
|
| 533 |
+
renamed_data[new_key] = value
|
| 534 |
+
renamed_list.append(renamed_data)
|
| 535 |
+
return get_result(data=renamed_list)
|
api/apps/sdk/dify_retrieval.py
CHANGED
|
@@ -37,10 +37,10 @@ def retrieval(tenant_id):
|
|
| 37 |
|
| 38 |
e, kb = KnowledgebaseService.get_by_id(kb_id)
|
| 39 |
if not e:
|
| 40 |
-
return build_error_result(
|
| 41 |
|
| 42 |
if kb.tenant_id != tenant_id:
|
| 43 |
-
return build_error_result(
|
| 44 |
|
| 45 |
embd_mdl = LLMBundle(kb.tenant_id, LLMType.EMBEDDING.value, llm_name=kb.embd_id)
|
| 46 |
|
|
@@ -71,7 +71,7 @@ def retrieval(tenant_id):
|
|
| 71 |
except Exception as e:
|
| 72 |
if str(e).find("not_found") > 0:
|
| 73 |
return build_error_result(
|
| 74 |
-
|
| 75 |
-
|
| 76 |
)
|
| 77 |
-
return build_error_result(
|
|
|
|
| 37 |
|
| 38 |
e, kb = KnowledgebaseService.get_by_id(kb_id)
|
| 39 |
if not e:
|
| 40 |
+
return build_error_result(message="Knowledgebase not found!", code=RetCode.NOT_FOUND)
|
| 41 |
|
| 42 |
if kb.tenant_id != tenant_id:
|
| 43 |
+
return build_error_result(message="Knowledgebase not found!", code=RetCode.NOT_FOUND)
|
| 44 |
|
| 45 |
embd_mdl = LLMBundle(kb.tenant_id, LLMType.EMBEDDING.value, llm_name=kb.embd_id)
|
| 46 |
|
|
|
|
| 71 |
except Exception as e:
|
| 72 |
if str(e).find("not_found") > 0:
|
| 73 |
return build_error_result(
|
| 74 |
+
message='No chunk found! Check the chunk status please!',
|
| 75 |
+
code=RetCode.NOT_FOUND
|
| 76 |
)
|
| 77 |
+
return build_error_result(message=str(e), code=RetCode.SERVER_ERROR)
|
api/apps/sdk/doc.py
CHANGED
|
@@ -110,13 +110,13 @@ def upload(dataset_id, tenant_id):
|
|
| 110 |
"""
|
| 111 |
if "file" not in request.files:
|
| 112 |
return get_error_data_result(
|
| 113 |
-
|
| 114 |
)
|
| 115 |
file_objs = request.files.getlist("file")
|
| 116 |
for file_obj in file_objs:
|
| 117 |
if file_obj.filename == "":
|
| 118 |
return get_result(
|
| 119 |
-
|
| 120 |
)
|
| 121 |
# total size
|
| 122 |
total_size = 0
|
|
@@ -127,15 +127,15 @@ def upload(dataset_id, tenant_id):
|
|
| 127 |
MAX_TOTAL_FILE_SIZE = 10 * 1024 * 1024
|
| 128 |
if total_size > MAX_TOTAL_FILE_SIZE:
|
| 129 |
return get_result(
|
| 130 |
-
|
| 131 |
-
|
| 132 |
)
|
| 133 |
e, kb = KnowledgebaseService.get_by_id(dataset_id)
|
| 134 |
if not e:
|
| 135 |
raise LookupError(f"Can't find the dataset with ID {dataset_id}!")
|
| 136 |
err, files = FileService.upload_document(kb, file_objs, tenant_id)
|
| 137 |
if err:
|
| 138 |
-
return get_result(
|
| 139 |
# rename key's name
|
| 140 |
renamed_doc_list = []
|
| 141 |
for file in files:
|
|
@@ -205,20 +205,20 @@ def update_doc(tenant_id, dataset_id, document_id):
|
|
| 205 |
"""
|
| 206 |
req = request.json
|
| 207 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 208 |
-
return get_error_data_result(
|
| 209 |
doc = DocumentService.query(kb_id=dataset_id, id=document_id)
|
| 210 |
if not doc:
|
| 211 |
-
return get_error_data_result(
|
| 212 |
doc = doc[0]
|
| 213 |
if "chunk_count" in req:
|
| 214 |
if req["chunk_count"] != doc.chunk_num:
|
| 215 |
-
return get_error_data_result(
|
| 216 |
if "token_count" in req:
|
| 217 |
if req["token_count"] != doc.token_num:
|
| 218 |
-
return get_error_data_result(
|
| 219 |
if "progress" in req:
|
| 220 |
if req["progress"] != doc.progress:
|
| 221 |
-
return get_error_data_result(
|
| 222 |
|
| 223 |
if "name" in req and req["name"] != doc.name:
|
| 224 |
if (
|
|
@@ -226,16 +226,16 @@ def update_doc(tenant_id, dataset_id, document_id):
|
|
| 226 |
!= pathlib.Path(doc.name.lower()).suffix
|
| 227 |
):
|
| 228 |
return get_result(
|
| 229 |
-
|
| 230 |
-
|
| 231 |
)
|
| 232 |
for d in DocumentService.query(name=req["name"], kb_id=doc.kb_id):
|
| 233 |
if d.name == req["name"]:
|
| 234 |
return get_error_data_result(
|
| 235 |
-
|
| 236 |
)
|
| 237 |
if not DocumentService.update_by_id(document_id, {"name": req["name"]}):
|
| 238 |
-
return get_error_data_result(
|
| 239 |
|
| 240 |
informs = File2DocumentService.get_by_document_id(document_id)
|
| 241 |
if informs:
|
|
@@ -266,7 +266,7 @@ def update_doc(tenant_id, dataset_id, document_id):
|
|
| 266 |
return get_result()
|
| 267 |
|
| 268 |
if doc.type == FileType.VISUAL or re.search(r"\.(ppt|pptx|pages)$", doc.name):
|
| 269 |
-
return get_error_data_result(
|
| 270 |
|
| 271 |
e = DocumentService.update_by_id(
|
| 272 |
doc.id,
|
|
@@ -278,7 +278,7 @@ def update_doc(tenant_id, dataset_id, document_id):
|
|
| 278 |
},
|
| 279 |
)
|
| 280 |
if not e:
|
| 281 |
-
return get_error_data_result(
|
| 282 |
req["parser_config"] = get_parser_config(
|
| 283 |
req["chunk_method"], req.get("parser_config")
|
| 284 |
)
|
|
@@ -292,7 +292,7 @@ def update_doc(tenant_id, dataset_id, document_id):
|
|
| 292 |
doc.process_duation * -1,
|
| 293 |
)
|
| 294 |
if not e:
|
| 295 |
-
return get_error_data_result(
|
| 296 |
ELASTICSEARCH.deleteByQuery(
|
| 297 |
Q("match", doc_id=doc.id), idxnm=search.index_name(tenant_id)
|
| 298 |
)
|
|
@@ -339,11 +339,11 @@ def download(tenant_id, dataset_id, document_id):
|
|
| 339 |
type: object
|
| 340 |
"""
|
| 341 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 342 |
-
return get_error_data_result(
|
| 343 |
doc = DocumentService.query(kb_id=dataset_id, id=document_id)
|
| 344 |
if not doc:
|
| 345 |
return get_error_data_result(
|
| 346 |
-
|
| 347 |
)
|
| 348 |
# The process of downloading
|
| 349 |
doc_id, doc_location = File2DocumentService.get_storage_address(
|
|
@@ -451,13 +451,13 @@ def list_docs(dataset_id, tenant_id):
|
|
| 451 |
description: Processing status.
|
| 452 |
"""
|
| 453 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 454 |
-
return get_error_data_result(
|
| 455 |
id = request.args.get("id")
|
| 456 |
name = request.args.get("name")
|
| 457 |
if not DocumentService.query(id=id, kb_id=dataset_id):
|
| 458 |
-
return get_error_data_result(
|
| 459 |
if not DocumentService.query(name=name, kb_id=dataset_id):
|
| 460 |
-
return get_error_data_result(
|
| 461 |
page = int(request.args.get("page", 1))
|
| 462 |
keywords = request.args.get("keywords", "")
|
| 463 |
page_size = int(request.args.get("page_size", 1024))
|
|
@@ -538,7 +538,7 @@ def delete(tenant_id, dataset_id):
|
|
| 538 |
type: object
|
| 539 |
"""
|
| 540 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 541 |
-
return get_error_data_result(
|
| 542 |
req = request.json
|
| 543 |
if not req:
|
| 544 |
doc_ids = None
|
|
@@ -559,16 +559,16 @@ def delete(tenant_id, dataset_id):
|
|
| 559 |
try:
|
| 560 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 561 |
if not e:
|
| 562 |
-
return get_error_data_result(
|
| 563 |
tenant_id = DocumentService.get_tenant_id(doc_id)
|
| 564 |
if not tenant_id:
|
| 565 |
-
return get_error_data_result(
|
| 566 |
|
| 567 |
b, n = File2DocumentService.get_storage_address(doc_id=doc_id)
|
| 568 |
|
| 569 |
if not DocumentService.remove_document(doc, tenant_id):
|
| 570 |
return get_error_data_result(
|
| 571 |
-
|
| 572 |
)
|
| 573 |
|
| 574 |
f2d = File2DocumentService.get_by_document_id(doc_id)
|
|
@@ -585,7 +585,7 @@ def delete(tenant_id, dataset_id):
|
|
| 585 |
errors += str(e)
|
| 586 |
|
| 587 |
if errors:
|
| 588 |
-
return get_result(
|
| 589 |
|
| 590 |
return get_result()
|
| 591 |
|
|
@@ -630,14 +630,14 @@ def parse(tenant_id, dataset_id):
|
|
| 630 |
type: object
|
| 631 |
"""
|
| 632 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 633 |
-
return get_error_data_result(
|
| 634 |
req = request.json
|
| 635 |
if not req.get("document_ids"):
|
| 636 |
return get_error_data_result("`document_ids` is required")
|
| 637 |
for id in req["document_ids"]:
|
| 638 |
doc = DocumentService.query(id=id, kb_id=dataset_id)
|
| 639 |
if not doc:
|
| 640 |
-
return get_error_data_result(
|
| 641 |
if doc[0].progress != 0.0:
|
| 642 |
return get_error_data_result(
|
| 643 |
"Can't stop parsing document with progress at 0 or 100"
|
|
@@ -699,14 +699,14 @@ def stop_parsing(tenant_id, dataset_id):
|
|
| 699 |
type: object
|
| 700 |
"""
|
| 701 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 702 |
-
return get_error_data_result(
|
| 703 |
req = request.json
|
| 704 |
if not req.get("document_ids"):
|
| 705 |
return get_error_data_result("`document_ids` is required")
|
| 706 |
for id in req["document_ids"]:
|
| 707 |
doc = DocumentService.query(id=id, kb_id=dataset_id)
|
| 708 |
if not doc:
|
| 709 |
-
return get_error_data_result(
|
| 710 |
if int(doc[0].progress) == 1 or int(doc[0].progress) == 0:
|
| 711 |
return get_error_data_result(
|
| 712 |
"Can't stop parsing document with progress at 0 or 1"
|
|
@@ -793,11 +793,11 @@ def list_chunks(tenant_id, dataset_id, document_id):
|
|
| 793 |
description: Document details.
|
| 794 |
"""
|
| 795 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 796 |
-
return get_error_data_result(
|
| 797 |
doc = DocumentService.query(id=document_id, kb_id=dataset_id)
|
| 798 |
if not doc:
|
| 799 |
return get_error_data_result(
|
| 800 |
-
|
| 801 |
)
|
| 802 |
doc = doc[0]
|
| 803 |
req = request.args
|
|
@@ -965,16 +965,16 @@ def add_chunk(tenant_id, dataset_id, document_id):
|
|
| 965 |
description: Important keywords.
|
| 966 |
"""
|
| 967 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 968 |
-
return get_error_data_result(
|
| 969 |
doc = DocumentService.query(id=document_id, kb_id=dataset_id)
|
| 970 |
if not doc:
|
| 971 |
return get_error_data_result(
|
| 972 |
-
|
| 973 |
)
|
| 974 |
doc = doc[0]
|
| 975 |
req = request.json
|
| 976 |
if not req.get("content"):
|
| 977 |
-
return get_error_data_result(
|
| 978 |
if "important_keywords" in req:
|
| 979 |
if type(req["important_keywords"]) != list:
|
| 980 |
return get_error_data_result(
|
|
@@ -1078,11 +1078,11 @@ def rm_chunk(tenant_id, dataset_id, document_id):
|
|
| 1078 |
type: object
|
| 1079 |
"""
|
| 1080 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 1081 |
-
return get_error_data_result(
|
| 1082 |
doc = DocumentService.query(id=document_id, kb_id=dataset_id)
|
| 1083 |
if not doc:
|
| 1084 |
return get_error_data_result(
|
| 1085 |
-
|
| 1086 |
)
|
| 1087 |
doc = doc[0]
|
| 1088 |
req = request.json
|
|
@@ -1104,7 +1104,7 @@ def rm_chunk(tenant_id, dataset_id, document_id):
|
|
| 1104 |
if not ELASTICSEARCH.deleteByQuery(
|
| 1105 |
Q("ids", values=chunk_list), search.index_name(tenant_id)
|
| 1106 |
):
|
| 1107 |
-
return get_error_data_result(
|
| 1108 |
deleted_chunk_ids = chunk_list
|
| 1109 |
chunk_number = len(deleted_chunk_ids)
|
| 1110 |
DocumentService.decrement_chunk_num(doc.id, doc.kb_id, 1, chunk_number, 0)
|
|
@@ -1170,14 +1170,14 @@ def update_chunk(tenant_id, dataset_id, document_id, chunk_id):
|
|
| 1170 |
"""
|
| 1171 |
try:
|
| 1172 |
res = ELASTICSEARCH.get(chunk_id, search.index_name(tenant_id))
|
| 1173 |
-
except Exception
|
| 1174 |
return get_error_data_result(f"Can't find this chunk {chunk_id}")
|
| 1175 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 1176 |
-
return get_error_data_result(
|
| 1177 |
doc = DocumentService.query(id=document_id, kb_id=dataset_id)
|
| 1178 |
if not doc:
|
| 1179 |
return get_error_data_result(
|
| 1180 |
-
|
| 1181 |
)
|
| 1182 |
doc = doc[0]
|
| 1183 |
query = {
|
|
@@ -1210,7 +1210,7 @@ def update_chunk(tenant_id, dataset_id, document_id, chunk_id):
|
|
| 1210 |
arr = [t for t in re.split(r"[\n\t]", d["content_with_weight"]) if len(t) > 1]
|
| 1211 |
if len(arr) != 2:
|
| 1212 |
return get_error_data_result(
|
| 1213 |
-
|
| 1214 |
)
|
| 1215 |
q, a = rmPrefix(arr[0]), rmPrefix(arr[1])
|
| 1216 |
d = beAdoc(
|
|
@@ -1317,8 +1317,8 @@ def retrieval_test(tenant_id):
|
|
| 1317 |
embd_nms = list(set([kb.embd_id for kb in kbs]))
|
| 1318 |
if len(embd_nms) != 1:
|
| 1319 |
return get_result(
|
| 1320 |
-
|
| 1321 |
-
|
| 1322 |
)
|
| 1323 |
if "question" not in req:
|
| 1324 |
return get_error_data_result("`question` is required.")
|
|
@@ -1344,7 +1344,7 @@ def retrieval_test(tenant_id):
|
|
| 1344 |
try:
|
| 1345 |
e, kb = KnowledgebaseService.get_by_id(kb_ids[0])
|
| 1346 |
if not e:
|
| 1347 |
-
return get_error_data_result(
|
| 1348 |
embd_mdl = TenantLLMService.model_instance(
|
| 1349 |
kb.tenant_id, LLMType.EMBEDDING.value, llm_name=kb.embd_id
|
| 1350 |
)
|
|
@@ -1398,7 +1398,7 @@ def retrieval_test(tenant_id):
|
|
| 1398 |
except Exception as e:
|
| 1399 |
if str(e).find("not_found") > 0:
|
| 1400 |
return get_result(
|
| 1401 |
-
|
| 1402 |
-
|
| 1403 |
)
|
| 1404 |
return server_error_response(e)
|
|
|
|
| 110 |
"""
|
| 111 |
if "file" not in request.files:
|
| 112 |
return get_error_data_result(
|
| 113 |
+
message="No file part!", code=RetCode.ARGUMENT_ERROR
|
| 114 |
)
|
| 115 |
file_objs = request.files.getlist("file")
|
| 116 |
for file_obj in file_objs:
|
| 117 |
if file_obj.filename == "":
|
| 118 |
return get_result(
|
| 119 |
+
message="No file selected!", code=RetCode.ARGUMENT_ERROR
|
| 120 |
)
|
| 121 |
# total size
|
| 122 |
total_size = 0
|
|
|
|
| 127 |
MAX_TOTAL_FILE_SIZE = 10 * 1024 * 1024
|
| 128 |
if total_size > MAX_TOTAL_FILE_SIZE:
|
| 129 |
return get_result(
|
| 130 |
+
message=f"Total file size exceeds 10MB limit! ({total_size / (1024 * 1024):.2f} MB)",
|
| 131 |
+
code=RetCode.ARGUMENT_ERROR,
|
| 132 |
)
|
| 133 |
e, kb = KnowledgebaseService.get_by_id(dataset_id)
|
| 134 |
if not e:
|
| 135 |
raise LookupError(f"Can't find the dataset with ID {dataset_id}!")
|
| 136 |
err, files = FileService.upload_document(kb, file_objs, tenant_id)
|
| 137 |
if err:
|
| 138 |
+
return get_result(message="\n".join(err), code=RetCode.SERVER_ERROR)
|
| 139 |
# rename key's name
|
| 140 |
renamed_doc_list = []
|
| 141 |
for file in files:
|
|
|
|
| 205 |
"""
|
| 206 |
req = request.json
|
| 207 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 208 |
+
return get_error_data_result(message="You don't own the dataset.")
|
| 209 |
doc = DocumentService.query(kb_id=dataset_id, id=document_id)
|
| 210 |
if not doc:
|
| 211 |
+
return get_error_data_result(message="The dataset doesn't own the document.")
|
| 212 |
doc = doc[0]
|
| 213 |
if "chunk_count" in req:
|
| 214 |
if req["chunk_count"] != doc.chunk_num:
|
| 215 |
+
return get_error_data_result(message="Can't change `chunk_count`.")
|
| 216 |
if "token_count" in req:
|
| 217 |
if req["token_count"] != doc.token_num:
|
| 218 |
+
return get_error_data_result(message="Can't change `token_count`.")
|
| 219 |
if "progress" in req:
|
| 220 |
if req["progress"] != doc.progress:
|
| 221 |
+
return get_error_data_result(message="Can't change `progress`.")
|
| 222 |
|
| 223 |
if "name" in req and req["name"] != doc.name:
|
| 224 |
if (
|
|
|
|
| 226 |
!= pathlib.Path(doc.name.lower()).suffix
|
| 227 |
):
|
| 228 |
return get_result(
|
| 229 |
+
message="The extension of file can't be changed",
|
| 230 |
+
code=RetCode.ARGUMENT_ERROR,
|
| 231 |
)
|
| 232 |
for d in DocumentService.query(name=req["name"], kb_id=doc.kb_id):
|
| 233 |
if d.name == req["name"]:
|
| 234 |
return get_error_data_result(
|
| 235 |
+
message="Duplicated document name in the same dataset."
|
| 236 |
)
|
| 237 |
if not DocumentService.update_by_id(document_id, {"name": req["name"]}):
|
| 238 |
+
return get_error_data_result(message="Database error (Document rename)!")
|
| 239 |
|
| 240 |
informs = File2DocumentService.get_by_document_id(document_id)
|
| 241 |
if informs:
|
|
|
|
| 266 |
return get_result()
|
| 267 |
|
| 268 |
if doc.type == FileType.VISUAL or re.search(r"\.(ppt|pptx|pages)$", doc.name):
|
| 269 |
+
return get_error_data_result(message="Not supported yet!")
|
| 270 |
|
| 271 |
e = DocumentService.update_by_id(
|
| 272 |
doc.id,
|
|
|
|
| 278 |
},
|
| 279 |
)
|
| 280 |
if not e:
|
| 281 |
+
return get_error_data_result(message="Document not found!")
|
| 282 |
req["parser_config"] = get_parser_config(
|
| 283 |
req["chunk_method"], req.get("parser_config")
|
| 284 |
)
|
|
|
|
| 292 |
doc.process_duation * -1,
|
| 293 |
)
|
| 294 |
if not e:
|
| 295 |
+
return get_error_data_result(message="Document not found!")
|
| 296 |
ELASTICSEARCH.deleteByQuery(
|
| 297 |
Q("match", doc_id=doc.id), idxnm=search.index_name(tenant_id)
|
| 298 |
)
|
|
|
|
| 339 |
type: object
|
| 340 |
"""
|
| 341 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 342 |
+
return get_error_data_result(message=f"You do not own the dataset {dataset_id}.")
|
| 343 |
doc = DocumentService.query(kb_id=dataset_id, id=document_id)
|
| 344 |
if not doc:
|
| 345 |
return get_error_data_result(
|
| 346 |
+
message=f"The dataset not own the document {document_id}."
|
| 347 |
)
|
| 348 |
# The process of downloading
|
| 349 |
doc_id, doc_location = File2DocumentService.get_storage_address(
|
|
|
|
| 451 |
description: Processing status.
|
| 452 |
"""
|
| 453 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 454 |
+
return get_error_data_result(message=f"You don't own the dataset {dataset_id}. ")
|
| 455 |
id = request.args.get("id")
|
| 456 |
name = request.args.get("name")
|
| 457 |
if not DocumentService.query(id=id, kb_id=dataset_id):
|
| 458 |
+
return get_error_data_result(message=f"You don't own the document {id}.")
|
| 459 |
if not DocumentService.query(name=name, kb_id=dataset_id):
|
| 460 |
+
return get_error_data_result(message=f"You don't own the document {name}.")
|
| 461 |
page = int(request.args.get("page", 1))
|
| 462 |
keywords = request.args.get("keywords", "")
|
| 463 |
page_size = int(request.args.get("page_size", 1024))
|
|
|
|
| 538 |
type: object
|
| 539 |
"""
|
| 540 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 541 |
+
return get_error_data_result(message=f"You don't own the dataset {dataset_id}. ")
|
| 542 |
req = request.json
|
| 543 |
if not req:
|
| 544 |
doc_ids = None
|
|
|
|
| 559 |
try:
|
| 560 |
e, doc = DocumentService.get_by_id(doc_id)
|
| 561 |
if not e:
|
| 562 |
+
return get_error_data_result(message="Document not found!")
|
| 563 |
tenant_id = DocumentService.get_tenant_id(doc_id)
|
| 564 |
if not tenant_id:
|
| 565 |
+
return get_error_data_result(message="Tenant not found!")
|
| 566 |
|
| 567 |
b, n = File2DocumentService.get_storage_address(doc_id=doc_id)
|
| 568 |
|
| 569 |
if not DocumentService.remove_document(doc, tenant_id):
|
| 570 |
return get_error_data_result(
|
| 571 |
+
message="Database error (Document removal)!"
|
| 572 |
)
|
| 573 |
|
| 574 |
f2d = File2DocumentService.get_by_document_id(doc_id)
|
|
|
|
| 585 |
errors += str(e)
|
| 586 |
|
| 587 |
if errors:
|
| 588 |
+
return get_result(message=errors, code=RetCode.SERVER_ERROR)
|
| 589 |
|
| 590 |
return get_result()
|
| 591 |
|
|
|
|
| 630 |
type: object
|
| 631 |
"""
|
| 632 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 633 |
+
return get_error_data_result(message=f"You don't own the dataset {dataset_id}.")
|
| 634 |
req = request.json
|
| 635 |
if not req.get("document_ids"):
|
| 636 |
return get_error_data_result("`document_ids` is required")
|
| 637 |
for id in req["document_ids"]:
|
| 638 |
doc = DocumentService.query(id=id, kb_id=dataset_id)
|
| 639 |
if not doc:
|
| 640 |
+
return get_error_data_result(message=f"You don't own the document {id}.")
|
| 641 |
if doc[0].progress != 0.0:
|
| 642 |
return get_error_data_result(
|
| 643 |
"Can't stop parsing document with progress at 0 or 100"
|
|
|
|
| 699 |
type: object
|
| 700 |
"""
|
| 701 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 702 |
+
return get_error_data_result(message=f"You don't own the dataset {dataset_id}.")
|
| 703 |
req = request.json
|
| 704 |
if not req.get("document_ids"):
|
| 705 |
return get_error_data_result("`document_ids` is required")
|
| 706 |
for id in req["document_ids"]:
|
| 707 |
doc = DocumentService.query(id=id, kb_id=dataset_id)
|
| 708 |
if not doc:
|
| 709 |
+
return get_error_data_result(message=f"You don't own the document {id}.")
|
| 710 |
if int(doc[0].progress) == 1 or int(doc[0].progress) == 0:
|
| 711 |
return get_error_data_result(
|
| 712 |
"Can't stop parsing document with progress at 0 or 1"
|
|
|
|
| 793 |
description: Document details.
|
| 794 |
"""
|
| 795 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 796 |
+
return get_error_data_result(message=f"You don't own the dataset {dataset_id}.")
|
| 797 |
doc = DocumentService.query(id=document_id, kb_id=dataset_id)
|
| 798 |
if not doc:
|
| 799 |
return get_error_data_result(
|
| 800 |
+
message=f"You don't own the document {document_id}."
|
| 801 |
)
|
| 802 |
doc = doc[0]
|
| 803 |
req = request.args
|
|
|
|
| 965 |
description: Important keywords.
|
| 966 |
"""
|
| 967 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 968 |
+
return get_error_data_result(message=f"You don't own the dataset {dataset_id}.")
|
| 969 |
doc = DocumentService.query(id=document_id, kb_id=dataset_id)
|
| 970 |
if not doc:
|
| 971 |
return get_error_data_result(
|
| 972 |
+
message=f"You don't own the document {document_id}."
|
| 973 |
)
|
| 974 |
doc = doc[0]
|
| 975 |
req = request.json
|
| 976 |
if not req.get("content"):
|
| 977 |
+
return get_error_data_result(message="`content` is required")
|
| 978 |
if "important_keywords" in req:
|
| 979 |
if type(req["important_keywords"]) != list:
|
| 980 |
return get_error_data_result(
|
|
|
|
| 1078 |
type: object
|
| 1079 |
"""
|
| 1080 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 1081 |
+
return get_error_data_result(message=f"You don't own the dataset {dataset_id}.")
|
| 1082 |
doc = DocumentService.query(id=document_id, kb_id=dataset_id)
|
| 1083 |
if not doc:
|
| 1084 |
return get_error_data_result(
|
| 1085 |
+
message=f"You don't own the document {document_id}."
|
| 1086 |
)
|
| 1087 |
doc = doc[0]
|
| 1088 |
req = request.json
|
|
|
|
| 1104 |
if not ELASTICSEARCH.deleteByQuery(
|
| 1105 |
Q("ids", values=chunk_list), search.index_name(tenant_id)
|
| 1106 |
):
|
| 1107 |
+
return get_error_data_result(message="Index updating failure")
|
| 1108 |
deleted_chunk_ids = chunk_list
|
| 1109 |
chunk_number = len(deleted_chunk_ids)
|
| 1110 |
DocumentService.decrement_chunk_num(doc.id, doc.kb_id, 1, chunk_number, 0)
|
|
|
|
| 1170 |
"""
|
| 1171 |
try:
|
| 1172 |
res = ELASTICSEARCH.get(chunk_id, search.index_name(tenant_id))
|
| 1173 |
+
except Exception:
|
| 1174 |
return get_error_data_result(f"Can't find this chunk {chunk_id}")
|
| 1175 |
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
| 1176 |
+
return get_error_data_result(message=f"You don't own the dataset {dataset_id}.")
|
| 1177 |
doc = DocumentService.query(id=document_id, kb_id=dataset_id)
|
| 1178 |
if not doc:
|
| 1179 |
return get_error_data_result(
|
| 1180 |
+
message=f"You don't own the document {document_id}."
|
| 1181 |
)
|
| 1182 |
doc = doc[0]
|
| 1183 |
query = {
|
|
|
|
| 1210 |
arr = [t for t in re.split(r"[\n\t]", d["content_with_weight"]) if len(t) > 1]
|
| 1211 |
if len(arr) != 2:
|
| 1212 |
return get_error_data_result(
|
| 1213 |
+
message="Q&A must be separated by TAB/ENTER key."
|
| 1214 |
)
|
| 1215 |
q, a = rmPrefix(arr[0]), rmPrefix(arr[1])
|
| 1216 |
d = beAdoc(
|
|
|
|
| 1317 |
embd_nms = list(set([kb.embd_id for kb in kbs]))
|
| 1318 |
if len(embd_nms) != 1:
|
| 1319 |
return get_result(
|
| 1320 |
+
message='Datasets use different embedding models."',
|
| 1321 |
+
code=RetCode.AUTHENTICATION_ERROR,
|
| 1322 |
)
|
| 1323 |
if "question" not in req:
|
| 1324 |
return get_error_data_result("`question` is required.")
|
|
|
|
| 1344 |
try:
|
| 1345 |
e, kb = KnowledgebaseService.get_by_id(kb_ids[0])
|
| 1346 |
if not e:
|
| 1347 |
+
return get_error_data_result(message="Dataset not found!")
|
| 1348 |
embd_mdl = TenantLLMService.model_instance(
|
| 1349 |
kb.tenant_id, LLMType.EMBEDDING.value, llm_name=kb.embd_id
|
| 1350 |
)
|
|
|
|
| 1398 |
except Exception as e:
|
| 1399 |
if str(e).find("not_found") > 0:
|
| 1400 |
return get_result(
|
| 1401 |
+
message="No chunk found! Check the chunk status please!",
|
| 1402 |
+
code=RetCode.DATA_ERROR,
|
| 1403 |
)
|
| 1404 |
return server_error_response(e)
|
api/apps/sdk/session.py
CHANGED
|
@@ -1,394 +1,394 @@
|
|
| 1 |
-
#
|
| 2 |
-
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
|
| 3 |
-
#
|
| 4 |
-
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
-
# you may not use this file except in compliance with the License.
|
| 6 |
-
# You may obtain a copy of the License at
|
| 7 |
-
#
|
| 8 |
-
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
-
#
|
| 10 |
-
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
-
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
-
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
-
# See the License for the specific language governing permissions and
|
| 14 |
-
# limitations under the License.
|
| 15 |
-
#
|
| 16 |
-
import json
|
| 17 |
-
from functools import partial
|
| 18 |
-
from uuid import uuid4
|
| 19 |
-
|
| 20 |
-
from flask import request, Response
|
| 21 |
-
|
| 22 |
-
from agent.canvas import Canvas
|
| 23 |
-
from api.db import StatusEnum
|
| 24 |
-
from api.db.db_models import API4Conversation
|
| 25 |
-
from api.db.services.api_service import API4ConversationService
|
| 26 |
-
from api.db.services.canvas_service import UserCanvasService
|
| 27 |
-
from api.db.services.dialog_service import DialogService, ConversationService, chat
|
| 28 |
-
from api.utils import get_uuid
|
| 29 |
-
from api.utils.api_utils import get_error_data_result
|
| 30 |
-
from api.utils.api_utils import get_result, token_required
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
@manager.route('/chats/<chat_id>/sessions', methods=['POST'])
|
| 34 |
-
@token_required
|
| 35 |
-
def create(tenant_id,chat_id):
|
| 36 |
-
req = request.json
|
| 37 |
-
req["dialog_id"] = chat_id
|
| 38 |
-
dia = DialogService.query(tenant_id=tenant_id, id=req["dialog_id"], status=StatusEnum.VALID.value)
|
| 39 |
-
if not dia:
|
| 40 |
-
return get_error_data_result(
|
| 41 |
-
conv = {
|
| 42 |
-
"id": get_uuid(),
|
| 43 |
-
"dialog_id": req["dialog_id"],
|
| 44 |
-
"name": req.get("name", "New session"),
|
| 45 |
-
"message": [{"role": "assistant", "content": "Hi! I am your assistant,can I help you?"}]
|
| 46 |
-
}
|
| 47 |
-
if not conv.get("name"):
|
| 48 |
-
return get_error_data_result(
|
| 49 |
-
ConversationService.save(**conv)
|
| 50 |
-
e, conv = ConversationService.get_by_id(conv["id"])
|
| 51 |
-
if not e:
|
| 52 |
-
return get_error_data_result(
|
| 53 |
-
conv = conv.to_dict()
|
| 54 |
-
conv['messages'] = conv.pop("message")
|
| 55 |
-
conv["chat_id"] = conv.pop("dialog_id")
|
| 56 |
-
del conv["reference"]
|
| 57 |
-
return get_result(data=conv)
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
@manager.route('/agents/<agent_id>/sessions', methods=['POST'])
|
| 61 |
-
@token_required
|
| 62 |
-
def create_agent_session(tenant_id, agent_id):
|
| 63 |
-
req = request.json
|
| 64 |
-
e, cvs = UserCanvasService.get_by_id(agent_id)
|
| 65 |
-
if not e:
|
| 66 |
-
return get_error_data_result("Agent not found.")
|
| 67 |
-
if cvs.user_id != tenant_id:
|
| 68 |
-
return get_error_data_result(
|
| 69 |
-
|
| 70 |
-
if not isinstance(cvs.dsl, str):
|
| 71 |
-
cvs.dsl = json.dumps(cvs.dsl, ensure_ascii=False)
|
| 72 |
-
|
| 73 |
-
canvas = Canvas(cvs.dsl, tenant_id)
|
| 74 |
-
conv = {
|
| 75 |
-
"id": get_uuid(),
|
| 76 |
-
"dialog_id": cvs.id,
|
| 77 |
-
"user_id": req.get("user_id", ""),
|
| 78 |
-
"message": [{"role": "assistant", "content": canvas.get_prologue()}],
|
| 79 |
-
"source": "agent"
|
| 80 |
-
}
|
| 81 |
-
API4ConversationService.save(**conv)
|
| 82 |
-
return get_result(data=conv)
|
| 83 |
-
|
| 84 |
-
|
| 85 |
-
@manager.route('/chats/<chat_id>/sessions/<session_id>', methods=['PUT'])
|
| 86 |
-
@token_required
|
| 87 |
-
def update(tenant_id,chat_id,session_id):
|
| 88 |
-
req = request.json
|
| 89 |
-
req["dialog_id"] = chat_id
|
| 90 |
-
conv_id = session_id
|
| 91 |
-
conv = ConversationService.query(id=conv_id,dialog_id=chat_id)
|
| 92 |
-
if not conv:
|
| 93 |
-
return get_error_data_result(
|
| 94 |
-
if not DialogService.query(id=chat_id, tenant_id=tenant_id, status=StatusEnum.VALID.value):
|
| 95 |
-
return get_error_data_result(
|
| 96 |
-
if "message" in req or "messages" in req:
|
| 97 |
-
return get_error_data_result(
|
| 98 |
-
if "reference" in req:
|
| 99 |
-
return get_error_data_result(
|
| 100 |
-
if "name" in req and not req.get("name"):
|
| 101 |
-
return get_error_data_result(
|
| 102 |
-
if not ConversationService.update_by_id(conv_id, req):
|
| 103 |
-
return get_error_data_result(
|
| 104 |
-
return get_result()
|
| 105 |
-
|
| 106 |
-
|
| 107 |
-
@manager.route('/chats/<chat_id>/completions', methods=['POST'])
|
| 108 |
-
@token_required
|
| 109 |
-
def completion(tenant_id, chat_id):
|
| 110 |
-
req = request.json
|
| 111 |
-
if not req.get("session_id"):
|
| 112 |
-
conv = {
|
| 113 |
-
"id": get_uuid(),
|
| 114 |
-
"dialog_id": chat_id,
|
| 115 |
-
"name": req.get("name", "New session"),
|
| 116 |
-
"message": [{"role": "assistant", "content": "Hi! I am your assistant,can I help you?"}]
|
| 117 |
-
}
|
| 118 |
-
if not conv.get("name"):
|
| 119 |
-
return get_error_data_result(
|
| 120 |
-
ConversationService.save(**conv)
|
| 121 |
-
e, conv = ConversationService.get_by_id(conv["id"])
|
| 122 |
-
session_id=conv.id
|
| 123 |
-
else:
|
| 124 |
-
session_id = req.get("session_id")
|
| 125 |
-
if not req.get("question"):
|
| 126 |
-
return get_error_data_result(
|
| 127 |
-
conv = ConversationService.query(id=session_id,dialog_id=chat_id)
|
| 128 |
-
if not conv:
|
| 129 |
-
return get_error_data_result(
|
| 130 |
-
conv = conv[0]
|
| 131 |
-
if not DialogService.query(id=chat_id, tenant_id=tenant_id, status=StatusEnum.VALID.value):
|
| 132 |
-
return get_error_data_result(
|
| 133 |
-
msg = []
|
| 134 |
-
question = {
|
| 135 |
-
"content": req.get("question"),
|
| 136 |
-
"role": "user",
|
| 137 |
-
"id": str(uuid4())
|
| 138 |
-
}
|
| 139 |
-
conv.message.append(question)
|
| 140 |
-
for m in conv.message:
|
| 141 |
-
if m["role"] == "system": continue
|
| 142 |
-
if m["role"] == "assistant" and not msg: continue
|
| 143 |
-
msg.append(m)
|
| 144 |
-
message_id = msg[-1].get("id")
|
| 145 |
-
e, dia = DialogService.get_by_id(conv.dialog_id)
|
| 146 |
-
|
| 147 |
-
if not conv.reference:
|
| 148 |
-
conv.reference = []
|
| 149 |
-
conv.message.append({"role": "assistant", "content": "", "id": message_id})
|
| 150 |
-
conv.reference.append({"chunks": [], "doc_aggs": []})
|
| 151 |
-
|
| 152 |
-
def fillin_conv(ans):
|
| 153 |
-
nonlocal conv, message_id
|
| 154 |
-
if not conv.reference:
|
| 155 |
-
conv.reference.append(ans["reference"])
|
| 156 |
-
else:
|
| 157 |
-
conv.reference[-1] = ans["reference"]
|
| 158 |
-
conv.message[-1] = {"role": "assistant", "content": ans["answer"],
|
| 159 |
-
"id": message_id, "prompt": ans.get("prompt", "")}
|
| 160 |
-
ans["id"] = message_id
|
| 161 |
-
ans["session_id"]=session_id
|
| 162 |
-
|
| 163 |
-
def stream():
|
| 164 |
-
nonlocal dia, msg, req, conv
|
| 165 |
-
try:
|
| 166 |
-
for ans in chat(dia, msg, **req):
|
| 167 |
-
fillin_conv(ans)
|
| 168 |
-
yield "data:" + json.dumps({"code": 0, "data": ans}, ensure_ascii=False) + "\n\n"
|
| 169 |
-
ConversationService.update_by_id(conv.id, conv.to_dict())
|
| 170 |
-
except Exception as e:
|
| 171 |
-
yield "data:" + json.dumps({"code": 500, "message": str(e),
|
| 172 |
-
"data": {"answer": "**ERROR**: " + str(e),"reference": []}},
|
| 173 |
-
ensure_ascii=False) + "\n\n"
|
| 174 |
-
yield "data:" + json.dumps({"code": 0, "data": True}, ensure_ascii=False) + "\n\n"
|
| 175 |
-
|
| 176 |
-
if req.get("stream", True):
|
| 177 |
-
resp = Response(stream(), mimetype="text/event-stream")
|
| 178 |
-
resp.headers.add_header("Cache-control", "no-cache")
|
| 179 |
-
resp.headers.add_header("Connection", "keep-alive")
|
| 180 |
-
resp.headers.add_header("X-Accel-Buffering", "no")
|
| 181 |
-
resp.headers.add_header("Content-Type", "text/event-stream; charset=utf-8")
|
| 182 |
-
return resp
|
| 183 |
-
|
| 184 |
-
else:
|
| 185 |
-
answer = None
|
| 186 |
-
for ans in chat(dia, msg, **req):
|
| 187 |
-
answer = ans
|
| 188 |
-
fillin_conv(ans)
|
| 189 |
-
ConversationService.update_by_id(conv.id, conv.to_dict())
|
| 190 |
-
break
|
| 191 |
-
return get_result(data=answer)
|
| 192 |
-
|
| 193 |
-
|
| 194 |
-
@manager.route('/agents/<agent_id>/completions', methods=['POST'])
|
| 195 |
-
@token_required
|
| 196 |
-
def agent_completion(tenant_id, agent_id):
|
| 197 |
-
req = request.json
|
| 198 |
-
e, cvs = UserCanvasService.get_by_id(agent_id)
|
| 199 |
-
if not e:
|
| 200 |
-
return get_error_data_result("Agent not found.")
|
| 201 |
-
if cvs.user_id != tenant_id:
|
| 202 |
-
return get_error_data_result(
|
| 203 |
-
if not isinstance(cvs.dsl, str):
|
| 204 |
-
cvs.dsl = json.dumps(cvs.dsl, ensure_ascii=False)
|
| 205 |
-
|
| 206 |
-
canvas = Canvas(cvs.dsl, tenant_id)
|
| 207 |
-
|
| 208 |
-
msg = []
|
| 209 |
-
for m in req["messages"]:
|
| 210 |
-
if m["role"] == "system":
|
| 211 |
-
continue
|
| 212 |
-
if m["role"] == "assistant" and not msg:
|
| 213 |
-
continue
|
| 214 |
-
msg.append(m)
|
| 215 |
-
if not msg[-1].get("id"): msg[-1]["id"] = get_uuid()
|
| 216 |
-
message_id = msg[-1]["id"]
|
| 217 |
-
|
| 218 |
-
if not req.get("session_id"):
|
| 219 |
-
session_id = get_uuid()
|
| 220 |
-
conv = {
|
| 221 |
-
"id": session_id,
|
| 222 |
-
"dialog_id": cvs.id,
|
| 223 |
-
"user_id": req.get("user_id", ""),
|
| 224 |
-
"message": [{"role": "assistant", "content": canvas.get_prologue()}],
|
| 225 |
-
"source": "agent"
|
| 226 |
-
}
|
| 227 |
-
API4ConversationService.save(**conv)
|
| 228 |
-
conv = API4Conversation(**conv)
|
| 229 |
-
else:
|
| 230 |
-
session_id = req.get("session_id")
|
| 231 |
-
e, conv = API4ConversationService.get_by_id(req["session_id"])
|
| 232 |
-
if not e:
|
| 233 |
-
return get_error_data_result(
|
| 234 |
-
|
| 235 |
-
if "quote" not in req: req["quote"] = False
|
| 236 |
-
stream = req.get("stream", True)
|
| 237 |
-
|
| 238 |
-
def fillin_conv(ans):
|
| 239 |
-
nonlocal conv, message_id
|
| 240 |
-
if not conv.reference:
|
| 241 |
-
conv.reference.append(ans["reference"])
|
| 242 |
-
else:
|
| 243 |
-
conv.reference[-1] = ans["reference"]
|
| 244 |
-
conv.message[-1] = {"role": "assistant", "content": ans["answer"], "id": message_id}
|
| 245 |
-
ans["id"] = message_id
|
| 246 |
-
ans["session_id"] = session_id
|
| 247 |
-
|
| 248 |
-
def rename_field(ans):
|
| 249 |
-
reference = ans['reference']
|
| 250 |
-
if not isinstance(reference, dict):
|
| 251 |
-
return
|
| 252 |
-
for chunk_i in reference.get('chunks', []):
|
| 253 |
-
if 'docnm_kwd' in chunk_i:
|
| 254 |
-
chunk_i['doc_name'] = chunk_i['docnm_kwd']
|
| 255 |
-
chunk_i.pop('docnm_kwd')
|
| 256 |
-
conv.message.append(msg[-1])
|
| 257 |
-
|
| 258 |
-
if not conv.reference:
|
| 259 |
-
conv.reference = []
|
| 260 |
-
conv.message.append({"role": "assistant", "content": "", "id": message_id})
|
| 261 |
-
conv.reference.append({"chunks": [], "doc_aggs": []})
|
| 262 |
-
|
| 263 |
-
final_ans = {"reference": [], "content": ""}
|
| 264 |
-
|
| 265 |
-
canvas.messages.append(msg[-1])
|
| 266 |
-
canvas.add_user_input(msg[-1]["content"])
|
| 267 |
-
answer = canvas.run(stream=stream)
|
| 268 |
-
|
| 269 |
-
assert answer is not None, "Nothing. Is it over?"
|
| 270 |
-
|
| 271 |
-
if stream:
|
| 272 |
-
assert isinstance(answer, partial), "Nothing. Is it over?"
|
| 273 |
-
|
| 274 |
-
def sse():
|
| 275 |
-
nonlocal answer, cvs, conv
|
| 276 |
-
try:
|
| 277 |
-
for ans in answer():
|
| 278 |
-
for k in ans.keys():
|
| 279 |
-
final_ans[k] = ans[k]
|
| 280 |
-
ans = {"answer": ans["content"], "reference": ans.get("reference", [])}
|
| 281 |
-
fillin_conv(ans)
|
| 282 |
-
rename_field(ans)
|
| 283 |
-
yield "data:" + json.dumps({"
|
| 284 |
-
ensure_ascii=False) + "\n\n"
|
| 285 |
-
|
| 286 |
-
canvas.messages.append({"role": "assistant", "content": final_ans["content"], "id": message_id})
|
| 287 |
-
if final_ans.get("reference"):
|
| 288 |
-
canvas.reference.append(final_ans["reference"])
|
| 289 |
-
cvs.dsl = json.loads(str(canvas))
|
| 290 |
-
API4ConversationService.append_message(conv.id, conv.to_dict())
|
| 291 |
-
except Exception as e:
|
| 292 |
-
yield "data:" + json.dumps({"
|
| 293 |
-
"data": {"answer": "**ERROR**: " + str(e), "reference": []}},
|
| 294 |
-
ensure_ascii=False) + "\n\n"
|
| 295 |
-
yield "data:" + json.dumps({"
|
| 296 |
-
|
| 297 |
-
resp = Response(sse(), mimetype="text/event-stream")
|
| 298 |
-
resp.headers.add_header("Cache-control", "no-cache")
|
| 299 |
-
resp.headers.add_header("Connection", "keep-alive")
|
| 300 |
-
resp.headers.add_header("X-Accel-Buffering", "no")
|
| 301 |
-
resp.headers.add_header("Content-Type", "text/event-stream; charset=utf-8")
|
| 302 |
-
return resp
|
| 303 |
-
|
| 304 |
-
final_ans["content"] = "\n".join(answer["content"]) if "content" in answer else ""
|
| 305 |
-
canvas.messages.append({"role": "assistant", "content": final_ans["content"], "id": message_id})
|
| 306 |
-
if final_ans.get("reference"):
|
| 307 |
-
canvas.reference.append(final_ans["reference"])
|
| 308 |
-
cvs.dsl = json.loads(str(canvas))
|
| 309 |
-
|
| 310 |
-
result = {"answer": final_ans["content"], "reference": final_ans.get("reference", [])}
|
| 311 |
-
fillin_conv(result)
|
| 312 |
-
API4ConversationService.append_message(conv.id, conv.to_dict())
|
| 313 |
-
rename_field(result)
|
| 314 |
-
return get_result(data=result)
|
| 315 |
-
|
| 316 |
-
|
| 317 |
-
@manager.route('/chats/<chat_id>/sessions', methods=['GET'])
|
| 318 |
-
@token_required
|
| 319 |
-
def list(chat_id,tenant_id):
|
| 320 |
-
if not DialogService.query(tenant_id=tenant_id, id=chat_id, status=StatusEnum.VALID.value):
|
| 321 |
-
return get_error_data_result(
|
| 322 |
-
id = request.args.get("id")
|
| 323 |
-
name = request.args.get("name")
|
| 324 |
-
page_number = int(request.args.get("page", 1))
|
| 325 |
-
items_per_page = int(request.args.get("page_size", 1024))
|
| 326 |
-
orderby = request.args.get("orderby", "create_time")
|
| 327 |
-
if request.args.get("desc") == "False" or request.args.get("desc") == "false":
|
| 328 |
-
desc = False
|
| 329 |
-
else:
|
| 330 |
-
desc = True
|
| 331 |
-
convs = ConversationService.get_list(chat_id,page_number,items_per_page,orderby,desc,id,name)
|
| 332 |
-
if not convs:
|
| 333 |
-
return get_result(data=[])
|
| 334 |
-
for conv in convs:
|
| 335 |
-
conv['messages'] = conv.pop("message")
|
| 336 |
-
infos = conv["messages"]
|
| 337 |
-
for info in infos:
|
| 338 |
-
if "prompt" in info:
|
| 339 |
-
info.pop("prompt")
|
| 340 |
-
conv["chat"] = conv.pop("dialog_id")
|
| 341 |
-
if conv["reference"]:
|
| 342 |
-
messages = conv["messages"]
|
| 343 |
-
message_num = 0
|
| 344 |
-
chunk_num = 0
|
| 345 |
-
while message_num < len(messages):
|
| 346 |
-
if message_num != 0 and messages[message_num]["role"] != "user":
|
| 347 |
-
chunk_list = []
|
| 348 |
-
if "chunks" in conv["reference"][chunk_num]:
|
| 349 |
-
chunks = conv["reference"][chunk_num]["chunks"]
|
| 350 |
-
for chunk in chunks:
|
| 351 |
-
new_chunk = {
|
| 352 |
-
"id": chunk["chunk_id"],
|
| 353 |
-
"content": chunk["content_with_weight"],
|
| 354 |
-
"document_id": chunk["doc_id"],
|
| 355 |
-
"document_name": chunk["docnm_kwd"],
|
| 356 |
-
"dataset_id": chunk["kb_id"],
|
| 357 |
-
"image_id": chunk["img_id"],
|
| 358 |
-
"similarity": chunk["similarity"],
|
| 359 |
-
"vector_similarity": chunk["vector_similarity"],
|
| 360 |
-
"term_similarity": chunk["term_similarity"],
|
| 361 |
-
"positions": chunk["positions"],
|
| 362 |
-
}
|
| 363 |
-
chunk_list.append(new_chunk)
|
| 364 |
-
chunk_num += 1
|
| 365 |
-
messages[message_num]["reference"] = chunk_list
|
| 366 |
-
message_num += 1
|
| 367 |
-
del conv["reference"]
|
| 368 |
-
return get_result(data=convs)
|
| 369 |
-
|
| 370 |
-
|
| 371 |
-
@manager.route('/chats/<chat_id>/sessions', methods=["DELETE"])
|
| 372 |
-
@token_required
|
| 373 |
-
def delete(tenant_id,chat_id):
|
| 374 |
-
if not DialogService.query(id=chat_id, tenant_id=tenant_id, status=StatusEnum.VALID.value):
|
| 375 |
-
return get_error_data_result(
|
| 376 |
-
req = request.json
|
| 377 |
-
convs = ConversationService.query(dialog_id=chat_id)
|
| 378 |
-
if not req:
|
| 379 |
-
ids = None
|
| 380 |
-
else:
|
| 381 |
-
ids=req.get("ids")
|
| 382 |
-
|
| 383 |
-
if not ids:
|
| 384 |
-
conv_list = []
|
| 385 |
-
for conv in convs:
|
| 386 |
-
conv_list.append(conv.id)
|
| 387 |
-
else:
|
| 388 |
-
conv_list=ids
|
| 389 |
-
for id in conv_list:
|
| 390 |
-
conv = ConversationService.query(id=id,dialog_id=chat_id)
|
| 391 |
-
if not conv:
|
| 392 |
-
return get_error_data_result(
|
| 393 |
-
ConversationService.delete_by_id(id)
|
| 394 |
-
return get_result()
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
import json
|
| 17 |
+
from functools import partial
|
| 18 |
+
from uuid import uuid4
|
| 19 |
+
|
| 20 |
+
from flask import request, Response
|
| 21 |
+
|
| 22 |
+
from agent.canvas import Canvas
|
| 23 |
+
from api.db import StatusEnum
|
| 24 |
+
from api.db.db_models import API4Conversation
|
| 25 |
+
from api.db.services.api_service import API4ConversationService
|
| 26 |
+
from api.db.services.canvas_service import UserCanvasService
|
| 27 |
+
from api.db.services.dialog_service import DialogService, ConversationService, chat
|
| 28 |
+
from api.utils import get_uuid
|
| 29 |
+
from api.utils.api_utils import get_error_data_result
|
| 30 |
+
from api.utils.api_utils import get_result, token_required
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@manager.route('/chats/<chat_id>/sessions', methods=['POST'])
@token_required
def create(tenant_id, chat_id):
    """Create a new chat session under an assistant owned by the caller.

    Renames internal storage fields (`message` -> `messages`,
    `dialog_id` -> `chat_id`) before returning the session to the client.
    """
    req = request.json
    req["dialog_id"] = chat_id
    # Only the owner of a valid assistant may create sessions on it.
    dia = DialogService.query(tenant_id=tenant_id, id=req["dialog_id"], status=StatusEnum.VALID.value)
    if not dia:
        return get_error_data_result(message="You do not own the assistant.")
    session_name = req.get("name", "New session")
    if not session_name:
        return get_error_data_result(message="`name` can not be empty.")
    conv = {
        "id": get_uuid(),
        "dialog_id": req["dialog_id"],
        "name": session_name,
        "message": [{"role": "assistant", "content": "Hi! I am your assistant,can I help you?"}]
    }
    ConversationService.save(**conv)
    e, conv = ConversationService.get_by_id(conv["id"])
    if not e:
        return get_error_data_result(message="Fail to create a session!")
    conv = conv.to_dict()
    # Map internal field names onto the public API schema.
    conv['messages'] = conv.pop("message")
    conv["chat_id"] = conv.pop("dialog_id")
    del conv["reference"]
    return get_result(data=conv)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
@manager.route('/agents/<agent_id>/sessions', methods=['POST'])
@token_required
def create_agent_session(tenant_id, agent_id):
    """Create a new session for an agent (canvas) owned by the caller."""
    req = request.json
    found, cvs = UserCanvasService.get_by_id(agent_id)
    if not found:
        return get_error_data_result("Agent not found.")
    if cvs.user_id != tenant_id:
        return get_error_data_result(message="You do not own the agent.")

    # The stored DSL may be a dict; Canvas expects a JSON string.
    if not isinstance(cvs.dsl, str):
        cvs.dsl = json.dumps(cvs.dsl, ensure_ascii=False)
    canvas = Canvas(cvs.dsl, tenant_id)

    conv = {
        "id": get_uuid(),
        "dialog_id": cvs.id,
        "user_id": req.get("user_id", ""),
        "message": [{"role": "assistant", "content": canvas.get_prologue()}],
        "source": "agent"
    }
    API4ConversationService.save(**conv)
    return get_result(data=conv)
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
@manager.route('/chats/<chat_id>/sessions/<session_id>', methods=['PUT'])
@token_required
def update(tenant_id, chat_id, session_id):
    """Update mutable fields of a chat session (currently only `name`)."""
    req = request.json
    req["dialog_id"] = chat_id
    conv_id = session_id
    if not ConversationService.query(id=conv_id, dialog_id=chat_id):
        return get_error_data_result(message="Session does not exist")
    if not DialogService.query(id=chat_id, tenant_id=tenant_id, status=StatusEnum.VALID.value):
        return get_error_data_result(message="You do not own the session")
    # Message history and references are server-managed; reject client edits.
    if "message" in req or "messages" in req:
        return get_error_data_result(message="`message` can not be change")
    if "reference" in req:
        return get_error_data_result(message="`reference` can not be change")
    if "name" in req and not req.get("name"):
        return get_error_data_result(message="`name` can not be empty.")
    if not ConversationService.update_by_id(conv_id, req):
        return get_error_data_result(message="Session updates error")
    return get_result()
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
@manager.route('/chats/<chat_id>/completions', methods=['POST'])
@token_required
def completion(tenant_id, chat_id):
    """Answer a question within a chat session.

    When `session_id` is absent a new session is created implicitly.
    Streams SSE events when `stream` is true (the default); otherwise
    returns a single JSON result carrying the answer.
    """
    req = request.json
    if not req.get("session_id"):
        # No session supplied: create one on the fly for this chat.
        conv = {
            "id": get_uuid(),
            "dialog_id": chat_id,
            "name": req.get("name", "New session"),
            "message": [{"role": "assistant", "content": "Hi! I am your assistant,can I help you?"}]
        }
        if not conv.get("name"):
            return get_error_data_result(message="`name` can not be empty.")
        ConversationService.save(**conv)
        # NOTE(review): `e` is not checked here (unlike `create`) — a failed
        # save would surface later as "Session does not exist"; confirm intended.
        e, conv = ConversationService.get_by_id(conv["id"])
        session_id=conv.id
    else:
        session_id = req.get("session_id")
    if not req.get("question"):
        return get_error_data_result(message="Please input your question.")
    conv = ConversationService.query(id=session_id,dialog_id=chat_id)
    if not conv:
        return get_error_data_result(message="Session does not exist")
    conv = conv[0]
    if not DialogService.query(id=chat_id, tenant_id=tenant_id, status=StatusEnum.VALID.value):
        return get_error_data_result(message="You do not own the chat")
    # Build the history passed to the LLM: skip system messages and any
    # leading assistant greeting before the first user turn.
    msg = []
    question = {
        "content": req.get("question"),
        "role": "user",
        "id": str(uuid4())
    }
    conv.message.append(question)
    for m in conv.message:
        if m["role"] == "system": continue
        if m["role"] == "assistant" and not msg: continue
        msg.append(m)
    # The answer inherits the id of the last (user) message.
    message_id = msg[-1].get("id")
    e, dia = DialogService.get_by_id(conv.dialog_id)

    if not conv.reference:
        # First turn: seed an empty assistant placeholder and reference slot
        # that fillin_conv() will overwrite.
        conv.reference = []
        conv.message.append({"role": "assistant", "content": "", "id": message_id})
        conv.reference.append({"chunks": [], "doc_aggs": []})

    def fillin_conv(ans):
        # Write the (partial) answer and its references back onto the
        # in-memory conversation and tag the payload with ids for the client.
        nonlocal conv, message_id
        if not conv.reference:
            conv.reference.append(ans["reference"])
        else:
            conv.reference[-1] = ans["reference"]
        conv.message[-1] = {"role": "assistant", "content": ans["answer"],
                            "id": message_id, "prompt": ans.get("prompt", "")}
        ans["id"] = message_id
        ans["session_id"]=session_id

    def stream():
        # SSE generator: one `data:` event per partial answer, a code-500
        # event on failure, and a final `data: true` sentinel either way.
        nonlocal dia, msg, req, conv
        try:
            for ans in chat(dia, msg, **req):
                fillin_conv(ans)
                yield "data:" + json.dumps({"code": 0, "data": ans}, ensure_ascii=False) + "\n\n"
            # Persist the conversation only after the full answer streamed.
            ConversationService.update_by_id(conv.id, conv.to_dict())
        except Exception as e:
            yield "data:" + json.dumps({"code": 500, "message": str(e),
                                        "data": {"answer": "**ERROR**: " + str(e),"reference": []}},
                                       ensure_ascii=False) + "\n\n"
        yield "data:" + json.dumps({"code": 0, "data": True}, ensure_ascii=False) + "\n\n"

    if req.get("stream", True):
        resp = Response(stream(), mimetype="text/event-stream")
        resp.headers.add_header("Cache-control", "no-cache")
        resp.headers.add_header("Connection", "keep-alive")
        # Disable proxy buffering so events reach the client immediately.
        resp.headers.add_header("X-Accel-Buffering", "no")
        resp.headers.add_header("Content-Type", "text/event-stream; charset=utf-8")
        return resp

    else:
        # Non-streaming: take the first item yielded by chat() — presumably
        # the complete answer when streaming is disabled (TODO confirm).
        answer = None
        for ans in chat(dia, msg, **req):
            answer = ans
            fillin_conv(ans)
            ConversationService.update_by_id(conv.id, conv.to_dict())
            break
        return get_result(data=answer)
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
@manager.route('/agents/<agent_id>/completions', methods=['POST'])
@token_required
def agent_completion(tenant_id, agent_id):
    """Run an agent (canvas) over the caller's messages.

    Creates a session implicitly when `session_id` is absent. Streams SSE
    events when `stream` is true (the default); otherwise returns a single
    JSON result with the final answer and renamed reference fields.
    """
    req = request.json
    e, cvs = UserCanvasService.get_by_id(agent_id)
    if not e:
        return get_error_data_result("Agent not found.")
    if cvs.user_id != tenant_id:
        return get_error_data_result(message="You do not own the agent.")
    # The stored DSL may be a dict; Canvas expects a JSON string.
    if not isinstance(cvs.dsl, str):
        cvs.dsl = json.dumps(cvs.dsl, ensure_ascii=False)

    canvas = Canvas(cvs.dsl, tenant_id)

    # Keep only the conversational turns: drop system messages and any
    # leading assistant greeting before the first user turn.
    msg = []
    for m in req["messages"]:
        if m["role"] == "system":
            continue
        if m["role"] == "assistant" and not msg:
            continue
        msg.append(m)
    if not msg[-1].get("id"): msg[-1]["id"] = get_uuid()
    # The answer inherits the id of the last (user) message.
    message_id = msg[-1]["id"]

    if not req.get("session_id"):
        session_id = get_uuid()
        conv = {
            "id": session_id,
            "dialog_id": cvs.id,
            "user_id": req.get("user_id", ""),
            "message": [{"role": "assistant", "content": canvas.get_prologue()}],
            "source": "agent"
        }
        API4ConversationService.save(**conv)
        conv = API4Conversation(**conv)
    else:
        session_id = req.get("session_id")
        e, conv = API4ConversationService.get_by_id(req["session_id"])
        if not e:
            return get_error_data_result(message="Session not found!")

    if "quote" not in req: req["quote"] = False
    stream = req.get("stream", True)

    def fillin_conv(ans):
        # Write the (partial) answer and its references back onto the
        # in-memory conversation and tag the payload with ids for the client.
        nonlocal conv, message_id
        if not conv.reference:
            conv.reference.append(ans["reference"])
        else:
            conv.reference[-1] = ans["reference"]
        conv.message[-1] = {"role": "assistant", "content": ans["answer"], "id": message_id}
        ans["id"] = message_id
        ans["session_id"] = session_id

    def rename_field(ans):
        # Expose chunks with the public field name `doc_name` instead of the
        # internal `docnm_kwd`.
        reference = ans['reference']
        if not isinstance(reference, dict):
            return
        for chunk_i in reference.get('chunks', []):
            if 'docnm_kwd' in chunk_i:
                chunk_i['doc_name'] = chunk_i['docnm_kwd']
                chunk_i.pop('docnm_kwd')
    # Record the incoming user message on the conversation.
    conv.message.append(msg[-1])

    if not conv.reference:
        # First turn: seed an empty assistant placeholder and reference slot
        # that fillin_conv() will overwrite.
        conv.reference = []
        conv.message.append({"role": "assistant", "content": "", "id": message_id})
        conv.reference.append({"chunks": [], "doc_aggs": []})

    # Accumulates the last value seen for each key across streamed chunks.
    final_ans = {"reference": [], "content": ""}

    canvas.messages.append(msg[-1])
    canvas.add_user_input(msg[-1]["content"])
    answer = canvas.run(stream=stream)

    assert answer is not None, "Nothing. Is it over?"

    if stream:
        # In streaming mode Canvas.run returns a functools.partial that
        # yields answer chunks when called.
        assert isinstance(answer, partial), "Nothing. Is it over?"

        def sse():
            nonlocal answer, cvs, conv
            try:
                for ans in answer():
                    for k in ans.keys():
                        final_ans[k] = ans[k]
                    ans = {"answer": ans["content"], "reference": ans.get("reference", [])}
                    fillin_conv(ans)
                    rename_field(ans)
                    yield "data:" + json.dumps({"code": 0, "message": "", "data": ans},
                                               ensure_ascii=False) + "\n\n"

                # Fold the completed turn back into the canvas and persist
                # both the updated DSL and the conversation.
                canvas.messages.append({"role": "assistant", "content": final_ans["content"], "id": message_id})
                if final_ans.get("reference"):
                    canvas.reference.append(final_ans["reference"])
                cvs.dsl = json.loads(str(canvas))
                API4ConversationService.append_message(conv.id, conv.to_dict())
            except Exception as e:
                yield "data:" + json.dumps({"code": 500, "message": str(e),
                                            "data": {"answer": "**ERROR**: " + str(e), "reference": []}},
                                           ensure_ascii=False) + "\n\n"
            yield "data:" + json.dumps({"code": 0, "message": "", "data": True}, ensure_ascii=False) + "\n\n"

        resp = Response(sse(), mimetype="text/event-stream")
        resp.headers.add_header("Cache-control", "no-cache")
        resp.headers.add_header("Connection", "keep-alive")
        # Disable proxy buffering so events reach the client immediately.
        resp.headers.add_header("X-Accel-Buffering", "no")
        resp.headers.add_header("Content-Type", "text/event-stream; charset=utf-8")
        return resp

    # Non-streaming path: the answer's content is presumably a sequence of
    # text parts joined into one string (TODO confirm against Canvas.run).
    final_ans["content"] = "\n".join(answer["content"]) if "content" in answer else ""
    canvas.messages.append({"role": "assistant", "content": final_ans["content"], "id": message_id})
    if final_ans.get("reference"):
        canvas.reference.append(final_ans["reference"])
    cvs.dsl = json.loads(str(canvas))

    result = {"answer": final_ans["content"], "reference": final_ans.get("reference", [])}
    fillin_conv(result)
    API4ConversationService.append_message(conv.id, conv.to_dict())
    rename_field(result)
    return get_result(data=result)
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
@manager.route('/chats/<chat_id>/sessions', methods=['GET'])
@token_required
def list(chat_id,tenant_id):
    """List the sessions of a chat assistant.

    Supports filtering by session `id`/`name`, paging (`page`, `page_size`)
    and ordering (`orderby`, `desc`). Internal field names are rewritten to
    the public API schema and per-message references are inlined.
    """
    if not DialogService.query(tenant_id=tenant_id, id=chat_id, status=StatusEnum.VALID.value):
        return get_error_data_result(message=f"You don't own the assistant {chat_id}.")
    id = request.args.get("id")
    name = request.args.get("name")
    page_number = int(request.args.get("page", 1))
    items_per_page = int(request.args.get("page_size", 1024))
    orderby = request.args.get("orderby", "create_time")
    # Descending order unless explicitly disabled.
    desc = request.args.get("desc") not in ("False", "false")
    convs = ConversationService.get_list(chat_id, page_number, items_per_page, orderby, desc, id, name)
    if not convs:
        return get_result(data=[])
    for conv in convs:
        conv['messages'] = conv.pop("message")
        # `prompt` is internal and must not leak to API consumers.
        for info in conv["messages"]:
            info.pop("prompt", None)
        conv["chat"] = conv.pop("dialog_id")
        if conv["reference"]:
            messages = conv["messages"]
            message_num = 0
            chunk_num = 0
            while message_num < len(messages):
                # References are stored one entry per assistant turn
                # (excluding the opening greeting); attach each entry to
                # its message in the public chunk schema.
                if message_num != 0 and messages[message_num]["role"] != "user":
                    chunk_list = []
                    # Bounds check: tolerate a reference list shorter than
                    # the number of assistant turns instead of raising.
                    if chunk_num < len(conv["reference"]) and "chunks" in conv["reference"][chunk_num]:
                        for chunk in conv["reference"][chunk_num]["chunks"]:
                            chunk_list.append({
                                "id": chunk["chunk_id"],
                                "content": chunk["content_with_weight"],
                                "document_id": chunk["doc_id"],
                                "document_name": chunk["docnm_kwd"],
                                "dataset_id": chunk["kb_id"],
                                # Not every chunk carries an image.
                                "image_id": chunk.get("img_id", ""),
                                "similarity": chunk["similarity"],
                                "vector_similarity": chunk["vector_similarity"],
                                "term_similarity": chunk["term_similarity"],
                                "positions": chunk["positions"],
                            })
                    chunk_num += 1
                    messages[message_num]["reference"] = chunk_list
                message_num += 1
        del conv["reference"]
    return get_result(data=convs)
|
| 369 |
+
|
| 370 |
+
|
| 371 |
+
@manager.route('/chats/<chat_id>/sessions', methods=["DELETE"])
@token_required
def delete(tenant_id,chat_id):
    """Delete chat sessions; deletes every session when no ids are given.

    All requested ids are validated before any deletion happens, so a bad id
    can no longer leave the chat with a partially deleted session list (the
    original deleted while validating).
    """
    if not DialogService.query(id=chat_id, tenant_id=tenant_id, status=StatusEnum.VALID.value):
        return get_error_data_result(message="You don't own the chat")
    req = request.json
    ids = req.get("ids") if req else None
    if not ids:
        # No explicit ids: target every session belonging to this chat.
        conv_list = [conv.id for conv in ConversationService.query(dialog_id=chat_id)]
    else:
        conv_list = ids
    # Validate first, delete second — avoids partial deletion on a bad id.
    for conv_id in conv_list:
        if not ConversationService.query(id=conv_id, dialog_id=chat_id):
            return get_error_data_result(message="The chat doesn't own the session")
    for conv_id in conv_list:
        ConversationService.delete_by_id(conv_id)
    return get_result()
|
api/apps/system_app.py
CHANGED
|
@@ -29,8 +29,6 @@ from api.utils.api_utils import (
|
|
| 29 |
get_data_error_result,
|
| 30 |
server_error_response,
|
| 31 |
generate_confirmation_token,
|
| 32 |
-
request,
|
| 33 |
-
validate_request,
|
| 34 |
)
|
| 35 |
from api.versions import get_rag_version
|
| 36 |
from rag.utils.es_conn import ELASTICSEARCH
|
|
@@ -209,7 +207,7 @@ def new_token():
|
|
| 209 |
try:
|
| 210 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 211 |
if not tenants:
|
| 212 |
-
return get_data_error_result(
|
| 213 |
|
| 214 |
tenant_id = tenants[0].tenant_id
|
| 215 |
obj = {
|
|
@@ -222,7 +220,7 @@ def new_token():
|
|
| 222 |
}
|
| 223 |
|
| 224 |
if not APITokenService.save(**obj):
|
| 225 |
-
return get_data_error_result(
|
| 226 |
|
| 227 |
return get_json_result(data=obj)
|
| 228 |
except Exception as e:
|
|
@@ -263,7 +261,7 @@ def token_list():
|
|
| 263 |
try:
|
| 264 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 265 |
if not tenants:
|
| 266 |
-
return get_data_error_result(
|
| 267 |
|
| 268 |
objs = APITokenService.query(tenant_id=tenants[0].tenant_id)
|
| 269 |
return get_json_result(data=[o.to_dict() for o in objs])
|
|
|
|
| 29 |
get_data_error_result,
|
| 30 |
server_error_response,
|
| 31 |
generate_confirmation_token,
|
|
|
|
|
|
|
| 32 |
)
|
| 33 |
from api.versions import get_rag_version
|
| 34 |
from rag.utils.es_conn import ELASTICSEARCH
|
|
|
|
| 207 |
try:
|
| 208 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 209 |
if not tenants:
|
| 210 |
+
return get_data_error_result(message="Tenant not found!")
|
| 211 |
|
| 212 |
tenant_id = tenants[0].tenant_id
|
| 213 |
obj = {
|
|
|
|
| 220 |
}
|
| 221 |
|
| 222 |
if not APITokenService.save(**obj):
|
| 223 |
+
return get_data_error_result(message="Fail to new a dialog!")
|
| 224 |
|
| 225 |
return get_json_result(data=obj)
|
| 226 |
except Exception as e:
|
|
|
|
| 261 |
try:
|
| 262 |
tenants = UserTenantService.query(user_id=current_user.id)
|
| 263 |
if not tenants:
|
| 264 |
+
return get_data_error_result(message="Tenant not found!")
|
| 265 |
|
| 266 |
objs = APITokenService.query(tenant_id=tenants[0].tenant_id)
|
| 267 |
return get_json_result(data=[o.to_dict() for o in objs])
|
api/apps/tenant_app.py
CHANGED
|
@@ -44,14 +44,14 @@ def create(tenant_id):
|
|
| 44 |
req = request.json
|
| 45 |
usrs = UserService.query(email=req["email"])
|
| 46 |
if not usrs:
|
| 47 |
-
return get_data_error_result(
|
| 48 |
|
| 49 |
user_id = usrs[0].id
|
| 50 |
user_tenants = UserTenantService.query(user_id=user_id, tenant_id=tenant_id)
|
| 51 |
if user_tenants:
|
| 52 |
if user_tenants[0].status == UserTenantRole.NORMAL.value:
|
| 53 |
-
return get_data_error_result(
|
| 54 |
-
return get_data_error_result(
|
| 55 |
|
| 56 |
UserTenantService.save(
|
| 57 |
id=get_uuid(),
|
|
|
|
| 44 |
req = request.json
|
| 45 |
usrs = UserService.query(email=req["email"])
|
| 46 |
if not usrs:
|
| 47 |
+
return get_data_error_result(message="User not found.")
|
| 48 |
|
| 49 |
user_id = usrs[0].id
|
| 50 |
user_tenants = UserTenantService.query(user_id=user_id, tenant_id=tenant_id)
|
| 51 |
if user_tenants:
|
| 52 |
if user_tenants[0].status == UserTenantRole.NORMAL.value:
|
| 53 |
+
return get_data_error_result(message="This user is in the team already.")
|
| 54 |
+
return get_data_error_result(message="Invitation notification is sent.")
|
| 55 |
|
| 56 |
UserTenantService.save(
|
| 57 |
id=get_uuid(),
|
api/apps/user_app.py
CHANGED
|
@@ -36,7 +36,7 @@ from api.utils import (
|
|
| 36 |
current_timestamp,
|
| 37 |
datetime_format,
|
| 38 |
)
|
| 39 |
-
from api.db import UserTenantRole,
|
| 40 |
from api.settings import (
|
| 41 |
RetCode,
|
| 42 |
GITHUB_OAUTH,
|
|
@@ -90,7 +90,7 @@ def login():
|
|
| 90 |
"""
|
| 91 |
if not request.json:
|
| 92 |
return get_json_result(
|
| 93 |
-
data=False,
|
| 94 |
)
|
| 95 |
|
| 96 |
email = request.json.get("email", "")
|
|
@@ -98,8 +98,8 @@ def login():
|
|
| 98 |
if not users:
|
| 99 |
return get_json_result(
|
| 100 |
data=False,
|
| 101 |
-
|
| 102 |
-
|
| 103 |
)
|
| 104 |
|
| 105 |
password = request.json.get("password")
|
|
@@ -107,7 +107,7 @@ def login():
|
|
| 107 |
password = decrypt(password)
|
| 108 |
except BaseException:
|
| 109 |
return get_json_result(
|
| 110 |
-
data=False,
|
| 111 |
)
|
| 112 |
|
| 113 |
user = UserService.query_user(email, password)
|
|
@@ -119,12 +119,12 @@ def login():
|
|
| 119 |
user.update_date = (datetime_format(datetime.now()),)
|
| 120 |
user.save()
|
| 121 |
msg = "Welcome back!"
|
| 122 |
-
return construct_response(data=response_data, auth=user.get_id(),
|
| 123 |
else:
|
| 124 |
return get_json_result(
|
| 125 |
data=False,
|
| 126 |
-
|
| 127 |
-
|
| 128 |
)
|
| 129 |
|
| 130 |
|
|
@@ -323,7 +323,7 @@ def user_info_from_feishu(access_token):
|
|
| 323 |
"Authorization": f"Bearer {access_token}",
|
| 324 |
}
|
| 325 |
res = requests.get(
|
| 326 |
-
|
| 327 |
)
|
| 328 |
user_info = res.json()["data"]
|
| 329 |
user_info["email"] = None if user_info.get("email") == "" else user_info["email"]
|
|
@@ -409,8 +409,8 @@ def setting_user():
|
|
| 409 |
):
|
| 410 |
return get_json_result(
|
| 411 |
data=False,
|
| 412 |
-
|
| 413 |
-
|
| 414 |
)
|
| 415 |
|
| 416 |
if new_password:
|
|
@@ -438,7 +438,7 @@ def setting_user():
|
|
| 438 |
except Exception as e:
|
| 439 |
stat_logger.exception(e)
|
| 440 |
return get_json_result(
|
| 441 |
-
data=False,
|
| 442 |
)
|
| 443 |
|
| 444 |
|
|
@@ -474,21 +474,21 @@ def user_profile():
|
|
| 474 |
def rollback_user_registration(user_id):
|
| 475 |
try:
|
| 476 |
UserService.delete_by_id(user_id)
|
| 477 |
-
except Exception
|
| 478 |
pass
|
| 479 |
try:
|
| 480 |
TenantService.delete_by_id(user_id)
|
| 481 |
-
except Exception
|
| 482 |
pass
|
| 483 |
try:
|
| 484 |
u = UserTenantService.query(tenant_id=user_id)
|
| 485 |
if u:
|
| 486 |
UserTenantService.delete_by_id(u[0].id)
|
| 487 |
-
except Exception
|
| 488 |
pass
|
| 489 |
try:
|
| 490 |
TenantLLM.delete().where(TenantLLM.tenant_id == user_id).execute()
|
| 491 |
-
except Exception
|
| 492 |
pass
|
| 493 |
|
| 494 |
|
|
@@ -581,16 +581,16 @@ def user_add():
|
|
| 581 |
if not re.match(r"^[\w\._-]+@([\w_-]+\.)+[\w-]{2,5}$", email_address):
|
| 582 |
return get_json_result(
|
| 583 |
data=False,
|
| 584 |
-
|
| 585 |
-
|
| 586 |
)
|
| 587 |
|
| 588 |
# Check if the email address is already used
|
| 589 |
if UserService.query(email=email_address):
|
| 590 |
return get_json_result(
|
| 591 |
data=False,
|
| 592 |
-
|
| 593 |
-
|
| 594 |
)
|
| 595 |
|
| 596 |
# Construct user info data
|
|
@@ -617,15 +617,15 @@ def user_add():
|
|
| 617 |
return construct_response(
|
| 618 |
data=user.to_json(),
|
| 619 |
auth=user.get_id(),
|
| 620 |
-
|
| 621 |
)
|
| 622 |
except Exception as e:
|
| 623 |
rollback_user_registration(user_id)
|
| 624 |
stat_logger.exception(e)
|
| 625 |
return get_json_result(
|
| 626 |
data=False,
|
| 627 |
-
|
| 628 |
-
|
| 629 |
)
|
| 630 |
|
| 631 |
|
|
@@ -661,7 +661,7 @@ def tenant_info():
|
|
| 661 |
try:
|
| 662 |
tenants = TenantService.get_info_by(current_user.id)
|
| 663 |
if not tenants:
|
| 664 |
-
return get_data_error_result(
|
| 665 |
return get_json_result(data=tenants[0])
|
| 666 |
except Exception as e:
|
| 667 |
return server_error_response(e)
|
|
|
|
| 36 |
current_timestamp,
|
| 37 |
datetime_format,
|
| 38 |
)
|
| 39 |
+
from api.db import UserTenantRole, FileType
|
| 40 |
from api.settings import (
|
| 41 |
RetCode,
|
| 42 |
GITHUB_OAUTH,
|
|
|
|
| 90 |
"""
|
| 91 |
if not request.json:
|
| 92 |
return get_json_result(
|
| 93 |
+
data=False, code=RetCode.AUTHENTICATION_ERROR, message="Unauthorized!"
|
| 94 |
)
|
| 95 |
|
| 96 |
email = request.json.get("email", "")
|
|
|
|
| 98 |
if not users:
|
| 99 |
return get_json_result(
|
| 100 |
data=False,
|
| 101 |
+
code=RetCode.AUTHENTICATION_ERROR,
|
| 102 |
+
message=f"Email: {email} is not registered!",
|
| 103 |
)
|
| 104 |
|
| 105 |
password = request.json.get("password")
|
|
|
|
| 107 |
password = decrypt(password)
|
| 108 |
except BaseException:
|
| 109 |
return get_json_result(
|
| 110 |
+
data=False, code=RetCode.SERVER_ERROR, message="Fail to crypt password"
|
| 111 |
)
|
| 112 |
|
| 113 |
user = UserService.query_user(email, password)
|
|
|
|
| 119 |
user.update_date = (datetime_format(datetime.now()),)
|
| 120 |
user.save()
|
| 121 |
msg = "Welcome back!"
|
| 122 |
+
return construct_response(data=response_data, auth=user.get_id(), message=msg)
|
| 123 |
else:
|
| 124 |
return get_json_result(
|
| 125 |
data=False,
|
| 126 |
+
code=RetCode.AUTHENTICATION_ERROR,
|
| 127 |
+
message="Email and password do not match!",
|
| 128 |
)
|
| 129 |
|
| 130 |
|
|
|
|
| 323 |
"Authorization": f"Bearer {access_token}",
|
| 324 |
}
|
| 325 |
res = requests.get(
|
| 326 |
+
"https://open.feishu.cn/open-apis/authen/v1/user_info", headers=headers
|
| 327 |
)
|
| 328 |
user_info = res.json()["data"]
|
| 329 |
user_info["email"] = None if user_info.get("email") == "" else user_info["email"]
|
|
|
|
| 409 |
):
|
| 410 |
return get_json_result(
|
| 411 |
data=False,
|
| 412 |
+
code=RetCode.AUTHENTICATION_ERROR,
|
| 413 |
+
message="Password error!",
|
| 414 |
)
|
| 415 |
|
| 416 |
if new_password:
|
|
|
|
| 438 |
except Exception as e:
|
| 439 |
stat_logger.exception(e)
|
| 440 |
return get_json_result(
|
| 441 |
+
data=False, message="Update failure!", code=RetCode.EXCEPTION_ERROR
|
| 442 |
)
|
| 443 |
|
| 444 |
|
|
|
|
| 474 |
def rollback_user_registration(user_id):
|
| 475 |
try:
|
| 476 |
UserService.delete_by_id(user_id)
|
| 477 |
+
except Exception:
|
| 478 |
pass
|
| 479 |
try:
|
| 480 |
TenantService.delete_by_id(user_id)
|
| 481 |
+
except Exception:
|
| 482 |
pass
|
| 483 |
try:
|
| 484 |
u = UserTenantService.query(tenant_id=user_id)
|
| 485 |
if u:
|
| 486 |
UserTenantService.delete_by_id(u[0].id)
|
| 487 |
+
except Exception:
|
| 488 |
pass
|
| 489 |
try:
|
| 490 |
TenantLLM.delete().where(TenantLLM.tenant_id == user_id).execute()
|
| 491 |
+
except Exception:
|
| 492 |
pass
|
| 493 |
|
| 494 |
|
|
|
|
| 581 |
if not re.match(r"^[\w\._-]+@([\w_-]+\.)+[\w-]{2,5}$", email_address):
|
| 582 |
return get_json_result(
|
| 583 |
data=False,
|
| 584 |
+
message=f"Invalid email address: {email_address}!",
|
| 585 |
+
code=RetCode.OPERATING_ERROR,
|
| 586 |
)
|
| 587 |
|
| 588 |
# Check if the email address is already used
|
| 589 |
if UserService.query(email=email_address):
|
| 590 |
return get_json_result(
|
| 591 |
data=False,
|
| 592 |
+
message=f"Email: {email_address} has already registered!",
|
| 593 |
+
code=RetCode.OPERATING_ERROR,
|
| 594 |
)
|
| 595 |
|
| 596 |
# Construct user info data
|
|
|
|
| 617 |
return construct_response(
|
| 618 |
data=user.to_json(),
|
| 619 |
auth=user.get_id(),
|
| 620 |
+
message=f"{nickname}, welcome aboard!",
|
| 621 |
)
|
| 622 |
except Exception as e:
|
| 623 |
rollback_user_registration(user_id)
|
| 624 |
stat_logger.exception(e)
|
| 625 |
return get_json_result(
|
| 626 |
data=False,
|
| 627 |
+
message=f"User registration failure, error: {str(e)}",
|
| 628 |
+
code=RetCode.EXCEPTION_ERROR,
|
| 629 |
)
|
| 630 |
|
| 631 |
|
|
|
|
| 661 |
try:
|
| 662 |
tenants = TenantService.get_info_by(current_user.id)
|
| 663 |
if not tenants:
|
| 664 |
+
return get_data_error_result(message="Tenant not found!")
|
| 665 |
return get_json_result(data=tenants[0])
|
| 666 |
except Exception as e:
|
| 667 |
return server_error_response(e)
|
api/utils/api_utils.py
CHANGED
|
@@ -97,19 +97,19 @@ def get_exponential_backoff_interval(retries, full_jitter=False):
|
|
| 97 |
return max(0, countdown)
|
| 98 |
|
| 99 |
|
| 100 |
-
def get_data_error_result(
|
| 101 |
-
|
| 102 |
import re
|
| 103 |
result_dict = {
|
| 104 |
-
"
|
| 105 |
-
"
|
| 106 |
r"rag",
|
| 107 |
"seceum",
|
| 108 |
-
|
| 109 |
flags=re.IGNORECASE)}
|
| 110 |
response = {}
|
| 111 |
for key, value in result_dict.items():
|
| 112 |
-
if value is None and key != "
|
| 113 |
continue
|
| 114 |
else:
|
| 115 |
response[key] = value
|
|
@@ -120,26 +120,26 @@ def server_error_response(e):
|
|
| 120 |
stat_logger.exception(e)
|
| 121 |
try:
|
| 122 |
if e.code == 401:
|
| 123 |
-
return get_json_result(
|
| 124 |
except BaseException:
|
| 125 |
pass
|
| 126 |
if len(e.args) > 1:
|
| 127 |
return get_json_result(
|
| 128 |
-
|
| 129 |
if repr(e).find("index_not_found_exception") >= 0:
|
| 130 |
-
return get_json_result(
|
| 131 |
-
|
| 132 |
|
| 133 |
-
return get_json_result(
|
| 134 |
|
| 135 |
|
| 136 |
-
def error_response(response_code,
|
| 137 |
-
if
|
| 138 |
-
|
| 139 |
|
| 140 |
return Response(json.dumps({
|
| 141 |
-
'
|
| 142 |
-
'
|
| 143 |
}), status=response_code, mimetype='application/json')
|
| 144 |
|
| 145 |
|
|
@@ -171,7 +171,7 @@ def validate_request(*args, **kwargs):
|
|
| 171 |
error_string += "required argument values: {}".format(
|
| 172 |
",".join(["{}={}".format(a[0], a[1]) for a in error_arguments]))
|
| 173 |
return get_json_result(
|
| 174 |
-
|
| 175 |
return func(*_args, **_kwargs)
|
| 176 |
|
| 177 |
return decorated_function
|
|
@@ -196,8 +196,8 @@ def send_file_in_mem(data, filename):
|
|
| 196 |
return send_file(f, as_attachment=True, attachment_filename=filename)
|
| 197 |
|
| 198 |
|
| 199 |
-
def get_json_result(
|
| 200 |
-
response = {"
|
| 201 |
return jsonify(response)
|
| 202 |
|
| 203 |
def apikey_required(func):
|
|
@@ -207,7 +207,7 @@ def apikey_required(func):
|
|
| 207 |
objs = APIToken.query(token=token)
|
| 208 |
if not objs:
|
| 209 |
return build_error_result(
|
| 210 |
-
|
| 211 |
)
|
| 212 |
kwargs['tenant_id'] = objs[0].tenant_id
|
| 213 |
return func(*args, **kwargs)
|
|
@@ -215,19 +215,19 @@ def apikey_required(func):
|
|
| 215 |
return decorated_function
|
| 216 |
|
| 217 |
|
| 218 |
-
def build_error_result(
|
| 219 |
-
response = {"
|
| 220 |
response = jsonify(response)
|
| 221 |
-
response.status_code =
|
| 222 |
return response
|
| 223 |
|
| 224 |
|
| 225 |
-
def construct_response(
|
| 226 |
-
|
| 227 |
-
result_dict = {"
|
| 228 |
response_dict = {}
|
| 229 |
for key, value in result_dict.items():
|
| 230 |
-
if value is None and key != "
|
| 231 |
continue
|
| 232 |
else:
|
| 233 |
response_dict[key] = value
|
|
@@ -284,7 +284,7 @@ def token_required(func):
|
|
| 284 |
objs = APIToken.query(token=token)
|
| 285 |
if not objs:
|
| 286 |
return get_json_result(
|
| 287 |
-
data=False,
|
| 288 |
)
|
| 289 |
kwargs['tenant_id'] = objs[0].tenant_id
|
| 290 |
return func(*args, **kwargs)
|
|
@@ -292,26 +292,26 @@ def token_required(func):
|
|
| 292 |
return decorated_function
|
| 293 |
|
| 294 |
|
| 295 |
-
def get_result(
|
| 296 |
-
if
|
| 297 |
if data is not None:
|
| 298 |
-
response = {"code":
|
| 299 |
else:
|
| 300 |
-
response = {"code":
|
| 301 |
else:
|
| 302 |
-
response = {"code":
|
| 303 |
return jsonify(response)
|
| 304 |
|
| 305 |
|
| 306 |
-
def get_error_data_result(
|
| 307 |
):
|
| 308 |
import re
|
| 309 |
result_dict = {
|
| 310 |
-
"code":
|
| 311 |
"message": re.sub(
|
| 312 |
r"rag",
|
| 313 |
"seceum",
|
| 314 |
-
|
| 315 |
flags=re.IGNORECASE)}
|
| 316 |
response = {}
|
| 317 |
for key, value in result_dict.items():
|
|
|
|
| 97 |
return max(0, countdown)
|
| 98 |
|
| 99 |
|
| 100 |
+
def get_data_error_result(code=RetCode.DATA_ERROR,
|
| 101 |
+
message='Sorry! Data missing!'):
|
| 102 |
import re
|
| 103 |
result_dict = {
|
| 104 |
+
"code": code,
|
| 105 |
+
"message": re.sub(
|
| 106 |
r"rag",
|
| 107 |
"seceum",
|
| 108 |
+
message,
|
| 109 |
flags=re.IGNORECASE)}
|
| 110 |
response = {}
|
| 111 |
for key, value in result_dict.items():
|
| 112 |
+
if value is None and key != "code":
|
| 113 |
continue
|
| 114 |
else:
|
| 115 |
response[key] = value
|
|
|
|
| 120 |
stat_logger.exception(e)
|
| 121 |
try:
|
| 122 |
if e.code == 401:
|
| 123 |
+
return get_json_result(code=401, message=repr(e))
|
| 124 |
except BaseException:
|
| 125 |
pass
|
| 126 |
if len(e.args) > 1:
|
| 127 |
return get_json_result(
|
| 128 |
+
code=RetCode.EXCEPTION_ERROR, message=repr(e.args[0]), data=e.args[1])
|
| 129 |
if repr(e).find("index_not_found_exception") >= 0:
|
| 130 |
+
return get_json_result(code=RetCode.EXCEPTION_ERROR,
|
| 131 |
+
message="No chunk found, please upload file and parse it.")
|
| 132 |
|
| 133 |
+
return get_json_result(code=RetCode.EXCEPTION_ERROR, message=repr(e))
|
| 134 |
|
| 135 |
|
| 136 |
+
def error_response(response_code, message=None):
|
| 137 |
+
if message is None:
|
| 138 |
+
message = HTTP_STATUS_CODES.get(response_code, 'Unknown Error')
|
| 139 |
|
| 140 |
return Response(json.dumps({
|
| 141 |
+
'message': message,
|
| 142 |
+
'code': response_code,
|
| 143 |
}), status=response_code, mimetype='application/json')
|
| 144 |
|
| 145 |
|
|
|
|
| 171 |
error_string += "required argument values: {}".format(
|
| 172 |
",".join(["{}={}".format(a[0], a[1]) for a in error_arguments]))
|
| 173 |
return get_json_result(
|
| 174 |
+
code=RetCode.ARGUMENT_ERROR, message=error_string)
|
| 175 |
return func(*_args, **_kwargs)
|
| 176 |
|
| 177 |
return decorated_function
|
|
|
|
| 196 |
return send_file(f, as_attachment=True, attachment_filename=filename)
|
| 197 |
|
| 198 |
|
| 199 |
+
def get_json_result(code=RetCode.SUCCESS, message='success', data=None):
|
| 200 |
+
response = {"code": code, "message": message, "data": data}
|
| 201 |
return jsonify(response)
|
| 202 |
|
| 203 |
def apikey_required(func):
|
|
|
|
| 207 |
objs = APIToken.query(token=token)
|
| 208 |
if not objs:
|
| 209 |
return build_error_result(
|
| 210 |
+
message='API-KEY is invalid!', code=RetCode.FORBIDDEN
|
| 211 |
)
|
| 212 |
kwargs['tenant_id'] = objs[0].tenant_id
|
| 213 |
return func(*args, **kwargs)
|
|
|
|
| 215 |
return decorated_function
|
| 216 |
|
| 217 |
|
| 218 |
+
def build_error_result(code=RetCode.FORBIDDEN, message='success'):
|
| 219 |
+
response = {"code": code, "message": message}
|
| 220 |
response = jsonify(response)
|
| 221 |
+
response.status_code = code
|
| 222 |
return response
|
| 223 |
|
| 224 |
|
| 225 |
+
def construct_response(code=RetCode.SUCCESS,
|
| 226 |
+
message='success', data=None, auth=None):
|
| 227 |
+
result_dict = {"code": code, "message": message, "data": data}
|
| 228 |
response_dict = {}
|
| 229 |
for key, value in result_dict.items():
|
| 230 |
+
if value is None and key != "code":
|
| 231 |
continue
|
| 232 |
else:
|
| 233 |
response_dict[key] = value
|
|
|
|
| 284 |
objs = APIToken.query(token=token)
|
| 285 |
if not objs:
|
| 286 |
return get_json_result(
|
| 287 |
+
data=False, message='Token is not valid!', code=RetCode.AUTHENTICATION_ERROR
|
| 288 |
)
|
| 289 |
kwargs['tenant_id'] = objs[0].tenant_id
|
| 290 |
return func(*args, **kwargs)
|
|
|
|
| 292 |
return decorated_function
|
| 293 |
|
| 294 |
|
| 295 |
+
def get_result(code=RetCode.SUCCESS, message='error', data=None):
|
| 296 |
+
if code == 0:
|
| 297 |
if data is not None:
|
| 298 |
+
response = {"code": code, "data": data}
|
| 299 |
else:
|
| 300 |
+
response = {"code": code}
|
| 301 |
else:
|
| 302 |
+
response = {"code": code, "message": message}
|
| 303 |
return jsonify(response)
|
| 304 |
|
| 305 |
|
| 306 |
+
def get_error_data_result(message='Sorry! Data missing!', code=RetCode.DATA_ERROR,
|
| 307 |
):
|
| 308 |
import re
|
| 309 |
result_dict = {
|
| 310 |
+
"code": code,
|
| 311 |
"message": re.sub(
|
| 312 |
r"rag",
|
| 313 |
"seceum",
|
| 314 |
+
message,
|
| 315 |
flags=re.IGNORECASE)}
|
| 316 |
response = {}
|
| 317 |
for key, value in result_dict.items():
|
docs/references/faq.md
CHANGED
|
@@ -276,7 +276,7 @@ $ docker ps
|
|
| 276 |
|
| 277 |
This is because you forgot to update the `vm.max_map_count` value in **/etc/sysctl.conf** and your change to this value was reset after a system reboot.
|
| 278 |
|
| 279 |
-
#### 4.10 `{"data":null,"
|
| 280 |
|
| 281 |
Your IP address or port number may be incorrect. If you are using the default configurations, enter `http://<IP_OF_YOUR_MACHINE>` (**NOT 9380, AND NO PORT NUMBER REQUIRED!**) in your browser. This should work.
|
| 282 |
|
|
|
|
| 276 |
|
| 277 |
This is because you forgot to update the `vm.max_map_count` value in **/etc/sysctl.conf** and your change to this value was reset after a system reboot.
|
| 278 |
|
| 279 |
+
#### 4.10 `{"data":null,"code":100,"message":"<NotFound '404: Not Found'>"}`
|
| 280 |
|
| 281 |
Your IP address or port number may be incorrect. If you are using the default configurations, enter `http://<IP_OF_YOUR_MACHINE>` (**NOT 9380, AND NO PORT NUMBER REQUIRED!**) in your browser. This should work.
|
| 282 |
|
intergrations/chatgpt-on-wechat/plugins/ragflow_chat.py
CHANGED
|
@@ -1,10 +1,8 @@
|
|
| 1 |
import requests
|
| 2 |
-
import
|
| 3 |
-
from bridge.context import Context, ContextType # Import Context, ContextType
|
| 4 |
from bridge.reply import Reply, ReplyType # Import Reply, ReplyType
|
| 5 |
from bridge import *
|
| 6 |
from common.log import logger
|
| 7 |
-
from config import conf
|
| 8 |
from plugins import Plugin, register # Import Plugin and register
|
| 9 |
from plugins.event import Event, EventContext, EventAction # Import event-related classes
|
| 10 |
|
|
@@ -68,12 +66,12 @@ class RAGFlowChat(Plugin):
|
|
| 68 |
logger.debug(f"[RAGFlowChat] New conversation response: {response.text}")
|
| 69 |
if response.status_code == 200:
|
| 70 |
data = response.json()
|
| 71 |
-
if data.get("
|
| 72 |
conversation_id = data["data"]["id"]
|
| 73 |
self.conversations[user_id] = conversation_id
|
| 74 |
else:
|
| 75 |
-
logger.error(f"[RAGFlowChat] Failed to create conversation: {data.get('
|
| 76 |
-
return f"Sorry, unable to create a conversation: {data.get('
|
| 77 |
else:
|
| 78 |
logger.error(f"[RAGFlowChat] HTTP error when creating conversation: {response.status_code}")
|
| 79 |
return f"Sorry, unable to connect to RAGFlow API (create conversation). HTTP status code: {response.status_code}"
|
|
@@ -100,12 +98,12 @@ class RAGFlowChat(Plugin):
|
|
| 100 |
logger.debug(f"[RAGFlowChat] Completion response: {response.text}")
|
| 101 |
if response.status_code == 200:
|
| 102 |
data = response.json()
|
| 103 |
-
if data.get("
|
| 104 |
answer = data["data"]["answer"]
|
| 105 |
return answer
|
| 106 |
else:
|
| 107 |
-
logger.error(f"[RAGFlowChat] Failed to get answer: {data.get('
|
| 108 |
-
return f"Sorry, unable to get a reply: {data.get('
|
| 109 |
else:
|
| 110 |
logger.error(f"[RAGFlowChat] HTTP error when getting answer: {response.status_code}")
|
| 111 |
return f"Sorry, unable to connect to RAGFlow API (get reply). HTTP status code: {response.status_code}"
|
|
|
|
| 1 |
import requests
|
| 2 |
+
from bridge.context import ContextType # Import Context, ContextType
|
|
|
|
| 3 |
from bridge.reply import Reply, ReplyType # Import Reply, ReplyType
|
| 4 |
from bridge import *
|
| 5 |
from common.log import logger
|
|
|
|
| 6 |
from plugins import Plugin, register # Import Plugin and register
|
| 7 |
from plugins.event import Event, EventContext, EventAction # Import event-related classes
|
| 8 |
|
|
|
|
| 66 |
logger.debug(f"[RAGFlowChat] New conversation response: {response.text}")
|
| 67 |
if response.status_code == 200:
|
| 68 |
data = response.json()
|
| 69 |
+
if data.get("code") == 0:
|
| 70 |
conversation_id = data["data"]["id"]
|
| 71 |
self.conversations[user_id] = conversation_id
|
| 72 |
else:
|
| 73 |
+
logger.error(f"[RAGFlowChat] Failed to create conversation: {data.get('message')}")
|
| 74 |
+
return f"Sorry, unable to create a conversation: {data.get('message')}"
|
| 75 |
else:
|
| 76 |
logger.error(f"[RAGFlowChat] HTTP error when creating conversation: {response.status_code}")
|
| 77 |
return f"Sorry, unable to connect to RAGFlow API (create conversation). HTTP status code: {response.status_code}"
|
|
|
|
| 98 |
logger.debug(f"[RAGFlowChat] Completion response: {response.text}")
|
| 99 |
if response.status_code == 200:
|
| 100 |
data = response.json()
|
| 101 |
+
if data.get("code") == 0:
|
| 102 |
answer = data["data"]["answer"]
|
| 103 |
return answer
|
| 104 |
else:
|
| 105 |
+
logger.error(f"[RAGFlowChat] Failed to get answer: {data.get('message')}")
|
| 106 |
+
return f"Sorry, unable to get a reply: {data.get('message')}"
|
| 107 |
else:
|
| 108 |
logger.error(f"[RAGFlowChat] HTTP error when getting answer: {response.status_code}")
|
| 109 |
return f"Sorry, unable to connect to RAGFlow API (get reply). HTTP status code: {response.status_code}"
|
sdk/python/test/conftest.py
CHANGED
|
@@ -25,16 +25,16 @@ def register():
|
|
| 25 |
register_data = {"email":EMAIL,"nickname":name,"password":PASSWORD}
|
| 26 |
res = requests.post(url=url,json=register_data)
|
| 27 |
res = res.json()
|
| 28 |
-
if res.get("
|
| 29 |
-
raise Exception(res.get("
|
| 30 |
|
| 31 |
def login():
|
| 32 |
url = HOST_ADDRESS + "/v1/user/login"
|
| 33 |
login_data = {"email":EMAIL,"password":PASSWORD}
|
| 34 |
response=requests.post(url=url,json=login_data)
|
| 35 |
res = response.json()
|
| 36 |
-
if res.get("
|
| 37 |
-
raise Exception(res.get("
|
| 38 |
auth = response.headers["Authorization"]
|
| 39 |
return auth
|
| 40 |
|
|
@@ -46,7 +46,7 @@ def get_api_key_fixture():
|
|
| 46 |
auth = {"Authorization": auth}
|
| 47 |
response = requests.post(url=url,headers=auth)
|
| 48 |
res = response.json()
|
| 49 |
-
if res.get("
|
| 50 |
-
raise Exception(res.get("
|
| 51 |
return res["data"].get("token")
|
| 52 |
|
|
|
|
| 25 |
register_data = {"email":EMAIL,"nickname":name,"password":PASSWORD}
|
| 26 |
res = requests.post(url=url,json=register_data)
|
| 27 |
res = res.json()
|
| 28 |
+
if res.get("code") != 0:
|
| 29 |
+
raise Exception(res.get("message"))
|
| 30 |
|
| 31 |
def login():
|
| 32 |
url = HOST_ADDRESS + "/v1/user/login"
|
| 33 |
login_data = {"email":EMAIL,"password":PASSWORD}
|
| 34 |
response=requests.post(url=url,json=login_data)
|
| 35 |
res = response.json()
|
| 36 |
+
if res.get("code")!=0:
|
| 37 |
+
raise Exception(res.get("message"))
|
| 38 |
auth = response.headers["Authorization"]
|
| 39 |
return auth
|
| 40 |
|
|
|
|
| 46 |
auth = {"Authorization": auth}
|
| 47 |
response = requests.post(url=url,headers=auth)
|
| 48 |
res = response.json()
|
| 49 |
+
if res.get("code") != 0:
|
| 50 |
+
raise Exception(res.get("message"))
|
| 51 |
return res["data"].get("token")
|
| 52 |
|
sdk/python/test/ragflow.txt
CHANGED
|
@@ -1 +1 @@
|
|
| 1 |
-
{"data":null,"
|
|
|
|
| 1 |
+
{"data":null,"code":100,"message":"TypeError(\"download_document() got an unexpected keyword argument 'tenant_id'\")"}
|
web/src/components/message-input/index.tsx
CHANGED
|
@@ -53,8 +53,8 @@ const getFileIds = (fileList: UploadFile[]) => {
|
|
| 53 |
};
|
| 54 |
|
| 55 |
const isUploadSuccess = (file: UploadFile) => {
|
| 56 |
-
const
|
| 57 |
-
return typeof
|
| 58 |
};
|
| 59 |
|
| 60 |
interface IProps {
|
|
@@ -116,7 +116,7 @@ const MessageInput = ({
|
|
| 116 |
const creatingRet = await createConversationBeforeUploadDocument(
|
| 117 |
file.name,
|
| 118 |
);
|
| 119 |
-
if (creatingRet?.
|
| 120 |
nextConversationId = creatingRet.data.id;
|
| 121 |
}
|
| 122 |
}
|
|
@@ -140,7 +140,7 @@ const MessageInput = ({
|
|
| 140 |
originFileObj: file as any,
|
| 141 |
response: ret,
|
| 142 |
percent: 100,
|
| 143 |
-
status: ret?.
|
| 144 |
});
|
| 145 |
return nextList;
|
| 146 |
});
|
|
|
|
| 53 |
};
|
| 54 |
|
| 55 |
const isUploadSuccess = (file: UploadFile) => {
|
| 56 |
+
const code = get(file, 'response.code');
|
| 57 |
+
return typeof code === 'number' && code === 0;
|
| 58 |
};
|
| 59 |
|
| 60 |
interface IProps {
|
|
|
|
| 116 |
const creatingRet = await createConversationBeforeUploadDocument(
|
| 117 |
file.name,
|
| 118 |
);
|
| 119 |
+
if (creatingRet?.code === 0) {
|
| 120 |
nextConversationId = creatingRet.data.id;
|
| 121 |
}
|
| 122 |
}
|
|
|
|
| 140 |
originFileObj: file as any,
|
| 141 |
response: ret,
|
| 142 |
percent: 100,
|
| 143 |
+
status: ret?.code === 0 ? 'done' : 'error',
|
| 144 |
});
|
| 145 |
return nextList;
|
| 146 |
});
|
web/src/components/message-item/hooks.ts
CHANGED
|
@@ -43,8 +43,8 @@ export const useRemoveMessage = (
|
|
| 43 |
const onRemoveMessage = useCallback(async () => {
|
| 44 |
const pureId = getMessagePureId(messageId);
|
| 45 |
if (pureId) {
|
| 46 |
-
const
|
| 47 |
-
if (
|
| 48 |
removeMessageById?.(messageId);
|
| 49 |
}
|
| 50 |
}
|
|
|
|
| 43 |
const onRemoveMessage = useCallback(async () => {
|
| 44 |
const pureId = getMessagePureId(messageId);
|
| 45 |
if (pureId) {
|
| 46 |
+
const code = await deleteMessage(pureId);
|
| 47 |
+
if (code === 0) {
|
| 48 |
removeMessageById?.(messageId);
|
| 49 |
}
|
| 50 |
}
|
web/src/components/pdf-previewer/hooks.ts
CHANGED
|
@@ -6,8 +6,8 @@ export const useCatchDocumentError = (url: string) => {
|
|
| 6 |
|
| 7 |
const fetchDocument = useCallback(async () => {
|
| 8 |
const { data } = await axios.get(url);
|
| 9 |
-
if (data.
|
| 10 |
-
setError(data?.
|
| 11 |
}
|
| 12 |
}, [url]);
|
| 13 |
useEffect(() => {
|
|
|
|
| 6 |
|
| 7 |
const fetchDocument = useCallback(async () => {
|
| 8 |
const { data } = await axios.get(url);
|
| 9 |
+
if (data.code !== 0) {
|
| 10 |
+
setError(data?.message);
|
| 11 |
}
|
| 12 |
}, [url]);
|
| 13 |
useEffect(() => {
|
web/src/hooks/chat-hooks.ts
CHANGED
|
@@ -99,7 +99,7 @@ export const useFetchNextDialogList = () => {
|
|
| 99 |
console.log('🚀 ~ queryFn: ~ params:', params);
|
| 100 |
const { data } = await chatService.listDialog();
|
| 101 |
|
| 102 |
-
if (data.
|
| 103 |
const list: IDialog[] = data.data;
|
| 104 |
if (list.length > 0) {
|
| 105 |
if (list.every((x) => x.id !== dialogId)) {
|
|
@@ -128,7 +128,7 @@ export const useSetNextDialog = () => {
|
|
| 128 |
mutationKey: ['setDialog'],
|
| 129 |
mutationFn: async (params: IDialog) => {
|
| 130 |
const { data } = await chatService.setDialog(params);
|
| 131 |
-
if (data.
|
| 132 |
queryClient.invalidateQueries({
|
| 133 |
exact: false,
|
| 134 |
queryKey: ['fetchDialogList'],
|
|
@@ -141,7 +141,7 @@ export const useSetNextDialog = () => {
|
|
| 141 |
i18n.t(`message.${params.dialog_id ? 'modified' : 'created'}`),
|
| 142 |
);
|
| 143 |
}
|
| 144 |
-
return data?.
|
| 145 |
},
|
| 146 |
});
|
| 147 |
|
|
@@ -200,12 +200,12 @@ export const useRemoveNextDialog = () => {
|
|
| 200 |
mutationKey: ['removeDialog'],
|
| 201 |
mutationFn: async (dialogIds: string[]) => {
|
| 202 |
const { data } = await chatService.removeDialog({ dialogIds });
|
| 203 |
-
if (data.
|
| 204 |
queryClient.invalidateQueries({ queryKey: ['fetchDialogList'] });
|
| 205 |
|
| 206 |
message.success(i18n.t('message.deleted'));
|
| 207 |
}
|
| 208 |
-
return data.
|
| 209 |
},
|
| 210 |
});
|
| 211 |
|
|
@@ -231,7 +231,7 @@ export const useFetchNextConversationList = () => {
|
|
| 231 |
enabled: !!dialogId,
|
| 232 |
queryFn: async () => {
|
| 233 |
const { data } = await chatService.listConversation({ dialogId });
|
| 234 |
-
if (data.
|
| 235 |
handleClickConversation(data.data[0].id, '');
|
| 236 |
}
|
| 237 |
return data?.data;
|
|
@@ -303,7 +303,7 @@ export const useUpdateNextConversation = () => {
|
|
| 303 |
? params.conversation_id
|
| 304 |
: getConversationId(),
|
| 305 |
});
|
| 306 |
-
if (data.
|
| 307 |
queryClient.invalidateQueries({ queryKey: ['fetchConversationList'] });
|
| 308 |
}
|
| 309 |
return data;
|
|
@@ -328,10 +328,10 @@ export const useRemoveNextConversation = () => {
|
|
| 328 |
conversationIds,
|
| 329 |
dialogId,
|
| 330 |
});
|
| 331 |
-
if (data.
|
| 332 |
queryClient.invalidateQueries({ queryKey: ['fetchConversationList'] });
|
| 333 |
}
|
| 334 |
-
return data.
|
| 335 |
},
|
| 336 |
});
|
| 337 |
|
|
@@ -353,11 +353,11 @@ export const useDeleteMessage = () => {
|
|
| 353 |
conversationId,
|
| 354 |
});
|
| 355 |
|
| 356 |
-
if (data.
|
| 357 |
message.success(i18n.t(`message.deleted`));
|
| 358 |
}
|
| 359 |
|
| 360 |
-
return data.
|
| 361 |
},
|
| 362 |
});
|
| 363 |
|
|
@@ -378,10 +378,10 @@ export const useFeedback = () => {
|
|
| 378 |
...params,
|
| 379 |
conversationId,
|
| 380 |
});
|
| 381 |
-
if (data.
|
| 382 |
message.success(i18n.t(`message.operated`));
|
| 383 |
}
|
| 384 |
-
return data.
|
| 385 |
},
|
| 386 |
});
|
| 387 |
|
|
@@ -402,7 +402,7 @@ export const useCreateNextToken = () => {
|
|
| 402 |
mutationKey: ['createToken'],
|
| 403 |
mutationFn: async (params: Record<string, any>) => {
|
| 404 |
const { data } = await chatService.createToken(params);
|
| 405 |
-
if (data.
|
| 406 |
queryClient.invalidateQueries({ queryKey: ['fetchTokenList'] });
|
| 407 |
}
|
| 408 |
return data?.data ?? [];
|
|
@@ -445,7 +445,7 @@ export const useRemoveNextToken = () => {
|
|
| 445 |
tokens: string[];
|
| 446 |
}) => {
|
| 447 |
const { data } = await chatService.removeToken(params);
|
| 448 |
-
if (data.
|
| 449 |
queryClient.invalidateQueries({ queryKey: ['fetchTokenList'] });
|
| 450 |
}
|
| 451 |
return data?.data ?? [];
|
|
|
|
| 99 |
console.log('🚀 ~ queryFn: ~ params:', params);
|
| 100 |
const { data } = await chatService.listDialog();
|
| 101 |
|
| 102 |
+
if (data.code === 0) {
|
| 103 |
const list: IDialog[] = data.data;
|
| 104 |
if (list.length > 0) {
|
| 105 |
if (list.every((x) => x.id !== dialogId)) {
|
|
|
|
| 128 |
mutationKey: ['setDialog'],
|
| 129 |
mutationFn: async (params: IDialog) => {
|
| 130 |
const { data } = await chatService.setDialog(params);
|
| 131 |
+
if (data.code === 0) {
|
| 132 |
queryClient.invalidateQueries({
|
| 133 |
exact: false,
|
| 134 |
queryKey: ['fetchDialogList'],
|
|
|
|
| 141 |
i18n.t(`message.${params.dialog_id ? 'modified' : 'created'}`),
|
| 142 |
);
|
| 143 |
}
|
| 144 |
+
return data?.code;
|
| 145 |
},
|
| 146 |
});
|
| 147 |
|
|
|
|
| 200 |
mutationKey: ['removeDialog'],
|
| 201 |
mutationFn: async (dialogIds: string[]) => {
|
| 202 |
const { data } = await chatService.removeDialog({ dialogIds });
|
| 203 |
+
if (data.code === 0) {
|
| 204 |
queryClient.invalidateQueries({ queryKey: ['fetchDialogList'] });
|
| 205 |
|
| 206 |
message.success(i18n.t('message.deleted'));
|
| 207 |
}
|
| 208 |
+
return data.code;
|
| 209 |
},
|
| 210 |
});
|
| 211 |
|
|
|
|
| 231 |
enabled: !!dialogId,
|
| 232 |
queryFn: async () => {
|
| 233 |
const { data } = await chatService.listConversation({ dialogId });
|
| 234 |
+
if (data.code === 0 && data.data.length > 0) {
|
| 235 |
handleClickConversation(data.data[0].id, '');
|
| 236 |
}
|
| 237 |
return data?.data;
|
|
|
|
| 303 |
? params.conversation_id
|
| 304 |
: getConversationId(),
|
| 305 |
});
|
| 306 |
+
if (data.code === 0) {
|
| 307 |
queryClient.invalidateQueries({ queryKey: ['fetchConversationList'] });
|
| 308 |
}
|
| 309 |
return data;
|
|
|
|
| 328 |
conversationIds,
|
| 329 |
dialogId,
|
| 330 |
});
|
| 331 |
+
if (data.code === 0) {
|
| 332 |
queryClient.invalidateQueries({ queryKey: ['fetchConversationList'] });
|
| 333 |
}
|
| 334 |
+
return data.code;
|
| 335 |
},
|
| 336 |
});
|
| 337 |
|
|
|
|
| 353 |
conversationId,
|
| 354 |
});
|
| 355 |
|
| 356 |
+
if (data.code === 0) {
|
| 357 |
message.success(i18n.t(`message.deleted`));
|
| 358 |
}
|
| 359 |
|
| 360 |
+
return data.code;
|
| 361 |
},
|
| 362 |
});
|
| 363 |
|
|
|
|
| 378 |
...params,
|
| 379 |
conversationId,
|
| 380 |
});
|
| 381 |
+
if (data.code === 0) {
|
| 382 |
message.success(i18n.t(`message.operated`));
|
| 383 |
}
|
| 384 |
+
return data.code;
|
| 385 |
},
|
| 386 |
});
|
| 387 |
|
|
|
|
| 402 |
mutationKey: ['createToken'],
|
| 403 |
mutationFn: async (params: Record<string, any>) => {
|
| 404 |
const { data } = await chatService.createToken(params);
|
| 405 |
+
if (data.code === 0) {
|
| 406 |
queryClient.invalidateQueries({ queryKey: ['fetchTokenList'] });
|
| 407 |
}
|
| 408 |
return data?.data ?? [];
|
|
|
|
| 445 |
tokens: string[];
|
| 446 |
}) => {
|
| 447 |
const { data } = await chatService.removeToken(params);
|
| 448 |
+
if (data.code === 0) {
|
| 449 |
queryClient.invalidateQueries({ queryKey: ['fetchTokenList'] });
|
| 450 |
}
|
| 451 |
return data?.data ?? [];
|
web/src/hooks/chunk-hooks.ts
CHANGED
|
@@ -57,7 +57,7 @@ export const useFetchNextChunkList = (): ResponseGetType<{
|
|
| 57 |
available_int: available,
|
| 58 |
keywords: searchString,
|
| 59 |
});
|
| 60 |
-
if (data.
|
| 61 |
const res = data.data;
|
| 62 |
return {
|
| 63 |
data: res.chunks,
|
|
@@ -126,11 +126,11 @@ export const useDeleteChunk = () => {
|
|
| 126 |
mutationKey: ['deleteChunk'],
|
| 127 |
mutationFn: async (params: { chunkIds: string[]; doc_id: string }) => {
|
| 128 |
const { data } = await kbService.rm_chunk(params);
|
| 129 |
-
if (data.
|
| 130 |
setPaginationParams(1);
|
| 131 |
queryClient.invalidateQueries({ queryKey: ['fetchChunkList'] });
|
| 132 |
}
|
| 133 |
-
return data?.
|
| 134 |
},
|
| 135 |
});
|
| 136 |
|
|
@@ -152,11 +152,11 @@ export const useSwitchChunk = () => {
|
|
| 152 |
doc_id: string;
|
| 153 |
}) => {
|
| 154 |
const { data } = await kbService.switch_chunk(params);
|
| 155 |
-
if (data.
|
| 156 |
message.success(t('message.modified'));
|
| 157 |
queryClient.invalidateQueries({ queryKey: ['fetchChunkList'] });
|
| 158 |
}
|
| 159 |
-
return data?.
|
| 160 |
},
|
| 161 |
});
|
| 162 |
|
|
@@ -179,11 +179,11 @@ export const useCreateChunk = () => {
|
|
| 179 |
service = kbService.set_chunk;
|
| 180 |
}
|
| 181 |
const { data } = await service(payload);
|
| 182 |
-
if (data.
|
| 183 |
message.success(t('message.created'));
|
| 184 |
queryClient.invalidateQueries({ queryKey: ['fetchChunkList'] });
|
| 185 |
}
|
| 186 |
-
return data?.
|
| 187 |
},
|
| 188 |
});
|
| 189 |
|
|
|
|
| 57 |
available_int: available,
|
| 58 |
keywords: searchString,
|
| 59 |
});
|
| 60 |
+
if (data.code === 0) {
|
| 61 |
const res = data.data;
|
| 62 |
return {
|
| 63 |
data: res.chunks,
|
|
|
|
| 126 |
mutationKey: ['deleteChunk'],
|
| 127 |
mutationFn: async (params: { chunkIds: string[]; doc_id: string }) => {
|
| 128 |
const { data } = await kbService.rm_chunk(params);
|
| 129 |
+
if (data.code === 0) {
|
| 130 |
setPaginationParams(1);
|
| 131 |
queryClient.invalidateQueries({ queryKey: ['fetchChunkList'] });
|
| 132 |
}
|
| 133 |
+
return data?.code;
|
| 134 |
},
|
| 135 |
});
|
| 136 |
|
|
|
|
| 152 |
doc_id: string;
|
| 153 |
}) => {
|
| 154 |
const { data } = await kbService.switch_chunk(params);
|
| 155 |
+
if (data.code === 0) {
|
| 156 |
message.success(t('message.modified'));
|
| 157 |
queryClient.invalidateQueries({ queryKey: ['fetchChunkList'] });
|
| 158 |
}
|
| 159 |
+
return data?.code;
|
| 160 |
},
|
| 161 |
});
|
| 162 |
|
|
|
|
| 179 |
service = kbService.set_chunk;
|
| 180 |
}
|
| 181 |
const { data } = await service(payload);
|
| 182 |
+
if (data.code === 0) {
|
| 183 |
message.success(t('message.created'));
|
| 184 |
queryClient.invalidateQueries({ queryKey: ['fetchChunkList'] });
|
| 185 |
}
|
| 186 |
+
return data?.code;
|
| 187 |
},
|
| 188 |
});
|
| 189 |
|
web/src/hooks/document-hooks.ts
CHANGED
|
@@ -69,7 +69,7 @@ export const useFetchNextDocumentList = () => {
|
|
| 69 |
page_size: pagination.pageSize,
|
| 70 |
page: pagination.current,
|
| 71 |
});
|
| 72 |
-
if (ret.data.
|
| 73 |
return ret.data.data;
|
| 74 |
}
|
| 75 |
|
|
@@ -118,7 +118,7 @@ export const useSetNextDocumentStatus = () => {
|
|
| 118 |
doc_id: documentId,
|
| 119 |
status: Number(status),
|
| 120 |
});
|
| 121 |
-
if (data.
|
| 122 |
message.success(i18n.t('message.modified'));
|
| 123 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 124 |
}
|
|
@@ -149,11 +149,11 @@ export const useSaveNextDocumentName = () => {
|
|
| 149 |
doc_id: documentId,
|
| 150 |
name: name,
|
| 151 |
});
|
| 152 |
-
if (data.
|
| 153 |
message.success(i18n.t('message.renamed'));
|
| 154 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 155 |
}
|
| 156 |
-
return data.
|
| 157 |
},
|
| 158 |
});
|
| 159 |
|
|
@@ -176,7 +176,7 @@ export const useCreateNextDocument = () => {
|
|
| 176 |
name,
|
| 177 |
kb_id: knowledgeId,
|
| 178 |
});
|
| 179 |
-
if (data.
|
| 180 |
if (page === 1) {
|
| 181 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 182 |
} else {
|
|
@@ -185,7 +185,7 @@ export const useCreateNextDocument = () => {
|
|
| 185 |
|
| 186 |
message.success(i18n.t('message.created'));
|
| 187 |
}
|
| 188 |
-
return data.
|
| 189 |
},
|
| 190 |
});
|
| 191 |
|
|
@@ -215,12 +215,12 @@ export const useSetNextDocumentParser = () => {
|
|
| 215 |
doc_id: documentId,
|
| 216 |
parser_config: parserConfig,
|
| 217 |
});
|
| 218 |
-
if (data.
|
| 219 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 220 |
|
| 221 |
message.success(i18n.t('message.modified'));
|
| 222 |
}
|
| 223 |
-
return data.
|
| 224 |
},
|
| 225 |
});
|
| 226 |
|
|
@@ -246,12 +246,12 @@ export const useUploadNextDocument = () => {
|
|
| 246 |
|
| 247 |
try {
|
| 248 |
const ret = await kbService.document_upload(formData);
|
| 249 |
-
const
|
| 250 |
-
if (
|
| 251 |
message.success(i18n.t('message.uploaded'));
|
| 252 |
}
|
| 253 |
|
| 254 |
-
if (
|
| 255 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 256 |
}
|
| 257 |
return ret?.data;
|
|
@@ -281,12 +281,12 @@ export const useNextWebCrawl = () => {
|
|
| 281 |
formData.append('kb_id', knowledgeId);
|
| 282 |
|
| 283 |
const ret = await kbService.web_crawl(formData);
|
| 284 |
-
const
|
| 285 |
-
if (
|
| 286 |
message.success(i18n.t('message.uploaded'));
|
| 287 |
}
|
| 288 |
|
| 289 |
-
return
|
| 290 |
},
|
| 291 |
});
|
| 292 |
|
|
@@ -317,13 +317,13 @@ export const useRunNextDocument = () => {
|
|
| 317 |
doc_ids: documentIds,
|
| 318 |
run,
|
| 319 |
});
|
| 320 |
-
const
|
| 321 |
-
if (
|
| 322 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 323 |
message.success(i18n.t('message.operated'));
|
| 324 |
}
|
| 325 |
|
| 326 |
-
return
|
| 327 |
},
|
| 328 |
});
|
| 329 |
|
|
@@ -338,7 +338,7 @@ export const useFetchDocumentInfosByIds = () => {
|
|
| 338 |
initialData: [],
|
| 339 |
queryFn: async () => {
|
| 340 |
const { data } = await kbService.document_infos({ doc_ids: ids });
|
| 341 |
-
if (data.
|
| 342 |
return data.data;
|
| 343 |
}
|
| 344 |
|
|
@@ -357,7 +357,7 @@ export const useFetchDocumentThumbnailsByIds = () => {
|
|
| 357 |
initialData: {},
|
| 358 |
queryFn: async () => {
|
| 359 |
const { data } = await kbService.document_thumbnails({ doc_ids: ids });
|
| 360 |
-
if (data.
|
| 361 |
return data.data;
|
| 362 |
}
|
| 363 |
return {};
|
|
@@ -377,11 +377,11 @@ export const useRemoveNextDocument = () => {
|
|
| 377 |
mutationKey: ['removeDocument'],
|
| 378 |
mutationFn: async (documentIds: string | string[]) => {
|
| 379 |
const { data } = await kbService.document_rm({ doc_id: documentIds });
|
| 380 |
-
if (data.
|
| 381 |
message.success(i18n.t('message.deleted'));
|
| 382 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 383 |
}
|
| 384 |
-
return data.
|
| 385 |
},
|
| 386 |
});
|
| 387 |
|
|
@@ -398,7 +398,7 @@ export const useDeleteDocument = () => {
|
|
| 398 |
mutationKey: ['deleteDocument'],
|
| 399 |
mutationFn: async (documentIds: string[]) => {
|
| 400 |
const data = await kbService.document_delete({ doc_ids: documentIds });
|
| 401 |
-
// if (data.
|
| 402 |
// queryClient.invalidateQueries({ queryKey: ['fetchFlowList'] });
|
| 403 |
// }
|
| 404 |
return data;
|
|
|
|
| 69 |
page_size: pagination.pageSize,
|
| 70 |
page: pagination.current,
|
| 71 |
});
|
| 72 |
+
if (ret.data.code === 0) {
|
| 73 |
return ret.data.data;
|
| 74 |
}
|
| 75 |
|
|
|
|
| 118 |
doc_id: documentId,
|
| 119 |
status: Number(status),
|
| 120 |
});
|
| 121 |
+
if (data.code === 0) {
|
| 122 |
message.success(i18n.t('message.modified'));
|
| 123 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 124 |
}
|
|
|
|
| 149 |
doc_id: documentId,
|
| 150 |
name: name,
|
| 151 |
});
|
| 152 |
+
if (data.code === 0) {
|
| 153 |
message.success(i18n.t('message.renamed'));
|
| 154 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 155 |
}
|
| 156 |
+
return data.code;
|
| 157 |
},
|
| 158 |
});
|
| 159 |
|
|
|
|
| 176 |
name,
|
| 177 |
kb_id: knowledgeId,
|
| 178 |
});
|
| 179 |
+
if (data.code === 0) {
|
| 180 |
if (page === 1) {
|
| 181 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 182 |
} else {
|
|
|
|
| 185 |
|
| 186 |
message.success(i18n.t('message.created'));
|
| 187 |
}
|
| 188 |
+
return data.code;
|
| 189 |
},
|
| 190 |
});
|
| 191 |
|
|
|
|
| 215 |
doc_id: documentId,
|
| 216 |
parser_config: parserConfig,
|
| 217 |
});
|
| 218 |
+
if (data.code === 0) {
|
| 219 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 220 |
|
| 221 |
message.success(i18n.t('message.modified'));
|
| 222 |
}
|
| 223 |
+
return data.code;
|
| 224 |
},
|
| 225 |
});
|
| 226 |
|
|
|
|
| 246 |
|
| 247 |
try {
|
| 248 |
const ret = await kbService.document_upload(formData);
|
| 249 |
+
const code = get(ret, 'data.code');
|
| 250 |
+
if (code === 0) {
|
| 251 |
message.success(i18n.t('message.uploaded'));
|
| 252 |
}
|
| 253 |
|
| 254 |
+
if (code === 0 || code === 500) {
|
| 255 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 256 |
}
|
| 257 |
return ret?.data;
|
|
|
|
| 281 |
formData.append('kb_id', knowledgeId);
|
| 282 |
|
| 283 |
const ret = await kbService.web_crawl(formData);
|
| 284 |
+
const code = get(ret, 'data.code');
|
| 285 |
+
if (code === 0) {
|
| 286 |
message.success(i18n.t('message.uploaded'));
|
| 287 |
}
|
| 288 |
|
| 289 |
+
return code;
|
| 290 |
},
|
| 291 |
});
|
| 292 |
|
|
|
|
| 317 |
doc_ids: documentIds,
|
| 318 |
run,
|
| 319 |
});
|
| 320 |
+
const code = get(ret, 'data.code');
|
| 321 |
+
if (code === 0) {
|
| 322 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 323 |
message.success(i18n.t('message.operated'));
|
| 324 |
}
|
| 325 |
|
| 326 |
+
return code;
|
| 327 |
},
|
| 328 |
});
|
| 329 |
|
|
|
|
| 338 |
initialData: [],
|
| 339 |
queryFn: async () => {
|
| 340 |
const { data } = await kbService.document_infos({ doc_ids: ids });
|
| 341 |
+
if (data.code === 0) {
|
| 342 |
return data.data;
|
| 343 |
}
|
| 344 |
|
|
|
|
| 357 |
initialData: {},
|
| 358 |
queryFn: async () => {
|
| 359 |
const { data } = await kbService.document_thumbnails({ doc_ids: ids });
|
| 360 |
+
if (data.code === 0) {
|
| 361 |
return data.data;
|
| 362 |
}
|
| 363 |
return {};
|
|
|
|
| 377 |
mutationKey: ['removeDocument'],
|
| 378 |
mutationFn: async (documentIds: string | string[]) => {
|
| 379 |
const { data } = await kbService.document_rm({ doc_id: documentIds });
|
| 380 |
+
if (data.code === 0) {
|
| 381 |
message.success(i18n.t('message.deleted'));
|
| 382 |
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
| 383 |
}
|
| 384 |
+
return data.code;
|
| 385 |
},
|
| 386 |
});
|
| 387 |
|
|
|
|
| 398 |
mutationKey: ['deleteDocument'],
|
| 399 |
mutationFn: async (documentIds: string[]) => {
|
| 400 |
const data = await kbService.document_delete({ doc_ids: documentIds });
|
| 401 |
+
// if (data.code === 0) {
|
| 402 |
// queryClient.invalidateQueries({ queryKey: ['fetchFlowList'] });
|
| 403 |
// }
|
| 404 |
return data;
|
web/src/hooks/file-manager-hooks.ts
CHANGED
|
@@ -103,11 +103,11 @@ export const useDeleteFile = () => {
|
|
| 103 |
mutationKey: ['deleteFile'],
|
| 104 |
mutationFn: async (params: { fileIds: string[]; parentId: string }) => {
|
| 105 |
const { data } = await fileManagerService.removeFile(params);
|
| 106 |
-
if (data.
|
| 107 |
setPaginationParams(1); // TODO: There should be a better way to paginate the request list
|
| 108 |
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
| 109 |
}
|
| 110 |
-
return data.
|
| 111 |
},
|
| 112 |
});
|
| 113 |
|
|
@@ -125,11 +125,11 @@ export const useRenameFile = () => {
|
|
| 125 |
mutationKey: ['renameFile'],
|
| 126 |
mutationFn: async (params: { fileId: string; name: string }) => {
|
| 127 |
const { data } = await fileManagerService.renameFile(params);
|
| 128 |
-
if (data.
|
| 129 |
message.success(t('message.renamed'));
|
| 130 |
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
| 131 |
}
|
| 132 |
-
return data.
|
| 133 |
},
|
| 134 |
});
|
| 135 |
|
|
@@ -170,12 +170,12 @@ export const useCreateFolder = () => {
|
|
| 170 |
...params,
|
| 171 |
type: 'folder',
|
| 172 |
});
|
| 173 |
-
if (data.
|
| 174 |
message.success(t('message.created'));
|
| 175 |
setPaginationParams(1);
|
| 176 |
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
| 177 |
}
|
| 178 |
-
return data.
|
| 179 |
},
|
| 180 |
});
|
| 181 |
|
|
@@ -208,12 +208,12 @@ export const useUploadFile = () => {
|
|
| 208 |
});
|
| 209 |
try {
|
| 210 |
const ret = await fileManagerService.uploadFile(formData);
|
| 211 |
-
if (ret?.data.
|
| 212 |
message.success(t('message.uploaded'));
|
| 213 |
setPaginationParams(1);
|
| 214 |
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
| 215 |
}
|
| 216 |
-
return ret?.data?.
|
| 217 |
} catch (error) {
|
| 218 |
console.log('🚀 ~ useUploadFile ~ error:', error);
|
| 219 |
}
|
|
@@ -235,11 +235,11 @@ export const useConnectToKnowledge = () => {
|
|
| 235 |
mutationKey: ['connectFileToKnowledge'],
|
| 236 |
mutationFn: async (params: IConnectRequestBody) => {
|
| 237 |
const { data } = await fileManagerService.connectFileToKnowledge(params);
|
| 238 |
-
if (data.
|
| 239 |
message.success(t('message.operated'));
|
| 240 |
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
| 241 |
}
|
| 242 |
-
return data.
|
| 243 |
},
|
| 244 |
});
|
| 245 |
|
|
@@ -263,11 +263,11 @@ export const useMoveFile = () => {
|
|
| 263 |
mutationKey: ['moveFile'],
|
| 264 |
mutationFn: async (params: IMoveFileBody) => {
|
| 265 |
const { data } = await fileManagerService.moveFile(params);
|
| 266 |
-
if (data.
|
| 267 |
message.success(t('message.operated'));
|
| 268 |
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
| 269 |
}
|
| 270 |
-
return data.
|
| 271 |
},
|
| 272 |
});
|
| 273 |
|
|
|
|
| 103 |
mutationKey: ['deleteFile'],
|
| 104 |
mutationFn: async (params: { fileIds: string[]; parentId: string }) => {
|
| 105 |
const { data } = await fileManagerService.removeFile(params);
|
| 106 |
+
if (data.code === 0) {
|
| 107 |
setPaginationParams(1); // TODO: There should be a better way to paginate the request list
|
| 108 |
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
| 109 |
}
|
| 110 |
+
return data.code;
|
| 111 |
},
|
| 112 |
});
|
| 113 |
|
|
|
|
| 125 |
mutationKey: ['renameFile'],
|
| 126 |
mutationFn: async (params: { fileId: string; name: string }) => {
|
| 127 |
const { data } = await fileManagerService.renameFile(params);
|
| 128 |
+
if (data.code === 0) {
|
| 129 |
message.success(t('message.renamed'));
|
| 130 |
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
| 131 |
}
|
| 132 |
+
return data.code;
|
| 133 |
},
|
| 134 |
});
|
| 135 |
|
|
|
|
| 170 |
...params,
|
| 171 |
type: 'folder',
|
| 172 |
});
|
| 173 |
+
if (data.code === 0) {
|
| 174 |
message.success(t('message.created'));
|
| 175 |
setPaginationParams(1);
|
| 176 |
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
| 177 |
}
|
| 178 |
+
return data.code;
|
| 179 |
},
|
| 180 |
});
|
| 181 |
|
|
|
|
| 208 |
});
|
| 209 |
try {
|
| 210 |
const ret = await fileManagerService.uploadFile(formData);
|
| 211 |
+
if (ret?.data.code === 0) {
|
| 212 |
message.success(t('message.uploaded'));
|
| 213 |
setPaginationParams(1);
|
| 214 |
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
| 215 |
}
|
| 216 |
+
return ret?.data?.code;
|
| 217 |
} catch (error) {
|
| 218 |
console.log('🚀 ~ useUploadFile ~ error:', error);
|
| 219 |
}
|
|
|
|
| 235 |
mutationKey: ['connectFileToKnowledge'],
|
| 236 |
mutationFn: async (params: IConnectRequestBody) => {
|
| 237 |
const { data } = await fileManagerService.connectFileToKnowledge(params);
|
| 238 |
+
if (data.code === 0) {
|
| 239 |
message.success(t('message.operated'));
|
| 240 |
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
| 241 |
}
|
| 242 |
+
return data.code;
|
| 243 |
},
|
| 244 |
});
|
| 245 |
|
|
|
|
| 263 |
mutationKey: ['moveFile'],
|
| 264 |
mutationFn: async (params: IMoveFileBody) => {
|
| 265 |
const { data } = await fileManagerService.moveFile(params);
|
| 266 |
+
if (data.code === 0) {
|
| 267 |
message.success(t('message.operated'));
|
| 268 |
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
| 269 |
}
|
| 270 |
+
return data.code;
|
| 271 |
},
|
| 272 |
});
|
| 273 |
|
web/src/hooks/flow-hooks.ts
CHANGED
|
@@ -131,7 +131,7 @@ export const useSetFlow = () => {
|
|
| 131 |
avatar?: string;
|
| 132 |
}) => {
|
| 133 |
const { data = {} } = await flowService.setCanvas(params);
|
| 134 |
-
if (data.
|
| 135 |
message.success(
|
| 136 |
i18n.t(`message.${params?.id ? 'modified' : 'created'}`),
|
| 137 |
);
|
|
@@ -154,7 +154,7 @@ export const useDeleteFlow = () => {
|
|
| 154 |
mutationKey: ['deleteFlow'],
|
| 155 |
mutationFn: async (canvasIds: string[]) => {
|
| 156 |
const { data } = await flowService.removeCanvas({ canvasIds });
|
| 157 |
-
if (data.
|
| 158 |
queryClient.invalidateQueries({ queryKey: ['fetchFlowList'] });
|
| 159 |
}
|
| 160 |
return data?.data ?? [];
|
|
@@ -173,7 +173,7 @@ export const useRunFlow = () => {
|
|
| 173 |
mutationKey: ['runFlow'],
|
| 174 |
mutationFn: async (params: { id: string; dsl: DSL }) => {
|
| 175 |
const { data } = await flowService.runCanvas(params);
|
| 176 |
-
if (data.
|
| 177 |
message.success(i18n.t(`message.modified`));
|
| 178 |
}
|
| 179 |
return data?.data ?? {};
|
|
@@ -209,7 +209,7 @@ export const useTestDbConnect = () => {
|
|
| 209 |
mutationKey: ['testDbConnect'],
|
| 210 |
mutationFn: async (params: any) => {
|
| 211 |
const ret = await flowService.testDbConnect(params);
|
| 212 |
-
if (ret?.data?.
|
| 213 |
message.success(ret?.data?.data);
|
| 214 |
} else {
|
| 215 |
message.error(ret?.data?.data);
|
|
|
|
| 131 |
avatar?: string;
|
| 132 |
}) => {
|
| 133 |
const { data = {} } = await flowService.setCanvas(params);
|
| 134 |
+
if (data.code === 0) {
|
| 135 |
message.success(
|
| 136 |
i18n.t(`message.${params?.id ? 'modified' : 'created'}`),
|
| 137 |
);
|
|
|
|
| 154 |
mutationKey: ['deleteFlow'],
|
| 155 |
mutationFn: async (canvasIds: string[]) => {
|
| 156 |
const { data } = await flowService.removeCanvas({ canvasIds });
|
| 157 |
+
if (data.code === 0) {
|
| 158 |
queryClient.invalidateQueries({ queryKey: ['fetchFlowList'] });
|
| 159 |
}
|
| 160 |
return data?.data ?? [];
|
|
|
|
| 173 |
mutationKey: ['runFlow'],
|
| 174 |
mutationFn: async (params: { id: string; dsl: DSL }) => {
|
| 175 |
const { data } = await flowService.runCanvas(params);
|
| 176 |
+
if (data.code === 0) {
|
| 177 |
message.success(i18n.t(`message.modified`));
|
| 178 |
}
|
| 179 |
return data?.data ?? {};
|
|
|
|
| 209 |
mutationKey: ['testDbConnect'],
|
| 210 |
mutationFn: async (params: any) => {
|
| 211 |
const ret = await flowService.testDbConnect(params);
|
| 212 |
+
if (ret?.data?.code === 0) {
|
| 213 |
message.success(ret?.data?.data);
|
| 214 |
} else {
|
| 215 |
message.error(ret?.data?.data);
|
web/src/hooks/knowledge-hooks.ts
CHANGED
|
@@ -70,7 +70,7 @@ export const useCreateKnowledge = () => {
|
|
| 70 |
mutationKey: ['createKnowledge'],
|
| 71 |
mutationFn: async (params: { id?: string; name: string }) => {
|
| 72 |
const { data = {} } = await kbService.createKb(params);
|
| 73 |
-
if (data.
|
| 74 |
message.success(
|
| 75 |
i18n.t(`message.${params?.id ? 'modified' : 'created'}`),
|
| 76 |
);
|
|
@@ -93,7 +93,7 @@ export const useDeleteKnowledge = () => {
|
|
| 93 |
mutationKey: ['deleteKnowledge'],
|
| 94 |
mutationFn: async (id: string) => {
|
| 95 |
const { data } = await kbService.rmKb({ kb_id: id });
|
| 96 |
-
if (data.
|
| 97 |
message.success(i18n.t(`message.deleted`));
|
| 98 |
queryClient.invalidateQueries({ queryKey: ['fetchKnowledgeList'] });
|
| 99 |
}
|
|
@@ -120,7 +120,7 @@ export const useUpdateKnowledge = () => {
|
|
| 120 |
kb_id: knowledgeBaseId,
|
| 121 |
...params,
|
| 122 |
});
|
| 123 |
-
if (data.
|
| 124 |
message.success(i18n.t(`message.updated`));
|
| 125 |
queryClient.invalidateQueries({ queryKey: ['fetchKnowledgeDetail'] });
|
| 126 |
}
|
|
@@ -155,7 +155,7 @@ export const useTestChunkRetrieval = (): ResponsePostType<ITestingResult> & {
|
|
| 155 |
page,
|
| 156 |
size: pageSize,
|
| 157 |
});
|
| 158 |
-
if (data.
|
| 159 |
const res = data.data;
|
| 160 |
return {
|
| 161 |
chunks: res.chunks,
|
|
|
|
| 70 |
mutationKey: ['createKnowledge'],
|
| 71 |
mutationFn: async (params: { id?: string; name: string }) => {
|
| 72 |
const { data = {} } = await kbService.createKb(params);
|
| 73 |
+
if (data.code === 0) {
|
| 74 |
message.success(
|
| 75 |
i18n.t(`message.${params?.id ? 'modified' : 'created'}`),
|
| 76 |
);
|
|
|
|
| 93 |
mutationKey: ['deleteKnowledge'],
|
| 94 |
mutationFn: async (id: string) => {
|
| 95 |
const { data } = await kbService.rmKb({ kb_id: id });
|
| 96 |
+
if (data.code === 0) {
|
| 97 |
message.success(i18n.t(`message.deleted`));
|
| 98 |
queryClient.invalidateQueries({ queryKey: ['fetchKnowledgeList'] });
|
| 99 |
}
|
|
|
|
| 120 |
kb_id: knowledgeBaseId,
|
| 121 |
...params,
|
| 122 |
});
|
| 123 |
+
if (data.code === 0) {
|
| 124 |
message.success(i18n.t(`message.updated`));
|
| 125 |
queryClient.invalidateQueries({ queryKey: ['fetchKnowledgeDetail'] });
|
| 126 |
}
|
|
|
|
| 155 |
page,
|
| 156 |
size: pageSize,
|
| 157 |
});
|
| 158 |
+
if (data.code === 0) {
|
| 159 |
const res = data.data;
|
| 160 |
return {
|
| 161 |
chunks: res.chunks,
|
web/src/hooks/llm-hooks.tsx
CHANGED
|
@@ -211,12 +211,12 @@ export const useSaveApiKey = () => {
|
|
| 211 |
mutationKey: ['saveApiKey'],
|
| 212 |
mutationFn: async (params: IApiKeySavingParams) => {
|
| 213 |
const { data } = await userService.set_api_key(params);
|
| 214 |
-
if (data.
|
| 215 |
message.success(t('message.modified'));
|
| 216 |
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
|
| 217 |
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
|
| 218 |
}
|
| 219 |
-
return data.
|
| 220 |
},
|
| 221 |
});
|
| 222 |
|
|
@@ -242,10 +242,10 @@ export const useSaveTenantInfo = () => {
|
|
| 242 |
mutationKey: ['saveTenantInfo'],
|
| 243 |
mutationFn: async (params: ISystemModelSettingSavingParams) => {
|
| 244 |
const { data } = await userService.set_tenant_info(params);
|
| 245 |
-
if (data.
|
| 246 |
message.success(t('message.modified'));
|
| 247 |
}
|
| 248 |
-
return data.
|
| 249 |
},
|
| 250 |
});
|
| 251 |
|
|
@@ -263,12 +263,12 @@ export const useAddLlm = () => {
|
|
| 263 |
mutationKey: ['addLlm'],
|
| 264 |
mutationFn: async (params: IAddLlmRequestBody) => {
|
| 265 |
const { data } = await userService.add_llm(params);
|
| 266 |
-
if (data.
|
| 267 |
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
|
| 268 |
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
|
| 269 |
message.success(t('message.modified'));
|
| 270 |
}
|
| 271 |
-
return data.
|
| 272 |
},
|
| 273 |
});
|
| 274 |
|
|
@@ -286,12 +286,12 @@ export const useDeleteLlm = () => {
|
|
| 286 |
mutationKey: ['deleteLlm'],
|
| 287 |
mutationFn: async (params: IDeleteLlmRequestBody) => {
|
| 288 |
const { data } = await userService.delete_llm(params);
|
| 289 |
-
if (data.
|
| 290 |
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
|
| 291 |
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
|
| 292 |
message.success(t('message.deleted'));
|
| 293 |
}
|
| 294 |
-
return data.
|
| 295 |
},
|
| 296 |
});
|
| 297 |
|
|
@@ -309,12 +309,12 @@ export const useDeleteFactory = () => {
|
|
| 309 |
mutationKey: ['deleteFactory'],
|
| 310 |
mutationFn: async (params: IDeleteLlmRequestBody) => {
|
| 311 |
const { data } = await userService.deleteFactory(params);
|
| 312 |
-
if (data.
|
| 313 |
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
|
| 314 |
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
|
| 315 |
message.success(t('message.deleted'));
|
| 316 |
}
|
| 317 |
-
return data.
|
| 318 |
},
|
| 319 |
});
|
| 320 |
|
|
|
|
| 211 |
mutationKey: ['saveApiKey'],
|
| 212 |
mutationFn: async (params: IApiKeySavingParams) => {
|
| 213 |
const { data } = await userService.set_api_key(params);
|
| 214 |
+
if (data.code === 0) {
|
| 215 |
message.success(t('message.modified'));
|
| 216 |
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
|
| 217 |
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
|
| 218 |
}
|
| 219 |
+
return data.code;
|
| 220 |
},
|
| 221 |
});
|
| 222 |
|
|
|
|
| 242 |
mutationKey: ['saveTenantInfo'],
|
| 243 |
mutationFn: async (params: ISystemModelSettingSavingParams) => {
|
| 244 |
const { data } = await userService.set_tenant_info(params);
|
| 245 |
+
if (data.code === 0) {
|
| 246 |
message.success(t('message.modified'));
|
| 247 |
}
|
| 248 |
+
return data.code;
|
| 249 |
},
|
| 250 |
});
|
| 251 |
|
|
|
|
| 263 |
mutationKey: ['addLlm'],
|
| 264 |
mutationFn: async (params: IAddLlmRequestBody) => {
|
| 265 |
const { data } = await userService.add_llm(params);
|
| 266 |
+
if (data.code === 0) {
|
| 267 |
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
|
| 268 |
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
|
| 269 |
message.success(t('message.modified'));
|
| 270 |
}
|
| 271 |
+
return data.code;
|
| 272 |
},
|
| 273 |
});
|
| 274 |
|
|
|
|
| 286 |
mutationKey: ['deleteLlm'],
|
| 287 |
mutationFn: async (params: IDeleteLlmRequestBody) => {
|
| 288 |
const { data } = await userService.delete_llm(params);
|
| 289 |
+
if (data.code === 0) {
|
| 290 |
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
|
| 291 |
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
|
| 292 |
message.success(t('message.deleted'));
|
| 293 |
}
|
| 294 |
+
return data.code;
|
| 295 |
},
|
| 296 |
});
|
| 297 |
|
|
|
|
| 309 |
mutationKey: ['deleteFactory'],
|
| 310 |
mutationFn: async (params: IDeleteLlmRequestBody) => {
|
| 311 |
const { data } = await userService.deleteFactory(params);
|
| 312 |
+
if (data.code === 0) {
|
| 313 |
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
|
| 314 |
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
|
| 315 |
message.success(t('message.deleted'));
|
| 316 |
}
|
| 317 |
+
return data.code;
|
| 318 |
},
|
| 319 |
});
|
| 320 |
|
web/src/hooks/logic-hooks.ts
CHANGED
|
@@ -248,8 +248,8 @@ export const useSpeechWithSse = (url: string = api.tts) => {
|
|
| 248 |
});
|
| 249 |
try {
|
| 250 |
const res = await response.clone().json();
|
| 251 |
-
if (res?.
|
| 252 |
-
message.error(res?.
|
| 253 |
}
|
| 254 |
} catch (error) {
|
| 255 |
console.warn('🚀 ~ error:', error);
|
|
|
|
| 248 |
});
|
| 249 |
try {
|
| 250 |
const res = await response.clone().json();
|
| 251 |
+
if (res?.code !== 0) {
|
| 252 |
+
message.error(res?.message);
|
| 253 |
}
|
| 254 |
} catch (error) {
|
| 255 |
console.warn('🚀 ~ error:', error);
|
web/src/hooks/login-hooks.ts
CHANGED
|
@@ -26,7 +26,7 @@ export const useLogin = () => {
|
|
| 26 |
mutationKey: ['login'],
|
| 27 |
mutationFn: async (params: { email: string; password: string }) => {
|
| 28 |
const { data: res = {}, response } = await userService.login(params);
|
| 29 |
-
if (res.
|
| 30 |
const { data } = res;
|
| 31 |
message.success(t('message.logged'));
|
| 32 |
const authorization = response.headers.get(Authorization);
|
|
@@ -42,7 +42,7 @@ export const useLogin = () => {
|
|
| 42 |
Token: token,
|
| 43 |
});
|
| 44 |
}
|
| 45 |
-
return res.
|
| 46 |
},
|
| 47 |
});
|
| 48 |
|
|
@@ -64,10 +64,10 @@ export const useRegister = () => {
|
|
| 64 |
nickname: string;
|
| 65 |
}) => {
|
| 66 |
const { data = {} } = await userService.register(params);
|
| 67 |
-
if (data.
|
| 68 |
message.success(t('message.registered'));
|
| 69 |
}
|
| 70 |
-
return data.
|
| 71 |
},
|
| 72 |
});
|
| 73 |
|
|
@@ -84,12 +84,12 @@ export const useLogout = () => {
|
|
| 84 |
mutationKey: ['logout'],
|
| 85 |
mutationFn: async () => {
|
| 86 |
const { data = {} } = await userService.logout();
|
| 87 |
-
if (data.
|
| 88 |
message.success(t('message.logout'));
|
| 89 |
authorizationUtil.removeAll();
|
| 90 |
history.push('/login');
|
| 91 |
}
|
| 92 |
-
return data.
|
| 93 |
},
|
| 94 |
});
|
| 95 |
|
|
|
|
| 26 |
mutationKey: ['login'],
|
| 27 |
mutationFn: async (params: { email: string; password: string }) => {
|
| 28 |
const { data: res = {}, response } = await userService.login(params);
|
| 29 |
+
if (res.code === 0) {
|
| 30 |
const { data } = res;
|
| 31 |
message.success(t('message.logged'));
|
| 32 |
const authorization = response.headers.get(Authorization);
|
|
|
|
| 42 |
Token: token,
|
| 43 |
});
|
| 44 |
}
|
| 45 |
+
return res.code;
|
| 46 |
},
|
| 47 |
});
|
| 48 |
|
|
|
|
| 64 |
nickname: string;
|
| 65 |
}) => {
|
| 66 |
const { data = {} } = await userService.register(params);
|
| 67 |
+
if (data.code === 0) {
|
| 68 |
message.success(t('message.registered'));
|
| 69 |
}
|
| 70 |
+
return data.code;
|
| 71 |
},
|
| 72 |
});
|
| 73 |
|
|
|
|
| 84 |
mutationKey: ['logout'],
|
| 85 |
mutationFn: async () => {
|
| 86 |
const { data = {} } = await userService.logout();
|
| 87 |
+
if (data.code === 0) {
|
| 88 |
message.success(t('message.logout'));
|
| 89 |
authorizationUtil.removeAll();
|
| 90 |
history.push('/login');
|
| 91 |
}
|
| 92 |
+
return data.code;
|
| 93 |
},
|
| 94 |
});
|
| 95 |
|
web/src/hooks/user-setting-hooks.tsx
CHANGED
|
@@ -32,7 +32,7 @@ export const useFetchUserInfo = (): ResponseGetType<IUserInfo> => {
|
|
| 32 |
gcTime: 0,
|
| 33 |
queryFn: async () => {
|
| 34 |
const { data } = await userService.user_info();
|
| 35 |
-
if (data.
|
| 36 |
i18n.changeLanguage(
|
| 37 |
LanguageTranslationMap[
|
| 38 |
data.data.language as keyof typeof LanguageTranslationMap
|
|
@@ -54,7 +54,7 @@ export const useFetchTenantInfo = (): ResponseGetType<ITenantInfo> => {
|
|
| 54 |
gcTime: 0,
|
| 55 |
queryFn: async () => {
|
| 56 |
const { data: res } = await userService.get_tenant_info();
|
| 57 |
-
if (res.
|
| 58 |
// llm_id is chat_id
|
| 59 |
// asr_id is speech2txt
|
| 60 |
const { data } = res;
|
|
@@ -116,11 +116,11 @@ export const useSaveSetting = () => {
|
|
| 116 |
userInfo: { new_password: string } | Partial<IUserInfo>,
|
| 117 |
) => {
|
| 118 |
const { data } = await userService.setting(userInfo);
|
| 119 |
-
if (data.
|
| 120 |
message.success(t('message.modified'));
|
| 121 |
queryClient.invalidateQueries({ queryKey: ['userInfo'] });
|
| 122 |
}
|
| 123 |
-
return data?.
|
| 124 |
},
|
| 125 |
});
|
| 126 |
|
|
@@ -135,7 +135,7 @@ export const useFetchSystemVersion = () => {
|
|
| 135 |
try {
|
| 136 |
setLoading(true);
|
| 137 |
const { data } = await userService.getSystemVersion();
|
| 138 |
-
if (data.
|
| 139 |
setVersion(data.data);
|
| 140 |
setLoading(false);
|
| 141 |
}
|
|
@@ -156,7 +156,7 @@ export const useFetchSystemStatus = () => {
|
|
| 156 |
const fetchSystemStatus = useCallback(async () => {
|
| 157 |
setLoading(true);
|
| 158 |
const { data } = await userService.getSystemStatus();
|
| 159 |
-
if (data.
|
| 160 |
setSystemStatus(data.data);
|
| 161 |
setLoading(false);
|
| 162 |
}
|
|
@@ -200,7 +200,7 @@ export const useRemoveSystemToken = () => {
|
|
| 200 |
mutationKey: ['removeSystemToken'],
|
| 201 |
mutationFn: async (token: string) => {
|
| 202 |
const { data } = await userService.removeToken({}, token);
|
| 203 |
-
if (data.
|
| 204 |
message.success(t('message.deleted'));
|
| 205 |
queryClient.invalidateQueries({ queryKey: ['fetchSystemTokenList'] });
|
| 206 |
}
|
|
@@ -221,7 +221,7 @@ export const useCreateSystemToken = () => {
|
|
| 221 |
mutationKey: ['createSystemToken'],
|
| 222 |
mutationFn: async (params: Record<string, any>) => {
|
| 223 |
const { data } = await userService.createToken(params);
|
| 224 |
-
if (data.
|
| 225 |
queryClient.invalidateQueries({ queryKey: ['fetchSystemTokenList'] });
|
| 226 |
}
|
| 227 |
return data?.data ?? [];
|
|
@@ -264,10 +264,10 @@ export const useAddTenantUser = () => {
|
|
| 264 |
mutationKey: ['addTenantUser'],
|
| 265 |
mutationFn: async (email: string) => {
|
| 266 |
const { data } = await addTenantUser(tenantInfo.tenant_id, email);
|
| 267 |
-
if (data.
|
| 268 |
queryClient.invalidateQueries({ queryKey: ['listTenantUser'] });
|
| 269 |
}
|
| 270 |
-
return data?.
|
| 271 |
},
|
| 272 |
});
|
| 273 |
|
|
@@ -296,7 +296,7 @@ export const useDeleteTenantUser = () => {
|
|
| 296 |
tenantId: tenantId ?? tenantInfo.tenant_id,
|
| 297 |
userId,
|
| 298 |
});
|
| 299 |
-
if (data.
|
| 300 |
message.success(t('message.deleted'));
|
| 301 |
queryClient.invalidateQueries({ queryKey: ['listTenantUser'] });
|
| 302 |
queryClient.invalidateQueries({ queryKey: ['listTenant'] });
|
|
@@ -342,7 +342,7 @@ export const useAgreeTenant = () => {
|
|
| 342 |
mutationKey: ['agreeTenant'],
|
| 343 |
mutationFn: async (tenantId: string) => {
|
| 344 |
const { data } = await agreeTenant(tenantId);
|
| 345 |
-
if (data.
|
| 346 |
message.success(t('message.operated'));
|
| 347 |
queryClient.invalidateQueries({ queryKey: ['listTenant'] });
|
| 348 |
}
|
|
|
|
| 32 |
gcTime: 0,
|
| 33 |
queryFn: async () => {
|
| 34 |
const { data } = await userService.user_info();
|
| 35 |
+
if (data.code === 0) {
|
| 36 |
i18n.changeLanguage(
|
| 37 |
LanguageTranslationMap[
|
| 38 |
data.data.language as keyof typeof LanguageTranslationMap
|
|
|
|
| 54 |
gcTime: 0,
|
| 55 |
queryFn: async () => {
|
| 56 |
const { data: res } = await userService.get_tenant_info();
|
| 57 |
+
if (res.code === 0) {
|
| 58 |
// llm_id is chat_id
|
| 59 |
// asr_id is speech2txt
|
| 60 |
const { data } = res;
|
|
|
|
| 116 |
userInfo: { new_password: string } | Partial<IUserInfo>,
|
| 117 |
) => {
|
| 118 |
const { data } = await userService.setting(userInfo);
|
| 119 |
+
if (data.code === 0) {
|
| 120 |
message.success(t('message.modified'));
|
| 121 |
queryClient.invalidateQueries({ queryKey: ['userInfo'] });
|
| 122 |
}
|
| 123 |
+
return data?.code;
|
| 124 |
},
|
| 125 |
});
|
| 126 |
|
|
|
|
| 135 |
try {
|
| 136 |
setLoading(true);
|
| 137 |
const { data } = await userService.getSystemVersion();
|
| 138 |
+
if (data.code === 0) {
|
| 139 |
setVersion(data.data);
|
| 140 |
setLoading(false);
|
| 141 |
}
|
|
|
|
| 156 |
const fetchSystemStatus = useCallback(async () => {
|
| 157 |
setLoading(true);
|
| 158 |
const { data } = await userService.getSystemStatus();
|
| 159 |
+
if (data.code === 0) {
|
| 160 |
setSystemStatus(data.data);
|
| 161 |
setLoading(false);
|
| 162 |
}
|
|
|
|
| 200 |
mutationKey: ['removeSystemToken'],
|
| 201 |
mutationFn: async (token: string) => {
|
| 202 |
const { data } = await userService.removeToken({}, token);
|
| 203 |
+
if (data.code === 0) {
|
| 204 |
message.success(t('message.deleted'));
|
| 205 |
queryClient.invalidateQueries({ queryKey: ['fetchSystemTokenList'] });
|
| 206 |
}
|
|
|
|
| 221 |
mutationKey: ['createSystemToken'],
|
| 222 |
mutationFn: async (params: Record<string, any>) => {
|
| 223 |
const { data } = await userService.createToken(params);
|
| 224 |
+
if (data.code === 0) {
|
| 225 |
queryClient.invalidateQueries({ queryKey: ['fetchSystemTokenList'] });
|
| 226 |
}
|
| 227 |
return data?.data ?? [];
|
|
|
|
| 264 |
mutationKey: ['addTenantUser'],
|
| 265 |
mutationFn: async (email: string) => {
|
| 266 |
const { data } = await addTenantUser(tenantInfo.tenant_id, email);
|
| 267 |
+
if (data.code === 0) {
|
| 268 |
queryClient.invalidateQueries({ queryKey: ['listTenantUser'] });
|
| 269 |
}
|
| 270 |
+
return data?.code;
|
| 271 |
},
|
| 272 |
});
|
| 273 |
|
|
|
|
| 296 |
tenantId: tenantId ?? tenantInfo.tenant_id,
|
| 297 |
userId,
|
| 298 |
});
|
| 299 |
+
if (data.code === 0) {
|
| 300 |
message.success(t('message.deleted'));
|
| 301 |
queryClient.invalidateQueries({ queryKey: ['listTenantUser'] });
|
| 302 |
queryClient.invalidateQueries({ queryKey: ['listTenant'] });
|
|
|
|
| 342 |
mutationKey: ['agreeTenant'],
|
| 343 |
mutationFn: async (tenantId: string) => {
|
| 344 |
const { data } = await agreeTenant(tenantId);
|
| 345 |
+
if (data.code === 0) {
|
| 346 |
message.success(t('message.operated'));
|
| 347 |
queryClient.invalidateQueries({ queryKey: ['listTenant'] });
|
| 348 |
}
|
web/src/interfaces/database/base.ts
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
export interface ResponseType<T = any> {
|
| 2 |
-
|
| 3 |
data: T;
|
| 4 |
-
|
| 5 |
status: number;
|
| 6 |
}
|
| 7 |
|
|
|
|
| 1 |
export interface ResponseType<T = any> {
|
| 2 |
+
code: number;
|
| 3 |
data: T;
|
| 4 |
+
message: string;
|
| 5 |
status: number;
|
| 6 |
}
|
| 7 |
|
web/src/pages/add-knowledge/components/knowledge-chunk/components/chunk-creating-modal/index.tsx
CHANGED
|
@@ -30,7 +30,7 @@ const ChunkCreatingModal: React.FC<IModalProps<any> & kFProps> = ({
|
|
| 30 |
const { t } = useTranslation();
|
| 31 |
|
| 32 |
useEffect(() => {
|
| 33 |
-
if (data?.
|
| 34 |
const { content_with_weight, important_kwd = [] } = data.data;
|
| 35 |
form.setFieldsValue({ content: content_with_weight });
|
| 36 |
setKeywords(important_kwd);
|
|
|
|
| 30 |
const { t } = useTranslation();
|
| 31 |
|
| 32 |
useEffect(() => {
|
| 33 |
+
if (data?.code === 0) {
|
| 34 |
const { content_with_weight, important_kwd = [] } = data.data;
|
| 35 |
form.setFieldsValue({ content: content_with_weight });
|
| 36 |
setKeywords(important_kwd);
|
web/src/pages/add-knowledge/components/knowledge-chunk/hooks.ts
CHANGED
|
@@ -96,14 +96,14 @@ export const useUpdateChunk = () => {
|
|
| 96 |
|
| 97 |
const onChunkUpdatingOk = useCallback(
|
| 98 |
async ({ content, keywords }: { content: string; keywords: string }) => {
|
| 99 |
-
const
|
| 100 |
content_with_weight: content,
|
| 101 |
doc_id: documentId,
|
| 102 |
chunk_id: chunkId,
|
| 103 |
important_kwd: keywords, // keywords
|
| 104 |
});
|
| 105 |
|
| 106 |
-
if (
|
| 107 |
hideChunkUpdatingModal();
|
| 108 |
}
|
| 109 |
},
|
|
|
|
| 96 |
|
| 97 |
const onChunkUpdatingOk = useCallback(
|
| 98 |
async ({ content, keywords }: { content: string; keywords: string }) => {
|
| 99 |
+
const code = await createChunk({
|
| 100 |
content_with_weight: content,
|
| 101 |
doc_id: documentId,
|
| 102 |
chunk_id: chunkId,
|
| 103 |
important_kwd: keywords, // keywords
|
| 104 |
});
|
| 105 |
|
| 106 |
+
if (code === 0) {
|
| 107 |
hideChunkUpdatingModal();
|
| 108 |
}
|
| 109 |
},
|
web/src/pages/add-knowledge/components/knowledge-file/hooks.ts
CHANGED
|
@@ -148,20 +148,20 @@ export const useHandleUploadDocument = () => {
|
|
| 148 |
async (fileList: UploadFile[]): Promise<number | undefined> => {
|
| 149 |
if (fileList.length > 0) {
|
| 150 |
const ret: any = await uploadDocument(fileList);
|
| 151 |
-
if (typeof ret?.
|
| 152 |
return;
|
| 153 |
}
|
| 154 |
-
const count = getUnSupportedFilesCount(ret?.
|
| 155 |
/// 500 error code indicates that some file types are not supported
|
| 156 |
-
let
|
| 157 |
if (
|
| 158 |
-
ret?.
|
| 159 |
-
(ret?.
|
| 160 |
) {
|
| 161 |
-
|
| 162 |
hideDocumentUploadModal();
|
| 163 |
}
|
| 164 |
-
return
|
| 165 |
}
|
| 166 |
},
|
| 167 |
[uploadDocument, hideDocumentUploadModal],
|
|
|
|
| 148 |
async (fileList: UploadFile[]): Promise<number | undefined> => {
|
| 149 |
if (fileList.length > 0) {
|
| 150 |
const ret: any = await uploadDocument(fileList);
|
| 151 |
+
if (typeof ret?.message !== 'string') {
|
| 152 |
return;
|
| 153 |
}
|
| 154 |
+
const count = getUnSupportedFilesCount(ret?.message);
|
| 155 |
/// 500 error code indicates that some file types are not supported
|
| 156 |
+
let code = ret?.code;
|
| 157 |
if (
|
| 158 |
+
ret?.code === 0 ||
|
| 159 |
+
(ret?.code === 500 && count !== fileList.length) // Some files were not uploaded successfully, but some were uploaded successfully.
|
| 160 |
) {
|
| 161 |
+
code = 0;
|
| 162 |
hideDocumentUploadModal();
|
| 163 |
}
|
| 164 |
+
return code;
|
| 165 |
}
|
| 166 |
},
|
| 167 |
[uploadDocument, hideDocumentUploadModal],
|
web/src/pages/chat/hooks.ts
CHANGED
|
@@ -176,7 +176,7 @@ export const useEditDialog = () => {
|
|
| 176 |
async (dialogId?: string) => {
|
| 177 |
if (dialogId) {
|
| 178 |
const ret = await fetchDialog(dialogId);
|
| 179 |
-
if (ret.
|
| 180 |
setDialog(ret.data);
|
| 181 |
}
|
| 182 |
}
|
|
@@ -393,7 +393,7 @@ export const useSendNextMessage = (controller: AbortController) => {
|
|
| 393 |
controller,
|
| 394 |
);
|
| 395 |
|
| 396 |
-
if (res && (res?.response.status !== 200 || res?.data?.
|
| 397 |
// cancel loading
|
| 398 |
setValue(message.content);
|
| 399 |
console.info('removeLatestMessage111');
|
|
@@ -421,7 +421,7 @@ export const useSendNextMessage = (controller: AbortController) => {
|
|
| 421 |
true,
|
| 422 |
conversationId,
|
| 423 |
);
|
| 424 |
-
if (data.
|
| 425 |
setConversationIsNew('');
|
| 426 |
const id = data.data.id;
|
| 427 |
// currentConversationIdRef.current = id;
|
|
@@ -541,7 +541,7 @@ export const useRenameConversation = () => {
|
|
| 541 |
is_new: false,
|
| 542 |
});
|
| 543 |
|
| 544 |
-
if (ret.
|
| 545 |
hideConversationRenameModal();
|
| 546 |
}
|
| 547 |
},
|
|
@@ -551,7 +551,7 @@ export const useRenameConversation = () => {
|
|
| 551 |
const handleShowConversationRenameModal = useCallback(
|
| 552 |
async (conversationId: string) => {
|
| 553 |
const ret = await fetchConversation(conversationId);
|
| 554 |
-
if (ret.
|
| 555 |
setConversation(ret.data);
|
| 556 |
}
|
| 557 |
showConversationRenameModal();
|
|
|
|
| 176 |
async (dialogId?: string) => {
|
| 177 |
if (dialogId) {
|
| 178 |
const ret = await fetchDialog(dialogId);
|
| 179 |
+
if (ret.code === 0) {
|
| 180 |
setDialog(ret.data);
|
| 181 |
}
|
| 182 |
}
|
|
|
|
| 393 |
controller,
|
| 394 |
);
|
| 395 |
|
| 396 |
+
if (res && (res?.response.status !== 200 || res?.data?.code !== 0)) {
|
| 397 |
// cancel loading
|
| 398 |
setValue(message.content);
|
| 399 |
console.info('removeLatestMessage111');
|
|
|
|
| 421 |
true,
|
| 422 |
conversationId,
|
| 423 |
);
|
| 424 |
+
if (data.code === 0) {
|
| 425 |
setConversationIsNew('');
|
| 426 |
const id = data.data.id;
|
| 427 |
// currentConversationIdRef.current = id;
|
|
|
|
| 541 |
is_new: false,
|
| 542 |
});
|
| 543 |
|
| 544 |
+
if (ret.code === 0) {
|
| 545 |
hideConversationRenameModal();
|
| 546 |
}
|
| 547 |
},
|
|
|
|
| 551 |
const handleShowConversationRenameModal = useCallback(
|
| 552 |
async (conversationId: string) => {
|
| 553 |
const ret = await fetchConversation(conversationId);
|
| 554 |
+
if (ret.code === 0) {
|
| 555 |
setConversation(ret.data);
|
| 556 |
}
|
| 557 |
showConversationRenameModal();
|
web/src/pages/chat/shared-hooks.ts
CHANGED
|
@@ -96,7 +96,7 @@ export const useSendSharedMessage = (conversationId: string) => {
|
|
| 96 |
messages: [...(derivedMessages ?? []), message],
|
| 97 |
});
|
| 98 |
|
| 99 |
-
if (res && (res?.response.status !== 200 || res?.data?.
|
| 100 |
// cancel loading
|
| 101 |
setValue(message.content);
|
| 102 |
removeLatestMessage();
|
|
@@ -111,7 +111,7 @@ export const useSendSharedMessage = (conversationId: string) => {
|
|
| 111 |
sendMessage(message);
|
| 112 |
} else {
|
| 113 |
const data = await setConversation('user id');
|
| 114 |
-
if (data.
|
| 115 |
const id = data.data.id;
|
| 116 |
sendMessage(message, id);
|
| 117 |
}
|
|
|
|
| 96 |
messages: [...(derivedMessages ?? []), message],
|
| 97 |
});
|
| 98 |
|
| 99 |
+
if (res && (res?.response.status !== 200 || res?.data?.code !== 0)) {
|
| 100 |
// cancel loading
|
| 101 |
setValue(message.content);
|
| 102 |
removeLatestMessage();
|
|
|
|
| 111 |
sendMessage(message);
|
| 112 |
} else {
|
| 113 |
const data = await setConversation('user id');
|
| 114 |
+
if (data.code === 0) {
|
| 115 |
const id = data.data.id;
|
| 116 |
sendMessage(message, id);
|
| 117 |
}
|
web/src/pages/document-viewer/hooks.ts
CHANGED
|
@@ -8,8 +8,8 @@ export const useCatchError = (api: string) => {
|
|
| 8 |
const fetchDocument = useCallback(async () => {
|
| 9 |
const ret = await axios.get(api);
|
| 10 |
const { data } = ret;
|
| 11 |
-
if (!(data instanceof ArrayBuffer) && data.
|
| 12 |
-
setError(data.
|
| 13 |
}
|
| 14 |
return ret;
|
| 15 |
}, [api]);
|
|
|
|
| 8 |
const fetchDocument = useCallback(async () => {
|
| 9 |
const ret = await axios.get(api);
|
| 10 |
const { data } = ret;
|
| 11 |
+
if (!(data instanceof ArrayBuffer) && data.code !== 0) {
|
| 12 |
+
setError(data.message);
|
| 13 |
}
|
| 14 |
return ret;
|
| 15 |
}, [api]);
|
web/src/pages/file-manager/hooks.ts
CHANGED
|
@@ -141,8 +141,8 @@ export const useHandleDeleteFile = (
|
|
| 141 |
const handleRemoveFile = () => {
|
| 142 |
showDeleteConfirm({
|
| 143 |
onOk: async () => {
|
| 144 |
-
const
|
| 145 |
-
if (
|
| 146 |
setSelectedRowKeys([]);
|
| 147 |
}
|
| 148 |
return;
|
|
|
|
| 141 |
const handleRemoveFile = () => {
|
| 142 |
showDeleteConfirm({
|
| 143 |
onOk: async () => {
|
| 144 |
+
const code = await removeDocument({ fileIds, parentId });
|
| 145 |
+
if (code === 0) {
|
| 146 |
setSelectedRowKeys([]);
|
| 147 |
}
|
| 148 |
return;
|
web/src/pages/file-manager/move-file-modal/async-tree-select.tsx
CHANGED
|
@@ -22,7 +22,7 @@ const AsyncTreeSelect = ({ value, onChange }: IProps) => {
|
|
| 22 |
const onLoadData: TreeSelectProps['loadData'] = useCallback(
|
| 23 |
async ({ id }) => {
|
| 24 |
const ret = await fetchList(id);
|
| 25 |
-
if (ret.
|
| 26 |
setTreeData((tree) => {
|
| 27 |
return tree.concat(
|
| 28 |
ret.data.files
|
|
|
|
| 22 |
const onLoadData: TreeSelectProps['loadData'] = useCallback(
|
| 23 |
async ({ id }) => {
|
| 24 |
const ret = await fetchList(id);
|
| 25 |
+
if (ret.code === 0) {
|
| 26 |
setTreeData((tree) => {
|
| 27 |
return tree.concat(
|
| 28 |
ret.data.files
|
web/src/pages/flow/chat/hooks.ts
CHANGED
|
@@ -151,7 +151,7 @@ export const useSendMessage = (
|
|
| 151 |
const res = await send(params);
|
| 152 |
|
| 153 |
if (receiveMessageError(res)) {
|
| 154 |
-
antMessage.error(res?.data?.
|
| 155 |
|
| 156 |
// cancel loading
|
| 157 |
setValue(message.content);
|
|
@@ -227,7 +227,7 @@ export const useSendNextMessage = () => {
|
|
| 227 |
const res = await send(params);
|
| 228 |
|
| 229 |
if (receiveMessageError(res)) {
|
| 230 |
-
antMessage.error(res?.data?.
|
| 231 |
|
| 232 |
// cancel loading
|
| 233 |
setValue(message.content);
|
|
|
|
| 151 |
const res = await send(params);
|
| 152 |
|
| 153 |
if (receiveMessageError(res)) {
|
| 154 |
+
antMessage.error(res?.data?.message);
|
| 155 |
|
| 156 |
// cancel loading
|
| 157 |
setValue(message.content);
|
|
|
|
| 227 |
const res = await send(params);
|
| 228 |
|
| 229 |
if (receiveMessageError(res)) {
|
| 230 |
+
antMessage.error(res?.data?.message);
|
| 231 |
|
| 232 |
// cancel loading
|
| 233 |
setValue(message.content);
|
web/src/pages/flow/hooks.ts
CHANGED
|
@@ -497,15 +497,15 @@ export const useSaveGraphBeforeOpeningDebugDrawer = (show: () => void) => {
|
|
| 497 |
const { send } = useSendMessageWithSse(api.runCanvas);
|
| 498 |
const handleRun = useCallback(async () => {
|
| 499 |
const saveRet = await saveGraph();
|
| 500 |
-
if (saveRet?.
|
| 501 |
// Call the reset api before opening the run drawer each time
|
| 502 |
const resetRet = await resetFlow();
|
| 503 |
// After resetting, all previous messages will be cleared.
|
| 504 |
-
if (resetRet?.
|
| 505 |
// fetch prologue
|
| 506 |
const sendRet = await send({ id });
|
| 507 |
if (receiveMessageError(sendRet)) {
|
| 508 |
-
message.error(sendRet?.data?.
|
| 509 |
} else {
|
| 510 |
refetch();
|
| 511 |
show();
|
|
|
|
| 497 |
const { send } = useSendMessageWithSse(api.runCanvas);
|
| 498 |
const handleRun = useCallback(async () => {
|
| 499 |
const saveRet = await saveGraph();
|
| 500 |
+
if (saveRet?.code === 0) {
|
| 501 |
// Call the reset api before opening the run drawer each time
|
| 502 |
const resetRet = await resetFlow();
|
| 503 |
// After resetting, all previous messages will be cleared.
|
| 504 |
+
if (resetRet?.code === 0) {
|
| 505 |
// fetch prologue
|
| 506 |
const sendRet = await send({ id });
|
| 507 |
if (receiveMessageError(sendRet)) {
|
| 508 |
+
message.error(sendRet?.data?.message);
|
| 509 |
} else {
|
| 510 |
refetch();
|
| 511 |
show();
|
web/src/pages/flow/list/hooks.ts
CHANGED
|
@@ -53,7 +53,7 @@ export const useSaveFlow = () => {
|
|
| 53 |
// },
|
| 54 |
});
|
| 55 |
|
| 56 |
-
if (ret?.
|
| 57 |
hideFlowSettingModal();
|
| 58 |
navigate(`/flow/${ret.data.id}`);
|
| 59 |
}
|
|
|
|
| 53 |
// },
|
| 54 |
});
|
| 55 |
|
| 56 |
+
if (ret?.code === 0) {
|
| 57 |
hideFlowSettingModal();
|
| 58 |
navigate(`/flow/${ret.data.id}`);
|
| 59 |
}
|
web/src/pages/flow/utils.ts
CHANGED
|
@@ -144,7 +144,7 @@ export const buildDslComponentsByGraph = (
|
|
| 144 |
};
|
| 145 |
|
| 146 |
export const receiveMessageError = (res: any) =>
|
| 147 |
-
res && (res?.response.status !== 200 || res?.data?.
|
| 148 |
|
| 149 |
// Replace the id in the object with text
|
| 150 |
export const replaceIdWithText = (
|
|
|
|
| 144 |
};
|
| 145 |
|
| 146 |
export const receiveMessageError = (res: any) =>
|
| 147 |
+
res && (res?.response.status !== 200 || res?.data?.code !== 0);
|
| 148 |
|
| 149 |
// Replace the id in the object with text
|
| 150 |
export const replaceIdWithText = (
|
web/src/pages/knowledge/hooks.ts
CHANGED
|
@@ -27,7 +27,7 @@ export const useSaveKnowledge = () => {
|
|
| 27 |
name,
|
| 28 |
});
|
| 29 |
|
| 30 |
-
if (ret?.
|
| 31 |
hideModal();
|
| 32 |
navigate(
|
| 33 |
`/knowledge/${KnowledgeRouteKey.Configuration}?id=${ret.data.kb_id}`,
|
|
|
|
| 27 |
name,
|
| 28 |
});
|
| 29 |
|
| 30 |
+
if (ret?.code === 0) {
|
| 31 |
hideModal();
|
| 32 |
navigate(
|
| 33 |
`/knowledge/${KnowledgeRouteKey.Configuration}?id=${ret.data.kb_id}`,
|