Spaces: Runtime error

Update app.py

app.py CHANGED
@@ -3,7 +3,6 @@ import requests
 import json
 from decouple import Config
 
-# Function to interact with Vectara API
 def query_vectara(question, chat_history, uploaded_file):
     # Handle file upload to Vectara
     customer_id = config('CUSTOMER_ID')  # Read from .env file
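Note: on both sides of this hunk the file imports `Config` from decouple but then calls a lowercase `config(...)`, which raises `NameError` at call time and is a plausible cause of the Space's "Runtime error" status. python-decouple's documented entry point is the lowercase `config` callable. A minimal sketch of the likely fix; the `VECTARA_API_KEY` and `CORPUS_ID` key names are assumptions, since only `CUSTOMER_ID` appears in the visible hunks:

```python
# Minimal sketch, assuming python-decouple: the entry point is the
# lowercase `config` callable, not the `Config` class imported above.
from decouple import config

customer_id = config('CUSTOMER_ID')   # shown in the diff
api_key = config('VECTARA_API_KEY')   # assumed key name; `api_key` is used below but never defined in the visible hunks
corpus_id = config('CORPUS_ID')       # assumed key name; `corpus_id` is likewise undefined in the visible hunks
```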
@@ -20,7 +19,7 @@ def query_vectara(question, chat_history, uploaded_file):
         "file": (uploaded_file.name, uploaded_file),
         "doc_metadata": (None, json.dumps({"metadata_key": "metadata_value"})),  # Replace with your metadata
     }
-    response = requests.post(url, files=files,
+    response = requests.post(url, files=files, headers=post_headers)
 
     if response.status_code == 200:
         upload_status = "File uploaded successfully"
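The corrected upload call passes `headers=post_headers`, but `post_headers` is not defined in any visible hunk. A hedged sketch of what it plausibly contains, mirroring the header pattern the query request later in this commit uses; the header names here are assumptions, not confirmed by the diff:

```python
# Assumed definition of post_headers, mirroring the query headers added
# later in this commit. Deliberately no Content-Type: for a multipart
# file upload, requests must generate the boundary itself.
post_headers = {
    "Authorization": f"Bearer {api_key}",  # api_key assumed defined earlier
    "customer-id": customer_id,
}
```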
@@ -30,48 +29,68 @@ def query_vectara(question, chat_history, uploaded_file):
     # Get the user's message from the chat history
     user_message = chat_history[-1][0]
 
+    # Query Vectara API
+    query_url = "https://api.vectara.io/v1/query/v1/query"
+
+    headers = {
+        "Content-Type": "application/json",
+        "Authorization": f"Bearer {api_key}",
+        "customer-id": customer_id,
+    }
+
     query_body = {
         "query": [
             {
-                "query": user_message,
+                "query": user_message,
+                "queryContext": "",
                 "start": 0,
                 "numResults": 10,
+                "contextConfig": {
+                    "charsBefore": 0,
+                    "charsAfter": 0,
+                    "sentencesBefore": 2,
+                    "sentencesAfter": 2,
+                    "startTag": "%START_SNIPPET%",
+                    "endTag": "%END_SNIPPET%",
+                },
+                "rerankingConfig": {
+                    "rerankerId": 272725718,
+                    "mmrConfig": {
+                        "diversityBias": 0.3
+                    }
+                },
                 "corpusKey": [
                     {
                         "customerId": customer_id,
                         "corpusId": corpus_id,
-                        "
+                        "semantics": 0,
+                        "metadataFilter": "",
+                        "lexicalInterpolationConfig": {
+                            "lambda": 0
+                        },
+                        "dim": []
+                    }
+                ],
+                "summary": [
+                    {
+                        "maxSummarizedResults": 5,
+                        "responseLang": "eng",
+                        "summarizerPromptName": "vectara-summary-ext-v1.2.0"
                     }
                 ]
             }
         ]
     }
 
-
-    return f"{upload_status}\n\nResponse from Vectara API: {response.text}"
+    query_response = requests.post(query_url, json=query_body, headers=headers)
 
-
+    if query_response.status_code == 200:
+        query_data = query_response.json()
+        response_message = f"{upload_status}\n\nResponse from Vectara API: {json.dumps(query_data, indent=2)}"
+    else:
+        response_message = f"{upload_status}\n\nError: {query_response.status_code}"
 
-
-# def query_vectara(question, chat_history, uploaded_file):
-#     # Handle file upload to Vectara
-#
-#     # Get the user's message from the chat history
-#     user_message = chat_history[-1][0]
-#
-#     # Create a VectaraRetriever instance
-#     retriever = VectaraRetriever(index=your_vectara_index)  # Replace with your VectaraIndex instance
-#
-#     # Create a QueryBundle with the user's message
-#     query_bundle = QueryBundle(query_str=user_message)
-#
-#     # Retrieve the top k most similar nodes
-#     top_k_nodes = retriever._retrieve(query_bundle)
-#
-#     # Format the nodes for display
-#     responses = "\n".join([f"{node.node.text} (score: {node.score})" for node in top_k_nodes])
-#
-#     return f"{upload_status}\n\nResponse from Vectara API: {responses}"
+    return response_message
 
 # Create a Gradio ChatInterface with a text input, a file upload input, and a text output
 iface = gr.Interface(
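One caveat on the query code added above: the endpoint string repeats its path segment (`/v1/query/v1/query`). Vectara's v1 REST query endpoint is a single `/v1/query`, so the doubled form likely fails and is worth verifying against the current docs. A corrected sketch:

```python
# Likely intended endpoint: a single /v1/query path segment.
query_url = "https://api.vectara.io/v1/query"

query_response = requests.post(query_url, json=query_body, headers=headers)
query_response.raise_for_status()  # optional: fail loudly on non-2xx responses
```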
@@ -82,4 +101,5 @@ iface = gr.Interface(
     description="Ask me anything using the Vectara API!"
 )
 
-iface.launch()
+iface.launch()
+
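Independent of this commit, the handler still reads `user_message = chat_history[-1][0]`, which raises `IndexError` whenever the history is empty, e.g. on the first turn; another candidate for the Space's runtime error. A defensive sketch using the `question` argument the function already receives:

```python
# Guard against an empty chat history; fall back to the question argument,
# which query_vectara() already receives from the Gradio interface.
user_message = chat_history[-1][0] if chat_history else question
```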