Update app.py
app.py CHANGED
@@ -15,8 +15,6 @@ from langchain_core.documents import Document
 from huggingface_hub import InferenceClient
 import inspect
 
-print(inspect.signature(client.chat_completion))
-
 # Environment variables and configurations
 huggingface_token = os.environ.get("HUGGINGFACE_TOKEN")
 llama_cloud_api_key = os.environ.get("LLAMA_CLOUD_API_KEY")
@@ -112,6 +110,7 @@ def generate_chunked_response(prompt, max_tokens=1000, max_chunks=5, temperature
     clean_response = clean_response.replace("Using the following context from the PDF documents:", "").strip()
 
     return clean_response
+print(inspect.signature(client.chat_completion))
 
 def duckduckgo_search(query):
     with DDGS() as ddgs: