Commit 0268ea7 (parent: 45e331f): Update app.py

app.py CHANGED
@@ -29,7 +29,7 @@ document_prompt = PromptTemplate(
 )
 prompt = PromptTemplate(
     template=
-    """Write a personalised newsletter for a researcher. The researcher describes his work as follows:"{context}". Base the newsletter on the
+    """Write a personalised newsletter for a researcher on the most recent exciting developments in his field. The researcher describes his work as follows:"{context}". Base the newsletter on the articles below. Extract the most exciting points and combine them into an excillerating newsletter.\n#ARTICLES\n\n"{text}"\n\nNEWSLETTER:\n# Your AI curated newsletter\n""",
     input_variables=["context", "text"])

 # llm = FakeListLLM(responses=list(map(str, range(100))))
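For orientation, the prompt above is consumed by the stuff_chain that the later hunks call, together with the document_prompt named in this hunk's header. A minimal sketch of that wiring, assuming the legacy LangChain LLMChain/StuffDocumentsChain API; the stand-in templates and the FakeListLLM placeholder are assumptions, not code taken from app.py:

from langchain.chains import LLMChain
from langchain.chains.combine_documents.stuff import StuffDocumentsChain
from langchain.llms.fake import FakeListLLM
from langchain.prompts import PromptTemplate

# Stand-ins for the document_prompt and prompt defined earlier in app.py.
document_prompt = PromptTemplate(
    template="{page_content}", input_variables=["page_content"]
)
prompt = PromptTemplate(
    template='Newsletter for a researcher who says: "{context}"\n\n#ARTICLES\n\n"{text}"\n\nNEWSLETTER:\n',
    input_variables=["context", "text"],
)

llm = FakeListLLM(responses=["(stub newsletter)<|end|>"])  # placeholder model

# Each retrieved Document is rendered through document_prompt, the rendered
# pieces are joined, and the result is substituted for {text} in prompt.
stuff_chain = StuffDocumentsChain(
    llm_chain=LLMChain(llm=llm, prompt=prompt),
    document_prompt=document_prompt,
    document_variable_name="text",
)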
@@ -58,6 +58,7 @@ def process_document(doc: Document):
     return Document(page_content=doc.metadata["Summary"], metadata=metadata)

 def get_data(user_query: str):
+    print("User query:", user_query)
     docs = loader.load()
     docs = [process_document(doc) for doc in docs]
     db = Chroma.from_documents(docs, embeddings)
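Between this hunk and the next, get_data turns the Chroma index into relevant_docs; that part of the function is unchanged and therefore not shown in the diff. A plausible sketch of that step, assuming a plain similarity search; the stand-in documents, the FakeEmbeddings placeholder, and k=4 are assumptions:

from langchain.embeddings import FakeEmbeddings
from langchain.schema import Document
from langchain.vectorstores import Chroma

# Stand-in inputs; in app.py, docs comes from loader.load() followed by process_document.
docs = [
    Document(
        page_content="An abstract about retrieval-augmented generation.",
        metadata={"Title": "Example paper", "Summary": "An abstract about retrieval-augmented generation."},
    )
]
user_query = "I work on retrieval-augmented generation."

embeddings = FakeEmbeddings(size=384)  # placeholder; app.py's embeddings object is not in the diff
db = Chroma.from_documents(docs, embeddings)
relevant_docs = db.similarity_search(user_query, k=4)  # k=4 is an assumption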
@@ -69,7 +70,8 @@ def get_data(user_query: str):
         articles += f"**Title: {doc.metadata['Title']}**\n\nAbstract: {doc.metadata['Summary']}\n\n"
     output = stuff_chain({"input_documents": relevant_docs, "context": user_query})
     output_text = output["output_text"].split("<|end|>")[0]
-
+    print("LLM output:", output_text)
+    return f"# Your AI curated newsletter\n{output['output_text']}\n\n\n\n## Used articles:\n\n{articles}"

 demo = gr.Interface(
     fn=get_data,
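The gr.Interface call continues beyond the end of this hunk, so the input and output components are not visible here. Since get_data now returns a markdown string, a markdown output component is a natural fit; a sketch of plausible wiring, in which the component choices, the stub get_data, and the launch call are all assumptions:

import gradio as gr

def get_data(user_query: str) -> str:
    # Stub standing in for the real get_data above, so the sketch runs on its own.
    return f"# Your AI curated newsletter\n(placeholder for: {user_query})"

demo = gr.Interface(
    fn=get_data,
    inputs=gr.Textbox(label="Describe your research"),  # assumed input component
    outputs=gr.Markdown(),  # matches the markdown string get_data returns
)

if __name__ == "__main__":
    demo.launch()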