# Streamlit + Haystack RAG support-agent demo (Cohere embeddings/generation, Pinecone store).
import streamlit as st
from haystack import Pipeline
from haystack_integrations.document_stores.pinecone import PineconeDocumentStore
from haystack.components.builders.answer_builder import AnswerBuilder
from haystack.components.builders.prompt_builder import PromptBuilder
from haystack_integrations.components.embedders.cohere import CohereTextEmbedder
from haystack_integrations.components.retrievers.pinecone import PineconeEmbeddingRetriever
from haystack_integrations.components.generators.cohere import CohereGenerator
from haystack import Document
def start_haystack(openai_key):
    """Build and return the RAG support-agent pipeline.

    Wires a Cohere text embedder -> Pinecone retriever -> prompt builder ->
    Cohere generator -> answer builder, and marks the Streamlit session as
    started via ``st.session_state["haystack_started"]``.

    Args:
        openai_key: Unused. NOTE(review): this pipeline uses Cohere and
            Pinecone (credentials read from the environment by their
            clients), not OpenAI — confirm whether callers still need to
            pass this and whether it can be dropped or renamed.

    Returns:
        The connected Haystack ``Pipeline``, ready to be passed to ``query``.
    """
    # Index dimension 1024 matches Cohere's embed-english-v3.0 output size.
    document_store = PineconeDocumentStore(dimension=1024, index="zen", environment="gcp-starter")
    # Jinja template: renders retrieved documents as context plus the
    # customer's message. Kept at column 0 so the prompt text is unchanged.
    template = """
You are a support agent replying to customers' messages. Use the context to answer the customer, starting by greeting them and ending with goodbyes.
DO NOT TRY TO GUESS INFORMATION. If the context doesn't provide you with the answer, ONLY say this: [].
Context:
{% for document in documents %}
{{ document.content }}
{% endfor %}
Customer's message: {{ query }}?
"""
    st.session_state["haystack_started"] = True

    pipe = Pipeline()
    pipe.add_component("text_embedder", CohereTextEmbedder(model="embed-english-v3.0"))
    pipe.add_component("retriever", PineconeEmbeddingRetriever(document_store=document_store, top_k=3))
    pipe.add_component("prompt_builder", PromptBuilder(template=template))
    pipe.add_component("llm", CohereGenerator(model="command-nightly"))
    pipe.add_component("answer_builder", AnswerBuilder())

    # Query embedding feeds the retriever; retrieved docs feed both the
    # prompt and the final answer objects; LLM replies/meta feed the builder.
    pipe.connect("text_embedder.embedding", "retriever.query_embedding")
    pipe.connect("retriever", "prompt_builder.documents")
    pipe.connect("prompt_builder", "llm")
    pipe.connect("llm.replies", "answer_builder.replies")
    pipe.connect("llm.meta", "answer_builder.meta")
    pipe.connect("retriever", "answer_builder.documents")
    return pipe
@st.cache_data(show_spinner=True)
def query(pipe, question=None):
    """Run the RAG pipeline on a customer message and return the answers.

    Args:
        pipe: The Haystack ``Pipeline`` built by ``start_haystack``.
            NOTE(review): ``st.cache_data`` hashes its arguments; if
            ``Pipeline`` is unhashable this will raise at call time —
            consider renaming the parameter to ``_pipe`` so Streamlit skips
            hashing it (confirm with existing call sites first).
        question: The customer's message. Defaults to the sample message the
            original code hard-coded, keeping one-argument callers working.

    Returns:
        The list of answers from the ``answer_builder`` component, or a
        one-element fallback list if the pipeline run fails.
    """
    if question is None:
        # Bug fix: the original referenced an undefined global `question`
        # (it was a local of start_haystack), so every call raised NameError
        # and returned the fallback. Keep the intended sample as the default.
        question = "It doesn't work on Android. The app is not blocking call!!!"
    try:
        replies = pipe.run({
            "text_embedder": {"text": question},
            "prompt_builder": {"query": question},
            "answer_builder": {"query": question},
        })
        # Bug fix: the original printed `result` *before* assigning it,
        # raising NameError and always falling through to the except branch.
        result = replies["answer_builder"]["answers"]
        print(result)
    except Exception as e:
        # UI boundary: show a friendly fallback instead of crashing the app;
        # the exception is printed for server-side debugging.
        print(e)
        result = ["Something went wrong!"]
    return result