Update app.py
app.py CHANGED
@@ -1,5 +1,4 @@
 from langchain.prompts.prompt import PromptTemplate
-# from langchain.llms import OpenAI
 from langchain.llms import OpenAIChat
 from langchain.chains import ChatVectorDBChain
 from langchain.embeddings import HuggingFaceEmbeddings, HuggingFaceInstructEmbeddings
@@ -76,17 +75,9 @@ def get_chain(vectorstore):
         condense_question_prompt=CONDENSE_QUESTION_PROMPT,
     )
     return qa_chain
-
-
-
-    If no api_key, then None is returned.
-    """
-    if api_key:
-        os.environ["OPENAI_API_KEY"] = api_key
-        chain = get_chain(vectorstore)
-        os.environ["OPENAI_API_KEY"] = ""
-        return chain
-
+
+chain = get_chain(vectorstore)
+
 class ChatWrapper:
 
     def __init__(self):
@@ -98,15 +89,9 @@ class ChatWrapper:
         self.lock.acquire()
         try:
             history = history or []
-            # If chain is None, that is because no API key was provided.
-            # if chain is None:
-            #     history.append((inp, "Please paste your OpenAI key to use"))
-            #     return history, history
             # Set OpenAI key
-
-            openai.api_key = os.environ["OPENAI_API_KEY"]
+
             # Run chain and append input.
-            chain = get_chain(vectorstore)
             output = chain({"question": inp, "chat_history": history})["answer"]
             history.append((inp, output))
         except Exception as e:
@@ -125,11 +110,13 @@ with block:
 
     openai_api_key_textbox = gr.Textbox(
         placeholder="Paste your OpenAI API key (sk-...)",
+        value=os.environ["OPENAI_API_KEY"],
         show_label=False,
         lines=1,
         type="password",
     )
 
+
     with gr.Row():
         embeddings = gr.Radio(choices=model_options_list,value=model_options_list[0], label='Choose your Embedding Model',
                               interactive=True)
@@ -167,14 +154,8 @@ with block:
     agent_state = gr.State()
 
 
-    submit.click(chat, inputs=[
-    message.submit(chat, inputs=[
-
-    openai_api_key_textbox.change(
-        set_openai_api_key,
-        inputs=[openai_api_key_textbox],
-        outputs=[agent_state],
-    )
+    submit.click(chat, inputs=[message, state, agent_state], outputs=[chatbot, state])
+    message.submit(chat, inputs=[message, state, agent_state], outputs=[chatbot, state])
 
     gr.Markdown("")
 
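
Taken together, the commit drops the leftover set_openai_api_key plumbing, builds the chain once at module level, pre-fills the API-key textbox from OPENAI_API_KEY, and wires submit.click / message.submit straight to the chat wrapper. Below is a minimal sketch of how the resulting app.py could fit together. Everything outside the visible hunks (the condense-question template text, the toy FAISS vectorstore, the ChatWrapper error handling and lock release, the Blocks layout and block.launch()) is an illustrative assumption, not code taken from the repository; only the lines matching the diff (module-level chain = get_chain(vectorstore), the pre-filled key textbox, the two event bindings) come from the commit itself.

import os
from threading import Lock

import gradio as gr
from langchain.prompts.prompt import PromptTemplate
from langchain.llms import OpenAIChat
from langchain.chains import ChatVectorDBChain
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS

# Illustrative stand-ins for parts of app.py that are outside the diff.
CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(
    "Given the chat history:\n{chat_history}\n"
    "rephrase the follow-up question as a standalone question: {question}"
)
vectorstore = FAISS.from_texts(["Example document text."], HuggingFaceEmbeddings())


def get_chain(vectorstore):
    # OpenAIChat reads OPENAI_API_KEY from the environment, so no key
    # swapping is needed at request time.
    return ChatVectorDBChain.from_llm(
        OpenAIChat(temperature=0),
        vectorstore,
        condense_question_prompt=CONDENSE_QUESTION_PROMPT,
    )


# Built once at import time; previously a fresh chain was created inside the
# chat handler (and by the now-removed set_openai_api_key helper).
chain = get_chain(vectorstore)


class ChatWrapper:
    def __init__(self):
        self.lock = Lock()

    def __call__(self, inp, history, agent):
        self.lock.acquire()
        try:
            history = history or []
            # Run chain and append input.
            output = chain({"question": inp, "chat_history": history})["answer"]
            history.append((inp, output))
        except Exception as e:
            history.append((inp, f"Error: {e}"))
        finally:
            self.lock.release()
        return history, history


chat = ChatWrapper()

with gr.Blocks() as block:
    openai_api_key_textbox = gr.Textbox(
        placeholder="Paste your OpenAI API key (sk-...)",
        value=os.environ["OPENAI_API_KEY"],  # pre-filled; assumes the key is set in the environment
        show_label=False,
        lines=1,
        type="password",
    )
    chatbot = gr.Chatbot()
    message = gr.Textbox(placeholder="Ask a question about your data")
    submit = gr.Button("Send")
    state = gr.State()
    agent_state = gr.State()

    # Both triggers now call the wrapper directly with a fixed input list;
    # the key textbox is no longer part of the event wiring.
    submit.click(chat, inputs=[message, state, agent_state], outputs=[chatbot, state])
    message.submit(chat, inputs=[message, state, agent_state], outputs=[chatbot, state])

block.launch()

One consequence of this layout: the chain is created when the module loads, so OPENAI_API_KEY must already be present in the environment at startup, and pasting a key into the textbox appears to have no effect anymore, since its openai_api_key_textbox.change(set_openai_api_key, ...) handler is removed by this commit.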