Spaces:
Running
Running
Commit
·
a218b95
1
Parent(s):
9856f9f
Update app.py
Browse files
app.py
CHANGED
@@ -8,7 +8,6 @@ import google.generativeai as genai
|
|
8 |
genai.configure(api_key=os.getenv('PALM_API_KEY'))
|
9 |
|
10 |
# Gradio
|
11 |
-
|
12 |
chat_defaults = {
|
13 |
'model': 'models/chat-bison-001',
|
14 |
'temperature': 0.25,
|
@@ -17,44 +16,65 @@ chat_defaults = {
|
|
17 |
'top_p': 0,
|
18 |
}
|
19 |
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
-
print(
|
24 |
-
print(history)
|
25 |
context = "You are an intelligent chatbot powered by biggest technology company."
|
26 |
print("Generating Chat Message...")
|
27 |
-
print(f"User Message:\n{
|
28 |
-
|
29 |
try:
|
30 |
response = genai.chat(
|
31 |
**chat_defaults,
|
32 |
context=context,
|
33 |
-
messages=
|
34 |
)
|
35 |
-
print(f"\n{response}\n")
|
36 |
result = response.last
|
37 |
if result is None:
|
38 |
result = "Apologies but something went wrong. Please try again later."
|
39 |
-
|
40 |
else:
|
41 |
-
|
42 |
-
pass
|
43 |
except Exception as e:
|
44 |
result = "Apologies but something went wrong. Please try again later."
|
45 |
-
|
46 |
-
print(f"Exception {e}\n")
|
47 |
-
|
48 |
print(f"Bot Message:\n{result}\n")
|
|
|
49 |
|
50 |
-
|
51 |
-
|
52 |
-
|
|
|
|
|
|
|
53 |
|
54 |
-
|
55 |
-
|
56 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
57 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
58 |
|
59 |
app.queue()
|
60 |
app.launch()
|
|
|
8 |
genai.configure(api_key=os.getenv('PALM_API_KEY'))
|
9 |
|
10 |
# Gradio
|
|
|
11 |
chat_defaults = {
|
12 |
'model': 'models/chat-bison-001',
|
13 |
'temperature': 0.25,
|
|
|
16 |
'top_p': 0,
|
17 |
}
|
18 |
|
19 |
+
# Shared conversation history for the default (single-session) chat.
chat_history = []


def generate_chat(prompt: str, chat_messages=None):
    """Send *prompt* to the PaLM chat model and return the bot's reply.

    Args:
        prompt: The user's message to append to the conversation.
        chat_messages: Optional conversation history (list of messages) to
            send with the request. Defaults to the module-level
            ``chat_history`` so successive calls share one conversation.
            (The original code used a mutable default argument bound to
            ``chat_history``; resolving it at call time is equivalent for
            callers but avoids the mutable-default pitfall.)

    Returns:
        The model's last reply as a string, or a canned apology string when
        the API returns no result or raises.
    """
    if chat_messages is None:
        chat_messages = chat_history
    print(chat_messages)
    context = "You are an intelligent chatbot powered by biggest technology company."
    print("Generating Chat Message...")
    print(f"User Message:\n{prompt}\n")
    chat_messages.append(prompt)
    try:
        response = genai.chat(
            **chat_defaults,
            context=context,
            messages=chat_messages
        )
        result = response.last
        if result is None:
            result = "Apologies but something went wrong. Please try again later."
            # BUGFIX: mutate the list in place. The previous
            # `chat_messages = chat_messages[:-1]` rebound a local to a new
            # list, so the shared history was never actually trimmed and the
            # failed prompt lingered in it.
            del chat_messages[-1]
        else:
            chat_messages.append(result)
    except Exception as e:
        result = "Apologies but something went wrong. Please try again later."
        # Same in-place trim on the exception path.
        del chat_messages[-1]
        print(f"Exception {e} occured\n")
    # NOTE: the original `chat_history = chat_messages` here only created a
    # dead local (no `global` declaration), so it is removed; in-place
    # mutation above already keeps the shared history current.
    print(f"Bot Message:\n{result}\n")
    return result
46 |
|
47 |
+
# Gradio UI: a streaming chatbot wired to generate_chat.
with gr.Blocks(theme='HaleyCH/HaleyCH_Theme') as app:
    # Page header built from the module-level title/description.
    gr.Markdown(
        f"""
        # {title}
        ### {description}
        """)

    chatbot = gr.Chatbot(height=500)
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def user(user_message, history):
        # Clear the textbox and append the new user turn (bot slot empty).
        return "", history + [[user_message, None]]

    def bot(history):
        # Ask the model for a reply to the latest user turn, then stream it
        # back one character at a time for a typing effect.
        bot_message = generate_chat(history[-1][0])
        history[-1][1] = ""
        for character in bot_message:
            history[-1][1] += character
            time.sleep(0.01)
            yield history

    # Submit: record the user turn immediately, then stream the bot turn.
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    # Clear wipes the chat display.
    clear.click(lambda: None, None, chatbot, queue=False)

    # Page footer.
    gr.Markdown(
        f"""
        Testing by __G__
        """)

app.queue()
app.launch()
|