Update app.py
Browse files
app.py
CHANGED
@@ -1,71 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
import gradio as gr
|
2 |
-
from
|
3 |
-
|
4 |
-
|
5 |
-
|
6 |
-
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
25 |
"""
|
26 |
|
27 |
-
|
28 |
-
|
29 |
-
chat_history_container = gr.HTML("<div class='chat-messages'>Привет! Я Помогатор, готов помочь тебе с любыми вопросами!) 😊</div>")
|
30 |
-
with gr.Row():
|
31 |
-
input_message = gr.Textbox(placeholder="Введите ваше сообщение здесь...", lines=2, interactive=True, max_lines=5)
|
32 |
-
send_button = gr.Button("Отправить")
|
33 |
-
attach_button = gr.File(label="", file_count="single", interactive=True)
|
34 |
-
attach_button.style(icon="paperclip", hide_label=True)
|
35 |
-
clear_button = gr.Button(label="")
|
36 |
-
clear_button.style(icon="times", hide_label=True, visible=False)
|
37 |
-
|
38 |
-
def preview_image(file_info):
|
39 |
-
if file_info is not None:
|
40 |
-
clear_button.style(visible=True)
|
41 |
-
attach_button.style(visible=False)
|
42 |
-
image = Image.open(io.BytesIO(file_info["content"]))
|
43 |
-
img_str = encode_image_to_base64(image)
|
44 |
-
return f"<div class='chat-messages'><img class='image-preview' src='data:image/jpeg;base64,{img_str}' /></div>"
|
45 |
-
else:
|
46 |
-
clear_button.style(visible=False)
|
47 |
-
attach_button.style(visible=True)
|
48 |
-
return "<div class='chat-messages'></div>"
|
49 |
-
|
50 |
-
def clear_image():
|
51 |
-
attach_button.reset()
|
52 |
-
clear_button.style(visible=False)
|
53 |
-
attach_button.style(visible=True)
|
54 |
-
return "<div class='chat-messages'></div>"
|
55 |
-
|
56 |
-
def handle_send(message, image=None):
|
57 |
-
response, img_str = send_message(message, image)
|
58 |
-
new_message = f"<div class='chat-messages'><strong>Вы:</strong> {message}</div>" if message else ""
|
59 |
-
if img_str:
|
60 |
-
new_message += f"<div class='chat-messages'><img class='image-preview' src='data:image/jpeg;base64,{img_str}' /></div>"
|
61 |
-
new_message += f"<div class='chat-messages'><strong>Помогатор:</strong> {response}</div>"
|
62 |
-
chat_history_container.update(new_message + chat_history_container.value)
|
63 |
-
input_message.reset()
|
64 |
-
clear_image()
|
65 |
|
66 |
-
|
67 |
-
attach_button.change(preview_image, inputs=[attach_button], outputs=[chat_history_container])
|
68 |
-
clear_button.click(clear_image, inputs=[], outputs=[chat_history_container])
|
69 |
|
70 |
-
|
71 |
-
demo.launch()
|
|
|
import os
import subprocess
import sys

# Third-party packages this app needs at runtime.
dependencies = ["transformers", "torch"]

# Best-effort bootstrap: install any dependency that is not importable yet.
# NOTE(review): installing at process startup only works when the process can
# write to its environment; a requirements.txt is the preferred mechanism.
for dependency in dependencies:
    try:
        __import__(dependency)
        print(f"{dependency} is already installed.")
    except ImportError:
        print(f"{dependency} is not installed. Installing...")
        # BUG FIX: os.system(f'pip install ...') may run a "pip" belonging to a
        # different interpreter than the one executing this script, and its
        # non-zero exit status was silently ignored. Invoking pip as a module
        # of the *current* interpreter and using check_call makes a failed
        # install raise immediately instead of crashing later on import.
        subprocess.check_call([sys.executable, "-m", "pip", "install", dependency])
|
14 |
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# UI title and the canned example pre-filled in the textbox below.
TITLE = "Chattybot"
EXAMPLE_INPUT = "hello"
# System instruction prepended to every prompt built by predict_local().
SYSTEM_PROMPT = "As a generative chatbot (you are not a GPT but your structure is 50% the same), your primary function is to provide helpful and friendly responses to user queries. Feel free to add some personality, but make sure your responses are accurate and helpful. Your owner and developer is: @Costikoooo (Discord user) other developers are unknown. Your name is Chattybot."

# Load the chat model and tokenizer once at import time.
# NOTE(review): zephyr-7b-beta is a 7B-parameter model — this download/load is
# very heavy and dominates startup time; confirm the host has the RAM for it.
model_name = "HuggingFaceH4/zephyr-7b-beta"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
25 |
+
def predict_local(message, chatbot=[], system_prompt=""):
    """Build a zephyr-style prompt from the chat history and generate a reply.

    Parameters:
        message: the latest user message (str).
        chatbot: prior (user, bot) message pairs; read-only here, so the
            mutable default is tolerated, but callers always pass their own.
        system_prompt: system instruction prepended to the conversation.

    Returns:
        The model's decoded reply text (str).
    """
    # Assemble the conversation in the </s>-delimited format the original
    # author used for this model — TODO confirm it matches zephyr's expected
    # chat template.
    input_prompt = "\n" + system_prompt + "</s>\n\n"
    for interaction in chatbot:
        input_prompt += str(interaction[0]) + "</s>\n\n" + str(interaction[1]) + "\n</s>\n\n"
    input_prompt += str(message) + "</s>\n"

    inputs = tokenizer(input_prompt, return_tensors="pt")
    # BUG FIX: the previous version called model(**inputs) and passed
    # outputs["logits"][0] — a float tensor of per-token vocabulary scores —
    # to tokenizer.decode(), which expects token ids. Use generate() to
    # obtain real token ids, then decode only the newly generated suffix so
    # the prompt is not echoed back.
    output_ids = model.generate(**inputs, max_new_tokens=256, do_sample=False)
    prompt_len = inputs["input_ids"].shape[1]
    generated_text = tokenizer.decode(output_ids[0][prompt_len:], skip_special_tokens=True)

    return generated_text
38 |
+
def test_preview_chatbot(message, history):
    """Gradio ChatInterface callback: produce the bot's reply to *message*.

    Parameters:
        message: the latest user message (str).
        history: list of (user, bot) pairs maintained by gr.ChatInterface.

    Returns:
        The model's reply text (str).
    """
    response = predict_local(message, history, SYSTEM_PROMPT)
    # BUG FIX: the old code computed `response.rfind("") + len("")` and sliced
    # from there — but str.rfind("") returns len(response), so the slice was
    # always the empty string and the bot always replied with nothing. The
    # empty literals were presumably a marker token (e.g. "<|assistant|>")
    # lost in transit — TODO confirm against the original source; until then,
    # return the full generated text.
    return response
44 |
+
# Greeting shown as the bot's first message; interpolates the constants above.
welcome_preview_message = f"""
Welcome to **{TITLE}**! Say something like:
"{EXAMPLE_INPUT}"
"""

# Chat display pre-seeded with the welcome message (None = no user turn yet).
chatbot_preview = gr.Chatbot(layout="panel", value=[(None, welcome_preview_message)])
# Input box pre-filled with the example prompt.
textbox_preview = gr.Textbox(scale=7, container=False, value=EXAMPLE_INPUT)

# Wire the prediction callback into a standard Gradio chat UI.
demo = gr.ChatInterface(test_preview_chatbot, chatbot=chatbot_preview, textbox=textbox_preview)

# Start the web server (blocking call).
demo.launch()