bigPear committed on
Commit 38a35b2 · 1 Parent(s): 7975f51

Update app.py

Files changed (1)
  1. app.py +51 -26
app.py CHANGED
@@ -1,35 +1,60 @@
+from transformers import AutoModel, AutoTokenizer
 import gradio as gr
 
-def add_text(history, text):
-    history = history + [(text, None)]
-    return history, ""
+tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
+model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
+#.half().cuda()
+model = model.eval()
+
+# def add_text(history, text):
+#     history = history + [(text, None)]
+#     return history, ""
 
-def add_file(history, file):
-    history = history + [((file.name,), None)]
-    return history
+# def add_file(history, file):
+#     history = history + [((file.name,), None)]
+#     return history
 
-def bot(history):
-    response = "**That's cool!**"
-    history[-1][1] = response
-    return history
+# def bot(history):
+#     response = "**That's cool!**"
+#     history[-1][1] = response
+#     return history
+
+def predict(input, history=None):
+    if history is None:
+        history = []
+    response, history = model.chat(tokenizer, input, history)
+    return history, history
 
 with gr.Blocks() as demo:
-    chatbot = gr.Chatbot([], elem_id="chatbot").style(height=750)
-
-    with gr.Row():
-        with gr.Column(scale=0.85):
-            txt = gr.Textbox(
-                show_label=False,
-                placeholder="Enter text and press enter, or upload an image",
-            ).style(container=False)
-        with gr.Column(scale=0.15, min_width=0):
-            btn = gr.UploadButton("📁", file_types=["image", "video", "audio"])
-
-    txt.submit(add_text, [chatbot, txt], [chatbot, txt]).then(
-        bot, chatbot, chatbot
-    )
-    btn.upload(add_file, [chatbot, btn], [chatbot]).then(
-        bot, chatbot, chatbot
+    chatbot = gr.Chatbot()
+    msg = gr.Textbox()
+    clear = gr.Button("Clear")
+
+    def user(user_message, history):
+        return "", history + [[user_message, None]]
+
+    def bot(msg, history):
+        # bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])
+        bot_message = predict(msg, history)
+        history[-1][1] = ""
+        for character in bot_message:
+            history[-1][1] += character
+            time.sleep(0.05)
+            yield history
+
+    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
+        bot, [msg, chatbot], [msg, chatbot]
     )
+    clear.click(lambda: None, None, chatbot, queue=False)
+
+demo.queue()
+demo.launch()
+
+# txt.submit(predict, [txt, state], [chatbot, state])
+# button.click(predict, [txt, state], [chatbot, state])
+
+# btn.upload(add_file, [chatbot, btn], [chatbot]).then(
+#     bot, chatbot, chatbot
+# )
 
 demo.launch()
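
Reviewer note: as committed, the new app.py has a few issues. time.sleep is called but time is never imported; predict returns (history, history), so the for-character loop in bot walks a pair of history lists rather than the reply text; bot reads msg after the user handler has already cleared the textbox, so it typically receives an empty string; and the trailing demo.launch() carried over from the old file is redundant. Below is a minimal sketch of a runnable variant, assuming the same THUDM/chatglm-6b model and the Gradio Blocks streaming-chatbot pattern; it is illustrative only, not the code in this commit.

import time

import gradio as gr
from transformers import AutoModel, AutoTokenizer

# Load ChatGLM-6B as in the commit; add .half().cuda() when a GPU is available.
tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
model = model.eval()


def predict(text, history=None):
    # Return only the reply string; model.chat also returns the updated history,
    # but the UI below rebuilds it from the Chatbot component instead.
    if history is None:
        history = []
    response, _ = model.chat(tokenizer, text, history)
    return response


with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def user(user_message, history):
        # Append the user turn with an empty bot slot and clear the textbox.
        return "", history + [[user_message, None]]

    def bot(history):
        # The textbox is already cleared by the time this runs, so take the query
        # from the chat history; earlier turns become ChatGLM's chat history.
        query = history[-1][0]
        past = [tuple(pair) for pair in history[:-1]]
        reply = predict(query, past)
        history[-1][1] = ""
        for character in reply:
            history[-1][1] += character
            time.sleep(0.05)  # replay the finished reply character by character
            yield history

    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    clear.click(lambda: None, None, chatbot, queue=False)

demo.queue()
demo.launch()

This keeps the commit's streaming effect by generating the full reply first and replaying it; for token-level streaming one could switch the loop to the model's stream_chat generator instead.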