disLodge committed
Commit 41ad78f · verified · 1 Parent(s): 94b1c33

New updates

Files changed (1)
  1. app.py +7 -79
app.py CHANGED
@@ -1,12 +1,8 @@
 import gradio as gr
 from huggingface_hub import InferenceClient

-"""
-For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
-"""
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

-
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -16,16 +12,9 @@ def respond(
     top_p,
 ):
     messages = [{"role": "system", "content": system_message}] + history
-    messages.append({"role":"user","content":message})
+    messages.append({"role": "user", "content": message})

     response = ""
-
-    # for val in history:
-    #     if val[0]:
-    #         messages.append({"role": "user", "content": val[0]})
-    #     if val[1]:
-    #         messages.append({"role": "assistant", "content": val[1]})
-
     for part in client.chat_completion(
         messages, max_tokens=max_tokens, stream=True, temperature=temperature,
         top_p=top_p
@@ -34,52 +23,13 @@ def respond(
         if token:
             response += token

-    history.append({"role":"user", "content": message})
-    history.append({"role":"assistant", "content": response})
-
-    return history,""
+    history.append({"role": "user", "content": message})
+    history.append({"role": "assistant", "content": response})

-    # for message in client.chat_completion(
-    #     messages,
-    #     max_tokens=max_tokens,
-    #     stream=True,
-    #     temperature=temperature,
-    #     top_p=top_p,
-    # ):
-    #     token = message.choices[0].delta.content
-
-    #     response += token
-    #     yield response
-
-
-"""
-For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
-"""
-# demo = gr.ChatInterface(
-#     respond,
-#     additional_inputs=[
-#         gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-#         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-#         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-#         gr.Slider(
-#             minimum=0.1,
-#             maximum=1.0,
-#             value=0.95,
-#             step=0.05,
-#             label="Top-p (nucleus sampling)",
-#         ),
-#     ],
-# )
+    return history, ""

 with gr.Blocks() as demo:
-    gr.Markdown("## Zephyr Chatbot with Custom UI")
-
-    chatbot = gr.Chatbot(type="messages", label="Chatbot")
-    state = gr.State([])
-
-    with gr.Row():
-        msg = gr.Textbox(label="Type your message...", scale=6)
-        send_btn = gr.Button("Send", scale=1)
+    gr.Markdown("## Zephyr Chatbot Controls")

     role_dropdown = gr.Dropdown(choices=["SDE", "BA"], label="Select Role", value="SDE")

@@ -92,30 +42,8 @@ with gr.Blocks() as demo:
     clear_btn = gr.Button("Clear Chat")
     dummy_btn = gr.Button("Dummy Action")

-    def handle_submit(message, history, system, max_tokens, temperature, top_p):
-        return respond(message, history, system, max_tokens, temperature, top_p)
-        # final_response = ""
-        # for r in response_gen:
-        #     final_response = r
-        # updated_history = history + [(message, final_response)]
-        # return updated_history, updated_history, ""
-
-    send_btn.click(
-        handle_submit,
-        [msg, state, system, max_tokens, temperature, top_p],
-        [chatbot, state, msg],
-    )
-
-    msg.submit(
-        handle_submit,
-        [msg, state, system, max_tokens, temperature, top_p],
-        [chatbot, state, msg],
-    )
-
-    clear_btn.click(lambda: ([], [], ""), None, [chatbot, state, msg])
-
-    dummy_btn.click(lambda: gr.Info("Dummy action clicked!"), None, None)
-
+    clear_btn.click(lambda: gr.Info("Chat cleared!"))
+    dummy_btn.click(lambda: gr.Info("Dummy action clicked!"))

 if __name__ == "__main__":
     demo.launch()
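
After this commit, respond appends the user turn and the streamed reply to history in messages format and returns (history, ""), but the committed Blocks layout no longer defines a chatbot, textbox, or send button to call it. Below is a minimal wiring sketch under that assumption; the component names (chatbot, msg, send_btn, handle_submit) and the hard-coded generation settings echo what the commit removed and are illustrative only, not part of the committed file.

# Illustrative only: one way to reconnect respond() from app.py above to a
# messages-format UI. None of these components exist in the committed file.
import gradio as gr

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages", label="Chatbot")  # assumed component
    msg = gr.Textbox(label="Type your message...")          # assumed component
    send_btn = gr.Button("Send")

    def handle_submit(message, history):
        # respond() (defined in app.py above) returns (updated_history, ""),
        # so one call can refresh the Chatbot and clear the textbox.
        # System prompt and sampling settings are fixed here for brevity.
        return respond(message, history or [], "You are a friendly Chatbot.", 512, 0.7, 0.95)

    send_btn.click(handle_submit, [msg, chatbot], [chatbot, msg])
    msg.submit(handle_submit, [msg, chatbot], [chatbot, msg])

if __name__ == "__main__":
    demo.launch()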