zizo66 committed on
Commit
da7db39
·
verified ·
1 Parent(s): 0877c16

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -35
app.py CHANGED
@@ -1,13 +1,22 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
4
- """
5
- For more information on huggingface_hub Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
6
- """
7
  client = InferenceClient("meta-llama/Llama-2-7b-chat-hf")
8
 
 
 
 
 
 
 
 
9
 
 
 
 
10
 
 
11
  def respond(
12
  message,
13
  history: list[tuple[str, str]],
@@ -36,39 +45,10 @@ def respond(
36
  top_p=top_p,
37
  ):
38
  token = message.choices[0].delta.content
39
-
40
  response += token
41
  yield response
42
 
43
-
44
- """
45
- For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
46
- """
47
- demo = gr.ChatInterface(
48
- respond,
49
- additional_inputs=[
50
- gr.Textbox(value="You are a language tutor AI. Help users practice real-life conversations.", label="System message")
51
- gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
52
- gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
53
- gr.Slider(
54
- minimum=0.1,
55
- maximum=1.0,
56
- value=0.95,
57
- step=0.05,
58
- label="Top-p (nucleus sampling)",
59
- ),
60
- ],
61
- )
62
- scenarios = {
63
- "restaurant": "You are in a restaurant. Help the user order food in English.",
64
- "airport": "You are at an airport. Help the user check in and find their gate.",
65
- "hotel": "You are in a hotel. Help the user book a room.",
66
- "shopping": "You are in a store. Help the user ask for prices and sizes.",
67
- }
68
-
69
- def scenario_prompt(choice):
70
- return scenarios.get(choice, "You are a language tutor AI. Help users practice real-life conversations.")
71
-
72
  demo = gr.ChatInterface(
73
  respond,
74
  additional_inputs=[
@@ -80,6 +60,5 @@ demo = gr.ChatInterface(
80
  ],
81
  )
82
 
83
-
84
  if __name__ == "__main__":
85
- demo.launch()
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
4
# Inference API client for the chat model hosted on the Hugging Face Hub.
client = InferenceClient("meta-llama/Llama-2-7b-chat-hf")
6
 
7
# Catalogue of practice scenarios: each key maps to the system prompt
# that frames the conversation for that situation.
scenarios = {
    "restaurant": "You are in a restaurant. Help the user order food in English.",
    "airport": "You are at an airport. Help the user check in and find their gate.",
    "hotel": "You are in a hotel. Help the user book a room.",
    "shopping": "You are in a store. Help the user ask for prices and sizes.",
}


def scenario_prompt(choice):
    """Return the system prompt for *choice*.

    Falls back to the generic language-tutor prompt when *choice* is not a
    known scenario key.
    """
    default_prompt = "You are a language tutor AI. Help users practice real-life conversations."
    return scenarios.get(choice, default_prompt)
18
 
19
+ # دالة لمعالجة المحادثة
20
  def respond(
21
  message,
22
  history: list[tuple[str, str]],
 
45
  top_p=top_p,
46
  ):
47
  token = message.choices[0].delta.content
 
48
  response += token
49
  yield response
50
 
51
+ # واجهة Gradio للمحادثة
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
52
  demo = gr.ChatInterface(
53
  respond,
54
  additional_inputs=[
 
60
  ],
61
  )
62
 
 
63
# Launch the Gradio app only when this file is executed as a script.
if __name__ == "__main__":
    demo.launch()