Greff3 committed on
Commit
f983dbd
·
verified ·
1 Parent(s): 259a60c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -39
app.py CHANGED
@@ -24,31 +24,8 @@ client = OpenAI(
24
  # Create supported models
25
  model_links = {
26
  "Meta-Llama-3.1-70B-Instruct": "meta-llama/Meta-Llama-3.1-70B-Instruct",
27
- "Meta-Llama-3.1-405B-Instruct-FP8": "meta-llama/Meta-Llama-3.1-405B-Instruct-FP8",
28
- "Meta-Llama-3.1-405B-Instruct": "meta-llama/Meta-Llama-3.1-405B-Instruct",
29
  "Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct",
30
- "Meta-Llama-3-70B-Instruct": "meta-llama/Meta-Llama-3-70B-Instruct",
31
- "Meta-Llama-3-8B-Instruct": "meta-llama/Meta-Llama-3-8B-Instruct",
32
  "C4ai-command-r-plus": "CohereForAI/c4ai-command-r-plus",
33
- "Aya-23-35B": "CohereForAI/aya-23-35B",
34
- "Zephyr-orpo-141b-A35b-v0.1": "HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
35
- "Mixtral-8x7B-Instruct-v0.1": "mistralai/Mixtral-8x7B-Instruct-v0.1",
36
- "Codestral-22B-v0.1": "mistralai/Codestral-22B-v0.1",
37
- "Nous-Hermes-2-Mixtral-8x7B-DPO": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
38
- "Yi-1.5-34B-Chat": "01-ai/Yi-1.5-34B-Chat",
39
- "Gemma-2-27b-it": "google/gemma-2-27b-it",
40
- "Meta-Llama-2-70B-Chat-HF": "meta-llama/Llama-2-70b-chat-hf",
41
- "Meta-Llama-2-7B-Chat-HF": "meta-llama/Llama-2-7b-chat-hf",
42
- "Meta-Llama-2-13B-Chat-HF": "meta-llama/Llama-2-13b-chat-hf",
43
- "Mistral-7B-Instruct-v0.1": "mistralai/Mistral-7B-Instruct-v0.1",
44
- "Mistral-7B-Instruct-v0.2": "mistralai/Mistral-7B-Instruct-v0.2",
45
- "Mistral-7B-Instruct-v0.3": "mistralai/Mistral-7B-Instruct-v0.3",
46
- "Gemma-1.1-7b-it": "google/gemma-1.1-7b-it",
47
- "Gemma-1.1-2b-it": "google/gemma-1.1-2b-it",
48
- "Zephyr-7B-Beta": "HuggingFaceH4/zephyr-7b-beta",
49
- "Zephyr-7B-Alpha": "HuggingFaceH4/zephyr-7b-alpha",
50
- "Phi-3-mini-128k-instruct": "microsoft/Phi-3-mini-128k-instruct",
51
- "Phi-3-mini-4k-instruct": "microsoft/Phi-3-mini-4k-instruct",
52
  }
53
 
54
  #Random dog images for error message
@@ -94,9 +71,9 @@ st.sidebar.button('Reset Chat', on_click=reset_conversation) #Reset button
94
 
95
 
96
  # Create model description
97
- st.sidebar.write(f"You're now chatting with **{selected_model}**")
98
- st.sidebar.markdown("*Generated content may be inaccurate or false.*")
99
- st.sidebar.markdown("\n[TypeGPT](https://typegpt.net).")
100
 
101
 
102
 
@@ -116,7 +93,7 @@ if st.session_state.prev_option != selected_model:
116
  repo_id = model_links[selected_model]
117
 
118
 
119
- st.subheader(f'TypeGPT.net - {selected_model}')
120
  # st.title(f'ChatBot Using {selected_model}')
121
 
122
  # Set a default model
@@ -136,7 +113,7 @@ for message in st.session_state.messages:
136
 
137
 
138
  # Accept user input
139
- if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question"):
140
  # Display user message in chat message container
141
  with st.chat_message("user"):
142
  st.markdown(prompt)
@@ -162,21 +139,13 @@ if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question"):
162
 
163
  except Exception as e:
164
  # st.empty()
165
- response = "😵‍💫 Looks like someone unplugged something!\
166
- \n Either the model space is being updated or something is down.\
167
- \n\
168
- \n Try again later. \
169
- \n\
170
- \n Here's a random pic of a 🐶:"
171
  st.write(response)
172
  random_dog_pick = 'https://random.dog/'+ random_dog[np.random.randint(len(random_dog))]
173
  st.image(random_dog_pick)
174
  st.write("This was the error message:")
175
  st.write(e)
176
 
177
-
178
-
179
-
180
-
181
-
182
  st.session_state.messages.append({"role": "assistant", "content": response})
 
24
  # Create supported models
25
  model_links = {
26
  "Meta-Llama-3.1-70B-Instruct": "meta-llama/Meta-Llama-3.1-70B-Instruct",
 
 
27
  "Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct",
 
 
28
  "C4ai-command-r-plus": "CohereForAI/c4ai-command-r-plus",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
29
  }
30
 
31
  #Random dog images for error message
 
71
 
72
 
73
  # Create model description
74
+ st.sidebar.write(f"Сейчас вы общаетесь с **GPT-ChatBot**")
75
+ st.sidebar.markdown("*Созданный контент может быть неточным.*")
76
+ st.sidebar.markdown("\n[GPT-ChatBot.ru](https://gpt-chatbot.ru/).")
77
 
78
 
79
 
 
93
  repo_id = model_links[selected_model]
94
 
95
 
96
+ st.subheader(f'GPT-chatbot')
97
  # st.title(f'ChatBot Using {selected_model}')
98
 
99
  # Set a default model
 
113
 
114
 
115
  # Accept user input
116
+ if prompt := st.chat_input(f"Привет. Я GPT-ChatBot, задай здесь мне свой вопрос"):
117
  # Display user message in chat message container
118
  with st.chat_message("user"):
119
  st.markdown(prompt)
 
139
 
140
  except Exception as e:
141
  # st.empty()
142
+ response = "Похоже, кто-то что-то отключил!\
143
+ \n Либо пространство модели обновляется, либо что-то не работает.\
144
+ \n Повторите попытку позже :( "
 
 
 
145
  st.write(response)
146
  random_dog_pick = 'https://random.dog/'+ random_dog[np.random.randint(len(random_dog))]
147
  st.image(random_dog_pick)
148
  st.write("This was the error message:")
149
  st.write(e)
150
 
 
 
 
 
 
151
  st.session_state.messages.append({"role": "assistant", "content": response})