Update app.py
app.py CHANGED
@@ -1,5 +1,6 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
+import openai
 import os

 # Exclude the removed models from the MODELS dictionary
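The only functional change in this first hunk is the new openai import; the API key itself is read later from the OPENAI_API_KEY environment variable (on a Space that would typically be configured as a repository secret, which is an assumption about the deployment rather than something shown in the diff). A minimal startup-check sketch, assuming nothing beyond that variable name:

import os

# Minimal sketch: warn at startup if the key the new ChatGPT tab depends on is missing.
# OPENAI_API_KEY is the variable name chatgpt_respond() reads further down in this diff.
if not os.getenv("OPENAI_API_KEY"):
    print("Warning: OPENAI_API_KEY is not set; the ChatGPT tab will only return an error message.")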
@@ -126,6 +127,41 @@ def cohere_respond(
         chat_history.append((message, error_message))
         return chat_history

+def chatgpt_respond(
+    message,
+    chat_history,
+    system_message,
+    max_tokens,
+    temperature,
+    top_p,
+):
+    openai.api_key = os.getenv("OPENAI_API_KEY")
+    if not openai.api_key:
+        chat_history.append((message, "OPENAI_API_KEY 환경 변수가 필요합니다."))
+        return chat_history
+
+    messages = [{"role": "system", "content": system_message}]
+    for human, assistant in chat_history:
+        messages.append({"role": "user", "content": human})
+        messages.append({"role": "assistant", "content": assistant})
+    messages.append({"role": "user", "content": message})
+
+    try:
+        response = openai.ChatCompletion.create(
+            model="gpt-4o-mini",  # or use another model ID
+            messages=messages,
+            max_tokens=max_tokens,
+            temperature=temperature,
+            top_p=top_p,
+        )
+        assistant_message = response.choices[0].message['content']
+        chat_history.append((message, assistant_message))
+        return chat_history
+    except Exception as e:
+        error_message = f"오류가 발생했습니다: {str(e)}"
+        chat_history.append((message, error_message))
+        return chat_history
+
 def clear_conversation():
     return []

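chatgpt_respond follows the same chat-history pattern as the existing cohere_respond handler, but it calls openai.ChatCompletion.create, which only exists in the legacy pre-1.0 openai SDK; on openai>=1.0 that attribute was removed in favor of a client object. If the Space's environment pulls a current SDK, the equivalent request looks roughly like the sketch below (the helper name and default values are illustrative, not part of the diff):

import os
from openai import OpenAI  # openai>=1.0 style client

# Minimal sketch of the same request against the 1.x API, assuming
# OPENAI_API_KEY is set and "gpt-4o-mini" is available to that key.
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

def chat_completion(messages, max_tokens=1024, temperature=0.7, top_p=0.95):
    response = client.chat.completions.create(  # replaces openai.ChatCompletion.create
        model="gpt-4o-mini",
        messages=messages,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
    )
    # 1.x returns typed objects, so the content is an attribute rather than a dict key
    return response.choices[0].message.content

The alternative is to keep the code exactly as committed and pin the openai package to a pre-1.0 release in the Space's dependencies.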
@@ -201,6 +237,44 @@ with gr.Blocks() as demo:
             cohere_chatbot
         )
         cohere_clear_button.click(clear_conversation, outputs=cohere_chatbot, queue=False)
+
+    with gr.Tab("ChatGPT"):
+        with gr.Row():
+            chatgpt_system_message = gr.Textbox(
+                value="""반드시 한글로 답변할 것.
+너는 ChatGPT, OpenAI에서 개발한 언어 모델이다.
+내가 요구하는 것을 최대한 자세하고 정확하게 답변하라.
+""",
+                label="System Message",
+                lines=3
+            )
+            chatgpt_max_tokens = gr.Slider(minimum=1, maximum=4096, value=1024, step=1, label="Max Tokens")
+            chatgpt_temperature = gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.05, label="Temperature")
+            chatgpt_top_p = gr.Slider(
+                minimum=0.1,
+                maximum=1.0,
+                value=0.95,
+                step=0.05,
+                label="Top-P",
+            )
+
+        chatgpt_chatbot = gr.Chatbot(height=600)
+        chatgpt_msg = gr.Textbox(label="메세지를 입력하세요")
+        with gr.Row():
+            chatgpt_submit_button = gr.Button("전송")
+            chatgpt_clear_button = gr.Button("대화 내역 지우기")
+
+        chatgpt_msg.submit(
+            chatgpt_respond,
+            [chatgpt_msg, chatgpt_chatbot, chatgpt_system_message, chatgpt_max_tokens, chatgpt_temperature, chatgpt_top_p],
+            chatgpt_chatbot
+        )
+        chatgpt_submit_button.click(
+            chatgpt_respond,
+            [chatgpt_msg, chatgpt_chatbot, chatgpt_system_message, chatgpt_max_tokens, chatgpt_temperature, chatgpt_top_p],
+            chatgpt_chatbot
+        )
+        chatgpt_clear_button.click(clear_conversation, outputs=chatgpt_chatbot, queue=False)

 if __name__ == "__main__":
     demo.launch()
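The new tab mirrors the wiring of the existing Cohere tab: both chatgpt_msg.submit and chatgpt_submit_button.click route the same six inputs into chatgpt_respond and write the returned history back to chatgpt_chatbot, while chatgpt_clear_button reuses the shared clear_conversation helper. One thing the diff does not do is empty the input textbox after a message is sent; a minimal, self-contained sketch of that pattern (the respond stub and component names here are illustrative, not taken from app.py), assuming a Gradio version whose event listeners support .then() chaining:

import gradio as gr

def respond(message, chat_history):
    # Stand-in for chatgpt_respond: echoes the message so the wiring can be tested alone.
    chat_history.append((message, f"echo: {message}"))
    return chat_history

with gr.Blocks() as sketch:
    chatbot = gr.Chatbot(height=600)
    msg = gr.Textbox(label="Type a message")
    send = gr.Button("Send")

    # .then() chains a second step that empties the textbox once the chatbot has updated.
    msg.submit(respond, [msg, chatbot], chatbot).then(lambda: "", None, msg)
    send.click(respond, [msg, chatbot], chatbot).then(lambda: "", None, msg)

if __name__ == "__main__":
    sketch.launch()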