# Hugging Face Space: Gradio chatbot backed by the Google Gemini API.
# Required libraries.
import os

import google.generativeai as genai
import gradio as gr
# --- UI layout ---
# Gradio Blocks is used instead of gr.ChatInterface for a more flexible layout.
with gr.Blocks(theme=gr.themes.Default(primary_hue="blue")) as demo:
    # Header / usage instructions shown above the chat window.
    gr.Markdown(
        """
# โ๏ธ Gemini API ์ฑ๋ด (Secrets ์ฌ์ฉ)
Google Gemini API๋ฅผ ์ฌ์ฉํ๋ ์ฑ๋ด์ ๋๋ค.
Hugging Face Spaces์ 'Settings' ํญ์ ์๋ 'Repository secrets'์ `GEMINI_API_KEY`๊ฐ ์ค์ ๋์ด ์์ด์ผ ํฉ๋๋ค.
[API ํค ๋ฐ๊ธ๋ฐ๊ธฐ](https://aistudio.google.com/app/apikey)
"""
    )

    # Chat transcript component; holds (user, bot) message-pair tuples.
    chatbot = gr.Chatbot(label="Gemini ์ฑ๋ด", height=600)

    with gr.Row():
        # User message input box.
        msg = gr.Textbox(
            label="๋ฉ์์ง ์ ๋ ฅ",
            placeholder="๋ฌด์์ด๋ ๋ฌผ์ด๋ณด์ธ์...",
            scale=7,
        )
        # Send button.
        submit_button = gr.Button("์ ์ก", variant="primary", scale=1)

    # Advanced settings, collapsed by default.
    with gr.Accordion("๊ณ ๊ธ ์ค์ ", open=False):
        # System instruction that defines the assistant's role.
        system_message = gr.Textbox(
            value="You are a helpful and friendly chatbot.", label="์์คํ ๋ฉ์์ง"
        )
        # Sampling temperature slider (higher = more creative output).
        temperature = gr.Slider(
            minimum=0.0, maximum=1.0, value=0.7, step=0.1, label="Temperature"
        )
        # Upper bound on the number of tokens the model may generate.
        max_tokens = gr.Slider(
            minimum=1, maximum=4096, value=1024, step=1, label="Max new tokens"
        )
# --- Gemini API ํธ์ถ ํจ์ --- | |
def respond(message, chat_history, system_prompt, temp, max_output_tokens): | |
# ํจ์๊ฐ ํธ์ถ๋ ๋๋ง๋ค ํ๊ฒฝ๋ณ์์์ API ํค๋ฅผ ์ง์ ๊ฐ์ ธ์ต๋๋ค. | |
# ์ด๋ ๊ฒ ํ๋ฉด ์ฑ ์์ ์์ ์ ํค๋ฅผ ๋ชป ๋ถ๋ฌ์ค๋ ๋ฌธ์ ๋ฅผ ํด๊ฒฐํ ์ ์์ต๋๋ค. | |
GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY") | |
# ํ๊ฒฝ๋ณ์์์ ๊ฐ์ ธ์จ API ํค๊ฐ ์์ผ๋ฉด ์๋ด ๋ฉ์์ง๋ฅผ ๋์๋๋ค. | |
if not GEMINI_API_KEY: | |
# UI์ ์ง์ ๊ฒฝ๊ณ ๋ฅผ ํ์ํ๊ธฐ ์ํด gr.Warning์ ์ฌ์ฉํ ์ ์์ง๋ง, | |
# ์ฌ๊ธฐ์๋ ์ฑํ ์๋ต์ผ๋ก ์ฒ๋ฆฌํฉ๋๋ค. | |
yield "โ ๏ธ **์ค๋ฅ**: `GEMINI_API_KEY`๊ฐ ์ค์ ๋์ง ์์์ต๋๋ค.\n\nHugging Face Spaces์ **Settings > Repository secrets**์ API ํค๋ฅผ ์ถ๊ฐํ๋์ง ํ์ธํด์ฃผ์ธ์." | |
return | |
try: | |
# API ํค๋ฅผ ์ค์ ํฉ๋๋ค. | |
genai.configure(api_key=GEMINI_API_KEY) | |
except Exception as e: | |
yield f"API ํค ์ค์ ์ ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {e}" | |
return | |
# ์ฌ์ฉํ ๋ชจ๋ธ๊ณผ ์์คํ ํ๋กฌํํธ๋ฅผ ์ค์ ํฉ๋๋ค. | |
model = genai.GenerativeModel( | |
model_name='gemini-2.0-flash', # ์ต์ Flash ๋ชจ๋ธ ์ฌ์ฉ | |
system_instruction=system_prompt | |
) | |
# Gradio์ ๋ํ ๊ธฐ๋ก์ Gemini API๊ฐ ์ดํดํ ์ ์๋ ํ์์ผ๋ก ๋ณํํฉ๋๋ค. | |
gemini_history = [] | |
for user_msg, model_msg in chat_history: | |
if user_msg: | |
gemini_history.append({"role": "user", "parts": [user_msg]}) | |
if model_msg: | |
gemini_history.append({"role": "model", "parts": [model_msg]}) | |
# ์ด์ ๋ํ ๊ธฐ๋ก์ ๋ฐํ์ผ๋ก ์ฑํ ์ธ์ ์ ์์ํฉ๋๋ค. | |
chat = model.start_chat(history=gemini_history) | |
# ๋ชจ๋ธ ์์ฑ ๊ด๋ จ ์ค์ ์ ๊ตฌ์ฑํฉ๋๋ค. | |
generation_config = genai.types.GenerationConfig( | |
temperature=temp, | |
max_output_tokens=int(max_output_tokens), | |
) | |
try: | |
# ์คํธ๋ฆฌ๋ฐ ๋ฐฉ์์ผ๋ก ๋ฉ์์ง๋ฅผ ๋ณด๋ด๊ณ ์๋ต์ ๋ฐ์ต๋๋ค. | |
response = chat.send_message( | |
message, | |
stream=True, | |
generation_config=generation_config | |
) | |
# ์คํธ๋ฆฌ๋ฐ ์๋ต์ ์ค์๊ฐ์ผ๋ก UI์ ํ์ํฉ๋๋ค. | |
full_response = "" | |
for chunk in response: | |
if hasattr(chunk, 'text'): | |
full_response += chunk.text | |
yield full_response | |
except Exception as e: | |
# API ํธ์ถ ์ค ์๋ฌ๊ฐ ๋ฐ์ํ๋ฉด UI์ ํ์ํฉ๋๋ค. | |
yield f"์๋ต ์์ฑ ์ค ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {e}" | |
# --- Gradio ์ด๋ฒคํธ ๋ฆฌ์ค๋ --- | |
def on_submit(message, chat_history, system_prompt, temp, max_output_tokens): | |
chat_history.append((message, None)) | |
bot_response_stream = respond(message, chat_history, system_prompt, temp, max_output_tokens) | |
for partial_response in bot_response_stream: | |
chat_history[-1] = (message, partial_response) | |
yield "", chat_history | |
msg.submit( | |
on_submit, | |
[msg, chatbot, system_message, temperature, max_tokens], | |
[msg, chatbot] | |
) | |
submit_button.click( | |
on_submit, | |
[msg, chatbot, system_message, temperature, max_tokens], | |
[msg, chatbot] | |
) | |
if __name__ == "__main__": | |
demo.launch(debug=True) | |