# Import the required libraries.
import gradio as gr
import google.generativeai as genai
import os

# --- Read the API key from Hugging Face Spaces Secrets or a local environment variable. ---
# When deploying this app to Spaces, store the key under the name 'GEMINI_API_KEY'
# in 'Settings' -> 'Repository secrets'.
GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY")
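# For local development, one way to provide the key (assumption: you set it in
# your own shell session; the exact command depends on your OS) is:
#   export GEMINI_API_KEY="your-key"   # macOS / Linux
#   set GEMINI_API_KEY=your-key        # Windows (cmd)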
# --- UI and chatbot description ---
# Use Gradio Blocks for a more flexible UI layout.
with gr.Blocks(theme=gr.themes.Default(primary_hue="blue")) as demo:
    gr.Markdown(
        """
        # Gemini API Chatbot (using Secrets)
        A chatbot powered by the Google Gemini API.
        `GEMINI_API_KEY` must be set in 'Repository secrets' under the 'Settings' tab of the Hugging Face Space.
        [Get an API key](https://aistudio.google.com/app/apikey)
        """
    )
    # Check whether the API key is set and show a notice if it is missing.
    # (gr.Warning only renders inside event handlers, so a Markdown notice is used at build time.)
    if not GEMINI_API_KEY:
        gr.Markdown("⚠️ The Gemini API key is not set. Please add GEMINI_API_KEY to 'Repository secrets' in your Hugging Face Space.")
    # Gradio chatbot UI component
    chatbot = gr.Chatbot(label="Gemini Chatbot", height=600)

    with gr.Row():
        # User message input field
        msg = gr.Textbox(
            label="Message",
            placeholder="Ask me anything...",
            container=False,
            scale=7,
        )
        # Send button
        submit_button = gr.Button("Send", variant="primary", scale=1)
    with gr.Accordion("Advanced settings", open=False):
        # System message that defines the LLM's role
        system_message = gr.Textbox(
            value="You are a helpful and friendly chatbot.", label="System message"
        )
        # Slider controlling the model's creativity
        temperature = gr.Slider(
            minimum=0.0, maximum=1.0, value=0.7, step=0.1, label="Temperature"
        )
        # Slider controlling the maximum number of tokens to generate
        max_tokens = gr.Slider(
            minimum=1, maximum=4096, value=1024, step=1, label="Max new tokens"
        )
    # --- Gemini API call function ---
    def respond(message, chat_history, system_prompt, temp, max_output_tokens):
        # If no API key was found in the environment, show a notice instead.
        if not GEMINI_API_KEY:
            yield "The Google API key is not set. Please contact the administrator."
            return

        try:
            # Configure the API key.
            genai.configure(api_key=GEMINI_API_KEY)
        except Exception as e:
            yield f"An error occurred while configuring the API key: {e}"
            return

        # Set up the model and system prompt to use.
        model = genai.GenerativeModel(
            model_name='gemini-2.0-flash',  # use the latest Flash model
            system_instruction=system_prompt
        )
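        # Note: model availability varies by account and region (an assumption,
        # not stated in the original); 'gemini-1.5-flash' is an alternative
        # model name if this one is rejected.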
        # Convert Gradio's chat history into the format the Gemini API expects.
        # The last entry is the in-flight (message, None) pair appended by on_submit,
        # so it is skipped here to avoid sending the current message twice.
        gemini_history = []
        for user_msg, model_msg in chat_history[:-1]:
            if user_msg:
                gemini_history.append({"role": "user", "parts": [user_msg]})
            if model_msg:
                gemini_history.append({"role": "model", "parts": [model_msg]})
        # Start a chat session seeded with the previous conversation.
        chat = model.start_chat(history=gemini_history)

        # Configure the generation settings.
        generation_config = genai.types.GenerationConfig(
            temperature=temp,
            max_output_tokens=int(max_output_tokens),
        )
        try:
            # Send the message in streaming mode and receive the response.
            response = chat.send_message(
                message,
                stream=True,
                generation_config=generation_config
            )
            # Stream the accumulated response to the UI as chunks arrive.
            full_response = ""
            for chunk in response:
                if hasattr(chunk, 'text'):
                    full_response += chunk.text
                    yield full_response
        except Exception as e:
            # If an error occurs during the API call, surface it in the UI.
            yield f"An error occurred while generating the response: {e}"
    # --- Gradio event listeners ---
    # Logic that runs when the user submits a message via the 'Send' button or Enter.
    def on_submit(message, chat_history, system_prompt, temp, max_output_tokens):
        # Append the user message to the chat history with an empty bot slot.
        chat_history.append((message, None))
        # Stream the response, starting from an empty string.
        bot_response_stream = respond(message, chat_history, system_prompt, temp, max_output_tokens)
        # Update the UI with each partial streaming response.
        for partial_response in bot_response_stream:
            chat_history[-1] = (message, partial_response)
            yield "", chat_history
    # Call on_submit when a message is sent (button click or Enter).
    # The API key field was removed from the UI, so it is excluded from the inputs list as well.
    msg.submit(
        on_submit,
        [msg, chatbot, system_message, temperature, max_tokens],
        [msg, chatbot]
    )
    submit_button.click(
        on_submit,
        [msg, chatbot, system_message, temperature, max_tokens],
        [msg, chatbot]
    )
# Launch the Gradio app when the script is run directly.
if __name__ == "__main__":
    demo.launch(debug=True)
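    # When running locally, passing share=True to demo.launch() (a standard
    # Gradio option) would create a temporary public URL; it is not needed on Spaces.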