import gradio as gr
import google.generativeai as genai
import os
# Set up the API key from the Space's environment
api_key = os.environ.get("GEMINI_API_KEY")
if api_key:
    genai.configure(api_key=api_key)

def chat(message, history):
    if not api_key:
        return "⚠️ API key is not set. Add GEMINI_API_KEY in the HF Spaces Settings."
    try:
        # Initialize the Gemini model
        model = genai.GenerativeModel('gemini-2.0-flash')

        # Convert the Gradio history ([user, assistant] pairs) into the Gemini format
        chat_history = []
        for human, assistant in history:
            if human:
                chat_history.append({"role": "user", "parts": [human]})
            if assistant:
                chat_history.append({"role": "model", "parts": [assistant]})

        # Start a chat session seeded with the prior turns
        chat_session = model.start_chat(history=chat_history)

        # Generate a response to the new message
        response = chat_session.send_message(message)
        return response.text
    except Exception as e:
        return f"❌ Error: {str(e)}"

# Gradio interface
demo = gr.ChatInterface(
    fn=chat,
    title="🤖 Gemini Chatbot",
    description="A simple chatbot built on the Google Gemini API.",
    examples=["Hello!", "How's the weather today?", "Explain Python to me."],
    retry_btn=None,
    undo_btn="Remove previous turn",
    clear_btn="Clear conversation",
)

if __name__ == "__main__":
    demo.launch()
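
For a quick check outside the web UI, the chat handler can also be called directly. The sketch below is illustrative only (the module name app, the sample turns, and running it as a separate script are assumptions); it mirrors the list of [user, assistant] pairs that gr.ChatInterface passes as history by default, and assumes GEMINI_API_KEY is exported in the environment.

# local_smoke_test.py -- hypothetical helper, not part of the Space
from app import chat

# First turn: no prior history
print(chat("Hello!", []))

# Follow-up turn: history is a list of [user, assistant] pairs
history = [["How's the weather today?", "I can't check live weather, but I can help with other questions."]]
print(chat("Then explain Python to me.", history))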