import os
import time
from typing import List, Optional

import google.generativeai as genai
import gradio as gr
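
# Setup note (assumed environment): install the dependencies with
#   pip install google-generativeai gradio
# and export GOOGLE_API_KEY before running.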
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")

if not GOOGLE_API_KEY:
    raise ValueError("GOOGLE_API_KEY is not set.")

genai.configure(api_key=GOOGLE_API_KEY)
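
# Gradio's messages-format Chatbot stores turns as {"role": "user" | "assistant", "content": ...},
# while Gemini expects {"role": "user" | "model", "parts": [...]}. transform_history maps the
# on-screen history into Gemini's format.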
def transform_history(history):
    """Convert Gradio messages-format history into the format expected by Gemini."""
    new_history = []
    for message in history:
        # Gradio labels model turns "assistant"; Gemini calls the same role "model".
        role = "model" if message["role"] == "assistant" else "user"
        new_history.append({"role": role, "parts": [message["content"]]})
    return new_history
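
# Example of the conversion:
#   [{"role": "user", "content": "Hi"}, {"role": "assistant", "content": "Hello!"}]
#   -> [{"role": "user", "parts": ["Hi"]}, {"role": "model", "parts": ["Hello!"]}]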


def bot(files: Optional[List[str]], model_choice: str, system_instruction: Optional[str],
        message: str, history):
    """Run one chatbot turn against Gemini and stream the reply back to the UI."""
    chat_history = transform_history(history)

    generation_config = genai.types.GenerationConfig(
        temperature=0.7,
        max_output_tokens=8192,
        top_k=10,
        top_p=0.9
    )

    # Attach the contents of any uploaded files as extra user turns.
    if files:
        for file_path in files:
            with open(file_path, "r") as file:
                file_content = file.read()
            chat_history.append({"role": "user", "parts": [f"Uploaded file:\n{file_content}"]})

    # The typed message goes in as the final user turn.
    chat_history.append({"role": "user", "parts": [message]})

    # Build a GenerativeModel and call generate_content; the system instruction is attached
    # to the model itself, since Gemini has no "system" role in the message list.
    model = genai.GenerativeModel(
        model_name=model_choice,
        system_instruction=system_instruction or None,
        generation_config=generation_config
    )
    response = model.generate_content(chat_history)
    reply = response.text

    # Echo the user message, then stream the reply character by character for a typing effect.
    history = history + [{"role": "user", "content": message}]
    for i in range(len(reply)):
        time.sleep(0.05)
        yield history + [{"role": "assistant", "content": reply[:i + 1]}]
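
# Because bot is a generator, Gradio streams each yielded history into the Chatbot as it arrives.
# Recent Gradio versions queue events by default; on older versions call demo.queue() before launch().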


with gr.Blocks() as demo:
    chatbot = gr.Chatbot(elem_id="chatbot", bubble_full_width=False, type="messages")

    chat_input = gr.Textbox(
        placeholder="Type a message...",
        show_label=False
    )

    submit_btn = gr.Button("Send")
    system_input = gr.Textbox(placeholder="System instruction (optional)", show_label=True, lines=2)
    model_choice = gr.Dropdown(choices=["gemini-1.5-flash"], value="gemini-1.5-flash", label="Model")
    file_input = gr.File(label="Upload files (optional)", file_count="multiple",
                         file_types=[".txt", ".md", ".json"])

    submit_btn.click(
        bot,
        inputs=[file_input, model_choice, system_input, chat_input, chatbot],
        outputs=chatbot
    )

demo.launch()
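
# To try it out (file name and key are illustrative):
#   GOOGLE_API_KEY="your-key" python app.py
# Gradio serves the UI locally, at http://127.0.0.1:7860 by default.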