# interface.py
import gradio as gr
import uuid
from ai_logic import (
responder_como_aldo,
build_and_save_vector_store,
MODELS,
DEFAULT_MODEL,
inicializar_sistema # <--- Import the initialization function
)
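# Assumed contract with ai_logic (inferred from how these names are used below,
# not guaranteed by that module):
#   MODELS               -> dict mapping a human-readable label to a model identifier
#   DEFAULT_MODEL        -> one of MODELS' keys, used as the dropdown default
#   responder_como_aldo(session_id, user_msg, modelo) -> str with the full answer
#   build_and_save_vector_store() -> (status_message, faiss_file_path, urls_file_path),
#                           matching the three outputs wired to botao_retreinar below
#   inicializar_sistema() -> bool, True when enough models are reachable to start the UI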
css_customizado = """
.gradio-container {
max-width: 1400px !important;
margin: 0 auto;
width: 99%;
height: 100vh !important;
display: flex;
flex-direction: column;
overflow: hidden;
}
.main-content {
display: flex;
flex-direction: column;
height: 100vh;
overflow: hidden;
flex-shrink: 0;
}
.titulo-principal {
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%) !important;
color: white !important;
padding: 10px !important;
border-radius: 10px !important;
margin-bottom: 10px !important;
text-align: center !important;
flex-shrink: 0;
}
.chat-area {
flex: 1;
display: flex;
flex-direction: column;
overflow: hidden;
}
.chat-container {
flex: 1;
overflow-y: auto;
margin-bottom: 10px;
}
.input-container {
flex-shrink: 0;
padding: 10px 0;
display: flex;
flex-direction: column;
justify-content: center;
}
.additional-content {
overflow-y: auto;
padding-top: 20px;
}
.gr-textbox textarea {
font-size: 14px !important;
line-height: 1.5 !important;
}
.resposta-container {
background-color: #ffffff !important;
color: #1a1a1a !important;
border: 1px solid #e0e0e0 !important;
border-radius: 20px !important;
padding: 20px !important;
margin: 10px 0 !important;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05) !important;
}
.resposta-container pre code {
color: #1a1a1a !important;
background-color: #f8f9fa !important;
}
.pergunta-container {
background-color: #f0f8ff !important;
border-radius: 8px !important;
padding: 15px !important;
}
.modelo-dropdown {
margin-bottom: 15px !important;
}
#entrada_usuario textarea {
color: white !important;
font-size: large !important;
background-color: #1a1a1a !important;
min-height: 60px !important;
}
.message-content {
opacity: 1 !important;
font-size: larger;
color: white !important;
background-color: #1a1a1a !important;
}
/* Responsive layout */
@media (max-width: 768px) {
.titulo-principal {
padding: 10px !important;
}
#entrada_usuario textarea {
min-height: 50px !important;
font-size: 16px !important;
}
}
#component-6{flex-grow: initial !important;}
#component-7{flex-grow: initial !important;}
.message-wrap.svelte-gjtrl6 .prose.chatbot.md {
opacity: 1 !important;
}
"""
def criar_interface():
with gr.Blocks(title="Dr. Aldo Henrique - API Externa", theme=gr.themes.Soft(), css=css_customizado) as interface:
        # gr.State deep-copies its initial value, so create the session_id on page load;
        # a uuid generated here at build time would be shared by every visitor.
        session_id_state = gr.State()
        interface.load(fn=lambda: str(uuid.uuid4()), inputs=None, outputs=session_id_state)
with gr.Column(elem_classes="main-content"):
gr.HTML("""
<div class="titulo-principal">
<h4 style="margin: 0;">🤖 iAldo - Converse com o <a href="https://aldohenrique.com.br/" style="color: white; text-decoration: underline;">Prof. Dr. Aldo Henrique</a></h4>
</div>
""")
with gr.Column(elem_classes="chat-area"):
with gr.Column(elem_classes="chat-container"):
chatbot = gr.Chatbot(
label="💬 Área do chat",
elem_id="chat",
height="100%"
)
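                # Note: the handlers below append history entries as [user, bot] pairs,
                # i.e. the Chatbot's classic pair format; newer Gradio releases prefer
                # type="messages" and deprecate the pair format, so pin the type
                # explicitly when upgrading.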
with gr.Column(elem_classes="input-container"):
with gr.Row():
# The choices for the dropdown now depend on the globally updated MODELS from ai_logic
modelo_select = gr.Dropdown(
choices=list(MODELS.keys()), # <--- Uses the MODELS dict from ai_logic
value=DEFAULT_MODEL,
label="🧠 Selecione o Modelo de Pensamento",
elem_classes="modelo-dropdown"
)
with gr.Row():
user_input = gr.Textbox(
show_label=False,
placeholder="Digite sua pergunta e pressione Enter ou clique em Enviar",
lines=2,
elem_id="entrada_usuario"
)
enviar_btn = gr.Button("Enviar", variant="primary")
with gr.Column(elem_classes="additional-content"):
with gr.Accordion("⚙️ Controle do Conhecimento (RAG)", open=False):
status_rag = gr.Textbox(label="Status do Retreino", interactive=False)
botao_retreinar = gr.Button("🔄 Atualizar Conhecimento do Blog", variant="stop")
download_faiss_file = gr.File(label="Download do Índice FAISS", interactive=False, file_count="single", file_types=[".pkl"])
download_urls_file = gr.File(label="Download das URLs Processadas", interactive=False, file_count="single", file_types=[".pkl"])
with gr.Accordion("📚 Exemplos de Perguntas", open=False):
gr.Examples(
examples=[
["Como implementar uma lista ligada em C com todas as operações básicas?", DEFAULT_MODEL],
["Qual a sua opinião sobre o uso de ponteiros em C++ moderno, baseada no seu blog?", "Mistral 7B (Mais acertivo)"], # Use label, not model name
["Resuma o que você escreveu sobre machine learning no seu blog.", "Zephyr 7B (Meio Termo)"], # Use label, not model name
],
inputs=[user_input, modelo_select]
)
with gr.Accordion("🔧 Status da API", open=False):
# We can't directly show model status here without re-running tests.
# For simplicity, we'll assume if the page loads, models are good.
status_api = gr.Textbox(label="Status dos Modelos", interactive=False, lines=8,
value="Modelos carregados com sucesso! Verifique o console para detalhes.")
with gr.Accordion("ℹ️ Informações", open=False):
gr.Markdown("""
### Sobre o Dr. Aldo Henrique:
- **Especialidade**: Linguagens C, Java, Desenvolvimento Web, Inteligência Artificial
- **Conhecimento Adicional**: Conteúdo do blog aldohenrique.com.br
### Dicas para melhores respostas:
- Faça perguntas específicas sobre o conteúdo do blog para ver o RAG em ação!
- Peça resumos ou opiniões sobre temas que o professor aborda.
""")
        # ✅ Handler corrected to pass the session_id along with each request
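        # Note: `responder` is a generator, so Gradio streams each `yield` to the UI: the
        # first yield shows the typing placeholder, the second replaces it with the final
        # answer returned by responder_como_aldo.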
def responder(chat_history, user_msg, modelo, session_id):
            if not user_msg.strip():
                # Empty input: yield once so the textbox is still cleared, then stop.
                yield chat_history, ""
                return
chat_history = chat_history + [[user_msg, "Dr. Aldo Henrique está digitando..."]]
yield chat_history, ""
resposta_final = responder_como_aldo(session_id, user_msg, modelo)
chat_history[-1][1] = resposta_final
yield chat_history, ""
        # ✅ Both the send button and Enter now submit with the session state
enviar_btn.click(
fn=responder,
inputs=[chatbot, user_input, modelo_select, session_id_state],
outputs=[chatbot, user_input],
show_progress=True
)
user_input.submit(
fn=responder,
inputs=[chatbot, user_input, modelo_select, session_id_state],
outputs=[chatbot, user_input],
show_progress=True
)
botao_retreinar.click(
fn=build_and_save_vector_store,
outputs=[status_rag, download_faiss_file, download_urls_file],
show_progress=True
)
gr.HTML("""
<script>
window.addEventListener("load", function() {
const textarea = document.querySelector("#entrada_usuario textarea");
if (textarea) {
setTimeout(() => textarea.focus(), 100);
}
});
</script>
""")
return interface
def configurar_interface():
# Attempt to initialize the backend system and check for model availability
if inicializar_sistema(): # <--- This is the key change
return criar_interface() # Only create the Gradio interface if successful
    else:
        # If initialization fails, serve a minimal error page instead of the full interface
        # (a bare gr.HTML component has no .launch(), so wrap it in its own Blocks).
        with gr.Blocks(title="Erro ao carregar") as pagina_erro:
            gr.HTML("<h1>Erro ao carregar a página: Não há modelos de IA suficientes disponíveis.</h1><p>Verifique o console para mais detalhes sobre os modelos e sua conexão com o Hugging Face.</p>")
        return pagina_erro
# This part runs when you execute interface.py
if __name__ == "__main__":
app = configurar_interface()
app.launch() |
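    # A minimal sketch of an alternative launch for a hosted container (assumptions:
    # default Hugging Face Spaces port 7860; an older Gradio version that needs an
    # explicit queue for the streaming generator handler above):
    #   app.queue()
    #   app.launch(server_name="0.0.0.0", server_port=7860)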