import os
from openai import AzureOpenAI
import gradio as gr
from dotenv import load_dotenv
load_dotenv()
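# Configuration notes (assumptions inferred from the os.getenv calls in this file):
# the .env file loaded above is expected to provide AZURE_OPENAI_ENDPOINT and
# AZURE_OPENAI_API_KEY for the Azure OpenAI client, prompt1/prompt2/prompt3 as the
# first three system prompts, and username/password for the Gradio login.
# Minimal optional sanity check for the credentials (hedged sketch; failing fast
# here is an assumption, not part of the original app):
for _required in ("AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_API_KEY"):
    if not os.getenv(_required):
        raise RuntimeError(f"Missing environment variable: {_required}")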
client = AzureOpenAI(
    azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
    api_key=os.getenv("AZURE_OPENAI_API_KEY"),
    api_version="2024-02-01",
)
# Define four prompt options; the first three are loaded from the environment
prompts = [
    os.getenv("prompt1"),
    os.getenv("prompt2"),
    os.getenv("prompt3"),
    # Generic fallback system prompt (PT-BR): "Act as an AI assistant focused on
    # helping legal professionals. Be technical and answer according to legal
    # textbooks, Brazilian law, and the interpretations of Brazilian courts."
    "Seja um assistente de IA focado em ajudar profissionais do direito. Seja técnico e responda conforme livros técnicos de direito, Leis brasileiras e interpretações de tribunais brasileiros",
]
# User-facing option names (PT-BR): plain-language ruling, plain-language ruling
# (jury trial), hearing transcription, and a generic legal assistant
option_names = [
    "Sentença em linguagem simples",
    "Sentença em linguagem simples (Júri)",
    "Transcrição de oitiva",
    "genérico",
]
# Initialize a dictionary to store chat histories per user
chat_histories = {}
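# Assumed layout of chat_histories (inferred from predict below):
#   {username: [(user_message, assistant_reply), ...]}
# Histories live only in process memory, so they reset whenever the app restarts.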
def predict(message, history, selected_option, username):
    # Get the user's chat history or create a new one
    if username not in chat_histories:
        chat_histories[username] = []
    history = chat_histories[username]

    # Get the index of the selected option
    selected_index = option_names.index(selected_option)
    # Get the corresponding prompt based on the selected index
    selected_prompt = prompts[selected_index]

    # Rebuild the conversation in the OpenAI chat format, starting with the system prompt
    history_openai_format = []
    history_openai_format.append({"role": "system", "content": selected_prompt})
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=history_openai_format,
        temperature=1,
        max_tokens=2000,
        stream=True,
    )

    # Stream the reply back to the UI as it arrives
    partial_message = ""
    for chunk in response:
        # Check if choices exist and are not empty
        if chunk.choices and len(chunk.choices) > 0:
            if chunk.choices[0].delta.content is not None:
                partial_message = partial_message + chunk.choices[0].delta.content
                yield partial_message

    # Update the chat history for the user
    chat_histories[username].append((message, partial_message))
option_dropdown = gr.Dropdown(choices=option_names, label="Select Option")
username_textbox = gr.Textbox(label="Username", placeholder="Enter your username")

gr.ChatInterface(
    predict, additional_inputs=[option_dropdown, username_textbox]
).launch(share=True, auth=(os.getenv("username"), os.getenv("password")))
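# Notes on the launch configuration (based on Gradio's documented behavior):
# share=True publishes a temporary public gradio.live URL in addition to the local
# server, and auth=(user, pass) gates the UI behind a simple login form using the
# username/password values from the .env file.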