# Hugging Face Space: Gradio chat front-end for an OpenAI-compatible chat API.
import gradio as gr
import requests
import os

# Credentials and endpoint come from the environment (Space secrets).
OPENAI_API_KEY = os.getenv("API_KEY")
API_URL = os.getenv("BASE_URL")

# Running transcript of the conversation, shared across calls.
# NOTE(review): module-level state is shared by ALL users of the app — confirm
# this single-session behavior is intended.
chat_history = []
# Generate an assistant reply for the accumulated dialog.
def generate_response(dialog):
    """Send the dialog to the chat-completions endpoint and return the reply.

    Args:
        dialog: list of ``{'role': ..., 'content': ...}`` message dicts
            (user/assistant turns, in order).

    Returns:
        The assistant's reply text with surrounding whitespace stripped.

    Raises:
        requests.HTTPError: if the API answers with an error status.
        requests.Timeout: if the API does not respond within the timeout.
    """
    headers = {
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {OPENAI_API_KEY}',
    }
    data = {
        'model': 'gpt-3.5-turbo',
        # Prepend the system prompt so the model keeps its assistant persona.
        'messages': [{'role': 'system', 'content': 'You are a helpful assistant.'}] + dialog,
        'max_tokens': 150,
    }
    # timeout= keeps the UI from hanging forever on a stalled connection.
    response = requests.post(API_URL, json=data, headers=headers, timeout=60)
    # Fail loudly on HTTP errors instead of a confusing KeyError on 'choices'.
    response.raise_for_status()
    response_data = response.json()
    generated_text = response_data['choices'][0]['message']['content'].strip()
    return generated_text
# Advance the chat by one user turn and re-render the transcript.
def update_chat(input_text):
    """Record the user message, fetch the model reply, and render the chat.

    Args:
        input_text: the user's latest message.

    Returns:
        The full transcript as a single string, one ``role: content``
        line per message, headed by ``"Chat History:"``.
    """
    chat_history.append({'role': 'user', 'content': input_text})
    # Reply is generated from the whole dialog so far, including this turn.
    assistant_reply = generate_response(chat_history)
    chat_history.append({'role': 'assistant', 'content': assistant_reply})
    # Build the transcript with join rather than repeated += (quadratic).
    return "Chat History:\n" + "".join(
        f"{message['role']}: {message['content']}\n" for message in chat_history
    )
# Gradio interface for the chat.
iface = gr.Interface(
    fn=update_chat,
    inputs=gr.Textbox(),
    # type="readonly" is not a valid Textbox type (text/password/email) and
    # raises at startup; a non-interactive textbox is the supported way to
    # show read-only output.
    outputs=gr.Textbox(interactive=False),
    # NOTE(review): live=True triggers a request on every keystroke, which
    # fires an API call per character typed — consider live=False.
    live=True,
    title="Chat with GPT-3.5-turbo"
)

iface.launch()