# NOTE: removed scraped web-page artifacts ("Spaces:", "Build error") that were
# accidentally pasted into this source file — they are not part of the program.
"""
Set up a Gradio interface hosting an AI chatbot that uses RAG
(Retrieval-Augmented Generation) to answer user queries. OpenAI's Responses
API is used for both retrieval (vector-store search) and response generation.
"""
import json
import os

import gradio as gr

from utils.response_manager import ResponseManager  # project-local OpenAI Responses API wrapper
# --- Retrieval / generation configuration -----------------------------------
# The vector store holds the knowledge-base documents used for retrieval;
# its ID is injected via the environment.
vector_store_id = os.getenv('VECTOR_STORE_ID')
if not vector_store_id:
    # Fail fast: without a vector store there is nothing to retrieve from.
    raise ValueError("VECTOR_STORE_ID environment variable is not set.")

# Manages calls to the OpenAI Responses API against the vector store.
response_manager = ResponseManager(vector_store_id)

# Response-generation parameters.
model = "gpt-4o-mini"    # OpenAI model used for response generation
temperature = 0          # deterministic output
max_output_tokens = 800  # cap on generated tokens
max_num_results = 7      # max knowledge-base documents returned for retrieval
# --- Gradio GUI configuration ------------------------------------------------
# Load the interface titles/labels from the JSON configuration file.
with open('config/gradio_config.json', 'r', encoding='utf-8') as config_file:
    config = json.load(config_file)
# json.load raises on malformed JSON; this guards only against an empty object.
if not config:
    raise ValueError("Failed to load the configuration file.")

# Extract the configuration parameters. `.get(key, "")` (instead of direct
# indexing) lets a MISSING key fall through to the validation below as an
# empty string, so the intended ValueError is raised rather than a KeyError.
title = config.get("chatbot_title", "")
description = config.get("chatbot_description", "")
chatbot_input_label = config.get("chatbot_input_label", "")
chatbot_input_placeholder = config.get("chatbot_input_placeholder", "")
chatbot_output_label = config.get("chatbot_output_label", "")
chatbot_output_placeholder = config.get("chatbot_output_placeholder", "")
chatbot_submit_button = config.get("chatbot_submit_button", "")
chatbot_reset_button = config.get("chatbot_reset_button", "")

# All parameters must be present and non-empty.
if not all([title, description,
            chatbot_input_label, chatbot_input_placeholder,
            chatbot_output_label, chatbot_output_placeholder,
            chatbot_submit_button, chatbot_reset_button]):
    raise ValueError("One or more configuration parameters are missing or empty.")
def chatbot(query: str) -> str:
    """
    Handle one chatbot interaction.

    :param query: The user query to respond to.
    :return: The response text (Markdown) from the chatbot, or a
             human-readable error/help message.
    """
    # The previous revision left the real implementation commented out and
    # returned a hardcoded Markdown test snippet; the production path is
    # restored here.
    try:
        # Guard clause: reject blank/whitespace-only input.
        if not query.strip():
            return "Please enter a valid query."
        # Generate a RAG response via the OpenAI Responses API.
        response = response_manager.create_response(
            query, model, temperature, max_output_tokens, max_num_results)
        if not response:
            return "Sorry, I couldn't generate a response at this time. Please try again later."
        # Return the response from the AI model.
        return response
    except Exception as e:
        # Top-level UI boundary: surface the failure instead of crashing.
        # NOTE(review): returning str(e) may leak internal details to the
        # user — consider logging the exception and returning a generic
        # message instead; confirm desired behavior with the author.
        return str(e)
# --- Gradio GUI interface -----------------------------------------------------
inputs = gr.Textbox(lines=7, label=chatbot_input_label,
                    placeholder=chatbot_input_placeholder)
# NOTE(review): the `placeholder` argument was commented out here in the
# previous revision — gr.Markdown apparently does not accept one; verify
# against the installed Gradio version before re-adding it.
outputs = gr.Markdown(label=chatbot_output_label)
iface = gr.Interface(fn=chatbot,
                     inputs=inputs,
                     outputs=outputs,
                     title=title,
                     description=description)

if __name__ == "__main__":
    # Launch the web server only when run as a script (not on import).
    iface.launch()