File size: 3,838 Bytes
a5a8279
 
73432f0
a5a8279
73432f0
a5a8279
 
73432f0
3f14ec0
a5a8279
 
e9c5287
73432f0
a5a8279
 
 
73432f0
a5a8279
 
73432f0
a5a8279
 
 
 
 
73432f0
a5a8279
 
 
 
 
 
 
895e722
 
a5a8279
 
 
 
 
 
73432f0
a5a8279
be166c7
a5a8279
 
 
 
73432f0
a5a8279
 
 
d32f2e5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
73432f0
7e89408
97f5ad3
7e89408
 
ca2a285
 
7e89408
3f14ec0
ca2a285
 
 
 
 
 
 
 
 
7e89408
 
ca2a285
 
 
7e89408
 
ca2a285
7e89408
ca2a285
97f5ad3
ca2a285
73432f0
895e722
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
"""
This script sets up a Gradio interface to host an AI chatbot using RAG (Retrieval-Augmented Generation)
to provide responses to user queries. Response API from OpenAI is used for both retrieval and generation of responses.
"""
import os
import json
import gradio as gr
from utils.response_manager import ResponseManager  # Import the ResponseManager class

# Vector store ID for the retrieval of knowledge base documents.
# Loaded from the environment so each deployment can point at its own store.
vector_store_id = os.getenv('VECTOR_STORE_ID')

# Fail fast at startup if the required environment variable is missing.
if not vector_store_id:
    raise ValueError("VECTOR_STORE_ID environment variable is not set.")

# Initialize the ResponseManager with the vector store ID
response_manager = ResponseManager(vector_store_id)

# Parameters for response generation
model = "gpt-4o-mini"    # Model used for response generation
temperature = 0          # 0 = deterministic output (no sampling randomness)
max_output_tokens = 800  # Upper bound on generated tokens per response
max_num_results = 7      # Max knowledge-base documents returned for retrieval

# Load the configuration for the Gradio GUI interface from the JSON file.
with open('config/gradio_config.json', 'r', encoding='utf-8') as config_file:
    config = json.load(config_file)

# Guard against an empty configuration payload (e.g. a file containing `{}`).
if not config:
    raise ValueError("Failed to load the configuration file.")

# Extract the configuration parameters.
# NOTE: a missing key raises KeyError here, which pinpoints the malformed entry.
chatbot_title = config["chatbot_title"]
chatbot_description = config["chatbot_description"]
chatbot_input_label = config["chatbot_input_label"]
chatbot_input_placeholder = config["chatbot_input_placeholder"]
chatbot_output_label = config["chatbot_output_label"]
chatbot_output_placeholder = config["chatbot_output_placeholder"]
chatbot_submit_button = config["chatbot_submit_button"]
chatbot_reset_button = config["chatbot_reset_button"]

# Check that every configuration parameter is a non-empty value.
if not all([chatbot_title, chatbot_description,
            chatbot_input_label, chatbot_input_placeholder,
            chatbot_output_label, chatbot_output_placeholder,
            chatbot_submit_button, chatbot_reset_button]):
    raise ValueError("One or more configuration parameters are missing or empty.")

# Define the chatbot function to handle user queries and generate responses
def chatbot(query: str) -> str:
    """
    Handle one chatbot interaction: validate the query, then retrieve
    context and generate a response via the ResponseManager.

    :param query: The user query to respond to. May be None or blank
        (e.g. when the UI submits an empty textbox).
    :return: The response text from the chatbot, or a user-facing message
        when the query is invalid or generation fails.
    """
    # Guard clause: reject missing or whitespace-only input before any API call.
    # (Original called query.strip() unconditionally, so a None query raised
    # AttributeError and the raw exception text was shown to the user.)
    if not query or not query.strip():
        return "Please enter a valid query."
    try:
        # Only the remote call sits inside the try block.
        response = response_manager.create_response(
            query, model, temperature, max_output_tokens, max_num_results
        )
    except Exception as e:
        # Broad catch so a backend failure surfaces as chat text instead of
        # crashing the UI. NOTE(review): consider logging the traceback and
        # returning a generic message rather than echoing raw exception text.
        return str(e)
    if not response:
        return "Sorry, I couldn't generate a response at this time. Please try again later."
    # Return the response from the AI model
    return response

# Reset handler for the output pane
def reset_output():
    """Return an empty string, used to clear the chatbot output pane."""
    # Gradio replaces the output component's value with whatever is returned.
    return ""

# Create a Gradio Blocks interface.
# Layout: title markdown, a multi-line input textbox, a markdown output pane,
# and a submit/reset button row; click events are wired after the layout.
with gr.Blocks() as demo:
    # Page title rendered as markdown (text comes from the JSON config).
    gr.Markdown(chatbot_title)

    with gr.Row():
        # Multi-line textbox where the user types the query.
        user_input = gr.Textbox(
            lines=7, 
            label=chatbot_input_label, 
            placeholder=chatbot_input_placeholder
        )

    with gr.Row():
        # Responses render as markdown; the placeholder shows before first use.
        output = gr.Markdown(
            label=chatbot_output_label,
            value=chatbot_output_placeholder
        )

    with gr.Row():
        # Action buttons share one row; labels come from the JSON config.
        submit = gr.Button(chatbot_submit_button)
        reset = gr.Button(chatbot_reset_button)

    # Define button click actions:
    # submit routes the textbox text through chatbot(); reset clears the pane.
    submit.click(fn=chatbot, inputs=user_input, outputs=output)
    reset.click(fn=reset_output, inputs=None, outputs=output)
    
# Launch the Gradio app only when run as a script (not on import).
if __name__ == "__main__":
    demo.launch()