import os
import json
import logging
from typing import Optional
import gradio as gr
from utils.response_manager import ResponseManager 

class ChatbotInterface:
    def __init__(self, 
                 config_path: str = 'config/gradio_config.json',
                 model: str = "gpt-4o-mini",
                 temperature: float = 0,
                 max_output_tokens: int = 800,
                 max_num_results: int = 15,
                 vector_store_id: Optional[str] = None, 
                 api_key: Optional[str] = None, 
                 meta_prompt_file: Optional[str] = None):
        """
        Initialize the ChatbotInterface with configuration and custom parameters for ResponseManager.
        :param config_path: Path to the configuration JSON file.
        :param model: The OpenAI model to use (default: 'gpt-4o-mini').
        :param temperature: The temperature for response generation (default: 0).
        :param max_output_tokens: The maximum number of output tokens (default: 800).
        :param max_num_results: The maximum number of search results to return (default: 15).
        :param vector_store_id: The ID of the vector store to use for file search.
        :param api_key: The OpenAI API key for authentication.
        :param meta_prompt_file: Path to the meta prompt file.
        """
        self.config = self.load_config(config_path)
        self.title = self.config["chatbot_title"]
        self.description = self.config["chatbot_description"]
        self.input_placeholder = self.config["chatbot_input_placeholder"]
        self.output_label = self.config["chatbot_output_label"]

        # Initialize ResponseManager with custom parameters
        try:
            self.response_manager = ResponseManager(
                model=model,
                temperature=temperature,
                max_output_tokens=max_output_tokens,
                max_num_results=max_num_results,
                vector_store_id=vector_store_id,
                api_key=api_key,
                meta_prompt_file=meta_prompt_file
            ) 
            
            logging.info(
                "ChatbotInterface initialized with the following parameters:\n"
                f"  - Model: {model}\n"
                f"  - Temperature: {temperature}\n"
                f"  - Max Output Tokens: {max_output_tokens}\n"
                f"  - Max Number of Results: {max_num_results}\n"
            )
            
        except Exception as e:
            logging.error(f"Failed to initialize ResponseManager: {e}")
            raise
           
    @staticmethod
    def load_config(config_path: str) -> dict:
        """
        Load the configuration for Gradio GUI interface from the JSON file.
        :param config_path: Path to the configuration JSON file.
        :return: Configuration dictionary.
        """
        logging.info(f"Loading configuration from {config_path}...")
        if not os.path.exists(config_path):
            logging.error(f"Configuration file not found: {config_path}")
            raise FileNotFoundError(f"Configuration file not found: {config_path}")

        with open(config_path, 'r') as config_file:
            config = json.load(config_file)

        required_keys = [
            "chatbot_title", 
            "chatbot_description", 
            "chatbot_input_placeholder", 
            "chatbot_output_label"
        ]
        
        for key in required_keys:
            if key not in config:
                logging.error(f"Missing required configuration key: {key}")
                raise ValueError(f"Missing required configuration key: {key}")

        logging.info("Configuration loaded successfully.")
        return config
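
    # Illustrative example of the expected config/gradio_config.json contents.
    # The keys match the required_keys checked above; the values are placeholders
    # and are not taken from this repository:
    # {
    #     "chatbot_title": "Document Q&A Assistant",
    #     "chatbot_description": "Ask questions about the indexed documents.",
    #     "chatbot_input_placeholder": "Type your question and press Enter...",
    #     "chatbot_output_label": "Conversation"
    # }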

        
    def create_interface(self) -> gr.Blocks:
        """
        Create the Gradio Blocks interface that displays a single container including both
        the text input and a small arrow submit button. The interface will clear the text input
        after each message is submitted.
        """
        logging.info("Creating Gradio interface...")

        with gr.Blocks() as demo: 
            # Title and description area.
            gr.Markdown(f"## {self.title}\n{self.description}")

            # Chatbot output area.
            chatbot_output = gr.Chatbot(label=self.output_label, type="messages")

            # Use a gr.Row container to group the text input with the reset button.
            with gr.Row(elem_id="input-container", equal_height=True):
                user_input = gr.Textbox(
                    lines=1,
                    show_label=False,              # Hide label for a unified look.
                    elem_id="chat-input",
                    placeholder=self.input_placeholder,
                    scale=500,
                )
                reset = gr.ClearButton(
                    value="Reset 🔄",  
                    variant="secondary",
                    elem_id="reset-button",
                    size="lg"
                )

            # Define a local function to reset the chat output and clear the input box.
            def reset_output() -> tuple:
                """
                Reset the chatbot output and clear the text input.
                :return: A tuple of an empty chat history and an empty input string.
                """
                return [], ""
                
            # Define a local function to process input
            def process_input(user_message: str, chat_history: list) -> tuple:
                """
                Call generate_response with the user's message and chat history.
                Return a tuple with the updated chat history and an empty string to clear the input.
                """
                updated_history = self.response_manager.generate_response(user_message, chat_history)
                return updated_history, ""
            
            # Bind the reset button click to the reset function
            reset.click(
                fn=reset_output,
                inputs=None,
                outputs=[chatbot_output, user_input]
            )
            
            # Bind the Enter key (textbox submit) to the input-processing function
            user_input.submit(
                fn=process_input,
                inputs=[user_input, chatbot_output],
                outputs=[chatbot_output, user_input]
            )

        logging.info("Gradio interface created successfully.")
        return demo
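

# Usage sketch (assumptions: the OPENAI_API_KEY and VECTOR_STORE_ID environment
# variables are set, and the meta-prompt path below is a placeholder; neither is
# confirmed by this file):
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    chatbot = ChatbotInterface(
        vector_store_id=os.getenv("VECTOR_STORE_ID"),   # hypothetical env var
        api_key=os.getenv("OPENAI_API_KEY"),
        meta_prompt_file="config/meta_prompt.txt",      # illustrative path
    )
    demo = chatbot.create_interface()
    demo.launch()  # start the local Gradio server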