AashitaK commited on
Commit
ccba7a6
·
verified ·
1 Parent(s): 2b526c1

Delete utils/session_history.py

Browse files
Files changed (1) hide show
  1. utils/session_history.py +0 -249
utils/session_history.py DELETED
@@ -1,249 +0,0 @@
1
- import os
2
- import openai
3
- import logging
4
- import json
5
- import gradio as gr
6
- from typing import Optional
7
-
8
class ResponseManager:
    """
    Manages calls to the OpenAI Responses API for a file-search chatbot.

    Holds the OpenAI client, the meta (system) prompt, and the id of the
    previous response so each call continues the same server-side
    conversation thread.
    """

    def __init__(self,
                 vector_store_id: Optional[str] = None,
                 api_key: Optional[str] = None,
                 meta_prompt_file: Optional[str] = None,
                 model: str = "gpt-4o-mini",
                 temperature: float = 0,
                 max_output_tokens: int = 800,
                 max_num_results: int = 15):
        """
        Initialize the ResponseManager.

        :param vector_store_id: Vector store id for file search; falls back to
            the VECTOR_STORE_ID environment variable.
        :param api_key: OpenAI API key; falls back to OPENAI_API_KEY.
        :param meta_prompt_file: Path to the meta prompt file
            (default: 'config/meta_prompt.txt').
        :param model: The OpenAI model to use (default: 'gpt-4o-mini').
        :param temperature: The temperature for response generation (default: 0).
        :param max_output_tokens: The maximum number of output tokens (default: 800).
        :param max_num_results: The maximum number of search results to return (default: 15).
        :raises ValueError: If the vector store id or the API key is missing.
        :raises FileNotFoundError: If the meta prompt file does not exist.
        """
        # Fall back to environment variables when arguments are not provided.
        self.vector_store_id = vector_store_id or os.getenv('VECTOR_STORE_ID')
        if not self.vector_store_id:
            logging.error("VECTOR_STORE_ID is not provided or set in the environment.")
            raise ValueError("VECTOR_STORE_ID is required.")

        self.api_key = api_key or os.getenv('OPENAI_API_KEY')
        if not self.api_key:
            logging.error("OPENAI_API_KEY is not provided or set in the environment.")
            raise ValueError("OPENAI_API_KEY is required.")

        self.meta_prompt_file = meta_prompt_file or 'config/meta_prompt.txt'
        # Id of the last API response; None marks a brand-new session.
        self.previous_response_id = None

        # Initialize the OpenAI client and load the meta (system) prompt.
        self.client = openai.OpenAI(api_key=self.api_key)
        self.meta_prompt = self._load_meta_prompt(self.meta_prompt_file)

        # Generation parameters.
        self.model = model
        self.temperature = temperature
        self.max_output_tokens = max_output_tokens
        self.max_num_results = max_num_results

    def _load_meta_prompt(self, meta_prompt_file: str) -> str:
        """
        Read and return the stripped meta prompt text from *meta_prompt_file*.

        :param meta_prompt_file: Path to the meta prompt file.
        :return: The meta prompt as a string.
        :raises FileNotFoundError: If the file does not exist.
        """
        if not os.path.exists(meta_prompt_file):
            logging.error(f"Meta prompt file '{meta_prompt_file}' not found.")
            raise FileNotFoundError(f"Meta prompt file '{meta_prompt_file}' not found.")
        with open(meta_prompt_file, 'r', encoding='utf-8') as file:
            meta_prompt = file.read().strip()
        logging.info(f"Meta prompt loaded successfully from '{meta_prompt_file}'.")
        return meta_prompt

    def generate_response(self, query: str, history: list) -> tuple:
        """
        Answer *query* via the OpenAI API and append the turn to *history*.

        Fix over the original: the developer/meta prompt is sent only to the
        API and is no longer appended into the returned history, so the system
        instructions are never rendered in the Chatbot component (previously
        they leaked into the display/state on the first turn, including the
        empty-query early return).

        :param query: The user query to respond to.
        :param history: The conversation history (list of role/content dicts).
        :return: (updated history for display, updated history for state) —
            the same list twice, as Gradio expects for its two outputs.
        """
        # Reject blank input before building any messages or calling the API.
        if not query.strip():
            logging.warning("Empty or invalid query received.")
            new_history = history + [
                {"role": "user", "content": query},
                {"role": "assistant", "content": "Please enter a valid query."},
            ]
            return new_history, new_history

        # The meta prompt is only needed on the first turn of a session;
        # later turns are chained server-side via previous_response_id.
        api_input = []
        if self.previous_response_id is None:
            api_input.append({"role": "developer", "content": self.meta_prompt})
        api_input.append({"role": "user", "content": query})

        try:
            logging.info("Sending request to OpenAI API...")
            response = self.client.responses.create(
                model=self.model,
                previous_response_id=self.previous_response_id,
                input=api_input,
                tools=[{
                    "type": "file_search",
                    "vector_store_ids": [self.vector_store_id],
                    "max_num_results": self.max_num_results
                }],
                truncation="auto",
                temperature=self.temperature,
                max_output_tokens=self.max_output_tokens
            )
            self.previous_response_id = response.id
            logging.info("Response received successfully.")
            reply = response.output_text
        except Exception as e:
            logging.error(f"An error occurred while generating a response: {e}")
            reply = "Sorry, I couldn't generate a response at this time. Please try again later."

        # Only user/assistant messages go into the visible history.
        new_history = history + [
            {"role": "user", "content": query},
            {"role": "assistant", "content": reply},
        ]
        return new_history, new_history
121
-
122
class ChatbotInterface:
    """
    Gradio front-end for the ResponseManager chatbot.

    Loads UI labels from a JSON config file, builds a Blocks interface with a
    Chatbot display plus a per-session conversation state, and wires user
    input to ResponseManager.generate_response.
    """

    def __init__(self,
                 config_path: str = 'config/gradio_config.json',
                 model: str = "gpt-4o-mini",
                 temperature: float = 0,
                 max_output_tokens: int = 800,
                 max_num_results: int = 15,
                 vector_store_id: Optional[str] = None,
                 api_key: Optional[str] = None,
                 meta_prompt_file: Optional[str] = None):
        """
        Initialize the ChatbotInterface with configuration and custom
        parameters for ResponseManager.

        Fix over the original: the method was named ``init`` instead of
        ``__init__``, so it was never invoked on construction and instances
        had none of their attributes set.

        :param config_path: Path to the configuration JSON file.
        :param model: The OpenAI model to use (default: 'gpt-4o-mini').
        :param temperature: The temperature for response generation (default: 0).
        :param max_output_tokens: The maximum number of output tokens (default: 800).
        :param max_num_results: The maximum number of search results to return (default: 15).
        :param vector_store_id: The ID of the vector store to use for file search.
        :param api_key: The OpenAI API key for authentication.
        :param meta_prompt_file: Path to the meta prompt file.
        :raises FileNotFoundError: If the config file does not exist.
        :raises ValueError: If a required configuration key is missing.
        """
        self.config = self.load_config(config_path)
        self.title = self.config["chatbot_title"]
        self.description = self.config["chatbot_description"]
        self.input_label = self.config["chatbot_input_label"]
        self.input_placeholder = self.config["chatbot_input_placeholder"]
        self.output_label = self.config["chatbot_output_label"]
        self.reset_button = self.config["chatbot_reset_button"]
        self.submit_button = self.config["chatbot_submit_button"]

        # Initialize ResponseManager with custom parameters.
        try:
            self.response_manager = ResponseManager(
                model=model,
                temperature=temperature,
                max_output_tokens=max_output_tokens,
                max_num_results=max_num_results,
                vector_store_id=vector_store_id,
                api_key=api_key,
                meta_prompt_file=meta_prompt_file
            )
            self.generate_response = self.response_manager.generate_response
            logging.info(
                "ChatbotInterface initialized with the following parameters:\n"
                f"  - Model: {model}\n"
                f"  - Temperature: {temperature}\n"
                f"  - Max Output Tokens: {max_output_tokens}\n"
                f"  - Max Number of Results: {max_num_results}\n"
                f"  - Vector Store ID: {vector_store_id}\n"
                f"  - API Key: {'Provided' if api_key else 'Not Provided'}\n"
                f"  - Meta Prompt File: {meta_prompt_file or 'Default'}"
            )
        except Exception as e:
            logging.error(f"Failed to initialize ResponseManager: {e}")
            raise

    @staticmethod
    def load_config(config_path: str) -> dict:
        """
        Load the configuration for the Gradio GUI from a JSON file.

        :param config_path: Path to the configuration JSON file.
        :return: Configuration dictionary.
        :raises FileNotFoundError: If the config file does not exist.
        :raises ValueError: If a required configuration key is missing.
        """
        logging.info(f"Loading configuration from {config_path}...")
        if not os.path.exists(config_path):
            logging.error(f"Configuration file not found: {config_path}")
            raise FileNotFoundError(f"Configuration file not found: {config_path}")

        with open(config_path, 'r') as config_file:
            config = json.load(config_file)

        required_keys = [
            "chatbot_title", "chatbot_description", "chatbot_input_label",
            "chatbot_input_placeholder", "chatbot_output_label",
            "chatbot_reset_button", "chatbot_submit_button"
        ]
        for key in required_keys:
            if key not in config:
                logging.error(f"Missing required configuration key: {key}")
                raise ValueError(f"Missing required configuration key: {key}")

        logging.info("Configuration loaded successfully.")
        return config

    def reset_output(self) -> tuple:
        """
        Reset the chatbot for a fresh session.

        Fix over the original: only the Chatbot display was cleared — the
        per-session ``conversation_state`` and the server-side
        ``previous_response_id`` survived a reset, so stale context leaked
        into the "new" conversation on the next submit.

        :return: Two empty lists (display history, state history).
        """
        # Drop the server-side response chain so the next query starts a new
        # session (and re-sends the meta prompt).
        self.response_manager.previous_response_id = None
        return [], []

    def create_interface(self) -> "gr.Blocks":
        """
        Create the Gradio Blocks interface.

        The return annotation is quoted so importing this module does not
        require gradio to be resolvable at class-definition time.

        :return: A Gradio Blocks interface object.
        """
        logging.info("Creating Gradio interface...")

        with gr.Blocks() as demo:
            gr.Markdown(f"## {self.title}\n{self.description}")

            # Chatbot history display component.
            chatbot_output = gr.Chatbot(label=self.output_label, type="messages")

            # Session-specific state holding the conversation history.
            conversation_state = gr.State([])

            # User input box.
            user_input = gr.Textbox(
                lines=2,
                label=self.input_label,
                placeholder=self.input_placeholder
            )

            # Action buttons.
            with gr.Row():
                reset = gr.Button(self.reset_button, variant="secondary")
                submit = gr.Button(self.submit_button, variant="primary")

            submit.click(fn=self.generate_response, inputs=[user_input, conversation_state], outputs=[chatbot_output, conversation_state])
            user_input.submit(fn=self.generate_response, inputs=[user_input, conversation_state], outputs=[chatbot_output, conversation_state])
            # Reset now clears BOTH the display and the session state.
            reset.click(fn=self.reset_output, inputs=None, outputs=[chatbot_output, conversation_state])

        logging.info("Gradio interface created successfully.")
        return demo