Update utils/response_manager.py
utils/response_manager.py  CHANGED  +38 -9
@@ -1,6 +1,8 @@
 import os
 import openai
 import logging
+from typing import Optional
+
 """
 This module manages responses from the OpenAI Response API for an IT Helpdesk assistant
 at Harvey Mudd College. It initializes the OpenAI client and provides methods to generate
@@ -9,8 +11,9 @@ to retrieve relevant knowledge base documents and uses the specified OpenAI mode
 generate responses. Additionally, it loads a meta prompt from a configuration file to
 enhance the AI model's contextual understanding.
 """
+
 # Configure logging to both file and console
-log_file_path = "logs/response_manager.log"
+log_file_path = "logs/response_manager.log"
 logging.basicConfig(
     level=logging.INFO,
     format="%(asctime)s - %(levelname)s - %(message)s",
@@ -25,6 +28,7 @@ api_key = os.getenv("OPENAI_API_KEY")
 if not api_key:
     raise ValueError("OPENAI_API_KEY environment variable is not set.")
 
+
 class ResponseManager:
     """
     A class to manage responses from the OpenAI API for an IT Helpdesk assistant.
@@ -32,27 +36,47 @@ class ResponseManager:
     to user queries using the specified OpenAI model.
     """
 
-    def __init__(self, vector_store_id: str, meta_prompt_file: str = 'config/meta_prompt.txt'):
+    DEFAULT_META_PROMPT_FILE = 'config/meta_prompt.txt'
+    DEFAULT_MODEL = "gpt-4o-mini"
+    DEFAULT_TEMPERATURE = 0
+    DEFAULT_MAX_OUTPUT_TOKENS = 800
+    DEFAULT_MAX_NUM_RESULTS = 15
+
+    def __init__(self, vector_store_id: str, meta_prompt_file: Optional[str] = None):
         """
         Initialize the ResponseManager with a vector store ID and meta prompt file.
         :param vector_store_id: The ID of the vector store to use for file search.
         :param meta_prompt_file: Path to the meta prompt file (default: 'config/meta_prompt.txt').
         """
-        self.client = openai.OpenAI(api_key=api_key)
+        if not vector_store_id:
+            logging.error("Vector store ID is not provided.")
+            raise ValueError("Vector store ID cannot be empty.")
         self.vector_store_id = vector_store_id
+
+        self.meta_prompt_file = meta_prompt_file or self.DEFAULT_META_PROMPT_FILE
+        self.client = openai.OpenAI(api_key=api_key)
         self.previous_response_id = None
 
         # Load the meta prompt from the specified file
+        self.meta_prompt = self._load_meta_prompt(self.meta_prompt_file)
+
+    def _load_meta_prompt(self, meta_prompt_file: str) -> str:
+        """
+        Load the meta prompt from the specified file.
+        :param meta_prompt_file: Path to the meta prompt file.
+        :return: The meta prompt as a string.
+        """
         if not os.path.exists(meta_prompt_file):
            logging.error(f"Meta prompt file '{meta_prompt_file}' not found.")
            raise FileNotFoundError(f"Meta prompt file '{meta_prompt_file}' not found.")
-        with open(meta_prompt_file, 'r') as file:
-            self.meta_prompt = file.read().strip()
-            logging.info("Meta prompt loaded successfully.")
+        with open(meta_prompt_file, 'r', encoding='utf-8') as file:
+            meta_prompt = file.read().strip()
+            logging.info(f"Meta prompt loaded successfully from '{meta_prompt_file}'.")
+        return meta_prompt
 
-    def create_response(self, query: str, model: str = "gpt-4o-mini",
-                        temperature: float = 0,
-                        max_num_results: int = 15) -> str:
+    def create_response(self, query: str, model: Optional[str] = None,
+                        temperature: Optional[float] = None, max_output_tokens: Optional[int] = None,
+                        max_num_results: Optional[int] = None) -> str:
         """
         Create a response to a user query using the OpenAI API.
         :param query: The user query to respond to.
@@ -66,6 +90,11 @@ class ResponseManager:
             logging.error("Query is empty or invalid.")
             raise ValueError("Query cannot be empty.")
 
+        model = model or self.DEFAULT_MODEL
+        temperature = temperature if temperature is not None else self.DEFAULT_TEMPERATURE
+        max_output_tokens = max_output_tokens if max_output_tokens is not None else self.DEFAULT_MAX_OUTPUT_TOKENS
+        max_num_results = max_num_results if max_num_results is not None else self.DEFAULT_MAX_NUM_RESULTS
+
         # Prepare the input for the API call
         input_data = [{"role": "developer", "content": self.meta_prompt}] if self.previous_response_id is None else []
         input_data.append({"role": "user", "content": query})
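The last hunk ends right after input_data is assembled, so the actual API call never appears in this view. Going by the module docstring (the OpenAI Responses API plus file search against the configured vector store) and the attributes the diff does show (client, vector_store_id, previous_response_id), the unchanged remainder of create_response plausibly continues along these lines. This is a sketch of off-screen code, not part of the commit:

    # Hypothetical continuation of create_response (not shown in the diff above),
    # assuming the standard OpenAI Responses API with the file_search tool.
    response = self.client.responses.create(
        model=model,
        input=input_data,
        temperature=temperature,
        max_output_tokens=max_output_tokens,
        tools=[{
            "type": "file_search",
            "vector_store_ids": [self.vector_store_id],  # set in __init__
            "max_num_results": max_num_results,          # cap on retrieved KB chunks
        }],
        previous_response_id=self.previous_response_id,  # threads multi-turn context
    )
    self.previous_response_id = response.id  # so the next turn continues the thread
    return response.output_text

Calling code benefits from the None-sentinel refactor: the defaults now live in one place (the class constants), and callers override only the knob they care about. A minimal usage sketch, with a placeholder vector store ID and API key; it also creates logs/ up front, since the module configures file logging to logs/response_manager.log at import time and validates OPENAI_API_KEY then as well:

    import os

    # Both must be in place before the import below triggers the module-level setup.
    os.makedirs("logs", exist_ok=True)                 # logs/response_manager.log lives here
    os.environ.setdefault("OPENAI_API_KEY", "sk-...")  # placeholder; Spaces would set a secret

    from utils.response_manager import ResponseManager

    manager = ResponseManager(vector_store_id="vs_example123")  # hypothetical store ID

    # Falls back to the class defaults: gpt-4o-mini, temperature 0,
    # 800 output tokens, up to 15 file-search results.
    print(manager.create_response("How do I connect to the campus Wi-Fi?"))

    # Override a single default without restating the rest.
    print(manager.create_response(
        "Walk me through setting up the campus VPN.",
        max_output_tokens=1600,
    ))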