"""
This module manages responses from the OpenAI Response API for an IT Helpdesk
assistant at Harvey Mudd College.

It initializes the OpenAI client and provides methods to generate responses
using Retrieval-Augmented Generation (RAG). The module leverages a vector
store to retrieve relevant knowledge base documents and uses the specified
OpenAI model to generate responses. Additionally, it loads a meta prompt from
a configuration file to enhance the AI model's contextual understanding.
"""

import logging
import os

import openai

# Configure logging to both file and console.
# Ensure the log directory exists first — FileHandler does not create it and
# would raise FileNotFoundError on a fresh checkout.
log_file_path = "logs/response_manager.log"
os.makedirs(os.path.dirname(log_file_path), exist_ok=True)
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s",
    handlers=[
        logging.FileHandler(log_file_path, mode='a', encoding='utf-8'),  # Save logs to a file
        logging.StreamHandler()  # Print logs to the console
    ]
)

# Load the OpenAI API key from the environment variable. Fail fast at import
# time so a misconfigured deployment is caught immediately.
api_key = os.getenv("OPENAI_API_KEY")
if not api_key:
    raise ValueError("OPENAI_API_KEY environment variable is not set.")


class ResponseManager:
    """
    A class to manage responses from the OpenAI API for an IT Helpdesk assistant.

    This class initializes the OpenAI client and provides a method to create
    responses to user queries using the specified OpenAI model.
    """

    def __init__(self, vector_store_id: str, meta_prompt_file: str = 'config/meta_prompt.txt'):
        """
        Initialize the ResponseManager with a vector store ID and meta prompt file.

        :param vector_store_id: The ID of the vector store to use for file search.
        :param meta_prompt_file: Path to the meta prompt file (default: 'config/meta_prompt.txt').
        :raises FileNotFoundError: If the meta prompt file does not exist.
        """
        self.client = openai.OpenAI(api_key=api_key)
        self.vector_store_id = vector_store_id
        # ID of the last response; used to chain conversation turns via the
        # Responses API's previous_response_id mechanism.
        self.previous_response_id = None

        # Load the meta prompt from the specified file.
        if not os.path.exists(meta_prompt_file):
            logging.error("Meta prompt file '%s' not found.", meta_prompt_file)
            raise FileNotFoundError(f"Meta prompt file '{meta_prompt_file}' not found.")
        with open(meta_prompt_file, 'r', encoding='utf-8') as file:
            self.meta_prompt = file.read().strip()
        logging.info("Meta prompt loaded successfully.")

    def create_response(self, query: str, model: str = "gpt-4o-mini",
                        temperature: float = 0, max_output_tokens: int = 800,
                        max_num_results: int = 15) -> str:
        """
        Create a response to a user query using the OpenAI API.

        :param query: The user query to respond to.
        :param model: The OpenAI model to use (default is "gpt-4o-mini").
        :param temperature: The temperature for the response (default is 0).
        :param max_output_tokens: The maximum number of output tokens (default is 800).
        :param max_num_results: The maximum number of search results to return (default is 15).
        :return: The response text from the OpenAI API.
        :raises ValueError: If the query is empty or whitespace-only.
        :raises RuntimeError: If the OpenAI API call fails.
        """
        if not query.strip():
            logging.error("Query is empty or invalid.")
            raise ValueError("Query cannot be empty.")

        # The meta prompt is only sent on the first turn; subsequent turns
        # inherit it through previous_response_id chaining.
        input_data = ([{"role": "developer", "content": self.meta_prompt}]
                      if self.previous_response_id is None else [])
        input_data.append({"role": "user", "content": query})

        try:
            logging.info("Sending request to OpenAI API...")
            response = self.client.responses.create(
                model=model,
                previous_response_id=self.previous_response_id,
                input=input_data,
                tools=[{
                    "type": "file_search",
                    "vector_store_ids": [self.vector_store_id],
                    "max_num_results": max_num_results
                }],
                temperature=temperature,
                max_output_tokens=max_output_tokens
            )
            # Remember this response so the next call continues the thread.
            self.previous_response_id = response.id
            logging.info("Response received successfully.")
            return response.output_text
        # NOTE: in the openai>=1.0 SDK (which provides openai.OpenAI and
        # client.responses), the error class lives at openai.OpenAIError;
        # the old openai.error module was removed and referencing it would
        # raise AttributeError while handling the real failure.
        except openai.OpenAIError as e:
            logging.exception("OpenAI API error: %s", e)
            raise RuntimeError(f"Failed to generate response: {e}")
        except Exception as e:
            logging.exception("Unexpected error: %s", e)
            raise RuntimeError(f"An unexpected error occurred: {e}")