# agents/quiz_agent.py
"""
Quiz Generation Agent - Creates quizzes and flashcards using Generative AI.
"""

import re

from .agent_helpers import format_history_for_prompt


class QuizAgent:
    """Agent that turns a user request (and optional notes) into a short quiz."""

    def __init__(self, gemini_model=None):
        """
        Initializes the agent with the Gemini model.

        Args:
            gemini_model: An instance of the Gemini model client.
        """
        self.model = gemini_model

    def _extract_topic(self, query: str) -> str:
        """A simple helper to extract the core topic from the user's query."""
        # Remove common phrases used to request a quiz.
        patterns = [
            r"make a quiz on", r"create a quiz on", r"give me a quiz on",
            r"quiz on", r"quiz about", r"test me on"
        ]
        topic = query.lower()
        for p in patterns:
            topic = re.sub(p, "", topic)
        # Clean up any extra whitespace.
        return topic.strip()

    def process_query(self, query: str, file_context: str = "", chat_history: list = None):
        """
        Processes a query to generate a quiz.

        Args:
            query (str): The user's full query (e.g., "Make a quiz on analgesics").
            file_context (str): Optional text content from an uploaded file.
            chat_history (list): The history of the conversation.

        Returns:
            dict: A dictionary containing the quiz and agent metadata.
        """
        if not self.model:
            return {
                'message': "❓ The question bank is locked! The Gemini API key is missing.",
                'agent_used': 'quiz_generation',
                'status': 'error_no_api_key'
            }

        history_for_prompt = format_history_for_prompt(chat_history)
        topic = self._extract_topic(query)

        # Inject the uploaded-file text once, in its own labelled section of the prompt.
        context_section = (
            f"---\nCONTEXT FROM KNOWLEDGE BASE:\n{file_context}\n---" if file_context else ""
        )
        task_description = f"Generate a short quiz (3-5 questions) on the topic: **{topic.title()}**."
        if file_context:
            task_description += "\nIf relevant, use the student's notes in the KNOWLEDGE BASE CONTEXT above."

        prompt = f"""You are "Quiz Master," an AI that creates educational quizzes in the spirit of Maryada Ramanna, a legendary character from Indian (particularly South Indian) folklore associated with justice, integrity, and cleverness.

**CRITICAL INSTRUCTION FOR CITATIONS:** When you use information from the KNOWLEDGE BASE CONTEXT, you MUST cite the source at the end of the relevant sentence using the format `[Source: filename, Page: page_number]`.

CONVERSATION HISTORY:
{history_for_prompt}

{context_section}

CURRENT TASK:
{task_description}

Based on the CURRENT TASK and conversation history, create a quiz. If the user is asking for a change to a previous quiz (e.g., "make it harder"), do that.
Include a mix of MCQs, True/False, and Fill-in-the-Blank questions.
CRITICAL: Provide a clearly separated "Answer Key" section with answers and brief explanations.
"""

        try:
            response = self.model.generate_content(prompt)
            return {
                'message': response.text,
                'agent_used': 'quiz_generation',
                'status': 'success'
            }
        except Exception as e:
            print(f"Quiz Agent Error: {e}")
            return {
                'message': f"My question book seems to be stuck. Error: {e}",
                'agent_used': 'quiz_generation',
                'status': 'error_api_call'
            }
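
# --- Illustrative offline demo (not part of the original agent surface). ---
# A minimal sketch of how QuizAgent is driven, using a hypothetical stub that
# mimics the only client surface this module relies on: a generate_content(prompt)
# call returning an object with a .text attribute. The _offline_demo name and the
# stub classes are illustrative additions; the call below also assumes that
# format_history_for_prompt accepts the default chat_history=None, as the
# signature of process_query implies.
def _offline_demo():
    class _StubResponse:
        def __init__(self, text):
            self.text = text

    class _StubModel:
        def generate_content(self, prompt):
            # Return a canned quiz so the full code path can be exercised offline.
            return _StubResponse("📝 Quiz Time (stub)\nQ1. ...\n\nAnswer Key\n1. ...")

    agent = QuizAgent(gemini_model=_StubModel())
    result = agent.process_query("Make a quiz on analgesics")
    print(result["status"], "/", result["agent_used"])
    print(result["message"])
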
# NOTE: Earlier draft of process_query, kept commented out for reference.
# def process_query(self, query: str, file_context: str = "", chat_history: list = None):
#     """
#     Processes a query to generate a quiz. The agent prioritizes file_context if provided.
#
#     Args:
#         query (str): The user's full query (e.g., "Make a quiz on analgesics").
#         file_context (str): Optional text content from an uploaded file.
#
#     Returns:
#         dict: A dictionary containing the quiz and agent metadata.
#     """
#     if not self.model:
#         return {
#             'message': "❓ **Quiz Master**\n\nThe question bank is locked! The Gemini API key is missing, so I can't generate quizzes. Please configure the API key to enable this feature.",
#             'agent_type': 'quiz_generation',
#             'status': 'error_no_api_key'
#         }
#
#     topic = self._extract_topic(query)
#     task_description = f"Generate a short quiz (3-5 questions) for a B.Pharmacy student on the topic: **{topic.title()}**."
#     if file_context:
#         task_description += f"\n\nIf relevant, use the following text from the student's uploaded notes for additional context:\n---\n{file_context}\n---"
#     else:
#         return {
#             'message': "Please tell me what to quiz you on! Either upload a file or ask for a quiz on a specific topic, like 'quiz on antibiotics'.",
#             'agent_type': 'quiz_generation',
#             'status': 'error_no_topic'
#         }
#
#     # Construct a specialized prompt for the Gemini model
#     prompt = f"""
#     You are "Quiz Master," an AI that creates engaging and effective study quizzes for B.Pharmacy students in India.
#
#     **Your Task:**
#     {task_description}
#
#     **Instructions:**
#     1. **Question Variety:** Create a mix of question types:
#        * Multiple Choice Questions (MCQs) with 4 options.
#        * True/False questions.
#        * Fill-in-the-Blank questions.
#     2. **Clarity:** Ensure questions are clear, concise, and relevant.
#     3. **Answer Key:** THIS IS ESSENTIAL. After all the questions, provide a clearly separated "🔑 Answer Key" section with the correct answers. For MCQs, also provide a brief (one-sentence) explanation for why the answer is correct.
#     4. **Formatting:** Use markdown for headings, bolding, and lists. Use emojis to make it fun and engaging.
#
#     Good luck! 🌟
#
#     **Example Output Structure:**
#     📝 **Quiz Time: [Topic Name]**
#
#     **Q1. [MCQ Question]**
#     A) Option 1
#     B) Option 2
#     ...
#
#     **Q2. [True/False Question]**
#
#     **Q3. [Fill-in-the-Blank Question]**
#
#     ---
#     🔑 **Answer Key**
#     1. **Answer:** B) Correct Option. *Explanation: [Brief reason why B is correct].*
#     2. **Answer:** True.
#     3. **Answer:** [Correct word(s)].
#
#     Let's test your knowledge! Good luck! 🌟
#     """
#
#     try:
#         # Generate content using the AI model
#         ai_response = self.model.generate_content(prompt, chat_history)
#         return {
#             'message': ai_response.text,
#             'agent_used': 'quiz_generation',
#             'status': 'success'
#         }
#     except Exception as e:
#         print(f"Quiz Agent Error: {e}")
#         return {
#             'message': f"I'm sorry, my question book seems to be stuck. I ran into an error: {str(e)}",
#             'agent_type': 'quiz_generation',
#             'status': 'error_api_call'
#         }
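

# Optional manual smoke test. This is a sketch only: it assumes the
# google-generativeai package, a GEMINI_API_KEY environment variable, and the
# "gemini-1.5-flash" model name, none of which are guaranteed by this module.
# Run it as a module (e.g. `python -m agents.quiz_agent`) so the relative
# import above resolves.
if __name__ == "__main__":
    import os

    _api_key = os.environ.get("GEMINI_API_KEY")
    if _api_key:
        import google.generativeai as genai  # assumed client library

        genai.configure(api_key=_api_key)
        _model = genai.GenerativeModel("gemini-1.5-flash")  # model name is an assumption
    else:
        # Without a key, passing None exercises the graceful error path.
        _model = None

    _agent = QuizAgent(gemini_model=_model)
    _result = _agent.process_query("Make a quiz on analgesics")
    print(_result["status"])
    print(_result["message"])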