# app/services/gemini.py — MCQ generation service backed by Google Gemini (via LangChain).
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.prompts import PromptTemplate
from app.core.config import settings
from app.schemas import MCQResponse
class GenerateAnswer:
    """
    Generate SOLO-taxonomy-aligned multiple-choice questions using the
    Google Gemini API (via LangChain's structured-output interface).
    """

    # Default content snippet, kept for backward compatibility with callers
    # that do not supply their own content.
    _DEFAULT_SNIPPET = (
        "Photosynthesis is the process by which plants use sunlight, water, and "
        "carbon dioxide to create glucose and oxygen. Chlorophyll absorbs sunlight."
    )

    # Built once at class-definition time: the template is invariant, so there
    # is no reason to reconstruct it on every request.
    _PROMPT = PromptTemplate(
        template="""You are an AI tutor. Based on the SOLO taxonomy level and the content snippet provided, generate a single multiple-choice question (MCQ) that matches the SOLO level.
Content Snippet:
\"\"\"
{content_snippet}
\"\"\"
SOLO Level: {solo_level}
You should be based on this Topic: {topic}
SOLO Level Consideration:
- Unistructural: Focus on recalling a single piece of information from the content_snippet.
- Multistructural: Focus on recalling several pieces of information from the content_snippet.
Generate one MCQ with:
- "question_text": A single question aligned to the SOLO level
- "options": 3–4 plausible answer choices
- "correct_answer": The correct answer (must match one of the options)""",
        input_variables=["topic", "solo_level", "content_snippet"],
    )

    def __init__(self) -> None:
        # temperature=0.6 trades full determinism for some variety in distractors.
        self.llm = ChatGoogleGenerativeAI(
            model="gemini-2.0-flash",
            temperature=0.6,
            api_key=settings.GOOGLE_API_KEY,
        )
        # Assemble the prompt -> structured-output chain once per instance
        # instead of on every generate_mcq() call.
        structured_llm = self.llm.with_structured_output(MCQResponse)
        self._chain = self._PROMPT | structured_llm

    async def generate_mcq(
        self,
        topic: str,
        solo_level: str,
        content_snippet: str | None = None,
    ) -> MCQResponse:
        """
        Generate a single MCQ for *topic* at the given SOLO taxonomy level.

        Args:
            topic: Subject the question should be based on.
            solo_level: SOLO taxonomy level (e.g. "Unistructural", "Multistructural").
            content_snippet: Source text to ground the question in; falls back
                to the built-in default snippet when omitted.

        Returns:
            MCQResponse: Structured MCQ (question text, options, correct answer).
        """
        snippet = content_snippet if content_snippet is not None else self._DEFAULT_SNIPPET
        return await self._chain.ainvoke(
            {"topic": topic, "solo_level": solo_level, "content_snippet": snippet}
        )