from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.prompts import PromptTemplate
from app.core.config import settings
from app.schemas import MCQResponse


class GenerateAnswer:
    """
    Generate multiple-choice questions using the Google Gemini API.
    """

    def __init__(self):
        self.llm = ChatGoogleGenerativeAI(
            model="gemini-2.0-flash",
            temperature=0.6,
            api_key=settings.GOOGLE_API_KEY,
        )

    async def generate_mcq(self, topic: str, solo_level: str):
        """
        Generate a single multiple-choice question for the given topic and
        SOLO level using the Google Gemini API.
        """
        prompt = PromptTemplate(
            template="""You are an AI tutor. Based on the SOLO taxonomy level and the content snippet provided, generate a single multiple-choice question (MCQ) that matches the SOLO level.

Content Snippet:
\"\"\"
Photosynthesis is the process by which plants use sunlight, water, and carbon dioxide to create glucose and oxygen. Chlorophyll absorbs sunlight.
\"\"\"

SOLO Level: {solo_level}
Base the question on this topic: {topic}

SOLO Level Considerations:
- Unistructural: Focus on recalling a single piece of information from the content snippet.
- Multistructural: Focus on recalling several pieces of information from the content snippet.

Generate one MCQ with:
- "question_text": A single question aligned to the SOLO level
- "options": 3–4 plausible answer choices
- "correct_answer": The correct answer (must match one of the options)""",
            input_variables=["topic", "solo_level"],
        )
        # Bind the Pydantic schema so the model returns structured output
        # instead of free-form text.
        model = self.llm.with_structured_output(MCQResponse)
        chain = prompt | model
        response = await chain.ainvoke({"topic": topic, "solo_level": solo_level})
        return response
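

# app/schemas.py is not shown above. A minimal sketch of what the MCQResponse
# model likely looks like, assuming a Pydantic model whose fields mirror the
# keys the prompt asks for (with_structured_output accepts a Pydantic class):

from pydantic import BaseModel


class MCQResponse(BaseModel):
    question_text: str    # the generated question, aligned to the SOLO level
    options: list[str]    # 3-4 plausible answer choices
    correct_answer: str   # must match one of the options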
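
# A usage sketch, assuming settings.GOOGLE_API_KEY is configured and the
# schema above matches app/schemas.py. generate_mcq is a coroutine, so it
# must be awaited (here via asyncio.run):

import asyncio


async def main():
    generator = GenerateAnswer()
    mcq = await generator.generate_mcq(topic="Photosynthesis", solo_level="Unistructural")
    print(mcq.question_text)
    print(mcq.options)
    print(mcq.correct_answer)


if __name__ == "__main__":
    asyncio.run(main())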