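# Azure Certification Prep Assistant
# Streamlit app: searches microsoft.com for a certification's curriculum via DuckDuckGo
# and asks Gemini (through LangChain) to generate exam-style Q&A in markdown.
# Run locally with: streamlit run <this_file>.py  (file name assumed)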
import streamlit as st
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.tools import Tool, DuckDuckGoSearchRun
from langchain.agents import initialize_agent, AgentType

# Streamlit UI Setup
st.set_page_config(page_title="Azure Certification Prep Assistant", layout="wide")
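# The 'title', 'subheader', and 'markdown-text-container' CSS classes used below are not
# defined anywhere in this file; a minimal example stylesheet (assumed styling values) could be:
st.markdown(
    """
    <style>
    .title { font-size: 2rem; font-weight: 700; }
    .subheader { font-size: 1.4rem; font-weight: 600; margin-top: 1.5rem; }
    .markdown-text-container { font-size: 1rem; line-height: 1.6; }
    </style>
    """,
    unsafe_allow_html=True,
)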
st.markdown("<div class='title'>Azure Certification Prep Assistant</div>", unsafe_allow_html=True)

# User input for API key
user_api_key = st.text_input("πŸ” Enter your Gemini API Key", type="password")

# Certification name input
cert_name = st.text_input("πŸ“˜ Enter Azure Certification Name (e.g., AZ-900)", "")

if st.button("Get Certification Details"):
    if not user_api_key:
        st.error("Please enter your Gemini API key.")
    elif not cert_name:
        st.warning("Please enter a certification name.")
    else:
        try:
            # Create LLM and Agent only after API key is provided
            llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash", google_api_key=user_api_key)
            ddgs = DuckDuckGoSearchRun()
            search_tool = Tool(
                name="Web Search",
                func=ddgs.run,
                description="Searches the web for relevant certification information."
            )
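            # ZERO_SHOT_REACT_DESCRIPTION builds a ReAct agent around the search tool;
            # note that azure_cert_bot below calls ddgs.run and llm.invoke directly
            # rather than routing through this agent.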
            agent = initialize_agent(
                tools=[search_tool],
                llm=llm,
                agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
                verbose=True
            )

            # Fetch curriculum search results and generate Q&A for the given certification
            def azure_cert_bot(cert_name):
                query = f"Microsoft Azure {cert_name} certification curriculum site:microsoft.com"
                # DuckDuckGoSearchRun returns a single text blob; split it into lines for display
                search_results = ddgs.run(query).split("\n")
                prompt = (
                    f"Based on the following curriculum details, generate key questions and answers "
                    f"in markdown format for the {cert_name} certification exam. Do not include any "
                    f"metadata or unnecessary text, only return the formatted Q&A:\n{search_results}"
                )
                response = llm.invoke(prompt)
                try:
                    # ChatGoogleGenerativeAI.invoke returns an AIMessage; read its .content,
                    # falling back to str() for other return types
                    response_text = response.content if hasattr(response, "content") else str(response)
                    # Strip any stray "content=" / metadata lines from a stringified message
                    response_text = "\n".join(
                        line for line in response_text.split("\n")
                        if not line.lower().startswith("content=") and "metadata" not in line.lower()
                    )
                except Exception as e:
                    response_text = f"Error processing response: {str(e)}"
                return search_results, response_text

            # Run the bot
            links, qa_content = azure_cert_bot(cert_name)

            st.markdown("<div class='subheader'>Certification Links & Curriculum</div>", unsafe_allow_html=True)
            for link in links:
                link = link.strip()
                # DuckDuckGo snippets are plain text; only hyperlink lines that are URLs
                if link.startswith("http"):
                    st.markdown(f"<div class='markdown-text-container'>- <a href='{link}' target='_blank'>{link}</a></div>", unsafe_allow_html=True)
                elif link:
                    st.markdown(f"<div class='markdown-text-container'>- {link}</div>", unsafe_allow_html=True)

            st.markdown("<div class='subheader'>Exam Questions & Answers</div>", unsafe_allow_html=True)
            st.markdown(f"<div class='markdown-text-container'>{qa_content}</div>", unsafe_allow_html=True)
        except Exception as e:
            st.error(f"❌ Error: {str(e)}")