sunbal7 committed
Commit ebbd85b · verified · 1 Parent(s): cde1a59

Update app.py

Files changed (1)
  1. app.py +62 -131
app.py CHANGED
@@ -1,133 +1,64 @@
- import os
- from datetime import datetime
- from PyPDF2 import PdfReader
- from docx import Document
  import streamlit as st
- from groq import Groq
- import textwrap
 
- # ✅ Page Configuration
- st.set_page_config(page_title="AI Study Plan Assistant", layout="wide")
-
- # ✅ Load Groq Client
- @st.cache_resource
- def load_groq_client():
-     return Groq(api_key=os.getenv("GROQ_API_KEY"))
-
- groq_client = load_groq_client()
-
- # ✅ File Text Extraction
- def extract_text(file):
-     ext = os.path.splitext(file.name)[1].lower()
-     if ext == ".txt":
-         return file.read().decode("utf-8")
-     elif ext == ".docx":
-         doc = Document(file)
-         return "\n".join([para.text for para in doc.paragraphs])
-     elif ext == ".pdf":
-         pdf_reader = PdfReader(file)
-         text = ""
-         for page in pdf_reader.pages:
-             page_text = page.extract_text()
-             if page_text:
-                 text += page_text + "\n"
-         return text
-     else:
-         raise ValueError("Only .txt, .docx, and .pdf files are supported.")
-
- # ✅ Chunking helper
- def chunk_text(text, chunk_size=1500):
-     return textwrap.wrap(text, width=chunk_size, break_long_words=False, replace_whitespace=False)
-
- # ✅ Query Groq LLM
- def query_groq(prompt, temperature=0.4):
-     try:
-         chat = groq_client.chat.completions.create(
-             messages=[{"role": "user", "content": prompt}],
-             model="llama3-8b-8192",
-             temperature=temperature
-         )
-         return chat.choices[0].message.content.strip()
-     except Exception as e:
-         return f"⚠️ Groq API Error: {str(e)}"
-
- # ✅ Generate Study Plan with chunking
- def generate_plan(file, hours_per_day, exam_date, language):
-     try:
-         content = extract_text(file)
-         today = datetime.now().date()
-         exam = datetime.strptime(exam_date.strip(), "%Y-%m-%d").date()
-         days = (exam - today).days
-
-         if days <= 0:
-             return "❌ Exam date must be in the future."
-
-         chunks = chunk_text(content, chunk_size=1500)
-         plan_parts = []
-
-         for idx, chunk in enumerate(chunks):
-             prompt = f"""This is part {idx + 1} of {len(chunks)} of a syllabus.
- Create a study plan segment for this syllabus. Total study duration is {days} days with {hours_per_day} hours/day.
- Write the study plan in {language}.
-
- Syllabus:
- \"\"\"
- {chunk}
- \"\"\"
- """
-             response = query_groq(prompt)
-             plan_parts.append(response)
-
-         return "\n\n".join(plan_parts)
-
-     except Exception as e:
-         return f"⚠️ Error: {str(e)}"
-
- # ✅ Ask Question with context chunking
- def ask_question(file, question):
-     try:
-         context = extract_text(file)
-         chunks = chunk_text(context, chunk_size=1500)
-         answers = []
-
-         for idx, chunk in enumerate(chunks):
-             prompt = f"""Use the following part of study material to answer the question:
-
- Material:
- \"\"\"
- {chunk}
- \"\"\"
-
- Question: {question}
- Answer:"""
-             answer = query_groq(prompt)
-             answers.append(f"Part {idx + 1}:\n{answer}")
-
-         return "\n\n".join(answers)
-
-     except Exception as e:
-         return f"⚠️ Error: {str(e)}"
-
- # ✅ Streamlit UI
- st.sidebar.title("📚 Study Assistant Options")
- uploaded_file = st.sidebar.file_uploader("Upload syllabus (.txt, .docx, .pdf)", type=["txt", "docx", "pdf"])
- study_hours = st.sidebar.number_input("Study hours per day", min_value=1, max_value=12, value=3)
- exam_date = st.sidebar.text_input("Exam Date (YYYY-MM-DD)", value="2025-06-30")
- language = st.sidebar.selectbox("Select Language", ["English", "Urdu"])
-
- st.title("🧠 AI Study Plan & QA Assistant")
-
- tab1, tab2 = st.tabs(["📅 Generate Study Plan", "❓ Ask a Question"])
-
- with tab1:
-     st.subheader("Generate a Personalized Study Plan")
-     if uploaded_file and st.button("Generate Study Plan"):
-         result = generate_plan(uploaded_file, study_hours, exam_date, language)
-         st.text_area("Study Plan", result, height=400)
-
- with tab2:
-     st.subheader("Ask Questions from Uploaded Material")
-     question = st.text_input("Enter your question:")
-     if uploaded_file and question and st.button("Get Answer"):
-         answer = ask_question(uploaded_file, question)
-         st.text_area("Answer", answer, height=300)
  import streamlit as st
+ import requests
+
+ # Hugging Face API setup
+ API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.1"
+ headers = {"Authorization": "Bearer YOUR_HF_API_KEY"}
+
+ def query_llm(prompt):
+     payload = {"inputs": prompt}
+     response = requests.post(API_URL, headers=headers, json=payload)
+     return response.json()[0]['generated_text']
+
+ # Static templates for known experiments
+ experiment_templates = {
+     "Vinegar and Baking Soda": {
+         "goal": "Observe a chemical reaction and gas formation.",
+         "materials": "Vinegar, Baking Soda, Container",
+         "default_hypothesis": "Mixing will cause bubbles or fizz due to gas release.",
+         "result": "Bubbles and fizz from CO2 gas produced in an acid-base reaction.",
+         "explanation": "Acetic acid in vinegar reacts with sodium bicarbonate to form carbon dioxide gas."
+     },
+     "Lemon Battery": {
+         "goal": "Generate electricity using a lemon as a battery.",
+         "materials": "Lemon, Copper coin, Zinc nail, Wires, LED",
+         "default_hypothesis": "The lemon will generate voltage to light up a small LED.",
+         "result": "LED glows slightly due to electron flow.",
+         "explanation": "The citric acid acts as electrolyte between the copper and zinc electrodes."
+     }
+ }
+
+ # Streamlit UI
+ st.set_page_config(page_title="Science Lab Assistant", layout="centered")
+ st.title("🧪 Science Lab Assistant")
+
+ # Select known or custom experiment
+ exp_option = st.selectbox("Choose an experiment", ["Custom"] + list(experiment_templates.keys()))
+
+ if exp_option != "Custom":
+     data = experiment_templates[exp_option]
+     st.subheader("📋 Experiment Summary")
+     st.write(f"**Goal:** {data['goal']}")
+     st.write(f"**Materials:** {data['materials']}")
+     st.write(f"**Suggested Hypothesis:** {data['default_hypothesis']}")
+
+     if st.button("🔍 Show Result & Explanation"):
+         st.success(f"**Result:** {data['result']}")
+         st.info(f"**Why?:** {data['explanation']}")
+
+ else:
+     st.subheader("🔬 Describe your experiment")
+     experiment_name = st.text_input("Name or short description of your experiment")
+     goal = st.text_area("What is the goal of your experiment?")
+     materials = st.text_area("List the materials involved")
+
+     if st.button("🧠 Generate Hypothesis and Expected Result"):
+         prompt = f"""I am conducting a school science experiment.
+ Experiment: {experiment_name}
+ Goal: {goal}
+ Materials: {materials}
+ Please suggest a hypothesis and likely result with a brief scientific explanation."""
+         result = query_llm(prompt)
+         st.markdown("### 🤖 Assistant's Suggestion")
+         st.write(result)
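The new query_llm helper indexes straight into response.json()[0]['generated_text'], which raises IndexError or KeyError whenever the hosted Inference API returns an error payload instead of a generation (for example while the model is still loading). A minimal defensive variant, shown only as a sketch: it reuses the API_URL and headers defined in the diff above, and query_llm_safe is a hypothetical name, not part of this commit.

import requests

def query_llm_safe(prompt: str, timeout: int = 60) -> str:
    # Call the hosted Inference API; surface errors as readable strings
    # instead of crashing on unexpected response shapes.
    payload = {"inputs": prompt}
    response = requests.post(API_URL, headers=headers, json=payload, timeout=timeout)
    if response.status_code != 200:
        # Failed calls often carry a JSON body with an "error" field; return it verbatim.
        return f"API error {response.status_code}: {response.text}"
    data = response.json()
    if isinstance(data, list) and data and "generated_text" in data[0]:
        return data[0]["generated_text"]
    return f"Unexpected response format: {data}"

The Streamlit button handler could then call query_llm_safe(prompt) in place of query_llm(prompt) and display whatever string comes back, so a slow-loading or rate-limited model shows a message rather than a stack trace.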