# PaperPilot / app.py
import os
import streamlit as st
import arxiv
import datetime
# -------------------------------
# Groq API Client
# -------------------------------
from groq import Groq
client = Groq(
    api_key=os.environ.get("GROQ_API_KEY"),
)
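# Note: the API key is read from the GROQ_API_KEY environment variable; on
# Hugging Face Spaces this is typically provided as a repository secret, and
# locally via e.g. `export GROQ_API_KEY=...` (deployment details assumed, not
# stated in the original file).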
# -------------------------------
# Helper Functions (Groq-based)
# -------------------------------
def groq_summarize(text: str) -> str:
    """Return a concise Groq-generated summary of the given text."""
    response = client.chat.completions.create(
        messages=[
            {"role": "user", "content": f"Summarize the following text concisely:\n\n{text}"}
        ],
        model="llama-3.3-70b-versatile",
    )
    return response.choices[0].message.content.strip()
def groq_eli5(text: str) -> str:
    """Explain the given text in very simple, child-friendly terms."""
    response = client.chat.completions.create(
        messages=[
            {"role": "user", "content": f"Explain this like I'm 5 years old:\n\n{text}"}
        ],
        model="llama-3.3-70b-versatile",
    )
    return response.choices[0].message.content.strip()
def groq_key_takeaways(text: str) -> str:
    """List the key takeaways of the given research text."""
    response = client.chat.completions.create(
        messages=[
            {"role": "user", "content": f"List the key takeaways from this research:\n\n{text}"}
        ],
        model="llama-3.3-70b-versatile",
    )
    return response.choices[0].message.content.strip()
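# Optional sketch (not wired into the UI below): wrapping the helpers in
# st.cache_data avoids repeating identical Groq calls when Streamlit reruns
# the script on each widget interaction. Assumes streamlit >= 1.18; the
# wrapper name is illustrative and not part of the original app.
@st.cache_data(show_spinner=False)
def cached_summarize(text: str) -> str:
    return groq_summarize(text)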
# -------------------------------
# Paper Retrieval & Processing
# -------------------------------
def retrieve_papers(query, max_results=5):
    """Search arXiv and return basic metadata for up to max_results papers."""
    search = arxiv.Search(query=query, max_results=max_results)
    papers = []
    # Search.results() is deprecated in arxiv >= 2.0 in favour of
    # arxiv.Client().results(search), but it still works here.
    for result in search.results():
        paper = {
            "title": result.title,
            "summary": result.summary,
            "url": result.pdf_url,
            "authors": [author.name for author in result.authors],
            "published": result.published
        }
        papers.append(paper)
    return papers
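# Alternative sketch using the non-deprecated client API (assumes arxiv >= 2.0;
# the function name is illustrative and nothing below calls it):
def retrieve_papers_v2(query, max_results=5):
    arxiv_client = arxiv.Client()
    search = arxiv.Search(query=query, max_results=max_results)
    return [
        {
            "title": r.title,
            "summary": r.summary,
            "url": r.pdf_url,
            "authors": [a.name for a in r.authors],
            "published": r.published,
        }
        for r in arxiv_client.results(search)
    ]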
# -------------------------------
# Streamlit Interface
# -------------------------------
st.title("📚 PaperPilot – Intelligent Academic Navigator")
st.write("""
PaperPilot helps you quickly analyze research papers by summarizing them, highlighting key takeaways, and explaining complex topics in simple terms.
Enter a query and get structured insights instantly!
""")
with st.sidebar:
    st.header("🔍 Search Parameters")
    query = st.text_input("Research topic or question:")
    if st.button("🚀 Find Articles"):
        if query.strip():
            with st.spinner("Searching arXiv..."):
                papers = retrieve_papers(query)
            if papers:
                st.session_state.papers = papers
                st.success(f"Found {len(papers)} papers!")
                st.session_state.active_section = "review"
            else:
                st.error("No papers found. Try different keywords.")
        else:
            st.warning("Please enter a search query.")
if 'papers' in st.session_state and st.session_state.papers:
    papers = st.session_state.papers
    if st.session_state.active_section == "review":
        st.header("📚 Literature Review & Summary")
        for idx, paper in enumerate(papers, 1):
            with st.expander(f"{idx}. {paper['title']}"):
                st.markdown(f"**Authors:** {', '.join(paper['authors'])}")
                pub_date = paper['published'].strftime('%Y-%m-%d') if isinstance(paper['published'], datetime.datetime) else "n.d."
                st.markdown(f"**Published:** {pub_date}")
                st.markdown(f"**Link:** [PDF]({paper['url']})")
                with st.spinner("Generating insights..."):
                    short_description = groq_summarize(paper['summary'])
                    key_takeaways = groq_key_takeaways(paper['summary'])
                    eli5_explanation = groq_eli5(paper['summary'])
                st.subheader("Short Description")
                st.write(short_description)
                st.subheader("Key Takeaways")
                st.write(key_takeaways)
                st.subheader("Explain Like I'm 5 (ELI5)")
                st.write(eli5_explanation)
else:
    st.info("Enter a query in the sidebar and click 'Find Articles' to get started.")
st.caption("Built with ❤️ using AI")
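# To try the app locally (assuming the dependencies are installed, roughly
# `pip install streamlit arxiv groq`), run: `streamlit run app.py`.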