# PaperPilot — Streamlit app that searches arXiv and summarizes papers via the Groq API.
# (Removed "Spaces: / Sleeping / Sleeping" artifacts left over from a Hugging Face
# Spaces page scrape — they were not part of the program.)
import os
import datetime

import arxiv
import matplotlib.pyplot as plt  # kept: may be used elsewhere in the file
import networkx as nx            # kept: may be used elsewhere in the file
import streamlit as st
from groq import Groq

# -------------------------------
# Groq API Client
# -------------------------------
# The API key is read from the environment so it is never hard-coded in source.
# Note: if GROQ_API_KEY is unset this yields api_key=None and the first API
# call will fail with an authentication error.
client = Groq(
    api_key=os.environ.get("GROQ_API_KEY"),
)
def groq_summarize(text: str) -> str:
    """Summarize *text* in detail using Groq's chat-completion API.

    Args:
        text: The raw text (e.g. a paper abstract) to summarize.

    Returns:
        The model's summary, with surrounding whitespace stripped.
    """
    response = client.chat.completions.create(
        messages=[
            {"role": "user", "content": f"Summarize the following text in detail:\n\n{text}"}
        ],
        model="llama-3.3-70b-versatile",
    )
    return response.choices[0].message.content.strip()
def groq_simplify(text: str) -> str:
    """Produce an "Explain Like I'm 5" (ELI5) version of *text*.

    Args:
        text: The text (typically a summary) to simplify.

    Returns:
        The model's simplified explanation, whitespace-stripped.
    """
    response = client.chat.completions.create(
        messages=[
            {"role": "user", "content": f"Explain the following like I'm 5 years old:\n\n{text}"}
        ],
        model="llama-3.3-70b-versatile",
    )
    return response.choices[0].message.content.strip()
def groq_generate_key_takeaways(text: str) -> str:
    """Generate key takeaways from a research paper's text.

    Args:
        text: The paper text (abstract or full text) to distill.

    Returns:
        The model's bullet-point takeaways, whitespace-stripped.
    """
    response = client.chat.completions.create(
        messages=[
            {"role": "user", "content": f"Provide key takeaways from this research paper:\n\n{text}"}
        ],
        model="llama-3.3-70b-versatile",
    )
    return response.choices[0].message.content.strip()
def retrieve_papers(query, max_results=5):
    """Retrieve academic papers from arXiv, with links to citation/graph tools.

    Args:
        query: Free-text search query passed to arXiv.
        max_results: Maximum number of results to fetch (default 5).

    Returns:
        A list of dicts with keys: title, summary, url (PDF), authors,
        published, doi (always a https://doi.org/... URL), and per-paper
        links for litmaps, connected_papers, scite_ai and biblio_explorer.
    """
    search = arxiv.Search(query=query, max_results=max_results)
    papers = []
    for result in search.results():
        # entry_id looks like "http://arxiv.org/abs/2301.00001v1"; the short
        # id (last path component) keys all the external tool URLs below.
        short_id = result.entry_id.split("/")[-1]
        # BUG FIX: hasattr(result, "doi") is always True on an arxiv.Result,
        # so the old fallback was dead code and "doi" could be None (breaking
        # the markdown link downstream). result.doi, when present, is also a
        # bare DOI ("10.x/..."), not a URL — normalize both cases to a URL,
        # falling back to arXiv's DataCite-registered DOI.
        doi_url = (
            f"https://doi.org/{result.doi}"
            if result.doi
            else f"https://doi.org/10.48550/arXiv.{short_id}"
        )
        papers.append({
            "title": result.title,
            "summary": result.summary,
            "url": result.pdf_url,
            "authors": [author.name for author in result.authors],
            "published": result.published,
            "doi": doi_url,
            "litmaps": f"https://app.litmaps.com/preview/{short_id}",
            "connected_papers": f"https://www.connectedpapers.com/main/{short_id}",
            "scite_ai": f"https://scite.ai/reports/{short_id}",
            "biblio_explorer": f"https://arxiv.org/bib_explorer/{short_id}",
        })
    return papers
def get_cached_summary(paper_id, text):
    """Return cached analysis for a paper, generating it on first request.

    Caches per-paper results in st.session_state so each paper is sent to
    the Groq API at most once per session (three calls: summary, ELI5,
    key takeaways).

    Args:
        paper_id: Stable key identifying the paper within the session.
        text: The paper text to analyze (typically its abstract).

    Returns:
        Dict with keys "summary", "eli5" and "key_takeaways".
    """
    if "summaries" not in st.session_state:
        st.session_state.summaries = {}
    if paper_id not in st.session_state.summaries:
        st.session_state.summaries[paper_id] = {
            "summary": groq_summarize(text),
            "eli5": groq_simplify(text),
            "key_takeaways": groq_generate_key_takeaways(text),
        }
    return st.session_state.summaries[paper_id]
# -------------------------------
# Streamlit UI
# -------------------------------
# Mojibake repair: the original strings contained UTF-8 bytes decoded as
# Latin-1 ("β" = em dash, "β€οΈ" = ❤️). The "π" sequences lost their
# final byte, so the exact original emoji is unrecoverable — the 📚/🔍/📖
# choices below are best-effort; TODO confirm against the original source.
st.title("📚 PaperPilot — Intelligent Academic Navigator")

with st.sidebar:
    st.header("🔍 Search Parameters")
    query = st.text_input("Research topic or question:")
    if st.button("🔍 Find Articles"):
        if query.strip():
            with st.spinner("Searching arXiv..."):
                papers = retrieve_papers(query)
                if papers:
                    st.session_state.papers = papers
                    st.success(f"Found {len(papers)} papers!")
                    # Jump straight to the review section after a search.
                    st.session_state.active_section = "review"
                else:
                    st.error("No papers found. Try different keywords.")
        else:
            st.warning("Please enter a search query")

# Default section before any search has run.
if 'active_section' not in st.session_state:
    st.session_state.active_section = "none"

if 'papers' in st.session_state and st.session_state.papers:
    papers = st.session_state.papers
    if st.session_state.active_section == "review":
        st.header("📖 Literature Review & Summary")
        for idx, paper in enumerate(papers, 1):
            with st.expander(f"Summary: {paper['title']}"):
                with st.spinner(f"Analyzing {paper['title']}..."):
                    paper_id = f"paper_{idx}"
                    # Cached after first render; repeat expansions are free.
                    summary_data = get_cached_summary(paper_id, paper['summary'])
                    st.markdown(f"**Summary:** {summary_data['summary']}")
                    st.markdown(f"**ELI5:** {summary_data['eli5']}")
                    st.markdown("**Key Takeaways:**")
                    st.write(summary_data['key_takeaways'])
                    st.markdown(f"**DOI:** [Link]({paper['doi']})")
                    st.markdown(f"**Bibliographic Explorer:** [View]({paper['biblio_explorer']})")
                    st.markdown(f"**Connected Papers:** [View]({paper['connected_papers']})")
                    st.markdown(f"**Litmaps:** [View]({paper['litmaps']})")
                    st.markdown(f"**Scite.ai Citations:** [View]({paper['scite_ai']})")
else:
    st.info("Enter a query in the sidebar and click 'Find Articles' to get started.")

st.caption("Built with ❤️ using AI")