Spaces:
Sleeping
Sleeping
File size: 5,268 Bytes
fbec6c3 7aa208d 1220468 fbec6c3 aee8230 fbec6c3 42e32f8 fbec6c3 7aa208d f3f61db 42e32f8 7aa208d 42e32f8 fbec6c3 42e32f8 fbec6c3 7aa208d 42e32f8 fbec6c3 42e32f8 fbec6c3 42e32f8 fbec6c3 42e32f8 fbec6c3 42e32f8 1220468 fbec6c3 42e32f8 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 |
import os
import streamlit as st
import arxiv
import networkx as nx
import matplotlib.pyplot as plt
import datetime
# -------------------------------
# Groq API Client
# -------------------------------
from groq import Groq

# Module-level Groq client shared by all helper functions below.
# Reads the API key from the GROQ_API_KEY environment variable; if unset,
# api_key is None and requests will fail at call time, not at import time.
client = Groq(
    api_key=os.environ.get("GROQ_API_KEY"),
)
# -------------------------------
# Helper Functions (Groq-based)
# -------------------------------
def groq_summarize(text: str) -> str:
    """Return a detailed summary of *text* from Groq's chat completion API."""
    prompt = f"Summarize the following text in detail:\n\n{text}"
    completion = client.chat.completions.create(
        messages=[{"role": "user", "content": prompt}],
        model="llama-3.3-70b-versatile",
    )
    reply = completion.choices[0].message.content
    return reply.strip()
def groq_simplify(text: str) -> str:
    """Return an "Explain Like I'm 5" (ELI5) rendering of *text* via Groq."""
    prompt = f"Explain the following like I'm 5 years old:\n\n{text}"
    completion = client.chat.completions.create(
        messages=[{"role": "user", "content": prompt}],
        model="llama-3.3-70b-versatile",
    )
    reply = completion.choices[0].message.content
    return reply.strip()
def groq_generate_key_takeaways(text: str) -> str:
    """Return key takeaways for a research paper's *text* via Groq."""
    prompt = f"Provide key takeaways from this research paper:\n\n{text}"
    completion = client.chat.completions.create(
        messages=[{"role": "user", "content": prompt}],
        model="llama-3.3-70b-versatile",
    )
    reply = completion.choices[0].message.content
    return reply.strip()
def retrieve_papers(query, max_results=5):
    """Retrieve academic papers from arXiv, with DOI and research-tool links.

    Args:
        query: Free-text search string passed to arXiv.
        max_results: Maximum number of results to fetch (default 5).

    Returns:
        List of dicts with keys: title, summary, url (PDF), authors,
        published, doi, litmaps, connected_papers, scite_ai,
        biblio_explorer.
    """
    search = arxiv.Search(query=query, max_results=max_results)
    papers = []
    # NOTE(review): Search.results() is deprecated in arxiv>=2.0 in favor of
    # arxiv.Client().results(search) — confirm the pinned package version
    # before switching.
    for result in search.results():
        # Bare arXiv identifier (e.g. "2401.01234v2"); every external tool
        # link below is keyed on it.
        arxiv_id = result.entry_id.split('/')[-1]
        papers.append({
            "title": result.title,
            "summary": result.summary,
            "url": result.pdf_url,
            "authors": [author.name for author in result.authors],
            "published": result.published,
            # BUG FIX: arxiv.Result always defines .doi (frequently None), so
            # the original `hasattr(result, "doi")` check never fell through
            # and callers could receive None. Fall back to the DataCite DOI
            # that arXiv mints for every paper when .doi is empty.
            "doi": result.doi or f"https://doi.org/10.48550/arXiv.{arxiv_id}",
            "litmaps": f"https://app.litmaps.com/preview/{arxiv_id}",
            "connected_papers": f"https://www.connectedpapers.com/main/{arxiv_id}",
            "scite_ai": f"https://scite.ai/reports/{arxiv_id}",
            "biblio_explorer": f"https://arxiv.org/bib_explorer/{arxiv_id}",
        })
    return papers
def get_cached_summary(paper_id, text):
    """Return the cached analysis for *paper_id*, generating it on first use.

    The summary, ELI5 explanation, and key takeaways are memoized in
    st.session_state['summaries'] so Streamlit reruns do not re-invoke
    the Groq API for papers already analyzed.
    """
    if 'summaries' not in st.session_state:
        st.session_state['summaries'] = {}
    cache = st.session_state['summaries']
    if paper_id not in cache:
        cache[paper_id] = {
            "summary": groq_summarize(text),
            "eli5": groq_simplify(text),
            "key_takeaways": groq_generate_key_takeaways(text),
        }
    return cache[paper_id]
# NOTE(review): several string literals below contain mojibake-garbled emoji
# (e.g. "π", "β€οΈ") — restore the intended characters from repo history.
st.title("π PaperPilot β Intelligent Academic Navigator")

# --- Sidebar: search controls -------------------------------------------
with st.sidebar:
    st.header("π Search Parameters")
    query = st.text_input("Research topic or question:")
    if st.button("π Find Articles"):
        if query.strip():
            with st.spinner("Searching arXiv..."):
                papers = retrieve_papers(query)
                if papers:
                    # Persist results across Streamlit reruns and switch the
                    # main area into review mode.
                    st.session_state.papers = papers
                    st.success(f"Found {len(papers)} papers!")
                    st.session_state.active_section = "review"
                else:
                    st.error("No papers found. Try different keywords.")
        else:
            st.warning("Please enter a search query")

# Default section marker so the main area renders the intro on first load.
if 'active_section' not in st.session_state:
    st.session_state.active_section = "none"

# --- Main area: literature review ---------------------------------------
if 'papers' in st.session_state and st.session_state.papers:
    papers = st.session_state.papers
    if st.session_state.active_section == "review":
        st.header("π Literature Review & Summary")
        for idx, paper in enumerate(papers, 1):
            with st.expander(f"Summary: {paper['title']}"):
                with st.spinner(f"Analyzing {paper['title']}..."):
                    # Cache key is positional; the same query order yields
                    # the same id, so cached entries are reused on rerun.
                    paper_id = f"paper_{idx}"
                    summary_data = get_cached_summary(paper_id, paper['summary'])
                    st.markdown(f"**Summary:** {summary_data['summary']}")
                    st.markdown(f"**ELI5:** {summary_data['eli5']}")
                    st.markdown("**Key Takeaways:**")
                    st.write(summary_data['key_takeaways'])
                    st.markdown(f"**DOI:** [Link]({paper['doi']})")
                    st.markdown(f"**Bibliographic Explorer:** [View]({paper['biblio_explorer']})")
                    st.markdown(f"**Connected Papers:** [View]({paper['connected_papers']})")
                    st.markdown(f"**Litmaps:** [View]({paper['litmaps']})")
                    st.markdown(f"**Scite.ai Citations:** [View]({paper['scite_ai']})")
else:
    st.info("Enter a query in the sidebar and click 'Find Articles' to get started.")

# FIX: removed a stray trailing "|" artifact after this call, which would be
# a SyntaxError if actually present in the source file.
st.caption("Built with β€οΈ using AI")