import json
import os

import spaces
import gradio as gr
from huggingface_hub import InferenceClient
from pydantic import BaseModel

HF_TOKEN = os.getenv("HF_TOKEN")
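
# Pydantic model describing the fields we want the LLM to return.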
class PaperAnalysis(BaseModel):
    title: str
    abstract_summary: str
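
# Ask the chat-completions endpoint for JSON that conforms to the schema above.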
response_format = {
    "type": "json_schema",
    "json_schema": {
        "name": "PaperAnalysis",
        "schema": PaperAnalysis.model_json_schema(),
        "strict": True,
    },
}
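
# provider="auto" routes the request to an available inference provider for the model.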
client = InferenceClient(
    provider="auto",
    api_key=HF_TOKEN,
)
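
# @spaces.GPU reserves a ZeroGPU slot for up to `duration` seconds per call on Hugging Face Spaces.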
@spaces.GPU(duration=60)
def extract_info(paper_text: str):
    # Return empty strings so both output boxes clear on blank input.
    if not paper_text.strip():
        return "", ""
    messages = [
        {"role": "system", "content": "Extract the paper title and summarize its abstract."},
        {"role": "user", "content": paper_text},
    ]
    resp = client.chat.completions.create(
        model="meta-llama/Llama-3.2-1B-Instruct",
        messages=messages,
        response_format=response_format,
    )
    # With a json_schema response_format, the message content is a JSON string.
    parsed = json.loads(resp.choices[0].message.content)
    # Return a tuple so Gradio maps the values onto the two output textboxes.
    return parsed.get("title", ""), parsed.get("abstract_summary", "")
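
# Gradio UI: paper text on the left, extracted title and summary on the right, plus a trigger button.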
with gr.Blocks() as demo:
    gr.Markdown("# 🎓 Paper Analysis Tool")
    with gr.Row():
        paper_input = gr.Textbox(label="Paper Text (include Title/Abstract)", lines=10)
        with gr.Column():
            title_out = gr.Textbox(label="Title", lines=1)
            summary_out = gr.Textbox(label="Abstract Summary", lines=5)
    analyze_btn = gr.Button("Extract Info")
    analyze_btn.click(fn=extract_info, inputs=paper_input, outputs=[title_out, summary_out])
if __name__ == "__main__":
    demo.launch()