Update app.py
Browse files
app.py
CHANGED
@@ -1,59 +1,40 @@
|
|
1 |
import os
|
2 |
import spaces
|
3 |
import gradio as gr
|
4 |
-
from huggingface_hub import
|
5 |
from pydantic import BaseModel
|
|
|
6 |
|
7 |
-
|
8 |
|
9 |
-
class
|
10 |
-
|
11 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
12 |
|
13 |
-
|
14 |
-
"type": "json_schema",
|
15 |
-
"json_schema": {
|
16 |
-
"name": "PaperAnalysis",
|
17 |
-
"schema": PaperAnalysis.model_json_schema(),
|
18 |
-
"strict": True,
|
19 |
-
},
|
20 |
-
}
|
21 |
|
22 |
-
|
23 |
-
|
24 |
-
|
|
|
|
|
25 |
)
|
26 |
|
27 |
-
@spaces.GPU(duration=60)
def extract_info(paper_text: str):
    """Extract the paper title and an abstract summary from raw paper text.

    Args:
        paper_text: Full text of the paper (should include title/abstract).

    Returns:
        A ``(title, abstract_summary)`` tuple of strings, matching the two
        Gradio output components ``[title_out, summary_out]`` wired to this
        handler. (The original returned a dict, which Gradio does not map
        onto positional output components.)
    """
    import json

    # Empty input: return blank outputs without spending GPU time.
    if not paper_text.strip():
        return "", ""
    messages = [
        {"role": "system", "content": "Extract the paper title and summarize its abstract."},
        {"role": "user", "content": paper_text},
    ]
    resp = client.chat.completions.create(
        model="meta-llama/Llama-3.2-1B-Instruct",
        messages=messages,
        response_format=response_format,
    )
    # With a json_schema response_format the structured output arrives as a
    # JSON *string* in message.content; the original called .get() directly
    # on that string, which raises AttributeError. Parse it first.
    parsed = json.loads(resp.choices[0].message.content)
    return parsed.get("title", ""), parsed.get("abstract_summary", "")
|
47 |
-
|
48 |
# ---------------------------------------------------------------------------
# UI: paper text in on the left, extracted title + summary out on the right.
# ---------------------------------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown("# 🎓 Paper Analysis Tool")
    with gr.Row():
        paper_input = gr.Textbox(label="Paper Text (include Title/Abstract)", lines=10)
        with gr.Column():
            title_out = gr.Textbox(label="Title", lines=1)
            summary_out = gr.Textbox(label="Abstract Summary", lines=5)
    extract_button = gr.Button("Extract Info")
    extract_button.click(
        extract_info,
        paper_input,
        [title_out, summary_out],
    )

if __name__ == "__main__":
    demo.launch()
|
|
|
1 |
import os
|
2 |
import spaces
|
3 |
import gradio as gr
|
4 |
+
from huggingface_hub import login as hf_login
|
5 |
from pydantic import BaseModel
|
6 |
+
from vllm import LLM
|
7 |
|
8 |
+
# Authenticate against the Hugging Face Hub so gated models (e.g. Llama)
# can be downloaded. Only attempt login when a token is configured:
# login(token=None) falls back to an interactive prompt, which crashes
# in a headless environment such as a Space.
_hf_token = os.getenv("HF_TOKEN")
if _hf_token:
    hf_login(token=_hf_token)
|
9 |
|
10 |
+
class PatientRecord(BaseModel):
    """Structured-extraction schema for a clinical case report.

    Every field is free text extracted from the source document; there are
    no nested models or optional fields, so missing information is
    presumably represented by an empty string — TODO confirm against the
    prompt/response_format that fills this schema.
    """

    # Patient background
    life_style: str
    family_history: str
    social_history: str
    medical_surgical_history: str
    # Presentation
    signs_symptoms: str
    comorbidities: str
    # Work-up and findings
    diagnostic_techniques_procedures: str
    diagnosis: str
    laboratory_values: str
    pathology: str
    # Treatment
    pharmacological_therapy: str
    interventional_therapy: str
    # Outcome
    patient_outcome_assessment: str
    # Demographics (kept as strings, e.g. to allow ranges like "40s")
    age: str
    gender: str
|
26 |
|
27 |
+
# Model used for extraction. The vLLM engine is constructed once at import
# time so every request reuses the same loaded weights.
model_name = "meta-llama/Llama-3.2-1B-Instruct"

model = LLM(
    model=model_name,
    # Pass the dtype by name: the original `dtype=torch.bfloat16` referenced
    # `torch` without importing it, raising NameError at startup. vLLM
    # accepts the string form and resolves it to torch.bfloat16 internally.
    dtype="bfloat16",
    trust_remote_code=True,
    # Skip CUDA-graph capture for faster startup (at some steady-state cost).
    enforce_eager=True,
)
|
35 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
36 |
# ---------------------------------------------------------------------------
# UI shell. NOTE(review): the heading still says "Paper Analysis Tool" even
# though the schema above models patient records — confirm intended title.
# ---------------------------------------------------------------------------
demo = gr.Blocks()
with demo:
    gr.Markdown("# 🎓 Paper Analysis Tool")

if __name__ == "__main__":
    demo.launch()
|