import os

import spaces  # Hugging Face Spaces helper; typically used for the @spaces.GPU decorator on ZeroGPU
import gradio as gr
import torch  # needed for torch.bfloat16 below
from huggingface_hub import login as hf_login
from pydantic import BaseModel
from vllm import LLM

# Authenticate with the Hugging Face Hub so gated models such as Llama can be downloaded.
hf_login(token=os.getenv("HF_TOKEN"))
class PatientRecord(BaseModel):
    """Schema describing the patient information fields of interest."""

    life_style: str
    family_history: str
    social_history: str
    medical_surgical_history: str
    signs_symptoms: str
    comorbidities: str
    diagnostic_techniques_procedures: str
    diagnosis: str
    laboratory_values: str
    pathology: str
    pharmacological_therapy: str
    interventional_therapy: str
    patient_outcome_assessment: str
    age: str
    gender: str
model_name = "meta-llama/Llama-3.2-1B-Instruct"

# Load the model with vLLM. bfloat16 keeps memory usage low; enforce_eager skips
# CUDA graph capture, which shortens startup at some cost to throughput.
model = LLM(
    model=model_name,
    dtype=torch.bfloat16,
    trust_remote_code=True,
    enforce_eager=True,
)
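
# The file defines PatientRecord and loads the model but does not show how they are
# connected. The helper below is a hypothetical sketch (not part of the original app)
# of one way to wire them together: vLLM's guided decoding constrains generation to
# the PatientRecord JSON schema. The function name, prompt, and sampling settings are
# assumptions for illustration, and this relies on a vLLM version with structured-output
# (guided decoding) support.
from vllm import SamplingParams
from vllm.sampling_params import GuidedDecodingParams


def extract_patient_record(paper_text: str) -> dict:
    """Sketch: ask the model for a PatientRecord-shaped JSON summary of the paper."""
    prompt = (
        "Extract the patient information from the following paper as JSON.\n\n"
        f"{paper_text}\n\nJSON:"
    )
    sampling_params = SamplingParams(
        max_tokens=1024,
        temperature=0.0,
        guided_decoding=GuidedDecodingParams(json=PatientRecord.model_json_schema()),
    )
    outputs = model.generate([prompt], sampling_params)
    # Guided decoding keeps the output parseable against the schema.
    record = PatientRecord.model_validate_json(outputs[0].outputs[0].text)
    return record.model_dump()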
with gr.Blocks() as demo:
    gr.Markdown("# 🎓 Paper Analysis Tool")
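    # Hypothetical wiring (not in the original file): a minimal interface that feeds the
    # paper text to the extract_patient_record sketch above and displays the result.
    paper_input = gr.Textbox(label="Paper text", lines=12, placeholder="Paste the paper or case report here")
    analyze_button = gr.Button("Analyze")
    record_output = gr.JSON(label="Extracted patient record")
    analyze_button.click(extract_patient_record, inputs=paper_input, outputs=record_output)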

if __name__ == "__main__":
    demo.launch()