serenarolloh committed
Commit f291dba · verified · 1 parent: cadf158

Update endpoints.py

Files changed (1)
  1. endpoints.py +3 -71
endpoints.py CHANGED
@@ -1,18 +1,10 @@
-from fastapi import FastAPI, APIRouter, File, UploadFile, Form
+from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
-from typing import Optional
-from PIL import Image
-import urllib.request
-from io import BytesIO
-import json
-from config import settings
-import utils
 from routers import inference, training
-from routers.donut_inference import process_document_donut
 from huggingface_hub import login
+from config import settings
 import os
-
-# Login using Hugging Face token from environment
+# login(settings.huggingface_key)
 login(os.getenv("HUGGINGFACE_KEY"))
 
 app = FastAPI(openapi_url="/api/v1/sparrow-ml/openapi.json", docs_url="/api/v1/sparrow-ml/docs")
@@ -28,66 +20,6 @@ app.add_middleware(
 app.include_router(inference.router, prefix="/api-inference/v1/sparrow-ml", tags=["Inference"])
 app.include_router(training.router, prefix="/api-training/v1/sparrow-ml", tags=["Training"])
 
-router = APIRouter()
-
-def count_values(obj):
-    if isinstance(obj, dict):
-        return sum(count_values(v) for v in obj.values())
-    elif isinstance(obj, list):
-        return sum(count_values(i) for i in obj)
-    else:
-        return 1
-
-@router.post("/inference")
-async def run_inference(
-    file: Optional[UploadFile] = File(None),
-    image_url: Optional[str] = Form(None),
-    shipper_id: int = Form(...),
-    model_in_use: str = Form('donut')
-):
-    result = []
-
-    # Dynamically select model
-    model_url = settings.get_model_url(shipper_id)
-    model_name = model_url.replace("https://huggingface.co/spaces/", "")
-    print(f"[DEBUG] Using model: {model_name}")
-
-    if file:
-        if file.content_type not in ["image/jpeg", "image/jpg"]:
-            return {"error": "Invalid file type. Only JPG images are allowed."}
-
-        image = Image.open(BytesIO(await file.read()))
-        result, processing_time = process_document_donut(image, model_url)
-        utils.log_stats(settings.inference_stats_file, [processing_time, count_values(result), file.filename, model_name])
-        print(f"Processing time: {processing_time:.2f} seconds")
-
-    elif image_url:
-        with urllib.request.urlopen(image_url) as url:
-            image = Image.open(BytesIO(url.read()))
-
-        result, processing_time = process_document_donut(image, model_url)
-        file_name = image_url.split("/")[-1]
-        utils.log_stats(settings.inference_stats_file, [processing_time, count_values(result), file_name, model_name])
-        print(f"Processing time inference: {processing_time:.2f} seconds")
-
-    else:
-        result = {"info": "No input provided"}
-
-    return result
-
-@router.get("/statistics")
-async def get_statistics():
-    file_path = settings.inference_stats_file
-    if os.path.exists(file_path):
-        with open(file_path, 'r') as file:
-            try:
-                content = json.load(file)
-            except json.JSONDecodeError:
-                content = []
-    else:
-        content = []
-    return content
-
 @app.get("/")
 async def root():
     return {"message": "Senga delivery notes inferencing"}