wjm55 committed on
Commit
ae1b016
·
1 Parent(s): 4160d5b

fixing app

Browse files
Files changed (2) hide show
  1. app.py +7 -5
  2. test.py +4 -0
app.py CHANGED
@@ -15,14 +15,15 @@ def init_model(model_id: str):
15
  }
16
 
17
  if model_id in MODEL_OPTIONS:
18
- os.makedirs("models", exist_ok=True)
19
  path = hf_hub_download(
20
  repo_id="biglam/medieval-manuscript-yolov11",
21
- filename=MODEL_OPTIONS[model_id]
22
  )
23
- local_path = os.path.join("models", path)
24
  # Initialize and return model
25
  model = YOLO(path)
 
26
  return model
27
  else:
28
  raise ValueError(f"Model {model_id} not found")
@@ -39,15 +40,16 @@ async def predict(image: UploadFile,
39
  conf: float = 0.25,
40
  iou: float = 0.7
41
  ):
 
42
  # Initialize model for each request
43
  model = init_model(model_id)
44
 
45
  # Open image from uploaded file
46
  image = Image.open(image.file)
47
-
48
  # Run inference with the PIL Image
49
  results = model.predict(source=image, conf=conf, iou=iou)
50
-
51
  # Extract detection results
52
  result = results[0]
53
  detections = []
 
15
  }
16
 
17
  if model_id in MODEL_OPTIONS:
18
+ print(MODEL_OPTIONS[model_id])
19
  path = hf_hub_download(
20
  repo_id="biglam/medieval-manuscript-yolov11",
21
+ filename=MODEL_OPTIONS[model_id],
22
  )
23
+ print(path)
24
  # Initialize and return model
25
  model = YOLO(path)
26
+ print("Model initialized")
27
  return model
28
  else:
29
  raise ValueError(f"Model {model_id} not found")
 
40
  conf: float = 0.25,
41
  iou: float = 0.7
42
  ):
43
+ print(model_id, conf, iou)
44
  # Initialize model for each request
45
  model = init_model(model_id)
46
 
47
  # Open image from uploaded file
48
  image = Image.open(image.file)
49
+ print("Image opened")
50
  # Run inference with the PIL Image
51
  results = model.predict(source=image, conf=conf, iou=iou)
52
+ print("Inference done")
53
  # Extract detection results
54
  result = results[0]
55
  detections = []
test.py CHANGED
@@ -21,6 +21,10 @@ with open(image_path, 'rb') as f:
21
  response = requests.post(local_app + ':7860/predict',
22
  files=files,
23
  params=params)
 
 
 
 
24
 
25
  # Print the results
26
  print(response.json())
 
21
  response = requests.post(local_app + ':7860/predict',
22
  files=files,
23
  params=params)
24
+ print(response.json())
25
+ response = requests.post(hf_app + ':7860/predict',
26
+ files=files,
27
+ params=params)
28
 
29
  # Print the results
30
  print(response.json())