Kushalmanda committed (verified)
Commit a8b1887 · 1 Parent(s): 1aaad69

Update app.py

Files changed (1): app.py (+17 -13)
app.py CHANGED
@@ -6,7 +6,7 @@ import numpy as np
 # Load the YOLOv5 model (adjust the path to your model if needed)
 model = torch.hub.load('ultralytics/yolov5', 'custom', path='yolov5s.pt') # Adjust if needed
 
-# Example function to calculate materials based on detected areas
+# Define the function to calculate materials based on detected areas
 def calculate_materials(detected_objects, image_width, image_height):
     materials = {
         "cement": 0,
@@ -16,16 +16,20 @@ def calculate_materials(detected_objects, image_width, image_height):
 
     # Proportionality factors (simplified for this example, adjust based on real-world data)
     for obj in detected_objects:
-        # Calculate bounding box area in real-world units (cm or meters, as per the blueprint size)
-        x1, y1, x2, y2 = obj['bbox'] # Coordinates of the bounding box
-        width = (x2 - x1) * image_width # Convert to real-world width
-        height = (y2 - y1) * image_height # Convert to real-world height
-
-        # Calculate the area (length × width)
-        area = width * height # Simplified area calculation
-
-        print(f"Detected {obj['name']} with area {area} cm²") # Debugging output
+        # Get the bounding box coordinates
+        x1, y1, x2, y2 = obj['bbox']
+
+        # Convert the bounding box coordinates to real-world units based on image size and blueprint size
+        width = (x2 - x1) * image_width # Convert to real-world width (in cm or meters)
+        height = (y2 - y1) * image_height # Convert to real-world height (in cm or meters)
 
+        # Calculate the area (length × width) in real-world units
+        area = width * height # cm² or m² based on the scale
+
+        # Debugging output to verify bounding box size
+        print(f"Detected {obj['name']} with area {area} cm²") # Adjust units based on the scale
+
+        # Example material estimation based on detected object type
         if obj['name'] == 'wall': # Example: For 'wall' objects
             materials['cement'] += area * 0.1 # Cement estimation (in kg)
             materials['bricks'] += area * 10 # Bricks estimation
@@ -46,7 +50,7 @@ def predict_image(image):
     # Get the detected objects as pandas dataframe (xywh format)
     detected_objects = results.pandas().xywh[0] # First image in batch
 
-    # Print out the detection results for debugging purposes
+    # Debugging output: Print out the detected objects for inspection
     print(f"Detected objects: {detected_objects}")
 
     # Set the confidence threshold (e.g., 0.5 means 50% confidence)
@@ -54,8 +58,8 @@ def predict_image(image):
     detected_objects = detected_objects[detected_objects['confidence'] > confidence_threshold]
 
     # Assume blueprint image size (in cm, adjust based on real-world image size)
-    image_width = 91 # Example width in cm (adjust this)
-    image_height = 61 # Example height in cm (adjust this)
+    image_width = 91 # Example width in cm (adjust this to the real-world blueprint size)
+    image_height = 61 # Example height in cm (adjust this to the real-world blueprint size)
 
     # Process the detected objects and calculate materials
     detected_objects_list = []
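
For context, the logic the new lines in calculate_materials() implement is: normalise each detection's bounding box by the image size in pixels, scale it to the blueprint's real-world dimensions, take the area, and multiply by per-material factors. Below is a minimal, self-contained sketch of that flow under a few assumptions: it reads detections from results.pandas().xyxy[0] (whose columns are xmin, ymin, xmax, ymax, confidence, class, name) rather than a 'bbox' key, the input file blueprint.jpg is a placeholder, and the 91 cm x 61 cm blueprint size, the 0.5 confidence threshold and the 0.1 / 10 material factors are the example values from app.py.

import torch
from PIL import Image

BLUEPRINT_W_CM, BLUEPRINT_H_CM = 91, 61  # example blueprint size from app.py

def estimate_materials(detections, img_w_px, img_h_px):
    # Sketch of calculate_materials(): scale pixel boxes to cm and tally materials.
    materials = {"cement": 0.0, "bricks": 0.0}
    for _, det in detections.iterrows():
        # Normalise the pixel bounding box, then scale to real-world centimetres
        width_cm = (det["xmax"] - det["xmin"]) / img_w_px * BLUEPRINT_W_CM
        height_cm = (det["ymax"] - det["ymin"]) / img_h_px * BLUEPRINT_H_CM
        area_cm2 = width_cm * height_cm
        print(f"Detected {det['name']} with area {area_cm2:.1f} cm²")
        if det["name"] == "wall":
            materials["cement"] += area_cm2 * 0.1  # example factor: kg of cement per cm²
            materials["bricks"] += area_cm2 * 10   # example factor: bricks per cm²
    return materials

model = torch.hub.load("ultralytics/yolov5", "custom", path="yolov5s.pt")
image = Image.open("blueprint.jpg")        # placeholder input image
results = model(image)
detections = results.pandas().xyxy[0]      # corner-format detections for image 0
detections = detections[detections["confidence"] > 0.5]  # same threshold as app.py
print(estimate_materials(detections, *image.size))       # PIL size is (width, height)

Note that the class name 'wall' and the material factors are placeholders, and a stock yolov5s.pt checkpoint will not detect blueprint elements; a custom-trained model is assumed, as the commit's comment on the torch.hub.load line already suggests.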