Skip to content

Commit a4f3855

Browse files
committed
auto_calculate_crop_values
1 parent 78f27fc commit a4f3855

2 files changed

Lines changed: 19 additions & 18 deletions

File tree

patched_yolo_infer/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,8 @@
33
get_crops,
44
visualize_results,
55
create_masks_from_polygons,
6+
basic_crop_size_calculation,
7+
auto_calculate_crop_values
68
)
79

810
from .nodes.MakeCropsDetectThem import MakeCropsDetectThem

patched_yolo_infer/functions_extra.py

Lines changed: 17 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -503,7 +503,7 @@ def basic_crop_size_calculation(width, height):
503503
return crop_shape_x, crop_shape_y, crop_overlap_x, crop_overlap_y
504504

505505

def auto_calculate_crop_values(image, type="network_analysis", model=None, classes_list=None):
    """
    Automatically calculate the optimal crop size and overlap for an image.

    In "image_size_analysis" mode the crop geometry is derived purely from the
    image dimensions.  In "network_analysis" mode (default) a YOLO model is run
    once on the full image and the crop geometry is scaled to the largest
    detected object, so every object fits comfortably inside a single crop.

    Parameters:
        image (numpy.ndarray): The input BGR image.
        type (str): The type of analysis to perform. Can be "image_size_analysis"
            or "network_analysis". Default is "network_analysis".
            (Parameter name kept for backward compatibility even though it
            shadows the builtin ``type``.)
        model (YOLO): The YOLO model to use for object detection. If None, a
            default yolov8m model is loaded.
        classes_list (list | None): Class ids to restrict detection to, or
            None to detect all classes.

    Returns:
        tuple: (crop_shape_x, crop_shape_y, crop_overlap_x, crop_overlap_y) --
        crop sizes in pixels and overlaps in percent.
    """
    height, width = image.shape[:2]

    # If the type is 'image_size_analysis', calculate crop size based on
    # image dimensions only -- no detection pass needed.
    if type == 'image_size_analysis':
        return basic_crop_size_calculation(width, height)

    # If no model is provided, load a default YOLO model.
    if model is None:
        model = YOLO("yolov8m.pt")

    # Perform a single full-image object-detection pass.
    result = model.predict(image, conf=0.25, iou=0.75, classes=classes_list, verbose=False)

    # If no objects are detected, fall back to the geometric calculation.
    if len(result[0].boxes) == 0:
        return basic_crop_size_calculation(width, height)

    # Iterate through detected boxes to find the maximum width and height.
    max_width = 0
    max_height = 0
    for box in result[0].boxes:
        _, _, box_width, box_height = box.xywh[0].tolist()
        max_width = max(max_width, box_width)
        max_height = max(max_height, box_height)

    # Determine the maximum dimension (width or height) of the detected objects.
    # BUGFIX: the original computed max(box_height, max_width), where
    # `box_height` is the height of the *last* box left over from the loop --
    # it must use the accumulated `max_height`.
    max_value = max(max_height, max_width)

    # Adjust crop size based on the maximum detected object dimension,
    # stretching the crop along the image's longer side.
    if width > height:
        crop_shape_x = int(max_value * 3)
        crop_shape_y = int(max_value * 2)
    elif width < height:
        crop_shape_x = int(max_value * 2)
        crop_shape_y = int(max_value * 3)
    else:
        crop_shape_x = int(max_value * 2.5)
        crop_shape_y = int(max_value * 2.5)

    # Overlap (percent) proportional to how much of a crop the largest object
    # occupies, with a 20% safety margin.
    crop_overlap_x = int(max_width / crop_shape_x * 1.2 * 100)
    crop_overlap_y = int(max_height / crop_shape_y * 1.2 * 100)

    # Ensure the overlap does not exceed 70%.
    crop_overlap_x = min(crop_overlap_x, 70)
    crop_overlap_y = min(crop_overlap_y, 70)

    # Ensure the number of crops does not exceed 7 in each direction.
    if height // crop_shape_y > 7:
        crop_shape_y = height // 7
    if width // crop_shape_x > 7:
        crop_shape_x = width // 7

    return crop_shape_x, crop_shape_y, crop_overlap_x, crop_overlap_y

0 commit comments

Comments
 (0)