Skip to content

Commit 78f27fc

Browse files
committed
auto crop calc
1 parent be7ed4d commit 78f27fc

1 file changed

Lines changed: 127 additions & 1 deletion

File tree

patched_yolo_infer/functions_extra.py

Lines changed: 127 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
import random
44
import numpy as np
55
import matplotlib.pyplot as plt
6+
from ultralytics import YOLO
67

78

89
def visualize_results_usual_yolo_inference(
@@ -459,4 +460,129 @@ def create_masks_from_polygons(polygons, image):
459460
# Add the mask to the list
460461
masks.append(mask)
461462

462-
return masks
463+
return masks
464+
465+
466+
def basic_crop_size_calculation(width, height):
    """
    Pick a crop grid and overlap for an image based purely on its resolution.

    Resolutions are bucketed into predefined tiers (VGA, PAL, FHD, 4K, 8K);
    each tier maps to a divisor applied to both dimensions and a fixed
    overlap value, so larger images are split into more, smaller crops.

    Parameters:
        width (int): Image width in pixels.
        height (int): Image height in pixels.

    Returns:
        tuple: (crop_shape_x, crop_shape_y, crop_overlap_x, crop_overlap_y) —
        crop size and overlap along each axis.
    """
    pixel_count = width * height

    # VGA or smaller: process the whole image as a single crop, no overlap.
    if pixel_count <= 640 * 480:
        return width, height, 0, 0

    # (exclusive pixel-count upper bound, dimension divisor, overlap) per tier,
    # ordered from smallest resolution to largest.
    tiers = (
        (720 * 576, 1.25, 50),
        (1920 * 1080, 2, 40),
        (3840 * 2160, 3, 30),
        (7680 * 4320, 4, 25),
    )
    for upper_bound, divisor, overlap in tiers:
        if pixel_count < upper_bound:
            return int(width / divisor), int(height / divisor), overlap, overlap

    # 8K and beyond: split each axis into five.
    return width // 5, height // 5, 25, 25
504+
505+
506+
def auto_calculation_crop_values(image, type="network_analysis", model=None, classes_list=None):
    """
    Automatically calculate the optimal crop size and overlap for an image.

    Two modes are supported: "image_size_analysis" derives the crop grid purely
    from the image resolution; "network_analysis" (default) runs a YOLO detector
    and sizes crops relative to the largest detected object so that objects fit
    comfortably inside a crop.

    Parameters:
        image (numpy.ndarray): The input image (H x W x C).
        type (str): Analysis mode, "image_size_analysis" or "network_analysis".
            Default is "network_analysis". (Name kept for backward
            compatibility even though it shadows the builtin.)
        model (YOLO): YOLO model used for detection. If None, a default
            yolov8m model is loaded. Default is None.
        classes_list (list): Class indices to consider during detection.
            If None, all classes are considered. Default is None.

    Returns:
        tuple: (crop_shape_x, crop_shape_y, crop_overlap_x, crop_overlap_y) —
        crop size and overlap along each axis.
    """
    height, width = image.shape[:2]

    # Resolution-only mode: no detector involved.
    if type == 'image_size_analysis':
        return basic_crop_size_calculation(width, height)

    # Network mode: load a default detector if the caller did not supply one.
    if model is None:
        model = YOLO("yolov8m.pt")

    result = model.predict(image, conf=0.25, iou=0.75, classes=classes_list, verbose=False)

    # Nothing detected: fall back to the resolution-based grid.
    if len(result[0].boxes) == 0:
        return basic_crop_size_calculation(width, height)

    # Largest detected object extent along each axis.
    max_width = 0
    max_height = 0
    for box in result[0].boxes:
        _, _, box_width, box_height = box.xywh[0].tolist()
        max_width = max(max_width, box_width)
        max_height = max(max_height, box_height)

    # BUG FIX: the original used `box_height` here — the height of whichever
    # box the loop visited LAST — instead of the accumulated `max_height`,
    # making the crop size depend on detection order.
    max_value = max(max_height, max_width)

    # Size the crop as a multiple of the largest object, allocating the larger
    # factor to the image's longer axis (2.5x both ways for square images).
    if width > height:
        crop_shape_x = int(max_value * 3)
        crop_shape_y = int(max_value * 2)
    elif width < height:
        crop_shape_x = int(max_value * 2)
        crop_shape_y = int(max_value * 3)
    else:
        crop_shape_x = int(max_value * 2.5)
        crop_shape_y = int(max_value * 2.5)

    # Overlap as a percentage of the crop size, scaled (1.2x margin) by the
    # largest object so neighbouring crops can each contain it whole.
    crop_overlap_x = int(max_width / crop_shape_x * 1.2 * 100)
    crop_overlap_y = int(max_height / crop_shape_y * 1.2 * 100)

    # Cap overlap at 70% to keep the number of crops bounded.
    crop_overlap_x = min(crop_overlap_x, 70)
    crop_overlap_y = min(crop_overlap_y, 70)

    # Limit the grid to at most 7 crops along each axis.
    if height // crop_shape_y > 7:
        crop_shape_y = height // 7
    if width // crop_shape_x > 7:
        crop_shape_x = width // 7

    return crop_shape_x, crop_shape_y, crop_overlap_x, crop_overlap_y

0 commit comments

Comments
 (0)