def __haar_detections(self, image, scale_factor=1.06):
    """Detect objects in an image using the Haar Cascade classifier.

    The classifier already removes overlapping detections and objects
    that are detected too small. Returned detections are a tuple with
    3 values:

        - a list of N rectangles;
        - a list of N times the same int value (meaning unclear —
          possibly the rejection level; TODO confirm against OpenCV docs);
        - a list of N weight values.

    Notice that the scale factor has a big impact on the estimation time:

        - with 1.04 => 5.3x real time,
        - with 1.10 => 2.2x real time, but -6% of detected objects
          (relevant ones?)

    :param image: (sppasImage) The image to search for objects
    :param scale_factor: (float) How much the image size is reduced at
        each image scale
    :return: (numpy arrays) The raw detections of detectMultiScale3
    :raise: sppasError if the OpenCV detection failed

    """
    # Ignore candidate objects smaller than min_ratio of the image size.
    w, h = image.size()
    min_w = int(float(w) * self.get_min_ratio())
    min_h = int(float(h) * self.get_min_ratio())

    try:
        detections = self._detector.detectMultiScale3(
            image,
            scaleFactor=scale_factor,
            minNeighbors=3,
            minSize=(min_w, min_h),
            flags=0,
            outputRejectLevels=True)
    except cv2.error as e:
        # Reset any previously stored coordinates: the detection failed.
        self._coords = list()
        # Chain the original OpenCV error so its traceback is preserved.
        raise sppasError('HaarCascadeClassifier detection failed: {}'.format(str(e))) from e

    return detections