Deprecating C++ code. Now, we depend only on bob.extension

parent 85edd3cd
Pipeline #55154 failed with stage
in 10 minutes and 48 seconds
# import Libraries of other lib packages
import bob.ip.base
import bob.io.base
import bob.learn.boosting
from . import version
from .version import module as __version__
from ._library import FeatureExtractor, BoundingBox, prune_detections, group_detections, overlapping_detections
from .detector import *
from .train import *
from .detect import default_cascade, average_detections, best_detection, detect_single_face, detect_all_faces
def get_config():
  """Returns a string containing the configuration information.

  **Returns:**

  ``config`` : str
    The configuration string of this package, as generated by
    :py:func:`bob.extension.get_config` from the external dependencies
    listed in :py:attr:`version.externals`.
  """
  # imported locally to avoid a hard dependency at module import time
  import bob.extension
  return bob.extension.get_config(__name__, version.externals)
# gets sphinx autodoc done right - don't remove it
# (exports every public name defined or imported above)
__all__ = [_ for _ in dir() if not _.startswith('_')]
This diff is collapsed.
#include "features.h"
#include <bob.core/logging.h>
boost::shared_ptr<bob::ip::facedetect::BoundingBox> bob::ip::facedetect::BoundingBox::overlap(const BoundingBox& other) const{
  // The overlap is the intersection rectangle of the two boxes: its top-left
  // corner is the maximum of both top-left corners, its bottom-right corner
  // the minimum of both bottom-right corners.
  const double i_top = std::max(top(), other.top());
  const double i_left = std::max(left(), other.left());
  const double i_bottom = std::min(bottom(), other.bottom());
  const double i_right = std::min(right(), other.right());
  // note: for disjoint boxes the resulting height/width are negative
  return boost::shared_ptr<BoundingBox>(new BoundingBox(i_top, i_left, i_bottom - i_top, i_right - i_left));
}
double bob::ip::facedetect::BoundingBox::similarity(const BoundingBox& other) const{
// compute intersection rectangle
double t = std::max(top(), other.top()),
b = std::min(bottom(), other.bottom()),
l = std::max(left(), other.left()),
r = std::min(right(), other.right());
// no overlap?
if (l >= r || t >= b) return 0.;
// compute overlap
double intersection = (b-t) * (r-l);
return intersection / (area() + other.area() - intersection);
}
// a detection is stored as (prediction weight, original box index)
typedef std::pair<double, int> indexer;

// strict weak ordering that sorts detections by descending weight
bool gt(const indexer& lhs, const indexer& rhs){
  return rhs.first < lhs.first;
}
// Non-maximum suppression of detections.
// Keeps the highest-weighted bounding boxes; a candidate is dropped when its
// Jaccard similarity to any already-kept box exceeds `threshold`. Kept boxes
// and their weights are written to `pruned_boxes` / `pruned_weights`, sorted
// by descending weight. If `number_of_detections` > 0, at most that many
// detections are kept.
// NOTE(review): pruned_boxes is appended to, not cleared — presumably callers
// pass an empty vector; verify at call sites.
void bob::ip::facedetect::pruneDetections(const std::vector<boost::shared_ptr<BoundingBox>>& boxes, const blitz::Array<double, 1>& weights, double threshold, std::vector<boost::shared_ptr<BoundingBox>>& pruned_boxes, blitz::Array<double, 1>& pruned_weights, const int number_of_detections){
  // sort boxes by descending weight, remembering each box' original index
  std::vector<indexer> sorted(boxes.size());
  for (int i = boxes.size(); i--;){
    sorted[i] = std::make_pair(weights(i), i);
  }
  std::sort(sorted.begin(), sorted.end(), gt);
  std::list<indexer> pruned;
  std::vector<indexer>::const_iterator sit;
  std::list<indexer>::const_iterator pit;
  if (threshold >= 1.){
    // for overlap == 1 (or larger), all detections will be returned, but sorted
    pruned.insert(pruned.end(), sorted.begin(), sorted.end());
  } else {
    // prune detections (attention, this is O(n^2)!)
    for (sit = sorted.begin(); sit != sorted.end(); ++sit){
      // check the candidate against every box kept so far
      for (pit = pruned.begin(); pit != pruned.end(); ++pit){
        if (boxes[pit->second]->similarity(*boxes[sit->second]) > threshold) break;
      }
      if (pit == pruned.end()){
        // no kept box overlaps too strongly -> keep this detection as well
        pruned.push_back(*sit);
        if (number_of_detections > 0 && pruned.size() == (unsigned)number_of_detections){
          // the requested number of detections has been reached
          break;
        }
      }
    }
  }
  // fill pruned boxes
  pruned_boxes.reserve(pruned.size());
  pruned_weights.resize(pruned.size());
  int i = 0;
  for (pit = pruned.begin(); pit != pruned.end(); ++pit, ++i){
    pruned_boxes.push_back(boxes[pit->second]);
    pruned_weights(i) = pit->first;
  }
  // done.
}
// Groups detections by mutual overlap.
// Boxes are processed by descending weight; each box joins the existing group
// whose first (i.e., highest-weighted) member it overlaps best — provided the
// Jaccard similarity exceeds `overlap_threshold` — or starts a new group.
// Boxes with a weight below `weight_threshold` are ignored, and only groups
// with at least `box_count_threshold` members are reported in
// `grouped_boxes` / `grouped_weights`.
void bob::ip::facedetect::groupDetections(const std::vector<boost::shared_ptr<BoundingBox>>& boxes, const blitz::Array<double, 1>& weights, double overlap_threshold, double weight_threshold, unsigned box_count_threshold, std::vector<std::vector<boost::shared_ptr<BoundingBox>>>& grouped_boxes, std::vector<blitz::Array<double, 1>>& grouped_weights){
  // nothing to group; log an error and return with empty outputs
  if (boxes.empty()){
    bob::core::error << "Cannot find any box to compute overlaps" << std::endl;
    return;
  }
  // sort boxes by descending weight, remembering each box' original index
  std::vector<indexer> sorted(boxes.size());
  for (int i = boxes.size(); i--;){
    sorted[i] = std::make_pair(weights(i), i);
  }
  std::sort(sorted.begin(), sorted.end(), gt);
  // compute all overlapping detections
  // **this is O(n^2)!**
  std::list<std::list<indexer> > collected;
  std::list<indexer> best;
  best.push_back(sorted.front());
  collected.push_back(best);
  std::vector<indexer>::const_iterator sit = sorted.begin();
  std::list<std::list<indexer> >::iterator cit;
  for (++sit; sit != sorted.end(); ++sit){
    std::list<std::list<indexer> >::iterator best_cit = collected.end();
    double best_overlap = overlap_threshold, current_overlap;
    if (sit->first < weight_threshold)
      // we have reached our weight limit; do not consider more bounding boxes
      break;
    // check if there is a good-enough overlap with one of the already collected bounding boxes
    for (cit = collected.begin(); cit != collected.end(); ++cit){
      // overlap is measured against the group's first (highest-weighted) member
      current_overlap = boxes[sit->second]->similarity(*boxes[cit->front().second]);
      if (current_overlap > best_overlap){
        // get the bounding box with the highest overlap value
        best_overlap = current_overlap;
        best_cit = cit;
      }
    }
    if (best_cit == collected.end()){
      // no such overlap was found, add a new list of bounding boxes
      std::list<indexer> novel;
      novel.push_back(*sit);
      collected.push_back(novel);
    } else {
      // add the bounding box to the list with the highest overlap
      best_cit->push_back(*sit);
    }
  }
  // now, convert lists to resulting grouped vectors of vectors of bounding boxes
  grouped_boxes.reserve(collected.size());
  grouped_weights.reserve(collected.size());
  std::list<indexer>::const_iterator oit;
  for (cit = collected.begin(); cit != collected.end(); ++cit){
    // report only groups with sufficiently many detections
    if (cit->size() >= box_count_threshold){
      blitz::Array<double,1> current_weights(cit->size());
      std::vector<boost::shared_ptr<BoundingBox>> current_boxes(cit->size());
      int o = 0;
      for (oit = cit->begin(); oit != cit->end(); ++oit, ++o){
        current_weights(o) = oit->first;
        current_boxes[o] = boxes[oit->second];
      }
      grouped_boxes.push_back(current_boxes);
      grouped_weights.push_back(current_weights);
    }
  }
  // done.
}
// Returns the single best group of mutually overlapping detections.
// First, detections are clustered greedily by descending weight: a box joins
// the first group whose first (highest-weighted) member it overlaps with a
// Jaccard similarity above `overlap_threshold`, otherwise it starts a new
// group. Then, the group with the highest TOTAL (clamped-to-non-negative)
// weight is written to `overlapping_boxes` / `overlapping_weights`, sorted by
// descending weight.
// NOTE(review): overlapping_boxes is appended to, not cleared — presumably
// callers pass an empty vector; verify at call sites.
void bob::ip::facedetect::bestOverlap(const std::vector<boost::shared_ptr<BoundingBox>>& boxes, const blitz::Array<double, 1>& weights, double overlap_threshold, std::vector<boost::shared_ptr<BoundingBox>>& overlapping_boxes, blitz::Array<double, 1>& overlapping_weights){
  // nothing to cluster; log an error and return with empty outputs
  if (boxes.empty()){
    bob::core::error << "Cannot find any box to compute overlaps" << std::endl;
    return;
  }
  // sort boxes by descending weight, remembering each box' original index
  std::vector<indexer> sorted(boxes.size());
  for (int i = boxes.size(); i--;){
    sorted[i] = std::make_pair(weights(i), i);
  }
  std::sort(sorted.begin(), sorted.end(), gt);
  std::list<indexer> overlapping;
  std::list<indexer>::const_iterator oit;
  // compute all overlapping detections
  // **this is O(n^2)!**
  std::list<std::list<indexer> > collected;
  std::list<indexer> best;
  best.push_back(sorted.front());
  collected.push_back(best);
  std::vector<indexer>::const_iterator sit = sorted.begin();
  std::list<std::list<indexer> >::iterator cit;
  for (++sit; sit != sorted.end(); ++sit){
    // assign the detection to the FIRST group it overlaps sufficiently with
    for (cit = collected.begin(); cit != collected.end(); ++cit){
      if (boxes[sit->second]->similarity(*boxes[cit->front().second]) > overlap_threshold){
        cit->push_back(*sit);
        break;
      }
    }
    if (cit == collected.end()){
      // no overlapping group found; start a new one
      std::list<indexer> novel;
      novel.push_back(*sit);
      collected.push_back(novel);
    }
  }
  // now, take the list with the highest TOTAL detection value
  double best_total = 0.;
  for (cit = collected.begin(); cit != collected.end(); ++cit){
    double current_total = 0.;
    for (oit = cit->begin(); oit != cit->end(); ++oit){
      // negative weights do not reduce a group's total
      current_total += std::max(oit->first, 0.);
    }
    if (current_total > best_total){
      best_total = current_total;
      overlapping = *cit;
    }
  }
  // fill overlapping boxes
  overlapping_boxes.reserve(overlapping.size());
  overlapping_weights.resize(overlapping.size());
  int i = 0;
  for (oit = overlapping.begin(); oit != overlapping.end(); ++oit, ++i){
    overlapping_boxes.push_back(boxes[oit->second]);
    overlapping_weights(i) = oit->first;
  }
  // done.
}
This diff is collapsed.
#ifndef BOB_IP_FACEDETECT_FEATURES_H
#define BOB_IP_FACEDETECT_FEATURES_H
#include <bob.io.base/HDF5File.h>
#include <bob.ip.base/LBP.h>
#include <bob.ip.base/IntegralImage.h>
#include <bob.ip.base/Affine.h>
#include <bob.core/array_convert.h>
#include <boost/shared_ptr.hpp>
#include <limits.h>
namespace bob { namespace ip { namespace facedetect {
/**
 * A rectangular region in double-precision coordinates, stored as
 * top-left corner plus height and width. The area is cached at
 * construction time so that area() is a constant-time lookup.
 */
class BoundingBox{
  public:
    // constructor from top-left corner and size
    BoundingBox(double top, double left, double height, double width) : m_top(top), m_left(left), m_height(height), m_width(width), m_area(width*height) {}
    // copy constructor
    BoundingBox(const BoundingBox& other) : m_top(other.m_top), m_left(other.m_left), m_height(other.m_height), m_width(other.m_width), m_area(m_width*m_height) {}

    // create boundingbox by shifting by the given vertical/horizontal offset
    boost::shared_ptr<BoundingBox> shift(double y, double x) const {return boost::shared_ptr<BoundingBox>(new BoundingBox(m_top + y, m_left + x, m_height, m_width));}
    // create boundingbox by scaling (relative to the image origin)
    boost::shared_ptr<BoundingBox> scale(double scale) const {return boost::shared_ptr<BoundingBox>(new BoundingBox(m_top*scale, m_left*scale, m_height*scale, m_width*scale));}
    // create boundingbox by scaling based on the center of the bounding box
    boost::shared_ptr<BoundingBox> scaleCentered(double scale) const {return boost::shared_ptr<BoundingBox>(new BoundingBox(m_top - m_height/2.*(scale-1.), m_left - m_width/2.*(scale-1.), m_height*scale, m_width*scale));}
    // create a bounding box that is mirrored horizontally, adapted to the image width
    boost::shared_ptr<BoundingBox> mirrorX(int width) const {return boost::shared_ptr<BoundingBox>(new BoundingBox(m_top, width - m_width - m_left, m_height, m_width));}

    // computes the intersection rectangle of this and the given bounding box
    boost::shared_ptr<BoundingBox> overlap(const BoundingBox& other) const;

    // compares two bounding boxes for identity
    // (fixed: marked const so that const boxes can be compared)
    // NOTE(review): exact floating-point comparison — confirm this is intended
    bool operator == (const BoundingBox& other) const {return top() == other.top() && left() == other.left() && height() == other.height() && width() == other.width();}

    // query functions
    double top() const {return m_top;}
    double bottom() const {return m_top + m_height;}
    double left() const {return m_left;}
    double right() const {return m_left + m_width;}
    double height() const {return m_height;}
    double width() const {return m_width;}
    blitz::TinyVector<double,2> center() const {return blitz::TinyVector<double,2>(m_top + m_height/2, m_left + m_width/2.);}

    // the same coordinates, rounded to the nearest integer
    int itop() const {return irnd(top());}
    int ibottom() const {return irnd(bottom());}
    int ileft() const {return irnd(left());}
    int iright() const {return irnd(right());}
    int iheight() const {return irnd(height());}
    int iwidth() const {return irnd(width());}

    // the (cached) area of the bounding box
    double area() const{return m_area;}

    // Jaccard similarity between bounding boxes
    double similarity(const BoundingBox& other) const;

    // checks whether the box lies entirely inside an image of the given shape
    bool isValidFor(blitz::TinyVector<int,2> shape) const {return m_top >= 0 && m_top + m_height < shape[0] && m_left >= 0 && m_left + m_width < shape[1];}
    // checks whether the given (y, x) point lies inside the box
    bool contains(blitz::TinyVector<double,2> point) const {return point[0] >= m_top && point[1] >= m_left && point[0] < bottom() && point[1] < right();}

  private:
    // rounds to the nearest integer
    int irnd(double x) const {return (int)round(x);}

    double m_top, m_left, m_height, m_width;
    double m_area;
};
// groups mutually overlapping detections (see the implementation for details)
void groupDetections(const std::vector<boost::shared_ptr<BoundingBox>>& detections, const blitz::Array<double, 1>& predictions, double overlap_threshold, double weight_threshold, unsigned box_count_threshold, std::vector<std::vector<boost::shared_ptr<BoundingBox>>>& grouped_boxes, std::vector<blitz::Array<double, 1>>& grouped_weights);
// non-maximum suppression: removes detections that overlap too strongly with higher-weighted ones
void pruneDetections(const std::vector<boost::shared_ptr<BoundingBox>>& detections, const blitz::Array<double, 1>& predictions, double threshold, std::vector<boost::shared_ptr<BoundingBox>>& pruned_boxes, blitz::Array<double, 1>& pruned_weights, const int number_of_detections);
// returns the group of overlapping detections with the highest total prediction value
void bestOverlap(const std::vector<boost::shared_ptr<BoundingBox>>& detections, const blitz::Array<double, 1>& predictions, double threshold, std::vector<boost::shared_ptr<BoundingBox>>& pruned_boxes, blitz::Array<double, 1>& pruned_weights);
// Extracts (MB-)LBP features from image patches of a fixed size, using a set
// of LBP extractors evaluated at several offset positions inside the patch.
class FeatureExtractor{
  public:
    // creates an (empty) extractor for patches of the given size
    FeatureExtractor(const blitz::TinyVector<int,2>& patchSize);
    // Creates all possible combinations of LBP extractors using the given template
    FeatureExtractor(const blitz::TinyVector<int,2>& patchSize, const bob::ip::base::LBP& templAte, bool overlap = false, bool square = false, int min_size=1, int max_size=INT_MAX, int distance=1);
    // Uses the given LBP extractors only; Please don't mix MB-LBP with regular LBP's
    FeatureExtractor(const blitz::TinyVector<int,2>& patchSize, const std::vector<boost::shared_ptr<bob::ip::base::LBP>>& extractors);
    // copy constructor
    FeatureExtractor(const FeatureExtractor& other);
    // Reads the LBP extractor types from File
    FeatureExtractor(bob::io::base::HDF5File& file);

    // concatenates the given FeatureExtractor to this one
    void append(const FeatureExtractor& other);
    // append the given LBP extractor ONLY at the given offset positions
    void append(const boost::shared_ptr<bob::ip::base::LBP>& lbp, const std::vector<blitz::TinyVector<int32_t,2> >& offsets);

    // reads/writes the extractor configuration from/to the given HDF5 file
    void load(bob::io::base::HDF5File& file);
    void save(bob::io::base::HDF5File& file) const;

    // the list of LBP extractors in use
    const std::vector<boost::shared_ptr<bob::ip::base::LBP>>& getExtractors() const {return m_extractors;}

    // Model indices
    void setModelIndices(const blitz::Array<int32_t,1>& indices) {m_modelIndices.resize(indices.shape()); m_modelIndices = indices;}
    blitz::Array<int32_t,1> getModelIndices() const {return m_modelIndices;}

    // feature information
    int numberOfFeatures() const {return m_featureStarts((int)m_extractors.size());}
    // NOTE(review): assumes all extractors share the same maximum label — confirm
    uint16_t getMaxLabel() const {return m_extractors[0]->getMaxLabel();}

    // scales the image and (if required) computes integral images; must be
    // called before any of the extract*/mean/variance functions below
    template <typename T>
    void prepare(const blitz::Array<T,2>& image, double scale, bool computeIntegralSquareImage);

    // the prepared image
    const blitz::Array<double,2>& getImage() const {return m_image;}

    // Extract the features
    // ... all features into the given row of the dataset
    void extractAll(const BoundingBox& boundingBox, blitz::Array<uint16_t,2>& dataset, int datasetIndex) const;
    // ... only the features selected via setModelIndices
    void extractSome(const BoundingBox& boundingBox, blitz::Array<uint16_t,1>& featureVector) const;
    // ... only the features with the given indices
    void extractIndexed(const BoundingBox& boundingBox, blitz::Array<uint16_t,1>& featureVector, const blitz::Array<int32_t,1>& indices) const;

    // statistics of the prepared image inside the given bounding box
    double mean(const BoundingBox& boundingBox) const;
    double variance(const BoundingBox& boundingBox) const;
    blitz::TinyVector<double,2> meanAndVariance(const BoundingBox& boundingBox) const;

    // the patch size this extractor was configured with
    blitz::TinyVector<int,2> patchSize() const {return m_patchSize;}

    // the LBP extractor and in-patch offset belonging to the given feature index
    const boost::shared_ptr<bob::ip::base::LBP> extractor(int32_t index) const {return m_extractors[m_lookUpTable(index,0)];}
    blitz::TinyVector<int32_t,2> offset(int32_t index) const {return blitz::TinyVector<int,2>(m_lookUpTable(index,1), m_lookUpTable(index,2));}

  private:
    void init();

    blitz::TinyVector<int,2> m_patchSize;
    // look up table storing three information: lbp index, offset y, offset x
    blitz::Array<int,2> m_lookUpTable;

    std::vector<boost::shared_ptr<bob::ip::base::LBP>> m_extractors;

    // start index of each extractor's features within the full feature vector
    blitz::Array<int32_t,1> m_featureStarts;
    blitz::Array<int32_t,1> m_modelIndices;

    // the scaled image and its (optional) integral images, set by prepare()
    blitz::Array<double,2> m_image;
    blitz::Array<double,2> m_integralImage;
    blitz::Array<double,2> m_integralSquareImage;

    mutable std::vector<blitz::Array<uint16_t,2> > m_featureImages;
    bool m_isMultiBlock;
    bool m_hasSingleOffsets;
};
// Scales the given image to the given scale and stores it internally; when
// MB-LBP extractors are in use (or when explicitly requested), the integral
// image — and optionally the integral square image — of the scaled image is
// computed as well. Must be called before features can be extracted.
template <typename T>
inline void FeatureExtractor::prepare(const blitz::Array<T,2>& image, double scale, bool computeIntegralSquareImage){
  // TODO: implement different MB-LBP behaviour here (i.e., scaling the LBP's instead of scaling the image)
  // scale image
  m_image.resize(bob::ip::base::getScaledShape(image.shape(), scale));
  bob::ip::base::scale(image, m_image);
  if (m_isMultiBlock or computeIntegralSquareImage){
    // compute integral image of scaled image (one pixel larger in each dimension)
    m_integralImage.resize(m_image.extent(0)+1, m_image.extent(1)+1);
    if (computeIntegralSquareImage){
      // also compute the integral image of squared pixels (used for variance)
      m_integralSquareImage.resize(m_integralImage.extent(0), m_integralImage.extent(1));
      bob::ip::base::integral<double>(m_image, m_integralImage, m_integralSquareImage, true);
    } else {
      bob::ip::base::integral<double>(m_image, m_integralImage, true);
    }
  }
}
} } } // namespaces
#endif // BOB_IP_FACEDETECT_FEATURES_H
from .sampler import Sampler
from .cascade import Cascade
import numpy
from .._library import FeatureExtractor
import bob.learn.boosting
class Cascade:
  """This class defines a cascade of strong classifiers :py:class:`bob.learn.boosting.BoostedMachine`.

  For each strong classifier, a threshold exists.
  When the weighted sum of predictions of classifiers gets below this threshold, the classification is stopped.

  **Constructor Documentation:**

  The constructor has two different ways to be called.
  The first and most obvious way is to load the cascade from the given ``cascade_file``.

  The second way instantiates an empty cascade, with the given ``feature_extractor``.
  Please use the :py:meth:`add` function to add new strong classifiers with according thresholds.

  **Parameters:**

  ``cascade_file`` : :py:class:`bob.io.base.HDF5File`
    An HDF5 file open for reading

  ``feature_extractor`` : :py:class:`FeatureExtractor`
    A feature extractor that will be used to extract features for the strong classifiers.
  """

  def __init__(self, cascade_file=None, feature_extractor=None):
    # initializes the cascade, either from file or as an empty cascade
    if cascade_file is not None:
      # load() also (re-)computes the indices
      self.load(cascade_file)
    else:
      # NOTE(review): feature_extractor must be given in this branch, since
      # _indices() accesses self.extractor.number_of_features — confirm callers
      self.extractor = feature_extractor
      self.cascade = []
      self.indices = []
      self.thresholds = []
      self._indices()

  def add(self, classifier, threshold, begin=None, end=None):
    """Adds a new strong classifier with the given threshold to the cascade.

    **Parameters:**

    ``classifier`` : :py:class:`bob.learn.boosting.BoostedMachine`
      A strong classifier to add

    ``threshold`` : float
      The classification threshold for this cascade step

    ``begin``, ``end`` : int or ``None``
      If specified, only the weak machines with the indices ``range(begin,end)`` will be added.
    """
    if begin is None: begin = 0
    if end is None: end = len(classifier.weak_machines)
    # copy the requested slice of weak machines (with their weights) into a new strong classifier
    boosted_machine = bob.learn.boosting.BoostedMachine()
    for i in range(begin, end):
      boosted_machine.add_weak_machine(classifier.weak_machines[i], classifier.weights[i])
    self.cascade.append(boosted_machine)
    self.thresholds.append(threshold)
    self._indices()

  def create_from_boosted_machine(self, boosted_machine, classifiers_per_round, classification_thresholds=-5.):
    """Creates this cascade from the given boosted machine, by simply splitting off strong classifiers that have ``classifiers_per_round`` weak classifiers.

    **Parameters:**

    ``boosted_machine`` : :py:class:`bob.learn.boosting.BoostedMachine`
      The strong classifier to split into a regular cascade.

    ``classifiers_per_round`` : int
      The number of classifiers that each cascade step should contain.

    ``classification_thresholds`` : float or [float]
      A single threshold that will be applied in all rounds of the cascade, or one threshold per cascade step.
    """
    # chunk boundaries: every classifiers_per_round weak machines
    indices = list(range(0, len(boosted_machine.weak_machines), classifiers_per_round))
    # make sure the final (possibly partial) chunk is terminated as well;
    # the `not indices` guard avoids an IndexError for an empty machine
    if not indices or indices[-1] != len(boosted_machine.weak_machines):
      indices.append(len(boosted_machine.weak_machines))
    self.cascade = []
    self.indices = []
    for i in range(len(indices) - 1):
      machine = bob.learn.boosting.BoostedMachine()
      for index in range(indices[i], indices[i + 1]):
        machine.add_weak_machine(boosted_machine.weak_machines[index], boosted_machine.weights[index, 0])
      self.cascade.append(machine)
    if isinstance(classification_thresholds, (int, float)):
      # use the same threshold in each cascade step
      self.thresholds = [classification_thresholds] * len(self.cascade)
    else:
      self.thresholds = classification_thresholds
    # fixed: recompute the per-step feature indices (as add() does); otherwise
    # self.indices/self.feature would be stale and __call__ would fail
    self._indices()

  def generate_boosted_machine(self):
    """generate_boosted_machine() -> strong

    Creates a single strong classifier from this cascade by concatenating all strong classifiers.

    **Returns:**

    ``strong`` : :py:class:`bob.learn.boosting.BoostedMachine`
      The strong classifier as a combination of all classifiers in this cascade.
    """
    strong = bob.learn.boosting.BoostedMachine()
    # append the weak machines of every cascade step, in order
    for machine in self.cascade:
      weak = machine.weak_machines
      weights = machine.weights
      for i in range(len(weak)):
        strong.add_weak_machine(weak[i], weights[i])
    return strong

  def _indices(self):
    # computes the list of feature indices used by each of the current classifiers
    self.indices = []
    for classifier in self.cascade:
      self.indices.append(classifier.indices)
    # pre-allocate the buffer into which features are extracted in __call__
    self.feature = numpy.zeros(self.extractor.number_of_features, numpy.uint16)

  def prepare(self, image, scale):
    """Prepares the cascade for extracting features of the given image in the given scale.

    **Parameters:**

    ``image`` : array_like (2D, float)
      The image from which features will be extracted

    ``scale`` : float
      The scale of the image, for which features will be extracted
    """
    # prepare the feature extractor with the given image and scale
    self.extractor.prepare(image, scale)

  def __call__(self, bounding_box):
    """__call__(bounding_box) -> sum

    Computes the classification result of this cascade for the given bounding_box.

    The features will be extracted from the image at the scale that was set by the latest call to :py:meth:`prepare`.
    The classification result is obtained by summing all results of all cascade steps, as long as the sum is not below the threshold of the current cascade step.
    Finally, the sum is returned.

    **Parameters:**

    ``bounding_box`` : :py:class:`BoundingBox`
      The bounding box for which the features should be classified.
      Please assure that the bounding box is inside the image resolution at the scale that was set by the latest call to :py:meth:`prepare`.

    **Returns:**

    ``sum`` : float
      The sum of the cascaded classifiers (which might have been stopped before the last classifier)
    """
    result = 0.
    for i in range(len(self.indices)):
      # extract only the features that this cascade step needs
      self.extractor.extract_indexed(bounding_box, self.feature, self.indices[i])
      result += self.cascade[i](self.feature)
      if result < self.thresholds[i]:
        # break the cascade when the patch can already be rejected
        break
    return result

  def save(self, hdf5):
    """Saves this cascade into the given HDF5 file.

    **Parameters:**

    ``hdf5`` : :py:class:`bob.io.base.HDF5File`
      An HDF5 file open for writing
    """
    # write the cascade to file, one group per cascade step
    hdf5.set("Thresholds", self.thresholds)
    for i in range(len(self.cascade)):
      hdf5.create_group("Classifier_%d" % (i+1))
      hdf5.cd("Classifier_%d" % (i+1))
      self.cascade[i].save(hdf5)
      hdf5.cd("..")
    hdf5.create_group("FeatureExtractor")
    hdf5.cd("FeatureExtractor")
    self.extractor.save(hdf5)
    hdf5.cd("..")

  def load(self, hdf5):
    """Loads this cascade from the given HDF5 file.

    **Parameters:**

    ``hdf5`` : :py:class:`bob.io.base.HDF5File`
      An HDF5 file open for reading
    """
    # read the cascade back from file (fixed comment: this reads, not writes)
    self.thresholds = hdf5.read("Thresholds")
    self.cascade = []
    for i in range(len(self.thresholds)):
      hdf5.cd("Classifier_%d" % (i+1))
      self.cascade.append(bob.learn.boosting.BoostedMachine(hdf5))
      hdf5.cd("..")
    hdf5.cd("FeatureExtractor")
    self.extractor = FeatureExtractor(hdf5)
    hdf5.cd("..")
    self._indices()