// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

// Copyright (c) 2011,2012. Philipp Wagner <bytefish[at]gmx[dot]de>.
// Third party copyrights are property of their respective owners.

#ifndef __OPENCV_FACEREC_HPP__
#define __OPENCV_FACEREC_HPP__

#include "opencv2/face.hpp"
#include "opencv2/core.hpp"

namespace cv { namespace face {

//! @addtogroup face
//! @{

// Abstract base class for the Eigenfaces and Fisherfaces recognizers
class CV_EXPORTS_W BasicFaceRecognizer : public FaceRecognizer
{
public:
    /** @see setNumComponents */
    CV_WRAP virtual int getNumComponents() const = 0;
    /** @copybrief getNumComponents @see getNumComponents */
    CV_WRAP virtual void setNumComponents(int val) = 0;
    /** @see setThreshold */
    CV_WRAP virtual double getThreshold() const = 0;
    /** @copybrief getThreshold @see getThreshold */
    CV_WRAP virtual void setThreshold(double val) = 0;
    CV_WRAP virtual std::vector<cv::Mat> getProjections() const = 0;
    CV_WRAP virtual cv::Mat getLabels() const = 0;
    CV_WRAP virtual cv::Mat getEigenValues() const = 0;
    CV_WRAP virtual cv::Mat getEigenVectors() const = 0;
    CV_WRAP virtual cv::Mat getMean() const = 0;
};

/**
@param num_components The number of components (read: Eigenfaces) kept for this Principal
Component Analysis. As a hint: there is no rule for how many components (read: Eigenfaces) should
be kept for good reconstruction capabilities; it depends on your input data, so experiment with
the number. Keeping 80 components is almost always sufficient.
@param threshold The threshold applied in the prediction. If the distance to the nearest neighbor
is larger than the threshold, this method returns -1.

### Notes:

-   Training and prediction must be done on grayscale images; use cvtColor to convert between the
    color spaces.
-   **THE EIGENFACES METHOD ASSUMES THAT THE TRAINING AND TEST IMAGES ARE OF EQUAL SIZE.**
    (caps-lock, because I got so many mails asking for this). You have to make sure your
    input data has the correct shape, else a meaningful exception is thrown. Use resize to resize
    the images.
-   This model does not support updating.

### Model internal data:

-   num_components see createEigenFaceRecognizer.
-   threshold see createEigenFaceRecognizer.
-   eigenvalues The eigenvalues for this Principal Component Analysis (ordered descending).
-   eigenvectors The eigenvectors for this Principal Component Analysis (ordered by their
    eigenvalue).
-   mean The sample mean calculated from the training data.
-   projections The projections of the training data.
-   labels The labels corresponding to the projections.
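
### Usage example:

A minimal sketch of training and prediction (the image file names, labels and variable names below
are illustrative only, not part of the API; imread additionally requires opencv2/imgcodecs.hpp):

@code
    // Equally sized grayscale training images and their integer labels.
    std::vector<cv::Mat> images;
    std::vector<int> labels;
    images.push_back(cv::imread("person0_0.png", cv::IMREAD_GRAYSCALE)); labels.push_back(0);
    images.push_back(cv::imread("person0_1.png", cv::IMREAD_GRAYSCALE)); labels.push_back(0);
    images.push_back(cv::imread("person1_0.png", cv::IMREAD_GRAYSCALE)); labels.push_back(1);

    // Default number of components, no rejection threshold.
    cv::Ptr<cv::face::BasicFaceRecognizer> model = cv::face::createEigenFaceRecognizer();
    model->train(images, labels);

    // Predict the label of an equally sized grayscale query image.
    cv::Mat query = cv::imread("query.png", cv::IMREAD_GRAYSCALE);
    int predictedLabel = model->predict(query);
@endcode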
 */
CV_EXPORTS_W Ptr<BasicFaceRecognizer> createEigenFaceRecognizer(int num_components = 0, double threshold = DBL_MAX);

/**
@param num_components The number of components (read: Fisherfaces) kept for this Linear
Discriminant Analysis with the Fisherfaces criterion. It is useful to keep all components, which
for c classes (read: subjects, persons you want to recognize) means (c-1) components. If you leave
this at the default (0), set it to a value less than or equal to 0, or set it to a value greater
than (c-1), it will be set to the correct number (c-1) automatically.
@param threshold The threshold applied in the prediction. If the distance to the nearest neighbor
is larger than the threshold, this method returns -1.

### Notes:

-   Training and prediction must be done on grayscale images; use cvtColor to convert between the
    color spaces.
-   **THE FISHERFACES METHOD ASSUMES THAT THE TRAINING AND TEST IMAGES ARE OF EQUAL SIZE.**
    (caps-lock, because I got so many mails asking for this). You have to make sure your
    input data has the correct shape, else a meaningful exception is thrown. Use resize to resize
    the images.
-   This model does not support updating.

### Model internal data:

-   num_components see createFisherFaceRecognizer.
-   threshold see createFisherFaceRecognizer.
-   eigenvalues The eigenvalues for this Linear Discriminant Analysis (ordered descending).
-   eigenvectors The eigenvectors for this Linear Discriminant Analysis (ordered by their
    eigenvalue).
-   mean The sample mean calculated from the training data.
-   projections The projections of the training data.
-   labels The labels corresponding to the projections.
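
### Usage example:

A minimal sketch that also sets a rejection threshold (the training data, the query image and the
threshold value 123.0 below are illustrative, not recommendations):

@code
    std::vector<cv::Mat> images;   // equally sized grayscale training images
    std::vector<int> labels;       // one integer label per training image
    // ... fill images/labels, e.g. with cv::imread("...", cv::IMREAD_GRAYSCALE) ...

    // num_components = 0 lets the model pick (c-1) components automatically.
    cv::Ptr<cv::face::BasicFaceRecognizer> model = cv::face::createFisherFaceRecognizer(0, 123.0);
    model->train(images, labels);

    // With a finite threshold, a query whose nearest neighbor is farther away
    // than 123.0 is rejected and predictedLabel is set to -1.
    cv::Mat query = cv::imread("query.png", cv::IMREAD_GRAYSCALE);
    int predictedLabel = -1;
    double confidence = 0.0;
    model->predict(query, predictedLabel, confidence);
@endcode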
 */
CV_EXPORTS_W Ptr<BasicFaceRecognizer> createFisherFaceRecognizer(int num_components = 0, double threshold = DBL_MAX);

class CV_EXPORTS_W LBPHFaceRecognizer : public FaceRecognizer
{
public:
    /** @see setGridX */
    CV_WRAP virtual int getGridX() const = 0;
    /** @copybrief getGridX @see getGridX */
    CV_WRAP virtual void setGridX(int val) = 0;
    /** @see setGridY */
    CV_WRAP virtual int getGridY() const = 0;
    /** @copybrief getGridY @see getGridY */
    CV_WRAP virtual void setGridY(int val) = 0;
    /** @see setRadius */
    CV_WRAP virtual int getRadius() const = 0;
    /** @copybrief getRadius @see getRadius */
    CV_WRAP virtual void setRadius(int val) = 0;
    /** @see setNeighbors */
    CV_WRAP virtual int getNeighbors() const = 0;
    /** @copybrief getNeighbors @see getNeighbors */
    CV_WRAP virtual void setNeighbors(int val) = 0;
    /** @see setThreshold */
    CV_WRAP virtual double getThreshold() const = 0;
    /** @copybrief getThreshold @see getThreshold */
    CV_WRAP virtual void setThreshold(double val) = 0;
    CV_WRAP virtual std::vector<cv::Mat> getHistograms() const = 0;
    CV_WRAP virtual cv::Mat getLabels() const = 0;
};

/**
@param radius The radius used for building the Circular Local Binary Pattern. The greater the
radius, the larger the neighborhood sampled around each pixel.
@param neighbors The number of sample points used to build the Circular Local Binary Pattern. A
common choice is `8` sample points. Keep in mind: the more sample points you include,
the higher the computational cost.
@param grid_x The number of cells in the horizontal direction, 8 is a common value used in
publications. The more cells, the finer the grid, the higher the dimensionality of the resulting
feature vector.
@param grid_y The number of cells in the vertical direction, 8 is a common value used in
publications. The more cells, the finer the grid, the higher the dimensionality of the resulting
feature vector.
@param threshold The threshold applied in the prediction. If the distance to the nearest neighbor
is larger than the threshold, this method returns -1.

### Notes:

-   The Circular Local Binary Patterns (used in training and prediction) expect the data given as
    grayscale images; use cvtColor to convert between the color spaces.
-   This model supports updating.

### Model internal data:

-   radius see createLBPHFaceRecognizer.
-   neighbors see createLBPHFaceRecognizer.
-   grid_x see createLBPHFaceRecognizer.
-   grid_y see createLBPHFaceRecognizer.
-   threshold see createLBPHFaceRecognizer.
-   histograms Local Binary Patterns Histograms calculated from the given training data (empty if
    none was given).
-   labels Labels corresponding to the calculated Local Binary Patterns Histograms.
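
### Usage example:

A minimal sketch showing the update capability (file names and labels are illustrative only):

@code
    std::vector<cv::Mat> images;   // grayscale training images
    std::vector<int> labels;       // one integer label per training image
    // ... fill images/labels, e.g. with cv::imread("...", cv::IMREAD_GRAYSCALE) ...

    cv::Ptr<cv::face::LBPHFaceRecognizer> model = cv::face::createLBPHFaceRecognizer();
    model->train(images, labels);

    // Unlike the Eigenfaces/Fisherfaces models, LBPH supports updating in place:
    std::vector<cv::Mat> newImages;
    std::vector<int> newLabels;
    newImages.push_back(cv::imread("person2_0.png", cv::IMREAD_GRAYSCALE));
    newLabels.push_back(2);
    model->update(newImages, newLabels);

    cv::Mat query = cv::imread("query.png", cv::IMREAD_GRAYSCALE);
    int predictedLabel = model->predict(query);
@endcode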
 */
CV_EXPORTS_W Ptr<LBPHFaceRecognizer> createLBPHFaceRecognizer(int radius=1, int neighbors=8, int grid_x=8, int grid_y=8, double threshold = DBL_MAX);

//! @}

}} //namespace cv::face

#endif //__OPENCV_FACEREC_HPP__