// Runs the SSD face detector on `frame` and returns the detections.
//
// NOTE(fix): the template argument lists in this block had been stripped
// (e.g. `tuple>>`, `detectionMat.at(i, 2)`, `static_cast(...)`), which does
// not compile; they are restored here to the standard OpenCV DNN usage.
//
// @param net            OpenCV DNN face detector (SSD, 300x300 input,
//                       input layer "data", output layer "detection_out").
// @param frame          input image; not modified — a clone is annotated.
// @param conf_threshold minimum confidence for a detection to be kept.
// @return tuple of (clone of `frame` with red rectangles drawn around each
//         accepted face, list of boxes as {x1, y1, x2, y2} in pixels).
tuple<Mat, vector<vector<int>>> getFaceBox(Net net, Mat& frame, double conf_threshold)
{
    Mat frameOpenCVDNN = frame.clone();
    int frameHeight = frameOpenCVDNN.rows;
    int frameWidth = frameOpenCVDNN.cols;
    double inScaleFactor = 1.0;
    Size size = Size(300, 300);
    // Per-channel mean values the face-detector model was trained with.
    Scalar meanVal = Scalar(104, 117, 123);
    // swapRB=true (model expects RGB), crop=false (resize without cropping).
    cv::Mat inputBlob;
    inputBlob = cv::dnn::blobFromImage(frameOpenCVDNN, inScaleFactor, size, meanVal, true, false);
    net.setInput(inputBlob, "data");
    cv::Mat detection = net.forward("detection_out");
    // The output blob is 1x1xNx7; reinterpret it as an Nx7 float matrix,
    // one row per candidate detection.
    cv::Mat detectionMat(detection.size[2], detection.size[3], CV_32F, detection.ptr<float>());
    vector<vector<int>> bboxes;
    for (int i = 0; i < detectionMat.rows; i++) {
        // Column 2 is the confidence score; columns 3..6 are the box
        // corners normalized to [0, 1].
        float confidence = detectionMat.at<float>(i, 2);
        if (confidence > conf_threshold) {
            int x1 = static_cast<int>(detectionMat.at<float>(i, 3) * frameWidth);
            int y1 = static_cast<int>(detectionMat.at<float>(i, 4) * frameHeight);
            int x2 = static_cast<int>(detectionMat.at<float>(i, 5) * frameWidth);
            int y2 = static_cast<int>(detectionMat.at<float>(i, 6) * frameHeight);
            vector<int> box = { x1, y1, x2, y2 };
            bboxes.push_back(box);
            cv::rectangle(frameOpenCVDNN, cv::Point(x1, y1), cv::Point(x2, y2), cv::Scalar(0, 0, 255));
        }
    }
    return make_tuple(frameOpenCVDNN, bboxes);
}
int main()
{
clock_t startTime, endTime;
cv::Mat frame;
int padding = 20;
Scalar MODEL_MEAN_VALUES = Scalar(78.4263377603, 87.7689143744, 114.895847746);
vector ageList = { "(0-2)","(4-6)","(8-12)","(15-20)","(25-32)","(38-43)","(48-53)","(60-100)" };
vector genderList = { "Male","Female" };
std::string netRootPath = "D://";
std::string imagepath = "D://sunsuyuan//project//data//doublefusion//results//FittingSmpl//color.png";
//frame = cv::imread(netRootPath + "sample3.jpg", 1);
frame = cv::imread(netRootPath+"3.png", 1);
// frame = cv::imread(imagepath, 1);
cv::cvtColor(frame, frame, CV_BGRA2BGR);
if (frame.empty()) {
fprintf(stderr, "cv::imread %s failed\n", netRootPath + "sample3.jpg");
return -1;
}
// Load Network
Net ageNet = readNet(netRootPath+"age_net.caffemodel", netRootPath+"age_deploy.prototxt");
Net genderNet = readNet(netRootPath+"gender_net.caffemodel", netRootPath+"gender_deploy.prototxt");
Net faceNet = readNet(netRootPath+"opencv_face_detector_uint8.pb", netRootPath+"opencv_face_detector.pbtxt");
vector> bboxes;
Mat frameFace;
tie(frameFace, bboxes) = getFaceBox(faceNet, frame, 0.7);
if (bboxes.size() == 0) {
cout << "No face detected, checking next frame." << endl;
}
else {
startTime = clock();
for (auto it = begin(bboxes); it != end(bboxes); ++it) {
Rect rec(it->at(0) - padding, it->at(1) - padding, it->at(2) - it->at(0) + 2 * padding, it->at(3) - it->at(1) + 2 * padding);
Mat face = frame(rec); // take the ROI of box on the frame
Mat blob = blobFromImage(face, 1, Size(227, 227), MODEL_MEAN_VALUES, false);
genderNet.setInput(blob);
vector genderPreds = genderNet.forward();
// find max element index (distance function does the argmax() work in C++)
int max_index_gender = std::distance(genderPreds.begin(), max_element(genderPreds.begin(), genderPreds.end()));
string gender = genderList[max_index_gender];
ageNet.setInput(blob);
vector agePreds = ageNet.forward();
// finding maximum indicd in the age_preds vector
int max_indice_age = std::distance(agePreds.begin(), max_element(agePreds.begin(), agePreds.end()));
string age = ageList[max_indice_age];
cout << "Gender: " << gender << " Age: " << age << endl;
string label = gender + ", " + age; // label
cv::putText(frameFace, label, Point(it->at(0), it->at(1) - 15), cv::FONT_HERSHEY_SIMPLEX, 0.7, Scalar(0, 255, 255));
}
endTime = clock();//计时结束
std::cout << "The run time is: " << (double)1000*(endTime - startTime) / CLOCKS_PER_SEC << "ms" <