Antal.Ai - Virtual Makeup
NeuralBase.h
#pragma once

#include "Detection.h"
#include <opencv2/dnn.hpp>

#include <map>
#include <string>
#include <vector>

namespace core {

/// Base class that wraps loading, configuration and bookkeeping for an
/// OpenCV DNN model.
class NeuralBase {
 public:
  NeuralBase() = default;

  /// Gets the name of the neural network.
  std::string getName();

  /// Gets the inference time of the network.
  double getInferenceTime();

  /// Loads the neural network from the given framework and model files.
  void loadNetwork(
      std::string framework,
      std::string modelWeightsFile,
      std::string classesFile = "",
      std::string modelConfigurationFile = "",
      std::string inputLayerName = "",
      std::string outputLayerName = "");

  /// Configures the network's preprocessing, processing and postprocessing
  /// parameters.
  void configureNetwork(std::string name,
                        // Preprocessor params
                        int inputWidth,
                        int inputHeight,
                        double scale,
                        double mean0,
                        double mean1,
                        double mean2,
                        // Processing params
                        std::string configBackend,
                        std::string configTarget,
                        // Postprocessing params
                        double confidenceThreshold);

  /// Gets the names of the output layers of the network.
  std::vector<std::string> getOutputsNames(const cv::dnn::Net& net);

 protected:
  std::string mFramework;
  std::string mInputLayerName;
  std::string mOutputLayerName;
  std::string mName;

  // Loaded model
  cv::dnn::Net mNet;

  // Object class names
  std::vector<std::string> classes;

  // Preprocessor params:
  double scaleFactor = 1.0;
  cv::Scalar mean{ 0.0, 0.0, 0.0 };  // Mean of the network's input image
  cv::Size size{ 128, 128 };         // Width and height of the network's input image

  // Processor params:
  cv::dnn::Backend selectedBackend =
      cv::dnn::DNN_BACKEND_OPENCV;  // Preferred backend

  cv::dnn::Target selectedTarget =
      cv::dnn::DNN_TARGET_CPU;  // Preferred target

  // Postprocessor params:
  float confThreshold = 0.5F;  // Confidence threshold

  // Selectable processor params
  std::map<std::string, cv::dnn::Backend> backends = {
      {"DNN_BACKEND_DEFAULT", cv::dnn::DNN_BACKEND_DEFAULT},
      {"DNN_BACKEND_HALIDE", cv::dnn::DNN_BACKEND_HALIDE},
      {"DNN_BACKEND_INFERENCE_ENGINE", cv::dnn::DNN_BACKEND_INFERENCE_ENGINE},
      {"DNN_BACKEND_OPENCV", cv::dnn::DNN_BACKEND_OPENCV},
      {"DNN_BACKEND_CUDA", cv::dnn::DNN_BACKEND_CUDA}
  };

  std::map<std::string, cv::dnn::Target> targets = {
      {"DNN_TARGET_CPU", cv::dnn::DNN_TARGET_CPU},
      {"DNN_TARGET_CUDA", cv::dnn::DNN_TARGET_CUDA},
      {"DNN_TARGET_OPENCL", cv::dnn::DNN_TARGET_OPENCL},
      {"DNN_TARGET_OPENCL_FP16", cv::dnn::DNN_TARGET_OPENCL_FP16},
      {"DNN_TARGET_MYRIAD", cv::dnn::DNN_TARGET_MYRIAD} };
};

} // namespace core
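For orientation, a minimal sketch of how a caller might drive this interface follows. The framework string, file paths, model name and parameter values are illustrative assumptions, not assets or defaults taken from this repository; the backend and target strings must match keys of the `backends` and `targets` maps declared above.

  #include "NeuralBase.h"

  int main() {
      core::NeuralBase net;

      // Hypothetical model files; the assets actually shipped with the project may differ.
      net.loadNetwork("tensorflow",          // framework
                      "models/face.pb",      // modelWeightsFile
                      "models/classes.txt",  // classesFile
                      "models/face.pbtxt");  // modelConfigurationFile

      // Keys for the backend/target arguments come from the `backends` and `targets` maps.
      net.configureNetwork("face-detector",
                           300, 300,         // input width / height
                           1.0 / 255.0,      // scale
                           0.0, 0.0, 0.0,    // mean per channel
                           "DNN_BACKEND_OPENCV",
                           "DNN_TARGET_CPU",
                           0.5);             // confidence threshold

      return 0;
  }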
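The protected preprocessor members (scaleFactor, mean, size) map directly onto OpenCV's blob construction. A derived class's forward pass could look roughly like the sketch below; `ExampleDetector` and `runForward` are hypothetical names used only for illustration and are not declared in this header.

  #include <opencv2/core.hpp>
  #include <opencv2/dnn.hpp>

  #include "NeuralBase.h"

  namespace core {

  class ExampleDetector : public NeuralBase {
   public:
    // Hypothetical helper: preprocess a frame and run one forward pass.
    std::vector<cv::Mat> runForward(const cv::Mat& frame) {
      // Build the input blob from the stored preprocessor parameters.
      cv::Mat blob = cv::dnn::blobFromImage(frame, scaleFactor, size, mean,
                                            /*swapRB=*/true, /*crop=*/false);
      mNet.setInput(blob);

      // Fetch every output layer by name and run the network.
      std::vector<cv::Mat> outputs;
      mNet.forward(outputs, getOutputsNames(mNet));
      return outputs;
    }
  };

  } // namespace core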