author    zongwave <wei.zong@intel.com>  2019-03-29 09:08:36 +0800
committer Zong Wei <wei.zong@intel.com>  2019-03-29 16:50:43 +0800
commit    eba35e966ae28a7348e57d3a4fd2696e8d212981 (patch)
tree      f0ba8c9d7a3aefc97792164b8b47383d363fc1fe
parent    d59a869bb8cc47c008525c87197f75074b64ba27 (diff)
download  libxcam-eba35e966ae28a7348e57d3a4fd2696e8d212981.tar.gz
dnn: check function return value
-rwxr-xr-x  modules/dnn/dnn_inference_engine.cpp  134
-rwxr-xr-x  modules/dnn/dnn_inference_engine.h     37
-rw-r--r--  tests/test-dnn-inference.cpp           42
3 files changed, 128 insertions, 85 deletions
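
This commit converts DnnInferenceEngine's void methods to return XCamReturn so callers can detect and propagate failures, and updates the test driver to check every call. Below is a minimal sketch of that pattern; the enumerator names and values are illustrative stand-ins for the real XCamReturn codes defined in xcam_defs.h.

#include <cstdio>

// Illustrative stand-in for libxcam's XCamReturn; the real enumerators
// and their values are defined in xcam_defs.h.
enum XCamReturnSketch {
    SKETCH_NO_ERROR    = 0,
    SKETCH_ERROR_PARAM = -1,   // invalid argument
    SKETCH_ERROR_ORDER = -2,   // prerequisite step not done yet
};

struct EngineSketch {
    bool model_created = false;

    // Before the commit: `void create_model (...)` logged the error and
    // returned, leaving the caller unaware. Now the failure is reported.
    XCamReturnSketch create_model (const char *model_file) {
        if (model_file == NULL)
            return SKETCH_ERROR_PARAM;
        model_created = true;
        return SKETCH_NO_ERROR;
    }

    XCamReturnSketch load_model () {
        if (!model_created)
            return SKETCH_ERROR_ORDER;   // enforce create-before-load
        return SKETCH_NO_ERROR;
    }
};

int main () {
    EngineSketch engine;
    // Callers now stop at the first failed step instead of running on.
    if (engine.create_model (NULL) != SKETCH_NO_ERROR) {
        fprintf (stderr, "create model failed!\n");
        return -1;
    }
    if (engine.load_model () != SKETCH_NO_ERROR) {
        fprintf (stderr, "load model failed!\n");
        return -1;
    }
    return 0;
}

The dedicated order-error code is what lets the call-order checks ("Please create the model firstly!") surface at the call site rather than only in the log.
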
diff --git a/modules/dnn/dnn_inference_engine.cpp b/modules/dnn/dnn_inference_engine.cpp
index d510ea2..256b2d8 100755
--- a/modules/dnn/dnn_inference_engine.cpp
+++ b/modules/dnn/dnn_inference_engine.cpp
@@ -29,7 +29,6 @@ namespace XCam {
DnnInferenceEngine::DnnInferenceEngine (DnnInferConfig& config)
: _model_created (false)
, _model_loaded (false)
- , _target_device (InferenceEngine::TargetDevice::eCPU)
, _input_image_width (0)
, _input_image_height (0)
{
@@ -44,41 +43,38 @@ DnnInferenceEngine::~DnnInferenceEngine ()
}
-void
+XCamReturn
DnnInferenceEngine::create_model (DnnInferConfig& config)
{
XCAM_LOG_DEBUG ("DnnInferenceEngine::create_model");
if (_model_created) {
XCAM_LOG_INFO ("model already created!");
- return;
+ return XCAM_RETURN_NO_ERROR;
}
// 1. Read the Intermediate Representation
XCAM_LOG_DEBUG ("pre-trained model file name: %s", config.model_filename);
if (NULL == config.model_filename) {
XCAM_LOG_ERROR ("Model file name is empty!");
- return;
+ return XCAM_RETURN_ERROR_PARAM;
}
- _model_file = config.model_filename;
- _network_reader.ReadNetwork (get_filename_prefix (_model_file) + ".xml");
- _network_reader.ReadWeights (get_filename_prefix (_model_file) + ".bin");
+ _network_reader.ReadNetwork (get_filename_prefix (config.model_filename) + ".xml");
+ _network_reader.ReadWeights (get_filename_prefix (config.model_filename) + ".bin");
// 2. Prepare inputs and outputs format
_network = _network_reader.getNetwork ();
_inputs_info = _network.getInputsInfo ();
_outputs_info = _network.getOutputsInfo ();
- _target_device = get_device_from_id (config.target_id);
-
// 3. Select Plugin - Select the plugin on which to load your network.
// 3.1. Create the plugin with the InferenceEngine::PluginDispatcher load helper class.
if (NULL == config.plugin_path) {
InferenceEngine::PluginDispatcher dispatcher ({""});
- _plugin = dispatcher.getPluginByDevice (getDeviceName (_target_device));
+ _plugin = dispatcher.getPluginByDevice (getDeviceName (get_device_from_id (config.target_id)));
} else {
InferenceEngine::PluginDispatcher dispatcher ({config.plugin_path});
- _plugin = dispatcher.getPluginByDevice (getDeviceName (_target_device));
+ _plugin = dispatcher.getPluginByDevice (getDeviceName (get_device_from_id (config.target_id)));
}
// 3.2. Pass per device loading configurations specific to this device,
@@ -112,36 +108,40 @@ DnnInferenceEngine::create_model (DnnInferConfig& config)
}
_model_created = true;
+
+ return XCAM_RETURN_NO_ERROR;
}
-void
+XCamReturn
DnnInferenceEngine::load_model (DnnInferConfig& config)
{
XCAM_LOG_DEBUG ("DnnInferenceEngine::load_model");
if (! _model_created) {
XCAM_LOG_ERROR ("Please create the model firstly!");
- return;
+ return XCAM_RETURN_ERROR_ORDER;
}
if (_model_loaded) {
XCAM_LOG_INFO ("model already loaded!");
- return;
+ return XCAM_RETURN_NO_ERROR;
}
- _execute_network = _plugin.LoadNetwork (_network, {});
- _infer_request = _execute_network.CreateInferRequest ();
+ InferenceEngine::ExecutableNetwork execute_network = _plugin.LoadNetwork (_network, {});
+ _infer_request = execute_network.CreateInferRequest ();
_model_loaded = true;
+
+ return XCAM_RETURN_NO_ERROR;
}
-void
+XCamReturn
DnnInferenceEngine::get_info (DnnInferenceEngineInfo& info, DnnInferInfoType type)
{
XCAM_LOG_DEBUG ("DnnInferenceEngine::get_info type %d", type);
if (! _model_created) {
XCAM_LOG_ERROR ("Please create the model firstly!");
- return;
+ return XCAM_RETURN_ERROR_ORDER;
}
info.type = type;
@@ -162,17 +162,19 @@ DnnInferenceEngine::get_info (DnnInferenceEngineInfo& info, DnnInferInfoType typ
} else {
XCAM_LOG_WARNING ("DnnInferenceEngine::get_info type %d not supported!", type);
}
+ return XCAM_RETURN_NO_ERROR;
}
-void
+XCamReturn
DnnInferenceEngine::set_batch_size (const size_t size)
{
if (! _model_created) {
XCAM_LOG_ERROR ("Please create the model firstly!");
- return;
+ return XCAM_RETURN_ERROR_ORDER;
}
_network.setBatchSize (size);
+ return XCAM_RETURN_NO_ERROR;
}
size_t
@@ -186,14 +188,14 @@ DnnInferenceEngine::get_batch_size ()
return _network.getBatchSize ();
}
-void
+XCamReturn
DnnInferenceEngine::start (bool sync)
{
XCAM_LOG_DEBUG ("Start inference sync(%d)", sync);
if (! _model_loaded) {
XCAM_LOG_ERROR ("Please load the model firstly!");
- return;
+ return XCAM_RETURN_ERROR_ORDER;
}
if (sync) {
@@ -202,6 +204,8 @@ DnnInferenceEngine::start (bool sync)
_infer_request.StartAsync ();
_infer_request.Wait (IInferRequest::WaitMode::RESULT_READY);
}
+
+ return XCAM_RETURN_NO_ERROR;
}
size_t
@@ -228,19 +232,19 @@ DnnInferenceEngine::get_output_size ()
return outputs_info.size ();
}
-void
+XCamReturn
DnnInferenceEngine::set_input_presion (uint32_t idx, DnnInferPrecisionType precision)
{
if (! _model_created) {
XCAM_LOG_ERROR ("Please create the model firstly!");
- return;
+ return XCAM_RETURN_ERROR_ORDER;
}
uint32_t id = 0;
if (idx > _inputs_info.size ()) {
XCAM_LOG_ERROR ("Input is out of range");
- return;
+ return XCAM_RETURN_ERROR_PARAM;
}
for (auto & item : _inputs_info) {
@@ -251,6 +255,8 @@ DnnInferenceEngine::set_input_presion (uint32_t idx, DnnInferPrecisionType preci
}
id++;
}
+
+ return XCAM_RETURN_NO_ERROR;
}
DnnInferPrecisionType
@@ -278,19 +284,19 @@ DnnInferenceEngine::get_input_presion (uint32_t idx)
return DnnInferPrecisionUnspecified;
}
-void
+XCamReturn
DnnInferenceEngine::set_output_presion (uint32_t idx, DnnInferPrecisionType precision)
{
if (! _model_created) {
XCAM_LOG_ERROR ("Please create the model firstly!");
- return;
+ return XCAM_RETURN_ERROR_ORDER;
}
uint32_t id = 0;
if (idx > _outputs_info.size ()) {
XCAM_LOG_ERROR ("Output is out of range");
- return;
+ return XCAM_RETURN_ERROR_PARAM;
}
for (auto & item : _outputs_info) {
@@ -301,6 +307,8 @@ DnnInferenceEngine::set_output_presion (uint32_t idx, DnnInferPrecisionType prec
}
id++;
}
+
+ return XCAM_RETURN_NO_ERROR;
}
DnnInferPrecisionType
@@ -328,18 +336,18 @@ DnnInferenceEngine::get_output_presion (uint32_t idx)
return DnnInferPrecisionUnspecified;
}
-void
+XCamReturn
DnnInferenceEngine::set_input_layout (uint32_t idx, DnnInferLayoutType layout)
{
if (! _model_created) {
XCAM_LOG_ERROR ("Please create the model firstly!");
- return;
+ return XCAM_RETURN_ERROR_ORDER;
}
uint32_t id = 0;
if (idx > _inputs_info.size ()) {
XCAM_LOG_ERROR ("Input is out of range");
- return;
+ return XCAM_RETURN_ERROR_PARAM;
}
/** Iterating over all input blobs **/
for (auto & item : _inputs_info) {
@@ -351,21 +359,23 @@ DnnInferenceEngine::set_input_layout (uint32_t idx, DnnInferLayoutType layout)
}
id++;
}
+
+ return XCAM_RETURN_NO_ERROR;
}
-void
+XCamReturn
DnnInferenceEngine::set_output_layout (uint32_t idx, DnnInferLayoutType layout)
{
if (! _model_created) {
XCAM_LOG_ERROR ("Please create the model firstly!");
- return;
+ return XCAM_RETURN_ERROR_ORDER;
}
uint32_t id = 0;
if (idx > _outputs_info.size ()) {
XCAM_LOG_ERROR ("Output is out of range");
- return;
+ return XCAM_RETURN_ERROR_PARAM;
}
/** Iterating over all output blobs **/
for (auto & item : _outputs_info) {
@@ -377,14 +387,15 @@ DnnInferenceEngine::set_output_layout (uint32_t idx, DnnInferLayoutType layout)
id++;
}
+ return XCAM_RETURN_NO_ERROR;
}
-void
+XCamReturn
DnnInferenceEngine::get_model_input_info (DnnInferInputOutputInfo& info)
{
if (!_model_created) {
XCAM_LOG_ERROR ("Please create the model firstly!");
- return;
+ return XCAM_RETURN_ERROR_ORDER;
}
int id = 0;
@@ -405,21 +416,23 @@ DnnInferenceEngine::get_model_input_info (DnnInferInputOutputInfo& info)
}
info.batch_size = get_batch_size ();
info.numbers = _inputs_info.size ();
+
+ return XCAM_RETURN_NO_ERROR;
}
-void
+XCamReturn
DnnInferenceEngine::set_model_input_info (DnnInferInputOutputInfo& info)
{
XCAM_LOG_DEBUG ("DnnInferenceEngine::set_model_input_info");
if (!_model_created) {
XCAM_LOG_ERROR ("Please create the model firstly!");
- return;
+ return XCAM_RETURN_ERROR_ORDER;
}
if (info.numbers != _inputs_info.size ()) {
XCAM_LOG_ERROR ("Input size is not matched with model info numbers %d !", info.numbers);
- return;
+ return XCAM_RETURN_ERROR_PARAM;
}
int id = 0;
@@ -430,14 +443,16 @@ DnnInferenceEngine::set_model_input_info (DnnInferInputOutputInfo& info)
item.second->setLayout (layout);
id++;
}
+
+ return XCAM_RETURN_NO_ERROR;
}
-void
+XCamReturn
DnnInferenceEngine::get_model_output_info (DnnInferInputOutputInfo& info)
{
if (!_model_created) {
XCAM_LOG_ERROR ("Please create the model firstly!");
- return;
+ return XCAM_RETURN_ERROR_ORDER;
}
int id = 0;
@@ -464,20 +479,22 @@ DnnInferenceEngine::get_model_output_info (DnnInferInputOutputInfo& info)
info.numbers = _outputs_info.size ();
} else {
XCAM_LOG_ERROR ("Get output info error!");
+ return XCAM_RETURN_ERROR_UNKNOWN;
}
+ return XCAM_RETURN_NO_ERROR;
}
-void
+XCamReturn
DnnInferenceEngine::set_model_output_info (DnnInferInputOutputInfo& info)
{
if (!_model_created) {
XCAM_LOG_ERROR ("Please create the model firstly!");
- return;
+ return XCAM_RETURN_ERROR_ORDER;
}
if (info.numbers != _outputs_info.size()) {
XCAM_LOG_ERROR ("Output size is not matched with model!");
- return;
+ return XCAM_RETURN_ERROR_PARAM;
}
int id = 0;
@@ -488,9 +505,11 @@ DnnInferenceEngine::set_model_output_info (DnnInferInputOutputInfo& info)
item.second->setLayout (layout);
id++;
}
+
+ return XCAM_RETURN_NO_ERROR;
}
-void
+XCamReturn
DnnInferenceEngine::set_input_blob (uint32_t idx, DnnInferData& data)
{
unsigned int id = 0;
@@ -498,7 +517,7 @@ DnnInferenceEngine::set_input_blob (uint32_t idx, DnnInferData& data)
if (idx > _inputs_info.size()) {
XCAM_LOG_ERROR ("Input is out of range");
- return;
+ return XCAM_RETURN_ERROR_PARAM;
}
for (auto & item : _inputs_info) {
@@ -511,12 +530,12 @@ DnnInferenceEngine::set_input_blob (uint32_t idx, DnnInferData& data)
if (item_name.empty ()) {
XCAM_LOG_ERROR ("item name is empty!");
- return;
+ return XCAM_RETURN_ERROR_PARAM;
}
if (data.batch_idx > get_batch_size ()) {
XCAM_LOG_ERROR ("Too many input, it is bigger than batch size!");
- return;
+ return XCAM_RETURN_ERROR_PARAM;
}
Blob::Ptr blob = _infer_request.GetBlob (item_name);
@@ -533,14 +552,16 @@ DnnInferenceEngine::set_input_blob (uint32_t idx, DnnInferData& data)
copy_data_to_blob<uint8_t>(data, blob, data.batch_idx);
}
}
+
+ return XCAM_RETURN_NO_ERROR;
}
-void
+XCamReturn
DnnInferenceEngine::set_inference_data (std::vector<std::string> images)
{
if (!_model_created) {
XCAM_LOG_ERROR ("Please create the model firstly!");
- return;
+ return XCAM_RETURN_ERROR_ORDER;
}
uint32_t idx = 0;
@@ -587,6 +608,8 @@ DnnInferenceEngine::set_inference_data (std::vector<std::string> images)
continue;
}
}
+
+ return XCAM_RETURN_NO_ERROR;
}
std::shared_ptr<uint8_t>
@@ -717,7 +740,6 @@ DnnInferenceEngine::convert_layout_type (DnnInferLayoutType layout)
default:
return InferenceEngine::Layout::ANY;
}
-
}
DnnInferLayoutType
@@ -822,7 +844,7 @@ DnnInferenceEngine::get_filename_prefix (const std::string &file_path)
return file_path.substr (0, pos);
}
-template <typename T> void
+template <typename T> XCamReturn
DnnInferenceEngine::copy_image_to_blob (const DnnInferData& data, Blob::Ptr& blob, int batch_index)
{
SizeVector blob_size = blob.get()->dims ();
@@ -836,7 +858,7 @@ DnnInferenceEngine::copy_image_to_blob (const DnnInferData& data, Blob::Ptr& blo
if (width != data.width || height != data.height) {
XCAM_LOG_ERROR ("Input Image size (%dx%d) is not matched with model required size (%dx%d)!",
data.width, data.height, width, height);
- return;
+ return XCAM_RETURN_ERROR_PARAM;
}
int batch_offset = batch_index * height * width * channels;
@@ -866,9 +888,11 @@ DnnInferenceEngine::copy_image_to_blob (const DnnInferData& data, Blob::Ptr& blo
}
}
}
+
+ return XCAM_RETURN_NO_ERROR;
}
-template <typename T> void
+template <typename T> XCamReturn
DnnInferenceEngine::copy_data_to_blob (const DnnInferData& data, Blob::Ptr& blob, int batch_index)
{
SizeVector blob_size = blob.get ()->dims ();
@@ -878,6 +902,8 @@ DnnInferenceEngine::copy_data_to_blob (const DnnInferData& data, Blob::Ptr& blob
int batch_offset = batch_index * data.size;
memcpy (blob_data + batch_offset, buffer, data.size);
+
+ return XCAM_RETURN_NO_ERROR;
}
void
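
Beyond the signature changes, the template helpers copy_image_to_blob and copy_data_to_blob now report a dimension mismatch through their return value instead of logging and bailing out silently. A minimal sketch of that validate-then-copy shape, with hypothetical stand-ins for DnnInferData and the InferenceEngine blob:

#include <cstring>
#include <cstddef>
#include <vector>

// Hypothetical stand-ins for DnnInferData and the InferenceEngine blob;
// the real types come from the libxcam and OpenVINO headers.
struct ImageDataSketch {
    const unsigned char *buffer;
    unsigned int width;
    unsigned int height;
};

enum CopyStatus { COPY_OK = 0, COPY_ERROR_PARAM = -1 };

// Mirrors the reworked copy_image_to_blob: reject a size mismatch before
// touching the destination, and surface the error to the caller.
CopyStatus
copy_image_sketch (const ImageDataSketch &data, std::vector<unsigned char> &blob,
                   unsigned int model_width, unsigned int model_height,
                   unsigned int channels, unsigned int batch_index)
{
    if (data.width != model_width || data.height != model_height)
        return COPY_ERROR_PARAM;   // was a bare `return;` before this commit

    std::size_t plane = (std::size_t) model_width * model_height * channels;
    std::size_t batch_offset = (std::size_t) batch_index * plane;
    if (blob.size () < batch_offset + plane)
        return COPY_ERROR_PARAM;   // batch index past the blob's capacity

    std::memcpy (blob.data () + batch_offset, data.buffer, plane);
    return COPY_OK;
}
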
diff --git a/modules/dnn/dnn_inference_engine.h b/modules/dnn/dnn_inference_engine.h
index 1019f66..eebf93d 100755
--- a/modules/dnn/dnn_inference_engine.h
+++ b/modules/dnn/dnn_inference_engine.h
@@ -197,30 +197,30 @@ public:
explicit DnnInferenceEngine (DnnInferConfig& config);
virtual ~DnnInferenceEngine ();
- void create_model (DnnInferConfig& config);
- void load_model (DnnInferConfig& config);
+ XCamReturn create_model (DnnInferConfig& config);
+ XCamReturn load_model (DnnInferConfig& config);
- void get_info (DnnInferenceEngineInfo& info, DnnInferInfoType type);
+ XCamReturn get_info (DnnInferenceEngineInfo& info, DnnInferInfoType type);
- void set_batch_size (const size_t size);
+ XCamReturn set_batch_size (const size_t size);
size_t get_batch_size ();
bool ready_to_start () const {
return _model_created && _model_loaded;
};
- void start (bool sync = true);
+ XCamReturn start (bool sync = true);
size_t get_input_size ();
size_t get_output_size ();
- void set_input_presion (uint32_t idx, DnnInferPrecisionType precision);
+ XCamReturn set_input_presion (uint32_t idx, DnnInferPrecisionType precision);
DnnInferPrecisionType get_input_presion (uint32_t idx);
- void set_output_presion (uint32_t idx, DnnInferPrecisionType precision);
+ XCamReturn set_output_presion (uint32_t idx, DnnInferPrecisionType precision);
DnnInferPrecisionType get_output_presion (uint32_t idx);
- void set_input_layout (uint32_t idx, DnnInferLayoutType layout);
- void set_output_layout (uint32_t idx, DnnInferLayoutType layout);
+ XCamReturn set_input_layout (uint32_t idx, DnnInferLayoutType layout);
+ XCamReturn set_output_layout (uint32_t idx, DnnInferLayoutType layout);
uint32_t get_input_image_height () const {
return _input_image_height;
@@ -229,13 +229,13 @@ public:
return _input_image_width;
};
- void set_model_input_info (DnnInferInputOutputInfo& info);
- void get_model_input_info (DnnInferInputOutputInfo& info);
+ XCamReturn set_model_input_info (DnnInferInputOutputInfo& info);
+ XCamReturn get_model_input_info (DnnInferInputOutputInfo& info);
- void set_model_output_info (DnnInferInputOutputInfo& info);
- void get_model_output_info (DnnInferInputOutputInfo& info);
+ XCamReturn set_model_output_info (DnnInferInputOutputInfo& info);
+ XCamReturn get_model_output_info (DnnInferInputOutputInfo& info);
- void set_inference_data (std::vector<std::string> images);
+ XCamReturn set_inference_data (std::vector<std::string> images);
void* get_inference_results (uint32_t idx, uint32_t& size);
std::shared_ptr<uint8_t> read_inference_image (std::string image);
@@ -258,17 +258,15 @@ protected:
void print_performance_counts (const std::map<std::string, InferenceEngine::InferenceEngineProfileInfo>& performance_map);
private:
- void set_input_blob (uint32_t idx, DnnInferData& data);
- template <typename T> void copy_image_to_blob (const DnnInferData& data, InferenceEngine::Blob::Ptr& blob, int batch_index);
- template <typename T> void copy_data_to_blob (const DnnInferData& data, InferenceEngine::Blob::Ptr& blob, int batch_index);
+ XCamReturn set_input_blob (uint32_t idx, DnnInferData& data);
+ template <typename T> XCamReturn copy_image_to_blob (const DnnInferData& data, InferenceEngine::Blob::Ptr& blob, int batch_index);
+ template <typename T> XCamReturn copy_data_to_blob (const DnnInferData& data, InferenceEngine::Blob::Ptr& blob, int batch_index);
protected:
bool _model_created;
bool _model_loaded;
- std::string _model_file;
- InferenceEngine::TargetDevice _target_device;
InferenceEngine::InferencePlugin _plugin;
InferenceEngine::InputsDataMap _inputs_info;
@@ -279,7 +277,6 @@ protected:
InferenceEngine::CNNNetReader _network_reader;
InferenceEngine::CNNNetwork _network;
- InferenceEngine::ExecutableNetwork _execute_network;
InferenceEngine::InferRequest _infer_request;
std::vector<InferenceEngine::CNNLayerPtr> _layers;
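
With every setter in the header now returning XCamReturn, and the one-shot _model_file, _target_device, and _execute_network members replaced by locals, a caller can short-circuit on the first failure. A sketch of the resulting call-site idiom, assuming the libxcam headers above are on the include path; the helper itself is hypothetical:

#include "dnn_inference_engine.h"

using namespace XCam;

// Illustrative driver for the reworked API; the engine methods match the
// declarations above, but this wrapper is not part of the commit.
static XCamReturn
setup_engine (DnnInferenceEngine &engine, DnnInferConfig &config)
{
    XCamReturn ret = engine.create_model (config);
    if (ret != XCAM_RETURN_NO_ERROR)
        return ret;                    // propagate rather than guess

    ret = engine.load_model (config);
    if (ret != XCAM_RETURN_NO_ERROR)
        return ret;

    return engine.start (true);       // synchronous inference
}
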
diff --git a/tests/test-dnn-inference.cpp b/tests/test-dnn-inference.cpp
index 68d0c63..f8f7662 100644
--- a/tests/test-dnn-inference.cpp
+++ b/tests/test-dnn-inference.cpp
@@ -349,26 +349,36 @@ int main (int argc, char *argv[])
SmartPtr<DnnInferenceEngine> infer_engine = new DnnInferenceEngine (infer_config);
DnnInferenceEngineInfo infer_info;
- infer_engine->get_info (infer_info, DnnInferInfoEngine);
+ CHECK (
+ infer_engine->get_info (infer_info, DnnInferInfoEngine),
+ "get inference engine info failed!");
XCAM_LOG_DEBUG ("Inference Engine version: %d.%d", infer_info.major, infer_info.minor);
- infer_engine->get_info (infer_info, DnnInferInfoPlugin);
+ CHECK (
+ infer_engine->get_info (infer_info, DnnInferInfoPlugin),
+ "get inference engine info failed!");
XCAM_LOG_DEBUG ("Inference Engine plugin discription: %s", infer_info.desc);
XCAM_LOG_DEBUG ("Inference Engine plugin version: %d.%d", infer_info.major, infer_info.minor);
- infer_engine->get_info (infer_info, DnnInferInfoNetwork);
+ CHECK (
+ infer_engine->get_info (infer_info, DnnInferInfoNetwork),
+ "get inference engine info failed!");
XCAM_LOG_DEBUG ("Inference Engine network name: %s", infer_info.name);
XCAM_LOG_DEBUG ("Inference Engine network discription: %s", infer_info.desc);
XCAM_LOG_DEBUG ("Inference Engine network version: %d.%d", infer_info.major, infer_info.minor);
// --------------------------- 3. Get model input infos --------------------------------------------------
XCAM_LOG_DEBUG ("3. Get/Set model input infos");
- infer_engine->get_model_input_info (infer_config.input_infos);
+ CHECK (
+ infer_engine->get_model_input_info (infer_config.input_infos),
+ "get model input info failed!");
XCAM_LOG_DEBUG ("Input info :");
for (uint32_t i = 0; i < infer_config.input_infos.numbers; i++) {
infer_config.input_infos.data_type[i] = DnnInferDataTypeImage;
- infer_engine->set_input_presion (i, DnnInferPrecisionU8);
+ CHECK (
+ infer_engine->set_input_presion (i, DnnInferPrecisionU8),
+ "set input presion failed!");
XCAM_LOG_DEBUG ("Idx %d : [%d X %d X %d] , [%d %d %d], batch size = %d", i,
infer_config.input_infos.width[i], infer_config.input_infos.height[i], infer_config.input_infos.channels[i],
infer_config.input_infos.precision[i], infer_config.input_infos.layout[i], infer_config.input_infos.data_type[i],
@@ -377,11 +387,15 @@ int main (int argc, char *argv[])
// --------------------------- 4. Get model output infos -------------------------------------------------
XCAM_LOG_DEBUG ("4. Get/Set model output infos");
- infer_engine->get_model_output_info (infer_config.output_infos);
+ CHECK (
+ infer_engine->get_model_output_info (infer_config.output_infos),
+ "get model output info failed!");
- XCAM_LOG_DEBUG ("Output info :");
+ XCAM_LOG_DEBUG ("Output info (numbers %d) :", infer_config.output_infos.numbers);
for (uint32_t i = 0; i < infer_config.output_infos.numbers; i++) {
- infer_engine->set_output_presion (i, DnnInferPrecisionFP32);
+ CHECK (
+ infer_engine->set_output_presion (i, DnnInferPrecisionFP32),
+ "set output presion failed!");
XCAM_LOG_DEBUG ("Idx %d : [%d X %d X %d] , [%d %d %d], batch size = %d", i,
infer_config.output_infos.width[i], infer_config.output_infos.height[i], infer_config.output_infos.channels[i],
infer_config.output_infos.precision[i], infer_config.output_infos.layout[i], infer_config.output_infos.data_type[i],
@@ -390,16 +404,22 @@ int main (int argc, char *argv[])
// --------------------------- 5. load inference model -------------------------------------------------
XCAM_LOG_DEBUG ("5. load inference model");
- infer_engine->load_model (infer_config);
+ CHECK (
+ infer_engine->load_model (infer_config),
+ "load model failed!");
// --------------------------- 6. Set inference data --------------------------------------------------------
XCAM_LOG_DEBUG ("6. Set inference data");
- infer_engine->set_inference_data (images);
+ CHECK (
+ infer_engine->set_inference_data (images),
+ "set inference data failed!");
// --------------------------- 7. Do inference ---------------------------------------------------------
XCAM_LOG_DEBUG ("7. Start inference iterations");
if (infer_engine->ready_to_start ()) {
- infer_engine->start ();
+ CHECK (
+ infer_engine->start (),
+ "inference failed!");
}
FPS_CALCULATION (inference_engine, XCAM_OBJ_DUR_FRAME_NUM);
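
The test-side counterpart is the CHECK wrapper seen throughout the hunks above, which turns a non-zero XCamReturn into a logged failure and an early exit. A hypothetical re-creation of that idiom; the actual macro lives in libxcam's test helpers and may differ in detail:

#include <cstdio>

// Hypothetical re-creation of the tests' CHECK idiom: evaluate the call,
// log the message on failure, and abort the current function.
#define CHECK(ret, msg)                                               \
    do {                                                              \
        if ((ret) != 0) {                                             \
            fprintf (stderr, "%s (error %d)\n", msg, (int)(ret));     \
            return -1;                                                \
        }                                                             \
    } while (0)

static int fake_get_info () { return 0; }   // stands in for an engine call

int main ()
{
    CHECK (fake_get_info (), "get inference engine info failed!");
    printf ("all checks passed\n");
    return 0;
}
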