/tests/nnstreamer_filter_openvino/unittest_openvino.cc

https://github.com/nnstreamer/nnstreamer · C++ · 1187 lines · 902 code · 173 blank · 112 comment · 65 complexity · 65f42bf52e05fa37692a67473c329431 MD5 · raw file

  1. /* SPDX-License-Identifier: LGPL-2.1-only */
  2. /**
  3. * @file unittest_openvino.cc
  4. * @author Wook Song <wook16.song@samsung.com>
  5. * @bug No known bugs
  6. */
  7. #include <gtest/gtest.h>
  8. #include <glib/gstdio.h>
  9. #include <gst/check/gstcheck.h>
  10. #include <gst/check/gstharness.h>
  11. #include <gst/check/gsttestclock.h>
  12. #include <gst/gst.h>
  13. #include <nnstreamer_plugin_api_filter.h>
  14. #include <string.h>
  15. #include <tensor_common.h>
  16. #include <tensor_filter_openvino.hh>
  17. const static gchar MODEL_BASE_NAME_MOBINET_V2[] = "openvino_mobilenetv2-int8-tf-0001";
  18. const static uint32_t MOBINET_V2_IN_NUM_TENSOR = 1;
  19. const static uint32_t MOBINET_V2_IN_DIMS[NNS_TENSOR_SIZE_LIMIT] = {
  20. 224, 224, 3, 1,
  21. };
  22. const static uint32_t MOBINET_V2_OUT_NUM_TENSOR = 1;
  23. const static uint32_t MOBINET_V2_OUT_DIMS[NNS_TENSOR_SIZE_LIMIT] = {
  24. 1001, 1, 1, 1,
  25. };
/**
 * @brief A subclass of TensorFilterOpenvino that exposes getters/setters for
 *        the inherited input/output data maps so the negative test cases
 *        below can substitute hand-crafted maps (e.g., maps exceeding
 *        NNS_TENSOR_SIZE_LIMIT or NNS_TENSOR_RANK_LIMIT).
 */
class TensorFilterOpenvinoTest : public TensorFilterOpenvino
{
public:
  typedef TensorFilterOpenvino super;

  TensorFilterOpenvinoTest (std::string path_model_xml, std::string path_model_bin);
  ~TensorFilterOpenvinoTest ();

  /* Accessors for the data maps inherited from TensorFilterOpenvino */
  InferenceEngine::InputsDataMap &getInputsDataMap ();
  void setInputsDataMap (InferenceEngine::InputsDataMap &map);
  InferenceEngine::OutputsDataMap &getOutputsDataMap ();
  void setOutputsDataMap (InferenceEngine::OutputsDataMap &map);

private:
  /* Hidden: a test instance always requires the model paths */
  TensorFilterOpenvinoTest ();
};
  40. /** @brief wooksong: please fill in */
  41. TensorFilterOpenvinoTest::TensorFilterOpenvinoTest (
  42. std::string path_model_xml, std::string path_model_bin)
  43. : super (path_model_xml, path_model_bin)
  44. {
  45. /* Nothing to do */
  46. ;
  47. }
  48. /** @brief wooksong: please fill in */
  49. TensorFilterOpenvinoTest::~TensorFilterOpenvinoTest ()
  50. {
  51. /* Nothing to do */
  52. ;
  53. }
  54. /** @brief wooksong: please fill in */
  55. InferenceEngine::InputsDataMap &
  56. TensorFilterOpenvinoTest::getInputsDataMap ()
  57. {
  58. return this->_inputsDataMap;
  59. }
  60. /** @brief wooksong: please fill in */
  61. void
  62. TensorFilterOpenvinoTest::setInputsDataMap (InferenceEngine::InputsDataMap &map)
  63. {
  64. this->_inputsDataMap = map;
  65. }
  66. /** @brief wooksong: please fill in */
  67. InferenceEngine::OutputsDataMap &
  68. TensorFilterOpenvinoTest::getOutputsDataMap ()
  69. {
  70. return this->_outputsDataMap;
  71. }
  72. /** @brief wooksong: please fill in */
  73. void
  74. TensorFilterOpenvinoTest::setOutputsDataMap (InferenceEngine::OutputsDataMap &map)
  75. {
  76. this->_outputsDataMap = map;
  77. }
/**
 * @brief Test cases for open and close callbacks varying the model files:
 *        base name only, explicit .xml + .bin pair, .bin only, and .xml only.
 *        With __OPENVINO_CPU_EXT__ each open is expected to succeed;
 *        otherwise it must fail with RetENoDev.
 */
TEST (tensorFilterOpenvino, openAndClose0)
{
  const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  const gchar fw_name[] = "openvino";
  const GstTensorFilterFramework *fw = nnstreamer_filter_find (fw_name);
  GstTensorFilterProperties *prop = NULL;
  gpointer private_data = NULL;
  std::string str_test_model;
  gchar *test_model;
  gint ret;

  /* Check if mandatory methods are contained */
  ASSERT_TRUE (fw && fw->open && fw->close);

  /* supposed to run test in build directory */
  if (root_path == NULL)
    root_path = "..";

  /* Case 1: a single model path given as the base name without extension */
  test_model = g_build_filename (root_path, "tests", "test_models", "models",
      MODEL_BASE_NAME_MOBINET_V2, NULL);

  /* prepare properties */
  prop = g_new0 (GstTensorFilterProperties, 1);
  ASSERT_TRUE (prop != NULL);
  prop->fwname = fw_name;
  prop->num_models = 1;
  prop->accl_str = "true:cpu";
  {
    const gchar *model_files[] = {
      test_model, NULL,
    };

    prop->model_files = model_files;
    ret = fw->open (prop, &private_data);
    /* Without the CPU extension build, no usable device is available */
#ifdef __OPENVINO_CPU_EXT__
    EXPECT_EQ (ret, 0);
#else
    EXPECT_NE (ret, 0);
    EXPECT_EQ (ret, TensorFilterOpenvino::RetENoDev);
#endif
  }
  fw->close (prop, &private_data);
  g_free (test_model);

  /* Case 2: the .xml and .bin files passed explicitly as two model files */
  {
    gchar *test_model_xml = g_build_filename (root_path, "tests", "test_models", "models",
        str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
            .append (TensorFilterOpenvino::extXml)
            .c_str (),
        NULL);
    gchar *test_model_bin = g_build_filename (root_path, "tests", "test_models", "models",
        str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
            .append (TensorFilterOpenvino::extBin)
            .c_str (),
        NULL);
    const gchar *model_files[] = {
      test_model_xml, test_model_bin,
    };

    prop->num_models = 2;
    prop->model_files = model_files;
    ret = fw->open (prop, &private_data);
#ifdef __OPENVINO_CPU_EXT__
    EXPECT_EQ (ret, 0);
#else
    EXPECT_NE (ret, 0);
    EXPECT_EQ (ret, TensorFilterOpenvino::RetENoDev);
#endif
    fw->close (prop, &private_data);
    g_free (test_model_xml);
    g_free (test_model_bin);
  }

  /* Case 3: only the .bin (weights) file given */
  test_model = g_build_filename (root_path, "tests", "test_models", "models",
      str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
          .append (TensorFilterOpenvino::extBin)
          .c_str (),
      NULL);
  {
    const gchar *model_files[] = {
      test_model, NULL,
    };

    prop->num_models = 1;
    prop->model_files = model_files;
    ret = fw->open (prop, &private_data);
#ifdef __OPENVINO_CPU_EXT__
    EXPECT_EQ (ret, 0);
#else
    EXPECT_NE (ret, 0);
    EXPECT_EQ (ret, TensorFilterOpenvino::RetENoDev);
#endif
  }
  fw->close (prop, &private_data);
  g_free (test_model);

  /* Case 4: only the .xml (network) file given */
  test_model = g_build_filename (root_path, "tests", "test_models", "models",
      str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
          .append (TensorFilterOpenvino::extXml)
          .c_str (),
      NULL);
  {
    const gchar *model_files[] = {
      test_model, NULL,
    };

    prop->num_models = 1;
    prop->model_files = model_files;
    ret = fw->open (prop, &private_data);
#ifdef __OPENVINO_CPU_EXT__
    EXPECT_EQ (ret, 0);
#else
    EXPECT_NE (ret, 0);
    EXPECT_EQ (ret, TensorFilterOpenvino::RetENoDev);
#endif
  }
  fw->close (prop, &private_data);
  g_free (test_model);
  g_free (prop);
}
  190. /**
  191. * @brief A test case for open and close callbacks with the private_data, which has the models already loaded
  192. */
  193. TEST (tensorFilterOpenvino, openAndClose1)
  194. {
  195. const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  196. const gchar fw_name[] = "openvino";
  197. const GstTensorFilterFramework *fw = nnstreamer_filter_find (fw_name);
  198. std::string str_test_model;
  199. GstTensorFilterProperties *prop = NULL;
  200. gpointer private_data = NULL;
  201. TensorFilterOpenvino *tfOv;
  202. gchar *test_model_xml;
  203. gchar *test_model_bin;
  204. gint ret;
  205. /* Check if mandatory methods are contained */
  206. ASSERT_TRUE (fw && fw->open && fw->close);
  207. /* supposed to run test in build directory */
  208. if (root_path == NULL)
  209. root_path = "..";
  210. test_model_xml = g_build_filename (root_path, "tests", "test_models", "models",
  211. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  212. .append (TensorFilterOpenvino::extXml)
  213. .c_str (),
  214. NULL);
  215. test_model_bin = g_build_filename (root_path, "tests", "test_models", "models",
  216. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  217. .append (TensorFilterOpenvino::extBin)
  218. .c_str (),
  219. NULL);
  220. tfOv = new TensorFilterOpenvino (str_test_model.assign (test_model_xml),
  221. str_test_model.assign (test_model_bin));
  222. ret = tfOv->loadModel (ACCL_CPU);
  223. #ifdef __OPENVINO_CPU_EXT__
  224. EXPECT_EQ (ret, 0);
  225. #else
  226. EXPECT_NE (ret, 0);
  227. EXPECT_EQ (ret, TensorFilterOpenvino::RetENoDev);
  228. #endif
  229. private_data = (gpointer)tfOv;
  230. /* prepare properties */
  231. prop = g_new0 (GstTensorFilterProperties, 1);
  232. ASSERT_TRUE (prop != NULL);
  233. prop->fwname = fw_name;
  234. prop->num_models = 2;
  235. prop->accl_str = "true:cpu";
  236. {
  237. const gchar *model_files[] = {
  238. test_model_xml, test_model_bin,
  239. };
  240. prop->model_files = model_files;
  241. ret = fw->open (prop, &private_data);
  242. #ifdef __OPENVINO_CPU_EXT__
  243. EXPECT_EQ (ret, 0);
  244. #else
  245. EXPECT_NE (ret, 0);
  246. EXPECT_EQ (ret, TensorFilterOpenvino::RetENoDev);
  247. #endif
  248. }
  249. fw->close (prop, &private_data);
  250. g_free (test_model_xml);
  251. g_free (test_model_bin);
  252. g_free (prop);
  253. }
  254. /**
  255. * @brief A test case for open and close callbacks with the private_data, which has the models are not loaded
  256. */
  257. TEST (tensorFilterOpenvino, openAndClose2)
  258. {
  259. const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  260. const gchar fw_name[] = "openvino";
  261. const GstTensorFilterFramework *fw = nnstreamer_filter_find (fw_name);
  262. std::string str_test_model;
  263. GstTensorFilterProperties *prop = NULL;
  264. gpointer private_data = NULL;
  265. TensorFilterOpenvino *tfOv;
  266. gchar *test_model_xml;
  267. gchar *test_model_bin;
  268. gint ret;
  269. /* Check if mandatory methods are contained */
  270. ASSERT_TRUE (fw && fw->open && fw->close);
  271. /* supposed to run test in build directory */
  272. if (root_path == NULL)
  273. root_path = "..";
  274. test_model_xml = g_build_filename (root_path, "tests", "test_models", "models",
  275. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  276. .append (TensorFilterOpenvino::extXml)
  277. .c_str (),
  278. NULL);
  279. test_model_bin = g_build_filename (root_path, "tests", "test_models", "models",
  280. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  281. .append (TensorFilterOpenvino::extBin)
  282. .c_str (),
  283. NULL);
  284. tfOv = new TensorFilterOpenvino (str_test_model.assign (test_model_xml),
  285. str_test_model.assign (test_model_bin));
  286. private_data = (gpointer)tfOv;
  287. /* prepare properties */
  288. prop = g_new0 (GstTensorFilterProperties, 1);
  289. ASSERT_TRUE (prop != NULL);
  290. prop->fwname = fw_name;
  291. prop->num_models = 2;
  292. prop->accl_str = "true:cpu";
  293. {
  294. const gchar *model_files[] = {
  295. test_model_xml, test_model_bin,
  296. };
  297. prop->model_files = model_files;
  298. ret = fw->open (prop, &private_data);
  299. #ifdef __OPENVINO_CPU_EXT__
  300. EXPECT_EQ (ret, 0);
  301. #else
  302. EXPECT_NE (ret, 0);
  303. EXPECT_EQ (ret, TensorFilterOpenvino::RetENoDev);
  304. #endif
  305. }
  306. fw->close (prop, &private_data);
  307. g_free (test_model_xml);
  308. g_free (test_model_bin);
  309. g_free (prop);
  310. }
/**
 * @brief Negative test cases for open and close callbacks with wrong model
 *        files: a non-existent path, two .xml files, and two .bin files.
 *        Every open is expected to fail with RetEInval.
 */
TEST (tensorFilterOpenvino, openAndClose0_n)
{
  const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  const gchar fw_name[] = "openvino";
  const GstTensorFilterFramework *fw = nnstreamer_filter_find (fw_name);
  GstTensorFilterProperties *prop = NULL;
  gpointer private_data = NULL;
  gchar *test_model;
  gint ret;

  /* Check if mandatory methods are contained */
  ASSERT_TRUE (fw && fw->open && fw->close);

  /* supposed to run test in build directory */
  if (root_path == NULL)
    root_path = "..";

  /* Case 1: the model file does not exist */
  test_model = g_build_filename (
      root_path, "tests", "test_models", "models", "NOT_EXIST", NULL);
  const gchar *model_files[] = {
    test_model, NULL,
  };

  /* prepare properties */
  prop = g_new0 (GstTensorFilterProperties, 1);
  ASSERT_TRUE (prop != NULL);
  prop->fwname = fw_name;
  prop->model_files = model_files;
  prop->num_models = 1;
  prop->accl_str = "true:cpu";

  ret = fw->open (prop, &private_data);
  EXPECT_NE (ret, TensorFilterOpenvino::RetSuccess);
  EXPECT_EQ (ret, TensorFilterOpenvino::RetEInval);
  g_free (test_model);
  fw->close (prop, &private_data);

  /* Case 2: two .xml files given where an .xml/.bin pair is required */
  {
    std::string str_test_model;
    gchar *test_model_xml1 = g_build_filename (root_path, "tests", "test_models", "models",
        str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
            .append (TensorFilterOpenvino::extXml)
            .c_str (),
        NULL);
    gchar *test_model_xml2 = g_build_filename (root_path, "tests", "test_models", "models",
        str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
            .append (TensorFilterOpenvino::extXml)
            .c_str (),
        NULL);
    const gchar *model_files[] = {
      test_model_xml1, test_model_xml2,
    };

    prop->num_models = 2;
    prop->model_files = model_files;
    ret = fw->open (prop, &private_data);
    EXPECT_NE (ret, TensorFilterOpenvino::RetSuccess);
    EXPECT_EQ (ret, TensorFilterOpenvino::RetEInval);
    fw->close (prop, &private_data);
    g_free (test_model_xml1);
    g_free (test_model_xml2);
  }

  /* Case 3: two .bin files given where an .xml/.bin pair is required */
  {
    std::string str_test_model;
    gchar *test_model_bin1 = g_build_filename (root_path, "tests", "test_models", "models",
        str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
            .append (TensorFilterOpenvino::extBin)
            .c_str (),
        NULL);
    gchar *test_model_bin2 = g_build_filename (root_path, "tests", "test_models", "models",
        str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
            .append (TensorFilterOpenvino::extBin)
            .c_str (),
        NULL);
    const gchar *model_files[] = {
      test_model_bin1, test_model_bin2,
    };

    prop->num_models = 2;
    prop->model_files = model_files;
    ret = fw->open (prop, &private_data);
    EXPECT_NE (ret, TensorFilterOpenvino::RetSuccess);
    EXPECT_EQ (ret, TensorFilterOpenvino::RetEInval);
    fw->close (prop, &private_data);
    g_free (test_model_bin1);
    g_free (test_model_bin2);
  }
  g_free (prop);
}
/**
 * @brief Negative test cases for open and close callbacks with accelerator
 * property values, which are not supported: no accelerator string at all,
 * "true:auto", "true:gpu", and an accelerator that the current build
 * cannot provide. Every open is expected to fail with RetEInval.
 */
TEST (tensorFilterOpenvino, openAndClose1_n)
{
  const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  const gchar fw_name[] = "openvino";
  const GstTensorFilterFramework *fw = nnstreamer_filter_find (fw_name);
  GstTensorFilterProperties *prop = NULL;
  gpointer private_data = NULL;
  gchar *test_model;
  gint ret;

  /* Check if mandatory methods are contained */
  ASSERT_TRUE (fw && fw->open && fw->close);

  /* supposed to run test in build directory */
  if (root_path == NULL)
    root_path = "..";

  test_model = g_build_filename (root_path, "tests", "test_models", "models",
      MODEL_BASE_NAME_MOBINET_V2, NULL);
  const gchar *model_files[] = {
    test_model, NULL,
  };

  /* prepare properties; note that accl_str is deliberately left unset
   * (NULL) for the first open attempt */
  prop = g_new0 (GstTensorFilterProperties, 1);
  ASSERT_TRUE (prop != NULL);
  prop->fwname = fw_name;
  prop->model_files = model_files;
  prop->num_models = 1;

  ret = fw->open (prop, &private_data);
  EXPECT_NE (ret, TensorFilterOpenvino::RetSuccess);
  EXPECT_EQ (ret, TensorFilterOpenvino::RetEInval);
  fw->close (prop, &private_data);

  /* "auto" is not an accepted accelerator for this filter */
  prop->accl_str = "true:auto";
  ret = fw->open (prop, &private_data);
  EXPECT_NE (ret, TensorFilterOpenvino::RetSuccess);
  EXPECT_EQ (ret, TensorFilterOpenvino::RetEInval);
  fw->close (prop, &private_data);

  /* "gpu" is not an accepted accelerator for this filter */
  prop->accl_str = "true:gpu";
  ret = fw->open (prop, &private_data);
  EXPECT_NE (ret, TensorFilterOpenvino::RetSuccess);
  EXPECT_EQ (ret, TensorFilterOpenvino::RetEInval);
  fw->close (prop, &private_data);

  /* Request the accelerator that the current build does NOT support:
   * NPU on a CPU-extension build, CPU otherwise */
#ifdef __OPENVINO_CPU_EXT__
  prop->accl_str = "true:npu.movidius";
  ret = fw->open (prop, &private_data);
  EXPECT_NE (ret, TensorFilterOpenvino::RetSuccess);
  EXPECT_EQ (ret, TensorFilterOpenvino::RetEInval);
#else
  prop->accl_str = "true:cpu";
  ret = fw->open (prop, &private_data);
  EXPECT_NE (ret, TensorFilterOpenvino::RetSuccess);
  EXPECT_EQ (ret, TensorFilterOpenvino::RetEInval);
#endif
  fw->close (prop, &private_data);
  g_free (prop);
  g_free (test_model);
}
/**
 * @brief Negative test cases for open and close callbacks with accelerator
 * property values, which are wrong: no accelerator string, and an
 * accelerator unavailable in the current build. Every open is expected to
 * fail with RetEInval.
 */
TEST (tensorFilterOpenvino, openAndClose2_n)
{
  const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  const gchar fw_name[] = "openvino";
  const GstTensorFilterFramework *fw = nnstreamer_filter_find (fw_name);
  GstTensorFilterProperties *prop = NULL;
  gpointer private_data = NULL;
  gchar *test_model;
  gint ret;

  /* Check if mandatory methods are contained */
  ASSERT_TRUE (fw && fw->open && fw->close);

  /* supposed to run test in build directory */
  if (root_path == NULL)
    root_path = "..";

  test_model = g_build_filename (root_path, "tests", "test_models", "models",
      MODEL_BASE_NAME_MOBINET_V2, NULL);
  const gchar *model_files[] = {
    test_model, NULL,
  };

  /* prepare properties; accl_str is deliberately left unset (NULL) */
  prop = g_new0 (GstTensorFilterProperties, 1);
  ASSERT_TRUE (prop != NULL);
  prop->fwname = fw_name;
  prop->model_files = model_files;
  prop->num_models = 1;

  ret = fw->open (prop, &private_data);
  EXPECT_NE (ret, TensorFilterOpenvino::RetSuccess);
  EXPECT_EQ (ret, TensorFilterOpenvino::RetEInval);
  fw->close (prop, &private_data);

  /* Request the accelerator that the current build does NOT support:
   * NPU on a CPU-extension build, CPU otherwise */
#ifdef __OPENVINO_CPU_EXT__
  prop->accl_str = "true:npu.movidius";
  ret = fw->open (prop, &private_data);
  EXPECT_NE (ret, TensorFilterOpenvino::RetSuccess);
  EXPECT_EQ (ret, TensorFilterOpenvino::RetEInval);
#else
  prop->accl_str = "true:cpu";
  ret = fw->open (prop, &private_data);
  EXPECT_NE (ret, TensorFilterOpenvino::RetSuccess);
  EXPECT_EQ (ret, TensorFilterOpenvino::RetEInval);
#endif
  fw->close (prop, &private_data);
  g_free (prop);
  g_free (test_model);
}
/**
 * @brief Test cases for getInputTensorDim and getOutputTensorDim callbacks:
 *        after opening the MobileNet V2 model, the reported tensor counts
 *        and dimensions must match the expected MOBINET_V2_* constants.
 */
TEST (tensorFilterOpenvino, getTensorDim0)
{
  const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  const gchar fw_name[] = "openvino";
  const GstTensorFilterFramework *fw = nnstreamer_filter_find (fw_name);
  GstTensorFilterProperties *prop = NULL;
  GstTensorsInfo nns_tensors_info;
  gpointer private_data = NULL;
  std::string str_test_model;
  gchar *test_model;
  gint ret;

  /* Check if mandatory methods are contained */
  ASSERT_TRUE (fw && fw->open && fw->close);

  /* supposed to run test in build directory */
  if (root_path == NULL)
    root_path = "..";

  test_model = g_build_filename (root_path, "tests", "test_models", "models",
      MODEL_BASE_NAME_MOBINET_V2, NULL);

  /* prepare properties */
  prop = g_new0 (GstTensorFilterProperties, 1);
  ASSERT_TRUE (prop != NULL);
  prop->fwname = fw_name;
  prop->num_models = 1;
  prop->accl_str = "true:cpu";
  {
    const gchar *model_files[] = {
      test_model, NULL,
    };

    prop->model_files = model_files;
    ret = fw->open (prop, &private_data);
#ifdef __OPENVINO_CPU_EXT__
    EXPECT_EQ (ret, 0);
#else
    EXPECT_NE (ret, 0);
    EXPECT_EQ (ret, TensorFilterOpenvino::RetENoDev);
#endif
  }

  /* Test getInputDimension (): count and per-rank dims must match */
  ASSERT_TRUE (fw->getInputDimension);
  ret = fw->getInputDimension (prop, &private_data, &nns_tensors_info);
  EXPECT_EQ (ret, 0);
  EXPECT_EQ (nns_tensors_info.num_tensors, MOBINET_V2_IN_NUM_TENSOR);
  for (uint32_t i = 0; i < MOBINET_V2_IN_NUM_TENSOR; ++i) {
    for (uint32_t j = 0; j < NNS_TENSOR_RANK_LIMIT; ++j) {
      EXPECT_EQ (nns_tensors_info.info[i].dimension[j], MOBINET_V2_IN_DIMS[j]);
    }
  }

  /* Test getOutputDimension (): count and per-rank dims must match */
  ASSERT_TRUE (fw->getOutputDimension);
  ret = fw->getOutputDimension (prop, &private_data, &nns_tensors_info);
  EXPECT_EQ (ret, 0);
  EXPECT_EQ (nns_tensors_info.num_tensors, MOBINET_V2_OUT_NUM_TENSOR);
  for (uint32_t i = 0; i < MOBINET_V2_OUT_NUM_TENSOR; ++i) {
    for (uint32_t j = 0; j < NNS_TENSOR_RANK_LIMIT; ++j) {
      EXPECT_EQ (nns_tensors_info.info[i].dimension[j], MOBINET_V2_OUT_DIMS[j]);
    }
  }
  fw->close (prop, &private_data);
  g_free (test_model);
  g_free (prop);
}
  565. /**
  566. * @brief A negative test case for getInputTensorDim callbacks (The number of tensors is exceeded NNS_TENSOR_SIZE_LIMIT)
  567. */
  568. TEST (tensorFilterOpenvino, getTensorDim0_n)
  569. {
  570. const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  571. const gchar fw_name[] = "openvino";
  572. const GstTensorFilterFramework *fw = nnstreamer_filter_find (fw_name);
  573. std::string str_test_model;
  574. GstTensorFilterProperties *prop = NULL;
  575. gpointer private_data = NULL;
  576. GstTensorsInfo nns_tensors_info;
  577. gchar *test_model_xml;
  578. gchar *test_model_bin;
  579. gint ret;
  580. /* supposed to run test in build directory */
  581. if (root_path == NULL)
  582. root_path = "..";
  583. test_model_xml = g_build_filename (root_path, "tests", "test_models", "models",
  584. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  585. .append (TensorFilterOpenvino::extXml)
  586. .c_str (),
  587. NULL);
  588. test_model_bin = g_build_filename (root_path, "tests", "test_models", "models",
  589. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  590. .append (TensorFilterOpenvino::extBin)
  591. .c_str (),
  592. NULL);
  593. {
  594. TensorFilterOpenvinoTest tfOvTest (str_test_model.assign (test_model_xml),
  595. str_test_model.assign (test_model_bin));
  596. /** A test case when the number of tensors in input exceed is exceeded
  597. * NNS_TENSOR_SIZE_LIMIT */
  598. std::string name_input = std::string ("input");
  599. InferenceEngine::InputsDataMap inDataMap;
  600. InferenceEngine::SizeVector dims = InferenceEngine::SizeVector ();
  601. InferenceEngine::Data *data = new InferenceEngine::Data (
  602. name_input, dims, InferenceEngine::Precision::FP32);
  603. InferenceEngine::InputInfo *info = new InferenceEngine::InputInfo ();
  604. info->setInputData (InferenceEngine::DataPtr (data));
  605. inDataMap[name_input] = InferenceEngine::InputInfo::Ptr (info);
  606. for (int i = 1; i < NNS_TENSOR_SIZE_LIMIT + 1; ++i) {
  607. InferenceEngine::InputInfo *info = new InferenceEngine::InputInfo ();
  608. std::string name_input_n = std::string ((char *)&i);
  609. inDataMap[name_input_n] = InferenceEngine::InputInfo::Ptr (info);
  610. }
  611. tfOvTest.setInputsDataMap (inDataMap);
  612. ret = tfOvTest.loadModel (ACCL_CPU);
  613. private_data = (gpointer)&tfOvTest;
  614. #ifdef __OPENVINO_CPU_EXT__
  615. EXPECT_EQ (ret, 0);
  616. #else
  617. EXPECT_NE (ret, 0);
  618. EXPECT_EQ (ret, TensorFilterOpenvino::RetENoDev);
  619. #endif
  620. /* prepare properties */
  621. prop = g_new0 (GstTensorFilterProperties, 1);
  622. ASSERT_TRUE (prop != NULL);
  623. prop->fwname = fw_name;
  624. /* Test getInputDimension () */
  625. ASSERT_TRUE (fw->getInputDimension);
  626. ret = fw->getInputDimension (prop, &private_data, &nns_tensors_info);
  627. EXPECT_NE (ret, 0);
  628. g_free (prop);
  629. }
  630. g_free (test_model_xml);
  631. g_free (test_model_bin);
  632. }
  633. /**
  634. * @brief A negative test case for the getInputTensorDim callback (A wrong rank)
  635. */
  636. TEST (tensorFilterOpenvino, getTensorDim1_n)
  637. {
  638. const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  639. const gchar fw_name[] = "openvino";
  640. const GstTensorFilterFramework *fw = nnstreamer_filter_find (fw_name);
  641. std::string str_test_model;
  642. GstTensorFilterProperties *prop = NULL;
  643. gpointer private_data = NULL;
  644. GstTensorsInfo nns_tensors_info;
  645. gchar *test_model_xml;
  646. gchar *test_model_bin;
  647. gint ret;
  648. /* supposed to run test in build directory */
  649. if (root_path == NULL)
  650. root_path = "..";
  651. test_model_xml = g_build_filename (root_path, "tests", "test_models", "models",
  652. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  653. .append (TensorFilterOpenvino::extXml)
  654. .c_str (),
  655. NULL);
  656. test_model_bin = g_build_filename (root_path, "tests", "test_models", "models",
  657. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  658. .append (TensorFilterOpenvino::extBin)
  659. .c_str (),
  660. NULL);
  661. {
  662. TensorFilterOpenvinoTest tfOvTest (str_test_model.assign (test_model_xml),
  663. str_test_model.assign (test_model_bin));
  664. /** A test case when the number of ranks of a tensor in the input exceed is
  665. * exceeded NNS_TENSOR_RANK_LIMIT */
  666. std::string name_input = std::string ("input");
  667. InferenceEngine::SizeVector dims;
  668. InferenceEngine::InputsDataMap inDataMap;
  669. InferenceEngine::Data *data;
  670. InferenceEngine::InputInfo *info;
  671. dims = InferenceEngine::SizeVector (NNS_TENSOR_RANK_LIMIT + 1, 1);
  672. data = new InferenceEngine::Data (name_input, dims,
  673. InferenceEngine::Precision::FP32, InferenceEngine::ANY);
  674. info = new InferenceEngine::InputInfo ();
  675. info->setInputData (InferenceEngine::DataPtr (data));
  676. inDataMap[name_input] = InferenceEngine::InputInfo::Ptr (info);
  677. tfOvTest.setInputsDataMap (inDataMap);
  678. ret = tfOvTest.loadModel (ACCL_CPU);
  679. private_data = (gpointer)&tfOvTest;
  680. #ifdef __OPENVINO_CPU_EXT__
  681. EXPECT_EQ (ret, 0);
  682. #else
  683. EXPECT_NE (ret, 0);
  684. EXPECT_EQ (ret, TensorFilterOpenvino::RetENoDev);
  685. #endif
  686. /* prepare properties */
  687. prop = g_new0 (GstTensorFilterProperties, 1);
  688. ASSERT_TRUE (prop != NULL);
  689. prop->fwname = fw_name;
  690. /* Test getInputDimension () */
  691. ASSERT_TRUE (fw->getInputDimension);
  692. ret = fw->getInputDimension (prop, &private_data, &nns_tensors_info);
  693. EXPECT_NE (ret, 0);
  694. g_free (prop);
  695. }
  696. g_free (test_model_xml);
  697. g_free (test_model_bin);
  698. }
  699. /**
  700. * @brief A negative test case for getOutputTensorDim callbacks (The number of tensors is exceeded NNS_TENSOR_SIZE_LIMIT)
  701. */
  702. TEST (tensorFilterOpenvino, getTensorDim2_n)
  703. {
  704. const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  705. const gchar fw_name[] = "openvino";
  706. const GstTensorFilterFramework *fw = nnstreamer_filter_find (fw_name);
  707. std::string str_test_model;
  708. GstTensorFilterProperties *prop = NULL;
  709. gpointer private_data = NULL;
  710. GstTensorsInfo nns_tensors_info;
  711. gchar *test_model_xml;
  712. gchar *test_model_bin;
  713. gint ret;
  714. /* supposed to run test in build directory */
  715. if (root_path == NULL)
  716. root_path = "..";
  717. test_model_xml = g_build_filename (root_path, "tests", "test_models", "models",
  718. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  719. .append (TensorFilterOpenvino::extXml)
  720. .c_str (),
  721. NULL);
  722. test_model_bin = g_build_filename (root_path, "tests", "test_models", "models",
  723. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  724. .append (TensorFilterOpenvino::extBin)
  725. .c_str (),
  726. NULL);
  727. {
  728. TensorFilterOpenvinoTest tfOvTest (str_test_model.assign (test_model_xml),
  729. str_test_model.assign (test_model_bin));
  730. /** A test case when the number of tensors in input exceed is exceeded
  731. * NNS_TENSOR_SIZE_LIMIT */
  732. InferenceEngine::OutputsDataMap outDataMap;
  733. InferenceEngine::SizeVector dims = InferenceEngine::SizeVector ();
  734. for (int i = 0; i < NNS_TENSOR_SIZE_LIMIT + 1; ++i) {
  735. std::string name_output_n = std::string ((char *)&i);
  736. InferenceEngine::Data *data = new InferenceEngine::Data (
  737. name_output_n, dims, InferenceEngine::Precision::FP32);
  738. InferenceEngine::DataPtr outputDataPtr (data);
  739. outDataMap[name_output_n] = outputDataPtr;
  740. }
  741. tfOvTest.setOutputsDataMap (outDataMap);
  742. ret = tfOvTest.loadModel (ACCL_CPU);
  743. private_data = (gpointer)&tfOvTest;
  744. #ifdef __OPENVINO_CPU_EXT__
  745. EXPECT_EQ (ret, 0);
  746. #else
  747. EXPECT_NE (ret, 0);
  748. EXPECT_EQ (ret, TensorFilterOpenvino::RetENoDev);
  749. #endif
  750. /* prepare properties */
  751. prop = g_new0 (GstTensorFilterProperties, 1);
  752. ASSERT_TRUE (prop != NULL);
  753. prop->fwname = fw_name;
  754. /* Test getOutputDimension () */
  755. ASSERT_TRUE (fw->getOutputDimension);
  756. ret = fw->getOutputDimension (prop, &private_data, &nns_tensors_info);
  757. EXPECT_NE (ret, 0);
  758. g_free (prop);
  759. }
  760. g_free (test_model_xml);
  761. g_free (test_model_bin);
  762. }
  763. /**
  764. * @brief A negative test case for getOutputTensorDim callbacks (The number of tensors is exceeded NNS_TENSOR_SIZE_LIMIT)
  765. */
TEST (tensorFilterOpenvino, getTensorDim3_n)
{
  const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  const gchar fw_name[] = "openvino";
  const GstTensorFilterFramework *fw = nnstreamer_filter_find (fw_name);
  std::string str_test_model;
  GstTensorFilterProperties *prop = NULL;
  gpointer private_data = NULL;
  GstTensorsInfo nns_tensors_info;
  gchar *test_model_xml;
  gchar *test_model_bin;
  gint ret;

  /* supposed to run test in build directory */
  if (root_path == NULL)
    root_path = "..";

  /* Build the paths of the MobileNetV2 .xml/.bin model fixtures */
  test_model_xml = g_build_filename (root_path, "tests", "test_models", "models",
      str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
          .append (TensorFilterOpenvino::extXml)
          .c_str (),
      NULL);

  test_model_bin = g_build_filename (root_path, "tests", "test_models", "models",
      str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
          .append (TensorFilterOpenvino::extBin)
          .c_str (),
      NULL);

  {
    TensorFilterOpenvinoTest tfOvTest (str_test_model.assign (test_model_xml),
        str_test_model.assign (test_model_bin));
    /* A test case where the number of ranks of an output tensor exceeds
     * NNS_TENSOR_RANK_LIMIT */
    std::string name_output = std::string ("output");
    InferenceEngine::SizeVector dims;
    InferenceEngine::OutputsDataMap outDataMap;
    InferenceEngine::Data *data;

    /* NNS_TENSOR_RANK_LIMIT + 1 dimensions, each of size 1 */
    dims = InferenceEngine::SizeVector (NNS_TENSOR_RANK_LIMIT + 1, 1);
    data = new InferenceEngine::Data (name_output, dims,
        InferenceEngine::Precision::FP32, InferenceEngine::ANY);
    /* DataPtr takes ownership of the raw Data allocated above */
    outDataMap[name_output] = InferenceEngine::DataPtr (data);
    /* Inject the over-ranked output map into the filter under test */
    tfOvTest.setOutputsDataMap (outDataMap);
    ret = tfOvTest.loadModel (ACCL_CPU);
    private_data = (gpointer)&tfOvTest;
#ifdef __OPENVINO_CPU_EXT__
    EXPECT_EQ (ret, 0);
#else
    /* Without the OpenVINO CPU extension, loading on the CPU device fails */
    EXPECT_NE (ret, 0);
    EXPECT_EQ (ret, TensorFilterOpenvino::RetENoDev);
#endif

    /* prepare properties */
    prop = g_new0 (GstTensorFilterProperties, 1);
    ASSERT_TRUE (prop != NULL);
    prop->fwname = fw_name;

    /* Test getOutputDimension (): it must reject the over-ranked tensor */
    ASSERT_TRUE (fw->getOutputDimension);
    ret = fw->getOutputDimension (prop, &private_data, &nns_tensors_info);
    EXPECT_NE (ret, 0);

    g_free (prop);
  }

  g_free (test_model_xml);
  g_free (test_model_bin);
}
  826. /**
  827. * @brief A test case for the helper function, convertFromIETypeStr ()
  828. */
  829. TEST (tensorFilterOpenvino, convertFromIETypeStr0)
  830. {
  831. const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  832. const std::vector<std::string> ie_suport_type_strs = {
  833. "I8", "I16", "I32", "U8", "U16", "FP32",
  834. };
  835. const std::vector<tensor_type> nns_support_types = {
  836. _NNS_INT8, _NNS_INT16, _NNS_INT32, _NNS_UINT8, _NNS_UINT16, _NNS_FLOAT32,
  837. };
  838. std::string str_test_model;
  839. gchar *test_model_xml;
  840. gchar *test_model_bin;
  841. /* supposed to run test in build directory */
  842. if (root_path == NULL)
  843. root_path = "..";
  844. test_model_xml = g_build_filename (root_path, "tests", "test_models", "models",
  845. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  846. .append (TensorFilterOpenvino::extXml)
  847. .c_str (),
  848. NULL);
  849. test_model_bin = g_build_filename (root_path, "tests", "test_models", "models",
  850. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  851. .append (TensorFilterOpenvino::extBin)
  852. .c_str (),
  853. NULL);
  854. {
  855. TensorFilterOpenvinoTest tfOvTest (str_test_model.assign (test_model_xml),
  856. str_test_model.assign (test_model_bin));
  857. for (size_t i = 0; i < ie_suport_type_strs.size (); ++i) {
  858. tensor_type ret_type;
  859. ret_type = tfOvTest.convertFromIETypeStr (ie_suport_type_strs[i]);
  860. EXPECT_EQ (ret_type, nns_support_types[i]);
  861. }
  862. }
  863. g_free (test_model_xml);
  864. g_free (test_model_bin);
  865. }
  866. /**
  867. * @brief A negative test case for the helper function, convertFromIETypeStr ()
  868. */
  869. TEST (tensorFilterOpenvino, convertFromIETypeStr0_n)
  870. {
  871. const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  872. const std::vector<std::string> ie_not_suport_type_strs = {
  873. "F64",
  874. };
  875. const std::vector<tensor_type> nns_support_types = {
  876. _NNS_FLOAT64,
  877. };
  878. std::string str_test_model;
  879. gchar *test_model_xml;
  880. gchar *test_model_bin;
  881. /* supposed to run test in build directory */
  882. if (root_path == NULL)
  883. root_path = "..";
  884. test_model_xml = g_build_filename (root_path, "tests", "test_models", "models",
  885. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  886. .append (TensorFilterOpenvino::extXml)
  887. .c_str (),
  888. NULL);
  889. test_model_bin = g_build_filename (root_path, "tests", "test_models", "models",
  890. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  891. .append (TensorFilterOpenvino::extBin)
  892. .c_str (),
  893. NULL);
  894. {
  895. TensorFilterOpenvinoTest tfOvTest (str_test_model.assign (test_model_xml),
  896. str_test_model.assign (test_model_bin));
  897. for (size_t i = 0; i < ie_not_suport_type_strs.size (); ++i) {
  898. tensor_type ret_type;
  899. ret_type = tfOvTest.convertFromIETypeStr (ie_not_suport_type_strs[i]);
  900. EXPECT_NE (ret_type, nns_support_types[i]);
  901. }
  902. }
  903. g_free (test_model_xml);
  904. g_free (test_model_bin);
  905. }
  906. /**
  907. * @brief A negative test case for the helper function, convertFromIETypeStr ()
  908. */
  909. TEST (tensorFilterOpenvino, convertFromIETypeStr1_n)
  910. {
  911. const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  912. const std::string ie_suport_type_str ("Q78");
  913. std::string str_test_model;
  914. gchar *test_model_xml;
  915. gchar *test_model_bin;
  916. /* supposed to run test in build directory */
  917. if (root_path == NULL)
  918. root_path = "..";
  919. test_model_xml = g_build_filename (root_path, "tests", "test_models", "models",
  920. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  921. .append (TensorFilterOpenvino::extXml)
  922. .c_str (),
  923. NULL);
  924. test_model_bin = g_build_filename (root_path, "tests", "test_models", "models",
  925. str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
  926. .append (TensorFilterOpenvino::extBin)
  927. .c_str (),
  928. NULL);
  929. {
  930. TensorFilterOpenvinoTest tfOvTest (str_test_model.assign (test_model_xml),
  931. str_test_model.assign (test_model_bin));
  932. tensor_type ret_type;
  933. ret_type = tfOvTest.convertFromIETypeStr (ie_suport_type_str);
  934. EXPECT_EQ (_NNS_END, ret_type);
  935. }
  936. g_free (test_model_xml);
  937. g_free (test_model_bin);
  938. }
  939. #define TEST_BLOB(prec, nns_type) \
  940. do { \
  941. const InferenceEngine::Precision _prc (prec); \
  942. InferenceEngine::TensorDesc tensorTestDesc (_prc, InferenceEngine::ANY); \
  943. InferenceEngine::SizeVector dims (NNS_TENSOR_RANK_LIMIT); \
  944. TensorFilterOpenvinoTest tfOvTest (str_test_model.assign (test_model_xml), \
  945. str_test_model.assign (test_model_bin)); \
  946. InferenceEngine::Blob::Ptr ret; \
  947. GstTensorMemory mem; \
  948. \
  949. mem.size = gst_tensor_get_element_size (nns_type); \
  950. for (int i = 0; i < NNS_TENSOR_RANK_LIMIT; ++i) { \
  951. dims[i] = MOBINET_V2_IN_DIMS[i]; \
  952. mem.size *= MOBINET_V2_IN_DIMS[i]; \
  953. } \
  954. tensorTestDesc.setDims (dims); \
  955. mem.data = (void *)g_malloc0 (mem.size); \
  956. \
  957. ret = tfOvTest.convertGstTensorMemoryToBlobPtr (tensorTestDesc, &mem, nns_type); \
  958. EXPECT_EQ (mem.size, ret->byteSize ()); \
  959. EXPECT_EQ (gst_tensor_get_element_size (nns_type), ret->element_size ()); \
  960. g_free (mem.data); \
  961. } while (0);
/**
 * @brief A test case for the helper function, convertGstTensorMemoryToBlobPtr ()
 */
TEST (tensorFilterOpenvino, convertGstTensorMemoryToBlobPtr0)
{
  const gchar *root_path = g_getenv ("NNSTREAMER_SOURCE_ROOT_PATH");
  /* NOTE: TEST_BLOB references str_test_model, test_model_xml, and
   * test_model_bin by name; do not rename these locals. */
  std::string str_test_model;
  gchar *test_model_xml = NULL;
  gchar *test_model_bin = NULL;

  /* supposed to run test in build directory */
  if (root_path == NULL)
    root_path = "..";

  test_model_xml = g_build_filename (root_path, "tests", "test_models", "models",
      str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
          .append (TensorFilterOpenvino::extXml)
          .c_str (),
      NULL);
  /* the model files are required fixtures; flag immediately if missing */
  EXPECT_EQ (g_file_test (test_model_xml, G_FILE_TEST_IS_REGULAR), TRUE);

  test_model_bin = g_build_filename (root_path, "tests", "test_models", "models",
      str_test_model.assign (MODEL_BASE_NAME_MOBINET_V2)
          .append (TensorFilterOpenvino::extBin)
          .c_str (),
      NULL);
  EXPECT_EQ (g_file_test (test_model_bin, G_FILE_TEST_IS_REGULAR), TRUE);

  /* one conversion check per supported precision/tensor-type pair */
  TEST_BLOB (InferenceEngine::Precision::FP32, _NNS_FLOAT32);
  TEST_BLOB (InferenceEngine::Precision::U8, _NNS_UINT8);
  TEST_BLOB (InferenceEngine::Precision::U16, _NNS_UINT16);
  TEST_BLOB (InferenceEngine::Precision::I8, _NNS_INT8);
  TEST_BLOB (InferenceEngine::Precision::I16, _NNS_INT16);
  TEST_BLOB (InferenceEngine::Precision::I32, _NNS_INT32);

  g_free (test_model_xml);
  g_free (test_model_bin);
}
  995. /**
  996. * @brief Main function for unit test.
  997. */
  998. int
  999. main (int argc, char **argv)
  1000. {
  1001. int ret = -1;
  1002. try {
  1003. testing::InitGoogleTest (&argc, argv);
  1004. } catch (...) {
  1005. g_warning ("catch 'testing::internal::<unnamed>::ClassUniqueToAlwaysTrue'");
  1006. }
  1007. gst_init (&argc, &argv);
  1008. try {
  1009. ret = RUN_ALL_TESTS ();
  1010. } catch (...) {
  1011. g_warning ("catch `testing::internal::GoogleTestFailureException`");
  1012. }
  1013. return ret;
  1014. }