// test_misc.cpp
  1. // This file is part of OpenCV project.
  2. // It is subject to the license terms in the LICENSE file found in the top-level directory
  3. // of this distribution and at http://opencv.org/license.html.
  4. //
  5. // Copyright (C) 2017, Intel Corporation, all rights reserved.
  6. // Third party copyrights are property of their respective owners.
  7. #include "test_precomp.hpp"
  8. #include <opencv2/core/ocl.hpp>
  9. #include <opencv2/core/opencl/ocl_defs.hpp>
  10. #include <opencv2/dnn/layer.details.hpp> // CV_DNN_REGISTER_LAYER_CLASS
  11. namespace opencv_test { namespace {
  12. TEST(blobFromImage_4ch, Regression)
  13. {
  14. Mat ch[4];
  15. for(int i = 0; i < 4; i++)
  16. ch[i] = Mat::ones(10, 10, CV_8U)*i;
  17. Mat img;
  18. merge(ch, 4, img);
  19. Mat blob = dnn::blobFromImage(img, 1., Size(), Scalar(), false, false);
  20. for(int i = 0; i < 4; i++)
  21. {
  22. ch[i] = Mat(img.rows, img.cols, CV_32F, blob.ptr(0, i));
  23. ASSERT_DOUBLE_EQ(cvtest::norm(ch[i], cv::NORM_INF), i);
  24. }
  25. }
  26. TEST(blobFromImage, allocated)
  27. {
  28. int size[] = {1, 3, 4, 5};
  29. Mat img(size[2], size[3], CV_32FC(size[1]));
  30. Mat blob(4, size, CV_32F);
  31. void* blobData = blob.data;
  32. dnn::blobFromImage(img, blob, 1.0 / 255, Size(), Scalar(), false, false);
  33. ASSERT_EQ(blobData, blob.data);
  34. }
// Round-trip check: images -> blobFromImages -> imagesFromBlob must
// reproduce the original images exactly.
TEST(imagesFromBlob, Regression)
{
    int nbOfImages = 8;
    std::vector<cv::Mat> inputImgs(nbOfImages);
    for (int i = 0; i < nbOfImages; i++)
    {
        // Fill each image with random values in [0, 1).
        inputImgs[i] = cv::Mat::ones(100, 100, CV_32FC3);
        cv::randu(inputImgs[i], cv::Scalar::all(0), cv::Scalar::all(1));
    }

    cv::Mat blob = cv::dnn::blobFromImages(inputImgs, 1., cv::Size(), cv::Scalar(), false, false);
    std::vector<cv::Mat> outputImgs;
    cv::dnn::imagesFromBlob(blob, outputImgs);

    for (int i = 0; i < nbOfImages; i++)
    {
        // Exact reconstruction is expected (scale=1, no mean, no resize).
        EXPECT_EQ(0, cvtest::norm(inputImgs[i], outputImgs[i], NORM_INF))
            << "i=" << i
            << " inputImgs[i]=" << inputImgs[i].size
            << " outputImgs[i]=" << outputImgs[i].size;
    }
}
// readNet must auto-detect the framework from the given files for Caffe,
// Torch, Darknet and TensorFlow models, regardless of argument order.
TEST(readNet, Regression)
{
    // Caffe: prototxt first.
    Net net = readNet(findDataFile("dnn/squeezenet_v1.1.prototxt"),
                      findDataFile("dnn/squeezenet_v1.1.caffemodel", false));
    EXPECT_FALSE(net.empty());

    // Caffe: caffemodel first (order-independent detection).
    net = readNet(findDataFile("dnn/opencv_face_detector.caffemodel", false),
                  findDataFile("dnn/opencv_face_detector.prototxt"));
    EXPECT_FALSE(net.empty());

    // Torch: single .t7 file.
    net = readNet(findDataFile("dnn/openface_nn4.small2.v1.t7", false));
    EXPECT_FALSE(net.empty());

    // Darknet: cfg + weights.
    net = readNet(findDataFile("dnn/tiny-yolo-voc.cfg"),
                  findDataFile("dnn/tiny-yolo-voc.weights", false));
    EXPECT_FALSE(net.empty());

    // TensorFlow: pbtxt + pb.
    net = readNet(findDataFile("dnn/ssd_mobilenet_v1_coco.pbtxt"),
                  findDataFile("dnn/ssd_mobilenet_v1_coco.pb", false));
    EXPECT_FALSE(net.empty());
}
// Regression for https://github.com/opencv/opencv/issues/16618:
// calling forward() on a loaded net without setInput() must throw
// cv::Exception instead of crashing, and leave the result empty.
TEST(readNet, do_not_call_setInput)
{
    // 1. load network
    const string proto = findDataFile("dnn/squeezenet_v1.1.prototxt");
    const string model = findDataFile("dnn/squeezenet_v1.1.caffemodel", false);
    Net net = readNetFromCaffe(proto, model);

    // 2. mistake: no inputs are specified through .setInput()

    // 3. try inference
    Mat res;
    EXPECT_THROW(
    {
        res = net.forward(); // no inputs after loading => should fail
    }, cv::Exception);
    EXPECT_TRUE(res.empty()) << res.size;
}
// Regression for https://github.com/opencv/opencv/issues/18392:
// forward() on an empty (layer-less) network must throw, not crash,
// even after a blob has been set as input.
TEST(Net, empty_forward_18392)
{
    cv::dnn::Net net;
    Mat image(Size(512, 512), CV_8UC3, Scalar::all(0));
    Mat inputBlob = cv::dnn::blobFromImage(image, 1.0, Size(512, 512), Scalar(0,0,0), true, false);
    net.setInput(inputBlob);
    EXPECT_ANY_THROW(Mat output = net.forward());
}
  95. #ifdef HAVE_INF_ENGINE
// Shared helper for the Inference Engine backends: forward() without a prior
// setInput() must throw cv::Exception and leave the result Mat empty.
static
void test_readNet_IE_do_not_call_setInput(Backend backendId)
{
    const Target targetId = DNN_TARGET_CPU;

    const std::string& model = findDataFile("dnn/layers/layer_convolution.bin");
    const std::string& proto = findDataFile("dnn/layers/layer_convolution.xml");

    // Select the requested IE API flavour before loading the net.
    if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_API);
    else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
    else
        FAIL() << "Unknown backendId";

    Net net = readNet(model, proto);
    net.setPreferableBackend(backendId);
    net.setPreferableTarget(targetId);

    // 2. mistake: no inputs are specified through .setInput()

    // 3. try inference
    Mat res;
    EXPECT_THROW(
    {
        res = net.forward(); // no inputs after loading => should fail
    }, cv::Exception);
    EXPECT_TRUE(res.empty()) << res.size;
}
  120. #ifdef HAVE_DNN_IE_NN_BUILDER_2019
// NN Builder API flavour of the "forward() without setInput()" check.
TEST(readNet, do_not_call_setInput_IE_NN_BUILDER_2019)
{
    test_readNet_IE_do_not_call_setInput(DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019);
}
  125. #endif
  126. #ifdef HAVE_DNN_NGRAPH
// nGraph API flavour of the "forward() without setInput()" check.
TEST(readNet, do_not_call_setInput_IE_NGRAPH)
{
    test_readNet_IE_do_not_call_setInput(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
}
  131. #endif
  132. #endif // HAVE_INF_ENGINE
typedef testing::TestWithParam<tuple<Backend, Target> > dump;
// Net::dump() must produce non-empty output both before and after forward().
TEST_P(dump, Regression)
{
    const int backend = get<0>(GetParam());
    const int target = get<1>(GetParam());
    Net net = readNet(findDataFile("dnn/squeezenet_v1.1.prototxt"),
                      findDataFile("dnn/squeezenet_v1.1.caffemodel", false));

    // Sanity check of the graph structure that dump() will render.
    ASSERT_EQ(net.getLayerInputs(net.getLayerId("fire2/concat")).size(), 2);

    int size[] = {1, 3, 227, 227};
    Mat input = cv::Mat::ones(4, size, CV_32F);
    net.setInput(input);
    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);
    EXPECT_FALSE(net.dump().empty());
    net.forward();
    EXPECT_FALSE(net.dump().empty());
}
INSTANTIATE_TEST_CASE_P(/**/, dump, dnnBackendsAndTargets());
// Minimal custom layer that fills its output with the constant 1.
// Used to exercise (re)registration in LayerFactory below.
class FirstCustomLayer CV_FINAL : public Layer
{
public:
    FirstCustomLayer(const LayerParams &params) : Layer(params) {}

    // Factory callback registered via CV_DNN_REGISTER_LAYER_CLASS.
    static Ptr<Layer> create(LayerParams& params)
    {
        return Ptr<Layer>(new FirstCustomLayer(params));
    }

    void forward(InputArrayOfArrays, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays) CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        CV_TRACE_ARG_VALUE(name, "name", name.c_str());

        std::vector<Mat> outputs;
        outputs_arr.getMatVector(outputs);
        outputs[0].setTo(1);
    }
};
// Companion to FirstCustomLayer: fills its output with the constant 2,
// so the test below can tell which registration is currently active.
class SecondCustomLayer CV_FINAL : public Layer
{
public:
    SecondCustomLayer(const LayerParams &params) : Layer(params) {}

    // Factory callback registered via CV_DNN_REGISTER_LAYER_CLASS.
    static Ptr<Layer> create(LayerParams& params)
    {
        return Ptr<Layer>(new SecondCustomLayer(params));
    }

    void forward(InputArrayOfArrays, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays) CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        CV_TRACE_ARG_VALUE(name, "name", name.c_str());

        std::vector<Mat> outputs;
        outputs_arr.getMatVector(outputs);
        outputs[0].setTo(2);
    }
};
// Registering the same layer type twice must shadow the first registration,
// and unregistering must restore the previous one (stack-like behavior):
// iteration 0 -> FirstCustomLayer (output 1), 1 -> SecondCustomLayer (2),
// 2 -> after one unregister, back to FirstCustomLayer (1).
TEST(LayerFactory, custom_layers)
{
    LayerParams lp;
    lp.name = "name";
    lp.type = "CustomType";

    Mat inp(1, 1, CV_32FC1);
    for (int i = 0; i < 3; ++i)
    {
        if (i == 0)      { CV_DNN_REGISTER_LAYER_CLASS(CustomType, FirstCustomLayer); }
        else if (i == 1) { CV_DNN_REGISTER_LAYER_CLASS(CustomType, SecondCustomLayer); }
        else if (i == 2) { LayerFactory::unregisterLayer("CustomType"); }

        Net net;
        net.addLayerToPrev(lp.name, lp.type, lp);
        net.setInput(inp);
        net.setPreferableBackend(DNN_BACKEND_OPENCV);
        Mat output = net.forward();

        if (i == 0)      { EXPECT_EQ(output.at<float>(0), 1); }
        else if (i == 1) { EXPECT_EQ(output.at<float>(0), 2); }
        else if (i == 2) { EXPECT_EQ(output.at<float>(0), 1); }
    }
    // Remove the remaining registration so other tests see a clean factory.
    LayerFactory::unregisterLayer("CustomType");
}
typedef testing::TestWithParam<tuple<float, Vec3f, int, tuple<Backend, Target> > > setInput;
// setInput(blob, name, scale, mean) must apply the same normalization as
// blobFromImage(img, scale, ..., mean, ...) does up front.
TEST_P(setInput, normalization)
{
    const float kScale = get<0>(GetParam());
    const Scalar kMean = get<1>(GetParam());
    const int dtype = get<2>(GetParam());
    const int backend = get<0>(get<3>(GetParam()));
    const int target = get<1>(get<3>(GetParam()));
    const bool kSwapRB = true;

    if (backend == DNN_BACKEND_CUDA)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA);
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16 && dtype != CV_32F)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    if (backend == DNN_BACKEND_VKCOM && dtype != CV_32F)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN);

    Mat inp(5, 5, CV_8UC3);
    randu(inp, 0, 255);
    // Reference: normalization performed by blobFromImage itself.
    Mat ref = blobFromImage(inp, kScale, Size(), kMean, kSwapRB, /*crop*/false);

    LayerParams lp;
    Net net;
    net.addLayerToPrev("testLayer", "Identity", lp);
    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // Actual: raw blob, with scale/mean deferred to setInput.
    Mat blob = blobFromImage(inp, 1.0, Size(), Scalar(), kSwapRB, /*crop*/false, dtype);
    ASSERT_EQ(blob.type(), dtype);
    net.setInput(blob, "", kScale, kMean);
    Mat out = net.forward();
    ASSERT_EQ(out.type(), CV_32F);
    normAssert(ref, out, "", 4e-4, 1e-3);
}
INSTANTIATE_TEST_CASE_P(/**/, setInput, Combine(
    Values(1.0f, 1.0 / 127.5),
    Values(Vec3f(), Vec3f(50, 50, 50), Vec3f(10, 50, 140)),
    Values(CV_32F, CV_8U),
    dnnBackendsAndTargets()
));
// Layer implementing only the deprecated raw-pointer forward() overload.
// Exercises the backward-compatibility path inside the DNN engine.
class CustomLayerWithDeprecatedForward CV_FINAL : public Layer
{
public:
    CustomLayerWithDeprecatedForward(const LayerParams &params) : Layer(params) {}

    // Factory callback registered via CV_DNN_REGISTER_LAYER_CLASS.
    static Ptr<Layer> create(LayerParams& params)
    {
        return Ptr<Layer>(new CustomLayerWithDeprecatedForward(params));
    }

    // Deprecated signature: adds 0.5 to the FP32 input.
    virtual void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals) CV_OVERRIDE
    {
        CV_Assert_N(inputs[0]->depth() == CV_32F, outputs[0].depth() == CV_32F);
        cv::add(*inputs[0], 0.5f, outputs[0]);
    }
};
// Layer providing the modern InputArrayOfArrays forward() with an OpenCL
// branch, plus the deprecated raw-pointer overload used as the CPU fallback.
class CustomLayerWithDeprecatedForwardAndFallback CV_FINAL : public Layer
{
public:
    CustomLayerWithDeprecatedForwardAndFallback(const LayerParams &params) : Layer(params) {}

    // Factory callback registered via CV_DNN_REGISTER_LAYER_CLASS.
    static Ptr<Layer> create(LayerParams& params)
    {
        return Ptr<Layer>(new CustomLayerWithDeprecatedForwardAndFallback(params));
    }

    void forward(InputArrayOfArrays inputs, OutputArrayOfArrays outputs, OutputArrayOfArrays internals) CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        CV_TRACE_ARG_VALUE(name, "name", name.c_str());

        // Try the OpenCL path on OpenCL targets; if it declines (returns
        // false) or is unavailable, fall through to the deprecated overload.
        CV_OCL_RUN(preferableTarget == DNN_TARGET_OPENCL || preferableTarget == DNN_TARGET_OPENCL_FP16,
                   forward_ocl(inputs, outputs, internals));

        Layer::forward_fallback(inputs, outputs, internals);
    }

    // Deprecated signature: adds 0.5 to the FP32 input.
    virtual void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals) CV_OVERRIDE
    {
        CV_Assert_N(inputs[0]->depth() == CV_32F, outputs[0].depth() == CV_32F);
        cv::add(*inputs[0], 0.5f, outputs[0]);
    }

#ifdef HAVE_OPENCL
    bool forward_ocl(InputArrayOfArrays inputs_arr, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays internals_arr)
    {
        if (inputs_arr.depth() != CV_32F)
            return false;  // decline: let the fallback handle non-FP32 data

        std::vector<UMat> inputs;
        std::vector<UMat> outputs;
        inputs_arr.getUMatVector(inputs);
        outputs_arr.getUMatVector(outputs);
        cv::add(inputs[0], 0.5f, outputs[0]);
        return true;
    }
#endif
};
typedef testing::TestWithParam<tuple<Backend, Target> > DeprecatedForward;
// A custom layer implementing only the deprecated forward() overload must
// still produce correct results on all backends/targets.
TEST_P(DeprecatedForward, CustomLayer)
{
    const int backend = get<0>(GetParam());
    const int target = get<1>(GetParam());

    Mat inp(5, 5, CV_32FC1);
    randu(inp, -1.0f, 1.0f);
    inp = blobFromImage(inp);

    CV_DNN_REGISTER_LAYER_CLASS(CustomType, CustomLayerWithDeprecatedForward);
    try
    {
        LayerParams lp;
        Net net;
        net.addLayerToPrev("testLayer", "CustomType", lp);
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        net.setInput(inp);
        Mat out = net.forward();
        normAssert(out, inp + 0.5f, "", 2e-4, 7e-4);
    }
    catch (...)
    {
        // Always unregister, even on failure, so later tests see a clean factory.
        LayerFactory::unregisterLayer("CustomType");
        throw;
    }
    LayerFactory::unregisterLayer("CustomType");
}
// Same as the CustomLayer case, but the layer also offers a modern forward()
// with an OpenCL branch that falls back to the deprecated implementation.
TEST_P(DeprecatedForward, CustomLayerWithFallback)
{
    const int backend = get<0>(GetParam());
    const int target = get<1>(GetParam());

    Mat inp(5, 5, CV_32FC1);
    randu(inp, -1.0f, 1.0f);
    inp = blobFromImage(inp);

    CV_DNN_REGISTER_LAYER_CLASS(CustomType, CustomLayerWithDeprecatedForwardAndFallback);
    try
    {
        LayerParams lp;
        Net net;
        net.addLayerToPrev("testLayer", "CustomType", lp);
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        net.setInput(inp);
        Mat out = net.forward();
        normAssert(out, inp + 0.5f, "", 2e-4, 7e-4);
    }
    catch (...)
    {
        // Always unregister, even on failure, so later tests see a clean factory.
        LayerFactory::unregisterLayer("CustomType");
        throw;
    }
    LayerFactory::unregisterLayer("CustomType");
}
INSTANTIATE_TEST_CASE_P(/**/, DeprecatedForward, dnnBackendsAndTargets());
// forward(outBlobs, outNames) must return all outputs of a multi-output
// layer: a Caffe Slice layer with two tops yields two blobs.
TEST(Net, forwardAndRetrieve)
{
    std::string prototxt =
        "input: \"data\"\n"
        "layer {\n"
        " name: \"testLayer\"\n"
        " type: \"Slice\"\n"
        " bottom: \"data\"\n"
        " top: \"firstCopy\"\n"
        " top: \"secondCopy\"\n"
        " slice_param {\n"
        " axis: 0\n"
        " slice_point: 2\n"
        " }\n"
        "}";
    Net net = readNetFromCaffe(&prototxt[0], prototxt.size());
    net.setPreferableBackend(DNN_BACKEND_OPENCV);

    Mat inp(4, 5, CV_32F);
    randu(inp, -1, 1);
    net.setInput(inp);

    std::vector<String> outNames;
    outNames.push_back("testLayer");
    std::vector<std::vector<Mat> > outBlobs;
    net.forward(outBlobs, outNames);

    // One requested layer with two output blobs (split at row 2).
    EXPECT_EQ(outBlobs.size(), 1);
    EXPECT_EQ(outBlobs[0].size(), 2);
    normAssert(outBlobs[0][0], inp.rowRange(0, 2), "first part");
    normAssert(outBlobs[0][1], inp.rowRange(2, 4), "second part");
}
  375. #ifdef HAVE_INF_ENGINE
// Upper bound on how long a single async result may take to arrive.
static const std::chrono::milliseconds async_timeout(10000);

// This test runs network in synchronous mode for different inputs and then
// runs the same model asynchronously for the same inputs.
typedef testing::TestWithParam<tuple<int, tuple<Backend, Target> > > Async;
TEST_P(Async, model_optimizer_pipeline_set_and_forward_single)
{
    const int dtype = get<0>(GetParam());
    const Backend backendId = get<0>(get<1>(GetParam()));
    const Target targetId = get<1>(get<1>(GetParam()));

    if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && targetId == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backendId != DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && backendId != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        throw SkipTestException("No support for async forward");

    const std::string& model = findDataFile("dnn/layers/layer_convolution.bin");
    const std::string& proto = findDataFile("dnn/layers/layer_convolution.xml");

    // Select the requested IE API flavour before loading the nets.
    if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_API);
    else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
    else
        FAIL() << "Unknown backendId";

    Net netSync = readNet(model, proto);
    netSync.setPreferableBackend(backendId);
    netSync.setPreferableTarget(targetId);

    Net netAsync = readNet(model, proto);
    netAsync.setPreferableBackend(backendId);
    netAsync.setPreferableTarget(targetId);

    // Generate inputs.
    const int numInputs = 10;
    std::vector<Mat> inputs(numInputs);
    int blobSize[] = {2, 6, 75, 113};
    for (int i = 0; i < numInputs; ++i)
    {
        inputs[i].create(4, &blobSize[0], dtype);
        randu(inputs[i], 0, 255);
    }

    // Run synchronously.
    std::vector<Mat> refs(numInputs);
    for (int i = 0; i < numInputs; ++i)
    {
        netSync.setInput(inputs[i]);
        refs[i] = netSync.forward().clone();  // clone: forward() returns an internal buffer
    }

    // Run asynchronously. To make test more robust, process inputs in the reversed order.
    for (int i = numInputs - 1; i >= 0; --i)
    {
        netAsync.setInput(inputs[i]);

        AsyncArray out = netAsync.forwardAsync();
        ASSERT_TRUE(out.valid());
        Mat result;
        EXPECT_TRUE(out.get(result, async_timeout));
        normAssert(refs[i], result, format("Index: %d", i).c_str(), 0, 0);
    }
}
// Like ..._single, but all async requests are issued first and their
// results are collected afterwards (tests request queuing).
TEST_P(Async, model_optimizer_pipeline_set_and_forward_all)
{
    const int dtype = get<0>(GetParam());
    const Backend backendId = get<0>(get<1>(GetParam()));
    const Target targetId = get<1>(get<1>(GetParam()));

    if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && targetId == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backendId != DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && backendId != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        throw SkipTestException("No support for async forward");

    const std::string& model = findDataFile("dnn/layers/layer_convolution.bin");
    const std::string& proto = findDataFile("dnn/layers/layer_convolution.xml");

    // Select the requested IE API flavour before loading the nets.
    if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_API);
    else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
    else
        FAIL() << "Unknown backendId";

    Net netSync = readNet(model, proto);
    netSync.setPreferableBackend(backendId);
    netSync.setPreferableTarget(targetId);

    Net netAsync = readNet(model, proto);
    netAsync.setPreferableBackend(backendId);
    netAsync.setPreferableTarget(targetId);

    // Generate inputs.
    const int numInputs = 10;
    std::vector<Mat> inputs(numInputs);
    int blobSize[] = {2, 6, 75, 113};
    for (int i = 0; i < numInputs; ++i)
    {
        inputs[i].create(4, &blobSize[0], dtype);
        randu(inputs[i], 0, 255);
    }

    // Run synchronously.
    std::vector<Mat> refs(numInputs);
    for (int i = 0; i < numInputs; ++i)
    {
        netSync.setInput(inputs[i]);
        refs[i] = netSync.forward().clone();  // clone: forward() returns an internal buffer
    }

    // Run asynchronously. To make test more robust, process inputs in the reversed order.
    std::vector<AsyncArray> outs(numInputs);
    for (int i = numInputs - 1; i >= 0; --i)
    {
        netAsync.setInput(inputs[i]);
        outs[i] = netAsync.forwardAsync();
    }

    // Collect and verify the results.
    for (int i = numInputs - 1; i >= 0; --i)
    {
        ASSERT_TRUE(outs[i].valid());
        Mat result;
        EXPECT_TRUE(outs[i].get(result, async_timeout));
        normAssert(refs[i], result, format("Index: %d", i).c_str(), 0, 0);
    }
}
  484. TEST_P(Async, create_layer_pipeline_set_and_forward_all)
  485. {
  486. const int dtype = get<0>(GetParam());
  487. const Backend backendId = get<0>(get<1>(GetParam()));
  488. const Target targetId = get<1>(get<1>(GetParam()));
  489. if (backendId != DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && backendId != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
  490. throw SkipTestException("No support for async forward");
  491. // Exception: Default implementation fallbacks in asynchronous mode
  492. if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && dtype == CV_8U)
  493. applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
  494. if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
  495. setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_API);
  496. else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
  497. setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
  498. else
  499. FAIL() << "Unknown backendId";
  500. Net netSync;
  501. Net netAsync;
  502. {
  503. int inChannels = 4;
  504. int outChannels = 12;
  505. int group = 3;
  506. Size inSize(113, 75);
  507. Size kernel(4, 5);
  508. Size stride(2, 3);
  509. Size pad(0, 1);
  510. Size dilation(1, 1);
  511. bool hasBias = true;
  512. int sz[] = {outChannels, inChannels / group, kernel.height, kernel.width};
  513. Mat weights(4, &sz[0], CV_32F);
  514. randu(weights, -1.0f, 1.0f);
  515. LayerParams lp;
  516. lp.set("kernel_w", kernel.width);
  517. lp.set("kernel_h", kernel.height);
  518. lp.set("pad_w", pad.width);
  519. lp.set("pad_h", pad.height);
  520. lp.set("stride_w", stride.width);
  521. lp.set("stride_h", stride.height);
  522. lp.set("dilation_w", dilation.width);
  523. lp.set("dilation_h", dilation.height);
  524. lp.set("num_output", outChannels);
  525. lp.set("group", group);
  526. lp.set("bias_term", hasBias);
  527. lp.type = "Convolution";
  528. lp.name = "testLayer";
  529. lp.blobs.push_back(weights);
  530. if (hasBias)
  531. {
  532. Mat bias(1, outChannels, CV_32F);
  533. randu(bias, -1.0f, 1.0f);
  534. lp.blobs.push_back(bias);
  535. }
  536. int inpSz[] = {1, inChannels, inSize.height, inSize.width};
  537. Mat input(4, &inpSz[0], CV_32F);
  538. netSync.addLayerToPrev(lp.name, lp.type, lp);
  539. netAsync.addLayerToPrev(lp.name, lp.type, lp);
  540. }
  541. netSync.setPreferableBackend(backendId);
  542. netSync.setPreferableTarget(targetId);
  543. netAsync.setPreferableBackend(backendId);
  544. netAsync.setPreferableTarget(targetId);
  545. // Generate inputs.
  546. const int numInputs = 10;
  547. std::vector<Mat> inputs(numInputs);
  548. int blobSize[] = {1, 4, 75, 113};
  549. for (int i = 0; i < numInputs; ++i)
  550. {
  551. inputs[i].create(4, &blobSize[0], dtype);
  552. randu(inputs[i], 0, 255);
  553. }
  554. // Run synchronously.
  555. std::vector<Mat> refs(numInputs);
  556. for (int i = 0; i < numInputs; ++i)
  557. {
  558. netSync.setInput(inputs[i]);
  559. refs[i] = netSync.forward().clone();
  560. }
  561. // Run asynchronously. To make test more robust, process inputs in the reversed order.
  562. std::vector<AsyncArray> outs(numInputs);
  563. for (int i = numInputs - 1; i >= 0; --i)
  564. {
  565. netAsync.setInput(inputs[i]);
  566. outs[i] = netAsync.forwardAsync();
  567. }
  568. for (int i = numInputs - 1; i >= 0; --i)
  569. {
  570. ASSERT_TRUE(outs[i].valid());
  571. Mat result;
  572. EXPECT_TRUE(outs[i].get(result, async_timeout));
  573. normAssert(refs[i], result, format("Index: %d", i).c_str(), 0, 0);
  574. }
  575. }
  576. INSTANTIATE_TEST_CASE_P(/**/, Async, Combine(
  577. Values(CV_32F, CV_8U),
  578. dnnBackendsAndTargetsIE()
  579. ));
  580. typedef testing::TestWithParam<tuple<Backend, Target> > Test_Model_Optimizer;
  581. TEST_P(Test_Model_Optimizer, forward_two_nets)
  582. {
  583. const Backend backendId = get<0>(GetParam());
  584. const Target targetId = get<1>(GetParam());
  585. if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && targetId == DNN_TARGET_MYRIAD)
  586. applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
  587. const std::string& model = findDataFile("dnn/layers/layer_convolution.bin");
  588. const std::string& proto = findDataFile("dnn/layers/layer_convolution.xml");
  589. if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
  590. setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_API);
  591. else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
  592. setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
  593. else
  594. FAIL() << "Unknown backendId";
  595. Net net0 = readNet(model, proto);
  596. net0.setPreferableTarget(targetId);
  597. Net net1 = readNet(model, proto);
  598. net1.setPreferableTarget(targetId);
  599. // Generate inputs.
  600. int blobSize[] = {2, 6, 75, 113};
  601. Mat input(4, &blobSize[0], CV_32F);
  602. randu(input, 0, 255);
  603. net0.setInput(input);
  604. Mat ref0 = net0.forward().clone();
  605. net1.setInput(input);
  606. Mat ref1 = net1.forward();
  607. net0.setInput(input);
  608. Mat ref2 = net0.forward();
  609. normAssert(ref0, ref2, 0, 0);
  610. }
// Reading an IR model from in-memory buffers must give the same result as
// reading it from the corresponding files.
TEST_P(Test_Model_Optimizer, readFromBuffer)
{
    const Backend backendId = get<0>(GetParam());
    const Target targetId = get<1>(GetParam());

    if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && targetId == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backendId != DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && backendId != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        throw SkipTestException("No support for async forward");

    const std::string& weightsFile = findDataFile("dnn/layers/layer_convolution.bin");
    const std::string& modelFile = findDataFile("dnn/layers/layer_convolution.xml");

    // Select the requested IE API flavour before loading the nets.
    if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_API);
    else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
    else
        FAIL() << "Unknown backendId";

    // Reference: load the model from files.
    Net net1 = readNetFromModelOptimizer(modelFile, weightsFile);
    net1.setPreferableBackend(backendId);
    net1.setPreferableTarget(targetId);

    // Actual: load the same model from memory buffers.
    std::vector<char> modelConfig;
    readFileContent(modelFile, modelConfig);
    std::vector<char> weights;
    readFileContent(weightsFile, weights);

    Net net2 = readNetFromModelOptimizer(
            (const uchar*)modelConfig.data(), modelConfig.size(),
            (const uchar*)weights.data(), weights.size()
    );
    net2.setPreferableBackend(backendId);
    net2.setPreferableTarget(targetId);

    int blobSize[] = {2, 6, 75, 113};
    Mat input(4, &blobSize[0], CV_32F);
    randu(input, 0, 255);

    Mat ref, actual;
    {
        net1.setInput(input);
        ref = net1.forward();
    }
    {
        net2.setInput(input);
        actual = net2.forward();
    }
    // Results must be bit-exact.
    normAssert(ref, actual, "", 0, 0);
}
  654. TEST_P(Test_Model_Optimizer, flexible_inputs)
  655. {
  656. const Backend backendId = get<0>(GetParam());
  657. const Target targetId = get<1>(GetParam());
  658. if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && targetId == DNN_TARGET_MYRIAD)
  659. applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
  660. const std::string& model = findDataFile("dnn/layers/layer_convolution.bin");
  661. const std::string& proto = findDataFile("dnn/layers/layer_convolution.xml");
  662. if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
  663. setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_API);
  664. else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
  665. setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
  666. else
  667. FAIL() << "Unknown backendId";
  668. Net net0 = readNet(model, proto);
  669. net0.setPreferableTarget(targetId);
  670. Net net1 = readNet(model, proto);
  671. net1.setPreferableTarget(targetId);
  672. // Generate inputs.
  673. int blobSize0[] = {2, 6, 75, 113};
  674. Mat input0(4, &blobSize0[0], CV_32F);
  675. randu(input0, 0, 255);
  676. net0.setInput(input0);
  677. Mat ref = net0.forward().clone();
  678. int blobSize1[] = {1, 6, 10, 9};
  679. Mat input1(4, &blobSize1[0], CV_32F);
  680. randu(input1, 0, 255);
  681. net1.setInput(input1);
  682. Mat out = net1.forward();
  683. EXPECT_NE(out.size, ref.size);
  684. net1.setInput(input0);
  685. out = net1.forward();
  686. normAssert(ref, out, 0, 0);
  687. }
  688. INSTANTIATE_TEST_CASE_P(/**/, Test_Model_Optimizer,
  689. dnnBackendsAndTargetsIE()
  690. );
  691. #endif // HAVE_INF_ENGINE
typedef testing::TestWithParam<tuple<MatDepth, MatDepth, tuple<Backend, Target> > > Test_two_inputs;
// Eltwise "sum" over two network inputs with per-input scale factors set via
// setInput(); the result must match addWeighted within backend tolerances.
TEST_P(Test_two_inputs, basic)
{
    static const float kScale = 0.5f;
    static const float kScaleInv = 1.0f / kScale;

    Backend backendId = get<0>(get<2>(GetParam()));
    Target targetId = get<1>(get<2>(GetParam()));

    int type1 = get<0>(GetParam());
    int type2 = get<1>(GetParam());

    if (backendId == DNN_BACKEND_VKCOM && !(type1 == CV_32F && type2 == CV_32F))
        applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN);

    Net net;
    LayerParams lp;
    lp.type = "Eltwise";
    lp.name = "testLayer";
    lp.set("operation", "sum");
    int eltwiseId = net.addLayerToPrev(lp.name, lp.type, lp); // connect to a first input
    net.connect(0, 1, eltwiseId, 1); // connect to a second input

    int inpSize[] = {1, 2, 3, 4};
    Mat firstInp(4, &inpSize[0], type1);
    Mat secondInp(4, &inpSize[0], type2);
    randu(firstInp, 0, 100);
    randu(secondInp, 0, 100);

#ifndef CV_CXX11
    std::vector<String> input_names;
    input_names.push_back("data");
    input_names.push_back("second_input");
    net.setInputsNames(input_names);
#else
    net.setInputsNames({"data", "second_input"});
#endif
    // Per-input normalization scales are applied by setInput.
    net.setInput(firstInp, "data", kScale);
    net.setInput(secondInp, "second_input", kScaleInv);
    net.setPreferableBackend(backendId);
    net.setPreferableTarget(targetId);

    Mat out = net.forward();

    Mat ref;
    addWeighted(firstInp, kScale, secondInp, kScaleInv, 0, ref, CV_32F);

    // Looser tolerances for reduced-precision targets.
    double l1 = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 0.06 : 1e-6;
    double lInf = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 0.3 : 1e-5;
    normAssert(out, ref, "", l1, lInf);

    // Extra diagnostics when debugging or on failure.
    if (cvtest::debugLevel > 0 || HasFailure())
    {
        std::cout << "input1 scale=" << kScale << " input2 scale=" << kScaleInv << std::endl;
        std::cout << "input1: " << firstInp.size << " " << firstInp.reshape(1, 1) << std::endl;
        std::cout << "input2: " << secondInp.size << " " << secondInp.reshape(1, 1) << std::endl;
        std::cout << "ref: " << ref.reshape(1, 1) << std::endl;
        std::cout << "out: " << out.reshape(1, 1) << std::endl;
    }
}
INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_two_inputs, Combine(
    Values(CV_32F, CV_8U),
    Values(CV_32F, CV_8U),
    dnnBackendsAndTargets()
));
  747. }} // namespace