test_darknet_importer.cpp

/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                          License Agreement
//               For Open Source Computer Vision Library
//                       (3-clause BSD License)
//
// Copyright (C) 2017, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * Neither the names of the copyright holders nor the names of the contributors
//     may be used to endorse or promote products derived from this software
//     without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall copyright holders or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "test_precomp.hpp"
#include "npy_blob.hpp"
#include <opencv2/dnn/shape_utils.hpp>

namespace opencv_test { namespace {

template<typename TString>
static std::string _tf(TString filename)
{
    return (getOpenCVExtraDir() + "/dnn/") + filename;
}
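
// The read_* tests below verify that Darknet .cfg files (and, in the stream variant,
// the matching .weights loaded both from file paths and from in-memory buffers)
// import into a non-empty cv::dnn::Net.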
TEST(Test_Darknet, read_tiny_yolo_voc)
{
    Net net = readNetFromDarknet(_tf("tiny-yolo-voc.cfg"));
    ASSERT_FALSE(net.empty());
}

TEST(Test_Darknet, read_yolo_voc)
{
    Net net = readNetFromDarknet(_tf("yolo-voc.cfg"));
    ASSERT_FALSE(net.empty());
}

TEST(Test_Darknet, read_yolo_voc_stream)
{
    applyTestTag(CV_TEST_TAG_MEMORY_1GB);
    Mat ref;
    Mat sample = imread(_tf("dog416.png"));
    Mat inp = blobFromImage(sample, 1.0/255, Size(416, 416), Scalar(), true, false);
    const std::string cfgFile = findDataFile("dnn/yolo-voc.cfg");
    const std::string weightsFile = findDataFile("dnn/yolo-voc.weights", false);
    // Import by paths.
    {
        Net net = readNetFromDarknet(cfgFile, weightsFile);
        net.setInput(inp);
        net.setPreferableBackend(DNN_BACKEND_OPENCV);
        ref = net.forward();
    }
    // Import from bytes array.
    {
        std::vector<char> cfg, weights;
        readFileContent(cfgFile, cfg);
        readFileContent(weightsFile, weights);
        Net net = readNetFromDarknet(cfg.data(), cfg.size(), weights.data(), weights.size());
        net.setInput(inp);
        net.setPreferableBackend(DNN_BACKEND_OPENCV);
        Mat out = net.forward();
        normAssert(ref, out);
    }
}
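
// Fixture for single-layer regression tests. Each case loads dnn/darknet/<name>.cfg
// (plus <name>.weights when hasWeights is set), runs a forward pass on <name>_in.npy
// and compares the result with <name>_out.npy. If the reference input has batch
// size 1, the network is also re-run on the input duplicated along the batch axis
// to check that both samples yield identical outputs.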
class Test_Darknet_layers : public DNNTestLayer
{
public:
    void testDarknetLayer(const std::string& name, bool hasWeights = false, bool testBatchProcessing = true)
    {
        SCOPED_TRACE(name);
        Mat inp = blobFromNPY(findDataFile("dnn/darknet/" + name + "_in.npy"));
        Mat ref = blobFromNPY(findDataFile("dnn/darknet/" + name + "_out.npy"));
        std::string cfg = findDataFile("dnn/darknet/" + name + ".cfg");
        std::string model = "";
        if (hasWeights)
            model = findDataFile("dnn/darknet/" + name + ".weights");

        checkBackend(&inp, &ref);

        Net net = readNet(cfg, model);
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        net.setInput(inp);
        Mat out = net.forward();
        normAssert(out, ref, "", default_l1, default_lInf);

        if (inp.size[0] == 1 && testBatchProcessing)  // test handling of batch size
        {
            SCOPED_TRACE("batch size 2");

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
            if (target == DNN_TARGET_MYRIAD && name == "shortcut")
                applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
#endif

            std::vector<int> sz2 = shape(inp);
            sz2[0] = 2;

            Net net2 = readNet(cfg, model);
            net2.setPreferableBackend(backend);
            net2.setPreferableTarget(target);
            Range ranges0[4] = { Range(0, 1), Range::all(), Range::all(), Range::all() };
            Range ranges1[4] = { Range(1, 2), Range::all(), Range::all(), Range::all() };
            Mat inp2(sz2, inp.type(), Scalar::all(0));
            inp.copyTo(inp2(ranges0));
            inp.copyTo(inp2(ranges1));
            net2.setInput(inp2);
            Mat out2 = net2.forward();
            EXPECT_EQ(0, cv::norm(out2(ranges0), out2(ranges1), NORM_INF)) << "Batch result is not equal: " << name;

            Mat ref2 = ref;
            if (ref.dims == 2 && out2.dims == 3)
            {
                int ref_3d_sizes[3] = {1, ref.rows, ref.cols};
                ref2 = Mat(3, ref_3d_sizes, ref.type(), (void*)ref.data);
            }
            /*else if (ref.dims == 3 && out2.dims == 4)
            {
                int ref_4d_sizes[4] = {1, ref.size[0], ref.size[1], ref.size[2]};
                ref2 = Mat(4, ref_4d_sizes, ref.type(), (void*)ref.data);
            }*/
            ASSERT_EQ(out2.dims, ref2.dims) << ref.dims;

            normAssert(out2(ranges0), ref2, "", default_l1, default_lInf);
            normAssert(out2(ranges1), ref2, "", default_l1, default_lInf);
        }
    }
};
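
// Fixture for whole-network detection tests. The raw Darknet outputs are decoded
// row by row (columns 0-3 hold the normalized box center and size, class scores
// start at column 5), detections below confThreshold are dropped, NMSBoxes prunes
// overlapping boxes, and the survivors are compared against the reference detections
// with normAssertDetections. A NaN iouDiff disables the accuracy check entirely.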
class Test_Darknet_nets : public DNNTestLayer
{
public:
    // Test object detection network from Darknet framework.
    void testDarknetModel(const std::string& cfg, const std::string& weights,
                          const std::vector<std::vector<int> >& refClassIds,
                          const std::vector<std::vector<float> >& refConfidences,
                          const std::vector<std::vector<Rect2d> >& refBoxes,
                          double scoreDiff, double iouDiff, float confThreshold = 0.24, float nmsThreshold = 0.4)
    {
        checkBackend();

        Mat img1 = imread(_tf("dog416.png"));
        Mat img2 = imread(_tf("street.png"));
        std::vector<Mat> samples(2);
        samples[0] = img1; samples[1] = img2;

        // Determine whether this is a single-image or a batch (two-image) test.
        int batch_size = refClassIds.size();
        CV_Assert(batch_size == 1 || batch_size == 2);
        samples.resize(batch_size);

        Mat inp = blobFromImages(samples, 1.0/255, Size(416, 416), Scalar(), true, false);

        Net net = readNet(findDataFile("dnn/" + cfg),
                          findDataFile("dnn/" + weights, false));
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        net.setInput(inp);
        std::vector<Mat> outs;
        net.forward(outs, net.getUnconnectedOutLayersNames());

        for (int b = 0; b < batch_size; ++b)
        {
            std::vector<int> classIds;
            std::vector<float> confidences;
            std::vector<Rect2d> boxes;
            for (int i = 0; i < outs.size(); ++i)
            {
                Mat out;
                if (batch_size > 1)
                {
                    // get the sample slice from 3D matrix (batch, box, classes+5)
                    Range ranges[3] = {Range(b, b+1), Range::all(), Range::all()};
                    out = outs[i](ranges).reshape(1, outs[i].size[1]);
                }
                else
                {
                    out = outs[i];
                }
                for (int j = 0; j < out.rows; ++j)
                {
                    Mat scores = out.row(j).colRange(5, out.cols);
                    double confidence;
                    Point maxLoc;
                    minMaxLoc(scores, 0, &confidence, 0, &maxLoc);

                    if (confidence > confThreshold)
                    {
                        float* detection = out.ptr<float>(j);
                        double centerX = detection[0];
                        double centerY = detection[1];
                        double width = detection[2];
                        double height = detection[3];
                        boxes.push_back(Rect2d(centerX - 0.5 * width, centerY - 0.5 * height,
                                               width, height));
                        confidences.push_back(confidence);
                        classIds.push_back(maxLoc.x);
                    }
                }
            }

            // here we need NMS of boxes
            std::vector<int> indices;
            NMSBoxes(boxes, confidences, confThreshold, nmsThreshold, indices);

            std::vector<int> nms_classIds;
            std::vector<float> nms_confidences;
            std::vector<Rect2d> nms_boxes;

            for (size_t i = 0; i < indices.size(); ++i)
            {
                int idx = indices[i];
                Rect2d box = boxes[idx];
                float conf = confidences[idx];
                int class_id = classIds[idx];
                nms_boxes.push_back(box);
                nms_confidences.push_back(conf);
                nms_classIds.push_back(class_id);
                if (cvtest::debugLevel > 0)
                {
                    std::cout << b << ", " << class_id << ", " << conf << "f, "
                              << box.x << "f, " << box.y << "f, "
                              << box.x + box.width << "f, " << box.y + box.height << "f,"
                              << std::endl;
                }
            }

            if (cvIsNaN(iouDiff))
            {
                if (b == 0)
                    std::cout << "Skip accuracy checks" << std::endl;
                continue;
            }

            normAssertDetections(refClassIds[b], refConfidences[b], refBoxes[b], nms_classIds,
                                 nms_confidences, nms_boxes, format("batch size %d, sample %d\n", batch_size, b).c_str(), confThreshold, scoreDiff, iouDiff);
        }
    }

    void testDarknetModel(const std::string& cfg, const std::string& weights,
                          const std::vector<int>& refClassIds,
                          const std::vector<float>& refConfidences,
                          const std::vector<Rect2d>& refBoxes,
                          double scoreDiff, double iouDiff, float confThreshold = 0.24, float nmsThreshold = 0.4)
    {
        testDarknetModel(cfg, weights,
                         std::vector<std::vector<int> >(1, refClassIds),
                         std::vector<std::vector<float> >(1, refConfidences),
                         std::vector<std::vector<Rect2d> >(1, refBoxes),
                         scoreDiff, iouDiff, confThreshold, nmsThreshold);
    }

    void testDarknetModel(const std::string& cfg, const std::string& weights,
                          const cv::Mat& ref, double scoreDiff, double iouDiff,
                          float confThreshold = 0.24, float nmsThreshold = 0.4)
    {
        CV_Assert(ref.cols == 7);
        std::vector<std::vector<int> > refClassIds;
        std::vector<std::vector<float> > refScores;
        std::vector<std::vector<Rect2d> > refBoxes;
        for (int i = 0; i < ref.rows; ++i)
        {
            int batchId = static_cast<int>(ref.at<float>(i, 0));
            int classId = static_cast<int>(ref.at<float>(i, 1));
            float score = ref.at<float>(i, 2);
            float left = ref.at<float>(i, 3);
            float top = ref.at<float>(i, 4);
            float right = ref.at<float>(i, 5);
            float bottom = ref.at<float>(i, 6);
            Rect2d box(left, top, right - left, bottom - top);
            if (batchId >= refClassIds.size())
            {
                refClassIds.resize(batchId + 1);
                refScores.resize(batchId + 1);
                refBoxes.resize(batchId + 1);
            }
            refClassIds[batchId].push_back(classId);
            refScores[batchId].push_back(score);
            refBoxes[batchId].push_back(box);
        }
        testDarknetModel(cfg, weights, refClassIds, refScores, refBoxes,
                         scoreDiff, iouDiff, confThreshold, nmsThreshold);
    }
};
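
// For orientation, a minimal sketch of the same detection flow outside the test
// harness (the file names here are placeholders, not test data requirements):
//
//     Net net = readNetFromDarknet("yolo-voc.cfg", "yolo-voc.weights");
//     Mat blob = blobFromImage(img, 1.0/255, Size(416, 416), Scalar(), true, false);
//     net.setInput(blob);
//     std::vector<Mat> outs;
//     net.forward(outs, net.getUnconnectedOutLayersNames());
//     // Columns 0-3 of each output row hold the normalized box center and size,
//     // class scores start at column 5; convert to corner boxes, filter by
//     // confidence and run NMSBoxes as in the fixture above.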
TEST_P(Test_Darknet_nets, YoloVoc)
{
    applyTestTag(
#if defined(OPENCV_32BIT_CONFIGURATION) && defined(HAVE_OPENCL)
        CV_TEST_TAG_MEMORY_2GB,
#else
        CV_TEST_TAG_MEMORY_1GB,
#endif
        CV_TEST_TAG_LONG
    );

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)  // nGraph compilation failure
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
#endif
#if defined(INF_ENGINE_RELEASE)
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 || backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) &&
        target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);  // need to update check function
#endif

    // batchId, classId, confidence, left, top, right, bottom
    Mat ref = (Mat_<float>(6, 7) << 0, 6, 0.750469f, 0.577374f, 0.127391f, 0.902949f, 0.300809f,   // a car
                                    0, 1, 0.780879f, 0.270762f, 0.264102f, 0.732475f, 0.745412f,   // a bicycle
                                    0, 11, 0.901615f, 0.1386f, 0.338509f, 0.421337f, 0.938789f,    // a dog
                                    1, 14, 0.623813f, 0.183179f, 0.381921f, 0.247726f, 0.625847f,  // a person
                                    1, 6, 0.667770f, 0.446555f, 0.453578f, 0.499986f, 0.519167f,   // a car
                                    1, 6, 0.844947f, 0.637058f, 0.460398f, 0.828508f, 0.66427f);   // a car

    double nmsThreshold = (target == DNN_TARGET_MYRIAD) ? 0.397 : 0.4;
    double scoreDiff = 8e-5, iouDiff = 3e-4;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        scoreDiff = 1e-2;
        iouDiff = 0.018;
    }
    else if (target == DNN_TARGET_CUDA_FP16)
    {
        scoreDiff = 0.03;
        iouDiff = 0.018;
    }

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // accuracy
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
    {
        iouDiff = std::numeric_limits<double>::quiet_NaN();
    }
#endif

    std::string config_file = "yolo-voc.cfg";
    std::string weights_file = "yolo-voc.weights";

    {
        SCOPED_TRACE("batch size 1");
        testDarknetModel(config_file, weights_file, ref.rowRange(0, 3), scoreDiff, iouDiff);
    }

    {
        SCOPED_TRACE("batch size 2");
        testDarknetModel(config_file, weights_file, ref, scoreDiff, iouDiff, 0.24, nmsThreshold);
    }

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // accuracy
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
}

TEST_P(Test_Darknet_nets, TinyYoloVoc)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)  // nGraph compilation failure
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
#if defined(INF_ENGINE_RELEASE)
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 || backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) &&
        target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);  // need to update check function
#endif

    // batchId, classId, confidence, left, top, right, bottom
    Mat ref = (Mat_<float>(4, 7) << 0, 6, 0.761967f, 0.579042f, 0.159161f, 0.894482f, 0.31994f,    // a car
                                    0, 11, 0.780595f, 0.129696f, 0.386467f, 0.445275f, 0.920994f,  // a dog
                                    1, 6, 0.651450f, 0.460526f, 0.458019f, 0.522527f, 0.5341f,     // a car
                                    1, 6, 0.928758f, 0.651024f, 0.463539f, 0.823784f, 0.654998f);  // a car

    double scoreDiff = 8e-5, iouDiff = 3e-4;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        scoreDiff = 8e-3;
        iouDiff = 0.018;
    }
    else if (target == DNN_TARGET_CUDA_FP16)
    {
        scoreDiff = 0.008;
        iouDiff = 0.02;
    }

    std::string config_file = "tiny-yolo-voc.cfg";
    std::string weights_file = "tiny-yolo-voc.weights";

    {
        SCOPED_TRACE("batch size 1");
        testDarknetModel(config_file, weights_file, ref.rowRange(0, 2), scoreDiff, iouDiff);
    }

    {
        SCOPED_TRACE("batch size 2");
        testDarknetModel(config_file, weights_file, ref, scoreDiff, iouDiff);
    }
}

#ifdef HAVE_INF_ENGINE
static const std::chrono::milliseconds async_timeout(10000);

typedef testing::TestWithParam<tuple<std::string, tuple<Backend, Target> > > Test_Darknet_nets_async;
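
// Checks Net::forwardAsync against the synchronous Net::forward results on the same
// randomly generated inputs; outputs are fetched through AsyncArray::get with the
// async_timeout defined above. Backends other than Inference Engine skip the test,
// as they do not support asynchronous forward here.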
TEST_P(Test_Darknet_nets_async, Accuracy)
{
    Backend backendId = get<0>(get<1>(GetParam()));
    Target targetId = get<1>(get<1>(GetParam()));
    std::string prefix = get<0>(GetParam());

    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (INF_ENGINE_VER_MAJOR_LT(2019020000) && backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    if (backendId != DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && backendId != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        throw SkipTestException("No support for async forward");

#if defined(INF_ENGINE_RELEASE)
#if INF_ENGINE_VER_MAJOR_GE(2021040000)
    if (targetId == DNN_TARGET_MYRIAD && prefix == "yolov3")  // NC_OUT_OF_MEMORY
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#else
    if (targetId == DNN_TARGET_MYRIAD && prefix == "yolov4")  // NC_OUT_OF_MEMORY
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
#endif

    const int numInputs = 2;
    std::vector<Mat> inputs(numInputs);
    int blobSize[] = {1, 3, 416, 416};
    for (int i = 0; i < numInputs; ++i)
    {
        inputs[i].create(4, &blobSize[0], CV_32F);
        randu(inputs[i], 0, 1);
    }

    Net netSync = readNet(findDataFile("dnn/" + prefix + ".cfg"),
                          findDataFile("dnn/" + prefix + ".weights", false));
    netSync.setPreferableBackend(backendId);
    netSync.setPreferableTarget(targetId);

    // Run synchronously.
    std::vector<Mat> refs(numInputs);
    for (int i = 0; i < numInputs; ++i)
    {
        netSync.setInput(inputs[i]);
        refs[i] = netSync.forward().clone();
    }

    Net netAsync = readNet(findDataFile("dnn/" + prefix + ".cfg"),
                           findDataFile("dnn/" + prefix + ".weights", false));
    netAsync.setPreferableBackend(backendId);
    netAsync.setPreferableTarget(targetId);

    double l1 = 0.0;
    double lInf = 0.0;
#if defined(INF_ENGINE_RELEASE)
    if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        if (targetId == DNN_TARGET_MYRIAD && prefix == "yolo-voc")
        {
            l1 = 0.02;
            lInf = 0.15;
        }
        if (targetId == DNN_TARGET_OPENCL_FP16 && prefix == "yolo-voc")
        {
            l1 = 0.02;
            lInf = 0.1;
        }
        if (targetId == DNN_TARGET_OPENCL_FP16 && prefix == "yolov3")
        {
            l1 = 0.001;
            lInf = 0.007;
        }
        if (targetId == DNN_TARGET_OPENCL_FP16 && prefix == "yolov4")
        {
            l1 = 0.001;
            lInf = 0.005;
        }
    }
#endif

    // Run asynchronously. To make the test more robust, process inputs in reversed order.
    for (int i = numInputs - 1; i >= 0; --i)
    {
        netAsync.setInput(inputs[i]);

        AsyncArray out = netAsync.forwardAsync();
        ASSERT_TRUE(out.valid());
        Mat result;
        EXPECT_TRUE(out.get(result, async_timeout));
        normAssert(refs[i], result, format("Index: %d", i).c_str(), l1, lInf);
    }
}

INSTANTIATE_TEST_CASE_P(/**/, Test_Darknet_nets_async, Combine(
    Values("yolo-voc", "tiny-yolo-voc", "yolov3", "yolov4", "yolov4-tiny"),
    dnnBackendsAndTargets()
));

#endif

TEST_P(Test_Darknet_nets, YOLOv3)
{
    applyTestTag(CV_TEST_TAG_LONG, (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_1GB : CV_TEST_TAG_MEMORY_2GB));

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)  // nGraph compilation failure
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif

    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);

    // batchId, classId, confidence, left, top, right, bottom
    const int N0 = 3;
    const int N1 = 6;
    static const float ref_[/* (N0 + N1) * 7 */] = {
        0, 16, 0.998836f, 0.160024f, 0.389964f, 0.417885f, 0.943716f,
        0, 1, 0.987908f, 0.150913f, 0.221933f, 0.742255f, 0.746261f,
        0, 7, 0.952983f, 0.614621f, 0.150257f, 0.901368f, 0.289251f,
        1, 2, 0.997412f, 0.647584f, 0.459939f, 0.821037f, 0.663947f,
        1, 2, 0.989633f, 0.450719f, 0.463353f, 0.496306f, 0.522258f,
        1, 0, 0.980053f, 0.195856f, 0.378454f, 0.258626f, 0.629257f,
        1, 9, 0.785341f, 0.665503f, 0.373543f, 0.688893f, 0.439244f,
        1, 9, 0.733275f, 0.376029f, 0.315694f, 0.401776f, 0.395165f,
        1, 9, 0.384815f, 0.659824f, 0.372389f, 0.673927f, 0.429412f,
    };
    Mat ref(N0 + N1, 7, CV_32FC1, (void*)ref_);

    double scoreDiff = 8e-5, iouDiff = 3e-4;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        scoreDiff = 0.006;
        iouDiff = 0.042;
    }
    else if (target == DNN_TARGET_CUDA_FP16)
    {
        scoreDiff = 0.04;
        iouDiff = 0.03;
    }

    std::string config_file = "yolov3.cfg";
    std::string weights_file = "yolov3.weights";

#if defined(INF_ENGINE_RELEASE)
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && target == DNN_TARGET_MYRIAD &&
        getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
    {
        scoreDiff = 0.04;
        iouDiff = 0.2;
    }
#endif

    {
        SCOPED_TRACE("batch size 1");
        testDarknetModel(config_file, weights_file, ref.rowRange(0, N0), scoreDiff, iouDiff);
    }

#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL)
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
        else if (target == DNN_TARGET_OPENCL_FP16 && INF_ENGINE_VER_MAJOR_LE(202010000))
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
        else if (target == DNN_TARGET_MYRIAD &&
                 getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
    }
#endif

    {
        SCOPED_TRACE("batch size 2");
        testDarknetModel(config_file, weights_file, ref, scoreDiff, iouDiff);
    }
}

TEST_P(Test_Darknet_nets, YOLOv4)
{
    applyTestTag(CV_TEST_TAG_LONG, (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_1GB : CV_TEST_TAG_MEMORY_2GB));

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)  // nGraph compilation failure
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
#if defined(INF_ENGINE_RELEASE)
    if (target == DNN_TARGET_MYRIAD)  // NC_OUT_OF_MEMORY
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif

    // batchId, classId, confidence, left, top, right, bottom
    const int N0 = 3;
    const int N1 = 7;
    static const float ref_[/* (N0 + N1) * 7 */] = {
        0, 16, 0.992194f, 0.172375f, 0.402458f, 0.403918f, 0.932801f,
        0, 1, 0.988326f, 0.166708f, 0.228236f, 0.737208f, 0.735803f,
        0, 7, 0.94639f, 0.602523f, 0.130399f, 0.901623f, 0.298452f,
        1, 2, 0.99761f, 0.646556f, 0.45985f, 0.816041f, 0.659067f,
        1, 0, 0.988913f, 0.201726f, 0.360282f, 0.266181f, 0.631728f,
        1, 2, 0.98233f, 0.452007f, 0.462217f, 0.495612f, 0.521687f,
        1, 9, 0.919195f, 0.374642f, 0.316524f, 0.398126f, 0.393714f,
        1, 9, 0.856303f, 0.666842f, 0.372215f, 0.685539f, 0.44141f,
        1, 9, 0.313516f, 0.656791f, 0.374734f, 0.671959f, 0.438371f,
        1, 9, 0.256625f, 0.940232f, 0.326931f, 0.967586f, 0.374002f,
    };
    Mat ref(N0 + N1, 7, CV_32FC1, (void*)ref_);

    double scoreDiff = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.006 : 8e-5;
    double iouDiff = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.042 : 3e-4;
    if (target == DNN_TARGET_CUDA_FP16)
    {
        scoreDiff = 0.008;
        iouDiff = 0.03;
    }

    std::string config_file = "yolov4.cfg";
    std::string weights_file = "yolov4.weights";

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // accuracy (batch 1)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
    {
        iouDiff = std::numeric_limits<double>::quiet_NaN();
    }
#endif
#if defined(INF_ENGINE_RELEASE)
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && target == DNN_TARGET_MYRIAD &&
        getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
    {
        scoreDiff = 0.04;
        iouDiff = 0.2;
    }
#endif

    {
        SCOPED_TRACE("batch size 1");
        testDarknetModel(config_file, weights_file, ref.rowRange(0, N0), scoreDiff, iouDiff);
    }

    {
        SCOPED_TRACE("batch size 2");

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
        // accuracy (batch 2)
        if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
        {
            iouDiff = 0.45f;
        }
#endif
#if defined(INF_ENGINE_RELEASE)
        if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        {
            if (target == DNN_TARGET_OPENCL)
                applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
            else if (target == DNN_TARGET_OPENCL_FP16 && INF_ENGINE_VER_MAJOR_LE(202010000))
                applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
            else if (target == DNN_TARGET_MYRIAD &&
                     getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
                applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
        }
#endif

        testDarknetModel(config_file, weights_file, ref, scoreDiff, iouDiff);
    }

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // accuracy
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
}

TEST_P(Test_Darknet_nets, YOLOv4_tiny)
{
    applyTestTag(
        target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB
    );

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2021010000)  // nGraph compilation failure
    if (target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif

    const double confThreshold = 0.5;

    // batchId, classId, confidence, left, top, right, bottom
    const int N0 = 2;
    const int N1 = 3;
    static const float ref_[/* (N0 + N1) * 7 */] = {
        0, 7, 0.85935f, 0.593484f, 0.141211f, 0.920356f, 0.291593f,
        0, 16, 0.795188f, 0.169207f, 0.386886f, 0.423753f, 0.933004f,
        1, 2, 0.996832f, 0.653802f, 0.464573f, 0.815193f, 0.653292f,
        1, 2, 0.963325f, 0.451151f, 0.458915f, 0.496255f, 0.52241f,
        1, 0, 0.926244f, 0.194851f, 0.361743f, 0.260277f, 0.632364f,
    };
    Mat ref(N0 + N1, 7, CV_32FC1, (void*)ref_);

    double scoreDiff = 0.01f;
    double iouDiff = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.15 : 0.01f;
    if (target == DNN_TARGET_CUDA_FP16)
        iouDiff = 0.02;

    std::string config_file = "yolov4-tiny.cfg";
    std::string weights_file = "yolov4-tiny.weights";

#if defined(INF_ENGINE_RELEASE)
    if (target == DNN_TARGET_MYRIAD)  // bad accuracy
        iouDiff = std::numeric_limits<double>::quiet_NaN();
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_OPENCL)
        iouDiff = std::numeric_limits<double>::quiet_NaN();
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && target == DNN_TARGET_OPENCL_FP16)
        iouDiff = std::numeric_limits<double>::quiet_NaN();
#endif

    {
        SCOPED_TRACE("batch size 1");
        testDarknetModel(config_file, weights_file, ref.rowRange(0, N0), scoreDiff, iouDiff, confThreshold);
    }

    {
        SCOPED_TRACE("batch size 2");
        testDarknetModel(config_file, weights_file, ref, scoreDiff, iouDiff, confThreshold);
    }

#if defined(INF_ENGINE_RELEASE)
    if (target == DNN_TARGET_MYRIAD)  // bad accuracy
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_OPENCL)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
}

TEST_P(Test_Darknet_nets, YOLOv4x_mish)
{
    applyTestTag(CV_TEST_TAG_LONG, (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_1GB : CV_TEST_TAG_MEMORY_2GB));

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // IE exception: Ngraph operation Transpose with name permute_168 has dynamic output shape on 0 port, but CPU plug-in supports only static shape
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
        );
#endif
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)  // nGraph compilation failure
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
#if defined(INF_ENGINE_RELEASE)
    if (target == DNN_TARGET_MYRIAD)  // NC_OUT_OF_MEMORY
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif

    // batchId, classId, confidence, left, top, right, bottom
    const int N0 = 3;
    const int N1 = 5;
    static const float ref_[/* (N0 + N1) * 7 */] = {
        0, 16, 0.925536f, 0.17188f, 0.386832f, 0.406138f, 0.941696f,
        0, 1, 0.912028f, 0.162125f, 0.208863f, 0.741316f, 0.729332f,
        0, 7, 0.841018f, 0.608953f, 0.128653f, 0.900692f, 0.295657f,
        1, 2, 0.925697f, 0.650438f, 0.458118f, 0.813927f, 0.661775f,
        1, 0, 0.882156f, 0.203644f, 0.365763f, 0.265473f, 0.632195f,
        1, 2, 0.848857f, 0.451044f, 0.462997f, 0.496629f, 0.522719f,
        1, 9, 0.736015f, 0.374503f, 0.316029f, 0.399358f, 0.392883f,
        1, 9, 0.727129f, 0.662469f, 0.373687f, 0.687877f, 0.441335f,
    };
    Mat ref(N0 + N1, 7, CV_32FC1, (void*)ref_);

    double scoreDiff = 8e-5;
    double iouDiff = 3e-4;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD || target == DNN_TARGET_CUDA_FP16)
    {
        scoreDiff = 0.006;
        iouDiff = 0.042;
    }

    std::string config_file = "yolov4x-mish.cfg";
    std::string weights_file = "yolov4x-mish.weights";

#if defined(INF_ENGINE_RELEASE)
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && target == DNN_TARGET_MYRIAD &&
        getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
    {
        scoreDiff = 0.04;
        iouDiff = 0.2;
    }
#endif

    {
        SCOPED_TRACE("batch size 1");
        testDarknetModel(config_file, weights_file, ref.rowRange(0, N0), scoreDiff, iouDiff);
    }

    {
        SCOPED_TRACE("batch size 2");

#if defined(INF_ENGINE_RELEASE)
        if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        {
            if (target == DNN_TARGET_OPENCL)
                applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
            else if (target == DNN_TARGET_OPENCL_FP16 && INF_ENGINE_VER_MAJOR_LE(202010000))
                applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
            else if (target == DNN_TARGET_MYRIAD &&
                     getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
                applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
        }
#endif

        testDarknetModel(config_file, weights_file, ref, scoreDiff, iouDiff);
    }
}

INSTANTIATE_TEST_CASE_P(/**/, Test_Darknet_nets, dnnBackendsAndTargets());
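
// Per-layer cases for individual Darknet primitives; the input/reference blobs and
// .cfg files live under dnn/darknet in the opencv_extra repository.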
TEST_P(Test_Darknet_layers, shortcut)
{
    testDarknetLayer("shortcut");
    testDarknetLayer("shortcut_leaky");
    testDarknetLayer("shortcut_unequal");
    testDarknetLayer("shortcut_unequal_2");
}

TEST_P(Test_Darknet_layers, upsample)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021030000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // exception
#endif
    testDarknetLayer("upsample");
}

TEST_P(Test_Darknet_layers, mish)
{
    testDarknetLayer("mish", true);
}

TEST_P(Test_Darknet_layers, tanh)
{
    testDarknetLayer("tanh");
}

TEST_P(Test_Darknet_layers, avgpool_softmax)
{
    testDarknetLayer("avgpool_softmax");
}

TEST_P(Test_Darknet_layers, region)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && INF_ENGINE_VER_MAJOR_GE(2020020000))
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // accuracy on CPU, OpenCL
    // Expected: (normInf) <= (lInf), actual: 0.763223 vs 0.0001
    // |ref| = 1.207319974899292
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_CPU, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
        );
#endif
    testDarknetLayer("region");
}

TEST_P(Test_Darknet_layers, reorg)
{
    testDarknetLayer("reorg");
}

TEST_P(Test_Darknet_layers, route)
{
    testDarknetLayer("route");
    testDarknetLayer("route_multi");
}

TEST_P(Test_Darknet_layers, maxpool)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2020020000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testDarknetLayer("maxpool");
}

TEST_P(Test_Darknet_layers, convolutional)
{
    if (target == DNN_TARGET_MYRIAD)
    {
        default_l1 = 0.01f;
    }
    testDarknetLayer("convolutional", true);
}

TEST_P(Test_Darknet_layers, scale_channels)
{
    bool testBatches = backend == DNN_BACKEND_CUDA;
    testDarknetLayer("scale_channels", false, testBatches);
}

TEST_P(Test_Darknet_layers, connected)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    testDarknetLayer("connected", true);
}

TEST_P(Test_Darknet_layers, relu)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
    testDarknetLayer("relu");
}

TEST_P(Test_Darknet_layers, sam)
{
    testDarknetLayer("sam", true);
}

INSTANTIATE_TEST_CASE_P(/**/, Test_Darknet_layers, dnnBackendsAndTargets());

}}  // namespace