gapi_infer_ie_test.cpp

// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2019-2021 Intel Corporation

#include "../test_precomp.hpp"

#ifdef HAVE_INF_ENGINE

#include <stdexcept>
#include <mutex>
#include <condition_variable>

#include <inference_engine.hpp>

#include <ade/util/iota_range.hpp>

#include <opencv2/gapi/infer/ie.hpp>
#include <opencv2/gapi/streaming/cap.hpp>

#include "backends/ie/util.hpp"
#include "backends/ie/giebackend/giewrapper.hpp"

#ifdef HAVE_NGRAPH
#if defined(__clang__)  // clang or MSVC clang
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunused-parameter"
#elif defined(_MSC_VER)
#pragma warning(push)
#pragma warning(disable : 4100)
# if _MSC_VER < 1910
#  pragma warning(disable:4268) // Disable warnings of ngraph. OpenVINO recommends to use MSVS 2019.
#  pragma warning(disable:4800)
# endif
#elif defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#endif
#include <ngraph/ngraph.hpp>
#endif

namespace opencv_test
{
namespace {
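// TestMediaBGR and TestMediaNV12 are minimal cv::MediaFrame adapters used by the
// GFrame-based tests below: they expose the plane pointers and strides of plain
// cv::Mat buffers (a packed BGR image, or an NV12 pair of Y and interleaved UV
// planes). TestMediaBGR also reports hard-coded blobParams(), which the IE backend
// can presumably use when it needs an IE blob description built straight from the frame.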
class TestMediaBGR final: public cv::MediaFrame::IAdapter {
    cv::Mat m_mat;
    using Cb = cv::MediaFrame::View::Callback;
    Cb m_cb;

public:
    explicit TestMediaBGR(cv::Mat m, Cb cb = [](){})
        : m_mat(m), m_cb(cb) {
    }
    cv::GFrameDesc meta() const override {
        return cv::GFrameDesc{cv::MediaFormat::BGR, cv::Size(m_mat.cols, m_mat.rows)};
    }
    cv::MediaFrame::View access(cv::MediaFrame::Access) override {
        cv::MediaFrame::View::Ptrs pp = { m_mat.ptr(), nullptr, nullptr, nullptr };
        cv::MediaFrame::View::Strides ss = { m_mat.step, 0u, 0u, 0u };
        return cv::MediaFrame::View(std::move(pp), std::move(ss), Cb{m_cb});
    }
    cv::util::any blobParams() const override {
        return std::make_pair<InferenceEngine::TensorDesc,
                              InferenceEngine::ParamMap>({IE::Precision::U8,
                                                          {1, 3, 300, 300},
                                                          IE::Layout::NCHW},
                                                         {{"HELLO", 42},
                                                          {"COLOR_FORMAT",
                                                           InferenceEngine::ColorFormat::NV12}});
    }
};
class TestMediaNV12 final: public cv::MediaFrame::IAdapter {
    cv::Mat m_y;
    cv::Mat m_uv;
public:
    TestMediaNV12(cv::Mat y, cv::Mat uv) : m_y(y), m_uv(uv) {
    }
    cv::GFrameDesc meta() const override {
        return cv::GFrameDesc{cv::MediaFormat::NV12, cv::Size(m_y.cols, m_y.rows)};
    }
    cv::MediaFrame::View access(cv::MediaFrame::Access) override {
        cv::MediaFrame::View::Ptrs pp = {
            m_y.ptr(), m_uv.ptr(), nullptr, nullptr
        };
        cv::MediaFrame::View::Strides ss = {
            m_y.step, m_uv.step, 0u, 0u
        };
        return cv::MediaFrame::View(std::move(pp), std::move(ss));
    }
};
// FIXME: taken from DNN module
static void initDLDTDataPath()
{
#ifndef WINRT
    static bool initialized = false;
    if (!initialized)
    {
        const char* omzDataPath = getenv("OPENCV_OPEN_MODEL_ZOO_DATA_PATH");
        if (omzDataPath)
            cvtest::addDataSearchPath(omzDataPath);
        const char* dnnDataPath = getenv("OPENCV_DNN_TEST_DATA_PATH");
        if (dnnDataPath) {
            // Add the dnnDataPath itself - G-API is using some images there directly
            cvtest::addDataSearchPath(dnnDataPath);
            cvtest::addDataSearchPath(dnnDataPath + std::string("/omz_intel_models"));
        }
        initialized = true;
    }
#endif // WINRT
}

#if INF_ENGINE_RELEASE >= 2020010000
static const std::string SUBDIR = "intel/age-gender-recognition-retail-0013/FP32/";
#else
static const std::string SUBDIR = "Retail/object_attributes/age_gender/dldt/";
#endif
// FIXME: taken from the DNN module
void normAssert(cv::InputArray ref, cv::InputArray test,
                const char *comment /*= ""*/,
                double l1 = 0.00001, double lInf = 0.0001)
{
    double normL1 = cvtest::norm(ref, test, cv::NORM_L1) / ref.getMat().total();
    EXPECT_LE(normL1, l1) << comment;
    double normInf = cvtest::norm(ref, test, cv::NORM_INF);
    EXPECT_LE(normInf, lInf) << comment;
}
namespace IE = InferenceEngine;

void setNetParameters(IE::CNNNetwork& net, bool is_nv12 = false) {
    auto ii = net.getInputsInfo().at("data");
    ii->setPrecision(IE::Precision::U8);
    ii->getPreProcess().setResizeAlgorithm(IE::RESIZE_BILINEAR);
    if (is_nv12) {
        ii->getPreProcess().setColorFormat(IE::ColorFormat::NV12);
    }
}
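// checkDeviceIsAvailable() caches the device list reported by the IE Core;
// skipIfDeviceNotAvailable() turns a missing device into a test skip (via
// SkipTestException) instead of a failure on machines without that hardware.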
bool checkDeviceIsAvailable(const std::string& device) {
    const static auto available_devices = [&](){
        auto devices = cv::gimpl::ie::wrap::getCore().GetAvailableDevices();
        return std::unordered_set<std::string>{devices.begin(), devices.end()};
    }();
    return available_devices.find(device) != available_devices.end();
}

void skipIfDeviceNotAvailable(const std::string& device) {
    if (!checkDeviceIsAvailable(device)) {
        throw SkipTestException("Device: " + device + " isn't available!");
    }
}
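// compileBlob() exports a network pre-compiled for the requested device into a file;
// compileAgeGenderBlob() applies it to the age-gender model and caches the resulting
// path, so tests importing a pre-compiled blob pay the export cost only once.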
void compileBlob(const cv::gapi::ie::detail::ParamDesc& params,
                 const std::string& output,
                 const IE::Precision& ip) {
    auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
    auto net    = cv::gimpl::ie::wrap::readNetwork(params);
    for (auto&& ii : net.getInputsInfo()) {
        ii.second->setPrecision(ip);
    }
    auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
    std::ofstream out_file{output, std::ios::out | std::ios::binary};
    GAPI_Assert(out_file.is_open());
    this_network.Export(out_file);
}

std::string compileAgeGenderBlob(const std::string& device) {
    const static std::string blob_path = [&](){
        cv::gapi::ie::detail::ParamDesc params;
        const std::string model_name = "age-gender-recognition-retail-0013";
        const std::string output     = model_name + ".blob";
        params.model_path   = findDataFile(SUBDIR + model_name + ".xml");
        params.weights_path = findDataFile(SUBDIR + model_name + ".bin");
        params.device_id    = device;
        compileBlob(params, output, IE::Precision::U8);
        return output;
    }();
    return blob_path;
}
} // anonymous namespace

// TODO: Probably DNN/IE part can be further parametrized with a template
// NOTE: here ".." is used to leave the default "gapi/" search scope
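// Most tests below follow the same pattern: run the age-gender model once through the
// raw Inference Engine API to obtain reference outputs ("age_conv3" and "prob"), run
// the same model through G-API's cv::gapi::infer<>, and compare the two results with
// normAssert().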
TEST(TestAgeGenderIE, InferBasicTensor)
{
    initDLDTDataPath();
    cv::gapi::ie::detail::ParamDesc params;
    params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
    params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
    params.device_id = "CPU";
    // Load IE network, initialize input data using that.
    cv::Mat in_mat;
    cv::Mat gapi_age, gapi_gender;
    IE::Blob::Ptr ie_age, ie_gender;
    {
        auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
        auto net = cv::gimpl::ie::wrap::readNetwork(params);
        auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
        auto infer_request = this_network.CreateInferRequest();
        const auto &iedims = net.getInputsInfo().begin()->second->getTensorDesc().getDims();
        auto cvdims = cv::gapi::ie::util::to_ocv(iedims);
        in_mat.create(cvdims, CV_32F);
        cv::randu(in_mat, -1, 1);
        infer_request.SetBlob("data", cv::gapi::ie::util::to_ie(in_mat));
        infer_request.Infer();
        ie_age    = infer_request.GetBlob("age_conv3");
        ie_gender = infer_request.GetBlob("prob");
    }
    // Configure & run G-API
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
    cv::GMat in;
    cv::GMat age, gender;
    std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
    cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
    auto pp = cv::gapi::ie::Params<AgeGender> {
        params.model_path, params.weights_path, params.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });
    comp.apply(cv::gin(in_mat), cv::gout(gapi_age, gapi_gender),
               cv::compile_args(cv::gapi::networks(pp)));
    // Validate with IE itself (avoid DNN module dependency here)
    normAssert(cv::gapi::ie::util::to_ocv(ie_age),    gapi_age,    "Test age output");
    normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
}
TEST(TestAgeGenderIE, InferBasicImage)
{
    initDLDTDataPath();
    cv::gapi::ie::detail::ParamDesc params;
    params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
    params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
    params.device_id = "CPU";
    // FIXME: Ideally it should be an image from disk
    // cv::Mat in_mat = cv::imread(findDataFile("grace_hopper_227.png"));
    cv::Mat in_mat(cv::Size(320, 240), CV_8UC3);
    cv::randu(in_mat, 0, 255);
    cv::Mat gapi_age, gapi_gender;
    // Load & run IE network
    IE::Blob::Ptr ie_age, ie_gender;
    {
        auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
        auto net = cv::gimpl::ie::wrap::readNetwork(params);
        setNetParameters(net);
        auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
        auto infer_request = this_network.CreateInferRequest();
        infer_request.SetBlob("data", cv::gapi::ie::util::to_ie(in_mat));
        infer_request.Infer();
        ie_age    = infer_request.GetBlob("age_conv3");
        ie_gender = infer_request.GetBlob("prob");
    }
    // Configure & run G-API
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
    cv::GMat in;
    cv::GMat age, gender;
    std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
    cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
    auto pp = cv::gapi::ie::Params<AgeGender> {
        params.model_path, params.weights_path, params.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });
    comp.apply(cv::gin(in_mat), cv::gout(gapi_age, gapi_gender),
               cv::compile_args(cv::gapi::networks(pp)));
    // Validate with IE itself (avoid DNN module dependency here)
    normAssert(cv::gapi::ie::util::to_ocv(ie_age),    gapi_age,    "Test age output");
    normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
}
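// InferWithReshape reshapes the network's "data" input to 1x3x70x70 before loading it
// and collects per-ROI (or whole-frame) reference outputs via the IE API; the derived
// tests run the same inference through G-API and call validate() to compare.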
struct InferWithReshape: public ::testing::Test {
    cv::gapi::ie::detail::ParamDesc params;
    cv::Mat m_in_mat;
    std::vector<cv::Rect> m_roi_list;
    std::vector<size_t> reshape_dims;
    std::vector<cv::Mat> m_out_ie_ages;
    std::vector<cv::Mat> m_out_ie_genders;
    std::vector<cv::Mat> m_out_gapi_ages;
    std::vector<cv::Mat> m_out_gapi_genders;
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
    InferenceEngine::CNNNetwork net;
    InferenceEngine::Core plugin;

    InferWithReshape() {
        // FIXME: it must be cv::imread(findDataFile("../dnn/grace_hopper_227.png", false));
        m_in_mat = cv::Mat(cv::Size(320, 240), CV_8UC3);
        cv::randu(m_in_mat, 0, 255);
        m_out_gapi_ages.resize(1);
        m_out_gapi_genders.resize(1);
        // both ROIs point to the same face, with a slightly changed geometry
        m_roi_list = {
            cv::Rect(cv::Point{64, 60}, cv::Size{ 96, 96}),
            cv::Rect(cv::Point{50, 32}, cv::Size{128, 160}),
        };
        // New dimensions for "data" input
        reshape_dims = {1, 3, 70, 70};
        initDLDTDataPath();
        params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
        params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
        params.device_id = "CPU";
        plugin = cv::gimpl::ie::wrap::getPlugin(params);
        net    = cv::gimpl::ie::wrap::readNetwork(params);
        setNetParameters(net);
        net.reshape({{"data", reshape_dims}});
    }

    void inferROIs(IE::Blob::Ptr blob) {
        auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
        auto infer_request = this_network.CreateInferRequest();
        for (auto &&rc : m_roi_list) {
            const auto ie_rc = IE::ROI {
                0u
                , static_cast<std::size_t>(rc.x)
                , static_cast<std::size_t>(rc.y)
                , static_cast<std::size_t>(rc.width)
                , static_cast<std::size_t>(rc.height)
            };
            infer_request.SetBlob("data", IE::make_shared_blob(blob, ie_rc));
            infer_request.Infer();
            using namespace cv::gapi::ie::util;
            m_out_ie_ages.push_back(to_ocv(infer_request.GetBlob("age_conv3")).clone());
            m_out_ie_genders.push_back(to_ocv(infer_request.GetBlob("prob")).clone());
        }
    }

    void infer(cv::Mat& in, const bool with_roi = false) {
        if (!with_roi) {
            auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
            auto infer_request = this_network.CreateInferRequest();
            infer_request.SetBlob("data", cv::gapi::ie::util::to_ie(in));
            infer_request.Infer();
            using namespace cv::gapi::ie::util;
            m_out_ie_ages.push_back(to_ocv(infer_request.GetBlob("age_conv3")).clone());
            m_out_ie_genders.push_back(to_ocv(infer_request.GetBlob("prob")).clone());
        } else {
            auto frame_blob = cv::gapi::ie::util::to_ie(in);
            inferROIs(frame_blob);
        }
    }

    void validate() {
        // Validate with IE itself (avoid DNN module dependency here)
        GAPI_Assert(!m_out_gapi_ages.empty());
        ASSERT_EQ(m_out_gapi_genders.size(), m_out_gapi_ages.size());
        ASSERT_EQ(m_out_gapi_ages.size(), m_out_ie_ages.size());
        ASSERT_EQ(m_out_gapi_genders.size(), m_out_ie_genders.size());
        const size_t size = m_out_gapi_ages.size();
        for (size_t i = 0; i < size; ++i) {
            normAssert(m_out_ie_ages   [i], m_out_gapi_ages   [i], "Test age output");
            normAssert(m_out_ie_genders[i], m_out_gapi_genders[i], "Test gender output");
        }
    }
}; // InferWithReshape
struct InferWithReshapeNV12: public InferWithReshape {
    cv::Mat m_in_uv;
    cv::Mat m_in_y;
    void SetUp() {
        cv::Size sz{320, 240};
        m_in_y = cv::Mat{sz, CV_8UC1};
        cv::randu(m_in_y, 0, 255);
        m_in_uv = cv::Mat{sz / 2, CV_8UC2};
        cv::randu(m_in_uv, 0, 255);
        setNetParameters(net, true);
        net.reshape({{"data", reshape_dims}});
        auto frame_blob = cv::gapi::ie::util::to_ie(m_in_y, m_in_uv);
        inferROIs(frame_blob);
    }
};
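// ROIList/ROIListNV12 precompute reference age/gender outputs for two ROIs by feeding
// IE::ROI-wrapped blobs to an InferRequest; the TEST_F bodies below run the list-based
// G-API inference (infer/infer2) over the same ROIs and compare against these references.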
struct ROIList: public ::testing::Test {
    cv::gapi::ie::detail::ParamDesc params;
    cv::Mat m_in_mat;
    std::vector<cv::Rect> m_roi_list;
    std::vector<cv::Mat> m_out_ie_ages;
    std::vector<cv::Mat> m_out_ie_genders;
    std::vector<cv::Mat> m_out_gapi_ages;
    std::vector<cv::Mat> m_out_gapi_genders;
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");

    void SetUp() {
        initDLDTDataPath();
        params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
        params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
        params.device_id = "CPU";
        // FIXME: it must be cv::imread(findDataFile("../dnn/grace_hopper_227.png", false));
        m_in_mat = cv::Mat(cv::Size(320, 240), CV_8UC3);
        cv::randu(m_in_mat, 0, 255);
        // both ROIs point to the same face, with a slightly changed geometry
        m_roi_list = {
            cv::Rect(cv::Point{64, 60}, cv::Size{ 96, 96}),
            cv::Rect(cv::Point{50, 32}, cv::Size{128, 160}),
        };
        // Load & run IE network
        {
            auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
            auto net = cv::gimpl::ie::wrap::readNetwork(params);
            setNetParameters(net);
            auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
            auto infer_request = this_network.CreateInferRequest();
            auto frame_blob = cv::gapi::ie::util::to_ie(m_in_mat);
            for (auto &&rc : m_roi_list) {
                const auto ie_rc = IE::ROI {
                    0u
                    , static_cast<std::size_t>(rc.x)
                    , static_cast<std::size_t>(rc.y)
                    , static_cast<std::size_t>(rc.width)
                    , static_cast<std::size_t>(rc.height)
                };
                infer_request.SetBlob("data", IE::make_shared_blob(frame_blob, ie_rc));
                infer_request.Infer();
                using namespace cv::gapi::ie::util;
                m_out_ie_ages.push_back(to_ocv(infer_request.GetBlob("age_conv3")).clone());
                m_out_ie_genders.push_back(to_ocv(infer_request.GetBlob("prob")).clone());
            }
        } // IE reference scope
    } // SetUp()

    void validate() {
        // Validate with IE itself (avoid DNN module dependency here)
        ASSERT_EQ(2u, m_out_ie_ages.size());
        ASSERT_EQ(2u, m_out_ie_genders.size());
        ASSERT_EQ(2u, m_out_gapi_ages.size());
        ASSERT_EQ(2u, m_out_gapi_genders.size());
        normAssert(m_out_ie_ages   [0], m_out_gapi_ages   [0], "0: Test age output");
        normAssert(m_out_ie_genders[0], m_out_gapi_genders[0], "0: Test gender output");
        normAssert(m_out_ie_ages   [1], m_out_gapi_ages   [1], "1: Test age output");
        normAssert(m_out_ie_genders[1], m_out_gapi_genders[1], "1: Test gender output");
    }
}; // ROIList
struct ROIListNV12: public ::testing::Test {
    cv::gapi::ie::detail::ParamDesc params;
    cv::Mat m_in_uv;
    cv::Mat m_in_y;
    std::vector<cv::Rect> m_roi_list;
    std::vector<cv::Mat> m_out_ie_ages;
    std::vector<cv::Mat> m_out_ie_genders;
    std::vector<cv::Mat> m_out_gapi_ages;
    std::vector<cv::Mat> m_out_gapi_genders;
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");

    void SetUp() {
        initDLDTDataPath();
        params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
        params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
        params.device_id = "CPU";
        cv::Size sz{320, 240};
        m_in_y = cv::Mat{sz, CV_8UC1};
        cv::randu(m_in_y, 0, 255);
        m_in_uv = cv::Mat{sz / 2, CV_8UC2};
        cv::randu(m_in_uv, 0, 255);
        // both ROIs point to the same face, with a slightly changed geometry
        m_roi_list = {
            cv::Rect(cv::Point{64, 60}, cv::Size{ 96, 96}),
            cv::Rect(cv::Point{50, 32}, cv::Size{128, 160}),
        };
        // Load & run IE network
        {
            auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
            auto net = cv::gimpl::ie::wrap::readNetwork(params);
            setNetParameters(net, true);
            auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
            auto infer_request = this_network.CreateInferRequest();
            auto frame_blob = cv::gapi::ie::util::to_ie(m_in_y, m_in_uv);
            for (auto &&rc : m_roi_list) {
                const auto ie_rc = IE::ROI {
                    0u
                    , static_cast<std::size_t>(rc.x)
                    , static_cast<std::size_t>(rc.y)
                    , static_cast<std::size_t>(rc.width)
                    , static_cast<std::size_t>(rc.height)
                };
                infer_request.SetBlob("data", IE::make_shared_blob(frame_blob, ie_rc));
                infer_request.Infer();
                using namespace cv::gapi::ie::util;
                m_out_ie_ages.push_back(to_ocv(infer_request.GetBlob("age_conv3")).clone());
                m_out_ie_genders.push_back(to_ocv(infer_request.GetBlob("prob")).clone());
            }
        } // IE reference scope
    } // SetUp()

    void validate() {
        // Validate with IE itself (avoid DNN module dependency here)
        ASSERT_EQ(2u, m_out_ie_ages.size());
        ASSERT_EQ(2u, m_out_ie_genders.size());
        ASSERT_EQ(2u, m_out_gapi_ages.size());
        ASSERT_EQ(2u, m_out_gapi_genders.size());
        normAssert(m_out_ie_ages   [0], m_out_gapi_ages   [0], "0: Test age output");
        normAssert(m_out_ie_genders[0], m_out_gapi_genders[0], "0: Test gender output");
        normAssert(m_out_ie_ages   [1], m_out_gapi_ages   [1], "1: Test age output");
        normAssert(m_out_ie_genders[1], m_out_gapi_genders[1], "1: Test gender output");
    }
};
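// SingleROI/SingleROINV12 play the same role as the fixtures above, but for a single
// ROI passed as cv::GOpaque<cv::Rect>, in BGR and NV12 variants respectively.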
struct SingleROI: public ::testing::Test {
    cv::gapi::ie::detail::ParamDesc params;
    cv::Mat m_in_mat;
    cv::Rect m_roi;
    cv::Mat m_out_gapi_age;
    cv::Mat m_out_gapi_gender;
    cv::Mat m_out_ie_age;
    cv::Mat m_out_ie_gender;

    void SetUp() {
        initDLDTDataPath();
        params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
        params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
        params.device_id = "CPU";
        // FIXME: it must be cv::imread(findDataFile("../dnn/grace_hopper_227.png", false));
        m_in_mat = cv::Mat(cv::Size(320, 240), CV_8UC3);
        cv::randu(m_in_mat, 0, 255);
        m_roi = cv::Rect(cv::Point{64, 60}, cv::Size{96, 96});
        // Load & run IE network
        IE::Blob::Ptr ie_age, ie_gender;
        {
            auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
            auto net = cv::gimpl::ie::wrap::readNetwork(params);
            setNetParameters(net);
            auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
            auto infer_request = this_network.CreateInferRequest();
            const auto ie_rc = IE::ROI {
                0u
                , static_cast<std::size_t>(m_roi.x)
                , static_cast<std::size_t>(m_roi.y)
                , static_cast<std::size_t>(m_roi.width)
                , static_cast<std::size_t>(m_roi.height)
            };
            IE::Blob::Ptr roi_blob = IE::make_shared_blob(cv::gapi::ie::util::to_ie(m_in_mat), ie_rc);
            infer_request.SetBlob("data", roi_blob);
            infer_request.Infer();
            using namespace cv::gapi::ie::util;
            m_out_ie_age    = to_ocv(infer_request.GetBlob("age_conv3")).clone();
            m_out_ie_gender = to_ocv(infer_request.GetBlob("prob")).clone();
        }
    }

    void validate() {
        // Validate with IE itself (avoid DNN module dependency here)
        normAssert(m_out_ie_age   , m_out_gapi_age   , "Test age output");
        normAssert(m_out_ie_gender, m_out_gapi_gender, "Test gender output");
    }
};
struct SingleROINV12: public ::testing::Test {
    cv::gapi::ie::detail::ParamDesc params;
    cv::Mat m_in_y;
    cv::Mat m_in_uv;
    cv::Rect m_roi;
    cv::Mat m_out_gapi_age;
    cv::Mat m_out_gapi_gender;
    cv::Mat m_out_ie_age;
    cv::Mat m_out_ie_gender;

    void SetUp() {
        initDLDTDataPath();
        params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
        params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
        params.device_id = "CPU";
        cv::Size sz{320, 240};
        m_in_y = cv::Mat{sz, CV_8UC1};
        cv::randu(m_in_y, 0, 255);
        m_in_uv = cv::Mat{sz / 2, CV_8UC2};
        cv::randu(m_in_uv, 0, 255);
        m_roi = cv::Rect(cv::Point{64, 60}, cv::Size{96, 96});
        // Load & run IE network
        IE::Blob::Ptr ie_age, ie_gender;
        {
            auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
            auto net = cv::gimpl::ie::wrap::readNetwork(params);
            setNetParameters(net, /* NV12 */ true);
            auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
            auto infer_request = this_network.CreateInferRequest();
            auto blob = cv::gapi::ie::util::to_ie(m_in_y, m_in_uv);
            const auto ie_rc = IE::ROI {
                0u
                , static_cast<std::size_t>(m_roi.x)
                , static_cast<std::size_t>(m_roi.y)
                , static_cast<std::size_t>(m_roi.width)
                , static_cast<std::size_t>(m_roi.height)
            };
            IE::Blob::Ptr roi_blob = IE::make_shared_blob(blob, ie_rc);
            infer_request.SetBlob("data", roi_blob);
            infer_request.Infer();
            using namespace cv::gapi::ie::util;
            m_out_ie_age    = to_ocv(infer_request.GetBlob("age_conv3")).clone();
            m_out_ie_gender = to_ocv(infer_request.GetBlob("prob")).clone();
        }
    }

    void validate() {
        // Validate with IE itself (avoid DNN module dependency here)
        normAssert(m_out_ie_age   , m_out_gapi_age   , "Test age output");
        normAssert(m_out_ie_gender, m_out_gapi_gender, "Test gender output");
    }
};
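// The two tests below cover both ROI-list entry points: cv::gapi::infer<Net>(rois, input)
// and cv::gapi::infer2<Net>(input, rois). Both are expected to match the per-ROI
// reference results collected in the ROIList fixture.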
TEST_F(ROIList, TestInfer)
{
    cv::GArray<cv::Rect> rr;
    cv::GMat in;
    cv::GArray<cv::GMat> age, gender;
    std::tie(age, gender) = cv::gapi::infer<AgeGender>(rr, in);
    cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
    auto pp = cv::gapi::ie::Params<AgeGender> {
        params.model_path, params.weights_path, params.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });
    comp.apply(cv::gin(m_in_mat, m_roi_list),
               cv::gout(m_out_gapi_ages, m_out_gapi_genders),
               cv::compile_args(cv::gapi::networks(pp)));
    validate();
}

TEST_F(ROIList, TestInfer2)
{
    cv::GArray<cv::Rect> rr;
    cv::GMat in;
    cv::GArray<cv::GMat> age, gender;
    std::tie(age, gender) = cv::gapi::infer2<AgeGender>(in, rr);
    cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
    auto pp = cv::gapi::ie::Params<AgeGender> {
        params.model_path, params.weights_path, params.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });
    comp.apply(cv::gin(m_in_mat, m_roi_list),
               cv::gout(m_out_gapi_ages, m_out_gapi_genders),
               cv::compile_args(cv::gapi::networks(pp)));
    validate();
}
TEST(DISABLED_TestTwoIENNPipeline, InferBasicImage)
{
    initDLDTDataPath();
    cv::gapi::ie::detail::ParamDesc AGparams;
    AGparams.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml", false);
    AGparams.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin", false);
    AGparams.device_id = "MYRIAD";
    // FIXME: Ideally it should be an image from disk
    // cv::Mat in_mat = cv::imread(findDataFile("grace_hopper_227.png"));
    cv::Mat in_mat(cv::Size(320, 240), CV_8UC3);
    cv::randu(in_mat, 0, 255);
    cv::Mat gapi_age1, gapi_gender1, gapi_age2, gapi_gender2;
    // Load & run IE network
    IE::Blob::Ptr ie_age1, ie_gender1, ie_age2, ie_gender2;
    {
        auto AGplugin1 = cv::gimpl::ie::wrap::getPlugin(AGparams);
        auto AGnet1 = cv::gimpl::ie::wrap::readNetwork(AGparams);
        setNetParameters(AGnet1);
        auto AGplugin_network1 = cv::gimpl::ie::wrap::loadNetwork(AGplugin1, AGnet1, AGparams);
        auto AGinfer_request1 = AGplugin_network1.CreateInferRequest();
        AGinfer_request1.SetBlob("data", cv::gapi::ie::util::to_ie(in_mat));
        AGinfer_request1.Infer();
        ie_age1    = AGinfer_request1.GetBlob("age_conv3");
        ie_gender1 = AGinfer_request1.GetBlob("prob");
        auto AGplugin2 = cv::gimpl::ie::wrap::getPlugin(AGparams);
        auto AGnet2 = cv::gimpl::ie::wrap::readNetwork(AGparams);
        setNetParameters(AGnet2);
        auto AGplugin_network2 = cv::gimpl::ie::wrap::loadNetwork(AGplugin2, AGnet2, AGparams);
        auto AGinfer_request2 = AGplugin_network2.CreateInferRequest();
        AGinfer_request2.SetBlob("data", cv::gapi::ie::util::to_ie(in_mat));
        AGinfer_request2.Infer();
        ie_age2    = AGinfer_request2.GetBlob("age_conv3");
        ie_gender2 = AGinfer_request2.GetBlob("prob");
    }
    // Configure & run G-API
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender1, <AGInfo(cv::GMat)>, "test-age-gender1");
    G_API_NET(AgeGender2, <AGInfo(cv::GMat)>, "test-age-gender2");
    cv::GMat in;
    cv::GMat age1, gender1;
    std::tie(age1, gender1) = cv::gapi::infer<AgeGender1>(in);
    cv::GMat age2, gender2;
    // FIXME: "Multi-node inference is not supported!", workarounded 'till enabling proper tools
    std::tie(age2, gender2) = cv::gapi::infer<AgeGender2>(cv::gapi::copy(in));
    cv::GComputation comp(cv::GIn(in), cv::GOut(age1, gender1, age2, gender2));
    auto age_net1 = cv::gapi::ie::Params<AgeGender1> {
        AGparams.model_path, AGparams.weights_path, AGparams.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });
    auto age_net2 = cv::gapi::ie::Params<AgeGender2> {
        AGparams.model_path, AGparams.weights_path, AGparams.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });
    comp.apply(cv::gin(in_mat), cv::gout(gapi_age1, gapi_gender1, gapi_age2, gapi_gender2),
               cv::compile_args(cv::gapi::networks(age_net1, age_net2)));
    // Validate with IE itself (avoid DNN module dependency here)
    normAssert(cv::gapi::ie::util::to_ocv(ie_age1),    gapi_age1,    "Test age output 1");
    normAssert(cv::gapi::ie::util::to_ocv(ie_gender1), gapi_gender1, "Test gender output 1");
    normAssert(cv::gapi::ie::util::to_ocv(ie_age2),    gapi_age2,    "Test age output 2");
    normAssert(cv::gapi::ie::util::to_ocv(ie_gender2), gapi_gender2, "Test gender output 2");
}
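// Generic inference: instead of a G_API_NET-declared type, the network is referenced by
// a string tag ("age-gender-generic"), and inputs/outputs are addressed by layer name
// through GInferInputs and the returned output map.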
TEST(TestAgeGenderIE, GenericInfer)
{
    initDLDTDataPath();
    cv::gapi::ie::detail::ParamDesc params;
    params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
    params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
    params.device_id = "CPU";
    cv::Mat in_mat(cv::Size(320, 240), CV_8UC3);
    cv::randu(in_mat, 0, 255);
    cv::Mat gapi_age, gapi_gender;
    // Load & run IE network
    IE::Blob::Ptr ie_age, ie_gender;
    {
        auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
        auto net = cv::gimpl::ie::wrap::readNetwork(params);
        setNetParameters(net);
        auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
        auto infer_request = this_network.CreateInferRequest();
        infer_request.SetBlob("data", cv::gapi::ie::util::to_ie(in_mat));
        infer_request.Infer();
        ie_age    = infer_request.GetBlob("age_conv3");
        ie_gender = infer_request.GetBlob("prob");
    }
    // Configure & run G-API
    cv::GMat in;
    GInferInputs inputs;
    inputs["data"] = in;
    auto outputs = cv::gapi::infer<cv::gapi::Generic>("age-gender-generic", inputs);
    auto age = outputs.at("age_conv3");
    auto gender = outputs.at("prob");
    cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
    cv::gapi::ie::Params<cv::gapi::Generic> pp{
        "age-gender-generic", params.model_path, params.weights_path, params.device_id};
    comp.apply(cv::gin(in_mat), cv::gout(gapi_age, gapi_gender),
               cv::compile_args(cv::gapi::networks(pp)));
    // Validate with IE itself (avoid DNN module dependency here)
    normAssert(cv::gapi::ie::util::to_ocv(ie_age),    gapi_age,    "Test age output");
    normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
}
TEST(TestAgeGenderIE, InvalidConfigGeneric)
{
    initDLDTDataPath();
    std::string model_path   = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
    std::string weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
    std::string device_id    = "CPU";
    // Configure & run G-API
    cv::GMat in;
    GInferInputs inputs;
    inputs["data"] = in;
    auto outputs = cv::gapi::infer<cv::gapi::Generic>("age-gender-generic", inputs);
    auto age = outputs.at("age_conv3");
    auto gender = outputs.at("prob");
    cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
    auto pp = cv::gapi::ie::Params<cv::gapi::Generic>{
        "age-gender-generic", model_path, weights_path, device_id
    }.pluginConfig({{"unsupported_config", "some_value"}});
    EXPECT_ANY_THROW(comp.compile(cv::GMatDesc{CV_8U,3,cv::Size{320, 240}},
                     cv::compile_args(cv::gapi::networks(pp))));
}

TEST(TestAgeGenderIE, CPUConfigGeneric)
{
    initDLDTDataPath();
    std::string model_path   = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
    std::string weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
    std::string device_id    = "CPU";
    // Configure & run G-API
    cv::GMat in;
    GInferInputs inputs;
    inputs["data"] = in;
    auto outputs = cv::gapi::infer<cv::gapi::Generic>("age-gender-generic", inputs);
    auto age = outputs.at("age_conv3");
    auto gender = outputs.at("prob");
    cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
    auto pp = cv::gapi::ie::Params<cv::gapi::Generic> {
        "age-gender-generic", model_path, weights_path, device_id
    }.pluginConfig({{IE::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS,
                     IE::PluginConfigParams::CPU_THROUGHPUT_NUMA}});
    EXPECT_NO_THROW(comp.compile(cv::GMatDesc{CV_8U,3,cv::Size{320, 240}},
                    cv::compile_args(cv::gapi::networks(pp))));
}

TEST(TestAgeGenderIE, InvalidConfig)
{
    initDLDTDataPath();
    std::string model_path   = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
    std::string weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
    std::string device_id    = "CPU";
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
    cv::GMat in;
    cv::GMat age, gender;
    std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
    cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
    auto pp = cv::gapi::ie::Params<AgeGender> {
        model_path, weights_path, device_id
    }.cfgOutputLayers({ "age_conv3", "prob" })
     .pluginConfig({{"unsupported_config", "some_value"}});
    EXPECT_ANY_THROW(comp.compile(cv::GMatDesc{CV_8U,3,cv::Size{320, 240}},
                     cv::compile_args(cv::gapi::networks(pp))));
}

TEST(TestAgeGenderIE, CPUConfig)
{
    initDLDTDataPath();
    std::string model_path   = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
    std::string weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
    std::string device_id    = "CPU";
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
    cv::GMat in;
    cv::GMat age, gender;
    std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
    cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
    auto pp = cv::gapi::ie::Params<AgeGender> {
        model_path, weights_path, device_id
    }.cfgOutputLayers({ "age_conv3", "prob" })
     .pluginConfig({{IE::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS,
                     IE::PluginConfigParams::CPU_THROUGHPUT_NUMA}});
    EXPECT_NO_THROW(comp.compile(cv::GMatDesc{CV_8U,3,cv::Size{320, 240}},
                    cv::compile_args(cv::gapi::networks(pp))));
}
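// The following tests feed cv::MediaFrame (cv::GFrame) inputs built from the
// TestMediaBGR/TestMediaNV12 adapters defined at the top of this file, instead of
// plain cv::Mat inputs.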
TEST_F(ROIList, MediaInputBGR)
{
    initDLDTDataPath();
    cv::GFrame in;
    cv::GArray<cv::Rect> rr;
    cv::GArray<cv::GMat> age, gender;
    std::tie(age, gender) = cv::gapi::infer<AgeGender>(rr, in);
    cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
    auto frame = MediaFrame::Create<TestMediaBGR>(m_in_mat);
    auto pp = cv::gapi::ie::Params<AgeGender> {
        params.model_path, params.weights_path, params.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });
    comp.apply(cv::gin(frame, m_roi_list),
               cv::gout(m_out_gapi_ages, m_out_gapi_genders),
               cv::compile_args(cv::gapi::networks(pp)));
    validate();
}

TEST_F(ROIListNV12, MediaInputNV12)
{
    initDLDTDataPath();
    cv::GFrame in;
    cv::GArray<cv::Rect> rr;
    cv::GArray<cv::GMat> age, gender;
    std::tie(age, gender) = cv::gapi::infer<AgeGender>(rr, in);
    cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
    auto frame = MediaFrame::Create<TestMediaNV12>(m_in_y, m_in_uv);
    auto pp = cv::gapi::ie::Params<AgeGender> {
        params.model_path, params.weights_path, params.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });
    comp.apply(cv::gin(frame, m_roi_list),
               cv::gout(m_out_gapi_ages, m_out_gapi_genders),
               cv::compile_args(cv::gapi::networks(pp)));
    validate();
}
TEST(TestAgeGenderIE, MediaInputNV12)
{
    initDLDTDataPath();
    cv::gapi::ie::detail::ParamDesc params;
    params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
    params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
    params.device_id = "CPU";
    cv::Size sz{320, 240};
    cv::Mat in_y_mat(sz, CV_8UC1);
    cv::randu(in_y_mat, 0, 255);
    cv::Mat in_uv_mat(sz / 2, CV_8UC2);
    cv::randu(in_uv_mat, 0, 255);
    cv::Mat gapi_age, gapi_gender;
    // Load & run IE network
    IE::Blob::Ptr ie_age, ie_gender;
    {
        auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
        auto net = cv::gimpl::ie::wrap::readNetwork(params);
        setNetParameters(net, true);
        auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
        auto infer_request = this_network.CreateInferRequest();
        infer_request.SetBlob("data", cv::gapi::ie::util::to_ie(in_y_mat, in_uv_mat));
        infer_request.Infer();
        ie_age    = infer_request.GetBlob("age_conv3");
        ie_gender = infer_request.GetBlob("prob");
    }
    // Configure & run G-API
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
    cv::GFrame in;
    cv::GMat age, gender;
    std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
    cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
    auto frame = MediaFrame::Create<TestMediaNV12>(in_y_mat, in_uv_mat);
    auto pp = cv::gapi::ie::Params<AgeGender> {
        params.model_path, params.weights_path, params.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });
    comp.apply(cv::gin(frame), cv::gout(gapi_age, gapi_gender),
               cv::compile_args(cv::gapi::networks(pp)));
    // Validate with IE itself (avoid DNN module dependency here)
    normAssert(cv::gapi::ie::util::to_ocv(ie_age),    gapi_age,    "Test age output");
    normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
}

TEST(TestAgeGenderIE, MediaInputBGR)
{
    initDLDTDataPath();
    cv::gapi::ie::detail::ParamDesc params;
    params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
    params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
    params.device_id = "CPU";
    cv::Size sz{320, 240};
    cv::Mat in_mat(sz, CV_8UC3);
    cv::randu(in_mat, 0, 255);
    cv::Mat gapi_age, gapi_gender;
    // Load & run IE network
    IE::Blob::Ptr ie_age, ie_gender;
    {
        auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
        auto net = cv::gimpl::ie::wrap::readNetwork(params);
        setNetParameters(net);
        auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
        auto infer_request = this_network.CreateInferRequest();
        infer_request.SetBlob("data", cv::gapi::ie::util::to_ie(in_mat));
        infer_request.Infer();
        ie_age    = infer_request.GetBlob("age_conv3");
        ie_gender = infer_request.GetBlob("prob");
    }
    // Configure & run G-API
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
    cv::GFrame in;
    cv::GMat age, gender;
    std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
    cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
    auto frame = MediaFrame::Create<TestMediaBGR>(in_mat);
    auto pp = cv::gapi::ie::Params<AgeGender> {
        params.model_path, params.weights_path, params.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });
    comp.apply(cv::gin(frame), cv::gout(gapi_age, gapi_gender),
               cv::compile_args(cv::gapi::networks(pp)));
    // Validate with IE itself (avoid DNN module dependency here)
    normAssert(cv::gapi::ie::util::to_ocv(ie_age),    gapi_age,    "Test age output");
    normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
}
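// InferROI tests: a single cv::GOpaque<cv::Rect> ROI combined with a MediaFrame input,
// validated against an IE run on the equivalent IE::ROI-wrapped blob.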
TEST(InferROI, MediaInputBGR)
{
    initDLDTDataPath();
    cv::gapi::ie::detail::ParamDesc params;
    params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
    params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
    params.device_id = "CPU";
    cv::Size sz{320, 240};
    cv::Mat in_mat(sz, CV_8UC3);
    cv::randu(in_mat, 0, 255);
    cv::Mat gapi_age, gapi_gender;
    cv::Rect rect(cv::Point{64, 60}, cv::Size{96, 96});
    // Load & run IE network
    IE::Blob::Ptr ie_age, ie_gender;
    {
        auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
        auto net = cv::gimpl::ie::wrap::readNetwork(params);
        setNetParameters(net);
        auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
        auto infer_request = this_network.CreateInferRequest();
        const auto ie_rc = IE::ROI {
            0u
            , static_cast<std::size_t>(rect.x)
            , static_cast<std::size_t>(rect.y)
            , static_cast<std::size_t>(rect.width)
            , static_cast<std::size_t>(rect.height)
        };
        IE::Blob::Ptr roi_blob = IE::make_shared_blob(cv::gapi::ie::util::to_ie(in_mat), ie_rc);
        infer_request.SetBlob("data", roi_blob);
        infer_request.Infer();
        ie_age    = infer_request.GetBlob("age_conv3");
        ie_gender = infer_request.GetBlob("prob");
    }
    // Configure & run G-API
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
    cv::GFrame in;
    cv::GOpaque<cv::Rect> roi;
    cv::GMat age, gender;
    std::tie(age, gender) = cv::gapi::infer<AgeGender>(roi, in);
    cv::GComputation comp(cv::GIn(in, roi), cv::GOut(age, gender));
    auto frame = MediaFrame::Create<TestMediaBGR>(in_mat);
    auto pp = cv::gapi::ie::Params<AgeGender> {
        params.model_path, params.weights_path, params.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });
    comp.apply(cv::gin(frame, rect), cv::gout(gapi_age, gapi_gender),
               cv::compile_args(cv::gapi::networks(pp)));
    // Validate with IE itself (avoid DNN module dependency here)
    normAssert(cv::gapi::ie::util::to_ocv(ie_age),    gapi_age,    "Test age output");
    normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
}

TEST(InferROI, MediaInputNV12)
{
    initDLDTDataPath();
    cv::gapi::ie::detail::ParamDesc params;
    params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
    params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
    params.device_id = "CPU";
    cv::Size sz{320, 240};
    auto in_y_mat = cv::Mat{sz, CV_8UC1};
    cv::randu(in_y_mat, 0, 255);
    auto in_uv_mat = cv::Mat{sz / 2, CV_8UC2};
    cv::randu(in_uv_mat, 0, 255);
    cv::Mat gapi_age, gapi_gender;
    cv::Rect rect(cv::Point{64, 60}, cv::Size{96, 96});
    // Load & run IE network
    IE::Blob::Ptr ie_age, ie_gender;
    {
        auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
        auto net = cv::gimpl::ie::wrap::readNetwork(params);
        setNetParameters(net, true);
        auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
        auto infer_request = this_network.CreateInferRequest();
        const auto ie_rc = IE::ROI {
            0u
            , static_cast<std::size_t>(rect.x)
            , static_cast<std::size_t>(rect.y)
            , static_cast<std::size_t>(rect.width)
            , static_cast<std::size_t>(rect.height)
        };
        IE::Blob::Ptr roi_blob = IE::make_shared_blob(cv::gapi::ie::util::to_ie(in_y_mat, in_uv_mat), ie_rc);
        infer_request.SetBlob("data", roi_blob);
        infer_request.Infer();
        ie_age    = infer_request.GetBlob("age_conv3");
        ie_gender = infer_request.GetBlob("prob");
    }
    // Configure & run G-API
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
    cv::GFrame in;
    cv::GOpaque<cv::Rect> roi;
    cv::GMat age, gender;
    std::tie(age, gender) = cv::gapi::infer<AgeGender>(roi, in);
    cv::GComputation comp(cv::GIn(in, roi), cv::GOut(age, gender));
    auto frame = MediaFrame::Create<TestMediaNV12>(in_y_mat, in_uv_mat);
    auto pp = cv::gapi::ie::Params<AgeGender> {
        params.model_path, params.weights_path, params.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });
    comp.apply(cv::gin(frame, rect), cv::gout(gapi_age, gapi_gender),
               cv::compile_args(cv::gapi::networks(pp)));
    // Validate with IE itself (avoid DNN module dependency here)
    normAssert(cv::gapi::ie::util::to_ocv(ie_age),    gapi_age,    "Test age output");
    normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
}
  991. TEST_F(ROIList, Infer2MediaInputBGR)
  992. {
  993. cv::GArray<cv::Rect> rr;
  994. cv::GFrame in;
  995. cv::GArray<cv::GMat> age, gender;
  996. std::tie(age, gender) = cv::gapi::infer2<AgeGender>(in, rr);
  997. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  998. auto frame = MediaFrame::Create<TestMediaBGR>(m_in_mat);
  999. auto pp = cv::gapi::ie::Params<AgeGender> {
  1000. params.model_path, params.weights_path, params.device_id
  1001. }.cfgOutputLayers({ "age_conv3", "prob" });
  1002. comp.apply(cv::gin(frame, m_roi_list),
  1003. cv::gout(m_out_gapi_ages, m_out_gapi_genders),
  1004. cv::compile_args(cv::gapi::networks(pp)));
  1005. validate();
  1006. }
  1007. TEST_F(ROIListNV12, Infer2MediaInputNV12)
  1008. {
  1009. cv::GArray<cv::Rect> rr;
  1010. cv::GFrame in;
  1011. cv::GArray<cv::GMat> age, gender;
  1012. std::tie(age, gender) = cv::gapi::infer2<AgeGender>(in, rr);
  1013. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1014. auto frame = MediaFrame::Create<TestMediaNV12>(m_in_y, m_in_uv);
  1015. auto pp = cv::gapi::ie::Params<AgeGender> {
  1016. params.model_path, params.weights_path, params.device_id
  1017. }.cfgOutputLayers({ "age_conv3", "prob" });
  1018. comp.apply(cv::gin(frame, m_roi_list),
  1019. cv::gout(m_out_gapi_ages, m_out_gapi_genders),
  1020. cv::compile_args(cv::gapi::networks(pp)));
  1021. validate();
  1022. }
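// "Generic" inference: the network is referred to by its string tag
// ("age-gender-generic") and inputs/outputs are addressed by layer name via
// GInferInputs, so no G_API_NET declaration is required on the user side.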
  1023. TEST_F(SingleROI, GenericInfer)
  1024. {
  1025. // Configure & run G-API
  1026. cv::GMat in;
  1027. cv::GOpaque<cv::Rect> roi;
  1028. cv::GInferInputs inputs;
  1029. inputs["data"] = in;
  1030. auto outputs = cv::gapi::infer<cv::gapi::Generic>("age-gender-generic", roi, inputs);
  1031. auto age = outputs.at("age_conv3");
  1032. auto gender = outputs.at("prob");
  1033. cv::GComputation comp(cv::GIn(in, roi), cv::GOut(age, gender));
  1034. cv::gapi::ie::Params<cv::gapi::Generic> pp{
  1035. "age-gender-generic", params.model_path, params.weights_path, params.device_id
  1036. };
  1037. pp.cfgNumRequests(2u);
  1038. comp.apply(cv::gin(m_in_mat, m_roi), cv::gout(m_out_gapi_age, m_out_gapi_gender),
  1039. cv::compile_args(cv::gapi::networks(pp)));
  1040. validate();
  1041. }
  1042. TEST_F(SingleROI, GenericInferMediaBGR)
  1043. {
  1044. // Configure & run G-API
  1045. cv::GFrame in;
  1046. cv::GOpaque<cv::Rect> roi;
  1047. cv::GInferInputs inputs;
  1048. inputs["data"] = in;
  1049. auto outputs = cv::gapi::infer<cv::gapi::Generic>("age-gender-generic", roi, inputs);
  1050. auto age = outputs.at("age_conv3");
  1051. auto gender = outputs.at("prob");
  1052. cv::GComputation comp(cv::GIn(in, roi), cv::GOut(age, gender));
  1053. cv::gapi::ie::Params<cv::gapi::Generic> pp{
  1054. "age-gender-generic", params.model_path, params.weights_path, params.device_id
  1055. };
  1056. pp.cfgNumRequests(2u);
  1057. auto frame = MediaFrame::Create<TestMediaBGR>(m_in_mat);
  1058. comp.apply(cv::gin(frame, m_roi), cv::gout(m_out_gapi_age, m_out_gapi_gender),
  1059. cv::compile_args(cv::gapi::networks(pp)));
  1060. validate();
  1061. }
  1062. TEST_F(SingleROINV12, GenericInferMediaNV12)
  1063. {
  1064. // Configure & run G-API
  1065. cv::GFrame in;
  1066. cv::GOpaque<cv::Rect> roi;
  1067. cv::GInferInputs inputs;
  1068. inputs["data"] = in;
  1069. auto outputs = cv::gapi::infer<cv::gapi::Generic>("age-gender-generic", roi, inputs);
  1070. auto age = outputs.at("age_conv3");
  1071. auto gender = outputs.at("prob");
  1072. cv::GComputation comp(cv::GIn(in, roi), cv::GOut(age, gender));
  1073. cv::gapi::ie::Params<cv::gapi::Generic> pp{
  1074. "age-gender-generic", params.model_path, params.weights_path, params.device_id
  1075. };
  1076. pp.cfgNumRequests(2u);
  1077. auto frame = MediaFrame::Create<TestMediaNV12>(m_in_y, m_in_uv);
  1078. comp.apply(cv::gin(frame, m_roi), cv::gout(m_out_gapi_age, m_out_gapi_gender),
  1079. cv::compile_args(cv::gapi::networks(pp)));
  1080. validate();
  1081. }
  1082. TEST_F(ROIList, GenericInfer)
  1083. {
  1084. cv::GMat in;
  1085. cv::GArray<cv::Rect> rr;
  1086. cv::GInferInputs inputs;
  1087. inputs["data"] = in;
  1088. auto outputs = cv::gapi::infer<cv::gapi::Generic>("age-gender-generic", rr, inputs);
  1089. auto age = outputs.at("age_conv3");
  1090. auto gender = outputs.at("prob");
  1091. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1092. cv::gapi::ie::Params<cv::gapi::Generic> pp{
  1093. "age-gender-generic", params.model_path, params.weights_path, params.device_id
  1094. };
  1095. pp.cfgNumRequests(2u);
  1096. comp.apply(cv::gin(m_in_mat, m_roi_list),
  1097. cv::gout(m_out_gapi_ages, m_out_gapi_genders),
  1098. cv::compile_args(cv::gapi::networks(pp)));
  1099. validate();
  1100. }
  1101. TEST_F(ROIList, GenericInferMediaBGR)
  1102. {
  1103. cv::GFrame in;
  1104. cv::GArray<cv::Rect> rr;
  1105. cv::GInferInputs inputs;
  1106. inputs["data"] = in;
  1107. auto outputs = cv::gapi::infer<cv::gapi::Generic>("age-gender-generic", rr, inputs);
  1108. auto age = outputs.at("age_conv3");
  1109. auto gender = outputs.at("prob");
  1110. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1111. cv::gapi::ie::Params<cv::gapi::Generic> pp{
  1112. "age-gender-generic", params.model_path, params.weights_path, params.device_id
  1113. };
  1114. pp.cfgNumRequests(2u);
  1115. auto frame = MediaFrame::Create<TestMediaBGR>(m_in_mat);
  1116. comp.apply(cv::gin(frame, m_roi_list),
  1117. cv::gout(m_out_gapi_ages, m_out_gapi_genders),
  1118. cv::compile_args(cv::gapi::networks(pp)));
  1119. validate();
  1120. }
  1121. TEST_F(ROIListNV12, GenericInferMediaNV12)
  1122. {
  1123. cv::GFrame in;
  1124. cv::GArray<cv::Rect> rr;
  1125. cv::GInferInputs inputs;
  1126. inputs["data"] = in;
  1127. auto outputs = cv::gapi::infer<cv::gapi::Generic>("age-gender-generic", rr, inputs);
  1128. auto age = outputs.at("age_conv3");
  1129. auto gender = outputs.at("prob");
  1130. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1131. cv::gapi::ie::Params<cv::gapi::Generic> pp{
  1132. "age-gender-generic", params.model_path, params.weights_path, params.device_id
  1133. };
  1134. pp.cfgNumRequests(2u);
  1135. auto frame = MediaFrame::Create<TestMediaNV12>(m_in_y, m_in_uv);
  1136. comp.apply(cv::gin(frame, m_roi_list),
  1137. cv::gout(m_out_gapi_ages, m_out_gapi_genders),
  1138. cv::compile_args(cv::gapi::networks(pp)));
  1139. validate();
  1140. }
  1141. TEST_F(ROIList, GenericInfer2)
  1142. {
  1143. cv::GArray<cv::Rect> rr;
  1144. cv::GMat in;
  1145. GInferListInputs list;
  1146. list["data"] = rr;
  1147. auto outputs = cv::gapi::infer2<cv::gapi::Generic>("age-gender-generic", in, list);
  1148. auto age = outputs.at("age_conv3");
  1149. auto gender = outputs.at("prob");
  1150. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1151. cv::gapi::ie::Params<cv::gapi::Generic> pp{
  1152. "age-gender-generic", params.model_path, params.weights_path, params.device_id
  1153. };
  1154. pp.cfgNumRequests(2u);
  1155. comp.apply(cv::gin(m_in_mat, m_roi_list),
  1156. cv::gout(m_out_gapi_ages, m_out_gapi_genders),
  1157. cv::compile_args(cv::gapi::networks(pp)));
  1158. validate();
  1159. }
  1160. TEST_F(ROIList, GenericInfer2MediaInputBGR)
  1161. {
  1162. cv::GArray<cv::Rect> rr;
  1163. cv::GFrame in;
  1164. GInferListInputs inputs;
  1165. inputs["data"] = rr;
  1166. auto outputs = cv::gapi::infer2<cv::gapi::Generic>("age-gender-generic", in, inputs);
  1167. auto age = outputs.at("age_conv3");
  1168. auto gender = outputs.at("prob");
  1169. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1170. cv::gapi::ie::Params<cv::gapi::Generic> pp{
  1171. "age-gender-generic", params.model_path, params.weights_path, params.device_id
  1172. };
  1173. pp.cfgNumRequests(2u);
  1174. auto frame = MediaFrame::Create<TestMediaBGR>(m_in_mat);
  1175. comp.apply(cv::gin(frame, m_roi_list),
  1176. cv::gout(m_out_gapi_ages, m_out_gapi_genders),
  1177. cv::compile_args(cv::gapi::networks(pp)));
  1178. validate();
  1179. }
  1180. TEST_F(ROIListNV12, GenericInfer2MediaInputNV12)
  1181. {
  1182. cv::GArray<cv::Rect> rr;
  1183. cv::GFrame in;
  1184. GInferListInputs inputs;
  1185. inputs["data"] = rr;
  1186. auto outputs = cv::gapi::infer2<cv::gapi::Generic>("age-gender-generic", in, inputs);
  1187. auto age = outputs.at("age_conv3");
  1188. auto gender = outputs.at("prob");
  1189. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1190. cv::gapi::ie::Params<cv::gapi::Generic> pp{
  1191. "age-gender-generic", params.model_path, params.weights_path, params.device_id
  1192. };
  1193. pp.cfgNumRequests(2u);
  1194. auto frame = MediaFrame::Create<TestMediaNV12>(m_in_y, m_in_uv);
  1195. comp.apply(cv::gin(frame, m_roi_list),
  1196. cv::gout(m_out_gapi_ages, m_out_gapi_genders),
  1197. cv::compile_args(cv::gapi::networks(pp)));
  1198. validate();
  1199. }
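// cfgNumRequests(0) is invalid by contract, so configuring zero infer requests
// must throw rather than be silently accepted.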
  1200. TEST(Infer, SetInvalidNumberOfRequests)
  1201. {
  1202. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  1203. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  1204. cv::gapi::ie::Params<AgeGender> pp{"model", "weights", "device"};
  1205. EXPECT_ANY_THROW(pp.cfgNumRequests(0u));
  1206. }
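// Streaming tests: the graph is compiled with compileStreaming(), fed from a
// video file via GCaptureSource, and every pulled result is re-checked against
// a synchronous IE inference on the same frame read from a parallel capture.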
  1207. TEST(Infer, TestStreamingInfer)
  1208. {
  1209. initDLDTDataPath();
  1210. std::string filepath = findDataFile("cv/video/768x576.avi");
  1211. cv::gapi::ie::detail::ParamDesc params;
  1212. params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
  1213. params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
  1214. params.device_id = "CPU";
  1215. // Load IE network, initialize input data using that.
  1216. cv::Mat in_mat;
  1217. cv::Mat gapi_age, gapi_gender;
  1218. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  1219. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  1220. cv::GMat in;
  1221. cv::GMat age, gender;
  1222. std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
  1223. cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
  1224. auto pp = cv::gapi::ie::Params<AgeGender> {
  1225. params.model_path, params.weights_path, params.device_id
  1226. }.cfgOutputLayers({ "age_conv3", "prob" })
  1227. .cfgNumRequests(4u);
  1228. std::size_t num_frames = 0u;
  1229. std::size_t max_frames = 10u;
  1230. cv::VideoCapture cap;
  1231. cap.open(filepath);
  1232. if (!cap.isOpened())
1233. throw SkipTestException("Video file cannot be opened");
  1234. cap >> in_mat;
  1235. auto pipeline = comp.compileStreaming(cv::compile_args(cv::gapi::networks(pp)));
  1236. pipeline.setSource<cv::gapi::wip::GCaptureSource>(filepath);
  1237. pipeline.start();
  1238. while (num_frames < max_frames && pipeline.pull(cv::gout(gapi_age, gapi_gender)))
  1239. {
  1240. IE::Blob::Ptr ie_age, ie_gender;
  1241. {
  1242. auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
  1243. auto net = cv::gimpl::ie::wrap::readNetwork(params);
  1244. setNetParameters(net);
  1245. auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
  1246. auto infer_request = this_network.CreateInferRequest();
  1247. infer_request.SetBlob("data", cv::gapi::ie::util::to_ie(in_mat));
  1248. infer_request.Infer();
  1249. ie_age = infer_request.GetBlob("age_conv3");
  1250. ie_gender = infer_request.GetBlob("prob");
  1251. }
  1252. // Validate with IE itself (avoid DNN module dependency here)
  1253. normAssert(cv::gapi::ie::util::to_ocv(ie_age), gapi_age, "Test age output" );
  1254. normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
  1255. ++num_frames;
  1256. cap >> in_mat;
  1257. }
  1258. pipeline.stop();
  1259. }
  1260. TEST(InferROI, TestStreamingInfer)
  1261. {
  1262. initDLDTDataPath();
  1263. std::string filepath = findDataFile("cv/video/768x576.avi");
  1264. cv::gapi::ie::detail::ParamDesc params;
  1265. params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
  1266. params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
  1267. params.device_id = "CPU";
  1268. // Load IE network, initialize input data using that.
  1269. cv::Mat in_mat;
  1270. cv::Mat gapi_age, gapi_gender;
  1271. cv::Rect rect(cv::Point{64, 60}, cv::Size{96, 96});
  1272. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  1273. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  1274. cv::GMat in;
  1275. cv::GOpaque<cv::Rect> roi;
  1276. cv::GMat age, gender;
  1277. std::tie(age, gender) = cv::gapi::infer<AgeGender>(roi, in);
  1278. cv::GComputation comp(cv::GIn(in, roi), cv::GOut(age, gender));
  1279. auto pp = cv::gapi::ie::Params<AgeGender> {
  1280. params.model_path, params.weights_path, params.device_id
  1281. }.cfgOutputLayers({ "age_conv3", "prob" })
  1282. .cfgNumRequests(4u);
  1283. std::size_t num_frames = 0u;
  1284. std::size_t max_frames = 10u;
  1285. cv::VideoCapture cap;
  1286. cap.open(filepath);
  1287. if (!cap.isOpened())
1288. throw SkipTestException("Video file cannot be opened");
  1289. cap >> in_mat;
  1290. auto pipeline = comp.compileStreaming(cv::compile_args(cv::gapi::networks(pp)));
  1291. pipeline.setSource(
  1292. cv::gin(cv::gapi::wip::make_src<cv::gapi::wip::GCaptureSource>(filepath), rect));
  1293. pipeline.start();
  1294. while (num_frames < max_frames && pipeline.pull(cv::gout(gapi_age, gapi_gender)))
  1295. {
  1296. // Load & run IE network
  1297. IE::Blob::Ptr ie_age, ie_gender;
  1298. {
  1299. auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
  1300. auto net = cv::gimpl::ie::wrap::readNetwork(params);
  1301. setNetParameters(net);
  1302. auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
  1303. auto infer_request = this_network.CreateInferRequest();
  1304. const auto ie_rc = IE::ROI {
  1305. 0u
  1306. , static_cast<std::size_t>(rect.x)
  1307. , static_cast<std::size_t>(rect.y)
  1308. , static_cast<std::size_t>(rect.width)
  1309. , static_cast<std::size_t>(rect.height)
  1310. };
  1311. IE::Blob::Ptr roi_blob = IE::make_shared_blob(cv::gapi::ie::util::to_ie(in_mat), ie_rc);
  1312. infer_request.SetBlob("data", roi_blob);
  1313. infer_request.Infer();
  1314. ie_age = infer_request.GetBlob("age_conv3");
  1315. ie_gender = infer_request.GetBlob("prob");
  1316. }
  1317. // Validate with IE itself (avoid DNN module dependency here)
  1318. normAssert(cv::gapi::ie::util::to_ocv(ie_age), gapi_age, "Test age output" );
  1319. normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
  1320. ++num_frames;
  1321. cap >> in_mat;
  1322. }
  1323. pipeline.stop();
  1324. }
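// List variant of the streaming test: two ROIs per frame, so the reference IE
// loop runs once per ROI and both output vectors are compared element-wise.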
  1325. TEST(InferList, TestStreamingInfer)
  1326. {
  1327. initDLDTDataPath();
  1328. std::string filepath = findDataFile("cv/video/768x576.avi");
  1329. cv::gapi::ie::detail::ParamDesc params;
  1330. params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
  1331. params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
  1332. params.device_id = "CPU";
  1333. // Load IE network, initialize input data using that.
  1334. cv::Mat in_mat;
  1335. std::vector<cv::Mat> ie_ages, ie_genders, gapi_ages, gapi_genders;
  1336. std::vector<cv::Rect> roi_list = {
  1337. cv::Rect(cv::Point{64, 60}, cv::Size{ 96, 96}),
  1338. cv::Rect(cv::Point{50, 32}, cv::Size{128, 160}),
  1339. };
  1340. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  1341. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  1342. cv::GMat in;
  1343. cv::GArray<cv::Rect> roi;
1344. cv::GArray<cv::GMat> age, gender;
  1345. std::tie(age, gender) = cv::gapi::infer<AgeGender>(roi, in);
  1346. cv::GComputation comp(cv::GIn(in, roi), cv::GOut(age, gender));
  1347. auto pp = cv::gapi::ie::Params<AgeGender> {
  1348. params.model_path, params.weights_path, params.device_id
  1349. }.cfgOutputLayers({ "age_conv3", "prob" })
  1350. .cfgNumRequests(4u);
  1351. std::size_t num_frames = 0u;
  1352. std::size_t max_frames = 10u;
  1353. cv::VideoCapture cap;
  1354. cap.open(filepath);
  1355. if (!cap.isOpened())
1356. throw SkipTestException("Video file cannot be opened");
  1357. cap >> in_mat;
  1358. auto pipeline = comp.compileStreaming(cv::compile_args(cv::gapi::networks(pp)));
  1359. pipeline.setSource(
  1360. cv::gin(cv::gapi::wip::make_src<cv::gapi::wip::GCaptureSource>(filepath), roi_list));
  1361. pipeline.start();
  1362. while (num_frames < max_frames && pipeline.pull(cv::gout(gapi_ages, gapi_genders)))
  1363. {
  1364. {
  1365. auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
  1366. auto net = cv::gimpl::ie::wrap::readNetwork(params);
  1367. setNetParameters(net);
  1368. auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
  1369. auto infer_request = this_network.CreateInferRequest();
  1370. auto frame_blob = cv::gapi::ie::util::to_ie(in_mat);
  1371. for (auto &&rc : roi_list) {
  1372. const auto ie_rc = IE::ROI {
  1373. 0u
  1374. , static_cast<std::size_t>(rc.x)
  1375. , static_cast<std::size_t>(rc.y)
  1376. , static_cast<std::size_t>(rc.width)
  1377. , static_cast<std::size_t>(rc.height)
  1378. };
  1379. infer_request.SetBlob("data", IE::make_shared_blob(frame_blob, ie_rc));
  1380. infer_request.Infer();
  1381. using namespace cv::gapi::ie::util;
  1382. ie_ages.push_back(to_ocv(infer_request.GetBlob("age_conv3")).clone());
  1383. ie_genders.push_back(to_ocv(infer_request.GetBlob("prob")).clone());
  1384. }
1385. } // end of IE reference-inference scope
  1386. // Validate with IE itself (avoid DNN module dependency here)
  1387. normAssert(ie_ages [0], gapi_ages [0], "0: Test age output");
  1388. normAssert(ie_genders[0], gapi_genders[0], "0: Test gender output");
  1389. normAssert(ie_ages [1], gapi_ages [1], "1: Test age output");
  1390. normAssert(ie_genders[1], gapi_genders[1], "1: Test gender output");
  1391. ie_ages.clear();
  1392. ie_genders.clear();
  1393. ++num_frames;
  1394. cap >> in_mat;
1395. }
pipeline.stop();
1396. }
  1397. TEST(Infer2, TestStreamingInfer)
  1398. {
  1399. initDLDTDataPath();
  1400. std::string filepath = findDataFile("cv/video/768x576.avi");
  1401. cv::gapi::ie::detail::ParamDesc params;
  1402. params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
  1403. params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
  1404. params.device_id = "CPU";
  1405. // Load IE network, initialize input data using that.
  1406. cv::Mat in_mat;
  1407. std::vector<cv::Mat> ie_ages, ie_genders, gapi_ages, gapi_genders;
  1408. std::vector<cv::Rect> roi_list = {
  1409. cv::Rect(cv::Point{64, 60}, cv::Size{ 96, 96}),
  1410. cv::Rect(cv::Point{50, 32}, cv::Size{128, 160}),
  1411. };
  1412. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  1413. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  1414. cv::GArray<cv::Rect> rr;
  1415. cv::GMat in;
  1416. cv::GArray<cv::GMat> age, gender;
  1417. std::tie(age, gender) = cv::gapi::infer2<AgeGender>(in, rr);
  1418. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1419. auto pp = cv::gapi::ie::Params<AgeGender> {
  1420. params.model_path, params.weights_path, params.device_id
  1421. }.cfgOutputLayers({ "age_conv3", "prob" })
  1422. .cfgNumRequests(4u);
  1423. std::size_t num_frames = 0u;
  1424. std::size_t max_frames = 10u;
  1425. cv::VideoCapture cap;
  1426. cap.open(filepath);
  1427. if (!cap.isOpened())
1428. throw SkipTestException("Video file cannot be opened");
  1429. cap >> in_mat;
  1430. auto pipeline = comp.compileStreaming(cv::compile_args(cv::gapi::networks(pp)));
  1431. pipeline.setSource(
  1432. cv::gin(cv::gapi::wip::make_src<cv::gapi::wip::GCaptureSource>(filepath), roi_list));
  1433. pipeline.start();
  1434. while (num_frames < max_frames && pipeline.pull(cv::gout(gapi_ages, gapi_genders)))
  1435. {
  1436. {
  1437. auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
  1438. auto net = cv::gimpl::ie::wrap::readNetwork(params);
  1439. setNetParameters(net);
  1440. auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
  1441. auto infer_request = this_network.CreateInferRequest();
  1442. auto frame_blob = cv::gapi::ie::util::to_ie(in_mat);
  1443. for (auto &&rc : roi_list) {
  1444. const auto ie_rc = IE::ROI {
  1445. 0u
  1446. , static_cast<std::size_t>(rc.x)
  1447. , static_cast<std::size_t>(rc.y)
  1448. , static_cast<std::size_t>(rc.width)
  1449. , static_cast<std::size_t>(rc.height)
  1450. };
  1451. infer_request.SetBlob("data", IE::make_shared_blob(frame_blob, ie_rc));
  1452. infer_request.Infer();
  1453. using namespace cv::gapi::ie::util;
  1454. ie_ages.push_back(to_ocv(infer_request.GetBlob("age_conv3")).clone());
  1455. ie_genders.push_back(to_ocv(infer_request.GetBlob("prob")).clone());
  1456. }
1457. } // end of IE reference-inference scope
  1458. // Validate with IE itself (avoid DNN module dependency here)
  1459. normAssert(ie_ages [0], gapi_ages [0], "0: Test age output");
  1460. normAssert(ie_genders[0], gapi_genders[0], "0: Test gender output");
  1461. normAssert(ie_ages [1], gapi_ages [1], "1: Test age output");
  1462. normAssert(ie_genders[1], gapi_genders[1], "1: Test gender output");
  1463. ie_ages.clear();
  1464. ie_genders.clear();
  1465. ++num_frames;
  1466. cap >> in_mat;
  1467. }
  1468. pipeline.stop();
  1469. }
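// Degenerate case: an empty ROI list must produce empty output vectors instead
// of hanging or failing inside the streaming executor.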
  1470. TEST(InferEmptyList, TestStreamingInfer)
  1471. {
  1472. initDLDTDataPath();
  1473. std::string filepath = findDataFile("cv/video/768x576.avi");
  1474. cv::gapi::ie::detail::ParamDesc params;
  1475. params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
  1476. params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
  1477. params.device_id = "CPU";
  1478. // Load IE network, initialize input data using that.
  1479. cv::Mat in_mat;
  1480. std::vector<cv::Mat> ie_ages, ie_genders, gapi_ages, gapi_genders;
1481. // NB: Empty list of ROIs
  1482. std::vector<cv::Rect> roi_list;
  1483. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  1484. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  1485. cv::GMat in;
  1486. cv::GArray<cv::Rect> roi;
1487. cv::GArray<cv::GMat> age, gender;
  1488. std::tie(age, gender) = cv::gapi::infer<AgeGender>(roi, in);
  1489. cv::GComputation comp(cv::GIn(in, roi), cv::GOut(age, gender));
  1490. auto pp = cv::gapi::ie::Params<AgeGender> {
  1491. params.model_path, params.weights_path, params.device_id
  1492. }.cfgOutputLayers({ "age_conv3", "prob" })
  1493. .cfgNumRequests(4u);
  1494. std::size_t num_frames = 0u;
  1495. std::size_t max_frames = 1u;
  1496. cv::VideoCapture cap;
  1497. cap.open(filepath);
  1498. if (!cap.isOpened())
1499. throw SkipTestException("Video file cannot be opened");
  1500. cap >> in_mat;
  1501. auto pipeline = comp.compileStreaming(cv::compile_args(cv::gapi::networks(pp)));
  1502. pipeline.setSource(
  1503. cv::gin(cv::gapi::wip::make_src<cv::gapi::wip::GCaptureSource>(filepath), roi_list));
  1504. pipeline.start();
  1505. while (num_frames < max_frames && pipeline.pull(cv::gout(gapi_ages, gapi_genders)))
  1506. {
  1507. EXPECT_TRUE(gapi_ages.empty());
  1508. EXPECT_TRUE(gapi_genders.empty());
  1509. }
  1510. }
  1511. TEST(Infer2EmptyList, TestStreamingInfer)
  1512. {
  1513. initDLDTDataPath();
  1514. std::string filepath = findDataFile("cv/video/768x576.avi");
  1515. cv::gapi::ie::detail::ParamDesc params;
  1516. params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
  1517. params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
  1518. params.device_id = "CPU";
  1519. // Load IE network, initialize input data using that.
  1520. cv::Mat in_mat;
  1521. std::vector<cv::Mat> ie_ages, ie_genders, gapi_ages, gapi_genders;
1522. // NB: Empty list of ROIs
  1523. std::vector<cv::Rect> roi_list;
  1524. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  1525. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  1526. cv::GArray<cv::Rect> rr;
  1527. cv::GMat in;
  1528. cv::GArray<cv::GMat> age, gender;
  1529. std::tie(age, gender) = cv::gapi::infer2<AgeGender>(in, rr);
  1530. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1531. auto pp = cv::gapi::ie::Params<AgeGender> {
  1532. params.model_path, params.weights_path, params.device_id
  1533. }.cfgOutputLayers({ "age_conv3", "prob" })
  1534. .cfgNumRequests(4u);
  1535. std::size_t num_frames = 0u;
  1536. std::size_t max_frames = 1u;
  1537. cv::VideoCapture cap;
  1538. cap.open(filepath);
  1539. if (!cap.isOpened())
  1540. throw SkipTestException("Video file can not be opened");
  1541. cap >> in_mat;
  1542. auto pipeline = comp.compileStreaming(cv::compile_args(cv::gapi::networks(pp)));
  1543. pipeline.setSource(
  1544. cv::gin(cv::gapi::wip::make_src<cv::gapi::wip::GCaptureSource>(filepath), roi_list));
  1545. pipeline.start();
  1546. while (num_frames < max_frames && pipeline.pull(cv::gout(gapi_ages, gapi_genders)))
  1547. {
  1548. EXPECT_TRUE(gapi_ages.empty());
  1549. EXPECT_TRUE(gapi_genders.empty());
  1550. }
  1551. }
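// InferWithReshape fixtures: cfgInputReshape() changes the network input shape
// before compilation, either to explicit dims, to the input image size, or for
// a single named layer; validate() compares against the fixture's IE reference.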
  1552. TEST_F(InferWithReshape, TestInfer)
  1553. {
  1554. // IE code
  1555. infer(m_in_mat);
  1556. // G-API code
  1557. cv::GMat in;
  1558. cv::GMat age, gender;
  1559. std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
  1560. cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
  1561. auto pp = cv::gapi::ie::Params<AgeGender> {
  1562. params.model_path, params.weights_path, params.device_id
  1563. }.cfgOutputLayers({ "age_conv3", "prob" }).cfgInputReshape({{"data", reshape_dims}});
  1564. comp.apply(cv::gin(m_in_mat), cv::gout(m_out_gapi_ages.front(), m_out_gapi_genders.front()),
  1565. cv::compile_args(cv::gapi::networks(pp)));
  1566. // Validate
  1567. validate();
  1568. }
  1569. TEST_F(InferWithReshape, TestInferInImage)
  1570. {
1571. // Give the input image the target 70x70 size; the network input is then reshaped to match it
  1572. cv::Mat rsz;
  1573. cv::resize(m_in_mat, rsz, cv::Size(70, 70));
  1574. // IE code
  1575. infer(rsz);
  1576. // G-API code
  1577. cv::GMat in;
  1578. cv::GMat age, gender;
  1579. std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
  1580. cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
  1581. auto pp = cv::gapi::ie::Params<AgeGender> {
  1582. params.model_path, params.weights_path, params.device_id
  1583. }.cfgOutputLayers({ "age_conv3", "prob" }).cfgInputReshape({"data"});
1584. // Reshape the CNN input to the input image size
  1585. comp.apply(cv::gin(rsz), cv::gout(m_out_gapi_ages.front(), m_out_gapi_genders.front()),
  1586. cv::compile_args(cv::gapi::networks(pp)));
  1587. // Validate
  1588. validate();
  1589. }
  1590. TEST_F(InferWithReshape, TestInferForSingleLayer)
  1591. {
  1592. // IE code
  1593. infer(m_in_mat);
  1594. // G-API code
  1595. cv::GMat in;
  1596. cv::GMat age, gender;
  1597. std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
  1598. cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
  1599. auto pp = cv::gapi::ie::Params<AgeGender> {
  1600. params.model_path, params.weights_path, params.device_id
  1601. }.cfgOutputLayers({ "age_conv3", "prob" })
  1602. .cfgInputReshape("data", reshape_dims);
  1603. comp.apply(cv::gin(m_in_mat), cv::gout(m_out_gapi_ages.front(), m_out_gapi_genders.front()),
  1604. cv::compile_args(cv::gapi::networks(pp)));
  1605. // Validate
  1606. validate();
  1607. }
  1608. TEST_F(InferWithReshape, TestInferList)
  1609. {
  1610. // IE code
  1611. infer(m_in_mat, true);
  1612. // G-API code
  1613. cv::GArray<cv::Rect> rr;
  1614. cv::GMat in;
  1615. cv::GArray<cv::GMat> age, gender;
  1616. std::tie(age, gender) = cv::gapi::infer<AgeGender>(rr, in);
  1617. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1618. auto pp = cv::gapi::ie::Params<AgeGender> {
  1619. params.model_path, params.weights_path, params.device_id
  1620. }.cfgOutputLayers({ "age_conv3", "prob" }).cfgInputReshape({{"data", reshape_dims}});
  1621. comp.apply(cv::gin(m_in_mat, m_roi_list),
  1622. cv::gout(m_out_gapi_ages, m_out_gapi_genders),
  1623. cv::compile_args(cv::gapi::networks(pp)));
  1624. // Validate
  1625. validate();
  1626. }
  1627. TEST_F(InferWithReshape, TestInferList2)
  1628. {
  1629. // IE code
  1630. infer(m_in_mat, true);
  1631. // G-API code
  1632. cv::GArray<cv::Rect> rr;
  1633. cv::GMat in;
  1634. cv::GArray<cv::GMat> age, gender;
  1635. std::tie(age, gender) = cv::gapi::infer2<AgeGender>(in, rr);
  1636. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1637. auto pp = cv::gapi::ie::Params<AgeGender> {
  1638. params.model_path, params.weights_path, params.device_id
  1639. }.cfgOutputLayers({ "age_conv3", "prob" }).cfgInputReshape({{"data", reshape_dims}});
  1640. comp.apply(cv::gin(m_in_mat, m_roi_list),
  1641. cv::gout(m_out_gapi_ages, m_out_gapi_genders),
  1642. cv::compile_args(cv::gapi::networks(pp)));
  1643. // Validate
  1644. validate();
  1645. }
  1646. TEST_F(InferWithReshape, TestInferListBGR)
  1647. {
  1648. // IE code
  1649. infer(m_in_mat, true);
  1650. // G-API code
  1651. cv::GArray<cv::Rect> rr;
  1652. cv::GFrame in;
  1653. cv::GArray<cv::GMat> age, gender;
  1654. std::tie(age, gender) = cv::gapi::infer<AgeGender>(rr, in);
  1655. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1656. auto frame = MediaFrame::Create<TestMediaBGR>(m_in_mat);
  1657. auto pp = cv::gapi::ie::Params<AgeGender> {
  1658. params.model_path, params.weights_path, params.device_id
  1659. }.cfgOutputLayers({ "age_conv3", "prob" }).cfgInputReshape({{"data", reshape_dims}});
  1660. comp.apply(cv::gin(frame, m_roi_list),
  1661. cv::gout(m_out_gapi_ages, m_out_gapi_genders),
  1662. cv::compile_args(cv::gapi::networks(pp)));
  1663. // Validate
  1664. validate();
  1665. }
  1666. TEST_F(InferWithReshapeNV12, TestInferListYUV)
  1667. {
  1668. // G-API code
  1669. cv::GFrame in;
  1670. cv::GArray<cv::Rect> rr;
  1671. cv::GArray<cv::GMat> age, gender;
  1672. std::tie(age, gender) = cv::gapi::infer<AgeGender>(rr, in);
  1673. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1674. auto frame = MediaFrame::Create<TestMediaNV12>(m_in_y, m_in_uv);
  1675. auto pp = cv::gapi::ie::Params<AgeGender> {
  1676. params.model_path, params.weights_path, params.device_id
  1677. }.cfgOutputLayers({ "age_conv3", "prob" }).cfgInputReshape({{"data", reshape_dims}});
  1678. comp.apply(cv::gin(frame, m_roi_list),
  1679. cv::gout(m_out_gapi_ages, m_out_gapi_genders),
  1680. cv::compile_args(cv::gapi::networks(pp)));
  1681. // Validate
  1682. validate();
  1683. }
  1684. TEST_F(ROIList, CallInferMultipleTimes)
  1685. {
  1686. cv::GArray<cv::Rect> rr;
  1687. cv::GMat in;
  1688. cv::GArray<cv::GMat> age, gender;
  1689. std::tie(age, gender) = cv::gapi::infer<AgeGender>(rr, in);
  1690. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  1691. auto pp = cv::gapi::ie::Params<AgeGender> {
  1692. params.model_path, params.weights_path, params.device_id
  1693. }.cfgOutputLayers({ "age_conv3", "prob" });
  1694. auto cc = comp.compile(cv::descr_of(cv::gin(m_in_mat, m_roi_list)),
  1695. cv::compile_args(cv::gapi::networks(pp)));
  1696. for (int i = 0; i < 10; ++i) {
  1697. cc(cv::gin(m_in_mat, m_roi_list), cv::gout(m_out_gapi_ages, m_out_gapi_genders));
  1698. }
  1699. validate();
  1700. }
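// blobParams() exposes backend-specific blob-construction parameters of a
// MediaFrame adapter; this test only checks that the TensorDesc/ParamMap pair
// reported by TestMediaBGR round-trips through any_cast unchanged.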
  1701. TEST(IEFrameAdapter, blobParams)
  1702. {
  1703. cv::Mat bgr = cv::Mat::eye(240, 320, CV_8UC3);
  1704. cv::MediaFrame frame = cv::MediaFrame::Create<TestMediaBGR>(bgr);
  1705. auto expected = std::make_pair(IE::TensorDesc{IE::Precision::U8, {1, 3, 300, 300},
  1706. IE::Layout::NCHW},
  1707. IE::ParamMap{{"HELLO", 42}, {"COLOR_FORMAT",
  1708. IE::ColorFormat::NV12}});
  1709. auto actual = cv::util::any_cast<decltype(expected)>(frame.blobParams());
  1710. EXPECT_EQ(expected, actual);
  1711. }
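// The helpers below emulate a source with a limited frame pool: GMockSource
// blocks in pull() once `limit` frames are alive, and GMockMediaAdapter's
// destructor releases a slot, so the test hangs if the pipeline never releases
// the frames it has already processed.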
  1712. namespace
  1713. {
  1714. struct Sync {
  1715. std::mutex m;
  1716. std::condition_variable cv;
  1717. int counter = 0;
  1718. };
  1719. class GMockMediaAdapter final: public cv::MediaFrame::IAdapter {
  1720. public:
  1721. explicit GMockMediaAdapter(cv::Mat m, std::shared_ptr<Sync> sync)
  1722. : m_mat(m), m_sync(sync) {
  1723. }
  1724. cv::GFrameDesc meta() const override {
  1725. return cv::GFrameDesc{cv::MediaFormat::BGR, m_mat.size()};
  1726. }
  1727. cv::MediaFrame::View access(cv::MediaFrame::Access) override {
  1728. cv::MediaFrame::View::Ptrs pp = { m_mat.ptr(), nullptr, nullptr, nullptr };
  1729. cv::MediaFrame::View::Strides ss = { m_mat.step, 0u, 0u, 0u };
  1730. return cv::MediaFrame::View(std::move(pp), std::move(ss));
  1731. }
  1732. ~GMockMediaAdapter() {
  1733. {
  1734. std::lock_guard<std::mutex> lk{m_sync->m};
  1735. m_sync->counter--;
  1736. }
  1737. m_sync->cv.notify_one();
  1738. }
  1739. private:
  1740. cv::Mat m_mat;
  1741. std::shared_ptr<Sync> m_sync;
  1742. };
1743. // NB: This source is needed to simulate real
1744. // cases where the memory resources are limited.
1745. // GMockSource(int limit) - accepts the number of MediaFrames that
1746. // the source can produce before its resources are exhausted.
  1747. class GMockSource : public cv::gapi::wip::IStreamSource {
  1748. public:
  1749. explicit GMockSource(int limit)
  1750. : m_limit(limit), m_mat(cv::Size(1920, 1080), CV_8UC3),
  1751. m_sync(new Sync{}) {
  1752. cv::randu(m_mat, cv::Scalar::all(0), cv::Scalar::all(255));
  1753. }
1754. bool pull(cv::gapi::wip::Data& data) override {
  1755. std::unique_lock<std::mutex> lk(m_sync->m);
  1756. m_sync->counter++;
  1757. // NB: Can't produce new frames until old ones are released.
  1758. m_sync->cv.wait(lk, [this]{return m_sync->counter <= m_limit;});
  1759. data = cv::MediaFrame::Create<GMockMediaAdapter>(m_mat, m_sync);
  1760. return true;
  1761. }
  1762. GMetaArg descr_of() const override {
  1763. return GMetaArg{cv::GFrameDesc{cv::MediaFormat::BGR, m_mat.size()}};
  1764. }
  1765. private:
  1766. int m_limit;
  1767. cv::Mat m_mat;
  1768. std::shared_ptr<Sync> m_sync;
  1769. };
  1770. struct LimitedSourceInfer: public ::testing::Test {
  1771. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  1772. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  1773. LimitedSourceInfer()
  1774. : comp([](){
  1775. cv::GFrame in;
  1776. cv::GMat age, gender;
  1777. std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
  1778. return cv::GComputation(cv::GIn(in), cv::GOut(age, gender));
  1779. }) {
  1780. initDLDTDataPath();
  1781. }
  1782. GStreamingCompiled compileStreaming(int nireq) {
  1783. cv::gapi::ie::detail::ParamDesc params;
  1784. params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
  1785. params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
  1786. params.device_id = "CPU";
  1787. auto pp = cv::gapi::ie::Params<AgeGender> {
  1788. params.model_path, params.weights_path, params.device_id }
  1789. .cfgOutputLayers({ "age_conv3", "prob" })
  1790. .cfgNumRequests(nireq);
  1791. return comp.compileStreaming(cv::compile_args(cv::gapi::networks(pp)));
  1792. }
  1793. void run(const int max_frames, const int limit, const int nireq) {
  1794. auto pipeline = compileStreaming(nireq);
  1795. pipeline.setSource<GMockSource>(limit);
  1796. pipeline.start();
  1797. int num_frames = 0;
  1798. while (num_frames != max_frames &&
  1799. pipeline.pull(cv::gout(out_age, out_gender))) {
  1800. ++num_frames;
  1801. }
  1802. }
  1803. cv::GComputation comp;
  1804. cv::Mat out_age, out_gender;
  1805. };
  1806. } // anonymous namespace
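// With limit == nireq == 1 the pipeline can only advance if each frame is
// released before the next pull; the async variant raises both limits (4 frames,
// 8 requests) to check the same property with several in-flight requests.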
  1807. TEST_F(LimitedSourceInfer, ReleaseFrame)
  1808. {
  1809. constexpr int max_frames = 50;
  1810. constexpr int resources_limit = 1;
  1811. constexpr int nireq = 1;
  1812. run(max_frames, resources_limit, nireq);
  1813. }
  1814. TEST_F(LimitedSourceInfer, ReleaseFrameAsync)
  1815. {
  1816. constexpr int max_frames = 50;
  1817. constexpr int resources_limit = 4;
  1818. constexpr int nireq = 8;
  1819. run(max_frames, resources_limit, nireq);
  1820. }
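// Batch inference: the input Mat is 4-dimensional (N = 4), the reference
// network gets setBatchSize(), and the G-API side must be told the same batch
// via cfgBatchSize().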
  1821. TEST(TestAgeGenderIE, InferWithBatch)
  1822. {
  1823. initDLDTDataPath();
  1824. constexpr int batch_size = 4;
  1825. cv::gapi::ie::detail::ParamDesc params;
  1826. params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
  1827. params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
  1828. params.device_id = "CPU";
  1829. cv::Mat in_mat({batch_size, 3, 320, 240}, CV_8U);
  1830. cv::randu(in_mat, 0, 255);
  1831. cv::Mat gapi_age, gapi_gender;
  1832. // Load & run IE network
  1833. IE::Blob::Ptr ie_age, ie_gender;
  1834. {
  1835. auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
  1836. auto net = cv::gimpl::ie::wrap::readNetwork(params);
  1837. setNetParameters(net);
  1838. net.setBatchSize(batch_size);
  1839. auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
  1840. auto infer_request = this_network.CreateInferRequest();
  1841. infer_request.SetBlob("data", cv::gapi::ie::util::to_ie(in_mat));
  1842. infer_request.Infer();
  1843. ie_age = infer_request.GetBlob("age_conv3");
  1844. ie_gender = infer_request.GetBlob("prob");
  1845. }
  1846. // Configure & run G-API
  1847. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  1848. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  1849. cv::GMat in;
  1850. cv::GMat age, gender;
  1851. std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
  1852. cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
  1853. auto pp = cv::gapi::ie::Params<AgeGender> {
  1854. params.model_path, params.weights_path, params.device_id
  1855. }.cfgOutputLayers({ "age_conv3", "prob" })
  1856. .cfgBatchSize(batch_size);
  1857. comp.apply(cv::gin(in_mat), cv::gout(gapi_age, gapi_gender),
  1858. cv::compile_args(cv::gapi::networks(pp)));
  1859. // Validate with IE itself (avoid DNN module dependency here)
  1860. normAssert(cv::gapi::ie::util::to_ocv(ie_age), gapi_age, "Test age output" );
  1861. normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
  1862. }
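// ImportNetwork tests run only on MYRIAD: the model is pre-compiled into a
// device blob (compileAgeGenderBlob) and imported instead of being read from
// IR (note the two-argument Params constructor: blob path + device), with
// resize/color preprocessing configured explicitly on the IE reference side.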
  1863. TEST(ImportNetwork, Infer)
  1864. {
  1865. const std::string device = "MYRIAD";
  1866. skipIfDeviceNotAvailable(device);
  1867. initDLDTDataPath();
  1868. cv::gapi::ie::detail::ParamDesc params;
  1869. params.model_path = compileAgeGenderBlob(device);
  1870. params.device_id = device;
  1871. cv::Mat in_mat(320, 240, CV_8UC3);
  1872. cv::randu(in_mat, 0, 255);
  1873. cv::Mat gapi_age, gapi_gender;
  1874. // Load & run IE network
  1875. IE::Blob::Ptr ie_age, ie_gender;
  1876. {
  1877. auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
  1878. auto this_network = cv::gimpl::ie::wrap::importNetwork(plugin, params);
  1879. auto infer_request = this_network.CreateInferRequest();
  1880. IE::PreProcessInfo info;
  1881. info.setResizeAlgorithm(IE::RESIZE_BILINEAR);
  1882. infer_request.SetBlob("data", cv::gapi::ie::util::to_ie(in_mat), info);
  1883. infer_request.Infer();
  1884. ie_age = infer_request.GetBlob("age_conv3");
  1885. ie_gender = infer_request.GetBlob("prob");
  1886. }
  1887. // Configure & run G-API
  1888. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  1889. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  1890. cv::GMat in;
  1891. cv::GMat age, gender;
  1892. std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
  1893. cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
  1894. auto pp = cv::gapi::ie::Params<AgeGender> {
  1895. params.model_path, params.device_id
  1896. }.cfgOutputLayers({ "age_conv3", "prob" });
  1897. comp.apply(cv::gin(in_mat), cv::gout(gapi_age, gapi_gender),
  1898. cv::compile_args(cv::gapi::networks(pp)));
  1899. // Validate with IE itself (avoid DNN module dependency here)
  1900. normAssert(cv::gapi::ie::util::to_ocv(ie_age), gapi_age, "Test age output" );
  1901. normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
  1902. }
  1903. TEST(ImportNetwork, InferNV12)
  1904. {
  1905. const std::string device = "MYRIAD";
  1906. skipIfDeviceNotAvailable(device);
  1907. initDLDTDataPath();
  1908. cv::gapi::ie::detail::ParamDesc params;
1909. params.model_path = compileAgeGenderBlob(device);
  1910. params.device_id = device;
  1911. cv::Size sz{320, 240};
  1912. cv::Mat in_y_mat(sz, CV_8UC1);
  1913. cv::randu(in_y_mat, 0, 255);
  1914. cv::Mat in_uv_mat(sz / 2, CV_8UC2);
  1915. cv::randu(in_uv_mat, 0, 255);
  1916. cv::Mat gapi_age, gapi_gender;
  1917. // Load & run IE network
  1918. IE::Blob::Ptr ie_age, ie_gender;
  1919. {
  1920. auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
  1921. auto this_network = cv::gimpl::ie::wrap::importNetwork(plugin, params);
  1922. auto infer_request = this_network.CreateInferRequest();
  1923. IE::PreProcessInfo info;
  1924. info.setResizeAlgorithm(IE::RESIZE_BILINEAR);
  1925. info.setColorFormat(IE::ColorFormat::NV12);
  1926. infer_request.SetBlob("data", cv::gapi::ie::util::to_ie(in_y_mat, in_uv_mat), info);
  1927. infer_request.Infer();
  1928. ie_age = infer_request.GetBlob("age_conv3");
  1929. ie_gender = infer_request.GetBlob("prob");
  1930. }
  1931. // Configure & run G-API
  1932. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  1933. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  1934. cv::GFrame in;
  1935. cv::GMat age, gender;
  1936. std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
  1937. cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));
  1938. auto frame = MediaFrame::Create<TestMediaNV12>(in_y_mat, in_uv_mat);
  1939. auto pp = cv::gapi::ie::Params<AgeGender> {
  1940. params.model_path, params.device_id
  1941. }.cfgOutputLayers({ "age_conv3", "prob" });
  1942. comp.apply(cv::gin(frame), cv::gout(gapi_age, gapi_gender),
  1943. cv::compile_args(cv::gapi::networks(pp)));
  1944. // Validate with IE itself (avoid DNN module dependency here)
  1945. normAssert(cv::gapi::ie::util::to_ocv(ie_age), gapi_age, "Test age output" );
  1946. normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
  1947. }
  1948. TEST(ImportNetwork, InferROI)
  1949. {
  1950. const std::string device = "MYRIAD";
  1951. skipIfDeviceNotAvailable(device);
  1952. initDLDTDataPath();
  1953. cv::gapi::ie::detail::ParamDesc params;
  1954. params.model_path = compileAgeGenderBlob(device);
  1955. params.device_id = device;
  1956. cv::Mat in_mat(320, 240, CV_8UC3);
  1957. cv::randu(in_mat, 0, 255);
  1958. cv::Mat gapi_age, gapi_gender;
  1959. cv::Rect rect(cv::Point{64, 60}, cv::Size{96, 96});
  1960. // Load & run IE network
  1961. IE::Blob::Ptr ie_age, ie_gender;
  1962. {
  1963. auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
  1964. auto this_network = cv::gimpl::ie::wrap::importNetwork(plugin, params);
  1965. auto infer_request = this_network.CreateInferRequest();
  1966. const auto ie_rc = IE::ROI {
  1967. 0u
  1968. , static_cast<std::size_t>(rect.x)
  1969. , static_cast<std::size_t>(rect.y)
  1970. , static_cast<std::size_t>(rect.width)
  1971. , static_cast<std::size_t>(rect.height)
  1972. };
  1973. IE::Blob::Ptr roi_blob = IE::make_shared_blob(cv::gapi::ie::util::to_ie(in_mat), ie_rc);
  1974. IE::PreProcessInfo info;
  1975. info.setResizeAlgorithm(IE::RESIZE_BILINEAR);
  1976. infer_request.SetBlob("data", roi_blob, info);
  1977. infer_request.Infer();
  1978. ie_age = infer_request.GetBlob("age_conv3");
  1979. ie_gender = infer_request.GetBlob("prob");
  1980. }
  1981. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  1982. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  1983. cv::GMat in;
  1984. cv::GOpaque<cv::Rect> roi;
  1985. cv::GMat age, gender;
  1986. std::tie(age, gender) = cv::gapi::infer<AgeGender>(roi, in);
  1987. cv::GComputation comp(cv::GIn(in, roi), cv::GOut(age, gender));
  1988. auto pp = cv::gapi::ie::Params<AgeGender> {
  1989. params.model_path, params.device_id
  1990. }.cfgOutputLayers({ "age_conv3", "prob" });
  1991. comp.apply(cv::gin(in_mat, rect), cv::gout(gapi_age, gapi_gender),
  1992. cv::compile_args(cv::gapi::networks(pp)));
  1993. // Validate with IE itself (avoid DNN module dependency here)
  1994. normAssert(cv::gapi::ie::util::to_ocv(ie_age), gapi_age, "Test age output" );
  1995. normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
  1996. }
  1997. TEST(ImportNetwork, InferROINV12)
  1998. {
  1999. const std::string device = "MYRIAD";
  2000. skipIfDeviceNotAvailable(device);
  2001. initDLDTDataPath();
  2002. cv::gapi::ie::detail::ParamDesc params;
  2003. params.model_path = compileAgeGenderBlob(device);
  2004. params.device_id = device;
  2005. cv::Size sz{320, 240};
  2006. cv::Mat in_y_mat(sz, CV_8UC1);
  2007. cv::randu(in_y_mat, 0, 255);
  2008. cv::Mat in_uv_mat(sz / 2, CV_8UC2);
  2009. cv::randu(in_uv_mat, 0, 255);
  2010. cv::Rect rect(cv::Point{64, 60}, cv::Size{96, 96});
  2011. cv::Mat gapi_age, gapi_gender;
  2012. // Load & run IE network
  2013. IE::Blob::Ptr ie_age, ie_gender;
  2014. {
  2015. auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
  2016. auto this_network = cv::gimpl::ie::wrap::importNetwork(plugin, params);
  2017. auto infer_request = this_network.CreateInferRequest();
  2018. const auto ie_rc = IE::ROI {
  2019. 0u
  2020. , static_cast<std::size_t>(rect.x)
  2021. , static_cast<std::size_t>(rect.y)
  2022. , static_cast<std::size_t>(rect.width)
  2023. , static_cast<std::size_t>(rect.height)
  2024. };
  2025. IE::Blob::Ptr roi_blob =
  2026. IE::make_shared_blob(cv::gapi::ie::util::to_ie(in_y_mat, in_uv_mat), ie_rc);
  2027. IE::PreProcessInfo info;
  2028. info.setResizeAlgorithm(IE::RESIZE_BILINEAR);
  2029. info.setColorFormat(IE::ColorFormat::NV12);
  2030. infer_request.SetBlob("data", roi_blob, info);
  2031. infer_request.Infer();
  2032. ie_age = infer_request.GetBlob("age_conv3");
  2033. ie_gender = infer_request.GetBlob("prob");
  2034. }
  2035. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  2036. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  2037. cv::GFrame in;
  2038. cv::GOpaque<cv::Rect> roi;
  2039. cv::GMat age, gender;
  2040. std::tie(age, gender) = cv::gapi::infer<AgeGender>(roi, in);
  2041. cv::GComputation comp(cv::GIn(in, roi), cv::GOut(age, gender));
  2042. auto frame = MediaFrame::Create<TestMediaNV12>(in_y_mat, in_uv_mat);
  2043. auto pp = cv::gapi::ie::Params<AgeGender> {
  2044. params.model_path, params.device_id
  2045. }.cfgOutputLayers({ "age_conv3", "prob" });
  2046. comp.apply(cv::gin(frame, rect), cv::gout(gapi_age, gapi_gender),
  2047. cv::compile_args(cv::gapi::networks(pp)));
  2048. // Validate with IE itself (avoid DNN module dependency here)
  2049. normAssert(cv::gapi::ie::util::to_ocv(ie_age), gapi_age, "Test age output" );
  2050. normAssert(cv::gapi::ie::util::to_ocv(ie_gender), gapi_gender, "Test gender output");
  2051. }
  2052. TEST(ImportNetwork, InferList)
  2053. {
  2054. const std::string device = "MYRIAD";
  2055. skipIfDeviceNotAvailable(device);
  2056. initDLDTDataPath();
  2057. cv::gapi::ie::detail::ParamDesc params;
  2058. params.model_path = compileAgeGenderBlob(device);
  2059. params.device_id = device;
  2060. cv::Mat in_mat(320, 240, CV_8UC3);
  2061. cv::randu(in_mat, 0, 255);
  2062. std::vector<cv::Rect> roi_list = {
  2063. cv::Rect(cv::Point{64, 60}, cv::Size{ 96, 96}),
  2064. cv::Rect(cv::Point{50, 32}, cv::Size{128, 160}),
  2065. };
  2066. std::vector<cv::Mat> out_ie_ages, out_ie_genders, out_gapi_ages, out_gapi_genders;
  2067. // Load & run IE network
  2068. {
  2069. auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
  2070. auto this_network = cv::gimpl::ie::wrap::importNetwork(plugin, params);
  2071. auto infer_request = this_network.CreateInferRequest();
  2072. for (auto &&rc : roi_list) {
  2073. const auto ie_rc = IE::ROI {
  2074. 0u
  2075. , static_cast<std::size_t>(rc.x)
  2076. , static_cast<std::size_t>(rc.y)
  2077. , static_cast<std::size_t>(rc.width)
  2078. , static_cast<std::size_t>(rc.height)
  2079. };
  2080. IE::Blob::Ptr roi_blob =
  2081. IE::make_shared_blob(cv::gapi::ie::util::to_ie(in_mat), ie_rc);
  2082. IE::PreProcessInfo info;
  2083. info.setResizeAlgorithm(IE::RESIZE_BILINEAR);
  2084. infer_request.SetBlob("data", roi_blob, info);
  2085. infer_request.Infer();
  2086. using namespace cv::gapi::ie::util;
  2087. out_ie_ages.push_back(to_ocv(infer_request.GetBlob("age_conv3")).clone());
  2088. out_ie_genders.push_back(to_ocv(infer_request.GetBlob("prob")).clone());
  2089. }
  2090. }
  2091. // Configure & run G-API
  2092. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  2093. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  2094. cv::GArray<cv::Rect> rr;
  2095. cv::GMat in;
  2096. cv::GArray<cv::GMat> age, gender;
  2097. std::tie(age, gender) = cv::gapi::infer<AgeGender>(rr, in);
  2098. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  2099. auto pp = cv::gapi::ie::Params<AgeGender> {
  2100. params.model_path, params.device_id
  2101. }.cfgOutputLayers({ "age_conv3", "prob" });
  2102. comp.apply(cv::gin(in_mat, roi_list), cv::gout(out_gapi_ages, out_gapi_genders),
  2103. cv::compile_args(cv::gapi::networks(pp)));
  2104. // Validate with IE itself (avoid DNN module dependency here)
  2105. GAPI_Assert(!out_gapi_ages.empty());
  2106. ASSERT_EQ(out_gapi_genders.size(), out_gapi_ages.size());
  2107. ASSERT_EQ(out_gapi_ages.size(), out_ie_ages.size());
  2108. ASSERT_EQ(out_gapi_genders.size(), out_ie_genders.size());
  2109. const size_t size = out_gapi_ages.size();
  2110. for (size_t i = 0; i < size; ++i) {
  2111. normAssert(out_ie_ages [i], out_gapi_ages [i], "Test age output");
  2112. normAssert(out_ie_genders[i], out_gapi_genders[i], "Test gender output");
  2113. }
  2114. }
  2115. TEST(ImportNetwork, InferListNV12)
  2116. {
  2117. const std::string device = "MYRIAD";
  2118. skipIfDeviceNotAvailable(device);
  2119. initDLDTDataPath();
  2120. cv::gapi::ie::detail::ParamDesc params;
  2121. params.model_path = compileAgeGenderBlob(device);
  2122. params.device_id = device;
  2123. cv::Size sz{320, 240};
  2124. cv::Mat in_y_mat(sz, CV_8UC1);
  2125. cv::randu(in_y_mat, 0, 255);
  2126. cv::Mat in_uv_mat(sz / 2, CV_8UC2);
  2127. cv::randu(in_uv_mat, 0, 255);
  2128. std::vector<cv::Rect> roi_list = {
  2129. cv::Rect(cv::Point{64, 60}, cv::Size{ 96, 96}),
  2130. cv::Rect(cv::Point{50, 32}, cv::Size{128, 160}),
  2131. };
  2132. std::vector<cv::Mat> out_ie_ages, out_ie_genders, out_gapi_ages, out_gapi_genders;
  2133. // Load & run IE network
  2134. {
  2135. auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
  2136. auto this_network = cv::gimpl::ie::wrap::importNetwork(plugin, params);
  2137. auto infer_request = this_network.CreateInferRequest();
  2138. for (auto &&rc : roi_list) {
  2139. const auto ie_rc = IE::ROI {
  2140. 0u
  2141. , static_cast<std::size_t>(rc.x)
  2142. , static_cast<std::size_t>(rc.y)
  2143. , static_cast<std::size_t>(rc.width)
  2144. , static_cast<std::size_t>(rc.height)
  2145. };
  2146. IE::Blob::Ptr roi_blob =
  2147. IE::make_shared_blob(cv::gapi::ie::util::to_ie(in_y_mat, in_uv_mat), ie_rc);
  2148. IE::PreProcessInfo info;
  2149. info.setResizeAlgorithm(IE::RESIZE_BILINEAR);
  2150. info.setColorFormat(IE::ColorFormat::NV12);
  2151. infer_request.SetBlob("data", roi_blob, info);
  2152. infer_request.Infer();
  2153. using namespace cv::gapi::ie::util;
  2154. out_ie_ages.push_back(to_ocv(infer_request.GetBlob("age_conv3")).clone());
  2155. out_ie_genders.push_back(to_ocv(infer_request.GetBlob("prob")).clone());
  2156. }
  2157. }
  2158. // Configure & run G-API
  2159. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  2160. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  2161. cv::GArray<cv::Rect> rr;
  2162. cv::GFrame in;
  2163. cv::GArray<cv::GMat> age, gender;
  2164. std::tie(age, gender) = cv::gapi::infer<AgeGender>(rr, in);
  2165. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  2166. auto pp = cv::gapi::ie::Params<AgeGender> {
  2167. params.model_path, params.device_id
  2168. }.cfgOutputLayers({ "age_conv3", "prob" });
  2169. auto frame = MediaFrame::Create<TestMediaNV12>(in_y_mat, in_uv_mat);
  2170. comp.apply(cv::gin(frame, roi_list), cv::gout(out_gapi_ages, out_gapi_genders),
  2171. cv::compile_args(cv::gapi::networks(pp)));
  2172. // Validate with IE itself (avoid DNN module dependency here)
  2173. GAPI_Assert(!out_gapi_ages.empty());
  2174. ASSERT_EQ(out_gapi_genders.size(), out_gapi_ages.size());
  2175. ASSERT_EQ(out_gapi_ages.size(), out_ie_ages.size());
  2176. ASSERT_EQ(out_gapi_genders.size(), out_ie_genders.size());
  2177. const size_t size = out_gapi_ages.size();
  2178. for (size_t i = 0; i < size; ++i) {
  2179. normAssert(out_ie_ages [i], out_gapi_ages [i], "Test age output");
  2180. normAssert(out_ie_genders[i], out_gapi_genders[i], "Test gender output");
  2181. }
  2182. }
  2183. TEST(ImportNetwork, InferList2)
  2184. {
  2185. const std::string device = "MYRIAD";
  2186. skipIfDeviceNotAvailable(device);
  2187. initDLDTDataPath();
  2188. cv::gapi::ie::detail::ParamDesc params;
  2189. params.model_path = compileAgeGenderBlob(device);
  2190. params.device_id = device;
  2191. cv::Mat in_mat(320, 240, CV_8UC3);
  2192. cv::randu(in_mat, 0, 255);
  2193. std::vector<cv::Rect> roi_list = {
  2194. cv::Rect(cv::Point{64, 60}, cv::Size{ 96, 96}),
  2195. cv::Rect(cv::Point{50, 32}, cv::Size{128, 160}),
  2196. };
  2197. std::vector<cv::Mat> out_ie_ages, out_ie_genders, out_gapi_ages, out_gapi_genders;
  2198. // Load & run IE network
  2199. {
  2200. auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
  2201. auto this_network = cv::gimpl::ie::wrap::importNetwork(plugin, params);
  2202. auto infer_request = this_network.CreateInferRequest();
  2203. for (auto &&rc : roi_list) {
  2204. const auto ie_rc = IE::ROI {
  2205. 0u
  2206. , static_cast<std::size_t>(rc.x)
  2207. , static_cast<std::size_t>(rc.y)
  2208. , static_cast<std::size_t>(rc.width)
  2209. , static_cast<std::size_t>(rc.height)
  2210. };
  2211. IE::Blob::Ptr roi_blob =
  2212. IE::make_shared_blob(cv::gapi::ie::util::to_ie(in_mat), ie_rc);
  2213. IE::PreProcessInfo info;
  2214. info.setResizeAlgorithm(IE::RESIZE_BILINEAR);
  2215. infer_request.SetBlob("data", roi_blob, info);
  2216. infer_request.Infer();
  2217. using namespace cv::gapi::ie::util;
  2218. out_ie_ages.push_back(to_ocv(infer_request.GetBlob("age_conv3")).clone());
  2219. out_ie_genders.push_back(to_ocv(infer_request.GetBlob("prob")).clone());
  2220. }
  2221. }
  2222. // Configure & run G-API
  2223. using AGInfo = std::tuple<cv::GMat, cv::GMat>;
  2224. G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
  2225. cv::GArray<cv::Rect> rr;
  2226. cv::GMat in;
  2227. cv::GArray<cv::GMat> age, gender;
  2228. std::tie(age, gender) = cv::gapi::infer2<AgeGender>(in, rr);
  2229. cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
  2230. auto pp = cv::gapi::ie::Params<AgeGender> {
  2231. params.model_path, params.device_id
  2232. }.cfgOutputLayers({ "age_conv3", "prob" });
  2233. comp.apply(cv::gin(in_mat, roi_list), cv::gout(out_gapi_ages, out_gapi_genders),
  2234. cv::compile_args(cv::gapi::networks(pp)));
  2235. // Validate with IE itself (avoid DNN module dependency here)
  2236. GAPI_Assert(!out_gapi_ages.empty());
  2237. ASSERT_EQ(out_gapi_genders.size(), out_gapi_ages.size());
  2238. ASSERT_EQ(out_gapi_ages.size(), out_ie_ages.size());
  2239. ASSERT_EQ(out_gapi_genders.size(), out_ie_genders.size());
  2240. const size_t size = out_gapi_ages.size();
  2241. for (size_t i = 0; i < size; ++i) {
  2242. normAssert(out_ie_ages [i], out_gapi_ages [i], "Test age output");
  2243. normAssert(out_ie_genders[i], out_gapi_genders[i], "Test gender output");
  2244. }
  2245. }
TEST(ImportNetwork, InferList2NV12)
{
    const std::string device = "MYRIAD";
    skipIfDeviceNotAvailable(device);

    initDLDTDataPath();

    cv::gapi::ie::detail::ParamDesc params;
    params.model_path = compileAgeGenderBlob(device);
    params.device_id = device;

    cv::Size sz{320, 240};
    cv::Mat in_y_mat(sz, CV_8UC1);
    cv::randu(in_y_mat, 0, 255);
    cv::Mat in_uv_mat(sz / 2, CV_8UC2);
    cv::randu(in_uv_mat, 0, 255);

    std::vector<cv::Rect> roi_list = {
        cv::Rect(cv::Point{64, 60}, cv::Size{ 96, 96}),
        cv::Rect(cv::Point{50, 32}, cv::Size{128, 160}),
    };

    std::vector<cv::Mat> out_ie_ages, out_ie_genders, out_gapi_ages, out_gapi_genders;

    // Load & run IE network
    {
        auto plugin        = cv::gimpl::ie::wrap::getPlugin(params);
        auto this_network  = cv::gimpl::ie::wrap::importNetwork(plugin, params);
        auto infer_request = this_network.CreateInferRequest();

        for (auto &&rc : roi_list) {
            const auto ie_rc = IE::ROI {
                0u
                , static_cast<std::size_t>(rc.x)
                , static_cast<std::size_t>(rc.y)
                , static_cast<std::size_t>(rc.width)
                , static_cast<std::size_t>(rc.height)
            };
            IE::Blob::Ptr roi_blob =
                IE::make_shared_blob(cv::gapi::ie::util::to_ie(in_y_mat, in_uv_mat), ie_rc);

            IE::PreProcessInfo info;
            info.setResizeAlgorithm(IE::RESIZE_BILINEAR);
            info.setColorFormat(IE::ColorFormat::NV12);
            infer_request.SetBlob("data", roi_blob, info);

            infer_request.Infer();

            using namespace cv::gapi::ie::util;
            out_ie_ages   .push_back(to_ocv(infer_request.GetBlob("age_conv3")).clone());
            out_ie_genders.push_back(to_ocv(infer_request.GetBlob("prob")).clone());
        }
    }

    // Configure & run G-API
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");

    cv::GArray<cv::Rect> rr;
    cv::GFrame in;
    cv::GArray<cv::GMat> age, gender;
    std::tie(age, gender) = cv::gapi::infer2<AgeGender>(in, rr);
    cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));

    auto pp = cv::gapi::ie::Params<AgeGender> {
        params.model_path, params.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });

    auto frame = MediaFrame::Create<TestMediaNV12>(in_y_mat, in_uv_mat);
    comp.apply(cv::gin(frame, roi_list), cv::gout(out_gapi_ages, out_gapi_genders),
               cv::compile_args(cv::gapi::networks(pp)));

    // Validate with IE itself (avoid DNN module dependency here)
    GAPI_Assert(!out_gapi_ages.empty());
    ASSERT_EQ(out_gapi_genders.size(), out_gapi_ages.size());
    ASSERT_EQ(out_gapi_ages.size(), out_ie_ages.size());
    ASSERT_EQ(out_gapi_genders.size(), out_ie_genders.size());

    const size_t size = out_gapi_ages.size();
    for (size_t i = 0; i < size; ++i) {
        normAssert(out_ie_ages   [i], out_gapi_ages   [i], "Test age output");
        normAssert(out_ie_genders[i], out_gapi_genders[i], "Test gender output");
    }
}
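
// Negative test: the imported blob expects U8 input, but a CV_32FC3 Mat is
// passed, so execution is expected to throw.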
TEST(TestAgeGender, ThrowBlobAndInputPrecisionMismatch)
{
    const std::string device = "MYRIAD";
    skipIfDeviceNotAvailable(device);

    initDLDTDataPath();

    cv::gapi::ie::detail::ParamDesc params;
    // NB: Precision for inputs is U8.
    params.model_path = compileAgeGenderBlob(device);
    params.device_id = device;

    // Configure & run G-API
    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");

    cv::GMat in, age, gender;
    std::tie(age, gender) = cv::gapi::infer<AgeGender>(in);
    cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));

    auto pp = cv::gapi::ie::Params<AgeGender> {
        params.model_path, params.device_id
    }.cfgOutputLayers({ "age_conv3", "prob" });

    cv::Mat in_mat(320, 240, CV_32FC3);
    cv::randu(in_mat, 0, 1);
    cv::Mat gapi_age, gapi_gender;

    // NB: The blob's input precision is U8, but the user passes FP32 data,
    // so an exception is expected (see the usage sketch after this test).
    // Currently the exception comes directly from IE, but since G-API knows
    // the data precision at the compile stage, consider throwing the
    // exception from there instead.
    EXPECT_ANY_THROW(comp.apply(cv::gin(in_mat), cv::gout(gapi_age, gapi_gender),
                                cv::compile_args(cv::gapi::networks(pp))));
}
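
// A minimal usage sketch (hypothetical, not exercised by the test above) of
// how a caller could satisfy the U8 input precision instead of hitting the
// throw: convert the data before the call, e.g.
//
//     cv::Mat in_u8;
//     in_mat.convertTo(in_u8, CV_8UC3, 255.0); // rescale [0,1) floats to bytes
//     comp.apply(cv::gin(in_u8), cv::gout(gapi_age, gapi_gender),
//                cv::compile_args(cv::gapi::networks(pp)));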

#ifdef HAVE_NGRAPH
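
// Build a tiny two-input Add network with nGraph on the fly and check that
// generic infer<> handles 2D (non-image) inputs end-to-end on CPU.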
TEST(Infer, ModelWith2DInputs)
{
    const std::string model_name   = "ModelWith2DInputs";
    const std::string model_path   = model_name + ".xml";
    const std::string weights_path = model_name + ".bin";
    const std::string device_id    = "CPU";
    const int W = 10;
    const int H = 5;

    // NB: Define model with 2D inputs.
    auto in1 = std::make_shared<ngraph::op::Parameter>(
        ngraph::element::Type_t::u8,
        ngraph::Shape(std::vector<size_t>{(size_t)H, (size_t)W})
    );
    auto in2 = std::make_shared<ngraph::op::Parameter>(
        ngraph::element::Type_t::u8,
        ngraph::Shape(std::vector<size_t>{(size_t)H, (size_t)W})
    );
    auto result = std::make_shared<ngraph::op::v1::Add>(in1, in2);
    auto func   = std::make_shared<ngraph::Function>(
        ngraph::OutputVector{result},
        ngraph::ParameterVector{in1, in2}
    );

    cv::Mat in_mat1(std::vector<int>{H, W}, CV_8U),
            in_mat2(std::vector<int>{H, W}, CV_8U),
            gapi_mat, ref_mat;
    cv::randu(in_mat1, 0, 100);
    cv::randu(in_mat2, 0, 100);
    cv::add(in_mat1, in_mat2, ref_mat, cv::noArray(), CV_32F);

    // Serialize the model to .xml/.bin so it can be loaded by path
    IE::CNNNetwork(func).serialize(model_path);

    // Configure & run G-API
    cv::GMat g_in1, g_in2;
    cv::GInferInputs inputs;
    inputs[in1->get_name()] = g_in1;
    inputs[in2->get_name()] = g_in2;

    auto outputs = cv::gapi::infer<cv::gapi::Generic>(model_name, inputs);
    auto out = outputs.at(result->get_name());
    cv::GComputation comp(cv::GIn(g_in1, g_in2), cv::GOut(out));

    auto pp = cv::gapi::ie::Params<cv::gapi::Generic>(model_name,
                                                      model_path,
                                                      weights_path,
                                                      device_id);

    comp.apply(cv::gin(in_mat1, in_mat2), cv::gout(gapi_mat),
               cv::compile_args(cv::gapi::networks(pp)));

    normAssert(ref_mat, gapi_mat, "Test model output");
}
#endif // HAVE_NGRAPH

} // namespace opencv_test

#endif // HAVE_INF_ENGINE