From 3d1ecaaeb58f18bdfebd56248f0d69c250cb1091 Mon Sep 17 00:00:00 2001
From: Danish Farid
Date: Thu, 25 Jun 2020 03:19:14 -0400
Subject: [PATCH] updated UT tests for Python (3) AugOps with BBox - MD5 checks + improved comments

---
 .../random_crop_and_resize_with_bbox_op.cc    |   3 +-
 .../kernels/image/random_crop_with_bbox_op.cc |   3 +-
 .../random_vertical_flip_with_bbox_op.cc      |   3 +-
 .../random_crop_with_bbox_01_c_result.npz     | Bin 0 -> 1654 bytes
 ...dom_resized_crop_with_bbox_01_c_result.npz | Bin 0 -> 1654 bytes
 ...om_vertical_flip_with_bbox_01_c_result.npz | Bin 0 -> 1654 bytes
 .../test_random_crop_and_resize_with_bbox.py  | 216 +++++-------------
 .../dataset/test_random_crop_with_bbox.py     | 211 +++++------------
 .../test_random_vertical_flip_with_bbox.py    |  95 ++++++--
 tests/ut/python/dataset/util.py               |  31 +--
 10 files changed, 211 insertions(+), 351 deletions(-)
 create mode 100644 tests/ut/data/dataset/golden/random_crop_with_bbox_01_c_result.npz
 create mode 100644 tests/ut/data/dataset/golden/random_resized_crop_with_bbox_01_c_result.npz
 create mode 100644 tests/ut/data/dataset/golden/random_vertical_flip_with_bbox_01_c_result.npz

diff --git a/mindspore/ccsrc/dataset/kernels/image/random_crop_and_resize_with_bbox_op.cc b/mindspore/ccsrc/dataset/kernels/image/random_crop_and_resize_with_bbox_op.cc
index b820779ed1..fbaf2c9326 100644
--- a/mindspore/ccsrc/dataset/kernels/image/random_crop_and_resize_with_bbox_op.cc
+++ b/mindspore/ccsrc/dataset/kernels/image/random_crop_and_resize_with_bbox_op.cc
@@ -30,8 +30,7 @@ Status RandomCropAndResizeWithBBoxOp::Compute(const TensorRow &input, TensorRow
   BOUNDING_BOX_CHECK(input);
   CHECK_FAIL_RETURN_UNEXPECTED(input[0]->shape().Size() >= 2, "The shape of input is abnormal");
 
-  (*output).push_back(nullptr);  // init memory for return vector
-  (*output).push_back(nullptr);
+  output->resize(2);
   (*output)[1] = std::move(input[1]);  // move boxes over to output
 
   size_t bboxCount = input[1]->shape()[0];  // number of rows in bbox tensor
diff --git a/mindspore/ccsrc/dataset/kernels/image/random_crop_with_bbox_op.cc b/mindspore/ccsrc/dataset/kernels/image/random_crop_with_bbox_op.cc
index 2be37f1da3..c873307afd 100644
--- a/mindspore/ccsrc/dataset/kernels/image/random_crop_with_bbox_op.cc
+++ b/mindspore/ccsrc/dataset/kernels/image/random_crop_with_bbox_op.cc
@@ -36,8 +36,7 @@ Status RandomCropWithBBoxOp::Compute(const TensorRow &input, TensorRow *output)
   int32_t padded_image_h;
   int32_t padded_image_w;
 
-  (*output).push_back(nullptr);
-  (*output).push_back(nullptr);
+  output->resize(2);
   (*output)[1] = std::move(input[1]);  // since some boxes may be removed
 
   bool crop_further = true;  // Whether further cropping will be required or not, true unless required size matches
diff --git a/mindspore/ccsrc/dataset/kernels/image/random_vertical_flip_with_bbox_op.cc b/mindspore/ccsrc/dataset/kernels/image/random_vertical_flip_with_bbox_op.cc
index c6aa8450a8..ffea851eac 100644
--- a/mindspore/ccsrc/dataset/kernels/image/random_vertical_flip_with_bbox_op.cc
+++ b/mindspore/ccsrc/dataset/kernels/image/random_vertical_flip_with_bbox_op.cc
@@ -45,8 +45,7 @@ Status RandomVerticalFlipWithBBoxOp::Compute(const TensorRow &input, TensorRow *
     RETURN_IF_NOT_OK(input[1]->SetItemAt({i, 1}, newBoxCorner_y));
   }
 
-  (*output).push_back(nullptr);
-  (*output).push_back(nullptr);
+  output->resize(2);
   (*output)[1] = std::move(input[1]);
 
   return VerticalFlip(input[0], &(*output)[0]);
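For readers skimming the three hunks above: each *WithBBoxOp now pre-sizes its output TensorRow to two slots (augmented image in slot 0, bounding boxes in slot 1) instead of pushing two null placeholders. The NumPy sketch below is illustrative only and is not MindSpore code; the y-remapping formula is an assumption, since only the write to the box's y coordinate (newBoxCorner_y) is visible in the hunk. It simply shows the two-slot row contract and the kind of bbox adjustment a vertical flip requires.

import numpy as np

def vertical_flip_with_bbox(image, bboxes):
    # image: H x W x C array; bboxes: N x 4 array of (x, y, w, h) rows
    flipped = image[::-1, :, :].copy()        # flip rows top-to-bottom
    out_boxes = bboxes.astype(np.float32)     # work on a copy of the boxes
    # assumed formula: new y = image height - (old y + box height)
    out_boxes[:, 1] = image.shape[0] - (bboxes[:, 1] + bboxes[:, 3])
    return [flipped, out_boxes]               # two-slot "row": image, then boxes

row = vertical_flip_with_bbox(np.zeros((4, 4, 3), dtype=np.uint8),
                              np.array([[0., 0., 2., 2.]], dtype=np.float32))
print(row[1])  # [[0. 2. 2. 2.]]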
diff --git a/tests/ut/data/dataset/golden/random_crop_with_bbox_01_c_result.npz b/tests/ut/data/dataset/golden/random_crop_with_bbox_01_c_result.npz
new file mode 100644
index 0000000000000000000000000000000000000000..0c220fd09d2f82888b93437e370325ab758da34d
GIT binary patch
literal 1654
[base85-encoded binary payload omitted]

diff --git a/tests/ut/data/dataset/golden/random_resized_crop_with_bbox_01_c_result.npz b/tests/ut/data/dataset/golden/random_resized_crop_with_bbox_01_c_result.npz
new file mode 100644
index 0000000000000000000000000000000000000000..a909cbe88c5719a72a75f24017e303be51301f54
GIT binary patch
literal 1654
[base85-encoded binary payload omitted]
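The golden .npz files added in this patch are the reference data behind the MD5 checks named in the commit subject. The sketch below is a minimal, self-contained illustration of that golden-file pattern only; the helper names save_golden_npz and check_md5_against_golden are hypothetical, and the real tests rely on the existing helpers in tests/ut/python/dataset/util.py.

import hashlib
import numpy as np

def _md5_of_arrays(arrays):
    m = hashlib.md5()
    for a in arrays:
        m.update(np.ascontiguousarray(a).tobytes())  # hash the raw buffer of each array
    return m.hexdigest()

def save_golden_npz(filename, arrays):
    # run once (e.g. behind a "generate golden" flag) to create the reference file
    np.savez(filename, *arrays, md5=_md5_of_arrays(arrays))

def check_md5_against_golden(filename, arrays):
    golden = np.load(filename)
    assert str(golden["md5"]) == _md5_of_arrays(arrays), "augmented output drifted from the golden file"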