From 2fcca3c3577004ae58cf2683a21209760260284a Mon Sep 17 00:00:00 2001 From: zhujingxuan Date: Tue, 6 Apr 2021 15:10:15 +0800 Subject: [PATCH] update example --- .../micro/example/mnist_x86/CMakeLists.txt | 2 - .../example/mnist_x86/benchmark/benchmark.cc | 2 +- .../example/mnist_x86/benchmark/load_input.c | 0 .../example/mnist_x86/benchmark/load_input.h | 0 .../micro/example/mnist_x86/mnist_input.bin | Bin 3136 -> 784 bytes .../example/mnist_x86/src/CMakeLists.txt | 20 ++- .../lite/micro/example/mnist_x86/src/model.h | 7 +- .../lite/micro/example/mnist_x86/src/net.bin | Bin 15632 -> 15632 bytes .../lite/micro/example/mnist_x86/src/net.c | 159 +++++++++--------- .../micro/example/mnist_x86/src/net.cmake | 3 +- .../micro/example/mnist_x86/src/session.cc | 5 +- .../micro/example/mnist_x86/src/session.h | 1 + .../lite/micro/example/mnist_x86/src/tensor.h | 1 - .../lite/micro/example/mnist_x86/src/weight.c | 22 ++- .../lite/micro/example/mnist_x86/src/weight.h | 6 +- .../lite/micro/example/mobilenetv2/README.md | 31 +++- 16 files changed, 143 insertions(+), 116 deletions(-) mode change 100755 => 100644 mindspore/lite/micro/example/mnist_x86/benchmark/load_input.c mode change 100755 => 100644 mindspore/lite/micro/example/mnist_x86/benchmark/load_input.h diff --git a/mindspore/lite/micro/example/mnist_x86/CMakeLists.txt b/mindspore/lite/micro/example/mnist_x86/CMakeLists.txt index 72cb7f92b8..f61fb44189 100644 --- a/mindspore/lite/micro/example/mnist_x86/CMakeLists.txt +++ b/mindspore/lite/micro/example/mnist_x86/CMakeLists.txt @@ -22,12 +22,10 @@ endif() if(PLATFORM_ARM64) add_compile_definitions(ENABLE_ARM64) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=armv8.2-a+dotprod") endif() if(PLATFORM_ARM32) add_compile_definitions(ENABLE_ARM32) - add_definitions(-mfloat-abi=softfp -mfpu=neon) endif() set(CMAKE_C_FLAGS "${CMAKE_ENABLE_C99} ${CMAKE_C_FLAGS}") diff --git a/mindspore/lite/micro/example/mnist_x86/benchmark/benchmark.cc b/mindspore/lite/micro/example/mnist_x86/benchmark/benchmark.cc index 7c9cb3f6ec..7e7473baf1 100644 --- a/mindspore/lite/micro/example/mnist_x86/benchmark/benchmark.cc +++ b/mindspore/lite/micro/example/mnist_x86/benchmark/benchmark.cc @@ -111,7 +111,7 @@ int main(int argc, const char **argv) { } lite::Context *context = nullptr; - if (argc >= 5) { + if (argc >= 6) { // config benchmark context context = new (std::nothrow) lite::Context(); if (context == nullptr) { diff --git a/mindspore/lite/micro/example/mnist_x86/benchmark/load_input.c b/mindspore/lite/micro/example/mnist_x86/benchmark/load_input.c old mode 100755 new mode 100644 diff --git a/mindspore/lite/micro/example/mnist_x86/benchmark/load_input.h b/mindspore/lite/micro/example/mnist_x86/benchmark/load_input.h old mode 100755 new mode 100644 diff --git a/mindspore/lite/micro/example/mnist_x86/mnist_input.bin b/mindspore/lite/micro/example/mnist_x86/mnist_input.bin index 32154910c70f17c0ca6b72640be27b27838e1c18..3f84f468b7c18b8ad849c06fb5d149d58fc71ee2 100644 GIT binary patch literal 784 zcmZo@7@VMa(xgfMIP_OwY9$84XdRva@kDp(vG-q#~ryEAp9@9V!wxoCM%9n%CG(O0#B*Ep_-J6Z#=Z?XZEYz)vh zES*UIF+)m2>B|eQ)M2~}HVnIyF|Uv0si!V>oH>gIb5?;wuO*prX(!Au4J4=UX<}9W zM(7uehqD@bbWN!w8?9E3lfi*ZsH!q<+-QZyPlW0Igip|3euj4`VG&W~H^YDqOVX)p z#PG6|$nv;xx^veXp4+Q)L^FF46=)l+9PXLRjjV~p-^ouQpIL;l?q2AYz=1EkQ%w28 zep>gB36zPyChJtr!OGu!+@4}FB4p_U(*F@UM??(;gdc7rLGB(@)bl%s>zAN*HP)8iHyK_9)94qlqq(I2lz-B8~>3 zVje$=ZLMb-Jk-(L)(9rjN^!kDAEvQdgy~SBo1DXmStuWJ%ue77e?ndpr@jQ z*$3sySYW`zHkl-xU*rs$UXqw{_6qD$v;)Ry6A@UQL7pxa1&c?$G^R|2`+3rv>Q^{b 
zT4;WNufbFJZS^3W4{v7vol{6o?lgeZy>F1WqL{poOM}40=~N{06|Ok37y?JE@!gt6 z6ioVvr$jhlyjT-(K$@FVy&M*7d=Bi5w-^&rh!*#PQ2THdT(MIHTa7P7)8{$OULp=$ zscM|go&ow=Js%GCL`?@7Zrw(MMYiHOu`b%pE290l{3JoqgYfSLMPFf@7r8r4LwvB{Y<2+5#K zL~vFqm_W5dm5)X>!7Q8itedcPmk5k60%AEwi3)ysehXm=Zp2Izu ztc2=AwjiG$OgQ}2G~d*Ue8{i@J-L%qeNy4C?`bf!P@2uX9)>MeuGsZ$32^p0BePG6 zxocfOFNWNOhkf73!=J10^R>;`Ld+m?i4Mf?497=#*61vxk8LqrDBOOra)x0%+~1;z zV*bKd_*n|mlvfx^j<13JR!U@xk1Qz?nakR4DkJYceIO@|u7IU^HuPPvBo6C2kXKko zKa5SHMrR#=Fe(i-l;;*m%?EYrbHQCbo5~~;`Ziex zJ)UU6o1iX|UKx(}q=TWgQxZ3nE~3e?EZ8UB!5lto(Dv8F+Quv}o1P|ZV=>f!{}_x+ zPggo{Szd0CGFlBTM!RLtr~;3l9ja&{2lhTj|Epm*H)Fgq{_`0c)vZWIQ_s`PFkvcN zw-B9HrGd1E5@>f1)7Bn;^r?;~mlRjx{W1;K>N(%2ReH$lZO3 zq#Ytj>fRn2AsqxVpWcH?O)M(do4^klce4I}6*#KV1G{zX*a)3*(rzQd#)z<3*%t-H zOc`v><%d9lEwIoh1!gS0MePi$>8w>^FmQh{z2{|&aSIMn`K`0Cv}Yk{osz@X=6mpH zq9Q1~s;04y{JgSoN%U@3$KM1005+_EM(Cx>IFD(mWEG0ptikld=kZcs9@O3uV>g5) z6RUn}8f3JXEqGpwyQg=Ocx`DKS6l*Wx*DXe?J_-{E=ZQjieOM-3`iLnf>ezRh#wY( zaNB-baxb2!cBGJ~rBYbdI|B~*<%8<~{3ur|mdBfvr!&3@fyEU^8XcX5Z{MCZHlI-p z|E=oB1ic~V#G@h7oN$?>+|b7Qu0-m&vxJ=bsg5c2%OQ_SZAQkRs;Gi%%UscOOb*ndmC>BN z03K#G5Zp9Y{(LUyo1ri zX5C%l;TB5`Peeh-DmRpqD~4aK9=Lh)7N~MUh)uFbn~Fd9;{I;bz3M`w41VIN!`s0r zqZq&B1cCmLC^~3}lLM8Wu-!L>biQB9O{5*gv!LVxw?a<6$Dfjd{v z!l5^}jc0dqxT^|zL~phOe0m%KC7VCeJ1;Ll!GIUucC3dRbESCguBPO_=ELB)yOG$& zS%BB;CTJi3g?q%jAupjGT6BBJ_|b*jTfA4q_oM*JFBSz)ubV;Y=nbYnwg*fS?dhOt zEFBV2$B~1w+%HL&ss4_Oz;Vhje%;;;n(oK&TT4B|&!`h;l?1R!(||7n#?G2zLHJ zdAB8aEBWtR6kQ_Dj~}4=>thhA_=HT|7%-ly=%5o`J+R0llwyG>Z@=Rl=$qRFt7~te zgTM|r$EJPL3ouuFKeHdr4)zbQPw=tz*>NcCK+ cVaM@7vQqRKOn9UbA&CfFbtD4z$ePms1M?*C)&Kwi diff --git a/mindspore/lite/micro/example/mnist_x86/src/CMakeLists.txt b/mindspore/lite/micro/example/mnist_x86/src/CMakeLists.txt index 9b18bb607b..068b562762 100644 --- a/mindspore/lite/micro/example/mnist_x86/src/CMakeLists.txt +++ b/mindspore/lite/micro/example/mnist_x86/src/CMakeLists.txt @@ -8,8 +8,9 @@ endif() get_filename_component(PKG_PATH ${PKG_PATH} ABSOLUTE BASE_DIR ${CMAKE_CURRENT_BINARY_DIR}) -set(OP_LIB ${PKG_PATH}/tools/codegen/operator_library/lib/libops.a) -set(OP_HEADER_PATH ${PKG_PATH}/tools/codegen/operator_library/include) +set(OP_LIB ${PKG_PATH}/inference/lib/libmindspore-lite.a) +set(WRAPPER_LIB ${PKG_PATH}/tools/codegen/lib/libwrapper.a) +set(OP_HEADER_PATH ${PKG_PATH}/tools/codegen/include) set(HEADER_PATH ${PKG_PATH}/inference) message("operator lib path: ${OP_LIB}") @@ -20,6 +21,13 @@ add_compile_definitions(NOT_USE_STL) include_directories(${OP_HEADER_PATH}) include_directories(${HEADER_PATH}) +if(NOT PLATFORM_ARM32 AND NOT PLATFORM_ARM64) + include_directories(${PKG_PATH}/tools/codegen/third_party/include) + include_directories(${PKG_PATH}/tools/codegen/third_party/include/CMSIS/Core/Include) + include_directories(${PKG_PATH}/tools/codegen/third_party/include/CMSIS/DSP/Include) + include_directories(${PKG_PATH}/tools/codegen/third_party/include/CMSIS/NN/Include) +endif() + include(net.cmake) option(PLATFORM_ARM64 "build android arm64" OFF) @@ -32,12 +40,10 @@ endif() if(PLATFORM_ARM64) add_compile_definitions(ENABLE_ARM64) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=armv8.2-a+dotprod") endif() if(PLATFORM_ARM32) add_compile_definitions(ENABLE_ARM32) - add_definitions(-mfloat-abi=softfp -mfpu=neon) endif() set(CMAKE_C_FLAGS "${CMAKE_ENABLE_C99} ${CMAKE_C_FLAGS}") @@ -64,11 +70,17 @@ function(create_library) COMMAND rm -rf tmp COMMAND mkdir tmp COMMAND cd tmp && ar -x ${OP_LIB} + COMMAND cd tmp && ar -x ${WRAPPER_LIB} COMMAND echo "raw static library ${library_name} size:" COMMAND ls -lh ${library_name} 
COMMAND mv ${library_name} ./tmp && cd tmp && ar -x ${library_name} COMMENT "unzip raw static library ${library_name}" ) + if(NOT PLATFORM_ARM32 AND NOT PLATFORM_ARM64) + set(CMSIS_LIB ${PKG_PATH}/tools/codegen/third_party/lib/libcmsis_nn.a) + add_custom_command(TARGET net POST_BUILD COMMAND cd tmp && ar -x ${CMSIS_LIB}) + endif() + foreach(object_file ${OP_SRC}) add_custom_command(TARGET net POST_BUILD COMMAND mv ./tmp/${object_file} .) endforeach() diff --git a/mindspore/lite/micro/example/mnist_x86/src/model.h b/mindspore/lite/micro/example/mnist_x86/src/model.h index 838cbbbc90..0dc11c0b0e 100644 --- a/mindspore/lite/micro/example/mnist_x86/src/model.h +++ b/mindspore/lite/micro/example/mnist_x86/src/model.h @@ -1,6 +1,6 @@ /** - * Copyright 2020 Huawei Technologies Co., Ltd + * Copyright 2021 Huawei Technologies Co., Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -46,12 +46,11 @@ class MModel : public Model { Model *Model::Import(const char *model_buf, size_t size) { MS_NULLPTR_IF_NULL(model_buf); - MModel *model = new (std::nothrow) MModel(); - MS_NULLPTR_IF_NULL(model); if (size == 0) { - delete model; return nullptr; } + MModel *model = new (std::nothrow) MModel(); + MS_NULLPTR_IF_NULL(model); model->buf = reinterpret_cast(malloc(size)); if (model->buf == nullptr) { delete model; diff --git a/mindspore/lite/micro/example/mnist_x86/src/net.bin b/mindspore/lite/micro/example/mnist_x86/src/net.bin index 2bde284518ca9c546ef6d550b83ffa5f4dc4db06..7fe3011570629ea738d0bf21f732318006d8f396 100644 GIT binary patch literal 15632 zcmbVzd7PBhneO?jzS{S-YwN1&>V1P28f*a(6%9e6j3LG`#CQ{~!+6o_7;lDgoEzgc zhB02lc!}dUjAJs4;itwRA%q|z0uW7JLi7$se%`Q#IF zg-g&MEL;B9=p`~v|83dpHiE!Y0H=xO>oIltWHpd&s$Euir-c$n)eE%U=I1aS=UvW7+FjZkW1B zZQ1@m!JQyH#9+A!yh0u&FO$cXy?%h%#2jW?mc8CaZ1g7STXy}QBB}a2%(i9sulHyl zy@zaBuKUO)!g69}*Ru8BMugnUWbLx$pJu+rQL10|`sZi^{S!LKavwhOBwb7QEq6KD zMP8+k(1(}pKR5X&`VaJ-Wv^G0f2IFSUtjk6edH(|r#F|q{s4KFW9g&I_TL=C(L(+E z%dS5!eU)lyV%hrlFnWgNO3T*&4_Qh-yql0G)0BRRGJKj+`roH0J-0yVwck*B-)TyB zsR)VqDNUZCbU!^t_EQ^qo%WNDfZdZs4)(MczkftGksh$AJ=8{b&_Vhv*+vIQh;F7Y z(f7y^u#}g;bM^u4OH@sEg0tE1`%UU0ZtQWMc<4b|ThfkQB*VQ)pCkVp5-wn$r^w?p zMUNqebI`4e?4e$c(B07GZ7|!@hyRv(*bwdgWZ)}lliWdim~Num@EnE~ zgIo^&yvgYimqGaRFns+sbAaiGeh0}D&}#s%Hrh{P@XZ-=05KxWZhD3elV_O}SeSe% z@x%r1_0zL-gc9aBy+mvDLHaRdzeHGg_yirIkI^HXmTW>i+MwkjL}i4oAcv9X+ad84 ztmW|8uc?W78?{@b-)9~sAJB)%J1z6aX%7&Yr9U8h7!g^|Y$V^MIWj|!L;kCX`;U<~zo9RaZssq^deX`K1GxsBykwEY z$n_=dA4Amtj!e->+C+}Q!f#S9{Y!d{t|i|k571Wfzo5ZE`Um7>FL|6S(mzrsq1+%n zhe#hq)DO{zzGEV+yE(aq$?`-sdQf*^Srd$wo#_ zPLdipfRo`dFD)gWWT+5~{Q{k%d#MExI}G*`pdM6M4OzaEJP4l*(#Od``Xat7C#UdR zfxdxS=b={*!*3Bh2kA!S=OmRPPu@gKEaXjS(?iZLY5(tNi0j0>xR3l8e*T#LIeCG6 zihoZLKKUcPPNUq1bbvlU#N=P-{X{_ijTyqbJkktgI6xxM^snIiR$^lABAtliee`o$ zM>Uj4k_Kq^lJ<9#U382lvCmKG3yA%lK#d1TJF%0WQysaBr0KuY-Q*K0CKPycnf@Q( z%NX#g8MSZ6b1V6KctZit#py5c-M^v~Wz<_rVStsiKEPaakKKYmiNEg`)Ee6QLm=!P3CyAEXhPXYC@9(4! 
zk`r`4=^z|Ev!wi2P~BTleJ#+o1*kJf&r=V#m+obDFt5-McGClCY|O2RbP!pvmzcor zY*de~poY#jxdGJ15#$TY?7-R0ZhU_`aKnlHE|DWk^2lM%MBfGq{fLav_qbQ+3Sh~0 zZa;IJX(7kCH|SB+z%I@UEZj}rVIHKG$jf$CP=m_^5e3zkTn2#}ce;*M{ zAwLsXb&v!QpQRorVBvXWiWiviEZs(4KqeeQ3?3v-WbiX&BeLQq-AdnsZyzV8;in92 z*P`-WsE`j3p$o|O58&yo8Fzj){^OUY*;UC$gu_uWoo@RplX6Fsdl zuagv~hc6Fsr+|oRXz8Xs%(K*q@3P!(#AZLq!T(1f;d$h2jmcq;y@($nTNocY$D^>I ze@PzW1kygsr4Yppuzmo~kB|{NQ01@F63`$-Z)L|R@X8pviyHB@kq)|-^N`)N#MKa$ z5ceWtIDlyEVV=PH=gBMh?UpWToFpw}|2y_|IPY^BSC!p73(D@0V-`mi$2QnW6GQ9_D zh0JOCJ`nk3AnF!m&ouh-Qu4)gC)h#=_3Z)Ln$Y7*$iNskK;DK#F>Rp-nBCxm2k{L# z*MifT5_rjr$jJj(C&pPp2Utc4KdXU}JHXT?Da#B)KX3;8yVS#nUVgm(DtC!nM?XgW zen59IZ*a%S{isBi5i-ZXiB5o7^+N+AW233MhrG`PxCHR2gUab~^k6+V$ZdnxDdq?~ zXd%PgK5if4R{Jl^A(ZwdcVB>yA4TO% z(;js1OR!}lcDn*xejIylMY*z84U^giPH7+HlZCqUS<^lez{`zqs3K+6cK zu7r$k0|(-klpm+_U@uPCAR>RFVpP~)QUU2EjmUtlsKjg7C4guQLZT1s>Tz(f9|C{n zXt`1d&ZXYjHGnx+3jo1wp!^um`GOa>j~-{8%^ zL%*I~(*85xbw`1f+qZs)RKzG>v70bPVE>}U;kAT~p0_wa5)_4W6 zKLLK`#`IFc3OUx)pl<w^+;M!K0SD?M>&OoD-RF=|CsB#}m$d&Q_=(U%VEkhG z1ZwCKD(7uXc(?kk6C9=kyx=f6fE>K51AVVVQ(twyUDSu)2T*%XvJd^jhw3bGj74cPcGzV+m-GXUgH3h!=&{9i9Ae-|+9S@=AKUb>0A1k^r=-WB6eOYlnz zc^xxh4h;PkqV|Jr*O(`ebuWS!?M5B4G==*`dU$FOEd3^U&x>G!CGZmhHgyP$_CJq4 zL00WS)J~wA%pqrY!w*}K`C{@2R`y^vwP7-R3zA=h4CFk$huH}X>R=9`Qd2+}CD_*p zYR3sdtH9UZgxoiAhPe}+>EZuE{B^{LsVaa99HdjUmwT6fgHqgq-3Nc4Lq?tBKEqr* zg_!)1{+J{=9&bPT(fGg{?-in#p>^7j%4^DK7`_Yb~B3iWN|A@UebW|p!) zk7?lx>|{cYKMm~fLwCO$b{WCL{ucA-L%{x5py6p~^E33CH*j~tN&X5f=Nr&CjFX2C z;i12P)YH)7dAu`C`C$Him_>PDWlPCVA&Wjh6mKEM5&9r^77^UZy$${t!#N^OpwQ`$ zbI;&B;z#IBTfv040H2P*_a7r`bI1cP^3nzQYjHC5KC0r@iPoE(6ZkAeX-FLa3WR_N4}#wxqa%potJwQIy7@j#r9vQE2^H}KvmG81qY9Mhcq`DMd%*e& z!04l2Vdn*!171KkAqze~t>^LH|1T zvw?TqjtqMl*nb!J#1H7-Uuh1fDY3cg+;j}OVPix6DoETZvh3tA1Yz$|s$oZYbP7{#*BYfv55Y3D3xEXwH zJNm>p7`O)&`y~4C%V5eY;KQ}Z!a?#bGCv3F=a9!sc>ov+i&*UO|Icr4&Au#pLJZt` z0KL5*F%+W?379fu6J%uS@5@ z5FOJ8X^#O(M}YGiAZMCe3y(YwJ9)5ON|ea;_rO2j#p&<{(#81D`%VH~PS6N4d^7U= zBjjxW+O5U;z*DG;XMlZ}VbH0Udfb}YPr-gACb+Zc*^{v8N0{NZLBbHC5&#!|6W2kY z`3INj7##pApQgV?^^|~uw`PK+Orj^Kwa$x z3j<;^+n|pgTB+fMeX#p&aKt{$o7<5YyTHQWU33QUCNhlc*8Nr=V!(n2?gj3e(8~{i z2MmLYEp7jAN|bh#DK+OPeL6$wqg6^z1u6YOp3-m~_YI1a{-HwY?KMiz<0=`2#SB}a zn#Lx9oZl!miTR=?i%rOPM9B(zQEJ%CNG*!fj!fgEpl!Xu!k2t%8g+bG zQ(GePX-mjDDUC$CzrEN#VgCa!&^607%!tO;!U6C39IKDW0A@EF@FA9?q)?8j}+ZUDhi^?xT0y5e_~!1EAon> zqD8OC^Jk?3b%p3NghuA#A}O&7RLOKfD(=j%!{c0{B&WhfLBti{3ar$?$g+a$dZxLa z4;WjTZNZ9KA7wH&X3-;;hRUi*yDYEeO|D9L+?*hqP+Pj$H8vlooQK<7?Gfocvtmi9 zX`*^>zq~yw2$yRmkv6pG7tP5UB4$>fi-%N(urevQ|Dr3Nk5~V=UU6yV#HWFVS%uCo zNzAM|DGZtTmN92iWY&l*7J04ASW^y-(3n!M%FGF)!$S8~SwK@SPe>%pVrb*_wuPj! zba;S*XHIFvP3>?S)38NQnuu6{?RMe#8leoSu6G^vz~@@!^2Z# zZSAs!Ss~$nXZ7Lx-<0@=fB*DNDyb4>X(*W#a;d60GQidO8DbDMSu_0l8D2w7pLY9l zHGVlG{ko_z98T8M1AYV1^Sc`iG!!h;nbJr@J~AOLSJSQ$RlbpFnv`eGMuQ%{kY^@t zv23&G52tykRD%?eOO#i=fhJney%%{t>tspYHM)bz%pkV2HVbM`=+j! 
z-1isP*G%4i=c4sS-XP@+ zEeo9bK1Ex+k>~4e4+~^rdxu--9M(*?D#of=evw2J^Yaz)Rho*)k|D+%3NC~rcMJs= zJy9(cNMZ%HFdwbcdQ3f=5NFgPt<|d(H-^OtXHKB*;hEwzug!EW%IW%~MYTquY57Lx ztU^*$X6=e_pfWg{mUM;u%BsI`c1FN2s>Mp4Nzxs(-zLd2jB`|?Z*O${H%!M&Dp2 zIX!#dY`fpkZ}gX{MsIg`MVHz>6&H$35t-d(E7zR$n_a2(j_fEYEJ_4gmZZWQsDUx#?Wc`XnQ>ZJ|=CpMR}Etx1~v z!i!vpFH_NEWUYE-#ynEvW#>D5gY$;+U?N_RC#!I!lFLjxW2e&5qaJxl<2~yF%*c;U zG|{wLx$3@%e*W5=D{-arnK=0Ajn$I^k!oJc%M^6=d{*t1rMOg1QL2a(!iloPWQueu zOQAL3V(&fbB z?5wa@3l>W}U3I2X9Bc3klA(Bwoz~Jb6QriMh#ErGScI?G>=E@_{WFt+s-mtDSSGK_ z7E|?n#X(04@_aa>NJ#3PDVi(H=OqH3n%6E>WK(JLf;AY>NJDEDho(xy%`s!B<9v6o zx7>SP9d5MWp2%}{u}W-n3Y1yKHCr}}^Z4@OtiP<5<%%W;$yj@I0Y|GgXwKdz|OXXQwv5m zkrrt+@k`ap#d722yQ$cDd9ZPM#wd(Cqh|Gug-}kl>GmQ&>~Jm6)#E-{w=j|th;7M6 zRbJmUQVpeUDr&u)h#BwZ@iYF05+j^Z%aeYVqm1w-EvIs{+$4pk(*~k87E-K;q8uVc zmCLWHvV~M+&J+%MH(pyytW_iYI)@mNj~P$VKscZW7FDBYiyx?88KTiZl^D!WN8SeF#{e#2~Lw=jC`-Tdp+6kA9M z798VFo=5CbNXM*``Em87fWa6VEoe^0hh-z8s=bk)%9T74KF>vp9Fr^v1BI0lk5u*b z$!ddl=1)5Rd_=#_7Z9DD_`N^MO)an`c|K#G^Hz)LaM13UE;W$CIs==ps)b3NP|kNr zCo0yIdZq+;tm`iRUdgGW_8V;{&8z*v+^j;gqO)ZzZZ_0vOhUoSB%;#eta(1N$OvQC zJq2UYUt5UJ8Kk$-hGs{q!Lv54>voA)omnA}cG-F{tUZt+HY8C^h<=2fYxRuBv83q1jQK9I;ndXlm#t5pZ(;*p3-CS~QyKy=+_6Bo5t z^O+z6Be$l9>|x8eTyE#uw?)#UzV0#Sn)T{Clu227p(*d|%~$$WN|ver;-{SB@p5GvJk ztdU2(sz#=b*Xia>=~S`A%2-|?Z}9fgbel-om8h0@9sSXpW?|G8(--TqqKYr&IRmy# zl~)o*ti@cGG5HHa%ypS+NYJcu1Zx6eYABTBX|v76tlQ5{XDWb1`>4tvGhb2iTT1%Y zNhdqeH7d0=BulO?y4JzyEjH)EoIcru`QQDHr=}3Jx+3Bbo z*4+%qrloFP=h}>B?PZU&+;&0U(-1OC_(Wt&$piW5SP8f8;?Ad7?5VCT|KgUbSn^Oxt29QBi16p2@Ra=4Nqk)=+cI=$Q4*D_NIL ztj&vixcXi4YFpd9KHth+Ze*&X>+C1$K}lt7PLkwt6KNY$?h4dMGNs8S#DYad%|vuU ziL9t7$Kp~6S89y2f+{~I)X9}nol0Df)%->?t0dWIt-Vl?FlD98#Lvjh!dk_c)?IZp zslDqvThtp2+VZNN;+U$WWelwvYUWF*$gNBkRyQYg_PO~{M^)rfvGIoXp|V1ITUKaW z6%9oI+*Q8F63c`g(EvH8ay#xkcRinu*USsaahJ`<_ur8Bak7@QD=Vt?msI8^p)OXL zYbrb9<1WY8jzY`jz=hSdF;+fgj2)ckUt#ezC4&)>kLQjkih{CKthg*_iieGLm)>EyGtS8kec3`)l9faZ^}DHI zvR;e`d#5z?j>!}!)qP~Pk?5+XPbL$KjaM=f#h|18$Q7Ts@PRI`xWX9~^_d6KvV>MC z9qSUrXIfH8x6I`VZcyg+aqmo+xyza9;PT4&?1+r_fL4{iIA77{@=bnwqODIFRj!xt z1)_G|<(M%s=;NgQi!Onv&)z@nuZEJYr#0eshyNk3dOWystEG7%#R&4kv_RRBT(elM zI5<0;FB7&einyFd6mu|&TwGL7^n6jx)*CAEhLJ+rAQ9yz#O{(hS}#k)%9?L6S`bc8 zizO>riDr7frchRC3pBmApi`%$(p*v~JXchfb@r;sP?nL5gZSyk306?;LQ6{s8T zteFiw=ioIy7Ab%iTyO)6a+P2#M@EN88{PB9p**&b`)T7@xm{4+KqM{si+7axtifDT_oRi3 z=J`gU+~YsjEA@%|^?Z6w{gPcVkf>zM z8m-}0gvDBjTWPE1e7N4+t4wdnL?u3DxNhnXc<*dIBia8*|7<{es~Q+RneaI zEY{=cFV~Axi#flt;QL3Dr_84uduar%wI(fiLQ#P{9de5NeC4H$^Dal(C#6I%cgm5? 
zDyxIFQo)RoN9&eEW#-?hx=Kp7EPDMj0bQLjM53CZSyB?DI&+DI(&Fe^fihg;=i|1L zcGP8Pm{CgHB0b~EHHh^^ZT3@lIu{%_7DAs#or(B$DLm5Eg7QPpj694l&M+xYA{;WsW($%?_RBd{ULGa9Vl1Du`&+tJ&O61`1jpAcb}S~k3y!gN-<{1O zMb_&YRL?slD+)%|?C8zux&n@lkb_w;tz>MAfd`%KbaN?SvXw6^I@=l-o1M{W;@i1~ z?>yewC$)Evi?WO28Y@iOGrZoJ37xeV7g-Dbi_g@m=^SGeQ19@`b&+q;g1|7bULrBN zb(c*1L{9CIR!J>WQ94~!zTV8Q-EcR}Tr`m}U8q^Zx@-PlN(L1sU$#CVYS2)J;F5G+ zMJMW1bVr5hNoV4%=j4ewWh!md=hC`bSNZcq$yr{N6AsFVydH|MkBmfz*CL~CJ#(V&Pc|1^8OsW{#u8c@q>6}i`#6H3{f zz#%otSI)DH%bcF{-%SELu5X2GErKbD$4l$hS9`aJ1`nj76c`GqdQ&IW)pg`Tqv>%lKM(^F4nYY%TqpICQ@w?3!+7zqb8CV)1?moSa955 z^H@dIaTQx+xZ0IPsxQx{Lh*6w?DaEg-prS}S?{HKpJc4ko~p9mP1VGP@bI~sZoWZ$ zTeIDvl;&LZV4JF=r>kr#mUY6YUC`BO8cHYh#)4LUjTD5d1@y*1S)FpNGw@_ z)}`7zcGVM)+s@KyQC#6{R?4LbO{S!v1_6!!NoVG#R8n8Pz|RZ{#2KZhO{)$}$mbj` zw;q4@&6_9U*Dvz;q|w;^4~sp@_M_W zHSvJT_Sr8kOuYAJNiJ|Ewdcu?h8pR2j(+!N)3y#Awy$0D!Bzd$S%Kv((`V~CMt&)| z^MPX<{5=mmFnD=WF1uEB_95T49a|?(+FE|oE1z8#+3{zIqn@9%ov$k1x4(Dr+mDrZ zT+~$?-`#4{UP$=2gmPbG@BV)a={EN!^S@jhU4L=Sha|PL*JAG2liT|K;`^Js9t!D> zGSmNjR{UqTw@oPQH>08Ckg72K^y%-l{boG>Veup7M()b(+?^JIq4UoJqn~|Yhb`-H beG?b{8w2iMQQC-~NAc>z)rX%y!Rvnjw(<#F literal 15632 zcmbVziGP!2w*T{H-?yeonx<)*?)$#c0tH&ASg~RSL`6hoxDMB09Av!8wbWr8?hHDN z*KwE`#-AcGsHlu0n@|cZrIfam(k0!SrdgXdOOrI~`}_{`5A=Te@=24t$@8A`e9!lM z&v~9V9LH^FKQTj~WggAMK};bksmmOWzQP}%GuXM@;YdeR;WAuHS?&ee@n#~8^6s+7 zvx!*5B06Zxwa+AehjwAhvhm+YsmE){oMqc@#tcG)wlCZMZ_z&q8fsbg_^qfE<)CxR z9;i!GO8o|k=kY3-;BEm8R{XIYyTQ*qWbaE%l3ai`UUX! zkINoELL^aWNV;72Vdi7_4BozM`#bSUVh?FuHh%T^b94+pv26R(m`151$ChnhgW4!F zNEcZyfNv2iXfoo{@N&n0h4&CCXlmL1(}*$3HgtB`{(nTAM`2vOT=&sD>Vx=mmYshY z=q>abKDg}hNAO8P7pYvfem0;M{2t+4w*A}WJpK_)F5CWA%s`K$+sn4!5h+4%;S0;Q zzaO}lf-09C|J(RD8HXM&o4*gy^Y{j)FMIxJh=(uYlgrkRJnW2|CzjuTos66d@1vYu zHhu_M$djmH+4J8=o{nrImaqTaix}?-W86++d@O{qErRg_c=01E4yW!D^zlS;W;VSUx1nR|q17s9vE#8M^ zFv^32WDa^4*C85M+k!F(JN^W}iV%$Y7IR?SQugc7rpQ7XV7}0<) zLu?K}FL(}LKq^487axJoJ#4X7HaA>m2pp*DD+=ZL*3@(EC9YQDYSNJ@(v_HXLgASd=SMZPcJuE@ZcoNQj zgA>se{5T%MV)Sct1bI$Tksb6T!W6o=e>9Z zIsx1|iZ`Np;KvAF3wXQ5_bTedJJD~D z9dbc{UV=S@V}N)K%;2>mo3qA(w{ z*T8-URMx$aSBLP~M^zfSf!&0kygt_rt1jiu@R&@CocdRib_9 z7>r!%*#2m@24=>BT6Yw!0wo#*WgCR3zlFNM4;~%9<;_FwaOOBV2{>KAhw*;! zX$2_udysdZVA!&QGY3I2uA(4*6C$-AGIA;VG>EGR?Zpj%D}t=v3A3yOt1VDL!?*=r z*#Y}Jm|+#_1+3GEHguloC7tB+5kEvs1+i}cq`M#%7a)V4$3>_Oy0pts(YqjSOC8O? znLOl13PKHad=Dw0_VIaO+ACYM`^^X#0+4Ji1p;_p!6iUnKY4kXe!(e z9NC03AoJY7qgtp%Bq&!UZYJJ@nxS3FJo*)MFSW=FYU?H`NQS=#fBuQ(gb!~;vCtd5 z1(jqQ@a@q|TOrd(GKAxa|BW0X{}@Ri`of!$E%F+E2EB*l$!F1DBiYCgx$+-)6}pGI z5PM1c(J;$0^cZk012i)Ys`n8X^#CePDb)FgaGh7t8OV_lQ20lceGh&Gy@8T356|Hr zP!|ar3)SZt)PR4EXYn|B8`#G|f5C(3XXs-TSW*Ey$>$-TR*`)q36a;~PI3hx$RU8! 
z5N#7wK?UG=Byt#PfdDm=YRH65#MMX%{+K*XG?PKRK5`ZxCf1`9kuj8mC7|WIK(n_1 z3uVOAlJ-d?1#Z0sefxWm0e{1r(Jj!e?Z9#+*tg@0XfrZF41)0KYryIcfbkRLT2M&= z+6O(T4SGU1{ylKr4J^Kf?ZAd3@ZDXo510kL_)1Wm z{68GTd5CTosv=lW@!QC1P&pbB!7h9rTz_r*J3H5fu8J<(jvSKbiRlb zLC<#q>SsMXpM~|Kis*;9AA|Z)2H!uiqysC!%Tu7h7tlN8PTWEkLH_?9(+D-FAq|vp z46xzgKG1m!J^+fd2YRj+@=Rnm-VJwIB$12?%}))xHC;yawIA1s%XU;Ywk^ zr7Lm-cy$)4UOgTIz1c>d#X6*+^hVC(%18@3La0C^EkrGJ?Jekek_TPd5s2OHCG&qL zsNZ3z>mty%YjE`gP|2&IcM`#zpr$}I>4J{%JVc@z@*TJc-{~NA@Es9wUO`lY+A5F^ zy$QN`8ub$=fT@?E4p&3HY=Mq&srj2=m0AT1`wHzu7s)R2Q8l0ebIt<}uR`7X7MRR~ z?tlTcM2c)=HDFYUKZFi%FQ`E!Dj^l1S)W2*`YCYe6yyd%DfoM+jVX{zf+g)A1V4{~ z2M3|AdKWrb1n*%@2Awkvdbkc)Lwy2lZbP3yo!<+4n|82s0&jwhOoA~|hP)PlZ$3i~f@&`tD$o?QmyTVQ^tF%NR`9Ps=jKz=W% zfrxn21v&u_7Fh}3?m}hwFxiFA!T8hgTnX>{VTGVV$00&0=#Td;Ie#hCRT_NnUEsL{ za$pZA;W5yeX^7oEKz}=Y{u(YJb^;GB!piR`+79tP1+gvx)L6tO@+xozst=k#(`Ykv z4XXgLBj_?HWi3>Pr2vQlOy}@#A>++Rh(fU1$%4MO6g1I@e~0JE2l&tUzo9o=2R*_A z9Ky9k1$b4Cz925)8Umy15nW^#tn^+X?!&5i9DNQo>fFC~ky4-pE*1VYyL9g=jgw20BTPIbOBvB9X*Ba!Fn(kZ3T~OQ9g`~ zf;ip;&pv>is6!C>tI$2Bn}XWOBJyvj9@s~N8d(RczYb8)J#f_th?8eY{OwS8!zhoK zhZ?&D{=EZZsv&kZuzDP{Wsu&zA^6}p8y2U)oZ@aVyL#G@7G zX2J%(&I2PF;N3a&)spi!f>KtX1i}qCN1+Y)7RrDKY=!>y9{vt=>17;){tVZ77It(h zh%do2HuS-Nf<5{|^h@#q`W@&)KNjOf=*oiR8e%IjcMLL61gyRZ`-V#$0YLQ^C>?s{ zb;yCA1pN-fj$=O5cL%P4PV7mDaW}-I7@mKH{s&l}2p$f@u8;Frfy)11 zpdb&>pCChafEqs9PpJUSUg~h;)8MTVRynnRR~7Uf1)!Vn;j7TW>;^44fww{B-SzJ- z!vm-yP#p^KJiY~$sRVWzSAo*I@mZ(`m(fvJ36j9vIzk4P88EJTDf6Hl&^1f0@qL*la9Mr+Oq#C;Ald#G^0P0xtvg zDfNPf7SNyzkM6-k?HF6qz5>+Kf~NtmU1%S15HJuRAGscKzycPv(3PrSCch^>hPe9C zRf0h|jn@)o#AeF7@STqc0j%<8@L=R4%4t}29D*IN?PwSAD&;D~zZV}}av$Cg{OJNT zEr4o0L{^HXLFaZr-R%V~NPvs`fX^#H*G3>a&p;hG5AnSK`xVVt1lXN|NVS08?+2~u z0NiTfI`5$pVih3xXy+VkB$?M_TcLQZw+YsVK{dfu2&EB;Z4Y{b5M)@ zu*=gAdr;dz1$0ChR-2!K+MkF_K|gSTaN={Y3qZpM2q|HQUm8xeS>`X<-yC3?j6AEVz@`J#f7mDUjGLmIU2xtK{v>M&`+60HzvCAr%I>ZNSYGu zy;^CP`!b4G2YE(!wm;%j6bsFhg%zootcbJ~5=L>HP;}2`3I5nK6E4rXILc`soA3N- zp<+-qyx><9-O-&-i@)aB&|i=<8Dd6U_pH;?UMcel+NinZBrQBgr^uOFhkAsQ6lIs$ z%e7hJC!7;)BRGk>hRrc_sAa9pqIpp{|5F`ZDy_=midVaabzzrWOvl#0yU1jd&+7^5 zdx(eh1tNu${55h;2_sT+)&d?+y#P0<8&xSr3ci=kVGGHHCFIA!Gz z8|+0{=~Uqm_RaAV#kx7VxA7v;z0 z4)VArXJb&Ex>J>l3k&RX7K#%k=A?veb~~M3Tph_+M{qiRN-eEiSIY*Nl+6J7n^FPLVcJ8TQBb~G69hXu(FePR^8 z45?%gutc*;BgNj0?12$BZ7_lyB9)mRH5KTbkqwRqY{Dd7J)Lgg1veW6xm-qkwn35a zb@+pljG(m0?WIS`Ly2@RC7i~X<_9O{)t1gdR|5Z`);t?%ZU3fy)avIu7Zywm#}zeI zKP%D`Gn3S~kJnu!W^#$*w3QkmrGRLjT6?};72goJ+t^p=3x75;6&S}cZf$09d?-Ue zVWr4zMOVsrDF(ep%qwyg|6`%xTCm>F;)q_LU%jO4NuwT|j*qWqhK&=S^o{S@AdRB1 z;v9-|<-%vfR#s|ufA`Fd6z-Fh@Om=?jW3!QHHK_P)@@E8(O@bco+kAzeS-PHd&KPY zfaeEMUq~kO=_Hp-{c>7nzUM% z9CK(~u8ekFG0tfH?AlaW!X3XLom$8aGFY6ci2G@CVM0*BH)R^{6eo*|>~@vgjOKhA z{~EbPC}ng_+YcPXB-V?oKcwFzF4}JQhXxadb>qZSL8E@yS$6^g@yCIaJ{ zp~;8NuZ9+_=8&874e!gqO?N2%3HtLKt3NL8`wUrcQI#f3s%DA17zwV)BC8 z;ICU#MBK{p`iE9--HE4M&s4DHa>A4JCz>`Uyttlo)7frcXg@unNu{VenYZT$(!CWu z)&uB%u6NF0>}Q<|+Bl=F z4^yd0hIsDi+$B0crKj7WtTz8d6^1!ElikrH_VM9T#V8Ry-fYYBM=lmDv<*|JBf_r5 zKJN>~!)k%iw}K!|B4M705V%6R=`3N-J#+M>9m(G*=07tjG8kp^d$G#&%)=+>M1T@S zPYvN!*ApUEL(B-X!#A?n_TvP7R{l-mMQ<}>GUG&|nDc0cl@Ng4z{z|ZiVYgfKBd4q8?a8wX9Bm|4E(mL56H>If3$w8Tqt>Y^ z>T3n-r|8-ZW86rVU6JQmzh=ba4%_mo zv$R7h=Mx%@q;R~}+M%huTJv;nz~g@Q>#6jTyV;?l7#tUU`Et3N-z<*JGi(tS%M_d( zT|Y-HE>s)xtPwvW!oP2LSSGT28}*q(9B4XN6TJmaMaK9HQ!JKWzCh27wCL3IuEj!) 
z-2S3YVt4CvIwWUKKDM&|xzuP^lXt!EMnah)%PuTF!GALU-i~OAXbQ*Zl!}KL10OF^ zoV47hb3^weo!^8MLT-oXVSnpUrAnG2Rz&gy*|rR2$TS-yR7&Zbhh>;^JdDs&9ePCI ziJw|*?P_Tt5=yV-ei9N=lmZ$%xn!A) zXCxDYX65yJeZh=$oHuYmIXg6^Vnx^-g@Uy%uA^>+COg)tO)8BECeUe{i)7l$RU}$S z9Vw^vFYa0u`J>O}9$#6Ntcs1#ZAvYcoT)bkqJrjaUn$pVd&AuDLO6YG+k&VtyJV$f zUMxsEC`%Q@d+N5>fKT_n$T9Y~1~Zy)H?@E}M2xJ{`-hi6umm zGOKfitwgJutzyW&yqa2+xwCIqV(j8BscBO*aZsDc=uJ+i2NPI(r=vPWtb}k%c+8V6 zuw;<&nNB$~q+k>X$E0!c&GYAz)~7KFqc#e>8;QHJms9Bdel99hY{^+OzH^r9s(&HZ zU-?pSKCiG=u&54;YAeypCWb3XYk6WryeQR|DOJt582K7yD%I6sHFMH?SSvPYH`TAu zW@ak`k*7APiL`$C<~Tl89$U>xeEc^)85!37Ye1dwP+as`O>II}4LXr3tfr^^3w$W5^PBkxy7+?AJF&Y`2n~ z`s~UWt))i9zQ+!YCDq*#wMA4iiAu#&u1#~vhdJdL`6aaqQ-duo9Gw**h^HJ1ADO_E z);Q8;r3>sPZf0Xn8f)DEb23@S9{EL1uOKS*ex(nQTuwHd!P_Lyp~tRJ^SyD1=@zlr zdHT?rBz{+xb9`txLvAjtc_>xPIAWM-9tShZvnDnHQ_sq!S+IaHoi>sn5^1jd}(n;iDfB*6pne!hm?{cv)~CEP|KU!G-6R)s8m+iJymV-+(|)2pNF z1>Z2{#~b_oZBHYn)3Qd~=4z?)@gEv*WW9L%PR!+>I8u#md}|~=+ahz9t#)`yN4uVk zOTHU7A?zrXZ4#(*xp#T6h2580y;82^*V3z{x&@I7SGnTL*}f6af-Y&=B5aY_T=Zsb zxZlm@Tnz>mT$RC;KMt_YiuOt$xYE@!My^WO{ci-LUT7ia|bms-ui*JyrH&ZyR+k zyQ8uAC(Dd(+A^b2-Jxu}@4iw%_xLhzQXLH=)b^BA*SY*L{-V48f@3_{HTL4guNKw6 z``U4r-pZMrX>zU5SuZ0K7t<|)c7F5^_8$e7JHBYrpEUfCYM2Ol2XZI)G9IJTy29e! zpupd-My6UQvo)cb+Y=LFR#j*Ekir&=q9-rBG=;2YFDsIpF7eL}^QVSWL>Z+{^MA%s zT;+0MyfC%0Gi|`L>b=Dqd zp|);~`B|()nV3@IdCHP0iOYJbaPS!}gSFzx*b3N-BX4U0&g@x;;7P~SYuaK?J^=CIS zhnvteFnL2TwkSI46~oSj*i&0kn&@7g zE0b=7T}kOE;v}Y)Y|FaG9ll=5C3xYfn#b<$Nu67rP%pC;1(iLTWLv2CcAm2|(el^I zyR6%*M;{xy8OSbfFS2__QUcLL>cZx)2UZc&+bFBK4t4$a6{Rc=$x<>Gbq{!Ak0iL0 zKR@}nUT2-9-?F7kV=iy*DeEiNbq?frhL7alNV)oyH2do(qx@epBY9su`NXvKZCvxd zQJXZsK8qE|i7!3l)2@%of3tzymLpQ?it?yJ>tkETVypRTS!z?v`kc+LRF8i~N!Hso zzP$RyRn@A#>!JLtT5SwnQGtd0Mv_spC!n-*pJEx+wEZ!CM4acT`{j{Q>S|&~0=-xu znXyMX@LYk}7sX$Y%s8h-lozjFyBHcIs!S@8d%oGLh`%j-VR&wqL);rs4zk8u0`<;< zir8#}PaT?7?&PYtPQz_SjUbaj>EVf~iyS?_orrb)SS0T?ag)Lpzqz+3k(JL*Q;^1} z6#k44|$xDhn9BUG# z5TzAKQ{u-a85}OL4s+Az_$8lt0(@P!tN)f!%=u}-o9d7V43kaQu{9Aj}0(572Lnoz-ba;$8izjZ1wIYvpOp>=%YH@Dlz17}aP%AUrb zySx8>HvImX|K;`Azihp5ssH@lE7qPNhR?h>+N~ep5A=sz3pojaJI~Sv%=DhFD`DgM*Uj(NjMT%^P^*_X&r(%#vzsTQGR2*W#GnW3q`u6;XqZlg;;f zCa%s1l|N6MjmsXN_fl7LW!>nq0-;ifJ?@1TW+ZR0P ztg!4a)}O{&!)(s~%3df>vdYqBla;g-SYIz8-v5pO`CK)W~9Gz26>g8 zth;N}H@LDECs!PH=sA=D6D?hKpH9w2C(jOzsi{iKXg_u&D5dPcNU7sqyky>3N*NlR zj~TXROwRGt)59I{Mobq|1jYchX2_w^xyM`u3!cf8KUcqexypO>L`&Mda)+owYwk1y1B^Da@)VlXrhf2h%f1CC8IA!9T<6fB|j z(;1hUh9462oY+drENlXf%(R_h4n7rgDaGBTPY5*Y<`qN!SnHojW$RzdJMZQ_vyyRP zHs3fg%87~*-FA3_Q>`uKp>Z{7pK47T9O5G%+ozbPn-|>3z9lg(KD93+$( zP-!;7KJ>~zg`L;oB99mT}Bk80`5+B5O=%YQeGwxLt zi`1Mu-S1zT?lGy9r{|f2KmN6;KN!tKp+LGA%Vnv_#Z>JYSIoZ>m|}m#6DYA6h(W^w z-IPsX(jw%zFJ-KQw)b4|z@+}cTrItOGbO}+B1!PiMQ(mU8ZB8+C-xxb+^bZ-s=Du) z3Qe&iE&;EOPWOw93>h^qGoDGd$k57MxzJmnHqFG|7P2WblF?+L^`D*7op)v{VvJL@ zO`X$LvGr!{w=SMIL~Ph#{qAdjD{FxgekLO8^+=ctSrYNhiy~~lCwhBwZlu6$4%*Um z1C4#YDffloCpqg1CpY)dz7x%vX~i~!!&OsKH;M~h%vDD zUI)+TVL$%SoxLvDpT7i;c6k2^JbnS^{s8a$;Pu<^8n*M{UK)aX(FpD$WB5H4#=nGz z8QxF*Qtlbod|sKheV}R0>m%*y!=E*BYs}Y1uT3_Y`<$K^z3RsJ7m5zP{A*oyW5%yX zua6}R{71n;Q*K$?c2<*d=%}i34QFgu;md1PU9aqTti3Pzr@uV@n`5_8h0s12YHVta zs=RQs=~D9AOmR+p@&jo*|Kr9dZa!z0H(njud!M`Sb?vv!zxoQ%-fpTZy%^7Q28$BR zJ1*sM_zw4-rY%o5ZoL^VDg2hv_&0x3<)Slu_5ri!;Y-t^gz}YU&!#!FI_etPh2E3n#=l>5Qa?zFm diff --git a/mindspore/lite/micro/example/mnist_x86/src/net.c b/mindspore/lite/micro/example/mnist_x86/src/net.c index b128efafff..422a4d8845 100644 --- a/mindspore/lite/micro/example/mnist_x86/src/net.c +++ b/mindspore/lite/micro/example/mnist_x86/src/net.c @@ -37,12 +37,12 @@ int CopyOutputsData(void **outputs, int num) { if (num != 1) { 
return RET_ERROR; } - memcpy(outputs[0], g_Buffer+56, 40); + memcpy(outputs[0], g_Buffer + 32, 40); return RET_OK; } int GetBufferSize() { - return 40032; + return 39248; } int SetBuffer( void *buffer) { if (buffer == NULL) { @@ -62,108 +62,107 @@ void FreeResource() { } void Inference() { { -DoQuantizeFp32ToInt8((float *)(g_Input0), (int8_t *)(g_Buffer+0), 0.007874015718698501587, 0, 784, false); - } - { -memset((int16_t *)(g_Buffer+10928), 0, 2048); -memset((int16_t *)(g_Buffer+12976), 0, 256); -memset((int *)(g_Buffer+13232), 0, 6144); -memset((int8_t *)(g_Buffer+19376), 0, 8112); -memset((int16_t *)(g_Buffer+27488), 0, 12544); -QuantArg conv_param__quant_arg_in[1] = {{0.007874015718698501587, 0}}; -QuantArg conv_param__quant_arg_w[12] = {{0.003238174133002758026, -6}, {0.003890725085511803627, -8}, {0.003394871251657605171, -7}, {0.001685356837697327137, -127}, {0.004322394262999296188, 1}, {0.002274985425174236298, -56}, {0.003617759561166167259, 17}, {0.004447745624929666519, 23}, {0.004683905746787786484, 26}, {0.004021023400127887726, 24}, {0.005650237202644348145, 11}, {0.001966834301128983498, -84}}; -QuantArg conv_param__quant_arg_out[1] = {{0.01778890006244182587, 0}}; -double conv_param__real_multiplier[12] = {0.001433333970799530351, 0.001722176774828924938, 0.00150269379968211614, 0.0007460003866156953226, 0.001913249346122961134, 0.001006991503636309139, 0.001601352314486244018, 0.001968734305210294733, 0.002073267527210802957, 0.00177985160945266568, 0.002501001060249878095, 0.0008705926067589928779}; +memset((int16_t *)(g_Buffer + 10144), 0, 2048); +memset((int16_t *)(g_Buffer + 12192), 0, 256); +memset((int *)(g_Buffer + 12448), 0, 6144); +memset((int8_t *)(g_Buffer + 18592), 0, 8112); +memset((int16_t *)(g_Buffer + 26704), 0, 12544); +QuantArg conv_param__quant_arg_in[1] = {{0.003921568859368562698, -128}}; +QuantArg conv_param__quant_arg_w[12] = {{0.005689438898116350174, 0}, {0.006241692230105400085, 0}, {0.007301395758986473083, 0}, {0.005148916970938444138, 0}, {0.005132303573191165924, 0}, {0.004976313561201095581, 0}, {0.00564815988764166832, 0}, {0.002269793068990111351, 0}, {0.0030086529441177845, 0}, {0.005234404932707548141, 0}, {0.007580270525068044662, 0}, {0.004589735530316829681, 0}}; +QuantArg conv_param__quant_arg_out[1] = {{0.01811622083187103271, 17}}; +double conv_param__real_multiplier[12] = {0.001231577267748737653, 0.001351122051282624588, 0.00158051323770531417, 0.001114571969708069233, 0.001110975704014940469, 0.001077209041359399825, 0.001222641776980984765, 0.0004913359221160916793, 0.0006512749113606706042, 0.001133077320583530554, 0.001640880438584302065, 0.0009935275121536731122}; int conv_param__left_shift[12] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; -int conv_param__right_shift[12] = {-9, -9, -9, -10, -9, -9, -9, -8, -8, -9, -8, -10}; -int conv_param__quant_multiplier[12] = {1575967367, 1893553389, 1652229306, 1640472199, 2103639903, 1107198867, 1760705490, 1082323130, 1139790877, 1956967540, 1374939873, 1914453388}; -int conv_param__out_act_min[1] = {0}; +int conv_param__right_shift[12] = {-9, -9, -9, -9, -9, -9, -9, -10, -10, -9, -9, -9}; +int conv_param__quant_multiplier[12] = {1354133526, 1485574406, 1737792683, 1225484841, 1221530705, 1184403867, 1344308850, 1080459119, 1432168676, 1245831689, 1804167122, 1092395052}; +int conv_param__out_act_min[1] = {-128}; int conv_param__out_act_max[1] = {127}; -ConvQuantArg conv_param__conv_quant_arg = {(RoundingMode)(1), 2, conv_param__quant_arg_in, conv_param__quant_arg_w, 
conv_param__quant_arg_out, conv_param__real_multiplier, conv_param__left_shift, conv_param__right_shift, conv_param__quant_multiplier, conv_param__out_act_min, conv_param__out_act_max, 1, 12, 1, 2}; +ConvQuantArg conv_param__conv_quant_arg = {(RoundingMode)(2), 2, conv_param__quant_arg_in, conv_param__quant_arg_w, conv_param__quant_arg_out, conv_param__real_multiplier, conv_param__left_shift, conv_param__right_shift, conv_param__quant_multiplier, conv_param__out_act_min, conv_param__out_act_max, 1, 12, 1, 2}; int thread_num = MSMIN(g_thread_num, 26); -ConvParameter conv_param_ = {{ "", 35, g_thread_num}, conv_param__conv_quant_arg, 3, 3, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 28, 28, 1, 1, 26, 26, 12, thread_num, 0, 0, (PadMode)(2), (ActType)(1), 0, 0, 0}; -PackInputToC8Int8((int8_t *)(g_Buffer+0), (int16_t *)(g_Buffer+27488), &conv_param_); -Conv3x3Int8((int16_t *)(g_Buffer+27488), g_Weight10, g_Weight11, (int8_t *)(g_Buffer+784), (int16_t *)(g_Buffer+10928), (int16_t *)(g_Buffer+12976), (int *)(g_Buffer+13232), (int8_t *)(g_Buffer+19376), 0, &conv_param_); -PackNC4HW4ToNHWCInt8((int8_t *)(g_Buffer+19376), (int8_t *)(g_Buffer+784), 1, 676, 12); +ConvParameter conv_param_ = {{ "", true, 35, g_thread_num, 0}, conv_param__conv_quant_arg, 3, 3, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 28, 28, 1, 1, 26, 26, 12, thread_num, 0, 0, (PadMode)(2), (ActType)(0), 0, 0, 0}; +PackInputToC8Int8((int8_t *)(g_Input0), (int16_t *)(g_Buffer + 26704), &conv_param_); +Conv3x3Int8((int16_t *)(g_Buffer + 26704), g_Weight10, g_Weight11, (int8_t *)(g_Buffer + 0), (int16_t *)(g_Buffer + 10144), (int16_t *)(g_Buffer + 12192), (int *)(g_Buffer + 12448), (int8_t *)(g_Buffer + 18592), 0, &conv_param_); +PackNC4HW4ToNHWCInt8((int8_t *)(g_Buffer + 18592), (int8_t *)(g_Buffer + 0), 1, 676, 12); } { -static QuantArg pooling_parameter_quant_in = {0.01778890006244182587, 0}; -static QuantArg pooling_parameter_quant_out = {0.01778890006244182587, 0}; +static QuantArg pooling_parameter_quant_in = {0.01811622083187103271, 17}; +static QuantArg pooling_parameter_quant_out = {0.01811622083187103271, 17}; static QuantArg *pooling_parameter_quant[2] = { &pooling_parameter_quant_in, &pooling_parameter_quant_out}; -const PoolingParameter pooling_parameter = {{ "", 92, g_thread_num}, (PoolMode)(1), (RoundMode)(2), (PadMode)(2), (ActType)(0), 0, false, 2, 2, 2, 2, 26, 26, 1, 12, 13, 13, 1, 12, 0, 0, 0, 0, 0, pooling_parameter_quant, false}; -MaxPoolingInt8((int8_t *)(g_Buffer+784), (int8_t *)(g_Buffer+8896), (PoolingParameter *)&pooling_parameter, 0); +const PoolingParameter pooling_parameter = {{ "", true, 92, g_thread_num, 0}, (PoolMode)(1), (RoundMode)(2), (PadMode)(2), (ActType)(0), 0, false, 2, 2, 2, 2, 26, 26, 1, 12, 13, 13, 1, 12, 0, 0, 0, 0, 0, pooling_parameter_quant, false}; +MaxPoolingInt8((int8_t *)(g_Buffer + 0), (int8_t *)(g_Buffer + 8112), (PoolingParameter *)&pooling_parameter, 0); } { -memset((int16_t *)(g_Buffer+10928), 0, 4096); -memset((int16_t *)(g_Buffer+15024), 0, 256); -memset((int *)(g_Buffer+15280), 0, 6144); -memset((int8_t *)(g_Buffer+21424), 0, 1452); -memset((int16_t *)(g_Buffer+22876), 0, 5408); -QuantArg conv_param__quant_arg_in[1] = {{0.01778890006244182587, 0}}; -QuantArg conv_param__quant_arg_w[12] = {{0.005374609492719173431, 33}, {0.005837683100253343582, 22}, {0.004709810949862003326, -15}, {0.003726204857230186462, 27}, {0.00318551529198884964, -8}, {0.003453079145401716232, 50}, {0.004045850131660699844, -9}, {0.003903790842741727829, 30}, {0.004003710579127073288, -10}, {0.00560879148542881012, 27}, 
{0.005486610345542430878, -23}, {0.003554018214344978333, 4}}; -QuantArg conv_param__quant_arg_out[1] = {{0.07183934003114700317, 0}}; -double conv_param__real_multiplier[12] = {0.001330863973520378732, 0.001445530533608141606, 0.001166246148374064893, 0.0009226850783705293785, 0.0007887991893445710223, 0.0008550534992628172192, 0.001001835847923064193, 0.0009666590447744700769, 0.0009914011740411567478, 0.001388852288199173826, 0.00135859773990280961, 0.0008800481219728497088}; +memset((int16_t *)(g_Buffer + 10144), 0, 4096); +memset((int16_t *)(g_Buffer + 14240), 0, 256); +memset((int *)(g_Buffer + 14496), 0, 6144); +memset((int8_t *)(g_Buffer + 20640), 0, 1452); +memset((int16_t *)(g_Buffer + 22092), 0, 5408); +QuantArg conv_param__quant_arg_in[1] = {{0.01811622083187103271, 17}}; +QuantArg conv_param__quant_arg_w[12] = {{0.006381968967616558075, 0}, {0.005092236679047346115, 0}, {0.004954888485372066498, 0}, {0.007594361435621976852, 0}, {0.006317862775176763535, 0}, {0.004739056341350078583, 0}, {0.004733041394501924515, 0}, {0.005125139374285936356, 0}, {0.005773660261183977127, 0}, {0.007067613303661346436, 0}, {0.00728381425142288208, 0}, {0.004714466165751218796, 0}}; +QuantArg conv_param__quant_arg_out[1] = {{0.118615470826625824, 31}}; +double conv_param__real_multiplier[12] = {0.0009747224012760375951, 0.0007777407468524931162, 0.0007567634496453238277, 0.001159891919861241348, 0.0009649314419479496259, 0.0007237992569070154231, 0.0007228806183814449719, 0.0007827659621256170689, 0.0008818150205007141765, 0.001079441365823280083, 0.001112461807995879974, 0.0007200436103814696152}; int conv_param__left_shift[12] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; -int conv_param__right_shift[12] = {-9, -9, -9, -10, -10, -10, -9, -10, -9, -9, -9, -10}; -int conv_param__quant_multiplier[12] = {1463300414, 1589377630, 1282301201, 2029005945, 1734587761, 1880282530, 1101530164, 2125705720, 1090057119, 1527059240, 1493794012, 1935246286}; -int conv_param__out_act_min[1] = {0}; +int conv_param__right_shift[12] = {-10, -10, -10, -9, -10, -10, -10, -10, -10, -9, -9, -10}; +int conv_param__quant_multiplier[12] = {2143437228, 1710269989, 1664140425, 1275314653, 2121906681, 1591651398, 1589631291, 1721320554, 1939131737, 1186858333, 1223164693, 1583392644}; +int conv_param__out_act_min[1] = {-128}; int conv_param__out_act_max[1] = {127}; ConvQuantArg conv_param__conv_quant_arg = {(RoundingMode)(1), 2, conv_param__quant_arg_in, conv_param__quant_arg_w, conv_param__quant_arg_out, conv_param__real_multiplier, conv_param__left_shift, conv_param__right_shift, conv_param__quant_multiplier, conv_param__out_act_min, conv_param__out_act_max, 1, 12, 1, 2}; int thread_num = MSMIN(g_thread_num, 11); -ConvParameter conv_param_ = {{ "", 35, g_thread_num}, conv_param__conv_quant_arg, 3, 3, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 13, 13, 12, 1, 11, 11, 12, thread_num, 0, 0, (PadMode)(2), (ActType)(1), 0, 0, 0}; -PackInputToC8Int8((int8_t *)(g_Buffer+8896), (int16_t *)(g_Buffer+22876), &conv_param_); -Conv3x3Int8((int16_t *)(g_Buffer+22876), g_Weight12, g_Weight13, (int8_t *)(g_Buffer+0), (int16_t *)(g_Buffer+10928), (int16_t *)(g_Buffer+15024), (int *)(g_Buffer+15280), (int8_t *)(g_Buffer+21424), 0, &conv_param_); -PackNC4HW4ToNHWCInt8((int8_t *)(g_Buffer+21424), (int8_t *)(g_Buffer+0), 1, 121, 12); +ConvParameter conv_param_ = {{ "", true, 35, g_thread_num, 0}, conv_param__conv_quant_arg, 3, 3, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 13, 13, 12, 1, 11, 11, 12, thread_num, 0, 0, (PadMode)(2), (ActType)(0), 0, 0, 0}; 
+PackInputToC8Int8((int8_t *)(g_Buffer + 8112), (int16_t *)(g_Buffer + 22092), &conv_param_); +Conv3x3Int8((int16_t *)(g_Buffer + 22092), g_Weight12, g_Weight13, (int8_t *)(g_Buffer + 0), (int16_t *)(g_Buffer + 10144), (int16_t *)(g_Buffer + 14240), (int *)(g_Buffer + 14496), (int8_t *)(g_Buffer + 20640), 0, &conv_param_); +PackNC4HW4ToNHWCInt8((int8_t *)(g_Buffer + 20640), (int8_t *)(g_Buffer + 0), 1, 121, 12); } { -static QuantArg pooling_parameter_quant_in = {0.07136065512895584106, 0}; -static QuantArg pooling_parameter_quant_out = {0.07136065512895584106, 0}; +static QuantArg pooling_parameter_quant_in = {0.118615470826625824, 31}; +static QuantArg pooling_parameter_quant_out = {0.118615470826625824, 31}; static QuantArg *pooling_parameter_quant[2] = { &pooling_parameter_quant_in, &pooling_parameter_quant_out}; -const PoolingParameter pooling_parameter = {{ "", 92, g_thread_num}, (PoolMode)(1), (RoundMode)(2), (PadMode)(2), (ActType)(0), 0, false, 2, 2, 2, 2, 11, 11, 1, 12, 5, 5, 1, 12, 0, 0, 0, 0, 0, pooling_parameter_quant, false}; -MaxPoolingInt8((int8_t *)(g_Buffer+0), (int8_t *)(g_Buffer+1456), (PoolingParameter *)&pooling_parameter, 0); +const PoolingParameter pooling_parameter = {{ "", true, 92, g_thread_num, 0}, (PoolMode)(1), (RoundMode)(2), (PadMode)(2), (ActType)(0), 0, false, 2, 2, 2, 2, 11, 11, 1, 12, 5, 5, 1, 12, 0, 0, 0, 0, 0, pooling_parameter_quant, false}; +MaxPoolingInt8((int8_t *)(g_Buffer + 0), (int8_t *)(g_Buffer + 1456), (PoolingParameter *)&pooling_parameter, 0); } { -const ReshapeQuantArg reshape_quant_arg = {{0.07136065512895584106, 0}, {0.07136065512895584106, 0}, -128, 127}; -Int8Reshape((int8_t *)(g_Buffer+1456), (int8_t *)(g_Buffer+0), 300, reshape_quant_arg); +const ReshapeQuantArg reshape_quant_arg = {{0.118615470826625824, 31}, {0.118615470826625824, 31}, -128, 127}; +Int8Reshape((int8_t *)(g_Buffer + 1456), (int8_t *)(g_Buffer + 0), 300, reshape_quant_arg); } { -int32_t tmp_weight_zp = 1; -RowMajor2Row16x4MajorInt8((int8_t *)(g_Buffer+0)+0, (int8_t *)(g_Buffer+10928), 1, 300); -CalcInputSums((int8_t *)(g_Buffer+0)+0, 1, 300, tmp_weight_zp, (int *)(g_Buffer+12144), RowMajor); -float filter_scale[20] = {0.003479549195617437363, 0.004490676335990428925, 0.004529818892478942871, 0.002983231563121080399, 0.003455155529081821442, 0.003223794745281338692, 0.003272445406764745712, 0.003801185870543122292, 0.003679843153804540634, 0.003040234791114926338, 0.003704284550622105598, 0.003355232765898108482, 0.002904496388509869576, 0.003024494973942637444, 0.002794801956042647362, 0.004355110693722963333, 0.003499472280964255333, 0.004184196703135967255, 0.003057289868593215942, 0.003264668164774775505}; -int filter_zp[20] = {1, 12, 3, 2, -10, -5, -11, 5, 12, 22, 16, 1, -5, 15, 13, 5, -10, -5, -6, 0}; -int left_shift[20] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; -int right_shift[20] = {-10, -9, -9, -10, -10, -10, -10, -9, -9, -10, -9, -10, -10, -10, -10, -9, -10, -9, -10, -10}; -int multiplier[20] = {2108215049, 1360422072, 1372280070, 1807502393, 2093435146, 1953256619, 1982733521, 1151545365, 1114785262, 1842040025, 1122189669, 2032893316, 1759797843, 1832503464, 1693335354, 1319353429, 2120286176, 1267576078, 1852373503, 1978021333}; -const MatmulQuantParameter matmul_quant_parameter = {{0.07136065512895584106, 0}, {0, 0}, {0.258998185396194458, 0}, -128, 127, filter_scale, filter_zp, left_shift, right_shift, multiplier}; -int32_t *cur_left = matmul_quant_parameter.left_shift_ + 0; -int32_t *cur_right = matmul_quant_parameter.right_shift_ 
+ 0; -int32_t *cur_mul = matmul_quant_parameter.quant_multiplier_ + 0; -int32_t *cur_zp = matmul_quant_parameter.filter_zp_ + 0; -MatmulInt8Opt((int8_t *)(g_Buffer+10928), g_Weight15+0 + 0, (int8_t *)(g_Buffer+304)+0+0, 1, 20, 304, (int *)(g_Buffer+12144), g_Weight16+0, -128, 127, 0, cur_mul, cur_left, cur_right, 20, true, cur_zp); +int32_t tmp_weight_zp = 0; +RowMajor2Row16x4MajorInt8((int8_t *)(g_Buffer + 0)+0, (int8_t *)(g_Buffer + 10144), 1, 300); +CalcInputSums((int8_t *)(g_Buffer + 0)+0, 1, 300, tmp_weight_zp, (int *)(g_Buffer + 11360), RowMajor); +float filter_scale[1] = {0.007667620200663805008}; +int filter_zp[1] = {0}; +int left_shift[1] = {0}; +int right_shift[1] = {-8}; +int multiplier[1] = {1379728867}; +const MatmulQuantParameter matmul_quant_parameter = {{0.118615470826625824, 31}, {0, 0}, {0.3623915016651153564, 11}, -128, 127, filter_scale, filter_zp, left_shift, right_shift, multiplier}; +int32_t *cur_left = matmul_quant_parameter.left_shift_; +int32_t *cur_right = matmul_quant_parameter.right_shift_; +int32_t *cur_mul = matmul_quant_parameter.quant_multiplier_ ; +int32_t *cur_zp = matmul_quant_parameter.filter_zp_ ; +MatmulInt8Opt((int8_t *)(g_Buffer + 10144), g_Weight15+0 + 0, (int8_t *)(g_Buffer + 304)+0+0, 1, 20, 304, (int *)(g_Buffer + 11360), g_Weight16+0, -128, 127, 11, cur_mul, cur_left, cur_right, 20, false, cur_zp); } { -int32_t tmp_weight_zp = 1; -RowMajor2Row16x4MajorInt8((int8_t *)(g_Buffer+304)+0, (int8_t *)(g_Buffer+10928), 1, 20); -CalcInputSums((int8_t *)(g_Buffer+304)+0, 1, 20, tmp_weight_zp, (int *)(g_Buffer+11056), RowMajor); -float filter_scale[10] = {0.004678330849856138229, 0.005127115640789270401, 0.00471437256783246994, 0.004531511571258306503, 0.005476122256368398666, 0.004348111804574728012, 0.004803542047739028931, 0.006081215571612119675, 0.004532597027719020844, 0.004762654658406972885}; -int filter_zp[10] = {7, -2, 9, 2, -6, 21, 16, 10, -19, 8}; -int left_shift[10] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; -int right_shift[10] = {-8, -8, -8, -8, -8, -8, -8, -8, -8, -8}; -int multiplier[10] = {1242805482, 1362025788, 1252380041, 1203802750, 1454739904, 1155082292, 1276068015, 1615483838, 1204091115, 1265206260}; -const MatmulQuantParameter matmul_quant_parameter = {{0.258998185396194458, 0}, {0, 0}, {0.5359870791435241699, 0}, -128, 127, filter_scale, filter_zp, left_shift, right_shift, multiplier}; -int32_t *cur_left = matmul_quant_parameter.left_shift_ + 0; -int32_t *cur_right = matmul_quant_parameter.right_shift_ + 0; -int32_t *cur_mul = matmul_quant_parameter.quant_multiplier_ + 0; -int32_t *cur_zp = matmul_quant_parameter.filter_zp_ + 0; -MatmulInt8Opt((int8_t *)(g_Buffer+10928), g_Weight18+0 + 0, (int8_t *)(g_Buffer+0)+0+0, 1, 10, 32, (int *)(g_Buffer+11056), g_Weight19+0, -128, 127, 0, cur_mul, cur_left, cur_right, 10, true, cur_zp); +int32_t tmp_weight_zp = 0; +RowMajor2Row16x4MajorInt8((int8_t *)(g_Buffer + 304)+0, (int8_t *)(g_Buffer + 10144), 1, 20); +CalcInputSums((int8_t *)(g_Buffer + 304)+0, 1, 20, tmp_weight_zp, (int *)(g_Buffer + 10272), RowMajor); +float filter_scale[1] = {0.006908571347594261169}; +int filter_zp[1] = {0}; +int left_shift[1] = {0}; +int right_shift[1] = {-8}; +int multiplier[1] = {1282256865}; +const MatmulQuantParameter matmul_quant_parameter = {{0.3623915016651153564, 11}, {0, 0}, {1.073398709297180176, -20}, -128, 127, filter_scale, filter_zp, left_shift, right_shift, multiplier}; +int32_t *cur_left = matmul_quant_parameter.left_shift_; +int32_t *cur_right = matmul_quant_parameter.right_shift_; +int32_t *cur_mul = 
matmul_quant_parameter.quant_multiplier_ ; +int32_t *cur_zp = matmul_quant_parameter.filter_zp_ ; +MatmulInt8Opt((int8_t *)(g_Buffer + 10144), g_Weight18+0 + 0, (int8_t *)(g_Buffer + 0)+0+0, 1, 10, 32, (int *)(g_Buffer + 10272), g_Weight19+0, -128, 127, -20, cur_mul, cur_left, cur_right, 10, false, cur_zp); } { -DoDequantizeInt8ToFp32((int8_t *)(g_Buffer+0), (float *)(g_Buffer+16), 0.5359870791435241699, 0, 10); +const SoftmaxQuantArg quant_args = {{1.073398709297180176, 20}, {0.00390625, -128}, -128, 127, 1152553088, 27, 27}; +const SoftmaxParameter softmax_parameter = {{ "", true, 138, g_thread_num, 0}, 1, {1, 10}, 10, 2}; +memset((int *)(g_Buffer + 10144), 0, 40); +memset((int *)(g_Buffer + 10184), 0, 40); +SoftmaxInt8((int8_t *)(g_Buffer + 0), (int8_t *)(g_Buffer + 16), 1, (int *)(g_Buffer + 10144), (int *)(g_Buffer + 10184), quant_args, (SoftmaxParameter *)&softmax_parameter); } { -const SoftmaxParameter softmax_parameter = {{ "", 138, g_thread_num}, 1, {1, 10}, 10, 2}; -memset((float *)(g_Buffer+10928), 0, 4); -Softmax((float *)(g_Buffer+16), (float *)(g_Buffer+56), (float *)(g_Buffer+10928), &softmax_parameter); +DoDequantizeInt8ToFp32((int8_t *)(g_Buffer + 16), (float *)(g_Buffer + 32), 0.00390625, -128, 10); } } diff --git a/mindspore/lite/micro/example/mnist_x86/src/net.cmake b/mindspore/lite/micro/example/mnist_x86/src/net.cmake index ca68e45c83..db04299e56 100644 --- a/mindspore/lite/micro/example/mnist_x86/src/net.cmake +++ b/mindspore/lite/micro/example/mnist_x86/src/net.cmake @@ -4,7 +4,6 @@ set(OP_SRC common_func_int8.c.o conv3x3_int8.c.o conv_int8.c.o - exp_fp32.c.o fixed_point.c.o matmul_int8.c.o matmul_int8_wrapper.c.o @@ -12,7 +11,7 @@ set(OP_SRC pooling_int8.c.o quant_dtype_cast_int8.c.o reshape_int8.c.o - softmax_fp32.c.o + softmax_int8.c.o weight.c.o net.c.o session.cc.o diff --git a/mindspore/lite/micro/example/mnist_x86/src/session.cc b/mindspore/lite/micro/example/mnist_x86/src/session.cc index 68c48116af..9774db8c65 100644 --- a/mindspore/lite/micro/example/mnist_x86/src/session.cc +++ b/mindspore/lite/micro/example/mnist_x86/src/session.cc @@ -30,14 +30,14 @@ int LiteSession::CompileGraph(lite::Model *model) { in_shape_0[1] = 28; in_shape_0[2] = 28; in_shape_0[3] = 1; - inputs_[0] = new (std::nothrow) MTensor(String("graph_input-0"), kNumberTypeFloat32, in_shape_0); + inputs_[0] = new (std::nothrow) MTensor(String("graph_input-0"), kNumberTypeInt8, in_shape_0); MS_ERROR_IF_NULL(inputs_[0]); outputs_.resize(1); Vector out_shape_0; out_shape_0.resize(2); out_shape_0[0] = 1; out_shape_0[1] = 10; - outputs_[0] = new (std::nothrow) MTensor(String("Softmax-7"), kNumberTypeFloat32, out_shape_0); + outputs_[0] = new (std::nothrow) MTensor(String("int8toft32_Softmax-7_post0/output-0"), kNumberTypeFloat32, out_shape_0); MS_ERROR_IF_NULL(outputs_[0]); int ret = Init(model->buf, static_cast(model)->buf_size()); return ret; @@ -126,7 +126,6 @@ mindspore::tensor::MSTensor *LiteSession::GetOutputByTensorName(const String &te } return nullptr; } - } // namespace lite session::LiteSession *session::LiteSession::CreateSession(const lite::Context *context) { auto *session = new (std::nothrow) lite::LiteSession(); diff --git a/mindspore/lite/micro/example/mnist_x86/src/session.h b/mindspore/lite/micro/example/mnist_x86/src/session.h index 0c4f085091..6a4e628140 100644 --- a/mindspore/lite/micro/example/mnist_x86/src/session.h +++ b/mindspore/lite/micro/example/mnist_x86/src/session.h @@ -78,6 +78,7 @@ class LiteSession : public session::LiteSession { Vector outputs_; void 
*runtime_buffer_; }; + } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/micro/example/mnist_x86/src/tensor.h b/mindspore/lite/micro/example/mnist_x86/src/tensor.h index 6c49a322f2..dbd9302c5d 100644 --- a/mindspore/lite/micro/example/mnist_x86/src/tensor.h +++ b/mindspore/lite/micro/example/mnist_x86/src/tensor.h @@ -59,7 +59,6 @@ class MTensor : public mindspore::tensor::MSTensor { void *data_ = nullptr; Vector quant_params_; }; - } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/micro/example/mnist_x86/src/weight.c b/mindspore/lite/micro/example/mnist_x86/src/weight.c index 8d97badd6c..c6f319e66b 100644 --- a/mindspore/lite/micro/example/mnist_x86/src/weight.c +++ b/mindspore/lite/micro/example/mnist_x86/src/weight.c @@ -29,6 +29,10 @@ int32_t *g_Weight16 = NULL; int32_t *g_Weight17 = NULL; int8_t *g_Weight18 = NULL; int32_t *g_Weight19 = NULL; +int8_t g_Weight6[6000]; +int32_t g_Weight7[20]; +int8_t g_Weight8[200]; +int32_t g_Weight9[10]; int Init(void *weight_buffer, int weight_size) { if (weight_buffer == NULL) { @@ -39,19 +43,19 @@ int Init(void *weight_buffer, int weight_size) { size_t size; size_t offset; }; - int8_t *g_Weight6 = (weight_buffer + 9312); - int32_t *g_Weight7 = (weight_buffer + 15312); - int8_t *g_Weight8 = (weight_buffer + 15392); - int32_t *g_Weight9 = (weight_buffer + 15592); struct ModelParameter model_params[] = { {g_Weight10, 3072, 0}, {g_Weight11, 48, 3072}, {g_Weight12, 6144, 3120}, {g_Weight13, 48, 9264}, + {g_Weight6, 6000, 9312}, + {g_Weight7, 80, 15312}, + {g_Weight8, 200, 15392}, + {g_Weight9, 40, 15592}, }; - for(int i = 0; i < 4; ++i) { + for(int i = 0; i < 8; ++i) { if (model_params[i].offset + model_params[i].size > weight_size) { return RET_ERROR; } @@ -74,8 +78,8 @@ if (g_Weight15 == NULL) { return RET_ERROR; } memset(g_Weight15, 0, 6080); -int init_filter_zp[20] = {1, 12, 3, 2, -10, -5, -11, 5, 12, 22, 16, 1, -5, 15, 13, 5, -10, -5, -6, 0}; -InitInt8MatrixB(g_Weight6, g_Weight16, g_Weight15, 1, 300, 20, 20, 304, 0, init_filter_zp, g_Weight14, true, true); +int init_filter_zp[1] = {0}; +InitInt8MatrixB(g_Weight6, g_Weight16, g_Weight15, 1, 300, 20, 20, 304, 31, init_filter_zp, g_Weight14, true, false); } { g_Weight17 = malloc(48); @@ -94,8 +98,8 @@ if (g_Weight18 == NULL) { return RET_ERROR; } memset(g_Weight18, 0, 384); -int init_filter_zp[10] = {7, -2, 9, 2, -6, 21, 16, 10, -19, 8}; -InitInt8MatrixB(g_Weight8, g_Weight19, g_Weight18, 1, 20, 10, 12, 32, 0, init_filter_zp, g_Weight17, true, true); +int init_filter_zp[1] = {0}; +InitInt8MatrixB(g_Weight8, g_Weight19, g_Weight18, 1, 20, 10, 12, 32, 11, init_filter_zp, g_Weight17, true, false); } return RET_OK; } diff --git a/mindspore/lite/micro/example/mnist_x86/src/weight.h b/mindspore/lite/micro/example/mnist_x86/src/weight.h index 7be657370a..e56fb5223d 100644 --- a/mindspore/lite/micro/example/mnist_x86/src/weight.h +++ b/mindspore/lite/micro/example/mnist_x86/src/weight.h @@ -17,7 +17,6 @@ #include "nnacl/common_func.h" #include "nnacl/errorcode.h" -#include "nnacl/fp32/softmax_fp32.h" #include "nnacl/int8/common_func_int8.h" #include "nnacl/int8/conv3x3_int8.h" #include "nnacl/int8/conv_int8.h" @@ -25,6 +24,7 @@ #include "nnacl/int8/pooling_int8.h" #include "nnacl/int8/quant_dtype_cast_int8.h" #include "nnacl/int8/reshape_int8.h" +#include "nnacl/int8/softmax_int8.h" #include "wrapper/int8/matmul_int8_wrapper.h" #include #include @@ -45,3 +45,7 @@ extern int32_t *g_Weight16; extern int32_t *g_Weight17; extern int8_t *g_Weight18; extern int32_t 
*g_Weight19;
+extern int8_t g_Weight6[];
+extern int32_t g_Weight7[];
+extern int8_t g_Weight8[];
+extern int32_t g_Weight9[];
diff --git a/mindspore/lite/micro/example/mobilenetv2/README.md b/mindspore/lite/micro/example/mobilenetv2/README.md
index 353dcdf30d..50bc6edc0f 100755
--- a/mindspore/lite/micro/example/mobilenetv2/README.md
+++ b/mindspore/lite/micro/example/mobilenetv2/README.md
@@ -55,12 +55,26 @@ Use codegen to compile the [MobileNetv2 model](https://download.mindspore.cn/model_zoo/officia
 
 Before building this project, obtain the [Release package](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html) for the Android platform in advance.
 
-The operator static library directory is as follows:
-
-```bash
-├── operator_library            # operator library directory for the target platform
-    ├── include                 # operator library headers for the target platform
-    └── lib                     # operator static libraries for the target platform
+The directory structure of the Android Release package is as follows:
+```text
+mindspore-lite-{version}-inference-android-{arch}
+├── inference
+│   ├── include                      # inference framework headers
+│   ├── lib                          # inference framework libraries
+│   │   ├── libmindspore-lite.a      # static library of the MindSpore Lite inference framework
+│   │   └── libmindspore-lite.so     # shared library of the MindSpore Lite inference framework
+│   ├── minddata                     # image processing library
+│   │   ├── include
+│   │   └── lib
+│   │       └── libminddata-lite.so  # image processing shared library
+│   └── third_party                  # NPU libraries
+│       └── hiai_ddk
+└── tools
+    ├── benchmark                    # benchmark tool
+    │   └── benchmark
+    └── codegen                      # code generation tool
+        ├── include                  # operator headers
+        └── lib                      # operator static libraries
 ```
 
 The generated code project directory is as follows:
@@ -68,7 +82,6 @@ Use codegen to compile the [MobileNetv2 model](https://download.mindspore.cn/model_zoo/officia
 ```bash
 ├── mobilenetv2                 # root directory of the generated code
     ├── benchmark               # benchmark directory of the generated code
-    ├── include                 # headers exposed by the model inference code
    └── src                      # model inference code directory
 ```
 
@@ -91,7 +104,7 @@ cmake -DCMAKE_BUILD_TYPE=Release \
 -DANDROID_TOOLCHAIN_NAME="aarch64-linux-android-clang" \
 -DANDROID_NATIVE_API_LEVEL="19" \
 -DPLATFORM_ARM64=ON \
--DPKG_PATH={path to}/mindspore-lite-{version}-inference-android ..
+-DPKG_PATH={path to}/mindspore-lite-{version}-inference-android-{arch} ..
 make
 ```
 
@@ -104,7 +117,7 @@ cmake -DCMAKE_BUILD_TYPE=Release \
 -DANDROID_TOOLCHAIN_NAME="clang" \
 -DANDROID_NATIVE_API_LEVEL="19" \
 -DMICRO_BUILD_ARM32=ON \
--DPKG_PATH={path to}/mindspore-lite-{version}-inference-android ..
+-DPKG_PATH={path to}/mindspore-lite-{version}-inference-android-{arch} ..
 make
 ```
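
As a follow-up to the cross-compilation steps above, the sketch below shows one possible way to deploy and run the resulting benchmark on a connected Android device. It is a minimal sketch only: the binary name `benchmark`, the on-device directory `/data/local/tmp/mobilenetv2`, and the input file name `mobilenetv2_input.bin` are assumptions, and the exact command-line arguments depend on the benchmark generated by codegen (check its usage output).

```bash
# Assumptions: the build above produced ./benchmark plus the generated src/net.bin,
# and an Android device is reachable via adb. File names and paths are illustrative.
adb shell mkdir -p /data/local/tmp/mobilenetv2
adb push benchmark /data/local/tmp/mobilenetv2/
adb push mobilenetv2_input.bin /data/local/tmp/mobilenetv2/   # sample input (hypothetical name)
adb push src/net.bin /data/local/tmp/mobilenetv2/             # model weights produced by codegen
adb shell "cd /data/local/tmp/mobilenetv2 && chmod +x benchmark && ./benchmark mobilenetv2_input.bin net.bin"
```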