From 300896ed4718e29ae2aae6198b836b6df2f9974e Mon Sep 17 00:00:00 2001
From: Balint Cristian
Date: Thu, 26 Nov 2015 06:55:47 +0200
Subject: [PATCH] Add HDF module & implement HDF version 5 I/O.

---
 modules/README.md                        |    2 +
 modules/hdf/CMakeLists.txt               |   21 +
 modules/hdf/README.md                    |    4 +
 modules/hdf/doc/pics/hdfview_demo.gif    |  Bin 0 -> 34574 bytes
 modules/hdf/include/opencv2/hdf.hpp      |   54 ++
 modules/hdf/include/opencv2/hdf/hdf5.hpp |  681 ++++++++++++++
 modules/hdf/src/hdf5.cpp                 | 1051 ++++++++++++++++++++++
 modules/hdf/src/precomp.hpp              |   43 +
 8 files changed, 1856 insertions(+)
 create mode 100644 modules/hdf/CMakeLists.txt
 create mode 100644 modules/hdf/README.md
 create mode 100644 modules/hdf/doc/pics/hdfview_demo.gif
 create mode 100644 modules/hdf/include/opencv2/hdf.hpp
 create mode 100644 modules/hdf/include/opencv2/hdf/hdf5.hpp
 create mode 100644 modules/hdf/src/hdf5.cpp
 create mode 100644 modules/hdf/src/precomp.hpp

diff --git a/modules/README.md b/modules/README.md
index dbdbfe19a..46b6530b4 100644
--- a/modules/README.md
+++ b/modules/README.md
@@ -53,3 +53,5 @@ $ cmake -D OPENCV_EXTRA_MODULES_PATH=<opencv_contrib>/modules -D BUILD_opencv_re
 22. **opencv_xphoto**: Additional photo processing algorithms: Color balance / Denoising / Inpainting.
 
 23. **opencv_stereo**: Stereo Correspondence done with different descriptors: Census / CS-Census / MCT / BRIEF / MV.
+
+24. **opencv_hdf**: Hierarchical Data Format I/O.
diff --git a/modules/hdf/CMakeLists.txt b/modules/hdf/CMakeLists.txt
new file mode 100644
index 000000000..9bba8b48a
--- /dev/null
+++ b/modules/hdf/CMakeLists.txt
@@ -0,0 +1,21 @@
+set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR})
+
+find_package(HDF5)
+if(HDF5_FOUND)
+  set(HAVE_HDF5 1)
+  message(STATUS "HDF5: YES")
+else()
+  ocv_module_disable(hdf)
+  message(STATUS "HDF5: NO")
+endif()
+
+if(${HDF5_FOUND})
+  include_directories(${HDF5_INCLUDE_DIRS})
+endif()
+
+set(the_description "Hierarchical Data Format I/O")
+ocv_define_module(hdf opencv_core WRAP python)
+
+if(${HDF5_FOUND})
+  target_link_libraries(opencv_hdf ${HDF5_LIBRARIES})
+endif()
diff --git a/modules/hdf/README.md b/modules/hdf/README.md
new file mode 100644
index 000000000..92c1a8484
--- /dev/null
+++ b/modules/hdf/README.md
@@ -0,0 +1,4 @@
+HDF I/O
+============================================================
+
+The module contains I/O routines for Hierarchical Data Formats.
diff --git a/modules/hdf/doc/pics/hdfview_demo.gif b/modules/hdf/doc/pics/hdfview_demo.gif
new file mode 100644
index 0000000000000000000000000000000000000000..410bdf59a55592fca970cc731119d41ae0cb501b
GIT binary patch
literal 34574
[34574 bytes of base85-encoded binary GIF payload omitted -- HDFView demo screenshot]

literal 0
HcmV?d00001

diff --git a/modules/hdf/include/opencv2/hdf.hpp b/modules/hdf/include/opencv2/hdf.hpp
new file mode 100644
index 000000000..4ca6d11bb
--- /dev/null
+++ b/modules/hdf/include/opencv2/hdf.hpp
@@ -0,0 +1,54 @@
+/*********************************************************************
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2015
+ * Balint Cristian
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of the copyright holders nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *********************************************************************/
+
+#ifndef __OPENCV_HDF_HPP__
+#define __OPENCV_HDF_HPP__
+
+#include "opencv2/hdf/hdf5.hpp"
+
+/** @defgroup hdf Hierarchical Data Format I/O routines
+
+This module provides storage routines for Hierarchical Data Format objects.
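+
+A minimal round-trip sketch (the file name 'mytest.h5' is just an example; cv::hdf::open
+autocreates the file, and dswrite autocreates the dataset):
+@code{.cpp}
+// open / autocreate hdf5 file
+cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+// write a small matrix, then read it back
+cv::Mat M = cv::Mat::eye( 3, 3, CV_32F );
+h5io->dswrite( M, "eye" );
+cv::Mat R;
+h5io->dsread( R, "eye" );
+// release
+h5io->close();
+@endcode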
+
+  @{
+    @defgroup hdf5 Hierarchical Data Format version 5
+
+Hierarchical Data Format version 5
+--------------------------------------------------------
+
+
+  @}
+*/
+
+#endif
diff --git a/modules/hdf/include/opencv2/hdf/hdf5.hpp b/modules/hdf/include/opencv2/hdf/hdf5.hpp
new file mode 100644
index 000000000..504cca105
--- /dev/null
+++ b/modules/hdf/include/opencv2/hdf/hdf5.hpp
@@ -0,0 +1,681 @@
+/*********************************************************************
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2015
+ * Balint Cristian
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of the copyright holders nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *********************************************************************/
+
+#ifndef __OPENCV_HDF5_HPP__
+#define __OPENCV_HDF5_HPP__
+
+#include <vector>
+
+#include <opencv2/core.hpp>
+
+
+
+using namespace std;
+
+namespace cv
+{
+namespace hdf
+{
+
+//! @addtogroup hdf5
+//! @{
+
+
+/** @brief Hierarchical Data Format version 5 interface.
+
+Note that this module is compiled only when the HDF5 library is correctly installed.
+
+ */
+class CV_EXPORTS_W HDF5
+{
+public:
+
+    CV_WRAP enum
+    {
+      H5_UNLIMITED = -1, H5_NONE = -1, H5_GETDIMS = 100, H5_GETMAXDIMS = 101,
+    };
+
+    virtual ~HDF5() {}
+
+    /** @brief Close and release hdf5 object.
+     */
+    CV_WRAP virtual void close( ) = 0;
+
+    /** @brief Create a group.
+    @param grlabel specify the hdf5 group label.
+
+    Creates a hdf5 group.
+
+    @note Groups are useful for organising multiple datasets. It is possible to create subgroups within any group.
+    Existence of a particular group can be checked using hlexists(). For subgroups the label has the form
+    'Group1/SubGroup1', where SubGroup1 lives inside the root group Group1.
+
+    - In this example Group1 will have one subgroup labelled SubGroup1:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create Group1 if it does not exist
+      if ( ! h5io->hlexists( "Group1" ) )
+        h5io->grcreate( "Group1" );
+      else
+        printf("Group1 already created, skipping\n" );
+      // create SubGroup1 if it does not exist
+      if ( ! h5io->hlexists( "Group1/SubGroup1" ) )
+        h5io->grcreate( "Group1/SubGroup1" );
+      else
+        printf("SubGroup1 already created, skipping\n" );
+      // release
+      h5io->close();
+    @endcode
+
+    @note When a dataset is created with dscreate() or kpcreate(), it can be created right within a group by specifying
+    the full path within the label; in our example that would be 'Group1/SubGroup1/MyDataSet'. It is not thread safe.
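+
+    - As a sketch of the note above (the dataset name 'MyDataSet' is illustrative only),
+    a dataset can be created directly under a subgroup by using the full path as its label:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // make sure the group hierarchy exists
+      if ( ! h5io->hlexists( "Group1" ) )
+        h5io->grcreate( "Group1" );
+      if ( ! h5io->hlexists( "Group1/SubGroup1" ) )
+        h5io->grcreate( "Group1/SubGroup1" );
+      // create a dataset inside SubGroup1 using the full path as label
+      if ( ! h5io->hlexists( "Group1/SubGroup1/MyDataSet" ) )
+        h5io->dscreate( 10, 10, CV_32F, "Group1/SubGroup1/MyDataSet" );
+      // release
+      h5io->close();
+    @endcode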
h5io->hlexists( "Group1/SubGroup1" ) ) + h5io->grcreate( "Group1/SubGroup1" ); + else + printf("SubGroup1 already created, skipping\n" ); + // release + h5io->close(); + @endcode + + @note When a dataset is created with dscreate() or kpcreate() it can be created right within a group by specifying + full path within the label, in our example would be: 'Group1/SubGroup1/MyDataSet'. It is not thread safe. + */ + CV_WRAP virtual void grcreate( String grlabel ) = 0; + + /** @brief Check if label exists or not. + @param label specify the hdf5 dataset label. + + Returns **true** if dataset exists, and **false** if does not. + + @note Checks if dataset, group or other object type (hdf5 link) exists under the label name. It is thread safe. + */ + CV_WRAP virtual bool hlexists( String label ) const = 0; + + /* @overload */ + CV_WRAP virtual void dscreate( const int rows, const int cols, const int type, + String dslabel, const int compresslevel = HDF5::H5_NONE, + const vector& dims_chunks = vector() ) const = 0; + /** @brief Create and allocate storage for two dimensional single or multi channel dataset. + @param rows declare amount of rows + @param cols declare amount of cols + @param type type to be used + @param dslabel specify the hdf5 dataset label, any existing dataset with the same label will be overwritten. + @param compresslevel specify the compression level 0-9 to be used, by default H5_NONE means none at all. + @param dims_chunks each array member specify chunking sizes to be used for block i/o, + by default NULL means none at all. + + @note If the dataset already exists an exception will be thrown. + + - Existence of the dataset can be checked using hlexists(), see in this example: + @code{.cpp} + // open / autocreate hdf5 file + cv::Ptr h5io = cv::hdf::open( "mytest.h5" ); + // create space for 100x50 CV_64FC2 matrix + if ( ! h5io->hlexists( "hilbert" ) ) + h5io->dscreate( 100, 50, CV_64FC2, "hilbert" ); + else + printf("DS already created, skipping\n" ); + // release + h5io->close(); + @endcode + + @note Activating compression requires internal chunking. Chunking can significantly improve access + speed booth at read or write time especially for windowed access logic that shifts offset inside dataset. + If no custom chunking is specified default one will be invoked by the size of **whole** dataset + as single big chunk of data. + + - See example of level 9 compression using internal default chunking: + @code{.cpp} + // open / autocreate hdf5 file + cv::Ptr h5io = cv::hdf::open( "mytest.h5" ); + // create level 9 compressed space for CV_64FC2 matrix + if ( ! h5io->hlexists( "hilbert", 9 ) ) + h5io->dscreate( 100, 50, CV_64FC2, "hilbert", 9 ); + else + printf("DS already created, skipping\n" ); + // release + h5io->close(); + @endcode + + @note A value of H5_UNLIMITED for **rows** or **cols** or booth means **unlimited** data on the specified dimension, + thus is possible to expand anytime such dataset on row, col or booth directions. Presence of H5_UNLIMITED on any + dimension **require** to define custom chunking. No default chunking will be defined in unlimited scenario since + default size on that dimension will be zero, and will grow once dataset is written. Writing into dataset that have + H5_UNLIMITED on some of its dimension requires dsinsert() that allow growth on unlimited dimension instead of dswrite() + that allows to write only in predefined data space. 
+    */
+    CV_WRAP virtual bool hlexists( String label ) const = 0;
+
+    /* @overload */
+    CV_WRAP virtual void dscreate( const int rows, const int cols, const int type,
+             String dslabel, const int compresslevel = HDF5::H5_NONE,
+             const vector<int>& dims_chunks = vector<int>() ) const = 0;
+    /** @brief Create and allocate storage for two dimensional single or multichannel dataset.
+    @param rows declare amount of rows
+    @param cols declare amount of cols
+    @param type type to be used
+    @param dslabel specify the hdf5 dataset label, any existing dataset with the same label will be overwritten.
+    @param compresslevel specify the compression level 0-9 to be used, by default H5_NONE means none at all.
+    @param dims_chunks each array member specifies chunking sizes to be used for block i/o,
+           by default NULL means none at all.
+
+    @note If the dataset already exists an exception will be thrown.
+
+    - Existence of the dataset can be checked using hlexists(), see in this example:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create space for 100x50 CV_64FC2 matrix
+      if ( ! h5io->hlexists( "hilbert" ) )
+        h5io->dscreate( 100, 50, CV_64FC2, "hilbert" );
+      else
+        printf("DS already created, skipping\n" );
+      // release
+      h5io->close();
+    @endcode
+
+    @note Activating compression requires internal chunking. Chunking can significantly improve access
+    speed both at read and write time, especially for windowed access logic that shifts the offset inside the dataset.
+    If no custom chunking is specified, the default one will be invoked by the size of the **whole** dataset
+    as a single big chunk of data.
+
+    - See example of level 9 compression using internal default chunking:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create level 9 compressed space for CV_64FC2 matrix
+      if ( ! h5io->hlexists( "hilbert" ) )
+        h5io->dscreate( 100, 50, CV_64FC2, "hilbert", 9 );
+      else
+        printf("DS already created, skipping\n" );
+      // release
+      h5io->close();
+    @endcode
+
+    @note A value of H5_UNLIMITED for **rows** or **cols** or both means **unlimited** data on the specified dimension,
+    thus it is possible to expand such a dataset anytime in the row, col or both directions. Presence of H5_UNLIMITED on any
+    dimension **requires** custom chunking to be defined. No default chunking will be defined in the unlimited scenario, since
+    the default size on that dimension will be zero and will grow once the dataset is written. Writing into a dataset that has
+    H5_UNLIMITED on some of its dimensions requires dsinsert(), which allows growth on unlimited dimensions, instead of
+    dswrite(), which allows writing only in the predefined data space.
+
+    - Example below shows no compression but unlimited dimension on cols using 100x100 internal chunking:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create uncompressed space for CV_64FC2 matrix with unlimited cols
+      int chunks[2] = { 100, 100 };
+      h5io->dscreate( 100, cv::hdf::HDF5::H5_UNLIMITED, CV_64FC2, "hilbert", cv::hdf::HDF5::H5_NONE, chunks );
+      // release
+      h5io->close();
+    @endcode
+
+    @note It is **not** thread safe; it must be called only once at dataset creation, otherwise an exception will occur.
+    Multiple datasets inside a single hdf5 file are allowed.
+    */
+    CV_WRAP virtual void dscreate( const int rows, const int cols, const int type,
+             String dslabel, const int compresslevel = HDF5::H5_NONE, const int* dims_chunks = NULL ) const = 0;
+
+    /* @overload */
+    CV_WRAP virtual void dscreate( const vector<int>& sizes, const int type, String dslabel,
+             const int compresslevel = HDF5::H5_NONE, const vector<int>& dims_chunks = vector<int>() ) const = 0;
+    /** @brief Create and allocate storage for n-dimensional dataset, single or multichannel type.
+    @param n_dims declare number of dimensions
+    @param sizes array containing sizes for each dimensions
+    @param type type to be used
+    @param dslabel specify the hdf5 dataset label, any existing dataset with the same label will be overwritten.
+    @param compresslevel specify the compression level 0-9 to be used, by default H5_NONE means none at all.
+    @param dims_chunks each array member specifies chunking sizes to be used for block i/o,
+           by default NULL means none at all.
+    @note If the dataset already exists an exception will be thrown. Existence of the dataset can be checked
+    using hlexists().
+
+    - See example below that creates a 6 dimensional storage space:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create space for 6 dimensional CV_64FC2 matrix
+      if ( ! h5io->hlexists( "nddata" ) )
+      {
+        const int n_dims = 6;
+        int dsdims[n_dims] = { 100, 100, 20, 10, 5, 5 };
+        h5io->dscreate( n_dims, dsdims, CV_64FC2, "nddata" );
+      }
+      else
+        printf("DS already created, skipping\n" );
+      // release
+      h5io->close();
+    @endcode
+
+    @note Activating compression requires internal chunking. Chunking can significantly improve access
+    speed both at read and write time, especially for windowed access logic that shifts the offset inside the dataset.
+    If no custom chunking is specified, the default one will be invoked by the size of the **whole** dataset
+    as a single big chunk of data.
+
+    - See example of level 0 compression (shallow) using chunking against the first
+    dimension, thus the storage will consist of 100 chunks of data:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create space for 6 dimensional CV_64FC2 matrix
+      if ( ! h5io->hlexists( "nddata" ) )
+      {
+        const int n_dims = 6;
+        int dsdims[n_dims] = { 100, 100, 20, 10, 5, 5 };
+        int chunks[n_dims] = {   1, 100, 20, 10, 5, 5 };
+        h5io->dscreate( n_dims, dsdims, CV_64FC2, "nddata", 0, chunks );
+      }
+      else
+        printf("DS already created, skipping\n" );
+      // release
+      h5io->close();
+    @endcode
+
+    @note A value of H5_UNLIMITED inside the **sizes** array means **unlimited** data on that dimension, thus it is
+    possible to expand such a dataset anytime in those unlimited directions. Presence of H5_UNLIMITED on any dimension
+    **requires** custom chunking to be defined. No default chunking will be defined in the unlimited scenario, since the default
+    size on that dimension will be zero and will grow once the dataset is written. Writing into a dataset that has H5_UNLIMITED on
+    some of its dimensions requires dsinsert(), which allows growth on unlimited dimensions, instead of dswrite(),
+    which allows writing only in the predefined data space.
+
+    - Example below shows a 3 dimensional dataset using no compression with all unlimited sizes and one unit chunking:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      const int n_dims = 3;
+      int chunks[n_dims] = { 1, 1, 1 };
+      int dsdims[n_dims] = { cv::hdf::HDF5::H5_UNLIMITED, cv::hdf::HDF5::H5_UNLIMITED, cv::hdf::HDF5::H5_UNLIMITED };
+      h5io->dscreate( n_dims, dsdims, CV_64FC2, "nddata", cv::hdf::HDF5::H5_NONE, chunks );
+      // release
+      h5io->close();
+    @endcode
+    */
+    CV_WRAP virtual void dscreate( const int n_dims, const int* sizes, const int type,
+             String dslabel, const int compresslevel = HDF5::H5_NONE, const int* dims_chunks = NULL ) const = 0;
+
+    /** @brief Fetch dataset sizes
+    @param dslabel specify the hdf5 dataset label to be measured.
+    @param dims_flag will fetch dataset dimensions on H5_GETDIMS, and dataset maximum dimensions on H5_GETMAXDIMS.
+
+    Returns a vector object containing sizes of the dataset for each dimension.
+
+    @note The resulting vector size will match the number of dataset dimensions. By default H5_GETDIMS will return the
+    actual dataset dimensions. Using the H5_GETMAXDIMS flag will get the maximum allowed dimension, which normally matches the
+    actual dataset dimension but can hold the H5_UNLIMITED value if the dataset was prepared in **unlimited** mode on
+    some of its dimensions. It can be useful to check existing dataset dimensions before overwriting it as a whole or a subset.
+    Trying to write oversized source data into the dataset target will throw an exception.
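+
+    - A short sketch printing the dimensions of an existing dataset (label 'hilbert' as in
+    the other examples):
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // fetch the actual sizes of the dataset
+      vector<int> dims = h5io->dsgetsize( "hilbert" );
+      for ( size_t d = 0; d < dims.size(); d++ )
+        printf("dim %d size: %d\n", (int) d, dims[d] );
+      // release
+      h5io->close();
+    @endcode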
+    */
+    CV_WRAP virtual vector<int> dsgetsize( String dslabel, int dims_flag = HDF5::H5_GETDIMS ) const = 0;
+
+    /** @brief Fetch dataset type
+    @param dslabel specify the hdf5 dataset label to be checked.
+
+    Returns the stored matrix type. This is an identifier compatible with the CvMat type system,
+    like e.g. CV_16SC5 (16-bit signed 5-channel array), and so on.
+
+    @note The result can be parsed with CV_MAT_CN() to obtain the number of channels and CV_MAT_DEPTH() to obtain the
+    native cvdata type. It is thread safe.
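+
+    - A short sketch decoding the stored type with the macros mentioned above:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      int type = h5io->dsgettype( "hilbert" );
+      // for a CV_64FC2 dataset: depth is CV_64F and channel count is 2
+      printf("depth: %d channels: %d\n", CV_MAT_DEPTH( type ), CV_MAT_CN( type ) );
+      // release
+      h5io->close();
+    @endcode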
+    */
+    CV_WRAP virtual int dsgettype( String dslabel ) const = 0;
+
+    /* @overload */
+    CV_WRAP virtual void dswrite( InputArray Array, String dslabel,
+             const vector<int>& dims_offset = vector<int>(),
+             const vector<int>& dims_counts = vector<int>() ) const = 0;
+    /** @brief Write or overwrite a Mat object into specified dataset of hdf5 file.
+    @param Array specify Mat data array to be written.
+    @param dslabel specify the target hdf5 dataset label.
+    @param dims_offset each array member specifies the offset location
+           over each dimension of the dataset from where the InputArray will be (over)written into the dataset.
+    @param dims_counts each array member specifies the amount of data over each dimension of the
+           dataset that will be written from the InputArray.
+
+    Writes Mat object into targeted dataset.
+
+    @note If the dataset is not created and does not exist it will be created **automatically**. Only Mat is supported and
+    it must be **continuous**. It is thread safe, but it is recommended that writes happen over separate non-overlapping
+    regions. Multiple datasets can be written inside a single hdf5 file.
+
+    - Example below writes a 100x100 CV_64FC2 matrix into a dataset. No dataset precreation is required. If the routine
+    is called multiple times the dataset will just be overwritten:
+    @code{.cpp}
+      // dual channel hilbert matrix
+      cv::Mat H(100, 100, CV_64FC2);
+      for(int i = 0; i < H.rows; i++)
+        for(int j = 0; j < H.cols; j++)
+        {
+          H.at<cv::Vec2d>(i,j)[0] =  1. / (i + j + 1);
+          H.at<cv::Vec2d>(i,j)[1] = -1. / (i + j + 1);
+        }
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // write / overwrite dataset
+      h5io->dswrite( H, "hilbert" );
+      // release
+      h5io->close();
+    @endcode
+
+    - Example below writes a smaller 50x100 matrix into a 100x100 compressed space optimised by two 50x100 chunks.
+    The matrix is written twice, into the first half (0->50) and the second half (50->100) of the data space, using offsets.
+    @code{.cpp}
+      // dual channel hilbert matrix
+      cv::Mat H(50, 100, CV_64FC2);
+      for(int i = 0; i < H.rows; i++)
+        for(int j = 0; j < H.cols; j++)
+        {
+          H.at<cv::Vec2d>(i,j)[0] =  1. / (i + j + 1);
+          H.at<cv::Vec2d>(i,j)[1] = -1. / (i + j + 1);
+        }
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // optimise dataset by two chunks
+      int chunks[2] = { 50, 100 };
+      // create 100x100 CV_64FC2 compressed space
+      h5io->dscreate( 100, 100, CV_64FC2, "hilbert", 9, chunks );
+      // write into first half
+      int offset1[2] = { 0, 0 };
+      h5io->dswrite( H, "hilbert", offset1 );
+      // write into second half
+      int offset2[2] = { 50, 0 };
+      h5io->dswrite( H, "hilbert", offset2 );
+      // release
+      h5io->close();
+    @endcode
+    */
+    CV_WRAP virtual void dswrite( InputArray Array, String dslabel,
+             const int* dims_offset = NULL, const int* dims_counts = NULL ) const = 0;
+
+    /* @overload */
+    CV_WRAP virtual void dsinsert( InputArray Array, String dslabel,
+             const vector<int>& dims_offset = vector<int>(),
+             const vector<int>& dims_counts = vector<int>() ) const = 0;
+    /** @brief Insert or overwrite a Mat object into specified dataset and autoexpand dataset size if **unlimited** property allows.
+    @param Array specify Mat data array to be written.
+    @param dslabel specify the target hdf5 dataset label.
+    @param dims_offset each array member specifies the offset location
+           over each dimension of the dataset from where the InputArray will be (over)written into the dataset.
+    @param dims_counts each array member specifies the amount of data over each dimension of the
+           dataset that will be written from the InputArray.
+
+    Writes Mat object into targeted dataset and **autoexpands** dataset dimension if allowed.
+
+    @note Unlike dswrite(), datasets are **not** created **automatically**. Only Mat is supported and it must be **continuous**.
+    If dsinsert() happens over outer regions of the dataset dimensions and the dataset is in **unlimited** mode on that dimension,
+    the dataset is expanded, otherwise an exception is thrown. To create datasets with the **unlimited** property on one or more
+    dimensions see dscreate() and the optional H5_UNLIMITED flag at creation time. It is not thread safe over the same dataset,
+    but multiple datasets can be merged inside a single hdf5 file.
+
+    - Example below creates an **unlimited** rows x 100 cols dataset and expands the rows 5 times with dsinsert(), using a single
+    100x100 CV_64FC2 matrix over the dataset. The final size will have 5x100 rows and 100 cols, reflecting the H matrix five times
+    over the rows' span. Chunk size is 100x100, just optimized against the H matrix size, with compression disabled.
+    If the routine is called multiple times the dataset will just be overwritten:
+    @code{.cpp}
+      // dual channel hilbert matrix
+      cv::Mat H(100, 100, CV_64FC2);
+      for(int i = 0; i < H.rows; i++)
+        for(int j = 0; j < H.cols; j++)
+        {
+          H.at<cv::Vec2d>(i,j)[0] =  1. / (i + j + 1);
+          H.at<cv::Vec2d>(i,j)[1] = -1. / (i + j + 1);
+        }
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // optimise dataset by chunks
+      int chunks[2] = { 100, 100 };
+      // create Unlimited x 100 CV_64FC2 space
+      h5io->dscreate( cv::hdf::HDF5::H5_UNLIMITED, 100, CV_64FC2, "hilbert", cv::hdf::HDF5::H5_NONE, chunks );
+      // insert H five times along the row dimension
+      int offset[2] = { 0, 0 };
+      for ( int t = 0; t < 5; t++ )
+      {
+        offset[0] = 100 * t;
+        h5io->dsinsert( H, "hilbert", offset );
+      }
+      // release
+      h5io->close();
+    @endcode
+    */
+    CV_WRAP virtual void dsinsert( InputArray Array, String dslabel,
+             const int* dims_offset = NULL, const int* dims_counts = NULL ) const = 0;
+
+
+    /* @overload */
+    CV_WRAP virtual void dsread( OutputArray Array, String dslabel,
+             const vector<int>& dims_offset = vector<int>(),
+             const vector<int>& dims_counts = vector<int>() ) const = 0;
+    /** @brief Read specific dataset from hdf5 file into Mat object.
+    @param Array Mat container where the read data will be returned.
+    @param dslabel specify the source hdf5 dataset label.
+    @param dims_offset each array member specifies the offset location over
+           each dimension from where the dataset starts to be read into the OutputArray.
+    @param dims_counts each array member specifies the amount of data over each
+           dimension of the dataset to read into the OutputArray.
+
+    Reads out Mat object reflecting the stored dataset.
+
+    @note If the hdf5 file does not exist an exception will be thrown. Use hlexists() to check dataset presence.
+    It is thread safe.
+
+    - Example below reads a dataset:
+    @code{.cpp}
+      // open hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // blank Mat container
+      cv::Mat H;
+      // read hilbert dataset
+      h5io->dsread( H, "hilbert" );
+      // release
+      h5io->close();
+    @endcode
+
+    - Example below performs a read of a 3x5 submatrix starting at the second row and third element.
+    @code{.cpp}
+      // open hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // blank Mat container
+      cv::Mat H;
+      int offset[2] = { 1, 2 };
+      int counts[2] = { 3, 5 };
+      // read hilbert dataset
+      h5io->dsread( H, "hilbert", offset, counts );
+      // release
+      h5io->close();
+    @endcode
+    */
+    CV_WRAP virtual void dsread( OutputArray Array, String dslabel,
+             const int* dims_offset = NULL, const int* dims_counts = NULL ) const = 0;
+
+    /** @brief Fetch keypoint dataset size
+    @param kplabel specify the hdf5 dataset label to be measured.
+    @param dims_flag will fetch dataset dimensions on H5_GETDIMS, and dataset maximum dimensions on H5_GETMAXDIMS.
+
+    Returns the size of the keypoints dataset.
+
+    @note The resulting size will match the amount of keypoints. By default H5_GETDIMS will return the actual dataset dimension.
+    Using the H5_GETMAXDIMS flag will get the maximum allowed dimension, which normally matches the actual dataset dimension but
+    can hold the H5_UNLIMITED value if the dataset was prepared in **unlimited** mode. It can be useful to check the existing
+    dataset dimension before overwriting it as a whole or a subset. Trying to write oversized source data into the dataset
+    target will throw an exception.
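+
+    - A short sketch checking how many keypoints a dataset holds, and whether it was
+    created in unlimited mode:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // actual amount of keypoints stored
+      int size = h5io->kpgetsize( "keypoints" );
+      // maximum allowed size (H5_UNLIMITED when the dataset is expandable)
+      int maxsize = h5io->kpgetsize( "keypoints", cv::hdf::HDF5::H5_GETMAXDIMS );
+      printf("size: %d maxsize: %d\n", size, maxsize );
+      // release
+      h5io->close();
+    @endcode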
+    */
+    CV_WRAP virtual int kpgetsize( String kplabel, int dims_flag = HDF5::H5_GETDIMS ) const = 0;
+
+    /** @brief Create and allocate special storage for cv::KeyPoint dataset.
+    @param size declare fixed number of KeyPoints
+    @param kplabel specify the hdf5 dataset label, any existing dataset with the same label will be overwritten.
+    @param compresslevel specify the compression level 0-9 to be used, by default H5_NONE means none at all.
+    @param chunks each array member specifies chunking sizes to be used for block i/o,
+           by default H5_NONE means none at all.
+    @note If the dataset already exists an exception will be thrown. Existence of the dataset can be checked
+    using hlexists().
+
+    - See example below that creates space for 100 keypoints in the dataset:
+    @code{.cpp}
+      // open hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      if ( ! h5io->hlexists( "keypoints" ) )
+        h5io->kpcreate( 100, "keypoints" );
+      else
+        printf("DS already created, skipping\n" );
+    @endcode
+
+    @note A value of H5_UNLIMITED for **size** means **unlimited** keypoints, thus it is possible to expand such a
+    dataset anytime by adding or inserting. Presence of H5_UNLIMITED **requires** custom chunking to be defined. No default
+    chunking will be defined in the unlimited scenario, since the default size on that dimension will be zero and will grow
+    once the dataset is written. Writing into a dataset that has H5_UNLIMITED on some of its dimensions requires kpinsert(),
+    which allows growth on the unlimited dimension, instead of kpwrite(), which allows writing only in the predefined data space.
+
+    - See example below that creates unlimited space for keypoints with a chunking size of 100 but no compression:
+    @code{.cpp}
+      // open hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      if ( ! h5io->hlexists( "keypoints" ) )
+        h5io->kpcreate( cv::hdf::HDF5::H5_UNLIMITED, "keypoints", cv::hdf::HDF5::H5_NONE, 100 );
+      else
+        printf("DS already created, skipping\n" );
+    @endcode
+    */
+    virtual void kpcreate( const int size, String kplabel,
+             const int compresslevel = H5_NONE, const int chunks = H5_NONE ) const = 0;
+
+    /** @brief Write or overwrite list of KeyPoint into specified dataset of hdf5 file.
+    @param keypoints specify keypoints data list to be written.
+    @param kplabel specify the target hdf5 dataset label.
+    @param offset specify the offset location on dataset from where keypoints will be (over)written into dataset.
+    @param counts specify the amount of keypoints that will be written into dataset.
+
+    Writes vector<KeyPoint> object into targeted dataset.
+
+    @note If the dataset is not created and does not exist it will be created **automatically**. It is thread safe, but
+    it is recommended that writes happen over separate non-overlapping regions. Multiple datasets can be written
+    inside a single hdf5 file.
+
+    - Example below writes 100 keypoints into a dataset. No dataset precreation is required. If the routine is called multiple
+    times the dataset will just be overwritten:
+    @code{.cpp}
+      // generate 100 dummy keypoints
+      std::vector<cv::KeyPoint> keypoints;
+      for(int i = 0; i < 100; i++)
+        keypoints.push_back( cv::KeyPoint(i, -i, 1, -1, 0, 0, -1) );
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // write / overwrite dataset
+      h5io->kpwrite( keypoints, "keypoints" );
+      // release
+      h5io->close();
+    @endcode
+
+    - Example below uses a smaller set of 50 keypoints and writes into a compressed space of 100 keypoints optimised by 10 chunks.
+    The same keypoint set is written three times: first into the first half (0->50), then into the second half (50->75), and
+    finally into the remaining slots (75->100) of the data space, using the offset and count parameters to set the window for
+    write access. If the routine is called multiple times the dataset will just be overwritten:
+    @code{.cpp}
+      // generate 50 dummy keypoints
+      std::vector<cv::KeyPoint> keypoints;
+      for(int i = 0; i < 50; i++)
+        keypoints.push_back( cv::KeyPoint(i, -i, 1, -1, 0, 0, -1) );
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create maximum compressed space of size 100 with chunk size 10
+      h5io->kpcreate( 100, "keypoints", 9, 10 );
+      // write 50 keypoints into first half
+      h5io->kpwrite( keypoints, "keypoints", 0 );
+      // write first 25 keypoints into second half
+      h5io->kpwrite( keypoints, "keypoints", 50, 25 );
+      // write first 25 keypoints into remaining space of second half
+      h5io->kpwrite( keypoints, "keypoints", 75, 25 );
+      // release
+      h5io->close();
+    @endcode
+    */
+    virtual void kpwrite( const vector<KeyPoint> keypoints, String kplabel,
+             const int offset = H5_NONE, const int counts = H5_NONE ) const = 0;
+
+    /** @brief Insert or overwrite list of KeyPoint into specified dataset and autoexpand dataset size if **unlimited** property allows.
+    @param keypoints specify keypoints data list to be written.
+    @param kplabel specify the target hdf5 dataset label.
+    @param offset specify the offset location on dataset from where keypoints will be (over)written into dataset.
+    @param counts specify the amount of keypoints that will be written into dataset.
+
+    Writes vector<KeyPoint> object into targeted dataset and **autoexpands** dataset dimension if allowed.
+
+    @note Unlike kpwrite(), datasets are **not** created **automatically**. If kpinsert() happens over the outer region of the
+    dataset and the dataset has been created in **unlimited** mode then the dataset is expanded, otherwise an exception is thrown.
+    To create datasets with the **unlimited** property see kpcreate() and the optional H5_UNLIMITED flag at creation time. It is
+    not thread safe over the same dataset, but multiple datasets can be merged inside a single hdf5 file.
+
+    - Example below creates **unlimited** space for keypoint storage, and inserts a list of 10 keypoints ten times into that space.
+    The final dataset will have 100 keypoints. Chunk size is 10, just optimized against the list of keypoints. If the routine is
+    called multiple times the dataset will just be overwritten:
+    @code{.cpp}
+      // generate 10 dummy keypoints
+      std::vector<cv::KeyPoint> keypoints;
+      for(int i = 0; i < 10; i++)
+        keypoints.push_back( cv::KeyPoint(i, -i, 1, -1, 0, 0, -1) );
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create unlimited size space with chunk size of 10
+      h5io->kpcreate( cv::hdf::HDF5::H5_UNLIMITED, "keypoints", cv::hdf::HDF5::H5_NONE, 10 );
+      // insert 10 times same 10 keypoints
+      for(int i = 0; i < 10; i++)
+        h5io->kpinsert( keypoints, "keypoints", i * 10 );
+      // release
+      h5io->close();
+    @endcode
+    */
+    virtual void kpinsert( const vector<KeyPoint> keypoints, String kplabel,
+             const int offset = H5_NONE, const int counts = H5_NONE ) const = 0;
+
+    /** @brief Read specific keypoint dataset from hdf5 file into vector<KeyPoint> object.
+    @param keypoints vector<KeyPoint> container where the read data will be returned.
+    @param kplabel specify the source hdf5 dataset label.
+    @param offset specify the offset location over dataset from where read starts.
+    @param counts specify the amount of keypoints from dataset to read.
+
+    Reads out vector<KeyPoint> object reflecting the stored dataset.
+     */
+    virtual void kpread( vector<KeyPoint>& keypoints, String kplabel,
+             const int offset = H5_NONE, const int counts = H5_NONE ) const = 0;
+
+};
+
+  /** @brief Open or create an hdf5 file.
+    @param HDF5Filename specify the HDF5 filename.
+
+    Returns a pointer to the hdf5 object class.
+
+    @note If the hdf5 file does not exist it will be created. Any operations on the object except the dscreate()
+    functions are thread safe. Multiple datasets can be created inside a single hdf5 file, and can be accessed
+    from the same hdf5 object by multiple instances as long as read or write operations are done over
+    non-overlapping regions of a dataset. A single hdf5 file can also be opened by multiple instances, and
+    reads and writes can run at the same time as long as non-overlapping regions are involved. The object
+    is released using close().
+
+    - Example below opens and then releases the file:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // ...
+      // release
+      h5io->close();
+    @endcode
+
+    ![Visualization of 10x10 CV_64FC2 (Hilbert matrix) using HDFView tool](pics/hdfview_demo.gif)
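+
+    - A minimal sketch of how a dataset like the one visualized above and dumped below could be produced
+    with dswrite(); the "hilbert" label and the two-channel values here simply mirror the dump that follows:
+    @code{.cpp}
+      // fill a 3x3 two-channel Hilbert-style matrix: channel 0 holds 1/(i+j+1), channel 1 its negative
+      cv::Mat hilbert( 3, 3, CV_64FC2 );
+      for ( int i = 0; i < hilbert.rows; i++ )
+        for ( int j = 0; j < hilbert.cols; j++ )
+        {
+          double v = 1.0 / ( i + j + 1 );
+          hilbert.at<cv::Vec2d>( i, j ) = cv::Vec2d( v, -v );
+        }
+      // open / autocreate hdf5 file and write the dataset
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "test.h5" );
+      h5io->dswrite( hilbert, "hilbert" );
+      // read it back into a fresh matrix
+      cv::Mat rback;
+      h5io->dsread( rback, "hilbert" );
+      // release
+      h5io->close();
+    @endcode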
+
+    - Text dump (3x3 Hilbert matrix) of the hdf5 dataset using the **h5dump** tool:
+    @code{.txt}
+    $ h5dump test.h5
+    HDF5 "test.h5" {
+    GROUP "/" {
+       DATASET "hilbert" {
+          DATATYPE  H5T_ARRAY { [2] H5T_IEEE_F64LE }
+          DATASPACE  SIMPLE { ( 3, 3 ) / ( 3, 3 ) }
+          DATA {
+          (0,0): [ 1, -1 ], [ 0.5, -0.5 ], [ 0.333333, -0.333333 ],
+          (1,0): [ 0.5, -0.5 ], [ 0.333333, -0.333333 ], [ 0.25, -0.25 ],
+          (2,0): [ 0.333333, -0.333333 ], [ 0.25, -0.25 ], [ 0.2, -0.2 ]
+          }
+       }
+    }
+    }
+    @endcode
+   */
+  CV_EXPORTS_W Ptr<HDF5> open( String HDF5Filename );
+
+//! @}
+
+} // end namespace hdf
+} // end namespace cv
+#endif // _OPENCV_HDF5_HPP_
diff --git a/modules/hdf/src/hdf5.cpp b/modules/hdf/src/hdf5.cpp
new file mode 100644
index 000000000..6e0fb52da
--- /dev/null
+++ b/modules/hdf/src/hdf5.cpp
@@ -0,0 +1,1051 @@
+/*********************************************************************
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2015
+ * Balint Cristian
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of the copyright holders nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *********************************************************************/
+
+#include "precomp.hpp"
+
+
+
+using namespace std;
+
+namespace cv
+{
+namespace hdf
+{
+
+class HDF5Impl : public HDF5
+{
+public:
+
+    HDF5Impl( String HDF5Filename );
+
+    virtual ~HDF5Impl() { close(); }
+
+    // close and release
+    virtual void close( );
+
+    /*
+     * h5 generic
+     */
+
+    // check if object / link exists
+    virtual bool hlexists( String label ) const;
+
+    /*
+     * h5 group
+     */
+
+    // create a group
+    virtual void grcreate( String grlabel );
+
+    /*
+     * cv::Mat
+     */
+
+    // get sizes of dataset
+    virtual vector<int> dsgetsize( String dslabel, int dims_flag = H5_GETDIMS ) const;
+
+    // get data type of dataset
+    virtual int dsgettype( String dslabel ) const;
+
+    // overload dscreate()
+    virtual void dscreate( const int rows, const int cols, const int type,
+            String dslabel, const int compresslevel = H5_NONE,
+            const vector<int>& dims_chunks = vector<int>() ) const;
+
+    // create two dimensional single or multichannel dataset
+    virtual void dscreate( const int rows, const int cols, const int type,
+            String dslabel, const int compresslevel = H5_NONE, const int* dims_chunks = NULL ) const;
+
+    // overload dscreate()
+    virtual void dscreate( const vector<int>& sizes, const int type, String dslabel,
+            const int compresslevel = H5_NONE, const vector<int>& dims_chunks = vector<int>() ) const;
+
+    // create n-dimensional single or multichannel dataset
+    virtual void dscreate( const int n_dims, const int* sizes, const int type,
+            String dslabel, const int compresslevel = H5_NONE, const int* dims_chunks = NULL ) const;
+
+    // overload dswrite()
+    virtual void dswrite( InputArray Array, String dslabel,
+            const vector<int>& dims_offset = vector<int>(),
+            const vector<int>& dims_counts = vector<int>() ) const;
+
+    // write into dataset
+    virtual void dswrite( InputArray Array, String dslabel,
+            const int* dims_offset = NULL, const int* dims_counts = NULL ) const;
+
+    // overload dsinsert()
+    virtual void dsinsert( InputArray Array, String dslabel,
+            const vector<int>& dims_offset = vector<int>(),
+            const vector<int>& dims_counts = vector<int>() ) const;
+
+    // append / merge into dataset
+    virtual void dsinsert( InputArray Array, String dslabel,
+            const int* dims_offset = NULL, const int* dims_counts = NULL ) const;
+
+    // overload dsread()
+    virtual void dsread( OutputArray Array, String dslabel,
+            const vector<int>& dims_offset = vector<int>(),
+            const vector<int>& dims_counts = vector<int>() ) const;
+
+    // read from dataset
+    virtual void dsread( OutputArray Array, String dslabel,
+            const int* dims_offset = NULL,
+            const int* dims_counts = NULL ) const;
+
+    /*
+     * std::vector<cv::KeyPoint>
+     */
+
+    // get size of keypoints dataset
+    virtual int kpgetsize( String kplabel, int dims_flag = H5_GETDIMS ) const;
+
+    // create KeyPoint structure
+    virtual void kpcreate( const int size, String kplabel,
+            const int compresslevel = H5_NONE, const int chunks = H5_NONE ) const;
+
+    // write KeyPoint structures
+    virtual void kpwrite( const vector<KeyPoint> keypoints, String kplabel,
+            const int offset = H5_NONE, const int counts = H5_NONE ) const;
+
+    // append / merge KeyPoint structures
+    virtual void kpinsert( const vector<KeyPoint> keypoints, String kplabel,
+            const int offset = H5_NONE, const int counts = H5_NONE ) const;
+
+    // read KeyPoint structure
+    virtual void kpread( vector<KeyPoint>& keypoints, String kplabel,
+            const int offset = H5_NONE, const int counts = H5_NONE ) const;
+
+private:
+
+    // store filename
+    String m_hdf5_filename;
+
+    // hdf5 file handler
+    hid_t m_h5_file_id;
+
+    // translate cvType -> h5Type
+    inline hid_t GetH5type( int cvType ) const;
+
+    // translate h5Type -> cvType
+    inline int GetCVtype( hid_t h5Type ) const;
+
+};
+
+inline hid_t HDF5Impl::GetH5type( int cvType ) const
+{
+    hid_t h5Type = -1;
+
+    switch ( CV_MAT_DEPTH( cvType ) )
+    {
+      case CV_64F:
+        h5Type = H5T_NATIVE_DOUBLE;
+        break;
+      case CV_32F:
+        h5Type = H5T_NATIVE_FLOAT;
+        break;
+      case CV_8U:
+        h5Type = H5T_NATIVE_UCHAR;
+        break;
+      case CV_8S:
+        h5Type = H5T_NATIVE_CHAR;
+        break;
+      case CV_16U:
+        h5Type = H5T_NATIVE_USHORT;
+        break;
+      case CV_16S:
+        h5Type = H5T_NATIVE_SHORT;
+        break;
+      case CV_32S:
+        h5Type = H5T_NATIVE_INT;
+        break;
+      default:
+        CV_Error( Error::StsInternal, "Unknown cvType." );
+    }
+    return h5Type;
+}
+
+inline int HDF5Impl::GetCVtype( hid_t h5Type ) const
+{
+    int cvType = -1;
+
+    if ( H5Tequal( h5Type, H5T_NATIVE_DOUBLE ) )
+      cvType = CV_64F;
+    else if ( H5Tequal( h5Type, H5T_NATIVE_FLOAT ) )
+      cvType = CV_32F;
+    else if ( H5Tequal( h5Type, H5T_NATIVE_UCHAR ) )
+      cvType = CV_8U;
+    else if ( H5Tequal( h5Type, H5T_NATIVE_CHAR ) )
+      cvType = CV_8S;
+    else if ( H5Tequal( h5Type, H5T_NATIVE_USHORT ) )
+      cvType = CV_16U;
+    else if ( H5Tequal( h5Type, H5T_NATIVE_SHORT ) )
+      cvType = CV_16S;
+    else if ( H5Tequal( h5Type, H5T_NATIVE_INT ) )
+      cvType = CV_32S;
+    else
+      CV_Error( Error::StsInternal, "Unknown H5Type." );
+
+    return cvType;
+}
+
+HDF5Impl::HDF5Impl( String _hdf5_filename )
+                  : m_hdf5_filename( _hdf5_filename )
+{
+    // save old error handler
+    void *errdata;
+    H5E_auto2_t errfunc;
+    hid_t stackid = H5E_DEFAULT;
+    H5Eget_auto( stackid, &errfunc, &errdata );
+
+    // turn off error handling
+    H5Eset_auto( stackid, NULL, NULL );
+
+    // check HDF5 file presence (errors suppressed)
+    htri_t check = H5Fis_hdf5( m_hdf5_filename.c_str() );
+
+    // restore previous error handler
+    H5Eset_auto( stackid, errfunc, errdata );
+
+    if ( check == 1 )
+      // open the HDF5 file
+      m_h5_file_id = H5Fopen( m_hdf5_filename.c_str(),
+                              H5F_ACC_RDWR, H5P_DEFAULT );
+    else
+      // create the HDF5 file
+      m_h5_file_id = H5Fcreate( m_hdf5_filename.c_str(),
+                              H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
+}
+
+void HDF5Impl::close()
+{
+    if ( m_h5_file_id != -1 )
+      H5Fclose( m_h5_file_id );
+    // mark closed
+    m_h5_file_id = -1;
+
+    H5close( );
+}
+
+/*
+ * h5 generic
+ */
+
+bool HDF5Impl::hlexists( String label ) const
+{
+    bool exists = false;
+
+    hid_t lid = H5Pcreate( H5P_LINK_ACCESS );
+    if ( H5Lexists( m_h5_file_id, label.c_str(), lid ) == 1 )
+      exists = true;
+
+    H5Pclose( lid );
+    return exists;
+}
+
+/*
+ * h5 group
+ */
+
+void HDF5Impl::grcreate( String grlabel )
+{
+    hid_t gid = H5Gcreate( m_h5_file_id, grlabel.c_str(),
+                           H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT );
+    H5Gclose( gid );
+}
+
+/*
+ * cv::Mat
+ */
+
+vector<int> HDF5Impl::dsgetsize( String dslabel, int dims_flag ) const
+{
+    // open dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, dslabel.c_str(), H5P_DEFAULT );
+
+    // get file space
+    hid_t fspace = H5Dget_space( dsdata );
+
+    // fetch rank
+    int n_dims = H5Sget_simple_extent_ndims( fspace );
+
+    // fetch dims
+    hsize_t dsdims[n_dims];
+    if ( dims_flag == H5_GETDIMS )
+      H5Sget_simple_extent_dims( fspace, dsdims, NULL );
+    else
+      H5Sget_simple_extent_dims( fspace, NULL, dsdims );
+
+    // fill with size data
+    vector<int> SizeVect( n_dims );
+    for ( int d = 0; d < n_dims; d++ )
+      SizeVect[d] = (int) dsdims[d];
+
+    H5Dclose( dsdata );
+    H5Sclose( fspace );
+
+    return SizeVect;
+}
+
+int HDF5Impl::dsgettype( String dslabel ) const
+{
+    hid_t h5type;
+
+    // open dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, dslabel.c_str(), H5P_DEFAULT );
+
+    // get data type
+    hid_t dstype = H5Dget_type( dsdata );
+
+    int channs = 1;
+    if ( H5Tget_class( dstype ) == H5T_ARRAY )
+    {
+      // fetch channs
+      hsize_t ardims[1];
+      H5Tget_array_dims( dstype, ardims );
+      channs = (int) ardims[0];
+      // fetch depth
+      hid_t tsuper = H5Tget_super( dstype );
+      h5type = H5Tget_native_type( tsuper, H5T_DIR_ASCEND );
+      H5Tclose( tsuper );
+    }
+    else
+      h5type = H5Tget_native_type( dstype, H5T_DIR_DESCEND );
+
+    // convert to CVType
+    int cvtype = GetCVtype( h5type );
+
+    H5Tclose( dstype );
+    H5Dclose( dsdata );
+
+    return CV_MAKETYPE( cvtype, channs );
+}
+
+// overload
+void HDF5Impl::dscreate( const int rows, const int cols, const int type,
+                         String dslabel, const int compresslevel,
+                         const vector<int>& dims_chunks ) const
+{
+    CV_Assert( dims_chunks.empty() || dims_chunks.size() == 2 );
+    dscreate( rows, cols, type, dslabel, compresslevel,
+              dims_chunks.empty() ? NULL : &dims_chunks[0] );
+}
+
+void HDF5Impl::dscreate( const int rows, const int cols, const int type,
+                  String dslabel, const int compresslevel, const int* dims_chunks ) const
+{
+    // dataset dims
+    int dsizes[2] = { rows, cols };
+
+    // create the two dim array
+    dscreate( 2, dsizes, type, dslabel, compresslevel, dims_chunks );
+}
+
+// overload
+void HDF5Impl::dscreate( const vector<int>& sizes, const int type,
+                         String dslabel, const int compresslevel,
+                         const vector<int>& dims_chunks ) const
+{
+    CV_Assert( dims_chunks.empty() || dims_chunks.size() == sizes.size() );
+
+    const int n_dims = (int) sizes.size();
+    dscreate( n_dims, &sizes[0], type, dslabel, compresslevel,
+              dims_chunks.empty() ? NULL : &dims_chunks[0] );
+}
+
+void HDF5Impl::dscreate( const int n_dims, const int* sizes, const int type,
+                  String dslabel, const int compresslevel, const int* dims_chunks ) const
+{
+    // compress valid H5_NONE, 0-9
+    CV_Assert( compresslevel >= H5_NONE && compresslevel <= 9 );
+
+    if ( hlexists( dslabel ) == true )
+      CV_Error( Error::StsInternal, "Requested dataset already exists." );
+
+    int channs = CV_MAT_CN( type );
+
+    hsize_t chunks[n_dims];
+    hsize_t dsdims[n_dims];
+    hsize_t maxdim[n_dims];
+
+    // dimension space
+    for ( int d = 0; d < n_dims; d++ )
+    {
+      CV_Assert( sizes[d] >= H5_UNLIMITED );
+
+      // dataset dimension
+      if ( sizes[d] == H5_UNLIMITED )
+      {
+        CV_Assert( dims_chunks != NULL );
+
+        dsdims[d] = 0;
+        maxdim[d] = H5S_UNLIMITED;
+      }
+      else
+      {
+        dsdims[d] = sizes[d];
+        maxdim[d] = sizes[d];
+      }
+      // default chunking
+      if ( dims_chunks == NULL )
+        chunks[d] = sizes[d];
+      else
+        chunks[d] = dims_chunks[d];
+    }
+
+    // create dataset space
+    hid_t dspace = H5Screate_simple( n_dims, dsdims, maxdim );
+
+    // create data property
+    hid_t dsdcpl = H5Pcreate( H5P_DATASET_CREATE );
+
+    // set properties
+    if ( compresslevel >= 0 )
+      H5Pset_deflate( dsdcpl, compresslevel );
+
+    if ( dims_chunks != NULL || compresslevel >= 0 )
+      H5Pset_chunk( dsdcpl, n_dims, chunks );
+
+    // convert to h5 type
+    hid_t dstype = GetH5type( type );
+
+    // expand channs
+    if ( channs > 1 )
+    {
+      hsize_t adims[1] = { (hsize_t) channs };
+      dstype = H5Tarray_create( dstype, 1, adims );
+    }
+
+    // create data
+    H5Dcreate( m_h5_file_id, dslabel.c_str(), dstype,
+               dspace, H5P_DEFAULT, dsdcpl, H5P_DEFAULT );
+
+    if ( channs > 1 )
+      H5Tclose( dstype );
+
+    H5Pclose( dsdcpl );
+    H5Sclose( dspace );
+}
+
+// overload
+void HDF5Impl::dsread( OutputArray Array, String dslabel,
+                       const vector<int>& dims_offset,
+                       const vector<int>& dims_counts ) const
+{
+    dsread( Array, dslabel,
+            dims_offset.empty() ? NULL : &dims_offset[0],
+            dims_counts.empty() ? NULL : &dims_counts[0] );
+}
+
+void HDF5Impl::dsread( OutputArray Array, String dslabel,
+                  const int* dims_offset, const int* dims_counts ) const
+{
+    // only Mat support
+    CV_Assert( Array.isMat() );
+
+    hid_t h5type;
+
+    // open the HDF5 dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, dslabel.c_str(), H5P_DEFAULT );
+
+    // get data type
+    hid_t dstype = H5Dget_type( dsdata );
+
+    int channs = 1;
+    if ( H5Tget_class( dstype ) == H5T_ARRAY )
+    {
+      // fetch channs
+      hsize_t ardims[1];
+      H5Tget_array_dims( dstype, ardims );
+      channs = (int) ardims[0];
+      // fetch depth
+      hid_t tsuper = H5Tget_super( dstype );
+      h5type = H5Tget_native_type( tsuper, H5T_DIR_ASCEND );
+      H5Tclose( tsuper );
+    }
+    else
+      h5type = H5Tget_native_type( dstype, H5T_DIR_ASCEND );
+
+    int dType = GetCVtype( h5type );
+
+    // get file space
+    hid_t fspace = H5Dget_space( dsdata );
+
+    // fetch rank
+    int n_dims = H5Sget_simple_extent_ndims( fspace );
+
+    // fetch dims
+    hsize_t dsdims[n_dims];
+    H5Sget_simple_extent_dims( fspace, dsdims, NULL );
+
+    // set amount by custom offset
+    if ( dims_offset != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+        dsdims[d] -= dims_offset[d];
+    }
+
+    // set custom amount of data
+    if ( dims_counts != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+        dsdims[d] = dims_counts[d];
+    }
+
+    // get memory write window
+    int mxdims[n_dims];
+    hsize_t foffset[n_dims];
+    for ( int d = 0; d < n_dims; d++ )
+    {
+      foffset[d] = 0;
+      mxdims[d] = (int) dsdims[d];
+    }
+
+    // allocate persistent Mat
+    Array.create( n_dims, mxdims, CV_MAKETYPE( dType, channs ) );
+
+    // get blank data space
+    hid_t dspace = H5Screate_simple( n_dims, dsdims, NULL );
+
+    // get matrix write window
+    H5Sselect_hyperslab( dspace, H5S_SELECT_SET,
+                         foffset, NULL, dsdims, NULL );
+
+    // set custom offsets
+    if ( dims_offset != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+        foffset[d] = dims_offset[d];
+    }
+
+    // get a file read window
+    H5Sselect_hyperslab( fspace, H5S_SELECT_SET,
+                         foffset, NULL, dsdims, NULL );
+
+    // read from DS
+    Mat matrix = Array.getMat();
+    H5Dread( dsdata, dstype, dspace, fspace, H5P_DEFAULT, matrix.data );
+
+    H5Tclose( dstype );
+    H5Sclose( dspace );
+    H5Sclose( fspace );
+    H5Dclose( dsdata );
+}
+
+// overload
+void HDF5Impl::dswrite( InputArray Array, String dslabel,
+                        const vector<int>& dims_offset,
+                        const vector<int>& dims_counts ) const
+{
+    dswrite( Array, dslabel,
+             dims_offset.empty() ? NULL : &dims_offset[0],
+             dims_counts.empty() ? NULL : &dims_counts[0] );
+}
+
+void HDF5Impl::dswrite( InputArray Array, String dslabel,
+                  const int* dims_offset, const int* dims_counts ) const
+{
+    // only Mat support
+    CV_Assert( Array.isMat() );
+
+    Mat matrix = Array.getMat();
+
+    // memory array should be compact
+    CV_Assert( matrix.isContinuous() );
+
+    int n_dims = matrix.dims;
+    int channs = matrix.channels();
+
+    int dsizes[n_dims];
+    hsize_t dsdims[n_dims];
+    hsize_t offset[n_dims];
+    // replicate Mat dimensions
+    for ( int d = 0; d < n_dims; d++ )
+    {
+      offset[d] = 0;
+      dsizes[d] = matrix.size[d];
+      dsdims[d] = matrix.size[d];
+    }
+
+    // pre-create dataset if needed
+    if ( hlexists( dslabel ) == false )
+      dscreate( n_dims, dsizes, matrix.type(), dslabel );
+
+    // set custom amount of data
+    if ( dims_counts != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+        dsdims[d] = dims_counts[d];
+    }
+
+    // open dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, dslabel.c_str(), H5P_DEFAULT );
+
+    // create input data space
+    hid_t dspace = H5Screate_simple( n_dims, dsdims, NULL );
+
+    // set custom offsets
+    if ( dims_offset != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+        offset[d] = dims_offset[d];
+    }
+
+    // create offset write window space
+    hid_t fspace = H5Dget_space( dsdata );
+    H5Sselect_hyperslab( fspace, H5S_SELECT_SET,
+                         offset, NULL, dsdims, NULL );
+
+    // convert type
+    hid_t dstype = GetH5type( matrix.type() );
+
+    // expand channs
+    if ( matrix.channels() > 1 )
+    {
+      hsize_t adims[1] = { (hsize_t) channs };
+      dstype = H5Tarray_create( dstype, 1, adims );
+    }
+
+    // write into dataset
+    H5Dwrite( dsdata, dstype, dspace, fspace,
+              H5P_DEFAULT, matrix.data );
+
+    if ( matrix.channels() > 1 )
+      H5Tclose( dstype );
+
+    H5Sclose( dspace );
+    H5Sclose( fspace );
+    H5Dclose( dsdata );
+}
+
+// overload
+void HDF5Impl::dsinsert( InputArray Array, String dslabel,
+                         const vector<int>& dims_offset,
+                         const vector<int>& dims_counts ) const
+{
+    dsinsert( Array, dslabel,
+              dims_offset.empty() ? NULL : &dims_offset[0],
+              dims_counts.empty() ? NULL : &dims_counts[0] );
+}
+
+void HDF5Impl::dsinsert( InputArray Array, String dslabel,
+                  const int* dims_offset, const int* dims_counts ) const
+{
+    // only Mat support
+    CV_Assert( Array.isMat() );
+
+    // check dataset exists
+    if ( hlexists( dslabel ) == false )
+      CV_Error( Error::StsInternal, "Dataset does not exist." );
+
+    Mat matrix = Array.getMat();
+
+    // memory array should be compact
+    CV_Assert( matrix.isContinuous() );
+
+    int n_dims = matrix.dims;
+    int channs = matrix.channels();
+
+    hsize_t dsdims[n_dims];
+    hsize_t offset[n_dims];
+    // replicate Mat dimensions
+    for ( int d = 0; d < n_dims; d++ )
+    {
+      offset[d] = 0;
+      dsdims[d] = matrix.size[d];
+    }
+
+    // set custom amount of data
+    if ( dims_counts != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+      {
+        CV_Assert( dims_counts[d] <= matrix.size[d] );
+        dsdims[d] = dims_counts[d];
+      }
+    }
+
+    // open dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, dslabel.c_str(), H5P_DEFAULT );
+
+    // create input data space
+    hid_t dspace = H5Screate_simple( n_dims, dsdims, NULL );
+
+    // set custom offsets
+    if ( dims_offset != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+        offset[d] = dims_offset[d];
+    }
+
+    // get actual file space and dims
+    hid_t fspace = H5Dget_space( dsdata );
+    int f_dims = H5Sget_simple_extent_ndims( fspace );
+    hsize_t fsdims[f_dims];
+    H5Sget_simple_extent_dims( fspace, fsdims, NULL );
+    H5Sclose( fspace );
+
+    CV_Assert( f_dims == n_dims );
+
+    // compute new extents
+    hsize_t nwdims[n_dims];
+    for ( int d = 0; d < n_dims; d++ )
+    {
+      // init
+      nwdims[d] = 0;
+      // add offset
+      if ( dims_offset != NULL )
+        nwdims[d] += dims_offset[d];
+      // add counts or matrix size
+      if ( dims_counts != NULL )
+        nwdims[d] += dims_counts[d];
+      else
+        nwdims[d] += matrix.size[d];
+
+      // clamp back if smaller
+      if ( nwdims[d] < fsdims[d] )
+        nwdims[d] = fsdims[d];
+    }
+
+    // extend dataset
+    H5Dextend( dsdata, nwdims );
+
+    // get the extended data space
+    fspace = H5Dget_space( dsdata );
+
+    H5Sselect_hyperslab( fspace, H5S_SELECT_SET,
+                         offset, NULL, dsdims, NULL );
+
+    // convert type
+    hid_t dstype = GetH5type( matrix.type() );
+
+    // expand channs
+    if ( matrix.channels() > 1 )
+    {
+      hsize_t adims[1] = { (hsize_t) channs };
+      dstype = H5Tarray_create( dstype, 1, adims );
+    }
+
+    // write into dataset
+    H5Dwrite( dsdata, dstype, dspace, fspace,
+              H5P_DEFAULT, matrix.data );
+
+    if ( matrix.channels() > 1 )
+      H5Tclose( dstype );
+
+    H5Sclose( dspace );
+    H5Sclose( fspace );
+    H5Dclose( dsdata );
+}
+
+/*
+ * std::vector<cv::KeyPoint>
+ */
+
+int HDF5Impl::kpgetsize( String kplabel, int dims_flag ) const
+{
+    vector<int> sizes = dsgetsize( kplabel, dims_flag );
+
+    CV_Assert( sizes.size() == 1 );
+
+    return sizes[0];
+}
+
+void HDF5Impl::kpcreate( const int size, String kplabel,
+                         const int compresslevel, const int chunks ) const
+{
+    // size valid
+    CV_Assert( size >= H5_UNLIMITED );
+
+    // valid chunks
+    CV_Assert( chunks == H5_NONE || chunks > 0 );
+
+    // compress valid -1, 0-9
+    CV_Assert( compresslevel >= H5_NONE && compresslevel <= 9 );
+
+    if ( hlexists( kplabel ) == true )
+      CV_Error( Error::StsInternal, "Requested dataset already exists." );
+
+    hsize_t dchunk[1];
+    hsize_t dsdims[1];
+    hsize_t maxdim[1];
+
+    // dataset dimension
+    if ( size == H5_UNLIMITED )
+    {
+      dsdims[0] = 0;
+      maxdim[0] = H5S_UNLIMITED;
+    }
+    else
+    {
+      dsdims[0] = size;
+      maxdim[0] = size;
+    }
+
+    // default chunking
+    if ( chunks == H5_NONE )
+    {
+      if ( size == H5_UNLIMITED )
+        dchunk[0] = 1;
+      else
+        dchunk[0] = size;
+    }
+    else
+      dchunk[0] = chunks;
+
+    // dataset compound type
+    hid_t dstype = H5Tcreate( H5T_COMPOUND, sizeof( KeyPoint ) );
+    H5Tinsert( dstype, "xpos", HOFFSET( KeyPoint, pt.x ), H5T_NATIVE_FLOAT );
+    H5Tinsert( dstype, "ypos", HOFFSET( KeyPoint, pt.y ), H5T_NATIVE_FLOAT );
+    H5Tinsert( dstype, "size", HOFFSET( KeyPoint, size ), H5T_NATIVE_FLOAT );
+    H5Tinsert( dstype, "angle", HOFFSET( KeyPoint, angle ), H5T_NATIVE_FLOAT );
+    H5Tinsert( dstype, "response", HOFFSET( KeyPoint, response ), H5T_NATIVE_FLOAT );
+    H5Tinsert( dstype, "octave", HOFFSET( KeyPoint, octave ), H5T_NATIVE_INT32 );
+    H5Tinsert( dstype, "class_id", HOFFSET( KeyPoint, class_id ), H5T_NATIVE_INT32 );
+
+    // create dataset space
+    hid_t dspace = H5Screate_simple( 1, dsdims, maxdim );
+
+    // create data property
+    hid_t dsdcpl = H5Pcreate( H5P_DATASET_CREATE );
+
+    // set properties
+    if ( compresslevel >= 0 )
+      H5Pset_deflate( dsdcpl, compresslevel );
+
+    // chunking is mandatory for unlimited or compressed datasets,
+    // otherwise apply it only when explicitly requested
+    if ( size == H5_UNLIMITED || chunks != H5_NONE || compresslevel >= 0 )
+      H5Pset_chunk( dsdcpl, 1, dchunk );
+
+    // create data
+    H5Dcreate( m_h5_file_id, kplabel.c_str(), dstype,
+               dspace, H5P_DEFAULT, dsdcpl, H5P_DEFAULT );
+
+    H5Tclose( dstype );
+    H5Pclose( dsdcpl );
+    H5Sclose( dspace );
+}
+
+void HDF5Impl::kpwrite( const vector<KeyPoint> keypoints, String kplabel,
+                        const int offset, const int counts ) const
+{
+    CV_Assert( keypoints.size() > 0 );
+
+    hsize_t dsddims[1];
+    hsize_t doffset[1];
+
+    // replicate vector dimension
+    doffset[0] = 0;
+    dsddims[0] = keypoints.size();
+
+    // pre-create dataset if needed
+    if ( hlexists( kplabel ) == false )
+      kpcreate( (int) dsddims[0], kplabel );
+
+    // set custom amount of data
+    if ( counts != H5_NONE )
+      dsddims[0] = counts;
+
+    // open dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, kplabel.c_str(), H5P_DEFAULT );
+
+    // create input data space
+    hid_t dspace = H5Screate_simple( 1, dsddims, NULL );
+
+    // set custom offsets
+    if ( offset != H5_NONE )
+      doffset[0] = offset;
+
+    // create offset write window space
+    hid_t fspace = H5Dget_space( dsdata );
+    H5Sselect_hyperslab( fspace, H5S_SELECT_SET,
+                         doffset, NULL, dsddims, NULL );
+
+    // memory compound type
+    hid_t mmtype = H5Tcreate( H5T_COMPOUND, sizeof( KeyPoint ) );
+    H5Tinsert( mmtype, "xpos", HOFFSET( KeyPoint, pt.x ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "ypos", HOFFSET( KeyPoint, pt.y ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "size", HOFFSET( KeyPoint, size ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "angle", HOFFSET( KeyPoint, angle ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "response", HOFFSET( KeyPoint, response ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "octave", HOFFSET( KeyPoint, octave ), H5T_NATIVE_INT32 );
+    H5Tinsert( mmtype, "class_id", HOFFSET( KeyPoint, class_id ), H5T_NATIVE_INT32 );
+
+    // write into dataset
+    H5Dwrite( dsdata, mmtype, dspace, fspace, H5P_DEFAULT, &keypoints[0] );
+
+    H5Tclose( mmtype );
+    H5Sclose( dspace );
+    H5Sclose( fspace );
+    H5Dclose( dsdata );
+}
+
+void HDF5Impl::kpinsert( const vector<KeyPoint> keypoints, String kplabel,
+                         const int offset, const int counts ) const
+{
+    CV_Assert( keypoints.size() > 0 );
+
+    // check dataset exists
+    if ( hlexists( kplabel ) == false )
+      CV_Error( Error::StsInternal, "Dataset does not exist." );
+
+    hsize_t dsddims[1];
+    hsize_t doffset[1];
+
+    // replicate vector dimension
+    doffset[0] = 0;
+    dsddims[0] = keypoints.size();
+
+    // set custom amount of data
+    if ( counts != H5_NONE )
+      dsddims[0] = counts;
+
+    // open dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, kplabel.c_str(), H5P_DEFAULT );
+
+    // create input data space
+    hid_t dspace = H5Screate_simple( 1, dsddims, NULL );
+
+    // set custom offsets
+    if ( offset != H5_NONE )
+      doffset[0] = offset;
+
+    // get actual file space and dims
+    hid_t fspace = H5Dget_space( dsdata );
+    int f_dims = H5Sget_simple_extent_ndims( fspace );
+    hsize_t fsdims[f_dims];
+    H5Sget_simple_extent_dims( fspace, fsdims, NULL );
+    H5Sclose( fspace );
+
+    CV_Assert( f_dims == 1 );
+
+    // compute new extents
+    hsize_t nwdims[1] = { 0 };
+    // add offset
+    if ( offset != H5_NONE )
+      nwdims[0] += offset;
+    // add counts or vector size
+    if ( counts != H5_NONE )
+      nwdims[0] += counts;
+    else
+      nwdims[0] += keypoints.size();
+
+    // clamp back if smaller
+    if ( nwdims[0] < fsdims[0] )
+      nwdims[0] = fsdims[0];
+
+    // extend dataset
+    H5Dextend( dsdata, nwdims );
+
+    // get the extended data space
+    fspace = H5Dget_space( dsdata );
+
+    H5Sselect_hyperslab( fspace, H5S_SELECT_SET,
+                         doffset, NULL, dsddims, NULL );
+
+    // memory compound type
+    hid_t mmtype = H5Tcreate( H5T_COMPOUND, sizeof( KeyPoint ) );
+    H5Tinsert( mmtype, "xpos", HOFFSET( KeyPoint, pt.x ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "ypos", HOFFSET( KeyPoint, pt.y ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "size", HOFFSET( KeyPoint, size ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "angle", HOFFSET( KeyPoint, angle ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "response", HOFFSET( KeyPoint, response ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "octave", HOFFSET( KeyPoint, octave ), H5T_NATIVE_INT32 );
+    H5Tinsert( mmtype, "class_id", HOFFSET( KeyPoint, class_id ), H5T_NATIVE_INT32 );
+
+    // write into dataset
+    H5Dwrite( dsdata, mmtype, dspace, fspace, H5P_DEFAULT, &keypoints[0] );
+
+    H5Tclose( mmtype );
+    H5Sclose( dspace );
+    H5Sclose( fspace );
+    H5Dclose( dsdata );
+}
+
+void HDF5Impl::kpread( vector<KeyPoint>& keypoints, String kplabel,
+                       const int offset, const int counts ) const
+{
+    // the output container must start out blank
+    CV_Assert( keypoints.size() == 0 );
+
+    // open the HDF5 dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, kplabel.c_str(), H5P_DEFAULT );
+
+    // get data type
+    hid_t dstype = H5Dget_type( dsdata );
+
+    // get file space
+    hid_t fspace = H5Dget_space( dsdata );
+
+    // fetch rank
+    int n_dims = H5Sget_simple_extent_ndims( fspace );
+
+    CV_Assert( n_dims == 1 );
+
+    // fetch dims
+    hsize_t dsddims[1];
+    H5Sget_simple_extent_dims( fspace, dsddims, NULL );
+
+    // set amount by custom offset
+    if ( offset != H5_NONE )
+      dsddims[0] -= offset;
+
+    // set custom amount of data
+    if ( counts != H5_NONE )
+      dsddims[0] = counts;
+
+    // get memory write window
+    hsize_t foffset[1] = { 0 };
+
+    // allocate keypoints vector
+    keypoints.resize( dsddims[0] );
+
+    // get blank data space
+    hid_t dspace = H5Screate_simple( 1, dsddims, NULL );
+
+    // get matrix write window
+    H5Sselect_hyperslab( dspace, H5S_SELECT_SET,
+                         foffset, NULL, dsddims, NULL );
+
+    // set custom offsets
+    if ( offset != H5_NONE )
+      foffset[0] = offset;
+
+    // get a file read window
+    H5Sselect_hyperslab( fspace, H5S_SELECT_SET,
+                         foffset, NULL, dsddims, NULL );
+
+    // read from DS
+    H5Dread( dsdata, dstype, dspace, fspace, H5P_DEFAULT, &keypoints[0] );
+
+    H5Tclose( dstype );
+    H5Sclose( dspace );
+    H5Sclose( fspace );
+    H5Dclose( dsdata );
+}
+
+CV_EXPORTS Ptr<HDF5> open( String HDF5Filename )
+{
+    return makePtr<HDF5Impl>( HDF5Filename );
+}
+
+} // end namespace hdf
+} // end namespace cv
diff --git a/modules/hdf/src/precomp.hpp b/modules/hdf/src/precomp.hpp
new file mode 100644
index 000000000..3069155f7
--- /dev/null
+++ b/modules/hdf/src/precomp.hpp
@@ -0,0 +1,43 @@
+/*********************************************************************
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2015
+ * Balint Cristian
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of the copyright holders nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *********************************************************************/
+
+#ifndef __OPENCV_HDF_PRECOMP_H__
+#define __OPENCV_HDF_PRECOMP_H__
+
+#include "opencv2/core.hpp"
+
+#include <hdf5.h>
+
+#include "opencv2/hdf.hpp"
+#endif