From db20f3a8bdb2e9347b36a9325ca935dae4f27f20 Mon Sep 17 00:00:00 2001
From: Ke Yang
Date: Mon, 22 Jun 2020 09:35:38 -0400
Subject: [PATCH 1/7] Add skyline selection

---
 .../accuracy_vs_di-skyline-order.png       |  Bin 0 -> 43040 bytes
 .../accuracy_vs_di-skyline_formula.png     |  Bin 0 -> 42840 bytes
 .../accuracy_vs_fnr-skyline-order.png      |  Bin 0 -> 43584 bytes
 .../accuracy_vs_fnr-skyline_formula.png    |  Bin 0 -> 43356 bytes
 .../accuracy_vs_fpr-skyline-order.png      |  Bin 0 -> 43619 bytes
 .../accuracy_vs_fpr-skyline_formula.png    |  Bin 0 -> 43487 bytes
 examples_skyline/fair_prep_results.py      |   92 +
 examples_skyline/helper.py                 |   53 +
 examples_skyline/missing_data_results.py   |   99 +
 .../results_play_skyline_formula.ipynb     | 1778 +++++++++++++++++
 .../results_play_skyline_order.ipynb       | 1767 ++++++++++++++++
 fp/dataset_experiments.py                  |  126 +-
 fp/dataset_experiments_old.py              |  341 ++++
 fp/experiments.py                          |   90 +-
 fp/experiments_old.py                      |  383 ++++
 fp/utils.py                                |   43 +
 16 files changed, 4691 insertions(+), 81 deletions(-)
 create mode 100644 examples_skyline/accuracy_vs_di-skyline-order.png
 create mode 100644 examples_skyline/accuracy_vs_di-skyline_formula.png
 create mode 100644 examples_skyline/accuracy_vs_fnr-skyline-order.png
 create mode 100644 examples_skyline/accuracy_vs_fnr-skyline_formula.png
 create mode 100644 examples_skyline/accuracy_vs_fpr-skyline-order.png
 create mode 100644 examples_skyline/accuracy_vs_fpr-skyline_formula.png
 create mode 100644 examples_skyline/fair_prep_results.py
 create mode 100644 examples_skyline/helper.py
 create mode 100644 examples_skyline/missing_data_results.py
 create mode 100644 examples_skyline/results_play_skyline_formula.ipynb
 create mode 100644 examples_skyline/results_play_skyline_order.ipynb
 create mode 100644 fp/dataset_experiments_old.py
 create mode 100644 fp/experiments_old.py
 create mode 100644 fp/utils.py

diff --git a/examples_skyline/accuracy_vs_di-skyline-order.png b/examples_skyline/accuracy_vs_di-skyline-order.png
new file mode 100644
index 0000000000000000000000000000000000000000..8673899d43952f6cd3a8a90887cb93ee847696aa
Binary files /dev/null and b/examples_skyline/accuracy_vs_di-skyline-order.png differ
diff --git a/examples_skyline/accuracy_vs_di-skyline_formula.png b/examples_skyline/accuracy_vs_di-skyline_formula.png
new file mode 100644
index 0000000000000000000000000000000000000000..271ba877f30856507b9b09d19f5444fe4898ddd7
Binary files /dev/null and b/examples_skyline/accuracy_vs_di-skyline_formula.png differ
z`5gtj?OxNeiA{{3GwtSxaQy%kTMdDrsgb;xIiLHWGmcf%`CDzR{1tCnaIxBx2X~ek zU9jv)Ieh+}Zlmz8U%%`|!X4U_r0BD>Q#%T9$%aCwPF-_yax!g8xtUseK?$m4rj3Oc+Pp}@S9BaQH(W)rE8RNdWW&(9jjxrZsxJhaK zaKoX!k&U+|x(hYk))r9tggh=pGg%cIn@mztQs#L`*7RLxrM$eolOeki_Sh;|D3m^Y>n`k3qujywdMYf8YR0XN;F7+J#+~ zQkq>HXTKh7%`lYd#u4;_i?$&In}M?)Z;Guo5>JM#4~n_#`%hkf2{|_G+5-65v4*HC z>gyj#TBr{$XF+sS>!{t@S}wg5v_0$ZkpMZGX>**fv>)?W_Q7$;q*27_`1BwdesvVG zHUbK58}s#MP$Wb6)VO-}YHUuph?5Eg9_7wY59s&p`v6sQ?W`8m#g+bS>9Vg+R##QY zz)m)T|2`oba^XQpQ**O`^MWx-1k*h>lU4pwIG3{PT6FK{P(ZmY3WZP%Y(jaP|Dpcy znL3OS4@vQ1NlD2{sE~a>d^lzZ<^U%q$m-rF(p@k|sH z6qV2RbG(2|f^oWZyF^(<<&G=tp)@Shm(6|U^O@Zhhaf&S=9p;?K%A|F{O`qkQ5VMU z0^E7AwbA3zd3itI!g9|7I#F?E_i=9S+L`(J^BO5yw+|dV$OQT8xs8pD7f=u#7Z>lH zSz5YKvbm%q!vnYO{J#DBX)4aVpk#F{J0$9y{VFifXmD`w5Y!jvSUonZ;p+xoD~g?k zFn>>EZ8Tc?_U$NTB_*Z)`?HNQY=n*x1;-;49r| zg6no1PmJFkTBi})VNo!4a=_nXBib738^h_I;!1T*JtOnZPjwlr4h{}zm!*}JU(G^2 zd{`s7^%NJEO;NhP*xI*@G$=t22fP5Y;!5qTsHkv+{K0pAq@KxNd@Ij-bCF;$CPVYK zoWYo%%D{a!7OM2cGO|4hP~0&>Zua5B2d*%WUbkh=GiQ`|bUwA99By>fSV~5wwyy5R zap@OIrS6*xpDEcc4)=m_k9XuGySlmIIv?aFni0|phJthkA#vQf|D-W2Iv{4b*TVqz zqB+~789fhWUxJK3Ye$}SbWu@}9Uv;)s0v(wDj6LBbf|V$C|$n%kby@#29-Zxql#jy zF#!Pq%mR7&`C}-3gIZ1dlev>rgr>g!d-(D2O88%3XsC-6kwR&icOffjA(I;Ai5%}8 zF9S~W2F&rj>zpj$Ly9IQCU6j63)@d5c(XcRd;k9Z-0W;4Y>H)49HjWf`Fe2z9QIR+ zii(gpj=y)AYJ@7B*>-JdG8rni-!<@j#C8hGqF0B41v00YEQ^#I8ymsJy^FgMI{dSM zriGrApYbtNKQ+C*22hZkWngC3?cXRXlc-vT-`CE%=L-hP>|sCgGYS%Op<%nxhR?HZ zkY4lM{83wL+hRRS+(Ajnef;>900dsh;PrD%1E=x^Xu*|^Rk3-fNlJRcf`|$If~Bjz z?i;f*Rn8r`mVB^i*(SIGzQ2@nX5fz0wg;m1;1_uTVo%JwX;$`t>kTsxnZT)-E zWB>}dz@WkOQWtV~Q6awumK3&Rx>;~g&PXSK?-UPDD|n?AC>?7{6x7>j@)$`NY0B{= zWXNm@`Yt!Ve*LOlXrF37-J=&f(&ID|j*NI~G2J6ECgv*qw48!M8+^lc!d@1ZR6sm5 zz!5dA&X43kF%2oJaZ7@7K+JGms9E{roz8==WEKG7F#)(FpypG_cYu@t1&MTD>8F%c zk(PdfzE^!UM_yiDT}vwgs^`N`qQKFEP6IRMrmY}JSFaa-1-XJ8nAcmKVzqhlTv zo440Q$r9M$+S{}469K?z`22|Sge+_xGC6h=SOKa^^__<3!!J1~0NmD*V-_5fS!@jz z0v8WYYIYE88VZ++BD0ToQByC%xiA}Rid6_M>x7aL;R@hLh=H2=uiy+?(V?RVk2flxw2C;CwO~(=^0>#D2!fxKg39FY5y>6ON8=@czVA_yfkpTWl z)pb6mOZF+OE)!<~$GZvGZCegOKN*T%`2bf}S197yOa>}_$A4x-Bj5~-?`i-lY-Np# z(-b4b0^44{eVYJ4CPJ6M>9)cRP1Ui;|2n%encJHNwMsHvn0CHREZR7@^YQ`j9e?c< zUGB1lYPk6!1@-lGCMyA-S)$u7U=$<^z*=#rf(JM+jaLfLSz(*Or#W^GTXuZ{ z-k_ljGL2C{u?mJm%phrNV5f_$%`mEE@Otuu3s5|`JEeoO03Xn>iRt$D_cNZmdo9N= zmIBj^CJ4}qZ_GSGjt{JY2vuZCBhDa?vEi%7C%7fi>16!;%;nav0RF;(iL<#ayn>tx zQlQu`U#>z{QG5127%roL>6?4s-$ubX)16nEL+S-7I3B4lU((^m znxuHNvl{?v%5mRxju+zOOtD*rW(YPyz5h! 
z_+#}oS;$9yR@T=qb&HNc!5-YTnE>9wFZCSwT4xCLQ2RPm7u>mX=fM8`$7Lap7QodR z@QYMeSHtE$O|F&TAwpf9ET6DRB5*?0v=|(r??PdCO>~_9n_@3h8u`j8&b6>toOu%%7z?jkncd}=4O}c( zdtBmTG>U8e{d3?Q5!6_`wTD9U$DIsB1uVJ~nUr)!aW}|{)7GdO@Uk1;P5d*q(yCRZ zo^hTLwu^L;o;xb# z&d$su1Kb3HV$his zbIpUucOq~z-Wu{OlfE)fs0(~hMA=0Uzx$1@D%J zta`EJh6W|X>xG;%dJfc5pQFLSo?0Lw()5d+5s8J+(vp>5)X2{KMdqF!JAQl-@U=^I z+smcfsVIUTJ9Zi1F#z1AjEH1PVw#HukkU2~M{&HL8H^)?eom@fe*q`&Q&^D3r_-_* zIs;SJfgn?d;G}9890Q@%wDQH_kh6wQwxFgNEm@yx2EWQK8?O-F1dJMFK}UTLpO4el zxwSOe8DH$OTvJhT1*{!m?vU^$j=XnKfNg-%E_DIQGlY4eEddDsv>Sk%yLRqujFElK zrMC>#0>VTA4wrv&dvuMMPJoIDx3%F^ygpS32xYpOu5Jn-WuWp6JV@C)=8P z?l6@#pWxe$(goLZ!97+x*8(wcM2IN1jJ^4`@o@^_T))dKNc;L7C;To=y$lTv1v82T z7*&&q1D=!@s!bphk~Gux<>cj?gALphp?;uYxWCzMOjU_>7N-_z{#Mk8JwI%QP%{j{xzrw5o=l7+^i}lCdMf0yl|qvSg9H4AD2|U zLJfU={lJnoAaq+Hb%QcjmTO(4ySx28rbb!pr%yPr7i@aY!{tD-h(#OO9+Ajc`Cu+_ z@lv^YQ4ta5UQK&46(~gjt(R*$5qx27Q5d145Q1vY?Djo$_A)G>xx{V#O+dg0_+W?= zx90cmZaqZgt;`PO0Fk*lELfV@OrQ)o=d9NqMQCLs2O}bN0l$3%aQJeMiz51B#C!s>oAmLc5_o>75t>ib z*VWdl0yGY31rQ#`+!k9k1K8QvB5eGZscC3n&*B$gCv{ZX>ka{?;nBXlt50P?*jZ2Y zDjtl%wa9L^M}RLqb2enI;mRJM>zXq*z!uGbb>6jS4*^2wBA8-`{so2$M;7Xz3Rq`;L#c+ql#H-$U`u#*-*w_D;B z*))U6`!q*!0*~|wR=`HLxPblJ;C5n4;FD-5s~}_g$4ZL^{9Pj>P6#A$wor=qn{0tY zAh4zl>2LvP8&1fgC=U^ZKHauD;DmO@9Vst=4h{|?6o?6Pu9)xsV%kL(&3=sWCFGPQ zplk?;$;-Ql@RNRTHhXJzbwQV<2{|@J1%)6e!FJFxYeB?dm@{U_qzwg}tPzbfNET30j;Peox|0=W*k$1h8^M_&+R>L;NCjM{ z%?p{US5JX!wk(>hG%~mZ^)$H7-6j1D|EA?0S)Z*_#x_Fi5HsR0We zX6cFGZIC@esuma$k_2^?F~D2TU%Y4mI}9IY6SBFA(rie~(2mC!J|M9=Td0?*eL^rd zGu_^qr8v#nkeXjf9vHOe?T4Zt{vwrC(Y6n{bD{EDfO#=-*pCd+;E;2+0@a9o89Jkg zdPQ~xxsxXN2~;C}t*jhp+g|Yo>*M9oFVX_iJd4$7@Rc0FZEjFjbm^xdqP5^)M(bD> zs}tK&Rl(L5fomEE<}V%cb$;`oXQZxP)tv(Y$p{g+**OU903LzW#X~`K>46CVW}${9 zb@!T4)XOleLWdpXCj#rYNfKBK^`1X_wX4QCys4=sC|0s8CLX;TFm4e7ucf4av=LN|Speok_E|~v}1ei%~#QVvUaeyW4 zHsV2clk$ji4`QtWa_z`4^k%Pie`k1TeR(<^FkQ8G@7@6sr+nwm2XxiooZO%eUjqR1 z&*&U1lAE2aR^`t|061t7qCD57OFH0?0i^*Naq7gpo2%;qfFU5KlMVP0ekuN;Pp%`0 zm2c!OXKks0)~o4P+%6;UyD{VFB(QA5IBz-?yV5@^uqK>!udXRR!aeEfQ>w|Xd{toO zlI+~U#V!1}G(DP8#j1Ms>SIWGSb|j{#J`&=UeH?&ISbAroRr>Y zh}L>~JJK59ctk^<6SS^nVUY&bA6AJDU^heZ!h{{g>JCnKN7rzc?Fl)wcS}oca3lom zC$vzx;b*wU7%=DF19)h&$JRXCL3Oq5Mq<)Pc_90^PmQx){=`zI;rr(1+D{@?RfOu< z`k}?{f~)Cf1s8WPCP;ni`E+vcTRw(^2jih&MkfexWulQ{7)e7}yE)Fj{Fd8gcrVZ? 
zrd?jdccc=A;Kk8L!4)GV+}kfEWH-pf@AKm}n2Z@$gf#b%FrN7g2?_{g_`{@soY@T} zpY?t+{+kO0ai02`Zp>sfZxQeW#lvDlnR$ zj=n1jkW5q_*SMV)61qvybI@baB*z5OA+AI;IP`P<3z>`%V)h}5 zsH><1AhlY4f;N_504CNzKG{>ObM)v@R6!y1srogC8Q8Ug?P&%dKs(tH_%+^@pT#Xl zfp@OzZl%DmaeZ*4q-10)wIn1YQ27Nnhe6Qpb|$7eL*j%K5(o&wy~C9F_JXu?2#G+z zQ7%FbX9Cg)M3jKHKO!hN5+1yB=T6AUUb1gOq7C?;y<`rJj=nx?gpR;W{%7^y3tU}a zCqlmKH-w7k-rio5A0U;=0mw$S3BW})K=%L)(1YAkVgBEN46Q&?ndYuYpAr&!SD(tb z($&WW=K#B*q^9-;>ZVIb^ai;KM4KikRKkYIDJV?9O$EdNk1mbMZ3r;jHE_*dpgp~T zu(N-RDFiIc)$$Qjb9+?410vPMP;GGSos7IZ8_Y_SwjaZvz3^&;~x2G*fo z90Qa&otTM5_CV~1ll@S{#KFP4?v)tEPr^**_6mhiK&b%MdkY2X8`uaC z7+$Q~xocMw1NQl3jOp=i_+APTY#siyz_gukV7ulb=P5`N~ree8vVwi*z z;OplK?4AXOhR#<%v#`(~ZyKsmQ~)jd4N4Nm8`9pi7rdUF{8n*XOGi=uwSz$OOT}Gw zZg)~9)`ectVVD{Xyh#q;#Ioo|_%K~;SMc%blbGxY{;%dQj$S3h@LX_J)z2V(cz9)=h6$5(aOrwft4Tpvd2GI z(-uDID6p)uVU>n9m)9Szsj96r?s4PJT}w1UQVbO9JGgJ#Jv1*)->~q8i=4FuwK$&p zr*kLOclfI|NW$S4|4zlzcjgsppk?xhmL= z9Zo6^j)tCKUfn|4@kVO>fwo$JSGv{_{=}JC~Ufg2UYfK zIq*qVmt6dgXPB>a)xFpI_Qmwws)oWho}xKAaV7MZUVR)UQs0pL6IhMYo>oV=di zPCVDILw_EX5dL-d!jyEap{49lcrfa%&(S2w`!UvR52mValNoA1l|8;-;=E6Z&6lXjEm?OOKd{8I(1*hmv}{xDrR`C8|ud|MM?*Sr)C+^B>i>G*y) zKGl#^5~=DB{>3H)&tOW+eyqMYs66E4sqGd{iSA6W1mnzX%Bn3@mXsR6ltO=mbC)iw z@za*!&BIWZ*?!RM##;8G$}(2erN!gU zdtr04sfCZBp}_@qQ5+9o&juqlo5QF4VNEp8R% zZI+TEN%&2$^z9m>4`YTShj)GikrU5Nu5M*>3odG$2DNmg(SdsqiuiUKZoW0P2oxcP zX@2~RFUT_&x|nC)Ug>mL!mUd7F^1(muhUrK5V|RM1BVm9lAeMf4oTJiL5GN1rA2Ux zMs;24L~^%&{~J*k_jf8@@gx|2$Ma+>j6MV0+gk&RoKOU<3h|y-?|D-=9klxj7jN2( z37iHJRD-_)YY&+HZS8^AIl0zOh-l8hsvGbC;-#Ms!t@F<7=??ttbN5OdtyIzzY`4KC7&TdLKgqF=O zQTyqqzdFts`^lP;dqzU0A2)4d_T8IMva)JdefTKWW8;eUr-|Tr3d-QrC;O=&W-@~n z9ZIsgY~Tcjr*4usKmXhEjOpt~v)W@?)DpAR=9YLbU77j7XwKG^2v1n{W+q+;op0uS zL1L^6bx~%?oxsj!3-sV7PDmyX*@YI&71p5n-gczMqv*qIM*>MgIuh^$NbdFcvGLZ% zYOu&w-=jTna&--tD#2{9bt|)+oOEWz=6?=nZnBSmUtQN!j&0eA$BjxBo>8B}EIJQy5Jy zf9IrE@C7s&t4_bYe=Otu1@){QkzySn+rNDIa!aiIK|m=lBYu8uk;oku0pujeEn04S zE$TSiue!Vk_uY^OZtsk)>tzG_`tQVs&v6Y6f=hPFW9O$3@dASnun0qEx@U}`umJ(~3~4$kAz6Oa2Dc-TEW5+8KR z$MNjeOMv|hWQ_GYE9evY(Ctz3;@14!9OKf+@G(~lE09${nXt9BCG5VDXSfP%-Q4mr zpMwQd1wFuP>>|+b-yi+K>#uTCB2{n_xsnu+2Y-`fH{0my>au7I-jS3vauDLNWX9Yn81!pwFOSTx|O~G9MEg$RmG{#ZPPKSO)WwJ=d1s`x?H>9Fz8khtv)J#mqhB!X-B+adg!$BWK z64^^4(QTSd0>nn^cw1^HYBNFo1Fb`mjOL#*neY^p@(l9r1RF~#jo(Y`6JEH4CFuAr zaWSJl5vbbUfaL;i2R$aEfSfdeHX2kb1LJc_QWtdSvrGdy+V1Ven>9>hkro0SJPiRY z1voS4ebGmzk3e~VR}z7rN9CNJ9wXLrYK;YJ372My6a>Hu?x`sta9q5+ZByNasMtif zzB6cf^z>MhsnRY~RbT+glx(g_2#0LW3l0tr;?*1fkc34JH~!_9F&4l;6vBmt zf{?-lNSs>J9MhGVe%_Sbm?^_1EjF$ZE&A9HSaTcO^`S}BX##wm)om$E>C&hJrj*P0 z051j_MW|c*wJaJxctHZMtF;$vq5aDmm?ArKEz^NDyck4)&a~vTf*LP+5{b#kUID$O zP5YY$88wq8g6N^7cWOKUe=@3(m8|$~^*)d?B8~Tx+owSC!mW{Fg}aF<4U9V6cvDtIy3fK)+38l|Z!+)uG5*{F#1c==WA-wVLo#ie$Otk=mLmY=H zv$AnIB%9G<4jpBN<^%+4u+&0dL)_MR;C_5AU%7(hn1IL`%&kCoTLLiH05v87 zzySbZ@~eWzkGgp~%m72*eVB`jQw8pl#a+BGE*4DcmVWf^h5&xo6)Qw#p4(l~G7lJl z9kfN7!nMoE$|7hLXzsA#)7shB{$Swv9j0`F+DbbqFGHr>E}-REpw{i~eX{(o2}iiW zoF{jyG1)?>Wl17P_r3w|3Lr=jXl#Kgb88>@&rufoABs=~!|^oDwBx7KeMG+~gWF|2J#yM5(~6rZ!QUf zAPvNk%^z=9n5J0pGynBl@jtZ^%bXP7A-J1FO($C9nNoll>IJI;$YYq%#TUFZasnU) z;a;A7{FwX%JNvZ&_pR0YCpb6=Kh2^oHWpsLd3BVYUY1Au+SZmz=O-ioKOzns{u*&j zE*t8GQrCkC35G82Z3(Kl#_0x$Sj~?zDIcsc^(NRIdA{Pol@ch=fy2~*=)E{w#iqF? 
z;^E=JBJSRm;ri>#PC$EtF9|~I12RV1i}?K6Gv({oU$ArX^Q&EXvis~TZJ+7ECD$$I z0pH-@H!qL0Z%tojx-`5k>o&~uN;4yBkb;_ju-}_6oV+D-eCZND3Algn2X6#vBRjxZ z^nl>3c@B^RB-qqlassXL8gy#dP5e9yzY9GgvOs?o-0cC;_5$EDX28-3Xq+Dpu32)` z|5ho;yDGtTfh*$Aub!P9vfTM}#KNmA&^WtF!r(LndO!>6j7Ff>&kTC67=>(NoR=mN zLDYui%3xp3PiUB!=Yftzc*@PwK^>==c-3M>BPLj-MAcfsq{{AbcOGY8K7I4FkwN>F z9l9CPg%r+HUHN`iI&g@9`a^zM-Pz*b;|K9Wd3kw^iTf8d1D9MgT@Eak1unpqUtsb) zoNycb!ZMLiKU)k47J{ZEAMMEhc#NN4y<@(5O1XvXkI+Hq2fIQ9ARiHfceC2&VHUU6v$aYiN?2lsz8Xk)9~+SCeyhVGdaoD9TA8m-k*UM26}+B_>H^t{IR`}{giPp`sl*sA_r=CjQ5}BX-uaE`qjqeQN}_={;duXHF=-FNxuPb6O-B`(_9N|gHE+6Q z?!Fn~W>Iz0+nn2+tJC1*l_xbz!PTG2rJwF_J$;wV?#2yY03W-m&YnH{;@gU3LwY~k zf+g0E-%2xb92(dfko8%3o7B3MQm>a?aIF>h-Zv#JS)4Lcui1;>sewaD1;G;&q$s}dRkF~DuUj>Qg+tt1k3^EbqD=Oc1 zj6ypc5(=`MIOY4N767T6*$E1G@MKH2$>oX-d@6hY=BkoC_(F%Bt`8Xh$oEM<74*QX z&X7^Yj%ZA8Ao=@y75W{4pTdDK1+p1=z`0b^)dRPtx3}th>*fk1bY{!V%vRRNeKU8G zJe0^=a^R`sW?N=cG`+DIJdw$nqwaP39PGQ9W%5Mygq*K@pcF%FWo(UD4X>Fc+Pl-B z2EU)i;0lsf#J%}vf+l4YlsfbBE}#JFj=s`kGeJAIUqk#9Lx~+O?73b!^RlxWB+j@yHU+ll`ttB#=0lSPtH4-qP_>X9wp ziLj}jJ!@iRBbRw4mZ5bWTA34}G@yadaxDN!z{PWG98%iOjLBEPkH#}hZR%*LDftE8aM?SfD=A^xQse3fPS8uzDFYA z6J1J8Ht~EH`5V}(Dq=&_BWQsG)$HVS9ER6y?Daa6tj@&~$jGSc|M-5HW%;q+f(9Oh z>HjA%89;9T+87F2Qd;jfK_{iI=CQKSdgKD8dBk;ZBw1L+QNSs0zPo!UO(xWrjs}bR zo>3i@4PcSu#mgpH&4WL$#`iMsmiVkg)j*P#a4jiq>2z|MeIOZj+28Q>l~42+0w?)i zAH8C$Gr(y5I0^i=%jnHlDQEy#cC$(blkzP*}$g z)4&OPP672yEnR>6!er**v$=Ecrd}G%^;y0bMyltO&tAdxVD7r}5W{vh z$J?8!xa{ZYw^W$%Wo2D@M`rd$ z?hR4M*nE?dtG=Xcj}HGaAC&JrcKWP4UvLye0E(ur*&#G~^y->vWe3 zNYS|IqOfgU4?iXsG3LyTNPuA3hXtiWX~)yeGELv#lRH z#btA&bF{6F{ykbqz;%n;-~7BK=3${gb^VCqE6q-f`i!GqeRbSuZBCwG-|g#VTyg|3 zEBV{IC?7Tn_8!Qum_G~D_}Va25CINB4|F3a3ERZX8u2Tr)Arrv)=X1_kpj@$0L}9+ zq=_5AvukQ-1Z8GrAp!Avp4Vp|mMwfk0FS$!iTru8eIGW?);;DOczO_{5hGav`5~Xoj60LZ?`b247}O5F}o!2O2K;e?@(Lz z%8H}EGf3^=-+%?srNPTP74bYV%Hp4hyJ-H6J`kMKmVx|Ng=-`lQ&! z&Js#*bhRL;-E8RSD@WU9NY03rqiG!oS%bnYxuirNh(a|`@}U$GwZlUWU*WxnfO?yt z^$&GFqmBn?4%_Trh%DQoAYlx9J|o0D@s-9AdfPlfD=1Qfx_h?TVekbsGlJ4FC_Qus zM&gV4mQ_AX0vB3#hLX|KmqMKclfWO9IQ}ssk_*GA&X60Ef3k@G8M3Z61g5DkjhDW~ z&1=sS>0C5wtSJP5FdG>9Pc0>S7@KrRqhF%WP*NSk=p+t#0(tcJ(}Jk9|JHXuCbKwp zRq2kmC2(7OFThkXDg?nl93hV!kU&9323 zWS$Q>fsuMk_nk?I!nV^>qMVAKbVu|t?4jft%fSQRA5fC+1cylV;yFge@VV%f>X}>? zDt1`NkuzMRN3_W2r4>GR*&lfcmq~Yso`ml7L#N~MzwRXB%<41SQXfT6N*sX&{_*B@ zX$6MbF@KV)RENy#Xt18v)KIg)=*k)rewO0^y02U_Bz9k7Zi>6nqeHg6LP$vxUOjmkzF`_jT zvYAbg^+>`Ts4_}Mo#_&sS>g?*`plrBM1O%Ll4&#<>i74T-tt^1r%t1r^bK9pGGv>f zwKztOmB}dN^b|Lbb+k4Fr|*nh{aXWJ%CH%dGrD(=3rcF3XkwiG1BN3xfS3kx-3Y|; z*MUfndEBO5*f~xl3Lr`xCKCW03u-M*Xmo@Qi7p(V3H)wrc1Rrzs9gg!g7F{VMc+V=4LA$qt~_fbm68bu#f=F}@&|`Cs%w;D%(;6c$jK758je4O(53l2UQXH*@Lub!R)pY1~eDm5B_a+wr#C z`XSU7;3-iXby8|->#wgq(33T)JOa7^2Y>!4&?1A&RfTaKY}(Y`S2EjUGS5|?(e=L& znheLLjESm&<0*(Kp;K4@V4FpdzM`JoKb#RrE#w29z=SMlvVmJ=0`ni7e?8*>1ax#H z$UFYSvBZLotdxQdb;MlLzz$vg$KL44{x`v)+`#T!)Pd8NfFs^ZW0r8VHMcqSz&_*{ ztDjQG1i#RQv%S=KCvZwR|CU?PVpZ_2Z=cHJGB=IR-Ayp}cn5XjV`D51PFM?&QWn7~ zswaHDE*j^q->0X=JX4Kvuabx&mLUnh%Rq||>=vzUQ#`7gCBL{lf11?){Sm1&7*abeMnnNxlyry_3gN+rVzeQ1`cARS75{)%OKT2 zGAdqlQnj^mRVV6fl5DWgD9x6j=H?ChAi8X>Y?qzPEHQH&j%Y*qf7lXRvZHidT3+C2 z`95mz{4SAj2QK`>Q5KRNzu4C{H@On z-VzeX70{jjamVM%lqk72?e7;Fzw^-gce`k5oIG5qxXilmgXb_z7Q*PRr)U5JW@>CD zdg1mW4SGKVdOuxNvA0AmDjDd0hq;*A32LOI8OX>Mkw`VD#U?0vK8hZ zd5lKd{bx@exenu#R6x@LoKgb-Gz!k{(7J&JnEcVp3F48SJ*ii}-4Z|qn$;BlH-FRV0l?&R<<(R+Ci}GSWrza&n zC${ZIxe9DUxe{dJ7BO3#1+mBW~V5^%t1E>UY+n<-#$0K6?EWPu!MxxO}-PZP2ox9;m0)wSOj zjWpVSxRq7{!3+kM)%^I;26$`>paTF=o5FA*?Go2~P_+iWd>QFQeQ>R?9iyWIi=&H! 
zlm=RL`jo+u^Dozs`ksRumw57}#Y?eAn8U^oA;9yPxGAJZ7B zeXfoe`Lk;tf*92aTJO7AvaPLY1DWRt8SFPxx6BUG&8*B|RL9`5c@h#vdiT|n~JMt7-#~4~tL;u6y)pSow$+!eb$rRR*gzaEs#UvkN zG}%Xz)iXO|ThyxWSc`T{c9Btp?fw0d`b9j$y_~u5J^a+@?+^c-!};e@H+=6!W)Or=@%mj~<8p|3Dc^CYGuSVZGNJ6GjPLn#_g(c>YMO;Lo)XgF zlQ4H>eTy_^ssreh_8sglc*jjiN`cPVij;N60hY;|3@npel%$X&p|5S_YM%so(cO$Z z)=7JdBUk$j!AE0#f*RbOhd+D&mzIuQephicHdc1;-ZUBSie8>w&MBZe#ORWkX{+zO zhApo$GDu$|p`qcLMs~Dxyb>g0`8NDJ(K?T) z(I1p=Y__7IY-@L%SCYMEKrY2c)P3 z{)j|GK>_^$U~l>8T+511U}`~N8ixJhC6<&qNZpr2^#5WJ@{?! zaSo2EpG8htGT~@I2XM+us?^q>dOWt>3@e_{NZZ@nN7t7^t9Iqny~kfXd9nkI`ltjc zuWQ%k?8oOf!+Tw(MF*g-X3;vCl+sOa9^JDD8S-P&kveppc<+*0-ooo3hKP4{2Qm-| zOkp~yU8B@Nqz7TGWj%H=Y;2l&YLpn_y%p`WB65sw@9z&rCvMXoSph5?_3a~lI~q;t zAy^eZN-TP`6}?)%I#0EIL}L9XSQ?tpw!JYTjz)>0hIJ3Yop@PbUjQI@C$_y|i@E>` zfRNqnj=^6spfu>8r}8)=ksBFjLBn=>R)Fbo?T~iVRNZ2cn7K3(W{cXNt8*+| z_4LA_eQRJuW0lq|+yO35G0%gwpN)K!;md`-Q_z}au)U@U(+Xe~$u-o%K3z1w(S17m zBbta%=`TN;4u^_{h6ZH33%1Fw-X%XSjf#hGbcy@V?6E&)E@@82x`t+-2!$)4E;NT7{{^IxZ^`}6u1F%dpr0KA+ z8(pToQR!x^2pgY&D)B`DbXgF^{-p@la2@I+NY8uF;2e&oPRb%Zjz!7pJv5OIwbVxF?7(=uOK2}1KHeZf4bsr83Hw5T zCb)w%DCv?$i5OJs4VMAHP=}lcWXtP?oLHa00uDni5<3G~X!c8_>PCw}s8II-Y=&`7 z7!S`sMV`W;%ldcE6ifs35TwM49+uEr-KM9Fd^DZEZzeZXm`Xpx!|bL269< zfE$a|IiJrDvf&pbUY|wD>woJp#n?ZOy8kRt{QrZ@|5GgHw<)1O_djUh^Z)K7s<-~{ zC9?l>56)3uXe>bO0i~fNWwmG&H5&Cj>u^2a4k~KLP*cPB^zyVqfki!+F70t<|m(cd{0PK*$Eb83CkzR3X7d~h>d5gO2-QLK4 zTbO!;rqO}&115CEL#CA!7bhKnhFTyX9MplJkn4c?0Eg@EgA51ecgM!ZCqqAb0dyv| zHZ&ZO)y}huLWp+n#;_o2$blY99K>qWREI=nKSY;xyREBOgk4u=ObjZgBAb}27D#r|Vg4CC ztX7^k|4a816wV&)piyykErR*RLw3U5rMXu9mtmdU7cOXk+jhw9haT)E=-WY;2;&UU zNMKgN2s~+%YYXTY>L{jXVnU;c#$iIVJppmufiUw9bss^aAQ~qEL)cf(Z_!|mG9Z#d z1IZwdNr1o&^Ly$z*S$g5;q0o9#)cpm3E^`y{S{Eo4!c_bd4Qt*8>sVO*taISc8xUM zL^Qb;W{YJ4yAPThmyS=U^$|83x|Won4GMZ=D3TEhB}g^_ib@VDnWK`ElEdN7McMD$-EVc@zTf@E{c*=P24fV3`|Pv#UVA-h z&SyRlZ2*HOywt>DLFMJ0N;zn6lddTUQXfM#3-M*$)BH*w%Viy==$}?bGL^_EmQ>vg2^86bz-ikeE-qeX;L>_lA4)*xvN79CkrI~yQ4V_YWDoVyYC&OK zn>pS*e!V}QTR_fR1aC!LdU|D2db)(ye7_MJ(QKO-YPDvUw12(@RFzRQXhlzmgZw8K zhw#44?ibywPRKhJX9RftV2evgD6^N2Rz}#6>D+&uh_r#4S0>dRWs0`wvZRcRz3A&Q zj=~ZsJ2+&LP|y@%r84+C1vIF_C{MCLa%jkLpe3(v3azY|9S{hnFALtKq$5T~cI@&VAE%(^A(yTR zns7lhqQWwCIR0MW(7-|pwjyZQB#Ly9Jl<nkf$aKo!v5gd)35I2K2>r zI^snsLo4ZNWI#j|Hm?MG-xSXS}Abw!I>OF5c|vm8J| zVDsx-RZS{7-K?yvfe@&pl8qLD7h_=$*ajN2tmX7hAZrv1KeryxXl8pRhd|#Fhg@Fm6QqOwsajikHC^`^=3gaz+Qpa zt5k?Jil?Ne%Jl`}Y>`k^hdm2c&Zruto7r44b!9dWYghN?{d+O2H}0rp%Vq8VXbVxtEz8%j+eZ++;`)VW{k{^{anna~0$F_C1>I{`@ zVdpDhG2`bHGEV>sW|CiLEe1YwwSO~3Lnq{Ys$2~yu1%R$Uwdb+G6Rb3^gsD9Uw3Mw z6dnlf(S`Rl5TLmMg95Pektl5l7Q96Ptlw0q8V;F)txe0rxfi2+2%f?wFY@VgS$;{JRILj$>RK50d3A9dE z>;iI_Y#;b(dc;Q^K3uUYB1g8EOC}n3*0(TUvXF?T{KY3x&fYGxS4<2{l$WjM_lKPC zp0j=4zM^`?7NUVd22T7`x1Iq;oJ^vHsZ*(VE?Qth`axr)Ou3&Z%b|^wta1`;64I#0 zdYSv7utr+%cW6W#$ei-Mg$r6qU3&JHHdieb$dIiG6Cf`D<$8RK&p%A{_+}0|d{NR3`8D z(5_rNg#s8I6(+QvCq(w)!gS0+4Ov~Vi%te?7P74!Hgsuq?t`!x>^c#@?}KG2-PMno z7F1G1Ke*Z8kz=KlimstsJQ-UcT1*UjJ(vSfZafBkt}6N2G^mEuShNzLuJrmwIO^Pb zB1ns`nSqh)OILt=>iAIWDb&D-t_&d}!$iRXSm3E%7|1LqIu`ifo@>1KwGJHcg`Fh? 
zY4QbdR2QHsO498^p+vWZtSll%)Y9;B$i2~}7Jwq))eq|N~NcJlDNvd-vqk|ry7x#5-{9tXI??>g-*0L#&A*ZGJ( z10qR8OZ#z&<1X4BIlrzZm^*|0ED>jdQRszWEqQjZMzh{|^egm{eb0QmXIBpzg!kU1 zD^@S*QBM0OFr*kZQdi&M3Zi2%Xkh)p8a{TFY#cn$AX;50gJB6< zf_+ph*btiyT_0d>LOj3_uq&)VvcsH(b4_`eKqHRv1?rK)`!M??otT5oMjkaGkC#y# zQK%r8v-}G*7lq%v`Cx3!2^vny*u8a6&c2iFBR$<{9zfWLhs7($NVynXypD1g3V#Fx z4k>L1K}GZP8y*M`VPD+bMj8U7T}NVJar^7e(?9%4~YO5OQ1vq z119_uTyC^Zm2K`j+g!DCL0{Mx&LS@`z7TD=WH~M8feo}~i8@Tly-sF_rbxblnm=t@}lamoTWw6kGxwMUR-%mYQ-L>h5YktGg@J}a4VU^-A2UcT5*k$uH>xrK%Nr2GH#{}hBr;7D^RsXdPkuPifFI}RC7^LehXiR>X|A82rXg3@EtnN_67Ns!K z(=~vjrz9s!osCB5G7llQ@zbdtYvGZOK?@t?enK^rZ@N`1xV~oLu8L6;`i`GphLeHk zgq>606cAq{P4pBosv_i+-)+3t#6{wO9A#xatHVZ{{d>GWpu#5qO_5yq%>pxJuErW4Hz1#{EW*e^V=Ru^}qr zQ7hyk2~|%4`eT~LE=0)1?kIT7;~eAYoPH}afesND?)#u?2psYGA~Tk_bkW7}1Uj3^BH0|dkQE?F|s7%M`A zn^5+SKsqoe=nX|#=vyWuTU8>ioajh-8I`xaJHROS2plI;5GR6rCA4uN0ZgDGqQ3+l zfx@eM{lb?pO)-mVZcI9|Lel`YWd}?1@>qw9wM`_e9&;XZmIs-w<9{G?<}O1Z$nl|K z9tVLxbWEK>hf_5oQ_xq0dWZ%Y#4fh0BDSdoV+uly#{o-9ST8Yy*&vETL|zo~7o;9d zA}eB>ix3eK^$L>xqL)LBI1eK9j!a({U<@mihb^m$PQO}4WI!=oBol*9kWz?4IFUo{ zf=))!#9dP2hTTOMXfbi2<-;(Fx^S)eB*=th9_%#6aTy{F4hohaKr1qJLOLhfAE2!z z!8#;lET_y!&*YE1Cbxa?pn~TEc5Q>f-3q5r3JtMWJ^21#UFx07DSCfs66 zl>x?25vh4#N_gG{W<^8feZc0+wrM^NjT;21bt+t*j{qf+2}%|3-qj)R*^F4$5~iKJ z>*P&0&nnE*jk=HC0{{qMoRT6DeXMD@4wUB$KF=sg}5>sUu7f%0dqq z^txjvvWfgXQdc)Hd?LQ&-@kb?keBjdpOK2u=3r=UM$AolZ@8y#+X%IH>klysMsS!` zN9U2TO;-7xj(TEO)RYtw<`ErT?A&@#4+K?F!mx2FHUdYK zq$iq0=hynj6>yR6j~3BOvUqZM8c2djVR%%ZKYGf$>0z9~SAnl6G&DefsimBRrrE|X z=*bp?J~4@a&dx*;m3ShL0j<7rYD;1*f+?c1NA@Nhy%YAgHP)NRBm)jeFPb<>>MoEZ znV2d9P>=f6r+G~z07O{b5_^)c=xs~p&lBmtpQt6+3#4jCVmJUToZZBmVn9H0R1~|x zxwqR1MRA)YvRgtZBGPS>5%;zcmrd$HL&dYU*zKqorJL~3#op=Z@9BRTN*B{Z_RA=& zad(VgM;d(k*Fa~K2Rdle@eyly#2bY9L6lY#a?7xJFo4!SQZc}?1Y%SJ3wl3B-u6pO zOv;ZwU8DISPk!vGI`v(D{jK65R~QnG_Jw6UGx;4aemnO(Ag%Ff$!d;JD6yeFp`)!G zho&>4r$w{<#fMtml)VNAf+n|q1PMXObiqnW2TaflU#Wjvx?OS;x_2m8X+mp-=mCL) z(Nd18guu*6yf4I3ZhPD7WcB6EO>@sm^BB+L-l!I6P8Zho*25}h>}XoP{%%kI`W|q3 zR%}I#N%Xz9S;8TaeFqEG-F;lV6>@A2pSzg|*nSwB zM^Y+F%jEl>6DPr0I1i#$i~Q?pjV+JzQr44DRBzH)INUK4FGl(`ON9keO-K z5FK_T%6^1VJXEqktGn+`@>obcWy5Hrm1ZANu0$_w3c9FC^JNX>`|J>I^g)G)NXx30 zC}6}H890eVfi%dYHtSr@;o=%7h5;O(4ce`;`ydKjO9rH2XN!x8Wet?lSIg4XH{mQ+ zP*fjt9+9RO>k#;Fvprc8Q2~e+`9HrZo(6e1IO{leE7AN7-G;JDKWY7dcg?bEmp3^7 z>1^mD(ozOhB*={n9t4fJ%!hsq~`S%B@OVG^}t=n3n<`J#sI?OoqPU zOkHsZ^2QXDME17WgBXcp%mf)IcB^8Ghp=~Wl`F|r628PeAau2I`)jJJI~;t~q46}^ zjexfoK^=N#iGDluO3U3)H)(n}C>@6Sk~WzMM{FCU;XxvQo0!2Ed!@Q5dXuMtg~>Xc zm5@g45)fk$HQ&%IjP!(YjLz|bjU!U|lM#mjp^p$!CC$8M5~9!+{f{5~Pws~QVuXC(L<%28r~+boR|BMN>#-Dd zjdqZ0N8of!ZmPqVjP(K->n;@!T%HV$#6mi@Uv_+togE$(H9$**a3F}U)g9`wMefdBA7PnR7mavAXGs5W?!VGubl(JMJnj&R0WpgmAtF zHAv4gHgn1nDH5d!AMjaL-4*=Ud==$EV6mWw2;m$3skV0}%Kp{L{mkCxmu2J`z^iaX zUj-dAs)DI7EVZ4NC0_|=xeOahh$tdMT+KM<{Gps3833Wl>FD&J5UX%D8oNw7Zhdu* z&8bPjV8v&aKcoIQ;Og{Z#{2(d`A^n0I!H(N)g_{5cnL)yW0$7^_rrxN8HCvO|1WTP zh4w!-l!V$wT-~fHoOPu0=au3IwfUyT?upWGY#`H@k<$t7YWHeOLS`wQIQbh zcei>nt{1q|F4cSh9>?I=ArwmV^f|}T*@mBO$=(Yz0=OmTNJeY?-RTIy_h0cJ>mHxl zOJ*;Hj)Bsn3*ds>Ci=2Y`J5kQ8_)YC4PD0?+AlG zkBs;y!r&77lW8EnuCOD7L7!a+0lSB#G#Qs$!o zj=t8Pq+a4wYZ3Ql;q8M$WNBPwT!?%M5(3itN_acYD0t5#XX7eH#OcRYlCjL7bP-4x zwGLJY;^V{Lj+8?l1aU~{BM7gOBK_Z*4x(3RYMnSigwrP-P~#wDfJ3A)W~Tu*^RdFh zbQMVvP%Pju+RKcu@Y?TV>xWiTG6A^`raTqOvIO~@LFEt>PN`VC+~%{n&D&KG;GC0= z9_ogNlMQB)o@%D2tp5yh{~>5F60o2g2Y|f$JxLVM4k0{;k{~}7KsP}|5Ch5eL6`$S zsz>1`Noi@)5seJgEf0K41i*O=lB(u?1J&{cyJ4DXf7-D7MenFcE&Wy7cIBC9ux8SXu(^tYY&NdWA+BzN9 zQ>))yf3KqFVNLwI?Mj(Budf+MSM+4?|KUTOEl=hckbFBWE#EEGEtL^*d7yjLbe3vb 
zxz|pidv@Mqc4npv57z;iaZhjWHT13q<|RCSEC5`Eg^39i0INdqnjLV1=0^l6lw&&- zL+V;u*nv0&14A!IX$E!mA%B>)a)h_0Xq{kU8tsV-A$VjLLaytWj#7>?8;82OIt$)a zMe)d&A{Q$wD|-e8*d1*V8OIGH`rV0fZlzX`S`S#TS0Mg?E1eyRMK~7$bVwO-?HwoT|vRq^c9phM;#lNVwlHJa8KWL$qQLd zE$v3q!TvipJV{+mc@r#CZO_`@7`>v3Czw+pdO3wPjIuV-p-1bp8-;seiG^fPPEOVj ziXv?#j$_-aoRt+Q*Hn8{nvo*5gN{`rPRoDiCZ2CqAr@AYTpBH zVFn8CGeWZc#~ax3jek<9^+5Y(PC&(G zk3GF7fNpS9J(`V`^*YpPUPCj23nC;zAQ=)QzFhyurz$TAK-nA+ya>i%pR#h000L9C z=NG713(U;&_+8aZD_H;VS7v&#Ae%EH#(fHhC#DiA0bue;OG}$z5-oOhpkB7sF$^vn zywzmanlFxI#-MZ+3s{r>)bjHCPlxFD@F>=OtTYobez4wQ#JHC$+jsdD^S+9X4&(QD zpT=B4$`z56l-` z-q);{Vl98Sv5B{2V~)PcZf#=|gigT_PR2EBl=^pFh@2c70~RLhP?3RndZR%}k1zDq zYo7sq2({`R98{-8`H4zyDN>l)*D~TwtGebWAP}Ey%2{xPX7upnZ=JE3C)OPDMc34c zvGMUzIPxy&l-JbNF+-+DvNs<^o>JIM>N9XfM3X{~T-k5=_=@vpE#@T_toKTN#FD;! z`vz{RBfn9xEM|OcK{BesZ&)Y^6UhZsk*g>@F_9OU!$z{Lj^E=6>zxrbY2@!0j8#T& z!<9?k{uYO#qoZp+eUkdt0*YrDJ8~J{V`)3PmGfzV{Q8`|Gt8&gT#}})6Qk(7`s6fE zXLTK$t0A_Z@rxh%#Xe<7KA)!8iigB*{?v7O$M3x}40*pQ)@|8QaCYx=otp9G=MMNJ zi0BnE)L+`ak5Bpy9o^kBgV0$RPS zGI!uxSSbnn|2z%%@+B7E-=;XmezwYN))Z)5A{D!O`Lt<+)Q#|y0~6W-;`^c|>`Sant?PFTDf&0=L~V4yQ@yYB5n*Ll0j5qUPfA+Gy_J>yeRbj_T@6F70yJ0Tvp$ zTAA6-eZ`CvEytFNJB)UmRFsru9}Tk8q0Q zD(h4t3~xnWEcYxL6QTsJk9`|qWpm2fT|c(A>~Z#%z(z^dVJ=4uVP9>%*SMnRfxVQ z6%uDZ?0uT;jSt0_#ciiqzu&H^CM01SIOHhLWRSUiCQi=J^yHX|yVLw! z=1kNwxtgdUnwMjK@+zzbz9oCpQ#1B!U0ieTzC;*qSd}?+NhnG-WJb@?Q^WVZg~sG4 z^_!D6MBRS#JFcZDzF`&9a>m=2IydSnbym^3dpckE_P!HdSL8-}-QDlNnkChG=8%t{ z&dGgr=DSi$UN|B((|X2_R@g*kpAD z26$&qf2+!<jwgRR z;xe49Az*GZ$0%xjym4a<_Y#Xk;i*lE7p?A%*bVaK$Mt$UA6(F!HI?*>TS%O*8Q0g{ z@gmz@)5S?nmn~cI@h`E-YIh`FylJqGRm@f&+v{4FkrB}OE%S$TaIvFU<6s)?^L1yL zrqUW0tGG=`$8x?W3)19H#@SremC9S{J53NZtP{V zYuh5w9T?qIb529;_rkmxYd))Fd?o$jr@8G`>27%FYAy23NK)bM!(HS**K;?;*&iHF zyxwqBKr8kA#e|{XTO`k~b5fE}7tSvzuRPuItR~CC_&Yvnwc}AE4hmRe(O0Ro+CGKD zP&4;NW_I=kX9}gkeARO2q}PxDb>%GSVS&a%|aLJu(Dcs<@q1W ze9FR`Df2CkRi&u9JQegkoK!2{mN+Ys*z!RppJ!s=two3fYxwB8*!**Bu`zDn?o53( zqJ4GkxVYQ|wD}VeuXI=}y=&UJL}Wc4&X;aj-f*t?gX(G)>gE`y0lkCPzN3LIlm_w@ z5#G}gqe2Vcg+@CWy(Q&GuW7yp1jtruBI1QIuDJoLdEDVap&jEkS>L#M%4Doggd2)o zPN}f*3HE>1acMR2F_{=A?g~u4%Wh%9j*1a3!9pILt4~CfojH~~U;3yxsFD558&?&9 zl-dA4E$6(o)v(MV?X zCWUdw-A)G=;#<8i*^Wy5IP$31iC~~dikfREDP?74y^aBEz+9gr?0EddW@gWOno)He z3&V$IlZ~!M^@MHR8qDrcaOL`*wj-&9!YYPe&+`p~_9QHr_a(H33dMdt8dZVD^SQa1 z3Us>ik3E%#!zV^L7i{FkQ)M`3-E_qkx|g`Sgy_qDp9tDp=9RfHR~43VW!D*8T6T80 z-@9v-2jt!ja`d2uUM@jhW|XQPBQJ*n^~4%JlL{S6uHi|qdsQ(df_Fp0N2Sj^(8@m` zcfW~0nVZUyeYncw`|e!28K9cLJk0_*t0F~s5JBHQeXu_JR@1MV;|q6i=z1L5cT4U^ z(H52HcQ!tlOcg-l$hdNJ+XgUtjrg)0S29pL!98`KTf4Ez=P54j_OafB2a-}+mi1sD z!yek0=7QMg&!0^oy*P@xRXGat;m#-OQ}G?AP8FG{9%C8aqB(pvDXm3t>)gmy1@`v& zAr~@omWj!MHr;Km&*Us{cC;()M10duzQSrILlHXo^R$VBkqlPme$NOFfT5z!IC!Jo7eG@5vceLgpkNR38t4 zQnKT9Rq5u@V2@gNmfd_2bQhrPmudFlZVeR-)xCm+BQKxGyI5H4gq#91Xv7`^tp$65 z(6J%}<#7C9t}y%QBUlpyZACJepU8%yN_k?I7wh{&Rmn7(e`hDNT;gSm3VR7^>mA!o zN=hDJ_68x3D@6iH1u}Lw^e$bWfgppE(rb`2*+Gc;eGMZcuV))5QP3{u0l8s4s)hcb zik3ocNW*O&^XLiT)BKcR(14R%V(gkQmf?!L6V**ko6-Jt9ZU!j%u)+?g$z<+zLQV> zEtnI4J*61-$O$oeTYdW%OGhs_HP5&gfv^$8~3%N;@<|kd~MqG)NJEASsd>BgdbqQ>6>!}^DW(5v6-gl0$G@7To1@i?8rYcqPZ}pX`0Src?G!{z$ zdv--E84F|Mdb0|R%5x#Lc}+%ca>WuO*^$m|X)6^HnYa>lIC)aG!<+z0VDVhQVgv)f zEme{KeiO1In>A+LAgmuyTwE+^-=Ub5lcS6WIoDt`-=NwA61b2Tnu*Hc0c5^r*BOsR zb@8j474mhPy+B_rq1;>p%ZgWi>;YqGe1svZr>AEoFfoy)=%3fOf0jB}vrrf25)kFx zIVU-#vYdA=O4%dos9RvtZR}(6TD%_xxI+!0^@LPTxqbNXArrMIy-FyjbM^ea2?BJ4D;U9iMifL=ixeGYF6kmt*cQ z6N-Q#sD}UMHXqbet;uUhwNLq>X!90Qb#};_M&X-oJKLC>n>(F9A1t-EII#~bFUe!^ zbiXB^D=Ro>yNTox#o+IiJvo0PYbEU?GkNKIqegN*rso8-@s$@h3B$G(jz=3j6Vo2Zb$t91o7bh^a8a-E@7HVfXe~&jVe? 
zlkE?7rZqQO@-TjSGpM{9P>Ew9dhFuL?SR?O&15WG@Y{Gq1X2etScX}3n|CGr7Sksy zEKV$`*X*DqoU%#?jM1E=G|gG?`-|fr#U!Z))X&c{>m_JKCr=*kzF2|BO8^C~_Sv>c z!YkY)>gVUJzI69_P*MWZWS@{@U~3>W`FF9|9XDi7zl)%#7oT8 z3e6vdEh=XOO_B_id?FZd*ut7my*aW+R*XV5Fs)POOcBa;iH#6AwFxdSK*(z}U=$pU zGT?WtT+?qj4uE~TN@)Eh zT>#cX-U*e+g30)c_k_9kZRZo>0za}{!vtcl|M{6EKf|Mv1i0PAsPj|&?`AfPAcTJ7fHm@qWe`nfs3A1n;osO<8!x*dyZYM-a9(;AbjSa=` zo4U>#KEKnj@0u7l)nHy!d`+Jk4g;I|7jO~GL5C;&To$s}A)W)KCnAp3?HT61>qcjO zeEIfTAFSesdf_Am_*c;2?8&^o#hb@qb{6pDN+W}oNuKyNuOuqe@xCusiS@>4dhs3B zW#hcLi!R0i3U*5F?(PGqLKwmX=smMP-FUVk>;3FCv;V#-PjXe7Lo!8s# zh(r|*C_bWckPG&8AYUruok`{og+8LGzQ0(mFDvkn!$L?uZ)bWzhz$7G93FkYx$#4 z@}d*&V-2boxRCexw~p8qpIFcwNMY1aJ|G!)l6{zaHBPU(mMPY{ro(t%Av0B8l4UL3 z&aep|olr{jP3^%ar!;>a@AE`_CUCDz`B7DRCuPh9p7M~B`+0nYN!dMo;x<0f*f`^D zqU&kDhn4tp%N3d1cJ55<46-DoylD z=RSrc;bz|hXY)`VlQJ!1!^!^k3lxcN9N;%w;kh+@n#J2)MKY#a&M#B@^#^`Eupw2dSmA${}Co53Vn>j!?ZgNRs zs+~)Ps%hH#_LN6~6y@_s&k1q=U@jm*pF{=5{shXAoV^T4A5>EKW0^vod_3MKcaRCm^=d1s`i{ zpip?0SnTx0EQ99|{irZDSW6M!@8h>ePnUrb_=o@ciZl4j`c9ONDBFmz(qdg!r#>=T zzgewZtBR^3{Y+jR3o(#)({Ia6b}R!Ix_<2NUny!}PL#M1d(&kW;p+lCtMMpIth z?}t$5-Cd@U{^ce`{aae)vNu99d7&!#_BVE=KBfC!g?|W=sXkND_dCWAt@sze_(-2F z-5dBztm^T~EhmR0PhD&HROaflOX5ja0V%Oey}oY8z`yqJ=5?XB>cn}npO)#QS2i{< z#L{czb~Se#W;nO}?zXQ^AKpB9>X*(**=UTD{i+!DC`v@^okQam#;?ubRO^E-0A4#2 zBl-+7Ri}>&Z=!58cO4%r?7@goUbn&8p@P8H9i>;%rnNil#)`# zVPVyl?`)cTC1@u{#;FK(If|kwb8Ir^6T}0WNZLks_ij_;?em;T4%E%!h0|t@8|knk zlimIvn5e&V^WfU~d8Svd;&bQck1u0zKIoJ1ZR0AYQj^zn(@g5>@wufBmtNM2*m1Jn ze(u@9wTcEcSA6recCL7TWA!F+ic|+SdV}c^h8qkR%g`WN^U%+-j^B#Xhk~)<_-|QB zr)eIJSw8tdA+-#y?0@~jH@*~f80GhO!<`RORM9rg{b}lan>17HA*W6XB{VSW2WRi0 zj|6lPX1iKO&VJ*}!}Dz_Ay(b$P_3l%+4kA3u7`Z?+W9syX;}M1dydEq-`+G>XvvOC zSUj3svKWy|IwVmXojaocYK%SV+~<#UlrQ1;eK!)@q-fYHrN@NhfBrJI1oN1gz*aip zJMlWEo>mTj3a6&his2$t@m#94X^s%YQ0| zA50u3P!U;+5;8J`&~dRr2(5PSqohPZB^h7Cw~QirC5WxO@?vncqy4I9404r!orIR@ zKScnh_tZ?Ce0J+r_~|e*p)4`6+xXy$YU-MG>&h_DTsZ`2BAc(TuW9-xu+j$*!64zy zpXyOjAoSRz^LHqeuQiVL6?a~WtVG4Z7FfeUY!(tAfbYu%Jn|Z_AdC0TD0^KmHBnN+ zzbM(+4fm8IMBMW&a&DwxsgDP9Y`Nv%!ynex{gKuXE?~p-X?65!NR2*V^xO&VWo=J? 
ze*i8#C@(!Vl`r3`#@Ydt9|ve~2YRLD=DHv;?Q~g9nKoNhF8O!K7`x1QeiHK_=`l^u z!*pWhVc}wssWL$k(EG!9-?9dJik1!inB?CjP5k=LgomC{gY^ns9U#vlk4{5QCMHND zg+D>~W%l8ZVp1k6Ac0|73IbfbtNJmZDphb-pFl2qkb0o>J| zZ;MXre-%vpH#-`x@aI`u74r*FZ{tZ&=PMcdw{a**}|Mj6>V5EY=!{p?#9K9@`^%U-KLH4zewAM$`Q7REV{OTfL~sSP_|H?)B@SXi>Sr^m-TamH=}_*;ML zf819RN4`DqmsLxl7jH21AL76N(UScSQ1&kY=+oDo7BjKJxpTcXE-tXOl<;aS?2YBE ztxOnErxJ2Ic$UOFOjq`wccw#Oj>967{_nBm?+1PScG)jt$+2E2h%f#8nJn_S8vgu; zgQ=3*yN?7$1iV~~XmRPPEB+=TCbYTQ(Yfh()72~JxV@b^eRdt)d5hLR7=$FP--_aM z*x~;jULLjYpO*DZvQ$4kCHD4TF6)_cEURRYbHB#rZgRtw3_+@Z=jtTMvfl89wT(g~ z``L{^gjpyXavisJSyP}s9Af1wUWT(=G0H*&+nK!N9&r=6(PGu}!*Ne;~O8k!j z|NGBT{_1-2PA~wSuC6@~u4>Fo5vBp^cMkj~o@~3#f8rOTnc9dwm^|vk|M8lA6h@R1 zo9oeks{J}d2+GjUM2u1|8kWwtDc}hICZ4S%B_x4I zO~^d#`*n0nKeH0omh;M;hV3g(Q%kZ$dk(v3WB4n_1X<2R(H~pXqv2=G4EC%4X}~!g zRown8)|Kmv!t~cnLPx!gKqouqP8t{(AQKetEC(T%6*c|^*`*t`t>zV>V3r_pk3vQR_vh zc>iy-RG=baIa3s+o?L*GD+Te z3g0GW#$^+}j!H(~DfhRsWW}77%KQ}+O44RfCkYoM|K;zi?`z@#Yoe`;SFmJ`!(AUk zjFw$cRobWXeWijtDZ@`)_1?n4!Qp@&qcT*~i+1b2`6O94j3GFSh3cB_LH9&g2dlDI zqM|ubce;7SPe)#N1I7R9W_~^4IRXT|T9|KNkn_L^)|HS@I(-_g-?D{>?E`n8c|ya` z0Iw-DJ#!isx{hAv0%}kq?}75`8ZHGdk10}Iq-7h;X8-)&$!4^|j@Z$@*&WYInK6^) z56S%cUREJ_>5!uC4VxD~`qBIlf@c-|ol4M<_SvHLfP{pE#Rx!Q`mJVJmcpZ)=-x>+ zY3gaWKmqe_)2agNWBK0XbOfcgw9Ae6;|s5&i(|L^L}o}SUq)f5vy^z1tE70Kz}L~a zN3~0O0*SPP0~%DOnt)ME1+`BFJwrRoQ^A#zzNkV;&&sjJXVc-iT4* z#P^%)AF~nV)L3i!?v9(+(Y5Edrg1l4>TUuzI1Xz_#)FYq&EuPB%r(@Yc(-mv?YWqQ z1$G`(Hj=yv2ndWq|3r#eYkz@L;GvebW7e!kZy$#slb))qNZ5i8~sDJ?6&BS zxksBmURw{JKx8BTiN5pSp2AP{uDs_3Vt623&8}t$FQ<^+VW6dg-ETe}dfg&?rwLt!GO)Lj0@oj^W@Na-?Z$va5?mm)f3 zMgRvAH~Yy{2Dh_mzQ541CcL?{bhQa~8i_p9)aa77m~t$Vrj+1JzLUZ+p~w&vu`!;a z6PURU1&m)5W@Di}UQ(u3}S&FjpG|p!rX&j2_-j4~k1Ae?r4QjL{vjrt= z1fW8ViK1Phx186SH75{*cPq?@_Uhl_o&U^v{;#pO{x^E>|8oKV=L!6O`~>E_FPU7~ VBa;}y0s1LL^`M4Q`hMd}{}-3hDRKY+ literal 0 HcmV?d00001 diff --git a/examples_skyline/accuracy_vs_fnr-skyline-order.png b/examples_skyline/accuracy_vs_fnr-skyline-order.png new file mode 100644 index 0000000000000000000000000000000000000000..27de7a95802e9783e412cc0940848099eff9e369 GIT binary patch literal 43584 zcmeFZ2UL{Vwl(?ziUAQ75k*3)AP5SgB*_L)KypSxE0QJW3@W0Mv`9*poRK6sm_TwW zKqQN#BIgWmZMyrMbMHOpf9JjN|8KlI#_ciMvdXG2>~HV2=A3J;{X$OWHu*uCgBXU9 zOWe7ofMLYV7`8WNKPh}--)hqaFMF(SNGR=x|6KMzd=BpqSlm&!#xSyD=zpTtUbjv7 zkk3X;%|_8&-^TvFl^$k#-^SvRxy>V^2fx|rSy>yIn_XqU!p?c|H$xj63w{oczyCYC zxs?G&+&upe4Eqg}xOH90A!4%EQ8U=KZ)c7-oaOOv#_Z`u?{FSui65T7?cg&WXJLP+ zuGrIO;-O$at%~y!=PG zt8;z^vyhP1$B!RFba?e@kN4VkYT@D|B52pv*Hh!;<5TO>jtveDY8047rqyB19&po@ zd!rnF9%zWTrnt7YMreuiPOB@5Z*GC)sz{5T7h>+@2b&k!SeMo8IXTZs80GE+{ydiIeO9=E<&Hx(UPn^0GeX8KI>05O%H9H*ZdIn{?cz;rnW(4%xA>v5XR@PMsq3mK4A*5<4>PO$4}a z>U!1jJ1?gXSv(&bGqfFRzOK-9e}7w&tg4by(B{?_x80;#*OZ+^1?@9V(;r;uDo##LUeVr+m+pPI z`}+0kgZuZ(X=+B4uZ^6pwuXln#$y@=_kqtdQlz4$Hks}zI!H#Q=yT#yP(;LJUV3Y* zLUn)LiAx$R!ou3s{&dz(47vIZ3WkQsW<|pxQXP9SswUI5l;6ax%-pSX@BZ=BA7FNd2i? 
zxItZCPWSb;=({&=Jl5^nSefC)-IbSr%~|z^h=labt5-*IO*+{Xm6RUB6*j!2VyiEA z-MDLHlp-N3%PSqm#o7JA^lH_~T-yniwsiHYxpq?;@X#MvSz9;o_Pgj#+Lajfl@`_1 z*W0ACoj+gYvN=~B>XJ3pUBGM7nQ41BfL`cF={g~;>ZI=a@|4L){ad)45qyfGeqG3= z(5*?vw=?A%jhu8RPUt_-)s=>uz2>x-Jl>k%vo@i#W9-YR|M?kt70hwLCA?ylSwZ;Y z$B%tNIuoT&FMOreIjQS=&2I9|koEWX_n#a%c<@Tq$%Rj39B=4_oZc|FZ?>G&z1x|o z)sU*3Mq5|7&?wNbINs*R+1MRdYT5z|G1q=ZTTxE#chW;Ae~)roH-bwmDk?gE`Eql* zW>ITUP|%xvlg_ZpO3`Y5jy(A8(9n=>N2Znu8mT;^w!7Yk>FT?34d|k!Y;5vsYHB!P zg6XeJ_a+*(C5|jKiSXK$^a?J2*G1RwvOb;!gV5))AZ@+1)EN$|>tso<*)dAWhC<7s zh`#DCFDRwm+}w(y&taMemPJXD^Eh>@lC?_gtnBSOzh~>VrzkS(J$yK3?!FNogwt=JdeIJbqcv{IwT|}O5$W9`4g1WRE#!P=WvW1931fAWpDe_zRt?Z zg6DnjZGd8c;QaF+`6fv)jN}Z$ngVu{7J{SG)0x!T4%hqo`ixuSYs9ja=I0gk^%G^H z1XD&D!qW_zqWnjFIoDyLC&ME!hB<)35aW21^R%VQVOTM3%+W5f2Pn^L%4fhb&*HbA zPCqXl2Fs~WexlTIfy-u0j+~BPX?l8^M$qBC)5>%PTxD=c#9p!!W1SV-`J@!|7vXY+ z=Dtw!TaWmbZ_Z04H8DYqhAH1t&ETGX|MRo)k#O^nf`S5Vr*WpDlAZaq$zJ<@Wh<+k z$4{ONPfcZ9P)r;f3J|g5*VL^Ix&Xs&^vA;^=JJpXziQzB;K76AM~=jJcwn$trC?NA zJW2M^dY_M&sq`d)ITV@3 z(^O61Lm6jd0b4>m9BC=OrhXx$LbD?2wgrvb?hB~J+jSZ?b}6G>t|CyXFftoP2i z*|*z1d-~My=a&~wJ6kJtuG^bC$L}bV*iGeGkIGc%afHA=MFG)9^ z?BGG0#WDG$CHL8n`}|iN%G1-+eM0tO!5`@jv0$t9MI#7q*ADM{Bjo(SZt4ex31_B*Dqax&G6KBPcL1j*?ks ztX);7?AJOO8JR6i!hGX)$*8C(jWVZv2#*sE+XWR{8Tw688A}tLX?b}{j*gB$at#88 z)=Fl|?@QPGR#t!L7wMSrmdqMLig=(W%Jw z!b;K0?Xvm#~AR3%e01zlHi@|~=T?c&n1vi1u19jj3Wc$O;Z zYVV}Oc~aiLmunK)DQ%-tKQ9}V9v{!BXJ8P`aWws+q?laG)0;^IF( z*=I8OJ^SU$mkL?hW$ey(bKp09P7{~DR#&GvE{woLEEo4ZIp|O>8!a3LkB;7Xl5=fi z!=%!K$X||00v-lFQ>!E*bJOzLP<24(XtaCgJ+mS)046rRS?amsfKJddXV{+ImynRq zzB&h2*{JN`P>8UVeZQ+JPxw@(N*}JoC0h_n{mAEjZEtT;zXGF8jxGBcw|No*%E+ z*{H||^eA@gR$(zMW|5+^MNnPe=--(pTw!HmX1)ywgZ07%OWO;TRA)rv8cW6#!x4;D z4V9k)6d40(sJ6df2k260q&tLA0-$~fIb0S4q7ZLY-Ctvx`b&iN;?XGQOyg6EiFaGH z?0c2PsFW`X3Ti^QwZ7~9>=~sHsA6-eP)+uEpTX(!)sz)9N@rGY600lQ26`SXG6QoAalzoLSenD{syT?QnX>P-kD z>oYss=7=f6?b8Um6e57|^ywZ%g0r);Yinz*>UV}ibs%o3Y;SKPs1~Z@c0oep{qLTD zQ>T;`d{15FdhlhxL@RKMr7x7SEL>bl5G6YRso6qoOj^18j|6w70wo4M%h(?U=I&ih z2$sC`^~Jy8)WsKC>Po-8BBtZMmA^dMWv$1+V$hahNIt@(7CmCoZb}rlX@HcKfzY&Idqn#$TQv zJ$UGl5&-h0?Ts0MsPHR#FU14sGtl6HwgAW!->Kt%NdjG;m6a72z&cPl2u<$Wxj8vG zzYs8!{WKgx$H0(EE+w&zp0%{ojD@a2f_;0q{1z@i+(-64KJ#Kt2)m9{lVb9(Ow+ERb0?3|Pcl zKvF>b>YtIH_66MPNJvTY?KR^Csz~K_BHC>|Fan+zPf)l1XFMtHDiOwwhi`8vyZ7!hA zB))&&TngF!yr5tYj5dE?XHv8qL9JtZYr~HbroN3>-SYA>51VQx*VnIKN8sK#;rVX0 zyWu^UCOyR?XW^VFfUMvtB?HLga#}P5o~pvPb1pzI7LWtCQR^)P8JgeS_CoJ2JY}{W+n>-6O{JN%9d0dot%v8-*AF>R>EjBTpLWlrv#ptiC~eH zmGxr~(bg)qX_+6c8y*^p6PPZ%nUa!%iw9Xj#(8A zz3V$&vE$b8_G&e!EWkZ9J^*URLEef3C8ZpM5?1wPKipAc`bN9eah$7*x1-{o53|6m{)Z)oCqe!O@BvF52sd(AWBn9 zkB*KC5DG2I?%%&(TUQszDE;R0$D=VoE3*V! 
zVW8St*H{A=gm7N93#_^;RV*AFvWTp}W6exWM6}P2G{V}AL$Jau<P3!oFX zFIT)Df#f0s&kG)WCqy8VqQ-CE?j#9a0qWDS((j%H@9OG^2Q)^Vo&;;30Fnt9vdO3q zR#3A0&i26r2U;jZHZ5vwwPb*s*-U=tOft`JobgiViuz?ODJ2yg9-anVOUq73HRn-f z`~K!+N+5d7FmKD2I_|NuvbF+`4|@IDQY@?NPkQY-U&9Pp^L4M8s*_VO#Ij4BoUl|} zH>L^@7kKI8lmGiiVm*C*V-US|H5Z^x;J88h+u4xgQgRTg&@@u%CCD2)FySwkXZYvHVa6Jp8NG6M4~ma zA9!iIZ`tM>w%kAxK>mZT2lF3wwQ@E+JwTaf*2|AZ3<#_L9Av^2Km&;UBRX`?ysW)- zDXjwdD4HoZy$>tP$oPkG8O4GO%ohbbBB((>P|1$(+b1uw;Gm#*pvESEL}aS(kt92( zFXq5d5@0oTfTV{9wG{A*En>xtHINKZtPkPP$>cHpk*t^`^8+MHMk9!7xLPMIz#JUv zvc|@#kQIT>3HZ@oQ{X6_eT;%%^@4qmr8lJB7<4w!2hfl}aAlNe65dGHc3aciyJwFf z=-MHnZlar6gX}Ipl%ge*VO2cR8VFSjC z#p{ERP?bPx;ew3X>oDY}TsN>V)A1j0|6jgY9;=QFg_K!oK-8zYxsP+0l+)BpfU zN=qw)tU`-mjyMQ=Z9*$Oad0Pha}An0fP87Gabs#Xvq1aNrC8hxx+Xqb_jbN{zqTI@ zcPor*p3S&IlH--DS8eMq72Ls*9;S6@S-|AFTPCoW`Tfn*-|FhDi&vc?uC#saX1c)7 zuK}_W=9&*7 zG%6~yu#itR)N=vvW!hR|Z>}0D$24Y3E~T|v(B6z+HdWAZ(y~n{+gY-F{r$U|D?HmD z748)vk$@tVc=Vh&UxW3pZ%3=WCUjuEf?VmE$wGy(`e}mb-TkvP`S}%^rJUu;v#Q6hWWpfnRIMlvxw-7huqe ziVvOAMfw&9IaVeQx$dB!okl;K=Od+gUujm51}lH_M#?Ds4j(HsZm3`MbERD-Eiso|bzxD__*^xE{arxNTEH)|OCur$>{-vB7YDTq za4kN+Q#%dnS4eeGR-57gD!~2d`(=q%D0L-6J!~vBbPbIil+dVzD<0MygQ%=Mw}}~3 zIQ4Rj+2&JJi;~#-f`SZ{o_G`j0dJ)*S{2*RWP&1e2M3BZ3Zh1fV=cnk;fE`5+9w5PX@-tcoWX}J39*K6!rHZV!NyuBe+1YR-5 zv-G-cOd(8#>I&5+py8#tZLOz(3Qu!Y-xnSSlY~h}x_l_R7CzTNwwi8=T@MO5#nZ(T zHc-4AheD8zZ^dG()XV43n-O?{+-w5z$bZFlx=e7P{wkp(BOIz>i@;^Z>NqMO5!Wq( zIEMzh-+ia7ZF6UHR0LEwcOVvN@GSUk33$B;5;dU=q2@> zTR5As<}*l}{u33l=I_%7-hwbCXn<5O@8PMcA^9IJF&#!f1(E|Wy|6xqA#x$8J~<6| z?py3Kh(-wmsN*D4Xqjm@57Kg>4tJ#rU1O&Q5s8wfW;%#bgo&&Q709zr%abVxSd}f0 zwZtLC7ns;NfN*r6t^>KC;e2qb$T=JqFe+ePQhP57A(9_J6(pSpYYUiYR@&XHaFG3k zHzuzFb!wd;uz>W{&Uj%wREK5s`0*e=oecekuyG`TLi!r!D%Yz!v5R-OfXTr9*39)p z+V@=ms0;Tb?M=auiFevZPQ&Q#?vBtM%w0uL*8zI6!R#eGJ8dQhD0O>#rGiaB;2s() zSOjeXv|4|80nz{$*FgCQQd=hsfl=Fe^3yzb0eA*O%}3miCIKnvfbmgn7co#+yGE5_8#6;xqUL14zQ7w5IC)y>Y%%2`@w`*Nb1 z4}yNM%2=*mRfO7V&b&%y7_1k_CkS+BsAbOB*xSD7>%)M zPb$y^P+tYOXMy&xUAiQVdkFe7R6EqQ%^)s)Fzr4s5i0058v&J4bf>Vmq8Z1bw79<9 zZQhbd7s?R|c+AbM z9HeK<7L$dA1zW&ZsC0EzzrJ~wJHz^8TXCp>lngcWJ7}pe1@|_`6c(=L%urQ+9kcCl z?uD2!W@}4JM3a2Jrl!@aKXYw0$WJvGGPEYtoh5ouWTAb zUnR(~LrvVBnsFd+ERDC_hRZEO{AOva&?*c2U zBvAS|w74WtT3`HOl9d9C+h4~F2d;y2Xej`+ejghb$L%otyBQ;;B`PK7C@;dH zX~+w}xVzW_Npdv7IZH}b7K&h@fH6DR!Wi-?oM8bx>^4$d4Ii{}|EM)+{;cjpo67LS z8yNC7Q$tr*SEgX@F~BE&^g$Bzj4xoAO=2- zGAO!-YS{y@MM)1Bj*P6xR-Ud4U`#?!Q7$BjxP9~vq6mG(=jc2IStXPaF{3{-2A5e6 zA6iULD1@5Vk@fB(o<<6eUyI|=Ud+_B!Mu{%woZv!1d0bF)nf^52d)Ey+L~VdWwvqGp#6U=& zK*1c

ykp)RU|#5o)exA<`%!x(53?e%Z3+5+vFs9}yS{gp#OFPIEEi3S8SMTiI7 z55FBj8OL>fu?5ME$W8-`xDBP}v9U29Z4q07(o}zi4wS$7-8KsW3!>V43g8!iA!($? z)JMO16@w~ppfv?Sd;s%DyPF!uTCE>=j56VIj&{=IWR6iv)pVvpr&BNJg2ThZ09F|# z7K@9Yqo^6bB;h~nB6D5d6W77quI4cfRXwO$@0RgEfmDFv8qm@N#1?UY^YBnXLOiF2rWL^3{+GJ z5CwW2Mg2K{zQU=27W0Vy?2o+W z$aDZ=HL=;kQt!y@70QU6lZ@?r$1`}+FedEjy0->RR{HF$1uAml@*Z_v0i2*^wGGh} z6&n3@SRvB=D041AA}$o`A&*Jm9)(_emJK*$9l)I}ASGaWEZp46V3?45^eA0;XVZEM z{sk_r185+JlPS1m4%6}>JsX(-_$_}vg~yFFWZ+L)Toq90g0asBe7Gy=Dq9TFb6}ZA zHLlC18J6P4Y?Gz;!~B$>54y;}!H~oWe=t!PJs4xm_Kh)DR0$@kOXAuK|6w25f zy_VHNpkETcn-B zlF(|I09%N9q@KD;-|{rX7&6urfj`2vL4N03#|7U5WLK*iGA1qQ%ciZ26)WO8)vGyS zKqbebP~VtC8-Ielh$W3FwPA(YBe3l2v_0l zSI)oOg_UQmKOSICj{e?gIJU~$eEEGOn>R+T)(n%GPRJ?fnPSr)jKu7L2dR&C=jG-J z)&SiYA~JZ`NcRE0U(=p|m$EA<>)@IP(jzeah?I-#Khhl_LB*iAYH5j=1q1m5<5FSq zp_q?xoysEXTD?_gX#c15`2UHElN=BF&v76DAfg$=ilG*Y2B!n8=v2TYP@HdtETAdJ zgyDt3b!zAVF>`U?3579~r=Vu61o;gFiO3Phwc$`IsAYhc0^!Drovj%yrAru|efLNI zO3MJkY6E(`MC%JmW+eWo1|7um{gcwjFboemkDwRGyopaBf`A7v$uaTnTck#fLFu7m zye$dri^XB=l$bFS_+Vo+LHh>gfTMTcv^4F?5&P2}@N0T{b65{Va0LYgMhTIf&0L_e zT5^}znM6dQrxfbmOA~nD=Uk!yM))lTJaqYac~#->G7I%#+)}{(UNeGaLKYAZaxnqo z8{*RW2kipMLVZ8)t2;UIcPE70567E=0DzIBUzyu3gVYy&kuV&nihc{`|} zOcMPr3x1$Df~#Qor4><(3b=elNq&uwr@XytK={yYrUN`eXt0HEff^t*JbNB&_IbZ8E9V^FjKLn(OM5rQx%PxZN9pvkxSS2`3AyzP7E(%V`8#ivywtef^2sO)Y%$2-g-SC9})z>J7^3@Ak-KWo7;E=S4tG>#V3+;)(wBdd_YUEKu4w+#uZ z#TVfWcw&$i;PwAZtZA0}{z$>0@Yl5VvSDsf@mUNbFv_Y@V=<)g`GZ)fz3@avYVll> zIB*~nR=o%Ra{#P2U;Pa$K@w2A;Xz9CID*z>2bLW+ULj3#7HIPI(nY<|X6Jg1FKp*o4JrpM?6)8)4@zSMc3K2~>=z#+YDk0!8 zU%ul&ip3}ZaG|qWezeBCrOX8vlS&T^^k!*Uoyd4rEK;GD&=*?EU2}SR;V?G#EAku5 z5r_6;qSCO|ZSPEU9C-Qs&g%mEH;lZF&fXW!o)ycur`kJL{~?mctKt;<3Uv?FwtnFw zfudnv3>zI%=@X@EE4IB6OgggAW%nZpF#KLIJ!V$@=x-`N=X374G24vB-^4IRkVyDs zTh|$HBe0n)DlzfL0l;&?_bY;B$C#eVj|6KzaunV_T@Q8V4tQBbgc&}E#sXU0KBl_r zx;0h?VQpba`Y+1D&_@Ajzw*pG=f?`ql340R?wVOCzHxs8Z!Cz|8+7ZXYJ((kzvgM+vd=dQR6{+TGPI5rY2W_ytF#x8T=lxJP7IF0EWgi1>(+)$3L zQ{DY{w+9ql0}mu5Dt>iYA3RuFHL+ZaRqkC4A?NlGYWDCMRchl% z=fVXAwKu9=-hN)cdX8!LKz;lxs z(8WAOVKruZ0M3QM(8EH4Rt-<5Zy(wcXL&ylZ$B>i)PE?{l$rM!HsFLNp`KJ>Yg=zV zyc}jo%;n_oVxc-NezW(O}=(b|zJDMG9Ilv0D858Y2q4Dv>bC}+$ zFHkEC;lr9F(!SdsL-_D?&jlv2gJgj{J+<_T3VhqA+Mfl&su%4IeI@$4 zxZi#BWdC(bP4M4lvaV^_y-6=yvvpUx3$&PrhHsh$cA^Vn+D{I9zlKKk@DSt90|V3@ zT_W9y^zPC@N_AxWu$T~C3a;Pf6)20F%itl?xLtb-WVvU%eX1SS|)BC=jaP5-hwP z378&qzp6RSQOLI0V9{kapFxv)0%qvv#;8j>dn&D}+4$Ap$H#A4%CM)e{FjPcHNEp) zAbu}jygFOiWIoo9Bb|9F$*HAh>Vez6HehhVv$<+syz-79ok6fEdS~dZN$@R+y69 zugwgNT~T2HuJ%3#1RI|9;`5qoqv;v}`rn0R97j7N|UkS8^0h@LCVunNk-U z64kSyIvnF~(#!vDvr;qa>C^cqmSRI5@d$r_phZtbI~|C_#e&o+(F*S0^u@(Rn)Jh} zXGKIRK0XTNJ=@n@b5q^ezBO6?h0@p=x<@04`lg>v+6|gteUNa4EbKwb!oqR~tPU1; zM@L3jSy+;L9NPfvH44lq6{dgra*JuZKY`-h*|X56qneQalaP><#2Yd7AU9?8qrtGz z&T#6@uYRX%7R(v$9I(txn;<4P4#(f5{Y3PykTiRkIf!5D0D;=GbVLAyD&0`PDLRBG z@iZh{llOvT){G(N464;kedxx6Zv|v+wkuZ@khvKgyCZKnfTe&k7#JQNPL{U!V)EFr z2+c4`<>xo~uozDoew*ZHhv^7h%V7NGmzI5dC4rQUI91fpu!~N)?)1J?$Tptqwfcb? 
zAC$`PDFzo8vDIC=8<3Y*Q&qMH!y8a(I9;at^>#p=>Rzi*`9iBj zLd&g;lF%dMG4yF;xJXT-{mP(R;crVwD9FDS9CitxPGoJ4eu?7?B^BKYdG(u~oWO|A%BOz*_PYov60A}=xq!$Clp)qjmuziMd6+OV zOKEGfhAc!5$<*EEaA2}#mlnENLE;gnY)-QHcrO`9!_26_gT(H%IwE708;`l1KM3C2 z!=fY18XTT;Jk{V{{8&fV0oucAP0ce{jDCAJCInV>)BbWT)Yld-5r~H;01Dg~94nKZ zJo(gt1Ff&m&2uLWn@Lic(6uMF(NM@)Q*u&09B<>Z5mnSy3H%52(4I* zWDA^R1mBI7fnJ3KG;kGyR|I+85w8HKtAXHd8Z=0-{&^$j!(|qOH-wNXMXIB^h)u|8cOdjDbLNkbVQdscvRiV zI54u^fv$*5b}}rLcIz;?aS+Qg%bm0ejDTA&E zirwG7Zxt90-~rxv4ModOp(_J#ekYVxm!Nc-3bk3V2DOn;2o4Z(u$bDGndVna2!h5{ zwbx-<8XBqBpO9Vv@vdU6!5o#7p$-o<_>y9-m_{_HMy@z3nO`h;5v7_ZxU~djGV}~b z+Usue_4M{GfwaM&SXNecY3mBOb^OkrJ&Q~j(vkd2v7-#ofAICo7ZxbqLa|&$R8%w= zPmWg^&CCyTJ$p~Q$}&UQOl`nPwR;A`I^bF^NjxDXZv~-L&o_Iq7>7G1v^wwvnPNaO z2@VTO1(uxTh|GCVtTTjytkdeCH<)~qQ!mmb+p^ncC>Lt$ZP3Y?1FZOyLJf&*Bx@}3 zh*p3X3n|d3g#*ksP-sNmA)&5Xuuk~xCX=DnA`R41s9!}uBdW&WCWs?6B5p3qA&2q& znkM;nX^$#M2u$J9l6Ql0a+!2%=_x4>v?MFQp`*de1l`s|Ew($uGsPI2OJIk`N21y5^#~qy8_L>2t1=X%%%O&986L_9$ zc6V#*0uPcUi2KFI{C45;d;Jsh;$Kdq{H~x#IB9Kft@_4YE!gdut!X&^1b54f2miUG z$L3Wip}vF32F{*_xm>6rz@R|oD*>D~lejGC6>0@>i?$8y7N7>R+V?>gMAZ^1b~a$| zFJ8RB#e+l+9#3_LeaU9yZ!omuYFM})+s6Q3fdIi~2`OEjFEwAnr z=Sdp})-DSS?Bv?7Sjx9M?o{_bVR`xLxd!WJzgw|yJb0fQT}ae>jhh-ajWbbY>eipi zH+}m-&@D4m><3{u%%sHY?Q}{;3*Q#{vU{Oj|lV|X8ZADAzUi4m>X9OmHI zu$7RIsI94Kf%QfO&0N83ss!jdlFD~PekfQ_wouw_S8u3Hz%Nrv6p$<`n>}^TaLQtX zrx+OYR(;{)|;^VAaC>0J-Cq$^0m7{mQ4eVy7L zlGyZ@yGBNDKxfdWrw5SWzmJVLQ!{c?3@2j3qrNDB8LCMyANNQ8C+I2zBWKPdo^a&N z!}&Dr>=FD{@47wlivwqyee?wLtmLAALrJkQotb%3;)_KN{5u#`FmR@VN6SP+9GLze z`vUMYL<>2qfQf72cd^QV%wNJgJmEp%Y|1M9!rCo5%vt*9F3Jq#kJ-=a+qU!f^vOt5 zf&(yNVzvB_H->q@e9xGz0Z!@IjX#pr=!(a`pa z>b*U)Pv9EBUzmX^){WHy_R~sfDj9-mM`z>29dG)ItLK@=^nVO*SDvtN7SQLpyYq?4 zIEVTuyJn#r7-SGm);swOidFZZl2zNLM}(spC+DQmYbp11+h?w#n(=vRO4Imf0?$EB zPdE`E!8|B3qU?gBWh3G-@9X)jOmx_9{@F>t9K(LmiYL5jX&r;hcE(}fON)knj+4m_ z#kdc$a&;;4X*&$=jl5aE79DLj{D*Ol>LRW;4Mwp9;KJ{ufAOSlOVE2w_MS300>;|k7W`V?j!o(@Nw{e5S^ zn*rYTDE7GfN?O5gbML~P+(3x(tK_T$(mbs{pnZM z7|zvot{F;T#k2NLUcNjStmU$oeSl5SW*@m(Ps`;od#w3=*yOclav~b>xCy0_4@5N-`eU8 za*Msi-r60b%`*dmYIIce_+K)E-@)aEy7Q3J8e~!w(;3INR{B>g^gQ+{Yh{PN9^`XB zd-l`o2NP=tZ79mpn_w~|wcEYkD^0qTUbA-$Wb$zc(k;*x{SGSc60I|CD*_a%dmV_v z8_t=t(mSa~!$XxFI!opt7bxJR+YrW$98u2>or(q6WC&$l<5+tF$-ofIW+TmqL=?tW zL{7ksG@#1>_4I)xZ#ekLYaM(ziwn^^nlq&f{z(AP_;3F0sASE`pvq=A&y=a2=6NJ{ z|Nc7^kimf52sbYp8Q1@j+3}%-YUR5c(lIjBGzVe#N2z0Ok`$4LlNcN(yDPp&GbN{W z?)9uM=}*et9WRpEptb;7HVAfzvlH)rco`ruynFMP5 zFT_jvk>rD5DE&56vncfKg}tQ3{(DLJtB3}Six#AuZiR<2YZ=oflM~Sxrk0M?3WZD8 z*5b~dWAgbr$H`WQu7{0#X@2PK&8t-3dzN_DDn~qPX=}~rFGCY2m$J6qa8$$j;StH; zV1M8jnC&pbPT^)@Mv}Dpx!mJj=M_$r_8EJY8p?3slFr&ZfWM*M`Kv~ z=);fx>#5UtdE4y!?UDScUFxt-!y5#DWz|hQvMB&h%0E7pw_XUNU?=w!0HH6- z+S<>wPYL5PHq&1Cm~h4dyel5}92AL~lZzZ1_JuyHq=V;fHYmMUG;U;sn<3qJzdbzK zLN8t_%r6X1`9Aeb&qv9p7^B2(|L;o;yr=l>*1dQA9N4h1Nb^h@%}=m)Vm7~?Yw5TKXVywqD?c7>93D^0qLp&ep^b$vVypZzgIJ7t zd5y)hQC8SsWPRkcSvCq5xI|zwDflSCk5q|sw@pP$)8-jIv?Z&8MyeneQ^JR z#ld};g#Qug->bYDqeZfy=?S{!(x7D-;cTEcRIs7I6%IBwDcG$67Y{wbVC3s2X^-Ur zG&dQ_zqq-epsv0&cj6MGeMQ4$U=>&}FkzO97wcEU>{Z=<$BRC==9V#Sg z;Iv2k!a)BPY>Dyf3I}XnGcFQ=(VRY=1|K+OF^oJkFv!&*$jQH+xYmr*y`PePv8LDj z>PEcf5GZ=H*Z+>clvPv^{sLy+px2lz9~W`^8)}b(n*}e!50ZA`KCkwJXtZaqLvI|i zOFjd`A|$F__nj?Na6|58u-(A{q!As04wg7DBq2Tx5n2(Dlb{y=0F|0rZ2?*=1_imo z1p&x$4uVtNch~|TGWy=d6UbXecH*MpSSwbUw!5er?h;{_{jAaY`s1%?37P31#5{~Z zWegS5e2_8?K?rtY;u>hXa+QvkqfGAi7+omL;6b(c>#IFpB(t_X^~W(xk&pPF2Vne# z60Mgnahsd83{!Tps(V0l{dx8VX8SDoBN0~aaSnpxA+{$Zm{hAUh|MDPZYcBZrqZj&bdi?csNDArowy+U_ znT&ew-?LXw9*p`Qzk00LA*WghvT#U0=>R3bczc>uBihSPU?i{@@u_;VuD$14T=FdCRn|!v=hba4b z7zvRm^Dw#<9Im=q4AYQwcE1y_TIHs*UZpM411dE5`5b=I&-rbY 
zFZz|a6$X@8KpQxX<3D`ljjj^&_**WuA?GpcNd{v+5M)E>)ZlJJyV#_{3`N`k=&Cu1 z>VEiz>>%x!kCKv-AwUdp38Qet8%*9svZLB|=pHM+OJz;@X7!gjn>40RpAvEgntM}X+EK`g-?|63rL zu4`=6k9Ig(qP5=Sp0gyf6Bzq^e|4=^j}#R);K%1RboPH0N^1UND3N~B%bt)VrKg9x zf5*|$zuK-O36cZUcdRoTcdQ!95`E7U`HWwIh5;fH3@C8g1LH4tsdiXHSZ{^*Y0G;MHbVS51`Qlu8N8R7K!IX5+)K5Hqa*Y4P$+46B zJbrw)?+z|MqvgoF@bH^lUyWdb`5oq4aoXsM5e?@-4!xvt=Y-GkQ{xWxT5wdEs*yv% zK_%L^1!_AXXkW~1ph|G7ft2WclF_Rd$K0`xveFMs8*|SM&6RJF)iKF$Y z&2&}qppDjPF;7o_%gWQ}O5yNb@#z;YTv5nTqh-j=DfBqf_XMq6EK4`{=$pZ^e-fGd zzgT$}-iQFt3g$vlNB5HzA|eD#`VfIH%DdRP|1*5y;O-STt4$w4iZgsQAb`XE+ftw^ zpfL1~g2P_TcI%;b)Wee^+9DFq?HCXiUp)_Kv>}+4jgG)5q0Wa051G=f;kJy z<#5~H7F)mCQ3S$Pk1$lY<;bA>z+9eF=!mErsMVf^uT+13v}Ys@B}RC&3-v{*R8D%H z-i_>{>y}=xQ^OfmGcKRCEGN<2y?W6HUPlX2YR7)d{(0r99Q~^8u*uw5=AOT6mc6UngJpgB_s|P8rL}@k1UoBbf$!y#PI*t8 z9*GB7h)6vC6JTkAQ~*8QMq@cx<%A`GzrU6j_Wy|H{rf+&Ao}~VMHpq^(-mo`vwwL?~4n;bgZNRP*Ew`nGGAQ@p*_pWzff8g@?9T zsUMh=6aVc_%>zr9`r4mi=VDrRj|i)|a_=LaK3&iAp77msD3oE?zoNJQ`M#SyChd$? z%xaosU%s>w6dwX(4Tc&1W>*pv#U30i@$Aen1SQJY)6|Gii;~oQdnGBUSuH6^J7g%h zyK>1HMf!XAN_XjZj3>GkZ~L?861ZN~-@S0T+4kb)W={XA&;4zC;oPeS!yD#cN5R)1 z_PrjhuV<)k;oDu`G_U?1nN?q+3H~Q;`v*Mn_fYyDpLOA@UUo4t2{$+Y0F>1phYkuO zAfh~JcR@&t^@62fW1Y2Nh}zU18F(mwh;XmxbR9Ak6n>uE=e8SHlX21JX0*b)76CaX zl$TtNp;#0n`42hh&#yknl~7k#co_dMn@?C@&ywhjCIXz82U*vnJ^J{U{|N&9(@W0$ z2x1cS`JiUHhDd>e9}Cd~OP#GOi*t-;qsx+NsSNJzV419O?_`ifh zQBbde^zaySeGPj$K&P7k!@Ud@2sOyJvXGN=h??@*&P{B{gt?ZEYjPnLVd{b^HR zODbe}<|tdlp{<_)vSs}!1;_(k2ogAGMaLJI_djE&%3p;Y8QP)zBpd2U#`Vk<7al$_ zeymhOUjArVBN_Lz=u0|7QyWjURRw(RpL~|&GkYGkTj&G<7nPNya8QvwC*cG;K^RBq zz`a5Mfbv34#}o)jNV_xYJ>iq~Nq9~eqECS9Smbw(K5tiOk~vr{hF~HXQ=yGMH9h?< z6tuvL6;>AkTu49!`gO{n&7!Ya4*|cM2rm8KlBj;ro-b zt)>Y2aB=O%?XV|As|VG(f8C`9x9Og1D18922S5KvhYtKl3S=rf+CMBoAd$h0lrzUju!wyrLbg-QaY9PBAGJ3T~J#6cfD>t;2fzMQ3lSaLk8mwVkfo#%@wotg%ZP>pN;I$|;->KdV-4{4!+vll0>N;jC zeYzmKBwjP3 zy$p~W81i5H?s}qzMqScH{+XLuCO;41eV{Mx(;Jv>uyi;L8fkTbBIP&gfcuvdDsj(P z8j^xSLr+5k5+~YkbqHXKI5aZD!F3Aj)j=|t%Z=WuzkKMk8f*i=sb7CmHi&Q)o(MJS z3Y>$E9j@QKE zFLXG9$-p=!6^ogMG9m-OLteD|&e~`_^wL%j7y0@g5`u2te77x#=BPxwe(@;m#I|*~ zP|#hL8t9+0duw_P7z!9AP@QHuqCsB~uoCEO*O-rp#)(#tum_w2KWHId{uFG=XI{Q0 zj@A^^*a@ECH#7s+16Vtl%__F+P&3jF^q{5*8Dy(1!gH8_Z-I|=Lr@NE3Uupqki`gh zp`;1LOJkQza0;WnSGdjlN=Y*hVg&lF!{v>CR8e5yJ*po6J4p%><$rxD=;wc<_y07v z{m;+--xM8ST>s~1|1-J%OBDRyIV6TZVY6t|OyE#_2wR#2A09q>dy!c-)DrbKKp#Uq z2+Oje#D!n~3KbrsQ1+7o3tuw`XewD*l+e|PIyWE;r*(D`5y)=G3@U=?-v3+H|JB}? 
z$78v+Yd_IEE@jA+La7vGN@S=Ek)jaEkPsP?%(H4$W-BU`DMAP$8ntR&d^hAlb_5od6aWW$GpLw zdj$@$-T?vR#3s5`7;-2E5>O5?FU&&-VvT?|XK->KBZ4lir_iquR}*3>XJi!1eFcOy z0`?PoPe?OPL$h9!ZQVmW6TpW&)~I724#SBZ;Sg~*G`_E>c!&V%Wu;^iLPlO_so^v| zGY>nEc-Wx!5WpK@3$bJyKmxA@bqBN}b%3~_uV_EK2ppVjmki9z2{`n6!AG%c_yJ>f zqT&V4M|8bNqr>$wfdxQMLCiXE+FCVS`|^4jaaMw>0C@aE!dFd-E&y5igA33q8az^o z=qk(>%1?C!D8WPrhY|v)YR!kI;6ZQ(dLSI-F~ny90c-$g;Sgq zNQon8niQPyv*z;(etzKXZ_m#qmb5TdW z0mXt%04L`f+I4oN#X9#1Y=alkX3knIrI z%nn()C*c%6zmKtpe@$@9B#!wB_^7EtZ9}RMlF*JO%}Ub7F-z0>`1%SVD)&X6(S^(8 z;BDh8MI8+&LFvQ~hcq*o3POl>{N%Se7G-rmyug<$p&*ALv4UF^7oH~zPV>GyPd?mw zM}I3QWP%6-F7MMye-GoVLjWUu^{79COAsXbL4d@Js7wHMu=-%>#xyiKz#G^PkHj1x zF&T13OUo367*@D$mEfdY|L%Ydj(#g@+kf8*?&GXr`j%jJ`9kW< zis!AEAyWH;(Amep08Z4Mn)d<)u^32kQ5Oyxwbp&KziW>|y5;5~`ad9 zL@rU$&)I&UurceojNT>z8_WZ`h%X#s3184fSP(W^%}~F6`zAWDe!~V~N$RvgOkQ4V z7E&uecu_Ossfh^)9UGhZbKU%A^3;rc+9uO;bK-@;6c$}W!>9Lt`|TpU_#2x!@KA87 z9h`SWpp(4xBruT>hQE~`^w-Tpr6>)whbGt-vUZJ(wZeI|Qc3V5>ju-$o$;XGPjq0>>m@$X#QeKmRzhNYHxg z3qXN{xS2uTyJ{i6efw(%NWuNadMa7r*>C`0ck1MY4I#%ov@Q@?%?lO!2nMylc&r99Ez|2J#a_5VB{gZ{vBNuY~#Z#Q8Hs8LrRvG z0daru!#HT^#q5=4Vqy|6E%9YG_y~H?f%WnS!lC>d%f?VvWx<=p7rR@}=jqWdzmECu zhvZNKt%d`d0W@{>y2dC@lt2WgQ$nIG1MEF=FwUrkgJ2i&mp=ojK)|@>sY1#9J*vLg zMOg}^m{#^es{eYX6*fLsib_gY@G?nX0bg*f#Uqgs2t7Z?i!xCZ)OrRm@V}#9*N*WL zmhOP!xP;({VelQ(hg|Lh+~pGJ#Nr1Xe;W4K z@vYB5$}We3haY*UnZ+D64?E))%r;xeOykOJF-GMNgOiz`oeX=sEC8W+%-9&LI|$td zIxiSW+W4ZHV+)>luN%*Y3Ef^sAK7zB6KOD(o=EEN zNqa@T_5#Ash04YG0Bc>eg}~=f6(+&%pwwkH*nm8F@&x;roxl1-me0ULZZx+zpL{O! zbh)*03)9pKm{e8J$=~9XerIMg+}hO5mQNVJ*qu$`KNKUIx@W@Ou43R@S7)|LLianggI;P zgf}V-6t6ef!Mub}pRf>qTPmaAFs#A8T@Mi^aUcuq`ecZGSj?vPH{z-wwz~z~l0IC~RoXZ`gW3npnl=Wpv+meJ%p-X#>37 zIN+$#eE3{_SEBakU|wEcRIGXjLo@RI#amV$0sNhM5C+sp!j+JUE}f^S$7!MH4@qy% zU}FW$d*FEhsq2jhcdBEsPcI+o`9X~RiFqFEvv?FRcWMYR`?`-jxiyd!at7q(~(yQb0CcO^=)6>uwjSmX9B!P)7-}=U)W}aLywf?w6F&f z1_0@#(BU?hoB-2msf>xcJ`!M4KSKUYtcDbbnGl(z3N;Yll8k;o7;zZNT8M`y06Clt zB%UMCO6JV})C+=B0(sHO5)=1vW=)K^FJ2% z<6>_HxR|SBcx>@Sz3z)s?mEK*#07@zDW5zdibMXUlKy90zaU2CD2ICZrtE%vnDDid>paN>X<z3sJ}OqXWx5l8(Z_khcdV z6P6eZ%)mYQZN#Bg7_S>OC_+=O|Bl#V4_G>Z={U)tFBh^&cqWTE+2_17hUz%yMjG3o zpxxgmz_L6)rSHdNB(L7Lp}C@9r?v+WnsdYR*l6M@&^Oa04qI&Uj%pweXeMc-p2J;b zSo~daeV`%#PW3hHOk_Cv1g_CpcJPBkIbpNL_NH#ng}jBH4@{18l2`WTrxZAhq)KiJ zgtH}}RrVteD!56)aD--E`#YP2Q=MbiENP&D%wkF=fpQJ*Q-Pnr6oysS7r%V5w>XV0Rw?Ez-{g(t)y5leK^ZchxTjbqxRC4MVIeM zd9HcIrn!Zfzmbmvz%W-=MDxK zvpW%Cw{s4C)SK#oevbSUE7X^E!O57IPl9oZ9W;V3UO~)tHXYfCgw`$nt(aaRI2O;d zFiU#rYAp8JE&#okWQD^S)3U3V)rB#?;!xE9rUA0s_YVs=g@ywiAZ&fb?t5f$b(EPW4rA$RlfRV2?)0HP1ei<7`CsEDF5`S9zC*%%ARWX30`GUCdA1K9Lg+AWUr8nHh;0 zm1kQ`9IY4_c9vOjck@WicnxQDZEU;tZi5r7Y{T`SDj4aFdwi8}+JynfB0f7ANO&@! 
z1Wv#jl<3jXhqtz46J=b?Dclmg3>9So0XLW938W5Ks73NZH^lG6 zFrKfAl&7#u@+uJb5crDqyIcji888VU!T7TszAl?84qpvoScdQi9*1ElhC#8waRsAF za0|@qiFm061#JE#+{O72O^QCaOT?1TZCii{*ArVtgvaD$m6|D2PKQ$=qMB0(vof)G z6TW=8i2OUPx$#{6IIwjSU@7BUxbTGdS=^|-G6KE>o{<|VroS>G9EOq~}fj2v`Lm{qMqF1UW_x2$45>na zvE&%dYli5?>H(L)8!M|JbX5-a@X15;9anf66EyC7;p8KI89A0a#hugB&1P|WH9cWs zwu#N35d?1lVt{>jE9}}1BE)Ulo>7-xIEUW)E8#23(gJg3A@uo4s%rzM}6p11OpTdV3Mk$?%nKRJik{$|UJ0mM0c800T z;G_%X)Rz2>?fxJ~_?;lZDR!o%wr=#mIY~k7oqF~Nnm8s-5I<*BO0*HGTboIG#Chqi zvMj}UAo+LaLF3HHRxC@t@~V|9Rw!H->V-g!h@>%ZgW!OzzcuhF5-?NZ-4ACu!qFua z=u&jyx)0|%BqU$cQuAT|H8I$vN@Oqy#)sMTa%>Wabql;@ub^QXIr6v8J z^`HAXxP`p1AL1{VjwVVP9G|AxBKgKP!D5X#NAB2h6bGZ-d5d)(cgd`lyn~{W>mQ`D zr$wWp{FS4croS_+B95s;O)uq1rig}XWDSz}hdm3a{XiX(0VshK;oa)g{Xk>9V{xH_ z@QqPJehRRrK4%Cp1E~@>XIsY;^bt9r4q8d5BY$5iNa-!k5`@zh!TsJHJDxN*R)&jf z5PA^2^+*a0DJ#jth&mL;PuvQR4_Vq(WHy;U!U=|?XB%b?ar?enc%G87o*YfL;6veB z_I`SFL#(2LmqFf*b~O}8c@VHGkcolq0wC@*+m2+Sx3lxvN-S)A`p<|1&`R)n56abnR5 zF)U7U(r51;H&O6@%v>hz8r`T6-8jR7;+)>ku}^+_JD`2*q4P#|6}-+I0ZCZoArP=J zj=}m{2|S84cQ`Gacu!r?2_8e--VZCqIyhS14-HK~74;A{)Ms4dIa<{Nk9JkuSlNG4 z=GW{|UYNecVz3kg8&dUBalB~tl$MnABT2F^NI=Ec40{AYr*Y!pfZD5&ruUt#1aoQ0 z9TE`;c=JRaWP(Xa4!~6$E>Q%#AlYiZlWj(7GIBm7`h?L*O#exJ8kiRuLX5KM7!0;h zAkRR5O9cWHs9hhWrsnYi@}IRZj)u66dipGZ**y+gdA+4lc|uL{e-iE1@_KS|I=-f-JqM^34}2u{2|7S zV$;LRvP?+;z%Nj>9gDBOZ}YW#Xl&3i-iUg3V=GhMKW&UJ50c;*K<*h`-K)OvWzvKI zxJAzy4=}s8JpIw6eWax85fz25qp4%3FVC^(S4fPDbrc`Q+)Zm@8RTTK6oBqLAH6bA z@u)`@>DIK)m?CtY+(Q5~JkPAvrvz7c#QEy>3^CNUd9&uxu1P@`b}8C|8w)N* zC~2=jr~%<|sfp;Ru%$qKtIyWg#@OU(^M^lJZ4|nA6KV;_r~ukID+kvYlAJ(R&=TpP zwUOZKMp&}{l>rE0ze5RQ)Q0(#7E0;RU3K>fP$K32+}Hnm?5qD5s>B?mF%w#^5RYvj zD9Mv9@E9gj4JY{?uq#Oe$0P8wbKqE_M*=E_x?9~cf8ycfa9a{tMmRS4tRo!1Gm{RW zk*`V%Qq@5?=6=&F% z{SYIvDN5B30B`i$whgE=2777Xp9i94^D( zI7ra)FA`%J85trF41CI8IDZ!CFlk%E^I-)gm2e9=NHZHWfHs2(HmPKl__HU$=FtQk zFf9y9 zq&|+5<$QU6tx`~2hFsdKo8e}qqHdd@G%lAb>|LAQ{_>W6VZ`122!O-U3xyLGM#D#3cpg0}+0@%} zJ^Ow4bvimLO!i*Pd+7|NZj>&E6&SATQJ&e+j4NnexX^`$um&TW*v06RlasqrW(JL= zSWzVP_4F*~%iV`+ZqsyifEzuN_O&-z139_5F}b85? zX>DagancKrb`kv3JNuJT{c%(mqi@_L9O`s*qqqwp6w*>0+6Hu)fH`{tnh_TWRl1F} z8O5gbHAF^3L&L__*3T>#rYofdx;Zvih-q&(*ry(QFA~<*W^~+1=35Z-T(a3XZkZPMbWZ!r%v5&bSbETvC4v3ip{duAHR*!wYAa72Qap8 z+Q`^0FJt)f0Xziz!|6<(osp2>zteLDziVFRQ9~E?L{4JXL`$5ag6Pf`m5mt)#X^%Tj-}5XfyTz8V z^oNcW9KMZ!PY&IGZES3ckO2#PojcTe?f5>uf+J{^vyDl|=1dsKXILMX-_LGRJb{mk zG&y#JD(Um3Z@Pa+)6Nhcxw6Xdd{(ePORaHH0+_-UL32GZ%r!1XRvp?JtG+0=* z^D?Wr3B9CB$P+Zw(u7SXJXXzWM*eB(@Eh1Fm7*NPGEm?$-$hsIf66>kkSAJ^%EmGr zQOCTDg+J*8IKuQ$uud)&bBX1u~*bG>a-z$h1{^W_P(&(0|NKFZd)_qr?3^j-v3ocH0s{? 
zxUZDv_Riw2yrGI^byJWd~D!^xSCksckT)8YhUaY zBXL?5R~f`8AAv5BUwr?!U+lUN9#KaB{g|@-(I>K=lmjK4$DY~8_!v7mt$lJh$ZIR} zp7c}3fBedy4hpX(^P(HHxA%w1y&HR0yMK53YC1{@cCC9K4gb6dcWAh4-P--fmMcba zpX8Nuzp`sfb*r{R#?@P!f5C-Hwyi@6{u$wAm6Wizy*1u%-n@ODRvpV=eBWAmK9-K| z-Mib3(eu~0lED+Gx5|vyS}z(DxlHtS;J)OL!C=Or;CBvom}`PT1{UnM*U_I*cz7;s z_-=Abl&3EyjFEa^EJOU<*SNUo(^;=1+zhrvE%Qj*TUO&;=GNzR=dO>)*Box^tOQu@ zQB=+Q*xy$s@5iyP z6Vsz*sS>viSHCKi#|?Hfb62faJn*%7a(Z=D)w3hFma`mPvrF6FHa0`m9rHyk;wX(@ z)bcGUi!+h6%?VO8LPC2hx8D$Z;J%x~=c*q4sm0ViWJUc9mce2wiEe3*>1zupiRPG; zp*{&!))1t6+)|4=Gcn2d{Ya#!-#fZCX}@x*`vJr3ZC2wm$2vr&>n>_%4&`iRX7=g{ zkdkU>$dAD}oW>L%Ad{I{=x}MeY)EJ&SCz`U3X?4x-|lU(ee&i~33*_e=9y`C4YkI$kP(v%5Lyfgy*P=^e@j;SZdQ9$rowQ*z~lqhq|^ zKR(*>^6}*WGqcdP&G|{6vM*jHXPQVgC1kg=mB@9CZ4CX=OWd}W%xs- zpIN!iy~ouw$2;IYWp1DY; zUdpG^H|KpWi3*Im2$kN834K4gFVUYS^xy)!v}3)62NiV;j#S4_W}UPg)6#=zM_1Eb zC7o!O>Ej|vt<sW`7|D<7809Jp)_v zLTO+J=!{RYMoU<`Zm#3=^z?lA@ZmHf!3$sepBby?wiFxQ4)1+`&+FZui1Q807ufHg ztCu|g^iscwbO(u1%M0hr4MrjzzlVyVU9gM{sADvY4fvdLLZ9xoic2(4B-^4= zjbQnsCxy3a5}SE^)RZtfNhvvdADv#k8vIIc{CDC_hUyb%F-l3*Z{_4T8TW-uFKmfa zaJqb65KpZ2*Ap+iUB_R9XrQDWYCIm)@6WyQj`|Qb9QxGwq406{Rk7+B%8UsiIUB0* zI^3X^a2;F#d7%>}3C9(@H}_iS7zg4x*5f&J$#ZbxIS6^mUutnoDgZ52HGjW&)a4r& zhZiY~*BBk05N#7wsN83Ehr01YCCflbgLWT{Vt0M2o2?57C4LZ)Af!Ls6Q=qV_u;!S za(tXV&L};T>Fub=Twj55eO1R%N57O8+jMpbN1tv;fHo) zcF!%qK4o~mI#Ef)xxbxi(-CjlI}+t`I-^7@xM^#@-P+O->hMSjL_9Oo-*}{$xwyDS z?cJyB52BdbmxgH0eO0%SpRg@!)MA$um5U0_hZxGQOzyMs?krGOUOWPR&jY@7QN4JV zx(%9S^mp<*D#*~E?wpOpwiP1bYg)@(FV_Olr`+%b6s3lbh;G=hN(AVGJpYSesxh#To z;fcCLDN6BO(LQ|MXo8XXd2zLh*raYEuW@t7GC>2q-jZ{{%W@JD_Mk*5I!S&0vcem1 z;OPP5yLayzz$?>gB!8*|Wex)@ofjxyS~a>18gTf)zp)6s)J{|&1MCrP$|iFtUbY#D z#SvuH9^=fvrdoJFX*RcZ1EY+L%hz>!o~Trnpy1Mp+{yTi>B?uYs*K{|MA?rWU51o> zeSNtXTH)pq6BEOQ<{NBKfO?UIidhwx=SKcEuyDcsOdq<`ocYhZyj;_9%J}z-7cXuH zfst*~kss1aqYThG|8p#D-BZj=I5-(=+t#aFHJmptF4vt*o6e4$%O<;kU2`NW3yb%P zb&S6GmS$!?C{-(@!cWx~AaUvA_VKvr`VBjcMq+b>f~Z%;Zml5dBlFg*gJi7f=#Zx! 
zEu%71OcmPrI9MdQ#eyvb2u-2#j5ShB!mxZX1qzlo0%e8N zA>5m?Yrm>0HD!2sxR=`06cHXC{sOd)ju= zZr$pM*Dr5oW;Tvso;=e(2HxJf;`e>?gZpbRtNPY6rXz<3SB?dgTYruGes!yAS*Bv# zC}Q~Qt+p%ICF;d21oTC>Fcp7M-J;y3Eo!gNHhqh>o8M%!pPh35sXq=bUuSo`tm#`) z)3<7=+Hm_fZ%W$`)chSp=joc@0(ubA{J@|o#`|Ld z>z2P(YQ;zGxz|09Vg%xaG@vO77R!WWhyk+YN9d;PcS{i9sJn#9Fp z*if!5DY$y|sz3NjMXc>V?yNnxT!m#;e5<8RreA{HSJC>1(=ZGpx%!6pQ!QKWUnVXN_PJ;GJH*}m#Dj@$nhmznwd z3aS@LXN(F`yW{;z4!0Zpf>H=f#DU-BT?`{h z1*cv}cdogy#7u9lS@in16RjW%gxmO1%`&&jz74|NDl_@er>3tu@OWNJjjxA$O5vyKQiOw?9^MF(Bsu+e!8c5TdYy*U|m6C zcWhxyLsN8wmbyJxRpF0yg}ykN@6vs|8bnkCksya;L>HYK6>+) z@;Vt(Jr5O(`i%lLI|KoP@Kswe>HMmWGXqP{s^dX488NA;{Gh=wS-Nzwc0tn|fD$vO z+sKp~f5CVxzk1FQk7&G|O@NVTS8@7t@aKsrIzSX*(V0-V4;%U6Zvwdk_(iPKqOqma9WyG7!Aas27ZR~b-$E%Sf9n5EClydTVhll+E z@u}y~i^c{HdTEvTog3=Ya-0=^<|ZL=n{H}LQ$gx0AsLT5T4w?!&Xi|WCraonJ6$>H znt{x3@7ci9^+>wo)rJ#{PhU-Tr4drQ7ig2{KejUD?9R9dBt&m#z05n{Icf5Yb7#{Q zaSo=trtlxi$usuN^N@sGh#Qf(n zHNWap3G#BBx(-i%m|B}E!fpNX$6Xv0ACO2jrc z->uA!Ruk#|o(ik0UO)ZYx;f2rM9*HQC`h^B^W?W6I=EJjOS;QR<@&RaHLXzdsISFy z-H7h(+w*Hj5Sk?6mo9e87>p)_053=FK0U!K7+%Q{snZS7`acE3sI&QxRZ2+_pcQru z0hKP_1p%~vc>3m?^5bDwB+-9;K62&UU}6?du+M0H){5E$iF-c-4keQ4x3*xCo%#!F z9dWF5-m9|bz5;D>gh<=m$2C>bXT~SxK*@UHipH6)RG}(k4fW6Y&h`ZH1!L3z4%!3; z{EQZavE?^ET!|9QITb2yjgmV(@E|>^QPKPi|Eo`t9HZHlr$)w%$6uONgyk#_Cayhx z{P>HDZ*IPrvaql?C?q8GH3xYB4`24TK!U2WLG2y3^Vm#mN<_+;xR2r4-xZlCbphs+ zA0;jEda2a6#!L_Fiy1VEKuqW)VLleEAnf{@v%nkT(GD)}<_s>El_`RJAqF~~j}NYk zfxK#^m#DOJ1c9sMO!UZM>$R>2;(;X!MIp4b;`;DskZINUQR=JJTj%z%i0Rj>BoFkB z^kc_DwCU+d5)ue47z!^|oN#w9oI7i@*HQV-owe2x%3&qdHD?|Ad-Q_(4&J7XT9*7T z;aAjxpiN+%b3&p+fYs~4Ywp7ubBd1{`^1K4+1ZC*nKx|){nQHsMsz@z{zP3NhnGcR zL_~7bOSuSpcML8C^(rq5wR&D15fyG@vF?=WHTaUmq$J*)oSc#j(hc1}4;$`bWEI-O zX_UkW^!fOY>lqirN;xV^Z*Ai9rc&b^e!P(#o$f-+u$-b^EMoUMg#2=eM=(k2(%dHXL8J9V_wLP5Eto%!d-jt|WEQk;=HiQz`DJ=IW z8+;EBJfD1O=aKl}-wy6i7b=gAlFN#HoMRFajI=MPs-E1Z+j;??zq~y%pm*4RD_LQC z^6c!_RntQk;+y^Bt+|cecMLqEoXoBYF*AMqv~T-zQ*=cwsvaFU+}T@wOq~9Pj;Mzf zU)}-U1GE`K)q5H&lb*45jVRW;){wjjzgeo{f{YP6O)dGZOnH!1h@pA=!e zS1!Bn&(JEHd>8zFRfdw& zC#HfQw>441+IQjAzV?|4`-j=bC{eq~=R#;*1OHn7AgNS0G%&(?#23YYlA73)r76rv zRzT1zrQLMo7Zk#YfkX6723m?>LyNfEMw?$Hev0nrs^V$-m&?8-%ZwD+$lmht@gaUr zPQI)!HxREAJfO@H;O1iFLLb`CL{-6&g@>eS-7;e~t!%xZDniA0TJ zStcENT3Xr{h<>daZSC#lrz$4{2me^%5AxjNQVR<^BodAducOpg(EA7d>v{?n^atKl zHr5?;r0PkYY@cpk77ej!QdZ2q^T3RJ9Zb7DbdMW9SFlH4a7{W7B7lKK2! 
zsXCLxC~lpVI1*kS1gsyR(Y6mlj~Yot7`-b zl4E15Hc~QhYJb@E*zbKeCnKNld&9M<*{H!#>KH+cGzfqH=MD_pU zQ^|0VnkJaGRx=Fe8bYDxeD|^z9?_Aml$}e+W&-T2%j%%sdX1r|20|PiQ zMfDrKq!Fxt_m4pi<-b>6{^#-fuf3oz%m1k;tztn&RXXE+4_8hum;bi?c^qu}@&Y{H zU*<)>yL!|BURRF)+w-sTI75SfII>if_CuI79lBJMW(lHf_#ycBMd<^Ife2V1p8k!DYS<&fh50|8pA>1GvK}s^VkV}0^S+NgNbz*41>{Ph zc<)>~K17|;f4E)y{f8tc=i_8;ojN8Gl(+TvIUYO`Am)@_wXv93fZ29c%hxvRlbAm0 zYI$$h&{w~k59P~di>b_LBb?{Ypiaueiq zDF5#a{rn&sr5vZoxte{&XI49Bej)qpc#L=i@Ls@g0^GF#qegwTk z{8wNCv=4r~B@mDh&2CppE>g-%*lrjS2_xkojcXu6JbC;Ndy*(9OIu)&8Gv%QI%%7j z_$$RV5MUv|>o=e0UcGv?m4)TewKj<bL>319@Q0nXT z35Y6WbOfUtrCQFfi6TNmPg9eQ)l)mvxe_Oxqf^B*J|fjW5$p487XhG`_$>U3Kl$nC zUckl^R1oCAT?(lj+qU_WHq{X@;ZmixgYQ<7` z2V5ARv#iBk5o$bBv9Ge->&sjdB!?9&NYY5zOi~)G`rgemqXF)0mX75YI)zgmPvv#* zxAX!#TiJ0)8B`!L;sjdfH@NNBWh4Jm3TU&{C*ejJjIp*;U+k;lJ-2Gxg3b^S+E9R* zw>~MPAjPS7ATfxVBDtn&CnO1(MeyY^0Mrby+V#Vt3H6&Ym75QEtH8NNQMrK2q%VsB z=!0+_X!s0hGs{d8%6Fop?<_?Bhc1G`Zvv#mkofALQcr}8%n{frjN+z`@WIz{ty-B) z-$3H6f)p`?&A}8MP7s-Vz|a%nLQg zjC4qazJv+%_q8W!6^+HFxTAl0B{}_OQ^`r)pMn4Wo0mRM@D5K;QjQ1!liua6FciT7 zQ-zBQqfG9jOx=8``bs?}lUq`+0P;g6pd?6H@_d6BYnjb(8Nt=Sytp`z%7;^SIoCbh z^dGOvt*K;qeI5kE{v7jPR&8l^c6Kh!uQU|6S7wM3E<`1SIt0x2!E?E1OMg&m{+(2M zp%X)OpUJHU4C62gOSz6|}a;fSSoPxxDK&S0RU zlfuio1X;`rl=zm`Uir>(+YD(vjqq`MTk~H7r(>jMW9}i=3qXa!*BnyXZ_sQV>7Iz3 zYweLk$wbq?hUEiED}N?x43OZ}hV{{ZXk&r)P6#jW*hNx%3|A@J7DEuRHdru_ze* zU8bw*!{>VFxfja~}+h^zIxDZZZPbf*5^Xr)QBf+Mp zClWkakec#_ZBv)FR}_Z!s!6;a?;H6tzT70_4lsXlFp~r)8>3}`SVku&TfDU@;rY-r zHVWqvaR6^q#ZdZa#+fd=j3OH{%y-O7Bs3)Pet7(m`;9twO-4w1)#4H<-T-3xH%IBS zfUO()qBPVJ(!5gF)cANF$@#CFB7gSSAs3ah`t!r=LS)i>1*?i?hB2-ju;7qPQR3zdpX^&QOJV*ElJvzQXErAyNty<4tInz zK(ueF0@@4*K*byoE0~2Q(d?F7VE5w_gihjYA18$p1#8>?F=6h15)Ay`i4Ok<(a8V5 kf&c#o{`YU-=HjN$7uH>0oNcNGADf~eds-&t*u|Uw3pPyB=l}o! literal 0 HcmV?d00001 diff --git a/examples_skyline/accuracy_vs_fnr-skyline_formula.png b/examples_skyline/accuracy_vs_fnr-skyline_formula.png new file mode 100644 index 0000000000000000000000000000000000000000..3738ce64a7a6acf22a568d500d7250fbe815d2d7 GIT binary patch literal 43356 zcmeFZbyQVr_cpu`0SmBDX@d|@Q0YcdbkkiDD$+>jCQU3rHzi0Z-AXqo3Ift4NQ$&H zTN=K3bIy65^FGfnzVDCk9q(_9_l$84N8D@eweFbnx~_T6`-!sRRmweddoT>6#9h0r zieWpNFpMl_7diaKrNyxoe(ZF(gj3%I|9I@W{TTkd+xD7{1BUHAi2fyM>GfKNUkW*1 z(RNg`F>!RcX>W|(x#?(YW#edNe(SiivAu)2jWsX#1@7}_j+;3;+KTY-{PXX*ZR|~X zV&_G=G3+>oyL?gIHR4yVyS9dL-_~5C%>FC;FDnccEIg3i!SpUiUZMVxiriJPm&OWn z%ksX-X$C(kcI@DTA2&!y=-|gYAB+-yXiNPcahWn{$l2=&O-;%xtE(2@^X~BH zMdsx2J$&#$+pF(6Ew5v%f*_8Nm7P62Ihoqh(lWu_INtO|x@aWa^5^Ji;@V7kc~zdn zi3_~E#@Su!s;WOaM6fo)sA+BtqkBuiVtxo)?z$fuo9UYyCzEbz# zj}Fov;#ivv3~F#Vzh#Js^RLsAl9Em{efo7FXB{MdkG!@?xt?#`P`$Y^<1I({J=33g z-l!_+&i6cv-lE*;qDgLChVQ;>rrysb%$km~va*h+q-GET0s_jmHr?x^CHo8gs2D}Y z`~>@KlWjgfrFr_~i3;4g{k_rE^y~SGGi7TT_@(Ko3p4Sd1|p+T13cyHs#D+dU4$Rp z?8-8J`t&J*B|b4RRViA+qAM%DJJ&K%a${U6IAgLcS*@Y+#M{_dbSoU8*ShmpS2hg| z4Xd}ew;!{l!NZ3S30}hch6+cc)Yg7aIZdYLN9FeMTXrP5FHO%cEU4MqzB6fz(jWY7 z=r&A)Uih`Z9o1szDLt=*c)3tc5s}rjQT0WG^_52N4h=1RRu~q2mtH3k`=5^;Hjf&5 zyxH7X<2rs^s=2vYR!%N;k@zBxLoH!!#(S$c%6rodP5#CzQN(E?Xu5RCfQ*cc=jk5vr_Y`#sHnWuDaGe@6}x0|aB=aPo0})NEsU5iJu>T zVPWCOSaWQGWoO1nruRnb`Knib^n&4R3gNGOnebl@A2||!@7}%aj?OYq_l5DqXt?dl zTs3oKW@VKhy#h=(T)-aX`f>4BSL^z0B{e*Hk<+9}N1D!%UNtOGGguCHp4)j!a^iVJ z9LH{yt&GsCDJ$O~r(!T;@>;$L&n7G^%y#ZvQ_0rmT0%}vo5%8?FZy#~eQl3@-B~sc zj+(;tQOT_?J-g2|Be1DQe3`w@!R+w{i2v4(&#WE_khCr-Ev<*|mAA8dA13CO_5SNj#G5cq8fNCqn3&^kuC8Y5%X1Sw zh1o5wt;GED4FziP#TU1Xj38RTo?w6I>&qoBu1`V5(6pDQB<9Fj-6WlClcLp$R32A% zwFG&U+qdJy#Knh)hvWMc&5%vIqrT(>`Qhu-V>qnIIY}C}$ zWE2#sFy`~We|Id;4dvPnUc$9x=obftg(V+2B@^NB{SdpNx~%L|77iXBCCiSq=1)%! 
zvEmF247y>t_%jHfG&D5ScN-4Ya1(ZF6kj?G+u-iqyL7_#ad|fV(;iDU3J_s8IkdTs_Ezb_lFD|Nkc$82vOQe~% zCe+l`$sJ;oS2i(8@Mjb?Nv6V$u8WiUJcab6J`w5Cry?)^e8-L*hxhM~i4=92`10~p zce!`@Lem4o@9^_uW##a=PVPdWw6wH-NN0wADpzKSDh$M)UAvkf^hWp|RM)((&ctx^ ztS*bu^CM@pW9){iYQBG8i})VJXHE#=)=yDOR5TxNiJxds4TamW2Ky3G$j~o#*08cl z)66u`kB`puT>ITHY#{-w5v>Rmp3;z8|GeH7vWMh(_3l+*@*$(>(Zp;`8UHY9i6?g93IhNfh(fZ*&#o5v$u;y&b$_R&W53zqz9?~GR>N{qf#j4kjMhmRg*H+t@H zou>UbPRqb8K3w2Vio$CF^X9i#!o}R?-MsszdyBcm#PncMx4*seOz(Fa#5o-f4vtlb zV2cnLEPs7_FXA@;)2-9|bEfz2tIVG2kY#Hd8#^a6%eiq+{ruXW(r_(8d>*T!w`nV| z8;V%Co@)``o~ktoE3kc}{&nQghYuenIy0luypioaqSrKOD*rj3OgZ{MDb6mgP;JRY2wm(bXffQJKkB(Up7=B%z{Q_OuRz(CnmNSR*B3SbOEB{-PX3r>-vk3{=v@6AU#zefo5opI_ti=g+I1<(u&+ zxqfsTUHfZOqD)@|p}IZOP%_uDQ&H6UR};$Q$?M<7;^J5o!ufDsUS36WRrEv%A}P2) zHb_^&zo&as-@Q8rc+G6?$7ezugbZ_txbQxzGkv9v?($iXt_=V)K=M(90M!CZh|i+! z^ym<5z4rB$1*d+`rJDYJL)Zh3U9rM$^S7sZ3R_iAzZSB4E3xwPkiPp=u0?BtpMytl zk<-)X&x0~EGdCfhbMo`&ejEz2Y`I6a_jdmGc1WqEo2!#BVy5-90#>j3@RMi#7=&Bg zG=iFAuX1s5$;Dm`2nh&iywZ7qjxL3UOLHvDG)e^!Ck_1ZsDbN;d1DG@_h09UX! zR8%IKV}0?&iTG+J_g`-(MW>Pzqgq`0Jrgz4b;lu3W-fl4S=fjok-B2_s4*Gilakz8 z&qh#WW37Kn6V_a>zW;TnU*A-0O6DFtqGFtwR>fj2-T;tEK915cq#$}vSA(qEPhh2H zl>XLr3EhEFlIPEpHw{2mqczI$Sg{YC`|{;lXr-8<`;ebt(2Ey0%;~9orMDI&w;VcL zA zhZ_8k;4lC8A^&Ic&Ohd@?K*(GW}^V{1jsHNCr^It9magFZ;S|*H}`LCy10a@uy%EI z%@c_lz#NL@NVp*l6Z>FeE!GS67Zg9I%eNbnp=1)%1`v?-eQGqa{AMM&(^R(xFbp~| z*LRSxEy}K8*4fTey1f8;kLLQ*@|uhRjY&y(%4wSaJ=bdkKWeY7{R&7{JAg8}egS31 zT}@_9va+&0`j>*$)m@$)IqUfSPX74AqXt4V3jqwmZ-EVsp?CyrBuT<^6+v8n6;2;r zRl_yfru_$ERBWui=P8upiHIvz1u$nqdcdjb>V{vsbSYxtmveEH`?N4X=;SNUj&O2t zH2)hvb7)}ZGkXD%?>up)->|H#4B=0ICh?R94|e8Sb*a98|GxI?SI35}^@TJpWKp0>QZS$X)_u_XA~ zaR~R#KqVHx@;0Jeg(lU}aZ%V()ONr}#*Z$(qC(1Pq77$Lwvw>$9%ji5a1EWLSIPSN zdRwX%k0-HRix3C+j!RVZ`l+k_C=sE2Y;0muYmzOz6=+-L6aYyMrGK~t)PrR2jY-3! 
zL=n;u+88DwptigB?&akvT_kXDa$Z1Oer|D*4~N5<3|0BD2?>1@78DcfxqS8N>tDaD z5S6mGx0k@=Q`rK#u;U)R51~Bo1Omgm&%=2@t2@1Sb$FgC?w*oj_Xp*K5 zgM|f8syuEv*L;;ZY<70mq0C^Z#~~n0#3`jt8_1Iz&{*#ED-;9H<Mx(t2Acynb>CQP1K+HCyy0x{nWNo@QAt~ug$?V6yRo3~h z0ltkk65$DjJ9BONB~H}<=rwFADg|x{0+J1EwXc8yztU*w*}doCzp>jrMuo>??(C^-&$1_0$^c`AWUS9VK{xd&6&uvkPS@e&4IiD!~mD6eHtsfvZ{}}cHo05`} z`07|}cbR7~gsL1o(EXO`K=Wv_U{oIXhVfG@9n<(L~T3%;taAG?adqx^qG*)<6h0+`aqv zfr3XE0jUaZpr)jB@`?sT4ggDwgoLC)5_WbN6_eOlMC)IiNYYYRQnFvCI_x_oHMN$t zbq0%hOB}8ND>Z9A+3PywH$OMmJXqU&m7;idw26)Qi>GgUZlr)%?u62!rt&da5eoT zym7%|i(K&Q*IYof&WMT@x2%*?b^k~7#-8!w#fzr;^(P_303+Il)8n&)m1urcRaN=S zTUcmSlLNNHeSu_?YN>o z*;%U0*&pbki^z?={o%uh9s8#Az7&zToPt*x1GWar2(gwcGpV3JTP~Da1CL)N6JLVd z){>%;y7+;DIVmSc9h5~7{L~Z`{f9+BR#=?M@3#PMiPFxG&(DHizI+3+$TdPqNy%Z* zY$%vLlqENpW09DW>@icv=+g5BHnWoC`s{rc4naXJW{=qqWH!ZVY4UJm*q}K``X~m z`AxpQ+Kab_snAT*eq$QtCIjqJ1O8~>I{0v5lT_s;a8*m1~JYY$ck=}V=~M;3l2r2D6ds>vYk84e*fwU>7*=pUBAA{YlmUVJo*mx z{6#QsP7C#d#l?r~U!9i*rrQ4T;oe^TJ<6xPky2t(;##onXUew>LiL@)0H)EA?QkT6 z@k@OCco)j)*(UWOlXI}4DQPcU269KoXQmwNA5Vt4#CG!ULyHSiPA2T%iHX$Aj1H&h zGPgnG}Cg`aKg`^lW7VSjctj zX4pAtNOXX=vrePK(%y8ENVUJGSltH-GnyYzN@D z-{2PIy{CcC{kK)GQ>Q+DZ7ScXp1C^~rPQ1Gi)r*TgoKYr&}Z`f7>6&y-BzAwGT16pu< z1S?!ZmCuej`N?@M7q@?FBQC|gSu6@yb=jI6VuQ5WooyQMd+K{4BbO6 zLVuLlZh&Bx`rW#?o-uxhRB-bo7Bc}4G`qOi)>G)f@3Cy_G})m5_(=`0MuJjwMlhF_ zEa#S?(dEkz&~(8*0Hvy?UW3$!e|uoLjkM&JwX>&_Cz+%9Wr%~~NeE z-^(-R5e*~A?HB8c7A?`M=EYA;W#f88awcu{$xYl`4|bw$f}=tn2)jO0(BF4u?gj;2*Z zymgw&?SyzPEB$DninDWG+`Kp6bm1ryA|gNxn%rkfwZ>N|DCvHdI5va41=clo2EeFZer;UT~S#)Aw2#M$wH3 z(NDBG8b85t7;%=5#R8f|o1H4PKE6Aqg_BPr>$_-qB~D)63M`(at}<$?Q9 zt7!(@GJ0VPbk;Mzp=2pkAAbn@g$$E9AER46y$L2g9kGS{s664VhQ4DjQu;)_kv zMMM6giy&a7LZ$NKf&7BoqihznI?Pq{LhpfskTs`{2yE@Uh z#+)?9ofbwLJ0U)v0bL!hFaDXyEanjl>&85FtvMOl@X*mzlx9{&UL!=>k25R z1QtXH5gJ^0Pj6GA!oQ9&>V-fhv57KY{q!n-HrN?5%}w~U9izX4TP zEltf3c-Zth?qYc;G=$WfKt;LNvBAk3q+Po6w;t%`n4N$}@}uRI0c6zzxZbIfQV^Aa zfReXDU7`^`Js$40wbpM5)dYZve4YyL~VtuGn2gf-uNxF6eTuFw9 zvIN1F5EsaK5*(V z+#lXf7qNO4bI9uW!G}lN;JQ&sYeHL&xf)VKAUfnj>75^D)0zcSKTz6$&wPEs_$fIx zlYC}s<35Wkq1^gC`a2rl0g|d~toicg9iR8$C8Lj(e($?#%enQ;+yeb-pB!T6WN56b z0}76+A+5zOHrWu*bdwX`E&)O8%gvfveF_OoXUYXqtCmVRtOsTW993J)g9pzf_cE&9 zy7d-{%j(+NN!P6lufP!4yWlXdhXz{WV@^a0C@)3k1DA3M#V~$IV&1m<{!7mY@e8O5 zG=Jn6#2JCEGz`Ce`_`gY6H0#EU@-C|Hfx#jjh~(tJhx-k45X!Gyy*ioOX&^e2 zr>3gf7JHQ%WehyjvLlBex~aj`p*7QMSIvlWDNFE{v=C$sTniK|-=fkPq_RKjpr|AP zY3}Oz^XCD?2iC0vbG8_3dIQ)F%8{jd(|KeS^$b)s)}dlzwdEoJKqj0JnKN*o;)gg= zg(9Yqvrte0rwZ9;GD2M1kdCZg@M)avn2M+i!#5BWK<0{vvbVldyGD13TXsglk3GGf zOT7qcA{LASGY1FGrj!_BT}srfj}+DF)N}A-0#4X0!3t>FL@ zNt~1-KY6p?|R zzd~hE%Zx%90v|{#>H5WaElyht(OYdX7w@CWHEvj8t5r!dn1zx-NEu4Xgt+BpS5E-V zrs9$?S-W@dMnx_sD7i!}jDD%BQvgQQ1A-L%C54a=Teq1kRu5<@a>jC zG57bJmE4fVFr?8QX%$V)5&L_7kdhbHK>kRIi@RKP4Ae{ufbZOs!eHgd?QoJn(G(RM z5t-sOZD@lBw{Tj+QgF_!;^|p)oNqhda1S1XS%0wNt3#K`S=^feyF-?snW2{qT-u=_ z;PK-oSgVm+NuX!}u+fL|xH2ex5^KMBafBhU>T@tbTG1Z!PXJB7g&~TMi!&RjxQlAC z?9m=?qrFyf!2WcB1?n`O<>G1umkz({>RO`XM%gwC0Xh91Z*hWb4uRp^?VuOz^d+!47E%v{n56?&f_YS$o4| zgBR%x657E)G|^j}Z(T4*4pxgvB!Phy2Nt{*!rJ}){l@xG2`w5=c#ZZd@mH4`(j%eJ z>IgB&5?oRZhb+DB06)VI2Wz454X->YxwWFiUxLTDyT67J0xUBF1C&-=`7N%MTL1j= z5>gsJtVrG@8n8;(x$G7~4w!i3%0K^KKYmw~+rboE>*u;8{y-r`ByoB#v zX^RI&knL|2bC)it>C~GJ#bar5pclL$PIf}IRT;KsubWZ?iE<+=OI!Eu`%b${Gv;8& zBsnK$n(o0UY2lAQj{W8QyYW(Z=)oaA`I1JnU(2FRLbDQDlq7WTVgTWQiQqJ4;$7aV zF@SYFpD@28r5ScHof?b)`Hwt!@ZdB%`&B3-JG*dDs@}YLGt&GgsDBQ9UHi*w zGZsaGmVl#UFRT#9F5^%jQRG}eFT~w{JG#(H#R9EyTP$An`@e<9|4n{Nx@3R^P{+Ci zJG3QDC%XYJ22$dE3TjQjMhFbSHY#`{izT^(KeTCAa7f4mq~$hn3L+&gDCjWMkqf@M z0e3*i5fmwfl0F>Qm2BVqB6jhmkiw60+=A1{HqwKrWbsD-(@9Y 
z_u~P}IQS-zNN_b7Qyz6KDh7+{R9$^N(zYCgA%Y-ntu6wU!60F_0(wdT%!%B8x%zBa z8x9;CQV)^XXn?5n*pd_?`y8(EI?!iKBWv& z7eGh75wVTMRyslJ2te2gZEflsgVf%(YPoPx(w&`Sym4SX1y(uH^yWTt5rK4Q1|C*! z2XHt*Rp$f_WfZXQ^r&#c6+J%J-J(x`q6<2B!!TrZ&z0d&ZU+?QkhLzeY&AtON-P;7 zUZ-;ydaOe`9BJYpebokWAe&0AaorKb4Mkn1gM57_sW$$7S*Sw+BYG(gCXxheqksq8 zNkL(`wYkwS4B`wVkK(Ukf;LfrCfRYwdjt^0>F1XN`t~)f+zWq`w7`dR-y+Xdn-CIG z<*r&i8--yLr2cm$q`^i8YGo`KS(1~IuE9n@)y}#Iu*2N>_UeV~$K0(tF8aM2j3`NmN$AY>ef*f_ zM~eB#9*DV%A89(_n`iZ&>ukOB^}KIRV@+fc`-B`4ehv+ZARb5s9(UMT#4m58sXP}H z-YQ1c5^g(AsE3rTu+6QX5IJ#CRx#vbB`YLROZyA#`*&crLE@TS-h3ALaIZYTFRez++U-x2P9Eg*~ zFcz52SboXK1a=&doP}J)t)YgWL)+KjI*s<+R)FtdAR&#(@fhn}f1UVPAJYz_9f_ zT7%fY$v0Pb+=U@=0H+cxsDJMmn819cv%a%Z2h3;shnKiJr(5sCd>;7*?jjL%*oO53 z<$~DPT28}|xCavwp_!V3-O0-I11zEM;q#?*dZzk$2bM*Y& zYUz2{Qy#UCbfE*H=gj{y|Mrxg-P)=DxRimEvW;Z-?%Y>5EuD-o_c!xl5M{_Jw+gL3#DsnrQ3xJ{80||XoMZe<-`t$3 zJL_|XwC|WgQZ)Fi%x=J^ft4lQ`!OGAmgR5CVW+X;xAP!=_B|W6B;@$zjZGO&T}Q{w zM#PCzuHQqg?{#->tl%53o3L3bSngc2e>B><=jq;{oV)b9G27^wawYVe+S)yf8%v99 zZIZAw@7BQNQiWe@rcvqVnZ>*cFuWmjZUO2FPm`co$e@ zar8*))tm|xzPsP&dah+?46JiEcVD@zn(v}|2M*M6^f+8b{!B=1Xds~=d5^_TSOS>TMu>jRlgF@rk3K=& zDHHkcWEC%`Suqq`Lme#2<@FI9D+N(8r(xdhV#raC3taNzI4ir{;2@!1bCX{t8<55sIF*>0x&AD4o&R@Z?Kwg%VwLsUEjHg}Dx5J|aG_>z*k_+k^(G zdo4<9YrBybyTHlZUhem^xZ*=Tao%R; zsS=iETapz>_L?u*H6m~?eGahNUU0VP7CWZ{MgghvE!5OPL3aGw4CFW0bIln^Ajn4h zI)#i|ujLU7&EaCNls{AjkCaCf(s1YZRE#DFT|Sqgre<~;Q~p5f7aY0q>XmkW!_&*@ z*OZjB5)~t1lTh$OO%UXqCU7)!Mn=Pz0e0)S^-!w)Hgo23Yj$nb-4yV{5w(k*cRzi6 zEhOKifaNtL2ATse)10R`g-(l}c(d+0Iy*ZbNKsi?7rNEImFgOCXcv7>Pmdm$xJ5#x zFI@_H{rdV~56Ib2UQy~aaKCdWm_l6#wsv%sh5(g183gKyLfA{3R9|Uu*OG*eX$eqW zlT}b?2SvC&j!@|dGG}l^#2FmG_~;-Q{%7rjm!MlpFnnn^{&A?|j~fa16p!(r4SpJa zjY7A(ZC=$q5>H9j>fQq0v42O;8MfA7G64}PSGE(%d`X|S3|7wp#kfySnbM(aHwRr= zbd762Y?FRAT>YuDT&LkH8_pj*+TTYwNM4wk!ONGj6i4!7KNV*709>!R(4>Oi3+!j; zk-^r|5S|adX+V}r2!+w7>`Xb%QNt=G$7wjy5|JbW``_#VI1TurSVPGyp$9hLR`8G` ze-+g3EQf1@8{7bUg`c~54}d0pD%qK&Uk4-f!Vk4S2HF|(@=u3O-M2s`iSaBkuukiy z?MfoQd2`rBJ>85Ui496pbez7@7OgTXc}F<2-A=b2$#Cx$`Z-{*1Eb5T2} zd$3WOig+@30*c;y* z7)zH>(zc8TJIyye-14ZD%RJQCUSFU|xT?!;Ikvu&io)u~mBCyj$)* z6oObl6+0pQsNnqtEXiO|6id>l{Jq~FBt19R_4y%w^OnnClCVzOg;|hqP+=BY_bHn| z@Y5zb3J8-HUb7xpHWP?~(7YE)hB?(gs zNV$MX+|Xn0A!u!^II!Yb1F}Ys>Zbvr3dP`<0x29lVOLX<3Vd3w1srRc(8L1umL_2V z&s9f3&7BzkeJ|gt(`&AI@pdsqeBX>>AX=rsqL>87Wbkmtb%8=6C}6TKYi_;(2a>aIz@)FQ*$1K>$G-(0;%xXQ)_ctN1+@HBJA&zCg}ThHk_ zVXT;tlN1D6?Svv`ue^1K!{Z!zHLs_B07}ROGtygKl z{SO>CfGikb1#{{cLW}Iyty`!_&aGbr-w~rbK|Ybv+NoPF62*FIsHB5a@QiKwvndx0 zo20w#6A}lHVpc<7y-=l%HvlupbjgAuXv>=5U<99@3Y6#SA|UCQfk=SdTGM4~)=*MR z5Rs#ph;Y!iq?y)g_oCp_{s-n@TsLKIM|)SW&s7@Pc)+^{;xLk1VG5#_-2Owy$d&j7 zs6z1B{rEI-hB3*ZpaaS+=)TvsDVVV_t}4h|L7wGutS`L9wr{(!Qab4SNUk6r;zgjHv_0-()aBDS(2UI=4nf^OA@|NH z=qEu{6sXj4dWZ=N>(tcL5aN)k3}v16ygPcxJc}nrtuMz0z2>5!fgT>&4 zU8~DO0gWpq0|Pgv`pf$ZBLV|s2L=ZAj3y)`OhBzB)lvV+lP9wB@ygc72s^Q@g_wzQ#;3b|1b}zY-$2b{`@ai*jTf}kkWVw{?86R?vGimGZ z{GL|bwP?9Px(JRoy^tm7=LP@4Qc2@85YRz5$pJSh2nk#;4B*lhu|&O6M5rw!gO{~k zlUF&+!Skfs2a?s3S|4*YCo@uhi^g0G7ta0>Nyb|!dLl*XzBu`+Jd__-S0^(YMH|5# zirSUI@YkMxT@ZE5ynOjGT*9*m#iNMO@YSuZ;+a`2=Y#kzNe&cf^;MY1uo~w zT*%Il>YdDAL&2!-_t!Aq$vDLdMyL6sxXrY#+xqpc6nr=V`n$Bmlp69eC^w0e|6W?^ ze6afvA2ildYwpx56j^!3;@x4Dbqw;vP5v$p24qh|{tPJabXf@?Z%F*nhOM1qU%q;7 zPX6Q}IK4k~gnd@_xy!7x6Tf3YX*8Wkx65~$+~cxd<#TFn_fBi7e%3J0BEduXO~hg3 zxx*Sf79F$=QE=_>vSnRb$?u9Q5lc>~opZh79a$0{`A+C==dF_af~l!*DyKHunwmF@ zw=`D!XS6n-c+g%5@;+%eKH0jvxxmFv#rrQWz%2y8Mo*y z8J!%P8-Kgk`Js?K!F0nJndzrBs!XbQ^upNaW4`B@$0GlD!xRXgDWK{yrR`e27+Z2e zCiYCX;JLmJuiM+327h~5jA`%pc+welWBp^WdDhVb+}iJ?0OP2DG0*teBV<%a*DGvk 
zH6|g@jZ>bdGj1-sJ4e-*mhp%xe@RWc`tc!EuWOWo5#(kpZ%tU0X6w+KuxU8GZy)t)$#fJjF8ka)Gj~dYQ3FkTLQwSi05jK zb;+CzGUg(uC-dekg;FTlwfQE`&CK5ymbuP^)4Q}uR*AZ17tzi1^eIs?-U&FX(IKA6 zc|l0^h|XbJhUB!DI%yeCxlGRQtDJQ_Ks*5=E%?{$%_jk5p!!xo+aY@TNior#GcO_| z(?Pmc3T-}@k||q0+om9%=$viy{g^)vN<%~d!T6$mNBj8t&Ud&aR?TJ)Tb69Ravjnh zRS5q@J62mO8@W|{(bza1BBZ4oS<43&VnkTwrh)G?@usexHRT>r=bZCIFpP4%dykEogJ z*d0%*Ax4jbsj(mLhxZ)z2hGtOoLahN9)-xc2r43u(C5A7Nv%chs!XJDr=|5@IdtHF z|KaPI`5Uv}zOU}95?#*e=FYnD;-a{vJxLL`KZ=G!t}nug1)M;^2#dx28x55jBnA~G z3txPn`u26O!x~Gpdr2miRHHDc4aQTX9HS%%KoJ#}!^a|5v1K8+FCTZ74+VcACJw52 zy7v1^XaT)AA)RyM!wytdT^(2QPI2jTt8N`8k&3)oa$jlaR&=v!j@+P<4-ZH-~!x?*kB#n(i*?+QFD$VV0`?68Rk?SY1lGVJoQv} zKWlmaGKa;*pkKwb4{VL$RulR%kJNQ1x;1qt%qV1aXDa<`yg&`B#Eh5< zJyGn{Abg7UHLtqvV0UuPTXovkD+){_z{uK)AMbLQ!fregEsr}IkyO|k-+i|mlvK8_ z*GJZjO=L+(GVYL&_&Z|-y3^|frUQBoiE_E5xuD-D1PRtJWp~A;o#D8knx)MUprf<> zvE<}Xn?{E4WS^0Y@)`<0YUL4SuRvcn(3YOe=dE}NybcOw*U%7V+TDmv)JksJtvFv- zdYvU>!twXrnMJ&P?w;Fb6gNmQH_DhX!@=o?wRuqjJ9sB{n!?jPUGc&^$c}Qxz5#Lf zU;b1(wc?fbo-*d;&?RF&z7crRZ^ol*8a(|Pr8_PX=k*Upi(zy$ul;WBrS#=(^5HS^ zyMtl!AaRvF=g%X-&iP@K%NMxyDF)N!tp8+!7jA;uuLygZXI^`I*7)cXQpg zzM70Q>~-(J&@Q+Muidx^GbcvJFPOP9(6YbaKHNk^u=Cr9G?`TF$Ou2P9^DQXSPlgb z(M`f%cY4^G@9F_R)xZ%6b&Su#+F;>B$|GB_%J^JgRwijmDs-!-NTvJ8n09%W6v1)% zoxY)W58lS|I`uk9dfZa?cD;RDDB;fK%R(P{(brQ7hH~B3kk+Gu`%p3G zP1f$j<)i^6*@VH^R;AmF9yFVusZq5CJsUXX=`vsiBdQSw3=v9>eq=W0X?R z%k07BCzl&*{;j|+7I-zX;sE|^`!6$=^IG}I~m4yZg=yV7dch5t_5Lv0f4Mx22*=H%qKs#l% zZ@GQ198TV0fhOWEdCb@t9H>&*cdN3I2Tu0rxfPq-{ocqI;cBRALH{}GOhdwqTr`jo zOF-Xn!b>^}vXBrjC;N2XyvYH>4P`*9r+)RSbs;YaXDqrdKzErA_tE`t&7pj*308 zKI|Xj=cyEc(lBy`i*le4tu7ssGBPp>CJO(ybun6@}gY6m2G!vU^Gg<*m;)|f1q}ueC zLCvlepd<2GorWS2RO(y7Bi#-SG)kdVu4G3-=I_8J>Fkc82P% z54ne~Ny5-b{OI0YwmG1t)BWwE#kN?P|Ld?F*o{4=pxR>C-y-cF0oq5zIA2ah1v(3c znyr@{uYh=Zmiy#RWm9ch25exj5GiF$!@CPYSjACTEN!%!u*!)M~}9|z40?&BL65^~s4q4n1_5K{j5$jdMeew*p; zj*^vjp6#3aLSPYUh`?r9kH~@l=H8V!4{8#+S-S{fk^!y@7vkpaJM-q}i^8%@vT<&1 zsv5yVFK2slV9=PW!wz9(_c$z*xOzY^uCpPX-3?h>l(M~z>eUBYq=cVR>N!;=(Vu-H zAwt4FIJ z1v!wM#Oyxk8R;1IZrHr7`U2+$ZrL^lP5u^zIwB%qWZM zeLA%C*yh(#yv;9Sr6+nytnJI+ekBfZ0fzgZ+0_!c8o@@1UOo1pSOF&KbNIS`$o7qqq-y8vG>a6*2tUYJ@LUA}$ zLNiqZh_XdrNg?V{fCeQ_2w2bv%Dn*)2L!V1Lo=YiM79)W22P@IkS}@M%e}Ew-nF{Y zsCM|kpmyOi_v9T|$pJMX?iWEpaS(=}BTWiiN66|2f~?-^b2|Q7w~$}xT$OyAXt1A( zE1zxR`!=PDFH$$)rhOC?J@YN^ySP3rO!6ry!wNWp0Vr$>;-01ME~wp69KYBETs9rJ z2afQFnmGg9g%FYz!IEaR+;CP1i_&Xr; zH)NhY@(lU9pe;tw(OfD}UHL`^d{F5J_)7zxJwDe-g^gy<#_gFe`0rK%ay_V(fJ_t##&^uIIx zkz~OBf`DmK&PU?Or!7xajjcvb?rCENq%pe(;7#ON4Gp)VgP0%g!;^5HkOFm4<)hm> zg)aT^z+b;WkT9mX_79wNRAuKjPU3zmwE@Tq^O33RXiWf;@^qhn(3K-s$ggr*-VU)6 zFMPa%=qrq_RLLgh9C+0nvwdIe57cQ;{SRyz`CX*)**Ua-Txp8+WtBo@%gak0yvmHO z|JaU z!@^X3Cc97XL6Nv``=qy$r}b>04843w`2555YweF6mt3h~d@!uUzh36Xqa`#EQ0o0V zM)o*V$M|E;UDnQ^SGFto}|uEbRSIO0bI$1uB`C2 z-T2wQ{d&v$najPh6j8jwJRVaUO6r(u*8&ErsQzZPb0l>;B>su=rT8_E-K7U0fMLfs z-hI4AR3M~k9eep|20OYvmM^q#?zG;%>9w?TJMvM$!&u;w@7v#gkn4E`q(^L41$MqV zI6DBu@R>G5Jc}}fZU@x11wAU@6`BCE{G3#<6zQ#p2MV4*a`U17&wv#72J-xX+tDpa zwt^IWI~v91nEx7G2@bZBraOX>&h3H3&T-x};B?@q|= z#4KST1L4SINR&mc7fc`A<*{{`t-56IdHqo8yYf|7a9_O~EF({TDI4GqVc zGhgGCfLccwNmxPq#LM!;y%6wj=F5$bLj*JumF$0i7ANp} zbYjPigOXp9Okvsi%oyKZpLM&wy@O*84EoX%q4I~<=jSdn{e4^a& zFyZF4l@nm zk@K=`oRg%9vXaEM_dxmV$&Y%X9tXD{3V`k5{R7=c_7D8yqX61;bX5P)J+re4eD-q} z>;-RA#flYBUyIKOko|YUL+1@>SwuagU}T4Weuw4-)Yb_nas{FK>Zi-4aKIl^5}a0w zMOEDS#r>bbBE40lN(I-Aj9 zTyQKz>9bX1V`DhjP>v839i0i7Nx(5V=`%VT3HH8g(Y}bp^9+F{C1OgU1MTn)w38*u zM$29E{C!^X$0jXI|IqXLZUf*77j{VK^IE%uYk?ijic=CNs{3ugQBCMXCTPfSD+2j$ zZ31DC@3-tUgxLCgjaruN_LUy0{=D`_9MU@uf+g@(RD}ZhF89{wJ;)NeDC{`)Vqp@H 
zOEQR2l;A7HFtG<53`b}GTLuMv#ktnKB5-g67ayNGOv{2}7hG+2eCOD|;2P+*oQmik z%vfAp6iKFZ@-_P5Da7W-T-_vUr&mH<9cKsJ39h`c!vUol6l#b+p@|~Xx_C+e9q)j; zCEx&$_j13*;lQ#q%~U40pD)SNnq2g(&40sTWfnaJc4#K;?Jwj{n`5D(Z0+c9h-aTa z22U6~f8XGKuEUMFswm+Zrmut!g@E%V5RL>}ZaR@-bZeu0D-nPy{|%?ISiJdfXvjcr zZm{obK`*{sG&J|2b4k&W2I!m@@TK-jx37}B_&?cp_oIpn$dcm9tUaH#M;VVzmQWk% zx61fru%nZue%3%YF}QX+;FJi9U-{5<&nV%c4WQgzI%Jf$M<;w^-H0E$-|qC4=;+ZY zd-n*l5l&el?<=%A%u5B^9yN+|xskRY@!Uy)o}?&>0j>teGJyr1m<5LOvLq!9jbJA4 zbvvm4X1=Lgpq)Nb@8!h|=gc6}KbX}ibrX5)MqjN@8g79z{SBPAmhp(2nhg|QiYk$r zO6|@ZJNn|(bI@|-2`1a4xuzy2MON3(9ML%w@@y6PR+Yiikfv!_+P;fVKtn@gb(qJy zW~04yml31#(y>W8!5xblu23Z8S?Ph6>O>e+;3bNH16}}+=zg|}nl7*BS#2xZ!9hVu zB_(>$N|M`f;sLS+!$xwn{v)@hRUx7mEflEi3r=SiT))SH zA4-9+@St-v|4rRPcm&cvUgQ&P(ouBc<9VPzmFM;uf#tLcPAjpPDu)9Fj6hSNR0&rC z1L`^861PyJRaIBEsUOt8hoHP}JDv?mcZgkb<;l%skKRKEsnOS0=c7)yn-)08HwZ$j z#}L>e%q#ZXB>&rtIK)iWz@HR#(50u}^SaknHqX3IRpXbW0q1tn@F zgAtF|g1$5soMtNV?eOzPICO~!BK;(sqSFCyNSCCG71%#_sTc;861t0C#6VRMbvSo; zL7QkG1Tgv}PVmdaC#Zq()5D=gL+cJB&>=H4uLo7%C^&;i*RUKot{)wLiiF%wACV8P z6naqz9MIjLkTmbO4fRJt96XD`Vnn@(nwwi8FuPZWcEzaNC?Vwz;BAs(ENIiA1sel2 zd+DNG1qYf=+^xl8tk3uTKkdDFIMsXGHol}e)z)MRg`|WC$y`X9s7#5JIZ~N257iFI zP*R36Q!;KznN?US5;A0nSe1E}d3eu{eee6;&po{N{k*^9cO36~98Z7jVf9_#^&LLL zb)DCFo);Z3+2hS_(|ttPL|ldSM<10KRPFgFjf18RVRic{Z%0wHM`di0sG8~T0zYk< z4Y_sL^reD{tU`S?1ZFhK_`Rh{Q(1OZ%Cfu}%yXMa+S-7VVu z*+)urYW1wSI>6*efdC;MTte8T9R{CGXJB0i z6OGuD!;KnFe8;dO2gZS5b^&5Vr{45ZY@AXb9_>4Yn5%ZZs&q7bU5JwnA-vfl_R(qO zhRg~3EO9y@1yTHVL*Lbo5km8EoB9+9I7$PHdKDp0!3|fAY#fIUc@qx{1d02mBlp=H zB;MZeN!qx1bMnK>1r4W*;A9&IZ(}}amLLj6#o%heC1SGeH!?8T zCTD?9>isefddg332y#Bk&F8h#Xtj@zu~~k}`58!4fC{JVBl&V7XeLPr-iU|bKdM?OJ0LneQJ0*;>B3ltvh_X ztLzIc`uAtk_lzLzVJ2pCxF0{ZOIfeAF-}Oxf*>ardL=62az7uRm9a1qpC}P>JMRl` zR|$9^TR{tNSnSORTHqdY*8o_TwVFYvJM`HwfCYKVL8zPK54Sxw@EXM-{r5#piG{e#fsE-3 zj~Z@K(HaL>|;q+))!eKB9UKIMw*m2=AwHHysX|xuo zmH^iz9m=KP0ad`s85J8? 
zoL5A(DIGk>it%_5f1g6Tah{n9rFytr8U$Rz!~umHCcp^@2Z`Gm4Q5;-3`M)4q`E-7 z0z;s8viecZLPG#g#qc)5_07NySRBmRGnvGolB()Hb&7^5Pldap394-^D3Q z65T1-T3!L6iE5FS|D!nqjw(a)TAm;J|5p!RYYPh8F#Pl>9oUs$^+yi}yhWsUB zWF;DGqQw}B7^8}cio!o)ZD4`eoJs_KmSi&f;dXUeVlmRCFXJ zCNd<|9`;x&%_kyKtqr5!^URq6Ydzao7w9B3;D}aZNy*8gIxjC-euG@teX>m77aq;5 ztslQo^kDxuF*(TspAg#@u)JCrxHfVfUX8eiSWxb$@XGI;ao5`^pPMm&%uh112_8zg zpQN%nhMH}Uue{<$#Mn6_3HBtAKS_*7tZgVyT)PJMvv896=N1kQUg@aE8xcGuSK;0d z&zVyoHp1gQ7IHjPOgsIh`YVyw+c%7xp>9jKuyxa$v4o&-f@x~grLU`-43HoJZfuxC zvq}SZ0SJK4SakXf7S|V-z9f>o3G2B|wskx)pdnsdShj=V8qKdV)pyyQzi2m6WD;8q zDE_*)&2x&m4tz|@4l6OZTWg-*JHAAIHgPUH+0rFT&CRXA4Xz-V4QP&|Z7)WoeL!EU6;Fk_gq;=g@IC_W6y6O-w3m$@f5{ntU zt{U+Bj)jUS1=5O);6+Ma&sn!gW3mxcIDYIWJXNeakD5HsXI-Qp^zBanZcQtigpED1 zDasv{!7>6Pmnax+9%*SDp0P?6$HmmS16D&C{k}m{hN>auuA7>ecoMa1oATs7*XD-3 zUoVjRC$dZ!*+B9eToECYq(9rOb@=#kO?CAeV||DzbpcNkZ#`mUqAM691$t5>ad?6c z_R&_i=@g!)sGMP~dvQi=4l9Qy(iu47$H4XU6ej!AbbXkOLCG;nY*s*O!me2d7bY>c z83$q>fs%E-ko)8jvJn9WCQMm!5tw*H08G@58YlsRw2G=%5=5220_t$3c2lDlh^-w) zZ^fQwVyV5S7ezhIO}9(F_r8=OZtHz5`DaKw1tdJ9?SQqgg$rk{rTl#IVyNu^afE`Q zXbN=chXSERjV7eqK*A`7rPQHSn|6T|VT!@I!xa{zfpJ8D3NH$nnAG#&O%j)zQ>rrY zxGduCT?gMYFmM%3Bk|15;n0Us^N4gHz`~bE=KgrLy0^V)tH%?@s5UV;89pZNe#Esq z7Sfk4LVSmMq}izti+l>~d-=d@Ba;F~C)bXQxz0^?5H1I?B?z-ft%8B===Wg#vtu%t z&!m+@B9`B04Hw~c5sXpvl6Cq89G*a+s>d(YAO<00XbN^0y}fb}Jre^2R2B)MKH3GI ziAFpoJ>e~rNQsm+@V~?d1b>4Ss~QY|+H4zB(oTSMi9ml8WtD2;6ajTDoWyp5B0`WM zL~38AS~M(L3-XDPq|D^Edt_CCOC<3^!JDeCaii1(G*Z1*eX;o{Lny4IC`(K^@Z@8~ zFMi&l0*QDv%wV3wql%zD#2b(>*i@_|Hjp3zHY|D;G6b%I+Z~>8vU7j7a^d8M%ewtN zE(q(0@Pn{Ph&f8i*?b*@n!!9W@R22!{TtV=>6+h*?ZQwB6(zE9G2SVIgUZdTnQ|Bu)%20p8#Z32Qha6{TpnQoIg z4_pyJS@Mv+o)gT?%v}ON!Ou9}TKbUKJ&|~X1hLRQTcF@US_eHTJ0NpD*@CrTc5J~_ z9xGlQ9H@1lOc&IA&PAXsiC8@S{L0cL2x@AfK1Hw#z(WJCo^S*gO6b@aV~I~2tXQ2u zVt!8iD90KtIN(!Qemsf$+yW=Je!RI2f$1f4i2 zdF+Pl6y1G`MwehK(YrKvDN{#C=Pmcvt^45s`GAKwZ`A`avjwWWG-W*{b?L(4?y5|c z%p@rwj*plwj{#7k{&W;~nVX;wZVprmJSr&doKRF$R0#IbQP~r{+wx z=NC>n96EV25DWynudnDJjVzyskL z7*KJrXGsD|LzFTk+aQe@pv1rTZ6ZY&wKYIiwKxLi3vF$Qbgwp3B>_e?Q&X=EnjRaV z^|{NAXtcQLPQmvER-QGVj2#333V!R8(<*q|6tKcjuP=mtb%@%D6VZrDv7S2S1o862 z_HiCDR*srO1$Qn)4O9d~`w(HhGe~#HdBjURx-lb% z%~pG!CuSfBNDVF+P4M6elxo*!p%lD38rwWd2{iinTS(Y2-`P!Dzh*G^xX>GZxsR$9 z@6fe&Hi?QsyBUUwrT!!LQ6=~D1y+Ulw!b+7k*7(;qYF-Q@T$^;6%_>b1m-8=3#@-d zo~x6J%-so-cfk&pj15=~eoGvm1iOjYwB~;s!Sa3rh1O^G4@+3@SF}Rr`@i}DeVm8xjD)-5Qj-C+S z3TOK+2RFMTO&h6Gg1PnynECOPh6ZuYb}AFwJzMz7b-vpkXTfKg8YRSoBFtTlnp88+rR^jblaN;?!1Lis;VB zazvCkpfT6x43J$F0m{OM$UL$05Z@nypd-2^-eE~GF$bV5(2SII)jjJB@6v_U29MG> zU7NH}LFmdSAdo`MI0ixtv8y0{NszLH!weZarughE<&}1+cE>|tpf&e=eK}>-67N5d zY)q>YtJjUio>xlHh$ARaHQC|H4hO=)iQu=Ak!&z{Al~lqwJY*kt6w!uj0oX;W?q|k zA8RbBhY?g3t|P=rj68kPhY9OHc|3&cgUoh6$aV-NlkHlOgVq8^j>Qw<11uvqRTc0Y zxgp^=5z9EtUIG}PiOFzGy=MLBEBdu&h&EvHl<+gZ0ByubG>t!jn_E@g-7B_Jw63T7 zr&ll$D}~`%{S99(?d@SIoVfX5*XbA%u+|phjZ~yrobjju*@qx3wpA&96|KtfF@p%}*!``XgT}sgtG!T%0u|Y5 z9pm}bFUd$}!5peHrf}{)=K){ittx1WKzQay2lfKeAYL7DIvr~@ds+(%5z0IUi}Og} z@krSe;-P`ZK{QHQ`mWo^rsbG2osDlkH~GLyY-$j_OdtQVVs0qco0pi+B8qYyh^a&lp&N)Rc%rS%qiCSrA1*Z#w zFa*qi3K9J&CW{^1B`;tGlM#W<%obKnPa#;!!A7fh_T|L}5pZog3XuV85@^bkfrUg? 
z#72~8Cj$cmk!U_f?S^RAx*IZ{09Vo*BY`RLs0NZo3^K*s=Uh~(5sMH-ECxi`WE-!F zfX?$u7>NArj1Aa{O&DMh+o}~5EpIX+wzHW1f{g~-Th=S=Q?LJ-ZvH?usc%lj0HXs9E3>h}`aS%qgr-m(=HMV)vcm-PqzG$PywmpN!Zm2hF zm~7D;0*68Fhke~?2fTwwpSO;GB50ZBs!k2M^C;je!v~+p2~E zYsV4UNNgg&@;>*?QNnnsk`7WJJBe~(=lXDodG@V8+QG;*|p`I$~M8X{) z&Gl|*zXaBHm1;m<#6c6o!4EAOanB^?C&>+YC@4N!brk|>#5ErU97G;>nfV!tv@dz( zrGRAAc35h5K(&W8D*=VFBr=o{RuczR zf-(}5bJFrl6#a#e#HZA`aw2UbRyD|>q3%0Fpc2v?j@axjNm4#UB#a9}?dKutOG-*mQZ52Y(S+qHjQU z?fUNRL>qm)4hV+%*JRc4W80Hzcd9_%r;gMBxaKH;w$2w#m+tXkMIde8{>a!7U8*cB z-grj3)~&<`59QVx$oN(K_s_%LJcx&EEpQ+Tj6+WqGtdo~^3l2FtoXxYu(Yj)INf~K zom6VEzY+^x9u*|`A|sX*wMgb39^tx4RutXzSFSA2)ZdClvV+A3!nfd67jeH`GdaXy z4XGhRks4Uq&tXImJi8hFM2`35JVGFGgcuaprRk@^DILjcE>=`|_VdX}8&PL3-fR|C zpa@P-;Q`?7joCI&crIW|PXy4F>5hUY!WyX#ufs}8H;Dn!?`Vw~cHuytY2zQe$1s@S zlYfnP{sAr$&@a#>qY^Q!?8H}klHdPjGZl2QM427FM`A{Aey0N%9Wy81-u8 z(0veD1Ol(SJmE6Vn(<5#@cd)(wez);g$ki2xcumjEw3^Kw!Er3>qSt4wXzfPc=IH$ z2V8W@VjdxI=VkZlhs2v3(||DOh)puj1?=!)h%c~5hBLLPHvwb>17buUZCDIeL4aH>Jg@qwmmhk?bp82FibW~9#LA#stEW`MsJbeAfSndZpRQ`W;5me z*OhUgftjExfR72T3P;oP$!GH$`|7b;J}Y=HlJc1L1rFzH)vFy4k}fQb;m!z)nQWr2 zO&Mi~eO3}}F~UueGCJ9^pr@F9##BKu`^MaZ!x)J3YEnVu4vHu?)PmcDJG!7>dc{M# zzUp|TsMVvko(Y!0)tfu`VO4R|3_A;utM?wCim_v3iG1cw=d#H#{PMZvTlY>?_DC4 zNUFBhJYhgUr+`2IsE_)$>?Sa;{_p&ntoh&5t`H!wiWqql;Te|MSyU+oBfDq(3pBt90DM6<_Nl6{D zCu5r?eG`IZ5%mGg4dQ_d-U}%0Lqcl6?e{Zp*O6I*R0_3K36F zwGh%D2+;%MV1Wnk(3;Cby9K1A9{%NIAU3^P26UO|Z2;P<0ShF}EKt`AmWA5B5tEvy zOKyHL0Cf|x>Z%5T3~#5?@sSn#IrZ&NyWc++i~F#qjU%TH`yARd~95#nL@lK%$lGxasfeQ4a31lv_& zydTrCYUN5%Yyt=;k08vPLcip;YdZ-Oa0LVmDBPL@k|t({m;vyiAD(Y*`LgWv=@xtX zd4XL?my&%HPi2%{H@d&**2%IuNm;`a>r`IeFO5t18tQ*btVz5kRL3H);`ok$)k{jL zW{S!I>FME@PJa>>_^ioV(*Hc2S5zpD^+3b%b2kOd#cB>!MwQ9BKk9w|Y*laOqw$f+ zatYIc>1OAdcPaIgPG9Pepu3r6u~_76uPF`7yWfCIhg8* z#8%bV$cp5~$J5hO`^Cl6C|RE`7PGUn8;a$e<>TTamNpIp&aG#BPSR2M=%g4qq6-V< zuv5vpG&eVIZk?d+J*b<3NIna6Od6<syU^)G508ia#+zL!UKU@X{qd3dKOIj zX^5`;S||Kky&+U+O10y0h#^V^Wg(yK%hB#wg-KPBfd{kL0P=-8bYj0nQDD$B1r|Rl;5BFa7 z_x48T(&-_<2-MT2;Tk^HxOow}spUU^{!B*$H56X0Q2*Obcy6c{o=w)x7C{$bW=6)y z>E@31XfF;QBtpAk4b1@*?@&!`?R`v3iqGp`{Dq&te5vg0Jnd}*D&X|sM_hp);=;$D zjp-#&MZ?hqk?$AUnYhft2UEU0rAbi0R$H22>ex-mJ4-{42Nx5_>1 zG(Ej|n5HD^yFOh-;eDLM9}~;n{_0w@mA-zz1Q*x69eR3jVi}cJxY^z7vR!)ZzAIbo zIVgOvncrEnN+>RT8D&_(!*gYAb>LMF%3CP4cGh&)GWz*(e-sn{Q9)0+$&IhRH{yEM z;LWm!tGnAetsxnfd1_{wZ5}Hhyo0rw+C`^-xC4dEjMlxBZ}(MxRr}C8uJN_A<_|8w zYKr5NWvq#zvjucp1{Rwon2b#?^~hTv6&=Nl87ZAJXAF_F#I_V73pM~4 zB`+vBr$uAOB&qyDePE9KFyE%6A&4|ljFKD$_ z6qH`O=7$?OC+a)vQc_Zc5eAx>nR#O^EqfI5ewVsFz+A+%NS(PpQmqrB3XBQ)N;-fHk`=g8{SjK;$qYBu9> z@0VAq@$2YV7l~6PRXG>RQf$hK%iDVPVv~7rD7oq`(>Lrm3u~Y&&D?I)sW( zDKe}P9scd9n3*s7kGRjj6&vKX9@&o1xYyKvKbvuy?a__yJw3e@NjI$|UAl5ADs-2T;-N7=fpbOT76>V?$T)~#G>G!`(*rssr;?$IN?z+Z5f5_WF zsoiz&oxo|+F3FWgJv?;`0#{QW?WSWjzojBhx$*`hcpHv+wE`pfL5LB(dQxo4Zlmv8 zSH9A0QM0(%v0D9MtR-!>d=I1h5Yvls-T|@@-0w5vXE4stw}0Ny z#aN)|(QSnzaU5OeHP3|)O`_>v{faO3`HMF-@3N(Y{6fc?_@&C^;lrD2DC@c86*Ac@ zt~%-IUD`G3uzf?=ugfV#N4b7A-z)0)IAae}SWh?C(q%Rs)IHIaJc<$! zs84NPcU+;(@$C@r!gsjw3rBcG{&El=k;vT@H+i=%TB3I6=|%-k9WAC4`jO6Jmv1ck z1)7tP2>PoG_%=OW?DBd;K^ukq^3 z(l7>zg=4QDO=0d)7#VBZ9jGA@Rd>_~%17FYBraC1R?kZ9$cdp1`AZCH zrdswmrwCnYzGBF*W)1a>R|~6b{g8vQqN0)PotE`qs)sB5+ms`oh+{ykyn5ia+2OOxVV@eKm$B{)QWB*cwAknR`w=Ji*QGx;z{80? 
zYCy+q`7@@rO2YWtgpcRD)xyTrL5v-&PeiHRN>tt5!dSlfZkthjA56;gKGQNbq;~Cz z5$ra=zuG^}4@8y*r_NP-+cYFSTOTIBoRKQ8P!jf*i&amL+Dlg}G;*yxq)q!AuRht@ z4(llvReYh`;lwF_3sTv|%f3k74cEYZRC2M$42Ipo5ZmRH$*uAVe!tmlqI^@XUD}Cm zJkkWg?iNWnvHf|;)oVr~jPte%TD=j{dzQusw)jkac8?Mi3Exu|E6;n>d6}B723r(5 z4OU4ENecc&GkM(p7-!yzBb>T zHn+22Bz&NFx@Ejz;&Z9&CbV2pG0JFg0B8M(9;-&`MXg-sq~Eq0pV+cKKL5rW{?hHY zgO1}ey}dsjml8ntDOSXEw}JdqTj%;(sY*Z%eGXp4+QQ z7*8AB4Sd>2$wx(D;;pjmx>ZbqzQA1g$yNpcSJ4F!vEN76ew%o~fSsBzlJqvGc?2@O zRlG0|_1pWgcU;>}WztQhmvY<&+`J9--s~)hHQLcS5YZErN_9Zjf+M=@waLF0hV0*!YLTM;@7sWadUGzPx9y&km`mTY9*mb8oxq{HSVd zeEc>9#~k*}R?5;qtm6e&y)PY#P;oLjEsTfm@#&2Wt_p0C=aDDGNw#LW8%>QSl=-udjb&O$ebxvf`NpiJw z&b4v2<1A1&HvQ6FiUZ>^ z3**7?%9Y)7KQ7PlPEAc6*4rfCf_DmY`p4$qvQ|oE$XMoX8{G9$8UYCg+qr-lUJ6)Yw-w zwkCwR=Rp07M21SF{UDNPGH^!PR;yUS>@%Rpe%XI3{&P&>Txnv~7hh1&e6>ZvLTE+R z#vaAST$2463uf%bCi-f_tt$EEt7PlQHJhD!83|KvaIjQCL)h+rG-$B!SK0|T5R zjj&7n47!Rg8a6p5v8{aHctqm@SKk*0$yFSn25YAqC>;%#6yxHm09JE-b`M28Ffm+l z0yo1nGq4c71q=G2qjLbUyz#pz4&wwA{cfYb*7f@030TV&j`hEj8CTpaEivVypr9B! zbGqi6>%RT#FAPw-cOgA25N;2Pjvo}vAFXC)X7(l%-b-$#Z9V@cpSYgv)XwH+7NNYc z1J56|tV?1_@h@M-08e_%zWK7knX-@$q1@hQhI406ojTQv>g9w3kol^b8qpt_U%q@H zwFdT|8OZfwXiuD-!IF~NB&h4~C~?PQTwi1E_SsqP@kqb7DfvMk4hFr8R65oYyk4|w ztlt5Oy1vf7K0h4x89Aly)du4s5P|)|BvLY>W@;PfL*@qkIH_f6`J49^cKq-j|5lMd zlOIgucP?0G8ewg6;r8@Xo!srUWX7OQy>Dg&ML!nQ8*b@O+|>65gJCM9u>NmJrWvB` z{{H@n9*pi2^}wTK5Dqhu#l6MC>pB^=VTYXeMpRtKYs*3z#1IuVF>2+^f5g=24`u+rM#j56u4j?_BTwBJ=f)Ru^x(Nv5%K~*oxXzFcqwui z8el&Ml48Tc!9PUILbIjE%f-1ZGrhxZbDwlvVk5;h4%a>bFitMjqN%AVHb39BIGej( zT`$vIqu?<8+oYlA_Oz~DBz1QAnWXPWrVS{zG+43tg4(*VD{>KU>i5wH!p>1=j=P(t z^`#1~rG;c+nFnuf_SD?|_m^L* zog;(Dzf`LI)tQ0vakEF&%t=h&#~L|w*14NWzO4+LU_2m7!_MUdx^R(PeSLi?8miH% zj|d5zSEAgE&cAWjQ?p5;aVMtZI(-ilX^cc6q+7>jC%?6A`+?LESu=eVpuYC^QHs)O z$vTZv%mRoqP6Vf9@W^gYG~as?y~9uBVK*x(1q<(>%is;pUFVh54!jslHE)zU^ogX< z{_5B}fwTL9GiNt30l37EURgHnn$G!E1Bs9GZ-^#obv$?_r)y}~FYQah5c~Wdy&;f6 zohK&Fv`%S0vVOl~Tk`V5Z%5uQ93Fb~(tN^S6Ymq2zWTNP*H8h;U05i_3&!(>B_x>9 zIKe;~t*o*79>xx{_afkZb?H9#ICAAN=Jd9Rl@YDT%z=luhs!$G5cJFVVvW}m-z_Wj zwX}-oW@A4xeRYg?^oftor{_yeWPd}ZU`c@c49x+}|2PR~VhFmw6t*&igRQ zHWj6%&+h2f_&iT8O;k1A_PIT!L<6@^P?9|r5{45Fz|DrxDgFL$T+D^!8$CD3jwm*u zSM+1Y1OKr#QCWBkYcY0VVChXmo#w$`j^uCuwtr;95oKrPZ9UySA^Y|iRL38+S6@PsCh?%_<{0 zoGVGAf#nd$1jWl_2d^=*t(M$p5VqU#q0BT_flDwh1XthsY1yK;Qhn_AD=LL>=#tJfm+$;m75h4h zL5bvuoG9LIfa%U{@`PS})t`<+@lrA;H`fr0xfOg7N>M^$hijkpOvS*=aWX*#>>MeM zxK&mJyt!AEHcJLMr`#W$DdN;ANDI`^^WYdxwa@1Vn|{gxgiu4Q%R?A8BWPr0k4Qlx z^?LFxo#LCCDP1})jZz7_^z^dRTOSN0k??pe8kyB}c6NFbLSoJaiN}9C)(K&idw6&@ z$9U1{g0yEfiXNVaPuvkZaXYIrQB04{|KqUheN_)nk&|~0)?RI)l&been_j(?;<89I z=%jxv3F+vAnOit@h6^{!pA9naYzQ^|@t`kOWJRT#$;xN!FZ%N6s76v&yl-IB z9c9AeL{$VtC;#DDxWsy5cC!AYfK)(T_to~JkA(60v*_=wo|?Zg>&yuppu|rRq#>}u zF-V6Ohra8TJ}f{oPNS5GTT zyj|+Ib;vI%=bC$8K{>mcvis82bK6LriFq&nio6=%hwR$7z`%*!UAFn0C_X&Dam_gf zlTiB2;q2N~^s9Xh9_*uR4yXK!|4_SE9$^B^wSwrBVSNnVvwHmY_>&6GtJ-3jM>%g zebA8Wd`+dj*6GjxG^*vZJFO-u_FVbpR-5g|J(do0-Mck0A{P)M-WP83>9LYa*Ad$E z9LrR{xmm*0Kcl?B{*d-UAp8C1At_{wiL^&CWY#(#I_fBzi^jkqLUw%?*&Q5~0Zx6RUNwO|0kO+&Rwvy@2LBI-( zl#7dmTQV{-N~uA#2FUrRPnsNdRPE0;?Nw=Ay*gVwgof#8u6F9wCvT4LV8APq|FU?@ zJ|@uyd618)YxgO`-(gW(TSdiUya7WL%qN9H*29g8RuB|=vgO{~TyscAk(ZUI_>YLG z$$BFfhBivVeoITss-`A30_S4mI!Msl)6a~G#lUxEpL0a5RTvIB2Ei*f$gf^KF31dU zN-W_|C@9$cSqLQzTk;&!t-rt5{_Wg;e4nRa@TO2P5Ybp1S|L5MAQL;RCw^!9cFAw4 zbt`a^#Qc1j#-=9aN4dzMyxh-bm>fg)LCOq4GNjeqzbqxZTCS(ZxX1M1(Xb%rzV4lN z@O?kK%WxS(Pd96}lm;mmym5-A>{i6APR_9TR+q|{O{=J=uA!%VKjg90Gk@?vqvP*W zIXQk?pR{~~Nc4?G>!PlmPtsBr75A5}{~RKt`mSK#5TYUB!6?FVjV0j+Vls{9H8*-GXGwBu^bDII6Lw+3Akx`(2(7?EEX=9lg+rW#fIAarh3v 
(remaining base85-encoded binary data of the preceding PNG patch omitted)

diff --git a/examples_skyline/accuracy_vs_fpr-skyline-order.png b/examples_skyline/accuracy_vs_fpr-skyline-order.png
new file mode 100644
index 0000000000000000000000000000000000000000..ba45fb6549543282de4c380b51f379ea94c80510
Binary files /dev/null and b/examples_skyline/accuracy_vs_fpr-skyline-order.png differ

diff --git a/examples_skyline/accuracy_vs_fpr-skyline_formula.png b/examples_skyline/accuracy_vs_fpr-skyline_formula.png
new file mode 100644
index 0000000000000000000000000000000000000000..172bfe9e4b8dbca7a624a56a758a5f5fe1fe0d92
Binary files /dev/null and b/examples_skyline/accuracy_vs_fpr-skyline_formula.png differ
diff --git a/examples_skyline/fair_prep_results.py b/examples_skyline/fair_prep_results.py
new file mode 100644
index 0000000..e2f9565
--- /dev/null
+++ b/examples_skyline/fair_prep_results.py
@@ -0,0 +1,92 @@
+import os
+import sys
+import shutil
+import warnings
+warnings.simplefilter('ignore')
+
+from helper import extract_info
+
+sys.path.append(os.getcwd())
+
+from fp.traindata_samplers import CompleteData
+from fp.missingvalue_handlers import CompleteCaseAnalysis
+from fp.dataset_experiments import GermanCreditDatasetSexExperiment
+from fp.scalers import NamedStandardScaler, NamedMinMaxScaler
+from fp.learners import NonTunedLogisticRegression, LogisticRegression, DecisionTree, NonTunedDecisionTree
+from fp.post_processors import NoPostProcessing, RejectOptionPostProcessing, EqualOddsPostProcessing, CalibratedEqualOddsPostProcessing
+from fp.pre_processors import NoPreProcessing, DIRemover, Reweighing
+
+import numpy as np
+import matplotlib.pyplot as plt
+
+# Lists of parameters that we alter to observe variations in the metrics.
+seeds = [0xbeef, 0xcafe, 0xdead, 0xdeadcafe, 0xdeadbeef, 0xbeefcafe, 0xcafebeef, 50, 583, 5278, 100000, 0xefac, 0xfeeb, 0xdaed, 0xefacdaed, 0xfeebdead]
+learners = [NonTunedLogisticRegression(), LogisticRegression()]
+processors = [(NoPreProcessing(), NoPostProcessing()), (DIRemover(1.0), NoPostProcessing())]
+
+def calculate_metrics(seed, learner, pre_processor, post_processor):
+    '''
+    Run a single experiment for one (seed, learner, pre-processor, post-processor) combination.
+    '''
+    exp = GermanCreditDatasetSexExperiment(
+        fixed_random_seed=seed,
+        train_data_sampler=CompleteData(),
+        missing_value_handler=CompleteCaseAnalysis(),
+        numeric_attribute_scaler=NamedStandardScaler(),
+        learners=[learner],
+        pre_processors=[pre_processor],
+        post_processors=[post_processor])
+    exp.run()
+
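+# A short orientation note (descriptive comment, not part of the original script):
+# run_exp below walks the full grid. For every (pre-processor, post-processor)
+# pair and every learner it runs all seeds, then scrapes the logged metrics via
+# helper.extract_info. With the 2 processor pairs and 2 learners above, each
+# returned metric is a list of 4 inner lists, holding one value per seed
+# (16 here), assuming every run leaves exactly one log file behind.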
+def run_exp(seeds, learners, processors):
+    '''
+    Main driver: runs calculate_metrics for every combination of processor pair, learner and seed, and collects the logged metrics.
+    '''
+    accuracy, disp_imp, fnr, fpr = [], [], [], []
+    for processor in processors:
+        for learner in learners:
+            learner_acc, learner_di, learner_fnr, learner_fpr = [], [], [], []
+            for seed in seeds:
+                calculate_metrics(seed, learner, pre_processor=processor[0], post_processor=processor[1])
+                extract_info(learner_acc, learner_di, learner_fnr, learner_fpr)
+            accuracy.append(learner_acc)
+            disp_imp.append(learner_di)
+            fnr.append(learner_fnr)
+            fpr.append(learner_fpr)
+
+    return accuracy, disp_imp, fnr, fpr
+
+accuracy, disp_imp, fnr, fpr = run_exp(seeds, learners, processors)
+print(accuracy)
+print(disp_imp)
+print(fnr)
+print(fpr)
+
+def plotter(x, y, x_ticks, x_label, main_title):
+    '''
+    Plot accuracy against a fairness metric, one panel per processor/learner pairing.
+    '''
+    title_list = ['NoPreProcessing', 'DIRemover(1.0)', 'DIRemover(0.5)', 'Reweighing', 'Reject Option', 'Calibrated Equal Odds']
+    label_list = [('NonTunedLogistic', 'TunedLogistic'), ('NonTunedDecisionTree', 'TunedDecisionTree')]
+    fig, axs = plt.subplots(6, 2, figsize=(10, 20))
+    axs = axs.flatten()
+    for i in range(0, len(y), 2):
+        loc = i//2
+        axs[loc].scatter(x[i], y[i], c='b', marker='o')
+        axs[loc].scatter(x[i+1], y[i+1], c='r', marker='o')
+        axs[loc].set_xticks(x_ticks)
+        axs[loc].set_yticks(np.arange(0.5, 1, 0.1))
+        axs[loc].set_title(title_list[i//4])
+        axs[loc].grid(True)
+        axs[loc].set_xlabel(x_label)
+        axs[loc].set_ylabel('Accuracy')
+        axs[loc].legend(label_list[int(i%4/2)])
+    fig.suptitle(main_title)
+    plt.subplots_adjust(wspace=0.3, hspace=0.43)
+    fig.savefig('examples/' + main_title + '.png')
+    plt.show()
+
+plotter(x=disp_imp, y=accuracy, x_ticks=[0.5, 1, 1.5], x_label='DI', main_title='accuracy_vs_di')
+plotter(x=fnr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FNR', main_title='accuracy_vs_fnr')
+plotter(x=fpr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FPR', main_title='accuracy_vs_fpr')
+
diff --git a/examples_skyline/helper.py b/examples_skyline/helper.py
new file mode 100644
index 0000000..05af226
--- /dev/null
+++ b/examples_skyline/helper.py
@@ -0,0 +1,53 @@
+import os
+import sys
+import shutil
+import pandas as pd
+import numpy as np
+
+path = 'logs'
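+# Descriptive note on what extract_info assumes (inferred from the code below):
+# it scrapes the per-run CSV logs that the experiments write under `logs/`.
+# File names are '__'-separated; field 2 is the pre-processor and field 3 the
+# post-processor, and files with eight fields ending in "__OPTIMAL.csv"
+# apparently mark the run selected as optimal. Each scraped log folder is
+# removed afterwards, so a call only picks up runs made since the previous call.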
+def extract_info(learner_acc, learner_di, learner_fnr, learner_fpr, learner_label=None, learner_optimal=None):
+    # The example scripts call this with four lists only, so the label and
+    # optimal accumulators are optional.
+    try:
+        for folder_name, sub_folders, file_names in os.walk(path):
+            for sub_folder in sub_folders:
+                file_list = os.listdir(os.path.join(path, sub_folder))
+                file_list.sort()
+                for file in file_list:
+                    file_path = os.path.join(path, sub_folder, file)
+                    df = pd.read_csv(str(file_path), header=None, names=['split_type', 'label', 'parameter', 'value'])
+                    test_data = df['split_type'] == 'test'
+
+                    flags = file.split("__")
+                    pre_processor = flags[1]
+                    post_processor = flags[2]
+
+                    if learner_optimal is not None:
+                        # runs flagged as best end in "__OPTIMAL.csv" (8 fields)
+                        learner_optimal.append(len(flags) == 8 and flags[-1] == "OPTIMAL.csv")
+
+                    if learner_label is not None:
+                        if post_processor == "no_post_processing":
+                            learner_label.append(pre_processor)
+                        else:
+                            learner_label.append(post_processor[:17])
+
+                    # the first matching 'test' row holds the metric value
+                    di = (df['parameter'] == 'disparate_impact') & test_data
+                    learner_di.append(df.loc[di, :].iloc[0, -1])
+
+                    acc = (df['parameter'] == 'accuracy') & test_data
+                    learner_acc.append(df.loc[acc, :].iloc[0, -1])
+
+                    fnr = (df['parameter'] == 'generalized_false_negative_rate') & test_data
+                    learner_fnr.append(df.loc[fnr, :].iloc[0, -1])
+
+                    fpr = (df['parameter'] == 'generalized_false_positive_rate') & test_data
+                    learner_fpr.append(df.loc[fpr, :].iloc[0, -1])
+
+                # delete each log folder once scraped, so the next call only sees new runs
+                shutil.rmtree(os.path.join(path, sub_folder))
+    except Exception:
+        # ignore partially written or already removed log folders
+        pass
+
diff --git a/examples_skyline/missing_data_results.py b/examples_skyline/missing_data_results.py
new file mode 100644
index 0000000..29186ce
--- /dev/null
+++ b/examples_skyline/missing_data_results.py
@@ -0,0 +1,99 @@
+import os
+import sys
+import shutil
+import warnings
+warnings.simplefilter('ignore')
+
+from helper import extract_info
+
+sys.path.append(os.getcwd())
+
+from fp.traindata_samplers import CompleteData
+from fp.missingvalue_handlers import CompleteCaseAnalysis, ModeImputer, DataWigSimpleImputer
+from fp.dataset_experiments import AdultDatasetWhiteExperiment
+from fp.scalers import NamedStandardScaler, NamedMinMaxScaler
+from fp.learners import NonTunedLogisticRegression, LogisticRegression, DecisionTree, NonTunedDecisionTree
+from fp.post_processors import NoPostProcessing, RejectOptionPostProcessing, EqualOddsPostProcessing, CalibratedEqualOddsPostProcessing
+from fp.pre_processors import NoPreProcessing, DIRemover, Reweighing
+
+import numpy as np
+import itertools
+import matplotlib.pyplot as plt
+
+# Lists of parameters that we alter to observe variations in the metrics.
+seeds = [0xbeef, 0xcafe, 0xdead, 0xdeadcafe, 0xdeadbeef, 0xbeefcafe, 0xcafebeef, 50, 583, 5278, 100000, 0xefac, 0xfeeb, 0xdaed, 0xefacdaed, 0xfeebdead]
+learners = [NonTunedLogisticRegression(), LogisticRegression(), NonTunedDecisionTree(), DecisionTree()]
+processors = [(NoPreProcessing(), NoPostProcessing()), (DIRemover(1.0), NoPostProcessing()), (DIRemover(0.5), NoPostProcessing()), (Reweighing(), NoPostProcessing()),
+              (NoPreProcessing(), RejectOptionPostProcessing()), (NoPreProcessing(), CalibratedEqualOddsPostProcessing())]
+impute_column_list = ['workclass', 'occupation', 'native-country']
+label_column = 'income-per-year'
+datawig_imputer = DataWigSimpleImputer(impute_column_list, label_column, out='out')
+missing_value_imputers = [CompleteCaseAnalysis(), ModeImputer(impute_column_list), datawig_imputer]
+
+
+def calculate_metrics(seed, learner, missing_value_imputer, pre_processor, post_processor):
+    '''
+    Run a single experiment for one (seed, learner, imputer, pre-processor, post-processor) combination.
+    '''
+    exp = AdultDatasetWhiteExperiment(
+        fixed_random_seed=seed,
+        train_data_sampler=CompleteData(),
+        missing_value_handler=missing_value_imputer,
+        numeric_attribute_scaler=NamedStandardScaler(),
+        learners=[learner],
+        pre_processors=[pre_processor],
+        post_processors=[post_processor])
+    exp.run()
+
+def run_exp(seeds, learners, processors):
+    '''
+    Main driver: runs calculate_metrics for every combination of learner, processor pair, missing-value imputer and seed, and collects the logged metrics.
+    '''
+    accuracy, disp_imp, fnr, fpr = [], [], [], []
+    for learner in learners:
+        for processor in processors:
+            for imputer in missing_value_imputers:
+                imputer_acc, imputer_di, imputer_fnr, imputer_fpr = [], [], [], []
+                for seed in seeds:
+                    calculate_metrics(seed, learner, imputer, pre_processor=processor[0], post_processor=processor[1])
+                    extract_info(imputer_acc, imputer_di, imputer_fnr, imputer_fpr)
+                accuracy.append(imputer_acc)
+                disp_imp.append(imputer_di)
+                fnr.append(imputer_fnr)
+                fpr.append(imputer_fpr)
+    return accuracy, disp_imp, fnr, fpr
+
+accuracy, disp_imp, fnr, fpr = run_exp(seeds, learners, processors)
+
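+# Size of this sweep, worked out from the lists above: 4 learners x 6 processor
+# pairs x 3 imputers x 16 seeds = 1152 experiment runs. The plots below arrange
+# the results in a 4x6 grid of panels, one per (learner, processor) pair, with
+# one colour per imputer (blue = complete case, red = mode imputation,
+# green = DataWig), matching the order of missing_value_imputers.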
+
+def plotter(x, y, x_ticks, x_label, main_title):
+    '''
+    Plot accuracy against a fairness metric, one subplot per (learner, processor) pair.
+    '''
+    learner_list = ['NonTunedLogistic', 'TunedLogistic', 'NonTunedDecisionTree', 'TunedDecisionTree']
+    processor_list = ['NoPreProcessing', 'DIRemover(1.0)', 'DIRemover(0.5)', 'Reweighing', 'Reject Option', 'Calibrated Equal Odds']
+    title_list = list(itertools.product(learner_list, processor_list))
+    label_list = ('CompleteCase', 'ModeImputer', 'datawig_simple')
+    fig, axs = plt.subplots(len(learner_list), len(processor_list), figsize=(10, 20))
+    axs = axs.flatten()
+    for i in range(0, len(y), 3):
+        loc = i // 3
+        axs[loc].scatter(x[i], y[i], c='b', marker='o')
+        axs[loc].scatter(x[i+1], y[i+1], c='r', marker='o')
+        axs[loc].scatter(x[i+2], y[i+2], c='g', marker='o')
+        axs[loc].set_xticks(x_ticks)
+        axs[loc].set_yticks(np.arange(0.5, 1, 0.1))
+        axs[loc].set_title(' - '.join(title_list[loc]), fontsize=8)
+        axs[loc].grid(True)
+        axs[loc].set_xlabel(x_label)
+        axs[loc].set_ylabel('Accuracy')
+        axs[loc].legend(label_list)
+    fig.suptitle(main_title)
+    plt.subplots_adjust(wspace=0.3, hspace=0.43)
+    fig.savefig('examples/' + main_title + '.png')
+    plt.show()
+
+plotter(x=disp_imp, y=accuracy, x_ticks=[0.5, 1, 1.5], x_label='DI', main_title='missing_data_accuracy_vs_di')
+plotter(x=fnr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FNR', main_title='missing_data_accuracy_vs_fnr')
+plotter(x=fpr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FPR', main_title='missing_data_accuracy_vs_fpr')
+
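The two notebooks added next exercise the new skyline selection, this one through a metric-weight formula and results_play_skyline_order.ipynb through a preference order. The actual selection logic lives in fp/experiments.py later in this patch; as a rough sketch of the formula variant only, under the assumption that skyline_formula maps metric names to weights of a linear score over each candidate's validation metrics (the real implementation may, for example, negate lower-is-better metrics such as false_discovery_rate):

    # Sketch only, not the fp implementation; pick_optimal() and the candidate
    # dicts are illustrative assumptions.
    def pick_optimal(candidates, skyline_formula):
        # Score each candidate as the weighted sum of its metrics and keep the best.
        def score(metrics):
            return sum(w * metrics[name] for name, w in skyline_formula.items())
        return max(candidates, key=score)

    candidates = [
        {'accuracy': 0.71, 'selection_rate': 0.40, 'false_discovery_rate': 0.30},
        {'accuracy': 0.68, 'selection_rate': 0.55, 'false_discovery_rate': 0.20},
    ]
    best = pick_optimal(candidates, {'accuracy': 0.6, 'selection_rate': 0.2, 'false_discovery_rate': 0.2})
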
diff --git a/examples_skyline/results_play_skyline_formula.ipynb b/examples_skyline/results_play_skyline_formula.ipynb
new file mode 100644
index 0000000..e6ed2c6
--- /dev/null
+++ b/examples_skyline/results_play_skyline_formula.ipynb
@@ -0,0 +1,1778 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Import error: No module named 'tensorflow'\n"
+     ]
+    }
+   ],
+   "source": [
+    "# load relevant libraries\n",
+    "import os\n",
+    "import sys\n",
+    "import pathlib\n",
+    "import shutil\n",
+    "import warnings\n",
+    "warnings.simplefilter('ignore')\n",
+    "from helper import extract_info\n",
+    "\n",
+    "os.chdir('..')\n",
+    "#parent_dir = os.path.dirname(os.getcwd())\n",
+    "#sys.path.append(parent_dir)\n",
+    "\n",
+    "from fp.traindata_samplers import CompleteData\n",
+    "from fp.missingvalue_handlers import CompleteCaseAnalysis\n",
+    "from fp.dataset_experiments import GermanCreditDatasetSexExperiment\n",
+    "from fp.scalers import NamedStandardScaler, NamedMinMaxScaler\n",
+    "from fp.learners import NonTunedLogisticRegression, LogisticRegression, DecisionTree, NonTunedDecisionTree\n",
+    "from fp.post_processors import NoPostProcessing, RejectOptionPostProcessing, EqualOddsPostProcessing, CalibratedEqualOddsPostProcessing\n",
+    "from fp.pre_processors import NoPreProcessing, DIRemover, Reweighing\n",
+    "\n",
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# parameters in this cell can be adjusted for experimentation\n",
+    "\n",
+    "seeds = [0xbeef, 0xcafe, 0xdead, 0xdeadcafe, 0xdeadbeef, 0xbeefcafe, 0xcafebeef, 50, 583, 5278, 100000, 0xefac, 0xfeeb, 0xdaed, 0xefacdaed, 0xfeebdead]\n",
+    "#seeds = [0xbeef, 0xcafe, 0xdead, 0xdeadcafe]\n",
+    "learners = [NonTunedLogisticRegression(), LogisticRegression()]\n",
+    "# Tuned and non-tuned DecisionTree() can also be used.\n",
+    "\n",
+    "# pairs of pre- and post-processors\n",
+    "#processors = [(NoPreProcessing(), NoPostProcessing()), (DIRemover(1.0), NoPostProcessing()), (DIRemover(0.5), NoPostProcessing()), (Reweighing(), NoPostProcessing()), (NoPreProcessing(), RejectOptionPostProcessing()), (NoPreProcessing(), CalibratedEqualOddsPostProcessing())]\n",
+    "#processors = [(NoPreProcessing(), NoPostProcessing()), (DIRemover(1.0), NoPostProcessing())]\n",
+    "pre_processors = [NoPreProcessing(), DIRemover(1.0)]\n",
+    "post_processors = [NoPostProcessing(), RejectOptionPostProcessing()]\n",
+    "\n",
+    "# metric weights used by the skyline-formula selection strategy\n",
+    "skyline_formula = {'accuracy': 0.6, 'selection_rate': 0.2, 'false_discovery_rate': 0.2}\n",
+    "\n",
+    "# TODO: add more test cases for the above parameters\n",
+    "# Update these for plotting, as per your experiment:\n",
+    "#title_list = ['NoPreProcessing', 'DIRemover(1.0)', 'DIRemover(0.5)', 'Reweighing', 'Reject Option', 'Calibrated Equal Odds']\n",
+    "# Add pairwise tuples for each pair of learners.\n",
+    "label_list = [('NonTunedLogistic', 'TunedLogistic')]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def calculate_metrics(seed, learner, pre_processor, post_processor, val_strategy):\n",
+    "    '''\n",
+    "    Run one German-credit experiment for the given configuration.\n",
+    "    '''\n",
+    "    exp = GermanCreditDatasetSexExperiment(\n",
+    "        fixed_random_seed=seed,\n",
+    "        train_data_sampler=CompleteData(),\n",
+    "        missing_value_handler=CompleteCaseAnalysis(),\n",
+    "        numeric_attribute_scaler=NamedStandardScaler(),\n",
+    "        learners=learner,\n",
+    "        pre_processors=pre_processor,\n",
+    "        post_processors=post_processor,\n",
+    "        optimal_validation_strategy=val_strategy)\n",
+    "    exp.run()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "complete_case removed 0 instances from training data\n",
+      "complete_case removed 0 instances from validation data\n",
+      "Injecting zero columns for features not present set()\n",
+      "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n",
+      "[Parallel(n_jobs=-1)]: Done  50 tasks      | elapsed:    4.1s\n",
+      "[Parallel(n_jobs=-1)]: Done  60 out of  60 | elapsed:    4.2s finished\n"
+     ]
+    },
[... several hundred near-identical stdout/stderr stream blocks elided: the same "complete_case removed 0 instances from training/validation data", "Injecting zero columns for features not present set()", "Fitting 5 folds for each of 12 candidates, totalling 60 fits" and joblib Parallel progress messages repeat for every seed, learner, and processor combination ...]
for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n" + ] + } + ], + "source": [ + "def run_exp(seeds, learners, pre_processors, post_processors, optimal_validation_strategies):\n", + " '''\n", + " This is the main driver function that calls the calculate_metrics to give metrices on combinations of various learners, pre and post processing techniques.\n", + " '''\n", + " total_experiments = len(pre_processors) * len(post_processors) * len(learners)\n", + " #pd.set_option(\"display.max_rows\", None, \"display.max_columns\", None)\n", + " accuracy, disp_imp, fnr, fpr, label, optimal = [], [], [], [], [], []\n", + " for j in range(total_experiments):\n", + " accuracy.append(list())\n", + " disp_imp.append(list())\n", + " fnr.append(list())\n", + " fpr.append(list())\n", + " label.append(list())\n", + " optimal.append(list())\n", + " for seed in seeds: \n", + " learner_acc, learner_di, learner_fnr, learner_fpr, learner_label, learner_optimal = [], [], [], [], [], []\n", + " calculate_metrics(seed, learners, pre_processors, post_processors, optimal_validation_strategies)\n", + " extract_info(learner_acc, learner_di, learner_fnr, learner_fpr, learner_label, learner_optimal)\n", + " for i in range(len(learner_acc)):\n", + " accuracy[i].append(learner_acc[i])\n", + " disp_imp[i].append(learner_di[i])\n", + " fnr[i].append(learner_fnr[i])\n", + " fpr[i].append(learner_fpr[i])\n", + " #label[i].append(learner_label[i])\n", + " optimal[i].append(learner_optimal[i])\n", + " label = learner_label\n", + " \n", + " return accuracy, disp_imp, fnr, fpr, label, optimal\n", + " #return learner_acc, learner_di, learner_fnr, learner_fpr, label, optimal\n", + "\n", + "s_accuracy, s_disp_imp, s_fnr, s_fpr, s_label, s_optimal = run_exp(seeds, learners, pre_processors, post_processors, skyline_formula)\n", + "\n", + "#Converting string types to floating values for plotting\n", + "\n", + "accuracy, disp_imp, fnr, fpr = [],[],[],[]\n", + "for item in s_accuracy:\n", + " temp_list = list()\n", + " for i in item:\n", + " temp_list.append(float(i))\n", + " 
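+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The `optimal` flags collected by `run_exp` mark points on the skyline. As a rough, self-contained illustration of the idea (not the project's actual implementation, which lives in the `fp` package), a skyline keeps exactly the candidates that no other candidate Pareto-dominates:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative sketch only; `maximize` flags which metrics are higher-is-better.\n",
+    "def dominates(a, b, maximize):\n",
+    "    # a dominates b if it is at least as good everywhere and strictly better somewhere\n",
+    "    not_worse = all((x >= y) if m else (x <= y) for x, y, m in zip(a, b, maximize))\n",
+    "    better = any((x > y) if m else (x < y) for x, y, m in zip(a, b, maximize))\n",
+    "    return not_worse and better\n",
+    "\n",
+    "def skyline(points, maximize):\n",
+    "    # keep every point that no other point dominates\n",
+    "    return [p for p in points if not any(dominates(q, p, maximize) for q in points)]\n",
+    "\n",
+    "# three toy candidates scored on two higher-is-better metrics; the last is dominated\n",
+    "skyline([(0.70, 1.0), (0.75, 0.9), (0.65, 0.8)], maximize=(True, True))"
+   ]
+  },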
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def plotter(s_optimal, title_list, label_list, x, y, x_ticks, x_label, main_title):\n",
+    "    '''\n",
+    "    Plot accuracy against a fairness metric for each pair of learners;\n",
+    "    points on the skyline are drawn in a different colour.\n",
+    "    '''\n",
+    "    # update label_list and title_list to match the experiment being performed\n",
+    "    fig, axs = plt.subplots(2, 2, figsize=(10, 20))\n",
+    "    axs = axs.flatten()\n",
+    "    # consecutive result lists belong to the same learner pair, so step by 2\n",
+    "    for i in range(0, len(y), 2):\n",
+    "        loc = i // 2\n",
+    "        for k in range(len(x[i])):\n",
+    "            # first learner: yellow if on the skyline, blue otherwise\n",
+    "            if s_optimal[i][k]:\n",
+    "                axs[loc].scatter(x[i][k], y[i][k], c='y', marker='o')\n",
+    "            else:\n",
+    "                axs[loc].scatter(x[i][k], y[i][k], c='b', marker='o')\n",
+    "            # second learner: green if on the skyline, red otherwise\n",
+    "            if s_optimal[i+1][k]:\n",
+    "                axs[loc].scatter(x[i+1][k], y[i+1][k], c='g', marker='o')\n",
+    "            else:\n",
+    "                axs[loc].scatter(x[i+1][k], y[i+1][k], c='r', marker='o')\n",
+    "        axs[loc].set_xticks(x_ticks)\n",
+    "        axs[loc].set_yticks(np.arange(0.5, 1, 0.1))\n",
+    "        axs[loc].set_title(title_list[i])\n",
+    "        axs[loc].grid(True)\n",
+    "        axs[loc].set_xlabel(x_label)\n",
+    "        axs[loc].set_ylabel('Accuracy')\n",
+    "        axs[loc].legend(label_list[int(i % (len(label_list) * 2) / 2)])\n",
+    "    fig.suptitle(main_title)\n",
+    "    plt.subplots_adjust(wspace=0.3, hspace=0.43)\n",
+    "    fig.savefig('examples_skyline/' + main_title + '.png')\n",
+    "    plt.show()\n",
+    "\n",
+    "plotter(s_optimal, title_list, label_list, x=disp_imp, y=accuracy, x_ticks=[0.5, 1, 1.5], x_label='DI', main_title='accuracy_vs_di-skyline_formula')\n",
+    "plotter(s_optimal, title_list, label_list, x=fnr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FNR', main_title='accuracy_vs_fnr-skyline_formula')\n",
+    "plotter(s_optimal, title_list, label_list, x=fpr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FPR', main_title='accuracy_vs_fpr-skyline_formula')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
diff --git a/examples_skyline/results_play_skyline_order.ipynb b/examples_skyline/results_play_skyline_order.ipynb
new file mode 100644
index 0000000..1ab6950
--- /dev/null
+++ b/examples_skyline/results_play_skyline_order.ipynb
@@ -0,0 +1,1767 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Import error: No module named 'tensorflow'\n"
+     ]
+    }
+   ],
+   "source": [
+    "# load the relevant libraries\n",
+    "import os\n",
+    "import sys\n",
+    "import pathlib\n",
+    "import shutil\n",
+    "import warnings\n",
+    "warnings.simplefilter('ignore')\n",
+    "from helper import extract_info\n",
+    "\n",
+    "# move to the repository root so the fp package can be imported\n",
+    "os.chdir('..')\n",
+    "\n",
+    "from fp.traindata_samplers import CompleteData\n",
+    "from fp.missingvalue_handlers import CompleteCaseAnalysis\n",
+    "from fp.dataset_experiments import GermanCreditDatasetSexExperiment\n",
+    "from fp.scalers import NamedStandardScaler, NamedMinMaxScaler\n",
+    "from fp.learners import NonTunedLogisticRegression, LogisticRegression, DecisionTree, NonTunedDecisionTree\n",
+    "from fp.post_processors import NoPostProcessing, RejectOptionPostProcessing, EqualOddsPostProcessing, CalibratedEqualOddsPostProcessing\n",
+    "from fp.pre_processors import NoPreProcessing, DIRemover, Reweighing\n",
+    "\n",
+    "import pandas as pd\n",
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# parameters in this cell can be adjusted for experimentation\n",
+    "\n",
+    "seeds = [0xbeef, 0xcafe, 0xdead, 0xdeadcafe, 0xdeadbeef, 0xbeefcafe, 0xcafebeef, 50, 583, 5278, 100000, 0xefac, 0xfeeb, 0xdaed, 0xefacdaed, 0xfeebdead]\n",
+    "# seeds = [0xbeef, 0xcafe, 0xdead, 0xdeadcafe]  # smaller list for quick runs\n",
+    "learners = [NonTunedLogisticRegression(), LogisticRegression()]\n",
+    "# tuned and non-tuned DecisionTree() can also be used\n",
+    "\n",
+    "# ordered list of metrics used for the skyline selection\n",
+    "skyline_order = ['accuracy', 'selection_rate', 'false_discovery_rate']\n",
+    "pre_processors = [NoPreProcessing(), DIRemover(1.0)]\n",
+    "post_processors = [NoPostProcessing(), RejectOptionPostProcessing()]\n",
+    "# add pairwise tuples for each pair of learners\n",
+    "label_list = [('NonTunedLogistic', 'TunedLogistic')]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def calculate_metrics(seed, learner, pre_processor, post_processor, val_strategy):\n",
+    "    '''\n",
+    "    Run the German Credit (sex) experiment once for the given seed and\n",
+    "    pipeline configuration.\n",
+    "    '''\n",
+    "    exp = GermanCreditDatasetSexExperiment(\n",
+    "        fixed_random_seed=seed,\n",
+    "        train_data_sampler=CompleteData(),\n",
+    "        missing_value_handler=CompleteCaseAnalysis(),\n",
+    "        numeric_attribute_scaler=NamedStandardScaler(),\n",
+    "        learners=learner,\n",
+    "        pre_processors=pre_processor,\n",
+    "        post_processors=post_processor,\n",
+    "        optimal_validation_strategy=val_strategy)\n",
+    "    exp.run()"
+   ]
+  },
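+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "In this notebook the skyline is driven by `skyline_order`, an ordered list of metrics. Below is a minimal sketch of one plausible reading of such an order (earlier metrics take priority, as in a lexicographic comparison); the project's actual logic lives in the `fp` package, and the direction flags here are illustrative assumptions:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Hypothetical sketch only, not the project's implementation.\n",
+    "def lex_better(a, b, order, higher_is_better):\n",
+    "    # walk the metrics in priority order; the first difference decides\n",
+    "    for metric in order:\n",
+    "        if a[metric] != b[metric]:\n",
+    "            return (a[metric] > b[metric]) == higher_is_better[metric]\n",
+    "    return False  # a and b tie on every metric\n",
+    "\n",
+    "lex_better({'accuracy': 0.74, 'selection_rate': 0.60, 'false_discovery_rate': 0.20},\n",
+    "           {'accuracy': 0.74, 'selection_rate': 0.55, 'false_discovery_rate': 0.25},\n",
+    "           skyline_order,\n",
+    "           {'accuracy': True, 'selection_rate': False, 'false_discovery_rate': False})"
+   ]
+  },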
[ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 2.0s finished\n", + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 
concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds 
for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.6s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.6s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + 
"complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.6s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.6s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + 
"Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + 
"Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.6s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + 
"complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend 
with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.6s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": 
"stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting 
zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 
60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.6s finished\n", + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + 
"name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 
instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for 
features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances 
from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", 
+ "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from training data\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + 
"complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", + "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n", + "complete_case removed 0 instances from validation data\n", + "Injecting zero columns for features not present set()\n" + ] + } + ], + "source": [ + "def run_exp(seeds, learners, pre_processors, post_processors, optimal_validation_strategies):\n", + " '''\n", + " This is the main driver function that calls the calculate_metrics to give metrices on combinations of various learners, pre and post processing techniques.\n", + " '''\n", + " total_experiments = len(pre_processors) * len(post_processors) * len(learners)\n", + " #pd.set_option(\"display.max_rows\", None, \"display.max_columns\", None)\n", + " accuracy, disp_imp, fnr, fpr, label, optimal = [], [], [], [], [], []\n", + " for j in range(total_experiments):\n", + " accuracy.append(list())\n", + " disp_imp.append(list())\n", + " fnr.append(list())\n", + " fpr.append(list())\n", + " label.append(list())\n", + " optimal.append(list())\n", + " for seed in seeds: \n", + " learner_acc, learner_di, learner_fnr, learner_fpr, learner_label, learner_optimal = [], [], [], [], [], []\n", + " calculate_metrics(seed, learners, pre_processors, post_processors, optimal_validation_strategies)\n", + " extract_info(learner_acc, learner_di, learner_fnr, learner_fpr, learner_label, learner_optimal)\n", + " for i in range(len(learner_acc)):\n", + " accuracy[i].append(learner_acc[i])\n", + " disp_imp[i].append(learner_di[i])\n", + " fnr[i].append(learner_fnr[i])\n", + " fpr[i].append(learner_fpr[i])\n", + " #label[i].append(learner_label[i])\n", + " optimal[i].append(learner_optimal[i])\n", + " label = learner_label\n", + " \n", + " return accuracy, disp_imp, fnr, fpr, label, 
optimal\n", + " #return learner_acc, learner_di, learner_fnr, learner_fpr, label, optimal\n", + "\n", + "s_accuracy, s_disp_imp, s_fnr, s_fpr, s_label, s_optimal = run_exp(seeds, learners, pre_processors, post_processors, skyline_order)\n", + "\n", + "#Converting string types to floating values for plotting\n", + "\n", + "accuracy, disp_imp, fnr, fpr = [],[],[],[]\n", + "for item in s_accuracy:\n", + " temp_list = list()\n", + " for i in item:\n", + " temp_list.append(float(i))\n", + " accuracy.append(temp_list)\n", + "\n", + "for item in s_disp_imp:\n", + " temp_list = list()\n", + " for i in item:\n", + " temp_list.append(float(i))\n", + " disp_imp.append(temp_list)\n", + "\n", + "for item in s_fnr:\n", + " temp_list = list()\n", + " for i in item:\n", + " temp_list.append(float(i))\n", + " fnr.append(temp_list)\n", + "\n", + "for item in s_fpr:\n", + " temp_list = list()\n", + " for i in item:\n", + " temp_list.append(float(i))\n", + " fpr.append(temp_list)\n", + "\n", + "title_list = s_label" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "def plotter(s_optimal, title_list, label_list, x, y, x_ticks, x_label, main_title):\n", + " '''\n", + " Function to plot various comparison plots.\n", + " '''\n", + " #update label list and title list as per the experiment being performed.\n", + " fig, axs = plt.subplots(len(pre_processors), len(post_processors), figsize=((10,20)))\n", + " axs = axs.flatten()\n", + " for i in range(0, len(y), 2):\n", + " loc = i//2\n", + " for k in range(len(x[i])):\n", + " if s_optimal[i][k]:\n", + " axs[loc].scatter(x[i][k], y[i][k], c='y', marker='o')\n", + " else:\n", + " axs[loc].scatter(x[i][k], y[i][k], c='b', marker='o')\n", + " if s_optimal[i+1][k]:\n", + " axs[loc].scatter(x[i+1][k], y[i+1][k], c='g', marker='o')\n", + " else:\n", + " axs[loc].scatter(x[i+1][k], y[i+1][k], c='r', marker='o')\n", + " axs[loc].set_xticks(x_ticks)\n", + " axs[loc].set_yticks(np.arange(0.5, 1, 0.1))\n", + " axs[loc].set_title(title_list[i])\n", + " axs[loc].grid(True)\n", + " axs[loc].set_xlabel(x_label)\n", + " axs[loc].set_ylabel('Accuracy')\n", + " axs[loc].legend(label_list[int(i%(len(label_list)*2)/2)])\n", + " fig.suptitle(main_title)\n", + " plt.subplots_adjust(wspace=0.3, hspace=0.43)\n", + " fig.savefig('examples_skyline/' + main_title + '.png')\n", + " plt.show()\n", + "\n", + "plotter(s_optimal, title_list, label_list, x=disp_imp, y=accuracy, x_ticks=[0.5, 1, 1.5], x_label='DI', main_title='accuracy_vs_di-skyline-order')\n", + "plotter(s_optimal, title_list, label_list, x=fnr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FNR', main_title='accuracy_vs_fnr-skyline-order')\n", + "plotter(s_optimal, title_list, label_list, x=fpr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FPR', main_title='accuracy_vs_fpr-skyline-order')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.3" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/fp/dataset_experiments.py b/fp/dataset_experiments.py index 08b07aa..91e0229 100644 --- a/fp/dataset_experiments.py +++ b/fp/dataset_experiments.py 
@@ -6,7 +6,7 @@ class AdultDatasetWhiteMaleExperiment(BinaryClassificationExperiment): def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors): + learners, pre_processors, post_processors, optimal_validation_strategy): test_set_ratio = 0.2 validation_set_ratio = 0.1 @@ -27,12 +27,16 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, 'label_maps': [{1.0: '>50K', 0.0: '<=50K'}], 'protected_attribute_maps': [{1.0: 'White', 0.0: 'Non-white'}, {1.0: 'Male', 0.0: 'Female'}] } - - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'adultwhitemale') + if optimal_validation_strategy: + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, + pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'adultwhitemale', optimal_validation_strategy) + else: + print("Need to specify the strategy that is used to select the optimal results!") + exit() def load_raw_data(self): return pd.read_csv('datasets/raw/adult.csv', na_values='?', sep=',') @@ -41,7 +45,7 @@ def load_raw_data(self): class AdultDatasetMaleExperiment(BinaryClassificationExperiment): def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors): + learners, pre_processors, post_processors, optimal_validation_strategy): test_set_ratio = 0.2 validation_set_ratio = 0.1 @@ -62,12 +66,16 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, 'label_maps': [{1.0: '>50K', 0.0: '<=50K'}], 'protected_attribute_maps': [{1.0: 'Male', 0.0: 'Female'}] } - - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'adultmale') + if optimal_validation_strategy: + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, + pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'adultmale', optimal_validation_strategy) + else: + print("Need to specify the strategy that is used to select the optimal results!") + exit() def load_raw_data(self): return pd.read_csv('datasets/raw/adult.csv', na_values='?', sep=',') @@ -76,7 +84,7 @@ def load_raw_data(self): class AdultDatasetWhiteExperiment(BinaryClassificationExperiment): def __init__(self, 
fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors): + learners, pre_processors, post_processors, optimal_validation_strategy): test_set_ratio = 0.2 validation_set_ratio = 0.1 @@ -97,12 +105,16 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, 'label_maps': [{1.0: '>50K', 0.0: '<=50K'}], 'protected_attribute_maps': [{1.0: 'White', 0.0: 'Non-white'}] } - - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'adultwhite') + if optimal_validation_strategy: + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, + pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'adultwhite', optimal_validation_strategy) + else: + print("Need to specify the strategy that is used to select the optimal results!") + exit() def load_raw_data(self): return pd.read_csv('datasets/raw/adult.csv', na_values='?', sep=',') @@ -112,7 +124,7 @@ def load_raw_data(self): class PropublicaDatasetWhiteExperiment(BinaryClassificationExperiment): def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors): + learners, pre_processors, post_processors, optimal_validation_strategy): test_set_ratio = 0.2 validation_set_ratio = 0.1 label_name = 'two_year_recid' @@ -132,12 +144,16 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, 'label_maps': [{1.0: 1, 0.0: 0}], 'protected_attribute_maps': [{1.0: 'Caucasian', 0.0: 'Non-white'}] } - - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'propublicawhite') + if optimal_validation_strategy: + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, + pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'propublicawhite', optimal_validation_strategy) + else: + print("Need to specify the strategy that is used to select the optimal results!") + exit() def load_raw_data(self): """The custom pre-processing function is adapted from @@ -163,7 +179,7 @@ class GermanCreditDatasetSexExperiment(BinaryClassificationExperiment): def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors): 
+ learners, pre_processors, post_processors, optimal_validation_strategy): test_set_ratio = 0.2 validation_set_ratio = 0.1 @@ -186,12 +202,16 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, 'label_maps': [{1.0: 1, 0.0: 0}], 'protected_attribute_maps': [{1.0: 'male', 0.0: 'female'}] } - - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'germancreditsex') + if optimal_validation_strategy: + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, + pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'germancreditsex', optimal_validation_strategy) + else: + print("Need to specify the strategy that is used to select the optimal results!") + exit() def load_raw_data(self): df = pd.read_csv('datasets/raw/german.csv', na_values='?', sep=',') @@ -253,7 +273,7 @@ class RicciRaceExperiment(BinaryClassificationExperiment): ''' def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors): + learners, pre_processors, post_processors, optimal_validation_strategy): test_set_ratio = 0.2 validation_set_ratio = 0.1 @@ -274,12 +294,16 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, 'label_maps': [{1.0: 1, 0.0: 0}], 'protected_attribute_maps': [{1.0: 'W', 0.0: 'NW'}] } - - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names,attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'riccirace') + if optimal_validation_strategy: + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, + pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'riccirace', optimal_validation_strategy) + else: + print("Need to specify the strategy that is used to select the optimal results!") + exit() def load_raw_data(self): df = pd.read_csv('datasets/raw/ricci.txt', na_values='?', sep=',') @@ -305,7 +329,7 @@ class GiveMeSomeCreditExperiment(BinaryClassificationExperiment): Fairness intervention for the Age attribute (priviledge for age>=25) while predicting if a person will experience 90 days past due delinquency or worse. 
''' def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors): + learners, pre_processors, post_processors, optimal_validation_strategy): test_set_ratio = 0.2 validation_set_ratio = 0.1 @@ -327,12 +351,16 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, dataset_metadata = { 'label_maps': [{1.0: 1, 0.0: 0}] } - - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'givecredit') + if optimal_validation_strategy: + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, + pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'givecredit', optimal_validation_strategy) + else: + print("Need to specify the strategy that is used to select the optimal results!") + exit() def load_raw_data(self): df = pd.read_csv('datasets/raw/givemesomecredit.csv', na_values='?', sep=',',index_col=False) diff --git a/fp/dataset_experiments_old.py b/fp/dataset_experiments_old.py new file mode 100644 index 0000000..08b07aa --- /dev/null +++ b/fp/dataset_experiments_old.py @@ -0,0 +1,341 @@ +from fp.experiments import BinaryClassificationExperiment +import pandas as pd +import numpy as np + + +class AdultDatasetWhiteMaleExperiment(BinaryClassificationExperiment): + + def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, + learners, pre_processors, post_processors): + + test_set_ratio = 0.2 + validation_set_ratio = 0.1 + label_name = 'income-per-year' + positive_label = '>50K' + numeric_attribute_names = ['capital-gain', 'capital-loss', 'age', 'hours-per-week'] + categorical_attribute_names = ['workclass', 'education', 'marital-status', 'occupation', 'relationship', + 'native-country'] + attributes_to_drop_names = ['fnlwgt'] + + protected_attribute_names = ['race', 'sex'] + privileged_classes = [['White'], ['Male']] + + privileged_groups = [{'race': 1, 'sex': 1}] + unprivileged_groups = [{'race': 1, 'sex': 0}, {'sex': 0}] + + dataset_metadata = { + 'label_maps': [{1.0: '>50K', 0.0: '<=50K'}], + 'protected_attribute_maps': [{1.0: 'White', 0.0: 'Non-white'}, {1.0: 'Male', 0.0: 'Female'}] + } + + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'adultwhitemale') + + def load_raw_data(self): + return pd.read_csv('datasets/raw/adult.csv', na_values='?', sep=',') + + +class AdultDatasetMaleExperiment(BinaryClassificationExperiment): + + def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, + learners, 
pre_processors, post_processors): + + test_set_ratio = 0.2 + validation_set_ratio = 0.1 + label_name = 'income-per-year' + positive_label = '>50K' + numeric_attribute_names = ['capital-gain', 'capital-loss', 'age', 'hours-per-week'] + categorical_attribute_names = ['workclass', 'education', 'marital-status', 'occupation', 'relationship', + 'native-country'] + attributes_to_drop_names = ['fnlwgt', 'race'] + + protected_attribute_names = ['sex'] + privileged_classes = [['Male']] + + privileged_groups = [{'sex': 1}] + unprivileged_groups = [{'sex': 0}] + + dataset_metadata = { + 'label_maps': [{1.0: '>50K', 0.0: '<=50K'}], + 'protected_attribute_maps': [{1.0: 'Male', 0.0: 'Female'}] + } + + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'adultmale') + + def load_raw_data(self): + return pd.read_csv('datasets/raw/adult.csv', na_values='?', sep=',') + + +class AdultDatasetWhiteExperiment(BinaryClassificationExperiment): + + def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, + learners, pre_processors, post_processors): + + test_set_ratio = 0.2 + validation_set_ratio = 0.1 + label_name = 'income-per-year' + positive_label = '>50K' + numeric_attribute_names = ['capital-gain', 'capital-loss', 'age', 'hours-per-week'] + categorical_attribute_names = ['workclass', 'education', 'marital-status', 'occupation', 'relationship', + 'native-country'] + attributes_to_drop_names = ['fnlwgt', 'sex'] + + protected_attribute_names = ['race'] + privileged_classes = [['White']] + + privileged_groups = [{'race': 1}] + unprivileged_groups = [{'race': 0}] + + dataset_metadata = { + 'label_maps': [{1.0: '>50K', 0.0: '<=50K'}], + 'protected_attribute_maps': [{1.0: 'White', 0.0: 'Non-white'}] + } + + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'adultwhite') + + def load_raw_data(self): + return pd.read_csv('datasets/raw/adult.csv', na_values='?', sep=',') + + + +class PropublicaDatasetWhiteExperiment(BinaryClassificationExperiment): + + def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, + learners, pre_processors, post_processors): + test_set_ratio = 0.2 + validation_set_ratio = 0.1 + label_name = 'two_year_recid' + positive_label = 1 + numeric_attribute_names = ['age', 'decile_score', 'priors_count', 'days_b_screening_arrest', 'decile_score', + 'is_recid'] + categorical_attribute_names = ['c_charge_degree', 'age_cat', 'score_text'] + attributes_to_drop_names = ['sex', 'c_jail_in', 'c_jail_out'] + + protected_attribute_names = ['race'] + privileged_classes = [['Caucasian']] + + privileged_groups = [{'race': 1}] + unprivileged_groups = [{'race': 0}] + + dataset_metadata = { + 'label_maps': [{1.0: 1, 0.0: 0}], + 'protected_attribute_maps': [{1.0: 'Caucasian', 0.0: 'Non-white'}] + } + + 
super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'propublicawhite') + + def load_raw_data(self): + """The custom pre-processing function is adapted from + https://github.com/IBM/AIF360/blob/master/aif360/algorithms/preprocessing/optim_preproc_helpers/data_preproc_functions.py + https://github.com/fair-preprocessing/nips2017/blob/master/compas/code/Generate_Compas_Data.ipynb + """ + df = pd.read_csv('datasets/raw/propublica-recidivism.csv', na_values='?', sep=',') + df = df[['age', 'c_charge_degree', 'race', 'age_cat', 'score_text', + 'sex', 'priors_count', 'days_b_screening_arrest', 'decile_score', + 'is_recid', 'two_year_recid', 'c_jail_in', 'c_jail_out']] + ix = df['days_b_screening_arrest'] <= 100 + ix = (df['days_b_screening_arrest'] >= -100) & ix + ix = (df['is_recid'] != -1) & ix + ix = (df['c_charge_degree'] != "O") & ix + ix = (df['score_text'] != 'N/A') & ix + df = df.loc[ix, :] + df['length_of_stay'] = (pd.to_datetime(df['c_jail_out']) - pd.to_datetime(df['c_jail_in'])).apply( + lambda x: x.days) + return df + + +class GermanCreditDatasetSexExperiment(BinaryClassificationExperiment): + + + def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, + learners, pre_processors, post_processors): + + test_set_ratio = 0.2 + validation_set_ratio = 0.1 + label_name = 'credit' + positive_label = 1 + numeric_attribute_names = ['month', 'credit_amount', 'residence_since', 'age', 'number_of_credits', + 'people_liable_for'] + categorical_attribute_names = ['credit_history', 'savings', 'employment'] + attributes_to_drop_names = ['personal_status', 'status', 'purpose', 'investment_as_income_percentage', + 'other_debtors', 'property', 'installment_plans', 'housing', 'skill_level', + 'telephone', 'foreign_worker'] + + protected_attribute_names = ['sex'] + privileged_classes = [[1.0]] + + privileged_groups = [{'sex': 1.0}] + unprivileged_groups = [{'sex': 0.0}] + + dataset_metadata = { + 'label_maps': [{1.0: 1, 0.0: 0}], + 'protected_attribute_maps': [{1.0: 'male', 0.0: 'female'}] + } + + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'germancreditsex') + + def load_raw_data(self): + df = pd.read_csv('datasets/raw/german.csv', na_values='?', sep=',') + + def group_credit_hist(x): + if x in ['A30', 'A31', 'A32']: + return 'None/Paid' + elif x == 'A33': + return 'Delay' + elif x == 'A34': + return 'Other' + else: + return 'NA' + + def group_employ(x): + if x == 'A71': + return 'Unemployed' + elif x in ['A72', 'A73']: + return '1-4 years' + elif x in ['A74', 'A75']: + return '4+ years' + else: + return 'NA' + + def group_savings(x): + if x in ['A61', 'A62']: + return '<500' + elif x in ['A63', 'A64']: + return '500+' + elif x == 'A65': + return 'Unknown/None' + else: + return 'NA' + + def group_status(x): + if x in ['A11', 'A12']: + return '<200' + 
elif x in ['A13']: + return '200+' + elif x == 'A14': + return 'None' + else: + return 'NA' + + status_map = {'A91': 1.0, 'A93': 1.0, 'A94': 1.0, 'A92': 0.0, 'A95': 0.0} + df['sex'] = df['personal_status'].replace(status_map) + # group credit history, savings, and employment + df['credit_history'] = df['credit_history'].apply(lambda x: group_credit_hist(x)) + df['savings'] = df['savings'].apply(lambda x: group_savings(x)) + df['employment'] = df['employment'].apply(lambda x: group_employ(x)) + df['age'] = df['age'].apply(lambda x: np.float(x >= 25)) + df['status'] = df['status'].apply(lambda x: group_status(x)) + return df + +class RicciRaceExperiment(BinaryClassificationExperiment): + ''' + Check for fairness based on race (white vs minority i.e Black and Hispanic) while predicting if a candidate will pass i.e obtain total + marks greater than or equal to 70.0 + ''' + + def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, + learners, pre_processors, post_processors): + + test_set_ratio = 0.2 + validation_set_ratio = 0.1 + + label_name = 'combine' + positive_label = 1 + numeric_attribute_names = ['oral', 'written'] + categorical_attribute_names = ['position'] + attributes_to_drop_names = [] + + protected_attribute_names = ['race'] + privileged_classes = [[1.0]] + + privileged_groups = [{'race': 1.0}] + unprivileged_groups = [{'race': 0.0}] + + dataset_metadata = { + 'label_maps': [{1.0: 1, 0.0: 0}], + 'protected_attribute_maps': [{1.0: 'W', 0.0: 'NW'}] + } + + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names,attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'riccirace') + + def load_raw_data(self): + df = pd.read_csv('datasets/raw/ricci.txt', na_values='?', sep=',') + df.columns = map(str.lower, df.columns) + + def group_race_minority(x): + if x in ['B', 'H', 'B']: + return 'NW' + else: + return 'W' + + post_map = {'Captain': 0.0, 'Lieutenant': 1.0} + df['position'] = df['position'].replace(post_map) + + #group minorities i.e Black and Hispanic are combined to 'NW'(non white) + df['race'] = df['race'].apply(lambda x: group_race_minority(x)) + df['combine'] = df['combine'].apply(lambda x: int(x >= 70)) + + return df + +class GiveMeSomeCreditExperiment(BinaryClassificationExperiment): + ''' + Fairness intervention for the Age attribute (priviledge for age>=25) while predicting if a person will experience 90 days past due delinquency or worse. 
+ ''' + def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, + learners, pre_processors, post_processors): + + test_set_ratio = 0.2 + validation_set_ratio = 0.1 + label_name = 'SeriousDlqin2yrs' + positive_label = 1 + + numeric_attribute_names = ['RevolvingUtilizationOfUnsecuredLines','age','NumberOfTime30-59DaysPastDueNotWorse', + 'DebtRatio','MonthlyIncome','NumberOfOpenCreditLinesAndLoans','NumberOfTimes90DaysLate', + 'NumberRealEstateLoansOrLines','NumberOfTime60-89DaysPastDueNotWorse','NumberOfDependents'] + categorical_attribute_names = [] + attributes_to_drop_names = [] + + protected_attribute_names = ['age'] + privileged_classes = [[1.0]] + + privileged_groups = [{'age': 1}] + unprivileged_groups = [{'age': 0}] + + dataset_metadata = { + 'label_maps': [{1.0: 1, 0.0: 0}] + } + + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'givecredit') + + def load_raw_data(self): + df = pd.read_csv('datasets/raw/givemesomecredit.csv', na_values='?', sep=',',index_col=False) + df['age'] = df['age'].apply(lambda x: int(x >= 25)) + return df + diff --git a/fp/experiments.py b/fp/experiments.py index cb5f33c..bbc5d5e 100644 --- a/fp/experiments.py +++ b/fp/experiments.py @@ -10,6 +10,7 @@ from sklearn.base import clone from sklearn.metrics import roc_auc_score from sklearn.model_selection import train_test_split +from fp.utils import filter_optimal_results_skyline_order, filter_optimal_results_skyline_formula class BinaryClassificationExperiment: @@ -35,7 +36,8 @@ def __init__(self, privileged_groups, unprivileged_groups, dataset_metadata, - dataset_name): + dataset_name, + optimal_validation_strategy): self.fixed_random_seed = fixed_random_seed self.test_set_ratio = test_set_ratio @@ -59,20 +61,21 @@ def __init__(self, self.dataset_name = dataset_name self.log_path = 'logs/' self.exec_timestamp = self.generate_timestamp() + self.optimal_validation_strategy = optimal_validation_strategy # --- Helper Methods Begin ------------------------------------------------ - def unique_file_name(self, learner, pre_processor, post_processor): - return '{}__{}__{}__{}__{}__{}__{}'.format(self.dataset_name, + def unique_file_name(self, pre_processor, post_processor, learner): + return '{0}__{1}__{2}__{3}__{4}__{5}__{6}'.format(self.dataset_name, + pre_processor.name(), + post_processor.name(), learner.name(), self.missing_value_handler.name(), self.train_data_sampler.name(), - self.numeric_attribute_scaler.name(), - pre_processor.name(), - post_processor.name()) + self.numeric_attribute_scaler.name()) def generate_file_path(self, file_name=''): @@ -131,7 +134,6 @@ def apply_model(self, data, scalers, adjusted_annotated_train_data, pre_processo feature_names_in_train_but_not_in_current = set(train_feature_names).difference( set(current_feature_names)) - print("Injecting zero columns for features not present", feature_names_in_train_but_not_in_current) validation_data_df, _ = adjusted_annotated_data.convert_to_dataframe() @@ -147,14 +149,12 @@ def apply_model(self, data, scalers, adjusted_annotated_train_data, pre_processo if learner.needs_annotated_data_for_prediction(): 
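+            # aif360 in-processing learners predict on the annotated dataset itself; plain sklearn learners take the raw feature matrix (else branch)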
adjusted_annotated__data_with_predictions = model.predict(adjusted_annotated_data) else: - adjusted_annotated__data_with_predictions.labels = model.predict(adjusted_annotated_data.features) - + adjusted_annotated__data_with_predictions.labels = model.predict(adjusted_annotated_data.features).reshape(-1,1) try: class_probs = model.predict_proba(adjusted_annotated_data.features) adjusted_annotated__data_with_predictions.scores = class_probs[:, 0] except AttributeError: print("WARNING: MODEL CANNOT ASSIGN CLASS PROBABILITIES") - return adjusted_annotated_data, adjusted_annotated__data_with_predictions @@ -235,10 +235,11 @@ def run_single_exp(self, annotated_train_data, validation_data, test_data, scale post_processor : fairprep pre-processor abstraction from aif360.algorithms.post_processors """ - + adjusted_annotated_train_data = self.preprocess_data(pre_processor, annotated_train_data) - model = self.learn_classifier(learner, adjusted_annotated_train_data, self.fixed_random_seed) + model = self.learn_classifier(learner, adjusted_annotated_train_data, self.fixed_random_seed) + adjusted_annotated_train_data_with_predictions = adjusted_annotated_train_data.copy() if learner.needs_annotated_data_for_prediction(): @@ -260,13 +261,12 @@ def run_single_exp(self, annotated_train_data, validation_data, test_data, scale adjusted_annotated_test_data_with_predictions) results_file_name = '../{}{}-{}.csv'.format( - self.generate_file_path(), self.unique_file_name(pre_processor, learner, post_processor), self.fixed_random_seed) + self.generate_file_path(), self.unique_file_name(pre_processor, post_processor, learner), self.fixed_random_seed) results_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), results_file_name) results_dir_name = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../{}'.format(self.generate_file_path())) if not os.path.exists(results_dir_name): os.makedirs(results_dir_name) - results_file = [] results_file = self.log_metrics(results_file, model, adjusted_annotated_validation_data, @@ -280,45 +280,70 @@ def run_single_exp(self, annotated_train_data, validation_data, test_data, scale results_file.to_csv(results_file_path, index=False) + +
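The rewritten filter_optimal_results below replaces the old accuracy-only selection with the skyline helpers imported from fp.utils (added elsewhere in this patch). Their implementations are not shown in this part of the diff, so the following is only a minimal sketch of what the two strategies plausibly compute, assuming every metric column is numeric and that higher is better; note that filter_optimal_results only reads the last element of the helpers' return value as the winning filename:

    import pandas as pd

    def skyline_order_sketch(metric_values: pd.DataFrame, order: list) -> str:
        # Lexicographic strategy: rank runs by the metrics in priority order
        # and return the filename of the top-ranked run.
        ranked = metric_values.astype({m: float for m in order}).sort_values(by=order, ascending=False)
        return ranked.iloc[0]['filenames']

    def skyline_formula_sketch(metric_values: pd.DataFrame, formula: dict) -> str:
        # Weighted strategy: score each run as a linear combination of its
        # validation metrics and return the highest-scoring filename.
        scores = sum(weight * metric_values[name].astype(float)
                     for name, weight in formula.items())
        return metric_values.loc[scores.idxmax(), 'filenames']

    def pareto_skyline_sketch(metric_values: pd.DataFrame, metrics: list) -> pd.DataFrame:
        # A classic skyline instead keeps every run that no other run dominates
        # (>= on all metrics and > on at least one).
        values = metric_values[metrics].astype(float).to_numpy()
        dominated = [any((other >= row).all() and (other > row).any() for other in values)
                     for row in values]
        return metric_values[[not d for d in dominated]]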
""" + results_dir_name = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../{}'.format(self.generate_file_path())) results_dir = os.listdir(Path(results_dir_name)) - accuracies = dict() - max_accuracy = 0 - # Fetching the accuracy from the row('val', 'None', 'accuracy') of all the experiment results + ##### SKYLINE FORMULA IMPLEMENTATION + + privileged_metric_names = ['num_true_positives', 'num_false_positives', 'num_false_negatives', + 'num_true_negatives', 'num_generalized_true_positives', + 'num_generalized_false_positives', 'num_generalized_false_negatives', + 'num_generalized_true_negatives', 'true_positive_rate', 'false_positive_rate', + 'false_negative_rate', 'true_negative_rate', 'generalized_true_positive_rate', + 'generalized_false_positive_rate', 'generalized_false_negative_rate', + 'generalized_true_negative_rate', 'positive_predictive_value', + 'false_discovery_rate', 'false_omission_rate', 'negative_predictive_value', + 'accuracy', 'error_rate', 'num_pred_positives', 'num_pred_negatives', + 'selection_rate'] + + dictionary = {} + filenames = list() for result_filename in results_dir: file_path = os.path.join(results_dir_name, result_filename) result_df = pd.read_csv(file_path) result_df.fillna(value='', inplace=True) - accuracy = (result_df.loc[(result_df['Split'] == 'val') & + for privileged_metric in privileged_metric_names: + if privileged_metric not in dictionary: + dictionary[privileged_metric] = list() + p_metric = (result_df.loc[(result_df['Split'] == 'val') & (result_df['PrivilegedStatus'] == '') & - (result_df['MetricName'] == 'accuracy'), 'MetricValue'].values[0]) - accuracies[result_filename] = accuracy - if accuracy > max_accuracy: - max_accuracy = accuracy + (result_df['MetricName'] == privileged_metric), 'MetricValue'].values[0]) + dictionary[privileged_metric].append(p_metric) + filenames.append(result_filename) + + privileged_metric_values = pd.DataFrame(dictionary) + privileged_metric_values['filenames'] = filenames + + if isinstance(self.optimal_validation_strategy, dict): + skyline_result = filter_optimal_results_skyline_formula(privileged_metric_values, self.optimal_validation_strategy) + else: + skyline_result = filter_optimal_results_skyline_order(privileged_metric_values, self.optimal_validation_strategy) - # List of non optimal and optimal filenames and accuracy non_optimal_filenames = list() optimal_filenames = list() - for filename, accuracy in accuracies.items(): - if accuracy != max_accuracy: - non_optimal_filenames.append(filename) + filenames_list = privileged_metric_values['filenames'].tolist() + for file_name in filenames_list: + if file_name == skyline_result[-1]: + optimal_filenames.append(file_name) else: - optimal_filenames.append(filename) + non_optimal_filenames.append(file_name) # Removing the test results from the non optimal experiment results + ''' for file_name in non_optimal_filenames: - file_path = os.path.join(results_dir_name, result_filename) + file_path = os.path.join(results_dir_name, file_name) result_df = pd.read_csv(file_path) result_df = result_df[(result_df['Split'] != 'test')] os.remove(file_path) result_df.to_csv(file_path, index=False, header=False) - + ''' # Renaming the optimal experiment results file (or files if tie) for file_name in optimal_filenames: file_path = os.path.join(results_dir_name, file_name) @@ -341,6 +366,7 @@ def run(self): self.validation_set_ratio, random_state=self.fixed_random_seed) + #Just returns complete data train_data = self.train_data_sampler.sample(all_train_data) 
second_split_ratio = self.test_set_ratio / (self.test_set_ratio + self.validation_set_ratio) @@ -348,6 +374,8 @@ def run(self): validation_data, test_data = train_test_split(test_and_validation_data, test_size=second_split_ratio, random_state=self.fixed_random_seed) + + # fit is a no-op here; handle_missing drops rows with missing values self.missing_value_handler.fit(train_data) filtered_train_data = self.missing_value_handler.handle_missing(train_data) @@ -374,10 +402,8 @@ def run(self): features_to_drop=self.attributes_to_drop_names, metadata=self.dataset_metadata ) - for pre_processor in self.pre_processors: for learner in self.learners: for post_processor in self.post_processors: - self.run_single_exp(annotated_train_data, validation_data, test_data, scalers, - pre_processor, learner, post_processor) + self.run_single_exp(annotated_train_data, validation_data, test_data, scalers, pre_processor, learner, post_processor) self.filter_optimal_results() diff --git a/fp/experiments_old.py b/fp/experiments_old.py new file mode 100644 index 0000000..cb5f33c --- /dev/null +++ b/fp/experiments_old.py @@ -0,0 +1,383 @@ +import os +import numpy as np +import pandas as pd + +from time import time +from pathlib import Path +from datetime import datetime +from aif360.datasets import StandardDataset +from aif360.metrics import ClassificationMetric +from sklearn.base import clone +from sklearn.metrics import roc_auc_score +from sklearn.model_selection import train_test_split + + +class BinaryClassificationExperiment: + + + def __init__(self, + fixed_random_seed, + test_set_ratio, + validation_set_ratio, + label_name, + positive_label, + numeric_attribute_names, + categorical_attribute_names, + attributes_to_drop_names, + train_data_sampler, + missing_value_handler, + numeric_attribute_scaler, + learners, + pre_processors, + post_processors, + protected_attribute_names, + privileged_classes, + privileged_groups, + unprivileged_groups, + dataset_metadata, + dataset_name): + + self.fixed_random_seed = fixed_random_seed + self.test_set_ratio = test_set_ratio + self.validation_set_ratio = validation_set_ratio + self.label_name = label_name + self.positive_label = positive_label + self.numeric_attribute_names = numeric_attribute_names + self.categorical_attribute_names = categorical_attribute_names + self.attributes_to_drop_names = attributes_to_drop_names + self.train_data_sampler = train_data_sampler + self.missing_value_handler = missing_value_handler + self.numeric_attribute_scaler = numeric_attribute_scaler + self.learners = learners + self.pre_processors = pre_processors + self.post_processors = post_processors + self.protected_attribute_names = protected_attribute_names + self.privileged_classes = privileged_classes + self.privileged_groups = privileged_groups + self.unprivileged_groups = unprivileged_groups + self.dataset_metadata = dataset_metadata + self.dataset_name = dataset_name + self.log_path = 'logs/' + self.exec_timestamp = self.generate_timestamp() + + + + # --- Helper Methods Begin ------------------------------------------------ + + + def unique_file_name(self, learner, pre_processor, post_processor): + return '{}__{}__{}__{}__{}__{}__{}'.format(self.dataset_name, + learner.name(), + self.missing_value_handler.name(), + self.train_data_sampler.name(), + self.numeric_attribute_scaler.name(), + pre_processor.name(), + post_processor.name()) + + + def generate_file_path(self, file_name=''): + dir_name = '{}_{}/'.format(self.exec_timestamp, self.dataset_name) + return self.log_path + dir_name +
file_name + + + def generate_timestamp(self): + return datetime.fromtimestamp(time()).strftime('%Y-%m-%d_%H-%M-%S-%f')[:-3] + + + def load_raw_data(self): + raise NotImplementedError + + + def learn_classifier(self, learner, annotated_train_data, fixed_random_seed): + return learner.fit_model(annotated_train_data, fixed_random_seed) + + + def preprocess_data(self, pre_processor, annotated_dataset): + return pre_processor.pre_process(annotated_dataset, self.privileged_groups, self.unprivileged_groups) + + + def post_process_predictions(self, post_processor, validation_dataset, validation_dataset_with_predictions, + testset_with_predictions): + return post_processor.post_process(validation_dataset, validation_dataset_with_predictions, + testset_with_predictions, self.fixed_random_seed, + self.privileged_groups, self.unprivileged_groups) + + + def apply_model(self, data, scalers, adjusted_annotated_train_data, pre_processor, learner, model): + filtered_data = self.missing_value_handler.handle_missing(data) + print(self.missing_value_handler.name(), 'removed', len(data) - len(filtered_data), + 'instances from validation data') + + for numerical_attribute, scaler in scalers.items(): + numerical_attribute_data = np.array(filtered_data[numerical_attribute]).reshape(-1, 1) + scaled_numerical_attribute_data = scaler.transform(numerical_attribute_data) + filtered_data.loc[:, numerical_attribute] = scaled_numerical_attribute_data + + annotated_data = StandardDataset( + df=filtered_data, + label_name=self.label_name, + favorable_classes=[self.positive_label], + protected_attribute_names=self.protected_attribute_names, + privileged_classes=self.privileged_classes, + categorical_features=self.categorical_attribute_names, + features_to_drop=self.attributes_to_drop_names, + metadata=self.dataset_metadata + ) + + adjusted_annotated_data = self.preprocess_data(pre_processor, annotated_data) + + train_feature_names = adjusted_annotated_train_data.feature_names + current_feature_names = adjusted_annotated_data.feature_names + + feature_names_in_train_but_not_in_current = set(train_feature_names).difference( + set(current_feature_names)) + + print("Injecting zero columns for features not present", feature_names_in_train_but_not_in_current) + + validation_data_df, _ = adjusted_annotated_data.convert_to_dataframe() + + for feature_name in feature_names_in_train_but_not_in_current: + validation_data_df.loc[:, feature_name] = 0.0 + + adjusted_annotated_data.feature_names = train_feature_names + adjusted_annotated_data.features = validation_data_df[train_feature_names].values.copy() + + adjusted_annotated__data_with_predictions = adjusted_annotated_data.copy() + + if learner.needs_annotated_data_for_prediction(): + adjusted_annotated__data_with_predictions = model.predict(adjusted_annotated_data) + else: + adjusted_annotated__data_with_predictions.labels = model.predict(adjusted_annotated_data.features) + + try: + class_probs = model.predict_proba(adjusted_annotated_data.features) + adjusted_annotated__data_with_predictions.scores = class_probs[:, 0] + except AttributeError: + print("WARNING: MODEL CANNOT ASSIGN CLASS PROBABILITIES") + + return adjusted_annotated_data, adjusted_annotated__data_with_predictions + + + def log_metrics(self, results_file, model, annotated_data, annotated_data_with_predictions, prefix): + metric = ClassificationMetric(annotated_data, annotated_data_with_predictions, + unprivileged_groups=self.unprivileged_groups, + privileged_groups=self.privileged_groups) + + privileged_metric_names = 
+
+
+    def log_metrics(self, results_file, model, annotated_data, annotated_data_with_predictions, prefix):
+        metric = ClassificationMetric(annotated_data, annotated_data_with_predictions,
+                                      unprivileged_groups=self.unprivileged_groups,
+                                      privileged_groups=self.privileged_groups)
+
+        privileged_metric_names = ['num_true_positives', 'num_false_positives', 'num_false_negatives',
+                                   'num_true_negatives', 'num_generalized_true_positives',
+                                   'num_generalized_false_positives', 'num_generalized_false_negatives',
+                                   'num_generalized_true_negatives', 'true_positive_rate', 'false_positive_rate',
+                                   'false_negative_rate', 'true_negative_rate', 'generalized_true_positive_rate',
+                                   'generalized_false_positive_rate', 'generalized_false_negative_rate',
+                                   'generalized_true_negative_rate', 'positive_predictive_value',
+                                   'false_discovery_rate', 'false_omission_rate', 'negative_predictive_value',
+                                   'accuracy', 'error_rate', 'num_pred_positives', 'num_pred_negatives',
+                                   'selection_rate']
+
+        # each group-level metric is logged three times: overall (None), for the
+        # privileged group (True) and for the unprivileged group (False)
+        for maybe_privileged in [None, True, False]:
+            for metric_name in privileged_metric_names:
+                metric_function = getattr(metric, metric_name)
+                metric_value = metric_function(privileged=maybe_privileged)
+                results_file.append([prefix, maybe_privileged, metric_name, metric_value])
+
+        if hasattr(model, 'predict_proba'):
+            auc = roc_auc_score(annotated_data.labels, model.predict_proba(annotated_data.features)[:, 1])
+        else:
+            auc = None
+
+        results_file.append([prefix, '', 'roc_auc', auc])
+
+        global_metric_names = ['true_positive_rate_difference', 'false_positive_rate_difference',
+                               'false_negative_rate_difference', 'false_omission_rate_difference',
+                               'false_discovery_rate_difference', 'false_positive_rate_ratio',
+                               'false_negative_rate_ratio', 'false_omission_rate_ratio',
+                               'false_discovery_rate_ratio', 'average_odds_difference', 'average_abs_odds_difference',
+                               'error_rate_difference', 'error_rate_ratio', 'disparate_impact',
+                               'statistical_parity_difference', 'generalized_entropy_index',
+                               'between_all_groups_generalized_entropy_index',
+                               'between_group_generalized_entropy_index', 'theil_index', 'coefficient_of_variation',
+                               'between_group_theil_index', 'between_group_coefficient_of_variation',
+                               'between_all_groups_theil_index', 'between_all_groups_coefficient_of_variation']
+
+        for metric_name in global_metric_names:
+            metric_function = getattr(metric, metric_name)
+            metric_value = metric_function()
+            results_file.append([prefix, '', metric_name, metric_value])
+
+        return results_file
+
+
+    # --- Helper Methods End --------------------------------------------------
+
+
+    def run_single_exp(self, annotated_train_data, validation_data, test_data, scalers, pre_processor,
+                       learner, post_processor):
+        """Executes a single experiment out of all possible experiment
+        combinations for the given parameters.
+
+        Parameters:
+        -----------
+        annotated_train_data : annotated aif360.datasets.StandardDataset of
+            train data
+
+        validation_data : pandas dataframe of validation data
+
+        test_data : pandas dataframe of test data
+
+        scalers : dictionary mapping each numerical feature name to its
+            fitted scaler
+
+        pre_processor : fairprep pre-processor abstraction from
+            aif360.algorithms.pre_processors
+
+        learner : fairprep learner abstraction from scikit-learn or
+            aif360.algorithms.inprocessing
+
+        post_processor : fairprep post-processor abstraction from
+            aif360.algorithms.post_processors
+        """
+
+        adjusted_annotated_train_data = self.preprocess_data(pre_processor, annotated_train_data)
+        model = self.learn_classifier(learner, adjusted_annotated_train_data, self.fixed_random_seed)
+
+        adjusted_annotated_train_data_with_predictions = adjusted_annotated_train_data.copy()
+
+        if learner.needs_annotated_data_for_prediction():
+            adjusted_annotated_train_data_with_predictions = model.predict(
+                adjusted_annotated_train_data_with_predictions)
+        else:
+            adjusted_annotated_train_data_with_predictions.labels = model.predict(
+                adjusted_annotated_train_data_with_predictions.features)
+
+        adjusted_annotated_validation_data, adjusted_annotated_validation_data_with_predictions = \
+            self.apply_model(validation_data, scalers, adjusted_annotated_train_data, pre_processor, learner, model)
+
+        adjusted_annotated_test_data, adjusted_annotated_test_data_with_predictions = \
+            self.apply_model(test_data, scalers, adjusted_annotated_train_data, pre_processor, learner, model)
+
+        adjusted_annotated_test_data_with_predictions = self.post_process_predictions(post_processor,
+                                                                                      adjusted_annotated_validation_data,
+                                                                                      adjusted_annotated_validation_data_with_predictions,
+                                                                                      adjusted_annotated_test_data_with_predictions)
+
+        results_file_name = '../{}{}-{}.csv'.format(
+            self.generate_file_path(), self.unique_file_name(pre_processor, learner, post_processor), self.fixed_random_seed)
+        results_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), results_file_name)
+
+        results_dir_name = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../{}'.format(self.generate_file_path()))
+        if not os.path.exists(results_dir_name):
+            os.makedirs(results_dir_name)
+
+        results_file = []
+
+        results_file = self.log_metrics(results_file, model, adjusted_annotated_validation_data,
+                                        adjusted_annotated_validation_data_with_predictions, 'val')
+        results_file = self.log_metrics(results_file, model, adjusted_annotated_train_data,
+                                        adjusted_annotated_train_data_with_predictions, 'train')
+        results_file = self.log_metrics(results_file, model, adjusted_annotated_test_data,
+                                        adjusted_annotated_test_data_with_predictions, 'test')
+
+        results_file = pd.DataFrame(results_file, columns=['Split', 'PrivilegedStatus', 'MetricName', 'MetricValue'])
+        results_file.to_csv(results_file_path, index=False)
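+
+    # An illustrative (not actual) excerpt of the per-experiment CSV written
+    # above -- one metric per row; the metric values here are made up:
+    #
+    #   Split,PrivilegedStatus,MetricName,MetricValue
+    #   val,,accuracy,0.84
+    #   val,True,true_positive_rate,0.61
+    #   val,False,true_positive_rate,0.55
+    #   test,,disparate_impact,0.79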
+ """ + results_dir_name = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../{}'.format(self.generate_file_path())) + results_dir = os.listdir(Path(results_dir_name)) + accuracies = dict() + max_accuracy = 0 + + # Fetching the accuracy from the row('val', 'None', 'accuracy') of all the experiment results + for result_filename in results_dir: + file_path = os.path.join(results_dir_name, result_filename) + result_df = pd.read_csv(file_path) + result_df.fillna(value='', inplace=True) + accuracy = (result_df.loc[(result_df['Split'] == 'val') & + (result_df['PrivilegedStatus'] == '') & + (result_df['MetricName'] == 'accuracy'), 'MetricValue'].values[0]) + accuracies[result_filename] = accuracy + if accuracy > max_accuracy: + max_accuracy = accuracy + + # List of non optimal and optimal filenames and accuracy + non_optimal_filenames = list() + optimal_filenames = list() + for filename, accuracy in accuracies.items(): + if accuracy != max_accuracy: + non_optimal_filenames.append(filename) + else: + optimal_filenames.append(filename) + + # Removing the test results from the non optimal experiment results + for file_name in non_optimal_filenames: + file_path = os.path.join(results_dir_name, result_filename) + result_df = pd.read_csv(file_path) + result_df = result_df[(result_df['Split'] != 'test')] + os.remove(file_path) + result_df.to_csv(file_path, index=False, header=False) + + # Renaming the optimal experiment results file (or files if tie) + for file_name in optimal_filenames: + file_path = os.path.join(results_dir_name, file_name) + optimal_file_name = '{}{}'.format(file_name[:-4], '__OPTIMAL.csv') + optimal_file_path = os.path.join(results_dir_name, optimal_file_name) + os.rename(file_path, optimal_file_path) + + + def run(self): + """Executes all the possible experiments from the combination of given + learners, pre-processors and post-processors. + + No. 
+
+
+    def run(self):
+        """Executes all possible experiments from the combinations of the
+        given learners, pre-processors and post-processors.
+
+        No. of experiments = (#learners * #preprocessors * #postprocessors)
+        """
+        np.random.seed(self.fixed_random_seed)
+
+        data = self.load_raw_data()
+
+        all_train_data, test_and_validation_data = train_test_split(data, test_size=self.test_set_ratio +
+                                                                    self.validation_set_ratio,
+                                                                    random_state=self.fixed_random_seed)
+
+        train_data = self.train_data_sampler.sample(all_train_data)
+
+        # fraction of the held-out data that goes to the test split
+        second_split_ratio = self.test_set_ratio / (self.test_set_ratio + self.validation_set_ratio)
+
+        validation_data, test_data = train_test_split(test_and_validation_data, test_size=second_split_ratio,
+                                                      random_state=self.fixed_random_seed)
+
+        self.missing_value_handler.fit(train_data)
+        filtered_train_data = self.missing_value_handler.handle_missing(train_data)
+
+        print(self.missing_value_handler.name(), 'removed', len(train_data) - len(filtered_train_data),
+              'instances from training data')
+
+        scalers = {}
+
+        for numerical_attribute in self.numeric_attribute_names:
+            numerical_attribute_data = np.array(filtered_train_data[numerical_attribute]).reshape(-1, 1)
+            scaler = clone(self.numeric_attribute_scaler).fit(numerical_attribute_data)
+            scaled_numerical_attribute_data = scaler.transform(numerical_attribute_data)
+
+            filtered_train_data.loc[:, numerical_attribute] = scaled_numerical_attribute_data
+            scalers[numerical_attribute] = scaler
+
+        annotated_train_data = StandardDataset(
+            df=filtered_train_data,
+            label_name=self.label_name,
+            favorable_classes=[self.positive_label],
+            protected_attribute_names=self.protected_attribute_names,
+            privileged_classes=self.privileged_classes,
+            categorical_features=self.categorical_attribute_names,
+            features_to_drop=self.attributes_to_drop_names,
+            metadata=self.dataset_metadata
+        )
+
+        for pre_processor in self.pre_processors:
+            for learner in self.learners:
+                for post_processor in self.post_processors:
+                    self.run_single_exp(annotated_train_data, validation_data, test_data, scalers,
+                                        pre_processor, learner, post_processor)
+        self.filter_optimal_results()
diff --git a/fp/utils.py b/fp/utils.py
new file mode 100644
index 0000000..6d9113e
--- /dev/null
+++ b/fp/utils.py
@@ -0,0 +1,43 @@
+import pandas as pd
+
+
+def filter_optimal_results_skyline_order(_df, _order_list):
+    # re-express selection_rate as its distance from parity (1), so that
+    # smaller values are better for it as well
+    _df['selection_rate'] = abs(1 - _df['selection_rate'])
+    # negative_predictive_value belongs here: a higher NPV is better
+    higher_is_better = ['num_true_positives', 'num_true_negatives', 'num_generalized_true_positives',
+                        'num_generalized_true_negatives', 'true_positive_rate', 'true_negative_rate',
+                        'generalized_true_positive_rate', 'generalized_true_negative_rate',
+                        'positive_predictive_value', 'negative_predictive_value', 'accuracy',
+                        'num_pred_positives']
+    # metrics not listed in higher_is_better (including all of the below) are
+    # sorted ascending, i.e. treated as lower-is-better
+    lower_is_better = ['selection_rate', 'num_false_positives', 'num_false_negatives',
+                       'num_generalized_false_positives', 'num_generalized_false_negatives', 'false_positive_rate',
+                       'false_negative_rate', 'generalized_false_positive_rate', 'generalized_false_negative_rate',
+                       'false_discovery_rate', 'false_omission_rate', 'error_rate',
+                       'num_pred_negatives']
+    order = []
+    for item in _order_list:
+        if item in higher_is_better:
+            order.append(False)  # descending: larger values first
+        else:
+            order.append(True)   # ascending: smaller values first
+    _df = _df.sort_values(_order_list, ascending=order)
+
+    # the first row after sorting is the optimal experiment under this ordering
+    return _df.values[0]
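+
+# Illustrative usage only (the dataframe and column choices are assumptions,
+# not part of the experiment pipeline): pick the best experiment row
+# lexicographically by accuracy first, then by false positive rate:
+#
+#   best_row = filter_optimal_results_skyline_order(results_df, ['accuracy', 'false_positive_rate'])
+#
+# filter_optimal_results_skyline_formula (below) instead ranks rows by a
+# weighted sum of min-max normalized metrics and sorts descending, so a
+# lower-is-better metric should be given a negative multiplier:
+#
+#   best_row = filter_optimal_results_skyline_formula(results_df, {'accuracy': 0.7, 'false_positive_rate': -0.3})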
+
+
+def filter_optimal_results_skyline_formula(_df, _formula):
+    df = pd.DataFrame()
+    # min-max normalize every metric named in the formula
+    for key in _formula:
+        df["norm_" + key] = (_df[key] - _df[key].min()) / (_df[key].max() - _df[key].min())
+
+    multipliers = list(_formula.values())
+    keys = list(_formula.keys())
+    for col in range(len(keys)):
+        keys[col] = "norm_" + keys[col]
+
+    # weighted sum of the normalized metrics, used as the sort key
+    df['norm_avg'] = df[keys].multiply(multipliers).sum(axis=1)
+    frames = [_df, df]
+    df_fin = pd.concat(frames, axis=1)
+
+    df_fin = df_fin.sort_values(by='norm_avg', ascending=False)
+    cols = [c for c in df_fin.columns if c[:4] != 'norm']
+    df_fin = df_fin[cols]
+    return df_fin.values[0]
\ No newline at end of file

From 19ccc5953d51ebae07cf80b4127d1bc0f7b0c8a8 Mon Sep 17 00:00:00 2001
From: Ke Yang
Date: Mon, 22 Jun 2020 09:38:59 -0400
Subject: [PATCH 2/7] Add pipeline constructors

---
 pipeline/fairness_label.py                  |  74 +++
 pipeline/label_pipeline.py                  | 506 ++++++++++++++++++++
 pipeline/model/classifiers.py               |  87 ++++
 pipeline/model/fair_classifiers.py          |  69 +++
 pipeline/model/inprocessor.py               |  74 +++
 pipeline/postprocess/fair_postprocessors.py |  66 +++
 pipeline/postprocess/postprocessor.py       |  59 +++
 pipeline/preprocess/categorizers.py         |  51 ++
 pipeline/preprocess/encoders.py             |  98 ++++
 pipeline/preprocess/fair_preprocessors.py   |  82 ++++
 pipeline/preprocess/filters.py              |  69 +++
 pipeline/preprocess/imputers.py             |  88 ++++
 pipeline/preprocess/preprocessor.py         |  71 +++
 pipeline/preprocess/samplers.py             |  96 ++++
 pipeline/preprocess/scalers.py              |  47 ++
 pipeline/preprocess/splitters.py            | 134 ++++++
 pipeline/preprocess/transformers.py         |  24 +
 pipeline/step.py                            |  49 ++
 18 files changed, 1744 insertions(+)
 create mode 100644 pipeline/fairness_label.py
 create mode 100644 pipeline/label_pipeline.py
 create mode 100644 pipeline/model/classifiers.py
 create mode 100644 pipeline/model/fair_classifiers.py
 create mode 100644 pipeline/model/inprocessor.py
 create mode 100644 pipeline/postprocess/fair_postprocessors.py
 create mode 100644 pipeline/postprocess/postprocessor.py
 create mode 100644 pipeline/preprocess/categorizers.py
 create mode 100644 pipeline/preprocess/encoders.py
 create mode 100644 pipeline/preprocess/fair_preprocessors.py
 create mode 100644 pipeline/preprocess/filters.py
 create mode 100644 pipeline/preprocess/imputers.py
 create mode 100644 pipeline/preprocess/preprocessor.py
 create mode 100644 pipeline/preprocess/samplers.py
 create mode 100644 pipeline/preprocess/scalers.py
 create mode 100644 pipeline/preprocess/splitters.py
 create mode 100644 pipeline/preprocess/transformers.py
 create mode 100644 pipeline/step.py

diff --git a/pipeline/fairness_label.py b/pipeline/fairness_label.py
new file mode 100644
index 0000000..ce74e6d
--- /dev/null
+++ b/pipeline/fairness_label.py
@@ -0,0 +1,74 @@
+from sklearn.metrics import confusion_matrix
+import numpy as np
+
+
+def get_static_label(df, sensi_atts, target_name, round_digit=3):
+    groupby_cols = sensi_atts+[target_name]
+    placeholder_att = list(set(df.columns).difference(groupby_cols))[0]
+
+    count_all = df[groupby_cols+[placeholder_att]].groupby(groupby_cols).count()
+    values_all = count_all.values
+    index_all = list(count_all.index)
+
+    res_dict = {}
+    if 0 < len(sensi_atts) <= 2:
+        if len(sensi_atts) == 1:
+            norm_cols = [target_name]
+        else:  # two sensitive attributes
+            norm_cols = [sensi_atts[0], target_name]
+        norm_values = df[norm_cols+[placeholder_att]].groupby(norm_cols).count().values
+
+        s1_n = len(df[sensi_atts[0]].unique())
+        t_n = len(df[target_name].unique())
+        for idx, tuple_i in enumerate(index_all):
+            if len(tuple_i[:-1]) == 1:
+                key_tuple = (tuple_i[0])
+            else:
+                key_tuple = tuple_i[:-1]
+            idx_denom = idx % 2 + int(idx / (s1_n*t_n))*t_n  # only works for a binary 2nd sensitive att
+            if key_tuple not in res_dict:
+                res_dict[key_tuple] = {tuple_i[-1]: round(values_all[idx][0]/norm_values[idx_denom][0], round_digit)}
+            else:
+                res_dict[key_tuple].update({tuple_i[-1]: round(values_all[idx][0]/norm_values[idx_denom][0], round_digit)})
+    else:  # more than 2 sensitive atts is not supported yet
+        pass
+    return res_dict
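+
+# Illustrative shape of the dict returned by get_static_label for a single
+# sensitive attribute (group/outcome names and proportions are made up;
+# each value is the share of that group within the given outcome):
+#   {'Female': {'<=50K': 0.33, '>50K': 0.15},
+#    'Male':   {'<=50K': 0.67, '>50K': 0.85}}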
+
+
+def compute_evaluation_metric_binary(true_y, pred_y, label_order):
+    TN, FP, FN, TP = confusion_matrix(true_y, list(pred_y), labels=label_order).ravel()
+    P = TP + FN
+    N = TN + FP
+    ACC = (TP+TN) / (P+N) if (P+N) > 0.0 else np.float64(0.0)
+    # every denominator is guarded so a split without one of the classes
+    # yields 0.0 instead of a division-by-zero inf/nan
+    return dict(
+        P=P, N=N,
+        PR=P / (P+N) if (P+N) > 0.0 else np.float64(0.0),
+        TPR=TP / P if P > 0.0 else np.float64(0.0),
+        TNR=TN / N if N > 0.0 else np.float64(0.0),
+        FPR=FP / N if N > 0.0 else np.float64(0.0),
+        FNR=FN / P if P > 0.0 else np.float64(0.0),
+        PPV=TP / (TP+FP) if (TP+FP) > 0.0 else np.float64(0.0),
+        NPV=TN / (TN+FN) if (TN+FN) > 0.0 else np.float64(0.0),
+        FDR=FP / (FP+TP) if (FP+TP) > 0.0 else np.float64(0.0),
+        FOR=FN / (FN+TN) if (FN+TN) > 0.0 else np.float64(0.0),
+        ACC=ACC,
+        ERR=1-ACC,
+        F1=2*TP / (2*TP+FP+FN) if (2*TP+FP+FN) > 0.0 else np.float64(0.0)
+    )
+
+
+def get_performance_label(df, sensi_atts, target_name, posi_target, output_metrics=["TPR", "FPR", "TNR", "FNR", "PR"], round_digit=3):
+    groupby_cols = sensi_atts+[target_name]
+    placeholder_att = list(set(df.columns).difference(groupby_cols))[0]
+
+    count_all = df[groupby_cols+[placeholder_att]].groupby(groupby_cols).count()
+    index_all = list(count_all.index)
+
+    res_dict = {}
+    # put the positive target value first so confusion_matrix treats it as the positive label
+    target_label_order = [posi_target, set(df[target_name]).difference([posi_target]).pop()]
+
+    for tuple_i in index_all:
+        if len(tuple_i[:-1]) == 1:
+            key_tuple = (tuple_i[0])
+        else:
+            key_tuple = tuple_i[:-1]
+        cur_q = []
+        for idx, vi in enumerate(tuple_i[:-1]):
+            cur_q.append("{}=='{}'".format(sensi_atts[idx], vi))
+        tuple_df = df.query(" and ".join(cur_q))
+        metrics_all = compute_evaluation_metric_binary(list(tuple_df[target_name]), list(tuple_df["pred_"+target_name]), target_label_order)
+        res_dict[key_tuple] = {x: round(metrics_all[x], round_digit) for x in metrics_all if x in output_metrics}
+
+    return res_dict
\ No newline at end of file
diff --git a/pipeline/label_pipeline.py b/pipeline/label_pipeline.py
new file mode 100644
index 0000000..5cc8321
--- /dev/null
+++ b/pipeline/label_pipeline.py
@@ -0,0 +1,506 @@
+"""
+    Class to run the pipeline
+
+"""
+import pandas as pd
+import numpy as np
+import os
+import warnings
+warnings.filterwarnings("ignore")
+from pipeline.preprocess.splitters import *
+from pipeline.preprocess.samplers import *
+from pipeline.preprocess.imputers import *
+from pipeline.preprocess.scalers import *
+from pipeline.preprocess.categorizers import *
+from pipeline.preprocess.encoders import *
+from pipeline.preprocess.fair_preprocessors import *
+from pipeline.model.classifiers import *
+from pipeline.model.fair_classifiers import *
+from pipeline.postprocess.fair_postprocessors import *
+
+# for integrity checks of user inputs
+SUPPORT_STEPS = {"Splitter": "Splitter", "Sampler": "Sampler", "Imputer": "Imputer", "Scaler": "Scaler",
+                 "Categorizer": "Categorizer", "Encoder": "Encoder", "SensitiveEncoder": "SensitiveAttEncoder",
+                 "FairPreprocessor": "AIF_", "model": "SK_OPT_", "FairPostprocessor": "AIF_Postprocessing"}
+
+ALL_STEPS = ["RandomSplitter", "BalanceTargetSplitter",
+             "RandomSampler", "BalancePopulationSampler",
+             "DropNAImputer", "ModeImputer", "DatawigImputer",
+             "SK_StandardScaler", "SK_MinMaxScaler",
+             "SK_Discretizer", "SK_Binarizer",
+             "SK_OrdinalEncoder", "SK_OneHotEncoder",
+             "CustomCateAttsEncoder",
+             "AIF_Reweighing", "AIF_DIRemover",
+             "SK_LogisticRegression", "SK_DecisionTree", "OPT_LogisticRegression", "OPT_DecisionTree", "AIF_AdversarialDebiasing",
+             "AIF_EqOddsPostprocessing", "AIF_CalibratedEqOddsPostprocessing"]
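+
+# A minimal illustrative `steps` list for run_pipeline (mirroring the examples
+# in __main__ below): one entry per stage, in SUPPORT_STEPS order, where None
+# means "use the default step, or skip an optional one". The dataset, column
+# names and parameters here are assumptions, not requirements:
+#
+#   example_steps = [
+#       ("BalanceTargetSplitter", [0.7, 0.3], "income-per-year"),  # Splitter
+#       None,                                                      # Sampler
+#       ("DropNAImputer", "?"),                                    # Imputer
+#       None,                                                      # Scaler
+#       None,                                                      # Categorizer
+#       None,                                                      # Encoder
+#       None,                                                      # SensitiveEncoder
+#       None,                                                      # FairPreprocessor
+#       ("SK_LogisticRegression", "income-per-year"),              # model
+#       None,                                                      # FairPostprocessor
+#   ]
+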
+PRINT_SPLIT = "\n===================================================\n"
+
+def init_input_steps(step_tuple, input_df):
+    # build the constructor call as a string, e.g. "RandomSampler(input_df, 10000)",
+    # and eval it to instantiate the step
+    step_str = step_tuple[0] + '(input_df, '
+    for pi in step_tuple[1:]:
+        if isinstance(pi, str):
+            step_str += "'"+pi + "', "
+        else:
+            step_str += str(pi) + ", "
+    end_idx = step_str.rfind(", ")
+    step_str = step_str[0:end_idx] + step_str[end_idx:].replace(", ", ")")
+    print(step_str)
+    return eval(step_str)
+
+
+class FairnessLabelPipeline():
+    def __init__(self, data_file_name, target_col, target_positive_values, sensitive_atts=[], protected_values={}, sep_flag=None, na_mark=None):
+        """
+
+        :param data_file_name: str, file name that stores the data.
+        :param target_col: str, the name of the target variable in above data.
+        :param target_positive_values: list of str, each str is a value of the target variable in above data that represents the positive outcome.
+        :param sensitive_atts: list, stores the user-specified sensitive attributes in above data. Optional.
+        :param protected_values: dict, stores the user-specified protected values for above sensitive attributes. Optional.
+               Key is the name in sensitive_atts, value is a list of str, representing the values of the attribute.
+               The order should map to the order in sensitive_atts.
+        """
+        # print(os.path.realpath(data_file_name))
+        if not os.path.exists(os.path.realpath(data_file_name)):
+            print("The data you specified doesn't exist!")
+            raise ValueError
+        if ".csv" not in data_file_name:
+            print("The data you specified is not valid! Only .csv files are supported.")
+            raise ValueError
+        data_name = data_file_name.replace(".csv", "")
+        self.data_name = data_name[data_name.rfind("/")+1:]
+        if sep_flag:
+            df = pd.read_csv(data_file_name, sep=sep_flag)
+        else:  # default ',' separated data
+            df = pd.read_csv(data_file_name)
+        if not df.shape[0]:
+            print("Uploaded data is empty!")
+            raise ValueError
+
+        # integrity check for target_col
+        if target_col is None or target_positive_values is None:
+            print("Need to specify target_col and target_positive_values!")
+            raise ValueError
+
+        if target_col not in df.columns:
+            print("Need to specify a valid target attribute to be predicted!")
+            raise ValueError
+        target_values = df[target_col].unique()
+        if len(target_values) != 2:
+            print("Only binary target features are supported for now!")
+            raise ValueError
+        if len(set(target_positive_values).intersection(target_values)) == 0:
+            print("Need to specify a valid target positive value!")
+            raise ValueError
+        self.target_col = target_col
+        self.target_positive_values = target_positive_values
+
+        # integrity check for sensitive_atts and protected_values
+        input_indicator = sum([len(x) == 0 for x in [sensitive_atts, protected_values]])
+        if input_indicator == 0:  # both are specified
+            if len(sensitive_atts) != len(protected_values):
+                print("Different sizes of input sensitive attributes and protected values!")
+                raise ValueError
+            if sum([len(set(protected_values[x]).difference(df[x].unique())) > 0 for x in protected_values]) > 0:
+                print("Some specified protected values do not appear in the column specified in sensitive_atts!")
+                raise ValueError
+        elif input_indicator == 1:  # one of the parameters is empty
+            print("Need to specify both sensitive_atts and protected_values!")
+            raise ValueError
+        else:  # both are empty
+            # TODO: add auto-generation for the below two variables: sensitive_atts and protected_values
+            # for adult only
+            sensitive_atts = ["sex", "race"]
+            protected_values = {"sex": ["Female"], "race": ["Black", "Asian-Pac-Islander", "Amer-Indian-Eskimo", "Other"]}
"Amer-Indian-Eskimo", "Other"]} + + self.sensitive_atts = sensitive_atts + # self.protected_values = protected_values + self.zero_mapping = {target_col: [x for x in target_values if x not in target_positive_values]} + self.zero_mapping.update(protected_values) + if na_mark: + self.na_mark = na_mark + else: + self.na_mark = None + + # refer numerical and categorical attributes first + # DataFrame.describe() usually returns 8 rows. + if df.describe().shape[0] == 8: + num_atts = set(df.describe().columns) + # DataFrame.describe() returns less than 8 rows when there is no numerical attribute. + else: + num_atts = set() + + cate_atts = set(df.columns).difference(num_atts) + if self.target_col in cate_atts: + cate_atts.remove(self.target_col) + if self.target_col in num_atts: + num_atts.remove(self.target_col) + for si in sensitive_atts: + cate_atts.remove(si) + self.num_atts = list(num_atts) + self.cate_atts = list(cate_atts) + self.df = df + + # record the sensitive attributes and target variable value mapping + sensi_target_value_mapping = {} + for atti in [self.target_col] + self.sensitive_atts: + atti_values = list(df[atti].unique()) + pro_values = self.zero_mapping[atti] + if len(pro_values) > 1: + pro_value_str = pro_values[0] + "&more" + else: + pro_value_str = pro_values[0] + other_values = list(set(atti_values).difference(pro_values)) + if len(other_values) > 1: + other_value_str = other_values[0] + "&more" + else: + other_value_str = other_values[0] + sensi_target_value_mapping[atti] = {0: pro_value_str, 1: other_value_str} + # print(sensi_target_value_mapping) + self.sensi_target_value_mapping = sensi_target_value_mapping + self.pipeline_id = self.data_name[:2] + + def init_necessary_steps(self, step_flag, apply_df, input_weights=[]): + # initialize the necessary steps + if step_flag == "Imputer": + return DropNAImputer(apply_df, na_mark=self.na_mark) + # elif step_flag == "Scaler": + # return SK_StandardScaler(apply_df, list(self.num_atts)) + elif step_flag == "Encoder": + return SK_OneHotEncoder(apply_df, list(self.cate_atts)) + elif step_flag == "SensitiveEncoder": + return CustomCateAttsEncoder(apply_df, self.sensitive_atts+[self.target_col], self.zero_mapping) + else: + return OPT_LogisticRegression(apply_df, self.target_col, instance_weights=input_weights) + + def print_necessary_steps(self): + # for printout and efficiency + return {"Imputer": ("DropNAImputer", "?"), + # "Scaler": ("SK_StandardScaler", list(self.num_atts)), + "Encoder": ("SK_OneHotEncoder", list(self.cate_atts)), + "SensitiveEncoder": ("CustomCateAttsEncoder", self.sensitive_atts+[self.target_col], self.zero_mapping), + "model": ("OPT_LogisticRegression", self.target_col)} + + + def run_pipeline(self, steps, return_test=True, output_interdata=False): + """ + + :param df: pandas dataframe, the data on which steps are applied. + :param steps: list of classes that represent the steps user want to perform on the above data. + Supported steps are listed in STEPS.md. + :return: two pandas dataframes: before_data and after_data. after_data is the data after applied the input steps. + + """ + + if not steps: + print("Require list of steps as input!") + raise ValueError + + if len(steps) < len(SUPPORT_STEPS): + print("Missing some input steps! 
+
+        if not steps:
+            print("Require a list of steps as input!")
+            raise ValueError
+
+        if len(steps) < len(SUPPORT_STEPS):
+            print("Missing some input steps! Required steps are listed in the order below.\n" + " ".join(SUPPORT_STEPS.keys()))
+            raise ValueError
+        if sum([len(set(x[0]).intersection(list(SUPPORT_STEPS.values())[idx])) == 0 for idx, x in enumerate(steps) if x is not None]) > 0:
+            print("Some input steps are not supported!")
+            raise ValueError
+        if sum([x[0] not in ALL_STEPS for x in steps if x is not None]) > 0:
+            print("Some input steps are not supported!")
+            raise ValueError
+        if sum([len(x) <= 1 for x in steps if x is not None]) > 0:
+            print("Some input steps don't include enough parameters!")
+            raise ValueError
+
+        # a fair post-processor needs a validation split, so guard against a
+        # user-specified splitter that only produces train and test data
+        if steps[-1] is not None and steps[0] is not None and len(steps[0][1]) == 2:
+            print("FairPostprocessor requires a validation set! Specify through split_ratio in Splitter!")
+            raise ValueError
+
+        self.pipeline_id = "_".join([self.pipeline_id]+[str(x[0]) for x in steps if x is not None])
+
+        support_step_names = list(SUPPORT_STEPS.keys())
+
+        # split the data into separate datasets for train, [validation], and test
+        if steps[0] is None:  # default splitter
+            if steps[-1] is not None:  # train, validation and test data
+                cur_splitter = BalanceTargetSplitter(self.df, [0.5, 0.3, 0.2], self.target_col)
+            else:  # train and test data
+                cur_splitter = BalanceTargetSplitter(self.df, [0.7, 0.3], self.target_col)
+        else:
+            cur_splitter = init_input_steps(steps[0], self.df)
+        after_data = cur_splitter.apply(self.df)
+        after_data = list(after_data)
+        if output_interdata:
+            self.save_inter_data(after_data, cur_splitter.get_name())
+        print("Done "+support_step_names[0]+PRINT_SPLIT)
+        # record the before data to output
+        before_data = [x for x in after_data]
+
+        # run sampler on each split (train, [validation], and test)
+        if steps[1] is not None:
+            for idx_df, cur_df in enumerate(after_data):
+                cur_sampler = init_input_steps(steps[1], cur_df)
+                after_data[idx_df] = cur_sampler.apply(cur_df)
+            if output_interdata:
+                self.save_inter_data(after_data, cur_sampler.get_name(), steps[:1])
+            print("Done "+support_step_names[1]+PRINT_SPLIT)
+
+        # run the preprocess steps: "Imputer", "Scaler", "Categorizer" that fit on train and apply on others
+        for idx, step_i in enumerate(steps[2:5]):
+            idx = idx + 2
+            step_i_key = support_step_names[idx]
+            # fit on train data
+            if step_i is None:
+                if step_i_key in list(self.print_necessary_steps().keys()):  # add default operation for necessary steps
+                    step_i = self.init_necessary_steps(step_i_key, after_data[0])
+                else:  # skip the step
+                    continue
+            else:  # user-specified step
+                step_i = init_input_steps(steps[idx], after_data[0])
+            # apply on train, validation and test data
+            for idx_df, cur_df in enumerate(after_data):
+                after_data[idx_df] = step_i.apply(cur_df)
+
+            if output_interdata:
+                self.save_inter_data(after_data, step_i.get_name(), steps[:idx])
+            print("Done " + support_step_names[idx] + PRINT_SPLIT)
+
+        # run the preprocess steps: "Encoder"
+        # fit and apply on the same data
+        after_data, encoder_name = self.run_encoder(steps[5], after_data)
+        if output_interdata:
+            self.save_inter_data(after_data, encoder_name, steps[:5])
+        print("Done " + support_step_names[5] + PRINT_SPLIT)
+
+        # run the preprocess steps: "SensitiveAttEncoder"
+        # fit and apply on the same data
+        if steps[6] is None:
+            for idx_df, cur_df in enumerate(after_data):
+                cur_sensi_encoder = self.init_necessary_steps("SensitiveEncoder", cur_df)
+                after_data[idx_df] = cur_sensi_encoder.apply(cur_df)
+        else:  # user-specified sensitive encoder
+            for idx_df, cur_df in enumerate(after_data):
+                cur_sensi_encoder = init_input_steps(steps[6], cur_df)
+                after_data[idx_df] = cur_sensi_encoder.apply(cur_df)
+        if output_interdata:
+            self.save_inter_data(after_data, cur_sensi_encoder.get_name(), steps[:6])
+        print("Done " + support_step_names[6] + PRINT_SPLIT)
+
+        # unit instance weights by default, so the model step below always has
+        # valid weights; AIF_Reweighing replaces them with learned weights
+        weights = [[1 for _ in range(x.shape[0])] for x in after_data]
+
+        # run the preprocess steps: "FairPreprocessor"
+        # fit and apply on the same data
+        if steps[7] is not None:
+            for idx_df, cur_df in enumerate(after_data):
+                cur_fair_preprocessor = init_input_steps(steps[7], cur_df)
+                if "AIF_Reweighing" in cur_fair_preprocessor.get_name():  # special check for methods updating sample weights
+                    after_data[idx_df], weights[idx_df] = cur_fair_preprocessor.apply(cur_df)
+                else:
+                    after_data[idx_df] = cur_fair_preprocessor.apply(cur_df)
+            if output_interdata:
+                self.save_inter_data(after_data, cur_fair_preprocessor.get_name(), steps[:7])
+            print("Done " + support_step_names[7] + PRINT_SPLIT)
+
+            # after fair-preprocessing, rerun the encoder
+            after_data, encoder_name = self.run_encoder(steps[5], after_data)
+            if output_interdata:
+                self.save_inter_data(after_data, encoder_name+"_prep", steps[:5])
+            print("Done " + support_step_names[5] + " for fair preprocessor "+PRINT_SPLIT)
+
+        # run model step
+        # fit on train data
+        if steps[8] is None:
+            if weights:
+                cur_model = self.init_necessary_steps("model", after_data[0], input_weights=weights[0])
+            else:
+                cur_model = self.init_necessary_steps("model", after_data[0])
+        else:  # TODO: add the support for weights in user-specified models
+            cur_model = init_input_steps(steps[8], after_data[0])
+
+        # predict on train, validation and test data
+        for idx_df, cur_df in enumerate(after_data):
+            after_data[idx_df] = cur_model.apply(cur_df)
+        if output_interdata:
+            self.save_inter_data(after_data, cur_model.get_name(), steps[:8])
+        print("Done " + support_step_names[8] + PRINT_SPLIT)
+
+        # run fair postprocess step
+        if steps[9] is not None:
+            # encode first
+            after_data, encoder_name = self.run_encoder(steps[5], after_data)
+            if output_interdata:
+                self.save_inter_data(after_data, encoder_name+"_post", steps[:5])
+            print("Done " + support_step_names[5] + " for fair post processor " + PRINT_SPLIT)
+            # fit on validation data
+            cur_postprocessor = init_input_steps(steps[9], after_data[1])
+            # predict on validation and test data
+            for idx_df, cur_df in enumerate(after_data[1:]):
+                after_data[idx_df+1] = cur_postprocessor.apply(cur_df)
+            if output_interdata:
+                self.save_inter_data(after_data, cur_postprocessor.get_name(), steps[:9])
+            print("Done " + support_step_names[9] + PRINT_SPLIT)
+
+        # transfer back to original values for encoded sensitive and target columns
+        for idx, df_i in enumerate(after_data):
+            for atti in [self.target_col] + self.sensitive_atts:
+                df_i[atti] = df_i[atti].apply(lambda x: self.sensi_target_value_mapping[atti][x])
+            if "pred_"+self.target_col in df_i.columns:
+                df_i["pred_"+self.target_col] = df_i["pred_"+self.target_col].apply(lambda x: int(x >= 0.5))
+                df_i["pred_"+self.target_col] = df_i["pred_"+self.target_col].apply(lambda x: self.sensi_target_value_mapping[self.target_col][x])
+
+        if return_test:  # only return the before and after of the test data
+            return before_data[-1], after_data[-1]
+        else:  # return all before and after data
+            return before_data, after_data
+
+    def run_encoder(self, encode_step_tuple, data_list):
+        # run the preprocess steps: "Encoder"
+        # fit and apply on the same data
+        if len(self.cate_atts) > 0:
+            for idx_df, cur_df in enumerate(data_list):
+                if encode_step_tuple is None:  # default encoder
+                    cur_encoder = self.init_necessary_steps(list(SUPPORT_STEPS.keys())[5], cur_df)
+                else:
+
+                    # check for a user-specified encoder that covers only part of the categorical atts
+                    non_encoded_cate = set(self.cate_atts).difference(encode_step_tuple[1])
+                    if non_encoded_cate:
+                        cur_encoder = init_input_steps((encode_step_tuple[0], self.cate_atts), cur_df)
+                    else:
+                        cur_encoder = init_input_steps(encode_step_tuple, cur_df)
+                data_list[idx_df] = cur_encoder.apply(cur_df)
+
+            # check for different dimensions after encoding for validation and test set
+            if len(data_list) > 2:
+                for idx_df, cur_df in enumerate(data_list[1:]):
+                    if cur_df.shape[1] != data_list[0].shape[1]:
+                        for feature_i in set(data_list[0].columns).difference(cur_df.columns):
+                            cur_df[feature_i] = 0.0
+                        data_list[idx_df + 1] = cur_df.copy()  # idx_df enumerates data_list[1:], so shift by 1
+            else:  # check the dimensions for train and test set
+                if data_list[0].shape[1] != data_list[1].shape[1]:
+                    diff_features_1 = set(data_list[0].columns).difference(data_list[1].columns)
+                    diff_features_2 = set(data_list[1].columns).difference(data_list[0].columns)
+                    add_df = data_list[1].copy()
+                    for feature_i in diff_features_1.union(diff_features_2):
+                        if feature_i not in add_df.columns:
+                            add_df[feature_i] = 0.0
+                    data_list[1] = add_df.copy()
+
+                    add_df = data_list[0].copy()
+                    for feature_i in diff_features_1.union(diff_features_2):
+                        if feature_i not in add_df.columns:
+                            add_df[feature_i] = 0.0
+                    data_list[0] = add_df.copy()
+
+            return data_list, cur_encoder.get_name()
+        else:
+            return data_list, "None"
+
+    def save_inter_data(self, input_dfs, step_name, pre_steps=[], path="data/inter_data/"):
+        if len(input_dfs) == 2:
+            suffix_names = ["train", "test"]
+        else:
+            suffix_names = ["train", "validation", "test"]
+
+        for idx, df_i in enumerate(input_dfs):
+            output_df_i = df_i.copy()
+            df_path = os.path.realpath(path) + "/" + self.pipeline_id + "/" + suffix_names[idx] + "/"
+            if not os.path.exists(df_path):
+                os.makedirs(df_path)
+            if pre_steps:
+                pre_step_names = [x[0] for x in pre_steps if x is not None]+[step_name]
+            else:
+                pre_step_names = [step_name]
+
+            for atti in [self.target_col] + self.sensitive_atts:
+                if not isinstance(output_df_i[atti].values[0], str):
+                    output_df_i[atti] = output_df_i[atti].apply(lambda x: self.sensi_target_value_mapping[atti][x])
+            if "pred_"+self.target_col in output_df_i.columns:
+                output_df_i["pred_"+self.target_col] = output_df_i["pred_"+self.target_col].apply(lambda x: int(x >= 0.5))
+                output_df_i["pred_"+self.target_col] = output_df_i["pred_"+self.target_col].apply(lambda x: self.sensi_target_value_mapping[self.target_col][x])
+
+            output_name = df_path + "__".join([self.data_name, "after"]+[x[:x.find("@")] for x in pre_step_names]) + ".csv"
+            print("!!!!!!!", suffix_names[idx], output_df_i.shape, "!!!!!!!")
+            output_df_i.to_csv(output_name, index=False)
+            print("Current "+suffix_names[idx]+" data after "+" ".join([x[:x.find("@")] for x in pre_step_names])+" \n Stored in ", output_name)
+            print()
+
+
+
+
+if __name__ == '__main__':
+    # input_steps = [("BalanceTargetSplitter", [0.5, 0.3, 0.2], "income-per-year"),
+    # ("RandomSampler", 10000), # sampler
+    # ("DropNAImputer", "?"),
+    # ("SK_StandardScaler", ["fnlwgt", "age"]),
+    # ("SK_Discretizer", ["fnlwgt", "age"], [2, 3]),
+    # ("SK_OneHotEncoder", ["workclass"]), # encoder
+    # ("CustomCateAttsEncoder", ["sex", "race", "income-per-year"], {"sex": ["Female"], "race": ["Black"], "income-per-year": ["<=50K"]}),
+    # ("AIF_DIRemover", "income-per-year", "sex", 0.8), # fair-preprocessor
+    # 
("AIF_AdversarialDebiasing", "income-per-year", "sex"), # test Adversial learning + # ("AIF_CalibratedEqOddsPostprocessing", "income-per-year", "sex") # fair-post-postprocessor + # ] + # cur_pip = FairnessLabelPipeline(data_file, y_col, y_posi, sensitive_atts=sensi_atts, protected_values=sensi_pro_valus, na_mark="?") + # before_test, after_test = cur_pip.run_pipeline(input_steps, return_test=True, output_interdata=True) + + # data_file = "../data/adult.csv" + # y_col = "income-per-year" + # y_posi = [">50K"] + # na_symbol = "?" + # sensi_atts = ["sex", "race"] + # sensi_pro_valus = {"sex": ["Female"], "race": ["Black", "Asian-Pac-Islander", "Amer-Indian-Eskimo", "Other"]} + + # data_file = "../data/mylsn_cleaned_2.csv" + # y_col = "status" + # y_posi = ["Ac"] + # na_symbol = "N/A" + # sensi_atts = ["sex", "race"] + # sensi_pro_valus = {"sex": ["female"], "race": ["black", "hispanic", "native-american", "asian"]} + + # + # input_steps = [("BalanceTargetSplitter", [0.7, 0.3], y_col), # splitter + # None, # sampler + # ("DropNAImputer", "?"), # imputer + # None, # scaler + # ("SK_Discretizer", ["age"], [3]), # categorizer + # None, # encoder + # None, # sensitive att and target encoder + # None, # fair-preprocessor + # ("OPT_LogisticRegression", y_col), # model + # None # fair-post-postprocessor + # ] + + # debias_focus_att = "race" + # input_steps = [("BalanceTargetSplitter", [0.7, 0.3], y_col), + # ("RandomSampler", 5000), # sampler + # None, # imputer + # None, # scaler + # None, + # None, # encoder + # None, + # None, #("AIF_DIRemover", y_col, debias_focus_att, 1.0), # fair-preprocessor + # ("SK_LogisticRegression", y_col), # model + # None # fair-post-postprocessor + # ] + + data_file = "../data/german_AIF.csv" + y_col = "credit" + y_posi = ["good"] + sensi_atts = ["age", "sex"] + sensi_pro_valus = {"age": ["young"], "sex": ["female"]} + debias_focus_att = "age" + fair_steps = [("BalanceTargetSplitter", [0.7, 0.3], y_col), + None, # sampler + None, # ("ModeImputer", [], ["workclass"], "?"), # imputer + None, # scaler + None, # categorizer + None, # encoder + None, + ("AIF_Reweighing", y_col, debias_focus_att), # fair-preprocessor + None, # ("OPT_LogisticRegression", y_col), # model + None # fair-post-postprocessor + ] + + cur_pip = FairnessLabelPipeline(data_file, y_col, y_posi, sensitive_atts=sensi_atts, + protected_values=sensi_pro_valus) + before_test, after_test = cur_pip.run_pipeline(fair_steps, return_test=True, output_interdata=True) + diff --git a/pipeline/model/classifiers.py b/pipeline/model/classifiers.py new file mode 100644 index 0000000..248311f --- /dev/null +++ b/pipeline/model/classifiers.py @@ -0,0 +1,87 @@ +""" + Classes of supervised binary classifiers. +""" + +import pandas as pd +from sklearn.linear_model import SGDClassifier +from sklearn.tree import DecisionTreeClassifier +from pipeline.model.inprocessor import Model + + +class SK_LogisticRegression(Model): + def __init__(self, df, target_col, loss_func="log", instance_weights=[], seed=0): + """ + :param df: pandas dataframe, stores the data to fit the classifier. + :param target_col: str, the name of the target variable in above data. + :param loss_func: str, the name of the loss function used in linear model. Same as the loss parameter in sklearn.linear_model.SGDClassifier. + The possible options are ‘hinge’, ‘log’, ‘modified_huber’, ‘squared_hinge’, ‘perceptron’, or a regression loss: ‘squared_loss’, ‘huber’, ‘epsilon_insensitive’, or ‘squared_epsilon_insensitive’. 
+ :param instance_weights: list of float, each number represents the weight of the sample in above data. + :param seed: integer, the seed for random state. + """ + + cur_step = SGDClassifier(loss=loss_func, random_state=seed) + super().__init__("@".join(["SK_LogisticRegression", target_col]), cur_step, df, target_col, instance_weights=instance_weights) + + +class SK_DecisionTree(Model): + def __init__(self, df, target_col, instance_weights=[], seed=0): + """ + :param df: pandas dataframe, stores the data to fit the classifier. + :param target_col: str, the name of the target variable in above data. + :param instance_weights: list of float, each number represents the weight of the sample in above data. + :param seed: integer, the seed for random state. + """ + cur_step = DecisionTreeClassifier(random_state=seed) + super().__init__("@".join(["SK_DecisionTree", target_col]), cur_step, df, target_col, instance_weights=instance_weights) + + +class OPT_LogisticRegression(Model): + def __init__(self, df, target_col, loss_func="log", max_iter=1000, instance_weights=[], seed=0): + """ + :param df: pandas dataframe, stores the data to fit the classifier. + :param target_col: str, the name of the target variable in above data. + :param loss_func: str, the name of the loss function used in linear model. Same as the loss parameter in sklearn.linear_model.SGDClassifier. + The possible options are ‘hinge’, ‘log’, ‘modified_huber’, ‘squared_hinge’, ‘perceptron’, or a regression loss: ‘squared_loss’, ‘huber’, ‘epsilon_insensitive’, or ‘squared_epsilon_insensitive’. + :param max_iter: integer, max number of iterations of the model. + :param instance_weights: list of float, each number represents the weight of the sample in above data. + :param seed: integer, random seed. + """ + # Update below parameters according to the loss function used + param_grid = { + 'learner__loss': [loss_func], + 'learner__penalty': ['l2', 'l1', 'elasticnet'], + 'learner__alpha': [0.00005, 0.0001, 0.005, 0.001] + } + cur_step = SGDClassifier(max_iter=max_iter, random_state=seed) + super().__init__("@".join(["OPT_LogisticRegression", target_col]), cur_step, df, target_col, instance_weights=instance_weights, hyper_tune=True, param_grid=param_grid) + +class OPT_DecisionTree(Model): + def __init__(self, df, target_col, instance_weights=[], seed=0): + """ + :param df: pandas dataframe, stores the data to fit the classifier. + :param target_col: str, the name of the target variable in above data. + :param instance_weights: list of float, each number represents the weight of the sample in above data. + :param seed: integer, random seed. 
+ """ + param_grid = { + 'learner__min_samples_split': range(20, 500, 10), + 'learner__max_depth': range(15, 30, 2), + 'learner__min_samples_leaf': [3, 4, 5, 10], + "learner__criterion": ["gini", "entropy"] + } + + cur_step = DecisionTreeClassifier(random_state=seed) + super().__init__("@".join(["OPT_DecisionTree", target_col]), cur_step, df, target_col, instance_weights=instance_weights, hyper_tune=True, param_grid=param_grid) + + +if __name__ == '__main__': + data = pd.read_csv("../../data/adult_pre_reweigh.csv") + cur_o = SK_LogisticRegression(data, "income-per-year") + # cur_o = SK_DecisionTree(data, "income-per-year") + # cur_o = OPT_LogisticRegression(data, "income-per-year") + # cur_o = OPT_DecisionTree(data, "income-per-year") + + after_data = cur_o.apply(data) + after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False) + + print(cur_o.get_name()) \ No newline at end of file diff --git a/pipeline/model/fair_classifiers.py b/pipeline/model/fair_classifiers.py new file mode 100644 index 0000000..085e968 --- /dev/null +++ b/pipeline/model/fair_classifiers.py @@ -0,0 +1,69 @@ +""" + Classes of fair supervised binary classifiers. +""" + +import pandas as pd +from aif360.algorithms.inprocessing import AdversarialDebiasing +from aif360.algorithms.inprocessing import MetaFairClassifier +from aif360.algorithms.inprocessing import PrejudiceRemover +from pipeline.model.inprocessor import Model +import warnings +warnings.filterwarnings("ignore") + +class AIF_AdversarialDebiasing(Model): + + def __init__(self, df, target_col, sensitive_att, seed=0): + """ + :param df: pandas dataframe, stores the data to fit the fair classifier. + :param target_col: str, the name of the target variable in above data. + :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. + :param seed: integer, random seed. + + """ + + import tensorflow as tf + sess = tf.Session() + cur_step = AdversarialDebiasing(unprivileged_groups=[{sensitive_att: 0}], privileged_groups=[{sensitive_att: 1}], scope_name='debiased_classifier', debias=True, sess=sess, seed=seed) + super().__init__("@".join(["AIF_AdversarialDebiasing", sensitive_att]), cur_step, df, target_col, sensitive_att=sensitive_att, fair_aware=True) + + +class AIF_MetaFairClassifier(Model): + + def __init__(self, df, target_col, sensitive_att, fairness_penalty=0.8, fair_metric="sr"): + """ + :param df: pandas dataframe, stores the data to fit the fair classifier. + :param target_col: str, the name of the target variable in above data. + :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. + :param fairness_penalty: float in [0,1], fairness penalty parameter. default is 0.8. The same parameter in aif360.algorithms.inprocessing.MetaFairClassifier. + :param fair_metric: str, fairness metric used in this method. Value from ["fdr" (false discovery rate ratio), "sr" (statistical rate/disparate impact)]. + The same parameter in aif360.algorithms.inprocessing.MetaFairClassifier. 
+ """ + + cur_step = MetaFairClassifier(tau=fairness_penalty, sensitive_attr=sensitive_att, type=fair_metric) + super().__init__("@".join(["AIF_MetaFairClassifier", sensitive_att]), cur_step, df, target_col, sensitive_att=sensitive_att, fair_aware=True) + +class AIF_PrejudiceRemover(Model): + + def __init__(self, df, target_col, sensitive_att, fairness_penalty=1.0): + """ + :param df: pandas dataframe, stores the data to fit the fair classifier. + :param target_col: str, the name of the target variable in above data. + :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. + :param fairness_penalty: float in [0,1], fairness penalty parameter. default is 1. The same parameter in aif360.algorithms.inprocessing.PrejudiceRemover. + + """ + # TODO: fix the bug that cannot import lib of 'getoutput' + cur_step = PrejudiceRemover(eta=fairness_penalty, sensitive_attr=sensitive_att, class_attr=target_col) + super().__init__("@".join(["AIF_PrejudiceRemover", sensitive_att]), cur_step, df, target_col, sensitive_att=sensitive_att, fair_aware=True) + + +if __name__ == '__main__': + data = pd.read_csv("../../data/adult_pre_reweigh.csv") + cur_o = AIF_AdversarialDebiasing(data, "income-per-year", "sex") + # cur_o = AIF_MetaFairClassifier(data, "income-per-year", "sex") + # cur_o = AIF_PrejudiceRemover(data, "income-per-year", "sex") + + after_data = cur_o.apply(data) + after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False) + + print(cur_o.get_name()) \ No newline at end of file diff --git a/pipeline/model/inprocessor.py b/pipeline/model/inprocessor.py new file mode 100644 index 0000000..3df3909 --- /dev/null +++ b/pipeline/model/inprocessor.py @@ -0,0 +1,74 @@ +""" + Super class for all the supported classifier classes including fair-classifiers. +""" +import numpy as np +from aif360.datasets import BinaryLabelDataset +from sklearn.model_selection import GridSearchCV +from sklearn.pipeline import Pipeline +from pipeline.step import Step + +class Model(Step): + def __init__(self, step_name, step, df, target_col, instance_weights=[], hyper_tune=False, param_grid={}, sensitive_att=None, fair_aware=False, target_positive=1): + """ + :param step_name: str, name of the current input step. + :param step: object of the initialized class. + :param df: pandas dataframe, stores the data. + :param target_col: str, the name of the target attribute. + :param instance_weights: list of float in [0,1], each float represents the weight of the sample in above data. + :param hyper_tune: boolean, whether to tune the hyper-parameter. Default is False. + :param param_grid: dict, stores the search range of the hyper-parameter. When hyper_tune is True, this must be provided. + :param sensitive_att: str, the name of a sensitive attribute. + :param fair_aware: boolean, whether the model is fair-aware. Default is False. + :param target_positive: integer, 0 or 1, represents the positive value of the target attribute. Default is 1. 
+        """
+
+        super().__init__(step_name=step_name, df=df, sensitive_att=sensitive_att, target_col=target_col)
+        # assume the data set has been encoded to numerical values
+        if fair_aware:  # fair classifiers
+            # initialize a BinaryLabelDataset from AIF360
+            aif_df = BinaryLabelDataset(df=df, label_names=[target_col], protected_attribute_names=[sensitive_att])
+            fitted_step = step.fit(aif_df)
+            input_score = False
+        else:  # regular classifiers
+            if len(instance_weights) == 0:
+                instance_weights = [1 for _ in range(df.shape[0])]
+            if hyper_tune:  # grid search for the best hyper-parameters
+                if not param_grid:
+                    print("Need to specify the search range of the hyper parameters - 'param_grid' is empty!")
+                    raise ValueError
+
+                search = GridSearchCV(Pipeline([('learner', step)]), param_grid, scoring='roc_auc', cv=5, verbose=1, n_jobs=-1)
+                fitted_step = search.fit(np.array(df.drop(columns=[target_col])), np.array(df[target_col]), None, **{'learner__sample_weight': instance_weights})
+            else:
+                fitted_step = step.fit(np.array(df.drop(columns=[target_col])), np.array(df[target_col]), sample_weight=instance_weights)
+            input_score = True
+
+        self.input_score = input_score
+        self.step = fitted_step
+        self.target_positive = target_positive
+
+
+    def apply(self, df):
+        """
+        :param df: pandas dataframe, stores the data to predict on with the learned model.
+        :return: pandas dataframe, stores the data with an added prediction column.
+        """
+
+        # initialize an AIF360 BinaryLabelDataset
+
+        if self.input_score:  # for a regular model, generate score predictions
+            aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[])
+            after_df, _ = aif_pred_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True)
+
+            favorable_class_idx = list(self.step.classes_).index(self.target_positive)
+            after_df[self.pred_target_col] = [x[favorable_class_idx] for x in self.step.predict_proba(np.array(df.drop(columns=[self.target_col])))]
+
+        else:  # for a fair model, generate label predictions
+            aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col],
+                                             protected_attribute_names=[self.sensitive_att])
+
+            after_aif_df = self.step.predict(aif_pred_df)
+            after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True)
+            after_df[self.pred_target_col] = after_aif_df.labels
+
+        return after_df
diff --git a/pipeline/postprocess/fair_postprocessors.py b/pipeline/postprocess/fair_postprocessors.py
new file mode 100644
index 0000000..efa5b1c
--- /dev/null
+++ b/pipeline/postprocess/fair_postprocessors.py
@@ -0,0 +1,66 @@
+"""
+    Classes to post-process data and model outcomes
+"""
+
+import pandas as pd
+from aif360.algorithms.postprocessing import CalibratedEqOddsPostprocessing
+from aif360.algorithms.postprocessing import EqOddsPostprocessing
+from aif360.algorithms.postprocessing import RejectOptionClassification
+from pipeline.postprocess.postprocessor import Postprocessor
+
+class AIF_EqOddsPostprocessing(Postprocessor):
+
+    def __init__(self, df, target_col, sensitive_att, threshold=0.5, seed=0):
+        """
+        :param df: pandas dataframe, stores the data to fit the postprocessor.
+        :param target_col: str, the name of the target variable in above data. Assume 1 represents the favorable class.
+        :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represents protected.
+        :param threshold: float in [0, 1], the classification threshold to generate the predicted class label.
+ :param seed: integer, the seed for random state. + """ + + cur_step = EqOddsPostprocessing([{sensitive_att: 0}], [{sensitive_att: 1}], seed) + super().__init__("@".join(["AIF_EqOddsPostprocessing", sensitive_att]), cur_step, df, sensitive_att, target_col, input_score=False, clf_threshold=threshold) + + +class AIF_CalibratedEqOddsPostprocessing(Postprocessor): + + def __init__(self, df, target_col, sensitive_att, threshold=0.5, seed=0, cost_constraint='weighted'): + """ + :param df: pandas dataframe, stores the data to fit the postprocessor. + :param target_col: str, the name of the target variable in above data. Assume 1 represents the favorable class. + :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. + :param threshold: float in [0, 1], the classification threshold to generate the predicted class label. + :param seed: integer, the seed for random state. + :param cost_constraint: str, the fairness constraints format, value from [fpr, fnr, weighted]. + The same parameter as in aif360.algorithms.postprocessing.CalibratedEqOddsPostprocessing. + """ + + cur_step = CalibratedEqOddsPostprocessing([{sensitive_att: 0}], [{sensitive_att: 1}], cost_constraint=cost_constraint, seed=seed) + super().__init__("@".join(["AIF_CalibratedEqOddsPostprocessing", sensitive_att]), cur_step, df, sensitive_att, target_col, input_score=True, clf_threshold=threshold) + +class AIF_RejectOptionPostprocessing(Postprocessor): + + def __init__(self, df, target_col, sensitive_att, threshold=0.5): + """ + :param df: pandas dataframe, stores the data to fit the postprocessor. + :param target_col: str, the name of the target variable in above data. Assume 1 represents the favorable class. + :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. + :param threshold: float in [0, 1], the classification threshold to generate the predicted class label. + """ + # TODO: fix the bug that reject option doesn't return results + cur_step = RejectOptionClassification([{sensitive_att: 0}], [{sensitive_att: 1}]) + super().__init__("@".join(["AIF_RejectOptionClassification", sensitive_att]), cur_step, df, sensitive_att, target_col, input_score=True, clf_threshold=threshold) + + + +if __name__ == '__main__': + data = pd.read_csv("../../data/adult_post.csv") + # cur_o = AIF_RejectOptionPostprocessing(data, "income-per-year", "sex") + cur_o = AIF_EqOddsPostprocessing(data, "income-per-year", "sex") + # cur_o = AIF_CalibratedEqOddsPostprocessing(data, "income-per-year", "sex") + + after_data = cur_o.apply(data) + after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False) + + print(cur_o.get_name()) \ No newline at end of file diff --git a/pipeline/postprocess/postprocessor.py b/pipeline/postprocess/postprocessor.py new file mode 100644 index 0000000..f10494c --- /dev/null +++ b/pipeline/postprocess/postprocessor.py @@ -0,0 +1,59 @@ +""" + Super class for all the supported postprocessor classes. +""" +import numpy as np +from aif360.datasets import BinaryLabelDataset +from pipeline.step import Step + +class Postprocessor(Step): + def __init__(self, step_name, step, df, sensitive_att, target_col, input_score=True, clf_threshold=0.5): + """ + :param step_name: str, name of the current input step. + :param step: object of the initialized class. + :param df: pandas dataframe, stores the data. 
:param sensitive_att: str, the name of a sensitive attribute.
+        :param target_col: str, the name of the target attribute.
+        :param input_score: boolean, represents whether the post-processor takes the predicted score as input. Default is True.
+        :param clf_threshold: float in [0, 1], represents the threshold to categorize class labels from predicted scores.
+        """
+        if "pred_"+target_col not in df.columns:
+            print("Requires the predictions for", target_col, "to exist in the data!")
+            raise ValueError
+        super().__init__(step_name=step_name, df=df, sensitive_att=sensitive_att, target_col=target_col)
+        # assume the data set has been encoded to numerical values,
+        # initialize a BinaryLabelDataset from AIF360
+        aif_true_df = BinaryLabelDataset(df=df.drop(columns=["pred_"+target_col]), label_names=[target_col], protected_attribute_names=[sensitive_att])
+
+        aif_pred_df = aif_true_df.copy()
+
+        if input_score:
+            aif_pred_df.scores = df["pred_"+target_col]
+        else:
+            aif_pred_df.labels = np.array([int(x >= clf_threshold) for x in df["pred_"+target_col]])
+        self.input_score = input_score
+        self.step = step.fit(aif_true_df, aif_pred_df)
+        self.clf_threshold = clf_threshold
+
+
+
+    def apply(self, df):
+        """
+        :param df: pandas dataframe, stores the data to apply the learned post-processor.
+        :return: pandas dataframe, stores the data with post-processed predictions.
+        """
+
+        # initialize an AIF360 BinaryLabelDataset
+
+        if self.input_score:  # use the score prediction to fit the model, e.g. RejectOptionClassification, CalibratedEqOddsPostprocessing
+            aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], scores_names=[self.pred_target_col],
+                                             protected_attribute_names=[self.sensitive_att])
+        else:  # use the label prediction to fit the model, e.g. EqOddsPostprocessing
+            df["pred_label_"+self.target_col] = [int(x >= self.clf_threshold) for x in df[self.pred_target_col]]
+            aif_pred_df = BinaryLabelDataset(df=df.drop(columns=[self.pred_target_col]), label_names=["pred_label_"+self.target_col],
+                                             protected_attribute_names=[self.sensitive_att])
+
+        after_aif_df = self.step.predict(aif_pred_df)
+        after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True)
+        after_df[self.pred_target_col] = after_aif_df.labels
+
+        return after_df
\ No newline at end of file
diff --git a/pipeline/preprocess/categorizers.py b/pipeline/preprocess/categorizers.py
new file mode 100644
index 0000000..83d8d90
--- /dev/null
+++ b/pipeline/preprocess/categorizers.py
@@ -0,0 +1,51 @@
+"""
+    Classes to discretize numerical attributes into categorical attributes.
+"""
+import pandas as pd
+from sklearn.preprocessing import KBinsDiscretizer
+from sklearn.preprocessing import Binarizer
+from pipeline.preprocess.preprocessor import Preprocessor
+
+
+class SK_Discretizer(Preprocessor):
+    def __init__(self, df, num_atts, bin_size, encode='ordinal', strategy='kmeans'):
+        """
+        :param df: pandas dataframe, stores the data to fit the discretizer.
+        :param num_atts: list of str, each str represents the name of a numerical attribute in above data.
+        :param bin_size: list of integer, each integer represents the number of bins to categorize the corresponding numerical attribute.
diff --git a/pipeline/preprocess/categorizers.py b/pipeline/preprocess/categorizers.py
new file mode 100644
index 0000000..83d8d90
--- /dev/null
+++ b/pipeline/preprocess/categorizers.py
@@ -0,0 +1,51 @@
+"""
+    Classes to discretize numerical attributes into categorical attributes.
+"""
+import pandas as pd
+from sklearn.preprocessing import KBinsDiscretizer
+from sklearn.preprocessing import Binarizer
+from pipeline.preprocess.preprocessor import Preprocessor
+
+
+class SK_Discretizer(Preprocessor):
+    def __init__(self, df, num_atts, bin_size, encode='ordinal', strategy='kmeans'):
+        """
+        :param df: pandas dataframe, stores the data to fit the discretizer.
+        :param num_atts: list of str, each str represents the name of a numerical attribute in the above data.
+        :param bin_size: list of integer, each integer represents the number of bins to categorize the corresponding numerical attribute.
+        :param encode: same parameter as in sklearn KBinsDiscretizer.
+        :param strategy: same parameter as in sklearn KBinsDiscretizer.
+        """
+        cur_step = {}
+        for idx, ai in enumerate(num_atts):
+            cur_step[ai] = KBinsDiscretizer(n_bins=bin_size[idx], encode=encode, strategy=strategy)
+        self.bin_size = bin_size
+        super().__init__("@".join([strategy+"Categorizer"]+num_atts), df, step=cur_step, focus_atts=num_atts, fit_flag=True)
+
+
+class SK_Binarizer(Preprocessor):
+    def __init__(self, df, num_atts, bin_thresholds, copy=True):
+        """
+        :param df: pandas dataframe, stores the data to fit the binarizer.
+        :param num_atts: list of str, each str represents the name of a numerical attribute in the above data.
+        :param bin_thresholds: list of float, each float represents the value to binarize the corresponding numerical attribute.
+                               Values below or equal to this threshold are replaced by 0, above it by 1.
+        :param copy: same parameter as in sklearn Binarizer.
+        """
+        cur_step = {}
+        for idx, ai in enumerate(num_atts):
+            cur_step[ai] = Binarizer(threshold=bin_thresholds[idx], copy=copy)
+
+        self.bin_thresholds = bin_thresholds
+        super().__init__("@".join(["BinaryCategorizer"]+num_atts), df, step=cur_step, focus_atts=num_atts, fit_flag=True)
+
+
+if __name__ == '__main__':
+    data = pd.read_csv("../../data/adult.csv")
+    cur_o = SK_Discretizer(data, ["fnlwgt", "age"], [2, 3])
+    # cur_o = SK_Binarizer(data, ["fnlwgt", "age"], [100000, 30])
+
+    after_data = cur_o.apply(data)
+    after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False)
+
+    print(cur_o.get_name())
\ No newline at end of file
diff --git a/pipeline/preprocess/encoders.py b/pipeline/preprocess/encoders.py
new file mode 100644
index 0000000..b2a6712
--- /dev/null
+++ b/pipeline/preprocess/encoders.py
@@ -0,0 +1,98 @@
+"""
+    Classes to encode the string values for categorical attributes.
+"""
+import numpy as np
+import pandas as pd
+from sklearn.preprocessing import OrdinalEncoder
+from sklearn.preprocessing import OneHotEncoder
+from pipeline.preprocess.preprocessor import Preprocessor
+
+class SK_OrdinalEncoder(Preprocessor):
+    def __init__(self, df, cate_atts, sort_label, sort_positive_value):
+        """
+        :param df: pandas dataframe, stores the data to fit the encoder.
+        :param cate_atts: list of str, each str represents the name of a categorical attribute in the above data.
+        :param sort_label: str, name of the target variable used to determine the order of the ordinal encodings.
+        :param sort_positive_value: str, the value of sort_label that counts as the positive outcome when ranking the categories.
+        """
+        cur_step = {}
+        for ci in cate_atts:
+            value_counts = {}
+            for vi in df[ci].unique():
+                value_counts[vi] = df[(df[ci] == vi) & (df[sort_label] == sort_positive_value)].shape[0]
+            # categories with fewer positive outcomes receive smaller codes
+            value_orders = sorted(value_counts.keys(), key=lambda x: value_counts[x])
+            cur_step[ci] = OrdinalEncoder(categories=[value_orders])
+
+        self.sort_label = sort_label
+        self.sort_positive_value = sort_positive_value
+        super().__init__("@".join(["OrdinalEncoder"]+cate_atts), df, step=cur_step, focus_atts=cate_atts, fit_flag=True)
+
+
+class SK_OneHotEncoder(Preprocessor):
+    def __init__(self, df, cate_atts):
+        """
+        :param df: pandas dataframe, stores the data to fit the encoder.
+        :param cate_atts: list of str, each str represents the name of a categorical attribute in the above data.
+        """
+        # TODO: fix the bug that the sklearn one-hot encoder changes the dimensionality
+        # cur_step = {}
+        # for ci in cate_atts:
+        #     cur_step[ci] = OneHotEncoder()
+        super().__init__("@".join(["OneHotEncoder"]+cate_atts), df, step=None, focus_atts=cate_atts, fit_flag=False)
+
+    def apply(self, df):
+        """
+        :param df: pandas dataframe, stores the data to apply the learned encoder on.
+        :return: pandas dataframe, stores the data after encoding.
+        """
+        after_df = pd.get_dummies(df, columns=self.focus_atts, prefix_sep='=')
+        # after_df = df[list(set(df.columns).difference(self.focus_atts))]
+        # for ci in self.focus_atts:
+        #     ci_encode_array = self.step[ci].transform(np.array(df[ci]).reshape(-1, 1)).toarray()
+        #     ci_encode_df = pd.DataFrame(ci_encode_array, columns=[ci+"="+x for x in self.step[ci].categories_[0]])
+        #     after_df = pd.concat([after_df, ci_encode_df], axis=1)
+        return after_df
+
+class CustomCateAttsEncoder(Preprocessor):
+    def __init__(self, df, sensitive_atts, protected_values):
+        """ To encode the sensitive attributes and the target feature.
+        :param df: pandas dataframe, stores the data to fit the encoder.
+        :param sensitive_atts: list of str, each str represents the name of a sensitive attribute.
+        :param protected_values: dict, key is a str in sensitive_atts, value is a list of str, each str represents a protected value of the key sensitive attribute.
+        """
+        super().__init__("@".join(["SensitiveAttEncoder"]+sensitive_atts), df, step=None, focus_atts=sensitive_atts, fit_flag=False)
+        for x in sensitive_atts:
+            if any(vi not in df[x].unique() for vi in protected_values[x]):
+                raise ValueError("Some input values of sensitive attribute " + x + " are not valid!")
+        self.protected_values = protected_values
+
+    def apply(self, df):
+        """
+        :param df: pandas dataframe, stores the data to apply the encoder on.
+        :return: pandas dataframe, stores the data after encoding.
+ """ + after_df = df.copy() + for si in self.focus_atts: + after_df[si] = after_df[si].apply(lambda x: int(x not in self.protected_values[si])) + return after_df + + + +if __name__ == '__main__': + data = pd.read_csv("../data/train/adult__Imputer.csv") + # data = pd.read_csv("../../data/adult_pre_RandomSampler_1000.csv") + # data = pd.read_csv("../../data/adult_pre_SensitiveAttsEncoder_sex_race_income-per-year.csv") + # cur_o = SK_OrdinalEncoder(data, ["sex", "race"], "income-per-year", ">50K") + cur_o = SK_OneHotEncoder(data, ["workclass", "education", "marital-status", "occupation", "relationship", "native-country"]) + # cur_o = CustomCateAttsEncoder(data, ["sex", "race", "income-per-year"], {"sex": ["Female"], "race": ["Black"], "income-per-year": ["<=50K"]}) + + after_data = cur_o.apply(data) + after_data.to_csv("../data/adult_"+cur_o.get_name()+".csv", index=False) + + print(cur_o.get_name()) \ No newline at end of file diff --git a/pipeline/preprocess/fair_preprocessors.py b/pipeline/preprocess/fair_preprocessors.py new file mode 100644 index 0000000..aeb519f --- /dev/null +++ b/pipeline/preprocess/fair_preprocessors.py @@ -0,0 +1,82 @@ +""" + Class of fairness preprocessing interventions +""" + +import numpy as np +import pandas as pd +from aif360.datasets import BinaryLabelDataset +from aif360.algorithms.preprocessing import Reweighing as Reweighing +from aif360.algorithms.preprocessing import LFR as LFR +from aif360.algorithms.preprocessing import DisparateImpactRemover +from pipeline.preprocess.preprocessor import Preprocessor + +class AIF_Reweighing(Preprocessor): + + def __init__(self, df, target_col, sensitive_att): + """ + :param df: pandas dataframe, stores the data to fit the scaler. + :param target_col: str, the name of the target variable in above data. + :param target_positive_value: str, the value of above target variable that represents positive outcome. default is 1. + :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. + + """ + + cur_step = Reweighing([{sensitive_att: 0}], [{sensitive_att: 1}]) + super().__init__("@".join(["AIF_Reweighing", sensitive_att]), df, step=cur_step, fit_flag=True, weight_flag=True, sensitive_att=sensitive_att, target_col=target_col, fair_aware=True) + + +class AIF_DIRemover(Preprocessor): + + def __init__(self, df, target_col, sensitive_att, repair_level): + """ + :param df: pandas dataframe, stores the data to fit the scaler. + :param target_col: str, the name of the target variable in above data. + :param target_positive_value: str, the value of above target variable that represents positive outcome. default is 1. + :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. + + """ + if repair_level is None or not isinstance(repair_level, float): + print("Input repair_level is not valid! Should be float within [0,1]!") + raise ValueError + else: + if repair_level < 0 or repair_level > 1: + print("Input repair_level is not valid! 
diff --git a/pipeline/preprocess/fair_preprocessors.py b/pipeline/preprocess/fair_preprocessors.py
new file mode 100644
index 0000000..aeb519f
--- /dev/null
+++ b/pipeline/preprocess/fair_preprocessors.py
@@ -0,0 +1,82 @@
+"""
+    Classes of fairness preprocessing interventions.
+"""
+
+import numpy as np
+import pandas as pd
+from aif360.datasets import BinaryLabelDataset
+from aif360.algorithms.preprocessing import Reweighing
+from aif360.algorithms.preprocessing import LFR
+from aif360.algorithms.preprocessing import DisparateImpactRemover
+from pipeline.preprocess.preprocessor import Preprocessor
+
+class AIF_Reweighing(Preprocessor):
+
+    def __init__(self, df, target_col, sensitive_att):
+        """
+        :param df: pandas dataframe, stores the data to fit the preprocessor.
+        :param target_col: str, the name of the target variable in the above data. Assume 1 represents the favorable class.
+        :param sensitive_att: str, the name of a sensitive attribute in the above data. If None, call auto_detection to update. Value 0 represents the protected group.
+        """
+
+        cur_step = Reweighing([{sensitive_att: 0}], [{sensitive_att: 1}])
+        super().__init__("@".join(["AIF_Reweighing", sensitive_att]), df, step=cur_step, fit_flag=True, weight_flag=True, sensitive_att=sensitive_att, target_col=target_col, fair_aware=True)
+
+
+class AIF_DIRemover(Preprocessor):
+
+    def __init__(self, df, target_col, sensitive_att, repair_level):
+        """
+        :param df: pandas dataframe, stores the data to fit the preprocessor.
+        :param target_col: str, the name of the target variable in the above data.
+        :param sensitive_att: str, the name of a sensitive attribute in the above data. If None, call auto_detection to update. Value 0 represents the protected group.
+        :param repair_level: float in [0, 1], the strength of the repair, as in aif360 DisparateImpactRemover.
+        """
+        if not isinstance(repair_level, float) or repair_level < 0 or repair_level > 1:
+            raise ValueError("Input repair_level is not valid! Should be a float within [0, 1]!")
+        self.repair_level = repair_level
+        cur_step = DisparateImpactRemover(repair_level=repair_level, sensitive_attribute=sensitive_att)
+
+        super().__init__("@".join(["AIF_DIRemover", sensitive_att]), df, step=cur_step, fit_flag=False, sensitive_att=sensitive_att, target_col=target_col, fair_aware=True)
+
+
+class AIF_LFR(Preprocessor):
+
+    def __init__(self, df, target_col, sensitive_att):
+        """ NOTE: very sensitive to the input data; refer to the example in AIF 360 for this preprocessor.
+        :param df: pandas dataframe, stores the data to fit the preprocessor.
+        :param target_col: str, the name of the target variable in the above data.
+        :param sensitive_att: str, the name of a sensitive attribute in the above data. If None, call auto_detection to update. Value 0 represents the protected group.
+        """
+        # TODO: fix the bug of LFR not returning the categorical atts
+        # TODO: experiment with the same data used by the AIF360 tutorial to compare whether the categorical atts are returned
+        cur_step = LFR([{sensitive_att: 0}], [{sensitive_att: 1}])
+        super().__init__("@".join(["AIF_LFR", sensitive_att]), df, step=cur_step, fit_flag=True, sensitive_att=sensitive_att, target_col=target_col, fair_aware=True)
+
+
+if __name__ == '__main__':
+    data = pd.read_csv("../../data/adult_pre_reweigh.csv")
+    # cur_o = AIF_Reweighing(data, "income-per-year", "sex")
+    # cur_o = AIF_LFR(data, "income-per-year", "sex")
+    cur_o = AIF_DIRemover(data, "income-per-year", "sex", 0.8)
+
+    after_data = cur_o.apply(data)
+    # for Reweighing
+    # after_data, new_weights = cur_o.apply(data)
+    after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False)
+
+    print(cur_o.get_name())
+    # for Reweighing
+    # print(len(new_weights))
+    # print(new_weights)
\ No newline at end of file
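One calling convention worth noting: because AIF_Reweighing is constructed with weight_flag=True, its apply returns a (dataframe, weights) pair instead of a single dataframe, as the commented lines above hint. A minimal sketch (the input file is hypothetical and must already be numerically encoded):

    import pandas as pd
    from pipeline.preprocess.fair_preprocessors import AIF_Reweighing

    data = pd.read_csv("../../data/adult_pre_reweigh.csv")  # hypothetical encoded data
    rw = AIF_Reweighing(data, "income-per-year", "sex")
    after_data, new_weights = rw.apply(data)  # weights align row by row with after_data
    assert len(new_weights) == after_data.shape[0]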
+ """ + return df.query(wrap_filter(self.column, self.value)) + +class RemoveColumnFilter(Preprocessor): + def __init__(self, df, exclude_cols): + """ + :param exclude_cols: list of string, each string represents the name of the column to be excluded. + """ + + super().__init__(step_name="RemoveColumnFilter", df=df, focus_atts=exclude_cols, fit_flag=False) + + def apply(self, df): + """ + :param df: pandas dataframe, stores the data to be filtered. + :return: pandas dataframe, stores the data after filter. + """ + return df.drop(columns=self.focus_atts) + +if __name__ == '__main__': + + data = pd.read_csv("../../data/adult_pre_reweigh.csv") + # cur_o = RowFilter(data, "sex", 0) + cur_o = RemoveColumnFilter(data, ["sex","race"]) + + after_data = cur_o.apply(data) + after_data.to_csv("../../data/adult_" + cur_o.get_name() + ".csv", index=False) + + print(cur_o.get_name()) diff --git a/pipeline/preprocess/imputers.py b/pipeline/preprocess/imputers.py new file mode 100644 index 0000000..36aee84 --- /dev/null +++ b/pipeline/preprocess/imputers.py @@ -0,0 +1,88 @@ +""" + Classes to impute missing values in data. +""" +import numpy as np +import pandas as pd +import datawig +from pipeline.preprocess.preprocessor import Preprocessor +from sklearn.impute import SimpleImputer + +class DropNAImputer(Preprocessor): + def __init__(self, df, na_mark=None): + """ + :param df: pandas dataframe, stores the data to fit the imputer. + :param na_mark: str, represents the symbol of missing values. Default is None, i.e. NaN represents the missing values. + """ + super().__init__("DropNAImputer", df=df, fit_flag=False, na_mark=na_mark) + + def apply(self, df): + """ + :param df: pandas dataframe, stores the data to impute. + :return: pandas dataframe, stores the data after impute. + """ + if self.na_mark: + df = df.replace({self.na_mark:np.nan}) + return df.dropna() + +class ModeImputer(Preprocessor): + def __init__(self, df, num_atts, cate_atts, na_mark=None): + """ + :param df: pandas dataframe, stores the data to fit the imputer. + :param num_atts: list of str, each str represents the name of numerical column to be imputed using the mean value. + :param cate_atts: list of str, each str represents the name of categorical column to be imputed using the most frequent value. + :param na_mark: str, represents the symbol of missing values. Default is None, i.e. NaN represents the missing values. + """ + if len(set(num_atts).intersection(cate_atts)) > 0: + print("Some attributes are both in num_atts and cate_atts!") + raise ValueError + + cur_step = {} + if len(cate_atts) > 0: + for ci in cate_atts: + cur_step[ci] = SimpleImputer(strategy='most_frequent') + if len(num_atts) > 0: + for ni in num_atts: + cur_step[ni] = SimpleImputer(strategy='mean') + + super().__init__("@".join(["ModeImputer"]+num_atts+cate_atts), df, step=cur_step, focus_atts=cate_atts+num_atts, fit_flag=True, na_mark=na_mark) + + +class DatawigImputer(Preprocessor): + def __init__(self, df, impute_atts, na_mark=None, output_path="datawig/", num_epochs=50): + """ + :param df: pandas dataframe, stores the data to fit the imputer. + :param impute_atts: list of str, each str represents the name of column to be imputed using datawig model. Column can be categorical or numerical. + :param na_mark: str, represents the symbol of missing values. Default is None, i.e. NaN represents the missing values. + :param output_path: str, the path to store the learned datawig model. + :param num_epochs: integer, the maximum iteration of datawig model. 
+ """ + super().__init__("@".join(["DatawigImputer"] + impute_atts), df, focus_atts=impute_atts, fit_flag=False, na_mark=na_mark) + + learned_imputers = {} + for ai in impute_atts: + learned_imputers[ai] = datawig.SimpleImputer(input_columns=list(set(df.columns).difference(ai)), + output_column=ai, output_path=output_path).fit(train_df=df, num_epochs=num_epochs) + self.step = learned_imputers + + def apply(self, df): + """ + :param df: pandas dataframe, stores the data to apply the learned imputer. + :return: pandas dataframe, stores the data after impute. + """ + if self.na_mark: + df = df.replace({self.na_mark:np.nan}) + after_df = df.copy() + for ai in self.focus_atts: + after_df[ai] = self.step[ai].predict(df)[ai + '_imputed'] + return after_df + +if __name__ == '__main__': + data = pd.read_csv("../../data/adult_pre_RandomSampler_1000.csv") + cur_o = DropNAImputer(data, na_mark="?") + # cur_o = ModeImputer(data, ["fnlwgt"], ["workclass"], na_mark="?") + # cur_o = DatawigImputer(data, ["workclass"], na_mark="?") + + after_data = cur_o.apply(data) + after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False) + + print(cur_o.get_name()) \ No newline at end of file diff --git a/pipeline/preprocess/preprocessor.py b/pipeline/preprocess/preprocessor.py new file mode 100644 index 0000000..6c5ffae --- /dev/null +++ b/pipeline/preprocess/preprocessor.py @@ -0,0 +1,71 @@ +""" + Super class for all the supported preprocessor classes. +""" +import numpy as np +from aif360.datasets import BinaryLabelDataset +from pipeline.step import Step + +class Preprocessor(Step): + def __init__(self, step_name, df, step=None, focus_atts=[], fit_flag=True, weight_flag=False, sensitive_att=None, target_col=None, fair_aware=False, na_mark=None): + """ + :param step_name: str, name of the current input step. + :param df: pandas dataframe, stores the data. + :param step: object of the initialized class. If none, initialize here. + :param focus_atts: lisf of str, each str represents the name of a column in above data that will be pre-processed. + :param fit_flag: boolean, whether to initialize step object here. + :param weight_flag: boolean, whether to output extra sample weight after fair-preprocessor. + :param sensitive_att: str, the name of a sensitive attribute. + :param target_col: str, the name of the target attribute. + :param fair_aware: boolean, whether the preprocessor is fair-aware. Default is False. If true, sensitive_att and target_col can not be null. + """ + super().__init__(step_name=step_name, df=df, focus_atts=focus_atts, sensitive_att=sensitive_att, target_col=target_col) + + if len(focus_atts) > 0 and fit_flag: + fitted_step = {} + for idx, ai in enumerate(focus_atts): + fitted_step[ai] = step[ai].fit(np.array(df[ai]).reshape(-1, 1)) + self.step = fitted_step + elif fair_aware and fit_flag: # for fair-preprocessors + aif_df = BinaryLabelDataset(df=df, label_names=[target_col], protected_attribute_names=[sensitive_att]) + self.step = step.fit(aif_df) + else: + if step is not None: + self.step = step + + # address different encoding of missing values + if na_mark is not None: + self.na_mark = na_mark + else: + self.na_mark = None + self.fair_aware = fair_aware + self.fit_flag = fit_flag + self.weight_flag = weight_flag + + + def apply(self, df): + """ + :param df: pandas dataframe, stores the data to apply the learned discretizer. + :return: pandas dataframe, stores the data after discretize. 
+ """ + if self.na_mark: + df = df.replace({self.na_mark:np.nan}) + if self.fair_aware: # fair-preprocessor + aif_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[self.sensitive_att]) + if self.fit_flag: # fit has been initialized + after_aif_df = self.step.transform(aif_df) + else: # fit and transform is combined, e.g. DisparateImpactRemover + after_aif_df = self.step.fit_transform(aif_df) + + after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) + if self.weight_flag: + preprocessed_weights = after_aif_df.instance_weights + + else: # regular preprocessor + after_df = df.copy() + for ai in self.focus_atts: + after_df[ai] = self.step[ai].transform(np.array(after_df[ai]).reshape(-1, 1)) + + if self.weight_flag: # for the preprocessor that updates weights, e.g. Reweighing + return after_df, preprocessed_weights + else: + return after_df \ No newline at end of file diff --git a/pipeline/preprocess/samplers.py b/pipeline/preprocess/samplers.py new file mode 100644 index 0000000..e95ea2d --- /dev/null +++ b/pipeline/preprocess/samplers.py @@ -0,0 +1,96 @@ +""" + Classes to sample a subset of data. +""" + +import numpy as np +import pandas as pd +from pipeline.preprocess.preprocessor import Preprocessor + +class RandomSampler(Preprocessor): + def __init__(self, df, sample_n, random_state=0): + """ + :param sample_n: integer, the size of the sampled subset of data + """ + if not sample_n: + print("Need to specify a size greater than 0!") + raise ValueError + self.sample_n = sample_n + self.random_state = random_state + super().__init__("RandomSampler@"+str(sample_n), df=df, fit_flag=False) + + def apply(self, df): + """ + :param df: pandas dataframe, stores the data to be sampled. + :return: pandas dataframe, stores the data after sample. + """ + return df.sample(n=self.sample_n, random_state=self.random_state) + +class BalancePopulationSampler(Preprocessor): + def __init__(self, df, sample_n, balance_col, random_state=0): + """ + :param sample_n: integer, the size of the sampled subset of data + :param balance_col: str, the name of a categorical column that the population of groups within this column are balanced in the sampled subset. + :param random_state: integer, the seed for random process, same as random_state in pandas.DataFrame.sample. + + """ + if not sample_n: + print("Need to specify a size greater than 0!") + raise ValueError + if not balance_col: + print("Need to specify the name of a column to perform balance sampling within this column!") + raise ValueError + if balance_col not in df.columns: + print("Need to specify a valid column to perform balance sampling within this column!") + raise ValueError + self.sample_n = sample_n + self.balance_col = balance_col + self.random_state = random_state + super().__init__("@".join(["BalanceSampler", balance_col, str(sample_n)]), df=df, fit_flag=False) + + def apply(self, df): + """ + :param df: pandas dataframe, stores the data to be sampled. + :return: pandas dataframe, stores the data after sample. 
+ """ + # TODO: update to minimum sample set + balance_groups = list(df[self.balance_col].unique()) + n_group = int(np.ceil(self.sample_n/len(balance_groups))) + # print(n_group) + sampled_df = {} + small_groups = [] + for gi in balance_groups: + gi_data = df[df[self.balance_col]==gi] + if gi_data.shape[0] < n_group: # for small group, accept all in the balanced samples + sampled_df[gi] = gi_data + small_groups.append(gi) + else: + sampled_df[gi] = df[df[self.balance_col]==gi].sample(n=n_group, random_state=self.random_state) + + after_df = pd.DataFrame() + if not self.sample_n % len(balance_groups): # for even groups + for gi in balance_groups: + after_df = pd.concat([after_df, sampled_df[gi]]) + else: # for odd groups, remove extra sampled items + remove_cates = list(set(balance_groups).difference(small_groups)) + print(len(balance_groups)*n_group-self.sample_n) + for gi in np.random.choice(remove_cates, len(balance_groups)*n_group-self.sample_n, False): + after_df = pd.concat([after_df, sampled_df[gi].head(sampled_df[gi].shape[0]-1)]) + print(gi, after_df.shape[0]) + del sampled_df[gi] # remove the group already added into the final sample + print() + print(sampled_df.keys()) + for gi in sampled_df: + after_df = pd.concat([after_df, sampled_df[gi]]) + print(gi, after_df.shape[0]) + print() + print(after_df.groupby(self.balance_col).count()) + return after_df + +if __name__ == '__main__': + # cur_o = BalancePopulationSampler(1000, "marital-status") + data = pd.read_csv("../../data/adult.csv") + cur_o = RandomSampler(data, 1000) + + after_data = cur_o.apply(data) + after_data.to_csv("../../data/adult_" + cur_o.get_name() + ".csv", index=False) + print(cur_o.get_name()) \ No newline at end of file diff --git a/pipeline/preprocess/scalers.py b/pipeline/preprocess/scalers.py new file mode 100644 index 0000000..7a1e80f --- /dev/null +++ b/pipeline/preprocess/scalers.py @@ -0,0 +1,47 @@ +""" + Classes to scale data. +""" +import pandas as pd +from sklearn.preprocessing import StandardScaler, MinMaxScaler +from pipeline.preprocess.preprocessor import Preprocessor + +class SK_StandardScaler(Preprocessor): + + def __init__(self, df, num_atts, copy=True, with_mean=True, with_std=True): + """ + :param df: pandas dataframe, stores the data to fit the scaler. + :param num_atts: list of str, each str represents the name of a numerical attribute in above data. + :param copy: same parameter with sklearn StandardScaler + :param with_mean: same parameter with sklearn StandardScaler + :param with_std: same parameter with sklearn StandardScaler + """ + cur_step = {} + for ai in num_atts: + cur_step[ai] = StandardScaler(copy=copy, with_mean=with_mean, with_std=with_std) + + super().__init__("@".join(["StandardScaler"]+num_atts), df, step=cur_step, focus_atts=num_atts, fit_flag=True) + + +class SK_MinMaxScaler(Preprocessor): + def __init__(self, df, num_atts, feature_range=(0, 1), copy=True): + """ + :param df: pandas dataframe, stores the data to fit the scaler. + :param num_atts: list of str, each str represents the name of a numerical attribute in above data. 
diff --git a/pipeline/preprocess/scalers.py b/pipeline/preprocess/scalers.py
new file mode 100644
index 0000000..7a1e80f
--- /dev/null
+++ b/pipeline/preprocess/scalers.py
@@ -0,0 +1,47 @@
+"""
+    Classes to scale data.
+"""
+import pandas as pd
+from sklearn.preprocessing import StandardScaler, MinMaxScaler
+from pipeline.preprocess.preprocessor import Preprocessor
+
+class SK_StandardScaler(Preprocessor):
+
+    def __init__(self, df, num_atts, copy=True, with_mean=True, with_std=True):
+        """
+        :param df: pandas dataframe, stores the data to fit the scaler.
+        :param num_atts: list of str, each str represents the name of a numerical attribute in the above data.
+        :param copy: same parameter as in sklearn StandardScaler.
+        :param with_mean: same parameter as in sklearn StandardScaler.
+        :param with_std: same parameter as in sklearn StandardScaler.
+        """
+        cur_step = {}
+        for ai in num_atts:
+            cur_step[ai] = StandardScaler(copy=copy, with_mean=with_mean, with_std=with_std)
+
+        super().__init__("@".join(["StandardScaler"]+num_atts), df, step=cur_step, focus_atts=num_atts, fit_flag=True)
+
+
+class SK_MinMaxScaler(Preprocessor):
+    def __init__(self, df, num_atts, feature_range=(0, 1), copy=True):
+        """
+        :param df: pandas dataframe, stores the data to fit the scaler.
+        :param num_atts: list of str, each str represents the name of a numerical attribute in the above data.
+        :param feature_range: same parameter as in sklearn MinMaxScaler.
+        :param copy: same parameter as in sklearn MinMaxScaler.
+        """
+        cur_step = {}
+        for ai in num_atts:
+            cur_step[ai] = MinMaxScaler(feature_range=feature_range, copy=copy)
+        super().__init__("@".join(["MinMaxScaler"]+num_atts), df, step=cur_step, focus_atts=num_atts, fit_flag=True)
+
+
+if __name__ == '__main__':
+    data = pd.read_csv("../../data/adult_pre_RandomSampler_1000.csv")
+    cur_o = SK_StandardScaler(data, ["fnlwgt", "age"])
+    # cur_o = SK_MinMaxScaler(data, ["fnlwgt", "age"])
+
+    after_data = cur_o.apply(data)
+    after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False)
+
+    print(cur_o.get_name())
\ No newline at end of file
diff --git a/pipeline/preprocess/splitters.py b/pipeline/preprocess/splitters.py
new file mode 100644
index 0000000..d1ed315
--- /dev/null
+++ b/pipeline/preprocess/splitters.py
@@ -0,0 +1,134 @@
+"""
+    Classes to split data into train, validation, and test sets.
+"""
+from pipeline.preprocess.preprocessor import Preprocessor
+import numpy as np
+import pandas as pd
+from sklearn.model_selection import StratifiedShuffleSplit
+
+def valid_split_ratio(input_ratios):
+    if input_ratios is None:
+        raise ValueError("Need to specify split_ratio!")
+    if len(input_ratios) not in (2, 3):
+        raise ValueError("split_ratio should have 2 values for train and test sets, or 3 values for train, validation, and test sets!")
+    for x in input_ratios:
+        if not isinstance(x, float):
+            raise ValueError("split_ratio includes a non-float value!")
+        if x < 0 or x > 1:
+            raise ValueError("split_ratio includes an invalid value! Each value should be between 0 and 1.")
+    if abs(sum(input_ratios) - 1) > 1e-9: # tolerate floating-point rounding
+        raise ValueError("The sum of split_ratio does not equal 1!")
+
+    return True
+
+class BalanceTargetSplitter(Preprocessor):
+    def __init__(self, df, split_ratio, target_col, seed=0):
+        """
+        :param df: pandas dataframe, stores the data to split.
+        :param split_ratio: list of float, each float represents the size-ratio of the split data. The order maps to the sizes of the train, [validation], and test sets.
+                            Values range in [0, 1] and should sum to 1.
+                            e.g. [0.7, 0.2, 0.1] means a 70% train, 20% validation, and 10% test set.
+        :param target_col: str, the name of the target variable in the above data.
+        :param seed: integer, seed used to generate the random state. Same as 'random_state' in sklearn.model_selection.StratifiedShuffleSplit. Default is 0.
+ """ + super().__init__("@".join(["BalanceTargetSplitter", str(len(split_ratio))]), df=df, fit_flag=False, target_col=target_col) + if valid_split_ratio(split_ratio): + if len(split_ratio) == 2: # train and test + train_size = split_ratio[0] + test_size = split_ratio[1] + self.splitters = [StratifiedShuffleSplit(n_splits=1, test_size=test_size, train_size=train_size, random_state=seed)] + else: # train, validation and test + train_size = split_ratio[0] + validation_size = split_ratio[1] + test_size = split_ratio[2] + + self.splitters = [StratifiedShuffleSplit(n_splits=1, test_size=test_size+validation_size, train_size=train_size, random_state=seed), + StratifiedShuffleSplit(n_splits=1, test_size=test_size, train_size=validation_size, random_state=seed)] + + self.split_ratio = split_ratio + self.seed = seed + + def apply(self, df): + """ + :param df: pandas dataframe, stores the data to apply the learned splitter. + :return: pandas dataframe, stores the data after split. + """ + def split_to_df(splitter, input_df, y_label): + after_df_1 = pd.DataFrame() + after_df_2 = pd.DataFrame() + X = np.array(df.drop(columns=[y_label])) + y = np.array(df[y_label]) + for index_1, index_2 in splitter.split(X, y): + X_1, X_2 = X[index_1], X[index_2] + y_1, y_2 = y[index_1], y[index_2] + + after_df_1 = pd.concat([after_df_1, pd.DataFrame(data=np.hstack((X_1, y_1.reshape(-1,1))) ,columns=input_df.columns)]) + after_df_2 = pd.concat([after_df_2, pd.DataFrame(data=np.hstack((X_2, y_2.reshape(-1,1))), columns=input_df.columns)]) + return after_df_1, after_df_2 + + if len(self.split_ratio) == 2: # without validation set + return split_to_df(self.splitters[0], df, self.target_col) + else: # with validation set + after_train_df, rest_df = split_to_df(self.splitters[0], df, self.target_col) + after_val_df, after_test_df = split_to_df(self.splitters[1], rest_df, self.target_col) + return after_train_df, after_val_df, after_test_df + +class RandomSplitter(Preprocessor): + def __init__(self, df, split_ratio, seed=0): + """ + :param df: pandas dataframe, stores the data to split. + :param split_ratio: list of float, each float represents the size-ratio of splitted data. Corresponding order maps to the size of the train, [validataion], and test set. + Value ranges in [0,1]. Sum of the values in this list should be equal to 1. + e.g. [0.7, 0.2, 0.1] means 70% train, 20% validation, and 10% test set. + :param seed: integer, seed to be used to generate random state. Same as 'random_state' in sklearn.model_selection.StratifiedKFold. Default is 0. + """ + super().__init__("@".join(["RandomSplitter", str(len(split_ratio))]), df=df, fit_flag=False) + + if valid_split_ratio(split_ratio): + self.split_ratio = split_ratio + self.seed = seed + + def apply(self, df): + """ + :param df: pandas dataframe, stores the data to apply the learned splitter. + :return: pandas dataframe, stores the data after split. 
+ """ + df = df.sample(frac=1, random_state=self.seed).reset_index(drop=True) + if len(self.split_ratio) == 2: # without validation set + split_idx = int(self.split_ratio[0]*df.shape[0]) + return df[:split_idx], df[split_idx:] + else: # with validation set + split_idx_1 = int(self.split_ratio[0] * df.shape[0]) + split_idx_2 = split_idx_1 + int(self.split_ratio[1] * df.shape[0]) + + return df[:split_idx_1], df[split_idx_1:split_idx_2], df[split_idx_2:] + + +if __name__ == '__main__': + data = pd.read_csv("../../data/adult_pre_RandomSampler_1000.csv") + # cur_o = BalanceTargetSplitter(data, [0.7, 0.3], "income-per-year") + cur_o = RandomSplitter(data, [0.5, 0.3, 0.2]) + + after_train, after_val, after_test = cur_o.apply(data) + # after_train, after_test = cur_o.apply(data) + print(after_train.shape) + print(after_val.shape) + print(after_test.shape) + # after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False) + + print(cur_o.get_name()) + + + + + diff --git a/pipeline/preprocess/transformers.py b/pipeline/preprocess/transformers.py new file mode 100644 index 0000000..63919cc --- /dev/null +++ b/pipeline/preprocess/transformers.py @@ -0,0 +1,24 @@ +""" + Classes to transform data for multiple columns. +""" + +import numpy as np +import pandas as pd +from sklearn.compose import ColumnTransformer +from pipeline.preprocess.preprocessor import Preprocessor + +class SK_ColumnTransformer(Preprocessor): + def __init__(self, df, cols): + """ + :param df: pandas dataframe, stores the data to fit the discretizer. + :param cols: list of str, each str represents the name of an attribute in above data to be transformed. + """ + pass + # TODO: add column transformer + + def apply(self, df): + """ + :param df: pandas dataframe, stores the data to apply the learned discretizer. + :return: pandas dataframe, stores the data after discretize. + """ + pass \ No newline at end of file diff --git a/pipeline/step.py b/pipeline/step.py new file mode 100644 index 0000000..afd4aa5 --- /dev/null +++ b/pipeline/step.py @@ -0,0 +1,49 @@ +""" + Base abstract class for every step supported in this system. +""" +STEP_NAMES = {"Sampler": "SA"} + +class Step(): + def __init__(self, step_name, df=None, focus_atts=[], sensitive_att=None, target_col=None): + + if df is None: + print("Input data is empty!") + raise ValueError + + if sensitive_att: + if sensitive_att not in df.columns: + print("Need to specify a valid sensitive attribute!") + raise ValueError + self.sensitive_att = sensitive_att + if target_col is not None: + if target_col not in df.columns: + print("Need to specify a valid target attribute to be predicted!") + raise ValueError + if len(df[target_col].unique()) != 2: + print("Only support binary target feature now!") + raise ValueError + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name + if len(focus_atts) > 0: + if sum([x not in df.columns for x in focus_atts]) > 0: + print("Some specified attributes do not appear in the data!") + raise ValueError + self.focus_atts = focus_atts + self.name = step_name + # self.input_data = df + + + + def apply(self, df): + """ + :param df: pandas dataframe, stores the data to apply the learned discretizer. + :return: pandas dataframe, stores the data after discretize. 
+ """ + raise NotImplementedError + + + def get_name(self): # return full name to print out + return self.name + + def get_abbr_name(self): # return abbreviated name used in the file name of data + return STEP_NAMES[self.name] From d06c7f3737c1b9cbc2bf58bbd53b9e4cc319e7a9 Mon Sep 17 00:00:00 2001 From: Ke Yang Date: Wed, 8 Jul 2020 12:06:37 -0400 Subject: [PATCH 3/7] Remove unnecessary files and rename driver python script accordingly --- pipeline/fairness_label.py | 74 --------------------- pipeline/{label_pipeline.py => fairprep.py} | 0 2 files changed, 74 deletions(-) delete mode 100644 pipeline/fairness_label.py rename pipeline/{label_pipeline.py => fairprep.py} (100%) diff --git a/pipeline/fairness_label.py b/pipeline/fairness_label.py deleted file mode 100644 index ce74e6d..0000000 --- a/pipeline/fairness_label.py +++ /dev/null @@ -1,74 +0,0 @@ -from sklearn.metrics import confusion_matrix -import numpy as np -def get_static_label(df, sensi_atts, target_name, round_digit=3): - groupby_cols = sensi_atts+[target_name] - placeholder_att = list(set(df.columns).difference(groupby_cols))[0] - - count_all = df[groupby_cols+[placeholder_att]].groupby(groupby_cols).count() - values_all = count_all.get_values() - index_all = list(count_all.index) - - if len(sensi_atts) == 1: - norm_cols = [target_name] - elif len(sensi_atts) == 2: - norm_cols = [sensi_atts[0], target_name] - norm_values = df[norm_cols+[placeholder_att]].groupby(norm_cols).count().get_values() - - res_dict = {} - if 0 < len(sensi_atts) <= 2: - s1_n = len(df[sensi_atts[0]].unique()) - t_n = len(df[target_name].unique()) - for idx, tuple_i in enumerate(index_all): - if len(tuple_i[:-1]) == 1: - key_tuple = (tuple_i[0]) - else: - key_tuple = tuple_i[:-1] - idx_denom = idx % 2 + int(idx / (s1_n*t_n))*t_n # only work for binary 2nd sensitive att - if key_tuple not in res_dict: - res_dict[key_tuple] = {tuple_i[-1]: round(values_all[idx][0]/norm_values[idx_denom][0], round_digit)} - else: - res_dict[key_tuple].update({tuple_i[-1]: round(values_all[idx][0]/norm_values[idx_denom][0], round_digit)}) - else: # for more than 2 sensitive atts - pass - return res_dict - -def compute_evaluation_metric_binary(true_y, pred_y, label_order): - TN, FP, FN, TP = confusion_matrix(true_y, list(pred_y), labels=label_order).ravel() - P = TP + FN - N = TN + FP - ACC = (TP+TN) / (P+N) if (P+N) > 0.0 else np.float64(0.0) - return dict( - PR = P/ (P+N), P = TP + FN, N = TN + FP, - TPR=TP / P, TNR=TN / N, FPR=FP / N, FNR=FN / P, - PPV=TP / (TP+FP) if (TP+FP) > 0.0 else np.float64(0.0), - NPV=TN / (TN+FN) if (TN+FN) > 0.0 else np.float64(0.0), - FDR=FP / (FP+TP) if (FP+TP) > 0.0 else np.float64(0.0), - FOR=FN / (FN+TN) if (FN+TN) > 0.0 else np.float64(0.0), - ACC=ACC, - ERR=1-ACC, - F1=2*TP / (2*TP+FP+FN) if (2*TP+FP+FN) > 0.0 else np.float64(0.0) - ) -def get_performance_label(df, sensi_atts, target_name, posi_target, output_metrics=["TPR", "FPR", "TNR", "FNR", "PR"], round_digit=3): - - groupby_cols = sensi_atts+[target_name] - placeholder_att = list(set(df.columns).difference(groupby_cols))[0] - - count_all = df[groupby_cols+[placeholder_att]].groupby(groupby_cols).count() - index_all = list(count_all.index) - - res_dict = {} - target_label_order = [posi_target, set(df[target_name]).difference([posi_target]).pop()] - - for tuple_i in index_all: - if len(tuple_i[:-1]) == 1: - key_tuple = (tuple_i[0]) - else: - key_tuple = tuple_i[:-1] - cur_q = [] - for idx, vi in enumerate(tuple_i[:-1]): - cur_q.append("{}=='{}'".format(sensi_atts[idx], vi)) - tuple_df = 
From d06c7f3737c1b9cbc2bf58bbd53b9e4cc319e7a9 Mon Sep 17 00:00:00 2001
From: Ke Yang
Date: Wed, 8 Jul 2020 12:06:37 -0400
Subject: [PATCH 3/7] Remove unnecessary files and rename driver python script
 accordingly

---
 pipeline/fairness_label.py                  | 74 ---------------------
 pipeline/{label_pipeline.py => fairprep.py} |  0
 2 files changed, 74 deletions(-)
 delete mode 100644 pipeline/fairness_label.py
 rename pipeline/{label_pipeline.py => fairprep.py} (100%)

diff --git a/pipeline/fairness_label.py b/pipeline/fairness_label.py
deleted file mode 100644
index ce74e6d..0000000
--- a/pipeline/fairness_label.py
+++ /dev/null
@@ -1,74 +0,0 @@
-from sklearn.metrics import confusion_matrix
-import numpy as np
-def get_static_label(df, sensi_atts, target_name, round_digit=3):
-    groupby_cols = sensi_atts+[target_name]
-    placeholder_att = list(set(df.columns).difference(groupby_cols))[0]
-
-    count_all = df[groupby_cols+[placeholder_att]].groupby(groupby_cols).count()
-    values_all = count_all.get_values()
-    index_all = list(count_all.index)
-
-    if len(sensi_atts) == 1:
-        norm_cols = [target_name]
-    elif len(sensi_atts) == 2:
-        norm_cols = [sensi_atts[0], target_name]
-    norm_values = df[norm_cols+[placeholder_att]].groupby(norm_cols).count().get_values()
-
-    res_dict = {}
-    if 0 < len(sensi_atts) <= 2:
-        s1_n = len(df[sensi_atts[0]].unique())
-        t_n = len(df[target_name].unique())
-        for idx, tuple_i in enumerate(index_all):
-            if len(tuple_i[:-1]) == 1:
-                key_tuple = (tuple_i[0])
-            else:
-                key_tuple = tuple_i[:-1]
-            idx_denom = idx % 2 + int(idx / (s1_n*t_n))*t_n # only work for binary 2nd sensitive att
-            if key_tuple not in res_dict:
-                res_dict[key_tuple] = {tuple_i[-1]: round(values_all[idx][0]/norm_values[idx_denom][0], round_digit)}
-            else:
-                res_dict[key_tuple].update({tuple_i[-1]: round(values_all[idx][0]/norm_values[idx_denom][0], round_digit)})
-    else: # for more than 2 sensitive atts
-        pass
-    return res_dict
-
-def compute_evaluation_metric_binary(true_y, pred_y, label_order):
-    TN, FP, FN, TP = confusion_matrix(true_y, list(pred_y), labels=label_order).ravel()
-    P = TP + FN
-    N = TN + FP
-    ACC = (TP+TN) / (P+N) if (P+N) > 0.0 else np.float64(0.0)
-    return dict(
-        PR = P/ (P+N), P = TP + FN, N = TN + FP,
-        TPR=TP / P, TNR=TN / N, FPR=FP / N, FNR=FN / P,
-        PPV=TP / (TP+FP) if (TP+FP) > 0.0 else np.float64(0.0),
-        NPV=TN / (TN+FN) if (TN+FN) > 0.0 else np.float64(0.0),
-        FDR=FP / (FP+TP) if (FP+TP) > 0.0 else np.float64(0.0),
-        FOR=FN / (FN+TN) if (FN+TN) > 0.0 else np.float64(0.0),
-        ACC=ACC,
-        ERR=1-ACC,
-        F1=2*TP / (2*TP+FP+FN) if (2*TP+FP+FN) > 0.0 else np.float64(0.0)
-    )
-def get_performance_label(df, sensi_atts, target_name, posi_target, output_metrics=["TPR", "FPR", "TNR", "FNR", "PR"], round_digit=3):
-
-    groupby_cols = sensi_atts+[target_name]
-    placeholder_att = list(set(df.columns).difference(groupby_cols))[0]
-
-    count_all = df[groupby_cols+[placeholder_att]].groupby(groupby_cols).count()
-    index_all = list(count_all.index)
-
-    res_dict = {}
-    target_label_order = [posi_target, set(df[target_name]).difference([posi_target]).pop()]
-
-    for tuple_i in index_all:
-        if len(tuple_i[:-1]) == 1:
-            key_tuple = (tuple_i[0])
-        else:
-            key_tuple = tuple_i[:-1]
-        cur_q = []
-        for idx, vi in enumerate(tuple_i[:-1]):
-            cur_q.append("{}=='{}'".format(sensi_atts[idx], vi))
-        tuple_df = df.query(" and ".join(cur_q))
-        metrics_all = compute_evaluation_metric_binary(list(tuple_df[target_name]), list(tuple_df["pred_"+target_name]), target_label_order)
-        res_dict[key_tuple] = {x: round(metrics_all[x], round_digit) for x in metrics_all if x in output_metrics}
-
-    return res_dict
\ No newline at end of file
diff --git a/pipeline/label_pipeline.py b/pipeline/fairprep.py
similarity index 100%
rename from pipeline/label_pipeline.py
rename to pipeline/fairprep.py

From d3dc17d5d694bb047ca0ea3f22a456f6e00f7807 Mon Sep 17 00:00:00 2001
From: Ke Yang
Date: Wed, 8 Jul 2020 12:11:02 -0400
Subject: [PATCH 4/7] cleanup

---
 .../accuracy_vs_di-skyline-order.png          |  Bin 43040 -> 0 bytes
 .../accuracy_vs_di-skyline_formula.png        |  Bin 42840 -> 0 bytes
 .../accuracy_vs_fnr-skyline-order.png         |  Bin 43584 -> 0 bytes
 .../accuracy_vs_fnr-skyline_formula.png       |  Bin 43356 -> 0 bytes
 .../accuracy_vs_fpr-skyline-order.png         |  Bin 43619 -> 0 bytes
 .../accuracy_vs_fpr-skyline_formula.png       |  Bin 43487 -> 0 bytes
 examples_skyline/fair_prep_results.py         |   92 -
 examples_skyline/helper.py                    |   53 -
 examples_skyline/missing_data_results.py      |   99 -
 .../results_play_skyline_formula.ipynb        | 1778 -----------------
 .../results_play_skyline_order.ipynb          | 1767 ----------------
 fp/dataset_experiments.py                     |  126 +-
 fp/dataset_experiments_old.py                 |  341 ----
 fp/experiments.py                             |   90 +-
 fp/experiments_old.py                         |  383 ----
 15 files changed, 81 insertions(+), 4648 deletions(-)
 delete mode 100644 examples_skyline/accuracy_vs_di-skyline-order.png
 delete mode 100644 examples_skyline/accuracy_vs_di-skyline_formula.png
 delete mode 100644 examples_skyline/accuracy_vs_fnr-skyline-order.png
 delete mode 100644 examples_skyline/accuracy_vs_fnr-skyline_formula.png
 delete mode 100644 examples_skyline/accuracy_vs_fpr-skyline-order.png
 delete mode 100644 examples_skyline/accuracy_vs_fpr-skyline_formula.png
 delete mode 100644 examples_skyline/fair_prep_results.py
 delete mode 100644 examples_skyline/helper.py
 delete mode 100644 examples_skyline/missing_data_results.py
 delete mode 100644 examples_skyline/results_play_skyline_formula.ipynb
 delete mode 100644 examples_skyline/results_play_skyline_order.ipynb
 delete mode 100644 fp/dataset_experiments_old.py
 delete mode 100644 fp/experiments_old.py

diff --git a/examples_skyline/accuracy_vs_di-skyline-order.png b/examples_skyline/accuracy_vs_di-skyline-order.png
deleted file mode 100644
index 8673899d43952f6cd3a8a90887cb93ee847696aa..0000000000000000000000000000000000000000
GIT binary patch
[binary data for the deleted plot accuracy_vs_di-skyline-order.png omitted]
zDa!Bpb}nEVwb8dxl8!i@TiJ5VCX-=G2J9o~_&&MSunoXL3IAga{9XYnt?)lY@ zz9KNv-E(eo{c@1{PjCI5wzTukG z?O{ZbTi4dMO-f3tuCtR9f8fM$d*1iQH|rDg^wj-b^JB*59zLyjawVVD$>VdnFWuJa zg1PP|F~ouCq1FuBMbyCKjFT9Fq~S${8` z6`?k@$MtqDjdVV;ZwH^yvqxrD*#O*)hLNO$V$Htl4;RYw&Ra9SLwy5Ss3m>x^9Jf{ZA{c9c zgC7H3>rDcK*u*z`ecR)8wYBz(r6kFZ$HH`m6x$DD=8}^vB_$-F7cqI4GcKhC0>2}fhxV)r!^+w|3o*<1`zxM8 z_j&a;V`2FLYS|9|9F2si*q52rmm|kE+7G>(Tzr|AoA15$i#85cu%4_1lJ4LITF}63 zU|Yt(qO(?DYPV!Z4(y7%70xRrWksk-%6uU)vwMCFUcmrAW~ zf_P(?FL811kUzQ)XP8l;`)vRGi4!LX0FjOLDU`ZCTG-#I(d=MQ4U>PFalw|snXKhE#QflM7tP5Ne9$jCpjsyg*YmB$-dMpL4a z>Y2@}OL5q!SJG}iMVc*xtHX{nID@1VzUV8SytjLGJrmikVIH_kfksbpHF3Jn>e}0z|EkAg7JUuszmAVlN#NNcj!~m1(>YADu zfm)Lz;1Fu^h=27b>^!3B*u*vwbcNs`prn?vFzE46SM_IZ{r(Z@;#bX*{3ljAmUh3*($UlAW_^gjXg|M{3oo(5vNnt!FKmHKW35 zYK`Yxo4aHE`0hZ=#3w!b_GCF8KzXX#x+I1_eTr;uayF4e39=_Yv<8czho^BSPG&A{ zrdPBm%)y1CQSPiGS=mro_|=^5SaO4bH^+F#_d-!?h&r6-1`^|niex+&o4usaUb~c9 z#Tv)>sKs=d#iHxTjeXj*J?tS5r@cc?<4kvY2ooB6M@Moc8GNP1z6a_6zs_IPetv)?rHhPCVe_W5Y@De0c~)z3nL zDUm-9)bEitKJmBDd&h>3C*UV`$(q3eaW1OGlrzHLhoT_ZbHYtl<+ykCo8ga*%dh$7 zKVJ}Dv-b|mm9?#_PxFXm-E$I2zIh;~YDuV}@@d0( zzP>#b6-H)8!jHE^pUcS2&ENQ==8N$&dPPffubab|XfFZn^Z$8C-Dus~lw$tn&t7iX zf*Tdjr20KGX$+nHV7>7b%MY~zEOSSx8oLd>#zf0)UMLtJ=7@~j;rfoT(|V)qDFPQR z#&}x@7IQ9)1{BZuwb*?7{X4_*>wl?h)z20Zs&nyHxq3%txQ%sVMaKOIV!X$WmTLd7Fr$+__RAKP0Z zKV+f3=)jkIh|@JuCS+`bX6$WNxu`+9r4Y-Qz_8V{3S#ZKtjp;X2L1>8J%3fnVH^yR ztW{q#WI^$!UnRV?IG2@j77h*yzot`Dn0Lq?rB8iAMN!n1U$2mXfAO^;BVL040})oS zc&CEUs+XZ?^$k`V!sciH7e8pP=TP)m`0+w%6FJ~#O{ zlRY{XKjYnm_(vKfE36U@doHmp+VLa4{g`H5SO&hBCSUYy6eeF#k`Cb`|MWkiRdmN` z<-L@9iV*~D+mS$!h-eD1{yAN7vv(0OnEwR>CnrfB8U@0i2Ch*|pI>V|ku2=w*Vk&zKw zB43{zhgxnYA7<|AzPzkQlHb>iJX$~RFFX+!XSBYSd*8(k2NcW#l2hq?k&ylc$W_bz3Qy2G5P9>W;@ARo zb)VU7&G5gU`Fogk$#3Z|Ig)`Vp=xRtpsDA1C21EXyd6zFlf`%OT_}B!)U+;uF{|<6 z=QWS)4W*-kkf+kqFD@?D0fe3O2;`6oUl5;4V&zDz@!;!jdG<>Ukmyr^AD(mx#va0Q zaY%}fH+{$c1vBf+FI{4>gWG_eVrLNsVs}FsA2w#`;OMB>$3L&YXIyxg6w{COU0q#+ zWfD2GN_&I_1YX`Vmp{j9D9!oD?}Rgs`Yr8LUHXE80$X&&h{1)I9I_Z$Sz$1N8a}A7 z2-*PMd}rI7s>hX~ib@4oyU`m(L`2G<3o}7}B#4)T{v@|Q5M3x$gP`rM0DU`)a#Tqe z9|asw8pht_UQY2|dAqF9Y7X}H@=8h-p?09ISUtRe`9A~ZfAzsy^>O0e@-JBu4F-tm z_doI~JmBie+B4hN)_qSWH-qm7N|Huv%c*hl&7}z$$)4A@OPaKcW)zGU`n(t&rRHC) z6W+F#Qdz}r&eK8X`Rh61!$BB)EZ{kIgO){W4W&VdD*VoOKVvpE2|9|p$|_-oE9L%8 zk0)er4h`NhxAZf=%$x5)FuzsDcFJmuGxB|63kY~`QY~`mP({hjhe9dqLtg*_t*-Vh zECdKz(pFel;^NJ-1D};1yJohO#3(O+^j+w)?TiO1&&aNCgnq@h_?aaul11{_ z7@BojU8_0Y{P0elW@FU(krE`Un%kYCsrE)SX`~v8HM)5Jh<&dv{ZLd%ILNSmQ$WDY z35pi5#Ax@cIlk`F=ZmlV`uLQzt)=;~k17S{kF3j=tMs=vGBDC{NNg2!b8KRu;)Z(C z`$A8f%aHsrsX9=}#}0m|c6-cq&e*t1tmHD=;0;NfF~PT6UFbGi^qnB~Oy1nBE*hhV z4%=@#b~H%h;6HZol;mjd#gY7ED6T1YFT^@_%;vBCsez7s?@(c6)|f%@BwNJQ))p#C z1CP|Y<0U}fz4hb!&MK2bFH@zaICxX^o?qcAtNX(I)QhaF3Md3cyF`o^2X#y!>3@bc zl58ZLRR%jKvh2PAeE+bb`dVPEoz0KgtEH+TbB`-1kir}}W+*~~P)U@7KEh&$(?bPBF(%mF z^z;Np;#FBo5tG8j%XxE{{X?% z5&v`M#@E*u9c5)WgwErg*+Q04hNk09P(HuK{5wiz@z%f}z|#C_n-_sO<#6qfA>EOc zh6sWzb|e)3caWnL%D>`K)42={>8-5jv0btZ4fS-^P}DuZn|goiAH35)>6fT$4R_?Ac;Eh^ z*5Uj_uWxVfv%c_$|M9`UY(@8uRdN(>&0L#?pAFQ`VmY4qZ=-~h#Qx#7#sU*kX(--K zII#ZnZ83Lk6s9nkmijoAO!yWTZON&&bLWF95YcA`b5_%8oc;R~phUl1Un{Wo z_ZO?lPsN%bYiUWiaaW|HYY$xsG`Hq(OV7!%;h-cPSS74_?Qev5v`zlmBV8?cu}M<7 zHm|n6{qp1T)TBI{M2Le3MK+l*NdIyz{_{KeGm%~${bYY^+JV?-&&FOp**|p0n^y$) zMA6LnXE(K7l2Cw`F1L)SIdce#>59=c?7k|nf`FG(>JehYh7B7c?AtX?N->am zwm=PcLm+#)dNUQ}&E0)$zo7F_^fgHR^X2|51okeo%4%nJA8NQFRsIB!SzLR=9$sDE)tU^r;rFIEo9Em~`%qJqz8DnZFr?4fu z0xrQUc1k%o-SzeL%MH+?8BzCrG`lA-8udf ztS!%2h^306FJ1pzpL6xriXQumUyaI|D3o<_%JN4vIbf2tHPH5dHDg;t+0OM894g3P`R#xyal3I 
z!QM|0Bq?@w!YCE5^Gj-BMRJ0p`XVm+VdrSWL?|Pn)6D7h5o1o-!^inknP=;m;e+=pyL)I}zihZx3!Y zKQ)dzbyKxTX0cv|wg~;CrEu1Y9P2^zX!9*}zt436#@_%$dZM&wd|T0}-XF!cnawSF zqF#~_tg}Hd1CO3~CUF4Z|IyvPEij1`F2L*Wyn-E&$qM=%u5QiGA z$#L7mGW$fRBuq%bz$Rh8cL|S%%X28pbH8=o$jj8@tExYvA063c;R-88lr1E3>pjri z;(tqZZOzHbBi?*5=d;#E37RO%Z+*Ge3&>lgK3DvyccYI69SAq)(ih1@f3t8LEpbr| zj7?LooyA1_5194NDvPhL-F4y2*c=*G2WKjE3OARQmX5>ASM*|a1Pha){i!BXL4_4O zZjAp-Kl)`Q)gr{`R|0)X<;BAhtmXHGCKevE4a@7wyL#oC{uj0A^Sn=0IZ$VNd1dp{RL1x==lO!C=R$ zk3vnwZzr$$C`+Jq!364kdV1)fpO&M8nN?=~V@4ZSZ<=}o0+w=Jt0`kEU6t`T{!(lf5e>?R4Ibi)C&fflg#Qgsk@c*B{zkdQ_%TzaFRf68k T{S*QLEk#*DO+H=j?DhWv+7o#k diff --git a/examples_skyline/accuracy_vs_di-skyline_formula.png b/examples_skyline/accuracy_vs_di-skyline_formula.png deleted file mode 100644 index 271ba877f30856507b9b09d19f5444fe4898ddd7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 42840 zcmeFZ2UJyCmNk4xf+C0kRH6YzK)?VZNK^p#MsWxN|=xD-(Kgn zw7Jh0Jty3OVMj38tCDJtVH4d>T7kH&?OB54fva>a-@PVdPI2vwxsw>EGyEkXcEH^x zQMU43wo!*6Z?sotb@tb^vy3t_mmZ4OcFDzOpW7j_PI6;- zC?hB3hlg9FB#iKI#uKA~hqn@Vv_;pCNRYw9$^RZ7YdM?Tnv|-OD;MzIW6P=Awq7)j zkm0&&)9pCm8y6vHJA8d}h|l8{`-9#Ry{M?DIKu79{t9pU!-qN9*w~7fI?Qs`w1Z!V zg{9!}Sa2H_X>H|zBf%Qph^`L4=5?72wJ4DF-ouvGN?0KpY$rus@iy%(am!f~#*~78 zlF~%#pWl5YHa0emm#pNi9wi0#lU(p~i{q{G z)5Xj02QqSdoaIGbmh429dbhVU1MD_dtbLgT6(&1#H4E)0`S1T=mQ~T!4Vx;OSE!m- zRaG_0&|>y9`S$YYb6?-qNXbWoLqoCAGXAFRnMQtu(WY3R9@qIB@oR1A2I=3vUCXiP z*48VuZw=zn$-Y;|xH#EKFzG4xqQlw@N5%=-oF(}d1E3kXz@9&?M zKg`%ZxnwgUI`{2(!E7bdV!g*!R?YP4uPclx8p#Z&wIXL`tX9|7Oq*h4xlW(96Dcui zJ}D(FT~lALNJdH9LNM^q8(U|+VE$HYb#Q;(yYn(VJw0nnotA;&;i+TI@yTxMOS@=k z+u**AtWcP&&J8j08^2_AosrBjBPdu~TZahQ#%(Rci;sQkw0NDHo4bRCF=l&n+QYfe zld>i52Ginb;}w_XDO^$9*E@}`j>|CDmWtgSx)Zn=z5`zaCafk)NYl%4Temm5f1j8b z|Gp{Qp(jx_Ucsa;MBurPPxEtn?l-SrN7FD}NPsQn;^b^hP>wzTThh{^W_0(i@!Vjw zx_faTwx*2kjGWx_t(7Wq)1jI`VW+v6%*;%?UtgYax$Rh8T`ia{ zS6bV0A4;1sDgR5kljEkkZdzDagz%dL6yS()@H@n5 zkL^NMw*_S>DXHo^8K>CUuYUggxue)6SK+-#Vz9omY^runKzw|B;_IG!Kf)3}ems+3 zSeWcIKfE?+>5-hKS6Fi=!+56ebHQSp4!ZlYO--@#xaDpKvoBBg+SuB@E-fu}UKl;P zXV0D+A^ftIrJJAF$ND}$lDl!E6%2sOZIV+p?$p_{$^maeLzDN2tw^ZGU4O+cVA-Q9 zG<=`v`b3tEZL%{FXKUN+Nk(b6G;tF* z-cmhg_4L=TJ2&2)lM>%tczwa5>l7bfx@qYM^;F@s7}|mdou67q8>3mOcsbbFNB4NF zJW4CwauzUayMF%O_m|$@-X$AzwOpma0Rj6?oH#M&bx{8$98oxitj?oHQ+4y3&c@r0 z{0O<^kk-}R-I{46FWdAp!>~2i(!j~t*>vJ(#@xa}3*4aAl$+<_qN`1)vk%jwb5eU$ zf-E6`o?GLheee3}>R6d4S38TE!$`W zyCOe5=tL)-&2`4}-1+k?>FWA40sMxQyW#H0J-KZRMjLX``h$pw2y7^vI@8IH9ASs4 zu(mY4WHGn3Qx`94sod1s26xaJEwcyiwMlc_jleYGVD)Q-=~(q-4O2M0`L_4#-}6{n zS~|J75LZVcG?O(_vz-@Bd2VK24Hw2WS$eGNun60IJG=eu)p6}o_u}`?REsOBQ63h0?p%b+SP5G1mYP7+D5DIQ)-7}f}^|hv^rs>`J`*xE#gqkzE zDR%4_hcjF{CMxceD$M7s&-;zWLd^WDfq1bAqJnXvzz(0BMXJSn+kixH$Eyv zh^3yt|KpUfu=bPN7xj~i7}>|3Fj*-1Fz{r9<)W>GT`8PcpKMJIe)#B74xIS-3PJWn zom>mK>(^W0J0h!CU9JyQ`D?nnyL%DGTM}Qx)lF&fo;szZp`np)8+`ia2iSn2p&_BP zA7^a_uemKW$_V4e-@p-wf(sEEUY=-IMZWAoZuhXAfbV$GNbSzuyVcCiQ+|XA4Xmgt zDfRt4XF#v1hYJi5v`ov+=C9Inb}rc1++?XO-OAVW4h|+5ZCl6&ZvSCVc_|wkt~XCJ zPSEgggUpS_WQ7pn)dde^d0-X2v=QSnjD&%N~;SMBv$MZw-eb~@7>l% zR{$odrmday;>GTGo}A`*MM9X6ZA+2UJQthvIqyqcxrO-`+@X0MMZU8QAZfU7xFemQ%yALA~ z9{RamHa^XKdt06A1K$al1qH!oLnx^}vwLV{ga}?;6TUr=(1=c5*hS@xjEtJPI(2RB z5C&ddtufg*_aIzCI8aBi2O|2ySbRj#rS!}{J!EievL>JCLctE8laQN;0Mv&IiNkH{OuJ?wbH zsft!+`t#%Mg7Kte<^i2g=E_=HZ;>0^-dyJ9I660w7=ZT&oBU%sJaO*y6|q#i^J#nEqg>D@?K9&Ol)7)(c!bM#)G!5KxkbNYio&vFHO~A(Xd>Zb=Ol`gR@IJHRv&3uMU2z> z7?m1b(rCuU#>U}h<@?9R#GE>HDxu8`!xvN@Wrk!LzmvHfCE@=qQ|$i{b-@2F@_%Oi z_K)vd^ylBX{akRVF*x&GG&DGz5Z0%X+if4ZwOa2{Q1HT3R#7n+a(+2EIbFS7N+-yT 
z`5gtj?OxNeiA{{3GwtSxaQy%kTMdDrsgb;xIiLHWGmcf%`CDzR{1tCnaIxBx2X~ek zU9jv)Ieh+}Zlmz8U%%`|!X4U_r0BD>Q#%T9$%aCwPF-_yax!g8xtUseK?$m4rj3Oc+Pp}@S9BaQH(W)rE8RNdWW&(9jjxrZsxJhaK zaKoX!k&U+|x(hYk))r9tggh=pGg%cIn@mztQs#L`*7RLxrM$eolOeki_Sh;|D3m^Y>n`k3qujywdMYf8YR0XN;F7+J#+~ zQkq>HXTKh7%`lYd#u4;_i?$&In}M?)Z;Guo5>JM#4~n_#`%hkf2{|_G+5-65v4*HC z>gyj#TBr{$XF+sS>!{t@S}wg5v_0$ZkpMZGX>**fv>)?W_Q7$;q*27_`1BwdesvVG zHUbK58}s#MP$Wb6)VO-}YHUuph?5Eg9_7wY59s&p`v6sQ?W`8m#g+bS>9Vg+R##QY zz)m)T|2`oba^XQpQ**O`^MWx-1k*h>lU4pwIG3{PT6FK{P(ZmY3WZP%Y(jaP|Dpcy znL3OS4@vQ1NlD2{sE~a>d^lzZ<^U%q$m-rF(p@k|sH z6qV2RbG(2|f^oWZyF^(<<&G=tp)@Shm(6|U^O@Zhhaf&S=9p;?K%A|F{O`qkQ5VMU z0^E7AwbA3zd3itI!g9|7I#F?E_i=9S+L`(J^BO5yw+|dV$OQT8xs8pD7f=u#7Z>lH zSz5YKvbm%q!vnYO{J#DBX)4aVpk#F{J0$9y{VFifXmD`w5Y!jvSUonZ;p+xoD~g?k zFn>>EZ8Tc?_U$NTB_*Z)`?HNQY=n*x1;-;49r| zg6no1PmJFkTBi})VNo!4a=_nXBib738^h_I;!1T*JtOnZPjwlr4h{}zm!*}JU(G^2 zd{`s7^%NJEO;NhP*xI*@G$=t22fP5Y;!5qTsHkv+{K0pAq@KxNd@Ij-bCF;$CPVYK zoWYo%%D{a!7OM2cGO|4hP~0&>Zua5B2d*%WUbkh=GiQ`|bUwA99By>fSV~5wwyy5R zap@OIrS6*xpDEcc4)=m_k9XuGySlmIIv?aFni0|phJthkA#vQf|D-W2Iv{4b*TVqz zqB+~789fhWUxJK3Ye$}SbWu@}9Uv;)s0v(wDj6LBbf|V$C|$n%kby@#29-Zxql#jy zF#!Pq%mR7&`C}-3gIZ1dlev>rgr>g!d-(D2O88%3XsC-6kwR&icOffjA(I;Ai5%}8 zF9S~W2F&rj>zpj$Ly9IQCU6j63)@d5c(XcRd;k9Z-0W;4Y>H)49HjWf`Fe2z9QIR+ zii(gpj=y)AYJ@7B*>-JdG8rni-!<@j#C8hGqF0B41v00YEQ^#I8ymsJy^FgMI{dSM zriGrApYbtNKQ+C*22hZkWngC3?cXRXlc-vT-`CE%=L-hP>|sCgGYS%Op<%nxhR?HZ zkY4lM{83wL+hRRS+(Ajnef;>900dsh;PrD%1E=x^Xu*|^Rk3-fNlJRcf`|$If~Bjz z?i;f*Rn8r`mVB^i*(SIGzQ2@nX5fz0wg;m1;1_uTVo%JwX;$`t>kTsxnZT)-E zWB>}dz@WkOQWtV~Q6awumK3&Rx>;~g&PXSK?-UPDD|n?AC>?7{6x7>j@)$`NY0B{= zWXNm@`Yt!Ve*LOlXrF37-J=&f(&ID|j*NI~G2J6ECgv*qw48!M8+^lc!d@1ZR6sm5 zz!5dA&X43kF%2oJaZ7@7K+JGms9E{roz8==WEKG7F#)(FpypG_cYu@t1&MTD>8F%c zk(PdfzE^!UM_yiDT}vwgs^`N`qQKFEP6IRMrmY}JSFaa-1-XJ8nAcmKVzqhlTv zo440Q$r9M$+S{}469K?z`22|Sge+_xGC6h=SOKa^^__<3!!J1~0NmD*V-_5fS!@jz z0v8WYYIYE88VZ++BD0ToQByC%xiA}Rid6_M>x7aL;R@hLh=H2=uiy+?(V?RVk2flxw2C;CwO~(=^0>#D2!fxKg39FY5y>6ON8=@czVA_yfkpTWl z)pb6mOZF+OE)!<~$GZvGZCegOKN*T%`2bf}S197yOa>}_$A4x-Bj5~-?`i-lY-Np# z(-b4b0^44{eVYJ4CPJ6M>9)cRP1Ui;|2n%encJHNwMsHvn0CHREZR7@^YQ`j9e?c< zUGB1lYPk6!1@-lGCMyA-S)$u7U=$<^z*=#rf(JM+jaLfLSz(*Or#W^GTXuZ{ z-k_ljGL2C{u?mJm%phrNV5f_$%`mEE@Otuu3s5|`JEeoO03Xn>iRt$D_cNZmdo9N= zmIBj^CJ4}qZ_GSGjt{JY2vuZCBhDa?vEi%7C%7fi>16!;%;nav0RF;(iL<#ayn>tx zQlQu`U#>z{QG5127%roL>6?4s-$ubX)16nEL+S-7I3B4lU((^m znxuHNvl{?v%5mRxju+zOOtD*rW(YPyz5h! 
z_+#}oS;$9yR@T=qb&HNc!5-YTnE>9wFZCSwT4xCLQ2RPm7u>mX=fM8`$7Lap7QodR z@QYMeSHtE$O|F&TAwpf9ET6DRB5*?0v=|(r??PdCO>~_9n_@3h8u`j8&b6>toOu%%7z?jkncd}=4O}c( zdtBmTG>U8e{d3?Q5!6_`wTD9U$DIsB1uVJ~nUr)!aW}|{)7GdO@Uk1;P5d*q(yCRZ zo^hTLwu^L;o;xb# z&d$su1Kb3HV$his zbIpUucOq~z-Wu{OlfE)fs0(~hMA=0Uzx$1@D%J zta`EJh6W|X>xG;%dJfc5pQFLSo?0Lw()5d+5s8J+(vp>5)X2{KMdqF!JAQl-@U=^I z+smcfsVIUTJ9Zi1F#z1AjEH1PVw#HukkU2~M{&HL8H^)?eom@fe*q`&Q&^D3r_-_* zIs;SJfgn?d;G}9890Q@%wDQH_kh6wQwxFgNEm@yx2EWQK8?O-F1dJMFK}UTLpO4el zxwSOe8DH$OTvJhT1*{!m?vU^$j=XnKfNg-%E_DIQGlY4eEddDsv>Sk%yLRqujFElK zrMC>#0>VTA4wrv&dvuMMPJoIDx3%F^ygpS32xYpOu5Jn-WuWp6JV@C)=8P z?l6@#pWxe$(goLZ!97+x*8(wcM2IN1jJ^4`@o@^_T))dKNc;L7C;To=y$lTv1v82T z7*&&q1D=!@s!bphk~Gux<>cj?gALphp?;uYxWCzMOjU_>7N-_z{#Mk8JwI%QP%{j{xzrw5o=l7+^i}lCdMf0yl|qvSg9H4AD2|U zLJfU={lJnoAaq+Hb%QcjmTO(4ySx28rbb!pr%yPr7i@aY!{tD-h(#OO9+Ajc`Cu+_ z@lv^YQ4ta5UQK&46(~gjt(R*$5qx27Q5d145Q1vY?Djo$_A)G>xx{V#O+dg0_+W?= zx90cmZaqZgt;`PO0Fk*lELfV@OrQ)o=d9NqMQCLs2O}bN0l$3%aQJeMiz51B#C!s>oAmLc5_o>75t>ib z*VWdl0yGY31rQ#`+!k9k1K8QvB5eGZscC3n&*B$gCv{ZX>ka{?;nBXlt50P?*jZ2Y zDjtl%wa9L^M}RLqb2enI;mRJM>zXq*z!uGbb>6jS4*^2wBA8-`{so2$M;7Xz3Rq`;L#c+ql#H-$U`u#*-*w_D;B z*))U6`!q*!0*~|wR=`HLxPblJ;C5n4;FD-5s~}_g$4ZL^{9Pj>P6#A$wor=qn{0tY zAh4zl>2LvP8&1fgC=U^ZKHauD;DmO@9Vst=4h{|?6o?6Pu9)xsV%kL(&3=sWCFGPQ zplk?;$;-Ql@RNRTHhXJzbwQV<2{|@J1%)6e!FJFxYeB?dm@{U_qzwg}tPzbfNET30j;Peox|0=W*k$1h8^M_&+R>L;NCjM{ z%?p{US5JX!wk(>hG%~mZ^)$H7-6j1D|EA?0S)Z*_#x_Fi5HsR0We zX6cFGZIC@esuma$k_2^?F~D2TU%Y4mI}9IY6SBFA(rie~(2mC!J|M9=Td0?*eL^rd zGu_^qr8v#nkeXjf9vHOe?T4Zt{vwrC(Y6n{bD{EDfO#=-*pCd+;E;2+0@a9o89Jkg zdPQ~xxsxXN2~;C}t*jhp+g|Yo>*M9oFVX_iJd4$7@Rc0FZEjFjbm^xdqP5^)M(bD> zs}tK&Rl(L5fomEE<}V%cb$;`oXQZxP)tv(Y$p{g+**OU903LzW#X~`K>46CVW}${9 zb@!T4)XOleLWdpXCj#rYNfKBK^`1X_wX4QCys4=sC|0s8CLX;TFm4e7ucf4av=LN|Speok_E|~v}1ei%~#QVvUaeyW4 zHsV2clk$ji4`QtWa_z`4^k%Pie`k1TeR(<^FkQ8G@7@6sr+nwm2XxiooZO%eUjqR1 z&*&U1lAE2aR^`t|061t7qCD57OFH0?0i^*Naq7gpo2%;qfFU5KlMVP0ekuN;Pp%`0 zm2c!OXKks0)~o4P+%6;UyD{VFB(QA5IBz-?yV5@^uqK>!udXRR!aeEfQ>w|Xd{toO zlI+~U#V!1}G(DP8#j1Ms>SIWGSb|j{#J`&=UeH?&ISbAroRr>Y zh}L>~JJK59ctk^<6SS^nVUY&bA6AJDU^heZ!h{{g>JCnKN7rzc?Fl)wcS}oca3lom zC$vzx;b*wU7%=DF19)h&$JRXCL3Oq5Mq<)Pc_90^PmQx){=`zI;rr(1+D{@?RfOu< z`k}?{f~)Cf1s8WPCP;ni`E+vcTRw(^2jih&MkfexWulQ{7)e7}yE)Fj{Fd8gcrVZ? 
zrd?jdccc=A;Kk8L!4)GV+}kfEWH-pf@AKm}n2Z@$gf#b%FrN7g2?_{g_`{@soY@T} zpY?t+{+kO0ai02`Zp>sfZxQeW#lvDlnR$ zj=n1jkW5q_*SMV)61qvybI@baB*z5OA+AI;IP`P<3z>`%V)h}5 zsH><1AhlY4f;N_504CNzKG{>ObM)v@R6!y1srogC8Q8Ug?P&%dKs(tH_%+^@pT#Xl zfp@OzZl%DmaeZ*4q-10)wIn1YQ27Nnhe6Qpb|$7eL*j%K5(o&wy~C9F_JXu?2#G+z zQ7%FbX9Cg)M3jKHKO!hN5+1yB=T6AUUb1gOq7C?;y<`rJj=nx?gpR;W{%7^y3tU}a zCqlmKH-w7k-rio5A0U;=0mw$S3BW})K=%L)(1YAkVgBEN46Q&?ndYuYpAr&!SD(tb z($&WW=K#B*q^9-;>ZVIb^ai;KM4KikRKkYIDJV?9O$EdNk1mbMZ3r;jHE_*dpgp~T zu(N-RDFiIc)$$Qjb9+?410vPMP;GGSos7IZ8_Y_SwjaZvz3^&;~x2G*fo z90Qa&otTM5_CV~1ll@S{#KFP4?v)tEPr^**_6mhiK&b%MdkY2X8`uaC z7+$Q~xocMw1NQl3jOp=i_+APTY#siyz_gukV7ulb=P5`N~ree8vVwi*z z;OplK?4AXOhR#<%v#`(~ZyKsmQ~)jd4N4Nm8`9pi7rdUF{8n*XOGi=uwSz$OOT}Gw zZg)~9)`ectVVD{Xyh#q;#Ioo|_%K~;SMc%blbGxY{;%dQj$S3h@LX_J)z2V(cz9)=h6$5(aOrwft4Tpvd2GI z(-uDID6p)uVU>n9m)9Szsj96r?s4PJT}w1UQVbO9JGgJ#Jv1*)->~q8i=4FuwK$&p zr*kLOclfI|NW$S4|4zlzcjgsppk?xhmL= z9Zo6^j)tCKUfn|4@kVO>fwo$JSGv{_{=}JC~Ufg2UYfK zIq*qVmt6dgXPB>a)xFpI_Qmwws)oWho}xKAaV7MZUVR)UQs0pL6IhMYo>oV=di zPCVDILw_EX5dL-d!jyEap{49lcrfa%&(S2w`!UvR52mValNoA1l|8;-;=E6Z&6lXjEm?OOKd{8I(1*hmv}{xDrR`C8|ud|MM?*Sr)C+^B>i>G*y) zKGl#^5~=DB{>3H)&tOW+eyqMYs66E4sqGd{iSA6W1mnzX%Bn3@mXsR6ltO=mbC)iw z@za*!&BIWZ*?!RM##;8G$}(2erN!gU zdtr04sfCZBp}_@qQ5+9o&juqlo5QF4VNEp8R% zZI+TEN%&2$^z9m>4`YTShj)GikrU5Nu5M*>3odG$2DNmg(SdsqiuiUKZoW0P2oxcP zX@2~RFUT_&x|nC)Ug>mL!mUd7F^1(muhUrK5V|RM1BVm9lAeMf4oTJiL5GN1rA2Ux zMs;24L~^%&{~J*k_jf8@@gx|2$Ma+>j6MV0+gk&RoKOU<3h|y-?|D-=9klxj7jN2( z37iHJRD-_)YY&+HZS8^AIl0zOh-l8hsvGbC;-#Ms!t@F<7=??ttbN5OdtyIzzY`4KC7&TdLKgqF=O zQTyqqzdFts`^lP;dqzU0A2)4d_T8IMva)JdefTKWW8;eUr-|Tr3d-QrC;O=&W-@~n z9ZIsgY~Tcjr*4usKmXhEjOpt~v)W@?)DpAR=9YLbU77j7XwKG^2v1n{W+q+;op0uS zL1L^6bx~%?oxsj!3-sV7PDmyX*@YI&71p5n-gczMqv*qIM*>MgIuh^$NbdFcvGLZ% zYOu&w-=jTna&--tD#2{9bt|)+oOEWz=6?=nZnBSmUtQN!j&0eA$BjxBo>8B}EIJQy5Jy zf9IrE@C7s&t4_bYe=Otu1@){QkzySn+rNDIa!aiIK|m=lBYu8uk;oku0pujeEn04S zE$TSiue!Vk_uY^OZtsk)>tzG_`tQVs&v6Y6f=hPFW9O$3@dASnun0qEx@U}`umJ(~3~4$kAz6Oa2Dc-TEW5+8KR z$MNjeOMv|hWQ_GYE9evY(Ctz3;@14!9OKf+@G(~lE09${nXt9BCG5VDXSfP%-Q4mr zpMwQd1wFuP>>|+b-yi+K>#uTCB2{n_xsnu+2Y-`fH{0my>au7I-jS3vauDLNWX9Yn81!pwFOSTx|O~G9MEg$RmG{#ZPPKSO)WwJ=d1s`x?H>9Fz8khtv)J#mqhB!X-B+adg!$BWK z64^^4(QTSd0>nn^cw1^HYBNFo1Fb`mjOL#*neY^p@(l9r1RF~#jo(Y`6JEH4CFuAr zaWSJl5vbbUfaL;i2R$aEfSfdeHX2kb1LJc_QWtdSvrGdy+V1Ven>9>hkro0SJPiRY z1voS4ebGmzk3e~VR}z7rN9CNJ9wXLrYK;YJ372My6a>Hu?x`sta9q5+ZByNasMtif zzB6cf^z>MhsnRY~RbT+glx(g_2#0LW3l0tr;?*1fkc34JH~!_9F&4l;6vBmt zf{?-lNSs>J9MhGVe%_Sbm?^_1EjF$ZE&A9HSaTcO^`S}BX##wm)om$E>C&hJrj*P0 z051j_MW|c*wJaJxctHZMtF;$vq5aDmm?ArKEz^NDyck4)&a~vTf*LP+5{b#kUID$O zP5YY$88wq8g6N^7cWOKUe=@3(m8|$~^*)d?B8~Tx+owSC!mW{Fg}aF<4U9V6cvDtIy3fK)+38l|Z!+)uG5*{F#1c==WA-wVLo#ie$Otk=mLmY=H zv$AnIB%9G<4jpBN<^%+4u+&0dL)_MR;C_5AU%7(hn1IL`%&kCoTLLiH05v87 zzySbZ@~eWzkGgp~%m72*eVB`jQw8pl#a+BGE*4DcmVWf^h5&xo6)Qw#p4(l~G7lJl z9kfN7!nMoE$|7hLXzsA#)7shB{$Swv9j0`F+DbbqFGHr>E}-REpw{i~eX{(o2}iiW zoF{jyG1)?>Wl17P_r3w|3Lr=jXl#Kgb88>@&rufoABs=~!|^oDwBx7KeMG+~gWF|2J#yM5(~6rZ!QUf zAPvNk%^z=9n5J0pGynBl@jtZ^%bXP7A-J1FO($C9nNoll>IJI;$YYq%#TUFZasnU) z;a;A7{FwX%JNvZ&_pR0YCpb6=Kh2^oHWpsLd3BVYUY1Au+SZmz=O-ioKOzns{u*&j zE*t8GQrCkC35G82Z3(Kl#_0x$Sj~?zDIcsc^(NRIdA{Pol@ch=fy2~*=)E{w#iqF? 
z;^E=JBJSRm;ri>#PC$EtF9|~I12RV1i}?K6Gv({oU$ArX^Q&EXvis~TZJ+7ECD$$I z0pH-@H!qL0Z%tojx-`5k>o&~uN;4yBkb;_ju-}_6oV+D-eCZND3Algn2X6#vBRjxZ z^nl>3c@B^RB-qqlassXL8gy#dP5e9yzY9GgvOs?o-0cC;_5$EDX28-3Xq+Dpu32)` z|5ho;yDGtTfh*$Aub!P9vfTM}#KNmA&^WtF!r(LndO!>6j7Ff>&kTC67=>(NoR=mN zLDYui%3xp3PiUB!=Yftzc*@PwK^>==c-3M>BPLj-MAcfsq{{AbcOGY8K7I4FkwN>F z9l9CPg%r+HUHN`iI&g@9`a^zM-Pz*b;|K9Wd3kw^iTf8d1D9MgT@Eak1unpqUtsb) zoNycb!ZMLiKU)k47J{ZEAMMEhc#NN4y<@(5O1XvXkI+Hq2fIQ9ARiHfceC2&VHUU6v$aYiN?2lsz8Xk)9~+SCeyhVGdaoD9TA8m-k*UM26}+B_>H^t{IR`}{giPp`sl*sA_r=CjQ5}BX-uaE`qjqeQN}_={;duXHF=-FNxuPb6O-B`(_9N|gHE+6Q z?!Fn~W>Iz0+nn2+tJC1*l_xbz!PTG2rJwF_J$;wV?#2yY03W-m&YnH{;@gU3LwY~k zf+g0E-%2xb92(dfko8%3o7B3MQm>a?aIF>h-Zv#JS)4Lcui1;>sewaD1;G;&q$s}dRkF~DuUj>Qg+tt1k3^EbqD=Oc1 zj6ypc5(=`MIOY4N767T6*$E1G@MKH2$>oX-d@6hY=BkoC_(F%Bt`8Xh$oEM<74*QX z&X7^Yj%ZA8Ao=@y75W{4pTdDK1+p1=z`0b^)dRPtx3}th>*fk1bY{!V%vRRNeKU8G zJe0^=a^R`sW?N=cG`+DIJdw$nqwaP39PGQ9W%5Mygq*K@pcF%FWo(UD4X>Fc+Pl-B z2EU)i;0lsf#J%}vf+l4YlsfbBE}#JFj=s`kGeJAIUqk#9Lx~+O?73b!^RlxWB+j@yHU+ll`ttB#=0lSPtH4-qP_>X9wp ziLj}jJ!@iRBbRw4mZ5bWTA34}G@yadaxDN!z{PWG98%iOjLBEPkH#}hZR%*LDftE8aM?SfD=A^xQse3fPS8uzDFYA z6J1J8Ht~EH`5V}(Dq=&_BWQsG)$HVS9ER6y?Daa6tj@&~$jGSc|M-5HW%;q+f(9Oh z>HjA%89;9T+87F2Qd;jfK_{iI=CQKSdgKD8dBk;ZBw1L+QNSs0zPo!UO(xWrjs}bR zo>3i@4PcSu#mgpH&4WL$#`iMsmiVkg)j*P#a4jiq>2z|MeIOZj+28Q>l~42+0w?)i zAH8C$Gr(y5I0^i=%jnHlDQEy#cC$(blkzP*}$g z)4&OPP672yEnR>6!er**v$=Ecrd}G%^;y0bMyltO&tAdxVD7r}5W{vh z$J?8!xa{ZYw^W$%Wo2D@M`rd$ z?hR4M*nE?dtG=Xcj}HGaAC&JrcKWP4UvLye0E(ur*&#G~^y->vWe3 zNYS|IqOfgU4?iXsG3LyTNPuA3hXtiWX~)yeGELv#lRH z#btA&bF{6F{ykbqz;%n;-~7BK=3${gb^VCqE6q-f`i!GqeRbSuZBCwG-|g#VTyg|3 zEBV{IC?7Tn_8!Qum_G~D_}Va25CINB4|F3a3ERZX8u2Tr)Arrv)=X1_kpj@$0L}9+ zq=_5AvukQ-1Z8GrAp!Avp4Vp|mMwfk0FS$!iTru8eIGW?);;DOczO_{5hGav`5~Xoj60LZ?`b247}O5F}o!2O2K;e?@(Lz z%8H}EGf3^=-+%?srNPTP74bYV%Hp4hyJ-H6J`kMKmVx|Ng=-`lQ&! z&Js#*bhRL;-E8RSD@WU9NY03rqiG!oS%bnYxuirNh(a|`@}U$GwZlUWU*WxnfO?yt z^$&GFqmBn?4%_Trh%DQoAYlx9J|o0D@s-9AdfPlfD=1Qfx_h?TVekbsGlJ4FC_Qus zM&gV4mQ_AX0vB3#hLX|KmqMKclfWO9IQ}ssk_*GA&X60Ef3k@G8M3Z61g5DkjhDW~ z&1=sS>0C5wtSJP5FdG>9Pc0>S7@KrRqhF%WP*NSk=p+t#0(tcJ(}Jk9|JHXuCbKwp zRq2kmC2(7OFThkXDg?nl93hV!kU&9323 zWS$Q>fsuMk_nk?I!nV^>qMVAKbVu|t?4jft%fSQRA5fC+1cylV;yFge@VV%f>X}>? zDt1`NkuzMRN3_W2r4>GR*&lfcmq~Yso`ml7L#N~MzwRXB%<41SQXfT6N*sX&{_*B@ zX$6MbF@KV)RENy#Xt18v)KIg)=*k)rewO0^y02U_Bz9k7Zi>6nqeHg6LP$vxUOjmkzF`_jT zvYAbg^+>`Ts4_}Mo#_&sS>g?*`plrBM1O%Ll4&#<>i74T-tt^1r%t1r^bK9pGGv>f zwKztOmB}dN^b|Lbb+k4Fr|*nh{aXWJ%CH%dGrD(=3rcF3XkwiG1BN3xfS3kx-3Y|; z*MUfndEBO5*f~xl3Lr`xCKCW03u-M*Xmo@Qi7p(V3H)wrc1Rrzs9gg!g7F{VMc+V=4LA$qt~_fbm68bu#f=F}@&|`Cs%w;D%(;6c$jK758je4O(53l2UQXH*@Lub!R)pY1~eDm5B_a+wr#C z`XSU7;3-iXby8|->#wgq(33T)JOa7^2Y>!4&?1A&RfTaKY}(Y`S2EjUGS5|?(e=L& znheLLjESm&<0*(Kp;K4@V4FpdzM`JoKb#RrE#w29z=SMlvVmJ=0`ni7e?8*>1ax#H z$UFYSvBZLotdxQdb;MlLzz$vg$KL44{x`v)+`#T!)Pd8NfFs^ZW0r8VHMcqSz&_*{ ztDjQG1i#RQv%S=KCvZwR|CU?PVpZ_2Z=cHJGB=IR-Ayp}cn5XjV`D51PFM?&QWn7~ zswaHDE*j^q->0X=JX4Kvuabx&mLUnh%Rq||>=vzUQ#`7gCBL{lf11?){Sm1&7*abeMnnNxlyry_3gN+rVzeQ1`cARS75{)%OKT2 zGAdqlQnj^mRVV6fl5DWgD9x6j=H?ChAi8X>Y?qzPEHQH&j%Y*qf7lXRvZHidT3+C2 z`95mz{4SAj2QK`>Q5KRNzu4C{H@On z-VzeX70{jjamVM%lqk72?e7;Fzw^-gce`k5oIG5qxXilmgXb_z7Q*PRr)U5JW@>CD zdg1mW4SGKVdOuxNvA0AmDjDd0hq;*A32LOI8OX>Mkw`VD#U?0vK8hZ zd5lKd{bx@exenu#R6x@LoKgb-Gz!k{(7J&JnEcVp3F48SJ*ii}-4Z|qn$;BlH-FRV0l?&R<<(R+Ci}GSWrza&n zC${ZIxe9DUxe{dJ7BO3#1+mBW~V5^%t1E>UY+n<-#$0K6?EWPu!MxxO}-PZP2ox9;m0)wSOj zjWpVSxRq7{!3+kM)%^I;26$`>paTF=o5FA*?Go2~P_+iWd>QFQeQ>R?9iyWIi=&H! 
zlm=RL`jo+u^Dozs`ksRumw57}#Y?eAn8U^oA;9yPxGAJZ7B zeXfoe`Lk;tf*92aTJO7AvaPLY1DWRt8SFPxx6BUG&8*B|RL9`5c@h#vdiT|n~JMt7-#~4~tL;u6y)pSow$+!eb$rRR*gzaEs#UvkN zG}%Xz)iXO|ThyxWSc`T{c9Btp?fw0d`b9j$y_~u5J^a+@?+^c-!};e@H+=6!W)Or=@%mj~<8p|3Dc^CYGuSVZGNJ6GjPLn#_g(c>YMO;Lo)XgF zlQ4H>eTy_^ssreh_8sglc*jjiN`cPVij;N60hY;|3@npel%$X&p|5S_YM%so(cO$Z z)=7JdBUk$j!AE0#f*RbOhd+D&mzIuQephicHdc1;-ZUBSie8>w&MBZe#ORWkX{+zO zhApo$GDu$|p`qcLMs~Dxyb>g0`8NDJ(K?T) z(I1p=Y__7IY-@L%SCYMEKrY2c)P3 z{)j|GK>_^$U~l>8T+511U}`~N8ixJhC6<&qNZpr2^#5WJ@{?! zaSo2EpG8htGT~@I2XM+us?^q>dOWt>3@e_{NZZ@nN7t7^t9Iqny~kfXd9nkI`ltjc zuWQ%k?8oOf!+Tw(MF*g-X3;vCl+sOa9^JDD8S-P&kveppc<+*0-ooo3hKP4{2Qm-| zOkp~yU8B@Nqz7TGWj%H=Y;2l&YLpn_y%p`WB65sw@9z&rCvMXoSph5?_3a~lI~q;t zAy^eZN-TP`6}?)%I#0EIL}L9XSQ?tpw!JYTjz)>0hIJ3Yop@PbUjQI@C$_y|i@E>` zfRNqnj=^6spfu>8r}8)=ksBFjLBn=>R)Fbo?T~iVRNZ2cn7K3(W{cXNt8*+| z_4LA_eQRJuW0lq|+yO35G0%gwpN)K!;md`-Q_z}au)U@U(+Xe~$u-o%K3z1w(S17m zBbta%=`TN;4u^_{h6ZH33%1Fw-X%XSjf#hGbcy@V?6E&)E@@82x`t+-2!$)4E;NT7{{^IxZ^`}6u1F%dpr0KA+ z8(pToQR!x^2pgY&D)B`DbXgF^{-p@la2@I+NY8uF;2e&oPRb%Zjz!7pJv5OIwbVxF?7(=uOK2}1KHeZf4bsr83Hw5T zCb)w%DCv?$i5OJs4VMAHP=}lcWXtP?oLHa00uDni5<3G~X!c8_>PCw}s8II-Y=&`7 z7!S`sMV`W;%ldcE6ifs35TwM49+uEr-KM9Fd^DZEZzeZXm`Xpx!|bL269< zfE$a|IiJrDvf&pbUY|wD>woJp#n?ZOy8kRt{QrZ@|5GgHw<)1O_djUh^Z)K7s<-~{ zC9?l>56)3uXe>bO0i~fNWwmG&H5&Cj>u^2a4k~KLP*cPB^zyVqfki!+F70t<|m(cd{0PK*$Eb83CkzR3X7d~h>d5gO2-QLK4 zTbO!;rqO}&115CEL#CA!7bhKnhFTyX9MplJkn4c?0Eg@EgA51ecgM!ZCqqAb0dyv| zHZ&ZO)y}huLWp+n#;_o2$blY99K>qWREI=nKSY;xyREBOgk4u=ObjZgBAb}27D#r|Vg4CC ztX7^k|4a816wV&)piyykErR*RLw3U5rMXu9mtmdU7cOXk+jhw9haT)E=-WY;2;&UU zNMKgN2s~+%YYXTY>L{jXVnU;c#$iIVJppmufiUw9bss^aAQ~qEL)cf(Z_!|mG9Z#d z1IZwdNr1o&^Ly$z*S$g5;q0o9#)cpm3E^`y{S{Eo4!c_bd4Qt*8>sVO*taISc8xUM zL^Qb;W{YJ4yAPThmyS=U^$|83x|Won4GMZ=D3TEhB}g^_ib@VDnWK`ElEdN7McMD$-EVc@zTf@E{c*=P24fV3`|Pv#UVA-h z&SyRlZ2*HOywt>DLFMJ0N;zn6lddTUQXfM#3-M*$)BH*w%Viy==$}?bGL^_EmQ>vg2^86bz-ikeE-qeX;L>_lA4)*xvN79CkrI~yQ4V_YWDoVyYC&OK zn>pS*e!V}QTR_fR1aC!LdU|D2db)(ye7_MJ(QKO-YPDvUw12(@RFzRQXhlzmgZw8K zhw#44?ibywPRKhJX9RftV2evgD6^N2Rz}#6>D+&uh_r#4S0>dRWs0`wvZRcRz3A&Q zj=~ZsJ2+&LP|y@%r84+C1vIF_C{MCLa%jkLpe3(v3azY|9S{hnFALtKq$5T~cI@&VAE%(^A(yTR zns7lhqQWwCIR0MW(7-|pwjyZQB#Ly9Jl<nkf$aKo!v5gd)35I2K2>r zI^snsLo4ZNWI#j|Hm?MG-xSXS}Abw!I>OF5c|vm8J| zVDsx-RZS{7-K?yvfe@&pl8qLD7h_=$*ajN2tmX7hAZrv1KeryxXl8pRhd|#Fhg@Fm6QqOwsajikHC^`^=3gaz+Qpa zt5k?Jil?Ne%Jl`}Y>`k^hdm2c&Zruto7r44b!9dWYghN?{d+O2H}0rp%Vq8VXbVxtEz8%j+eZ++;`)VW{k{^{anna~0$F_C1>I{`@ zVdpDhG2`bHGEV>sW|CiLEe1YwwSO~3Lnq{Ys$2~yu1%R$Uwdb+G6Rb3^gsD9Uw3Mw z6dnlf(S`Rl5TLmMg95Pektl5l7Q96Ptlw0q8V;F)txe0rxfi2+2%f?wFY@VgS$;{JRILj$>RK50d3A9dE z>;iI_Y#;b(dc;Q^K3uUYB1g8EOC}n3*0(TUvXF?T{KY3x&fYGxS4<2{l$WjM_lKPC zp0j=4zM^`?7NUVd22T7`x1Iq;oJ^vHsZ*(VE?Qth`axr)Ou3&Z%b|^wta1`;64I#0 zdYSv7utr+%cW6W#$ei-Mg$r6qU3&JHHdieb$dIiG6Cf`D<$8RK&p%A{_+}0|d{NR3`8D z(5_rNg#s8I6(+QvCq(w)!gS0+4Ov~Vi%te?7P74!Hgsuq?t`!x>^c#@?}KG2-PMno z7F1G1Ke*Z8kz=KlimstsJQ-UcT1*UjJ(vSfZafBkt}6N2G^mEuShNzLuJrmwIO^Pb zB1ns`nSqh)OILt=>iAIWDb&D-t_&d}!$iRXSm3E%7|1LqIu`ifo@>1KwGJHcg`Fh? 
zY4QbdR2QHsO498^p+vWZtSll%)Y9;B$i2~}7Jwq))eq|N~NcJlDNvd-vqk|ry7x#5-{9tXI??>g-*0L#&A*ZGJ( z10qR8OZ#z&<1X4BIlrzZm^*|0ED>jdQRszWEqQjZMzh{|^egm{eb0QmXIBpzg!kU1 zD^@S*QBM0OFr*kZQdi&M3Zi2%Xkh)p8a{TFY#cn$AX;50gJB6< zf_+ph*btiyT_0d>LOj3_uq&)VvcsH(b4_`eKqHRv1?rK)`!M??otT5oMjkaGkC#y# zQK%r8v-}G*7lq%v`Cx3!2^vny*u8a6&c2iFBR$<{9zfWLhs7($NVynXypD1g3V#Fx z4k>L1K}GZP8y*M`VPD+bMj8U7T}NVJar^7e(?9%4~YO5OQ1vq z119_uTyC^Zm2K`j+g!DCL0{Mx&LS@`z7TD=WH~M8feo}~i8@Tly-sF_rbxblnm=t@}lamoTWw6kGxwMUR-%mYQ-L>h5YktGg@J}a4VU^-A2UcT5*k$uH>xrK%Nr2GH#{}hBr;7D^RsXdPkuPifFI}RC7^LehXiR>X|A82rXg3@EtnN_67Ns!K z(=~vjrz9s!osCB5G7llQ@zbdtYvGZOK?@t?enK^rZ@N`1xV~oLu8L6;`i`GphLeHk zgq>606cAq{P4pBosv_i+-)+3t#6{wO9A#xatHVZ{{d>GWpu#5qO_5yq%>pxJuErW4Hz1#{EW*e^V=Ru^}qr zQ7hyk2~|%4`eT~LE=0)1?kIT7;~eAYoPH}afesND?)#u?2psYGA~Tk_bkW7}1Uj3^BH0|dkQE?F|s7%M`A zn^5+SKsqoe=nX|#=vyWuTU8>ioajh-8I`xaJHROS2plI;5GR6rCA4uN0ZgDGqQ3+l zfx@eM{lb?pO)-mVZcI9|Lel`YWd}?1@>qw9wM`_e9&;XZmIs-w<9{G?<}O1Z$nl|K z9tVLxbWEK>hf_5oQ_xq0dWZ%Y#4fh0BDSdoV+uly#{o-9ST8Yy*&vETL|zo~7o;9d zA}eB>ix3eK^$L>xqL)LBI1eK9j!a({U<@mihb^m$PQO}4WI!=oBol*9kWz?4IFUo{ zf=))!#9dP2hTTOMXfbi2<-;(Fx^S)eB*=th9_%#6aTy{F4hohaKr1qJLOLhfAE2!z z!8#;lET_y!&*YE1Cbxa?pn~TEc5Q>f-3q5r3JtMWJ^21#UFx07DSCfs66 zl>x?25vh4#N_gG{W<^8feZc0+wrM^NjT;21bt+t*j{qf+2}%|3-qj)R*^F4$5~iKJ z>*P&0&nnE*jk=HC0{{qMoRT6DeXMD@4wUB$KF=sg}5>sUu7f%0dqq z^txjvvWfgXQdc)Hd?LQ&-@kb?keBjdpOK2u=3r=UM$AolZ@8y#+X%IH>klysMsS!` zN9U2TO;-7xj(TEO)RYtw<`ErT?A&@#4+K?F!mx2FHUdYK zq$iq0=hynj6>yR6j~3BOvUqZM8c2djVR%%ZKYGf$>0z9~SAnl6G&DefsimBRrrE|X z=*bp?J~4@a&dx*;m3ShL0j<7rYD;1*f+?c1NA@Nhy%YAgHP)NRBm)jeFPb<>>MoEZ znV2d9P>=f6r+G~z07O{b5_^)c=xs~p&lBmtpQt6+3#4jCVmJUToZZBmVn9H0R1~|x zxwqR1MRA)YvRgtZBGPS>5%;zcmrd$HL&dYU*zKqorJL~3#op=Z@9BRTN*B{Z_RA=& zad(VgM;d(k*Fa~K2Rdle@eyly#2bY9L6lY#a?7xJFo4!SQZc}?1Y%SJ3wl3B-u6pO zOv;ZwU8DISPk!vGI`v(D{jK65R~QnG_Jw6UGx;4aemnO(Ag%Ff$!d;JD6yeFp`)!G zho&>4r$w{<#fMtml)VNAf+n|q1PMXObiqnW2TaflU#Wjvx?OS;x_2m8X+mp-=mCL) z(Nd18guu*6yf4I3ZhPD7WcB6EO>@sm^BB+L-l!I6P8Zho*25}h>}XoP{%%kI`W|q3 zR%}I#N%Xz9S;8TaeFqEG-F;lV6>@A2pSzg|*nSwB zM^Y+F%jEl>6DPr0I1i#$i~Q?pjV+JzQr44DRBzH)INUK4FGl(`ON9keO-K z5FK_T%6^1VJXEqktGn+`@>obcWy5Hrm1ZANu0$_w3c9FC^JNX>`|J>I^g)G)NXx30 zC}6}H890eVfi%dYHtSr@;o=%7h5;O(4ce`;`ydKjO9rH2XN!x8Wet?lSIg4XH{mQ+ zP*fjt9+9RO>k#;Fvprc8Q2~e+`9HrZo(6e1IO{leE7AN7-G;JDKWY7dcg?bEmp3^7 z>1^mD(ozOhB*={n9t4fJ%!hsq~`S%B@OVG^}t=n3n<`J#sI?OoqPU zOkHsZ^2QXDME17WgBXcp%mf)IcB^8Ghp=~Wl`F|r628PeAau2I`)jJJI~;t~q46}^ zjexfoK^=N#iGDluO3U3)H)(n}C>@6Sk~WzMM{FCU;XxvQo0!2Ed!@Q5dXuMtg~>Xc zm5@g45)fk$HQ&%IjP!(YjLz|bjU!U|lM#mjp^p$!CC$8M5~9!+{f{5~Pws~QVuXC(L<%28r~+boR|BMN>#-Dd zjdqZ0N8of!ZmPqVjP(K->n;@!T%HV$#6mi@Uv_+togE$(H9$**a3F}U)g9`wMefdBA7PnR7mavAXGs5W?!VGubl(JMJnj&R0WpgmAtF zHAv4gHgn1nDH5d!AMjaL-4*=Ud==$EV6mWw2;m$3skV0}%Kp{L{mkCxmu2J`z^iaX zUj-dAs)DI7EVZ4NC0_|=xeOahh$tdMT+KM<{Gps3833Wl>FD&J5UX%D8oNw7Zhdu* z&8bPjV8v&aKcoIQ;Og{Z#{2(d`A^n0I!H(N)g_{5cnL)yW0$7^_rrxN8HCvO|1WTP zh4w!-l!V$wT-~fHoOPu0=au3IwfUyT?upWGY#`H@k<$t7YWHeOLS`wQIQbh zcei>nt{1q|F4cSh9>?I=ArwmV^f|}T*@mBO$=(Yz0=OmTNJeY?-RTIy_h0cJ>mHxl zOJ*;Hj)Bsn3*ds>Ci=2Y`J5kQ8_)YC4PD0?+AlG zkBs;y!r&77lW8EnuCOD7L7!a+0lSB#G#Qs$!o zj=t8Pq+a4wYZ3Ql;q8M$WNBPwT!?%M5(3itN_acYD0t5#XX7eH#OcRYlCjL7bP-4x zwGLJY;^V{Lj+8?l1aU~{BM7gOBK_Z*4x(3RYMnSigwrP-P~#wDfJ3A)W~Tu*^RdFh zbQMVvP%Pju+RKcu@Y?TV>xWiTG6A^`raTqOvIO~@LFEt>PN`VC+~%{n&D&KG;GC0= z9_ogNlMQB)o@%D2tp5yh{~>5F60o2g2Y|f$JxLVM4k0{;k{~}7KsP}|5Ch5eL6`$S zsz>1`Noi@)5seJgEf0K41i*O=lB(u?1J&{cyJ4DXf7-D7MenFcE&Wy7cIBC9ux8SXu(^tYY&NdWA+BzN9 zQ>))yf3KqFVNLwI?Mj(Budf+MSM+4?|KUTOEl=hckbFBWE#EEGEtL^*d7yjLbe3vb 
zxz|pidv@Mqc4npv57z;iaZhjWHT13q<|RCSEC5`Eg^39i0INdqnjLV1=0^l6lw&&- zL+V;u*nv0&14A!IX$E!mA%B>)a)h_0Xq{kU8tsV-A$VjLLaytWj#7>?8;82OIt$)a zMe)d&A{Q$wD|-e8*d1*V8OIGH`rV0fZlzX`S`S#TS0Mg?E1eyRMK~7$bVwO-?HwoT|vRq^c9phM;#lNVwlHJa8KWL$qQLd zE$v3q!TvipJV{+mc@r#CZO_`@7`>v3Czw+pdO3wPjIuV-p-1bp8-;seiG^fPPEOVj ziXv?#j$_-aoRt+Q*Hn8{nvo*5gN{`rPRoDiCZ2CqAr@AYTpBH zVFn8CGeWZc#~ax3jek<9^+5Y(PC&(G zk3GF7fNpS9J(`V`^*YpPUPCj23nC;zAQ=)QzFhyurz$TAK-nA+ya>i%pR#h000L9C z=NG713(U;&_+8aZD_H;VS7v&#Ae%EH#(fHhC#DiA0bue;OG}$z5-oOhpkB7sF$^vn zywzmanlFxI#-MZ+3s{r>)bjHCPlxFD@F>=OtTYobez4wQ#JHC$+jsdD^S+9X4&(QD zpT=B4$`z56l-` z-q);{Vl98Sv5B{2V~)PcZf#=|gigT_PR2EBl=^pFh@2c70~RLhP?3RndZR%}k1zDq zYo7sq2({`R98{-8`H4zyDN>l)*D~TwtGebWAP}Ey%2{xPX7upnZ=JE3C)OPDMc34c zvGMUzIPxy&l-JbNF+-+DvNs<^o>JIM>N9XfM3X{~T-k5=_=@vpE#@T_toKTN#FD;! z`vz{RBfn9xEM|OcK{BesZ&)Y^6UhZsk*g>@F_9OU!$z{Lj^E=6>zxrbY2@!0j8#T& z!<9?k{uYO#qoZp+eUkdt0*YrDJ8~J{V`)3PmGfzV{Q8`|Gt8&gT#}})6Qk(7`s6fE zXLTK$t0A_Z@rxh%#Xe<7KA)!8iigB*{?v7O$M3x}40*pQ)@|8QaCYx=otp9G=MMNJ zi0BnE)L+`ak5Bpy9o^kBgV0$RPS zGI!uxSSbnn|2z%%@+B7E-=;XmezwYN))Z)5A{D!O`Lt<+)Q#|y0~6W-;`^c|>`Sant?PFTDf&0=L~V4yQ@yYB5n*Ll0j5qUPfA+Gy_J>yeRbj_T@6F70yJ0Tvp$ zTAA6-eZ`CvEytFNJB)UmRFsru9}Tk8q0Q zD(h4t3~xnWEcYxL6QTsJk9`|qWpm2fT|c(A>~Z#%z(z^dVJ=4uVP9>%*SMnRfxVQ z6%uDZ?0uT;jSt0_#ciiqzu&H^CM01SIOHhLWRSUiCQi=J^yHX|yVLw! z=1kNwxtgdUnwMjK@+zzbz9oCpQ#1B!U0ieTzC;*qSd}?+NhnG-WJb@?Q^WVZg~sG4 z^_!D6MBRS#JFcZDzF`&9a>m=2IydSnbym^3dpckE_P!HdSL8-}-QDlNnkChG=8%t{ z&dGgr=DSi$UN|B((|X2_R@g*kpAD z26$&qf2+!<jwgRR z;xe49Az*GZ$0%xjym4a<_Y#Xk;i*lE7p?A%*bVaK$Mt$UA6(F!HI?*>TS%O*8Q0g{ z@gmz@)5S?nmn~cI@h`E-YIh`FylJqGRm@f&+v{4FkrB}OE%S$TaIvFU<6s)?^L1yL zrqUW0tGG=`$8x?W3)19H#@SremC9S{J53NZtP{V zYuh5w9T?qIb529;_rkmxYd))Fd?o$jr@8G`>27%FYAy23NK)bM!(HS**K;?;*&iHF zyxwqBKr8kA#e|{XTO`k~b5fE}7tSvzuRPuItR~CC_&Yvnwc}AE4hmRe(O0Ro+CGKD zP&4;NW_I=kX9}gkeARO2q}PxDb>%GSVS&a%|aLJu(Dcs<@q1W ze9FR`Df2CkRi&u9JQegkoK!2{mN+Ys*z!RppJ!s=two3fYxwB8*!**Bu`zDn?o53( zqJ4GkxVYQ|wD}VeuXI=}y=&UJL}Wc4&X;aj-f*t?gX(G)>gE`y0lkCPzN3LIlm_w@ z5#G}gqe2Vcg+@CWy(Q&GuW7yp1jtruBI1QIuDJoLdEDVap&jEkS>L#M%4Doggd2)o zPN}f*3HE>1acMR2F_{=A?g~u4%Wh%9j*1a3!9pILt4~CfojH~~U;3yxsFD558&?&9 zl-dA4E$6(o)v(MV?X zCWUdw-A)G=;#<8i*^Wy5IP$31iC~~dikfREDP?74y^aBEz+9gr?0EddW@gWOno)He z3&V$IlZ~!M^@MHR8qDrcaOL`*wj-&9!YYPe&+`p~_9QHr_a(H33dMdt8dZVD^SQa1 z3Us>ik3E%#!zV^L7i{FkQ)M`3-E_qkx|g`Sgy_qDp9tDp=9RfHR~43VW!D*8T6T80 z-@9v-2jt!ja`d2uUM@jhW|XQPBQJ*n^~4%JlL{S6uHi|qdsQ(df_Fp0N2Sj^(8@m` zcfW~0nVZUyeYncw`|e!28K9cLJk0_*t0F~s5JBHQeXu_JR@1MV;|q6i=z1L5cT4U^ z(H52HcQ!tlOcg-l$hdNJ+XgUtjrg)0S29pL!98`KTf4Ez=P54j_OafB2a-}+mi1sD z!yek0=7QMg&!0^oy*P@xRXGat;m#-OQ}G?AP8FG{9%C8aqB(pvDXm3t>)gmy1@`v& zAr~@omWj!MHr;Km&*Us{cC;()M10duzQSrILlHXo^R$VBkqlPme$NOFfT5z!IC!Jo7eG@5vceLgpkNR38t4 zQnKT9Rq5u@V2@gNmfd_2bQhrPmudFlZVeR-)xCm+BQKxGyI5H4gq#91Xv7`^tp$65 z(6J%}<#7C9t}y%QBUlpyZACJepU8%yN_k?I7wh{&Rmn7(e`hDNT;gSm3VR7^>mA!o zN=hDJ_68x3D@6iH1u}Lw^e$bWfgppE(rb`2*+Gc;eGMZcuV))5QP3{u0l8s4s)hcb zik3ocNW*O&^XLiT)BKcR(14R%V(gkQmf?!L6V**ko6-Jt9ZU!j%u)+?g$z<+zLQV> zEtnI4J*61-$O$oeTYdW%OGhs_HP5&gfv^$8~3%N;@<|kd~MqG)NJEASsd>BgdbqQ>6>!}^DW(5v6-gl0$G@7To1@i?8rYcqPZ}pX`0Src?G!{z$ zdv--E84F|Mdb0|R%5x#Lc}+%ca>WuO*^$m|X)6^HnYa>lIC)aG!<+z0VDVhQVgv)f zEme{KeiO1In>A+LAgmuyTwE+^-=Ub5lcS6WIoDt`-=NwA61b2Tnu*Hc0c5^r*BOsR zb@8j474mhPy+B_rq1;>p%ZgWi>;YqGe1svZr>AEoFfoy)=%3fOf0jB}vrrf25)kFx zIVU-#vYdA=O4%dos9RvtZR}(6TD%_xxI+!0^@LPTxqbNXArrMIy-FyjbM^ea2?BJ4D;U9iMifL=ixeGYF6kmt*cQ z6N-Q#sD}UMHXqbet;uUhwNLq>X!90Qb#};_M&X-oJKLC>n>(F9A1t-EII#~bFUe!^ zbiXB^D=Ro>yNTox#o+IiJvo0PYbEU?GkNKIqegN*rso8-@s$@h3B$G(jz=3j6Vo2Zb$t91o7bh^a8a-E@7HVfXe~&jVe? 
zlkE?7rZqQO@-TjSGpM{9P>Ew9dhFuL?SR?O&15WG@Y{Gq1X2etScX}3n|CGr7Sksy zEKV$`*X*DqoU%#?jM1E=G|gG?`-|fr#U!Z))X&c{>m_JKCr=*kzF2|BO8^C~_Sv>c z!YkY)>gVUJzI69_P*MWZWS@{@U~3>W`FF9|9XDi7zl)%#7oT8 z3e6vdEh=XOO_B_id?FZd*ut7my*aW+R*XV5Fs)POOcBa;iH#6AwFxdSK*(z}U=$pU zGT?WtT+?qj4uE~TN@)Eh zT>#cX-U*e+g30)c_k_9kZRZo>0za}{!vtcl|M{6EKf|Mv1i0PAsPj|&?`AfPAcTJ7fHm@qWe`nfs3A1n;osO<8!x*dyZYM-a9(;AbjSa=` zo4U>#KEKnj@0u7l)nHy!d`+Jk4g;I|7jO~GL5C;&To$s}A)W)KCnAp3?HT61>qcjO zeEIfTAFSesdf_Am_*c;2?8&^o#hb@qb{6pDN+W}oNuKyNuOuqe@xCusiS@>4dhs3B zW#hcLi!R0i3U*5F?(PGqLKwmX=smMP-FUVk>;3FCv;V#-PjXe7Lo!8s# zh(r|*C_bWckPG&8AYUruok`{og+8LGzQ0(mFDvkn!$L?uZ)bWzhz$7G93FkYx$#4 z@}d*&V-2boxRCexw~p8qpIFcwNMY1aJ|G!)l6{zaHBPU(mMPY{ro(t%Av0B8l4UL3 z&aep|olr{jP3^%ar!;>a@AE`_CUCDz`B7DRCuPh9p7M~B`+0nYN!dMo;x<0f*f`^D zqU&kDhn4tp%N3d1cJ55<46-DoylD z=RSrc;bz|hXY)`VlQJ!1!^!^k3lxcN9N;%w;kh+@n#J2)MKY#a&M#B@^#^`Eupw2dSmA${}Co53Vn>j!?ZgNRs zs+~)Ps%hH#_LN6~6y@_s&k1q=U@jm*pF{=5{shXAoV^T4A5>EKW0^vod_3MKcaRCm^=d1s`i{ zpip?0SnTx0EQ99|{irZDSW6M!@8h>ePnUrb_=o@ciZl4j`c9ONDBFmz(qdg!r#>=T zzgewZtBR^3{Y+jR3o(#)({Ia6b}R!Ix_<2NUny!}PL#M1d(&kW;p+lCtMMpIth z?}t$5-Cd@U{^ce`{aae)vNu99d7&!#_BVE=KBfC!g?|W=sXkND_dCWAt@sze_(-2F z-5dBztm^T~EhmR0PhD&HROaflOX5ja0V%Oey}oY8z`yqJ=5?XB>cn}npO)#QS2i{< z#L{czb~Se#W;nO}?zXQ^AKpB9>X*(**=UTD{i+!DC`v@^okQam#;?ubRO^E-0A4#2 zBl-+7Ri}>&Z=!58cO4%r?7@goUbn&8p@P8H9i>;%rnNil#)`# zVPVyl?`)cTC1@u{#;FK(If|kwb8Ir^6T}0WNZLks_ij_;?em;T4%E%!h0|t@8|knk zlimIvn5e&V^WfU~d8Svd;&bQck1u0zKIoJ1ZR0AYQj^zn(@g5>@wufBmtNM2*m1Jn ze(u@9wTcEcSA6recCL7TWA!F+ic|+SdV}c^h8qkR%g`WN^U%+-j^B#Xhk~)<_-|QB zr)eIJSw8tdA+-#y?0@~jH@*~f80GhO!<`RORM9rg{b}lan>17HA*W6XB{VSW2WRi0 zj|6lPX1iKO&VJ*}!}Dz_Ay(b$P_3l%+4kA3u7`Z?+W9syX;}M1dydEq-`+G>XvvOC zSUj3svKWy|IwVmXojaocYK%SV+~<#UlrQ1;eK!)@q-fYHrN@NhfBrJI1oN1gz*aip zJMlWEo>mTj3a6&his2$t@m#94X^s%YQ0| zA50u3P!U;+5;8J`&~dRr2(5PSqohPZB^h7Cw~QirC5WxO@?vncqy4I9404r!orIR@ zKScnh_tZ?Ce0J+r_~|e*p)4`6+xXy$YU-MG>&h_DTsZ`2BAc(TuW9-xu+j$*!64zy zpXyOjAoSRz^LHqeuQiVL6?a~WtVG4Z7FfeUY!(tAfbYu%Jn|Z_AdC0TD0^KmHBnN+ zzbM(+4fm8IMBMW&a&DwxsgDP9Y`Nv%!ynex{gKuXE?~p-X?65!NR2*V^xO&VWo=J? 
ze*i8#C@(!Vl`r3`#@Ydt9|ve~2YRLD=DHv;?Q~g9nKoNhF8O!K7`x1QeiHK_=`l^u z!*pWhVc}wssWL$k(EG!9-?9dJik1!inB?CjP5k=LgomC{gY^ns9U#vlk4{5QCMHND zg+D>~W%l8ZVp1k6Ac0|73IbfbtNJmZDphb-pFl2qkb0o>J| zZ;MXre-%vpH#-`x@aI`u74r*FZ{tZ&=PMcdw{a**}|Mj6>V5EY=!{p?#9K9@`^%U-KLH4zewAM$`Q7REV{OTfL~sSP_|H?)B@SXi>Sr^m-TamH=}_*;ML zf819RN4`DqmsLxl7jH21AL76N(UScSQ1&kY=+oDo7BjKJxpTcXE-tXOl<;aS?2YBE ztxOnErxJ2Ic$UOFOjq`wccw#Oj>967{_nBm?+1PScG)jt$+2E2h%f#8nJn_S8vgu; zgQ=3*yN?7$1iV~~XmRPPEB+=TCbYTQ(Yfh()72~JxV@b^eRdt)d5hLR7=$FP--_aM z*x~;jULLjYpO*DZvQ$4kCHD4TF6)_cEURRYbHB#rZgRtw3_+@Z=jtTMvfl89wT(g~ z``L{^gjpyXavisJSyP}s9Af1wUWT(=G0H*&+nK!N9&r=6(PGu}!*Ne;~O8k!j z|NGBT{_1-2PA~wSuC6@~u4>Fo5vBp^cMkj~o@~3#f8rOTnc9dwm^|vk|M8lA6h@R1 zo9oeks{J}d2+GjUM2u1|8kWwtDc}hICZ4S%B_x4I zO~^d#`*n0nKeH0omh;M;hV3g(Q%kZ$dk(v3WB4n_1X<2R(H~pXqv2=G4EC%4X}~!g zRown8)|Kmv!t~cnLPx!gKqouqP8t{(AQKetEC(T%6*c|^*`*t`t>zV>V3r_pk3vQR_vh zc>iy-RG=baIa3s+o?L*GD+Te z3g0GW#$^+}j!H(~DfhRsWW}77%KQ}+O44RfCkYoM|K;zi?`z@#Yoe`;SFmJ`!(AUk zjFw$cRobWXeWijtDZ@`)_1?n4!Qp@&qcT*~i+1b2`6O94j3GFSh3cB_LH9&g2dlDI zqM|ubce;7SPe)#N1I7R9W_~^4IRXT|T9|KNkn_L^)|HS@I(-_g-?D{>?E`n8c|ya` z0Iw-DJ#!isx{hAv0%}kq?}75`8ZHGdk10}Iq-7h;X8-)&$!4^|j@Z$@*&WYInK6^) z56S%cUREJ_>5!uC4VxD~`qBIlf@c-|ol4M<_SvHLfP{pE#Rx!Q`mJVJmcpZ)=-x>+ zY3gaWKmqe_)2agNWBK0XbOfcgw9Ae6;|s5&i(|L^L}o}SUq)f5vy^z1tE70Kz}L~a zN3~0O0*SPP0~%DOnt)ME1+`BFJwrRoQ^A#zzNkV;&&sjJXVc-iT4* z#P^%)AF~nV)L3i!?v9(+(Y5Edrg1l4>TUuzI1Xz_#)FYq&EuPB%r(@Yc(-mv?YWqQ z1$G`(Hj=yv2ndWq|3r#eYkz@L;GvebW7e!kZy$#slb))qNZ5i8~sDJ?6&BS zxksBmURw{JKx8BTiN5pSp2AP{uDs_3Vt623&8}t$FQ<^+VW6dg-ETe}dfg&?rwLt!GO)Lj0@oj^W@Na-?Z$va5?mm)f3 zMgRvAH~Yy{2Dh_mzQ541CcL?{bhQa~8i_p9)aa77m~t$Vrj+1JzLUZ+p~w&vu`!;a z6PURU1&m)5W@Di}UQ(u3}S&FjpG|p!rX&j2_-j4~k1Ae?r4QjL{vjrt= z1fW8ViK1Phx186SH75{*cPq?@_Uhl_o&U^v{;#pO{x^E>|8oKV=L!6O`~>E_FPU7~ VBa;}y0s1LL^`M4Q`hMd}{}-3hDRKY+ diff --git a/examples_skyline/accuracy_vs_fnr-skyline-order.png b/examples_skyline/accuracy_vs_fnr-skyline-order.png deleted file mode 100644 index 27de7a95802e9783e412cc0940848099eff9e369..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 43584 zcmeFZ2UL{Vwl(?ziUAQ75k*3)AP5SgB*_L)KypSxE0QJW3@W0Mv`9*poRK6sm_TwW zKqQN#BIgWmZMyrMbMHOpf9JjN|8KlI#_ciMvdXG2>~HV2=A3J;{X$OWHu*uCgBXU9 zOWe7ofMLYV7`8WNKPh}--)hqaFMF(SNGR=x|6KMzd=BpqSlm&!#xSyD=zpTtUbjv7 zkk3X;%|_8&-^TvFl^$k#-^SvRxy>V^2fx|rSy>yIn_XqU!p?c|H$xj63w{oczyCYC zxs?G&+&upe4Eqg}xOH90A!4%EQ8U=KZ)c7-oaOOv#_Z`u?{FSui65T7?cg&WXJLP+ zuGrIO;-O$at%~y!=PG zt8;z^vyhP1$B!RFba?e@kN4VkYT@D|B52pv*Hh!;<5TO>jtveDY8047rqyB19&po@ zd!rnF9%zWTrnt7YMreuiPOB@5Z*GC)sz{5T7h>+@2b&k!SeMo8IXTZs80GE+{ydiIeO9=E<&Hx(UPn^0GeX8KI>05O%H9H*ZdIn{?cz;rnW(4%xA>v5XR@PMsq3mK4A*5<4>PO$4}a z>U!1jJ1?gXSv(&bGqfFRzOK-9e}7w&tg4by(B{?_x80;#*OZ+^1?@9V(;r;uDo##LUeVr+m+pPI z`}+0kgZuZ(X=+B4uZ^6pwuXln#$y@=_kqtdQlz4$Hks}zI!H#Q=yT#yP(;LJUV3Y* zLUn)LiAx$R!ou3s{&dz(47vIZ3WkQsW<|pxQXP9SswUI5l;6ax%-pSX@BZ=BA7FNd2i? 
zxItZCPWSb;=({&=Jl5^nSefC)-IbSr%~|z^h=labt5-*IO*+{Xm6RUB6*j!2VyiEA z-MDLHlp-N3%PSqm#o7JA^lH_~T-yniwsiHYxpq?;@X#MvSz9;o_Pgj#+Lajfl@`_1 z*W0ACoj+gYvN=~B>XJ3pUBGM7nQ41BfL`cF={g~;>ZI=a@|4L){ad)45qyfGeqG3= z(5*?vw=?A%jhu8RPUt_-)s=>uz2>x-Jl>k%vo@i#W9-YR|M?kt70hwLCA?ylSwZ;Y z$B%tNIuoT&FMOreIjQS=&2I9|koEWX_n#a%c<@Tq$%Rj39B=4_oZc|FZ?>G&z1x|o z)sU*3Mq5|7&?wNbINs*R+1MRdYT5z|G1q=ZTTxE#chW;Ae~)roH-bwmDk?gE`Eql* zW>ITUP|%xvlg_ZpO3`Y5jy(A8(9n=>N2Znu8mT;^w!7Yk>FT?34d|k!Y;5vsYHB!P zg6XeJ_a+*(C5|jKiSXK$^a?J2*G1RwvOb;!gV5))AZ@+1)EN$|>tso<*)dAWhC<7s zh`#DCFDRwm+}w(y&taMemPJXD^Eh>@lC?_gtnBSOzh~>VrzkS(J$yK3?!FNogwt=JdeIJbqcv{IwT|}O5$W9`4g1WRE#!P=WvW1931fAWpDe_zRt?Z zg6DnjZGd8c;QaF+`6fv)jN}Z$ngVu{7J{SG)0x!T4%hqo`ixuSYs9ja=I0gk^%G^H z1XD&D!qW_zqWnjFIoDyLC&ME!hB<)35aW21^R%VQVOTM3%+W5f2Pn^L%4fhb&*HbA zPCqXl2Fs~WexlTIfy-u0j+~BPX?l8^M$qBC)5>%PTxD=c#9p!!W1SV-`J@!|7vXY+ z=Dtw!TaWmbZ_Z04H8DYqhAH1t&ETGX|MRo)k#O^nf`S5Vr*WpDlAZaq$zJ<@Wh<+k z$4{ONPfcZ9P)r;f3J|g5*VL^Ix&Xs&^vA;^=JJpXziQzB;K76AM~=jJcwn$trC?NA zJW2M^dY_M&sq`d)ITV@3 z(^O61Lm6jd0b4>m9BC=OrhXx$LbD?2wgrvb?hB~J+jSZ?b}6G>t|CyXFftoP2i z*|*z1d-~My=a&~wJ6kJtuG^bC$L}bV*iGeGkIGc%afHA=MFG)9^ z?BGG0#WDG$CHL8n`}|iN%G1-+eM0tO!5`@jv0$t9MI#7q*ADM{Bjo(SZt4ex31_B*Dqax&G6KBPcL1j*?ks ztX);7?AJOO8JR6i!hGX)$*8C(jWVZv2#*sE+XWR{8Tw688A}tLX?b}{j*gB$at#88 z)=Fl|?@QPGR#t!L7wMSrmdqMLig=(W%Jw z!b;K0?Xvm#~AR3%e01zlHi@|~=T?c&n1vi1u19jj3Wc$O;Z zYVV}Oc~aiLmunK)DQ%-tKQ9}V9v{!BXJ8P`aWws+q?laG)0;^IF( z*=I8OJ^SU$mkL?hW$ey(bKp09P7{~DR#&GvE{woLEEo4ZIp|O>8!a3LkB;7Xl5=fi z!=%!K$X||00v-lFQ>!E*bJOzLP<24(XtaCgJ+mS)046rRS?amsfKJddXV{+ImynRq zzB&h2*{JN`P>8UVeZQ+JPxw@(N*}JoC0h_n{mAEjZEtT;zXGF8jxGBcw|No*%E+ z*{H||^eA@gR$(zMW|5+^MNnPe=--(pTw!HmX1)ywgZ07%OWO;TRA)rv8cW6#!x4;D z4V9k)6d40(sJ6df2k260q&tLA0-$~fIb0S4q7ZLY-Ctvx`b&iN;?XGQOyg6EiFaGH z?0c2PsFW`X3Ti^QwZ7~9>=~sHsA6-eP)+uEpTX(!)sz)9N@rGY600lQ26`SXG6QoAalzoLSenD{syT?QnX>P-kD z>oYss=7=f6?b8Um6e57|^ywZ%g0r);Yinz*>UV}ibs%o3Y;SKPs1~Z@c0oep{qLTD zQ>T;`d{15FdhlhxL@RKMr7x7SEL>bl5G6YRso6qoOj^18j|6w70wo4M%h(?U=I&ih z2$sC`^~Jy8)WsKC>Po-8BBtZMmA^dMWv$1+V$hahNIt@(7CmCoZb}rlX@HcKfzY&Idqn#$TQv zJ$UGl5&-h0?Ts0MsPHR#FU14sGtl6HwgAW!->Kt%NdjG;m6a72z&cPl2u<$Wxj8vG zzYs8!{WKgx$H0(EE+w&zp0%{ojD@a2f_;0q{1z@i+(-64KJ#Kt2)m9{lVb9(Ow+ERb0?3|Pcl zKvF>b>YtIH_66MPNJvTY?KR^Csz~K_BHC>|Fan+zPf)l1XFMtHDiOwwhi`8vyZ7!hA zB))&&TngF!yr5tYj5dE?XHv8qL9JtZYr~HbroN3>-SYA>51VQx*VnIKN8sK#;rVX0 zyWu^UCOyR?XW^VFfUMvtB?HLga#}P5o~pvPb1pzI7LWtCQR^)P8JgeS_CoJ2JY}{W+n>-6O{JN%9d0dot%v8-*AF>R>EjBTpLWlrv#ptiC~eH zmGxr~(bg)qX_+6c8y*^p6PPZ%nUa!%iw9Xj#(8A zz3V$&vE$b8_G&e!EWkZ9J^*URLEef3C8ZpM5?1wPKipAc`bN9eah$7*x1-{o53|6m{)Z)oCqe!O@BvF52sd(AWBn9 zkB*KC5DG2I?%%&(TUQszDE;R0$D=VoE3*V! 
zVW8St*H{A=gm7N93#_^;RV*AFvWTp}W6exWM6}P2G{V}AL$Jau<P3!oFX zFIT)Df#f0s&kG)WCqy8VqQ-CE?j#9a0qWDS((j%H@9OG^2Q)^Vo&;;30Fnt9vdO3q zR#3A0&i26r2U;jZHZ5vwwPb*s*-U=tOft`JobgiViuz?ODJ2yg9-anVOUq73HRn-f z`~K!+N+5d7FmKD2I_|NuvbF+`4|@IDQY@?NPkQY-U&9Pp^L4M8s*_VO#Ij4BoUl|} zH>L^@7kKI8lmGiiVm*C*V-US|H5Z^x;J88h+u4xgQgRTg&@@u%CCD2)FySwkXZYvHVa6Jp8NG6M4~ma zA9!iIZ`tM>w%kAxK>mZT2lF3wwQ@E+JwTaf*2|AZ3<#_L9Av^2Km&;UBRX`?ysW)- zDXjwdD4HoZy$>tP$oPkG8O4GO%ohbbBB((>P|1$(+b1uw;Gm#*pvESEL}aS(kt92( zFXq5d5@0oTfTV{9wG{A*En>xtHINKZtPkPP$>cHpk*t^`^8+MHMk9!7xLPMIz#JUv zvc|@#kQIT>3HZ@oQ{X6_eT;%%^@4qmr8lJB7<4w!2hfl}aAlNe65dGHc3aciyJwFf z=-MHnZlar6gX}Ipl%ge*VO2cR8VFSjC z#p{ERP?bPx;ew3X>oDY}TsN>V)A1j0|6jgY9;=QFg_K!oK-8zYxsP+0l+)BpfU zN=qw)tU`-mjyMQ=Z9*$Oad0Pha}An0fP87Gabs#Xvq1aNrC8hxx+Xqb_jbN{zqTI@ zcPor*p3S&IlH--DS8eMq72Ls*9;S6@S-|AFTPCoW`Tfn*-|FhDi&vc?uC#saX1c)7 zuK}_W=9&*7 zG%6~yu#itR)N=vvW!hR|Z>}0D$24Y3E~T|v(B6z+HdWAZ(y~n{+gY-F{r$U|D?HmD z748)vk$@tVc=Vh&UxW3pZ%3=WCUjuEf?VmE$wGy(`e}mb-TkvP`S}%^rJUu;v#Q6hWWpfnRIMlvxw-7huqe ziVvOAMfw&9IaVeQx$dB!okl;K=Od+gUujm51}lH_M#?Ds4j(HsZm3`MbERD-Eiso|bzxD__*^xE{arxNTEH)|OCur$>{-vB7YDTq za4kN+Q#%dnS4eeGR-57gD!~2d`(=q%D0L-6J!~vBbPbIil+dVzD<0MygQ%=Mw}}~3 zIQ4Rj+2&JJi;~#-f`SZ{o_G`j0dJ)*S{2*RWP&1e2M3BZ3Zh1fV=cnk;fE`5+9w5PX@-tcoWX}J39*K6!rHZV!NyuBe+1YR-5 zv-G-cOd(8#>I&5+py8#tZLOz(3Qu!Y-xnSSlY~h}x_l_R7CzTNwwi8=T@MO5#nZ(T zHc-4AheD8zZ^dG()XV43n-O?{+-w5z$bZFlx=e7P{wkp(BOIz>i@;^Z>NqMO5!Wq( zIEMzh-+ia7ZF6UHR0LEwcOVvN@GSUk33$B;5;dU=q2@> zTR5As<}*l}{u33l=I_%7-hwbCXn<5O@8PMcA^9IJF&#!f1(E|Wy|6xqA#x$8J~<6| z?py3Kh(-wmsN*D4Xqjm@57Kg>4tJ#rU1O&Q5s8wfW;%#bgo&&Q709zr%abVxSd}f0 zwZtLC7ns;NfN*r6t^>KC;e2qb$T=JqFe+ePQhP57A(9_J6(pSpYYUiYR@&XHaFG3k zHzuzFb!wd;uz>W{&Uj%wREK5s`0*e=oecekuyG`TLi!r!D%Yz!v5R-OfXTr9*39)p z+V@=ms0;Tb?M=auiFevZPQ&Q#?vBtM%w0uL*8zI6!R#eGJ8dQhD0O>#rGiaB;2s() zSOjeXv|4|80nz{$*FgCQQd=hsfl=Fe^3yzb0eA*O%}3miCIKnvfbmgn7co#+yGE5_8#6;xqUL14zQ7w5IC)y>Y%%2`@w`*Nb1 z4}yNM%2=*mRfO7V&b&%y7_1k_CkS+BsAbOB*xSD7>%)M zPb$y^P+tYOXMy&xUAiQVdkFe7R6EqQ%^)s)Fzr4s5i0058v&J4bf>Vmq8Z1bw79<9 zZQhbd7s?R|c+AbM z9HeK<7L$dA1zW&ZsC0EzzrJ~wJHz^8TXCp>lngcWJ7}pe1@|_`6c(=L%urQ+9kcCl z?uD2!W@}4JM3a2Jrl!@aKXYw0$WJvGGPEYtoh5ouWTAb zUnR(~LrvVBnsFd+ERDC_hRZEO{AOva&?*c2U zBvAS|w74WtT3`HOl9d9C+h4~F2d;y2Xej`+ejghb$L%otyBQ;;B`PK7C@;dH zX~+w}xVzW_Npdv7IZH}b7K&h@fH6DR!Wi-?oM8bx>^4$d4Ii{}|EM)+{;cjpo67LS z8yNC7Q$tr*SEgX@F~BE&^g$Bzj4xoAO=2- zGAO!-YS{y@MM)1Bj*P6xR-Ud4U`#?!Q7$BjxP9~vq6mG(=jc2IStXPaF{3{-2A5e6 zA6iULD1@5Vk@fB(o<<6eUyI|=Ud+_B!Mu{%woZv!1d0bF)nf^52d)Ey+L~VdWwvqGp#6U=& zK*1c

    ykp)RU|#5o)exA<`%!x(53?e%Z3+5+vFs9}yS{gp#OFPIEEi3S8SMTiI7 z55FBj8OL>fu?5ME$W8-`xDBP}v9U29Z4q07(o}zi4wS$7-8KsW3!>V43g8!iA!($? z)JMO16@w~ppfv?Sd;s%DyPF!uTCE>=j56VIj&{=IWR6iv)pVvpr&BNJg2ThZ09F|# z7K@9Yqo^6bB;h~nB6D5d6W77quI4cfRXwO$@0RgEfmDFv8qm@N#1?UY^YBnXLOiF2rWL^3{+GJ z5CwW2Mg2K{zQU=27W0Vy?2o+W z$aDZ=HL=;kQt!y@70QU6lZ@?r$1`}+FedEjy0->RR{HF$1uAml@*Z_v0i2*^wGGh} z6&n3@SRvB=D041AA}$o`A&*Jm9)(_emJK*$9l)I}ASGaWEZp46V3?45^eA0;XVZEM z{sk_r185+JlPS1m4%6}>JsX(-_$_}vg~yFFWZ+L)Toq90g0asBe7Gy=Dq9TFb6}ZA zHLlC18J6P4Y?Gz;!~B$>54y;}!H~oWe=t!PJs4xm_Kh)DR0$@kOXAuK|6w25f zy_VHNpkETcn-B zlF(|I09%N9q@KD;-|{rX7&6urfj`2vL4N03#|7U5WLK*iGA1qQ%ciZ26)WO8)vGyS zKqbebP~VtC8-Ielh$W3FwPA(YBe3l2v_0l zSI)oOg_UQmKOSICj{e?gIJU~$eEEGOn>R+T)(n%GPRJ?fnPSr)jKu7L2dR&C=jG-J z)&SiYA~JZ`NcRE0U(=p|m$EA<>)@IP(jzeah?I-#Khhl_LB*iAYH5j=1q1m5<5FSq zp_q?xoysEXTD?_gX#c15`2UHElN=BF&v76DAfg$=ilG*Y2B!n8=v2TYP@HdtETAdJ zgyDt3b!zAVF>`U?3579~r=Vu61o;gFiO3Phwc$`IsAYhc0^!Drovj%yrAru|efLNI zO3MJkY6E(`MC%JmW+eWo1|7um{gcwjFboemkDwRGyopaBf`A7v$uaTnTck#fLFu7m zye$dri^XB=l$bFS_+Vo+LHh>gfTMTcv^4F?5&P2}@N0T{b65{Va0LYgMhTIf&0L_e zT5^}znM6dQrxfbmOA~nD=Uk!yM))lTJaqYac~#->G7I%#+)}{(UNeGaLKYAZaxnqo z8{*RW2kipMLVZ8)t2;UIcPE70567E=0DzIBUzyu3gVYy&kuV&nihc{`|} zOcMPr3x1$Df~#Qor4><(3b=elNq&uwr@XytK={yYrUN`eXt0HEff^t*JbNB&_IbZ8E9V^FjKLn(OM5rQx%PxZN9pvkxSS2`3AyzP7E(%V`8#ivywtef^2sO)Y%$2-g-SC9})z>J7^3@Ak-KWo7;E=S4tG>#V3+;)(wBdd_YUEKu4w+#uZ z#TVfWcw&$i;PwAZtZA0}{z$>0@Yl5VvSDsf@mUNbFv_Y@V=<)g`GZ)fz3@avYVll> zIB*~nR=o%Ra{#P2U;Pa$K@w2A;Xz9CID*z>2bLW+ULj3#7HIPI(nY<|X6Jg1FKp*o4JrpM?6)8)4@zSMc3K2~>=z#+YDk0!8 zU%ul&ip3}ZaG|qWezeBCrOX8vlS&T^^k!*Uoyd4rEK;GD&=*?EU2}SR;V?G#EAku5 z5r_6;qSCO|ZSPEU9C-Qs&g%mEH;lZF&fXW!o)ycur`kJL{~?mctKt;<3Uv?FwtnFw zfudnv3>zI%=@X@EE4IB6OgggAW%nZpF#KLIJ!V$@=x-`N=X374G24vB-^4IRkVyDs zTh|$HBe0n)DlzfL0l;&?_bY;B$C#eVj|6KzaunV_T@Q8V4tQBbgc&}E#sXU0KBl_r zx;0h?VQpba`Y+1D&_@Ajzw*pG=f?`ql340R?wVOCzHxs8Z!Cz|8+7ZXYJ((kzvgM+vd=dQR6{+TGPI5rY2W_ytF#x8T=lxJP7IF0EWgi1>(+)$3L zQ{DY{w+9ql0}mu5Dt>iYA3RuFHL+ZaRqkC4A?NlGYWDCMRchl% z=fVXAwKu9=-hN)cdX8!LKz;lxs z(8WAOVKruZ0M3QM(8EH4Rt-<5Zy(wcXL&ylZ$B>i)PE?{l$rM!HsFLNp`KJ>Yg=zV zyc}jo%;n_oVxc-NezW(O}=(b|zJDMG9Ilv0D858Y2q4Dv>bC}+$ zFHkEC;lr9F(!SdsL-_D?&jlv2gJgj{J+<_T3VhqA+Mfl&su%4IeI@$4 zxZi#BWdC(bP4M4lvaV^_y-6=yvvpUx3$&PrhHsh$cA^Vn+D{I9zlKKk@DSt90|V3@ zT_W9y^zPC@N_AxWu$T~C3a;Pf6)20F%itl?xLtb-WVvU%eX1SS|)BC=jaP5-hwP z378&qzp6RSQOLI0V9{kapFxv)0%qvv#;8j>dn&D}+4$Ap$H#A4%CM)e{FjPcHNEp) zAbu}jygFOiWIoo9Bb|9F$*HAh>Vez6HehhVv$<+syz-79ok6fEdS~dZN$@R+y69 zugwgNT~T2HuJ%3#1RI|9;`5qoqv;v}`rn0R97j7N|UkS8^0h@LCVunNk-U z64kSyIvnF~(#!vDvr;qa>C^cqmSRI5@d$r_phZtbI~|C_#e&o+(F*S0^u@(Rn)Jh} zXGKIRK0XTNJ=@n@b5q^ezBO6?h0@p=x<@04`lg>v+6|gteUNa4EbKwb!oqR~tPU1; zM@L3jSy+;L9NPfvH44lq6{dgra*JuZKY`-h*|X56qneQalaP><#2Yd7AU9?8qrtGz z&T#6@uYRX%7R(v$9I(txn;<4P4#(f5{Y3PykTiRkIf!5D0D;=GbVLAyD&0`PDLRBG z@iZh{llOvT){G(N464;kedxx6Zv|v+wkuZ@khvKgyCZKnfTe&k7#JQNPL{U!V)EFr z2+c4`<>xo~uozDoew*ZHhv^7h%V7NGmzI5dC4rQUI91fpu!~N)?)1J?$Tptqwfcb? 
zAC$`PDFzo8vDIC=8<3Y*Q&qMH!y8a(I9;at^>#p=>Rzi*`9iBj zLd&g;lF%dMG4yF;xJXT-{mP(R;crVwD9FDS9CitxPGoJ4eu?7?B^BKYdG(u~oWO|A%BOz*_PYov60A}=xq!$Clp)qjmuziMd6+OV zOKEGfhAc!5$<*EEaA2}#mlnENLE;gnY)-QHcrO`9!_26_gT(H%IwE708;`l1KM3C2 z!=fY18XTT;Jk{V{{8&fV0oucAP0ce{jDCAJCInV>)BbWT)Yld-5r~H;01Dg~94nKZ zJo(gt1Ff&m&2uLWn@Lic(6uMF(NM@)Q*u&09B<>Z5mnSy3H%52(4I* zWDA^R1mBI7fnJ3KG;kGyR|I+85w8HKtAXHd8Z=0-{&^$j!(|qOH-wNXMXIB^h)u|8cOdjDbLNkbVQdscvRiV zI54u^fv$*5b}}rLcIz;?aS+Qg%bm0ejDTA&E zirwG7Zxt90-~rxv4ModOp(_J#ekYVxm!Nc-3bk3V2DOn;2o4Z(u$bDGndVna2!h5{ zwbx-<8XBqBpO9Vv@vdU6!5o#7p$-o<_>y9-m_{_HMy@z3nO`h;5v7_ZxU~djGV}~b z+Usue_4M{GfwaM&SXNecY3mBOb^OkrJ&Q~j(vkd2v7-#ofAICo7ZxbqLa|&$R8%w= zPmWg^&CCyTJ$p~Q$}&UQOl`nPwR;A`I^bF^NjxDXZv~-L&o_Iq7>7G1v^wwvnPNaO z2@VTO1(uxTh|GCVtTTjytkdeCH<)~qQ!mmb+p^ncC>Lt$ZP3Y?1FZOyLJf&*Bx@}3 zh*p3X3n|d3g#*ksP-sNmA)&5Xuuk~xCX=DnA`R41s9!}uBdW&WCWs?6B5p3qA&2q& znkM;nX^$#M2u$J9l6Ql0a+!2%=_x4>v?MFQp`*de1l`s|Ew($uGsPI2OJIk`N21y5^#~qy8_L>2t1=X%%%O&986L_9$ zc6V#*0uPcUi2KFI{C45;d;Jsh;$Kdq{H~x#IB9Kft@_4YE!gdut!X&^1b54f2miUG z$L3Wip}vF32F{*_xm>6rz@R|oD*>D~lejGC6>0@>i?$8y7N7>R+V?>gMAZ^1b~a$| zFJ8RB#e+l+9#3_LeaU9yZ!omuYFM})+s6Q3fdIi~2`OEjFEwAnr z=Sdp})-DSS?Bv?7Sjx9M?o{_bVR`xLxd!WJzgw|yJb0fQT}ae>jhh-ajWbbY>eipi zH+}m-&@D4m><3{u%%sHY?Q}{;3*Q#{vU{Oj|lV|X8ZADAzUi4m>X9OmHI zu$7RIsI94Kf%QfO&0N83ss!jdlFD~PekfQ_wouw_S8u3Hz%Nrv6p$<`n>}^TaLQtX zrx+OYR(;{)|;^VAaC>0J-Cq$^0m7{mQ4eVy7L zlGyZ@yGBNDKxfdWrw5SWzmJVLQ!{c?3@2j3qrNDB8LCMyANNQ8C+I2zBWKPdo^a&N z!}&Dr>=FD{@47wlivwqyee?wLtmLAALrJkQotb%3;)_KN{5u#`FmR@VN6SP+9GLze z`vUMYL<>2qfQf72cd^QV%wNJgJmEp%Y|1M9!rCo5%vt*9F3Jq#kJ-=a+qU!f^vOt5 zf&(yNVzvB_H->q@e9xGz0Z!@IjX#pr=!(a`pa z>b*U)Pv9EBUzmX^){WHy_R~sfDj9-mM`z>29dG)ItLK@=^nVO*SDvtN7SQLpyYq?4 zIEVTuyJn#r7-SGm);swOidFZZl2zNLM}(spC+DQmYbp11+h?w#n(=vRO4Imf0?$EB zPdE`E!8|B3qU?gBWh3G-@9X)jOmx_9{@F>t9K(LmiYL5jX&r;hcE(}fON)knj+4m_ z#kdc$a&;;4X*&$=jl5aE79DLj{D*Ol>LRW;4Mwp9;KJ{ufAOSlOVE2w_MS300>;|k7W`V?j!o(@Nw{e5S^ zn*rYTDE7GfN?O5gbML~P+(3x(tK_T$(mbs{pnZM z7|zvot{F;T#k2NLUcNjStmU$oeSl5SW*@m(Ps`;od#w3=*yOclav~b>xCy0_4@5N-`eU8 za*Msi-r60b%`*dmYIIce_+K)E-@)aEy7Q3J8e~!w(;3INR{B>g^gQ+{Yh{PN9^`XB zd-l`o2NP=tZ79mpn_w~|wcEYkD^0qTUbA-$Wb$zc(k;*x{SGSc60I|CD*_a%dmV_v z8_t=t(mSa~!$XxFI!opt7bxJR+YrW$98u2>or(q6WC&$l<5+tF$-ofIW+TmqL=?tW zL{7ksG@#1>_4I)xZ#ekLYaM(ziwn^^nlq&f{z(AP_;3F0sASE`pvq=A&y=a2=6NJ{ z|Nc7^kimf52sbYp8Q1@j+3}%-YUR5c(lIjBGzVe#N2z0Ok`$4LlNcN(yDPp&GbN{W z?)9uM=}*et9WRpEptb;7HVAfzvlH)rco`ruynFMP5 zFT_jvk>rD5DE&56vncfKg}tQ3{(DLJtB3}Six#AuZiR<2YZ=oflM~Sxrk0M?3WZD8 z*5b~dWAgbr$H`WQu7{0#X@2PK&8t-3dzN_DDn~qPX=}~rFGCY2m$J6qa8$$j;StH; zV1M8jnC&pbPT^)@Mv}Dpx!mJj=M_$r_8EJY8p?3slFr&ZfWM*M`Kv~ z=);fx>#5UtdE4y!?UDScUFxt-!y5#DWz|hQvMB&h%0E7pw_XUNU?=w!0HH6- z+S<>wPYL5PHq&1Cm~h4dyel5}92AL~lZzZ1_JuyHq=V;fHYmMUG;U;sn<3qJzdbzK zLN8t_%r6X1`9Aeb&qv9p7^B2(|L;o;yr=l>*1dQA9N4h1Nb^h@%}=m)Vm7~?Yw5TKXVywqD?c7>93D^0qLp&ep^b$vVypZzgIJ7t zd5y)hQC8SsWPRkcSvCq5xI|zwDflSCk5q|sw@pP$)8-jIv?Z&8MyeneQ^JR z#ld};g#Qug->bYDqeZfy=?S{!(x7D-;cTEcRIs7I6%IBwDcG$67Y{wbVC3s2X^-Ur zG&dQ_zqq-epsv0&cj6MGeMQ4$U=>&}FkzO97wcEU>{Z=<$BRC==9V#Sg z;Iv2k!a)BPY>Dyf3I}XnGcFQ=(VRY=1|K+OF^oJkFv!&*$jQH+xYmr*y`PePv8LDj z>PEcf5GZ=H*Z+>clvPv^{sLy+px2lz9~W`^8)}b(n*}e!50ZA`KCkwJXtZaqLvI|i zOFjd`A|$F__nj?Na6|58u-(A{q!As04wg7DBq2Tx5n2(Dlb{y=0F|0rZ2?*=1_imo z1p&x$4uVtNch~|TGWy=d6UbXecH*MpSSwbUw!5er?h;{_{jAaY`s1%?37P31#5{~Z zWegS5e2_8?K?rtY;u>hXa+QvkqfGAi7+omL;6b(c>#IFpB(t_X^~W(xk&pPF2Vne# z60Mgnahsd83{!Tps(V0l{dx8VX8SDoBN0~aaSnpxA+{$Zm{hAUh|MDPZYcBZrqZj&bdi?csNDArowy+U_ znT&ew-?LXw9*p`Qzk00LA*WghvT#U0=>R3bczc>uBihSPU?i{@@u_;VuD$14T=FdCRn|!v=hba4b z7zvRm^Dw#<9Im=q4AYQwcE1y_TIHs*UZpM411dE5`5b=I&-rbY 
zFZz|a6$X@8KpQxX<3D`ljjj^&_**WuA?GpcNd{v+5M)E>)ZlJJyV#_{3`N`k=&Cu1 z>VEiz>>%x!kCKv-AwUdp38Qet8%*9svZLB|=pHM+OJz;@X7!gjn>40RpAvEgntM}X+EK`g-?|63rL zu4`=6k9Ig(qP5=Sp0gyf6Bzq^e|4=^j}#R);K%1RboPH0N^1UND3N~B%bt)VrKg9x zf5*|$zuK-O36cZUcdRoTcdQ!95`E7U`HWwIh5;fH3@C8g1LH4tsdiXHSZ{^*Y0G;MHbVS51`Qlu8N8R7K!IX5+)K5Hqa*Y4P$+46B zJbrw)?+z|MqvgoF@bH^lUyWdb`5oq4aoXsM5e?@-4!xvt=Y-GkQ{xWxT5wdEs*yv% zK_%L^1!_AXXkW~1ph|G7ft2WclF_Rd$K0`xveFMs8*|SM&6RJF)iKF$Y z&2&}qppDjPF;7o_%gWQ}O5yNb@#z;YTv5nTqh-j=DfBqf_XMq6EK4`{=$pZ^e-fGd zzgT$}-iQFt3g$vlNB5HzA|eD#`VfIH%DdRP|1*5y;O-STt4$w4iZgsQAb`XE+ftw^ zpfL1~g2P_TcI%;b)Wee^+9DFq?HCXiUp)_Kv>}+4jgG)5q0Wa051G=f;kJy z<#5~H7F)mCQ3S$Pk1$lY<;bA>z+9eF=!mErsMVf^uT+13v}Ys@B}RC&3-v{*R8D%H z-i_>{>y}=xQ^OfmGcKRCEGN<2y?W6HUPlX2YR7)d{(0r99Q~^8u*uw5=AOT6mc6UngJpgB_s|P8rL}@k1UoBbf$!y#PI*t8 z9*GB7h)6vC6JTkAQ~*8QMq@cx<%A`GzrU6j_Wy|H{rf+&Ao}~VMHpq^(-mo`vwwL?~4n;bgZNRP*Ew`nGGAQ@p*_pWzff8g@?9T zsUMh=6aVc_%>zr9`r4mi=VDrRj|i)|a_=LaK3&iAp77msD3oE?zoNJQ`M#SyChd$? z%xaosU%s>w6dwX(4Tc&1W>*pv#U30i@$Aen1SQJY)6|Gii;~oQdnGBUSuH6^J7g%h zyK>1HMf!XAN_XjZj3>GkZ~L?861ZN~-@S0T+4kb)W={XA&;4zC;oPeS!yD#cN5R)1 z_PrjhuV<)k;oDu`G_U?1nN?q+3H~Q;`v*Mn_fYyDpLOA@UUo4t2{$+Y0F>1phYkuO zAfh~JcR@&t^@62fW1Y2Nh}zU18F(mwh;XmxbR9Ak6n>uE=e8SHlX21JX0*b)76CaX zl$TtNp;#0n`42hh&#yknl~7k#co_dMn@?C@&ywhjCIXz82U*vnJ^J{U{|N&9(@W0$ z2x1cS`JiUHhDd>e9}Cd~OP#GOi*t-;qsx+NsSNJzV419O?_`ifh zQBbde^zaySeGPj$K&P7k!@Ud@2sOyJvXGN=h??@*&P{B{gt?ZEYjPnLVd{b^HR zODbe}<|tdlp{<_)vSs}!1;_(k2ogAGMaLJI_djE&%3p;Y8QP)zBpd2U#`Vk<7al$_ zeymhOUjArVBN_Lz=u0|7QyWjURRw(RpL~|&GkYGkTj&G<7nPNya8QvwC*cG;K^RBq zz`a5Mfbv34#}o)jNV_xYJ>iq~Nq9~eqECS9Smbw(K5tiOk~vr{hF~HXQ=yGMH9h?< z6tuvL6;>AkTu49!`gO{n&7!Ya4*|cM2rm8KlBj;ro-b zt)>Y2aB=O%?XV|As|VG(f8C`9x9Og1D18922S5KvhYtKl3S=rf+CMBoAd$h0lrzUju!wyrLbg-QaY9PBAGJ3T~J#6cfD>t;2fzMQ3lSaLk8mwVkfo#%@wotg%ZP>pN;I$|;->KdV-4{4!+vll0>N;jC zeYzmKBwjP3 zy$p~W81i5H?s}qzMqScH{+XLuCO;41eV{Mx(;Jv>uyi;L8fkTbBIP&gfcuvdDsj(P z8j^xSLr+5k5+~YkbqHXKI5aZD!F3Aj)j=|t%Z=WuzkKMk8f*i=sb7CmHi&Q)o(MJS z3Y>$E9j@QKE zFLXG9$-p=!6^ogMG9m-OLteD|&e~`_^wL%j7y0@g5`u2te77x#=BPxwe(@;m#I|*~ zP|#hL8t9+0duw_P7z!9AP@QHuqCsB~uoCEO*O-rp#)(#tum_w2KWHId{uFG=XI{Q0 zj@A^^*a@ECH#7s+16Vtl%__F+P&3jF^q{5*8Dy(1!gH8_Z-I|=Lr@NE3Uupqki`gh zp`;1LOJkQza0;WnSGdjlN=Y*hVg&lF!{v>CR8e5yJ*po6J4p%><$rxD=;wc<_y07v z{m;+--xM8ST>s~1|1-J%OBDRyIV6TZVY6t|OyE#_2wR#2A09q>dy!c-)DrbKKp#Uq z2+Oje#D!n~3KbrsQ1+7o3tuw`XewD*l+e|PIyWE;r*(D`5y)=G3@U=?-v3+H|JB}? 
z$78v+Yd_IEE@jA+La7vGN@S=Ek)jaEkPsP?%(H4$W-BU`DMAP$8ntR&d^hAlb_5od6aWW$GpLw zdj$@$-T?vR#3s5`7;-2E5>O5?FU&&-VvT?|XK->KBZ4lir_iquR}*3>XJi!1eFcOy z0`?PoPe?OPL$h9!ZQVmW6TpW&)~I724#SBZ;Sg~*G`_E>c!&V%Wu;^iLPlO_so^v| zGY>nEc-Wx!5WpK@3$bJyKmxA@bqBN}b%3~_uV_EK2ppVjmki9z2{`n6!AG%c_yJ>f zqT&V4M|8bNqr>$wfdxQMLCiXE+FCVS`|^4jaaMw>0C@aE!dFd-E&y5igA33q8az^o z=qk(>%1?C!D8WPrhY|v)YR!kI;6ZQ(dLSI-F~ny90c-$g;Sgq zNQon8niQPyv*z;(etzKXZ_m#qmb5TdW z0mXt%04L`f+I4oN#X9#1Y=alkX3knIrI z%nn()C*c%6zmKtpe@$@9B#!wB_^7EtZ9}RMlF*JO%}Ub7F-z0>`1%SVD)&X6(S^(8 z;BDh8MI8+&LFvQ~hcq*o3POl>{N%Se7G-rmyug<$p&*ALv4UF^7oH~zPV>GyPd?mw zM}I3QWP%6-F7MMye-GoVLjWUu^{79COAsXbL4d@Js7wHMu=-%>#xyiKz#G^PkHj1x zF&T13OUo367*@D$mEfdY|L%Ydj(#g@+kf8*?&GXr`j%jJ`9kW< zis!AEAyWH;(Amep08Z4Mn)d<)u^32kQ5Oyxwbp&KziW>|y5;5~`ad9 zL@rU$&)I&UurceojNT>z8_WZ`h%X#s3184fSP(W^%}~F6`zAWDe!~V~N$RvgOkQ4V z7E&uecu_Ossfh^)9UGhZbKU%A^3;rc+9uO;bK-@;6c$}W!>9Lt`|TpU_#2x!@KA87 z9h`SWpp(4xBruT>hQE~`^w-Tpr6>)whbGt-vUZJ(wZeI|Qc3V5>ju-$o$;XGPjq0>>m@$X#QeKmRzhNYHxg z3qXN{xS2uTyJ{i6efw(%NWuNadMa7r*>C`0ck1MY4I#%ov@Q@?%?lO!2nMylc&r99Ez|2J#a_5VB{gZ{vBNuY~#Z#Q8Hs8LrRvG z0daru!#HT^#q5=4Vqy|6E%9YG_y~H?f%WnS!lC>d%f?VvWx<=p7rR@}=jqWdzmECu zhvZNKt%d`d0W@{>y2dC@lt2WgQ$nIG1MEF=FwUrkgJ2i&mp=ojK)|@>sY1#9J*vLg zMOg}^m{#^es{eYX6*fLsib_gY@G?nX0bg*f#Uqgs2t7Z?i!xCZ)OrRm@V}#9*N*WL zmhOP!xP;({VelQ(hg|Lh+~pGJ#Nr1Xe;W4K z@vYB5$}We3haY*UnZ+D64?E))%r;xeOykOJF-GMNgOiz`oeX=sEC8W+%-9&LI|$td zIxiSW+W4ZHV+)>luN%*Y3Ef^sAK7zB6KOD(o=EEN zNqa@T_5#Ash04YG0Bc>eg}~=f6(+&%pwwkH*nm8F@&x;roxl1-me0ULZZx+zpL{O! zbh)*03)9pKm{e8J$=~9XerIMg+}hO5mQNVJ*qu$`KNKUIx@W@Ou43R@S7)|LLianggI;P zgf}V-6t6ef!Mub}pRf>qTPmaAFs#A8T@Mi^aUcuq`ecZGSj?vPH{z-wwz~z~l0IC~RoXZ`gW3npnl=Wpv+meJ%p-X#>37 zIN+$#eE3{_SEBakU|wEcRIGXjLo@RI#amV$0sNhM5C+sp!j+JUE}f^S$7!MH4@qy% zU}FW$d*FEhsq2jhcdBEsPcI+o`9X~RiFqFEvv?FRcWMYR`?`-jxiyd!at7q(~(yQb0CcO^=)6>uwjSmX9B!P)7-}=U)W}aLywf?w6F&f z1_0@#(BU?hoB-2msf>xcJ`!M4KSKUYtcDbbnGl(z3N;Yll8k;o7;zZNT8M`y06Clt zB%UMCO6JV})C+=B0(sHO5)=1vW=)K^FJ2% z<6>_HxR|SBcx>@Sz3z)s?mEK*#07@zDW5zdibMXUlKy90zaU2CD2ICZrtE%vnDDid>paN>X<z3sJ}OqXWx5l8(Z_khcdV z6P6eZ%)mYQZN#Bg7_S>OC_+=O|Bl#V4_G>Z={U)tFBh^&cqWTE+2_17hUz%yMjG3o zpxxgmz_L6)rSHdNB(L7Lp}C@9r?v+WnsdYR*l6M@&^Oa04qI&Uj%pweXeMc-p2J;b zSo~daeV`%#PW3hHOk_Cv1g_CpcJPBkIbpNL_NH#ng}jBH4@{18l2`WTrxZAhq)KiJ zgtH}}RrVteD!56)aD--E`#YP2Q=MbiENP&D%wkF=fpQJ*Q-Pnr6oysS7r%V5w>XV0Rw?Ez-{g(t)y5leK^ZchxTjbqxRC4MVIeM zd9HcIrn!Zfzmbmvz%W-=MDxK zvpW%Cw{s4C)SK#oevbSUE7X^E!O57IPl9oZ9W;V3UO~)tHXYfCgw`$nt(aaRI2O;d zFiU#rYAp8JE&#okWQD^S)3U3V)rB#?;!xE9rUA0s_YVs=g@ywiAZ&fb?t5f$b(EPW4rA$RlfRV2?)0HP1ei<7`CsEDF5`S9zC*%%ARWX30`GUCdA1K9Lg+AWUr8nHh;0 zm1kQ`9IY4_c9vOjck@WicnxQDZEU;tZi5r7Y{T`SDj4aFdwi8}+JynfB0f7ANO&@! 
z1Wv#jl<3jXhqtz46J=b?Dclmg3>9So0XLW938W5Ks73NZH^lG6 zFrKfAl&7#u@+uJb5crDqyIcji888VU!T7TszAl?84qpvoScdQi9*1ElhC#8waRsAF za0|@qiFm061#JE#+{O72O^QCaOT?1TZCii{*ArVtgvaD$m6|D2PKQ$=qMB0(vof)G z6TW=8i2OUPx$#{6IIwjSU@7BUxbTGdS=^|-G6KE>o{<|VroS>G9EOq~}fj2v`Lm{qMqF1UW_x2$45>na zvE&%dYli5?>H(L)8!M|JbX5-a@X15;9anf66EyC7;p8KI89A0a#hugB&1P|WH9cWs zwu#N35d?1lVt{>jE9}}1BE)Ulo>7-xIEUW)E8#23(gJg3A@uo4s%rzM}6p11OpTdV3Mk$?%nKRJik{$|UJ0mM0c800T z;G_%X)Rz2>?fxJ~_?;lZDR!o%wr=#mIY~k7oqF~Nnm8s-5I<*BO0*HGTboIG#Chqi zvMj}UAo+LaLF3HHRxC@t@~V|9Rw!H->V-g!h@>%ZgW!OzzcuhF5-?NZ-4ACu!qFua z=u&jyx)0|%BqU$cQuAT|H8I$vN@Oqy#)sMTa%>Wabql;@ub^QXIr6v8J z^`HAXxP`p1AL1{VjwVVP9G|AxBKgKP!D5X#NAB2h6bGZ-d5d)(cgd`lyn~{W>mQ`D zr$wWp{FS4croS_+B95s;O)uq1rig}XWDSz}hdm3a{XiX(0VshK;oa)g{Xk>9V{xH_ z@QqPJehRRrK4%Cp1E~@>XIsY;^bt9r4q8d5BY$5iNa-!k5`@zh!TsJHJDxN*R)&jf z5PA^2^+*a0DJ#jth&mL;PuvQR4_Vq(WHy;U!U=|?XB%b?ar?enc%G87o*YfL;6veB z_I`SFL#(2LmqFf*b~O}8c@VHGkcolq0wC@*+m2+Sx3lxvN-S)A`p<|1&`R)n56abnR5 zF)U7U(r51;H&O6@%v>hz8r`T6-8jR7;+)>ku}^+_JD`2*q4P#|6}-+I0ZCZoArP=J zj=}m{2|S84cQ`Gacu!r?2_8e--VZCqIyhS14-HK~74;A{)Ms4dIa<{Nk9JkuSlNG4 z=GW{|UYNecVz3kg8&dUBalB~tl$MnABT2F^NI=Ec40{AYr*Y!pfZD5&ruUt#1aoQ0 z9TE`;c=JRaWP(Xa4!~6$E>Q%#AlYiZlWj(7GIBm7`h?L*O#exJ8kiRuLX5KM7!0;h zAkRR5O9cWHs9hhWrsnYi@}IRZj)u66dipGZ**y+gdA+4lc|uL{e-iE1@_KS|I=-f-JqM^34}2u{2|7S zV$;LRvP?+;z%Nj>9gDBOZ}YW#Xl&3i-iUg3V=GhMKW&UJ50c;*K<*h`-K)OvWzvKI zxJAzy4=}s8JpIw6eWax85fz25qp4%3FVC^(S4fPDbrc`Q+)Zm@8RTTK6oBqLAH6bA z@u)`@>DIK)m?CtY+(Q5~JkPAvrvz7c#QEy>3^CNUd9&uxu1P@`b}8C|8w)N* zC~2=jr~%<|sfp;Ru%$qKtIyWg#@OU(^M^lJZ4|nA6KV;_r~ukID+kvYlAJ(R&=TpP zwUOZKMp&}{l>rE0ze5RQ)Q0(#7E0;RU3K>fP$K32+}Hnm?5qD5s>B?mF%w#^5RYvj zD9Mv9@E9gj4JY{?uq#Oe$0P8wbKqE_M*=E_x?9~cf8ycfa9a{tMmRS4tRo!1Gm{RW zk*`V%Qq@5?=6=&F% z{SYIvDN5B30B`i$whgE=2777Xp9i94^D( zI7ra)FA`%J85trF41CI8IDZ!CFlk%E^I-)gm2e9=NHZHWfHs2(HmPKl__HU$=FtQk zFf9y9 zq&|+5<$QU6tx`~2hFsdKo8e}qqHdd@G%lAb>|LAQ{_>W6VZ`122!O-U3xyLGM#D#3cpg0}+0@%} zJ^Ow4bvimLO!i*Pd+7|NZj>&E6&SATQJ&e+j4NnexX^`$um&TW*v06RlasqrW(JL= zSWzVP_4F*~%iV`+ZqsyifEzuN_O&-z139_5F}b85? zX>DagancKrb`kv3JNuJT{c%(mqi@_L9O`s*qqqwp6w*>0+6Hu)fH`{tnh_TWRl1F} z8O5gbHAF^3L&L__*3T>#rYofdx;Zvih-q&(*ry(QFA~<*W^~+1=35Z-T(a3XZkZPMbWZ!r%v5&bSbETvC4v3ip{duAHR*!wYAa72Qap8 z+Q`^0FJt)f0Xziz!|6<(osp2>zteLDziVFRQ9~E?L{4JXL`$5ag6Pf`m5mt)#X^%Tj-}5XfyTz8V z^oNcW9KMZ!PY&IGZES3ckO2#PojcTe?f5>uf+J{^vyDl|=1dsKXILMX-_LGRJb{mk zG&y#JD(Um3Z@Pa+)6Nhcxw6Xdd{(ePORaHH0+_-UL32GZ%r!1XRvp?JtG+0=* z^D?Wr3B9CB$P+Zw(u7SXJXXzWM*eB(@Eh1Fm7*NPGEm?$-$hsIf66>kkSAJ^%EmGr zQOCTDg+J*8IKuQ$uud)&bBX1u~*bG>a-z$h1{^W_P(&(0|NKFZd)_qr?3^j-v3ocH0s{? 
zxUZDv_Riw2yrGI^byJWd~D!^xSCksckT)8YhUaY zBXL?5R~f`8AAv5BUwr?!U+lUN9#KaB{g|@-(I>K=lmjK4$DY~8_!v7mt$lJh$ZIR} zp7c}3fBedy4hpX(^P(HHxA%w1y&HR0yMK53YC1{@cCC9K4gb6dcWAh4-P--fmMcba zpX8Nuzp`sfb*r{R#?@P!f5C-Hwyi@6{u$wAm6Wizy*1u%-n@ODRvpV=eBWAmK9-K| z-Mib3(eu~0lED+Gx5|vyS}z(DxlHtS;J)OL!C=Or;CBvom}`PT1{UnM*U_I*cz7;s z_-=Abl&3EyjFEa^EJOU<*SNUo(^;=1+zhrvE%Qj*TUO&;=GNzR=dO>)*Box^tOQu@ zQB=+Q*xy$s@5iyP z6Vsz*sS>viSHCKi#|?Hfb62faJn*%7a(Z=D)w3hFma`mPvrF6FHa0`m9rHyk;wX(@ z)bcGUi!+h6%?VO8LPC2hx8D$Z;J%x~=c*q4sm0ViWJUc9mce2wiEe3*>1zupiRPG; zp*{&!))1t6+)|4=Gcn2d{Ya#!-#fZCX}@x*`vJr3ZC2wm$2vr&>n>_%4&`iRX7=g{ zkdkU>$dAD}oW>L%Ad{I{=x}MeY)EJ&SCz`U3X?4x-|lU(ee&i~33*_e=9y`C4YkI$kP(v%5Lyfgy*P=^e@j;SZdQ9$rowQ*z~lqhq|^ zKR(*>^6}*WGqcdP&G|{6vM*jHXPQVgC1kg=mB@9CZ4CX=OWd}W%xs- zpIN!iy~ouw$2;IYWp1DY; zUdpG^H|KpWi3*Im2$kN834K4gFVUYS^xy)!v}3)62NiV;j#S4_W}UPg)6#=zM_1Eb zC7o!O>Ej|vt<sW`7|D<7809Jp)_v zLTO+J=!{RYMoU<`Zm#3=^z?lA@ZmHf!3$sepBby?wiFxQ4)1+`&+FZui1Q807ufHg ztCu|g^iscwbO(u1%M0hr4MrjzzlVyVU9gM{sADvY4fvdLLZ9xoic2(4B-^4= zjbQnsCxy3a5}SE^)RZtfNhvvdADv#k8vIIc{CDC_hUyb%F-l3*Z{_4T8TW-uFKmfa zaJqb65KpZ2*Ap+iUB_R9XrQDWYCIm)@6WyQj`|Qb9QxGwq406{Rk7+B%8UsiIUB0* zI^3X^a2;F#d7%>}3C9(@H}_iS7zg4x*5f&J$#ZbxIS6^mUutnoDgZ52HGjW&)a4r& zhZiY~*BBk05N#7wsN83Ehr01YCCflbgLWT{Vt0M2o2?57C4LZ)Af!Ls6Q=qV_u;!S za(tXV&L};T>Fub=Twj55eO1R%N57O8+jMpbN1tv;fHo) zcF!%qK4o~mI#Ef)xxbxi(-CjlI}+t`I-^7@xM^#@-P+O->hMSjL_9Oo-*}{$xwyDS z?cJyB52BdbmxgH0eO0%SpRg@!)MA$um5U0_hZxGQOzyMs?krGOUOWPR&jY@7QN4JV zx(%9S^mp<*D#*~E?wpOpwiP1bYg)@(FV_Olr`+%b6s3lbh;G=hN(AVGJpYSesxh#To z;fcCLDN6BO(LQ|MXo8XXd2zLh*raYEuW@t7GC>2q-jZ{{%W@JD_Mk*5I!S&0vcem1 z;OPP5yLayzz$?>gB!8*|Wex)@ofjxyS~a>18gTf)zp)6s)J{|&1MCrP$|iFtUbY#D z#SvuH9^=fvrdoJFX*RcZ1EY+L%hz>!o~Trnpy1Mp+{yTi>B?uYs*K{|MA?rWU51o> zeSNtXTH)pq6BEOQ<{NBKfO?UIidhwx=SKcEuyDcsOdq<`ocYhZyj;_9%J}z-7cXuH zfst*~kss1aqYThG|8p#D-BZj=I5-(=+t#aFHJmptF4vt*o6e4$%O<;kU2`NW3yb%P zb&S6GmS$!?C{-(@!cWx~AaUvA_VKvr`VBjcMq+b>f~Z%;Zml5dBlFg*gJi7f=#Zx! 
zEu%71OcmPrI9MdQ#eyvb2u-2#j5ShB!mxZX1qzlo0%e8N zA>5m?Yrm>0HD!2sxR=`06cHXC{sOd)ju= zZr$pM*Dr5oW;Tvso;=e(2HxJf;`e>?gZpbRtNPY6rXz<3SB?dgTYruGes!yAS*Bv# zC}Q~Qt+p%ICF;d21oTC>Fcp7M-J;y3Eo!gNHhqh>o8M%!pPh35sXq=bUuSo`tm#`) z)3<7=+Hm_fZ%W$`)chSp=joc@0(ubA{J@|o#`|Ld z>z2P(YQ;zGxz|09Vg%xaG@vO77R!WWhyk+YN9d;PcS{i9sJn#9Fp z*if!5DY$y|sz3NjMXc>V?yNnxT!m#;e5<8RreA{HSJC>1(=ZGpx%!6pQ!QKWUnVXN_PJ;GJH*}m#Dj@$nhmznwd z3aS@LXN(F`yW{;z4!0Zpf>H=f#DU-BT?`{h z1*cv}cdogy#7u9lS@in16RjW%gxmO1%`&&jz74|NDl_@er>3tu@OWNJjjxA$O5vyKQiOw?9^MF(Bsu+e!8c5TdYy*U|m6C zcWhxyLsN8wmbyJxRpF0yg}ykN@6vs|8bnkCksya;L>HYK6>+) z@;Vt(Jr5O(`i%lLI|KoP@Kswe>HMmWGXqP{s^dX488NA;{Gh=wS-Nzwc0tn|fD$vO z+sKp~f5CVxzk1FQk7&G|O@NVTS8@7t@aKsrIzSX*(V0-V4;%U6Zvwdk_(iPKqOqma9WyG7!Aas27ZR~b-$E%Sf9n5EClydTVhll+E z@u}y~i^c{HdTEvTog3=Ya-0=^<|ZL=n{H}LQ$gx0AsLT5T4w?!&Xi|WCraonJ6$>H znt{x3@7ci9^+>wo)rJ#{PhU-Tr4drQ7ig2{KejUD?9R9dBt&m#z05n{Icf5Yb7#{Q zaSo=trtlxi$usuN^N@sGh#Qf(n zHNWap3G#BBx(-i%m|B}E!fpNX$6Xv0ACO2jrc z->uA!Ruk#|o(ik0UO)ZYx;f2rM9*HQC`h^B^W?W6I=EJjOS;QR<@&RaHLXzdsISFy z-H7h(+w*Hj5Sk?6mo9e87>p)_053=FK0U!K7+%Q{snZS7`acE3sI&QxRZ2+_pcQru z0hKP_1p%~vc>3m?^5bDwB+-9;K62&UU}6?du+M0H){5E$iF-c-4keQ4x3*xCo%#!F z9dWF5-m9|bz5;D>gh<=m$2C>bXT~SxK*@UHipH6)RG}(k4fW6Y&h`ZH1!L3z4%!3; z{EQZavE?^ET!|9QITb2yjgmV(@E|>^QPKPi|Eo`t9HZHlr$)w%$6uONgyk#_Cayhx z{P>HDZ*IPrvaql?C?q8GH3xYB4`24TK!U2WLG2y3^Vm#mN<_+;xR2r4-xZlCbphs+ zA0;jEda2a6#!L_Fiy1VEKuqW)VLleEAnf{@v%nkT(GD)}<_s>El_`RJAqF~~j}NYk zfxK#^m#DOJ1c9sMO!UZM>$R>2;(;X!MIp4b;`;DskZINUQR=JJTj%z%i0Rj>BoFkB z^kc_DwCU+d5)ue47z!^|oN#w9oI7i@*HQV-owe2x%3&qdHD?|Ad-Q_(4&J7XT9*7T z;aAjxpiN+%b3&p+fYs~4Ywp7ubBd1{`^1K4+1ZC*nKx|){nQHsMsz@z{zP3NhnGcR zL_~7bOSuSpcML8C^(rq5wR&D15fyG@vF?=WHTaUmq$J*)oSc#j(hc1}4;$`bWEI-O zX_UkW^!fOY>lqirN;xV^Z*Ai9rc&b^e!P(#o$f-+u$-b^EMoUMg#2=eM=(k2(%dHXL8J9V_wLP5Eto%!d-jt|WEQk;=HiQz`DJ=IW z8+;EBJfD1O=aKl}-wy6i7b=gAlFN#HoMRFajI=MPs-E1Z+j;??zq~y%pm*4RD_LQC z^6c!_RntQk;+y^Bt+|cecMLqEoXoBYF*AMqv~T-zQ*=cwsvaFU+}T@wOq~9Pj;Mzf zU)}-U1GE`K)q5H&lb*45jVRW;){wjjzgeo{f{YP6O)dGZOnH!1h@pA=!e zS1!Bn&(JEHd>8zFRfdw& zC#HfQw>441+IQjAzV?|4`-j=bC{eq~=R#;*1OHn7AgNS0G%&(?#23YYlA73)r76rv zRzT1zrQLMo7Zk#YfkX6723m?>LyNfEMw?$Hev0nrs^V$-m&?8-%ZwD+$lmht@gaUr zPQI)!HxREAJfO@H;O1iFLLb`CL{-6&g@>eS-7;e~t!%xZDniA0TJ zStcENT3Xr{h<>daZSC#lrz$4{2me^%5AxjNQVR<^BodAducOpg(EA7d>v{?n^atKl zHr5?;r0PkYY@cpk77ej!QdZ2q^T3RJ9Zb7DbdMW9SFlH4a7{W7B7lKK2! 
zsXCLxC~lpVI1*kS1gsyR(Y6mlj~Yot7`-b zl4E15Hc~QhYJb@E*zbKeCnKNld&9M<*{H!#>KH+cGzfqH=MD_pU zQ^|0VnkJaGRx=Fe8bYDxeD|^z9?_Aml$}e+W&-T2%j%%sdX1r|20|PiQ zMfDrKq!Fxt_m4pi<-b>6{^#-fuf3oz%m1k;tztn&RXXE+4_8hum;bi?c^qu}@&Y{H zU*<)>yL!|BURRF)+w-sTI75SfII>if_CuI79lBJMW(lHf_#ycBMd<^Ife2V1p8k!DYS<&fh50|8pA>1GvK}s^VkV}0^S+NgNbz*41>{Ph zc<)>~K17|;f4E)y{f8tc=i_8;ojN8Gl(+TvIUYO`Am)@_wXv93fZ29c%hxvRlbAm0 zYI$$h&{w~k59P~di>b_LBb?{Ypiaueiq zDF5#a{rn&sr5vZoxte{&XI49Bej)qpc#L=i@Ls@g0^GF#qegwTk z{8wNCv=4r~B@mDh&2CppE>g-%*lrjS2_xkojcXu6JbC;Ndy*(9OIu)&8Gv%QI%%7j z_$$RV5MUv|>o=e0UcGv?m4)TewKj<bL>319@Q0nXT z35Y6WbOfUtrCQFfi6TNmPg9eQ)l)mvxe_Oxqf^B*J|fjW5$p487XhG`_$>U3Kl$nC zUckl^R1oCAT?(lj+qU_WHq{X@;ZmixgYQ<7` z2V5ARv#iBk5o$bBv9Ge->&sjdB!?9&NYY5zOi~)G`rgemqXF)0mX75YI)zgmPvv#* zxAX!#TiJ0)8B`!L;sjdfH@NNBWh4Jm3TU&{C*ejJjIp*;U+k;lJ-2Gxg3b^S+E9R* zw>~MPAjPS7ATfxVBDtn&CnO1(MeyY^0Mrby+V#Vt3H6&Ym75QEtH8NNQMrK2q%VsB z=!0+_X!s0hGs{d8%6Fop?<_?Bhc1G`Zvv#mkofALQcr}8%n{frjN+z`@WIz{ty-B) z-$3H6f)p`?&A}8MP7s-Vz|a%nLQg zjC4qazJv+%_q8W!6^+HFxTAl0B{}_OQ^`r)pMn4Wo0mRM@D5K;QjQ1!liua6FciT7 zQ-zBQqfG9jOx=8``bs?}lUq`+0P;g6pd?6H@_d6BYnjb(8Nt=Sytp`z%7;^SIoCbh z^dGOvt*K;qeI5kE{v7jPR&8l^c6Kh!uQU|6S7wM3E<`1SIt0x2!E?E1OMg&m{+(2M zp%X)OpUJHU4C62gOSz6|}a;fSSoPxxDK&S0RU zlfuio1X;`rl=zm`Uir>(+YD(vjqq`MTk~H7r(>jMW9}i=3qXa!*BnyXZ_sQV>7Iz3 zYweLk$wbq?hUEiED}N?x43OZ}hV{{ZXk&r)P6#jW*hNx%3|A@J7DEuRHdru_ze* zU8bw*!{>VFxfja~}+h^zIxDZZZPbf*5^Xr)QBf+Mp zClWkakec#_ZBv)FR}_Z!s!6;a?;H6tzT70_4lsXlFp~r)8>3}`SVku&TfDU@;rY-r zHVWqvaR6^q#ZdZa#+fd=j3OH{%y-O7Bs3)Pet7(m`;9twO-4w1)#4H<-T-3xH%IBS zfUO()qBPVJ(!5gF)cANF$@#CFB7gSSAs3ah`t!r=LS)i>1*?i?hB2-ju;7qPQR3zdpX^&QOJV*ElJvzQXErAyNty<4tInz zK(ueF0@@4*K*byoE0~2Q(d?F7VE5w_gihjYA18$p1#8>?F=6h15)Ay`i4Ok<(a8V5 kf&c#o{`YU-=HjN$7uH>0oNcNGADf~eds-&t*u|Uw3pPyB=l}o! diff --git a/examples_skyline/accuracy_vs_fnr-skyline_formula.png b/examples_skyline/accuracy_vs_fnr-skyline_formula.png deleted file mode 100644 index 3738ce64a7a6acf22a568d500d7250fbe815d2d7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 43356 zcmeFZbyQVr_cpu`0SmBDX@d|@Q0YcdbkkiDD$+>jCQU3rHzi0Z-AXqo3Ift4NQ$&H zTN=K3bIy65^FGfnzVDCk9q(_9_l$84N8D@eweFbnx~_T6`-!sRRmweddoT>6#9h0r zieWpNFpMl_7diaKrNyxoe(ZF(gj3%I|9I@W{TTkd+xD7{1BUHAi2fyM>GfKNUkW*1 z(RNg`F>!RcX>W|(x#?(YW#edNe(SiivAu)2jWsX#1@7}_j+;3;+KTY-{PXX*ZR|~X zV&_G=G3+>oyL?gIHR4yVyS9dL-_~5C%>FC;FDnccEIg3i!SpUiUZMVxiriJPm&OWn z%ksX-X$C(kcI@DTA2&!y=-|gYAB+-yXiNPcahWn{$l2=&O-;%xtE(2@^X~BH zMdsx2J$&#$+pF(6Ew5v%f*_8Nm7P62Ihoqh(lWu_INtO|x@aWa^5^Ji;@V7kc~zdn zi3_~E#@Su!s;WOaM6fo)sA+BtqkBuiVtxo)?z$fuo9UYyCzEbz# zj}Fov;#ivv3~F#Vzh#Js^RLsAl9Em{efo7FXB{MdkG!@?xt?#`P`$Y^<1I({J=33g z-l!_+&i6cv-lE*;qDgLChVQ;>rrysb%$km~va*h+q-GET0s_jmHr?x^CHo8gs2D}Y z`~>@KlWjgfrFr_~i3;4g{k_rE^y~SGGi7TT_@(Ko3p4Sd1|p+T13cyHs#D+dU4$Rp z?8-8J`t&J*B|b4RRViA+qAM%DJJ&K%a${U6IAgLcS*@Y+#M{_dbSoU8*ShmpS2hg| z4Xd}ew;!{l!NZ3S30}hch6+cc)Yg7aIZdYLN9FeMTXrP5FHO%cEU4MqzB6fz(jWY7 z=r&A)Uih`Z9o1szDLt=*c)3tc5s}rjQT0WG^_52N4h=1RRu~q2mtH3k`=5^;Hjf&5 zyxH7X<2rs^s=2vYR!%N;k@zBxLoH!!#(S$c%6rodP5#CzQN(E?Xu5RCfQ*cc=jk5vr_Y`#sHnWuDaGe@6}x0|aB=aPo0})NEsU5iJu>T zVPWCOSaWQGWoO1nruRnb`Knib^n&4R3gNGOnebl@A2||!@7}%aj?OYq_l5DqXt?dl zTs3oKW@VKhy#h=(T)-aX`f>4BSL^z0B{e*Hk<+9}N1D!%UNtOGGguCHp4)j!a^iVJ z9LH{yt&GsCDJ$O~r(!T;@>;$L&n7G^%y#ZvQ_0rmT0%}vo5%8?FZy#~eQl3@-B~sc zj+(;tQOT_?J-g2|Be1DQe3`w@!R+w{i2v4(&#WE_khCr-Ev<*|mAA8dA13CO_5SNj#G5cq8fNCqn3&^kuC8Y5%X1Sw zh1o5wt;GED4FziP#TU1Xj38RTo?w6I>&qoBu1`V5(6pDQB<9Fj-6WlClcLp$R32A% zwFG&U+qdJy#Knh)hvWMc&5%vIqrT(>`Qhu-V>qnIIY}C}$ zWE2#sFy`~We|Id;4dvPnUc$9x=obftg(V+2B@^NB{SdpNx~%L|77iXBCCiSq=1)%! 
zvEmF247y>t_%jHfG&D5ScN-4Ya1(ZF6kj?G+u-iqyL7_#ad|fV(;iDU3J_s8IkdTs_Ezb_lFD|Nkc$82vOQe~% zCe+l`$sJ;oS2i(8@Mjb?Nv6V$u8WiUJcab6J`w5Cry?)^e8-L*hxhM~i4=92`10~p zce!`@Lem4o@9^_uW##a=PVPdWw6wH-NN0wADpzKSDh$M)UAvkf^hWp|RM)((&ctx^ ztS*bu^CM@pW9){iYQBG8i})VJXHE#=)=yDOR5TxNiJxds4TamW2Ky3G$j~o#*08cl z)66u`kB`puT>ITHY#{-w5v>Rmp3;z8|GeH7vWMh(_3l+*@*$(>(Zp;`8UHY9i6?g93IhNfh(fZ*&#o5v$u;y&b$_R&W53zqz9?~GR>N{qf#j4kjMhmRg*H+t@H zou>UbPRqb8K3w2Vio$CF^X9i#!o}R?-MsszdyBcm#PncMx4*seOz(Fa#5o-f4vtlb zV2cnLEPs7_FXA@;)2-9|bEfz2tIVG2kY#Hd8#^a6%eiq+{ruXW(r_(8d>*T!w`nV| z8;V%Co@)``o~ktoE3kc}{&nQghYuenIy0luypioaqSrKOD*rj3OgZ{MDb6mgP;JRY2wm(bXffQJKkB(Up7=B%z{Q_OuRz(CnmNSR*B3SbOEB{-PX3r>-vk3{=v@6AU#zefo5opI_ti=g+I1<(u&+ zxqfsTUHfZOqD)@|p}IZOP%_uDQ&H6UR};$Q$?M<7;^J5o!ufDsUS36WRrEv%A}P2) zHb_^&zo&as-@Q8rc+G6?$7ezugbZ_txbQxzGkv9v?($iXt_=V)K=M(90M!CZh|i+! z^ym<5z4rB$1*d+`rJDYJL)Zh3U9rM$^S7sZ3R_iAzZSB4E3xwPkiPp=u0?BtpMytl zk<-)X&x0~EGdCfhbMo`&ejEz2Y`I6a_jdmGc1WqEo2!#BVy5-90#>j3@RMi#7=&Bg zG=iFAuX1s5$;Dm`2nh&iywZ7qjxL3UOLHvDG)e^!Ck_1ZsDbN;d1DG@_h09UX! zR8%IKV}0?&iTG+J_g`-(MW>Pzqgq`0Jrgz4b;lu3W-fl4S=fjok-B2_s4*Gilakz8 z&qh#WW37Kn6V_a>zW;TnU*A-0O6DFtqGFtwR>fj2-T;tEK915cq#$}vSA(qEPhh2H zl>XLr3EhEFlIPEpHw{2mqczI$Sg{YC`|{;lXr-8<`;ebt(2Ey0%;~9orMDI&w;VcL zA zhZ_8k;4lC8A^&Ic&Ohd@?K*(GW}^V{1jsHNCr^It9magFZ;S|*H}`LCy10a@uy%EI z%@c_lz#NL@NVp*l6Z>FeE!GS67Zg9I%eNbnp=1)%1`v?-eQGqa{AMM&(^R(xFbp~| z*LRSxEy}K8*4fTey1f8;kLLQ*@|uhRjY&y(%4wSaJ=bdkKWeY7{R&7{JAg8}egS31 zT}@_9va+&0`j>*$)m@$)IqUfSPX74AqXt4V3jqwmZ-EVsp?CyrBuT<^6+v8n6;2;r zRl_yfru_$ERBWui=P8upiHIvz1u$nqdcdjb>V{vsbSYxtmveEH`?N4X=;SNUj&O2t zH2)hvb7)}ZGkXD%?>up)->|H#4B=0ICh?R94|e8Sb*a98|GxI?SI35}^@TJpWKp0>QZS$X)_u_XA~ zaR~R#KqVHx@;0Jeg(lU}aZ%V()ONr}#*Z$(qC(1Pq77$Lwvw>$9%ji5a1EWLSIPSN zdRwX%k0-HRix3C+j!RVZ`l+k_C=sE2Y;0muYmzOz6=+-L6aYyMrGK~t)PrR2jY-3! 
zL=n;u+88DwptigB?&akvT_kXDa$Z1Oer|D*4~N5<3|0BD2?>1@78DcfxqS8N>tDaD z5S6mGx0k@=Q`rK#u;U)R51~Bo1Omgm&%=2@t2@1Sb$FgC?w*oj_Xp*K5 zgM|f8syuEv*L;;ZY<70mq0C^Z#~~n0#3`jt8_1Iz&{*#ED-;9H<Mx(t2Acynb>CQP1K+HCyy0x{nWNo@QAt~ug$?V6yRo3~h z0ltkk65$DjJ9BONB~H}<=rwFADg|x{0+J1EwXc8yztU*w*}doCzp>jrMuo>??(C^-&$1_0$^c`AWUS9VK{xd&6&uvkPS@e&4IiD!~mD6eHtsfvZ{}}cHo05`} z`07|}cbR7~gsL1o(EXO`K=Wv_U{oIXhVfG@9n<(L~T3%;taAG?adqx^qG*)<6h0+`aqv zfr3XE0jUaZpr)jB@`?sT4ggDwgoLC)5_WbN6_eOlMC)IiNYYYRQnFvCI_x_oHMN$t zbq0%hOB}8ND>Z9A+3PywH$OMmJXqU&m7;idw26)Qi>GgUZlr)%?u62!rt&da5eoT zym7%|i(K&Q*IYof&WMT@x2%*?b^k~7#-8!w#fzr;^(P_303+Il)8n&)m1urcRaN=S zTUcmSlLNNHeSu_?YN>o z*;%U0*&pbki^z?={o%uh9s8#Az7&zToPt*x1GWar2(gwcGpV3JTP~Da1CL)N6JLVd z){>%;y7+;DIVmSc9h5~7{L~Z`{f9+BR#=?M@3#PMiPFxG&(DHizI+3+$TdPqNy%Z* zY$%vLlqENpW09DW>@icv=+g5BHnWoC`s{rc4naXJW{=qqWH!ZVY4UJm*q}K``X~m z`AxpQ+Kab_snAT*eq$QtCIjqJ1O8~>I{0v5lT_s;a8*m1~JYY$ck=}V=~M;3l2r2D6ds>vYk84e*fwU>7*=pUBAA{YlmUVJo*mx z{6#QsP7C#d#l?r~U!9i*rrQ4T;oe^TJ<6xPky2t(;##onXUew>LiL@)0H)EA?QkT6 z@k@OCco)j)*(UWOlXI}4DQPcU269KoXQmwNA5Vt4#CG!ULyHSiPA2T%iHX$Aj1H&h zGPgnG}Cg`aKg`^lW7VSjctj zX4pAtNOXX=vrePK(%y8ENVUJGSltH-GnyYzN@D z-{2PIy{CcC{kK)GQ>Q+DZ7ScXp1C^~rPQ1Gi)r*TgoKYr&}Z`f7>6&y-BzAwGT16pu< z1S?!ZmCuej`N?@M7q@?FBQC|gSu6@yb=jI6VuQ5WooyQMd+K{4BbO6 zLVuLlZh&Bx`rW#?o-uxhRB-bo7Bc}4G`qOi)>G)f@3Cy_G})m5_(=`0MuJjwMlhF_ zEa#S?(dEkz&~(8*0Hvy?UW3$!e|uoLjkM&JwX>&_Cz+%9Wr%~~NeE z-^(-R5e*~A?HB8c7A?`M=EYA;W#f88awcu{$xYl`4|bw$f}=tn2)jO0(BF4u?gj;2*Z zymgw&?SyzPEB$DninDWG+`Kp6bm1ryA|gNxn%rkfwZ>N|DCvHdI5va41=clo2EeFZer;UT~S#)Aw2#M$wH3 z(NDBG8b85t7;%=5#R8f|o1H4PKE6Aqg_BPr>$_-qB~D)63M`(at}<$?Q9 zt7!(@GJ0VPbk;Mzp=2pkAAbn@g$$E9AER46y$L2g9kGS{s664VhQ4DjQu;)_kv zMMM6giy&a7LZ$NKf&7BoqihznI?Pq{LhpfskTs`{2yE@Uh z#+)?9ofbwLJ0U)v0bL!hFaDXyEanjl>&85FtvMOl@X*mzlx9{&UL!=>k25R z1QtXH5gJ^0Pj6GA!oQ9&>V-fhv57KY{q!n-HrN?5%}w~U9izX4TP zEltf3c-Zth?qYc;G=$WfKt;LNvBAk3q+Po6w;t%`n4N$}@}uRI0c6zzxZbIfQV^Aa zfReXDU7`^`Js$40wbpM5)dYZve4YyL~VtuGn2gf-uNxF6eTuFw9 zvIN1F5EsaK5*(V z+#lXf7qNO4bI9uW!G}lN;JQ&sYeHL&xf)VKAUfnj>75^D)0zcSKTz6$&wPEs_$fIx zlYC}s<35Wkq1^gC`a2rl0g|d~toicg9iR8$C8Lj(e($?#%enQ;+yeb-pB!T6WN56b z0}76+A+5zOHrWu*bdwX`E&)O8%gvfveF_OoXUYXqtCmVRtOsTW993J)g9pzf_cE&9 zy7d-{%j(+NN!P6lufP!4yWlXdhXz{WV@^a0C@)3k1DA3M#V~$IV&1m<{!7mY@e8O5 zG=Jn6#2JCEGz`Ce`_`gY6H0#EU@-C|Hfx#jjh~(tJhx-k45X!Gyy*ioOX&^e2 zr>3gf7JHQ%WehyjvLlBex~aj`p*7QMSIvlWDNFE{v=C$sTniK|-=fkPq_RKjpr|AP zY3}Oz^XCD?2iC0vbG8_3dIQ)F%8{jd(|KeS^$b)s)}dlzwdEoJKqj0JnKN*o;)gg= zg(9Yqvrte0rwZ9;GD2M1kdCZg@M)avn2M+i!#5BWK<0{vvbVldyGD13TXsglk3GGf zOT7qcA{LASGY1FGrj!_BT}srfj}+DF)N}A-0#4X0!3t>FL@ zNt~1-KY6p?|R zzd~hE%Zx%90v|{#>H5WaElyht(OYdX7w@CWHEvj8t5r!dn1zx-NEu4Xgt+BpS5E-V zrs9$?S-W@dMnx_sD7i!}jDD%BQvgQQ1A-L%C54a=Teq1kRu5<@a>jC zG57bJmE4fVFr?8QX%$V)5&L_7kdhbHK>kRIi@RKP4Ae{ufbZOs!eHgd?QoJn(G(RM z5t-sOZD@lBw{Tj+QgF_!;^|p)oNqhda1S1XS%0wNt3#K`S=^feyF-?snW2{qT-u=_ z;PK-oSgVm+NuX!}u+fL|xH2ex5^KMBafBhU>T@tbTG1Z!PXJB7g&~TMi!&RjxQlAC z?9m=?qrFyf!2WcB1?n`O<>G1umkz({>RO`XM%gwC0Xh91Z*hWb4uRp^?VuOz^d+!47E%v{n56?&f_YS$o4| zgBR%x657E)G|^j}Z(T4*4pxgvB!Phy2Nt{*!rJ}){l@xG2`w5=c#ZZd@mH4`(j%eJ z>IgB&5?oRZhb+DB06)VI2Wz454X->YxwWFiUxLTDyT67J0xUBF1C&-=`7N%MTL1j= z5>gsJtVrG@8n8;(x$G7~4w!i3%0K^KKYmw~+rboE>*u;8{y-r`ByoB#v zX^RI&knL|2bC)it>C~GJ#bar5pclL$PIf}IRT;KsubWZ?iE<+=OI!Eu`%b${Gv;8& zBsnK$n(o0UY2lAQj{W8QyYW(Z=)oaA`I1JnU(2FRLbDQDlq7WTVgTWQiQqJ4;$7aV zF@SYFpD@28r5ScHof?b)`Hwt!@ZdB%`&B3-JG*dDs@}YLGt&GgsDBQ9UHi*w zGZsaGmVl#UFRT#9F5^%jQRG}eFT~w{JG#(H#R9EyTP$An`@e<9|4n{Nx@3R^P{+Ci zJG3QDC%XYJ22$dE3TjQjMhFbSHY#`{izT^(KeTCAa7f4mq~$hn3L+&gDCjWMkqf@M z0e3*i5fmwfl0F>Qm2BVqB6jhmkiw60+=A1{HqwKrWbsD-(@9Y 
z_u~P}IQS-zNN_b7Qyz6KDh7+{R9$^N(zYCgA%Y-ntu6wU!60F_0(wdT%!%B8x%zBa z8x9;CQV)^XXn?5n*pd_?`y8(EI?!iKBWv& z7eGh75wVTMRyslJ2te2gZEflsgVf%(YPoPx(w&`Sym4SX1y(uH^yWTt5rK4Q1|C*! z2XHt*Rp$f_WfZXQ^r&#c6+J%J-J(x`q6<2B!!TrZ&z0d&ZU+?QkhLzeY&AtON-P;7 zUZ-;ydaOe`9BJYpebokWAe&0AaorKb4Mkn1gM57_sW$$7S*Sw+BYG(gCXxheqksq8 zNkL(`wYkwS4B`wVkK(Ukf;LfrCfRYwdjt^0>F1XN`t~)f+zWq`w7`dR-y+Xdn-CIG z<*r&i8--yLr2cm$q`^i8YGo`KS(1~IuE9n@)y}#Iu*2N>_UeV~$K0(tF8aM2j3`NmN$AY>ef*f_ zM~eB#9*DV%A89(_n`iZ&>ukOB^}KIRV@+fc`-B`4ehv+ZARb5s9(UMT#4m58sXP}H z-YQ1c5^g(AsE3rTu+6QX5IJ#CRx#vbB`YLROZyA#`*&crLE@TS-h3ALaIZYTFRez++U-x2P9Eg*~ zFcz52SboXK1a=&doP}J)t)YgWL)+KjI*s<+R)FtdAR&#(@fhn}f1UVPAJYz_9f_ zT7%fY$v0Pb+=U@=0H+cxsDJMmn819cv%a%Z2h3;shnKiJr(5sCd>;7*?jjL%*oO53 z<$~DPT28}|xCavwp_!V3-O0-I11zEM;q#?*dZzk$2bM*Y& zYUz2{Qy#UCbfE*H=gj{y|Mrxg-P)=DxRimEvW;Z-?%Y>5EuD-o_c!xl5M{_Jw+gL3#DsnrQ3xJ{80||XoMZe<-`t$3 zJL_|XwC|WgQZ)Fi%x=J^ft4lQ`!OGAmgR5CVW+X;xAP!=_B|W6B;@$zjZGO&T}Q{w zM#PCzuHQqg?{#->tl%53o3L3bSngc2e>B><=jq;{oV)b9G27^wawYVe+S)yf8%v99 zZIZAw@7BQNQiWe@rcvqVnZ>*cFuWmjZUO2FPm`co$e@ zar8*))tm|xzPsP&dah+?46JiEcVD@zn(v}|2M*M6^f+8b{!B=1Xds~=d5^_TSOS>TMu>jRlgF@rk3K=& zDHHkcWEC%`Suqq`Lme#2<@FI9D+N(8r(xdhV#raC3taNzI4ir{;2@!1bCX{t8<55sIF*>0x&AD4o&R@Z?Kwg%VwLsUEjHg}Dx5J|aG_>z*k_+k^(G zdo4<9YrBybyTHlZUhem^xZ*=Tao%R; zsS=iETapz>_L?u*H6m~?eGahNUU0VP7CWZ{MgghvE!5OPL3aGw4CFW0bIln^Ajn4h zI)#i|ujLU7&EaCNls{AjkCaCf(s1YZRE#DFT|Sqgre<~;Q~p5f7aY0q>XmkW!_&*@ z*OZjB5)~t1lTh$OO%UXqCU7)!Mn=Pz0e0)S^-!w)Hgo23Yj$nb-4yV{5w(k*cRzi6 zEhOKifaNtL2ATse)10R`g-(l}c(d+0Iy*ZbNKsi?7rNEImFgOCXcv7>Pmdm$xJ5#x zFI@_H{rdV~56Ib2UQy~aaKCdWm_l6#wsv%sh5(g183gKyLfA{3R9|Uu*OG*eX$eqW zlT}b?2SvC&j!@|dGG}l^#2FmG_~;-Q{%7rjm!MlpFnnn^{&A?|j~fa16p!(r4SpJa zjY7A(ZC=$q5>H9j>fQq0v42O;8MfA7G64}PSGE(%d`X|S3|7wp#kfySnbM(aHwRr= zbd762Y?FRAT>YuDT&LkH8_pj*+TTYwNM4wk!ONGj6i4!7KNV*709>!R(4>Oi3+!j; zk-^r|5S|adX+V}r2!+w7>`Xb%QNt=G$7wjy5|JbW``_#VI1TurSVPGyp$9hLR`8G` ze-+g3EQf1@8{7bUg`c~54}d0pD%qK&Uk4-f!Vk4S2HF|(@=u3O-M2s`iSaBkuukiy z?MfoQd2`rBJ>85Ui496pbez7@7OgTXc}F<2-A=b2$#Cx$`Z-{*1Eb5T2} zd$3WOig+@30*c;y* z7)zH>(zc8TJIyye-14ZD%RJQCUSFU|xT?!;Ikvu&io)u~mBCyj$)* z6oObl6+0pQsNnqtEXiO|6id>l{Jq~FBt19R_4y%w^OnnClCVzOg;|hqP+=BY_bHn| z@Y5zb3J8-HUb7xpHWP?~(7YE)hB?(gs zNV$MX+|Xn0A!u!^II!Yb1F}Ys>Zbvr3dP`<0x29lVOLX<3Vd3w1srRc(8L1umL_2V z&s9f3&7BzkeJ|gt(`&AI@pdsqeBX>>AX=rsqL>87Wbkmtb%8=6C}6TKYi_;(2a>aIz@)FQ*$1K>$G-(0;%xXQ)_ctN1+@HBJA&zCg}ThHk_ zVXT;tlN1D6?Svv`ue^1K!{Z!zHLs_B07}ROGtygKl z{SO>CfGikb1#{{cLW}Iyty`!_&aGbr-w~rbK|Ybv+NoPF62*FIsHB5a@QiKwvndx0 zo20w#6A}lHVpc<7y-=l%HvlupbjgAuXv>=5U<99@3Y6#SA|UCQfk=SdTGM4~)=*MR z5Rs#ph;Y!iq?y)g_oCp_{s-n@TsLKIM|)SW&s7@Pc)+^{;xLk1VG5#_-2Owy$d&j7 zs6z1B{rEI-hB3*ZpaaS+=)TvsDVVV_t}4h|L7wGutS`L9wr{(!Qab4SNUk6r;zgjHv_0-()aBDS(2UI=4nf^OA@|NH z=qEu{6sXj4dWZ=N>(tcL5aN)k3}v16ygPcxJc}nrtuMz0z2>5!fgT>&4 zU8~DO0gWpq0|Pgv`pf$ZBLV|s2L=ZAj3y)`OhBzB)lvV+lP9wB@ygc72s^Q@g_wzQ#;3b|1b}zY-$2b{`@ai*jTf}kkWVw{?86R?vGimGZ z{GL|bwP?9Px(JRoy^tm7=LP@4Qc2@85YRz5$pJSh2nk#;4B*lhu|&O6M5rw!gO{~k zlUF&+!Skfs2a?s3S|4*YCo@uhi^g0G7ta0>Nyb|!dLl*XzBu`+Jd__-S0^(YMH|5# zirSUI@YkMxT@ZE5ynOjGT*9*m#iNMO@YSuZ;+a`2=Y#kzNe&cf^;MY1uo~w zT*%Il>YdDAL&2!-_t!Aq$vDLdMyL6sxXrY#+xqpc6nr=V`n$Bmlp69eC^w0e|6W?^ ze6afvA2ildYwpx56j^!3;@x4Dbqw;vP5v$p24qh|{tPJabXf@?Z%F*nhOM1qU%q;7 zPX6Q}IK4k~gnd@_xy!7x6Tf3YX*8Wkx65~$+~cxd<#TFn_fBi7e%3J0BEduXO~hg3 zxx*Sf79F$=QE=_>vSnRb$?u9Q5lc>~opZh79a$0{`A+C==dF_af~l!*DyKHunwmF@ zw=`D!XS6n-c+g%5@;+%eKH0jvxxmFv#rrQWz%2y8Mo*y z8J!%P8-Kgk`Js?K!F0nJndzrBs!XbQ^upNaW4`B@$0GlD!xRXgDWK{yrR`e27+Z2e zCiYCX;JLmJuiM+327h~5jA`%pc+welWBp^WdDhVb+}iJ?0OP2DG0*teBV<%a*DGvk 
zH6|g@jZ>bdGj1-sJ4e-*mhp%xe@RWc`tc!EuWOWo5#(kpZ%tU0X6w+KuxU8GZy)t)$#fJjF8ka)Gj~dYQ3FkTLQwSi05jK zb;+CzGUg(uC-dekg;FTlwfQE`&CK5ymbuP^)4Q}uR*AZ17tzi1^eIs?-U&FX(IKA6 zc|l0^h|XbJhUB!DI%yeCxlGRQtDJQ_Ks*5=E%?{$%_jk5p!!xo+aY@TNior#GcO_| z(?Pmc3T-}@k||q0+om9%=$viy{g^)vN<%~d!T6$mNBj8t&Ud&aR?TJ)Tb69Ravjnh zRS5q@J62mO8@W|{(bza1BBZ4oS<43&VnkTwrh)G?@usexHRT>r=bZCIFpP4%dykEogJ z*d0%*Ax4jbsj(mLhxZ)z2hGtOoLahN9)-xc2r43u(C5A7Nv%chs!XJDr=|5@IdtHF z|KaPI`5Uv}zOU}95?#*e=FYnD;-a{vJxLL`KZ=G!t}nug1)M;^2#dx28x55jBnA~G z3txPn`u26O!x~Gpdr2miRHHDc4aQTX9HS%%KoJ#}!^a|5v1K8+FCTZ74+VcACJw52 zy7v1^XaT)AA)RyM!wytdT^(2QPI2jTt8N`8k&3)oa$jlaR&=v!j@+P<4-ZH-~!x?*kB#n(i*?+QFD$VV0`?68Rk?SY1lGVJoQv} zKWlmaGKa;*pkKwb4{VL$RulR%kJNQ1x;1qt%qV1aXDa<`yg&`B#Eh5< zJyGn{Abg7UHLtqvV0UuPTXovkD+){_z{uK)AMbLQ!fregEsr}IkyO|k-+i|mlvK8_ z*GJZjO=L+(GVYL&_&Z|-y3^|frUQBoiE_E5xuD-D1PRtJWp~A;o#D8knx)MUprf<> zvE<}Xn?{E4WS^0Y@)`<0YUL4SuRvcn(3YOe=dE}NybcOw*U%7V+TDmv)JksJtvFv- zdYvU>!twXrnMJ&P?w;Fb6gNmQH_DhX!@=o?wRuqjJ9sB{n!?jPUGc&^$c}Qxz5#Lf zU;b1(wc?fbo-*d;&?RF&z7crRZ^ol*8a(|Pr8_PX=k*Upi(zy$ul;WBrS#=(^5HS^ zyMtl!AaRvF=g%X-&iP@K%NMxyDF)N!tp8+!7jA;uuLygZXI^`I*7)cXQpg zzM70Q>~-(J&@Q+Muidx^GbcvJFPOP9(6YbaKHNk^u=Cr9G?`TF$Ou2P9^DQXSPlgb z(M`f%cY4^G@9F_R)xZ%6b&Su#+F;>B$|GB_%J^JgRwijmDs-!-NTvJ8n09%W6v1)% zoxY)W58lS|I`uk9dfZa?cD;RDDB;fK%R(P{(brQ7hH~B3kk+Gu`%p3G zP1f$j<)i^6*@VH^R;AmF9yFVusZq5CJsUXX=`vsiBdQSw3=v9>eq=W0X?R z%k07BCzl&*{;j|+7I-zX;sE|^`!6$=^IG}I~m4yZg=yV7dch5t_5Lv0f4Mx22*=H%qKs#l% zZ@GQ198TV0fhOWEdCb@t9H>&*cdN3I2Tu0rxfPq-{ocqI;cBRALH{}GOhdwqTr`jo zOF-Xn!b>^}vXBrjC;N2XyvYH>4P`*9r+)RSbs;YaXDqrdKzErA_tE`t&7pj*308 zKI|Xj=cyEc(lBy`i*le4tu7ssGBPp>CJO(ybun6@}gY6m2G!vU^Gg<*m;)|f1q}ueC zLCvlepd<2GorWS2RO(y7Bi#-SG)kdVu4G3-=I_8J>Fkc82P% z54ne~Ny5-b{OI0YwmG1t)BWwE#kN?P|Ld?F*o{4=pxR>C-y-cF0oq5zIA2ah1v(3c znyr@{uYh=Zmiy#RWm9ch25exj5GiF$!@CPYSjACTEN!%!u*!)M~}9|z40?&BL65^~s4q4n1_5K{j5$jdMeew*p; zj*^vjp6#3aLSPYUh`?r9kH~@l=H8V!4{8#+S-S{fk^!y@7vkpaJM-q}i^8%@vT<&1 zsv5yVFK2slV9=PW!wz9(_c$z*xOzY^uCpPX-3?h>l(M~z>eUBYq=cVR>N!;=(Vu-H zAwt4FIJ z1v!wM#Oyxk8R;1IZrHr7`U2+$ZrL^lP5u^zIwB%qWZM zeLA%C*yh(#yv;9Sr6+nytnJI+ekBfZ0fzgZ+0_!c8o@@1UOo1pSOF&KbNIS`$o7qqq-y8vG>a6*2tUYJ@LUA}$ zLNiqZh_XdrNg?V{fCeQ_2w2bv%Dn*)2L!V1Lo=YiM79)W22P@IkS}@M%e}Ew-nF{Y zsCM|kpmyOi_v9T|$pJMX?iWEpaS(=}BTWiiN66|2f~?-^b2|Q7w~$}xT$OyAXt1A( zE1zxR`!=PDFH$$)rhOC?J@YN^ySP3rO!6ry!wNWp0Vr$>;-01ME~wp69KYBETs9rJ z2afQFnmGg9g%FYz!IEaR+;CP1i_&Xr; zH)NhY@(lU9pe;tw(OfD}UHL`^d{F5J_)7zxJwDe-g^gy<#_gFe`0rK%ay_V(fJ_t##&^uIIx zkz~OBf`DmK&PU?Or!7xajjcvb?rCENq%pe(;7#ON4Gp)VgP0%g!;^5HkOFm4<)hm> zg)aT^z+b;WkT9mX_79wNRAuKjPU3zmwE@Tq^O33RXiWf;@^qhn(3K-s$ggr*-VU)6 zFMPa%=qrq_RLLgh9C+0nvwdIe57cQ;{SRyz`CX*)**Ua-Txp8+WtBo@%gak0yvmHO z|JaU z!@^X3Cc97XL6Nv``=qy$r}b>04843w`2555YweF6mt3h~d@!uUzh36Xqa`#EQ0o0V zM)o*V$M|E;UDnQ^SGFto}|uEbRSIO0bI$1uB`C2 z-T2wQ{d&v$najPh6j8jwJRVaUO6r(u*8&ErsQzZPb0l>;B>su=rT8_E-K7U0fMLfs z-hI4AR3M~k9eep|20OYvmM^q#?zG;%>9w?TJMvM$!&u;w@7v#gkn4E`q(^L41$MqV zI6DBu@R>G5Jc}}fZU@x11wAU@6`BCE{G3#<6zQ#p2MV4*a`U17&wv#72J-xX+tDpa zwt^IWI~v91nEx7G2@bZBraOX>&h3H3&T-x};B?@q|= z#4KST1L4SINR&mc7fc`A<*{{`t-56IdHqo8yYf|7a9_O~EF({TDI4GqVc zGhgGCfLccwNmxPq#LM!;y%6wj=F5$bLj*JumF$0i7ANp} zbYjPigOXp9Okvsi%oyKZpLM&wy@O*84EoX%q4I~<=jSdn{e4^a& zFyZF4l@nm zk@K=`oRg%9vXaEM_dxmV$&Y%X9tXD{3V`k5{R7=c_7D8yqX61;bX5P)J+re4eD-q} z>;-RA#flYBUyIKOko|YUL+1@>SwuagU}T4Weuw4-)Yb_nas{FK>Zi-4aKIl^5}a0w zMOEDS#r>bbBE40lN(I-Aj9 zTyQKz>9bX1V`DhjP>v839i0i7Nx(5V=`%VT3HH8g(Y}bp^9+F{C1OgU1MTn)w38*u zM$29E{C!^X$0jXI|IqXLZUf*77j{VK^IE%uYk?ijic=CNs{3ugQBCMXCTPfSD+2j$ zZ31DC@3-tUgxLCgjaruN_LUy0{=D`_9MU@uf+g@(RD}ZhF89{wJ;)NeDC{`)Vqp@H 
zOEQR2l;A7HFtG<53`b}GTLuMv#ktnKB5-g67ayNGOv{2}7hG+2eCOD|;2P+*oQmik z%vfAp6iKFZ@-_P5Da7W-T-_vUr&mH<9cKsJ39h`c!vUol6l#b+p@|~Xx_C+e9q)j; zCEx&$_j13*;lQ#q%~U40pD)SNnq2g(&40sTWfnaJc4#K;?Jwj{n`5D(Z0+c9h-aTa z22U6~f8XGKuEUMFswm+Zrmut!g@E%V5RL>}ZaR@-bZeu0D-nPy{|%?ISiJdfXvjcr zZm{obK`*{sG&J|2b4k&W2I!m@@TK-jx37}B_&?cp_oIpn$dcm9tUaH#M;VVzmQWk% zx61fru%nZue%3%YF}QX+;FJi9U-{5<&nV%c4WQgzI%Jf$M<;w^-H0E$-|qC4=;+ZY zd-n*l5l&el?<=%A%u5B^9yN+|xskRY@!Uy)o}?&>0j>teGJyr1m<5LOvLq!9jbJA4 zbvvm4X1=Lgpq)Nb@8!h|=gc6}KbX}ibrX5)MqjN@8g79z{SBPAmhp(2nhg|QiYk$r zO6|@ZJNn|(bI@|-2`1a4xuzy2MON3(9ML%w@@y6PR+Yiikfv!_+P;fVKtn@gb(qJy zW~04yml31#(y>W8!5xblu23Z8S?Ph6>O>e+;3bNH16}}+=zg|}nl7*BS#2xZ!9hVu zB_(>$N|M`f;sLS+!$xwn{v)@hRUx7mEflEi3r=SiT))SH zA4-9+@St-v|4rRPcm&cvUgQ&P(ouBc<9VPzmFM;uf#tLcPAjpPDu)9Fj6hSNR0&rC z1L`^861PyJRaIBEsUOt8hoHP}JDv?mcZgkb<;l%skKRKEsnOS0=c7)yn-)08HwZ$j z#}L>e%q#ZXB>&rtIK)iWz@HR#(50u}^SaknHqX3IRpXbW0q1tn@F zgAtF|g1$5soMtNV?eOzPICO~!BK;(sqSFCyNSCCG71%#_sTc;861t0C#6VRMbvSo; zL7QkG1Tgv}PVmdaC#Zq()5D=gL+cJB&>=H4uLo7%C^&;i*RUKot{)wLiiF%wACV8P z6naqz9MIjLkTmbO4fRJt96XD`Vnn@(nwwi8FuPZWcEzaNC?Vwz;BAs(ENIiA1sel2 zd+DNG1qYf=+^xl8tk3uTKkdDFIMsXGHol}e)z)MRg`|WC$y`X9s7#5JIZ~N257iFI zP*R36Q!;KznN?US5;A0nSe1E}d3eu{eee6;&po{N{k*^9cO36~98Z7jVf9_#^&LLL zb)DCFo);Z3+2hS_(|ttPL|ldSM<10KRPFgFjf18RVRic{Z%0wHM`di0sG8~T0zYk< z4Y_sL^reD{tU`S?1ZFhK_`Rh{Q(1OZ%Cfu}%yXMa+S-7VVu z*+)urYW1wSI>6*efdC;MTte8T9R{CGXJB0i z6OGuD!;KnFe8;dO2gZS5b^&5Vr{45ZY@AXb9_>4Yn5%ZZs&q7bU5JwnA-vfl_R(qO zhRg~3EO9y@1yTHVL*Lbo5km8EoB9+9I7$PHdKDp0!3|fAY#fIUc@qx{1d02mBlp=H zB;MZeN!qx1bMnK>1r4W*;A9&IZ(}}amLLj6#o%heC1SGeH!?8T zCTD?9>isefddg332y#Bk&F8h#Xtj@zu~~k}`58!4fC{JVBl&V7XeLPr-iU|bKdM?OJ0LneQJ0*;>B3ltvh_X ztLzIc`uAtk_lzLzVJ2pCxF0{ZOIfeAF-}Oxf*>ardL=62az7uRm9a1qpC}P>JMRl` zR|$9^TR{tNSnSORTHqdY*8o_TwVFYvJM`HwfCYKVL8zPK54Sxw@EXM-{r5#piG{e#fsE-3 zj~Z@K(HaL>|;q+))!eKB9UKIMw*m2=AwHHysX|xuo zmH^iz9m=KP0ad`s85J8? 
zoL5A(DIGk>it%_5f1g6Tah{n9rFytr8U$Rz!~umHCcp^@2Z`Gm4Q5;-3`M)4q`E-7 z0z;s8viecZLPG#g#qc)5_07NySRBmRGnvGolB()Hb&7^5Pldap394-^D3Q z65T1-T3!L6iE5FS|D!nqjw(a)TAm;J|5p!RYYPh8F#Pl>9oUs$^+yi}yhWsUB zWF;DGqQw}B7^8}cio!o)ZD4`eoJs_KmSi&f;dXUeVlmRCFXJ zCNd<|9`;x&%_kyKtqr5!^URq6Ydzao7w9B3;D}aZNy*8gIxjC-euG@teX>m77aq;5 ztslQo^kDxuF*(TspAg#@u)JCrxHfVfUX8eiSWxb$@XGI;ao5`^pPMm&%uh112_8zg zpQN%nhMH}Uue{<$#Mn6_3HBtAKS_*7tZgVyT)PJMvv896=N1kQUg@aE8xcGuSK;0d z&zVyoHp1gQ7IHjPOgsIh`YVyw+c%7xp>9jKuyxa$v4o&-f@x~grLU`-43HoJZfuxC zvq}SZ0SJK4SakXf7S|V-z9f>o3G2B|wskx)pdnsdShj=V8qKdV)pyyQzi2m6WD;8q zDE_*)&2x&m4tz|@4l6OZTWg-*JHAAIHgPUH+0rFT&CRXA4Xz-V4QP&|Z7)WoeL!EU6;Fk_gq;=g@IC_W6y6O-w3m$@f5{ntU zt{U+Bj)jUS1=5O);6+Ma&sn!gW3mxcIDYIWJXNeakD5HsXI-Qp^zBanZcQtigpED1 zDasv{!7>6Pmnax+9%*SDp0P?6$HmmS16D&C{k}m{hN>auuA7>ecoMa1oATs7*XD-3 zUoVjRC$dZ!*+B9eToECYq(9rOb@=#kO?CAeV||DzbpcNkZ#`mUqAM691$t5>ad?6c z_R&_i=@g!)sGMP~dvQi=4l9Qy(iu47$H4XU6ej!AbbXkOLCG;nY*s*O!me2d7bY>c z83$q>fs%E-ko)8jvJn9WCQMm!5tw*H08G@58YlsRw2G=%5=5220_t$3c2lDlh^-w) zZ^fQwVyV5S7ezhIO}9(F_r8=OZtHz5`DaKw1tdJ9?SQqgg$rk{rTl#IVyNu^afE`Q zXbN=chXSERjV7eqK*A`7rPQHSn|6T|VT!@I!xa{zfpJ8D3NH$nnAG#&O%j)zQ>rrY zxGduCT?gMYFmM%3Bk|15;n0Us^N4gHz`~bE=KgrLy0^V)tH%?@s5UV;89pZNe#Esq z7Sfk4LVSmMq}izti+l>~d-=d@Ba;F~C)bXQxz0^?5H1I?B?z-ft%8B===Wg#vtu%t z&!m+@B9`B04Hw~c5sXpvl6Cq89G*a+s>d(YAO<00XbN^0y}fb}Jre^2R2B)MKH3GI ziAFpoJ>e~rNQsm+@V~?d1b>4Ss~QY|+H4zB(oTSMi9ml8WtD2;6ajTDoWyp5B0`WM zL~38AS~M(L3-XDPq|D^Edt_CCOC<3^!JDeCaii1(G*Z1*eX;o{Lny4IC`(K^@Z@8~ zFMi&l0*QDv%wV3wql%zD#2b(>*i@_|Hjp3zHY|D;G6b%I+Z~>8vU7j7a^d8M%ewtN zE(q(0@Pn{Ph&f8i*?b*@n!!9W@R22!{TtV=>6+h*?ZQwB6(zE9G2SVIgUZdTnQ|Bu)%20p8#Z32Qha6{TpnQoIg z4_pyJS@Mv+o)gT?%v}ON!Ou9}TKbUKJ&|~X1hLRQTcF@US_eHTJ0NpD*@CrTc5J~_ z9xGlQ9H@1lOc&IA&PAXsiC8@S{L0cL2x@AfK1Hw#z(WJCo^S*gO6b@aV~I~2tXQ2u zVt!8iD90KtIN(!Qemsf$+yW=Je!RI2f$1f4i2 zdF+Pl6y1G`MwehK(YrKvDN{#C=Pmcvt^45s`GAKwZ`A`avjwWWG-W*{b?L(4?y5|c z%p@rwj*plwj{#7k{&W;~nVX;wZVprmJSr&doKRF$R0#IbQP~r{+wx z=NC>n96EV25DWynudnDJjVzyskL z7*KJrXGsD|LzFTk+aQe@pv1rTZ6ZY&wKYIiwKxLi3vF$Qbgwp3B>_e?Q&X=EnjRaV z^|{NAXtcQLPQmvER-QGVj2#333V!R8(<*q|6tKcjuP=mtb%@%D6VZrDv7S2S1o862 z_HiCDR*srO1$Qn)4O9d~`w(HhGe~#HdBjURx-lb% z%~pG!CuSfBNDVF+P4M6elxo*!p%lD38rwWd2{iinTS(Y2-`P!Dzh*G^xX>GZxsR$9 z@6fe&Hi?QsyBUUwrT!!LQ6=~D1y+Ulw!b+7k*7(;qYF-Q@T$^;6%_>b1m-8=3#@-d zo~x6J%-so-cfk&pj15=~eoGvm1iOjYwB~;s!Sa3rh1O^G4@+3@SF}Rr`@i}DeVm8xjD)-5Qj-C+S z3TOK+2RFMTO&h6Gg1PnynECOPh6ZuYb}AFwJzMz7b-vpkXTfKg8YRSoBFtTlnp88+rR^jblaN;?!1Lis;VB zazvCkpfT6x43J$F0m{OM$UL$05Z@nypd-2^-eE~GF$bV5(2SII)jjJB@6v_U29MG> zU7NH}LFmdSAdo`MI0ixtv8y0{NszLH!weZarughE<&}1+cE>|tpf&e=eK}>-67N5d zY)q>YtJjUio>xlHh$ARaHQC|H4hO=)iQu=Ak!&z{Al~lqwJY*kt6w!uj0oX;W?q|k zA8RbBhY?g3t|P=rj68kPhY9OHc|3&cgUoh6$aV-NlkHlOgVq8^j>Qw<11uvqRTc0Y zxgp^=5z9EtUIG}PiOFzGy=MLBEBdu&h&EvHl<+gZ0ByubG>t!jn_E@g-7B_Jw63T7 zr&ll$D}~`%{S99(?d@SIoVfX5*XbA%u+|phjZ~yrobjju*@qx3wpA&96|KtfF@p%}*!``XgT}sgtG!T%0u|Y5 z9pm}bFUd$}!5peHrf}{)=K){ittx1WKzQay2lfKeAYL7DIvr~@ds+(%5z0IUi}Og} z@krSe;-P`ZK{QHQ`mWo^rsbG2osDlkH~GLyY-$j_OdtQVVs0qco0pi+B8qYyh^a&lp&N)Rc%rS%qiCSrA1*Z#w zFa*qi3K9J&CW{^1B`;tGlM#W<%obKnPa#;!!A7fh_T|L}5pZog3XuV85@^bkfrUg? 
z#72~8Cj$cmk!U_f?S^RAx*IZ{09Vo*BY`RLs0NZo3^K*s=Uh~(5sMH-ECxi`WE-!F zfX?$u7>NArj1Aa{O&DMh+o}~5EpIX+wzHW1f{g~-Th=S=Q?LJ-ZvH?usc%lj0HXs9E3>h}`aS%qgr-m(=HMV)vcm-PqzG$PywmpN!Zm2hF zm~7D;0*68Fhke~?2fTwwpSO;GB50ZBs!k2M^C;je!v~+p2~E zYsV4UNNgg&@;>*?QNnnsk`7WJJBe~(=lXDodG@V8+QG;*|p`I$~M8X{) z&Gl|*zXaBHm1;m<#6c6o!4EAOanB^?C&>+YC@4N!brk|>#5ErU97G;>nfV!tv@dz( zrGRAAc35h5K(&W8D*=VFBr=o{RuczR zf-(}5bJFrl6#a#e#HZA`aw2UbRyD|>q3%0Fpc2v?j@axjNm4#UB#a9}?dKutOG-*mQZ52Y(S+qHjQU z?fUNRL>qm)4hV+%*JRc4W80Hzcd9_%r;gMBxaKH;w$2w#m+tXkMIde8{>a!7U8*cB z-grj3)~&<`59QVx$oN(K_s_%LJcx&EEpQ+Tj6+WqGtdo~^3l2FtoXxYu(Yj)INf~K zom6VEzY+^x9u*|`A|sX*wMgb39^tx4RutXzSFSA2)ZdClvV+A3!nfd67jeH`GdaXy z4XGhRks4Uq&tXImJi8hFM2`35JVGFGgcuaprRk@^DILjcE>=`|_VdX}8&PL3-fR|C zpa@P-;Q`?7joCI&crIW|PXy4F>5hUY!WyX#ufs}8H;Dn!?`Vw~cHuytY2zQe$1s@S zlYfnP{sAr$&@a#>qY^Q!?8H}klHdPjGZl2QM427FM`A{Aey0N%9Wy81-u8 z(0veD1Ol(SJmE6Vn(<5#@cd)(wez);g$ki2xcumjEw3^Kw!Er3>qSt4wXzfPc=IH$ z2V8W@VjdxI=VkZlhs2v3(||DOh)puj1?=!)h%c~5hBLLPHvwb>17buUZCDIeL4aH>Jg@qwmmhk?bp82FibW~9#LA#stEW`MsJbeAfSndZpRQ`W;5me z*OhUgftjExfR72T3P;oP$!GH$`|7b;J}Y=HlJc1L1rFzH)vFy4k}fQb;m!z)nQWr2 zO&Mi~eO3}}F~UueGCJ9^pr@F9##BKu`^MaZ!x)J3YEnVu4vHu?)PmcDJG!7>dc{M# zzUp|TsMVvko(Y!0)tfu`VO4R|3_A;utM?wCim_v3iG1cw=d#H#{PMZvTlY>?_DC4 zNUFBhJYhgUr+`2IsE_)$>?Sa;{_p&ntoh&5t`H!wiWqql;Te|MSyU+oBfDq(3pBt90DM6<_Nl6{D zCu5r?eG`IZ5%mGg4dQ_d-U}%0Lqcl6?e{Zp*O6I*R0_3K36F zwGh%D2+;%MV1Wnk(3;Cby9K1A9{%NIAU3^P26UO|Z2;P<0ShF}EKt`AmWA5B5tEvy zOKyHL0Cf|x>Z%5T3~#5?@sSn#IrZ&NyWc++i~F#qjU%TH`yARd~95#nL@lK%$lGxasfeQ4a31lv_& zydTrCYUN5%Yyt=;k08vPLcip;YdZ-Oa0LVmDBPL@k|t({m;vyiAD(Y*`LgWv=@xtX zd4XL?my&%HPi2%{H@d&**2%IuNm;`a>r`IeFO5t18tQ*btVz5kRL3H);`ok$)k{jL zW{S!I>FME@PJa>>_^ioV(*Hc2S5zpD^+3b%b2kOd#cB>!MwQ9BKk9w|Y*laOqw$f+ zatYIc>1OAdcPaIgPG9Pepu3r6u~_76uPF`7yWfCIhg8* z#8%bV$cp5~$J5hO`^Cl6C|RE`7PGUn8;a$e<>TTamNpIp&aG#BPSR2M=%g4qq6-V< zuv5vpG&eVIZk?d+J*b<3NIna6Od6<syU^)G508ia#+zL!UKU@X{qd3dKOIj zX^5`;S||Kky&+U+O10y0h#^V^Wg(yK%hB#wg-KPBfd{kL0P=-8bYj0nQDD$B1r|Rl;5BFa7 z_x48T(&-_<2-MT2;Tk^HxOow}spUU^{!B*$H56X0Q2*Obcy6c{o=w)x7C{$bW=6)y z>E@31XfF;QBtpAk4b1@*?@&!`?R`v3iqGp`{Dq&te5vg0Jnd}*D&X|sM_hp);=;$D zjp-#&MZ?hqk?$AUnYhft2UEU0rAbi0R$H22>ex-mJ4-{42Nx5_>1 zG(Ej|n5HD^yFOh-;eDLM9}~;n{_0w@mA-zz1Q*x69eR3jVi}cJxY^z7vR!)ZzAIbo zIVgOvncrEnN+>RT8D&_(!*gYAb>LMF%3CP4cGh&)GWz*(e-sn{Q9)0+$&IhRH{yEM z;LWm!tGnAetsxnfd1_{wZ5}Hhyo0rw+C`^-xC4dEjMlxBZ}(MxRr}C8uJN_A<_|8w zYKr5NWvq#zvjucp1{Rwon2b#?^~hTv6&=Nl87ZAJXAF_F#I_V73pM~4 zB`+vBr$uAOB&qyDePE9KFyE%6A&4|ljFKD$_ z6qH`O=7$?OC+a)vQc_Zc5eAx>nR#O^EqfI5ewVsFz+A+%NS(PpQmqrB3XBQ)N;-fHk`=g8{SjK;$qYBu9> z@0VAq@$2YV7l~6PRXG>RQf$hK%iDVPVv~7rD7oq`(>Lrm3u~Y&&D?I)sW( zDKe}P9scd9n3*s7kGRjj6&vKX9@&o1xYyKvKbvuy?a__yJw3e@NjI$|UAl5ADs-2T;-N7=fpbOT76>V?$T)~#G>G!`(*rssr;?$IN?z+Z5f5_WF zsoiz&oxo|+F3FWgJv?;`0#{QW?WSWjzojBhx$*`hcpHv+wE`pfL5LB(dQxo4Zlmv8 zSH9A0QM0(%v0D9MtR-!>d=I1h5Yvls-T|@@-0w5vXE4stw}0Ny z#aN)|(QSnzaU5OeHP3|)O`_>v{faO3`HMF-@3N(Y{6fc?_@&C^;lrD2DC@c86*Ac@ zt~%-IUD`G3uzf?=ugfV#N4b7A-z)0)IAae}SWh?C(q%Rs)IHIaJc<$! zs84NPcU+;(@$C@r!gsjw3rBcG{&El=k;vT@H+i=%TB3I6=|%-k9WAC4`jO6Jmv1ck z1)7tP2>PoG_%=OW?DBd;K^ukq^3 z(l7>zg=4QDO=0d)7#VBZ9jGA@Rd>_~%17FYBraC1R?kZ9$cdp1`AZCH zrdswmrwCnYzGBF*W)1a>R|~6b{g8vQqN0)PotE`qs)sB5+ms`oh+{ykyn5ia+2OOxVV@eKm$B{)QWB*cwAknR`w=Ji*QGx;z{80? 
zYCy+q`7@@rO2YWtgpcRD)xyTrL5v-&PeiHRN>tt5!dSlfZkthjA56;gKGQNbq;~Cz z5$ra=zuG^}4@8y*r_NP-+cYFSTOTIBoRKQ8P!jf*i&amL+Dlg}G;*yxq)q!AuRht@ z4(llvReYh`;lwF_3sTv|%f3k74cEYZRC2M$42Ipo5ZmRH$*uAVe!tmlqI^@XUD}Cm zJkkWg?iNWnvHf|;)oVr~jPte%TD=j{dzQusw)jkac8?Mi3Exu|E6;n>d6}B723r(5 z4OU4ENecc&GkM(p7-!yzBb>T zHn+22Bz&NFx@Ejz;&Z9&CbV2pG0JFg0B8M(9;-&`MXg-sq~Eq0pV+cKKL5rW{?hHY zgO1}ey}dsjml8ntDOSXEw}JdqTj%;(sY*Z%eGXp4+QQ z7*8AB4Sd>2$wx(D;;pjmx>ZbqzQA1g$yNpcSJ4F!vEN76ew%o~fSsBzlJqvGc?2@O zRlG0|_1pWgcU;>}WztQhmvY<&+`J9--s~)hHQLcS5YZErN_9Zjf+M=@waLF0hV0*!YLTM;@7sWadUGzPx9y&km`mTY9*mb8oxq{HSVd zeEc>9#~k*}R?5;qtm6e&y)PY#P;oLjEsTfm@#&2Wt_p0C=aDDGNw#LW8%>QSl=-udjb&O$ebxvf`NpiJw z&b4v2<1A1&HvQ6FiUZ>^ z3**7?%9Y)7KQ7PlPEAc6*4rfCf_DmY`p4$qvQ|oE$XMoX8{G9$8UYCg+qr-lUJ6)Yw-w zwkCwR=Rp07M21SF{UDNPGH^!PR;yUS>@%Rpe%XI3{&P&>Txnv~7hh1&e6>ZvLTE+R z#vaAST$2463uf%bCi-f_tt$EEt7PlQHJhD!83|KvaIjQCL)h+rG-$B!SK0|T5R zjj&7n47!Rg8a6p5v8{aHctqm@SKk*0$yFSn25YAqC>;%#6yxHm09JE-b`M28Ffm+l z0yo1nGq4c71q=G2qjLbUyz#pz4&wwA{cfYb*7f@030TV&j`hEj8CTpaEivVypr9B! zbGqi6>%RT#FAPw-cOgA25N;2Pjvo}vAFXC)X7(l%-b-$#Z9V@cpSYgv)XwH+7NNYc z1J56|tV?1_@h@M-08e_%zWK7knX-@$q1@hQhI406ojTQv>g9w3kol^b8qpt_U%q@H zwFdT|8OZfwXiuD-!IF~NB&h4~C~?PQTwi1E_SsqP@kqb7DfvMk4hFr8R65oYyk4|w ztlt5Oy1vf7K0h4x89Aly)du4s5P|)|BvLY>W@;PfL*@qkIH_f6`J49^cKq-j|5lMd zlOIgucP?0G8ewg6;r8@Xo!srUWX7OQy>Dg&ML!nQ8*b@O+|>65gJCM9u>NmJrWvB` z{{H@n9*pi2^}wTK5Dqhu#l6MC>pB^=VTYXeMpRtKYs*3z#1IuVF>2+^f5g=24`u+rM#j56u4j?_BTwBJ=f)Ru^x(Nv5%K~*oxXzFcqwui z8el&Ml48Tc!9PUILbIjE%f-1ZGrhxZbDwlvVk5;h4%a>bFitMjqN%AVHb39BIGej( zT`$vIqu?<8+oYlA_Oz~DBz1QAnWXPWrVS{zG+43tg4(*VD{>KU>i5wH!p>1=j=P(t z^`#1~rG;c+nFnuf_SD?|_m^L* zog;(Dzf`LI)tQ0vakEF&%t=h&#~L|w*14NWzO4+LU_2m7!_MUdx^R(PeSLi?8miH% zj|d5zSEAgE&cAWjQ?p5;aVMtZI(-ilX^cc6q+7>jC%?6A`+?LESu=eVpuYC^QHs)O z$vTZv%mRoqP6Vf9@W^gYG~as?y~9uBVK*x(1q<(>%is;pUFVh54!jslHE)zU^ogX< z{_5B}fwTL9GiNt30l37EURgHnn$G!E1Bs9GZ-^#obv$?_r)y}~FYQah5c~Wdy&;f6 zohK&Fv`%S0vVOl~Tk`V5Z%5uQ93Fb~(tN^S6Ymq2zWTNP*H8h;U05i_3&!(>B_x>9 zIKe;~t*o*79>xx{_afkZb?H9#ICAAN=Jd9Rl@YDT%z=luhs!$G5cJFVVvW}m-z_Wj zwX}-oW@A4xeRYg?^oftor{_yeWPd}ZU`c@c49x+}|2PR~VhFmw6t*&igRQ zHWj6%&+h2f_&iT8O;k1A_PIT!L<6@^P?9|r5{45Fz|DrxDgFL$T+D^!8$CD3jwm*u zSM+1Y1OKr#QCWBkYcY0VVChXmo#w$`j^uCuwtr;95oKrPZ9UySA^Y|iRL38+S6@PsCh?%_<{0 zoGVGAf#nd$1jWl_2d^=*t(M$p5VqU#q0BT_flDwh1XthsY1yK;Qhn_AD=LL>=#tJfm+$;m75h4h zL5bvuoG9LIfa%U{@`PS})t`<+@lrA;H`fr0xfOg7N>M^$hijkpOvS*=aWX*#>>MeM zxK&mJyt!AEHcJLMr`#W$DdN;ANDI`^^WYdxwa@1Vn|{gxgiu4Q%R?A8BWPr0k4Qlx z^?LFxo#LCCDP1})jZz7_^z^dRTOSN0k??pe8kyB}c6NFbLSoJaiN}9C)(K&idw6&@ z$9U1{g0yEfiXNVaPuvkZaXYIrQB04{|KqUheN_)nk&|~0)?RI)l&been_j(?;<89I z=%jxv3F+vAnOit@h6^{!pA9naYzQ^|@t`kOWJRT#$;xN!FZ%N6s76v&yl-IB z9c9AeL{$VtC;#DDxWsy5cC!AYfK)(T_to~JkA(60v*_=wo|?Zg>&yuppu|rRq#>}u zF-V6Ohra8TJ}f{oPNS5GTT zyj|+Ib;vI%=bC$8K{>mcvis82bK6LriFq&nio6=%hwR$7z`%*!UAFn0C_X&Dam_gf zlTiB2;q2N~^s9Xh9_*uR4yXK!|4_SE9$^B^wSwrBVSNnVvwHmY_>&6GtJ-3jM>%g zebA8Wd`+dj*6GjxG^*vZJFO-u_FVbpR-5g|J(do0-Mck0A{P)M-WP83>9LYa*Ad$E z9LrR{xmm*0Kcl?B{*d-UAp8C1At_{wiL^&CWY#(#I_fBzi^jkqLUw%?*&Q5~0Zx6RUNwO|0kO+&Rwvy@2LBI-( zl#7dmTQV{-N~uA#2FUrRPnsNdRPE0;?Nw=Ay*gVwgof#8u6F9wCvT4LV8APq|FU?@ zJ|@uyd618)YxgO`-(gW(TSdiUya7WL%qN9H*29g8RuB|=vgO{~TyscAk(ZUI_>YLG z$$BFfhBivVeoITss-`A30_S4mI!Msl)6a~G#lUxEpL0a5RTvIB2Ei*f$gf^KF31dU zN-W_|C@9$cSqLQzTk;&!t-rt5{_Wg;e4nRa@TO2P5Ybp1S|L5MAQL;RCw^!9cFAw4 zbt`a^#Qc1j#-=9aN4dzMyxh-bm>fg)LCOq4GNjeqzbqxZTCS(ZxX1M1(Xb%rzV4lN z@O?kK%WxS(Pd96}lm;mmym5-A>{i6APR_9TR+q|{O{=J=uA!%VKjg90Gk@?vqvP*W zIXQk?pR{~~Nc4?G>!PlmPtsBr75A5}{~RKt`mSK#5TYUB!6?FVjV0j+Vls{9H8*-GXGwBu^bDII6Lw+3Akx`(2(7?EEX=9lg+rW#fIAarh3v 
z&J?w}T|Gy$;dJEj%;dD`dk}vqL1OskC4#XgpRNnF1`U8>EVO{xw^v3tTbwP*5F0*Z z#3BVh{?pt6D=P70HG&r3gz3oXa+B|tE?!>w_azF$vmdQeXMU_6}t>qhK2 zpd-8$u4JrxOUoMl-&FHJCsIq+PUnKn8!ICtG;3FP99}{>9was#LP1UI?^8yr50LK| zz#_#8R4S^_Y_(7cGzdO02)QG)M2|bb+-P~};yjEhGOjVF(m~eB>U~y*bui*K3FwOL z{;n)>SWQ5g2tNNPz~M)a9XpneDbPmy>tqXih?4njA^#nx9v9rVeGfmz- zzo&Q>Dt}4wv#y?t(iHWTbgX<~OlJ>C ztzDSIK99-7r37)4k86{-S=p2$EH0LZFY`Ez-Bbzdu2BZ#9uvji%Qx$n( zyvG)8t@J4Hj=Ct<3k!irsP{#21FElgPENChxW%Ha@rD}_>h&6nEZXy30cWTDpA=#L`OW=DrQNT) z|9&YBdIkeNVwu5H;o$?nc5Gd?=352*D^zm6J~`{K6|XZ>#bfEj(+3Zp zk5FoO$;V_vzqr{kHC-5$UQfiba`*q?%`(5+xtZei&StwY<* z=Oeor$qN(H(W$N|T8P4Hweuxo(yhK_lN#h(*dpSqp+^bkzID70=jTw7Y>n@q#gr z2(v34e2XW?bY=4Ie`V;hGlZ;t5@>P@X!~&+>)oAZEd_u`?oEi^RLsN zc*A6X`04}>1;N5ZdiX7OydWFJgNre$w6djzqpz=zyMoYji{#QkLv=7Kb+EIw{T44i zTq?6ewf~5*&HVd&(8iAgRpkR`)t_PSekG(3SbLw6vfSQYAbeI~glC6-%~QA8oxp=EktuTiYYN z!B{9E(!(6)Yb2*`Hb9=C)&l72JQ{ zH~2-Wv{}#ldt<3iD=L>fJ746P)#k~@#ntY~cFDVNI=7H*B*qO_-JX#pf=M{}gw zn7Y$PaNz|I3zZTiQ_KCQ)@?nGThHyj??0mz@!Y<)OyuhJuQxbn&XHCMu@z_y?^J9B zRsbMOznp&XR0GDw#>_5V64drnyMX%LhQj&j!pp8dMPgEJys2!_5Tu)0EDbS~vg?4% zfTJp`5B35wdGtBg{_-QZ6~(4Hf=WaqPm@MY1`$T->l@2x0P19ZFdwJ}38@4Z<`ppB z+pr$7jN#c&U_*hldNmJ0gfNta2EjUe@S4e!cYs`#qzjCULwNpF5RJ&VOh5PfxDy)P{kTGNRKxo4$zcqfE3l2HB0@?=CoeIpHsHK+ffHqJ30PftlrZ}0%^G~ODi%u!H1s+# z-E>scol3XM7fBFolq9lkg%H!RFfw8Rj(xF57O|PuLP=BAZTOy5ljX~ePjx(QeQJ3< zw4(CLBEh`DHxgJUX$Zijf7lUZesp}C=_zn#93r*YjM)pST*C>gAIh@BcNqulu3n{r zO5I>MH-`YR9j&?qjc!pKfgjV7gzV_lR(Nt;@c;g-Oz^h1cbaRTMQlr%kqBAkJ|p`S zz9z3!i68E|<|!?qlXetWHuH6voHvwDTM4SSPJQVe{YI%@qP7a90(1Wiz3@yaxB&^{ z-QC^os&n5|7fOeuu)MgI?pwgvn#E>sA68UUd`96nWFQ<5zzWtIR8%HTe_N=?DW@c{ zVL0rvV}w062>|~cb?iUi9RC~p=>Hu0KTqKQohR^mewnP0;K5I#0?$EOrl=?#S4=%{ H?)v`%N(v2P diff --git a/examples_skyline/accuracy_vs_fpr-skyline-order.png b/examples_skyline/accuracy_vs_fpr-skyline-order.png deleted file mode 100644 index ba45fb6549543282de4c380b51f379ea94c80510..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 43619 zcmeFZ1yq(@yDfYJDi$^d0v4!%fGDA~iGYMONT`Sc(hY)OU{Ojdse+`SBAu@RN(hqD zA|)UoEq&(VeZSiK>~r@1$N0xNXN>)h@qNJKbJx1px~@6rHLrC~QC^B+In#2Aq8Ox4 z9ao|#+6szV9JzD}KC!Q~slty%)<>mPmf}C>r5Eqw?`0OJG_5IW#X9mon#$&DGx+e3 z%?S+~W%Elm_UEk(DAV&c7AEF4CPo*w+Zk9{8=0FOqT&$xv)NHw&0ubRNXX;q(i>-a5}(|bp*7YpW;f{d)JwH#`tn$3o!8>X zwUqvPNavWdsB=?jpLTa>xl5^s{bk#Og_~(`_pxj2)i>cMRn5N05I+iNY5DQv91RT< ze!O&}81O?w(v6JDzU`89__1+KBsp@$Qj8bnw)p>!!+2ggPg*Try!ca5kyN(*U{YFt z)br<@`}bE2TU9-mtD2o?<&`cjE%o>D@wtEhetcFA=UOJFc(1KOSy?M7hNXB~?QcCX z5w)R*t^Gqn5?IBYGO|LiEMK)s&CD!GR8&-3XE{~99M5X&&3E#{hYv|S?O7KuUfjEL z=ToJUjT<+rSqj)5Q*|A@owbcdTY{F~O(sO#B}>@u=UzQMJ=?xoxso7W`_k!eH&<7u zUQsW~nV;#F@QvCf+o~urCmaVeKM()vyW2--zrOQtB9Pr$7@KUT!ww=?D zCwG#x2D5DYs$ZNHnw*;A+`e7%;N{QzY^TlhXVdWcbMCWgv8Ih_@~uYouMIHxS;mhl z=XmCR-I0jZ%fB`}HdYgPerj?brto)}$KO$lSYkPri?nhsSQl zzCK=+%WKQQlQHsPR#U9j-Jex0EM5-eO%GhDT}&AqpcJKD=cilc?CjDwI5`h+aBvhh zHa0SgIaSZh4!R1l2;1xzu^-^a`|hfEa%{JlSo4OB8y|oCcxvSRdWGPoCT;7sw~JE@ zN;c|y^105mZe)DR#ZGUJTij&y;7Rq zdhN=64-YwG<9MTnH*xi;SAy_m77@GPuCkC21@X!E8yFcMB_{408LQLVn%!)s^~Ugg zWo0FE<9%;$!ELTHLjw6%G&F)IJNy)wtDc?oI*^<{mxWU_nq`;m(|B31S-&*Xy1R6| zzpsW=iuoAo^%d!mC*`>=|Yu9ld=;m@zs zC0)V{@wRQUZ*{rURr;uKb9cTE-tpVFZ%hBUNiV0X8{X5JpYJ%3+3m?!c+tS%Q6Tr( z$EjE99zT7$bL7*Wh+zCJoaPvPlYTW5=J~=$Gc(Qy^$S;4z0#Mm9{PTw=-R#e_YXXJ z^k_FPZ~4#G{8s)ewWZGE^_R&V?}>1(bZ~G8R*;yj#*_c7Pbqtm-eT;{eE7moyX<>D zK0@t-Wg>XJ%$b4@gn5OR-2f*E2F2s3mHgU&6q2@ltuXOU(>#-tV_>%$gh4 zzUNVYNX_1nm}z_5m_N~Sm8H=O=TQ(HIvzK>TOr&;*Lmm!UibS1=_tw{G7j??4sBoPu(POT{&JmY9zU3sXuO9P+z)M(G8 zx3`uRZq~Q5vO0|rSN7$Zlowwo{SS3r#}BW<@h0k4vWk9v^?7pI={j_h4>U;fAE#1}dXH>N6~# 
z>Ccf1#KgpK?A>eO>@%1*om4uzlw$mYotfe^{o}@BOw2&6+nL$fR800TQAZ^yZ)QvF z#}=80a(mg>j)#Yb2Q=s^Dn6(hMLe@@G)<2)C%n5aXPU=CX6uX>YB?Dw~uS~(7{ zJo&UTES?m4ZrR1ln|#IWDjroU&Eye}ZcZ$YiMw1LzI@rThiqkGBI>QqBVJiHJx_y! zH^0zK>B`F2J9X-=H>+50uo=SYV}7HZ(oX%Ubsg>PyS8oHCKoQM>HEf}N9o1cba^a@ zUd+oh^VR{4@zI_t85x<%6}&myB_t%Irjas|YZBD^W+q3brd#KeLH>SY_Z5w z!@@C0Z%j396qHu7v3VmtKVxlh=~8d$qk~SfV|B@yvxrqloM-To9%6;mChH}W5Y9KR zo<9|UfF2_kA{dvEp@H`=Z1K%ae~xaoXe3GYUQ7a&{pqc`PXEV7x>g$qv$y;;sa=|oB~NU66$tPn7+ zK80wJZ#@0#)vMR_CQOH|qLAf#3|+rnryq@xn3cXdF;MgI;V$kv=V)1})cGMF*Vx$D zQ+C-padC0Q#l@Vd^TB-jZ|wTD$aDjmavCWU>g?={&U2X#Y;diPJ)@O!$t%sQIUWxz z7baZOb8nyD9tAZZlmiD2uu1Qc53MK;;7XppN>k7Yc*q%jHr>prH!h|+MqwLdCrH!Ztqy(g*wc5|BdA>KA&R0WNvtFm!=on|KpnzWrbe0Z8G_!_t$JlX0%|bKZ*V z(ip~SJVI#r%$e|^_M*O&a`7sxfETqd&Wbb$nS71Bdw1#N@83Vt^JiZuDJfOIF{)ag zJmI@+{cgjG$Rk);%0=ERV+aCyZ%cTzGZddZd1Bm>lSyK9y-8Za)Jo<^FB!XnS|(3T}$PCe!rh z=ZH4i6BD#m_O6~IZQ{#`{=P_V|Dp+suNAt3SQ>f z`bFLZJLnAdilZfw=oIT;POUJO!u;>3w0p2e|iX!(0{2o#}T&1z%2rN#`{J*zJ%yQ8~1 zrlO+4YT6u;TPMrrVS0M{Kvo}*!NrRzt`8~gOGvR!zy6?4zQXbsAJ#j;7{JZXud-Fh z%;q#bJw2C@kUH>3O`d5a=bFef$&4E}W@X)y;dn7{sqAr2Q2tDGi$lAYRgi?^aMVd} z<~ZQY6G%jr-04kEU+EXANySmdr*v4#-KOqRUc&kl1I@klDKXA2E+l%__uEM;f$Siv zv&L9&qaR|`Gt*v@z&YY2vld~Q50o5CHoxeKP->5DwF3u!q z5)81hVbi7+3VY12|+3t2j>Xn!~tWH1T2V1l4&E-N5odF>4 z97q7pB&-u?P0eq&9KUG;zsp#SI`X&V2RlJGt+wJJlfK#)b<8DmQ+4^m&k5eJ1%xBW z=Gz}P!#*GFZ_U@EqhF;`63AVXW_mVvyx~g2%kwu$Cg9e5eH;%($nYgoDIeCFUv2Md zUK_~T_Et-8bN+Q*zXd6X-?)0O7mJ8mUS1v<$-JpvF6l}Th=50r;?BRjO_UDTsj7&- zw1T2}HvjYiKK;TU*XdawBqt~D+_`fU12~;Ee(>BQnqD`cxX0hVog1k=YyJr7d*qAM zwk-+@3WNd*nzxwH($X@!96NgSG^qcl(o%j$N5|bMIjMvbZgwuh4bE>pH{;me-mafF*|k*)Q_0FhT3%kTaPtDe##255QzcyLpv@1( zk(rTl(9!xF$CRO8zxJ1lPba==><1rAE_E3q7k%*H!IgpJqHV5Cr^7|-XX$+nW>;@Lx+YB@ZaQ*XFjK| zUCVPsqCahD82hQb+}=5vaMM>w@9wPLD*Ze%GV*BO`0(%n!vEae-MfG?h05M%@Dk8pABS z0!rg8p6fJ5#s~0p*X)3OzF)Ih9p0EGdmm=l!fvQf>zZ_mI5z+^qsemB)QY z8?GdiWs9Z%Sk%$#gMIb<(9qBqOtG+r&*5TRkP1usXuoL%6{g(xGzfyexBy9?=^=z* z>~T#p47e>_bvZ;0TjrG+9mB7F4QECB(k;4v{kpKzs9`{2He@;Ph}_&Kz`!8x*qv3KW-&YamS}sCM*>$I9v!U)UwDCN z1C{YKw7SHK*En%LLDEE!Ql8X_?O))Rse9UX1-qvDBg z)JZQUqtPEoL)?LB(e2eVsviY8Nj?L7ccSDMA;^lIUpwwp81foS!s1 zJt6B0HvG&-eCm`hKY|;P4N@+b*CICB4m2I^|N82f(x>{2(NP=FZM~F0h=?Q=^5*=y z0ZD6a!gXF9a;B}*q86+oMCAj>*Tz^y!^6XsAO>AuDj_rFvPp^y|T)Mw_h5jhZqn3Dq*o zo4iD%2;UAcKSWZM1odP!gz0MJ8`G8?O<#UYeQ9=mo$_$8WK9JttJe_TYA}<$HuD|j z%^mfH^vGH?gP5i+8@O+QGg`DAC36PB)t4>z*V`3N$nKqkgUL_=v~-2O8Jl$tTYsmh zNcpUh&mtt)^dYfY*mB@`9+Q-ZME|iSnyR_63w4lcOXSO`x#>Y$#QdzR4UtE-O9owd zcbj4P>ecE%N&L`m61s@iQ{YB(-_ujY#Ka^zDS9!buAi6y>exmPHB=t6qK?P`(S3QN z{E=MqCiSnQWzy-Y1+?EH?_OJnNg?N_TPPM=*B>R&UhW5_Fd`EtL2S(63?(JH+8u&Jo%=pN__fo#pT zbvo*0^llB_3x`lH?mvH;#N5fJU$R74EJw!|iaeQT(v`!VC1i?rc63x5hEWz`oBOE9 zVbPMo8(d|fhn0QA#)HW`6B*1=hBh<&>sLm+9?}N>W0Rhq7*w~gu#gMViUE`>X%3r@ z$=*2run*_@(ecSIiRanL{k5vw=D34=TspKEj1m)8(vC7BR)@BP`v(Nr)-?JcKz%#J z>4zc>S!S)R^YOMHii)_z#dUo*u(F!kl6$;mV zZ?&5IDwU+FSD4lNcD_Mp2Y@8>s!FixfQX1>fBy|cM~Ex`8p+PJk;&hBdg2aScNzZr_D63+Y63FSGw279Lr|x9;j1$2`k}3@Evx5wh|SDz zOU&(zjK9BzqOUb;IZitDCA<7_r z*?$^`#59UhK=#E(206gd4VVE4SeckIT}SR*VuEPW^(FgWHuf9W#0QjIdlGpm zBQrA&Ps1ZNwIpMmzRtS>4Ryywh6OvXJ>g$DBlhe`d#mKTf>vu>@49edS(%)I*qFb* z6O@0lI6B9EHfO7U`SJyduo^NKSrVMW!dgUrU{ss-tFBgIU=ddF<#(NP790PnUzhtC z@{chlq`_ysR~Ot-=HP7FCfWZ5HLb#|`F9%X(-sW;ibW2Nj--O*)aJga8j7(@at!38 zhLp=bY(|w)(h!Eoh^VR>)ZRt*pL{%`o~ZEzb&q%~5ysxtk>lqQrUAqL%6;ROmWH=|Ihkm#FB$&76 zA!$?*1lBdBudmPI!y^nb1UkbWz%0E5YTLE(l!!QlWhc}nSW29SKkezQit%w8{}~5K zk3b^dH|O6ivF)imfqZI{^JPE_n2)>Jd890BvMWU3>bQy2G*@yCA%e}L)w$cy*g0~ zQNQ@bYRR6>eVhB3n7k$FYTjS+``#X%ljGat>FH}EecvNH&sJjU$1}tv8>X-9M#cUd 
zgeES866m7U?LHDoj9hA`e1STiLo@FM$^@r!i1sc(m4%dA1A+P8zI!KNP`p9Fv_T~~ zZz^8UtSJ^Kk$^QP)F4NHrj++Slw$oNF+Zt5C=QT!ZdVor?`X%7E+arw$gZFzs`GQR zsu~)xSl7J?$$3hsppl^?Xo@?;&+l1hNl+gy4}?+GQx!vEHI{9V(;E&62|b*$#+O|d z82K5pXuMI?^S}vus!+Hv>-N1}6_Ntdr+)WrDrR?dWbdy_z*U`yBs$(`E`nl3p3VZS zA=D3Z#kMpX(p$&CKoUl~=l4*1K*N9=4IQDFK%}wwM)Ajw6@;P>aJeAl`kb-!{9>&(>*AAY%$#c)wHtl0RXxm?wnn1I_jTD5+vKll6N3 z-BOxkGJo>|+}26`8U6U$Sb`pz@_?3kU1_UWMPq?32=`U`G&efh;Xtdn#naZ)mSHUo zQ|#_}`;)KU_N$2vy$A6Gh$ErmT;iH?u#j@eTBIm+Ag= z->8x9@;<mtAwE=QA)Qe)E4VRdulSKqn zl4)Fz;!D_J>yyWBul#}v#AuDWp0qF22HD&eUX<%GsjBkl#(1UKc}@22+qZ8bn>K?< zpG9Bi*m1Wd@zNKb+H}?nm=F4n#}8L4EA$}uc=y=1I7&fIQ3hrs1qjU6^1!-D5WQ`8wyB|jN@gpjFc+3IxmZo^ReRvb!!P=pGObBGLkzOTfZ>Q1 zUp9CZ{=dp3i`gIpPL(YEcetM!VeHKeaw{>}MEJD^6)Ok@I>8VKwLN!|5%Ow3x z{veiK&BwGp&cdwwasBOK2}Q#COkEB|8tV2I?1vS-zJE`e=dn2j4kIHgOK8{4)#9<3 zPFY!ORM7dv1SU^gTY4HM$rtJ1Na{fpRpb3CY2X{v|I%pE`v8>xgp?E)QU<7?vZSP& zzOaNuzD#-m<_e~brZ0>E@rdlY6T+>K;JGzkMwU*Zf-&{ogO_BES${vjb_WANeHPktuWJVR2;zS%;s8k#*GLkCNVFSdtWdaG zhWYxE_=l#+a!*o@bp^LLKTfoW+6kJF-jMO1BG7fFg zXOjjvRYE+&tmS2371h9ew0;8-4>kiC%y+ZQMtOO8HHc)kpd)>m-Qm`6kf_y#nr-|@ z<%zHk0;HmRD^pr2bB9{5q?PJPI(@F3pM0eAK83Omd?9kn83LM`WyMWNq!`;O6?xtGLgN+XnO=uZ#tv!U1D`MMy=I%k#*uWjEXVR3 zZm1#OH6Iu&^4jc_5qu$8W3^{VY=5Dhsf4P~7fd<-2M;1q`d1_x19JW|g7lDvN(Jg% zMt__E^tXnl76iw_6I7jzAhq;1XQ%5rb~2Kh#6w6^FkqNr&g!JjKy8u~@w+9r%=?$KwC<7cb%N~G8g7s|(jhsrOJk5^6cmC`pC5K4ke6)jPeeta+$A9D;&sq@<^E;D_tnWVde=2rrQZeWQ@gNdU_< zo`e?!Ot6R4uv{jZ?aKx&fs73=<(v^3=B13hiuXSrpTs$ z2u!1V+y@EKTaOdqlDugYtw|z9W|#3b>C}Gr6&k1u&uULwf+?plj6W*f&SV z#_k>Kt&v6HI~hD;6fq(mRl?r76nS}Gb#vQE>|549x({rUd3VzhTir4K4;9I0XQ?#H|yrBqzlH= z4wr2c5B_rq@7vnv8Mn0^S?%2i0f3zQeXQ#GLMv-`6Nw*%L~rZc7w9dBn;$P`w8MkUXyZ&Cus|qU-)vc}0n#3D4W!u5=7^uH^y8+?MP! zM0$hzzS*Mpu)ZgLakxjrv=2vKf0Zg$)FN z@$i@PtDDf9R`1s9VF&{KT}dx)=@NV z_6-RsxyDI9y)<ZmPw(l5(4vl&E z_^3NOBUJ)kBH!1XbryT@_;K-HC#xgRO8^~I${uLt#pUJdGBWpXuiiFLz30#&jSxXo zUp5yA3Q!G+-;=9&v744V?6nLJP%fQeKQ<^&=ZfoJInNI<%?Z?+90u@M0fx#II;Ra&f4#-$bNdpS8Pp<>2^32||A z%YFLv30Tm&cLWU1xHOorA@^!Jc)@5$>rCN^8XOe)91_aQ|H8bWC~Y@&N4*^)B4<(e z@a_SihB*2zuAG#}3@f(hLY@-_as}zss4^h8bz*5n06^qIX*Dh;yb>vt)Hq+A3D&n7)JRJ1MN@#Eeb#(myk6XRzNf0d%B3|5+{%;v2>^&;s!D z{~s&2WamH0E16k4DJniJCSe7oD230RbNqwU4}?8DbAVgg(lQAjyBQa*HKYRk9{f^v z$YbM8j$)zQoIELpNUH+}U!>AeiVBdmROBsQrrJ?d?=sjnmy29$dfwl7EZ}i(Qon0i z%}|7p)xfuYZuZ?-&ky10VCR-}XO=Ll`s3zMmIL20-!CW1nOU40E9&_*flXmyQ<{C=T3WQj-f$k?p8OSNEUlt(-*71?FE2xrpH(U>Z`Za{8D zZ!K=+ZjgG95Pe|HDoWCYq|zPd&)>7MpRhBwwx3Yb*et|AOEGOEw`tTUDBsp5Jv_Yp zVaIOXvFY2@36G%w}GGNj%54hhH&W?9x&Pd# z*XsrvDwdbHuh_pPkH(n$FWl!5+$YC9x^E%N!9)>h+_`c~W{%De1O2tk%ET?yq@#tffI=6)4KZL7e*8KZE0M7DDen*e7+fVo^ zoSjS_PI`}bUd>6K%gRKMLtM9bIzN5wrHlTjWtFshgf^;Oz|t^g!5oYYy>%+8qvWg;RKeIGm)l$mlSVrf3*G(JDKj(n2x;j$(J;+>fBSc6if+Xo z9x0*MxLz%#GSn_S9TBKl7gNr3Xkl$=uZStPSiJD7{~cU@B`%L=CtVNm{{My#1_d!t zhQZ$3(v7Ca6{DYLUBM3)iO&uidKSy*-8l9C#<%>N4E-#39~(-L)fHS*7p5FRF?qKa znF>4X*ZuH;y}j*&#};9Smls7%8Uu%3Cbh${$m#czW7O&9On*QdGXYbKD)KhLt-jdg7dp3V%v*2erA(I)S!0hbw zw{JJE?xQ1ztgjQ3mwxAq39?ftljA+_ar}{SeU0S1b14BiulDacW_Hrps%fwDCF#54 z!^_V&III&|Q@dW&olY_U&=gasVm~|Eq5OffzC|aVnRy(IQm@icULU`BVQAxzUfLqG zG_b~7*n>2kC^FtC$vE;d(XLvPbeOosKM3CIDAIb08&KP>n(NQc)@<6t&Mv)Uhc6V3 z%SuYpBE5`eI~O4Wa#~t0>RRGPN;Cg)i%P-0vfb{tWd;V=BLqVd)Kx}$EqW+7rUKo_6&2H+A*vR1PRU6#033C5NL|zks^R_urNwY38d{jpMWN zqh_c*DOW%F=rl!DL?ZGWdjCOMT3YGUDd{zvww`E^18gP0J1}4`bnM z)D6FafJ||5THOhA&;Es}mus%sAFK6mA;dgP{wlD}LaDZ`k`wv_GYB9=(qAjXAoSCN zhi3{Ot^APez32E%ilN}f{igGm+jvBIlPjk&CE}G;RHD$=qS9BFOq48OK7i1!>dPod zk_rHE55PxBdTDs#H(Gc>2Po#U5SI;9e-Lv^PPFdzlU;}t8GoKM=jF@enb{%}whPhP z{-I7yY{vS9O{DmbrIPlt~eWTF#aR#JGKtrb) 
zO-g{VQl?*}%-!>&Jr8c-6#U>VXfI^%{@dNZ+OF5%Ka9JxTh?nU;EPg8U=r76f!hRZ zDnd?!iQs$i=i*`|NlD2#7iFL`Qvb_L0*pZ=P1YA`KI7J{XQNYVrEs;MF;>-qE*BBQ z-acPs5dKL8+QK?|HsgyE4<*EEM}}ioIbI!UDp_)4;q<3Zd!kt(3IK--jX+=oD76o% z&aPrU6iMzLWl+l_0d9RCY<-lqCzNQ1Qd#??-I69iA}OZr5UI-^9h5>Bh#Hh*LMu)j zKW>P=2{cCqJktZ527}m>&egha8Nba&*7|!>cd)`MBpJ3GI1UhOX?%6EGbrpi?AB^G z8EI@!Sy+5()R4U*m0JXWp<4L&9MzcGB481h5)#tUqdBZOaFkM%)o{4Dr*|94NbA&F z6#NK039vf+Eg;O}KIh9dRQ1lF!q{84d`kpx+?0KLP+$FK0XgM&t%)j?SnJFcG?Luw zH|>dzkwqMjxYfOcOcEK5tcd41Ato6B>53i%INWHxE*gm&j=izw9KG46cIeC}}_9zh-A=?}4tI#KRDqzOPxqIxdAz z>HPUmdxY9lmbbG|)rn49BiZ9n8X-a^VQMI;7)=g$5;?oYv71FbXbGiye`pnDET79z z0Y#}2HTn%(wENd?Pq63ojWjl zT0yV;-MeG>-R}GeN>S`@kF0~r5pG-LIeZC;Ft}|}TSV2|{M!YHbQ!Ojz+Acb zo{q*^N@aDSNTeC?7Ke>pY?Gdq`j4NA_2UiNI_o&&T6F1Vp|Em5f+RYuCm*p!+*rIq z8~9n}Fpe8f!EsPdhnEktNpHD)U_-tz3ea_7-K}*3V}g=sxI2R?B(YMnOQY(;leCnI zr*CIboMtzXvAESOi{t!Xa$Ki$=<&L^i5H9bX`tA!m?9L}{`F{X4csHApGVtOn~jY# z{Qwoc0_%oTcbNS}Pmpn!DAb(PgD{ZZL#twEZ_M7{PJL#VkWMq@!JAisyRCkDV#O@{ zD%GGsV`W0E8H5vBdD=_%2Q6g(oUjIHZ(NYzJUHKu#ir9_!H()H)AHUHLM~}1AhNZk zDy2BKdC;waH|c7q?`?LH_|2@8@9N#fAC|M$EUMyKb}WB94R$J?j;5I zj=@1ac3ZmC=IHL=OC-lwiWd0?(PVrc^R}m`zFq^jz$nZzL&%v23N^Hn&|QS*nl5!?Ip2#g zPLURx*d>A?8@BNpTs&3i?_5~4{%PMMEK<}7V&I4ZmFzTefTwcK*H4?#P+2HMY#b+o)fqerT1Xz^PLYhC|D%ISAPrJDHnvPtg}m zEh9>egE=FgIU7$R(xR5mzIwHfQa2D5!nm~WC`!Sc0s^VUX*%1W^S<^eZtHa!Rr`Im z_>kX|3(ARsj!FfG<18Pl{2Yj@HYkY-(qox*!-(~tke0`si%6#cR z`#qAre$`mf8rdc-CuguN;F;jhU5&aScBQ{x?4B2sDqGGI(gs?{E^Vngaok(>@=;!i zU}c7oxRF0c_m4l=gG~Kvy`7%5EcpRVT|{mh8j47ZUuMaN_wQi~J4OZ&SNF%Yp<(OQ z45HXabb}hy3^)_E#IC@+<}kZ*L*+rcXnQ3`G3V*DI^p#)ZgbKGMIuzqcD`4m^CJo! zUN^_2b90wW$p8;bP4y+0^mqTSTqIVbveMFDGA!F+QB(^vpi5O|2z%Ob+OSi_2BXLD z{kx33-(RdIR4~E5w}0&qz@I~JO)V{75iEpamzrVyzbF7zqtHv@qvRvye$%Pp!z9I^Qvj1 zSK3UiEw_>oZ=H4{cj=oKnzm=lLK^m^?6u`CDa$=w_kP_Pe%awwYmXetm)N~qDpTZ; z#qCFRBSXLVN*}#@w7O`fG@G~8I!|!x_4*P=Q?~)sj7Rz)y2(8{Z~_9~>z6wVJgTdb z_fEaUYd)>&wePB<~&{-_< zy5{(+Ea4xnbW8fcDb`&cVJ|KLF$o5Dqw^=+f?m}#vxx7ncxc9+&b#?sx@>aJaC@7q zEK9kjPXXiXs+VrfJTS`ov}sy7&$iCHobZb`Gi9vRHFC%il-$I~h+fH$lcMPSa}Uz{ zy!u;j_0SEPL@C9Cqap>gadF;TtM!tiI_~dQ)!}_sCm^c3HS#QqX0Ee+dh4A_P%iy* zUx3fua9OA!dT=K${dLsVIb`L)+cOKNP!)JuqMMKu^rer9H93rqI8o(b%!6b1!THTo z&<)>QDq~70q0NzHY2AG@+oF0UZduZRA$}&6>8DoS*4(7Fp$00<-#-dIfFEcK_3zMF z_7Yyfjp~aX9tr2kEwOKl-7>{;?)n@4BIX|@F_!#h&UnVZUGeP@9NDB;O=Ntc4gw0y z!YKGUYhPsTPdlSQ-A}E0_wNrs@Q^L#!SIRPPi|ZNhtnc~V|X#=QlxqFe`buF%zU{Y zMMAE}Dz>?vx^=%COx<*0RXlq1XlVIaG{vLfrm3-3@oJ~K;igSjzcDrlhWH1t2Lv2u zO;}!@no*kb{LX{{26U%jh|Tv~&tkURZts)%>tru+G9GU3fb*Mq%00NUv^E-2Ip3F? 
zCjZ_cx_)*Q)f-gz`Fb8Ha6&+y{drl!k1hB2caJQX74{`9k7Y^DINHc{ZE5-Od^)$O z&eUBMD>m+{kP~W-+NA4ngg7x_Pw9CL1o}Y5j@|BN*5ALq@4|4_Du>m{3>By&krq?p zox4)UlRG~*-CO%2q{e_IRVnXVYcx|6V}tI{F^An4WSYR$qei+y?(JwFAb=O9%V=0f zuE5;n8-@1Ag73v{4P5!b9n*s>>W22c9UsFOdUyH_Ur<(NsS;%BSly=i!`{t3J}c>F zUe{O-k0j|R1(vQQ4o=twr7O41{i63spiK%d+ih_1P*R22OA|qHji$TT=r!*42|GWo zlWWsRHw$Y>Hz&@TE08)HV1q8H>7l-HlT&9Ty^C?wamxdaO9gHvZWjRkI&@cz!M50o zxSiSJGNF@DLPw=PX{FitJ!@4&mn}_}HhT1IP>h2sN3OW6Of^|I_szLKXfZRX^k?|q zxk<^0u2h;SB?#V1$UW#VG`8iS6k~qaONw2R3KsqSFvIQtn3h(e^b|GbLox7EcG>>EI~h00-riww zStNY@Q`9eh+7+F9U$au?>qG-%>LHoA{1Nr)?|bVfD*as+)4;)?q#XVBXq~CMyLUfD zSr^l@zfQR#vG;uI74_oh8w?^o2ktmlACnUBn!|lf|E{GKl5`LJ^vy)5NUMR({->HB z$G=@ycXLDF22NY9U0d_8%?3XeCj6*z6%`vQvV`4AUOmU!ps`J7;lRM%iZJbv+F z{}OgQqi?gIk#d#|r_zC$DCLLC=eFylg)_t*ewId?$V0iU{>FKVVlBLsFe`a0#kN27 z@Z8L|tN97ZD;VfN%z9bXkI??w`Gz0wq*^RFWBEFY+0Y2@$s)gGFk^>K^Q~jlukGq0 zcbFKqSx3@js86y874tIf%WpsFl-1rVMg5{xKS=%&=W6m0F{7v)W*R29tCmF!lv@xO zy#+6i0{yui>D*4my>YRa6~Ac4rVbZ$_E7u2r=3vH6l1Kx)pV0uB%E*)3aA zXBEi=D`Sb~@ZobQhX*pgY_DUXhgY(H-O8j+Nr9ymZb9wVlv}bzBl~9UlIzB}ciy6I ziqdOKlii;YNVjlC?{XR{@2zRjISX$3_&;QBxdm|zP9UHgZ4DDX{5B<9f*xXEUthAa zZQD`r$AbM)KYsA1nwH#QKp075q}4QDqlz07k4*aWizU3uDr#LBT0?K$Oy$}~AmIG1<`|-n&bUhHuJL+lL<#j`U}&uAs6(kYTg({u}KJ+GR1xUYQwXA^c(JqdpgTX`QkFemkGzHuZX{l?2b4!Wk0FMEN1hi_59+`%0`($Jl z?QvNJ1$7u`*of;3UcQ%+4Y#8sBi%++80;gSb%*=i_egGPiMsb*Z*l2PNjkUH@Nm~X z+$9xSjsgoKE9<~(1>6g|pOQg06HqQBT{GyH2=MpscuhnKrE9^bneax?zEtBl%b;`e z@VNcN(qN}!8zOKx~(!FmscBY4#mwOT2!K-u!u#my2tzdDv3 zNzF<7X;PlRFa}9FQl3wXu3)8t?zc~VSB}ko-+7d>y&e6QCb;v{3N-VW#_D-OJv!)F zOYJZ5+vG6Z(V&=*EY!R5nE+Kf+_5-3KHjUxIL{p~z4vsCJYof-DZSf$e}%!RxvNFA z3WyxE@q12Bb0+T)sNq|qs2lR$MnDhl3&qQomD&<2Y~@>|X}@Knp0ev0Cs#_AFPG|@ z-zq99omDkUD_ZW6)2@_tr~9?|wEBn`xfSJojH?P7`E<%A_LCy8=>0v($2Hd4>-^Se z-L%uHO}`(K6y;X*t0Q13SG#wQ@mOy0js#9!7LqJ~ZDtso8zM-nGvgN@SIJCu>eO83 z)?UZ)4J4(0<8vOnk%LcM#zeDGX22_6si)JQoabt$Cssai)D2}K)^~pDI}>HcX04}t z9qkq}_pi;Jn>qxHzMq}&NgkZ;vW}Qbc;`ZHLTU&%(Wm6nDikq2TE~ib|If+WW-%HR z6G8OA$qZ5GWE>blr@ow}r6qCfpvyJMDQOLkP&>bB)Bx3I(p+j<|N6w*wQI4XhZ2?( ztYe|mXrx6ei6dzMXMM?@wD}p{@s|n~MVddy%LDyIJ z>vB}^ZGU|`!ob9Tif+lu3Iyi9?9;e0jawYdsTSD825xfBUAvCKn9p?J;@$Cq<^*i9 z5j2Zl6?hwYq>1SY8!7CIO>5h`ckef5)tlCs{A0M*vj1ubB34{tN)$BF2&-#-fHq(Y z@Axx~))&)dkT5{xTre3 z+~NECWh|l&icnQHs;p2cHw#iyiu!5tX+N_M2M1Z-Sc#(sV98`(6~r1D;y^&)B7W8~ zND~m`{MpFouz?`GRKXoF21_^`$fe(luiuY;F5~W=)VO>FOCzR&J1l^u(AB(>X19?@U}Y(ioFmH%WPqH4Ymv0Pq4no1FiwcgEw{#nuw$ ztlQocgG1&UevbVWwr9^p>H~b4)lW14lAQoDGx(@D-eVGNBD3STkOct*s7*U%ZP>yA z11@V-RpttMs`^g#i#X1Gmbq)9Uq&g$oy}Ymy+eN$|2H}lLE3|q#*S{A51T?&(pVP2 z4n^5UDVN;mJrrjm!nA~}AxReLtq-3U6eewYx=3)9O2bl~<3=ABaH;Kk^aPH+>bY;S zg+pSJsLDQg5#v?x(_r&Lz4cFMtNqq2=;>27SJ$FvVP^%~ z51ekp%=&%3nI}5xl)FSz)?tyxA?@#Wk-yh2EFXzeaCct#Y=?K=sFkfQ{rWP)Tj-|0j1#)YtiR1 z)-*e`EjO*g^q2i=vgneE7bNh1LDzUGocrYNmh7iTM2I|qUzab09>pzzyN_eZ;M)P` zjgG1;>?-$<4+{>ky1KB+u766aeS9g(%V%oF9NXGzYdW9K9Q~tOrRa^9d>C`vqZLbC zn8>3QtYhe!5)F|q~ll+wI1y8k$)i{{fE&pF|xk$;i7<2o=!<`+8}Ct6D6TZ;UtE58=9^L4m=G- zKNh-Hf?ObMhU&G#P3iRg-X2a-wFNw+5zP%RE&OKIu7Bw6-nHu{u-50bqFYdDEucqX zf&&gC?3$(q`HFOI!dw>QLb`8foKTMgVLl6lN3q^pcx>2l$XGz1jIOLyB5lef3t;oE z7o_n8J*sH%`vPQ!_SHxAUVfs_}km=>uV)C-~n(Om~or3%{s^=9v* zemN{GDC$4QBQaI}ztGQLigwu>7}+PMMOgdo61ZroZ6}Tu8UVb|ziy27-@qYq_Q1*_ zeVO~CvlD9SODOT@uWz5eI}WpO0o~uDWOR<`m0lVuLRIsayj<#YRtK=#Z`ZA?I9qhJ zwexK$xAKI<9qMT;;-JS4k@8`jEiF;fv$a5P$**r~*W3MG_GkVM4jBSt zI4C}GM@+P)@@i2+qa+vDcE`IIKr;-?f0mH{V)~cCxFPgioUCrHul-6g_f<50IE07wJMdU;y_~UW zVcByqLR<2tPMUh@z1fA`rpV1h;G9Fdh_lXt4YXL1#)FS!MV>ywEVN3Xi!Ga z8y^DK5f|g)ByM+G5xL-(G`U5$_>PX?##PA+u`5<4M!qZUUEe~Sj`#fs;lHD>5UW)_ 
zT=J`|ivD;CTL}yWhksstOWbqe-c^b&?b`p2EDVgmKV**o+4NLDhuMw|6jc#3(3s-- zp^ucpU`zB54vu~Q{&+6-e%q+B42N=AY|Sq54-7O${}yR2fEy8hxhSkknA9_W3PCL6 z*2$`V%88_Mj`3t9ZwjO>V6(lbyvRM~{{mW-iZ(YQ?@(@cwoB?^iv|T$B5lNd?%q|@ z)oo#%exZ}ChPDyX;D&0nQUp7`T5uGp-yK_t7^4Y|>~BD}4?{$f8_b`dW1L19;)1Q) z*FP+btn^y@IEG@B17fi4f`-m*sdi>fUhG%VyG~R~SC=(6Y02+&ZL!Y}Q-9d1jvQS1 z?q$|eh1YN$lKL`=9NuiiI_?{VP4!;Cot2f9B?U(~#?>90aPD4^C2R9ny0@)W#1N)f z=DliHcE>q|?s7Qr+(LUdh%)veB$WZ686t$CQk4DE`sY`^?mA!{m2|V_-oN9%pk+&! zR#ZmIl4dG+7J5m`7rHD+uOD%egK=xUzIcyx%s~iuN6%Deaex~9MDV~JbxZm;NJz}Q zMYOxx`>~9)w56sxLbR#(h%i2@209Mo83lhoj@B1a#!*gz_i7aFIB)B5EqK zf0Im3*qoc>G%C!0anTQ9D>Ub#_fOZi8XH}*NL-t_?}BFAQEc5|j7=EFbixgcU#m<- zPh-nVvgIJ`yvcRKFI4=jN0VJ=WwEv47t*QS+DXIn!^&e(&8XL=vv1K9xM(&pRe6SY!{gZH1m!5FRee0TQ`B^aay#AipVoD$dkUYnbA zVKl4vOJBtUKKd;0QuH%n@5C?YbWX-VD5P8}gJ~M=C|MBBxsyH%2k`{7V4qpEYJSO7 znL?Ilmqyo31oprqI141facr;Z31cg7O5w;qUUwpal~i!SKS8|zFnwQVGOmt&xNiOW zFjq86`#*g61nn3Eun|%-cx^&N+N!<<4RWM&d87JjO5j*BU3`(uElCCe^A_!^<3BCy z17Rfx73@R79p;U05?>M3WPGis0R%}y7^(V_de>7MDwfZQMNtgcQV8_M9f~{gk7t^g zW6uNJLqJf_Ci7^?SD}5-*k{&Xhv3OBGa*}3oFLDJ){gkHOpNMmaM z9bI}ANGICS!;`K7rV@y9OIB`<#LgxdgfBOi(|p6*a@)q|P8X|>f-MYKDGOS@ERCD+ zvE-Y)@coaWx0ZEb8)9r=oAB2^B>(y|3gbmYJ~>Bn;(z!GkXHUf0sp@b`*$^s{~nV6 z9umxf|1D(wKfc~{c~A-0bDd4CF`#1g)0^!=NKYQK1B;HA?kHg}gPbrJ^sa`!QUdrucuE+iR zm2cfw5VA|Qy8(SB8v(JA{a*e@dv6}k^}4nV|D4rlzjGOObeX!W2512yp>h z1q)sCf!EivX)D-9XNh5Y0rmQBm;R!7hl}X|j zh4Kd%V2#QjUaKLk8L-qPDrrcI5Qc5O-0ZH}L2Nbf9SOgUbhHDBo=N4JrB% zgGJ+aQ^X4;JUo2NWn;YfB;dt~&*6*b-oAZHuA>wPk_lp!uGeXlE1{bAj*jMmb9Cj0 z%E}{IHUmcUaMnFcy2`)Zxx<3o@i6X@7fk;?FiY6oOe^{Q3i)#6!T37f@Mmh#M6c=+ z^W7APSEJBkDFzSI%g=9P)XSGu)13!gu6uEae<6>Dg_Ay7XvIOo#_9n>BX4OrnE`Yj zB-FR4xVR7aCjbqsH|P~egqYF(7D~$e$*K>kBVn_cBMI*Sd@Tga6E8?inPDy=jFc{9 zP8vSg#aQ^>&?R+Xg~RGs+zT>QZdj15rl%Jswjoj2E_TR%yn%9bKp!p)_Lmp#ahD)g zwh_t0@Rr5M_Pt}nof~=g?-zzAgxQUz^iQ4hz{17)=Ri^bMrJB9%K`XScGjT<788VTPlh+9d6J0WS!f#FlcCbn{!@ z0Ezd4g%KOl!eSV>88qK~PftDq9F@ECehAQ-UdMmn>cRyWCP%!~h z;jr`KSU4LA) zaDV(*IkEv=6<;*VHPFH!1KgEI)%h+<36gcLdsZ6OW<$N!0iy_ODWtL`-r=Bp6vKXg!Dg0pAl%A+^;X-fo;aj6uI*8FQ z{?CYb*f*f|VHy&G8BPbrXB=zsu_t#RU}hWQxhLU zvh@!htjUzb5rBAFT2_`YX-6;tXATyYo`Vw|HTBTYi$h!<4_`N!yQ{#(-86SXov7;T z@)W=~2AS29HMbJY$~xQ!&_}cT!(p}ui#=juOz z@vYXeYg1MdZDlXa!7hg!6ule37CR*>sjtO+hd%?8W5$i}f1 z5vBo4*)QUywfLbx1eI*5qxk@AQ}15l1*(BM@cGN7Aal@VQM>brh%{s-=9FmbWbokg z?Zpeoe;zfm=r5boR8e8HEL2HeT{X;%0{G_RD3~IO5;>rRL{8(7B*cbeeK-cKyQ|*L zG*?zu&S3Q?lCd5nxDuaLcqCcDW%HW*i&7#?CW`y+h`m`~zj!Euj2ZPnO5G#O^Agc_Ht5wflbvK8(cAQ>Z+u*43QxKe;7 zu1Me^>D?#x+Y-&E-uqaeXJOiqeJFDSdB}JY;#PyFSYw(^o%g!@SDV&t?;@q#9Q0L zu!bx{GRcAXinlkr08b$z0>tOdK-q8_#J_lPiYOC63t3K@0P+!zN0`&(`GA0c=lCo5 z2WPRi$>WLB2#K;g6~moppA*^ztPp{?5z`b^D=RDS3hhjs669jmZ&H$Y3X%hH-oeO{ zTG4`GEwY8@@QP6T^4h45?@tFx)+44N&QsZ~na%YxKH`H_;HK6B)XK#Lg$ZmZ{=84o z9>!l2@D19FsTbtXbHYv77yT;@wzU$fbR<8|Ur!-7sDf$NfT}7hZu4eFM&faT2JA&` z23I3GC|Zb*r85kEFKD00-Tz^Q<{hBCXUO6t8yYX${rFng!%NH+x|u76weH-xLwLn? 
[... remainder of binary PNG patch data omitted ...]
diff --git a/examples_skyline/accuracy_vs_fpr-skyline_formula.png b/examples_skyline/accuracy_vs_fpr-skyline_formula.png
deleted file mode 100644
index 172bfe9e4b8dbca7a624a56a758a5f5fe1fe0d92..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 43487
[... 43487 bytes of binary PNG patch data omitted ...]
z4#PKZz#I!&w0`33@YtNmOSbC5TXtv0Rt21hj}&v03!>%G`;?!5@$1*GHE=0co>FN- zB*}K2(Ub437`8XB@;_eq<2e`lObNIetqX>6u;N( zVp+ilk1Drs-(Fl!8Ca=-O=S|-m3lMYXM2O=+&L9^i(Zjy>a@m#=a#SJ9*+F@`ohG& zrQ$J#pl#npcTZ0v=l(Zm6NHeTa3*!3iP~v~aF^d`(07(?uik`NQq0K6(91Gy zVD&9cPd~>f>ikWxA1)}zyybGx@~^C>TDSp_iNb|!!dWaPTVv7pRa8_6m^Yua=`ZXM zoP&iJ5)qLC%UNS*X9vG})2>7_Sv#%s-37DdK;Nw^L}XO$A=-wzv3k9Ec5@p8)z#H5 zuRpX+wtLQ%jBi8&C}}o}_KvH3N`1437J`qX&0uYAwa83Qj+6IbripxxMceg;a6u$%rHrq%HlDZr}5PwwulmC-G)kCK=|Y~7f#^G$~Rs3P-{p#`pkhK`QS*x$-vwBw{K zEIRe2v4$M?B~u>5Vx2c^%J}7_>sPO`b8{<9cc!+%RM-7VoN0}Hp{Ay`TuCi80o%Og zdxo*gSp7S>F!$1jWYmSFP4)E}a80$_zXByz-#R)ug}i^CEIe@UO3=|0Cz3P%@t8Hnd*Zhp_h2M^lIe0||(1qwRelvdXS1&Hlum7`K% zK!v4{fnp$M-_%orFZt;#YS2d4%haJ7GANw5ICueLrzW76E`S%EdEYQ5%6JPhYEOx0 z9?RrQvDxMb5vOeLP1obb*}l6zB9TffGrii{hVHR(%F%d8FYHoMgESL&?%Y|PjF0BL za6vs@DYDh6Ny4pVg^rds35S!07!nd6AK#gJQ+w=m#vshBUiz*3LbiPg5adnF%&g`H zO7LrTMKePE-fR7fi;GE%7A$Bv8tp97d2dX{KMxFa%i)8N0GlM_>C>l#goG?1QFm^2 zK={X}q;N-Gbhr#Fv-S7FNV0&@?b{z1sGcfCin8+Y@c{~$day#xB(6jC^ontne+z^y z{h7Re*Y}3^hDl)U$itHv`b+DLGaWmy$pI;)K#Y%#jpg9ts%v#Ts}zx77Ug~&ZBNJ} z%kApQn)hBCxqSKZgf$xl*E0!~ZWkw%9eArJ#)fvPp&ms$P zqysLAhK43~pu|(#Ln$*lI@-{CMKj=puoA)_u(Sj%+u6G|0eEygAfa&H-CnC395jNo z%9$nOwD_|I-NjfHA&8aUi#2S83somN;uKG4ZvrSnAsW!b_ClbqhNY!t!8nJuDGUsO z4^h`&2{3~go2JgZ*&=`UJqpA0od0P zBU1;P*0Eun0(Mqy2-gaPg(SH9XrsO0D>YxweHbXUlfSV%8IzE}zB18rrNm<;Gh2dl zX}L{#8uGF_Y?u{53L5uj-Au~NFcqfv!xw4%u1^4vXr*Zwj_jQQhl)+pfW# zRpEZ;Hbs}xSv1@R;X&uRLINEB9clr;>yv4|+sSu~ji;~ObB&Lc4};V-3FCKmf z{~-UbELZ7M64m3UAfj}QHN2-`Vw#+2QPH#U;;^S}4U zVw#uzu0IsEACmT7Y7m~~jp>B4Q&zao{^8d4wn<(u*G!qi$aMruBqb#aho78)q-Iri z1GCMt?H5&icYZV|l;3Ggq2FUVRZv6q>#x7c8>+oR77ta1*7P)~B_RQQKu>p;S--*M zoM7!WlQ4t0!R=W$kA<&vT#>twqv&Anv-7)jQ*{vp$XjIKmZB9d*zdd3_)tB4s~$Qo5z3 zMNVGcX@yz*UOJQSmILLnbJ75y=tUeqy=0ccL$QQPSU_F1-@e(e>_UM_bK(S-udgrQ z@TsLHsW28E-OQ^nDh~Cfz5*wd31rovUS;x{zx4RYlO%&8R|LjWj7Ur?-H?NVJhi8aRD~}>D$0Ac z+l-E}Zf7;GtRQaCXVb=IZomMgp!wm-I)=wRS!RJxs2G#V%cW86+$6b~(wSzI25?PF zK9u`)T3Xu2m>532UcmYSFib$^Nl-<=y~)#`Ylsk0hhd;hHwrn?u*73BIjuB^MKMZT z_uad9XgYwwSgD}kL!embeZxxhquNjjz^(nRy*4o4>UZyc+JSo5sMVE}`+_u)ii*mu z%U7;6K)q-U5Vir}qnWsbgrb^SPfCmBi?GpWnk|e^sf#B%I$|n-d05F3?}(CTvjX%+Kt*mBGQuc^(j!5TG8x zs$))oFOM8Ncx%9W%|Km4<1Q_qp*4V&dRQb-_nG;_M65&kZIA;mRj{zIXe8#|Cn5{qUEAG>UI%nn z53Gd3dm)>8D0~d+7M7OUuQvFruf8^wWYP8pD z{xjJaa6hJy{l~(gc5PRr)Tv-H7a&L92}5%p^j@o}tCNSdplNMw{dgNb+wk$DM+vYx z^|H-h|M>CaD8O!NYHCVGQ4O>ip$atEwkz9qMcfRO6dNmPzNrWZ#J#r-0>JrWWF!Z6 zr)(=AcO0)lp*aAe#pUI8sOLITZwjXxmgu42u|D>GudsxQK6-VwKUp>Add;zzpFi(5 zM2czOzkeT=SetR6L|l8jhBhUX#vXvAMrUSHLwRl*wyxGTG=!9u`Mwi^jXC=Br+%v= zL3@eQh^}T;rqjT^5lV@TiI}N@k|J1vE8mR+Eyf!o9YrIdkP3kBOG6kDYTZ`=?C=F+ z^zwqJio2O>-2(eN4XR3ZDEHj@9G;>Dr5z0@(CXLs3_!@O(L2j*uwAABRwcYPF@nMy zr~-8Wk^=6Fce@5`aQK6`mXi#k&Nra2&g{0v=ccEp@85j-=uv*Dn`op5OzzQx2ZuH` z0HQhnp1-Xf4V+X#sa<#GC`lVoH+a!TBRd_Px0zX4tg?W`9Z4p>zMz{9y=nV8Ffi~X zlSEQ+vG~etf3Id`n$HRsql8C$P6S}-Kat&ZzUsPUM0f6Hi>LHsyw92^;$3N=v@Y^gAfHV7H|VnmJW zqdLFq3S(OtiGko0b)HZO;4Xs2i=XrM@(PZQZW8&CpiT^n2^A(VW)pvh91~Mht?#>G zm&AY~g)Q0%@y@`=tTCbmM!?h60v}8(?f$-=Ipm4l6hkC?p;}3O~=t6DLyq zO`x#!fc#arybaq&;8r;X3wmO6uB>dLkP|MA4D!mydeu~;mygvZqkcNc$;kzWgv3(8 z0xyh8;yz(7OejGAXlHw~uB}ZSVsaAflWgnm^MI*S<(38@fa;Za2}F1tI&=L-P;qhbQD)|}r%WC`J|r@g{f0)l30s?xjQ2Z^iMEQuoAMZ5ZuseD_N}N;$&x6QBrzYy0s`T z8!Iz%{``3jH8sZ;y~~#`qlrTMrnaf6YjuYmEtf%C2%?W4;*NR3l}i0f6Li$;_%^*sEqD5i`E3Hmj-@? 
zT%6G=b#Sx*NZ}mQ2KtiaR(ZrXqg-PSOoC-wTtIRY)N0wcD@ngrSHn$c>)1g=FC?$2 zuC!>q9Lw1EwuLpV+sRTu1;Y2LIXE}~lb`|AuAuezb6uOTI2Gsa(nv>o+BUt z1*}!e$7{2^BndP?5n{Oeb#--ZQ0}HB;|ps6rzsK=6f#VArD@;0moMrzn*wOcXucG7 z^s7@>fPRbxzFvD;Uk|evt}3hoLexhEu!kU!LWnFBEd1rVmg&N zWLQTv)5MZ>Z>}Pca$40d89R#IofO}myTql9=Wb(@kT3v3C5C14$qC#1A#W^U?$|Vj z+aPZquW4-Lby`{*8X7t;Y!~(k*nYIkg0j8V9U-Jo05WmWW5e|KSBYW?l1;TmLtai} z^(}~IbQ^G&L5j))pkGpSvnV!L+1R*1BiZ9F0fcqBBvQ%&K`t)-TdMzSdO+VCd>EA1 zZvpQf@ps92>RT6Le4;>Goqjs$A?6*0!{N>=E5jBp9Ga8?j?I21q^6;v1NIrs!Grdn zGY=m*67u#fyX<~y>OAMB!8mqMd>~f3x*cDk@BOw7xpidruqCl^r00TNfur@AHA`+| z1DoM8*YRfo0Uf2@o6bY!1T?g?oWNbgTo2%E8wZe!KhY(%*5`!S*zuQY1h}4m%nM|A z#F_~JONsE_NL2vBTCklyXeMjPa&8+rz)df7ok=<)^O7B?)&@-?KS6&NOvSse3jfkx zC(DrmpUm**xMF;FHwOX758O#frr+Vwerfa5(BFlwTBO;J1XvWfWHle3(wm)gb8~5F zWjj4Q)3%n7_CE{_-2y(Ypy(*BvA{Fs&9S+u{l*3>!0SZ#Mq#%Tu0lR*u4Do4o-k_E25=+!479JG_Ef}Y>vC1(QWN9be~MW4c&Zz zb3A-zU3i1%&z`9&DEI?G@6w^E%>nruC<%2+t<9|LY=MAd{6LNZ`8Wa8ZRt;w#<%=e zP6$~i%x}|hX|YR4l+ z$1}$%MJ7QEHwO?PsA(E`Zz?gKcVp){4VS>3FXTvROO*24c$hJ3hCa&^cVF+P{SDWm zpTnvfQ}+5B=OrX0VD%&vIE@?58(zQu#I|_hI?TPrw^xY%4A$6?8G$g};YUE!4tiqA z?#?C+ukZZOXZ%e%Mln|f*CGSRQo*3iO=y>GgdlkU_VahaCKxFo@J%3d4S}lDwzb8x zSO6`G@`1<7)TqbD{;$OlR|_|$l3gIYO#{p8kOL934VH12OP4|Xvm)(R;%#aH6?q0K4t~|zI<81pQTcdQj#I;}q265vZUEPl>dSd$VYvs!DZKm0p&mB@ z9BOCD+CVbg5|AF5B~ze$QGnv7BSAGLDTmAy5F5y=PeGZ0vK8++TOb3`q5VRWcnswU zp^3~o-Zli*U`gpxOKqjk7B4AngP5lQZv!Rc_X}Q62zZF_M49fmv2%l9mpO<%qMoZM zp}Yo3sM3OafHVk?wOIqxfsyJ!tM$dvo+7t2YoYf_QR0byepmp#U{YU+;^W7UQIUxP zJA}nZNpC$EN_MdXcCM4ZQ&15Cw56I)tX7-6{y4;UD{Mk801bmh zw)JPtn-$1|1p`H+p*MdH4}bXM4{}DRv-GAJ&)@w63hWLLb6`1r1Lhn6rizD0A--f) z@9UQ@6R1{?P>M%584`dTltHf>OxvK=@Mtmdg(8t&!Xulv=+}8DS?{2bpZrUtT8NO16tR+2oS zjnVT^>BnW6HPs%A0la64Ze@W}d}~{qpzn?s7xQQks4tcP2k6CIQ#w<0V~n<^v|S2) za2&=d$zrgH$k6wF_|Vytiois^!$|FcAtKTPDxih3)PS-B#9wHbRbFonKoi-*iS>u$ z%hO%T2*AUBV3kE^GjGr**|xus1BzFK4_P=^Sw}zC9lvOg(@N3#04O>VkR!<8U7!cK z%;a=g0-FH92kjylk<;g5K;sBNLH#4(HmmOq8wg>8GRLMh3fMlwl~1AU*_KzEBs$R; zC2@1ojKuy_m1tFA+#eWadysC{r=>vr`tRV}u3%JbKvr~8EL-idLMswwF#l229Hegf8sO zzc45^R~!S8x~mJ3v_SME*$tK!LYe1W&FGc@crD- zePKQ{qMyEexf&ZEk8rknoPsISFqK&AYt(H^IHy%5aO8MhUdJ2 zRnnZL#yN`TG@uF^nDTGO&dBzZdKZyW(tkqb>F@brQLo?W$r7TXdO)yvY_D{28MpyT zML7(39lb#D#Xpk00nktNfwr`BdEvqlElR(Mqs1SFK0Djw|v@~ioT zUGWDQy@wVPcDl$?WUx5T?(MP{3~f;`d}bVWfY{(C@F`d>0naH`tgc(X}bS+>`z$1_iy0%`%oyX{kK&;T)U2pP55)TYr73P7i zUdnUTn(?B&G{h-U_r*`Z3JE}RKkpe1I@rWi^s84mAb+fuCtA?nmz9-mu;s=&M(l4q zd!}YMslyhZ<*b*K6`y>r^8@}6B=(?e{u-L7rW&qV_13CSq7=G|9Oc=NJaON(=4-3fh5C>B~wO-l!!hh-!6^edL1xs@*oZfF&r(%1S*Q zcUoMO>z!tozVMIHx^uRvdAdNbEnHOzhJOmmkwsy}8wd;M)nB^DjgKBbo(OzmsBR;a z=~mhJTErJ^fGm^)!Wh(I@}Qz6wXF~(oO(y&Z$XozeZ-h?>PRC>cnQ7xLM7MkzQ&)XwfVnrr4A>oGS>MvDkX+N0uWypN8K@1@gmEZI1jF2pb@Hnt1 zKmvUPhXMy=ooKJcXFgjCGLaIVTJmAMCAS7ZsYKKaBA}yucie#vl!Lg8*heL$Mw0FA zt*xU_tL#AtFx~{a?J)6SvxoFPztvLp6y)#xAbCofDCchHE8WF~MUJ+ykSV8kAyHF#dqJs-LHhBKB363@d?80eCpjs*_ub4c4@gubsnWcaM-;21(`T07Kt&`DyjFRx20>GyU z69ZW}5$t3|yeaFkk16P;kXZ?WGUeP$5-= z6Gv6`^>O|?k946heb1E?wde!mUUR*|N=f!t44%?4qvbh&mb?Dg`_yHZme>H7Dgplm zh^v!77|e0BtjjkNvM5NW-vF;_S^0z#aRuqOD>+N-Nu`OxUa6ST9q42O#6SmOIY7)! 
zN*yz=KQ@ocRh}edErdQ@uQc+pi zQQ&0O5GAqsVC}+%3oezE!h7HZN)6zW`&C%6b}2A{nm>SO0A-1r1@JHcC$u4b0c5OO zb{~6ZCIG_2%=e5diHV6DmCQRsah3!)AK2W|N3v~tgmk$DJ8c}=sARr~UiT8sC6*mh+8(28M2r@$S;49n8RES3F8Ip+yKtboR`s3j4 z`+%<__VaiKPE>X@6jTHvD0ef@wgZrSj^~=K`@LX^d+)$p1FW@dD`@Nq01|{XdeeW7 zjb%o1eipjyecg}L?<_kz`wcQ`CTB%66B7=0_5_9K^`&tuDAj^c{f2-Iq{TMCaH?@e zckX-ywt-c4czBpz{GR;`?I8K5*w~CeKdnln8EYm6Aqs$tx2XM)9|$cW;o(WZorg>w zp{Gv*i-KHG>CT1~kkrXQSI)B2VN*0Xv~~5cS^3W|;(Td8+xtLoqoLa1ET8XP;ZT`@gs@;Pvkh;*w9X zvR(teyn~2Z90y~w{LA_wE?zs^tgNJ@<>;8nGFi>+s{vu?M3T)#Fc@`T54m-M02}fl zo>(F#A($n4HUc6OM2xOy3OIi=7coK3#RqWNt($lvV0}WK|px(pb7vg5|8mn zx4T*0eflD3k_qGOgU|5tDgpay(i7z|#nH6}6haS}!VJRDlzR9{{J&`NR`I5KdN)s= zi77UX>S+-dA%^R8FDwF=eQ@O|*0XZ%?y+r3W|P@0tStFyyclMA+!IHjp$v8U@OngX zajPqa(eo1u_)XQ+XLnLz(q}IDk*Y|bqLTnKX92N4>DRYIPjhW^xOf__+}sUKK2x^J z+55R#CS0&i;wu5BB7}z{!>FO3Jdbn()zcFR-_-`7Ub~wHw+1vBsdMPnF*ZQByO!{Q7q7A?6qN7^B3=ftyO@-o4D?l1i5D zpEHbc{Y>i4igH2XQOmEkX^I$%?ga~DxP_6^mod#wyupK>9IleOCjil@QDwagtuer9cL)?44V z)|njjQVKO^-W2#9#F{4@=%}!c(B?Du%MW1Yj_~cUy8g5IUn9EmRQF8KQ<=%+m_-?P zZDU$kgEYLSiB%-1o(8{VAKe~oBaduswUCu#->s;ytS7{zODbV*JKh8@I~|r}<4emf z88apD+qhq8_M2oCZ1GX``{ABGN@N2~ zGVA27iT@Q-JUXJ@W9XEH`E5G-*c`$QaTAo4D>c`$y+Vi82-}_PJI92p8-QU-)62nl$kXd#otojWT zdN8J=K7ln0FpB(W?o4%!jdeGl)+&0*a9iA(8vu=TfWt3Gb||zKpUd%OJM0x*la-{0 zJ?tlrNVhrs{Nu`?*BdbYwJxON$`kEMIT)#XbBx8UC*TWlRV+3Nj*p>yc)l2%{rV{- zX08W|6%{(bE`#cDfk}uI613piC5?4z8g#4alOH@-tS))p(~=Hrp#81N?jkd%imDhU zpfxZs8t6%r6LT9)4S&{?`k+AVZZnt7d+X+%2bqL0+%tPrGH&Btk_L(24I+Jtp&QZyD zfvcXLPaz(@qaZ)A5ACq)08USyBQg&U3aXWyy{VQ(81u)q+pQOl&nE86d9W9X1W#o> zpdjY5lz9kS3L@aX4>bBGbyUUZxl#LxwBlPq`9%)TQ#lB0w}916Y}I~R*-w$=Nty4ANiz=dGGO|M zEnOIAYY~l%Z43JPqSng#l1myv7!{Uq2FmE6AXv5b;%L?OOZ@YVFst?`Vi|7!%TxO( zbN>CQcW^BW9KB<(UT4)pxPJZkA;)a&*+LQ`>iFeD>dhHB`6aZHU_lLVttMr|o>QbJ zp!bvi>vBWJ2Gi49ExNusjpNguL|Kh}N-Ji=PW24G+SyraU2Dqd{=}iqo4BCEC9XoKIlpYX83O*K&wqTr`>m6^#{|_ee-a*HRN01rdG%B3l~~zEEaT43r__ z{K%0QpsQVGdNjd%4FJNP8JLyB+HAG%wLk@qbHu~Xhd)B7zjX~cQD(~I1V%-&z(vwqikB~sYIkVHHtkw;@!l`jZ;FkJ zvjj=Q68O%7IkvwPwB?aNDNY`qRH|ev zUhlIbYOc8&I_!dWyd)&D_wUbg9dmgCXju`QK(C;eXd=83)Ft?(z*}8ieFZoi?fewr z@>tKFl?#HRK=9uD5PfP6Ke(}s#WX*bTAzVi1xbfvNh9v^_7y>^Z`=Ws{A1(29y^Qp zE0U6ZYnjg=Ku8}!)@GJl&9|7sTRle&Z)vsd}SRVSuKA2>s@|^M>_)Tw?`~ zSj+EpGI}gL;m<~c5DQOGe`*z)B1E#NxCrTAH8;1$XM}dWydDSv#?t?v8EWKC6aB@m zZqqm767oh|k^8WzeH3}Wu8yDtBlu%$+9CoHRboUNLDL*`i9LRY;h02(ud}j-MCBf$ z*Q1X*UKCD@GSnfuqNq5z$h}W^2Z`p)73i+;W)&Acjv{hJ>xw{#!QIMh7XfYgL-xI{ z;oPrKF!RsnQd&qSI+C@M!S%HG60>|1J8e-rtv{)dKt95VwWnS`feQmw14Ql#5Q#g1 z4CptYYMx+a36}KZ;!at9LVx=$0-38u&Sy0qEl^=oS}~N(FKBsM!I=ag znoTiMR15x< zUOK?RgMd}C++&qiP$YmQsQSBMDsJ>Peug=gAxNbMDLWacB-lZ1g)Y;}K#@$wh8a4# zxnnBEe|;)hBDX=I26BUgmcDU*2K+@xqIjP~=oemnj|_83hG~EbZ>p9n%~^35_$^V# zpML;1^W?zsA62y&&Xj+EK)T;DJKp{a7!E2&#sj;c^$5%dMH#2D1V%KZJQ$KBs7Fvl z&4$~;7KYlZU*{0kPE{p_CEN@UlFqZ~)dJ1TQ87)wKn<7~2jgp)irBF^zbBtF>P~HP zxw?x_q6ZOI5n!;B5?i8(wt-(}KXvL7Vq*X7i~zxMdKQYRDj3m9^nXdDlC725nZsyi z+Y{fE{{BHRIyNR31ZKT{{Xa*8+mY{i`*VNO_)nTq4h!SM1cd~a*#vq)$Y(dxoh1XE zTln1L7fkd2x;E+UP(&1pP32wpq*?~&P&QDoB_LLuubKdb?oHdIvzsoaG6Gk!!PhR; zzW2%#IH}z&r;^RtB1>rJin{k>AGDijf=4m6sE*K&9%jAWcQ5&e0b*t5V{V{7U#Cbj ztR~Qd^@GeM8>{>SNZ?oCjzG>7VQmsjFmQn?VfU~H3%mQ>%!;kbyKfW$1IRJGt6y)T z)QFKF^Z5DgH6n4DJSI*d? 
zJB5K9e?SHzj~IA8fyI*!Xs2v<8=p0v5+4nI?a*Hx{FETSDfXoshE3C(_=SEtb>)dU zwC0?z(g{xk9uOQDVL&>@FE2YIiv{S>3emu`|3fB2f=j^h3vp-V$)-|Y^j9C9RqtP* zlIC&vC)1w(=0k=>Ho*os?@^Z_NTk5Na-l{;FbIS+!aDq~7?Fw{^(9Z?L`K)J0E{K%T)!Jo;`c`?W&iI$)*kAA)7uB zH=sd|RTi4#VxZm9(z-uD)z+4`H{tN?z;(W-W!#XsyAW=rhN>xv%uh}LdhO;!%b=!CY)C@wizopQomRlcvh;BYYVLoj zzk7v&M1b*mJg$@b*Lpj5eqVUhpY`@srI6z<$BDY}-2{u>i}flJhyzb8ftC4ig|MiY#Xs zA{3IQrWg3O?QL$J0NsUgw=19M>Kr;K{g0O}i(&J4KgwosTB?DL+2{5xAtgn_!Xgpu zcHH;_hBn?tk9p-vRot~ico?S=OgZA51&_ZE12`sHv-rlz?0v%BKVzL|%#Up_ z4teXI$keb5_O#}*VKt#lyBIIq(M5Ni?#SV(UibI=UH4ZbV+i2r1W>@Idh^o2(g|&) z+ci^}e%GBuV;RLhy*kKo>az?lFMrW3zqU!v())G~R>=1*C%Pos;N>$uPDuehp)1d& zPK)du;@K>bZ?sTbSvl_!$$Xv=I<=-mdLc4#Cwz$oT|KlUx9oeqLre$j?-XrAe*g4y zm2ahdV}=W__pba7(N4kFKbjI&-6q{yR6JG%Fg2BTn4z?NZo-A~N?9`V_&2@M@6%qg z#wfjt@eDy~Uwb~mxeB5NnI zguF;5q-Q&cZ>IV~c=-K<8#ZJ>AV#j39b*JHCgdgrKoF}9j{SH*fHw_jo7p=kb|P!${`n1lBs!Ak z5=i=vz62M(Rd=QdayFuVHE{CW2m&qD+`&Y8MAPWm_VyZ{D|_UJenDFrz8TLe#8oF~ zQ^_~@$nL`+0d3NBA=QRnIYmC!pRCPC$tRj*hqKEc zCgm|I)rV!=I?)o716B(prEY*Uj0e0>J++A8YMSRT3I_uj6&Q*^DlA!~YopF#FzF?M z&h7Ez8Cwc31s2Y*O_h$-k&)&=lrTeN@%alEa=@3GEKlHPdaaD8_?vCm;-WrX{Lj%t zQ4epZ>>Xx-OWb1)up@uA4DH){vYZ%E4A?Dbeu0N`_xDb?c>CuG7Ycu#aB=pZZ}W?` z|9jD{9K*1C%mnoFL`w0&?>Iuj&yB9ITV7@&s{oIMxk0! zK`i3d9Q=?3y+v17%_4C;w^gmn74sA6czAeyJ%Bmbe%7AT$tG?waIEgthpm|PTK)I3 ze^4C;?Tq^9(YL+8Kp3bWkh^R1P0ix*_$4_mxsaUuuLEwuXt$*GBMN3PzaGJ_xT_Ec z-0(JHgcRtTy1UhM-j3&{pX+k<{j7fpEB(kp%KGuL)qn_I;>?N9aHPj?f+7RQi=VVo zS>7u?4|J2r?M=m}++5?p>9R6EXfj$(c}huHHkxOv zBvW7qf1{7@`aToD{D*y0$iQv4?e))Vxl3sjJNFNnV-5IRk^ATa1M|~Qz-?;a^;F2M z(DP`dIbc--*RaEJkoQ;BI*=~r9%eiXCc9EK{GUJkUR?lc3jXh?a0`DD+jTB=N*LaK z3rw1YCf|Z)iQH#ud9Q9ps&3zRW)>D$vqqyCmEachnPx1UcKRg|M#um}YTKQD2Ta-z z4(Bk}(~qJmziejk;FOthmz8BE(W!%AHaI!d(L}(jOm^$_l>E8HCDuxNqX)N3#|69Z zA50)`miAlP7{|imiPCSpF?jGfhamx+Fy!{3UR2*_@GDi@4)Ezd$u+#6hxDl0A7Yfm z7bVFs;umL&h#y*ss8ACM931-1fF0xx|8@8PepFfled34{CY^4cDSs|MlxXD^>*bT> znroXW++EoL`4_eC*-kaxe@GU&u`$gJWGn{ zMjc{E_W}E$#Hu^Ef&`)I4ISYSDyH6c`daxJzw7_Jc~ ztMB|8K+P@a=Sc1A(+5vgLVCIiGG;>+4n~g2jOfbYSMVr&3Z7@5CuOhcH7|7c}!9mVr=)ctfKOBg# z603D!uOFj+Bl{*O2ztp9Ow$GjVFdP3Q!VY|FnlMf?4NoXwZpH_hbs(YekW1m0}OMf zzepRhn;x9^0=GeYwE#4}zB7K%58nzhB$`Z$B$7tv7Ze{k_z5YoOY8yy8i-i{hCmZ; zBs#4CH65J^6yI}+fSnH*v<_r^1JM<^;*j|s+T2ok8O;S~>8`4yy{e~Vwv=|_NSzNHAi8F?g3~pE(V7koZ453yfae z5$(Fx{u~=JtehIyOo7N>)?ERbO-E_RdO(@+gTVcd_kk14FYarA76C@^5<&}{Y4o2W zKCwu=y4;OdmoMuZMJgqOWL2K$aOl**FFNigFl_1AebNWVxa^3qTTHN(@1zt{Vt($# z<|qHiSQNsrF%%N2YJpaLD+8PCuNS%vV-Fbi-a`f91vIz4WHY25p=41_HK;%I>}&i< z7&hidooG0eSge2EpPBC7;$xhi!A+mixen5~({KS={+-yl$}srDXwQ&={#9~Zce=(! 
zEX&LNKARDqa^NfM2-8Y)%D&@0be(1LeS;Eu^ zCPodc02m;Nf~N>N=;FLJyIzHi(vv6P74H=4rK8&<1hANyGic7Ag<#&`@4R1NsprLc z>ci@!CGkZ0m!niz!;{zw0%D@LWD@4b+E;yt_UuiN+9*#?VmHyVfu+xg7w3lo zA`8yi(S#6+oS&#|20Wt3=*qMy4vnqha9kB|FehaT=PM{u*=gWCv?rybk#7qe7*0|z zA$o#es9F;B9KmKqtu+Q7ld*bvwvl;tpn&*Ydt1>$4h@W25G8>4L$WjS$%C5#yamhO zn}+S-+=4&d-4PKC6iF-0K+HtMc-X%L{|ytt2KWDN1Tr;^V?kK~nkXz+H@0<^l&HoN zm;xkZ02zg_(j3hXwPA6tvn@ z?c7}QoIERiT6$Bk9k6=i{W6rus{jhSyY>4EW-m8A!J$GzL2C}jehlzZo=e4*_kk#_ zB77ZpAETo)vlYW|79#%!>&`uG9E3oCg+=l_e(Y{bLE!|ENq}#hD*k+zDd(4D?5 zQsHU!Iqx2sOF^6V6_3Xl=+$bawWX5Mg1Q#f?wA9)_%V9!vMZ8tWl$DKVQA@{k?CV z|0&E>T)r3V{4QivP)vb128RtE;VQ;4p)dL8lYS?2pvfOLFb6-sDo7||w0^8I`XWRRHrPW}&}SjHeUlV!uUdI7Kh0(n!^z)2+9b{6Jya|A!SGhAcYOOv=br5 zdH|&=0Hha?*`DLS#_ccu1PNO9DK)bebV7!4t$+r{8W9gsX{ZjC_`ydA3tbPUpr8Ob zaU??-te~5ly>?{=)RMn%1yrL(!!hdQYk-Z-{^M}Pd9y$>lRl#?YUq;~> z`p2M{)-*K4qw@mKoT)U92A(g!%m|_ew4BV$oF^z}`(K1S%2M0es80f{JjfdbT(VXe zB_@e#$V-;se-0)+Kk<&AoSot{rp40JvQftD3_!Bk!7l7s8w*vNe ziTc;u%4I|%)h<6cdicl2l(U|}A&i)V{(R$QKT@w(Z5$@1a_j3sxARL)P2~oC?OaT- zgLf^hzCD`2fiKQWMJrT8Z>k_QTZ5A`GmZ%(*UJ(7nZj)~bzd2hv19@cH#RdmI*>|) zJutMPEXbW9-wy>U+1IZf#U+|tbDad(rL^29`mota2nB{7!d6c|*JJ~>8wG#C6->H0 zDfN_@*-dq7FDSX^FF`|={f&i%poTH%%DSPds-;beU5d+nVx4!>;5heDOBVvn(ljr+ zH`>XOn}Y;%mSiY=uOlB9JCO$ojtSp2N_GThB7GM2m zICHq(b_WGP;zubd%#}_#s5r-QplvQK&CY%^qT)>u!u$} zB!y#()ZusuD9XU!SL_sJ1iik{vxC|<;3TQZ$8?72I63$no%_AtBV#i&)!^K$c60_4 z2*@D5fPY36k(u(e`QevCiHRqoKf4Z1sxF1@eyPY&goL0*K4=k` z7N5mkdWg;l6SB!5c;ciX)p*E@aW_Edz<8oh#F^jcTUVC@?YA&9 z_kKA+&+6J5IF?jFF)1-P3)jwFar9-@qbE<`ypn6EcZ-vY%LPsoNm7oM^!WK&1|2#F zZ=HZ|>x2%lqhLsd1E+Wl3freLA!81)5>L?kgWwJp`late&~>u52IaVm@{D;xeSQ7e zM4nd;FQ=+S9;A(wwLF1<8i)ewpWQsfd+8Ugi1#&rwL!~;={r8d7fx|PxtrhV9mpsa z2i)iG?$EHLS(=&Iz#cg|FPk(Qz_~t8XHiH#zH?RoK0YY93j<`FX;FX zT1v##NPbiXC%!aPWvmWhrZFhCm0bWJ9#nVDG4I+$s6k z=mKTXLQrG-j~{o^IxS+KM8tp=7LAf8Y6Y_kBfE#|>hL$zmhxcSI-ZvyMvcyPD23BH zoN~>~%sj<&yRG41i}7D;K(EW9-efp6B&*|(a&_QxP#<5PFN5Kgw1+e>t0v$toreK* zZuDKFGx!AQF9F&*kZ+l{c)r4E6|G+(o1RZn+mp)RYEXrSWAKGcM_iehgA|-?Hz>I_ z0um&Mt?l{Bpp_x!BSt2WQ&hC4H&!pM)CtWpYRaM)vh1GfvOt7|I z&^}cKrILSe@X-p{-FHhGDl62q#6){{%pfodI}PK}e#)BL@Jj9D+kW-KS(!=e8(mRUdr=#=2l(6Jpj zK{X!AupGNVLpZQv3Oez8)`|vYgQ{0iM<`H-eC!3%X+EBY!^(xq( zz*VFO=!I636jOX^NOll+pHvLg{7vm}&~3!W*I_|=!@;prZSnAWIBt>)*g9ah=Cu;x zAb)DCFo|h=m94tv>pg49pD^D-1C^(xKHLKtF zosfh;b|)16Fc{tLxIj5G)8$+c0)$QS8H!xQyTd=x{{@uWOu5Xj&QqA&Ufp$Vd4%jw z8JhS*5%T~@7+*9!Cb9JOldOg0PMFMsHJb-Xyfc9 zR(a(07A;x?Bdz{UyO$m8)361lLY-foVkAc-CUcTS7|p4wsgabB;A0-M@PO)uG5J9d zl}OqFd$=Z}*TlL4&H$SrH391d-qQB7O>kQw9~uS-3E-;qbKz3OUlUX(hVk8XY4Pxl zXt(#qgR~Slf!xZ19%u#ZUeUkqf^^woU=pHvxS?nu`v!Zf`9A)%8x=sp*Z@U&c=ztz z6>JCOV{xDpr#ybxYCoPnhU%Sf))+=;s<6~L3g4FsbRFQVG~nG2#!!Wxz7ep{4Mt9t zs_t+EBIt+0%s7qcjd63KyEh~WcCJt2pI?Avz+ zT3?M&;@tboFZ0OR3FQ-v7<^K3vO;AwHoLakwHFH99^_Py1Ox=CfiMyW-cIKzx~ioG z?_jAGgn|Y3t)&}a!47wi<5Os7Z$tmvR7G8#8$1mO_^t$kJlM#g=;@YQa@``3OX@9o zGQ_e$nj$X?-H<0Sy%q~3`3^iG#E8FQd|ceG@UoHe{_)O)3rrv}DDZ;NU*UvAUK-Ra zy-u4`O_`NMn`w`A!*%@laWz0Dtnh5_ZOC{hw)FK)5NGXQEm*~YOB$<<_0A*b@;(#h zvvR`l~cpH^(O@zlUS9B(7f|ypyUxzmX); zGr>)p-j2hvF9-r)L3j{(1%eC39qlvok|b{^$7pFKd=e->g0Q@}K;0RVQ9|C{FN<#! 
z2q9iw0#Z3*$z=Za#zy~R9K|TCELBugI7pcW&#Ysc4KphiGBG(vYf@TP@$=i=d+>k@ zbTH@djJQNtI$5*1DWFKreG}yTTriU?hDW)zp-cU7@_Miq3`WDv}}>ZNP8o@<|h7yij%z(@wm9;iZqohVq%garo!<#IZbX*A_pNB zke{dXVBt8RNT|3>8pI~XFBo9F4GmmBN#wt_?;$_p*lJPwBg(p9#XQLz5kOB6$Gh)XG~?AlZN zVcHOd!rV=*e^!#)2UEC<*REdG>a@{i?7|Hgdk$VtB&%Ir~c0 zRh)=J{kFgWrMxE_TeC|rF>#aNwr$}Q zCW8(5-Uc~tX~#I6#~5FKTUtRibPO7Vg;?fT5*AUzb6Xo2H*ykZjiB`ObS{(BSh}p3 zn1Y3i7KxK74=vk}i-XM1*-wk)v=+unBDa%#tx2yM_84x5UKs9VMo+e15e-C*Qhiin zt_!Q_)ZbJ;?FEZIbXJKoH{l4sfH}4>Vk+*|C|@e5H2niK8cpGWcjS*IQqh0>sKdk~ zqXnKhkz5kL1GrD7v`}HG;RgjQHHKQB?~@O5^8@(jhfH6ADGtldL^4cb|qbB{+*n<`v{>17I94fBK5Ka=?AVx<# zYHCdMXsUPC)PcD#7wi5@KHL$Pug4HXbU z0e%QNaJv#w@$xx_kg);#AnJ9cxmP5_7#JlwoNr{PQc6IgNa?ahqE11auy@+UW+XhKu4GSB6 zDo)lBm?a(A<-b%L&U(2@lb^32=!WKi)Y(KS8FHnoYz637a{ZaRfJ&zl*FX&&blY0I zC;J#`#tCZ~ZFAs8*NL5Fvl}f)s$bfobQwPUS8SeKocTAdUysM?vF@&ZM6ABy29!t& zaHIkGh>RJ>-?0+^k@}msvbTB;2R$;USSdP>UVPSkw)5_7RMmuuu0bFt zvQ+nk7KiYsVwqaO1&`S15Nln;WrI*0rmHpr*P{8%*nEVU3AxzBzzN6Kh|lbp4;Zj% zBxZ~$;!vvs-^+PoG}ILw})R*ccnUijn7nx4*?h8b(IPO zxgmz>UdUZju_8#>NjAsn?7rtm#=Jo6A}2NE&bq4%&zvI$9mE%KG*694g~19T+96nO z5i?z!vwE~V(uu$>9VOFp5Wcf|QnE)P@~4V&XQzG)*QA;#5px=3U@QtFSCMH||K@Q@ zEY}Gb36jUxH|wKy5n}P%bkkS!$UEQ+a!2Yl+Nm^qICKQRw}KlOkuxEgAnOAykoX;) zDMxVS$@T1h#mKLVJ_O<20^1->jR(Q>F=wl5_{)gRyaHgM+p^AyBAK7&5$g%N+|ls0 zWL^&07WnX_7$zV@14xT&UotKLVM5BZz*n}17(u|;v>JXA`yyqu^Cz1Jg zc_ksd8GHXY4ne&REU#{?Re&e1dv96^@eP57{f^=`Qz>?xbFD}^(eTg%YV$0YWC*x} zUt(^5)B+|1Ob*ivz}Rc(cA!+7W2y)l)-zfaJyYGSlktq&28 zq5rW=pN_=^t|-#G!DT)KX3S%e1?T{DaV0|q4yzVWw24h#*;zvDMkLpUNM9|8Yo%^{ z=%G678{}v%0d{YV`=P8?_8a+HjqVhxQsS=QvNa6W7$G?G>D4c&9% zkxcB5Vbe1tq~Kzk>k9wJeG{_2`NtNQ9LD~EQ_}&W(uPMgBw|!d@0ep3bwbNg4Ffzv zuO^))VxB}U-jbv_5Q0+S7N{J396OD0%R~4S$jBoioPmGa_c}|gKBDwqtATK+*m;)n z&3QzYVnrIzh{*D%XZX0eJ<4|S5svsvhx=>4Vs%q-x?^xcZ;dS7C(jC)Dh}>^EK_vWUE1MdbHHE5vl=*g3Bku@QR&)s}sXv0^c&2!LQ`%(oYdIvXu99Iw_>_l7vixa3zw?gNByPtT zIyOXDLTGsUbZe3(sk%mC!q(Wmb>1N)I7EU5v%n)pi$6J!WPRF^YQr;6(sU#L}ozb`d^(`zwZjJN=p8LprA!6F{0%sFr?)%| z17NJtIAV{vO9YrB6EqNuz$Md1M;6iTtx)oJb&$b`f!*AIm@G(k$|r9U6D8OiHy?>M z!4^PVssA)hH_q(d9Ma{ScMkvfw)$)^M~%tR+@N z?p_3HRkx=%P;BxbWE|Y-N|e?esgta^Wn;j_BnAIy@s|xNodEm{4(;fBiSo!{Lu2Ep zl+5NfbyoXhA(B z2D{(uyus)ubBc)m&=_^-bo;9OpT-M}%sdq&)kB&G;cC3Lpcu(3Kfsk48(U5cUI!yNHPo5Y*C~;cKq!P*^pec7)X9X{L`1dt^m(Z64e;d z_mHB9l&Fv5;&#GjuL6v91@GrbKGk@}Mf zPtE1cXI?WQh0z9y23?H01S&5y5(F(k=8YEJVMv?cOB|B!*ts9(k9&b$gN77Ae35Z^ z$OyZv&!L*|rEr4jGxcmQ@(C@ZlDc4kLk^J=J@t7#NuqK1JKlyNB}y93Aa;=?Nkm)pDIt|eF58o}}#9u<~dLMNO z$)U+zCblq8#}L~Uk|9FkgS0cq(fj$mdn#lI4^Wwg?$|>d%B!o!3I(Cdhe;Py{#!1F1&m$9fq)T0L?N7r5PIkYywb- z4NyK4?;VovRF&jFy*-x=q0k?baj_+-6X+$@xN&3?k{J=?UZW0xu`>aGh+DOhTiXRb zr57kH;fa$qYlwdmlQdHMFmY~GLCTvA*ZtJrdcAl*V#!5zA5>Q!JY>U?n1wUT2`L~M zXc&4AnR_=$*NBLi|8yuhq#qlk?IgQR=iotNHk)>pQn8!tajmVNR^m#hXU)%lf~gou znh8~?A@}voz7_Uo<+tP5cBiLDI91YU3z9gSEWaO_KvlQ_9$?XkF);I3!7}-SaO%;j z)-i0j+@Xe<%f{3*c}Y}Gx_jXCKi^6&(;;- z_K`_Vq>`)ebK&$~qk%AYhd8BSu@S*KX&`!dKtPBaLe7!u$p#pIAHi9I5Ym?Ol27Y0G2h<)ABvr`X|TsRf2 z)Q8v6Xk>IW+?jkKI zH)?4jf+Fh@@rAfH69-OGA|uH3MMghPO#FzwBsa|ZAVhj8 z=E;nP>EBKcM@21FpLV+ewG2V0FJE44J~cjAjKY6QN`k%h#3Ua+>VmLw7I&l9KOFMo zsZvXGMcj}&TkUtrri#Q3t|Pk|w=D^YI&p6uHI|_-C?{vo4!BXoywPhdUqPA2`SXtu z6$u;erbVRlt3~tJe99NPOmQz{o>GuzMyef)t-nb`vxG#9Jci-PN^3r2#$Jp$NLw6m-V3 z{udZ|_>TXZ-Li!`C~5yf!xG)Fp#tzl4cbvi?7Il6vjtXeo`it^i`H*McH%WB=h_dc z$vcwPg?JIyOj655dwsu!6bEFKtaowNbOkR!tX;j3m<#+f9y^@miAQ><6NZQYSE;$I zLs!sw{s>I|3MNrF6EJ3s1_*+Tbl{tO52t1VMbBW6Za3!95KabRHjS^v85*O8`&LC} zJ(5&~-~^ahGKv9C!fOkMBO)-!5dVb|w*uT$1FgrisDRrHr6`??@~~l{TtT6Ylh9E~ zWbR>(^g&tf54% z9~Zz{Q(i_K5J9Ua5q@aMfe0Frd-F{e78RX@?`G&dV(d>+H5@F-aQX9GJa=_bU6HA% 
zXip@({zbnf3;h-n?((eNz>Ht-=6BiWOt^PmU2tJp0e^P=`OIb4 zH;CL`!TCUvL&`iJj!D_t3@jy|V5Ma<6^vFogH zQn#+G<{wcNd@Z|g+Z*;+v&)6P=RV~5@VLAdy7Rdy+j4As=l1CGz-Qab6_-%Ys`XEQ zQ5^GXraW4_;M~jLTcFPS&IyUul8?K3PB%Iy^4U!kE6Vfe+*d&Q5ex$fH5(fnU}ODH z1_qj5N{9fEl^MtxZU?R%gcU97w~3BY`U}M?00zSb8P%Qt^%3++u53g?;SZEGebC(Y5WeVuye&K(*~tC;|g)0u7lZsi^= zY$j9ft7stE?UP3Vf1NN4Iay;$OtHKNVScNmI5QYh0^!$kpAmJR6-qze958!(;~LvU zk(v1AA~O{g^jX)#26Ah{^XI+I*;L0(*DV)N>X*f^R9087z!Ayi4YE(am9;gOeK|Gn zdq!Q(Sw|HW!!xfIkUmkNsLzjgGH8XVH&H7qS$TPRSy@2~YeG|Lu#i2P9#b2l4&IU% zrj+_iRa`Vkx6&T@{=FE5gE>lr%I0QXLA@!IxP^+N-g5 z%%h0CncuZ*?_F6bH1zlZO$Wir8p)d>D=PE=AXSBMBoGj2cZ{AiYyx&MSoyspp?&JZ+sfKn_RdJh{gLk7k2rM~=@mw?7z^Q#q#J&;u(EoL zO%&;LFfcHf0o}3(Twj8O(G1IzY#Y`wPjq%ez+{l*18*%8emOL+0S^K1}wgBqkj1Cp|Z7AsXG_`@M-9lxpm65 zZa>_yV>GSPPrss=nL{bxz~OOrwj9n zrPwb0!}*|~`#rk%k;toJp46$_?w%X)3BRf_j};DHpj;#)B+2wb-LGL75JZ> zP?eOtk;i_8IVkbqJV`Nc@3iVjWX{DX=*=OC5C;c56A-!?>i9H}7tjN!`sPJm5?9yC z%Bu9qk%Z+Jb`}QnvNAH?$m2U1Sk*5whR?GpUTfJ+o4p<$9@uFinJ%;->0mc6R4IGS zRIk$S>g)FpcVC*3eML;|O8|D8@1Dy36cMt`!s3LrjYw49)UmtyqSLy!D>1i%huhNE z{o&j%&b{Smkg58)5NLtRn5x-~E_K5rQ`bL8g0imTd3OAXMHR}&>tyJa?*i<3`^28X zR~Ev^uw%5P6lZ8CHoY$9^z~!z+cQ!b(yentFnEYzQ;B^GC7!=?IxLlEDvy?H6E*jd zxE>2v#yV|GJc%Pud0kke*Y1e8+qj1mU7eTsXd4p`&;25(Hmpl4YekJD9oJ2si~YM_ z(DRliRc0;s`Yf2Wq9Z_rr=XDARy~WiOF8Q6o*OpU(3hkef}P|l=n}MX$p?eetD2ad zDArdgOnY+#`y}!AzFcIN9V~lL5bH59v4`8z_zii+jeX~pvmCZ9;BznkWV%s>g*n_M z#?__4lwNt?R(hs;!j=a=S&c2d(8()z-jZUCjy`!PCdiNS`NrRVR&ELGP)-n^3Uqv! z7>!4{`))Sul3oz*$fEq1qk&pU)jtxvEq#n;_0?sml0>)<)Aa+s3?GAB_V&e=eEj*W zcjvUs$2mLq*Dx5#_@-^_ym>^ue%@K}a4%jw+^BBthpQL!z~ZUexWS~Xny~d!j~%D@ zA2H!#X-RB7D(IQ+#jCz4F<0TNXh5G}29ZWttDUepAM!kvwp+E?)nv$c8F>|4j?L3I zeHXuV3OW<3CA+Y@;M_IFoxG0|1gGC~%t+}X^}Q)GAx^i_ei1piB8DBXCyu) z55LVkHq))Mr&~r1^qra-S-r}?7nv?qYklTbX>V6@CP1-v?9F=e<}0TR4oa|?mRig- zdyaHPS3RW@?d0lU*J4S zBF-iJKlcDdNdGkb2nL4mA`^1)>^>NM z8O==j9*5N?#dSLI%Zvsubjq2zj1=AA2)rJxVO1oqQCziUt#GlN@0+kj%CxJ`nCl?$ zQJI$UV(DU%E@giDWl5aD=3{K{m}`T*Zn0dq_7-n5 z6~)inlhsk6Eb!!T6Go4AB>K699z*T7TSa<@BD$xIdt#asmQoz%w}Y z7HqdqewLA)C6Mw`sQs2D^9*1=70=NmR(%tr8cPRids-X5klycK$OQ9P#E-vy^#Ud_ zB}D)#qK>Ivl~9*E>M`0KFg?jKdux?RVCFzxqSE#3+F&N>7VT8)K{mmmHTV$V7+i*K zkSRZEo9&Ta{=QYRT4&WKr>ZTJR$Au78`re|yx=38mXWML3Xx<&(JKRMGl}Gd-KCX^ zN~87GMVtc(mKonKvHrM<^#hxU67aI>txlf21Y?TgebUV<@WIq;kMHp6>J4-&3ha79 z3mL2Lo-fw2gDW?lGxgEoRbtw6OJFQ-EVm9_P8E^@yy4WJ?`&bUxNCMQ+K+pY<@
      z*b3YjJGi@f6{=@WC%8JDZ@VoCL-u(ZJoV2sp&dAg-kUfBBO{G}rj0)?HB}S_@0-f-`c-nb z_mtZD&Wxd)A8*W`ZoH}JA))Ur;i3DJAR;a?SB`zvn7R3wuXVK&uSJQO&m= zw*4||_HXaF@zmSJ)O0o4B`cvj@?$grH15^_5GP8|hip$wN=e-Pfs!E-K_@%6 z&h60+EW|vgkL7?nq>datYK|dWpcSPXq?xILh&dQAW>@!Q4T^;AEe^5UcpvkxxPD?Y z&1n1e1@J0NL#J-M-!-3`$Z2R`t2}o4N|D-#B^@l+AAkm@{C=6X^KKt(jM*?dHi;&c zdiI03PfC8i5}8FjmG$!Fb`)#sxi4_n(Wv4Z@a&WmJ|tbeVmc=ASA*hg2cabd)F-7Q zhQ%oe2$abWzhC6RLdjVB;?BZogn>n~u1>D5B|{zWXqmKN?$OD=tJH*Y9vpdrVOHgj z_Ua8LpU&Q5Haqh=^}rqWYtFXL7Mp3!R@WCGQUEA$4_iryNhkF3j&XM=!1<;dWFM-_K7qF{vJPiCepyQE#)Fh0#%7&K@_{&7gJb zp!!q8kh9*FTsgc$9Eo3Ah=q0qI_`#-` z<%aZ$)7w8FFBnnq7=IJ}Z2zs5a*EH*hnAS>Ao#aJxh7~l09Sa$?`a{t!<`TI-{k^r zC-G6+7AcZHOKb}tOo@E+ z3y4*cWP|XJ7cjPiCUsmMu|XUY*)CIAGEQ|qiy!zQ^U--O(rTfG8GIH!ot*4}sYLO) zxn6}CyF3zG&wk)Xj4?lQB ztxS0~KaBOLBDY`p^TOWLNiC672ksa&wHw;%+rU991aX(k4)3uwWpT_FLwJ zskAzE>V4+c)dG7Su59o$U@ev_J%s#ZL(ctW2WK{$rPBh_i_4>%`0hQmcoRz4+80e= zu{u1M;+iY``m*n_U`px+8n5m}Qbbp$;2jXv*X&pP@wttP{);)MlggCiwgZWWU}UuMF{)oSHxz_gJJ``z6jp^Ori#cT+{kmR+&?UA5bQ3 z@Y=%uc>lZjrj&DepuEvw$QL4=)5at&Bg2soW=1>od}UdiSI3QlRT#=uEP93Biy^;~y4fBO4)eYrQgozG4eS>>zcLrWC%ib zh7fL~VPs+*ERs;CZ}rC6rquJ)bm!-|u`Zbw6_J)%8IFM%#p+3i4C_!F{#E;=i{x5= zlx2e7MgyZgtx1C@WgjaS0h&$n?0vBDmAzXb4>IiPhC9g4tn!*M92 zBH|trOxyRleLBQ%khyzTuBd*J(90frV*OW$mLI_;>_M_9!KhD8R@T{x)Y;qi$Hmed zDm|Qd>;zT~C|$#!U16W75pH} ztk;BSlm^sp4!OiY`L|h{Z;D9`Y98lNtu35<7l6Nv4dSj5!0iZwo`Mh91Gyc^n&%d2 zv*qg+Yg_Eg$=`G=N3cBIKK@8Gn8`UwmrOR%Harz-;TLNo4=${@N{?48(rzx0(QAAZ zA0A#KO2_38)8PFm@a^1#2f3v+-*w-iFm2b`m#-UK`Y#?-UZATv_spaH!9)J;dTi>4 z#Y{vzx z2Sn!`7K2-WmL+}-vp-5qnbm7)MN)`Hbf0Bp>d-sBbY*Gj`D8i~OPzwk9U?N9mX+?;)I9n9IIudlWUYj6S*;F23- z)%=T})+A|aZas2j&#Un;bHnXpTBGx=N^iyY9S~Qf-}_|l4XWieWma4E9-#BRv`%?f zSzYuz3Y(^{e`K1{0*Vk2g6;8X3n+Ux-&wfk;@p$W8U73hQmprUhcHD_ zwqb?<8t`NM-RFz16AuMz|Efce80JcQJW&e3jqmNYbiG7C>bf_Y2Mbgj8aCA^uVRid z>kVcU@~b-Z<~FC*+&^OY08kdy@vHYgTDe9)KlJU!8)-VQd z3yzQ|o}AW-2Hl4TgjE8}8-T@rtTi9};hBLc{T57j=on zxWsp;v6 z20sr>ezXx)SX}y&iLfHoI^G~ot5QEexhJcb^S!4AJAwwYEUTlzha$ZQ3~OEwjGJBf zP;vlV*iIO+J0~VaT&w5(umSh}+|D)ab~8d|#esg*6Ym4YC#dhZY17^bN`D7&`qX)W zd~+ZY`DGf2-F0BRTZHfCeKc|0;tw#7i>D`Q9nV!ZTAz2U2g5}|2Qveu%l_9z>kEN> zqD=I1mIt}wVzd5c_IQKMYAS`8!WZ}Op=GG><9&!XqshiDjkfW2=cBfLaEy~(R6%i0 z0M1z$I7AT7uUCs6NEi=ZD`r$0e>GnHP*JK|DJec)P}bP^3DowaXJFs4q%4+Xk=@6q zr{hH>MA!xQE7m*4O&I)2r*-j9xm_Jj8ImQV@u}7{3|Rp4>{&1m33~ZP@mQ&5LJU27#(S6+I}?vlgqPhiAex@)BF2)I&K6!%TwKS zzJu5zi(WWdHaJrfP5@eIJ96Y>3caq>9ogef`3a!Y*){>zPgY1Jg~{N8&}Lfv6}Hz_iEA(xKaLq85k*LLo|Mi2CSCi z9odB>u70em)*d6z&|S3Ufek_Ud^d?zo4F@wwrE}Xx{$4<1{av3yt2Y9%Z0$WddgS6 zHUM^|=+fc&7s*R4NWYoFOG5YGo@B1qmgo4#f?Hf(qrojl@lC66O*u_^bR0{(I=d01 zC}QjrWi?y=p(}T~+9Pi;$gU9lBXa=M_8^k&bGSfJ{xWhm>1eGozu9j^CUjfMB&&~8 zygwfr$soJ4^`-1FhJW3lD<^po z+2P2F)Kv;^Sw`JbeMC_XCvQD(dm zy7c&#q3d|ezd@5lko%F5kwWYcTHFW!ozo8@b_Lz-4bTSFts*IA9t} zn%iLeqSX9a(qq_vd?=Z+nmhaqj~SvCc{*6KAz-YiySln=Mi0MG0i4LM-`=;9ZXc4~ z^=7DbIdzv%3M|-SSfC`VCczEUBP7x0UPXGJn+=|G(Rx%wZq=Kg8#_qW}8 z3@6Ho^5C0W?~i083HMy4#JKPMkVT2fa&lI6a#HDS)OT;D>bvu@YHzU6Uq#VfB2iJW zFiD?(Y5A`2eZodChPC>%OwRRz5u>lWpfbH+diuTJe{^zDliU`ZQ(I?{1!t>@f;Iox zrsIfP!g%K9&K;%RDU`por~h(B|2KM)ZWmHg0_vX$KG?g_tFQZQmO$c%>f|LD zyToo}gl;YCuXe<$vb~TmcioRxdy*WdA$rt)TiU*TRjB6OreO{ zoYctP@3kBe!nn4cv$9TR=!m)bSJ|b+qsSOI`-&^9f9o-ATedY6Bbq3wS{oL==edg* z^8c^N`fu*!pIW!NX@B?qDW=x73n=kD|Frp<;Cdn^cF*HKHD61EDNK4`jUtOH_`D3i z%pf+&L`tMS$|Hf^cd#dt<-`dK*x01Cr4nD(7zSRe?aAjo$un2Se$)OUkVa3y=D{slY z)Y|dK0$tA!CpzE74KF47WHU$_oDeft1rw{Om|tY&1i84?jcrJ+`}2vLRtQ`!3OM{s zfQxI7$rV-Zg~KYCFz35ic*XoL@)G}@C8}`w!yV}o;6(8m8B$qd(|Mcn8!80>6uluthW}IVm5O@JpQ=w-n6v|eM zMI{`Dkawjybi4(|$JR~|KM+V|F)AhYb-(a$jbDQ2vIEGdzh4_U`y+bxl0=z|V9!?T 
zUo68Qp>(k@+NswwFd$tU_QvOR$6(~sW65$^{;yv3e0UCO#v(cml^wdTUL{{i5Q2DQ zcN1WmLDAVEQG|IQj{yPWfSZT42lzQS0;=}QZGkYgK7Pg#zzGMECYDV{Pcfg}-!Yt= zpf=-kSg|}&K8mfTbb0VDKvf&a`3CA#8kllj{r$lx=z=mk!NTU58AbwWhWV5~*&%7? zY0E%(3IZ*$1a<-DI5SgTez|P`pBy;t2;sYNfR%AY6p2w9F#MZHp(0%xPTE8Nciadb zy4O{F`BeYh=SA*?bM>{R+}oPUO6_Y6LuHPeqAgXWhGyRlojrtWkf2tJ#6Wo-$t#d8 zDeOQtZBmpC{)NnB?=u<>WLMPbodIJrgaGs9%a`#grRge-WU-?qvLe$!_iPDbP}$JH z1#bXvqZd`%YoBpB0OtjE=GWZ4cawQ@8elH-Jueou&c(>FVnX0z;n1S#12h zr6L>587kKvig+AL`-tH)Gj$Nqo&pXGGQzg@s8p{$8*vDr#$gD+hc9a6IZ+i|me6o2 zY&cm!APQ-7CIr>(XsDFO?CHLh*53u>P`p;#W`5VAr&w&6kW2~+v_oVV>@)Guhif#C zA4KTE>VpT0JX(gM-fR{<*ufJq^S1K^$-V)5RMQ-$9YVrq$tR1R8F@(4OLf;{(9_5e`WUJ|d`m;3{yKO~#N5AmQjS9?NnAQqP>-38wqtV2F?W44?`3r|;hBZZEl? zKNLlPwT%xWmt&yo`+K#Ancki~q%=_(Vuq0rgN#1k7_S@gar6Yga?|HC=gm;#&bD`> zX1A;)cocJXOMU(Jc4u&qK$_ukX$9ukK@Mfyi$zJ-Jmt=OQl6@hp1x)LdG7{u-op~L z+P3q%?AV6YiEldFZ=|a1ta+%zPsG`n*-iM31dXX&TojegvE{U zmjr?Nmi(-YYbq50&nx0K;D@Nt{|QgM&N_yg!TZ~Rna>A+X)!WN!?Tyz16XQXf5Amf z9r4H?^YEJ;qIU5*c<;4|?6)&{Dc9iHF(_}^z{ zoyr}`9QlpNB<^WA@sq<8GCLkZLt1$WRVYDJyUiY0OLoW$mxlha+#*$Am9*&JW<#-i zK~ynJ&&Xh!wL|w092K^Qkq&$C00F?-Nw5(VP&4$Yy3?`(S8Dy%6uuZ-I+<^a(cxfNg{>HLsJlS<*YQS3uSScT+d_C9^rPJrAvC*P-> zJ)i2{EAP4x>GV-!@cWB=1>)KelIe|vQSIo_qa$Numb#qE9UMK%zWz@YfXcq+KWPjr zh>Kpccs@B`X4jK{%gq$h4Qc1i~rN~@V}da{O1Gw=L-Bk eU4d`2bXWKSuLW?Y<$;e((NNv5lCsnEm;VEvwHcWJ diff --git a/examples_skyline/fair_prep_results.py b/examples_skyline/fair_prep_results.py deleted file mode 100644 index e2f9565..0000000 --- a/examples_skyline/fair_prep_results.py +++ /dev/null @@ -1,92 +0,0 @@ -import os -import sys -import shutil -import warnings -warnings.simplefilter('ignore') - -from helper import extract_info - -sys.path.append(os.getcwd()) - -from fp.traindata_samplers import CompleteData -from fp.missingvalue_handlers import CompleteCaseAnalysis -from fp.dataset_experiments import GermanCreditDatasetSexExperiment -from fp.scalers import NamedStandardScaler, NamedMinMaxScaler -from fp.learners import NonTunedLogisticRegression, LogisticRegression, DecisionTree, NonTunedLogisticRegression, NonTunedDecisionTree -from fp.post_processors import NoPostProcessing, RejectOptionPostProcessing, EqualOddsPostProcessing, CalibratedEqualOddsPostProcessing -from fp.pre_processors import NoPreProcessing, DIRemover, Reweighing - -import numpy as np -import matplotlib.pyplot as plt - -#creating list of parameters that we will alter to observe variations -seeds = [0xbeef, 0xcafe, 0xdead, 0xdeadcafe, 0xdeadbeef, 0xbeefcafe, 0xcafebeef, 50, 583, 5278, 100000, 0xefac,0xfeeb, 0xdaed, 0xefacdaed, 0xfeebdead] -learners = [NonTunedLogisticRegression(), LogisticRegression()] -processors = [(NoPreProcessing(), NoPostProcessing()), (DIRemover(1.0), NoPostProcessing())] - -def calculate_metrics(seed, learner, pre_processor, post_processor): - ''' - Experiment function to run the experiments - ''' - exp = GermanCreditDatasetSexExperiment( - fixed_random_seed=seed, - train_data_sampler=CompleteData(), - missing_value_handler=CompleteCaseAnalysis(), - numeric_attribute_scaler=NamedStandardScaler(), - learners=[learner], - pre_processors=[pre_processor], - post_processors=[post_processor]) - exp.run() - -def run_exp(seeds, learners, processors): - ''' - This is the main driver function that calls the calculate_metrics to give metrices on combinations of various learners, pre and post processing techniques. 
- ''' - accuracy, disp_imp, fnr, fpr = [], [], [], [] - for processor in processors: - for learner in learners: - learner_acc, learner_di, learner_fnr, learner_fpr = [], [], [], [] - for seed in seeds: - calculate_metrics(seed, learner, pre_processor=processor[0], post_processor=processor[1]) - extract_info(learner_acc, learner_di, learner_fnr, learner_fpr) - accuracy.append(learner_acc) - disp_imp.append(learner_di) - fnr.append(learner_fnr) - fpr.append(learner_fpr) - - return accuracy, disp_imp, fnr, fpr - -accuracy, disp_imp, fnr, fpr = run_exp(seeds, learners, processors) -print(accuracy) -print(disp_imp) -print(fnr) -print(fpr) - -def plotter(x, y, x_ticks, x_label, main_title): - ''' - Function to plot various comparison plots. - ''' - title_list = ['NoPreProcessing', 'DIRemover(1.0)', 'DIRemover(0.5)', 'Reweighing', 'Reject Option', 'Caliberated Equal Odds'] - label_list = [('NonTunedLogistic', 'TunedLogistic'), ('NonTunedDecisionTree', 'TunedDecisionTree')] - fig, axs = plt.subplots(6, 2, figsize=((10,20))) - axs = axs.flatten() - for i in range(0, len(y), 2): - loc = i//2 - axs[loc].scatter(x[i], y[i], c='b', marker='o') - axs[loc].scatter(x[i+1], y[i+1], c='r', marker='o') - axs[loc].set_xticks(x_ticks) - axs[loc].set_yticks(np.arange(0.5, 1, 0.1)) - axs[loc].set_title(title_list[i//4]) - axs[loc].grid(True) - axs[loc].set_xlabel(x_label) - axs[loc].set_ylabel('Accuracy') - axs[loc].legend(label_list[int(i%4/2)]) - fig.suptitle(main_title) - plt.subplots_adjust(wspace=0.3, hspace=0.43) - fig.savefig('examples/' + main_title + '.png') - plt.show() - -plotter(x=disp_imp, y=accuracy, x_ticks=[0.5, 1, 1.5], x_label='DI', main_title='accuracy_vs_di') -plotter(x=fnr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FNR', main_title='accuracy_vs_fnr') -plotter(x=fpr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FPR', main_title='accuracy_vs_fpr') - diff --git a/examples_skyline/helper.py b/examples_skyline/helper.py deleted file mode 100644 index 05af226..0000000 --- a/examples_skyline/helper.py +++ /dev/null @@ -1,53 +0,0 @@ -import os -import sys -import shutil -import pandas as pd -import numpy as np - -path = 'logs' -def extract_info(learner_acc, learner_di, learner_fnr, learner_fpr, learner_label, learner_optimal): - try: - for folder_name, sub_folders, file_names in os.walk(path): - for sub_folder in sub_folders: - file_list = os.listdir(os.path.join(path, sub_folder)) - file_list.sort() - for file in file_list: - file_path = os.path.join(path,sub_folder, file) - df = pd.read_csv(str(file_path), header=None, names = ['split_type', 'label', 'parameter', 'value']) - test_data = df['split_type'] == 'test' - - flags = file.split("__") - pre_processor = flags[1] - post_processor = flags[2] - - if(len(flags) == 8 and flags[-1] == "OPTIMAL.csv"): - learner_optimal.append(True) - else: - learner_optimal.append(False) - - if(post_processor == "no_post_processing"): - learner_label.append(pre_processor) - else: - learner_label.append(post_processor[:17]) - - di = (df['parameter'] == 'disparate_impact') & test_data - di_value = df.loc[di, :] - - learner_di.append(di_value.iloc[0, -1]) - - acc = (df['parameter'] == 'accuracy') & test_data - acc_value = df.loc[acc, :] - learner_acc.append(acc_value.iloc[0, -1]) - - fnr = (df['parameter'] == 'generalized_false_negative_rate') & test_data - fnr_value = df.loc[fnr, :] - learner_fnr.append(fnr_value.iloc[0, -1]) - - fpr = (df['parameter'] == 'generalized_false_positive_rate') & test_data - fpr_value = df.loc[fpr, :] - 
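The accuracy_vs_*-skyline-order.png and accuracy_vs_*-skyline_formula.png plots that this patch removes visualize which (fairness metric, accuracy) points survive a skyline selection. As a rough illustration of the idea only (an assumption about the concept, not the implementation in fp/utils.py or fp/experiments.py, which this patch also touches), a two-metric skyline keeps the points that no other point beats on both axes at once:

# Illustrative sketch of a 2-D skyline (Pareto front); both coordinates are
# assumed to be oriented so that larger is better. skyline() here is a
# hypothetical helper, not the function shipped in fp/utils.py.
def skyline(points):
    result = []
    for p in points:
        # p is dominated if some other point is at least as good on both axes
        dominated = any(q != p and q[0] >= p[0] and q[1] >= p[1] for q in points)
        if not dominated:
            result.append(p)
    return result

# (0.85, 0.65) is dominated by (0.9, 0.72), so only two points survive:
print(skyline([(0.9, 0.72), (0.85, 0.65), (0.95, 0.70)]))  # [(0.9, 0.72), (0.95, 0.70)]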
diff --git a/examples_skyline/helper.py b/examples_skyline/helper.py
deleted file mode 100644
index 05af226..0000000
--- a/examples_skyline/helper.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import os
-import sys
-import shutil
-import pandas as pd
-import numpy as np
-
-path = 'logs'
-
-def extract_info(learner_acc, learner_di, learner_fnr, learner_fpr, learner_label=None, learner_optimal=None):
-    '''
-    Walk the logs directory, read each result CSV, and append the test-set
-    metrics to the given lists. learner_label and learner_optimal default to
-    None so that callers that only need the four metric lists can omit them.
-    '''
-    try:
-        for folder_name, sub_folders, file_names in os.walk(path):
-            for sub_folder in sub_folders:
-                file_list = os.listdir(os.path.join(path, sub_folder))
-                file_list.sort()
-                for file in file_list:
-                    file_path = os.path.join(path, sub_folder, file)
-                    df = pd.read_csv(str(file_path), header=None, names=['split_type', 'label', 'parameter', 'value'])
-                    test_data = df['split_type'] == 'test'
-
-                    flags = file.split("__")
-                    pre_processor = flags[1]
-                    post_processor = flags[2]
-
-                    if learner_optimal is not None:
-                        if len(flags) == 8 and flags[-1] == "OPTIMAL.csv":
-                            learner_optimal.append(True)
-                        else:
-                            learner_optimal.append(False)
-
-                    if learner_label is not None:
-                        if post_processor == "no_post_processing":
-                            learner_label.append(pre_processor)
-                        else:
-                            learner_label.append(post_processor[:17])
-
-                    di = (df['parameter'] == 'disparate_impact') & test_data
-                    di_value = df.loc[di, :]
-                    learner_di.append(di_value.iloc[0, -1])
-
-                    acc = (df['parameter'] == 'accuracy') & test_data
-                    acc_value = df.loc[acc, :]
-                    learner_acc.append(acc_value.iloc[0, -1])
-
-                    fnr = (df['parameter'] == 'generalized_false_negative_rate') & test_data
-                    fnr_value = df.loc[fnr, :]
-                    learner_fnr.append(fnr_value.iloc[0, -1])
-
-                    fpr = (df['parameter'] == 'generalized_false_positive_rate') & test_data
-                    fpr_value = df.loc[fpr, :]
-                    learner_fpr.append(fpr_value.iloc[0, -1])
-
-            shutil.rmtree(os.path.join(path, sub_folder))
-    except Exception:
-        # Silently skip runs whose log files are missing or malformed.
-        pass
-
diff --git a/examples_skyline/missing_data_results.py b/examples_skyline/missing_data_results.py
deleted file mode 100644
index 29186ce..0000000
--- a/examples_skyline/missing_data_results.py
+++ /dev/null
@@ -1,99 +0,0 @@
-import os
-import sys
-import shutil
-import warnings
-warnings.simplefilter('ignore')
-
-from helper import extract_info
-
-sys.path.append(os.getcwd())
-
-from fp.traindata_samplers import CompleteData
-from fp.missingvalue_handlers import CompleteCaseAnalysis, ModeImputer, DataWigSimpleImputer
-from fp.dataset_experiments import AdultDatasetWhiteExperiment
-from fp.scalers import NamedStandardScaler, NamedMinMaxScaler
-from fp.learners import NonTunedLogisticRegression, LogisticRegression, DecisionTree, NonTunedDecisionTree
-from fp.post_processors import NoPostProcessing, RejectOptionPostProcessing, EqualOddsPostProcessing, CalibratedEqualOddsPostProcessing
-from fp.pre_processors import NoPreProcessing, DIRemover, Reweighing
-
-import numpy as np
-import itertools
-import matplotlib.pyplot as plt
-
-# Lists of parameters that we vary to observe variation in the metrics
-seeds = [0xbeef, 0xcafe, 0xdead, 0xdeadcafe, 0xdeadbeef, 0xbeefcafe, 0xcafebeef, 50, 583, 5278, 100000, 0xefac, 0xfeeb, 0xdaed, 0xefacdaed, 0xfeebdead]
-learners = [NonTunedLogisticRegression(), LogisticRegression(), NonTunedDecisionTree(), DecisionTree()]
-processors = [(NoPreProcessing(), NoPostProcessing()), (DIRemover(1.0), NoPostProcessing()), (DIRemover(0.5), NoPostProcessing()), (Reweighing(), NoPostProcessing()),
-              (NoPreProcessing(), RejectOptionPostProcessing()), (NoPreProcessing(), CalibratedEqualOddsPostProcessing())]
-impute_column_list = ['workclass', 'occupation', 'native-country']
-label_column = 'income-per-year'
-datawig_imputer = DataWigSimpleImputer(impute_column_list, label_column, out='out')
-missing_value_imputers = [CompleteCaseAnalysis(), ModeImputer(impute_column_list), datawig_imputer]
-
-
-def calculate_metrics(seed, learner, missing_value_imputer, pre_processor, post_processor):
-    '''
-    Run one experiment for the given seed, learner, imputer, and pre-/post-processor pair.
-    '''
-    exp = AdultDatasetWhiteExperiment(
-        fixed_random_seed=seed,
-        train_data_sampler=CompleteData(),
-        missing_value_handler=missing_value_imputer,
-        numeric_attribute_scaler=NamedStandardScaler(),
-        learners=[learner],
-        pre_processors=[pre_processor],
-        post_processors=[post_processor])
-    exp.run()
-
-def run_exp(seeds, learners, processors):
-    '''
-    Main driver: calls calculate_metrics for every combination of learner,
-    pre-/post-processing technique, and missing-value imputer, and collects
-    the resulting test metrics.
-    '''
-    accuracy, disp_imp, fnr, fpr = [], [], [], []
-    for learner in learners:
-        for processor in processors:
-            for imputer in missing_value_imputers:
-                imputer_acc, imputer_di, imputer_fnr, imputer_fpr = [], [], [], []
-                for seed in seeds:
-                    calculate_metrics(seed, learner, imputer, pre_processor=processor[0], post_processor=processor[1])
-                    extract_info(imputer_acc, imputer_di, imputer_fnr, imputer_fpr)
-                accuracy.append(imputer_acc)
-                disp_imp.append(imputer_di)
-                fnr.append(imputer_fnr)
-                fpr.append(imputer_fpr)
-    return accuracy, disp_imp, fnr, fpr
-
-accuracy, disp_imp, fnr, fpr = run_exp(seeds, learners, processors)
-
-def plotter(x, y, x_ticks, x_label, main_title):
-    '''
-    Plot accuracy against the given fairness metric, one subplot per
-    (learner, processor) combination, with one series per imputer.
-    '''
-    learner_list = ['NonTunedLogistic', 'TunedLogistic', 'NonTunedDecisionTree', 'TunedDecisionTree']
-    processor_list = ['NoPreProcessing', 'DIRemover(1.0)', 'DIRemover(0.5)', 'Reweighing', 'Reject Option', 'Calibrated Equal Odds']
-    title_list = list(itertools.product(learner_list, processor_list))
-    label_list = [('CompleteCase', 'ModeImputer', 'datawig_simple')]
-    fig, axs = plt.subplots(len(learner_list), len(processor_list), figsize=(10, 20))
-    axs = axs.flatten()
-    for i in range(0, len(y), 3):
-        loc = i//3
-        axs[loc].scatter(x[i], y[i], c='b', marker='o')
-        axs[loc].scatter(x[i+1], y[i+1], c='r', marker='o')
-        axs[loc].scatter(x[i+2], y[i+2], c='g', marker='o')
-        axs[loc].set_xticks(x_ticks)
-        axs[loc].set_yticks(np.arange(0.5, 1, 0.1))
-        axs[loc].set_title(title_list[i//3], fontsize=8)
-
-        axs[loc].grid(True)
-        axs[loc].set_xlabel(x_label)
-        axs[loc].set_ylabel('Accuracy')
-        axs[loc].legend(label_list[int(i%3/2)])
-    fig.suptitle(main_title)
-    plt.subplots_adjust(wspace=0.3, hspace=0.43)
-    fig.savefig('examples/' + main_title + '.png')
-    plt.show()
-
-plotter(x=disp_imp, y=accuracy, x_ticks=[0.5, 1, 1.5], x_label='DI', main_title='missing_data_accuracy_vs_di')
-plotter(x=fnr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FNR', main_title='missing_data_accuracy_vs_fnr')
-plotter(x=fpr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FPR', main_title='missing_data_accuracy_vs_fpr')
-
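missing_data_results.py above compares three missing-value strategies: complete-case analysis, mode imputation, and DataWig. As a minimal sketch of what mode imputation does to the columns listed above (an assumption about ModeImputer's behavior, not a quote of its actual code), each listed column's missing entries are filled with that column's most frequent value:

# Hypothetical stand-in for fp.missingvalue_handlers.ModeImputer; the real
# class may differ in API and edge-case handling.
import pandas as pd

def mode_impute(df, columns):
    for col in columns:
        # pandas mode() ignores NaN, so iloc[0] is the most frequent real value
        df[col] = df[col].fillna(df[col].mode().iloc[0])
    return df

df = pd.DataFrame({'workclass': ['Private', None, 'Private'],
                   'occupation': ['Sales', 'Sales', None]})
print(mode_impute(df, ['workclass', 'occupation']))  # both gaps filled per column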
diff --git a/examples_skyline/results_play_skyline_formula.ipynb b/examples_skyline/results_play_skyline_formula.ipynb
deleted file mode 100644
index e6ed2c6..0000000
--- a/examples_skyline/results_play_skyline_formula.ipynb
+++ /dev/null
@@ -1,1778 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Import error: No module named 'tensorflow'\n"
-     ]
-    }
-   ],
-   "source": [
-    "#load relevant libraries\n",
-    "import os\n",
-    "import sys\n",
-    "import pathlib\n",
-    "import shutil\n",
-    "import warnings\n",
-    "warnings.simplefilter('ignore')\n",
-    "from helper import extract_info\n",
-    "\n",
-    "os.chdir('..')\n",
-    "#parent_dir = os.path.dirname(os.getcwd())\n",
-    "#sys.path.append(parent_dir)\n",
-    "\n",
-    "from fp.traindata_samplers import CompleteData\n",
-    "from fp.missingvalue_handlers import CompleteCaseAnalysis\n",
-    "from fp.dataset_experiments import GermanCreditDatasetSexExperiment\n",
-    "from fp.scalers import NamedStandardScaler, NamedMinMaxScaler\n",
-    "from fp.learners import NonTunedLogisticRegression, LogisticRegression, DecisionTree, NonTunedDecisionTree\n",
-    "from fp.post_processors import NoPostProcessing, RejectOptionPostProcessing, EqualOddsPostProcessing, CalibratedEqualOddsPostProcessing\n",
-    "from fp.pre_processors import NoPreProcessing, DIRemover, Reweighing\n",
-    "\n",
-    "import numpy as np\n",
-    "import matplotlib.pyplot as plt"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "#parameters in this cell can be adjusted for experimentation\n",
-    "\n",
-    "seeds = [0xbeef, 0xcafe, 0xdead, 0xdeadcafe, 0xdeadbeef, 0xbeefcafe, 0xcafebeef, 50, 583, 5278, 100000, 0xefac, 0xfeeb, 0xdaed, 0xefacdaed, 0xfeebdead]\n",
-    "#seeds = [0xbeef, 0xcafe, 0xdead, 0xdeadcafe]\n",
-    "learners = [NonTunedLogisticRegression(), LogisticRegression()]\n",
-    "#tuned and non-tuned DecisionTree() can also be used.\n",
-    "\n",
-    "#pairs of pre- and post-processors\n",
-    "#processors = [(NoPreProcessing(), NoPostProcessing()), (DIRemover(1.0), NoPostProcessing()), (DIRemover(0.5), NoPostProcessing()), (Reweighing(), NoPostProcessing()), (NoPreProcessing(), RejectOptionPostProcessing()), (NoPreProcessing(), CalibratedEqualOddsPostProcessing())]\n",
-    "#processors = [(NoPreProcessing(), NoPostProcessing()), (DIRemover(1.0), NoPostProcessing())]\n",
-    "pre_processors = [NoPreProcessing(), DIRemover(1.0)]\n",
-    "post_processors = [NoPostProcessing(), RejectOptionPostProcessing()]\n",
-    "\n",
-    "skyline_formula = {'accuracy': 0.6, 'selection_rate': 0.2, 'false_discovery_rate': 0.2}\n",
-    "\n",
-    "# TODO: add more test cases for the above parameters\n",
-    "#update these for plotting as per your experiment\n",
-    "#title_list = ['NoPreProcessing', 'DIRemover(1.0)', 'DIRemover(0.5)', 'Reweighing', 'Reject Option', 'Calibrated Equal Odds']\n",
-    "#add pairwise tuples for each pair of learners.\n",
-    "label_list = [('NonTunedLogistic', 'TunedLogistic')]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def calculate_metrics(seed, learner, pre_processor, post_processor, val_strategy):\n",
-    "    '''\n",
-    "    Run one experiment for the given seed, learner, processors, and validation strategy.\n",
-    "    '''\n",
-    "    exp = GermanCreditDatasetSexExperiment(\n",
-    "        fixed_random_seed=seed,\n",
-    "        train_data_sampler=CompleteData(),\n",
-    "        missing_value_handler=CompleteCaseAnalysis(),\n",
-    "        numeric_attribute_scaler=NamedStandardScaler(),\n",
-    "        learners=learner,\n",
-    "        pre_processors=pre_processor,\n",
-    "        post_processors=post_processor,\n",
-    "        optimal_validation_strategy=val_strategy)\n",
-    "    exp.run()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "complete_case removed 0 instances from training data\n",
-      "complete_case removed 0 instances from validation data\n",
-      "Injecting zero columns for features not present set()\n",
-      "complete_case removed 0 instances from validation data\n",
-      "Injecting zero columns for features not present set()\n",
-      "complete_case removed 0 instances from validation data\n",
-      "Injecting zero columns for features not present set()\n",
-      "complete_case removed 0 instances from validation data\n",
-      "Injecting zero columns for features not present set()\n",
-      "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n",
-      "[Parallel(n_jobs=-1)]: Done  50 tasks      | elapsed:    4.1s\n",
-      "[Parallel(n_jobs=-1)]: Done  60 out of  60 | elapsed:    4.2s finished\n"
-     ]
-    },
[... the remaining stdout/stderr blocks of this cell repeat the same messages ("complete_case removed 0 instances ...", "Injecting zero columns for features not present set()", "Fitting 5 folds for each of 12 candidates, totalling 60 fits", and joblib Parallel progress lines) for the other seeds and pipeline variants; they are omitted here, and the hunk breaks off inside this execution output ...]
"[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero 
columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 
0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.0s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - 
"complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - 
"complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - 
"[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, 
totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 
instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.0s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - 
] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 
0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", 
- "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns 
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "complete_case removed 0 instances from validation data\n",
- "Injecting zero columns for features not present set()\n",
- "complete_case removed 0 instances from validation data\n",
- "Injecting zero columns for features not present set()\n"
- ]
- }
- ],
- "source": [
- "def run_exp(seeds, learners, pre_processors, post_processors, optimal_validation_strategies):\n",
- "    '''\n",
- "    Main driver: runs calculate_metrics for each seed and collects, per\n",
- "    learner / pre-processor / post-processor combination, the resulting metrics.\n",
- "    '''\n",
- "    total_experiments = len(pre_processors) * len(post_processors) * len(learners)\n",
- "    #pd.set_option(\"display.max_rows\", None, \"display.max_columns\", None)\n",
- "    accuracy, disp_imp, fnr, fpr, label, optimal = [], [], [], [], [], []\n",
- "    for j in range(total_experiments):\n",
- "        accuracy.append(list())\n",
- "        disp_imp.append(list())\n",
- "        fnr.append(list())\n",
- "        fpr.append(list())\n",
- "        label.append(list())\n",
- "        optimal.append(list())\n",
- "    for seed in seeds:\n",
- "        learner_acc, learner_di, learner_fnr, learner_fpr, learner_label, learner_optimal = [], [], [], [], [], []\n",
- "        calculate_metrics(seed, learners, pre_processors, post_processors, optimal_validation_strategies)\n",
- "        extract_info(learner_acc, learner_di, learner_fnr, learner_fpr, learner_label, learner_optimal)\n",
- "        for i in range(len(learner_acc)):\n",
- "            accuracy[i].append(learner_acc[i])\n",
- "            disp_imp[i].append(learner_di[i])\n",
- "            fnr[i].append(learner_fnr[i])\n",
- "            fpr[i].append(learner_fpr[i])\n",
- "            #label[i].append(learner_label[i])\n",
- "            optimal[i].append(learner_optimal[i])\n",
- "        label = learner_label\n",
- "\n",
- "    return accuracy, disp_imp, fnr, fpr, label, optimal\n",
- "    #return learner_acc, learner_di, learner_fnr, learner_fpr, label, optimal\n",
- "\n",
- "s_accuracy, s_disp_imp, s_fnr, s_fpr, s_label, s_optimal = run_exp(seeds, learners, pre_processors, post_processors, skyline_formula)\n",
- "\n",
- "# Convert the string-valued metric lists to floats for plotting\n",
- "\n",
- "accuracy, disp_imp, fnr, fpr = [], [], [], []\n",
- "for item in s_accuracy:\n",
- "    temp_list = list()\n",
- "    for i in item:\n",
- "        temp_list.append(float(i))\n",
- "    accuracy.append(temp_list)\n",
- "\n",
- "for item in s_disp_imp:\n",
- "    temp_list = list()\n",
- "    for i in item:\n",
- "        temp_list.append(float(i))\n",
- "    disp_imp.append(temp_list)\n",
- "\n",
- "for item in s_fnr:\n",
- "    temp_list = list()\n",
- "    for i in item:\n",
- "        temp_list.append(float(i))\n",
- "    fnr.append(temp_list)\n",
- "\n",
- "for item in s_fpr:\n",
- "    temp_list = list()\n",
- "    for i in item:\n",
- "        temp_list.append(float(i))\n",
- "    fpr.append(temp_list)\n",
- "\n",
- "title_list = s_label"
- ]
- },
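The four conversion loops in the deleted cell above are identical apart from the list they fill. A minimal sketch of a single helper doing the same job (assuming, as the cell does, that each metric arrives as a list of lists of numeric strings):

    def to_float(rows):
        # Convert a list of lists of numeric strings into floats.
        return [[float(v) for v in row] for row in rows]

    # e.g. to_float([['0.71', '0.74']]) == [[0.71, 0.74]]
    accuracy, disp_imp, fnr, fpr = (to_float(m) for m in (s_accuracy, s_disp_imp, s_fnr, s_fpr))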
- {
- "cell_type": "code",
- "execution_count": 5,
- "metadata": {},
- "outputs": [],
- "source": [
- "def plotter(s_optimal, title_list, label_list, x, y, x_ticks, x_label, main_title):\n",
- "    '''\n",
- "    Plot accuracy against a fairness metric for each learner pair,\n",
- "    marking skyline-optimal points (yellow/green) vs. the rest (blue/red).\n",
- "    '''\n",
- "    # update label_list and title_list to match the experiment being performed\n",
- "    fig, axs = plt.subplots(2, 2, figsize=(10, 20))\n",
- "    axs = axs.flatten()\n",
- "    for i in range(0, len(y), 2):\n",
- "        loc = i // 2\n",
- "        for k in range(len(x[i])):\n",
- "            if s_optimal[i][k]:\n",
- "                axs[loc].scatter(x[i][k], y[i][k], c='y', marker='o')\n",
- "            else:\n",
- "                axs[loc].scatter(x[i][k], y[i][k], c='b', marker='o')\n",
- "            if s_optimal[i+1][k]:\n",
- "                axs[loc].scatter(x[i+1][k], y[i+1][k], c='g', marker='o')\n",
- "            else:\n",
- "                axs[loc].scatter(x[i+1][k], y[i+1][k], c='r', marker='o')\n",
- "        axs[loc].set_xticks(x_ticks)\n",
- "        axs[loc].set_yticks(np.arange(0.5, 1, 0.1))\n",
- "        axs[loc].set_title(title_list[i])\n",
- "        axs[loc].grid(True)\n",
- "        axs[loc].set_xlabel(x_label)\n",
- "        axs[loc].set_ylabel('Accuracy')\n",
- "        axs[loc].legend(label_list[int(i % (len(label_list) * 2) / 2)])\n",
- "    fig.suptitle(main_title)\n",
- "    plt.subplots_adjust(wspace=0.3, hspace=0.43)\n",
- "    fig.savefig('examples_skyline/' + main_title + '.png')\n",
- "    plt.show()\n",
- "\n",
- "plotter(s_optimal, title_list, label_list, x=disp_imp, y=accuracy, x_ticks=[0.5, 1, 1.5], x_label='DI', main_title='accuracy_vs_di-skyline_formula')\n",
- "plotter(s_optimal, title_list, label_list, x=fnr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FNR', main_title='accuracy_vs_fnr-skyline_formula')\n",
- "plotter(s_optimal, title_list, label_list, x=fpr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FPR', main_title='accuracy_vs_fpr-skyline_formula')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.7.3"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
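On the log output seen throughout these notebooks: "Fitting 5 folds for each of 12 candidates, totalling 60 fits" and the LokyBackend lines are standard scikit-learn GridSearchCV / joblib progress output; 5-fold cross-validation over 12 hyper-parameter candidates gives 5 x 12 = 60 fits, run in parallel with n_jobs=-1. A minimal sketch that reproduces such output (the parameter grid below is invented for illustration and is presumably not the grid the tuned learners actually use):

    from sklearn.datasets import make_classification
    from sklearn.linear_model import LogisticRegression
    from sklearn.model_selection import GridSearchCV

    X, y = make_classification(n_samples=200, random_state=0)
    # 4 values of C x 3 solvers = 12 candidates; cv=5 gives 60 fits
    grid = {'C': [0.01, 0.1, 1.0, 10.0], 'solver': ['lbfgs', 'liblinear', 'saga']}
    search = GridSearchCV(LogisticRegression(max_iter=500), grid, cv=5, n_jobs=-1, verbose=1)
    search.fit(X, y)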
diff --git a/examples_skyline/results_play_skyline_order.ipynb b/examples_skyline/results_play_skyline_order.ipynb
deleted file mode 100644
index 1ab6950..0000000
--- a/examples_skyline/results_play_skyline_order.ipynb
+++ /dev/null
@@ -1,1767 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Import error: No module named 'tensorflow'\n"
- ]
- }
- ],
- "source": [
- "# load relevant libraries\n",
- "import os\n",
- "import sys\n",
- "import pathlib\n",
- "import shutil\n",
- "import warnings\n",
- "warnings.simplefilter('ignore')\n",
- "from helper import extract_info\n",
- "\n",
- "os.chdir('..')\n",
- "#parent_dir = os.path.dirname(os.getcwd())\n",
- "#sys.path.append(parent_dir)\n",
- "\n",
- "from fp.traindata_samplers import CompleteData\n",
- "from fp.missingvalue_handlers import CompleteCaseAnalysis\n",
- "from fp.dataset_experiments import GermanCreditDatasetSexExperiment\n",
- "from fp.scalers import NamedStandardScaler, NamedMinMaxScaler\n",
- "from fp.learners import NonTunedLogisticRegression, LogisticRegression, DecisionTree, NonTunedDecisionTree\n",
- "from fp.post_processors import NoPostProcessing, RejectOptionPostProcessing, EqualOddsPostProcessing, CalibratedEqualOddsPostProcessing\n",
- "from fp.pre_processors import NoPreProcessing, DIRemover, Reweighing\n",
- "\n",
- "import pandas as pd\n",
- "import numpy as np\n",
- "import matplotlib.pyplot as plt"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "metadata": {},
- "outputs": [],
- "source": [
- "# parameters in this cell can be adjusted for experimentation\n",
- "\n",
- "seeds = [0xbeef, 0xcafe, 0xdead, 0xdeadcafe, 0xdeadbeef, 0xbeefcafe, 0xcafebeef, 50, 583, 5278, 100000, 0xefac, 0xfeeb, 0xdaed, 0xefacdaed, 0xfeebdead]\n",
- "#seeds = [0xbeef, 0xcafe, 0xdead, 0xdeadcafe]\n",
- "learners = [NonTunedLogisticRegression(), LogisticRegression()]\n",
- "# tuned and non-tuned DecisionTree() can also be used\n",
- "\n",
- "#processors = [(NoPreProcessing(), NoPostProcessing()), (DIRemover(1.0), NoPostProcessing()), (DIRemover(0.5), NoPostProcessing()), (Reweighing(), NoPostProcessing()), (NoPreProcessing(), RejectOptionPostProcessing()), (NoPreProcessing(), CalibratedEqualOddsPostProcessing())]\n",
- "skyline_order = ['accuracy', 'selection_rate', 'false_discovery_rate']\n",
- "pre_processors = [NoPreProcessing(), DIRemover(1.0)]\n",
- "post_processors = [NoPostProcessing(), RejectOptionPostProcessing()]\n",
- "# add pairwise tuples for each pair of learners\n",
- "label_list = [('NonTunedLogistic', 'TunedLogistic')]"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 3,
- "metadata": {},
- "outputs": [],
- "source": [
- "def calculate_metrics(seed, learner, pre_processor, post_processor, val_strategy):\n",
- "    '''\n",
- "    Run a single GermanCreditDatasetSexExperiment with the given seed,\n",
- "    learner, processors and validation strategy.\n",
- "    '''\n",
- "    exp = GermanCreditDatasetSexExperiment(\n",
- "        fixed_random_seed=seed,\n",
- "        train_data_sampler=CompleteData(),\n",
- "        missing_value_handler=CompleteCaseAnalysis(),\n",
- "        numeric_attribute_scaler=NamedStandardScaler(),\n",
- "        learners=learner,\n",
- "        pre_processors=pre_processor,\n",
- "        post_processors=post_processor,\n",
- "        optimal_validation_strategy=val_strategy)\n",
- "    exp.run()"
- ]
- },
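The skyline_order list defined above names the metrics used to pick "optimal" pipelines. One common reading of a skyline is the Pareto-optimal set; the sketch below illustrates that idea only and is not necessarily the strategy implemented in fp/utils.py. The metric orientations are also assumptions here (larger accuracy and selection_rate treated as better, smaller false_discovery_rate as better):

    # Hypothetical sketch of skyline (Pareto) selection over pipeline results,
    # where each result is a dict mapping metric names to values.
    maximize = {'accuracy': True, 'selection_rate': True, 'false_discovery_rate': False}

    def better_or_equal(a, b, metric):
        # >= if larger is better for this metric, <= otherwise
        return a[metric] >= b[metric] if maximize[metric] else a[metric] <= b[metric]

    def dominates(a, b, metrics):
        # a dominates b: at least as good on every metric, strictly better on one
        return (all(better_or_equal(a, b, m) for m in metrics)
                and any(not better_or_equal(b, a, m) for m in metrics))

    def skyline(results, metrics):
        # keep exactly the results that no other result dominates
        return [r for r in results if not any(dominates(q, r, metrics) for q in results)]

Under this reading, skyline(results, skyline_order) keeps the non-dominated pipelines; the "order" in the notebook's name suggests the actual strategy may instead rank the metrics by priority, which would amount to a lexicographic sort rather than a Pareto filter.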
"text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 2.0s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend 
with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 
folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.6s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.6s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - 
"complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.6s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.6s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - 
"Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - 
"Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.6s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - 
"complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend 
with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.6s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": 
"stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting 
zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 
60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.6s finished\n", - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - 
"name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 
instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for 
features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances 
from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", 
- "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from training data\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.5s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - 
"complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 53 out of 60 | elapsed: 0.4s remaining: 0.1s\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.4s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "Fitting 5 folds for each of 12 candidates, totalling 60 fits\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Parallel(n_jobs=-1)]: Using backend LokyBackend with 4 concurrent workers.\n", - "[Parallel(n_jobs=-1)]: Done 60 out of 60 | elapsed: 0.5s finished\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n", - "complete_case removed 0 instances from validation data\n", - "Injecting zero columns for features not present set()\n" - ] - } - ], - "source": [ - "def run_exp(seeds, learners, pre_processors, post_processors, optimal_validation_strategies):\n", - " '''\n", - " This is the main driver function that calls the calculate_metrics to give metrices on combinations of various learners, pre and post processing techniques.\n", - " '''\n", - " total_experiments = len(pre_processors) * len(post_processors) * len(learners)\n", - " #pd.set_option(\"display.max_rows\", None, \"display.max_columns\", None)\n", - " accuracy, disp_imp, fnr, fpr, label, optimal = [], [], [], [], [], []\n", - " for j in range(total_experiments):\n", - " accuracy.append(list())\n", - " disp_imp.append(list())\n", - " fnr.append(list())\n", - " fpr.append(list())\n", - " label.append(list())\n", - " optimal.append(list())\n", - " for seed in seeds: \n", - " learner_acc, learner_di, learner_fnr, learner_fpr, learner_label, learner_optimal = [], [], [], [], [], []\n", - " calculate_metrics(seed, learners, pre_processors, post_processors, optimal_validation_strategies)\n", - " extract_info(learner_acc, learner_di, learner_fnr, learner_fpr, learner_label, learner_optimal)\n", - " for i in range(len(learner_acc)):\n", - " accuracy[i].append(learner_acc[i])\n", - " disp_imp[i].append(learner_di[i])\n", - " fnr[i].append(learner_fnr[i])\n", - " fpr[i].append(learner_fpr[i])\n", - " #label[i].append(learner_label[i])\n", - " optimal[i].append(learner_optimal[i])\n", - " label = learner_label\n", - " \n", - " return accuracy, disp_imp, fnr, fpr, label, 
optimal\n", - " #return learner_acc, learner_di, learner_fnr, learner_fpr, label, optimal\n", - "\n", - "s_accuracy, s_disp_imp, s_fnr, s_fpr, s_label, s_optimal = run_exp(seeds, learners, pre_processors, post_processors, skyline_order)\n", - "\n", - "#Converting string types to floating values for plotting\n", - "\n", - "accuracy, disp_imp, fnr, fpr = [],[],[],[]\n", - "for item in s_accuracy:\n", - " temp_list = list()\n", - " for i in item:\n", - " temp_list.append(float(i))\n", - " accuracy.append(temp_list)\n", - "\n", - "for item in s_disp_imp:\n", - " temp_list = list()\n", - " for i in item:\n", - " temp_list.append(float(i))\n", - " disp_imp.append(temp_list)\n", - "\n", - "for item in s_fnr:\n", - " temp_list = list()\n", - " for i in item:\n", - " temp_list.append(float(i))\n", - " fnr.append(temp_list)\n", - "\n", - "for item in s_fpr:\n", - " temp_list = list()\n", - " for i in item:\n", - " temp_list.append(float(i))\n", - " fpr.append(temp_list)\n", - "\n", - "title_list = s_label" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "def plotter(s_optimal, title_list, label_list, x, y, x_ticks, x_label, main_title):\n", - " '''\n", - " Function to plot various comparison plots.\n", - " '''\n", - " #update label list and title list as per the experiment being performed.\n", - " fig, axs = plt.subplots(len(pre_processors), len(post_processors), figsize=((10,20)))\n", - " axs = axs.flatten()\n", - " for i in range(0, len(y), 2):\n", - " loc = i//2\n", - " for k in range(len(x[i])):\n", - " if s_optimal[i][k]:\n", - " axs[loc].scatter(x[i][k], y[i][k], c='y', marker='o')\n", - " else:\n", - " axs[loc].scatter(x[i][k], y[i][k], c='b', marker='o')\n", - " if s_optimal[i+1][k]:\n", - " axs[loc].scatter(x[i+1][k], y[i+1][k], c='g', marker='o')\n", - " else:\n", - " axs[loc].scatter(x[i+1][k], y[i+1][k], c='r', marker='o')\n", - " axs[loc].set_xticks(x_ticks)\n", - " axs[loc].set_yticks(np.arange(0.5, 1, 0.1))\n", - " axs[loc].set_title(title_list[i])\n", - " axs[loc].grid(True)\n", - " axs[loc].set_xlabel(x_label)\n", - " axs[loc].set_ylabel('Accuracy')\n", - " axs[loc].legend(label_list[int(i%(len(label_list)*2)/2)])\n", - " fig.suptitle(main_title)\n", - " plt.subplots_adjust(wspace=0.3, hspace=0.43)\n", - " fig.savefig('examples_skyline/' + main_title + '.png')\n", - " plt.show()\n", - "\n", - "plotter(s_optimal, title_list, label_list, x=disp_imp, y=accuracy, x_ticks=[0.5, 1, 1.5], x_label='DI', main_title='accuracy_vs_di-skyline-order')\n", - "plotter(s_optimal, title_list, label_list, x=fnr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FNR', main_title='accuracy_vs_fnr-skyline-order')\n", - "plotter(s_optimal, title_list, label_list, x=fpr, y=accuracy, x_ticks=[-0.4, 0, 0.4], x_label='FPR', main_title='accuracy_vs_fpr-skyline-order')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.3" - } - }, - "nbformat": 4, - "nbformat_minor": 1 -} diff --git a/fp/dataset_experiments.py b/fp/dataset_experiments.py index 91e0229..08b07aa 100644 --- a/fp/dataset_experiments.py +++ b/fp/dataset_experiments.py 
@@ -6,7 +6,7 @@ class AdultDatasetWhiteMaleExperiment(BinaryClassificationExperiment): def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors, optimal_validation_strategy): + learners, pre_processors, post_processors): test_set_ratio = 0.2 validation_set_ratio = 0.1 @@ -27,16 +27,12 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, 'label_maps': [{1.0: '>50K', 0.0: '<=50K'}], 'protected_attribute_maps': [{1.0: 'White', 0.0: 'Non-white'}, {1.0: 'Male', 0.0: 'Female'}] } - if optimal_validation_strategy: - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, - pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'adultwhitemale', optimal_validation_strategy) - else: - print("Need to specify the strategy that is used to select the optimal results!") - exit() + + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'adultwhitemale') def load_raw_data(self): return pd.read_csv('datasets/raw/adult.csv', na_values='?', sep=',') @@ -45,7 +41,7 @@ def load_raw_data(self): class AdultDatasetMaleExperiment(BinaryClassificationExperiment): def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors, optimal_validation_strategy): + learners, pre_processors, post_processors): test_set_ratio = 0.2 validation_set_ratio = 0.1 @@ -66,16 +62,12 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, 'label_maps': [{1.0: '>50K', 0.0: '<=50K'}], 'protected_attribute_maps': [{1.0: 'Male', 0.0: 'Female'}] } - if optimal_validation_strategy: - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, - pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'adultmale', optimal_validation_strategy) - else: - print("Need to specify the strategy that is used to select the optimal results!") - exit() + + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'adultmale') def load_raw_data(self): return pd.read_csv('datasets/raw/adult.csv', na_values='?', sep=',') @@ -84,7 +76,7 @@ def load_raw_data(self): class AdultDatasetWhiteExperiment(BinaryClassificationExperiment): def __init__(self, 
fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors, optimal_validation_strategy): + learners, pre_processors, post_processors): test_set_ratio = 0.2 validation_set_ratio = 0.1 @@ -105,16 +97,12 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, 'label_maps': [{1.0: '>50K', 0.0: '<=50K'}], 'protected_attribute_maps': [{1.0: 'White', 0.0: 'Non-white'}] } - if optimal_validation_strategy: - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, - pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'adultwhite', optimal_validation_strategy) - else: - print("Need to specify the strategy that is used to select the optimal results!") - exit() + + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'adultwhite') def load_raw_data(self): return pd.read_csv('datasets/raw/adult.csv', na_values='?', sep=',') @@ -124,7 +112,7 @@ def load_raw_data(self): class PropublicaDatasetWhiteExperiment(BinaryClassificationExperiment): def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors, optimal_validation_strategy): + learners, pre_processors, post_processors): test_set_ratio = 0.2 validation_set_ratio = 0.1 label_name = 'two_year_recid' @@ -144,16 +132,12 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, 'label_maps': [{1.0: 1, 0.0: 0}], 'protected_attribute_maps': [{1.0: 'Caucasian', 0.0: 'Non-white'}] } - if optimal_validation_strategy: - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, - pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'propublicawhite', optimal_validation_strategy) - else: - print("Need to specify the strategy that is used to select the optimal results!") - exit() + + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'propublicawhite') def load_raw_data(self): """The custom pre-processing function is adapted from @@ -179,7 +163,7 @@ class GermanCreditDatasetSexExperiment(BinaryClassificationExperiment): def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors, 
optimal_validation_strategy): + learners, pre_processors, post_processors): test_set_ratio = 0.2 validation_set_ratio = 0.1 @@ -202,16 +186,12 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, 'label_maps': [{1.0: 1, 0.0: 0}], 'protected_attribute_maps': [{1.0: 'male', 0.0: 'female'}] } - if optimal_validation_strategy: - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, - pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'germancreditsex', optimal_validation_strategy) - else: - print("Need to specify the strategy that is used to select the optimal results!") - exit() + + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'germancreditsex') def load_raw_data(self): df = pd.read_csv('datasets/raw/german.csv', na_values='?', sep=',') @@ -273,7 +253,7 @@ class RicciRaceExperiment(BinaryClassificationExperiment): ''' def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors, optimal_validation_strategy): + learners, pre_processors, post_processors): test_set_ratio = 0.2 validation_set_ratio = 0.1 @@ -294,16 +274,12 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, 'label_maps': [{1.0: 1, 0.0: 0}], 'protected_attribute_maps': [{1.0: 'W', 0.0: 'NW'}] } - if optimal_validation_strategy: - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, - pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'riccirace', optimal_validation_strategy) - else: - print("Need to specify the strategy that is used to select the optimal results!") - exit() + + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names,attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'riccirace') def load_raw_data(self): df = pd.read_csv('datasets/raw/ricci.txt', na_values='?', sep=',') @@ -329,7 +305,7 @@ class GiveMeSomeCreditExperiment(BinaryClassificationExperiment): Fairness intervention for the Age attribute (priviledge for age>=25) while predicting if a person will experience 90 days past due delinquency or worse. 
''' def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors, optimal_validation_strategy): + learners, pre_processors, post_processors): test_set_ratio = 0.2 validation_set_ratio = 0.1 @@ -351,16 +327,12 @@ def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, dataset_metadata = { 'label_maps': [{1.0: 1, 0.0: 0}] } - if optimal_validation_strategy: - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, - pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'givecredit', optimal_validation_strategy) - else: - print("Need to specify the strategy that is used to select the optimal results!") - exit() + + super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, + numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, + train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, + post_processors, protected_attribute_names, privileged_classes, privileged_groups, + unprivileged_groups, dataset_metadata, 'givecredit') def load_raw_data(self): df = pd.read_csv('datasets/raw/givemesomecredit.csv', na_values='?', sep=',',index_col=False) diff --git a/fp/dataset_experiments_old.py b/fp/dataset_experiments_old.py deleted file mode 100644 index 08b07aa..0000000 --- a/fp/dataset_experiments_old.py +++ /dev/null @@ -1,341 +0,0 @@ -from fp.experiments import BinaryClassificationExperiment -import pandas as pd -import numpy as np - - -class AdultDatasetWhiteMaleExperiment(BinaryClassificationExperiment): - - def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors): - - test_set_ratio = 0.2 - validation_set_ratio = 0.1 - label_name = 'income-per-year' - positive_label = '>50K' - numeric_attribute_names = ['capital-gain', 'capital-loss', 'age', 'hours-per-week'] - categorical_attribute_names = ['workclass', 'education', 'marital-status', 'occupation', 'relationship', - 'native-country'] - attributes_to_drop_names = ['fnlwgt'] - - protected_attribute_names = ['race', 'sex'] - privileged_classes = [['White'], ['Male']] - - privileged_groups = [{'race': 1, 'sex': 1}] - unprivileged_groups = [{'race': 1, 'sex': 0}, {'sex': 0}] - - dataset_metadata = { - 'label_maps': [{1.0: '>50K', 0.0: '<=50K'}], - 'protected_attribute_maps': [{1.0: 'White', 0.0: 'Non-white'}, {1.0: 'Male', 0.0: 'Female'}] - } - - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'adultwhitemale') - - def load_raw_data(self): - return pd.read_csv('datasets/raw/adult.csv', na_values='?', sep=',') - - -class AdultDatasetMaleExperiment(BinaryClassificationExperiment): - - def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - 
learners, pre_processors, post_processors): - - test_set_ratio = 0.2 - validation_set_ratio = 0.1 - label_name = 'income-per-year' - positive_label = '>50K' - numeric_attribute_names = ['capital-gain', 'capital-loss', 'age', 'hours-per-week'] - categorical_attribute_names = ['workclass', 'education', 'marital-status', 'occupation', 'relationship', - 'native-country'] - attributes_to_drop_names = ['fnlwgt', 'race'] - - protected_attribute_names = ['sex'] - privileged_classes = [['Male']] - - privileged_groups = [{'sex': 1}] - unprivileged_groups = [{'sex': 0}] - - dataset_metadata = { - 'label_maps': [{1.0: '>50K', 0.0: '<=50K'}], - 'protected_attribute_maps': [{1.0: 'Male', 0.0: 'Female'}] - } - - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'adultmale') - - def load_raw_data(self): - return pd.read_csv('datasets/raw/adult.csv', na_values='?', sep=',') - - -class AdultDatasetWhiteExperiment(BinaryClassificationExperiment): - - def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors): - - test_set_ratio = 0.2 - validation_set_ratio = 0.1 - label_name = 'income-per-year' - positive_label = '>50K' - numeric_attribute_names = ['capital-gain', 'capital-loss', 'age', 'hours-per-week'] - categorical_attribute_names = ['workclass', 'education', 'marital-status', 'occupation', 'relationship', - 'native-country'] - attributes_to_drop_names = ['fnlwgt', 'sex'] - - protected_attribute_names = ['race'] - privileged_classes = [['White']] - - privileged_groups = [{'race': 1}] - unprivileged_groups = [{'race': 0}] - - dataset_metadata = { - 'label_maps': [{1.0: '>50K', 0.0: '<=50K'}], - 'protected_attribute_maps': [{1.0: 'White', 0.0: 'Non-white'}] - } - - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'adultwhite') - - def load_raw_data(self): - return pd.read_csv('datasets/raw/adult.csv', na_values='?', sep=',') - - - -class PropublicaDatasetWhiteExperiment(BinaryClassificationExperiment): - - def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors): - test_set_ratio = 0.2 - validation_set_ratio = 0.1 - label_name = 'two_year_recid' - positive_label = 1 - numeric_attribute_names = ['age', 'decile_score', 'priors_count', 'days_b_screening_arrest', 'decile_score', - 'is_recid'] - categorical_attribute_names = ['c_charge_degree', 'age_cat', 'score_text'] - attributes_to_drop_names = ['sex', 'c_jail_in', 'c_jail_out'] - - protected_attribute_names = ['race'] - privileged_classes = [['Caucasian']] - - privileged_groups = [{'race': 1}] - unprivileged_groups = [{'race': 0}] - - dataset_metadata = { - 'label_maps': [{1.0: 1, 0.0: 0}], - 'protected_attribute_maps': [{1.0: 'Caucasian', 0.0: 'Non-white'}] - } - - 
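        # Note: privileged_classes maps the raw attribute values onto aif360's
        # binary encoding (1.0 = privileged), while privileged_groups and
        # unprivileged_groups are the dictionaries that ClassificationMetric
        # later uses to slice every metric by group; e.g. [{'race': 1}] here
        # selects the rows encoded as 'Caucasian' under protected_attribute_maps.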
super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'propublicawhite') - - def load_raw_data(self): - """The custom pre-processing function is adapted from - https://github.com/IBM/AIF360/blob/master/aif360/algorithms/preprocessing/optim_preproc_helpers/data_preproc_functions.py - https://github.com/fair-preprocessing/nips2017/blob/master/compas/code/Generate_Compas_Data.ipynb - """ - df = pd.read_csv('datasets/raw/propublica-recidivism.csv', na_values='?', sep=',') - df = df[['age', 'c_charge_degree', 'race', 'age_cat', 'score_text', - 'sex', 'priors_count', 'days_b_screening_arrest', 'decile_score', - 'is_recid', 'two_year_recid', 'c_jail_in', 'c_jail_out']] - ix = df['days_b_screening_arrest'] <= 100 - ix = (df['days_b_screening_arrest'] >= -100) & ix - ix = (df['is_recid'] != -1) & ix - ix = (df['c_charge_degree'] != "O") & ix - ix = (df['score_text'] != 'N/A') & ix - df = df.loc[ix, :] - df['length_of_stay'] = (pd.to_datetime(df['c_jail_out']) - pd.to_datetime(df['c_jail_in'])).apply( - lambda x: x.days) - return df - - -class GermanCreditDatasetSexExperiment(BinaryClassificationExperiment): - - - def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors): - - test_set_ratio = 0.2 - validation_set_ratio = 0.1 - label_name = 'credit' - positive_label = 1 - numeric_attribute_names = ['month', 'credit_amount', 'residence_since', 'age', 'number_of_credits', - 'people_liable_for'] - categorical_attribute_names = ['credit_history', 'savings', 'employment'] - attributes_to_drop_names = ['personal_status', 'status', 'purpose', 'investment_as_income_percentage', - 'other_debtors', 'property', 'installment_plans', 'housing', 'skill_level', - 'telephone', 'foreign_worker'] - - protected_attribute_names = ['sex'] - privileged_classes = [[1.0]] - - privileged_groups = [{'sex': 1.0}] - unprivileged_groups = [{'sex': 0.0}] - - dataset_metadata = { - 'label_maps': [{1.0: 1, 0.0: 0}], - 'protected_attribute_maps': [{1.0: 'male', 0.0: 'female'}] - } - - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'germancreditsex') - - def load_raw_data(self): - df = pd.read_csv('datasets/raw/german.csv', na_values='?', sep=',') - - def group_credit_hist(x): - if x in ['A30', 'A31', 'A32']: - return 'None/Paid' - elif x == 'A33': - return 'Delay' - elif x == 'A34': - return 'Other' - else: - return 'NA' - - def group_employ(x): - if x == 'A71': - return 'Unemployed' - elif x in ['A72', 'A73']: - return '1-4 years' - elif x in ['A74', 'A75']: - return '4+ years' - else: - return 'NA' - - def group_savings(x): - if x in ['A61', 'A62']: - return '<500' - elif x in ['A63', 'A64']: - return '500+' - elif x == 'A65': - return 'Unknown/None' - else: - return 'NA' - - def group_status(x): - if x in ['A11', 'A12']: - return '<200' - 
elif x in ['A13']: - return '200+' - elif x == 'A14': - return 'None' - else: - return 'NA' - - status_map = {'A91': 1.0, 'A93': 1.0, 'A94': 1.0, 'A92': 0.0, 'A95': 0.0} - df['sex'] = df['personal_status'].replace(status_map) - # group credit history, savings, and employment - df['credit_history'] = df['credit_history'].apply(lambda x: group_credit_hist(x)) - df['savings'] = df['savings'].apply(lambda x: group_savings(x)) - df['employment'] = df['employment'].apply(lambda x: group_employ(x)) - df['age'] = df['age'].apply(lambda x: np.float(x >= 25)) - df['status'] = df['status'].apply(lambda x: group_status(x)) - return df - -class RicciRaceExperiment(BinaryClassificationExperiment): - ''' - Check for fairness based on race (white vs minority i.e Black and Hispanic) while predicting if a candidate will pass i.e obtain total - marks greater than or equal to 70.0 - ''' - - def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors): - - test_set_ratio = 0.2 - validation_set_ratio = 0.1 - - label_name = 'combine' - positive_label = 1 - numeric_attribute_names = ['oral', 'written'] - categorical_attribute_names = ['position'] - attributes_to_drop_names = [] - - protected_attribute_names = ['race'] - privileged_classes = [[1.0]] - - privileged_groups = [{'race': 1.0}] - unprivileged_groups = [{'race': 0.0}] - - dataset_metadata = { - 'label_maps': [{1.0: 1, 0.0: 0}], - 'protected_attribute_maps': [{1.0: 'W', 0.0: 'NW'}] - } - - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names,attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'riccirace') - - def load_raw_data(self): - df = pd.read_csv('datasets/raw/ricci.txt', na_values='?', sep=',') - df.columns = map(str.lower, df.columns) - - def group_race_minority(x): - if x in ['B', 'H', 'B']: - return 'NW' - else: - return 'W' - - post_map = {'Captain': 0.0, 'Lieutenant': 1.0} - df['position'] = df['position'].replace(post_map) - - #group minorities i.e Black and Hispanic are combined to 'NW'(non white) - df['race'] = df['race'].apply(lambda x: group_race_minority(x)) - df['combine'] = df['combine'].apply(lambda x: int(x >= 70)) - - return df - -class GiveMeSomeCreditExperiment(BinaryClassificationExperiment): - ''' - Fairness intervention for the Age attribute (priviledge for age>=25) while predicting if a person will experience 90 days past due delinquency or worse. 
- ''' - def __init__(self, fixed_random_seed, train_data_sampler, missing_value_handler, numeric_attribute_scaler, - learners, pre_processors, post_processors): - - test_set_ratio = 0.2 - validation_set_ratio = 0.1 - label_name = 'SeriousDlqin2yrs' - positive_label = 1 - - numeric_attribute_names = ['RevolvingUtilizationOfUnsecuredLines','age','NumberOfTime30-59DaysPastDueNotWorse', - 'DebtRatio','MonthlyIncome','NumberOfOpenCreditLinesAndLoans','NumberOfTimes90DaysLate', - 'NumberRealEstateLoansOrLines','NumberOfTime60-89DaysPastDueNotWorse','NumberOfDependents'] - categorical_attribute_names = [] - attributes_to_drop_names = [] - - protected_attribute_names = ['age'] - privileged_classes = [[1.0]] - - privileged_groups = [{'age': 1}] - unprivileged_groups = [{'age': 0}] - - dataset_metadata = { - 'label_maps': [{1.0: 1, 0.0: 0}] - } - - super().__init__(fixed_random_seed, test_set_ratio, validation_set_ratio, label_name, positive_label, - numeric_attribute_names, categorical_attribute_names, attributes_to_drop_names, - train_data_sampler, missing_value_handler, numeric_attribute_scaler, learners, pre_processors, - post_processors, protected_attribute_names, privileged_classes, privileged_groups, - unprivileged_groups, dataset_metadata, 'givecredit') - - def load_raw_data(self): - df = pd.read_csv('datasets/raw/givemesomecredit.csv', na_values='?', sep=',',index_col=False) - df['age'] = df['age'].apply(lambda x: int(x >= 25)) - return df - diff --git a/fp/experiments.py b/fp/experiments.py index bbc5d5e..cb5f33c 100644 --- a/fp/experiments.py +++ b/fp/experiments.py @@ -10,7 +10,6 @@ from sklearn.base import clone from sklearn.metrics import roc_auc_score from sklearn.model_selection import train_test_split -from fp.utils import filter_optimal_results_skyline_order, filter_optimal_results_skyline_formula class BinaryClassificationExperiment: @@ -36,8 +35,7 @@ def __init__(self, privileged_groups, unprivileged_groups, dataset_metadata, - dataset_name, - optimal_validation_strategy): + dataset_name): self.fixed_random_seed = fixed_random_seed self.test_set_ratio = test_set_ratio @@ -61,21 +59,20 @@ def __init__(self, self.dataset_name = dataset_name self.log_path = 'logs/' self.exec_timestamp = self.generate_timestamp() - self.optimal_validation_strategy = optimal_validation_strategy # --- Helper Methods Begin ------------------------------------------------ - def unique_file_name(self, pre_processor, post_processor, learner): - return '{0}__{1}__{2}__{3}__{4}__{5}__{6}'.format(self.dataset_name, - pre_processor.name(), - post_processor.name(), + def unique_file_name(self, learner, pre_processor, post_processor): + return '{}__{}__{}__{}__{}__{}__{}'.format(self.dataset_name, learner.name(), self.missing_value_handler.name(), self.train_data_sampler.name(), - self.numeric_attribute_scaler.name()) + self.numeric_attribute_scaler.name(), + pre_processor.name(), + post_processor.name()) def generate_file_path(self, file_name=''): @@ -134,6 +131,7 @@ def apply_model(self, data, scalers, adjusted_annotated_train_data, pre_processo feature_names_in_train_but_not_in_current = set(train_feature_names).difference( set(current_feature_names)) + print("Injecting zero columns for features not present", feature_names_in_train_but_not_in_current) validation_data_df, _ = adjusted_annotated_data.convert_to_dataframe() @@ -149,12 +147,14 @@ def apply_model(self, data, scalers, adjusted_annotated_train_data, pre_processo if learner.needs_annotated_data_for_prediction(): 
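        # needs_annotated_data_for_prediction() distinguishes aif360-style
        # in-processors, whose predict() consumes and returns an annotated
        # StandardDataset, from plain scikit-learn estimators, whose predict()
        # takes the raw feature matrix and returns an array of labels.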
            adjusted_annotated__data_with_predictions = model.predict(adjusted_annotated_data)
         else:
-            adjusted_annotated__data_with_predictions.labels = model.predict(adjusted_annotated_data.features).reshape(-1,1)
+            adjusted_annotated__data_with_predictions.labels = model.predict(adjusted_annotated_data.features)
+
         try:
             class_probs = model.predict_proba(adjusted_annotated_data.features)
             adjusted_annotated__data_with_predictions.scores = class_probs[:, 0]
         except AttributeError:
             print("WARNING: MODEL CANNOT ASSIGN CLASS PROBABILITIES")
+
         return adjusted_annotated_data, adjusted_annotated__data_with_predictions
 
@@ -235,11 +235,10 @@ def run_single_exp(self, annotated_train_data, validation_data, test_data, scale
         post_processor : fairprep pre-processor abstraction from
                          aif360.algorithms.post_processors
         """
-
+
         adjusted_annotated_train_data = self.preprocess_data(pre_processor, annotated_train_data)
-
         model = self.learn_classifier(learner, adjusted_annotated_train_data, self.fixed_random_seed)
-
+
         adjusted_annotated_train_data_with_predictions = adjusted_annotated_train_data.copy()
 
         if learner.needs_annotated_data_for_prediction():
@@ -261,12 +260,13 @@ def run_single_exp(self, annotated_train_data, validation_data, test_data, scale
                                                        adjusted_annotated_test_data_with_predictions)
 
         results_file_name = '../{}{}-{}.csv'.format(
-            self.generate_file_path(), self.unique_file_name(pre_processor, post_processor, learner), self.fixed_random_seed)
+            self.generate_file_path(), self.unique_file_name(learner, pre_processor, post_processor), self.fixed_random_seed)
         results_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), results_file_name)
 
         results_dir_name = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../{}'.format(self.generate_file_path()))
         if not os.path.exists(results_dir_name):
             os.makedirs(results_dir_name)
+
         results_file = []
 
         results_file = self.log_metrics(results_file, model, adjusted_annotated_validation_data,
@@ -280,70 +280,45 @@ def run_single_exp(self, annotated_train_data, validation_data, test_data, scale
 
         results_file.to_csv(results_file_path, index=False)
 
-
     def filter_optimal_results(self):
         """Identifies the experiment(s) with the highest accuracy as optimal
         result. Keeps the test metrics just for the experiment(s) with the
         optimal result.
""" - results_dir_name = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../{}'.format(self.generate_file_path())) results_dir = os.listdir(Path(results_dir_name)) + accuracies = dict() + max_accuracy = 0 - ##### SKYLINE FORMULA IMPLEMENTATION - - privileged_metric_names = ['num_true_positives', 'num_false_positives', 'num_false_negatives', - 'num_true_negatives', 'num_generalized_true_positives', - 'num_generalized_false_positives', 'num_generalized_false_negatives', - 'num_generalized_true_negatives', 'true_positive_rate', 'false_positive_rate', - 'false_negative_rate', 'true_negative_rate', 'generalized_true_positive_rate', - 'generalized_false_positive_rate', 'generalized_false_negative_rate', - 'generalized_true_negative_rate', 'positive_predictive_value', - 'false_discovery_rate', 'false_omission_rate', 'negative_predictive_value', - 'accuracy', 'error_rate', 'num_pred_positives', 'num_pred_negatives', - 'selection_rate'] - - dictionary = {} - filenames = list() + # Fetching the accuracy from the row('val', 'None', 'accuracy') of all the experiment results for result_filename in results_dir: file_path = os.path.join(results_dir_name, result_filename) result_df = pd.read_csv(file_path) result_df.fillna(value='', inplace=True) - for privileged_metric in privileged_metric_names: - if privileged_metric not in dictionary: - dictionary[privileged_metric] = list() - p_metric = (result_df.loc[(result_df['Split'] == 'val') & + accuracy = (result_df.loc[(result_df['Split'] == 'val') & (result_df['PrivilegedStatus'] == '') & - (result_df['MetricName'] == privileged_metric), 'MetricValue'].values[0]) - dictionary[privileged_metric].append(p_metric) - filenames.append(result_filename) - - privileged_metric_values = pd.DataFrame(dictionary) - privileged_metric_values['filenames'] = filenames - - if isinstance(self.optimal_validation_strategy, dict): - skyline_result = filter_optimal_results_skyline_formula(privileged_metric_values, self.optimal_validation_strategy) - else: - skyline_result = filter_optimal_results_skyline_order(privileged_metric_values, self.optimal_validation_strategy) + (result_df['MetricName'] == 'accuracy'), 'MetricValue'].values[0]) + accuracies[result_filename] = accuracy + if accuracy > max_accuracy: + max_accuracy = accuracy + # List of non optimal and optimal filenames and accuracy non_optimal_filenames = list() optimal_filenames = list() - filenames_list = privileged_metric_values['filenames'].tolist() - for file_name in filenames_list: - if file_name == skyline_result[-1]: - optimal_filenames.append(file_name) + for filename, accuracy in accuracies.items(): + if accuracy != max_accuracy: + non_optimal_filenames.append(filename) else: - non_optimal_filenames.append(file_name) + optimal_filenames.append(filename) # Removing the test results from the non optimal experiment results - ''' for file_name in non_optimal_filenames: - file_path = os.path.join(results_dir_name, file_name) + file_path = os.path.join(results_dir_name, result_filename) result_df = pd.read_csv(file_path) result_df = result_df[(result_df['Split'] != 'test')] os.remove(file_path) result_df.to_csv(file_path, index=False, header=False) - ''' + # Renaming the optimal experiment results file (or files if tie) for file_name in optimal_filenames: file_path = os.path.join(results_dir_name, file_name) @@ -366,7 +341,6 @@ def run(self): self.validation_set_ratio, random_state=self.fixed_random_seed) - #Just returns complete data train_data = self.train_data_sampler.sample(all_train_data) 
second_split_ratio = self.test_set_ratio / (self.test_set_ratio + self.validation_set_ratio) @@ -374,8 +348,6 @@ def run(self): validation_data, test_data = train_test_split(test_and_validation_data, test_size=second_split_ratio, random_state=self.fixed_random_seed) - - #fit just passes and hadle_missing does drop_na operation self.missing_value_handler.fit(train_data) filtered_train_data = self.missing_value_handler.handle_missing(train_data) @@ -402,8 +374,10 @@ def run(self): features_to_drop=self.attributes_to_drop_names, metadata=self.dataset_metadata ) + for pre_processor in self.pre_processors: for learner in self.learners: for post_processor in self.post_processors: - self.run_single_exp(annotated_train_data, validation_data, test_data, scalers, pre_processor, learner, post_processor) + self.run_single_exp(annotated_train_data, validation_data, test_data, scalers, + pre_processor, learner, post_processor) self.filter_optimal_results() diff --git a/fp/experiments_old.py b/fp/experiments_old.py deleted file mode 100644 index cb5f33c..0000000 --- a/fp/experiments_old.py +++ /dev/null @@ -1,383 +0,0 @@ -import os -import numpy as np -import pandas as pd - -from time import time -from pathlib import Path -from datetime import datetime -from aif360.datasets import StandardDataset -from aif360.metrics import ClassificationMetric -from sklearn.base import clone -from sklearn.metrics import roc_auc_score -from sklearn.model_selection import train_test_split - - -class BinaryClassificationExperiment: - - - def __init__(self, - fixed_random_seed, - test_set_ratio, - validation_set_ratio, - label_name, - positive_label, - numeric_attribute_names, - categorical_attribute_names, - attributes_to_drop_names, - train_data_sampler, - missing_value_handler, - numeric_attribute_scaler, - learners, - pre_processors, - post_processors, - protected_attribute_names, - privileged_classes, - privileged_groups, - unprivileged_groups, - dataset_metadata, - dataset_name): - - self.fixed_random_seed = fixed_random_seed - self.test_set_ratio = test_set_ratio - self.validation_set_ratio = validation_set_ratio - self.label_name = label_name - self.positive_label = positive_label - self.numeric_attribute_names = numeric_attribute_names - self.categorical_attribute_names = categorical_attribute_names - self.attributes_to_drop_names = attributes_to_drop_names - self.train_data_sampler = train_data_sampler - self.missing_value_handler = missing_value_handler - self.numeric_attribute_scaler = numeric_attribute_scaler - self.learners = learners - self.pre_processors = pre_processors - self.post_processors = post_processors - self.protected_attribute_names = protected_attribute_names - self.privileged_classes = privileged_classes - self.privileged_groups = privileged_groups - self.unprivileged_groups = unprivileged_groups - self.dataset_metadata = dataset_metadata - self.dataset_name = dataset_name - self.log_path = 'logs/' - self.exec_timestamp = self.generate_timestamp() - - - - # --- Helper Methods Begin ------------------------------------------------ - - - def unique_file_name(self, learner, pre_processor, post_processor): - return '{}__{}__{}__{}__{}__{}__{}'.format(self.dataset_name, - learner.name(), - self.missing_value_handler.name(), - self.train_data_sampler.name(), - self.numeric_attribute_scaler.name(), - pre_processor.name(), - post_processor.name()) - - - def generate_file_path(self, file_name=''): - dir_name = '{}_{}/'.format(self.exec_timestamp, self.dataset_name) - return self.log_path + dir_name + 
file_name - - - def generate_timestamp(self): - return datetime.fromtimestamp(time()).strftime('%Y-%m-%d_%H-%M-%S-%f')[:-3] - - - def load_raw_data(self): - raise NotImplementedError - - - def learn_classifier(self, learner, annotated_train_data, fixed_random_seed): - return learner.fit_model(annotated_train_data, fixed_random_seed) - - - def preprocess_data(self, pre_processor, annotated_dataset): - return pre_processor.pre_process(annotated_dataset, self.privileged_groups, self.unprivileged_groups) - - - def post_process_predictions(self, post_processor, validation_dataset, validation_dataset_with_predictions, - testset_with_predictions): - return post_processor.post_process(validation_dataset, validation_dataset_with_predictions, - testset_with_predictions, self.fixed_random_seed, - self.privileged_groups, self.unprivileged_groups) - - - def apply_model(self, data, scalers, adjusted_annotated_train_data, pre_processor, learner, model): - filtered_data = self.missing_value_handler.handle_missing(data) - print(self.missing_value_handler.name(), 'removed', len(data) - len(filtered_data), - 'instances from validation data') - - for numerical_attribute, scaler in scalers.items(): - numerical_attribute_data = np.array(filtered_data[numerical_attribute]).reshape(-1, 1) - scaled_numerical_attribute_data = scaler.transform(numerical_attribute_data) - filtered_data.loc[:, numerical_attribute] = scaled_numerical_attribute_data - - annotated_data = StandardDataset( - df=filtered_data, - label_name=self.label_name, - favorable_classes=[self.positive_label], - protected_attribute_names=self.protected_attribute_names, - privileged_classes=self.privileged_classes, - categorical_features=self.categorical_attribute_names, - features_to_drop=self.attributes_to_drop_names, - metadata=self.dataset_metadata - ) - - adjusted_annotated_data = self.preprocess_data(pre_processor, annotated_data) - - train_feature_names = adjusted_annotated_train_data.feature_names - current_feature_names = adjusted_annotated_data.feature_names - - feature_names_in_train_but_not_in_current = set(train_feature_names).difference( - set(current_feature_names)) - - print("Injecting zero columns for features not present", feature_names_in_train_but_not_in_current) - - validation_data_df, _ = adjusted_annotated_data.convert_to_dataframe() - - for feature_name in feature_names_in_train_but_not_in_current: - validation_data_df.loc[:, feature_name] = 0.0 - - adjusted_annotated_data.feature_names = train_feature_names - adjusted_annotated_data.features = validation_data_df[train_feature_names].values.copy() - - adjusted_annotated__data_with_predictions = adjusted_annotated_data.copy() - - if learner.needs_annotated_data_for_prediction(): - adjusted_annotated__data_with_predictions = model.predict(adjusted_annotated_data) - else: - adjusted_annotated__data_with_predictions.labels = model.predict(adjusted_annotated_data.features) - - try: - class_probs = model.predict_proba(adjusted_annotated_data.features) - adjusted_annotated__data_with_predictions.scores = class_probs[:, 0] - except AttributeError: - print("WARNING: MODEL CANNOT ASSIGN CLASS PROBABILITIES") - - return adjusted_annotated_data, adjusted_annotated__data_with_predictions - - - def log_metrics(self, results_file, model, annotated_data, annotated_data_with_predictions, prefix): - metric = ClassificationMetric(annotated_data, annotated_data_with_predictions, - unprivileged_groups=self.unprivileged_groups, - privileged_groups=self.privileged_groups) - - privileged_metric_names = 
['num_true_positives', 'num_false_positives', 'num_false_negatives', - 'num_true_negatives', 'num_generalized_true_positives', - 'num_generalized_false_positives', 'num_generalized_false_negatives', - 'num_generalized_true_negatives', 'true_positive_rate', 'false_positive_rate', - 'false_negative_rate', 'true_negative_rate', 'generalized_true_positive_rate', - 'generalized_false_positive_rate', 'generalized_false_negative_rate', - 'generalized_true_negative_rate', 'positive_predictive_value', - 'false_discovery_rate', 'false_omission_rate', 'negative_predictive_value', - 'accuracy', 'error_rate', 'num_pred_positives', 'num_pred_negatives', - 'selection_rate'] - - for maybe_privileged in [None, True, False]: - for metric_name in privileged_metric_names: - metric_function = getattr(metric, metric_name) - metric_value = metric_function(privileged=maybe_privileged) - results_file.append([prefix, maybe_privileged, metric_name, metric_value]) - - if hasattr(model, 'predict_proba'): - auc = roc_auc_score(annotated_data.labels, model.predict_proba(annotated_data.features)[:, 1]) - else: - auc = None - - results_file.append([prefix, '', 'roc_auc', auc]) - - global_metric_names = ['true_positive_rate_difference', 'false_positive_rate_difference', - 'false_negative_rate_difference', 'false_omission_rate_difference', - 'false_discovery_rate_difference', 'false_positive_rate_ratio', - 'false_negative_rate_ratio', 'false_omission_rate_ratio', - 'false_discovery_rate_ratio', 'average_odds_difference', 'average_abs_odds_difference', - 'error_rate_difference', 'error_rate_ratio', 'disparate_impact', - 'statistical_parity_difference', 'generalized_entropy_index', - 'between_all_groups_generalized_entropy_index', - 'between_group_generalized_entropy_index', 'theil_index', 'coefficient_of_variation', - 'between_group_theil_index', 'between_group_coefficient_of_variation', - 'between_all_groups_theil_index', 'between_all_groups_coefficient_of_variation'] - - for metric_name in global_metric_names: - metric_function = getattr(metric, metric_name) - metric_value = metric_function() - results_file.append([prefix, '', metric_name, metric_value]) - - return results_file - - - # --- Helper Methods End -------------------------------------------------- - - - def run_single_exp(self, annotated_train_data, validation_data, test_data, scalers, pre_processor, - learner, post_processor): - """Executes a single instance of experiment out of all the possible - experiments from the given parameters. 
- - Parameters: - ----------- - annotated_train_data : annotated aif360.datasets.StandardDataset of - train data - - validation_data : pandas dataframe of validation data - - test_data : pandas dataframe of test data - - scalers : dictionary with (key='feature name', value='type of scaler') - - pre_processor : fairprep pre-processor abstraction from - aif360.algorithms.pre_processors - - learner : fairprep learner abstraction from sci-kit learn or - aif360.algorithms.inprocessing - - post_processor : fairprep pre-processor abstraction from - aif360.algorithms.post_processors - """ - - adjusted_annotated_train_data = self.preprocess_data(pre_processor, annotated_train_data) - model = self.learn_classifier(learner, adjusted_annotated_train_data, self.fixed_random_seed) - - adjusted_annotated_train_data_with_predictions = adjusted_annotated_train_data.copy() - - if learner.needs_annotated_data_for_prediction(): - adjusted_annotated_train_data_with_predictions = model.predict( - adjusted_annotated_train_data_with_predictions) - else: - adjusted_annotated_train_data_with_predictions.labels = model.predict( - adjusted_annotated_train_data_with_predictions.features) - - adjusted_annotated_validation_data, adjusted_annotated_validation_data_with_predictions = \ - self.apply_model(validation_data, scalers, adjusted_annotated_train_data, pre_processor, learner, model) - - adjusted_annotated_test_data, adjusted_annotated_test_data_with_predictions = \ - self.apply_model(test_data, scalers, adjusted_annotated_train_data, pre_processor, learner, model) - - adjusted_annotated_test_data_with_predictions = self.post_process_predictions(post_processor, - adjusted_annotated_validation_data, - adjusted_annotated_validation_data_with_predictions, - adjusted_annotated_test_data_with_predictions) - - results_file_name = '../{}{}-{}.csv'.format( - self.generate_file_path(), self.unique_file_name(pre_processor, learner, post_processor), self.fixed_random_seed) - results_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), results_file_name) - - results_dir_name = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../{}'.format(self.generate_file_path())) - if not os.path.exists(results_dir_name): - os.makedirs(results_dir_name) - - results_file = [] - - results_file = self.log_metrics(results_file, model, adjusted_annotated_validation_data, - adjusted_annotated_validation_data_with_predictions, 'val') - results_file = self.log_metrics(results_file, model, adjusted_annotated_train_data, - adjusted_annotated_train_data_with_predictions, 'train') - results_file = self.log_metrics(results_file, model, adjusted_annotated_test_data, - adjusted_annotated_test_data_with_predictions, 'test') - - results_file = pd.DataFrame(results_file, columns=['Split', 'PrivilegedStatus', 'MetricName', 'MetricValue']) - results_file.to_csv(results_file_path, index=False) - - - def filter_optimal_results(self): - """Identifies the experiment(s) with the highest accuracy as optimal - result. Keeps the test metrics just for the experiment(s) with the - optimal result. 
- """ - results_dir_name = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../{}'.format(self.generate_file_path())) - results_dir = os.listdir(Path(results_dir_name)) - accuracies = dict() - max_accuracy = 0 - - # Fetching the accuracy from the row('val', 'None', 'accuracy') of all the experiment results - for result_filename in results_dir: - file_path = os.path.join(results_dir_name, result_filename) - result_df = pd.read_csv(file_path) - result_df.fillna(value='', inplace=True) - accuracy = (result_df.loc[(result_df['Split'] == 'val') & - (result_df['PrivilegedStatus'] == '') & - (result_df['MetricName'] == 'accuracy'), 'MetricValue'].values[0]) - accuracies[result_filename] = accuracy - if accuracy > max_accuracy: - max_accuracy = accuracy - - # List of non optimal and optimal filenames and accuracy - non_optimal_filenames = list() - optimal_filenames = list() - for filename, accuracy in accuracies.items(): - if accuracy != max_accuracy: - non_optimal_filenames.append(filename) - else: - optimal_filenames.append(filename) - - # Removing the test results from the non optimal experiment results - for file_name in non_optimal_filenames: - file_path = os.path.join(results_dir_name, result_filename) - result_df = pd.read_csv(file_path) - result_df = result_df[(result_df['Split'] != 'test')] - os.remove(file_path) - result_df.to_csv(file_path, index=False, header=False) - - # Renaming the optimal experiment results file (or files if tie) - for file_name in optimal_filenames: - file_path = os.path.join(results_dir_name, file_name) - optimal_file_name = '{}{}'.format(file_name[:-4], '__OPTIMAL.csv') - optimal_file_path = os.path.join(results_dir_name, optimal_file_name) - os.rename(file_path, optimal_file_path) - - - def run(self): - """Executes all the possible experiments from the combination of given - learners, pre-processors and post-processors. - - No. 
of experiments = (#learners * #preprocessors * #postprocessors) - """ - np.random.seed(self.fixed_random_seed) - - data = self.load_raw_data() - - all_train_data, test_and_validation_data = train_test_split(data, test_size=self.test_set_ratio + - self.validation_set_ratio, - random_state=self.fixed_random_seed) - - train_data = self.train_data_sampler.sample(all_train_data) - - second_split_ratio = self.test_set_ratio / (self.test_set_ratio + self.validation_set_ratio) - - validation_data, test_data = train_test_split(test_and_validation_data, test_size=second_split_ratio, - random_state=self.fixed_random_seed) - - self.missing_value_handler.fit(train_data) - filtered_train_data = self.missing_value_handler.handle_missing(train_data) - - print(self.missing_value_handler.name(), 'removed', len(train_data) - len(filtered_train_data), - 'instances from training data') - - scalers = {} - - for numerical_attribute in self.numeric_attribute_names: - numerical_attribute_data = np.array(filtered_train_data[numerical_attribute]).reshape(-1, 1) - scaler = clone(self.numeric_attribute_scaler).fit(numerical_attribute_data) - scaled_numerical_attribute_data = scaler.transform(numerical_attribute_data) - - filtered_train_data.loc[:, numerical_attribute] = scaled_numerical_attribute_data - scalers[numerical_attribute] = scaler - - annotated_train_data = StandardDataset( - df=filtered_train_data, - label_name=self.label_name, - favorable_classes=[self.positive_label], - protected_attribute_names=self.protected_attribute_names, - privileged_classes=self.privileged_classes, - categorical_features=self.categorical_attribute_names, - features_to_drop=self.attributes_to_drop_names, - metadata=self.dataset_metadata - ) - - for pre_processor in self.pre_processors: - for learner in self.learners: - for post_processor in self.post_processors: - self.run_single_exp(annotated_train_data, validation_data, test_data, scalers, - pre_processor, learner, post_processor) - self.filter_optimal_results() From 2752ac234b7c109342a985788f978299202dae08 Mon Sep 17 00:00:00 2001 From: Ke Yang Date: Wed, 15 Jul 2020 10:22:43 -0400 Subject: [PATCH 5/7] clean up wrong files --- fp/utils.py | 43 ------------------------------------------- 1 file changed, 43 deletions(-) delete mode 100644 fp/utils.py diff --git a/fp/utils.py b/fp/utils.py deleted file mode 100644 index 6d9113e..0000000 --- a/fp/utils.py +++ /dev/null @@ -1,43 +0,0 @@ -import pandas as pd - -def filter_optimal_results_skyline_order(_df, _order_list): - _df['selection_rate'] = abs(1 - _df['selection_rate']) - higher_is_better = ['num_true_positives', 'num_true_negatives', 'num_generalized_true_positives', - 'num_generalized_true_negatives', 'true_positive_rate', 'true_negative_rate', - 'generalized_true_positive_rate', 'generalized_true_negative_rate', 'positive_predictive_value', - 'accuracy', 'num_pred_positives'] - lower_is_better = ['selection_rate', 'num_false_positives', 'num_false_negatives', - 'num_generalized_false_positives', 'num_generalized_false_negatives', 'false_positive_rate', - 'false_negative_rate', 'generalized_false_positive_rate', 'generalized_false_negative_rate', - 'false_discovery_rate', 'false_omission_rate', 'negative_predictive_value', 'error_rate', - 'num_pred_negatives'] - order = [] - for item in _order_list: - if item in higher_is_better: - order.append(False) - else: - order.append(True) - _df = _df.sort_values(_order_list, ascending=order) - - return _df.values[0] - - -def filter_optimal_results_skyline_formula(_df, _formula): - df = 
pd.DataFrame() - for key in _formula: - df["norm_" + key] = (_df[key] - _df[key].min()) / (_df[key].max() - _df[key].min()) - - df_temp = list(_formula.values()) - keys = list(_formula.keys()) - for col in range(len(keys)): - keys[col] = "norm_" + keys[col] - - # Multiplying with the multiplier to perform sorting operation - df['norm_avg'] = df[keys].multiply(df_temp).sum(axis=1) - frames = [_df, df] - df_fin = pd.concat(frames, axis=1) - - df_fin = df_fin.sort_values(by='norm_avg', ascending=False) - cols = [c for c in df_fin.columns if c[:4] != 'norm'] - df_fin = df_fin[cols] - return df_fin.values[0] \ No newline at end of file From dceaab8324116f04500993737c940bf78b448deb Mon Sep 17 00:00:00 2001 From: Ke Yang Date: Thu, 30 Jul 2020 18:21:06 -0400 Subject: [PATCH 6/7] update for inputs and flexible order --- pipeline/fairprep.py | 666 +++++++----------- .../RS_NS-test.csv | 201 ++++++ .../RS_NS-train.csv | 501 +++++++++++++ .../RS_NS-val.csv | 301 ++++++++ .../RS_NS_NI-test.csv | 201 ++++++ .../RS_NS_NI-train.csv | 501 +++++++++++++ .../RS_NS_NI-val.csv | 301 ++++++++ .../RS_NS_NI_MS-test.csv | 201 ++++++ .../RS_NS_NI_MS-train.csv | 501 +++++++++++++ .../RS_NS_NI_MS-val.csv | 301 ++++++++ .../RS_NS_NI_MS_NB-test.csv | 201 ++++++ .../RS_NS_NI_MS_NB-train.csv | 501 +++++++++++++ .../RS_NS_NI_MS_NB-val.csv | 301 ++++++++ .../RS_NS_NI_MS_NB_HE-test.csv | 201 ++++++ .../RS_NS_NI_MS_NB_HE-train.csv | 501 +++++++++++++ .../RS_NS_NI_MS_NB_HE-val.csv | 301 ++++++++ .../RS_NS_NI_MS_NB_HE_ME-test.csv | 201 ++++++ .../RS_NS_NI_MS_NB_HE_ME-train.csv | 501 +++++++++++++ .../RS_NS_NI_MS_NB_HE_ME-val.csv | 301 ++++++++ .../RS_NS_NI_MS_NB_HE_ME_RW-test.csv | 201 ++++++ .../RS_NS_NI_MS_NB_HE_ME_RW-train.csv | 501 +++++++++++++ .../RS_NS_NI_MS_NB_HE_ME_RW-val.csv | 301 ++++++++ .../RS_NS_NI_MS_NB_HE_ME_RW_OLR-test.csv | 201 ++++++ .../RS_NS_NI_MS_NB_HE_ME_RW_OLR-train.csv | 501 +++++++++++++ .../RS_NS_NI_MS_NB_HE_ME_RW_OLR-val.csv | 301 ++++++++ .../RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-test.csv | 201 ++++++ .../RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-train.csv | 501 +++++++++++++ .../RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-val.csv | 301 ++++++++ .../german_AIF_test.json | 1 + pipeline/model/classifiers.py | 239 ++++++- pipeline/model/fair_classifiers.py | 172 ++++- pipeline/model/inprocessor.py | 74 -- pipeline/postprocess/fair_postprocessors.py | 203 +++++- pipeline/postprocess/postprocessor.py | 59 -- pipeline/preprocess/categorizers.py | 132 +++- pipeline/preprocess/encoders.py | 170 +++-- pipeline/preprocess/fair_preprocessors.py | 166 ++++- pipeline/preprocess/filters.py | 113 +-- pipeline/preprocess/imputers.py | 173 ++++- pipeline/preprocess/preprocessor.py | 71 -- pipeline/preprocess/samplers.py | 132 +++- pipeline/preprocess/scalers.py | 109 ++- pipeline/preprocess/splitters.py | 180 +++-- pipeline/step.py | 78 +- 44 files changed, 10693 insertions(+), 1072 deletions(-) create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS-test.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS-train.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS-val.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI-test.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI-train.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI-val.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS-test.csv create mode 100644 
pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS-train.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS-val.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB-test.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB-train.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB-val.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE-test.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE-train.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE-val.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME-test.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME-train.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME-val.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW-test.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW-train.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW-val.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR-test.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR-train.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR-val.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-test.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-train.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-val.csv create mode 100644 pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/german_AIF_test.json delete mode 100644 pipeline/model/inprocessor.py delete mode 100644 pipeline/postprocess/postprocessor.py delete mode 100644 pipeline/preprocess/preprocessor.py diff --git a/pipeline/fairprep.py b/pipeline/fairprep.py index 5cc8321..a4dcc61 100644 --- a/pipeline/fairprep.py +++ b/pipeline/fairprep.py @@ -2,9 +2,18 @@ Class to run the pipeline """ -import pandas as pd -import numpy as np + + import os +import numpy as np +import pandas as pd + +from time import time +from pathlib import Path +from datetime import datetime +from itertools import islice + + import warnings warnings.filterwarnings("ignore") from pipeline.preprocess.splitters import * @@ -18,41 +27,24 @@ from pipeline.model.fair_classifiers import * from pipeline.postprocess.fair_postprocessors import * -# for integrity check of user inputs -SUPPORT_STEPS = {"Splitter": "Splitter", "Sampler": "Sampler", "Imputer": "Imputer", "Scaler": "Scaler", - "Categorizer": "Categorizer", "Encoder": "Encoder", "SensitiveEncoder": "SensitiveAttEncoder", - "FairPreprocessor": "AIF_", "model": "SK_OPT_", "FairPostprocessor": "AIF_Postprocessing"} - -ALL_STEPS = ["RandomSplitter", "BalanceTargetSplitter", - "RandomSampler", "BalancePopulationSampler", - "DropNAImputer", "ModeImputer", "DatawigImputer", - "SK_StandardScaler", "SK_MinMaxScaler", - "SK_Discretizer", "SK_Binarizer", - "SK_OrdinalEncoder", 
"SK_OneHotEncoder", - "CustomCateAttsEncoder", - "AIF_Reweighing", "AIF_DIRemover", - "SK_LogisticRegression", "SK_DecisionTree", "OPT_LogisticRegression", "OPT_DecisionTree", "AIF_AdversarialDebiasing", - "AIF_EqOddsPostprocessing", "AIF_CalibratedEqOddsPostprocessing"] +import json + PRINT_SPLIT = "\n===================================================\n" +# for integrity check of user inputs +PREREQUISITE_STEPS = {"Splitter": [], "Sampler": [], "Imputer": ["Splitter"], "Scaler": ["Imputer"], + "Categorizer": ["Imputer"], "Encoder": ["Imputer"], "SpecialEncoder": ["Imputer"], + "FairPreprocessor": ["Splitter", "Imputer", "Scaler", "Categorizer", "Encoder", "SpecialEncoder"], + "Model": ["Splitter", "Imputer", "Scaler", "Categorizer", "Encoder", "SensitiveAttEncoder"], + "FairPostprocessor": ["Splitter", "Imputer", "Scaler", "Categorizer", "Encoder", "SpecialEncoder"]} -def init_input_steps(step_tuple, input_df): - step_str = step_tuple[0] + '(input_df, ' - for pi in step_tuple[1:]: - if isinstance(pi, str): - step_str += "'"+pi + "', " - else: - step_str += str(pi) + ", " - end_idx = step_str.rfind(", ") - step_str = step_str[0:end_idx] + step_str[end_idx:].replace(", ", ")") - print(step_str) - return eval(step_str) -class FairnessLabelPipeline(): - def __init__(self, data_file_name, target_col, target_positive_values, sensitive_atts=[], protected_values={}, sep_flag=None, na_mark=None): +class FairPipeline(): + def __init__(self, data_file_name, target_col, target_positive_values, sensitive_atts, value_mapping, sep_flag=None, na_mark=None, verbose=True): """ :param data_file_name: str, file name that stores the data. + :param *fixed_random_seed: int, the fixed random seed that will be used through the pipeline. :param target_col: str, the name of the target variable in above data. :param target_positive_values: list of str, each str is the value of the target variable in above data that represents the positive outcome. :param sensitive_atts: list, stores the user specified sensitive attributes in above data. Optional. @@ -60,129 +52,176 @@ def __init__(self, data_file_name, target_col, target_positive_values, sensitive Key is the name in sensitive_atts, value is a list of str, representing the values of the attribute. Order should mapping to the order in the sensitive_atts. """ - # print(os.path.realpath(data_file_name)) - if not os.path.exists(os.path.realpath(data_file_name)): + + # TODO: move integrity check to a separate function + self.data_file_name = data_file_name + self.target_col = target_col + self.target_positive_values = target_positive_values + self.sensitive_atts = sensitive_atts + self.value_mapping = value_mapping + self.sep_flag = sep_flag + self.na_mark = na_mark + self.verbose = verbose + + self.data_name = None + self.raw_data = None + + self.attributes = None + self.metadata_desp = None + self.num_atts = None + self.cate_atts = None + self.pred_target = "pred_" + target_col + + self.log_path = 'logs/' + self.exec_timestamp = self.generate_timestamp() + self.log_dir_name = None + + def validate_input_parameters(self): + if not os.path.exists(os.path.realpath(self.data_file_name)): print("The data you specified doesn't exist!") raise ValueError - if ".csv" not in data_file_name: + if ".csv" not in self.data_file_name: print("The data you specified is not valid! 
Only support .csv file.") raise ValueError - data_name = data_file_name.replace(".csv", "") + data_name = self.data_file_name.replace(".csv", "") self.data_name = data_name[data_name.rfind("/")+1:] - if sep_flag: - df = pd.read_csv(data_file_name, sep=sep_flag) + + if self.sep_flag: + df = pd.read_csv(self.data_file_name, sep=self.sep_flag) else: # default ',' separated data - df = pd.read_csv(data_file_name) + df = pd.read_csv(self.data_file_name) + if not df.shape[0]: print("Uploaded data is empty!") raise ValueError + # np.random.seed(fixed_random_seed) + # self.fixed_random_seed = fixed_random_seed + # integrity check for target_col - if target_col is None or target_positive_values is None: + if self.target_col is None or self.target_positive_values is None: print("Need to specify target_col and target_positive_value!") raise ValueError - if target_col not in df.columns: + if self.target_col not in df.columns: print("Need to specify a valid target attribute to be predicted!") raise ValueError - target_values = df[target_col].unique() + target_values = df[self.target_col].unique() if len(target_values) != 2: print("Only support binary target feature now!") raise ValueError - if len(set(target_positive_values).intersection(target_values)) == 0: + if len(set(self.target_positive_values).intersection(target_values)) == 0: print("Need to specify a valid target positive value!") raise ValueError - self.target_col = target_col - self.target_positive_values = target_positive_values - # integrity check for sensitive_atts and protected_values - input_indicator = sum([len(x)== 0 for x in [sensitive_atts, protected_values]]) - if input_indicator == 0: # both are specified - if len(sensitive_atts) != len(protected_values): - print("Different size of input sensitive attributes and protected values!") - raise ValueError - if sum([len(set(protected_values[x]).difference(df[x].unique())) > 0 for x in protected_values]) > 0: - print("Some specified protected values do not appear in the column specified in sensitive_atts!") - raise ValueError - elif input_indicator == 1: # one of parameter is empty - print("Need to specify both sensitive_atts and protected_values!") - raise ValueError - else: # both are empty - # TODO: add auto-generation for the below two variables: sensitive_atts and protected_values - # for adult only - sensitive_atts = ["sex", "race"] - protected_values = {"sex": ["Female"], "race": ["Black", "Asian-Pac-Islander", "Amer-Indian-Eskimo", "Other"]} + # TODO: update the below code + # # integrity check for sensitive_atts and protected_values + # input_indicator = sum([len(x)== 0 for x in [self.sensitive_atts, self.protected_values]]) + # if input_indicator == 0: # both are specified + # if len(self.sensitive_atts) != len(self.protected_values): + # print("Different size of input sensitive attributes and protected values!") + # raise ValueError + # if sum([len(set(self.protected_values[x]).difference(df[x].unique())) > 0 for x in self.protected_values]) > 0: + # print("Some specified protected values do not appear in the column specified in sensitive_atts!") + # raise ValueError + # elif input_indicator == 1: # one of parameter is empty + # print("Need to specify both sensitive_atts and protected_values!") + # raise ValueError + # else: # both are empty + # print("Need to specify both sensitive_atts and protected_values!") + # raise ValueError + + self.raw_data = df + self.attributes = df.columns + return self + + def get_meta_information(self): + # infer the numerical and categorical 
+
+    def get_meta_information(self):
+        # infer the numerical and categorical attributes first
+        if self.raw_data.describe().shape[0] == 8:  # DataFrame.describe() usually returns 8 rows.
+            num_atts = set(self.raw_data.describe().columns)
+        else:  # DataFrame.describe() returns less than 8 rows when there is no numerical attribute.
+            num_atts = set()
-        self.sensitive_atts = sensitive_atts
-        # self.protected_values = protected_values
-        self.zero_mapping = {target_col: [x for x in target_values if x not in target_positive_values]}
-        self.zero_mapping.update(protected_values)
-        if na_mark:
-            self.na_mark = na_mark
-        else:
-            self.na_mark = None
+        cate_atts = set(self.attributes).difference(num_atts)
 
-        # refer numerical and categorical attributes first
-        # DataFrame.describe() usually returns 8 rows.
-        if df.describe().shape[0] == 8:
-            num_atts = set(df.describe().columns)
-        # DataFrame.describe() returns less than 8 rows when there is no numerical attribute.
-        else:
-            num_atts = set()
-        cate_atts = set(df.columns).difference(num_atts)
+        self.metadata_desp = {"size": self.raw_data.shape[0], "features": self.raw_data.shape[1],
+                              "categorical features": list(cate_atts), "numerical features": list(num_atts)}
+        # record the domain of the attributes
+        feature_domain = {}
+        for attri in self.attributes:
+            if attri in cate_atts:
+                all_values = self.raw_data[attri].unique()
+                feature_domain[attri] = (len(all_values), list(all_values))
+            else:
+                feature_domain[attri] = (min(self.raw_data[attri]), max(self.raw_data[attri]))
+        self.metadata_desp.update({"domain": feature_domain})
+
+        # remove the sensitive and target attributes from the inferred list since these attributes are treated separately in the pipeline
         if self.target_col in cate_atts:
             cate_atts.remove(self.target_col)
         if self.target_col in num_atts:
             num_atts.remove(self.target_col)
-        for si in sensitive_atts:
+        for si in self.sensitive_atts:
             cate_atts.remove(si)
+        self.num_atts = list(num_atts)
         self.cate_atts = list(cate_atts)
-        self.df = df
-
-        # record the sensitive attributes and target variable value mapping
-        sensi_target_value_mapping = {}
-        for atti in [self.target_col] + self.sensitive_atts:
-            atti_values = list(df[atti].unique())
-            pro_values = self.zero_mapping[atti]
-            if len(pro_values) > 1:
-                pro_value_str = pro_values[0] + "&more"
-            else:
-                pro_value_str = pro_values[0]
-            other_values = list(set(atti_values).difference(pro_values))
-            if len(other_values) > 1:
-                other_value_str = other_values[0] + "&more"
-            else:
-                other_value_str = other_values[0]
-            sensi_target_value_mapping[atti] = {0: pro_value_str, 1: other_value_str}
-        # print(sensi_target_value_mapping)
-        self.sensi_target_value_mapping = sensi_target_value_mapping
-        self.pipeline_id = self.data_name[:2]
-
-    def init_necessary_steps(self, step_flag, apply_df, input_weights=[]):
-        # initialize the necessary steps
-        if step_flag == "Imputer":
-            return DropNAImputer(apply_df, na_mark=self.na_mark)
-        # elif step_flag == "Scaler":
-        #     return SK_StandardScaler(apply_df, list(self.num_atts))
-        elif step_flag == "Encoder":
-            return SK_OneHotEncoder(apply_df, list(self.cate_atts))
-        elif step_flag == "SensitiveEncoder":
-            return CustomCateAttsEncoder(apply_df, self.sensitive_atts+[self.target_col], self.zero_mapping)
-        else:
-            return OPT_LogisticRegression(apply_df, self.target_col, instance_weights=input_weights)
 
-    def print_necessary_steps(self):
-        # for printout and efficiency
-        return {"Imputer": ("DropNAImputer", "?"),
-                # "Scaler": ("SK_StandardScaler", list(self.num_atts)),
-                "Encoder": ("SK_OneHotEncoder", list(self.cate_atts)),
-                "SensitiveEncoder": ("CustomCateAttsEncoder", self.sensitive_atts+[self.target_col], self.zero_mapping),
-                "model": ("OPT_LogisticRegression", self.target_col)}
+        log_dir_name = os.path.join(os.path.dirname(os.path.realpath(__file__)), self.generate_file_path())
+        if not os.path.exists(log_dir_name):
+            os.makedirs(log_dir_name)
+
+        self.log_dir_name = log_dir_name
+        print("**"*10, log_dir_name)
+        # print(self.metadata_desp)
+
+        with open(log_dir_name+"/"+self.data_name+'.json', 'w') as outfile:
+            json.dump(dict(self.metadata_desp), outfile)
+
+        return self
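The type inference above leans on a pandas detail: DataFrame.describe() summarizes only the numeric columns and, whenever at least one exists, returns exactly eight summary rows (count, mean, std, min, 25%, 50%, 75%, max). A minimal sketch of the heuristic, with select_dtypes shown as a more direct equivalent (not what the patch uses):

    import pandas as pd

    df = pd.DataFrame({"month": [6, 12, 36], "housing": ["A152", "A151", "A153"]})
    num_atts = set(df.describe().columns) if df.describe().shape[0] == 8 else set()
    cate_atts = set(df.columns).difference(num_atts)
    assert num_atts == {"month"} and cate_atts == {"housing"}
    # equivalent, and independent of the eight-row detail:
    assert num_atts == set(df.select_dtypes(include="number").columns)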
("CustomCateAttsEncoder", self.sensitive_atts+[self.target_col], self.zero_mapping), - "model": ("OPT_LogisticRegression", self.target_col)} + log_dir_name = os.path.join(os.path.dirname(os.path.realpath(__file__)), self.generate_file_path()) + if not os.path.exists(log_dir_name): + os.makedirs(log_dir_name) + + self.log_dir_name = log_dir_name + print("**"*10, log_dir_name) + # print(self.metadata_desp) + + with open(log_dir_name+"/"+self.data_name+'.json', 'w') as outfile: + json.dump(dict(self.metadata_desp), outfile) + + return self + + def iter_steps(self, steps, input_df): + """ + Generator function to iterate steps. + :param steps: list of objects that represent the steps user want to perform on the input data. + Supported steps are listed in STEPS.md. + :return: the pandas dataframes that are returned by applying a step on the input data. + """ + for idx, stepi in enumerate(islice(steps, 0, len(steps))): + # if the current step is the encoder, feed inputdata's metadata so that the encoded data can have the domain of the raw dataset + stepi.fit(input_df) + return_df = stepi.apply(input_df) + if len(return_df) == 2: # special heck for the step that return weights + return_df = return_df[0] + yield idx, stepi.fitted_step, return_df + + def validate_input_steps(self, steps): + + if not steps or not isinstance(steps, list): + print("Require non-empty list of steps as input!") + raise ValueError + + # TODO: integrity check for steps using pre-requisite order + + return self + def fill_zero_to_dummy_data(self, input_step, df): + raw_df = pd.get_dummies(self.raw_data, columns=self.cate_atts, prefix_sep='=') + if input_step.step_name() in ["FairPreprocessor", "Model", "FairInprocessor", "FairPostprocessor"]: # these steps return non-encoded data + encoded_df = pd.get_dummies(df, columns=self.cate_atts, prefix_sep='=') + else: + encoded_df = df + # print("**" * 10, raw_df.shape, encoded_df.shape) + if raw_df.shape[1] != encoded_df.shape[1]: + for coli in set(raw_df.columns): + if "=" in coli and coli[:coli.find("=")] in self.cate_atts and coli not in encoded_df.columns: + encoded_df.loc[:, coli] = 0 + + # print("**" * 10, raw_df.shape, encoded_df.shape) + return encoded_df - def run_pipeline(self, steps, return_test=True, output_interdata=False): + def run_pipeline(self, steps, save_interdata=False): """ :param df: pandas dataframe, the data on which steps are applied. @@ -191,316 +230,117 @@ def run_pipeline(self, steps, return_test=True, output_interdata=False): :return: two pandas dataframes: before_data and after_data. after_data is the data after applied the input steps. """ + self.validate_input_parameters() + self.get_meta_information() - if not steps: - print("Require list of steps as input!") - raise ValueError + self.validate_input_steps(steps) - if len(steps) < len(SUPPORT_STEPS): - print("Missing some input steps! 
+    def fill_zero_to_dummy_data(self, input_step, df):
+        raw_df = pd.get_dummies(self.raw_data, columns=self.cate_atts, prefix_sep='=')
+        if input_step.step_name() in ["FairPreprocessor", "Model", "FairInprocessor", "FairPostprocessor"]: # these steps return non-encoded data
+            encoded_df = pd.get_dummies(df, columns=self.cate_atts, prefix_sep='=')
+        else:
+            encoded_df = df
+        # print("**" * 10, raw_df.shape, encoded_df.shape)
+        if raw_df.shape[1] != encoded_df.shape[1]:
+            for coli in set(raw_df.columns):
+                if "=" in coli and coli[:coli.find("=")] in self.cate_atts and coli not in encoded_df.columns:
+                    encoded_df.loc[:, coli] = 0
+
+        # print("**" * 10, raw_df.shape, encoded_df.shape)
+        return encoded_df
 
-    def run_pipeline(self, steps, return_test=True, output_interdata=False):
+    def run_pipeline(self, steps, save_interdata=False):
         """
 
         :param steps: list of initialized step objects that are applied to the data in order.
         :param save_interdata: bool, whether to store the intermediate data of every step under the log directory.
@@ -191,316 +230,117 @@
         :return: three pandas dataframes: the train, validation, and test data after the input steps have been applied.
         """
+        self.validate_input_parameters()
+        self.get_meta_information()
 
-        if not steps:
-            print("Require list of steps as input!")
-            raise ValueError
+        self.validate_input_steps(steps)
 
-        if len(steps) < len(SUPPORT_STEPS):
-            print("Missing some input steps! Required steps are listed in the order below.\n"+" ".join(SUPPORT_STEPS.keys()))
-            raise ValueError
-        if sum([len(set(x[0]).intersection(list(SUPPORT_STEPS.values())[idx]))==0 for idx, x in enumerate(steps) if x is not None]) > 0:
-            print("Some input steps are not supported!")
-            raise ValueError
-        if sum([x[0] not in ALL_STEPS for x in steps if x is not None]) > 0:
-            print("Some input steps don't include enough parameters!")
-            raise ValueError
-        if sum([len(x) <=1 for x in steps if x is not None]) > 0:
-            print("Some input steps don't include enough parameters!")
-            raise ValueError
+        # first run the splitter
+        print(self.print_log_message(steps, 0))
+        train_df, val_df, test_df = steps[0].apply(self.raw_data)
+        print(PRINT_SPLIT)
 
-        if steps[-1] is not None and len(steps[0][1])==2: # run fair-postprocessor, then require validation set
-            print("FairPostprocessor requires a validation set! Specify through split_ratio in Splitter!")
-            raise ValueError
+        # for step_idx, train_fitted_step, train_df in self.iter_steps(steps, train_df):
+        for step_idx, stepi in enumerate(steps):
+            if step_idx == 0:
+                continue
 
-        self.pipeline_id = "_".join([self.pipeline_id]+[str(x[0]) for x in steps if x is not None])
+            print(self.print_log_message(steps, step_idx))
 
-        support_step_names = list(SUPPORT_STEPS.keys())
+            # if multiple methods are specified at one step, each is run on the validation data and the one with the optimal validation performance is selected
+            if isinstance(steps[step_idx], list):  # multiple methods specified at one step
+                # TODO: add the selection on validation data
+                pass
 
-        # split the data into separated datasets for train, [validation], and test
-        if steps[0] is None: # default splitter
-            if steps[-1] is not None: # train, validation and test data
-                cur_splitter = BalanceTargetSplitter(self.df, [0.5, 0.3, 0.2], self.target_col)
-            else: # train and test data
-                cur_splitter = BalanceTargetSplitter(self.df, [0.7, 0.3], self.target_col)
-        else:
-            cur_splitter = init_input_steps(steps[0], self.df)
-        after_data = cur_splitter.apply(self.df)
-        after_data = list(after_data)
-        if output_interdata:
-            self.save_inter_data(after_data, cur_splitter.get_name())
-        print("Done "+support_step_names[0]+PRINT_SPLIT)
-        # record the before data to output
-        before_data = [x for x in after_data]
-
-
-        # run sampler on train
-        if steps[1] is not None:
-            for idx_df, cur_df in enumerate(after_data):
-                cur_sampler = init_input_steps(steps[1], cur_df)
-                after_data[idx_df] = cur_sampler.apply(cur_df)
-            if output_interdata:
-                self.save_inter_data(after_data, cur_sampler.get_name(), steps[:1])
-            print("Done "+support_step_names[1]+PRINT_SPLIT)
-
-
-        # run the preprocess steps: "Imputer", "Scaler", "Categorizer" that fit on train and apply on others
-        for idx, step_i in enumerate(steps[2:5]):
-            idx = idx + 2
-            step_i_key = support_step_names[idx]
-            # fit on train data
-            if step_i is None:
-                if step_i_key in list(self.print_necessary_steps().keys()): # add default operation for necessary steps
-                    step_i = self.init_necessary_steps(step_i_key, after_data[0])
-                else: # skip the step
-                    continue
-            else: # user-specified step
-                step_i = init_input_steps(steps[idx], after_data[0])
-            # apply on train, validation and test data
-            for idx_df, cur_df in enumerate(after_data):
-                after_data[idx_df] = step_i.apply(cur_df)
-
-            if output_interdata:
-                self.save_inter_data(after_data, step_i.get_name(), steps[:idx])
-            print("Done " + support_step_names[idx] + PRINT_SPLIT)
-
-        # run the preprocess steps: "Encoder"
-        # fit and apply on the
same data - after_data, encoder_name = self.run_encoder(steps[5], after_data) - if output_interdata: - self.save_inter_data(after_data, encoder_name, steps[:5]) - print("Done " + support_step_names[5] + PRINT_SPLIT) - - # run the preprocess steps: "SensitiveAttEncoder" - # fit and apply on the same data - if steps[6] is None: - for idx_df, cur_df in enumerate(after_data): - cur_sensi_encoder = self.init_necessary_steps("SensitiveEncoder", cur_df) - after_data[idx_df] = cur_sensi_encoder.apply(cur_df) - else: # user-specified sensitive encoder - for idx_df, cur_df in enumerate(after_data): - cur_sensi_encoder = init_input_steps(steps[6], cur_df) - after_data[idx_df] = cur_sensi_encoder.apply(cur_df) - if output_interdata: - self.save_inter_data(after_data, cur_sensi_encoder.get_name(), steps[:6]) - print("Done " + support_step_names[6] + PRINT_SPLIT) - - # run the preprocess steps: "FairPreprocessor" - # fit and apply on the same data - if steps[7] is not None: - weights = [[0 for _ in range(x.shape[0])] for x in after_data] - for idx_df, cur_df in enumerate(after_data): - cur_fair_preprossor = init_input_steps(steps[7], cur_df) - if "AIF_Reweighing" in cur_fair_preprossor.get_name(): # special heck for methods updating sample weight - after_data[idx_df], weights[idx_df] = cur_fair_preprossor.apply(cur_df) - else: - after_data[idx_df] = cur_fair_preprossor.apply(cur_df) - if output_interdata: - self.save_inter_data(after_data, cur_fair_preprossor.get_name(), steps[:7]) - print("Done " + support_step_names[7] + PRINT_SPLIT) - - # after fair-preprocess, rerun encoder - after_data, encoder_name = self.run_encoder(steps[5], after_data) - if output_interdata: - self.save_inter_data(after_data, encoder_name+"_prep", steps[:5]) - print("Done " + support_step_names[5] + " for fair preprocessor "+PRINT_SPLIT) - - # run model step - # fit on train data - if steps[8] is None: - if weights: - cur_model = self.init_necessary_steps("model", after_data[0], input_weights=weights[0]) - else: - cur_model = self.init_necessary_steps("model", after_data[0]) - else: # TODO: add the support for weight in user-specified models - cur_model = init_input_steps(steps[8], after_data[0]) - - # predict on train, validation and test data - for idx_df, cur_df in enumerate(after_data): - after_data[idx_df] = cur_model.apply(cur_df) - if output_interdata: - self.save_inter_data(after_data, cur_model.get_name(), steps[:8]) - print("Done " + support_step_names[8] + PRINT_SPLIT) - - # run fair postprocess step - if steps[9] is not None: - # encode first - after_data, encoder_name = self.run_encoder(steps[5], after_data) - if output_interdata: - self.save_inter_data(after_data, encoder_name+"_post", steps[:5]) - print("Done " + support_step_names[5] + " for fair post processor " + PRINT_SPLIT) - # fit on validation data - cur_postprocessor = init_input_steps(steps[9], after_data[1]) - # predict on validation and test data - for idx_df, cur_df in enumerate(after_data[1:]): - after_data[idx_df+1] = cur_postprocessor.apply(cur_df) - if output_interdata: - self.save_inter_data(after_data, cur_postprocessor.get_name(), steps[:9]) - print("Done " + support_step_names[9] + PRINT_SPLIT) - - # transfer back to original values for encoded sensitive and target columns - for idx, df_i in enumerate(after_data): - for atti in [self.target_col]+ self.sensitive_atts: - df_i[atti] = df_i[atti].apply(lambda x: self.sensi_target_value_mapping[atti][x]) - if "pred_" +self.target_col in df_i.columns: - df_i["pred_" + self.target_col] = df_i["pred_" 
+ self.target_col].apply(lambda x: int(x>=0.5)) - df_i["pred_" +self.target_col] = df_i["pred_" +self.target_col].apply(lambda x: self.sensi_target_value_mapping[self.target_col][x]) - - if return_test: # only return the before and after of test data - return before_data[-1], after_data[-1] - else: # return all before and after data - return before_data, after_data - - def run_encoder(self, encode_step_tuple, data_list): - # run the preprocess steps: "Encoder" - # fit and apply on the same data - if len(self.cate_atts) > 0: - for idx_df, cur_df in enumerate(data_list): - if encode_step_tuple is None: # default encoder - cur_encoder = self.init_necessary_steps(list(SUPPORT_STEPS.keys())[5], cur_df) - else: - - # check for user specified encoder that cover partial categorical atts - non_encoded_cate = set(self.cate_atts).difference(encode_step_tuple[1]) - if non_encoded_cate: - cur_encoder = init_input_steps((encode_step_tuple[0], self.cate_atts), cur_df) - else: - cur_encoder = init_input_steps(encode_step_tuple, cur_df) - data_list[idx_df] = cur_encoder.apply(cur_df) - - # check for different dimensions after encoding for validation and test set - if len(data_list) > 2: - for idx_df, cur_df in enumerate(data_list[1:]): - if cur_df.shape[1] != data_list[0].shape[1]: - for feature_i in set(data_list[0].columns).difference(cur_df.columns): - cur_df[feature_i] = 0.0 - data_list[idx_df] = cur_df.copy() - else: # check the dimensions for train and test set - if data_list[0].shape[1] != data_list[1].shape[1]: - diff_features_1 = set(data_list[0].columns).difference(data_list[1].columns) - diff_features_2 = set(data_list[1].columns).difference(data_list[0].columns) - add_df = data_list[1].copy() - for feature_i in diff_features_1.union(diff_features_2): - if feature_i not in add_df.columns: - add_df[feature_i] = 0.0 - data_list[1] = add_df.copy() - - add_df = data_list[0].copy() - for feature_i in diff_features_1.union(diff_features_2): - if feature_i not in add_df.columns: - add_df[feature_i] = 0.0 - data_list[0] = add_df.copy() - - return data_list, cur_encoder.get_name() - else: - return data_list, "None" + else: # single method specified at a step + stepi.fit(train_df) + train_df = stepi.apply(train_df) - def save_inter_data(self, input_dfs, step_name, pre_steps=[], path="data/inter_data/"): - if len(input_dfs) == 2: - suffix_names = ["train", "test"] - else: - suffix_names = ["train", "validation", "test"] - - for idx, df_i in enumerate(input_dfs): - output_df_i = df_i.copy() - df_path = os.path.realpath(path) + "/" + self.pipeline_id + "/" + suffix_names[idx] + "/" - if not os.path.exists(df_path): - os.makedirs(df_path) - if pre_steps: - pre_step_names = [x[0] for x in pre_steps if x is not None]+[step_name] - else: - pre_step_names = [step_name] + if stepi.fit_only_on_train(): # the steps that fit only on train + val_df = stepi.apply(val_df) + test_df = stepi.apply(test_df) + + else: # the steps that treat train, val, test independently through the same rule + stepi.fit(val_df) + val_df = stepi.apply(val_df) + + stepi.fit(test_df) + test_df = stepi.apply(test_df) - for atti in [self.target_col]+ self.sensitive_atts: - if not isinstance(output_df_i[atti].values[0], str): - output_df_i[atti] = output_df_i[atti].apply(lambda x: self.sensi_target_value_mapping[atti][x]) - if "pred_" +self.target_col in output_df_i.columns: - output_df_i["pred_" + self.target_col] = output_df_i["pred_" + self.target_col].apply(lambda x: int(x >= 0.5)) - output_df_i["pred_" +self.target_col] = 
output_df_i["pred_" +self.target_col].apply(lambda x: self.sensi_target_value_mapping[self.target_col][x]) + # autofill encoded columns after the steps that might affect the dimension of the encoded data. Only for categorical attributes that are not sensitive and target column + if stepi.input_encoded_data() != stepi.output_encoded_data(): + train_df = self.fill_zero_to_dummy_data(stepi, train_df) + val_df = self.fill_zero_to_dummy_data(stepi, val_df) + test_df = self.fill_zero_to_dummy_data(stepi, test_df) - output_name = df_path + "__".join([self.data_name, "after"]+[x[:x.find("@")] for x in pre_step_names]) + ".csv" - print("!!!!!!!", suffix_names[idx], output_df_i.shape, "!!!!!!!") - output_df_i.to_csv(output_name, index=False) - print("Current "+suffix_names[idx]+" data after "+" ".join([x[:x.find("@")] for x in pre_step_names])+" \n Stored in ", output_name) - print() + if save_interdata: # save intermediate data on the disc + for file_i, df_i in zip(["train", "val", "test"], [train_df, val_df, test_df]): + self.save_inter_data(file_i, df_i, steps[:step_idx+1]) + print(PRINT_SPLIT) + + # # TODO: move to other location + # # transfer back to original values for encoded sensitive and target columns + # for dfi in [train_df, val_df, test_df]: + # for atti in [self.target_col] + self.sensitive_atts: + # df_i[atti] = df_i[atti].apply(lambda x: self.sensi_target_value_mapping[atti][x]) + # + # if self.pred_target in df_i.columns: + # # TODO: check whether to keep the line for score prediction + # df_i[self.pred_target] = df_i[self.pred_target].apply(lambda x: int(x >= 0.5)) # for the model that returns a score instead of labels + # df_i[self.pred_target] = df_i[self.pred_target].apply(lambda x: self.sensi_target_value_mapping[self.target_col][x]) + + return train_df, val_df, test_df + + def print_log_message(self, steps, step_idx): + if not self.verbose: + return None + return '(step %d of %d) running %s' % (step_idx + 1, len(steps), steps[step_idx].abbr_name()) + PRINT_SPLIT + + def get_executed_steps_name(self, executed_steps): + return "_".join([x.abbr_name() for x in executed_steps]) + + def generate_file_path(self, file_name=''): + dir_name = '{}__{}/'.format(self.exec_timestamp, self.data_name) + return self.log_path + dir_name + file_name + + def generate_timestamp(self): + return datetime.fromtimestamp(time()).strftime('%Y-%m-%d_%H-%M-%S-%f')[:-3] + + + def save_inter_data(self, file_name, df: pd.DataFrame, executed_steps): + data_file_name = '{}-{}.csv'.format(self.get_executed_steps_name(executed_steps), file_name) + data_file_path = os.path.join(self.log_dir_name, data_file_name) + + df.to_csv(data_file_path, index=False) + if self.verbose: + print ("Data is saved to ", data_file_path) if __name__ == '__main__': - # input_steps = [("BalanceTargetSplitter", [0.5, 0.3, 0.2], "income-per-year"), - # ("RandomSampler", 10000), # sampler - # ("DropNAImputer", "?"), - # ("SK_StandardScaler", ["fnlwgt", "age"]), - # ("SK_Discretizer", ["fnlwgt", "age"], [2, 3]), - # ("SK_OneHotEncoder", ["workclass"]), # encoder - # ("CustomCateAttsEncoder", ["sex", "race", "income-per-year"], {"sex": ["Female"], "race": ["Black"], "income-per-year": ["<=50K"]}), - # ("AIF_DIRemover", "income-per-year", "sex", 0.8), # fair-preprocessor - # ("AIF_AdversarialDebiasing", "income-per-year", "sex"), # test Adversial learning - # ("AIF_CalibratedEqOddsPostprocessing", "income-per-year", "sex") # fair-post-postprocessor - # ] - # cur_pip = FairnessLabelPipeline(data_file, y_col, y_posi, 
 
if __name__ == '__main__':
-    # input_steps = [("BalanceTargetSplitter", [0.5, 0.3, 0.2], "income-per-year"),
-    #                ("RandomSampler", 10000), # sampler
-    #                ("DropNAImputer", "?"),
-    #                ("SK_StandardScaler", ["fnlwgt", "age"]),
-    #                ("SK_Discretizer", ["fnlwgt", "age"], [2, 3]),
-    #                ("SK_OneHotEncoder", ["workclass"]), # encoder
-    #                ("CustomCateAttsEncoder", ["sex", "race", "income-per-year"], {"sex": ["Female"], "race": ["Black"], "income-per-year": ["<=50K"]}),
-    #                ("AIF_DIRemover", "income-per-year", "sex", 0.8), # fair-preprocessor
-    #                ("AIF_AdversarialDebiasing", "income-per-year", "sex"), # test Adversial learning
-    #                ("AIF_CalibratedEqOddsPostprocessing", "income-per-year", "sex") # fair-post-postprocessor
-    #                ]
-    # cur_pip = FairnessLabelPipeline(data_file, y_col, y_posi, sensitive_atts=sensi_atts, protected_values=sensi_pro_valus, na_mark="?")
-    # before_test, after_test = cur_pip.run_pipeline(input_steps, return_test=True, output_interdata=True)
-
-    # data_file = "../data/adult.csv"
-    # y_col = "income-per-year"
-    # y_posi = [">50K"]
-    # na_symbol = "?"
-    # sensi_atts = ["sex", "race"]
-    # sensi_pro_valus = {"sex": ["Female"], "race": ["Black", "Asian-Pac-Islander", "Amer-Indian-Eskimo", "Other"]}
-
-    # data_file = "../data/mylsn_cleaned_2.csv"
-    # y_col = "status"
-    # y_posi = ["Ac"]
-    # na_symbol = "N/A"
-    # sensi_atts = ["sex", "race"]
-    # sensi_pro_valus = {"sex": ["female"], "race": ["black", "hispanic", "native-american", "asian"]}
-
-    #
-    # input_steps = [("BalanceTargetSplitter", [0.7, 0.3], y_col), # splitter
-    #                None, # sampler
-    #                ("DropNAImputer", "?"), # imputer
-    #                None, # scaler
-    #                ("SK_Discretizer", ["age"], [3]), # categorizer
-    #                None, # encoder
-    #                None, # sensitive att and target encoder
-    #                None, # fair-preprocessor
-    #                ("OPT_LogisticRegression", y_col), # model
-    #                None # fair-post-postprocessor
-    #                ]
-
-    # debias_focus_att = "race"
-    # input_steps = [("BalanceTargetSplitter", [0.7, 0.3], y_col),
-    #                ("RandomSampler", 5000), # sampler
-    #                None, # imputer
-    #                None, # scaler
-    #                None,
-    #                None, # encoder
-    #                None,
-    #                None, #("AIF_DIRemover", y_col, debias_focus_att, 1.0), # fair-preprocessor
-    #                ("SK_LogisticRegression", y_col), # model
-    #                None # fair-post-postprocessor
-    #                ]
-
-    data_file = "../data/german_AIF.csv"
+
+    data_file = "../data/german_AIF_test.csv"
     y_col = "credit"
     y_posi = ["good"]
     sensi_atts = ["age", "sex"]
-    sensi_pro_valus = {"age": ["young"], "sex": ["female"]}
-    debias_focus_att = "age"
 
-    fair_steps = [("BalanceTargetSplitter", [0.7, 0.3], y_col),
-                  None, # sampler
-                  None, # ("ModeImputer", [], ["workclass"], "?"), # imputer
-                  None, # scaler
-                  None, # categorizer
-                  None, # encoder
-                  None,
-                  ("AIF_Reweighing", y_col, debias_focus_att), # fair-preprocessor
-                  None, # ("OPT_LogisticRegression", y_col), # model
-                  None # fair-post-postprocessor
-                  ]
-
-    cur_pip = FairnessLabelPipeline(data_file, y_col, y_posi, sensitive_atts=sensi_atts,
-                                    protected_values=sensi_pro_valus)
-    before_test, after_test = cur_pip.run_pipeline(fair_steps, return_test=True, output_interdata=True)
+
+    value_mapping = {"female": 0, "male": 1, "good": 1, "bad": 0, "young": 0, "old": 1}
+
+    debias_focus_att = "sex"
+    global_seed = 0
+
+    numerical_atts = ["month", "credit_amount"]
+    categorical_atts = ["status", "employment", "housing"]
+
+    pipeline = [RandomSplitter([0.5, 0.3, 0.2], global_seed), NoSampler(), NoImputer(), SK_MinMaxScaler(numerical_atts),
+                NoBinarizer(), OneHotEncoder(categorical_atts), MappingEncoder([y_col] + sensi_atts, value_mapping),
+                AIF_Reweighing(y_col, debias_focus_att), OPT_LogisticRegression(y_col, global_seed), NoFairPostprocessor()]
+
+
+
+    cur_pip = FairPipeline(data_file, y_col, y_posi, sensi_atts, value_mapping)
+
+
+    train_now, val_now, test_now = cur_pip.run_pipeline(pipeline, save_interdata=True)
diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS-test.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS-test.csv
new file mode 100644
index 0000000..37c539e
--- /dev/null
+++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS-test.csv
@@ -0,0 +1,201 @@
+status,month,credit_amount,employment,housing,sex,age,credit
+A13,36,5848,A73,A152,male,young,good
+A11,36,2302,A73,A151,male,old,bad
+A12,12,1103,A74,A152,male,old,good
+A14,18,1530,A73,A152,male,old,bad
+A11,15,1721,A72,A152,male,old,good +A14,18,2775,A74,A152,male,old,bad +A13,12,1881,A73,A151,female,old,good +A14,15,3029,A74,A152,male,old,good +A11,12,1657,A73,A152,male,old,good +A12,21,2353,A73,A152,male,old,good +A11,18,2659,A73,A152,male,old,good +A14,12,1592,A74,A152,female,old,good +A13,24,1925,A73,A152,male,old,good +A14,36,11054,A73,A152,male,old,good +A12,24,1967,A75,A152,female,young,good +A12,36,2671,A73,A153,female,old,bad +A14,24,3430,A75,A152,male,old,good +A13,36,4473,A75,A152,male,old,good +A14,12,4675,A72,A151,female,young,good +A14,24,999,A75,A152,male,young,good +A12,48,10961,A74,A152,male,old,bad +A12,12,1318,A75,A152,male,old,good +A11,6,609,A74,A152,female,old,good +A14,21,2993,A73,A152,male,old,good +A14,60,15653,A74,A152,male,young,good +A14,15,3368,A75,A151,male,young,good +A11,48,4788,A74,A152,male,old,good +A11,12,2246,A75,A152,male,old,bad +A11,24,1747,A72,A152,male,young,good +A12,48,5096,A73,A152,female,old,bad +A12,18,884,A75,A152,male,old,bad +A14,18,6070,A75,A152,male,old,good +A11,24,1382,A74,A152,male,old,good +A12,6,753,A73,A152,female,old,good +A14,9,2697,A73,A152,male,old,good +A12,6,590,A72,A152,male,old,good +A12,30,1919,A72,A152,male,old,bad +A11,21,1647,A73,A152,male,old,bad +A12,18,5866,A73,A152,male,old,good +A14,24,5943,A72,A152,female,old,bad +A11,12,727,A72,A152,male,old,bad +A11,24,2812,A75,A151,female,old,good +A11,48,4308,A72,A151,female,young,bad +A11,30,6187,A74,A151,male,young,good +A12,18,1295,A72,A152,female,old,good +A14,36,5711,A75,A152,male,old,good +A14,48,6110,A73,A153,male,old,good +A12,30,8386,A74,A152,male,old,bad +A13,18,1961,A75,A152,female,young,good +A12,12,1158,A73,A152,male,old,good +A12,12,1410,A73,A152,male,old,good +A14,33,7253,A74,A152,male,old,good +A13,6,683,A72,A152,female,old,good +A14,12,1413,A74,A152,male,old,good +A11,36,5371,A73,A152,male,old,good +A12,30,4280,A73,A151,female,old,bad +A11,24,1358,A75,A152,male,old,bad +A13,10,781,A75,A153,male,old,good +A14,4,3380,A74,A152,female,old,good +A12,9,1082,A75,A152,male,old,good +A12,36,2337,A75,A152,male,old,good +A14,7,846,A75,A153,male,old,good +A12,48,8487,A74,A152,female,young,good +A13,15,1905,A75,A151,male,old,good +A12,12,1804,A72,A152,male,old,good +A11,12,652,A75,A151,female,young,good +A12,12,1331,A72,A152,male,young,bad +A14,36,3079,A73,A152,male,old,good +A13,15,2687,A74,A151,male,old,good +A13,12,1330,A72,A152,male,old,good +A11,12,1168,A73,A152,male,old,good +A11,36,3446,A75,A152,male,old,bad +A12,9,1199,A74,A152,female,old,good +A11,12,741,A71,A152,female,young,bad +A12,18,7374,A71,A152,male,old,good +A14,18,2169,A73,A152,male,old,bad +A14,6,1338,A73,A152,male,old,good +A11,24,6419,A75,A153,female,old,good +A11,15,975,A73,A152,male,young,good +A14,24,2684,A73,A152,male,old,good +A14,24,4526,A73,A152,male,old,good +A12,12,951,A72,A151,female,old,bad +A12,54,15945,A72,A151,male,old,bad +A14,15,3556,A73,A152,male,old,good +A14,4,1455,A74,A152,male,old,good +A12,24,1935,A75,A152,male,old,bad +A12,12,3124,A72,A152,male,old,good +A14,6,1543,A73,A152,male,old,good +A11,24,2964,A75,A153,male,old,good +A11,18,1442,A74,A153,male,old,bad +A14,60,13756,A75,A153,male,old,good +A12,20,6148,A75,A152,male,old,good +A14,24,6313,A75,A152,male,old,good +A11,12,7865,A75,A153,male,old,bad +A13,12,1297,A73,A151,male,young,good +A11,18,2462,A73,A152,male,young,bad +A14,18,6761,A73,A151,male,old,bad +A14,36,9572,A72,A152,male,old,bad +A14,36,7409,A75,A152,male,old,good +A11,36,6887,A73,A152,male,old,bad +A14,24,3621,A75,A152,male,old,bad 
+A11,6,1352,A71,A151,female,young,good +A13,12,1480,A71,A153,male,old,good +A14,18,1028,A73,A152,female,old,good +A11,12,902,A74,A151,male,young,bad +A11,10,2315,A75,A152,male,old,good +A14,9,1980,A72,A151,female,young,bad +A12,24,1553,A74,A151,female,young,good +A13,10,1240,A75,A153,female,old,bad +A12,12,1223,A75,A151,male,old,bad +A11,21,2606,A72,A151,female,old,good +A11,12,1409,A75,A152,male,old,good +A14,18,1568,A73,A151,female,young,good +A14,5,3448,A74,A152,male,old,good +A12,18,3872,A71,A152,female,old,good +A12,12,2366,A74,A152,male,old,good +A14,36,7678,A74,A152,female,old,good +A12,36,9398,A72,A151,male,old,bad +A14,36,9566,A73,A152,female,old,good +A11,24,1938,A72,A152,male,old,bad +A11,12,2149,A73,A153,male,old,bad +A14,36,6614,A75,A152,male,old,good +A14,12,2859,A71,A152,male,old,good +A12,20,2629,A73,A152,male,old,good +A12,18,6204,A73,A152,male,old,good +A11,6,3676,A73,A151,male,old,good +A12,15,2326,A73,A152,male,old,good +A11,6,3384,A73,A151,male,old,bad +A12,48,3060,A74,A152,male,old,bad +A13,6,1299,A73,A152,male,old,good +A14,10,1418,A73,A151,male,old,good +A14,12,996,A74,A152,female,young,good +A14,20,3485,A72,A152,male,old,good +A14,18,3378,A73,A152,male,old,good +A14,9,1224,A73,A152,male,old,good +A13,9,745,A73,A152,female,old,bad +A12,12,1860,A71,A152,male,old,good +A11,20,4272,A75,A152,female,young,good +A12,7,2415,A73,A152,male,old,good +A14,24,3777,A73,A152,male,old,good +A12,30,2181,A75,A152,male,old,good +A14,18,4594,A72,A152,male,old,good +A11,12,759,A74,A152,male,old,bad +A14,30,4530,A74,A151,female,old,good +A14,10,1924,A73,A152,male,old,good +A14,12,3447,A73,A152,female,old,good +A12,60,14027,A74,A152,male,old,bad +A13,18,2864,A73,A152,male,old,bad +A14,12,1255,A75,A152,male,old,good +A11,42,4370,A74,A152,male,old,bad +A14,12,926,A71,A152,female,old,good +A12,15,802,A75,A152,male,old,bad +A12,12,1092,A73,A152,female,old,good +A14,21,1591,A74,A152,male,old,good +A14,36,4454,A73,A152,female,old,good +A14,12,682,A74,A152,female,old,good +A12,18,6361,A75,A152,male,old,good +A14,12,1258,A72,A151,female,young,good +A12,27,2520,A73,A152,male,young,bad +A11,6,338,A75,A152,male,old,good +A12,20,7057,A74,A151,male,old,good +A11,20,2235,A73,A151,male,old,bad +A13,12,1424,A75,A152,female,old,good +A12,24,11328,A73,A152,male,old,bad +A14,24,1249,A72,A152,male,old,good +A12,9,2118,A73,A152,male,old,good +A11,24,1285,A74,A151,female,old,bad +A14,48,3578,A75,A152,male,old,good +A14,10,2848,A73,A152,male,old,good +A11,18,3509,A74,A152,female,young,good +A11,8,1164,A75,A153,male,old,good +A12,21,3976,A74,A152,male,old,good +A12,18,3612,A75,A152,female,old,good +A12,72,5595,A73,A152,male,young,bad +A11,12,709,A75,A152,male,old,bad +A11,36,8229,A73,A152,male,old,bad +A11,21,3414,A72,A152,male,old,bad +A12,36,12612,A73,A153,male,old,bad +A14,6,1750,A75,A152,male,old,good +A14,12,3077,A73,A152,male,old,good +A14,15,5324,A75,A153,female,old,good +A13,4,1494,A72,A152,male,old,good +A11,12,2578,A71,A153,female,old,good +A12,7,2329,A72,A152,female,old,good +A11,9,654,A73,A152,male,old,bad +A14,36,8133,A73,A152,female,old,good +A14,24,3863,A73,A153,male,old,good +A12,12,7472,A71,A151,female,young,good +A14,12,1555,A75,A153,male,old,bad +A11,12,1262,A75,A152,male,old,good +A12,9,790,A73,A152,female,old,good +A12,30,5234,A71,A152,male,old,bad +A11,30,2406,A74,A151,female,young,bad +A12,12,2969,A72,A151,female,young,bad +A14,21,12680,A75,A153,male,old,bad +A11,12,1082,A73,A152,male,old,bad +A12,27,3915,A73,A152,male,old,bad +A14,9,3832,A75,A152,male,old,good 
+A12,18,1928,A72,A152,male,old,bad +A12,36,9857,A74,A152,male,old,good diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS-train.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS-train.csv new file mode 100644 index 0000000..1045c8d --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS-train.csv @@ -0,0 +1,501 @@ +status,month,credit_amount,employment,housing,sex,age,credit +A11,36,3959,A71,A152,male,old,good +A14,9,3577,A73,A151,male,old,good +A14,18,2515,A73,A152,male,old,good +A12,12,1995,A72,A152,male,old,good +A14,60,10366,A75,A152,male,old,good +A14,24,7393,A73,A152,male,old,good +A13,12,409,A73,A151,female,old,good +A14,9,2507,A75,A153,male,old,good +A14,30,4811,A74,A151,female,young,good +A12,48,6560,A74,A152,male,young,bad +A12,36,9034,A72,A151,male,old,bad +A11,24,3552,A74,A152,male,old,bad +A12,20,6468,A71,A152,male,old,good +A11,30,2522,A75,A152,male,old,good +A12,36,2820,A72,A152,male,old,bad +A14,6,783,A73,A152,male,old,good +A11,21,3763,A74,A152,male,young,good +A14,30,7485,A71,A152,female,old,bad +A11,15,1403,A73,A151,female,old,good +A12,11,4771,A74,A152,male,old,good +A11,24,4020,A73,A152,male,old,good +A12,24,1201,A72,A152,male,old,good +A12,48,5381,A71,A153,male,old,good +A14,24,5511,A73,A152,male,young,good +A14,18,1098,A71,A152,female,old,good +A12,48,6224,A75,A153,male,old,bad +A14,11,1393,A72,A152,female,old,good +A12,9,1919,A74,A151,male,old,good +A14,15,5045,A75,A152,female,old,good +A12,48,5951,A73,A152,female,young,bad +A11,8,731,A75,A152,male,old,good +A14,18,629,A75,A152,male,old,good +A14,18,1808,A74,A152,female,young,bad +A12,21,1188,A75,A152,female,old,bad +A14,24,3235,A75,A152,male,old,good +A12,24,11560,A73,A151,female,young,bad +A12,9,1154,A75,A152,male,old,good +A14,18,3850,A74,A152,male,old,good +A11,24,2359,A71,A152,male,old,bad +A14,24,1533,A72,A152,female,old,good +A13,36,4210,A73,A152,male,old,bad +A11,30,3857,A73,A152,male,old,good +A14,12,1393,A75,A152,male,old,good +A14,18,1800,A73,A152,male,young,good +A13,12,2251,A73,A152,female,old,good +A12,12,836,A72,A152,female,young,bad +A11,18,1345,A73,A152,male,old,bad +A12,36,4795,A72,A152,female,old,good +A11,12,708,A73,A152,male,old,good +A11,12,1289,A73,A152,male,young,good +A14,18,1864,A73,A152,female,old,bad +A12,48,9960,A72,A152,female,old,bad +A11,18,5302,A75,A153,male,old,good +A11,6,1872,A71,A153,male,old,good +A11,24,2439,A72,A152,female,old,bad +A14,9,1388,A73,A151,female,old,good +A12,48,12169,A71,A153,male,old,good +A14,18,2662,A74,A152,male,old,good +A14,13,1409,A71,A152,female,old,good +A11,15,3643,A75,A152,female,old,good +A11,48,6758,A73,A152,female,old,bad +A14,24,1851,A74,A152,male,old,good +A11,27,3416,A73,A152,male,old,good +A14,27,5117,A74,A152,male,old,good +A14,10,1597,A73,A151,male,old,good +A14,6,1382,A73,A152,female,old,good +A14,10,1231,A75,A152,male,old,good +A12,15,2631,A73,A151,female,old,bad +A12,30,2150,A73,A152,female,young,bad +A12,12,2930,A74,A152,female,old,good +A14,6,6761,A74,A152,male,old,good +A12,21,2745,A74,A152,male,old,good +A11,24,2325,A74,A152,male,old,good +A14,6,1204,A73,A151,male,old,good +A11,18,1049,A72,A151,female,young,good +A14,12,2292,A71,A152,male,old,bad +A14,9,1449,A74,A152,female,old,good +A11,18,7511,A75,A153,male,old,bad +A12,9,918,A73,A152,female,old,bad +A14,24,2346,A74,A152,male,old,good +A11,24,1603,A75,A152,female,old,good +A11,11,3905,A73,A151,male,old,good +A14,18,1126,A72,A151,female,young,good +A13,21,2923,A73,A152,female,old,good +A12,9,3195,A73,A152,female,old,good 
+A11,6,4716,A72,A152,male,old,good +A12,6,14555,A71,A152,male,young,bad +A11,36,5493,A75,A153,male,old,good +A14,24,1585,A74,A152,male,old,good +A11,12,3590,A73,A152,male,old,good +A14,12,2759,A75,A152,male,old,good +A11,12,2122,A73,A151,male,old,good +A12,12,2002,A74,A151,male,old,good +A14,24,1901,A73,A151,male,old,good +A14,12,1493,A72,A152,female,old,good +A12,6,1209,A71,A152,male,old,bad +A12,48,3979,A74,A152,male,old,good +A11,24,7721,A72,A152,female,old,good +A11,18,3650,A72,A151,female,young,good +A11,18,1880,A74,A152,male,old,good +A12,18,1941,A73,A152,male,old,good +A14,15,874,A72,A152,female,young,good +A14,27,5190,A75,A152,male,old,good +A14,36,10477,A75,A153,male,old,good +A12,24,4351,A73,A152,female,old,good +A14,24,1376,A74,A152,female,old,good +A12,30,3496,A73,A152,male,old,good +A11,40,5998,A73,A152,male,old,bad +A11,28,4006,A73,A152,male,old,bad +A14,36,3342,A75,A152,male,old,good +A12,18,2622,A73,A152,male,old,good +A12,36,2384,A72,A151,male,old,bad +A11,24,1659,A72,A151,female,old,bad +A14,24,3488,A74,A152,female,young,good +A12,60,7418,A73,A152,male,old,good +A14,18,2238,A73,A152,female,young,good +A11,24,1333,A71,A153,male,old,bad +A14,22,1283,A74,A151,female,young,good +A14,6,3518,A73,A151,male,old,good +A12,12,585,A73,A151,male,young,good +A12,39,4933,A74,A152,male,young,bad +A14,24,1278,A75,A152,male,old,good +A14,12,1768,A73,A151,male,young,good +A11,24,3349,A72,A153,male,old,bad +A12,9,1391,A73,A152,male,old,good +A14,18,3422,A75,A152,male,old,good +A14,10,2146,A72,A151,female,young,good +A14,15,1520,A75,A152,male,old,good +A14,24,6842,A73,A152,male,old,good +A14,18,2051,A72,A152,male,old,good +A11,24,2996,A73,A152,male,young,bad +A11,24,3660,A73,A152,female,old,good +A11,24,1442,A74,A151,female,young,bad +A11,9,1138,A73,A152,male,young,good +A14,30,5771,A74,A152,female,young,good +A11,18,2600,A73,A153,male,old,bad +A13,24,3749,A72,A152,female,old,good +A14,12,3059,A74,A152,male,old,good +A11,6,662,A72,A152,male,old,good +A11,18,1940,A72,A153,male,old,good +A12,36,2323,A74,A151,male,young,good +A12,48,12204,A73,A152,male,old,good +A14,24,5507,A75,A153,male,old,good +A14,18,433,A71,A151,female,young,bad +A14,6,4611,A72,A152,female,old,bad +A12,12,2762,A75,A152,female,young,bad +A14,30,4272,A73,A152,male,old,good +A12,39,11760,A74,A151,male,old,good +A12,36,2225,A75,A153,male,old,bad +A13,12,1474,A72,A152,female,old,good +A14,12,1402,A74,A151,female,old,good +A12,18,1913,A72,A152,male,old,good +A14,14,802,A73,A152,male,old,good +A13,15,1271,A73,A153,male,old,bad +A13,24,1258,A73,A152,female,old,good +A14,12,3565,A72,A152,male,old,good +A11,12,3386,A75,A153,male,old,bad +A13,12,2247,A73,A152,female,old,good +A14,24,5804,A73,A152,male,old,good +A14,15,960,A74,A152,female,old,good +A12,18,1056,A75,A152,male,old,bad +A13,42,6289,A72,A152,male,old,good +A14,9,2753,A75,A152,male,old,good +A14,10,2210,A73,A151,male,young,bad +A11,48,10297,A74,A153,male,old,bad +A12,36,14318,A75,A153,male,old,bad +A12,30,4249,A71,A152,male,old,bad +A14,24,929,A74,A152,male,old,good +A12,9,959,A73,A152,female,old,bad +A14,21,2782,A74,A152,female,old,good +A12,48,8358,A72,A152,female,old,good +A14,9,2301,A72,A151,female,young,good +A12,48,3566,A74,A152,male,old,good +A11,18,3966,A75,A151,female,old,bad +A14,6,1237,A73,A152,female,old,good +A11,18,976,A72,A152,female,young,bad +A11,24,915,A75,A152,female,old,bad +A13,24,947,A74,A153,male,old,bad +A12,24,2825,A74,A152,male,old,good +A14,21,2288,A72,A152,female,young,good +A14,10,2901,A72,A151,female,old,good +A12,45,4576,A71,A152,male,old,good 
+A14,48,7238,A75,A152,male,old,good +A11,36,2746,A75,A152,male,old,bad +A12,24,1743,A75,A152,male,old,good +A11,9,1366,A72,A151,female,young,bad +A14,18,1505,A73,A153,male,old,good +A11,6,343,A72,A152,female,old,good +A14,24,2223,A75,A152,male,old,good +A14,12,1495,A75,A152,male,old,good +A13,15,2327,A72,A152,female,young,bad +A11,45,11816,A75,A151,male,old,bad +A12,12,1484,A73,A152,male,young,bad +A14,36,9055,A73,A153,male,old,good +A14,12,1655,A75,A152,male,old,good +A14,18,1453,A72,A152,female,old,good +A11,12,1228,A73,A152,female,young,bad +A12,6,932,A74,A152,female,old,good +A12,36,8086,A75,A152,male,old,bad +A14,6,1346,A75,A153,male,old,good +A12,36,2862,A75,A153,male,old,good +A11,24,3123,A72,A152,female,old,bad +A12,60,7408,A72,A152,female,young,bad +A12,15,1264,A73,A151,male,young,bad +A14,6,1554,A74,A151,female,young,good +A12,21,3652,A74,A152,male,old,good +A11,48,3931,A74,A153,male,old,bad +A11,6,1361,A72,A152,male,old,good +A11,6,1203,A75,A152,male,old,good +A14,12,3527,A72,A152,male,old,good +A14,36,7855,A73,A152,female,young,bad +A11,21,3357,A72,A152,female,old,good +A13,18,2100,A73,A152,male,old,bad +A14,18,1169,A73,A152,male,old,good +A14,36,8947,A74,A152,male,old,good +A11,18,1936,A74,A151,male,young,good +A12,24,3512,A74,A152,male,old,good +A12,24,6967,A74,A151,male,old,good +A14,30,2333,A75,A152,male,old,good +A11,18,2039,A73,A151,female,young,bad +A11,36,9271,A74,A152,male,young,bad +A14,21,1572,A75,A152,female,old,good +A12,6,454,A72,A152,male,young,good +A14,36,6304,A75,A152,male,old,good +A14,24,2197,A74,A152,male,old,good +A14,48,7629,A75,A152,male,old,good +A13,10,1275,A72,A152,female,young,good +A11,48,7119,A73,A153,male,old,bad +A12,24,12579,A75,A153,female,old,bad +A12,18,2779,A73,A151,male,young,good +A14,4,1503,A74,A152,male,old,good +A12,8,1237,A73,A152,female,young,bad +A14,48,3914,A73,A152,male,old,bad +A14,24,3062,A75,A151,male,old,good +A12,36,1953,A75,A153,male,old,bad +A12,30,3832,A72,A152,male,young,good +A14,6,1538,A72,A152,female,old,good +A14,6,2108,A74,A151,male,old,good +A11,6,428,A75,A152,female,old,good +A12,36,4455,A73,A152,male,old,bad +A13,21,2319,A72,A151,male,old,bad +A14,12,1163,A73,A152,male,old,good +A14,9,936,A75,A152,male,old,good +A11,18,2473,A71,A152,male,young,bad +A14,15,1569,A75,A152,male,old,good +A12,12,3017,A72,A151,female,old,good +A14,12,2331,A75,A152,male,old,good +A14,12,2096,A74,A152,male,old,good +A12,12,1037,A74,A152,male,old,good +A14,6,1236,A73,A151,male,old,good +A14,24,4151,A73,A152,male,old,good +A14,48,3609,A73,A152,female,old,good +A14,12,1412,A73,A152,female,old,good +A11,11,3939,A73,A152,male,old,good +A11,12,1526,A75,A153,male,old,good +A14,22,2675,A75,A152,male,old,good +A12,30,4221,A73,A152,female,old,good +A11,24,2924,A73,A152,male,old,good +A14,18,1817,A73,A152,female,old,good +A13,10,1225,A73,A152,male,old,good +A11,15,1845,A72,A151,female,old,good +A14,6,250,A73,A152,female,old,good +A14,6,660,A74,A151,male,young,good +A13,6,709,A72,A152,male,old,good +A11,36,15857,A71,A152,male,old,good +A12,24,1837,A74,A153,female,old,bad +A11,13,1797,A72,A152,male,old,good +A11,6,448,A72,A152,female,young,bad +A12,18,3213,A72,A151,male,young,good +A11,12,2579,A72,A152,male,old,bad +A11,6,1198,A75,A153,female,old,bad +A14,21,5003,A73,A152,female,old,bad +A12,24,2333,A72,A152,male,old,good +A14,15,1262,A74,A152,male,old,good +A12,12,1155,A75,A152,male,old,good +A14,10,894,A74,A152,female,old,good +A14,12,3331,A75,A152,male,old,good +A12,18,1113,A73,A152,female,old,good +A14,12,719,A75,A152,male,old,bad 
+A12,24,2064,A71,A152,female,old,bad +A13,30,3656,A75,A152,male,old,good +A11,24,6615,A71,A153,male,old,good +A14,18,3229,A71,A152,male,old,good +A14,15,3343,A73,A153,male,old,good +A11,30,3108,A72,A152,male,old,bad +A12,9,458,A73,A152,male,young,good +A14,15,1471,A73,A153,male,old,good +A12,11,1322,A73,A152,female,old,good +A11,9,1288,A75,A152,male,old,good +A14,12,707,A73,A152,male,old,good +A12,15,1308,A75,A152,male,old,good +A14,9,3074,A73,A152,male,old,good +A11,15,2511,A71,A151,female,young,good +A14,6,1740,A75,A151,male,old,good +A14,12,1123,A73,A151,female,old,bad +A12,11,1577,A72,A152,female,young,good +A11,45,1845,A73,A153,male,young,bad +A14,24,2028,A74,A152,male,old,good +A12,27,8318,A75,A153,female,old,bad +A12,12,1295,A72,A151,female,young,bad +A12,24,11938,A73,A152,male,old,bad +A14,36,1819,A73,A153,male,old,bad +A11,15,1478,A75,A152,male,old,good +A14,24,1516,A73,A152,female,old,good +A11,9,2136,A73,A152,male,young,good +A14,15,3812,A72,A152,female,young,good +A14,18,4165,A73,A152,male,old,bad +A12,24,4057,A74,A152,male,old,bad +A14,48,10127,A73,A153,male,old,bad +A12,6,484,A74,A152,male,old,good +A12,60,6288,A73,A153,male,old,bad +A11,24,6872,A72,A152,male,old,bad +A14,15,1532,A73,A152,female,old,good +A14,9,2134,A73,A152,male,old,good +A11,12,1107,A73,A151,male,young,good +A11,20,2212,A74,A152,male,old,good +A14,15,3186,A74,A151,female,young,good +A11,47,10722,A72,A152,female,old,good +A14,7,730,A75,A151,male,old,good +A14,12,2073,A73,A152,female,old,good +A11,21,571,A75,A152,male,old,good +A13,15,2360,A73,A152,male,old,good +A11,12,1344,A73,A152,male,old,good +A11,42,7174,A74,A152,female,old,bad +A13,42,4796,A75,A153,male,old,good +A11,42,3965,A72,A152,male,old,bad +A14,11,7228,A73,A152,male,old,good +A12,24,1216,A72,A152,male,old,bad +A14,12,2748,A75,A153,female,old,good +A12,42,9283,A71,A153,male,old,good +A14,27,4526,A72,A152,male,old,good +A14,21,5248,A73,A152,male,old,good +A14,24,3181,A72,A152,female,old,good +A14,24,717,A75,A152,male,old,good +A14,12,1262,A73,A152,male,young,good +A14,12,1884,A75,A152,male,old,good +A12,18,1924,A72,A151,female,old,bad +A11,48,7476,A74,A153,male,old,good +A13,12,939,A74,A152,male,old,bad +A12,12,841,A74,A151,female,young,good +A12,24,4113,A72,A151,female,old,bad +A13,24,5152,A74,A152,male,young,good +A11,33,4281,A73,A152,female,young,bad +A12,12,1567,A73,A152,female,young,good +A11,12,339,A75,A152,male,old,good +A11,15,1275,A73,A151,female,young,bad +A12,12,639,A73,A152,male,old,bad +A14,36,2299,A75,A152,male,old,good +A11,24,1823,A71,A152,male,old,bad +A11,48,7763,A75,A153,male,old,bad +A14,6,1595,A74,A152,male,old,good +A11,12,385,A74,A152,female,old,good +A14,12,776,A73,A152,male,old,good +A12,24,3069,A75,A153,male,old,good +A11,9,1364,A74,A152,male,old,good +A14,6,1221,A73,A152,male,old,good +A11,8,3398,A74,A152,male,old,good +A11,18,4153,A73,A152,male,old,bad +A12,15,1778,A72,A151,female,old,bad +A14,9,1236,A72,A151,female,young,good +A14,6,1766,A73,A151,male,young,good +A11,30,8072,A72,A152,male,young,good +A12,9,5129,A75,A153,female,old,bad +A14,24,8648,A72,A152,male,old,bad +A11,24,4169,A73,A152,male,old,good +A14,12,1935,A75,A152,male,old,good +A14,24,2397,A75,A152,male,old,bad +A14,15,1459,A73,A152,female,old,good +A12,12,685,A74,A152,male,young,bad +A11,48,4605,A75,A153,male,young,bad +A14,11,2142,A75,A152,male,old,good +A11,15,950,A75,A151,male,old,bad +A12,16,1175,A71,A153,male,old,good +A11,12,1498,A73,A152,female,young,good +A14,18,1984,A73,A153,male,old,good +A12,36,3804,A73,A152,female,old,bad 
+A11,24,1987,A73,A151,male,young,bad +A14,24,4139,A73,A152,male,old,good +A14,12,701,A73,A152,male,old,good +A14,54,9436,A73,A152,male,old,good +A11,18,3190,A73,A152,female,young,bad +A12,13,882,A72,A152,male,young,good +A12,24,2760,A75,A153,male,old,good +A14,15,2221,A73,A151,female,young,good +A11,12,626,A73,A152,female,young,bad +A12,48,18424,A73,A152,female,old,bad +A11,18,1217,A73,A152,male,old,bad +A12,7,2576,A73,A152,male,old,good +A11,6,2647,A73,A151,male,old,good +A14,27,2570,A73,A151,female,young,bad +A13,24,3148,A73,A152,male,old,good +A11,6,666,A74,A152,female,old,good +A14,12,1185,A73,A152,female,old,good +A14,30,5954,A74,A152,male,old,good +A12,45,4746,A72,A152,male,young,bad +A14,15,1213,A75,A152,male,old,good +A14,6,1898,A73,A152,male,old,good +A14,48,11590,A73,A151,female,young,bad +A14,4,601,A72,A151,female,young,good +A12,18,1245,A73,A152,male,old,bad +A14,36,5742,A74,A152,male,old,good +A12,14,1410,A75,A152,male,old,good +A11,12,900,A73,A152,male,young,bad +A14,6,362,A73,A152,female,old,good +A14,15,1979,A75,A152,male,old,good +A14,36,3349,A73,A152,female,old,bad +A14,12,797,A75,A152,female,old,bad +A14,6,518,A73,A152,female,old,good +A11,24,2303,A75,A152,male,old,bad +A11,36,2712,A75,A152,male,old,bad +A14,24,1474,A72,A152,male,old,good +A14,18,1820,A73,A152,male,old,good +A14,24,1927,A73,A152,female,old,good +A11,15,1433,A73,A151,female,young,good +A12,12,958,A74,A152,male,old,good +A14,36,3835,A75,A152,female,old,good +A14,12,763,A73,A152,female,old,good +A14,6,2978,A73,A152,male,old,good +A14,12,1291,A73,A152,female,old,good +A11,12,674,A74,A152,male,young,bad +A14,60,6527,A73,A153,male,old,good +A14,12,976,A75,A152,male,old,good +A13,10,3949,A72,A152,male,old,good +A12,10,1048,A73,A152,male,young,good +A14,24,2670,A75,A152,male,old,good +A11,30,4583,A73,A152,male,old,good +A14,24,2375,A73,A152,male,old,good +A11,12,1282,A73,A151,female,young,bad +A14,24,3757,A75,A153,female,old,good +A12,24,4241,A73,A152,male,old,bad +A11,24,6579,A71,A153,male,old,good +A11,12,2121,A73,A152,male,old,good +A14,12,2171,A72,A152,female,old,good +A14,15,3594,A72,A152,female,old,good +A14,36,4686,A73,A153,male,old,good +A14,15,4657,A73,A152,male,old,good +A11,14,8978,A75,A152,male,old,bad +A14,18,1055,A72,A152,female,old,good +A11,48,3051,A73,A152,male,old,bad +A12,30,3441,A73,A151,female,young,bad +A14,24,5150,A75,A152,male,old,good +A14,12,1963,A74,A151,male,old,good +A12,18,3590,A71,A152,male,old,good +A14,36,5842,A75,A152,male,old,good +A14,24,1469,A75,A151,male,old,good +A14,24,2603,A73,A151,female,old,good +A11,12,3651,A73,A152,male,old,good +A11,24,3149,A72,A153,male,young,good +A11,24,4110,A75,A151,male,young,bad +A12,9,1437,A74,A152,male,old,bad +A13,24,1377,A75,A153,female,old,good +A11,9,1422,A72,A153,male,old,bad +A11,30,3622,A75,A151,female,old,good +A11,24,3345,A75,A151,male,old,bad +A12,12,625,A72,A152,male,old,good +A12,12,983,A72,A151,female,young,good +A12,30,2991,A75,A152,female,young,good +A14,18,1149,A73,A152,male,old,good +A12,9,276,A73,A151,male,young,good +A11,24,1371,A73,A151,female,young,bad +A11,12,795,A72,A152,female,old,bad +A11,24,1282,A73,A152,female,old,bad +A12,6,368,A75,A152,male,old,good +A14,24,1258,A74,A152,male,young,good +A14,15,2186,A74,A151,female,old,good +A14,24,2835,A75,A152,male,old,good +A11,24,3632,A73,A151,female,young,good +A14,24,1311,A74,A152,male,old,good +A11,36,2348,A73,A152,male,old,good +A12,6,433,A72,A151,female,young,bad +A11,15,3959,A73,A152,female,old,bad +A12,15,2631,A73,A152,female,young,good 
+A14,12,2445,A72,A151,male,old,good +A11,10,1038,A74,A152,male,old,good +A14,48,8858,A74,A153,male,old,good +A13,24,1275,A73,A152,male,old,good +A11,24,6568,A73,A152,male,young,good +A13,24,1344,A74,A152,male,old,bad +A12,27,5965,A75,A152,male,old,good +A14,12,618,A75,A152,male,old,good +A14,10,727,A75,A153,male,old,good +A12,15,1444,A72,A152,male,young,good +A11,36,3620,A73,A152,male,old,good +A11,10,2241,A72,A151,male,old,good +A12,60,14782,A75,A153,female,old,bad +A14,24,2022,A73,A152,female,old,good +A13,12,609,A72,A152,female,old,bad +A11,24,3021,A73,A151,male,young,good +A11,18,4380,A73,A152,male,old,good +A14,10,1309,A73,A152,male,old,bad +A11,24,2828,A73,A152,male,young,good +A12,18,1795,A75,A151,female,old,good +A14,18,1533,A72,A152,male,old,bad +A14,24,5103,A72,A153,male,old,good +A11,30,10623,A75,A153,male,old,good +A14,12,1386,A73,A152,female,old,bad +A14,24,4591,A73,A152,male,old,bad +A13,12,3399,A75,A152,male,old,good +A13,6,1047,A73,A152,female,old,good +A11,48,7685,A74,A151,female,old,bad diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS-val.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS-val.csv new file mode 100644 index 0000000..e0b67a7 --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS-val.csv @@ -0,0 +1,301 @@ +status,month,credit_amount,employment,housing,sex,age,credit +A11,18,1553,A73,A152,male,old,bad +A14,15,1360,A73,A152,male,old,good +A12,8,760,A74,A152,female,old,good +A12,18,2899,A75,A152,male,old,good +A12,45,3031,A73,A151,male,young,bad +A12,24,4712,A73,A152,male,old,good +A12,18,1887,A73,A152,male,old,good +A12,36,2273,A74,A152,male,old,good +A12,12,888,A75,A152,male,old,bad +A11,12,1620,A73,A152,female,old,good +A14,15,2708,A72,A152,male,old,good +A12,18,866,A73,A152,male,young,good +A12,8,1414,A73,A152,male,old,good +A14,9,1313,A75,A152,male,young,good +A14,24,2578,A75,A152,male,old,good +A14,15,3568,A75,A151,female,old,good +A14,24,1393,A73,A152,male,old,good +A13,6,2116,A73,A152,male,old,good +A14,12,2012,A74,A152,female,old,good +A14,24,1940,A75,A152,male,old,good +A13,6,1323,A75,A152,male,old,good +A11,36,1977,A75,A152,male,old,bad +A11,12,1108,A74,A152,male,old,bad +A14,15,1300,A75,A153,male,old,good +A11,6,1169,A75,A152,male,old,good +A11,36,3249,A74,A153,male,old,good +A14,39,2569,A73,A152,male,young,good +A12,9,1549,A72,A152,male,old,good +A14,6,700,A75,A153,male,old,good +A12,12,6078,A74,A152,male,old,good +A12,9,1206,A75,A152,female,young,good +A12,24,1355,A72,A152,female,young,bad +A14,24,2978,A73,A152,male,old,good +A14,24,1525,A74,A152,female,old,good +A14,6,932,A73,A152,female,young,good +A12,27,2528,A72,A152,female,old,good +A11,18,3114,A72,A151,female,old,bad +A13,30,1908,A75,A152,male,old,bad +A14,12,1240,A75,A152,female,old,good +A14,12,717,A75,A152,male,old,good +A12,15,1514,A73,A152,male,young,good +A14,15,1829,A75,A152,male,old,good +A14,12,640,A73,A152,male,old,good +A12,36,7432,A73,A151,female,old,good +A14,12,1542,A74,A152,male,old,good +A11,36,9629,A74,A152,male,young,bad +A14,6,426,A75,A152,male,old,good +A14,15,4623,A73,A152,male,old,bad +A11,18,8471,A73,A151,female,young,good +A12,36,3711,A73,A152,male,old,good +A14,48,12749,A74,A152,male,old,good +A11,42,7882,A74,A153,male,old,good +A14,60,10144,A74,A152,female,young,good +A12,24,1246,A72,A152,male,young,bad +A14,36,3595,A75,A152,male,old,good +A12,12,3617,A75,A151,male,old,good +A14,12,886,A73,A152,female,young,good +A11,24,1546,A74,A151,male,young,bad +A11,12,2171,A73,A152,male,old,good 
+A14,36,2613,A73,A152,male,old,good +A14,12,1101,A73,A152,male,old,good +A12,18,3001,A74,A151,female,old,good +A14,12,2279,A73,A153,male,old,good +A12,10,1521,A73,A152,male,old,good +A12,18,1042,A73,A152,female,old,bad +A12,48,6681,A73,A153,male,old,good +A11,36,2145,A74,A152,male,young,bad +A11,24,4817,A74,A152,male,old,bad +A12,48,14421,A73,A152,male,young,bad +A11,12,691,A75,A152,male,old,bad +A12,18,1239,A73,A153,male,old,good +A12,21,2767,A75,A151,male,old,bad +A12,18,6260,A74,A151,male,old,good +A11,18,750,A71,A152,female,old,bad +A14,12,1503,A73,A151,male,old,good +A11,12,701,A73,A152,male,old,good +A14,11,1154,A71,A152,female,old,good +A14,21,3160,A75,A152,male,old,good +A11,18,2124,A73,A151,female,young,bad +A12,12,766,A73,A152,male,old,bad +A12,36,6948,A73,A151,male,old,good +A13,9,1126,A75,A152,male,old,good +A11,18,1216,A72,A151,female,young,bad +A11,12,2214,A73,A152,male,young,good +A14,24,4042,A74,A152,male,old,good +A11,15,1053,A72,A152,male,old,good +A12,24,2896,A72,A152,male,old,good +A11,12,6199,A73,A151,male,old,bad +A12,24,7758,A75,A151,female,old,good +A11,12,1372,A74,A152,male,old,bad +A11,30,6350,A75,A152,male,old,bad +A12,18,4439,A75,A152,male,old,good +A12,48,15672,A73,A152,male,young,bad +A14,12,804,A75,A152,male,old,good +A11,21,1602,A75,A152,male,old,good +A12,12,6468,A71,A152,male,old,bad +A14,18,1473,A72,A152,male,old,good +A14,24,5433,A71,A151,female,old,good +A14,6,1238,A71,A152,male,old,good +A14,33,2764,A73,A152,female,old,good +A12,12,1922,A73,A152,male,old,bad +A13,30,3017,A75,A152,male,old,good +A11,24,1207,A72,A151,female,young,bad +A11,16,2625,A75,A151,male,old,bad +A11,12,684,A73,A151,male,old,bad +A12,9,1670,A72,A152,female,young,bad +A14,6,672,A71,A152,female,old,good +A14,21,2580,A72,A152,male,old,bad +A14,24,6314,A71,A152,male,old,good +A11,24,3234,A72,A151,female,young,bad +A12,10,7308,A71,A153,male,old,good +A12,13,2101,A72,A152,female,young,good +A11,36,6229,A72,A151,female,young,bad +A13,12,3016,A73,A152,male,young,good +A14,12,522,A75,A152,male,old,good +A14,12,1934,A75,A152,male,old,good +A14,48,2751,A75,A152,male,old,good +A12,15,1512,A73,A152,male,old,bad +A12,30,2503,A75,A152,male,old,good +A14,24,1287,A75,A152,female,old,good +A12,36,12389,A73,A153,male,old,bad +A13,9,1337,A72,A152,male,old,bad +A11,27,2442,A75,A152,male,old,good +A14,10,1364,A73,A152,female,old,good +A11,24,2384,A75,A151,male,old,good +A11,9,2799,A73,A151,male,old,good +A13,18,1445,A74,A152,male,old,good +A14,6,1743,A73,A152,male,old,good +A13,10,1347,A74,A152,male,old,good +A11,24,4870,A73,A153,male,old,bad +A14,12,1736,A74,A152,female,old,good +A12,12,754,A75,A152,male,old,good +A14,36,3535,A74,A152,male,old,good +A14,15,1478,A73,A152,male,old,good +A14,30,6742,A74,A152,male,old,good +A14,36,7127,A72,A151,female,young,bad +A11,15,806,A73,A152,female,young,good +A13,24,2892,A75,A153,male,old,good +A12,36,5800,A73,A152,male,old,good +A14,24,3105,A72,A152,male,young,good +A14,15,1386,A73,A151,male,old,good +A14,18,2404,A73,A152,female,old,good +A12,24,3878,A72,A152,male,old,good +A12,24,3092,A72,A151,male,young,bad +A14,28,2743,A75,A152,male,old,good +A14,12,1574,A73,A152,male,old,good +A11,21,1835,A73,A152,female,young,bad +A12,24,2718,A73,A151,female,young,bad +A14,48,4844,A71,A151,male,old,bad +A11,24,1199,A75,A152,male,old,bad +A11,14,3973,A71,A153,male,young,good +A14,24,2679,A72,A152,female,old,good +A14,12,2133,A75,A153,female,old,good +A11,24,1231,A75,A151,female,old,good +A14,24,2872,A75,A152,male,old,good +A12,18,2427,A75,A152,male,old,good 
+A11,60,7297,A75,A151,male,old,bad +A14,30,7596,A75,A152,male,old,good +A11,39,14179,A74,A152,male,old,good +A12,9,1501,A75,A152,female,old,bad +A12,48,3844,A74,A153,male,old,bad +A14,42,4042,A73,A152,male,old,good +A11,6,1374,A71,A152,female,old,good +A14,24,7814,A74,A152,male,old,good +A14,36,909,A75,A152,male,old,good +A14,18,6458,A75,A152,male,old,bad +A14,24,2424,A75,A152,male,old,good +A14,15,2788,A74,A152,female,young,good +A11,12,697,A72,A152,male,old,bad +A14,21,2476,A75,A152,male,old,good +A12,12,1534,A72,A151,male,young,bad +A12,6,1068,A75,A152,male,old,good +A12,12,3573,A73,A152,female,young,good +A12,18,1301,A75,A152,male,old,good +A14,42,7166,A74,A151,male,old,good +A12,24,2039,A72,A152,male,young,bad +A11,24,2483,A73,A152,male,young,good +A13,24,3617,A75,A151,male,young,good +A14,24,2058,A73,A152,male,old,good +A11,24,1755,A75,A152,female,old,good +A14,24,1559,A74,A152,male,old,good +A11,24,3161,A73,A151,male,old,bad +A14,48,10222,A74,A152,male,old,good +A11,24,1381,A73,A152,female,old,bad +A14,21,2241,A75,A152,male,old,good +A14,24,2538,A75,A152,male,old,bad +A11,36,8065,A73,A152,female,young,bad +A12,48,6416,A75,A151,female,old,bad +A12,24,4736,A72,A152,female,young,bad +A14,12,930,A75,A152,male,old,good +A11,24,2910,A74,A153,male,old,good +A12,18,2278,A72,A152,female,old,bad +A14,36,4463,A73,A152,male,old,bad +A14,36,2394,A73,A152,female,young,good +A14,24,2255,A72,A152,male,old,good +A14,39,8588,A75,A152,male,old,good +A12,24,5743,A72,A153,female,young,good +A11,12,1200,A73,A151,female,young,good +A14,30,1867,A75,A152,male,old,good +A11,6,860,A75,A152,female,old,good +A11,24,1024,A72,A152,male,old,bad +A11,60,6836,A75,A152,male,old,bad +A11,12,1680,A75,A152,male,old,good +A14,24,1413,A73,A152,male,old,good +A12,8,907,A72,A152,male,old,good +A11,24,1193,A71,A151,female,old,bad +A11,12,2577,A73,A152,male,old,good +A14,24,3972,A74,A151,female,young,good +A12,24,3758,A71,A151,female,young,good +A14,18,2320,A71,A152,male,old,good +A14,24,2463,A74,A152,male,old,good +A13,6,1343,A75,A152,male,old,good +A14,28,7824,A72,A151,male,old,good +A14,21,3275,A75,A152,male,old,good +A12,18,3244,A73,A152,female,old,good +A14,27,8613,A73,A152,male,old,good +A11,36,1842,A72,A152,female,old,bad +A14,24,2611,A75,A152,male,old,good +A12,9,2030,A74,A152,male,young,good +A14,24,9277,A73,A153,male,old,good +A14,12,1076,A73,A152,male,old,good +A14,10,1546,A73,A152,male,old,good +A11,12,1893,A73,A152,female,old,good +A14,12,2141,A74,A152,male,old,good +A14,36,7980,A72,A151,male,old,bad +A14,12,1264,A75,A151,male,old,good +A11,18,2249,A74,A152,male,old,good +A11,48,6143,A75,A153,female,old,bad +A11,21,3599,A74,A151,female,old,good +A12,15,1537,A75,A152,male,old,good +A14,36,10974,A71,A152,female,old,bad +A12,18,4297,A75,A152,male,old,bad +A11,18,1190,A71,A153,female,old,bad +A11,12,1274,A72,A152,female,old,bad +A14,9,2406,A71,A152,male,old,good +A12,24,6403,A72,A152,male,old,good +A14,10,1287,A75,A152,male,old,good +A12,42,5954,A74,A152,female,old,good +A11,6,1957,A74,A152,female,old,good +A14,24,1552,A74,A152,male,old,good +A14,24,937,A72,A152,male,old,good +A14,24,2032,A75,A153,male,old,good +A14,36,10875,A75,A152,male,old,good +A12,24,5084,A75,A152,female,old,good +A11,36,5179,A74,A152,male,old,bad +A12,6,1050,A71,A152,male,old,good +A13,36,3913,A73,A152,male,young,good +A13,18,3049,A72,A152,female,old,good +A11,36,8335,A75,A153,male,old,bad +A11,12,1858,A72,A151,female,young,good +A11,12,4843,A75,A151,male,old,bad +A14,18,3780,A72,A152,male,old,good +A12,12,2028,A73,A152,male,old,good 
+A14,24,2284,A74,A152,male,old,good +A12,15,2728,A74,A152,male,old,good +A11,27,5293,A71,A152,male,old,bad +A12,60,9157,A73,A153,male,old,good +A14,18,1943,A72,A152,female,young,bad +A11,24,2957,A75,A152,male,old,good +A11,18,2389,A72,A152,female,old,good +A11,18,1882,A73,A151,female,young,bad +A12,30,1715,A73,A152,female,old,good +A11,48,6331,A75,A153,male,old,bad +A14,15,1316,A73,A152,male,old,good +A11,6,14896,A75,A152,male,old,bad +A14,30,3077,A75,A152,male,old,good +A14,4,1544,A74,A152,male,old,good +A12,12,1007,A73,A152,male,young,good +A14,10,2069,A73,A152,male,old,good +A12,18,12976,A71,A153,female,old,bad +A11,18,3104,A74,A152,male,old,good +A12,36,3990,A72,A152,female,old,good +A11,12,3499,A73,A152,female,old,bad +A12,9,1136,A75,A153,male,old,bad +A13,15,392,A72,A151,female,young,good +A11,18,1131,A71,A152,female,old,bad +A12,12,1424,A74,A152,male,old,good +A11,42,3394,A71,A152,male,old,good +A11,48,6999,A74,A152,male,old,bad +A11,30,11998,A72,A152,male,old,bad +A14,30,2831,A73,A152,female,old,good +A12,24,1965,A73,A151,female,old,good +A14,24,1597,A75,A153,male,old,good +A12,6,2063,A72,A151,male,old,good +A12,26,7966,A72,A152,male,old,good +A12,48,7582,A71,A153,male,old,good +A14,24,4679,A74,A152,male,old,good +A14,12,5801,A75,A151,male,old,good +A14,24,3868,A75,A151,female,old,good +A14,12,2390,A75,A152,male,old,good +A12,15,6850,A71,A152,male,old,bad +A14,9,1478,A74,A152,male,young,bad +A14,6,2080,A73,A152,male,young,good +A12,6,931,A72,A152,female,old,bad +A14,9,1244,A75,A151,female,old,good +A11,10,2132,A72,A151,female,old,good +A14,18,1950,A74,A152,male,old,good +A11,6,1374,A73,A152,male,old,good +A12,6,1449,A75,A152,male,old,good +A14,18,1582,A75,A152,male,old,good diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI-test.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI-test.csv new file mode 100644 index 0000000..37c539e --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI-test.csv @@ -0,0 +1,201 @@ +status,month,credit_amount,employment,housing,sex,age,credit +A13,36,5848,A73,A152,male,young,good +A11,36,2302,A73,A151,male,old,bad +A12,12,1103,A74,A152,male,old,good +A14,18,1530,A73,A152,male,old,bad +A11,15,1721,A72,A152,male,old,good +A14,18,2775,A74,A152,male,old,bad +A13,12,1881,A73,A151,female,old,good +A14,15,3029,A74,A152,male,old,good +A11,12,1657,A73,A152,male,old,good +A12,21,2353,A73,A152,male,old,good +A11,18,2659,A73,A152,male,old,good +A14,12,1592,A74,A152,female,old,good +A13,24,1925,A73,A152,male,old,good +A14,36,11054,A73,A152,male,old,good +A12,24,1967,A75,A152,female,young,good +A12,36,2671,A73,A153,female,old,bad +A14,24,3430,A75,A152,male,old,good +A13,36,4473,A75,A152,male,old,good +A14,12,4675,A72,A151,female,young,good +A14,24,999,A75,A152,male,young,good +A12,48,10961,A74,A152,male,old,bad +A12,12,1318,A75,A152,male,old,good +A11,6,609,A74,A152,female,old,good +A14,21,2993,A73,A152,male,old,good +A14,60,15653,A74,A152,male,young,good +A14,15,3368,A75,A151,male,young,good +A11,48,4788,A74,A152,male,old,good +A11,12,2246,A75,A152,male,old,bad +A11,24,1747,A72,A152,male,young,good +A12,48,5096,A73,A152,female,old,bad +A12,18,884,A75,A152,male,old,bad +A14,18,6070,A75,A152,male,old,good +A11,24,1382,A74,A152,male,old,good +A12,6,753,A73,A152,female,old,good +A14,9,2697,A73,A152,male,old,good +A12,6,590,A72,A152,male,old,good +A12,30,1919,A72,A152,male,old,bad +A11,21,1647,A73,A152,male,old,bad +A12,18,5866,A73,A152,male,old,good +A14,24,5943,A72,A152,female,old,bad 
+A11,12,727,A72,A152,male,old,bad +A11,24,2812,A75,A151,female,old,good +A11,48,4308,A72,A151,female,young,bad +A11,30,6187,A74,A151,male,young,good +A12,18,1295,A72,A152,female,old,good +A14,36,5711,A75,A152,male,old,good +A14,48,6110,A73,A153,male,old,good +A12,30,8386,A74,A152,male,old,bad +A13,18,1961,A75,A152,female,young,good +A12,12,1158,A73,A152,male,old,good +A12,12,1410,A73,A152,male,old,good +A14,33,7253,A74,A152,male,old,good +A13,6,683,A72,A152,female,old,good +A14,12,1413,A74,A152,male,old,good +A11,36,5371,A73,A152,male,old,good +A12,30,4280,A73,A151,female,old,bad +A11,24,1358,A75,A152,male,old,bad +A13,10,781,A75,A153,male,old,good +A14,4,3380,A74,A152,female,old,good +A12,9,1082,A75,A152,male,old,good +A12,36,2337,A75,A152,male,old,good +A14,7,846,A75,A153,male,old,good +A12,48,8487,A74,A152,female,young,good +A13,15,1905,A75,A151,male,old,good +A12,12,1804,A72,A152,male,old,good +A11,12,652,A75,A151,female,young,good +A12,12,1331,A72,A152,male,young,bad +A14,36,3079,A73,A152,male,old,good +A13,15,2687,A74,A151,male,old,good +A13,12,1330,A72,A152,male,old,good +A11,12,1168,A73,A152,male,old,good +A11,36,3446,A75,A152,male,old,bad +A12,9,1199,A74,A152,female,old,good +A11,12,741,A71,A152,female,young,bad +A12,18,7374,A71,A152,male,old,good +A14,18,2169,A73,A152,male,old,bad +A14,6,1338,A73,A152,male,old,good +A11,24,6419,A75,A153,female,old,good +A11,15,975,A73,A152,male,young,good +A14,24,2684,A73,A152,male,old,good +A14,24,4526,A73,A152,male,old,good +A12,12,951,A72,A151,female,old,bad +A12,54,15945,A72,A151,male,old,bad +A14,15,3556,A73,A152,male,old,good +A14,4,1455,A74,A152,male,old,good +A12,24,1935,A75,A152,male,old,bad +A12,12,3124,A72,A152,male,old,good +A14,6,1543,A73,A152,male,old,good +A11,24,2964,A75,A153,male,old,good +A11,18,1442,A74,A153,male,old,bad +A14,60,13756,A75,A153,male,old,good +A12,20,6148,A75,A152,male,old,good +A14,24,6313,A75,A152,male,old,good +A11,12,7865,A75,A153,male,old,bad +A13,12,1297,A73,A151,male,young,good +A11,18,2462,A73,A152,male,young,bad +A14,18,6761,A73,A151,male,old,bad +A14,36,9572,A72,A152,male,old,bad +A14,36,7409,A75,A152,male,old,good +A11,36,6887,A73,A152,male,old,bad +A14,24,3621,A75,A152,male,old,bad +A11,6,1352,A71,A151,female,young,good +A13,12,1480,A71,A153,male,old,good +A14,18,1028,A73,A152,female,old,good +A11,12,902,A74,A151,male,young,bad +A11,10,2315,A75,A152,male,old,good +A14,9,1980,A72,A151,female,young,bad +A12,24,1553,A74,A151,female,young,good +A13,10,1240,A75,A153,female,old,bad +A12,12,1223,A75,A151,male,old,bad +A11,21,2606,A72,A151,female,old,good +A11,12,1409,A75,A152,male,old,good +A14,18,1568,A73,A151,female,young,good +A14,5,3448,A74,A152,male,old,good +A12,18,3872,A71,A152,female,old,good +A12,12,2366,A74,A152,male,old,good +A14,36,7678,A74,A152,female,old,good +A12,36,9398,A72,A151,male,old,bad +A14,36,9566,A73,A152,female,old,good +A11,24,1938,A72,A152,male,old,bad +A11,12,2149,A73,A153,male,old,bad +A14,36,6614,A75,A152,male,old,good +A14,12,2859,A71,A152,male,old,good +A12,20,2629,A73,A152,male,old,good +A12,18,6204,A73,A152,male,old,good +A11,6,3676,A73,A151,male,old,good +A12,15,2326,A73,A152,male,old,good +A11,6,3384,A73,A151,male,old,bad +A12,48,3060,A74,A152,male,old,bad +A13,6,1299,A73,A152,male,old,good +A14,10,1418,A73,A151,male,old,good +A14,12,996,A74,A152,female,young,good +A14,20,3485,A72,A152,male,old,good +A14,18,3378,A73,A152,male,old,good +A14,9,1224,A73,A152,male,old,good +A13,9,745,A73,A152,female,old,bad +A12,12,1860,A71,A152,male,old,good 
+A11,20,4272,A75,A152,female,young,good +A12,7,2415,A73,A152,male,old,good +A14,24,3777,A73,A152,male,old,good +A12,30,2181,A75,A152,male,old,good +A14,18,4594,A72,A152,male,old,good +A11,12,759,A74,A152,male,old,bad +A14,30,4530,A74,A151,female,old,good +A14,10,1924,A73,A152,male,old,good +A14,12,3447,A73,A152,female,old,good +A12,60,14027,A74,A152,male,old,bad +A13,18,2864,A73,A152,male,old,bad +A14,12,1255,A75,A152,male,old,good +A11,42,4370,A74,A152,male,old,bad +A14,12,926,A71,A152,female,old,good +A12,15,802,A75,A152,male,old,bad +A12,12,1092,A73,A152,female,old,good +A14,21,1591,A74,A152,male,old,good +A14,36,4454,A73,A152,female,old,good +A14,12,682,A74,A152,female,old,good +A12,18,6361,A75,A152,male,old,good +A14,12,1258,A72,A151,female,young,good +A12,27,2520,A73,A152,male,young,bad +A11,6,338,A75,A152,male,old,good +A12,20,7057,A74,A151,male,old,good +A11,20,2235,A73,A151,male,old,bad +A13,12,1424,A75,A152,female,old,good +A12,24,11328,A73,A152,male,old,bad +A14,24,1249,A72,A152,male,old,good +A12,9,2118,A73,A152,male,old,good +A11,24,1285,A74,A151,female,old,bad +A14,48,3578,A75,A152,male,old,good +A14,10,2848,A73,A152,male,old,good +A11,18,3509,A74,A152,female,young,good +A11,8,1164,A75,A153,male,old,good +A12,21,3976,A74,A152,male,old,good +A12,18,3612,A75,A152,female,old,good +A12,72,5595,A73,A152,male,young,bad +A11,12,709,A75,A152,male,old,bad +A11,36,8229,A73,A152,male,old,bad +A11,21,3414,A72,A152,male,old,bad +A12,36,12612,A73,A153,male,old,bad +A14,6,1750,A75,A152,male,old,good +A14,12,3077,A73,A152,male,old,good +A14,15,5324,A75,A153,female,old,good +A13,4,1494,A72,A152,male,old,good +A11,12,2578,A71,A153,female,old,good +A12,7,2329,A72,A152,female,old,good +A11,9,654,A73,A152,male,old,bad +A14,36,8133,A73,A152,female,old,good +A14,24,3863,A73,A153,male,old,good +A12,12,7472,A71,A151,female,young,good +A14,12,1555,A75,A153,male,old,bad +A11,12,1262,A75,A152,male,old,good +A12,9,790,A73,A152,female,old,good +A12,30,5234,A71,A152,male,old,bad +A11,30,2406,A74,A151,female,young,bad +A12,12,2969,A72,A151,female,young,bad +A14,21,12680,A75,A153,male,old,bad +A11,12,1082,A73,A152,male,old,bad +A12,27,3915,A73,A152,male,old,bad +A14,9,3832,A75,A152,male,old,good +A12,18,1928,A72,A152,male,old,bad +A12,36,9857,A74,A152,male,old,good diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI-train.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI-train.csv new file mode 100644 index 0000000..1045c8d --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI-train.csv @@ -0,0 +1,501 @@ +status,month,credit_amount,employment,housing,sex,age,credit +A11,36,3959,A71,A152,male,old,good +A14,9,3577,A73,A151,male,old,good +A14,18,2515,A73,A152,male,old,good +A12,12,1995,A72,A152,male,old,good +A14,60,10366,A75,A152,male,old,good +A14,24,7393,A73,A152,male,old,good +A13,12,409,A73,A151,female,old,good +A14,9,2507,A75,A153,male,old,good +A14,30,4811,A74,A151,female,young,good +A12,48,6560,A74,A152,male,young,bad +A12,36,9034,A72,A151,male,old,bad +A11,24,3552,A74,A152,male,old,bad +A12,20,6468,A71,A152,male,old,good +A11,30,2522,A75,A152,male,old,good +A12,36,2820,A72,A152,male,old,bad +A14,6,783,A73,A152,male,old,good +A11,21,3763,A74,A152,male,young,good +A14,30,7485,A71,A152,female,old,bad +A11,15,1403,A73,A151,female,old,good +A12,11,4771,A74,A152,male,old,good +A11,24,4020,A73,A152,male,old,good +A12,24,1201,A72,A152,male,old,good +A12,48,5381,A71,A153,male,old,good +A14,24,5511,A73,A152,male,young,good 
+A14,18,1098,A71,A152,female,old,good +A12,48,6224,A75,A153,male,old,bad +A14,11,1393,A72,A152,female,old,good +A12,9,1919,A74,A151,male,old,good +A14,15,5045,A75,A152,female,old,good +A12,48,5951,A73,A152,female,young,bad +A11,8,731,A75,A152,male,old,good +A14,18,629,A75,A152,male,old,good +A14,18,1808,A74,A152,female,young,bad +A12,21,1188,A75,A152,female,old,bad +A14,24,3235,A75,A152,male,old,good +A12,24,11560,A73,A151,female,young,bad +A12,9,1154,A75,A152,male,old,good +A14,18,3850,A74,A152,male,old,good +A11,24,2359,A71,A152,male,old,bad +A14,24,1533,A72,A152,female,old,good +A13,36,4210,A73,A152,male,old,bad +A11,30,3857,A73,A152,male,old,good +A14,12,1393,A75,A152,male,old,good +A14,18,1800,A73,A152,male,young,good +A13,12,2251,A73,A152,female,old,good +A12,12,836,A72,A152,female,young,bad +A11,18,1345,A73,A152,male,old,bad +A12,36,4795,A72,A152,female,old,good +A11,12,708,A73,A152,male,old,good +A11,12,1289,A73,A152,male,young,good +A14,18,1864,A73,A152,female,old,bad +A12,48,9960,A72,A152,female,old,bad +A11,18,5302,A75,A153,male,old,good +A11,6,1872,A71,A153,male,old,good +A11,24,2439,A72,A152,female,old,bad +A14,9,1388,A73,A151,female,old,good +A12,48,12169,A71,A153,male,old,good +A14,18,2662,A74,A152,male,old,good +A14,13,1409,A71,A152,female,old,good +A11,15,3643,A75,A152,female,old,good +A11,48,6758,A73,A152,female,old,bad +A14,24,1851,A74,A152,male,old,good +A11,27,3416,A73,A152,male,old,good +A14,27,5117,A74,A152,male,old,good +A14,10,1597,A73,A151,male,old,good +A14,6,1382,A73,A152,female,old,good +A14,10,1231,A75,A152,male,old,good +A12,15,2631,A73,A151,female,old,bad +A12,30,2150,A73,A152,female,young,bad +A12,12,2930,A74,A152,female,old,good +A14,6,6761,A74,A152,male,old,good +A12,21,2745,A74,A152,male,old,good +A11,24,2325,A74,A152,male,old,good +A14,6,1204,A73,A151,male,old,good +A11,18,1049,A72,A151,female,young,good +A14,12,2292,A71,A152,male,old,bad +A14,9,1449,A74,A152,female,old,good +A11,18,7511,A75,A153,male,old,bad +A12,9,918,A73,A152,female,old,bad +A14,24,2346,A74,A152,male,old,good +A11,24,1603,A75,A152,female,old,good +A11,11,3905,A73,A151,male,old,good +A14,18,1126,A72,A151,female,young,good +A13,21,2923,A73,A152,female,old,good +A12,9,3195,A73,A152,female,old,good +A11,6,4716,A72,A152,male,old,good +A12,6,14555,A71,A152,male,young,bad +A11,36,5493,A75,A153,male,old,good +A14,24,1585,A74,A152,male,old,good +A11,12,3590,A73,A152,male,old,good +A14,12,2759,A75,A152,male,old,good +A11,12,2122,A73,A151,male,old,good +A12,12,2002,A74,A151,male,old,good +A14,24,1901,A73,A151,male,old,good +A14,12,1493,A72,A152,female,old,good +A12,6,1209,A71,A152,male,old,bad +A12,48,3979,A74,A152,male,old,good +A11,24,7721,A72,A152,female,old,good +A11,18,3650,A72,A151,female,young,good +A11,18,1880,A74,A152,male,old,good +A12,18,1941,A73,A152,male,old,good +A14,15,874,A72,A152,female,young,good +A14,27,5190,A75,A152,male,old,good +A14,36,10477,A75,A153,male,old,good +A12,24,4351,A73,A152,female,old,good +A14,24,1376,A74,A152,female,old,good +A12,30,3496,A73,A152,male,old,good +A11,40,5998,A73,A152,male,old,bad +A11,28,4006,A73,A152,male,old,bad +A14,36,3342,A75,A152,male,old,good +A12,18,2622,A73,A152,male,old,good +A12,36,2384,A72,A151,male,old,bad +A11,24,1659,A72,A151,female,old,bad +A14,24,3488,A74,A152,female,young,good +A12,60,7418,A73,A152,male,old,good +A14,18,2238,A73,A152,female,young,good +A11,24,1333,A71,A153,male,old,bad +A14,22,1283,A74,A151,female,young,good +A14,6,3518,A73,A151,male,old,good +A12,12,585,A73,A151,male,young,good 
+A12,39,4933,A74,A152,male,young,bad +A14,24,1278,A75,A152,male,old,good +A14,12,1768,A73,A151,male,young,good +A11,24,3349,A72,A153,male,old,bad +A12,9,1391,A73,A152,male,old,good +A14,18,3422,A75,A152,male,old,good +A14,10,2146,A72,A151,female,young,good +A14,15,1520,A75,A152,male,old,good +A14,24,6842,A73,A152,male,old,good +A14,18,2051,A72,A152,male,old,good +A11,24,2996,A73,A152,male,young,bad +A11,24,3660,A73,A152,female,old,good +A11,24,1442,A74,A151,female,young,bad +A11,9,1138,A73,A152,male,young,good +A14,30,5771,A74,A152,female,young,good +A11,18,2600,A73,A153,male,old,bad +A13,24,3749,A72,A152,female,old,good +A14,12,3059,A74,A152,male,old,good +A11,6,662,A72,A152,male,old,good +A11,18,1940,A72,A153,male,old,good +A12,36,2323,A74,A151,male,young,good +A12,48,12204,A73,A152,male,old,good +A14,24,5507,A75,A153,male,old,good +A14,18,433,A71,A151,female,young,bad +A14,6,4611,A72,A152,female,old,bad +A12,12,2762,A75,A152,female,young,bad +A14,30,4272,A73,A152,male,old,good +A12,39,11760,A74,A151,male,old,good +A12,36,2225,A75,A153,male,old,bad +A13,12,1474,A72,A152,female,old,good +A14,12,1402,A74,A151,female,old,good +A12,18,1913,A72,A152,male,old,good +A14,14,802,A73,A152,male,old,good +A13,15,1271,A73,A153,male,old,bad +A13,24,1258,A73,A152,female,old,good +A14,12,3565,A72,A152,male,old,good +A11,12,3386,A75,A153,male,old,bad +A13,12,2247,A73,A152,female,old,good +A14,24,5804,A73,A152,male,old,good +A14,15,960,A74,A152,female,old,good +A12,18,1056,A75,A152,male,old,bad +A13,42,6289,A72,A152,male,old,good +A14,9,2753,A75,A152,male,old,good +A14,10,2210,A73,A151,male,young,bad +A11,48,10297,A74,A153,male,old,bad +A12,36,14318,A75,A153,male,old,bad +A12,30,4249,A71,A152,male,old,bad +A14,24,929,A74,A152,male,old,good +A12,9,959,A73,A152,female,old,bad +A14,21,2782,A74,A152,female,old,good +A12,48,8358,A72,A152,female,old,good +A14,9,2301,A72,A151,female,young,good +A12,48,3566,A74,A152,male,old,good +A11,18,3966,A75,A151,female,old,bad +A14,6,1237,A73,A152,female,old,good +A11,18,976,A72,A152,female,young,bad +A11,24,915,A75,A152,female,old,bad +A13,24,947,A74,A153,male,old,bad +A12,24,2825,A74,A152,male,old,good +A14,21,2288,A72,A152,female,young,good +A14,10,2901,A72,A151,female,old,good +A12,45,4576,A71,A152,male,old,good +A14,48,7238,A75,A152,male,old,good +A11,36,2746,A75,A152,male,old,bad +A12,24,1743,A75,A152,male,old,good +A11,9,1366,A72,A151,female,young,bad +A14,18,1505,A73,A153,male,old,good +A11,6,343,A72,A152,female,old,good +A14,24,2223,A75,A152,male,old,good +A14,12,1495,A75,A152,male,old,good +A13,15,2327,A72,A152,female,young,bad +A11,45,11816,A75,A151,male,old,bad +A12,12,1484,A73,A152,male,young,bad +A14,36,9055,A73,A153,male,old,good +A14,12,1655,A75,A152,male,old,good +A14,18,1453,A72,A152,female,old,good +A11,12,1228,A73,A152,female,young,bad +A12,6,932,A74,A152,female,old,good +A12,36,8086,A75,A152,male,old,bad +A14,6,1346,A75,A153,male,old,good +A12,36,2862,A75,A153,male,old,good +A11,24,3123,A72,A152,female,old,bad +A12,60,7408,A72,A152,female,young,bad +A12,15,1264,A73,A151,male,young,bad +A14,6,1554,A74,A151,female,young,good +A12,21,3652,A74,A152,male,old,good +A11,48,3931,A74,A153,male,old,bad +A11,6,1361,A72,A152,male,old,good +A11,6,1203,A75,A152,male,old,good +A14,12,3527,A72,A152,male,old,good +A14,36,7855,A73,A152,female,young,bad +A11,21,3357,A72,A152,female,old,good +A13,18,2100,A73,A152,male,old,bad +A14,18,1169,A73,A152,male,old,good +A14,36,8947,A74,A152,male,old,good +A11,18,1936,A74,A151,male,young,good +A12,24,3512,A74,A152,male,old,good 
+A12,24,6967,A74,A151,male,old,good +A14,30,2333,A75,A152,male,old,good +A11,18,2039,A73,A151,female,young,bad +A11,36,9271,A74,A152,male,young,bad +A14,21,1572,A75,A152,female,old,good +A12,6,454,A72,A152,male,young,good +A14,36,6304,A75,A152,male,old,good +A14,24,2197,A74,A152,male,old,good +A14,48,7629,A75,A152,male,old,good +A13,10,1275,A72,A152,female,young,good +A11,48,7119,A73,A153,male,old,bad +A12,24,12579,A75,A153,female,old,bad +A12,18,2779,A73,A151,male,young,good +A14,4,1503,A74,A152,male,old,good +A12,8,1237,A73,A152,female,young,bad +A14,48,3914,A73,A152,male,old,bad +A14,24,3062,A75,A151,male,old,good +A12,36,1953,A75,A153,male,old,bad +A12,30,3832,A72,A152,male,young,good +A14,6,1538,A72,A152,female,old,good +A14,6,2108,A74,A151,male,old,good +A11,6,428,A75,A152,female,old,good +A12,36,4455,A73,A152,male,old,bad +A13,21,2319,A72,A151,male,old,bad +A14,12,1163,A73,A152,male,old,good +A14,9,936,A75,A152,male,old,good +A11,18,2473,A71,A152,male,young,bad +A14,15,1569,A75,A152,male,old,good +A12,12,3017,A72,A151,female,old,good +A14,12,2331,A75,A152,male,old,good +A14,12,2096,A74,A152,male,old,good +A12,12,1037,A74,A152,male,old,good +A14,6,1236,A73,A151,male,old,good +A14,24,4151,A73,A152,male,old,good +A14,48,3609,A73,A152,female,old,good +A14,12,1412,A73,A152,female,old,good +A11,11,3939,A73,A152,male,old,good +A11,12,1526,A75,A153,male,old,good +A14,22,2675,A75,A152,male,old,good +A12,30,4221,A73,A152,female,old,good +A11,24,2924,A73,A152,male,old,good +A14,18,1817,A73,A152,female,old,good +A13,10,1225,A73,A152,male,old,good +A11,15,1845,A72,A151,female,old,good +A14,6,250,A73,A152,female,old,good +A14,6,660,A74,A151,male,young,good +A13,6,709,A72,A152,male,old,good +A11,36,15857,A71,A152,male,old,good +A12,24,1837,A74,A153,female,old,bad +A11,13,1797,A72,A152,male,old,good +A11,6,448,A72,A152,female,young,bad +A12,18,3213,A72,A151,male,young,good +A11,12,2579,A72,A152,male,old,bad +A11,6,1198,A75,A153,female,old,bad +A14,21,5003,A73,A152,female,old,bad +A12,24,2333,A72,A152,male,old,good +A14,15,1262,A74,A152,male,old,good +A12,12,1155,A75,A152,male,old,good +A14,10,894,A74,A152,female,old,good +A14,12,3331,A75,A152,male,old,good +A12,18,1113,A73,A152,female,old,good +A14,12,719,A75,A152,male,old,bad +A12,24,2064,A71,A152,female,old,bad +A13,30,3656,A75,A152,male,old,good +A11,24,6615,A71,A153,male,old,good +A14,18,3229,A71,A152,male,old,good +A14,15,3343,A73,A153,male,old,good +A11,30,3108,A72,A152,male,old,bad +A12,9,458,A73,A152,male,young,good +A14,15,1471,A73,A153,male,old,good +A12,11,1322,A73,A152,female,old,good +A11,9,1288,A75,A152,male,old,good +A14,12,707,A73,A152,male,old,good +A12,15,1308,A75,A152,male,old,good +A14,9,3074,A73,A152,male,old,good +A11,15,2511,A71,A151,female,young,good +A14,6,1740,A75,A151,male,old,good +A14,12,1123,A73,A151,female,old,bad +A12,11,1577,A72,A152,female,young,good +A11,45,1845,A73,A153,male,young,bad +A14,24,2028,A74,A152,male,old,good +A12,27,8318,A75,A153,female,old,bad +A12,12,1295,A72,A151,female,young,bad +A12,24,11938,A73,A152,male,old,bad +A14,36,1819,A73,A153,male,old,bad +A11,15,1478,A75,A152,male,old,good +A14,24,1516,A73,A152,female,old,good +A11,9,2136,A73,A152,male,young,good +A14,15,3812,A72,A152,female,young,good +A14,18,4165,A73,A152,male,old,bad +A12,24,4057,A74,A152,male,old,bad +A14,48,10127,A73,A153,male,old,bad +A12,6,484,A74,A152,male,old,good +A12,60,6288,A73,A153,male,old,bad +A11,24,6872,A72,A152,male,old,bad +A14,15,1532,A73,A152,female,old,good +A14,9,2134,A73,A152,male,old,good 
+A11,12,1107,A73,A151,male,young,good +A11,20,2212,A74,A152,male,old,good +A14,15,3186,A74,A151,female,young,good +A11,47,10722,A72,A152,female,old,good +A14,7,730,A75,A151,male,old,good +A14,12,2073,A73,A152,female,old,good +A11,21,571,A75,A152,male,old,good +A13,15,2360,A73,A152,male,old,good +A11,12,1344,A73,A152,male,old,good +A11,42,7174,A74,A152,female,old,bad +A13,42,4796,A75,A153,male,old,good +A11,42,3965,A72,A152,male,old,bad +A14,11,7228,A73,A152,male,old,good +A12,24,1216,A72,A152,male,old,bad +A14,12,2748,A75,A153,female,old,good +A12,42,9283,A71,A153,male,old,good +A14,27,4526,A72,A152,male,old,good +A14,21,5248,A73,A152,male,old,good +A14,24,3181,A72,A152,female,old,good +A14,24,717,A75,A152,male,old,good +A14,12,1262,A73,A152,male,young,good +A14,12,1884,A75,A152,male,old,good +A12,18,1924,A72,A151,female,old,bad +A11,48,7476,A74,A153,male,old,good +A13,12,939,A74,A152,male,old,bad +A12,12,841,A74,A151,female,young,good +A12,24,4113,A72,A151,female,old,bad +A13,24,5152,A74,A152,male,young,good +A11,33,4281,A73,A152,female,young,bad +A12,12,1567,A73,A152,female,young,good +A11,12,339,A75,A152,male,old,good +A11,15,1275,A73,A151,female,young,bad +A12,12,639,A73,A152,male,old,bad +A14,36,2299,A75,A152,male,old,good +A11,24,1823,A71,A152,male,old,bad +A11,48,7763,A75,A153,male,old,bad +A14,6,1595,A74,A152,male,old,good +A11,12,385,A74,A152,female,old,good +A14,12,776,A73,A152,male,old,good +A12,24,3069,A75,A153,male,old,good +A11,9,1364,A74,A152,male,old,good +A14,6,1221,A73,A152,male,old,good +A11,8,3398,A74,A152,male,old,good +A11,18,4153,A73,A152,male,old,bad +A12,15,1778,A72,A151,female,old,bad +A14,9,1236,A72,A151,female,young,good +A14,6,1766,A73,A151,male,young,good +A11,30,8072,A72,A152,male,young,good +A12,9,5129,A75,A153,female,old,bad +A14,24,8648,A72,A152,male,old,bad +A11,24,4169,A73,A152,male,old,good +A14,12,1935,A75,A152,male,old,good +A14,24,2397,A75,A152,male,old,bad +A14,15,1459,A73,A152,female,old,good +A12,12,685,A74,A152,male,young,bad +A11,48,4605,A75,A153,male,young,bad +A14,11,2142,A75,A152,male,old,good +A11,15,950,A75,A151,male,old,bad +A12,16,1175,A71,A153,male,old,good +A11,12,1498,A73,A152,female,young,good +A14,18,1984,A73,A153,male,old,good +A12,36,3804,A73,A152,female,old,bad +A11,24,1987,A73,A151,male,young,bad +A14,24,4139,A73,A152,male,old,good +A14,12,701,A73,A152,male,old,good +A14,54,9436,A73,A152,male,old,good +A11,18,3190,A73,A152,female,young,bad +A12,13,882,A72,A152,male,young,good +A12,24,2760,A75,A153,male,old,good +A14,15,2221,A73,A151,female,young,good +A11,12,626,A73,A152,female,young,bad +A12,48,18424,A73,A152,female,old,bad +A11,18,1217,A73,A152,male,old,bad +A12,7,2576,A73,A152,male,old,good +A11,6,2647,A73,A151,male,old,good +A14,27,2570,A73,A151,female,young,bad +A13,24,3148,A73,A152,male,old,good +A11,6,666,A74,A152,female,old,good +A14,12,1185,A73,A152,female,old,good +A14,30,5954,A74,A152,male,old,good +A12,45,4746,A72,A152,male,young,bad +A14,15,1213,A75,A152,male,old,good +A14,6,1898,A73,A152,male,old,good +A14,48,11590,A73,A151,female,young,bad +A14,4,601,A72,A151,female,young,good +A12,18,1245,A73,A152,male,old,bad +A14,36,5742,A74,A152,male,old,good +A12,14,1410,A75,A152,male,old,good +A11,12,900,A73,A152,male,young,bad +A14,6,362,A73,A152,female,old,good +A14,15,1979,A75,A152,male,old,good +A14,36,3349,A73,A152,female,old,bad +A14,12,797,A75,A152,female,old,bad +A14,6,518,A73,A152,female,old,good +A11,24,2303,A75,A152,male,old,bad +A11,36,2712,A75,A152,male,old,bad +A14,24,1474,A72,A152,male,old,good 
+A14,18,1820,A73,A152,male,old,good +A14,24,1927,A73,A152,female,old,good +A11,15,1433,A73,A151,female,young,good +A12,12,958,A74,A152,male,old,good +A14,36,3835,A75,A152,female,old,good +A14,12,763,A73,A152,female,old,good +A14,6,2978,A73,A152,male,old,good +A14,12,1291,A73,A152,female,old,good +A11,12,674,A74,A152,male,young,bad +A14,60,6527,A73,A153,male,old,good +A14,12,976,A75,A152,male,old,good +A13,10,3949,A72,A152,male,old,good +A12,10,1048,A73,A152,male,young,good +A14,24,2670,A75,A152,male,old,good +A11,30,4583,A73,A152,male,old,good +A14,24,2375,A73,A152,male,old,good +A11,12,1282,A73,A151,female,young,bad +A14,24,3757,A75,A153,female,old,good +A12,24,4241,A73,A152,male,old,bad +A11,24,6579,A71,A153,male,old,good +A11,12,2121,A73,A152,male,old,good +A14,12,2171,A72,A152,female,old,good +A14,15,3594,A72,A152,female,old,good +A14,36,4686,A73,A153,male,old,good +A14,15,4657,A73,A152,male,old,good +A11,14,8978,A75,A152,male,old,bad +A14,18,1055,A72,A152,female,old,good +A11,48,3051,A73,A152,male,old,bad +A12,30,3441,A73,A151,female,young,bad +A14,24,5150,A75,A152,male,old,good +A14,12,1963,A74,A151,male,old,good +A12,18,3590,A71,A152,male,old,good +A14,36,5842,A75,A152,male,old,good +A14,24,1469,A75,A151,male,old,good +A14,24,2603,A73,A151,female,old,good +A11,12,3651,A73,A152,male,old,good +A11,24,3149,A72,A153,male,young,good +A11,24,4110,A75,A151,male,young,bad +A12,9,1437,A74,A152,male,old,bad +A13,24,1377,A75,A153,female,old,good +A11,9,1422,A72,A153,male,old,bad +A11,30,3622,A75,A151,female,old,good +A11,24,3345,A75,A151,male,old,bad +A12,12,625,A72,A152,male,old,good +A12,12,983,A72,A151,female,young,good +A12,30,2991,A75,A152,female,young,good +A14,18,1149,A73,A152,male,old,good +A12,9,276,A73,A151,male,young,good +A11,24,1371,A73,A151,female,young,bad +A11,12,795,A72,A152,female,old,bad +A11,24,1282,A73,A152,female,old,bad +A12,6,368,A75,A152,male,old,good +A14,24,1258,A74,A152,male,young,good +A14,15,2186,A74,A151,female,old,good +A14,24,2835,A75,A152,male,old,good +A11,24,3632,A73,A151,female,young,good +A14,24,1311,A74,A152,male,old,good +A11,36,2348,A73,A152,male,old,good +A12,6,433,A72,A151,female,young,bad +A11,15,3959,A73,A152,female,old,bad +A12,15,2631,A73,A152,female,young,good +A14,12,2445,A72,A151,male,old,good +A11,10,1038,A74,A152,male,old,good +A14,48,8858,A74,A153,male,old,good +A13,24,1275,A73,A152,male,old,good +A11,24,6568,A73,A152,male,young,good +A13,24,1344,A74,A152,male,old,bad +A12,27,5965,A75,A152,male,old,good +A14,12,618,A75,A152,male,old,good +A14,10,727,A75,A153,male,old,good +A12,15,1444,A72,A152,male,young,good +A11,36,3620,A73,A152,male,old,good +A11,10,2241,A72,A151,male,old,good +A12,60,14782,A75,A153,female,old,bad +A14,24,2022,A73,A152,female,old,good +A13,12,609,A72,A152,female,old,bad +A11,24,3021,A73,A151,male,young,good +A11,18,4380,A73,A152,male,old,good +A14,10,1309,A73,A152,male,old,bad +A11,24,2828,A73,A152,male,young,good +A12,18,1795,A75,A151,female,old,good +A14,18,1533,A72,A152,male,old,bad +A14,24,5103,A72,A153,male,old,good +A11,30,10623,A75,A153,male,old,good +A14,12,1386,A73,A152,female,old,bad +A14,24,4591,A73,A152,male,old,bad +A13,12,3399,A75,A152,male,old,good +A13,6,1047,A73,A152,female,old,good +A11,48,7685,A74,A151,female,old,bad diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI-val.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI-val.csv new file mode 100644 index 0000000..e0b67a7 --- /dev/null +++ 
b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI-val.csv @@ -0,0 +1,301 @@ +status,month,credit_amount,employment,housing,sex,age,credit +A11,18,1553,A73,A152,male,old,bad +A14,15,1360,A73,A152,male,old,good +A12,8,760,A74,A152,female,old,good +A12,18,2899,A75,A152,male,old,good +A12,45,3031,A73,A151,male,young,bad +A12,24,4712,A73,A152,male,old,good +A12,18,1887,A73,A152,male,old,good +A12,36,2273,A74,A152,male,old,good +A12,12,888,A75,A152,male,old,bad +A11,12,1620,A73,A152,female,old,good +A14,15,2708,A72,A152,male,old,good +A12,18,866,A73,A152,male,young,good +A12,8,1414,A73,A152,male,old,good +A14,9,1313,A75,A152,male,young,good +A14,24,2578,A75,A152,male,old,good +A14,15,3568,A75,A151,female,old,good +A14,24,1393,A73,A152,male,old,good +A13,6,2116,A73,A152,male,old,good +A14,12,2012,A74,A152,female,old,good +A14,24,1940,A75,A152,male,old,good +A13,6,1323,A75,A152,male,old,good +A11,36,1977,A75,A152,male,old,bad +A11,12,1108,A74,A152,male,old,bad +A14,15,1300,A75,A153,male,old,good +A11,6,1169,A75,A152,male,old,good +A11,36,3249,A74,A153,male,old,good +A14,39,2569,A73,A152,male,young,good +A12,9,1549,A72,A152,male,old,good +A14,6,700,A75,A153,male,old,good +A12,12,6078,A74,A152,male,old,good +A12,9,1206,A75,A152,female,young,good +A12,24,1355,A72,A152,female,young,bad +A14,24,2978,A73,A152,male,old,good +A14,24,1525,A74,A152,female,old,good +A14,6,932,A73,A152,female,young,good +A12,27,2528,A72,A152,female,old,good +A11,18,3114,A72,A151,female,old,bad +A13,30,1908,A75,A152,male,old,bad +A14,12,1240,A75,A152,female,old,good +A14,12,717,A75,A152,male,old,good +A12,15,1514,A73,A152,male,young,good +A14,15,1829,A75,A152,male,old,good +A14,12,640,A73,A152,male,old,good +A12,36,7432,A73,A151,female,old,good +A14,12,1542,A74,A152,male,old,good +A11,36,9629,A74,A152,male,young,bad +A14,6,426,A75,A152,male,old,good +A14,15,4623,A73,A152,male,old,bad +A11,18,8471,A73,A151,female,young,good +A12,36,3711,A73,A152,male,old,good +A14,48,12749,A74,A152,male,old,good +A11,42,7882,A74,A153,male,old,good +A14,60,10144,A74,A152,female,young,good +A12,24,1246,A72,A152,male,young,bad +A14,36,3595,A75,A152,male,old,good +A12,12,3617,A75,A151,male,old,good +A14,12,886,A73,A152,female,young,good +A11,24,1546,A74,A151,male,young,bad +A11,12,2171,A73,A152,male,old,good +A14,36,2613,A73,A152,male,old,good +A14,12,1101,A73,A152,male,old,good +A12,18,3001,A74,A151,female,old,good +A14,12,2279,A73,A153,male,old,good +A12,10,1521,A73,A152,male,old,good +A12,18,1042,A73,A152,female,old,bad +A12,48,6681,A73,A153,male,old,good +A11,36,2145,A74,A152,male,young,bad +A11,24,4817,A74,A152,male,old,bad +A12,48,14421,A73,A152,male,young,bad +A11,12,691,A75,A152,male,old,bad +A12,18,1239,A73,A153,male,old,good +A12,21,2767,A75,A151,male,old,bad +A12,18,6260,A74,A151,male,old,good +A11,18,750,A71,A152,female,old,bad +A14,12,1503,A73,A151,male,old,good +A11,12,701,A73,A152,male,old,good +A14,11,1154,A71,A152,female,old,good +A14,21,3160,A75,A152,male,old,good +A11,18,2124,A73,A151,female,young,bad +A12,12,766,A73,A152,male,old,bad +A12,36,6948,A73,A151,male,old,good +A13,9,1126,A75,A152,male,old,good +A11,18,1216,A72,A151,female,young,bad +A11,12,2214,A73,A152,male,young,good +A14,24,4042,A74,A152,male,old,good +A11,15,1053,A72,A152,male,old,good +A12,24,2896,A72,A152,male,old,good +A11,12,6199,A73,A151,male,old,bad +A12,24,7758,A75,A151,female,old,good +A11,12,1372,A74,A152,male,old,bad +A11,30,6350,A75,A152,male,old,bad +A12,18,4439,A75,A152,male,old,good +A12,48,15672,A73,A152,male,young,bad 
+A14,12,804,A75,A152,male,old,good +A11,21,1602,A75,A152,male,old,good +A12,12,6468,A71,A152,male,old,bad +A14,18,1473,A72,A152,male,old,good +A14,24,5433,A71,A151,female,old,good +A14,6,1238,A71,A152,male,old,good +A14,33,2764,A73,A152,female,old,good +A12,12,1922,A73,A152,male,old,bad +A13,30,3017,A75,A152,male,old,good +A11,24,1207,A72,A151,female,young,bad +A11,16,2625,A75,A151,male,old,bad +A11,12,684,A73,A151,male,old,bad +A12,9,1670,A72,A152,female,young,bad +A14,6,672,A71,A152,female,old,good +A14,21,2580,A72,A152,male,old,bad +A14,24,6314,A71,A152,male,old,good +A11,24,3234,A72,A151,female,young,bad +A12,10,7308,A71,A153,male,old,good +A12,13,2101,A72,A152,female,young,good +A11,36,6229,A72,A151,female,young,bad +A13,12,3016,A73,A152,male,young,good +A14,12,522,A75,A152,male,old,good +A14,12,1934,A75,A152,male,old,good +A14,48,2751,A75,A152,male,old,good +A12,15,1512,A73,A152,male,old,bad +A12,30,2503,A75,A152,male,old,good +A14,24,1287,A75,A152,female,old,good +A12,36,12389,A73,A153,male,old,bad +A13,9,1337,A72,A152,male,old,bad +A11,27,2442,A75,A152,male,old,good +A14,10,1364,A73,A152,female,old,good +A11,24,2384,A75,A151,male,old,good +A11,9,2799,A73,A151,male,old,good +A13,18,1445,A74,A152,male,old,good +A14,6,1743,A73,A152,male,old,good +A13,10,1347,A74,A152,male,old,good +A11,24,4870,A73,A153,male,old,bad +A14,12,1736,A74,A152,female,old,good +A12,12,754,A75,A152,male,old,good +A14,36,3535,A74,A152,male,old,good +A14,15,1478,A73,A152,male,old,good +A14,30,6742,A74,A152,male,old,good +A14,36,7127,A72,A151,female,young,bad +A11,15,806,A73,A152,female,young,good +A13,24,2892,A75,A153,male,old,good +A12,36,5800,A73,A152,male,old,good +A14,24,3105,A72,A152,male,young,good +A14,15,1386,A73,A151,male,old,good +A14,18,2404,A73,A152,female,old,good +A12,24,3878,A72,A152,male,old,good +A12,24,3092,A72,A151,male,young,bad +A14,28,2743,A75,A152,male,old,good +A14,12,1574,A73,A152,male,old,good +A11,21,1835,A73,A152,female,young,bad +A12,24,2718,A73,A151,female,young,bad +A14,48,4844,A71,A151,male,old,bad +A11,24,1199,A75,A152,male,old,bad +A11,14,3973,A71,A153,male,young,good +A14,24,2679,A72,A152,female,old,good +A14,12,2133,A75,A153,female,old,good +A11,24,1231,A75,A151,female,old,good +A14,24,2872,A75,A152,male,old,good +A12,18,2427,A75,A152,male,old,good +A11,60,7297,A75,A151,male,old,bad +A14,30,7596,A75,A152,male,old,good +A11,39,14179,A74,A152,male,old,good +A12,9,1501,A75,A152,female,old,bad +A12,48,3844,A74,A153,male,old,bad +A14,42,4042,A73,A152,male,old,good +A11,6,1374,A71,A152,female,old,good +A14,24,7814,A74,A152,male,old,good +A14,36,909,A75,A152,male,old,good +A14,18,6458,A75,A152,male,old,bad +A14,24,2424,A75,A152,male,old,good +A14,15,2788,A74,A152,female,young,good +A11,12,697,A72,A152,male,old,bad +A14,21,2476,A75,A152,male,old,good +A12,12,1534,A72,A151,male,young,bad +A12,6,1068,A75,A152,male,old,good +A12,12,3573,A73,A152,female,young,good +A12,18,1301,A75,A152,male,old,good +A14,42,7166,A74,A151,male,old,good +A12,24,2039,A72,A152,male,young,bad +A11,24,2483,A73,A152,male,young,good +A13,24,3617,A75,A151,male,young,good +A14,24,2058,A73,A152,male,old,good +A11,24,1755,A75,A152,female,old,good +A14,24,1559,A74,A152,male,old,good +A11,24,3161,A73,A151,male,old,bad +A14,48,10222,A74,A152,male,old,good +A11,24,1381,A73,A152,female,old,bad +A14,21,2241,A75,A152,male,old,good +A14,24,2538,A75,A152,male,old,bad +A11,36,8065,A73,A152,female,young,bad +A12,48,6416,A75,A151,female,old,bad +A12,24,4736,A72,A152,female,young,bad +A14,12,930,A75,A152,male,old,good 
+A11,24,2910,A74,A153,male,old,good +A12,18,2278,A72,A152,female,old,bad +A14,36,4463,A73,A152,male,old,bad +A14,36,2394,A73,A152,female,young,good +A14,24,2255,A72,A152,male,old,good +A14,39,8588,A75,A152,male,old,good +A12,24,5743,A72,A153,female,young,good +A11,12,1200,A73,A151,female,young,good +A14,30,1867,A75,A152,male,old,good +A11,6,860,A75,A152,female,old,good +A11,24,1024,A72,A152,male,old,bad +A11,60,6836,A75,A152,male,old,bad +A11,12,1680,A75,A152,male,old,good +A14,24,1413,A73,A152,male,old,good +A12,8,907,A72,A152,male,old,good +A11,24,1193,A71,A151,female,old,bad +A11,12,2577,A73,A152,male,old,good +A14,24,3972,A74,A151,female,young,good +A12,24,3758,A71,A151,female,young,good +A14,18,2320,A71,A152,male,old,good +A14,24,2463,A74,A152,male,old,good +A13,6,1343,A75,A152,male,old,good +A14,28,7824,A72,A151,male,old,good +A14,21,3275,A75,A152,male,old,good +A12,18,3244,A73,A152,female,old,good +A14,27,8613,A73,A152,male,old,good +A11,36,1842,A72,A152,female,old,bad +A14,24,2611,A75,A152,male,old,good +A12,9,2030,A74,A152,male,young,good +A14,24,9277,A73,A153,male,old,good +A14,12,1076,A73,A152,male,old,good +A14,10,1546,A73,A152,male,old,good +A11,12,1893,A73,A152,female,old,good +A14,12,2141,A74,A152,male,old,good +A14,36,7980,A72,A151,male,old,bad +A14,12,1264,A75,A151,male,old,good +A11,18,2249,A74,A152,male,old,good +A11,48,6143,A75,A153,female,old,bad +A11,21,3599,A74,A151,female,old,good +A12,15,1537,A75,A152,male,old,good +A14,36,10974,A71,A152,female,old,bad +A12,18,4297,A75,A152,male,old,bad +A11,18,1190,A71,A153,female,old,bad +A11,12,1274,A72,A152,female,old,bad +A14,9,2406,A71,A152,male,old,good +A12,24,6403,A72,A152,male,old,good +A14,10,1287,A75,A152,male,old,good +A12,42,5954,A74,A152,female,old,good +A11,6,1957,A74,A152,female,old,good +A14,24,1552,A74,A152,male,old,good +A14,24,937,A72,A152,male,old,good +A14,24,2032,A75,A153,male,old,good +A14,36,10875,A75,A152,male,old,good +A12,24,5084,A75,A152,female,old,good +A11,36,5179,A74,A152,male,old,bad +A12,6,1050,A71,A152,male,old,good +A13,36,3913,A73,A152,male,young,good +A13,18,3049,A72,A152,female,old,good +A11,36,8335,A75,A153,male,old,bad +A11,12,1858,A72,A151,female,young,good +A11,12,4843,A75,A151,male,old,bad +A14,18,3780,A72,A152,male,old,good +A12,12,2028,A73,A152,male,old,good +A14,24,2284,A74,A152,male,old,good +A12,15,2728,A74,A152,male,old,good +A11,27,5293,A71,A152,male,old,bad +A12,60,9157,A73,A153,male,old,good +A14,18,1943,A72,A152,female,young,bad +A11,24,2957,A75,A152,male,old,good +A11,18,2389,A72,A152,female,old,good +A11,18,1882,A73,A151,female,young,bad +A12,30,1715,A73,A152,female,old,good +A11,48,6331,A75,A153,male,old,bad +A14,15,1316,A73,A152,male,old,good +A11,6,14896,A75,A152,male,old,bad +A14,30,3077,A75,A152,male,old,good +A14,4,1544,A74,A152,male,old,good +A12,12,1007,A73,A152,male,young,good +A14,10,2069,A73,A152,male,old,good +A12,18,12976,A71,A153,female,old,bad +A11,18,3104,A74,A152,male,old,good +A12,36,3990,A72,A152,female,old,good +A11,12,3499,A73,A152,female,old,bad +A12,9,1136,A75,A153,male,old,bad +A13,15,392,A72,A151,female,young,good +A11,18,1131,A71,A152,female,old,bad +A12,12,1424,A74,A152,male,old,good +A11,42,3394,A71,A152,male,old,good +A11,48,6999,A74,A152,male,old,bad +A11,30,11998,A72,A152,male,old,bad +A14,30,2831,A73,A152,female,old,good +A12,24,1965,A73,A151,female,old,good +A14,24,1597,A75,A153,male,old,good +A12,6,2063,A72,A151,male,old,good +A12,26,7966,A72,A152,male,old,good +A12,48,7582,A71,A153,male,old,good +A14,24,4679,A74,A152,male,old,good 
+A14,12,5801,A75,A151,male,old,good +A14,24,3868,A75,A151,female,old,good +A14,12,2390,A75,A152,male,old,good +A12,15,6850,A71,A152,male,old,bad +A14,9,1478,A74,A152,male,young,bad +A14,6,2080,A73,A152,male,young,good +A12,6,931,A72,A152,female,old,bad +A14,9,1244,A75,A151,female,old,good +A11,10,2132,A72,A151,female,old,good +A14,18,1950,A74,A152,male,old,good +A11,6,1374,A73,A152,male,old,good +A12,6,1449,A75,A152,male,old,good +A14,18,1582,A75,A152,male,old,good diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS-test.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS-test.csv new file mode 100644 index 0000000..1b64b92 --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS-test.csv @@ -0,0 +1,201 @@ +status,month,credit_amount,employment,housing,sex,age,credit +A13,0.5714285714285714,0.30802244965335096,A73,A152,male,young,good +A11,0.5714285714285714,0.11290855067679102,A73,A151,male,old,bad +A12,0.14285714285714285,0.04693518212831517,A74,A152,male,old,good +A14,0.24999999999999997,0.0704302850225597,A73,A152,male,old,bad +A11,0.19642857142857142,0.08093980411576977,A72,A152,male,old,good +A14,0.24999999999999997,0.1389347419390338,A74,A152,male,old,bad +A13,0.14285714285714285,0.08974358974358974,A73,A151,female,old,good +A14,0.19642857142857142,0.15291075162319798,A74,A152,male,old,good +A11,0.14285714285714285,0.07741828986464179,A73,A152,male,old,good +A12,0.3035714285714286,0.11571475734565863,A73,A152,male,old,good +A11,0.24999999999999997,0.13255199735886433,A73,A152,male,old,good +A14,0.14285714285714285,0.07384175195333993,A74,A152,female,old,good +A13,0.3571428571428571,0.09216463079124024,A73,A152,male,old,good +A14,0.5714285714285714,0.594475624518543,A73,A152,male,old,good +A12,0.3571428571428571,0.09447562451854297,A75,A152,female,young,good +A12,0.5714285714285714,0.13321228128095083,A73,A153,female,old,bad +A14,0.3571428571428571,0.17497523935292178,A75,A152,male,old,good +A13,0.5714285714285714,0.23236491691427316,A75,A152,male,old,good +A14,0.14285714285714285,0.24347969626939586,A72,A151,female,young,good +A14,0.3571428571428571,0.041212721470232194,A75,A152,male,young,good +A12,0.7857142857142857,0.5893584241223726,A74,A152,male,old,bad +A12,0.14285714285714285,0.05876526906569825,A75,A152,male,old,good +A11,0.03571428571428571,0.019753494002421042,A74,A152,female,old,good +A14,0.3035714285714286,0.1509298998569385,A73,A152,male,old,good +A14,1.0,0.847529437658193,A74,A152,male,young,good +A14,0.19642857142857142,0.17156377242214152,A75,A151,male,young,good +A11,0.7857142857142857,0.24969736986904373,A74,A152,male,old,good +A11,0.14285714285714285,0.10982722570705403,A75,A152,male,old,bad +A11,0.3571428571428571,0.08237041928029053,A72,A152,male,young,good +A12,0.7857142857142857,0.2666446572025971,A73,A152,female,old,bad +A12,0.24999999999999997,0.034885000550236606,A75,A152,male,old,bad +A14,0.24999999999999997,0.32023770221195114,A75,A152,male,old,good +A11,0.3571428571428571,0.06228678331682623,A74,A152,male,old,good +A12,0.03571428571428571,0.02767690106745901,A73,A152,female,old,good +A14,0.08928571428571427,0.13464289644547156,A73,A152,male,old,good +A12,0.03571428571428571,0.01870804445911742,A72,A152,male,old,good +A12,0.4642857142857143,0.09183448883019699,A72,A152,male,old,bad +A11,0.3035714285714286,0.07686805326290305,A73,A152,male,old,bad +A12,0.24999999999999997,0.3090128755364807,A73,A152,male,old,good +A14,0.3571428571428571,0.3132496973698691,A72,A152,female,old,bad 
+A11,0.14285714285714285,0.026246285902938263,A72,A152,male,old,bad +A11,0.3571428571428571,0.14097061736546715,A75,A151,female,old,good +A11,0.7857142857142857,0.22328601298558381,A72,A151,female,young,bad +A11,0.4642857142857143,0.3266754704522945,A74,A151,male,young,good +A12,0.24999999999999997,0.05749972488169913,A72,A152,female,old,good +A14,0.5714285714285714,0.3004842082095301,A75,A152,male,old,good +A14,0.7857142857142857,0.32243864861890614,A73,A153,male,old,good +A12,0.4642857142857143,0.44767249917464513,A74,A152,male,old,bad +A13,0.24999999999999997,0.09414548255749973,A75,A152,female,young,good +A12,0.14285714285714285,0.04996148343787829,A73,A152,male,old,good +A12,0.14285714285714285,0.06382744580169472,A73,A152,male,old,good +A14,0.5178571428571429,0.385330692197645,A74,A152,male,old,good +A13,0.03571428571428571,0.023825244855287777,A72,A152,female,old,good +A14,0.14285714285714285,0.06399251678221635,A74,A152,male,old,good +A11,0.5714285714285714,0.2817761637504127,A73,A152,male,old,good +A12,0.4642857142857143,0.2217453505007153,A73,A151,female,old,bad +A11,0.3571428571428571,0.06096621547265324,A75,A152,male,old,bad +A13,0.10714285714285712,0.0292175635523275,A75,A153,male,old,good +A14,0.0,0.17222405634422802,A74,A152,female,old,good +A12,0.08928571428571427,0.045779685264663805,A75,A152,male,old,good +A12,0.5714285714285714,0.11483437878287663,A75,A152,male,old,good +A14,0.053571428571428575,0.03279410146362936,A75,A153,male,old,good +A12,0.7857142857142857,0.45322988885220644,A74,A152,female,young,good +A13,0.19642857142857142,0.09106415758776273,A75,A151,male,old,good +A12,0.14285714285714285,0.08550676791020138,A72,A152,male,old,good +A11,0.14285714285714285,0.022119511389897654,A75,A151,female,young,good +A12,0.14285714285714285,0.059480576647958625,A72,A152,male,young,bad +A14,0.5714285714285714,0.15566193463189174,A73,A152,male,old,good +A13,0.19642857142857142,0.1340926598437328,A74,A151,male,old,good +A13,0.14285714285714285,0.05942555298778475,A72,A152,male,old,good +A11,0.14285714285714285,0.05051172003961703,A73,A152,male,old,good +A11,0.5714285714285714,0.17585561791570375,A75,A152,male,old,bad +A12,0.08928571428571427,0.052217453505007144,A74,A152,female,old,good +A11,0.14285714285714285,0.02701661714537251,A71,A152,female,young,bad +A12,0.24999999999999997,0.39198855507868385,A71,A152,male,old,good +A14,0.24999999999999997,0.10559040387366568,A73,A152,male,old,bad +A14,0.03571428571428571,0.05986574226917574,A73,A152,male,old,good +A11,0.3571428571428571,0.33944095961263343,A75,A153,female,old,good +A11,0.19642857142857142,0.039892153626059204,A73,A152,male,young,good +A14,0.3571428571428571,0.13392758886321118,A73,A152,male,old,good +A14,0.3571428571428571,0.23528117090348852,A73,A152,male,old,good +A12,0.14285714285714285,0.038571585781886214,A72,A151,female,old,bad +A12,0.8928571428571428,0.8635963464289644,A72,A151,male,old,bad +A14,0.19642857142857142,0.18190822053483,A73,A152,male,old,good +A14,0.0,0.06630351050951909,A74,A152,male,old,good +A12,0.3571428571428571,0.09271486739297898,A75,A152,male,old,bad +A12,0.14285714285714285,0.1581379993397161,A72,A152,male,old,good +A14,0.03571428571428571,0.07114559260482008,A73,A152,male,old,good +A11,0.3571428571428571,0.14933421371189612,A75,A153,male,old,good +A11,0.24999999999999997,0.06558820292725871,A74,A153,male,old,bad +A14,1.0,0.7431495543083525,A75,A153,male,old,good +A12,0.2857142857142857,0.3245295477055134,A75,A152,male,old,good 
+A14,0.3571428571428571,0.3336084516342027,A75,A152,male,old,good +A11,0.14285714285714285,0.41900517222405637,A75,A153,male,old,bad +A13,0.14285714285714285,0.05760977220204688,A73,A151,male,young,good +A11,0.24999999999999997,0.121712336304611,A73,A152,male,young,bad +A14,0.24999999999999997,0.3582590513920986,A73,A151,male,old,bad +A14,0.5714285714285714,0.5129305601408606,A72,A152,male,old,bad +A14,0.5714285714285714,0.39391438318476946,A75,A152,male,old,good +A11,0.5714285714285714,0.36519203257400684,A73,A152,male,old,bad +A14,0.3571428571428571,0.18548475844613185,A75,A152,male,old,bad +A11,0.03571428571428571,0.06063607351160999,A71,A151,female,young,good +A13,0.14285714285714285,0.06767910201386595,A71,A153,male,old,good +A14,0.24999999999999997,0.042808407615274574,A73,A152,female,old,good +A11,0.14285714285714285,0.035875426433366345,A74,A151,male,young,bad +A11,0.10714285714285712,0.1136238582590514,A75,A152,male,old,good +A14,0.08928571428571427,0.09519093210080334,A72,A151,female,young,bad +A12,0.3571428571428571,0.07169582920655881,A74,A151,female,young,good +A13,0.10714285714285712,0.05447342357213601,A75,A153,female,old,bad +A12,0.14285714285714285,0.05353802134918015,A75,A151,male,old,bad +A11,0.3035714285714286,0.12963574336964895,A72,A151,female,old,good +A11,0.14285714285714285,0.06377242214152085,A75,A152,male,old,good +A14,0.24999999999999997,0.07252118410916694,A73,A151,female,young,good +A14,0.01785714285714285,0.1759656652360515,A74,A152,male,old,good +A12,0.24999999999999997,0.1992956971497744,A71,A152,female,old,good +A12,0.14285714285714285,0.116430064927919,A74,A152,male,old,good +A14,0.5714285714285714,0.4087157477715418,A74,A152,female,old,good +A12,0.5714285714285714,0.5033564432706064,A72,A151,male,old,bad +A14,0.5714285714285714,0.5126004181798173,A73,A152,female,old,good +A11,0.3571428571428571,0.0928799383735006,A72,A152,male,old,bad +A11,0.14285714285714285,0.10448993067018818,A73,A153,male,old,bad +A14,0.5714285714285714,0.350170573346539,A75,A152,male,old,good +A14,0.14285714285714285,0.14355672939363928,A71,A152,male,old,good +A12,0.2857142857142857,0.13090128755364808,A73,A152,male,old,good +A12,0.24999999999999997,0.32761087267525035,A73,A152,male,old,good +A11,0.03571428571428571,0.18851105975569496,A73,A151,male,old,good +A12,0.19642857142857142,0.114229118520964,A73,A152,male,old,good +A11,0.03571428571428571,0.17244415098492352,A73,A151,male,old,bad +A12,0.7857142857142857,0.1546164850885881,A74,A152,male,old,bad +A13,0.03571428571428571,0.05771981952239463,A73,A152,male,old,good +A14,0.10714285714285712,0.06426763508308572,A73,A151,male,old,good +A14,0.14285714285714285,0.04104765048971058,A74,A152,female,young,good +A14,0.2857142857142857,0.1780015406624849,A72,A152,male,old,good +A14,0.24999999999999997,0.17211400902388027,A73,A152,male,old,good +A14,0.08928571428571427,0.05359304500935402,A73,A152,male,old,good +A13,0.08928571428571427,0.02723671178606801,A73,A152,female,old,bad +A12,0.14285714285714285,0.08858809287993837,A71,A152,male,old,good +A11,0.2857142857142857,0.2213051612193243,A75,A152,female,young,good +A12,0.053571428571428575,0.11912622427643886,A73,A152,male,old,good +A14,0.3571428571428571,0.19406844943325632,A73,A152,male,old,good +A12,0.4642857142857143,0.10625068779575217,A75,A152,male,old,good +A14,0.24999999999999997,0.239022779795312,A72,A152,male,old,good +A11,0.14285714285714285,0.028007043028502255,A74,A152,male,old,bad +A14,0.4642857142857143,0.23550126554418402,A74,A151,female,old,good 
+A14,0.10714285714285712,0.09210960713106636,A73,A152,male,old,good +A14,0.14285714285714285,0.17591064157587763,A73,A152,female,old,good +A12,1.0,0.7580609662154726,A74,A152,male,old,bad +A13,0.24999999999999997,0.14383184769450866,A73,A152,male,old,bad +A14,0.14285714285714285,0.05529877847474414,A75,A152,male,old,good +A11,0.6785714285714286,0.22669747991636405,A74,A152,male,old,bad +A14,0.14285714285714285,0.03719599427753935,A71,A152,female,old,good +A12,0.19642857142857142,0.030373060415978873,A75,A152,male,old,bad +A12,0.14285714285714285,0.04632992186640256,A73,A152,female,old,good +A14,0.3035714285714286,0.07378672829316606,A74,A152,male,old,good +A14,0.5714285714285714,0.23131946737096953,A73,A152,female,old,good +A14,0.14285714285714285,0.023770221195113902,A74,A152,female,old,good +A12,0.24999999999999997,0.3362495873225487,A75,A152,male,old,good +A14,0.14285714285714285,0.055463849455265765,A72,A151,female,young,good +A12,0.4107142857142857,0.12490370859469573,A73,A152,male,young,bad +A11,0.03571428571428571,0.00484208209530098,A75,A152,male,old,good +A12,0.2857142857142857,0.37454605480356556,A74,A151,male,old,good +A11,0.2857142857142857,0.10922196544514141,A73,A151,male,old,bad +A13,0.14285714285714285,0.06459777704412897,A75,A152,female,old,good +A12,0.3571428571428571,0.6095521074061846,A73,A152,male,old,bad +A14,0.3571428571428571,0.05496863651370089,A72,A152,male,old,good +A12,0.08928571428571427,0.10278419720479806,A73,A152,male,old,good +A11,0.3571428571428571,0.05694948827996038,A74,A151,female,old,bad +A14,0.7857142857142857,0.18311874105865522,A75,A152,male,old,good +A14,0.10714285714285712,0.14295146913172666,A73,A152,male,old,good +A11,0.24999999999999997,0.17932210850665786,A74,A152,female,young,good +A11,0.07142857142857142,0.05029162539892153,A75,A153,male,old,good +A12,0.3035714285714286,0.2050181578078574,A74,A152,male,old,good +A12,0.24999999999999997,0.18498954550456698,A75,A152,female,old,good +A12,1.2142857142857142,0.2941014636293606,A73,A152,male,young,bad +A11,0.14285714285714285,0.025255860019808517,A75,A152,male,old,bad +A11,0.5714285714285714,0.43903378452734676,A73,A152,male,old,bad +A11,0.3035714285714286,0.17409486079013978,A72,A152,male,old,bad +A12,0.5714285714285714,0.6802024870694399,A73,A153,male,old,bad +A14,0.03571428571428571,0.08253549026081215,A75,A152,male,old,good +A14,0.14285714285714285,0.15555188731154398,A73,A152,male,old,good +A14,0.19642857142857142,0.2791900517222406,A75,A153,female,old,good +A13,0.0,0.06844943325630021,A72,A152,male,old,good +A11,0.14285714285714285,0.12809508088478047,A71,A153,female,old,good +A12,0.053571428571428575,0.11439418950148562,A72,A152,female,old,good +A11,0.08928571428571427,0.022229558710245404,A73,A152,male,old,bad +A14,0.5714285714285714,0.4337515131506548,A73,A152,female,old,good +A14,0.3571428571428571,0.19880048420820953,A73,A153,male,old,good +A12,0.14285714285714285,0.39738087377572356,A71,A151,female,young,good +A14,0.14285714285714285,0.07180587652690656,A75,A153,male,old,bad +A11,0.14285714285714285,0.055683944095961266,A75,A152,male,old,good +A12,0.08928571428571427,0.02971277649389237,A73,A152,female,old,good +A12,0.4642857142857143,0.27423792230659183,A71,A152,male,old,bad +A11,0.4642857142857143,0.11863101133487398,A74,A151,female,young,bad +A12,0.14285714285714285,0.1496093320127655,A72,A151,female,young,bad +A14,0.3035714285714286,0.6839440959612633,A75,A153,male,old,bad +A11,0.14285714285714285,0.045779685264663805,A73,A152,male,old,bad 
+A12,0.4107142857142857,0.20166171453725104,A73,A152,male,old,bad
+A14,0.08928571428571427,0.19709475074281943,A75,A152,male,old,good
+A12,0.24999999999999997,0.09232970177176185,A72,A152,male,old,bad
+A12,0.5714285714285714,0.5286123032904149,A74,A152,male,old,good
diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS-train.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS-train.csv
new file mode 100644
index 0000000..b15c2c9
--- /dev/null
+++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS-train.csv
@@ -0,0 +1,501 @@
+status,month,credit_amount,employment,housing,sex,age,credit
+A11,0.5714285714285714,0.20408275558490152,A71,A152,male,old,good
+A14,0.08928571428571427,0.18306371739848135,A73,A151,male,old,good
+A14,0.24999999999999997,0.12462859029382635,A73,A152,male,old,good
+A12,0.14285714285714285,0.09601628700341147,A72,A152,male,old,good
+A14,1.0,0.5566193463189171,A75,A152,male,old,good
+A14,0.3571428571428571,0.39303400462198745,A73,A152,male,old,good
+A13,0.14285714285714285,0.008748761967646089,A73,A151,female,old,good
+A14,0.08928571428571427,0.12418840101243535,A75,A153,male,old,good
+A14,0.4642857142857143,0.25096291405304283,A74,A151,female,young,good
+A12,0.7857142857142857,0.3471992956971498,A74,A152,male,young,bad
+A12,0.5714285714285714,0.48332783096731596,A72,A151,male,old,bad
+A11,0.3571428571428571,0.1816881258941345,A74,A152,male,old,bad
+A12,0.2857142857142857,0.3421371189611533,A71,A152,male,old,good
+A11,0.4642857142857143,0.1250137559150435,A75,A152,male,old,good
+A12,0.5714285714285714,0.14141080664685815,A72,A152,male,old,bad
+A14,0.03571428571428571,0.029327610872675252,A73,A152,male,old,good
+A11,0.3035714285714286,0.19329811819082207,A74,A152,male,young,good
+A14,0.4642857142857143,0.39809618135798397,A71,A152,female,old,bad
+A11,0.19642857142857142,0.06344228018047761,A73,A151,female,old,good
+A12,0.125,0.24876196764608782,A74,A152,male,old,good
+A11,0.3571428571428571,0.20743919885550788,A73,A152,male,old,good
+A12,0.3571428571428571,0.052327500825354895,A72,A152,male,old,good
+A12,0.7857142857142857,0.28232640035215145,A71,A153,male,old,good
+A14,0.3571428571428571,0.28947947617475517,A73,A152,male,young,good
+A14,0.24999999999999997,0.04666006382744581,A71,A152,female,old,good
+A12,0.7857142857142857,0.32871134587872786,A75,A153,male,old,bad
+A14,0.125,0.06289204357873886,A72,A152,female,old,good
+A12,0.08928571428571427,0.09183448883019699,A74,A151,male,old,good
+A14,0.19642857142857142,0.2638384505337295,A75,A152,female,old,good
+A12,0.7857142857142857,0.3136898866512601,A73,A152,female,young,bad
+A11,0.07142857142857142,0.026466380543633764,A75,A152,male,old,good
+A14,0.24999999999999997,0.02085396720589854,A75,A152,male,old,good
+A14,0.24999999999999997,0.08572686255089688,A74,A152,female,young,bad
+A12,0.3035714285714286,0.05161219324309453,A75,A152,female,old,bad
+A14,0.3571428571428571,0.16424562561901618,A75,A152,male,old,good
+A12,0.3571428571428571,0.6223175965665235,A73,A151,female,young,bad
+A12,0.08928571428571427,0.04974138879718279,A75,A152,male,old,good
+A14,0.24999999999999997,0.19808517662594918,A74,A152,male,old,good
+A11,0.3571428571428571,0.11604489930670188,A71,A152,male,old,bad
+A14,0.3571428571428571,0.07059535600308132,A72,A152,female,old,good
+A13,0.5714285714285714,0.21789369428854408,A73,A152,male,old,bad
+A11,0.4642857142857143,0.19847034224716628,A73,A152,male,old,good
+A14,0.14285714285714285,0.06289204357873886,A75,A152,male,old,good
+A14,0.24999999999999997,0.08528667326950588,A73,A152,male,young,good
+A13,0.14285714285714285,0.1101023440079234,A73,A152,female,old,good
+A12,0.14285714285714285,0.03224386486189061,A72,A152,female,young,bad
+A11,0.24999999999999997,0.06025090789039286,A73,A152,male,old,bad
+A12,0.5714285714285714,0.2500825354902608,A72,A152,female,old,good
+A11,0.14285714285714285,0.025200836359634642,A73,A152,male,old,good
+A11,0.14285714285714285,0.05716958292065588,A73,A152,male,young,good
+A14,0.24999999999999997,0.08880818752063387,A73,A152,female,old,bad
+A12,0.7857142857142857,0.5342797402883239,A72,A152,female,old,bad
+A11,0.24999999999999997,0.27797953119841534,A75,A153,male,old,good
+A11,0.03571428571428571,0.08924837680202487,A71,A153,male,old,good
+A11,0.3571428571428571,0.12044679212061186,A72,A152,female,old,bad
+A14,0.08928571428571427,0.06261692527786948,A73,A151,female,old,good
+A12,0.7857142857142857,0.6558270056124132,A71,A153,male,old,good
+A14,0.24999999999999997,0.13271706833938596,A74,A152,male,old,good
+A14,0.1607142857142857,0.06377242214152085,A71,A152,female,old,good
+A11,0.19642857142857142,0.1866952789699571,A75,A152,female,old,good
+A11,0.7857142857142857,0.358093980411577,A73,A152,female,old,bad
+A14,0.3571428571428571,0.0880928799383735,A74,A152,male,old,good
+A11,0.4107142857142857,0.17420490811048753,A73,A152,male,old,good
+A14,0.4107142857142857,0.2678001540662485,A74,A152,male,old,good
+A14,0.10714285714285712,0.07411687025420931,A73,A151,male,old,good
+A14,0.03571428571428571,0.06228678331682623,A73,A152,female,old,good
+A14,0.10714285714285712,0.05397821063057115,A75,A152,male,old,good
+A12,0.19642857142857142,0.13101133487399583,A73,A151,female,old,bad
+A12,0.4642857142857143,0.10454495433036205,A73,A152,female,young,bad
+A12,0.14285714285714285,0.1474634092659844,A74,A152,female,old,good
+A14,0.03571428571428571,0.3582590513920986,A74,A152,male,old,good
+A12,0.3035714285714286,0.13728403213381754,A74,A152,male,old,good
+A11,0.3571428571428571,0.11417409486079015,A74,A152,male,old,good
+A14,0.03571428571428571,0.05249257180587652,A73,A151,male,old,good
+A11,0.24999999999999997,0.04396390447892594,A72,A151,female,young,good
+A14,0.14285714285714285,0.11235831407505227,A71,A152,male,old,bad
+A14,0.08928571428571427,0.06597336854847584,A74,A152,female,old,good
+A11,0.24999999999999997,0.39952679652250467,A75,A153,male,old,bad
+A12,0.08928571428571427,0.03675580499614835,A73,A152,female,old,bad
+A14,0.3571428571428571,0.1153295917244415,A74,A152,male,old,good
+A11,0.3571428571428571,0.07444701221525256,A75,A152,female,old,good
+A11,0.125,0.2011114779355123,A73,A151,male,old,good
+A14,0.24999999999999997,0.0482007263123143,A72,A151,female,young,good
+A13,0.3035714285714286,0.14707824364476726,A73,A152,female,old,good
+A12,0.08928571428571427,0.1620446792120612,A73,A152,female,old,good
+A11,0.03571428571428571,0.24573566633652472,A72,A152,male,old,good
+A12,0.03571428571428571,0.7871134587872785,A71,A152,male,young,bad
+A11,0.5714285714285714,0.2884890502916254,A75,A153,male,old,good
+A14,0.3571428571428571,0.0734565863321228,A74,A152,male,old,good
+A11,0.14285714285714285,0.18377902498074172,A73,A152,male,old,good
+A14,0.14285714285714285,0.1380543633762518,A75,A152,male,old,good
+A11,0.14285714285714285,0.10300429184549356,A73,A151,male,old,good
+A12,0.14285714285714285,0.09640145262462858,A74,A151,male,old,good
+A14,0.3571428571428571,0.09084406294706723,A73,A151,male,old,good
+A14,0.14285714285714285,0.06839440959612633,A72,A152,female,old,good
+A12,0.03571428571428571,0.052767690106745896,A71,A152,male,old,bad
+A12,0.7857142857142857,0.20518322878837902,A74,A152,male,old,good
+A11,0.3571428571428571,0.4110817651590184,A72,A152,female,old,good
+A11,0.24999999999999997,0.1870804445911742,A72,A151,female,young,good
+A11,0.24999999999999997,0.08968856608341587,A74,A152,male,old,good
+A12,0.24999999999999997,0.09304500935402223,A73,A152,male,old,good
+A14,0.19642857142857142,0.034334763948497854,A72,A152,female,young,good
+A14,0.4107142857142857,0.2718168812589414,A75,A152,male,old,good
+A14,0.5714285714285714,0.5627269725982172,A75,A153,male,old,good
+A12,0.3571428571428571,0.22565203037306042,A73,A152,female,old,good
+A14,0.3571428571428571,0.06195664135578298,A74,A152,female,old,good
+A12,0.4642857142857143,0.1786068009243975,A73,A152,male,old,good
+A11,0.6428571428571428,0.3162759986794322,A73,A152,male,old,bad
+A11,0.4285714285714286,0.20666886761307363,A73,A152,male,old,bad
+A14,0.5714285714285714,0.1701331572576208,A75,A152,male,old,good
+A12,0.24999999999999997,0.13051612193243095,A73,A152,male,old,good
+A12,0.5714285714285714,0.11742049081104876,A72,A151,male,old,bad
+A11,0.3571428571428571,0.07752833718498954,A72,A151,female,old,bad
+A14,0.3571428571428571,0.1781666116430065,A74,A152,female,young,good
+A12,1.0,0.39440959612633436,A73,A152,male,old,good
+A14,0.24999999999999997,0.10938703642566304,A73,A152,female,young,good
+A11,0.3571428571428571,0.059590623968306375,A71,A153,male,old,bad
+A14,0.3214285714285714,0.05683944095961263,A74,A151,female,young,good
+A14,0.03571428571428571,0.17981732144822274,A73,A151,male,old,good
+A12,0.14285714285714285,0.018432926158248045,A73,A151,male,young,good
+A12,0.625,0.25767580059425554,A74,A152,male,young,bad
+A14,0.3571428571428571,0.056564322658743255,A75,A152,male,old,good
+A14,0.14285714285714285,0.08352591614394189,A73,A151,male,young,good
+A11,0.3571428571428571,0.17051832287883792,A72,A153,male,old,bad
+A12,0.08928571428571427,0.0627819962583911,A73,A152,male,old,good
+A14,0.24999999999999997,0.17453505007153078,A75,A152,male,old,good
+A14,0.10714285714285712,0.10432485968966655,A72,A151,female,young,good
+A14,0.19642857142857142,0.06988004842082095,A75,A152,male,old,good
+A14,0.3571428571428571,0.3627159678661825,A73,A152,male,old,good
+A14,0.24999999999999997,0.09909761197314845,A72,A152,male,old,good
+A11,0.3571428571428571,0.15109497083746012,A73,A152,male,young,bad
+A11,0.3571428571428571,0.18763068119291296,A73,A152,female,old,good
+A11,0.3571428571428571,0.06558820292725871,A74,A151,female,young,bad
+A11,0.08928571428571427,0.04886101023440079,A73,A152,male,young,good
+A14,0.4642857142857143,0.3037856278199626,A74,A152,female,young,good
+A11,0.24999999999999997,0.12930560140860572,A73,A153,male,old,bad
+A13,0.3571428571428571,0.19252778694838782,A72,A152,female,old,good
+A14,0.14285714285714285,0.15456146142841423,A74,A152,male,old,good
+A11,0.03571428571428571,0.022669747991636405,A72,A152,male,old,good
+A11,0.24999999999999997,0.09298998569384835,A72,A153,male,old,good
+A12,0.5714285714285714,0.1140640475404424,A74,A151,male,young,good
+A12,0.7857142857142857,0.6577528337184989,A73,A152,male,old,good
+A14,0.3571428571428571,0.28925938153405967,A75,A153,male,old,good
+A14,0.24999999999999997,0.010069329811819083,A71,A151,female,young,bad
+A14,0.03571428571428571,0.23995818201826785,A72,A152,female,old,bad
+A12,0.14285714285714285,0.13821943435677342,A75,A152,female,young,bad
+A14,0.4642857142857143,0.2213051612193243,A73,A152,male,old,good
+A12,0.625,0.6333223286012986,A74,A151,male,old,good
+A12,0.5714285714285714,0.10867172884340266,A75,A153,male,old,bad
+A13,0.14285714285714285,0.06734896005282272,A72,A152,female,old,good
+A14,0.14285714285714285,0.06338725652030373,A74,A151,female,old,good
+A12,0.24999999999999997,0.09150434686915374,A72,A152,male,old,good
+A14,0.17857142857142858,0.030373060415978873,A73,A152,male,old,good
+A13,0.19642857142857142,0.05617915703752613,A73,A153,male,old,bad
+A13,0.3571428571428571,0.055463849455265765,A73,A152,female,old,good
+A14,0.14285714285714285,0.18240343347639487,A72,A152,male,old,good
+A11,0.14285714285714285,0.17255419830527127,A75,A153,male,old,bad
+A13,0.14285714285714285,0.1098822493672279,A73,A152,female,old,good
+A14,0.3571428571428571,0.30560140860570045,A73,A152,male,old,good
+A14,0.19642857142857142,0.03906679872345109,A74,A152,female,old,good
+A12,0.24999999999999997,0.044349070100143065,A75,A152,male,old,bad
+A13,0.6785714285714286,0.3322878837900297,A72,A152,male,old,good
+A14,0.08928571428571427,0.13772422141520854,A75,A152,male,old,good
+A14,0.10714285714285712,0.10784637394079453,A73,A151,male,young,bad
+A11,0.7857142857142857,0.5528227137669197,A74,A153,male,old,bad
+A12,0.5714285714285714,0.7740728513260702,A75,A153,male,old,bad
+A12,0.4642857142857143,0.2200396170353252,A71,A152,male,old,bad
+A14,0.3571428571428571,0.03736106525806096,A74,A152,male,old,good
+A12,0.08928571428571427,0.039011775063277215,A73,A152,female,old,bad
+A14,0.3035714285714286,0.13931990756025092,A74,A152,female,old,good
+A12,0.7857142857142857,0.44613183668977663,A72,A152,female,old,good
+A14,0.08928571428571427,0.11285352701661715,A72,A151,female,young,good
+A12,0.7857142857142857,0.18245845713656875,A74,A152,male,old,good
+A11,0.24999999999999997,0.20446792120611865,A75,A151,female,old,bad
+A14,0.03571428571428571,0.05430835259161439,A73,A152,female,old,good
+A11,0.24999999999999997,0.03994717728623308,A72,A152,female,young,bad
+A11,0.3571428571428571,0.03659073401562672,A75,A152,female,old,bad
+A13,0.3571428571428571,0.03835149114119071,A74,A153,male,old,bad
+A12,0.3571428571428571,0.14168592494772753,A74,A152,male,old,good
+A14,0.3035714285714286,0.11213821943435677,A72,A152,female,young,good
+A14,0.10714285714285712,0.145867723120942,A72,A151,female,old,good
+A12,0.7321428571428571,0.23803235391218225,A71,A152,male,old,good
+A14,0.7857142857142857,0.3845053372950369,A75,A152,male,old,good
+A11,0.5714285714285714,0.13733905579399142,A75,A152,male,old,bad
+A12,0.3571428571428571,0.08215032463959503,A75,A152,male,old,good
+A11,0.08928571428571427,0.06140640475404424,A72,A151,female,young,bad
+A14,0.24999999999999997,0.06905469351821283,A73,A153,male,old,good
+A11,0.03571428571428571,0.005117200396170352,A72,A152,female,old,good
+A14,0.3571428571428571,0.10856168152305491,A75,A152,male,old,good
+A14,0.14285714285714285,0.06850445691647408,A75,A152,male,old,good
+A13,0.19642857142857142,0.11428414218113787,A72,A152,female,young,bad
+A11,0.7321428571428571,0.6364036535710355,A75,A151,male,old,bad
+A12,0.14285714285714285,0.06789919665456146,A73,A152,male,young,bad
+A14,0.5714285714285714,0.4844833278309673,A73,A153,male,old,good
+A14,0.14285714285714285,0.07730824254429404,A75,A152,male,old,good
+A14,0.24999999999999997,0.06619346318917134,A72,A152,female,old,good
+A11,0.14285714285714285,0.053813139650049524,A73,A152,female,young,bad
+A12,0.03571428571428571,0.037526136238582586,A74,A152,female,old,good
+A12,0.5714285714285714,0.4311654011224827,A75,A152,male,old,bad
+A14,0.03571428571428571,0.06030593155056674,A75,A153,male,old,good
+A12,0.5714285714285714,0.1437218003741609,A75,A153,male,old,good
+A11,0.3571428571428571,0.15808297567954221,A72,A152,female,old,bad
+A12,1.0,0.3938593595245956,A72,A152,female,young,bad
+A12,0.19642857142857142,0.055793991416309016,A73,A151,male,young,bad
+A14,0.03571428571428571,0.07175085286673269,A74,A151,female,young,good
+A12,0.3035714285714286,0.18719049191152196,A74,A152,male,old,good
+A11,0.7857142857142857,0.20254209310003302,A74,A153,male,old,bad
+A11,0.03571428571428571,0.061131286453174866,A72,A152,male,old,good
+A11,0.03571428571428571,0.052437548145702645,A75,A152,male,old,good
+A14,0.14285714285714285,0.18031253438978762,A72,A152,male,old,good
+A14,0.5714285714285714,0.4184549356223176,A73,A152,female,young,bad
+A11,0.3035714285714286,0.17095851216022892,A72,A152,female,old,good
+A13,0.24999999999999997,0.10179377132166832,A73,A152,male,old,bad
+A14,0.24999999999999997,0.0505667436997909,A73,A152,male,old,good
+A14,0.5714285714285714,0.47854077253218885,A74,A152,male,old,good
+A11,0.24999999999999997,0.09276989105315285,A74,A151,male,young,good
+A12,0.3571428571428571,0.1794871794871795,A74,A152,male,old,good
+A12,0.3571428571428571,0.3695939253879168,A74,A151,male,old,good
+A14,0.4642857142857143,0.11461428414218112,A75,A152,male,old,good
+A11,0.24999999999999997,0.09843732805106195,A73,A151,female,young,bad
+A11,0.5714285714285714,0.49636843842852424,A74,A152,male,young,bad
+A14,0.3035714285714286,0.07274127874986244,A75,A152,female,old,good
+A12,0.03571428571428571,0.011224826675470454,A72,A152,male,young,good
+A14,0.5714285714285714,0.33311323869263787,A75,A152,male,old,good
+A14,0.3571428571428571,0.10713106635853417,A74,A152,male,old,good
+A14,0.7857142857142857,0.40601958842302194,A75,A152,male,old,good
+A13,0.10714285714285712,0.05639925167822163,A72,A152,female,young,good
+A11,0.7857142857142857,0.37795752173434577,A73,A153,male,old,bad
+A12,0.3571428571428571,0.6783867062837019,A75,A153,female,old,bad
+A12,0.24999999999999997,0.1391548365797293,A73,A151,male,young,good
+A14,0.0,0.06894464619786508,A74,A152,male,old,good
+A12,0.07142857142857142,0.05430835259161439,A73,A152,female,young,bad
+A14,0.7857142857142857,0.20160669087707717,A73,A152,male,old,bad
+A14,0.3571428571428571,0.15472653240893586,A75,A151,male,old,good
+A12,0.5714285714285714,0.09370529327610873,A75,A153,male,old,bad
+A12,0.4642857142857143,0.19709475074281943,A72,A152,male,young,good
+A14,0.03571428571428571,0.0708704743039507,A72,A152,female,old,good
+A14,0.03571428571428571,0.10223396060305931,A74,A151,male,old,good
+A11,0.03571428571428571,0.00979421151094971,A75,A152,female,old,good
+A12,0.5714285714285714,0.2313744910311434,A73,A152,male,old,bad
+A13,0.3035714285714286,0.1138439528997469,A72,A151,male,old,bad
+A14,0.14285714285714285,0.05023660173874765,A73,A152,male,old,good
+A14,0.08928571428571427,0.037746230879278087,A75,A152,male,old,good
+A11,0.24999999999999997,0.12231759656652359,A71,A152,male,young,bad
+A14,0.19642857142857142,0.07257620776934082,A75,A152,male,old,good
+A12,0.14285714285714285,0.1522504677011115,A72,A151,female,old,good
+A14,0.14285714285714285,0.11450423682183337,A75,A152,male,old,good
+A14,0.14285714285714285,0.10157367668097282,A74,A152,male,old,good
+A12,0.14285714285714285,0.04330362055683944,A74,A152,male,old,good
+A14,0.03571428571428571,0.05425332893144051,A73,A151,male,old,good
+A14,0.3571428571428571,0.21464729833828547,A73,A152,male,old,good
+A14,0.7857142857142857,0.18482447452404535,A73,A152,female,old,good
+A14,0.14285714285714285,0.06393749312204247,A73,A152,female,old,good
+A11,0.125,0.20298228238142402,A73,A152,male,old,good
+A11,0.14285714285714285,0.0702101903818642,A75,A153,male,old,good
+A14,0.3214285714285714,0.1334323759216463,A75,A152,male,old,good
+A12,0.4642857142857143,0.2184989545504567,A73,A152,female,old,good
+A11,0.3571428571428571,0.14713326730494114,A73,A152,male,old,good
+A14,0.24999999999999997,0.08622207549246176,A73,A152,female,old,good
+A13,0.10714285714285712,0.0536480686695279,A73,A152,male,old,good
+A11,0.19642857142857142,0.08776273797733025,A72,A151,female,old,good
+A14,0.03571428571428571,0.0,A73,A152,female,old,good
+A14,0.03571428571428571,0.022559700671288655,A74,A151,male,young,good
+A13,0.03571428571428571,0.025255860019808517,A72,A152,male,old,good
+A11,0.5714285714285714,0.8587542643336634,A71,A152,male,old,good
+A12,0.3571428571428571,0.08732254869593925,A74,A153,female,old,bad
+A11,0.1607142857142857,0.08512160228898426,A72,A152,male,old,good
+A11,0.03571428571428571,0.010894684714427203,A72,A152,female,young,bad
+A12,0.24999999999999997,0.16303510509519095,A72,A151,male,young,good
+A11,0.14285714285714285,0.12815010454495435,A72,A152,male,old,bad
+A11,0.03571428571428571,0.05216242984483327,A75,A153,female,old,bad
+A14,0.3035714285714286,0.26152745680642675,A73,A152,female,old,bad
+A12,0.3571428571428571,0.11461428414218112,A72,A152,male,old,good
+A14,0.19642857142857142,0.055683944095961266,A74,A152,male,old,good
+A12,0.14285714285714285,0.049796412457356665,A75,A152,male,old,good
+A14,0.10714285714285712,0.03543523715197534,A74,A152,female,old,good
+A14,0.14285714285714285,0.16952789699570817,A75,A152,male,old,good
+A12,0.24999999999999997,0.04748541873005392,A73,A152,female,old,good
+A14,0.14285714285714285,0.02580609662154727,A75,A152,male,old,bad
+A12,0.3571428571428571,0.09981291955540883,A71,A152,female,old,bad
+A13,0.4642857142857143,0.18741058655221746,A75,A152,male,old,good
+A11,0.3571428571428571,0.3502255970067129,A71,A153,male,old,good
+A14,0.24999999999999997,0.16391548365797293,A71,A152,male,old,good
+A14,0.19642857142857142,0.17018818091779467,A73,A153,male,old,good
+A11,0.4642857142857143,0.1572576207769341,A72,A152,male,old,bad
+A12,0.08928571428571427,0.011444921316165951,A73,A152,male,young,good
+A14,0.19642857142857142,0.06718388907230109,A73,A153,male,old,good
+A12,0.125,0.05898536370639375,A73,A152,female,old,good
+A11,0.08928571428571427,0.057114559260482006,A75,A152,male,old,good
+A14,0.14285714285714285,0.025145812699460767,A73,A152,male,old,good
+A12,0.19642857142857142,0.058215032463959496,A75,A152,male,old,good
+A14,0.08928571428571427,0.15538681633102236,A73,A152,male,old,good
+A11,0.19642857142857142,0.12440849565313085,A71,A151,female,young,good
+A14,0.03571428571428571,0.0819852536590734,A75,A151,male,old,good
+A14,0.14285714285714285,0.04803565533179267,A73,A151,female,old,bad
+A12,0.125,0.07301639705073182,A72,A152,female,young,good
+A11,0.7321428571428571,0.08776273797733025,A73,A153,male,young,bad
+A14,0.3571428571428571,0.09783206778914934,A74,A152,male,old,good
+A12,0.4107142857142857,0.4439308902828216,A75,A153,female,old,bad
+A12,0.14285714285714285,0.05749972488169913,A72,A151,female,young,bad
+A12,0.3571428571428571,0.6431165401122483,A73,A152,male,old,bad
+A14,0.5714285714285714,0.08633212281280951,A73,A153,male,old,bad
+A11,0.19642857142857142,0.0675690546935182,A75,A152,male,old,good
+A14,0.3571428571428571,0.06965995378012545,A73,A152,female,old,good
+A11,0.08928571428571427,0.1037746230879278,A73,A152,male,young,good
+A14,0.19642857142857142,0.19599427753934193,A72,A152,female,young,good
+A14,0.24999999999999997,0.21541762958071972,A73,A152,male,old,bad
+A12,0.3571428571428571,0.20947507428194126,A74,A152,male,old,bad
+A14,0.7857142857142857,0.543468691537361,A73,A153,male,old,bad
+A12,0.03571428571428571,0.012875536480686695,A74,A152,male,old,good
+A12,1.0,0.33223286012985587,A73,A153,male,old,bad
+A11,0.3571428571428571,0.36436667767139874,A72,A152,male,old,bad
+A14,0.19642857142857142,0.07054033234290745,A73,A152,female,old,good
+A14,0.08928571428571427,0.10366457576758006,A73,A152,male,old,good
+A11,0.14285714285714285,0.04715527676901067,A73,A151,male,young,good
+A11,0.2857142857142857,0.10795642126114229,A74,A152,male,old,good
+A14,0.19642857142857142,0.16154946627049632,A74,A151,female,young,good
+A11,0.7678571428571428,0.5762077693408165,A72,A152,female,old,good
+A14,0.053571428571428575,0.02641135688345989,A75,A151,male,old,good
+A14,0.14285714285714285,0.10030813249697369,A73,A152,female,old,good
+A11,0.3035714285714286,0.0176625949158138,A75,A152,male,old,good
+A13,0.19642857142857142,0.11609992296687575,A73,A152,male,old,good
+A11,0.14285714285714285,0.06019588423021899,A73,A152,male,old,good
+A11,0.6785714285714286,0.3809838230439089,A74,A152,female,old,bad
+A13,0.6785714285714286,0.2501375591504347,A75,A153,male,old,good
+A11,0.6785714285714286,0.20441289754594477,A72,A152,male,old,bad
+A14,0.125,0.38395510069329813,A73,A152,male,old,good
+A12,0.3571428571428571,0.05315285572796302,A72,A152,male,old,bad
+A14,0.14285714285714285,0.13744910311433917,A75,A153,female,old,good
+A12,0.6785714285714286,0.49702872235061074,A71,A153,male,old,good
+A14,0.4107142857142857,0.23528117090348852,A72,A152,male,old,good
+A14,0.3035714285714286,0.2750082535490261,A73,A152,male,old,good
+A14,0.3571428571428571,0.16127434796962695,A72,A152,female,old,good
+A14,0.3571428571428571,0.02569604930119952,A75,A152,male,old,good
+A14,0.14285714285714285,0.055683944095961266,A73,A152,male,young,good
+A14,0.14285714285714285,0.08990866072411137,A75,A152,male,old,good
+A12,0.24999999999999997,0.09210960713106636,A72,A151,female,old,bad
+A11,0.7857142857142857,0.39760096841641906,A74,A153,male,old,good
+A13,0.14285714285714285,0.03791130185979971,A74,A152,male,old,bad
+A12,0.14285714285714285,0.03251898316275999,A74,A151,female,young,good
+A12,0.3571428571428571,0.21255639925167824,A72,A151,female,old,bad
+A13,0.3571428571428571,0.2697259821723341,A74,A152,male,young,good
+A11,0.5178571428571429,0.22180037416088919,A73,A152,female,young,bad
+A12,0.14285714285714285,0.07246616044899307,A73,A152,female,young,good
+A11,0.14285714285714285,0.004897105755474855,A75,A152,male,old,good
+A11,0.19642857142857142,0.05639925167822163,A73,A151,female,young,bad
+A12,0.14285714285714285,0.021404203807637284,A73,A152,male,old,bad
+A14,0.5714285714285714,0.1127434796962694,A75,A152,male,old,good
+A11,0.3571428571428571,0.086552217453505,A71,A152,male,old,bad
+A11,0.7857142857142857,0.41339275888632115,A75,A153,male,old,bad
+A14,0.03571428571428571,0.07400682293386156,A74,A152,male,old,good
+A11,0.14285714285714285,0.007428194123473095,A74,A152,female,old,good
+A14,0.14285714285714285,0.028942445251458126,A73,A152,male,old,good
+A12,0.3571428571428571,0.15511169803015298,A75,A153,male,old,good
+A11,0.08928571428571427,0.06129635743369649,A74,A152,male,old,good
+A14,0.03571428571428571,0.0534279740288324,A73,A152,male,old,good
+A11,0.07142857142857142,0.17321448222735777,A74,A152,male,old,good
+A11,0.24999999999999997,0.21475734565863322,A73,A152,male,old,bad
+A12,0.19642857142857142,0.08407615274568064,A72,A151,female,old,bad
+A14,0.08928571428571427,0.05425332893144051,A72,A151,female,young,good
+A14,0.03571428571428571,0.08341586882359414,A73,A151,male,young,good
+A11,0.4642857142857143,0.43039506988004844,A72,A152,male,young,good
+A12,0.08928571428571427,0.268460437988335,A75,A153,female,old,bad
+A14,0.3571428571428571,0.4620886981402003,A72,A152,male,old,bad
+A11,0.3571428571428571,0.21563772422141522,A73,A152,male,old,good
+A14,0.14285714285714285,0.09271486739297898,A75,A152,male,old,good
+A14,0.3571428571428571,0.11813579839330911,A75,A152,male,old,bad
+A14,0.19642857142857142,0.06652360515021459,A73,A152,female,old,good
+A12,0.14285714285714285,0.023935292175635527,A74,A152,male,young,bad
+A11,0.7857142857142857,0.2396280400572246,A75,A153,male,young,bad
+A14,0.125,0.10410476504897105,A75,A152,male,old,good
+A11,0.19642857142857142,0.03851656212171234,A75,A151,male,old,bad
+A12,0.21428571428571427,0.050896885660834154,A71,A153,male,old,good
+A11,0.14285714285714285,0.06866952789699571,A73,A152,female,young,good
+A14,0.24999999999999997,0.09541102674149884,A73,A153,male,old,good
+A12,0.5714285714285714,0.19555408825795093,A73,A152,female,old,bad
+A11,0.3571428571428571,0.09557609772202047,A73,A151,male,young,bad
+A14,0.3571428571428571,0.21398701441619897,A73,A152,male,old,good
+A14,0.14285714285714285,0.024815670738417523,A73,A152,male,old,good
+A14,0.8928571428571428,0.5054473423572136,A73,A152,male,old,good
+A11,0.24999999999999997,0.16176956091119182,A73,A152,female,young,bad
+A12,0.1607142857142857,0.034774953229888855,A72,A152,male,young,good
+A12,0.3571428571428571,0.13810938703642567,A75,A153,male,old,good
+A14,0.19642857142857142,0.10845163420270716,A73,A151,female,young,good
+A11,0.14285714285714285,0.020688896225376913,A73,A152,female,young,bad
+A12,0.7857142857142857,1.0,A73,A152,female,old,bad
+A11,0.24999999999999997,0.0532078793881369,A73,A152,male,old,bad
+A12,0.053571428571428575,0.12798503356443272,A73,A152,male,old,good
+A11,0.03571428571428571,0.13189171343677783,A73,A151,male,old,good
+A14,0.4107142857142857,0.12765489160338947,A73,A151,female,young,bad
+A13,0.3571428571428571,0.1594585671838891,A73,A152,male,old,good
+A11,0.03571428571428571,0.022889842632331906,A74,A152,female,old,good
+A14,0.14285714285714285,0.051447122262572906,A73,A152,female,old,good
+A14,0.4642857142857143,0.3138549576317817,A74,A152,male,old,good
+A12,0.7321428571428571,0.24738637614174097,A72,A152,male,young,bad
+A14,0.19642857142857142,0.0529877847474414,A75,A152,male,old,good
+A14,0.03571428571428571,0.09067899196654561,A73,A152,male,old,good
+A14,0.7857142857142857,0.6239683063717398,A73,A151,female,young,bad
+A14,0.0,0.01931330472103004,A72,A151,female,young,good
+A12,0.24999999999999997,0.05474854187300539,A73,A152,male,old,bad
+A14,0.5714285714285714,0.30218994167492025,A74,A152,male,old,good
+A12,0.17857142857142858,0.06382744580169472,A75,A152,male,old,good
+A11,0.14285714285714285,0.035765379113018594,A73,A152,male,young,bad
+A14,0.03571428571428571,0.0061626499394739735,A73,A152,female,old,good
+A14,0.19642857142857142,0.09513590844062947,A75,A152,male,old,good
+A14,0.5714285714285714,0.17051832287883792,A73,A152,female,old,bad
+A14,0.14285714285714285,0.030097942115109497,A75,A152,female,old,bad
+A14,0.03571428571428571,0.014746340926598437,A73,A152,female,old,good
+A11,0.3571428571428571,0.1129635743369649,A75,A152,male,old,bad
+A11,0.5714285714285714,0.1354682513480797,A75,A152,male,old,bad
+A14,0.3571428571428571,0.06734896005282272,A72,A152,male,old,good
+A14,0.24999999999999997,0.08638714647298339,A73,A152,male,old,good
+A14,0.3571428571428571,0.09227467811158797,A73,A152,female,old,good
+A11,0.19642857142857142,0.06509298998569385,A73,A151,female,young,good
+A12,0.14285714285714285,0.03895675140310334,A74,A152,male,old,good
+A14,0.5714285714285714,0.19725982172334106,A75,A152,female,old,good
+A14,0.14285714285714285,0.028227137669197756,A73,A152,female,old,good
+A14,0.03571428571428571,0.15010454495433037,A73,A152,male,old,good
+A14,0.14285714285714285,0.05727963024100363,A73,A152,female,old,good
+A11,0.14285714285714285,0.0233300319137229,A74,A152,male,young,bad
+A14,1.0,0.3453835149114119,A73,A153,male,old,good
+A14,0.14285714285714285,0.03994717728623308,A75,A152,male,old,good
+A13,0.10714285714285712,0.20353251898316277,A72,A152,male,old,good
+A12,0.10714285714285712,0.043908880818752064,A73,A152,male,young,good
+A14,0.3571428571428571,0.13315725762077696,A75,A152,male,old,good
+A11,0.4642857142857143,0.23841751953339935,A73,A152,male,old,good
+A14,0.3571428571428571,0.11692527786948388,A73,A152,male,old,good
+A11,0.14285714285714285,0.056784417299438755,A73,A151,female,young,bad
+A14,0.3571428571428571,0.19296797622977882,A75,A153,female,old,good
+A12,0.3571428571428571,0.2195994277539342,A73,A152,male,old,bad
+A11,0.3571428571428571,0.3482447452404534,A71,A153,male,old,good
+A11,0.14285714285714285,0.10294926818531969,A73,A152,male,old,good
+A14,0.14285714285714285,0.10570045119401342,A72,A152,female,old,good
+A14,0.19642857142857142,0.18399911962143722,A72,A152,female,old,good
+A14,0.5714285714285714,0.24408495653130846,A73,A153,male,old,good
+A14,0.19642857142857142,0.2424892703862661,A73,A152,male,old,good
+A11,0.17857142857142858,0.48024650599757895,A75,A152,male,old,bad
+A14,0.24999999999999997,0.04429404643996919,A72,A152,female,old,good
+A11,0.7857142857142857,0.15412127214702323,A73,A152,male,old,bad
+A12,0.4642857142857143,0.17558049961483438,A73,A151,female,young,bad
+A14,0.3571428571428571,0.26961593485198637,A75,A152,male,old,good
+A14,0.14285714285714285,0.09425552987784747,A74,A151,male,old,good
+A12,0.24999999999999997,0.18377902498074172,A71,A152,male,old,good
+A14,0.5714285714285714,0.3076923076923077,A75,A152,male,old,good
+A14,0.3571428571428571,0.06707384175195334,A75,A151,male,old,good
+A14,0.3571428571428571,0.12947067238912735,A73,A151,female,old,good
+A11,0.14285714285714285,0.18713546825134808,A73,A152,male,old,good
+A11,0.3571428571428571,0.15951359084406297,A72,A153,male,young,good
+A11,0.3571428571428571,0.21239132827115662,A75,A151,male,young,bad
+A12,0.08928571428571427,0.06531308462638935,A74,A152,male,old,bad
+A13,0.3571428571428571,0.062011665015956854,A75,A153,female,old,good
+A11,0.08928571428571427,0.06448772972378122,A72,A153,male,old,bad
+A11,0.4642857142857143,0.18553978210630573,A75,A151,female,old,good
+A11,0.3571428571428571,0.17029822823814242,A75,A151,male,old,bad
+A12,0.14285714285714285,0.020633872565203038,A72,A152,male,old,good
+A12,0.14285714285714285,0.040332342907450205,A72,A151,female,young,good
+A12,0.4642857142857143,0.15081985253659075,A75,A152,female,young,good
+A14,0.24999999999999997,0.049466270496313414,A73,A152,male,old,good
+A12,0.08928571428571427,0.001430615164520744,A73,A151,male,young,good
+A11,0.3571428571428571,0.06168152305491362,A73,A151,female,young,bad
+A11,0.14285714285714285,0.029987894794761747,A72,A152,female,old,bad
+A11,0.3571428571428571,0.056784417299438755,A73,A152,female,old,bad
+A12,0.03571428571428571,0.0064927919005172245,A75,A152,male,old,good
+A14,0.3571428571428571,0.055463849455265765,A74,A152,male,young,good
+A14,0.19642857142857142,0.10652580609662154,A74,A151,female,old,good
+A14,0.3571428571428571,0.14223616154946628,A75,A152,male,old,good
+A11,0.3571428571428571,0.18609001870804448,A73,A151,female,young,good
+A14,0.3571428571428571,0.05838010344448112,A74,A152,male,old,good
+A11,0.5714285714285714,0.11543963904478925,A73,A152,male,old,good
+A12,0.03571428571428571,0.010069329811819083,A72,A151,female,young,bad
+A11,0.19642857142857142,0.20408275558490152,A73,A152,female,old,bad
+A12,0.19642857142857142,0.13101133487399583,A73,A152,female,young,good
+A14,0.14285714285714285,0.12077693408165512,A72,A151,male,old,good
+A11,0.10714285714285712,0.04335864421701331,A74,A152,male,old,good
+A14,0.7857142857142857,0.473643666776714,A74,A153,male,old,good
+A13,0.3571428571428571,0.05639925167822163,A73,A152,male,old,good
+A11,0.3571428571428571,0.3476394849785408,A73,A152,male,young,good
+A13,0.3571428571428571,0.06019588423021899,A74,A152,male,old,bad
+A12,0.4107142857142857,0.3144602178936943,A75,A152,male,old,good
+A14,0.14285714285714285,0.020248706943985912,A75,A152,male,old,good
+A14,0.10714285714285712,0.026246285902938263,A75,A153,male,old,good
+A12,0.19642857142857142,0.06569825024760646,A72,A152,male,young,good
+A11,0.5714285714285714,0.18542973478595798,A73,A152,male,old,good
+A11,0.10714285714285712,0.10955210740618465,A72,A151,male,old,good
+A12,1.0,0.799603829646748,A75,A153,female,old,bad
+A14,0.3571428571428571,0.09750192582810609,A73,A152,female,old,good
+A13,0.14285714285714285,0.019753494002421042,A72,A152,female,old,bad
+A11,0.3571428571428571,0.15247056234180698,A73,A151,male,young,good
+A11,0.24999999999999997,0.2272477165181028,A73,A152,male,old,good
+A14,0.10714285714285712,0.05827005612413337,A73,A152,male,old,bad
+A11,0.3571428571428571,0.14185099592824915,A73,A152,male,young,good
+A12,0.24999999999999997,0.08501155496863651,A75,A151,female,old,good
+A14,0.24999999999999997,0.07059535600308132,A72,A152,male,old,bad
+A14,0.3571428571428571,0.26702982282381427,A72,A153,male,old,good
+A11,0.4642857142857143,0.5707604269836029,A75,A153,male,old,good
+A14,0.14285714285714285,0.06250687795752173,A73,A152,female,old,bad
+A14,0.3571428571428571,0.23885770881479035,A73,A152,male,old,bad
+A13,0.14285714285714285,0.17326950588753165,A75,A152,male,old,good
+A13,0.03571428571428571,0.04385385715857819,A73,A152,female,old,good
+A11,0.7857142857142857,0.4091009133927589,A74,A151,female,old,bad
diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS-val.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS-val.csv
new file mode 100644
index 0000000..757c07f
--- /dev/null
+++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS-val.csv
@@ -0,0 +1,301 @@
+status,month,credit_amount,employment,housing,sex,age,credit
+A11,0.24999999999999997,0.07169582920655881,A73,A152,male,old,bad
+A14,0.19642857142857142,0.06107626279300099,A73,A152,male,old,good
+A12,0.07142857142857142,0.02806206668867613,A74,A152,female,old,good
+A12,0.24999999999999997,0.14575767580059426,A75,A152,male,old,good
+A12,0.7321428571428571,0.15302079894354573,A73,A151,male,young,bad
+A12,0.3571428571428571,0.24551557169582922,A73,A152,male,old,good
+A12,0.24999999999999997,0.090073731704633,A73,A152,male,old,good
+A12,0.5714285714285714,0.11131286453174864,A74,A152,male,old,good
+A12,0.14285714285714285,0.035105095190932106,A75,A152,male,old,bad
+A11,0.14285714285714285,0.07538241443820842,A73,A152,female,old,good
+A14,0.19642857142857142,0.1352481567073842,A72,A152,male,old,good
+A12,0.24999999999999997,0.03389457466710685,A73,A152,male,young,good
+A12,0.07142857142857142,0.06404754044239022,A73,A152,male,old,good
+A14,0.08928571428571427,0.05849015076482887,A75,A152,male,young,good
+A14,0.3571428571428571,0.12809508088478047,A75,A152,male,old,good
+A14,0.19642857142857142,0.1825685044569165,A75,A151,female,old,good
+A14,0.3571428571428571,0.06289204357873886,A73,A152,male,old,good
+A13,0.03571428571428571,0.10267414988445031,A73,A152,male,old,good
+A14,0.14285714285714285,0.09695168922636734,A74,A152,female,old,good
+A14,0.3571428571428571,0.09298998569384835,A75,A152,male,old,good
+A13,0.03571428571428571,0.05904038736656762,A75,A152,male,old,good
+A11,0.5714285714285714,0.09502586112028172,A75,A152,male,old,bad
+A11,0.14285714285714285,0.047210300429184546,A74,A152,male,old,bad
+A14,0.19642857142857142,0.05777484318256851,A75,A153,male,old,good
+A11,0.03571428571428571,0.0505667436997909,A75,A152,male,old,good
+A11,0.5714285714285714,0.16501595686145043,A74,A153,male,old,good
+A14,0.625,0.1275998679432156,A73,A152,male,young,good
+A12,0.08928571428571427,0.07147573456586331,A72,A152,male,old,good
+A14,0.03571428571428571,0.024760647078243648,A75,A153,male,old,good
+A12,0.14285714285714285,0.32067789149334214,A74,A152,male,old,good
+A12,0.08928571428571427,0.05260261912622427,A75,A152,female,young,good
+A12,0.3571428571428571,0.060801144492131615,A72,A152,female,young,bad
+A14,0.3571428571428571,0.15010454495433037,A73,A152,male,old,good
+A14,0.3571428571428571,0.07015516672169032,A74,A152,female,old,good
+A14,0.03571428571428571,0.037526136238582586,A73,A152,female,young,good
+A12,0.4107142857142857,0.12534389787608674,A72,A152,female,old,good
+A11,0.24999999999999997,0.15758776273797734,A72,A151,female,old,bad
+A13,0.4642857142857143,0.09122922856828436,A75,A152,male,old,bad
+A14,0.14285714285714285,0.05447342357213601,A75,A152,female,old,good
+A14,0.14285714285714285,0.02569604930119952,A75,A152,male,old,good
+A12,0.19642857142857142,0.0695499064597777,A73,A152,male,young,good
+A14,0.19642857142857142,0.08688235941454825,A75,A152,male,old,good
+A14,0.14285714285714285,0.02145922746781116,A73,A152,male,old,good
+A12,0.5714285714285714,0.39517992736876856,A73,A151,female,old,good
+A14,0.14285714285714285,0.0710905689446462,A74,A152,male,old,good
+A11,0.5714285714285714,0.5160669087707714,A74,A152,male,young,bad
+A14,0.03571428571428571,0.00968416419060196,A75,A152,male,old,good
+A14,0.19642857142857142,0.24061846594035435,A73,A152,male,old,bad
+A11,0.24999999999999997,0.45234951028942444,A73,A151,female,young,good
+A12,0.5714285714285714,0.1904368878617806,A73,A152,male,old,good
+A14,0.7857142857142857,0.6877407285132606,A74,A152,male,old,good
+A11,0.6785714285714286,0.4199405744470122,A74,A153,male,old,good
+A14,1.0,0.5444040937603168,A74,A152,female,young,good
+A12,0.3571428571428571,0.05480356553317926,A72,A152,male,young,bad
+A14,0.5714285714285714,0.1840541432816111,A75,A152,male,old,good
+A12,0.14285714285714285,0.18526466380543635,A75,A151,male,old,good
+A14,0.14285714285714285,0.034995047870584356,A73,A152,female,young,good
+A11,0.3571428571428571,0.07131066358534169,A74,A151,male,young,bad
+A11,0.14285714285714285,0.10570045119401342,A73,A152,male,old,good
+A14,0.5714285714285714,0.13002090899086607,A73,A152,male,old,good
+A14,0.14285714285714285,0.04682513480796743,A73,A152,male,old,good
+A12,0.24999999999999997,0.1513700891383295,A74,A151,female,old,good
+A14,0.14285714285714285,0.1116430064927919,A73,A153,male,old,good
+A12,0.10714285714285712,0.06993507208099482,A73,A152,male,old,good
+A12,0.24999999999999997,0.04357873885770881,A73,A152,female,old,bad
+A12,0.7857142857142857,0.3538571585781886,A73,A153,male,old,good
+A11,0.5714285714285714,0.10426983602949268,A74,A152,male,young,bad
+A11,0.3571428571428571,0.2512930560140861,A74,A152,male,old,bad
+A12,0.7857142857142857,0.7797402883239792,A73,A152,male,young,bad
+A11,0.14285714285714285,0.02426543413667877,A75,A152,male,old,bad
+A12,0.24999999999999997,0.05441839991196214,A73,A153,male,old,good
+A12,0.3035714285714286,0.1384945526576428,A75,A151,male,old,bad
+A12,0.24999999999999997,0.33069219764498736,A74,A151,male,old,good
+A11,0.24999999999999997,0.027511830086937385,A71,A152,female,old,bad
+A14,0.14285714285714285,0.06894464619786508,A73,A151,male,old,good
+A11,0.14285714285714285,0.024815670738417523,A73,A152,male,old,good
+A14,0.125,0.04974138879718279,A71,A152,female,old,good
+A14,0.3035714285714286,0.16011885110597557,A75,A152,male,old,good
+A11,0.24999999999999997,0.10311433916584131,A73,A151,female,young,bad
+A12,0.14285714285714285,0.02839220864971938,A73,A152,male,old,bad
+A12,0.5714285714285714,0.3685484758446132,A73,A151,male,old,good
+A13,0.08928571428571427,0.0482007263123143,A75,A152,male,old,good
+A11,0.24999999999999997,0.05315285572796302,A72,A151,female,young,bad
+A11,0.14285714285714285,0.10806646858149004,A73,A152,male,young,good
+A14,0.3571428571428571,0.20864971937933313,A74,A152,male,old,good
+A11,0.19642857142857142,0.04418399911962144,A72,A152,male,old,good
+A12,0.3571428571428571,0.14559260482007264,A72,A152,male,old,good
+A11,0.14285714285714285,0.327335754374381,A73,A151,male,old,bad
+A12,0.3571428571428571,0.41311764058545175,A75,A151,female,old,good
+A11,0.14285714285714285,0.06173654671508748,A74,A152,male,old,bad
+A11,0.4642857142857143,0.33564432706063607,A75,A152,male,old,bad
+A12,0.24999999999999997,0.2304941124683614,A75,A152,male,old,good
+A12,0.7857142857142857,0.8485748872014967,A73,A152,male,young,bad
+A14,0.14285714285714285,0.030483107736326624,A75,A152,male,old,good
+A11,0.3035714285714286,0.07439198855507868,A75,A152,male,old,good
+A12,0.14285714285714285,0.3421371189611533,A71,A152,male,old,bad
+A14,0.24999999999999997,0.06729393639264884,A72,A152,male,old,good
+A14,0.3571428571428571,0.2851876306811929,A71,A151,female,old,good
+A14,0.03571428571428571,0.05436337625178826,A71,A152,male,old,good
+A14,0.5178571428571429,0.13832948167712117,A73,A152,female,old,good
+A12,0.14285714285714285,0.09199955981071861,A73,A152,male,old,bad
+A13,0.4642857142857143,0.1522504677011115,A75,A152,male,old,good
+A11,0.3571428571428571,0.052657642786398146,A72,A151,female,young,bad
+A11,0.21428571428571427,0.13068119291295258,A75,A151,male,old,bad
+A11,0.14285714285714285,0.023880268515461652,A73,A151,male,old,bad
+A12,0.08928571428571427,0.07813359744690217,A72,A152,female,young,bad
+A14,0.03571428571428571,0.02321998459337515,A71,A152,female,old,good
+A14,0.3035714285714286,0.12820512820512822,A72,A152,male,old,bad
+A14,0.3571428571428571,0.3336634752943766,A71,A152,male,old,good
+A11,0.3571428571428571,0.1641906019588423,A72,A151,female,young,bad
+A12,0.10714285714285712,0.3883569935072081,A71,A153,male,old,good
+A12,0.1607142857142857,0.10184879498184218,A72,A152,female,young,good
+A11,0.5714285714285714,0.32898646417959726,A72,A151,female,young,bad
+A13,0.14285714285714285,0.15219544404093763,A73,A152,male,young,good
+A14,0.14285714285714285,0.014966435567293938,A75,A152,male,old,good
+A14,0.14285714285714285,0.0926598437328051,A75,A152,male,old,good
+A14,0.7857142857142857,0.1376141740948608,A75,A152,male,old,good
+A12,0.19642857142857142,0.06943985913942995,A73,A152,male,old,bad
+A12,0.4642857142857143,0.12396830637173985,A75,A152,male,old,good
+A14,0.3571428571428571,0.05705953560030813,A75,A152,female,old,good
+A12,0.5714285714285714,0.6679322108506658,A73,A153,male,old,bad
+A13,0.08928571428571427,0.05981071860900186,A72,A152,male,old,bad
+A11,0.4107142857142857,0.12061186310113349,A75,A152,male,old,good
+A14,0.10714285714285712,0.06129635743369649,A73,A152,female,old,good
+A11,0.3571428571428571,0.11742049081104876,A75,A151,male,old,good
+A11,0.08928571428571427,0.1402553097832068,A73,A151,male,old,good
+A13,0.24999999999999997,0.06575327390778034,A74,A152,male,old,good
+A14,0.03571428571428571,0.08215032463959503,A73,A152,male,old,good
+A13,0.10714285714285712,0.06036095521074061,A74,A152,male,old,good
+A11,0.3571428571428571,0.25420931000330144,A73,A153,male,old,bad
+A14,0.14285714285714285,0.0817651590183779,A74,A152,female,old,good
+A12,0.14285714285714285,0.027731924727632886,A75,A152,male,old,good
+A14,0.5714285714285714,0.18075272367117862,A74,A152,male,old,good
+A14,0.19642857142857142,0.0675690546935182,A73,A152,male,old,good
+A14,0.4642857142857143,0.35721360184879497,A74,A152,male,old,good
+A14,0.5714285714285714,0.37839771101573677,A72,A151,female,young,bad
+A11,0.19642857142857142,0.030593155056674374,A73,A152,female,young,good
+A13,0.3571428571428571,0.14537251017937713,A75,A153,male,old,good
+A12,0.5714285714285714,0.30538131396500495,A73,A152,male,old,good
+A14,0.3571428571428571,0.15709254979641246,A72,A152,male,young,good
+A14,0.19642857142857142,0.06250687795752173,A73,A151,male,old,good
+A14,0.24999999999999997,0.11852096401452623,A73,A152,female,old,good
+A12,0.3571428571428571,0.19962583911081766,A72,A152,male,old,good
+A12,0.3571428571428571,0.15637724221415208,A72,A151,male,young,bad
+A14,0.4285714285714286,0.1371739848134698,A75,A152,male,old,good
+A14,0.14285714285714285,0.07285132607021019,A73,A152,male,old,good
+A11,0.3035714285714286,0.0872125013755915,A73,A152,female,young,bad
+A12,0.3571428571428571,0.13579839330912294,A73,A151,female,young,bad
+A14,0.7857142857142857,0.2527786948387807,A71,A151,male,old,bad
+A11,0.3571428571428571,0.052217453505007144,A75,A152,male,old,bad
+A11,0.17857142857142858,0.20485308682733577,A71,A153,male,young,good
+A14,0.3571428571428571,0.1336524705623418,A72,A152,female,old,good
+A14,0.14285714285714285,0.10360955210740619,A75,A153,female,old,good
+A11,0.3571428571428571,0.05397821063057115,A75,A151,female,old,good
+A14,0.3571428571428571,0.14427203697589966,A75,A152,male,old,good
+A12,0.24999999999999997,0.11978650819852536,A75,A152,male,old,good
+A11,1.0,0.3877517332452955,A75,A151,male,old,bad
+A14,0.4642857142857143,0.40420380763728403,A75,A152,male,old,good
+A11,0.625,0.7664245625619016,A74,A152,male,old,good
+A12,0.08928571428571427,0.06883459887751733,A75,A152,female,old,bad
+A12,0.7857142857142857,0.19775503466490593,A74,A153,male,old,bad
+A14,0.6785714285714286,0.20864971937933313,A73,A152,male,old,good
+A11,0.03571428571428571,0.06184659403543523,A71,A152,female,old,good
+A14,0.3571428571428571,0.41619896555518876,A74,A152,male,old,good
+A14,0.5714285714285714,0.03626059205458347,A75,A152,male,old,good
+A14,0.24999999999999997,0.34158688235941453,A75,A152,male,old,bad
+A14,0.3571428571428571,0.11962143721800374,A75,A152,male,old,good
+A14,0.19642857142857142,0.13965004952129417,A74,A152,female,young,good
+A11,0.14285714285714285,0.024595576097722022,A72,A152,male,old,bad
+A14,0.3035714285714286,0.12248266754704522,A75,A152,male,old,good
+A12,0.14285714285714285,0.0706503796632552,A72,A151,male,young,bad
+A12,0.03571428571428571,0.04500935402222955,A75,A152,male,old,good
+A12,0.14285714285714285,0.18284362275778587,A73,A152,female,young,good
+A12,0.24999999999999997,0.05782986684274238,A75,A152,male,old,good
+A14,0.6785714285714286,0.3805436337625179,A74,A151,male,old,good
+A12,0.3571428571428571,0.09843732805106195,A72,A152,male,young,bad
+A11,0.3571428571428571,0.12286783316826234,A73,A152,male,young,good
+A13,0.3571428571428571,0.18526466380543635,A75,A151,male,young,good
+A14,0.3571428571428571,0.09948277759436558,A73,A152,male,old,good
+A11,0.3571428571428571,0.08281060856168151,A75,A152,female,old,good
+A14,0.3571428571428571,0.07202597116760207,A74,A152,male,old,good
+A11,0.3571428571428571,0.16017387476614944,A73,A151,male,old,bad
+A14,0.7857142857142857,0.5486959392538792,A74,A152,male,old,good
+A11,0.3571428571428571,0.062231759656652355,A73,A152,female,old,bad
+A14,0.3035714285714286,0.10955210740618465,A75,A152,male,old,good
+A14,0.3571428571428571,0.12589413447782546,A75,A152,male,old,bad
+A11,0.5714285714285714,0.4300099042588313,A73,A152,female,young,bad
+A12,0.7857142857142857,0.33927588863211183,A75,A151,female,old,bad
+A12,0.3571428571428571,0.24683613954000222,A72,A152,female,young,bad
+A14,0.14285714285714285,0.037416088918234836,A75,A152,male,old,good
+A11,0.3571428571428571,0.1463629360625069,A74,A153,male,old,good
+A12,0.24999999999999997,0.11158798283261802,A72,A152,female,old,bad
+A14,0.5714285714285714,0.2318146803125344,A73,A152,male,old,bad
+A14,0.5714285714285714,0.11797072741278751,A73,A152,female,young,good
+A14,0.3571428571428571,0.1103224386486189,A72,A152,male,old,good
+A14,0.625,0.4587872785297678,A75,A152,male,old,good
+A12,0.3571428571428571,0.3022449653350941,A72,A153,female,young,good
+A11,0.14285714285714285,0.05227247716518102,A73,A151,female,young,good
+A14,0.4642857142857143,0.08897325850115549,A75,A152,male,old,good
+A11,0.03571428571428571,0.0335644327060636,A75,A152,female,old,good
+A11,0.3571428571428571,0.04258831297457907,A72,A152,male,old,bad
+A11,1.0,0.36238582590513924,A75,A152,male,old,bad
+A11,0.14285714285714285,0.07868383404864092,A75,A152,male,old,good
+A14,0.3571428571428571,0.06399251678221635,A73,A152,male,old,good
+A12,0.07142857142857142,0.03615054473423572,A72,A152,male,old,good
+A11,0.3571428571428571,0.05188731154396391,A71,A151,female,old,bad
+A11,0.14285714285714285,0.1280400572246066,A73,A152,male,old,good
+A14,0.3571428571428571,0.2047980631671619,A74,A151,female,young,good
+A12,0.3571428571428571,0.1930229998899527,A71,A151,female,young,good
+A14,0.24999999999999997,0.11389897655992078,A71,A152,male,old,good
+A14,0.3571428571428571,0.12176735996478487,A74,A152,male,old,good
+A13,0.03571428571428571,0.06014086057004511,A75,A152,male,old,good
+A14,0.4285714285714286,0.4167492021569275,A72,A151,male,old,good
+A14,0.3035714285714286,0.16644657202597118,A75,A152,male,old,good
+A12,0.24999999999999997,0.16474083856058105,A73,A152,female,old,good
+A14,0.4107142857142857,0.46016287003411466,A73,A152,male,old,good
+A11,0.5714285714285714,0.08759766699680863,A72,A152,female,old,bad
+A14,0.3571428571428571,0.12991086167051832,A75,A152,male,old,good
+A12,0.08928571428571427,0.09794211510949709,A74,A152,male,young,good
+A14,0.3571428571428571,0.49669858038956755,A73,A153,male,old,good
+A14,0.14285714285714285,0.045449543303620554,A73,A152,male,old,good
+A14,0.10714285714285712,0.07131066358534169,A73,A152,male,old,good
+A11,0.14285714285714285,0.09040387366567623,A73,A152,female,old,good
+A14,0.14285714285714285,0.10404974138879718,A74,A152,male,old,good
+A14,0.5714285714285714,0.425332893144052,A72,A151,male,old,bad
+A14,0.14285714285714285,0.055793991416309016,A75,A151,male,old,good
+A11,0.24999999999999997,0.10999229668757565,A74,A152,male,old,good
+A11,0.7857142857142857,0.324254429404644,A75,A153,female,old,bad
+A11,0.3035714285714286,0.1842742379223066,A74,A151,female,old,good
+A12,0.19642857142857142,0.07081545064377683,A75,A152,male,old,good
+A14,0.5714285714285714,0.590073731704633,A71,A152,female,old,bad
+A12,0.24999999999999997,0.2226807527236712,A75,A152,male,old,bad
+A11,0.24999999999999997,0.05172224056344228,A71,A153,female,old,bad
+A11,0.14285714285714285,0.056344228018047754,A72,A152,female,old,bad
+A14,0.08928571428571427,0.11863101133487398,A71,A152,male,old,good
+A12,0.3571428571428571,0.33856058104985143,A72,A152,male,old,good
+A14,0.10714285714285712,0.05705953560030813,A75,A152,male,old,good
+A12,0.6785714285714286,0.3138549576317817,A74,A152,female,old,good
+A11,0.03571428571428571,0.09392538791680423,A74,A152,female,old,good
+A14,0.3571428571428571,0.07164080554638494,A74,A152,male,old,good
+A14,0.3571428571428571,0.03780125453945196,A72,A152,male,old,good
+A14,0.3571428571428571,0.09805216242984482,A75,A153,male,old,good
+A14,0.5714285714285714,0.5846263893474194,A75,A152,male,old,good
+A12,0.3571428571428571,0.2659843732805106,A75,A152,female,old,good
+A11,0.5714285714285714,0.2712116209970287,A74,A152,male,old,bad
+A12,0.03571428571428571,0.044018928139099814,A71,A152,male,old,good
+A13,0.5714285714285714,0.2015516672169033,A73,A152,male,young,good
+A13,0.24999999999999997,0.15401122482667548,A72,A152,female,old,good
+A11,0.5714285714285714,0.44486629250577747,A75,A153,male,old,bad
+A11,0.14285714285714285,0.08847804555959062,A72,A151,female,young,good
+A11,0.14285714285714285,0.25272367117860683,A75,A151,male,old,bad
+A14,0.24999999999999997,0.19423352041377795,A72,A152,male,old,good
+A12,0.14285714285714285,0.09783206778914934,A73,A152,male,old,good
+A14,0.3571428571428571,0.11191812479366127,A74,A152,male,old,good
+A12,0.19642857142857142,0.1363486299108617,A74,A152,male,old,good
+A11,0.4107142857142857,0.27748431825685044,A71,A152,male,old,bad
+A12,1.0,0.4900957411687025,A73,A153,male,old,good
+A14,0.24999999999999997,0.09315505667436998,A72,A152,female,young,bad
+A11,0.3571428571428571,0.148949048090679,A75,A152,male,old,good
+A11,0.24999999999999997,0.11769560911191813,A72,A152,female,old,good
+A11,0.24999999999999997,0.08979861340376362,A73,A151,female,young,bad
+A12,0.4642857142857143,0.08060966215472654,A73,A152,female,old,good
+A11,0.7857142857142857,0.33459887751733247,A75,A153,male,old,bad
+A14,0.19642857142857142,0.0586552217453505,A73,A152,male,old,good
+A11,0.03571428571428571,0.8058765269065697,A75,A152,male,old,bad
+A14,0.4642857142857143,0.15555188731154398,A75,A152,male,old,good
+A14,0.0,0.07120061626499395,A74,A152,male,old,good
+A12,0.14285714285714285,0.041652910751623196,A73,A152,male,young,good
+A14,0.10714285714285712,0.10008803785627819,A73,A152,male,old,good
+A12,0.24999999999999997,0.7002310993727302,A71,A153,female,old,bad
+A11,0.24999999999999997,0.1570375261362386,A74,A152,male,old,good
+A12,0.5714285714285714,0.20578848905029165,A72,A152,female,old,good
+A11,0.14285714285714285,0.17877187190491914,A73,A152,female,old,bad
+A12,0.08928571428571427,0.048750962914053037,A75,A153,male,old,bad
+A13,0.19642857142857142,0.007813359744690218,A72,A151,female,young,good
+A11,0.24999999999999997,0.048475844613183675,A71,A152,female,old,bad
+A12,0.14285714285714285,0.06459777704412897,A74,A152,male,old,good
+A11,0.6785714285714286,0.17299438758666227,A71,A152,male,old,good
+A11,0.7857142857142857,0.3713546825134808,A74,A152,male,old,bad
+A11,0.4642857142857143,0.6464179597226807,A72,A152,male,old,bad
+A14,0.4642857142857143,0.14201606690877078,A73,A152,female,old,good
+A12,0.3571428571428571,0.09436557719819522,A73,A151,female,old,good
+A14,0.3571428571428571,0.07411687025420931,A75,A153,male,old,good
+A12,0.03571428571428571,0.09975789589523495,A72,A151,male,old,good
+A12,0.3928571428571428,0.42456256190161773,A72,A152,male,old,good
+A12,0.7857142857142857,0.4034334763948498,A71,A153,male,old,good
+A14,0.3571428571428571,0.24369979091009136,A74,A152,male,old,good
+A14,0.14285714285714285,0.30543633762517886,A75,A151,male,old,good
+A14,0.3571428571428571,0.1990756025090789,A75,A151,female,old,good
+A14,0.14285714285714285,0.11775063277209201,A75,A152,male,old,good
+A12,0.19642857142857142,0.3631561571475735,A71,A152,male,old,bad
+A14,0.08928571428571427,0.0675690546935182,A74,A152,male,young,bad
+A14,0.03571428571428571,0.10069329811819082,A73,A152,male,young,good
+A12,0.03571428571428571,0.03747111257840871,A72,A152,female,old,bad
+A14,0.08928571428571427,0.05469351821283151,A75,A151,female,old,good
+A11,0.10714285714285712,0.10355452844723231,A72,A151,female,old,good
+A14,0.24999999999999997,0.0935402222955871,A74,A152,male,old,good
+A11,0.03571428571428571,0.06184659403543523,A73,A152,male,old,good
+A12,0.03571428571428571,0.06597336854847584,A75,A152,male,old,good
+A14,0.24999999999999997,0.07329151535160118,A75,A152,male,old,good
diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB-test.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB-test.csv
new file mode 100644
index 0000000..1b64b92
--- /dev/null
+++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB-test.csv
@@ -0,0 +1,201 @@
+status,month,credit_amount,employment,housing,sex,age,credit
+A13,0.5714285714285714,0.30802244965335096,A73,A152,male,young,good
+A11,0.5714285714285714,0.11290855067679102,A73,A151,male,old,bad
+A12,0.14285714285714285,0.04693518212831517,A74,A152,male,old,good
+A14,0.24999999999999997,0.0704302850225597,A73,A152,male,old,bad
+A11,0.19642857142857142,0.08093980411576977,A72,A152,male,old,good
+A14,0.24999999999999997,0.1389347419390338,A74,A152,male,old,bad
+A13,0.14285714285714285,0.08974358974358974,A73,A151,female,old,good
+A14,0.19642857142857142,0.15291075162319798,A74,A152,male,old,good
+A11,0.14285714285714285,0.07741828986464179,A73,A152,male,old,good
+A12,0.3035714285714286,0.11571475734565863,A73,A152,male,old,good
+A11,0.24999999999999997,0.13255199735886433,A73,A152,male,old,good
+A14,0.14285714285714285,0.07384175195333993,A74,A152,female,old,good
+A13,0.3571428571428571,0.09216463079124024,A73,A152,male,old,good
+A14,0.5714285714285714,0.594475624518543,A73,A152,male,old,good
+A12,0.3571428571428571,0.09447562451854297,A75,A152,female,young,good
+A12,0.5714285714285714,0.13321228128095083,A73,A153,female,old,bad
+A14,0.3571428571428571,0.17497523935292178,A75,A152,male,old,good
+A13,0.5714285714285714,0.23236491691427316,A75,A152,male,old,good
+A14,0.14285714285714285,0.24347969626939586,A72,A151,female,young,good
+A14,0.3571428571428571,0.041212721470232194,A75,A152,male,young,good
+A12,0.7857142857142857,0.5893584241223726,A74,A152,male,old,bad
+A12,0.14285714285714285,0.05876526906569825,A75,A152,male,old,good
+A11,0.03571428571428571,0.019753494002421042,A74,A152,female,old,good
+A14,0.3035714285714286,0.1509298998569385,A73,A152,male,old,good
+A14,1.0,0.847529437658193,A74,A152,male,young,good
+A14,0.19642857142857142,0.17156377242214152,A75,A151,male,young,good
+A11,0.7857142857142857,0.24969736986904373,A74,A152,male,old,good
+A11,0.14285714285714285,0.10982722570705403,A75,A152,male,old,bad
+A11,0.3571428571428571,0.08237041928029053,A72,A152,male,young,good
+A12,0.7857142857142857,0.2666446572025971,A73,A152,female,old,bad
+A12,0.24999999999999997,0.034885000550236606,A75,A152,male,old,bad
+A14,0.24999999999999997,0.32023770221195114,A75,A152,male,old,good
+A11,0.3571428571428571,0.06228678331682623,A74,A152,male,old,good
+A12,0.03571428571428571,0.02767690106745901,A73,A152,female,old,good
+A14,0.08928571428571427,0.13464289644547156,A73,A152,male,old,good
+A12,0.03571428571428571,0.01870804445911742,A72,A152,male,old,good
+A12,0.4642857142857143,0.09183448883019699,A72,A152,male,old,bad
+A11,0.3035714285714286,0.07686805326290305,A73,A152,male,old,bad
+A12,0.24999999999999997,0.3090128755364807,A73,A152,male,old,good
+A14,0.3571428571428571,0.3132496973698691,A72,A152,female,old,bad
+A11,0.14285714285714285,0.026246285902938263,A72,A152,male,old,bad
+A11,0.3571428571428571,0.14097061736546715,A75,A151,female,old,good
+A11,0.7857142857142857,0.22328601298558381,A72,A151,female,young,bad
+A11,0.4642857142857143,0.3266754704522945,A74,A151,male,young,good
+A12,0.24999999999999997,0.05749972488169913,A72,A152,female,old,good
+A14,0.5714285714285714,0.3004842082095301,A75,A152,male,old,good
+A14,0.7857142857142857,0.32243864861890614,A73,A153,male,old,good
+A12,0.4642857142857143,0.44767249917464513,A74,A152,male,old,bad
+A13,0.24999999999999997,0.09414548255749973,A75,A152,female,young,good
+A12,0.14285714285714285,0.04996148343787829,A73,A152,male,old,good
+A12,0.14285714285714285,0.06382744580169472,A73,A152,male,old,good
+A14,0.5178571428571429,0.385330692197645,A74,A152,male,old,good
+A13,0.03571428571428571,0.023825244855287777,A72,A152,female,old,good
+A14,0.14285714285714285,0.06399251678221635,A74,A152,male,old,good
+A11,0.5714285714285714,0.2817761637504127,A73,A152,male,old,good
+A12,0.4642857142857143,0.2217453505007153,A73,A151,female,old,bad
+A11,0.3571428571428571,0.06096621547265324,A75,A152,male,old,bad
+A13,0.10714285714285712,0.0292175635523275,A75,A153,male,old,good
+A14,0.0,0.17222405634422802,A74,A152,female,old,good
+A12,0.08928571428571427,0.045779685264663805,A75,A152,male,old,good
+A12,0.5714285714285714,0.11483437878287663,A75,A152,male,old,good
+A14,0.053571428571428575,0.03279410146362936,A75,A153,male,old,good
+A12,0.7857142857142857,0.45322988885220644,A74,A152,female,young,good
+A13,0.19642857142857142,0.09106415758776273,A75,A151,male,old,good +A12,0.14285714285714285,0.08550676791020138,A72,A152,male,old,good +A11,0.14285714285714285,0.022119511389897654,A75,A151,female,young,good +A12,0.14285714285714285,0.059480576647958625,A72,A152,male,young,bad +A14,0.5714285714285714,0.15566193463189174,A73,A152,male,old,good +A13,0.19642857142857142,0.1340926598437328,A74,A151,male,old,good +A13,0.14285714285714285,0.05942555298778475,A72,A152,male,old,good +A11,0.14285714285714285,0.05051172003961703,A73,A152,male,old,good +A11,0.5714285714285714,0.17585561791570375,A75,A152,male,old,bad +A12,0.08928571428571427,0.052217453505007144,A74,A152,female,old,good +A11,0.14285714285714285,0.02701661714537251,A71,A152,female,young,bad +A12,0.24999999999999997,0.39198855507868385,A71,A152,male,old,good +A14,0.24999999999999997,0.10559040387366568,A73,A152,male,old,bad +A14,0.03571428571428571,0.05986574226917574,A73,A152,male,old,good +A11,0.3571428571428571,0.33944095961263343,A75,A153,female,old,good +A11,0.19642857142857142,0.039892153626059204,A73,A152,male,young,good +A14,0.3571428571428571,0.13392758886321118,A73,A152,male,old,good +A14,0.3571428571428571,0.23528117090348852,A73,A152,male,old,good +A12,0.14285714285714285,0.038571585781886214,A72,A151,female,old,bad +A12,0.8928571428571428,0.8635963464289644,A72,A151,male,old,bad +A14,0.19642857142857142,0.18190822053483,A73,A152,male,old,good +A14,0.0,0.06630351050951909,A74,A152,male,old,good +A12,0.3571428571428571,0.09271486739297898,A75,A152,male,old,bad +A12,0.14285714285714285,0.1581379993397161,A72,A152,male,old,good +A14,0.03571428571428571,0.07114559260482008,A73,A152,male,old,good +A11,0.3571428571428571,0.14933421371189612,A75,A153,male,old,good +A11,0.24999999999999997,0.06558820292725871,A74,A153,male,old,bad +A14,1.0,0.7431495543083525,A75,A153,male,old,good +A12,0.2857142857142857,0.3245295477055134,A75,A152,male,old,good +A14,0.3571428571428571,0.3336084516342027,A75,A152,male,old,good +A11,0.14285714285714285,0.41900517222405637,A75,A153,male,old,bad +A13,0.14285714285714285,0.05760977220204688,A73,A151,male,young,good +A11,0.24999999999999997,0.121712336304611,A73,A152,male,young,bad +A14,0.24999999999999997,0.3582590513920986,A73,A151,male,old,bad +A14,0.5714285714285714,0.5129305601408606,A72,A152,male,old,bad +A14,0.5714285714285714,0.39391438318476946,A75,A152,male,old,good +A11,0.5714285714285714,0.36519203257400684,A73,A152,male,old,bad +A14,0.3571428571428571,0.18548475844613185,A75,A152,male,old,bad +A11,0.03571428571428571,0.06063607351160999,A71,A151,female,young,good +A13,0.14285714285714285,0.06767910201386595,A71,A153,male,old,good +A14,0.24999999999999997,0.042808407615274574,A73,A152,female,old,good +A11,0.14285714285714285,0.035875426433366345,A74,A151,male,young,bad +A11,0.10714285714285712,0.1136238582590514,A75,A152,male,old,good +A14,0.08928571428571427,0.09519093210080334,A72,A151,female,young,bad +A12,0.3571428571428571,0.07169582920655881,A74,A151,female,young,good +A13,0.10714285714285712,0.05447342357213601,A75,A153,female,old,bad +A12,0.14285714285714285,0.05353802134918015,A75,A151,male,old,bad +A11,0.3035714285714286,0.12963574336964895,A72,A151,female,old,good +A11,0.14285714285714285,0.06377242214152085,A75,A152,male,old,good +A14,0.24999999999999997,0.07252118410916694,A73,A151,female,young,good +A14,0.01785714285714285,0.1759656652360515,A74,A152,male,old,good +A12,0.24999999999999997,0.1992956971497744,A71,A152,female,old,good 
+A12,0.14285714285714285,0.116430064927919,A74,A152,male,old,good +A14,0.5714285714285714,0.4087157477715418,A74,A152,female,old,good +A12,0.5714285714285714,0.5033564432706064,A72,A151,male,old,bad +A14,0.5714285714285714,0.5126004181798173,A73,A152,female,old,good +A11,0.3571428571428571,0.0928799383735006,A72,A152,male,old,bad +A11,0.14285714285714285,0.10448993067018818,A73,A153,male,old,bad +A14,0.5714285714285714,0.350170573346539,A75,A152,male,old,good +A14,0.14285714285714285,0.14355672939363928,A71,A152,male,old,good +A12,0.2857142857142857,0.13090128755364808,A73,A152,male,old,good +A12,0.24999999999999997,0.32761087267525035,A73,A152,male,old,good +A11,0.03571428571428571,0.18851105975569496,A73,A151,male,old,good +A12,0.19642857142857142,0.114229118520964,A73,A152,male,old,good +A11,0.03571428571428571,0.17244415098492352,A73,A151,male,old,bad +A12,0.7857142857142857,0.1546164850885881,A74,A152,male,old,bad +A13,0.03571428571428571,0.05771981952239463,A73,A152,male,old,good +A14,0.10714285714285712,0.06426763508308572,A73,A151,male,old,good +A14,0.14285714285714285,0.04104765048971058,A74,A152,female,young,good +A14,0.2857142857142857,0.1780015406624849,A72,A152,male,old,good +A14,0.24999999999999997,0.17211400902388027,A73,A152,male,old,good +A14,0.08928571428571427,0.05359304500935402,A73,A152,male,old,good +A13,0.08928571428571427,0.02723671178606801,A73,A152,female,old,bad +A12,0.14285714285714285,0.08858809287993837,A71,A152,male,old,good +A11,0.2857142857142857,0.2213051612193243,A75,A152,female,young,good +A12,0.053571428571428575,0.11912622427643886,A73,A152,male,old,good +A14,0.3571428571428571,0.19406844943325632,A73,A152,male,old,good +A12,0.4642857142857143,0.10625068779575217,A75,A152,male,old,good +A14,0.24999999999999997,0.239022779795312,A72,A152,male,old,good +A11,0.14285714285714285,0.028007043028502255,A74,A152,male,old,bad +A14,0.4642857142857143,0.23550126554418402,A74,A151,female,old,good +A14,0.10714285714285712,0.09210960713106636,A73,A152,male,old,good +A14,0.14285714285714285,0.17591064157587763,A73,A152,female,old,good +A12,1.0,0.7580609662154726,A74,A152,male,old,bad +A13,0.24999999999999997,0.14383184769450866,A73,A152,male,old,bad +A14,0.14285714285714285,0.05529877847474414,A75,A152,male,old,good +A11,0.6785714285714286,0.22669747991636405,A74,A152,male,old,bad +A14,0.14285714285714285,0.03719599427753935,A71,A152,female,old,good +A12,0.19642857142857142,0.030373060415978873,A75,A152,male,old,bad +A12,0.14285714285714285,0.04632992186640256,A73,A152,female,old,good +A14,0.3035714285714286,0.07378672829316606,A74,A152,male,old,good +A14,0.5714285714285714,0.23131946737096953,A73,A152,female,old,good +A14,0.14285714285714285,0.023770221195113902,A74,A152,female,old,good +A12,0.24999999999999997,0.3362495873225487,A75,A152,male,old,good +A14,0.14285714285714285,0.055463849455265765,A72,A151,female,young,good +A12,0.4107142857142857,0.12490370859469573,A73,A152,male,young,bad +A11,0.03571428571428571,0.00484208209530098,A75,A152,male,old,good +A12,0.2857142857142857,0.37454605480356556,A74,A151,male,old,good +A11,0.2857142857142857,0.10922196544514141,A73,A151,male,old,bad +A13,0.14285714285714285,0.06459777704412897,A75,A152,female,old,good +A12,0.3571428571428571,0.6095521074061846,A73,A152,male,old,bad +A14,0.3571428571428571,0.05496863651370089,A72,A152,male,old,good +A12,0.08928571428571427,0.10278419720479806,A73,A152,male,old,good +A11,0.3571428571428571,0.05694948827996038,A74,A151,female,old,bad 
+A14,0.7857142857142857,0.18311874105865522,A75,A152,male,old,good +A14,0.10714285714285712,0.14295146913172666,A73,A152,male,old,good +A11,0.24999999999999997,0.17932210850665786,A74,A152,female,young,good +A11,0.07142857142857142,0.05029162539892153,A75,A153,male,old,good +A12,0.3035714285714286,0.2050181578078574,A74,A152,male,old,good +A12,0.24999999999999997,0.18498954550456698,A75,A152,female,old,good +A12,1.2142857142857142,0.2941014636293606,A73,A152,male,young,bad +A11,0.14285714285714285,0.025255860019808517,A75,A152,male,old,bad +A11,0.5714285714285714,0.43903378452734676,A73,A152,male,old,bad +A11,0.3035714285714286,0.17409486079013978,A72,A152,male,old,bad +A12,0.5714285714285714,0.6802024870694399,A73,A153,male,old,bad +A14,0.03571428571428571,0.08253549026081215,A75,A152,male,old,good +A14,0.14285714285714285,0.15555188731154398,A73,A152,male,old,good +A14,0.19642857142857142,0.2791900517222406,A75,A153,female,old,good +A13,0.0,0.06844943325630021,A72,A152,male,old,good +A11,0.14285714285714285,0.12809508088478047,A71,A153,female,old,good +A12,0.053571428571428575,0.11439418950148562,A72,A152,female,old,good +A11,0.08928571428571427,0.022229558710245404,A73,A152,male,old,bad +A14,0.5714285714285714,0.4337515131506548,A73,A152,female,old,good +A14,0.3571428571428571,0.19880048420820953,A73,A153,male,old,good +A12,0.14285714285714285,0.39738087377572356,A71,A151,female,young,good +A14,0.14285714285714285,0.07180587652690656,A75,A153,male,old,bad +A11,0.14285714285714285,0.055683944095961266,A75,A152,male,old,good +A12,0.08928571428571427,0.02971277649389237,A73,A152,female,old,good +A12,0.4642857142857143,0.27423792230659183,A71,A152,male,old,bad +A11,0.4642857142857143,0.11863101133487398,A74,A151,female,young,bad +A12,0.14285714285714285,0.1496093320127655,A72,A151,female,young,bad +A14,0.3035714285714286,0.6839440959612633,A75,A153,male,old,bad +A11,0.14285714285714285,0.045779685264663805,A73,A152,male,old,bad +A12,0.4107142857142857,0.20166171453725104,A73,A152,male,old,bad +A14,0.08928571428571427,0.19709475074281943,A75,A152,male,old,good +A12,0.24999999999999997,0.09232970177176185,A72,A152,male,old,bad +A12,0.5714285714285714,0.5286123032904149,A74,A152,male,old,good diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB-train.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB-train.csv new file mode 100644 index 0000000..b15c2c9 --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB-train.csv @@ -0,0 +1,501 @@ +status,month,credit_amount,employment,housing,sex,age,credit +A11,0.5714285714285714,0.20408275558490152,A71,A152,male,old,good +A14,0.08928571428571427,0.18306371739848135,A73,A151,male,old,good +A14,0.24999999999999997,0.12462859029382635,A73,A152,male,old,good +A12,0.14285714285714285,0.09601628700341147,A72,A152,male,old,good +A14,1.0,0.5566193463189171,A75,A152,male,old,good +A14,0.3571428571428571,0.39303400462198745,A73,A152,male,old,good +A13,0.14285714285714285,0.008748761967646089,A73,A151,female,old,good +A14,0.08928571428571427,0.12418840101243535,A75,A153,male,old,good +A14,0.4642857142857143,0.25096291405304283,A74,A151,female,young,good +A12,0.7857142857142857,0.3471992956971498,A74,A152,male,young,bad +A12,0.5714285714285714,0.48332783096731596,A72,A151,male,old,bad +A11,0.3571428571428571,0.1816881258941345,A74,A152,male,old,bad +A12,0.2857142857142857,0.3421371189611533,A71,A152,male,old,good 
+A11,0.4642857142857143,0.1250137559150435,A75,A152,male,old,good +A12,0.5714285714285714,0.14141080664685815,A72,A152,male,old,bad +A14,0.03571428571428571,0.029327610872675252,A73,A152,male,old,good +A11,0.3035714285714286,0.19329811819082207,A74,A152,male,young,good +A14,0.4642857142857143,0.39809618135798397,A71,A152,female,old,bad +A11,0.19642857142857142,0.06344228018047761,A73,A151,female,old,good +A12,0.125,0.24876196764608782,A74,A152,male,old,good +A11,0.3571428571428571,0.20743919885550788,A73,A152,male,old,good +A12,0.3571428571428571,0.052327500825354895,A72,A152,male,old,good +A12,0.7857142857142857,0.28232640035215145,A71,A153,male,old,good +A14,0.3571428571428571,0.28947947617475517,A73,A152,male,young,good +A14,0.24999999999999997,0.04666006382744581,A71,A152,female,old,good +A12,0.7857142857142857,0.32871134587872786,A75,A153,male,old,bad +A14,0.125,0.06289204357873886,A72,A152,female,old,good +A12,0.08928571428571427,0.09183448883019699,A74,A151,male,old,good +A14,0.19642857142857142,0.2638384505337295,A75,A152,female,old,good +A12,0.7857142857142857,0.3136898866512601,A73,A152,female,young,bad +A11,0.07142857142857142,0.026466380543633764,A75,A152,male,old,good +A14,0.24999999999999997,0.02085396720589854,A75,A152,male,old,good +A14,0.24999999999999997,0.08572686255089688,A74,A152,female,young,bad +A12,0.3035714285714286,0.05161219324309453,A75,A152,female,old,bad +A14,0.3571428571428571,0.16424562561901618,A75,A152,male,old,good +A12,0.3571428571428571,0.6223175965665235,A73,A151,female,young,bad +A12,0.08928571428571427,0.04974138879718279,A75,A152,male,old,good +A14,0.24999999999999997,0.19808517662594918,A74,A152,male,old,good +A11,0.3571428571428571,0.11604489930670188,A71,A152,male,old,bad +A14,0.3571428571428571,0.07059535600308132,A72,A152,female,old,good +A13,0.5714285714285714,0.21789369428854408,A73,A152,male,old,bad +A11,0.4642857142857143,0.19847034224716628,A73,A152,male,old,good +A14,0.14285714285714285,0.06289204357873886,A75,A152,male,old,good +A14,0.24999999999999997,0.08528667326950588,A73,A152,male,young,good +A13,0.14285714285714285,0.1101023440079234,A73,A152,female,old,good +A12,0.14285714285714285,0.03224386486189061,A72,A152,female,young,bad +A11,0.24999999999999997,0.06025090789039286,A73,A152,male,old,bad +A12,0.5714285714285714,0.2500825354902608,A72,A152,female,old,good +A11,0.14285714285714285,0.025200836359634642,A73,A152,male,old,good +A11,0.14285714285714285,0.05716958292065588,A73,A152,male,young,good +A14,0.24999999999999997,0.08880818752063387,A73,A152,female,old,bad +A12,0.7857142857142857,0.5342797402883239,A72,A152,female,old,bad +A11,0.24999999999999997,0.27797953119841534,A75,A153,male,old,good +A11,0.03571428571428571,0.08924837680202487,A71,A153,male,old,good +A11,0.3571428571428571,0.12044679212061186,A72,A152,female,old,bad +A14,0.08928571428571427,0.06261692527786948,A73,A151,female,old,good +A12,0.7857142857142857,0.6558270056124132,A71,A153,male,old,good +A14,0.24999999999999997,0.13271706833938596,A74,A152,male,old,good +A14,0.1607142857142857,0.06377242214152085,A71,A152,female,old,good +A11,0.19642857142857142,0.1866952789699571,A75,A152,female,old,good +A11,0.7857142857142857,0.358093980411577,A73,A152,female,old,bad +A14,0.3571428571428571,0.0880928799383735,A74,A152,male,old,good +A11,0.4107142857142857,0.17420490811048753,A73,A152,male,old,good +A14,0.4107142857142857,0.2678001540662485,A74,A152,male,old,good +A14,0.10714285714285712,0.07411687025420931,A73,A151,male,old,good 
+A14,0.03571428571428571,0.06228678331682623,A73,A152,female,old,good +A14,0.10714285714285712,0.05397821063057115,A75,A152,male,old,good +A12,0.19642857142857142,0.13101133487399583,A73,A151,female,old,bad +A12,0.4642857142857143,0.10454495433036205,A73,A152,female,young,bad +A12,0.14285714285714285,0.1474634092659844,A74,A152,female,old,good +A14,0.03571428571428571,0.3582590513920986,A74,A152,male,old,good +A12,0.3035714285714286,0.13728403213381754,A74,A152,male,old,good +A11,0.3571428571428571,0.11417409486079015,A74,A152,male,old,good +A14,0.03571428571428571,0.05249257180587652,A73,A151,male,old,good +A11,0.24999999999999997,0.04396390447892594,A72,A151,female,young,good +A14,0.14285714285714285,0.11235831407505227,A71,A152,male,old,bad +A14,0.08928571428571427,0.06597336854847584,A74,A152,female,old,good +A11,0.24999999999999997,0.39952679652250467,A75,A153,male,old,bad +A12,0.08928571428571427,0.03675580499614835,A73,A152,female,old,bad +A14,0.3571428571428571,0.1153295917244415,A74,A152,male,old,good +A11,0.3571428571428571,0.07444701221525256,A75,A152,female,old,good +A11,0.125,0.2011114779355123,A73,A151,male,old,good +A14,0.24999999999999997,0.0482007263123143,A72,A151,female,young,good +A13,0.3035714285714286,0.14707824364476726,A73,A152,female,old,good +A12,0.08928571428571427,0.1620446792120612,A73,A152,female,old,good +A11,0.03571428571428571,0.24573566633652472,A72,A152,male,old,good +A12,0.03571428571428571,0.7871134587872785,A71,A152,male,young,bad +A11,0.5714285714285714,0.2884890502916254,A75,A153,male,old,good +A14,0.3571428571428571,0.0734565863321228,A74,A152,male,old,good +A11,0.14285714285714285,0.18377902498074172,A73,A152,male,old,good +A14,0.14285714285714285,0.1380543633762518,A75,A152,male,old,good +A11,0.14285714285714285,0.10300429184549356,A73,A151,male,old,good +A12,0.14285714285714285,0.09640145262462858,A74,A151,male,old,good +A14,0.3571428571428571,0.09084406294706723,A73,A151,male,old,good +A14,0.14285714285714285,0.06839440959612633,A72,A152,female,old,good +A12,0.03571428571428571,0.052767690106745896,A71,A152,male,old,bad +A12,0.7857142857142857,0.20518322878837902,A74,A152,male,old,good +A11,0.3571428571428571,0.4110817651590184,A72,A152,female,old,good +A11,0.24999999999999997,0.1870804445911742,A72,A151,female,young,good +A11,0.24999999999999997,0.08968856608341587,A74,A152,male,old,good +A12,0.24999999999999997,0.09304500935402223,A73,A152,male,old,good +A14,0.19642857142857142,0.034334763948497854,A72,A152,female,young,good +A14,0.4107142857142857,0.2718168812589414,A75,A152,male,old,good +A14,0.5714285714285714,0.5627269725982172,A75,A153,male,old,good +A12,0.3571428571428571,0.22565203037306042,A73,A152,female,old,good +A14,0.3571428571428571,0.06195664135578298,A74,A152,female,old,good +A12,0.4642857142857143,0.1786068009243975,A73,A152,male,old,good +A11,0.6428571428571428,0.3162759986794322,A73,A152,male,old,bad +A11,0.4285714285714286,0.20666886761307363,A73,A152,male,old,bad +A14,0.5714285714285714,0.1701331572576208,A75,A152,male,old,good +A12,0.24999999999999997,0.13051612193243095,A73,A152,male,old,good +A12,0.5714285714285714,0.11742049081104876,A72,A151,male,old,bad +A11,0.3571428571428571,0.07752833718498954,A72,A151,female,old,bad +A14,0.3571428571428571,0.1781666116430065,A74,A152,female,young,good +A12,1.0,0.39440959612633436,A73,A152,male,old,good +A14,0.24999999999999997,0.10938703642566304,A73,A152,female,young,good +A11,0.3571428571428571,0.059590623968306375,A71,A153,male,old,bad 
+A14,0.3214285714285714,0.05683944095961263,A74,A151,female,young,good +A14,0.03571428571428571,0.17981732144822274,A73,A151,male,old,good +A12,0.14285714285714285,0.018432926158248045,A73,A151,male,young,good +A12,0.625,0.25767580059425554,A74,A152,male,young,bad +A14,0.3571428571428571,0.056564322658743255,A75,A152,male,old,good +A14,0.14285714285714285,0.08352591614394189,A73,A151,male,young,good +A11,0.3571428571428571,0.17051832287883792,A72,A153,male,old,bad +A12,0.08928571428571427,0.0627819962583911,A73,A152,male,old,good +A14,0.24999999999999997,0.17453505007153078,A75,A152,male,old,good +A14,0.10714285714285712,0.10432485968966655,A72,A151,female,young,good +A14,0.19642857142857142,0.06988004842082095,A75,A152,male,old,good +A14,0.3571428571428571,0.3627159678661825,A73,A152,male,old,good +A14,0.24999999999999997,0.09909761197314845,A72,A152,male,old,good +A11,0.3571428571428571,0.15109497083746012,A73,A152,male,young,bad +A11,0.3571428571428571,0.18763068119291296,A73,A152,female,old,good +A11,0.3571428571428571,0.06558820292725871,A74,A151,female,young,bad +A11,0.08928571428571427,0.04886101023440079,A73,A152,male,young,good +A14,0.4642857142857143,0.3037856278199626,A74,A152,female,young,good +A11,0.24999999999999997,0.12930560140860572,A73,A153,male,old,bad +A13,0.3571428571428571,0.19252778694838782,A72,A152,female,old,good +A14,0.14285714285714285,0.15456146142841423,A74,A152,male,old,good +A11,0.03571428571428571,0.022669747991636405,A72,A152,male,old,good +A11,0.24999999999999997,0.09298998569384835,A72,A153,male,old,good +A12,0.5714285714285714,0.1140640475404424,A74,A151,male,young,good +A12,0.7857142857142857,0.6577528337184989,A73,A152,male,old,good +A14,0.3571428571428571,0.28925938153405967,A75,A153,male,old,good +A14,0.24999999999999997,0.010069329811819083,A71,A151,female,young,bad +A14,0.03571428571428571,0.23995818201826785,A72,A152,female,old,bad +A12,0.14285714285714285,0.13821943435677342,A75,A152,female,young,bad +A14,0.4642857142857143,0.2213051612193243,A73,A152,male,old,good +A12,0.625,0.6333223286012986,A74,A151,male,old,good +A12,0.5714285714285714,0.10867172884340266,A75,A153,male,old,bad +A13,0.14285714285714285,0.06734896005282272,A72,A152,female,old,good +A14,0.14285714285714285,0.06338725652030373,A74,A151,female,old,good +A12,0.24999999999999997,0.09150434686915374,A72,A152,male,old,good +A14,0.17857142857142858,0.030373060415978873,A73,A152,male,old,good +A13,0.19642857142857142,0.05617915703752613,A73,A153,male,old,bad +A13,0.3571428571428571,0.055463849455265765,A73,A152,female,old,good +A14,0.14285714285714285,0.18240343347639487,A72,A152,male,old,good +A11,0.14285714285714285,0.17255419830527127,A75,A153,male,old,bad +A13,0.14285714285714285,0.1098822493672279,A73,A152,female,old,good +A14,0.3571428571428571,0.30560140860570045,A73,A152,male,old,good +A14,0.19642857142857142,0.03906679872345109,A74,A152,female,old,good +A12,0.24999999999999997,0.044349070100143065,A75,A152,male,old,bad +A13,0.6785714285714286,0.3322878837900297,A72,A152,male,old,good +A14,0.08928571428571427,0.13772422141520854,A75,A152,male,old,good +A14,0.10714285714285712,0.10784637394079453,A73,A151,male,young,bad +A11,0.7857142857142857,0.5528227137669197,A74,A153,male,old,bad +A12,0.5714285714285714,0.7740728513260702,A75,A153,male,old,bad +A12,0.4642857142857143,0.2200396170353252,A71,A152,male,old,bad +A14,0.3571428571428571,0.03736106525806096,A74,A152,male,old,good +A12,0.08928571428571427,0.039011775063277215,A73,A152,female,old,bad 
+A14,0.3035714285714286,0.13931990756025092,A74,A152,female,old,good +A12,0.7857142857142857,0.44613183668977663,A72,A152,female,old,good +A14,0.08928571428571427,0.11285352701661715,A72,A151,female,young,good +A12,0.7857142857142857,0.18245845713656875,A74,A152,male,old,good +A11,0.24999999999999997,0.20446792120611865,A75,A151,female,old,bad +A14,0.03571428571428571,0.05430835259161439,A73,A152,female,old,good +A11,0.24999999999999997,0.03994717728623308,A72,A152,female,young,bad +A11,0.3571428571428571,0.03659073401562672,A75,A152,female,old,bad +A13,0.3571428571428571,0.03835149114119071,A74,A153,male,old,bad +A12,0.3571428571428571,0.14168592494772753,A74,A152,male,old,good +A14,0.3035714285714286,0.11213821943435677,A72,A152,female,young,good +A14,0.10714285714285712,0.145867723120942,A72,A151,female,old,good +A12,0.7321428571428571,0.23803235391218225,A71,A152,male,old,good +A14,0.7857142857142857,0.3845053372950369,A75,A152,male,old,good +A11,0.5714285714285714,0.13733905579399142,A75,A152,male,old,bad +A12,0.3571428571428571,0.08215032463959503,A75,A152,male,old,good +A11,0.08928571428571427,0.06140640475404424,A72,A151,female,young,bad +A14,0.24999999999999997,0.06905469351821283,A73,A153,male,old,good +A11,0.03571428571428571,0.005117200396170352,A72,A152,female,old,good +A14,0.3571428571428571,0.10856168152305491,A75,A152,male,old,good +A14,0.14285714285714285,0.06850445691647408,A75,A152,male,old,good +A13,0.19642857142857142,0.11428414218113787,A72,A152,female,young,bad +A11,0.7321428571428571,0.6364036535710355,A75,A151,male,old,bad +A12,0.14285714285714285,0.06789919665456146,A73,A152,male,young,bad +A14,0.5714285714285714,0.4844833278309673,A73,A153,male,old,good +A14,0.14285714285714285,0.07730824254429404,A75,A152,male,old,good +A14,0.24999999999999997,0.06619346318917134,A72,A152,female,old,good +A11,0.14285714285714285,0.053813139650049524,A73,A152,female,young,bad +A12,0.03571428571428571,0.037526136238582586,A74,A152,female,old,good +A12,0.5714285714285714,0.4311654011224827,A75,A152,male,old,bad +A14,0.03571428571428571,0.06030593155056674,A75,A153,male,old,good +A12,0.5714285714285714,0.1437218003741609,A75,A153,male,old,good +A11,0.3571428571428571,0.15808297567954221,A72,A152,female,old,bad +A12,1.0,0.3938593595245956,A72,A152,female,young,bad +A12,0.19642857142857142,0.055793991416309016,A73,A151,male,young,bad +A14,0.03571428571428571,0.07175085286673269,A74,A151,female,young,good +A12,0.3035714285714286,0.18719049191152196,A74,A152,male,old,good +A11,0.7857142857142857,0.20254209310003302,A74,A153,male,old,bad +A11,0.03571428571428571,0.061131286453174866,A72,A152,male,old,good +A11,0.03571428571428571,0.052437548145702645,A75,A152,male,old,good +A14,0.14285714285714285,0.18031253438978762,A72,A152,male,old,good +A14,0.5714285714285714,0.4184549356223176,A73,A152,female,young,bad +A11,0.3035714285714286,0.17095851216022892,A72,A152,female,old,good +A13,0.24999999999999997,0.10179377132166832,A73,A152,male,old,bad +A14,0.24999999999999997,0.0505667436997909,A73,A152,male,old,good +A14,0.5714285714285714,0.47854077253218885,A74,A152,male,old,good +A11,0.24999999999999997,0.09276989105315285,A74,A151,male,young,good +A12,0.3571428571428571,0.1794871794871795,A74,A152,male,old,good +A12,0.3571428571428571,0.3695939253879168,A74,A151,male,old,good +A14,0.4642857142857143,0.11461428414218112,A75,A152,male,old,good +A11,0.24999999999999997,0.09843732805106195,A73,A151,female,young,bad +A11,0.5714285714285714,0.49636843842852424,A74,A152,male,young,bad 
+A14,0.3035714285714286,0.07274127874986244,A75,A152,female,old,good +A12,0.03571428571428571,0.011224826675470454,A72,A152,male,young,good +A14,0.5714285714285714,0.33311323869263787,A75,A152,male,old,good +A14,0.3571428571428571,0.10713106635853417,A74,A152,male,old,good +A14,0.7857142857142857,0.40601958842302194,A75,A152,male,old,good +A13,0.10714285714285712,0.05639925167822163,A72,A152,female,young,good +A11,0.7857142857142857,0.37795752173434577,A73,A153,male,old,bad +A12,0.3571428571428571,0.6783867062837019,A75,A153,female,old,bad +A12,0.24999999999999997,0.1391548365797293,A73,A151,male,young,good +A14,0.0,0.06894464619786508,A74,A152,male,old,good +A12,0.07142857142857142,0.05430835259161439,A73,A152,female,young,bad +A14,0.7857142857142857,0.20160669087707717,A73,A152,male,old,bad +A14,0.3571428571428571,0.15472653240893586,A75,A151,male,old,good +A12,0.5714285714285714,0.09370529327610873,A75,A153,male,old,bad +A12,0.4642857142857143,0.19709475074281943,A72,A152,male,young,good +A14,0.03571428571428571,0.0708704743039507,A72,A152,female,old,good +A14,0.03571428571428571,0.10223396060305931,A74,A151,male,old,good +A11,0.03571428571428571,0.00979421151094971,A75,A152,female,old,good +A12,0.5714285714285714,0.2313744910311434,A73,A152,male,old,bad +A13,0.3035714285714286,0.1138439528997469,A72,A151,male,old,bad +A14,0.14285714285714285,0.05023660173874765,A73,A152,male,old,good +A14,0.08928571428571427,0.037746230879278087,A75,A152,male,old,good +A11,0.24999999999999997,0.12231759656652359,A71,A152,male,young,bad +A14,0.19642857142857142,0.07257620776934082,A75,A152,male,old,good +A12,0.14285714285714285,0.1522504677011115,A72,A151,female,old,good +A14,0.14285714285714285,0.11450423682183337,A75,A152,male,old,good +A14,0.14285714285714285,0.10157367668097282,A74,A152,male,old,good +A12,0.14285714285714285,0.04330362055683944,A74,A152,male,old,good +A14,0.03571428571428571,0.05425332893144051,A73,A151,male,old,good +A14,0.3571428571428571,0.21464729833828547,A73,A152,male,old,good +A14,0.7857142857142857,0.18482447452404535,A73,A152,female,old,good +A14,0.14285714285714285,0.06393749312204247,A73,A152,female,old,good +A11,0.125,0.20298228238142402,A73,A152,male,old,good +A11,0.14285714285714285,0.0702101903818642,A75,A153,male,old,good +A14,0.3214285714285714,0.1334323759216463,A75,A152,male,old,good +A12,0.4642857142857143,0.2184989545504567,A73,A152,female,old,good +A11,0.3571428571428571,0.14713326730494114,A73,A152,male,old,good +A14,0.24999999999999997,0.08622207549246176,A73,A152,female,old,good +A13,0.10714285714285712,0.0536480686695279,A73,A152,male,old,good +A11,0.19642857142857142,0.08776273797733025,A72,A151,female,old,good +A14,0.03571428571428571,0.0,A73,A152,female,old,good +A14,0.03571428571428571,0.022559700671288655,A74,A151,male,young,good +A13,0.03571428571428571,0.025255860019808517,A72,A152,male,old,good +A11,0.5714285714285714,0.8587542643336634,A71,A152,male,old,good +A12,0.3571428571428571,0.08732254869593925,A74,A153,female,old,bad +A11,0.1607142857142857,0.08512160228898426,A72,A152,male,old,good +A11,0.03571428571428571,0.010894684714427203,A72,A152,female,young,bad +A12,0.24999999999999997,0.16303510509519095,A72,A151,male,young,good +A11,0.14285714285714285,0.12815010454495435,A72,A152,male,old,bad +A11,0.03571428571428571,0.05216242984483327,A75,A153,female,old,bad +A14,0.3035714285714286,0.26152745680642675,A73,A152,female,old,bad +A12,0.3571428571428571,0.11461428414218112,A72,A152,male,old,good 
+A14,0.19642857142857142,0.055683944095961266,A74,A152,male,old,good +A12,0.14285714285714285,0.049796412457356665,A75,A152,male,old,good +A14,0.10714285714285712,0.03543523715197534,A74,A152,female,old,good +A14,0.14285714285714285,0.16952789699570817,A75,A152,male,old,good +A12,0.24999999999999997,0.04748541873005392,A73,A152,female,old,good +A14,0.14285714285714285,0.02580609662154727,A75,A152,male,old,bad +A12,0.3571428571428571,0.09981291955540883,A71,A152,female,old,bad +A13,0.4642857142857143,0.18741058655221746,A75,A152,male,old,good +A11,0.3571428571428571,0.3502255970067129,A71,A153,male,old,good +A14,0.24999999999999997,0.16391548365797293,A71,A152,male,old,good +A14,0.19642857142857142,0.17018818091779467,A73,A153,male,old,good +A11,0.4642857142857143,0.1572576207769341,A72,A152,male,old,bad +A12,0.08928571428571427,0.011444921316165951,A73,A152,male,young,good +A14,0.19642857142857142,0.06718388907230109,A73,A153,male,old,good +A12,0.125,0.05898536370639375,A73,A152,female,old,good +A11,0.08928571428571427,0.057114559260482006,A75,A152,male,old,good +A14,0.14285714285714285,0.025145812699460767,A73,A152,male,old,good +A12,0.19642857142857142,0.058215032463959496,A75,A152,male,old,good +A14,0.08928571428571427,0.15538681633102236,A73,A152,male,old,good +A11,0.19642857142857142,0.12440849565313085,A71,A151,female,young,good +A14,0.03571428571428571,0.0819852536590734,A75,A151,male,old,good +A14,0.14285714285714285,0.04803565533179267,A73,A151,female,old,bad +A12,0.125,0.07301639705073182,A72,A152,female,young,good +A11,0.7321428571428571,0.08776273797733025,A73,A153,male,young,bad +A14,0.3571428571428571,0.09783206778914934,A74,A152,male,old,good +A12,0.4107142857142857,0.4439308902828216,A75,A153,female,old,bad +A12,0.14285714285714285,0.05749972488169913,A72,A151,female,young,bad +A12,0.3571428571428571,0.6431165401122483,A73,A152,male,old,bad +A14,0.5714285714285714,0.08633212281280951,A73,A153,male,old,bad +A11,0.19642857142857142,0.0675690546935182,A75,A152,male,old,good +A14,0.3571428571428571,0.06965995378012545,A73,A152,female,old,good +A11,0.08928571428571427,0.1037746230879278,A73,A152,male,young,good +A14,0.19642857142857142,0.19599427753934193,A72,A152,female,young,good +A14,0.24999999999999997,0.21541762958071972,A73,A152,male,old,bad +A12,0.3571428571428571,0.20947507428194126,A74,A152,male,old,bad +A14,0.7857142857142857,0.543468691537361,A73,A153,male,old,bad +A12,0.03571428571428571,0.012875536480686695,A74,A152,male,old,good +A12,1.0,0.33223286012985587,A73,A153,male,old,bad +A11,0.3571428571428571,0.36436667767139874,A72,A152,male,old,bad +A14,0.19642857142857142,0.07054033234290745,A73,A152,female,old,good +A14,0.08928571428571427,0.10366457576758006,A73,A152,male,old,good +A11,0.14285714285714285,0.04715527676901067,A73,A151,male,young,good +A11,0.2857142857142857,0.10795642126114229,A74,A152,male,old,good +A14,0.19642857142857142,0.16154946627049632,A74,A151,female,young,good +A11,0.7678571428571428,0.5762077693408165,A72,A152,female,old,good +A14,0.053571428571428575,0.02641135688345989,A75,A151,male,old,good +A14,0.14285714285714285,0.10030813249697369,A73,A152,female,old,good +A11,0.3035714285714286,0.0176625949158138,A75,A152,male,old,good +A13,0.19642857142857142,0.11609992296687575,A73,A152,male,old,good +A11,0.14285714285714285,0.06019588423021899,A73,A152,male,old,good +A11,0.6785714285714286,0.3809838230439089,A74,A152,female,old,bad +A13,0.6785714285714286,0.2501375591504347,A75,A153,male,old,good 
+A11,0.6785714285714286,0.20441289754594477,A72,A152,male,old,bad +A14,0.125,0.38395510069329813,A73,A152,male,old,good +A12,0.3571428571428571,0.05315285572796302,A72,A152,male,old,bad +A14,0.14285714285714285,0.13744910311433917,A75,A153,female,old,good +A12,0.6785714285714286,0.49702872235061074,A71,A153,male,old,good +A14,0.4107142857142857,0.23528117090348852,A72,A152,male,old,good +A14,0.3035714285714286,0.2750082535490261,A73,A152,male,old,good +A14,0.3571428571428571,0.16127434796962695,A72,A152,female,old,good +A14,0.3571428571428571,0.02569604930119952,A75,A152,male,old,good +A14,0.14285714285714285,0.055683944095961266,A73,A152,male,young,good +A14,0.14285714285714285,0.08990866072411137,A75,A152,male,old,good +A12,0.24999999999999997,0.09210960713106636,A72,A151,female,old,bad +A11,0.7857142857142857,0.39760096841641906,A74,A153,male,old,good +A13,0.14285714285714285,0.03791130185979971,A74,A152,male,old,bad +A12,0.14285714285714285,0.03251898316275999,A74,A151,female,young,good +A12,0.3571428571428571,0.21255639925167824,A72,A151,female,old,bad +A13,0.3571428571428571,0.2697259821723341,A74,A152,male,young,good +A11,0.5178571428571429,0.22180037416088919,A73,A152,female,young,bad +A12,0.14285714285714285,0.07246616044899307,A73,A152,female,young,good +A11,0.14285714285714285,0.004897105755474855,A75,A152,male,old,good +A11,0.19642857142857142,0.05639925167822163,A73,A151,female,young,bad +A12,0.14285714285714285,0.021404203807637284,A73,A152,male,old,bad +A14,0.5714285714285714,0.1127434796962694,A75,A152,male,old,good +A11,0.3571428571428571,0.086552217453505,A71,A152,male,old,bad +A11,0.7857142857142857,0.41339275888632115,A75,A153,male,old,bad +A14,0.03571428571428571,0.07400682293386156,A74,A152,male,old,good +A11,0.14285714285714285,0.007428194123473095,A74,A152,female,old,good +A14,0.14285714285714285,0.028942445251458126,A73,A152,male,old,good +A12,0.3571428571428571,0.15511169803015298,A75,A153,male,old,good +A11,0.08928571428571427,0.06129635743369649,A74,A152,male,old,good +A14,0.03571428571428571,0.0534279740288324,A73,A152,male,old,good +A11,0.07142857142857142,0.17321448222735777,A74,A152,male,old,good +A11,0.24999999999999997,0.21475734565863322,A73,A152,male,old,bad +A12,0.19642857142857142,0.08407615274568064,A72,A151,female,old,bad +A14,0.08928571428571427,0.05425332893144051,A72,A151,female,young,good +A14,0.03571428571428571,0.08341586882359414,A73,A151,male,young,good +A11,0.4642857142857143,0.43039506988004844,A72,A152,male,young,good +A12,0.08928571428571427,0.268460437988335,A75,A153,female,old,bad +A14,0.3571428571428571,0.4620886981402003,A72,A152,male,old,bad +A11,0.3571428571428571,0.21563772422141522,A73,A152,male,old,good +A14,0.14285714285714285,0.09271486739297898,A75,A152,male,old,good +A14,0.3571428571428571,0.11813579839330911,A75,A152,male,old,bad +A14,0.19642857142857142,0.06652360515021459,A73,A152,female,old,good +A12,0.14285714285714285,0.023935292175635527,A74,A152,male,young,bad +A11,0.7857142857142857,0.2396280400572246,A75,A153,male,young,bad +A14,0.125,0.10410476504897105,A75,A152,male,old,good +A11,0.19642857142857142,0.03851656212171234,A75,A151,male,old,bad +A12,0.21428571428571427,0.050896885660834154,A71,A153,male,old,good +A11,0.14285714285714285,0.06866952789699571,A73,A152,female,young,good +A14,0.24999999999999997,0.09541102674149884,A73,A153,male,old,good +A12,0.5714285714285714,0.19555408825795093,A73,A152,female,old,bad +A11,0.3571428571428571,0.09557609772202047,A73,A151,male,young,bad 
+A14,0.3571428571428571,0.21398701441619897,A73,A152,male,old,good +A14,0.14285714285714285,0.024815670738417523,A73,A152,male,old,good +A14,0.8928571428571428,0.5054473423572136,A73,A152,male,old,good +A11,0.24999999999999997,0.16176956091119182,A73,A152,female,young,bad +A12,0.1607142857142857,0.034774953229888855,A72,A152,male,young,good +A12,0.3571428571428571,0.13810938703642567,A75,A153,male,old,good +A14,0.19642857142857142,0.10845163420270716,A73,A151,female,young,good +A11,0.14285714285714285,0.020688896225376913,A73,A152,female,young,bad +A12,0.7857142857142857,1.0,A73,A152,female,old,bad +A11,0.24999999999999997,0.0532078793881369,A73,A152,male,old,bad +A12,0.053571428571428575,0.12798503356443272,A73,A152,male,old,good +A11,0.03571428571428571,0.13189171343677783,A73,A151,male,old,good +A14,0.4107142857142857,0.12765489160338947,A73,A151,female,young,bad +A13,0.3571428571428571,0.1594585671838891,A73,A152,male,old,good +A11,0.03571428571428571,0.022889842632331906,A74,A152,female,old,good +A14,0.14285714285714285,0.051447122262572906,A73,A152,female,old,good +A14,0.4642857142857143,0.3138549576317817,A74,A152,male,old,good +A12,0.7321428571428571,0.24738637614174097,A72,A152,male,young,bad +A14,0.19642857142857142,0.0529877847474414,A75,A152,male,old,good +A14,0.03571428571428571,0.09067899196654561,A73,A152,male,old,good +A14,0.7857142857142857,0.6239683063717398,A73,A151,female,young,bad +A14,0.0,0.01931330472103004,A72,A151,female,young,good +A12,0.24999999999999997,0.05474854187300539,A73,A152,male,old,bad +A14,0.5714285714285714,0.30218994167492025,A74,A152,male,old,good +A12,0.17857142857142858,0.06382744580169472,A75,A152,male,old,good +A11,0.14285714285714285,0.035765379113018594,A73,A152,male,young,bad +A14,0.03571428571428571,0.0061626499394739735,A73,A152,female,old,good +A14,0.19642857142857142,0.09513590844062947,A75,A152,male,old,good +A14,0.5714285714285714,0.17051832287883792,A73,A152,female,old,bad +A14,0.14285714285714285,0.030097942115109497,A75,A152,female,old,bad +A14,0.03571428571428571,0.014746340926598437,A73,A152,female,old,good +A11,0.3571428571428571,0.1129635743369649,A75,A152,male,old,bad +A11,0.5714285714285714,0.1354682513480797,A75,A152,male,old,bad +A14,0.3571428571428571,0.06734896005282272,A72,A152,male,old,good +A14,0.24999999999999997,0.08638714647298339,A73,A152,male,old,good +A14,0.3571428571428571,0.09227467811158797,A73,A152,female,old,good +A11,0.19642857142857142,0.06509298998569385,A73,A151,female,young,good +A12,0.14285714285714285,0.03895675140310334,A74,A152,male,old,good +A14,0.5714285714285714,0.19725982172334106,A75,A152,female,old,good +A14,0.14285714285714285,0.028227137669197756,A73,A152,female,old,good +A14,0.03571428571428571,0.15010454495433037,A73,A152,male,old,good +A14,0.14285714285714285,0.05727963024100363,A73,A152,female,old,good +A11,0.14285714285714285,0.0233300319137229,A74,A152,male,young,bad +A14,1.0,0.3453835149114119,A73,A153,male,old,good +A14,0.14285714285714285,0.03994717728623308,A75,A152,male,old,good +A13,0.10714285714285712,0.20353251898316277,A72,A152,male,old,good +A12,0.10714285714285712,0.043908880818752064,A73,A152,male,young,good +A14,0.3571428571428571,0.13315725762077696,A75,A152,male,old,good +A11,0.4642857142857143,0.23841751953339935,A73,A152,male,old,good +A14,0.3571428571428571,0.11692527786948388,A73,A152,male,old,good +A11,0.14285714285714285,0.056784417299438755,A73,A151,female,young,bad +A14,0.3571428571428571,0.19296797622977882,A75,A153,female,old,good 
+A12,0.3571428571428571,0.2195994277539342,A73,A152,male,old,bad +A11,0.3571428571428571,0.3482447452404534,A71,A153,male,old,good +A11,0.14285714285714285,0.10294926818531969,A73,A152,male,old,good +A14,0.14285714285714285,0.10570045119401342,A72,A152,female,old,good +A14,0.19642857142857142,0.18399911962143722,A72,A152,female,old,good +A14,0.5714285714285714,0.24408495653130846,A73,A153,male,old,good +A14,0.19642857142857142,0.2424892703862661,A73,A152,male,old,good +A11,0.17857142857142858,0.48024650599757895,A75,A152,male,old,bad +A14,0.24999999999999997,0.04429404643996919,A72,A152,female,old,good +A11,0.7857142857142857,0.15412127214702323,A73,A152,male,old,bad +A12,0.4642857142857143,0.17558049961483438,A73,A151,female,young,bad +A14,0.3571428571428571,0.26961593485198637,A75,A152,male,old,good +A14,0.14285714285714285,0.09425552987784747,A74,A151,male,old,good +A12,0.24999999999999997,0.18377902498074172,A71,A152,male,old,good +A14,0.5714285714285714,0.3076923076923077,A75,A152,male,old,good +A14,0.3571428571428571,0.06707384175195334,A75,A151,male,old,good +A14,0.3571428571428571,0.12947067238912735,A73,A151,female,old,good +A11,0.14285714285714285,0.18713546825134808,A73,A152,male,old,good +A11,0.3571428571428571,0.15951359084406297,A72,A153,male,young,good +A11,0.3571428571428571,0.21239132827115662,A75,A151,male,young,bad +A12,0.08928571428571427,0.06531308462638935,A74,A152,male,old,bad +A13,0.3571428571428571,0.062011665015956854,A75,A153,female,old,good +A11,0.08928571428571427,0.06448772972378122,A72,A153,male,old,bad +A11,0.4642857142857143,0.18553978210630573,A75,A151,female,old,good +A11,0.3571428571428571,0.17029822823814242,A75,A151,male,old,bad +A12,0.14285714285714285,0.020633872565203038,A72,A152,male,old,good +A12,0.14285714285714285,0.040332342907450205,A72,A151,female,young,good +A12,0.4642857142857143,0.15081985253659075,A75,A152,female,young,good +A14,0.24999999999999997,0.049466270496313414,A73,A152,male,old,good +A12,0.08928571428571427,0.001430615164520744,A73,A151,male,young,good +A11,0.3571428571428571,0.06168152305491362,A73,A151,female,young,bad +A11,0.14285714285714285,0.029987894794761747,A72,A152,female,old,bad +A11,0.3571428571428571,0.056784417299438755,A73,A152,female,old,bad +A12,0.03571428571428571,0.0064927919005172245,A75,A152,male,old,good +A14,0.3571428571428571,0.055463849455265765,A74,A152,male,young,good +A14,0.19642857142857142,0.10652580609662154,A74,A151,female,old,good +A14,0.3571428571428571,0.14223616154946628,A75,A152,male,old,good +A11,0.3571428571428571,0.18609001870804448,A73,A151,female,young,good +A14,0.3571428571428571,0.05838010344448112,A74,A152,male,old,good +A11,0.5714285714285714,0.11543963904478925,A73,A152,male,old,good +A12,0.03571428571428571,0.010069329811819083,A72,A151,female,young,bad +A11,0.19642857142857142,0.20408275558490152,A73,A152,female,old,bad +A12,0.19642857142857142,0.13101133487399583,A73,A152,female,young,good +A14,0.14285714285714285,0.12077693408165512,A72,A151,male,old,good +A11,0.10714285714285712,0.04335864421701331,A74,A152,male,old,good +A14,0.7857142857142857,0.473643666776714,A74,A153,male,old,good +A13,0.3571428571428571,0.05639925167822163,A73,A152,male,old,good +A11,0.3571428571428571,0.3476394849785408,A73,A152,male,young,good +A13,0.3571428571428571,0.06019588423021899,A74,A152,male,old,bad +A12,0.4107142857142857,0.3144602178936943,A75,A152,male,old,good +A14,0.14285714285714285,0.020248706943985912,A75,A152,male,old,good 
+A14,0.10714285714285712,0.026246285902938263,A75,A153,male,old,good +A12,0.19642857142857142,0.06569825024760646,A72,A152,male,young,good +A11,0.5714285714285714,0.18542973478595798,A73,A152,male,old,good +A11,0.10714285714285712,0.10955210740618465,A72,A151,male,old,good +A12,1.0,0.799603829646748,A75,A153,female,old,bad +A14,0.3571428571428571,0.09750192582810609,A73,A152,female,old,good +A13,0.14285714285714285,0.019753494002421042,A72,A152,female,old,bad +A11,0.3571428571428571,0.15247056234180698,A73,A151,male,young,good +A11,0.24999999999999997,0.2272477165181028,A73,A152,male,old,good +A14,0.10714285714285712,0.05827005612413337,A73,A152,male,old,bad +A11,0.3571428571428571,0.14185099592824915,A73,A152,male,young,good +A12,0.24999999999999997,0.08501155496863651,A75,A151,female,old,good +A14,0.24999999999999997,0.07059535600308132,A72,A152,male,old,bad +A14,0.3571428571428571,0.26702982282381427,A72,A153,male,old,good +A11,0.4642857142857143,0.5707604269836029,A75,A153,male,old,good +A14,0.14285714285714285,0.06250687795752173,A73,A152,female,old,bad +A14,0.3571428571428571,0.23885770881479035,A73,A152,male,old,bad +A13,0.14285714285714285,0.17326950588753165,A75,A152,male,old,good +A13,0.03571428571428571,0.04385385715857819,A73,A152,female,old,good +A11,0.7857142857142857,0.4091009133927589,A74,A151,female,old,bad diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB-val.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB-val.csv new file mode 100644 index 0000000..757c07f --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB-val.csv @@ -0,0 +1,301 @@ +status,month,credit_amount,employment,housing,sex,age,credit +A11,0.24999999999999997,0.07169582920655881,A73,A152,male,old,bad +A14,0.19642857142857142,0.06107626279300099,A73,A152,male,old,good +A12,0.07142857142857142,0.02806206668867613,A74,A152,female,old,good +A12,0.24999999999999997,0.14575767580059426,A75,A152,male,old,good +A12,0.7321428571428571,0.15302079894354573,A73,A151,male,young,bad +A12,0.3571428571428571,0.24551557169582922,A73,A152,male,old,good +A12,0.24999999999999997,0.090073731704633,A73,A152,male,old,good +A12,0.5714285714285714,0.11131286453174864,A74,A152,male,old,good +A12,0.14285714285714285,0.035105095190932106,A75,A152,male,old,bad +A11,0.14285714285714285,0.07538241443820842,A73,A152,female,old,good +A14,0.19642857142857142,0.1352481567073842,A72,A152,male,old,good +A12,0.24999999999999997,0.03389457466710685,A73,A152,male,young,good +A12,0.07142857142857142,0.06404754044239022,A73,A152,male,old,good +A14,0.08928571428571427,0.05849015076482887,A75,A152,male,young,good +A14,0.3571428571428571,0.12809508088478047,A75,A152,male,old,good +A14,0.19642857142857142,0.1825685044569165,A75,A151,female,old,good +A14,0.3571428571428571,0.06289204357873886,A73,A152,male,old,good +A13,0.03571428571428571,0.10267414988445031,A73,A152,male,old,good +A14,0.14285714285714285,0.09695168922636734,A74,A152,female,old,good +A14,0.3571428571428571,0.09298998569384835,A75,A152,male,old,good +A13,0.03571428571428571,0.05904038736656762,A75,A152,male,old,good +A11,0.5714285714285714,0.09502586112028172,A75,A152,male,old,bad +A11,0.14285714285714285,0.047210300429184546,A74,A152,male,old,bad +A14,0.19642857142857142,0.05777484318256851,A75,A153,male,old,good +A11,0.03571428571428571,0.0505667436997909,A75,A152,male,old,good +A11,0.5714285714285714,0.16501595686145043,A74,A153,male,old,good 
+A14,0.625,0.1275998679432156,A73,A152,male,young,good +A12,0.08928571428571427,0.07147573456586331,A72,A152,male,old,good +A14,0.03571428571428571,0.024760647078243648,A75,A153,male,old,good +A12,0.14285714285714285,0.32067789149334214,A74,A152,male,old,good +A12,0.08928571428571427,0.05260261912622427,A75,A152,female,young,good +A12,0.3571428571428571,0.060801144492131615,A72,A152,female,young,bad +A14,0.3571428571428571,0.15010454495433037,A73,A152,male,old,good +A14,0.3571428571428571,0.07015516672169032,A74,A152,female,old,good +A14,0.03571428571428571,0.037526136238582586,A73,A152,female,young,good +A12,0.4107142857142857,0.12534389787608674,A72,A152,female,old,good +A11,0.24999999999999997,0.15758776273797734,A72,A151,female,old,bad +A13,0.4642857142857143,0.09122922856828436,A75,A152,male,old,bad +A14,0.14285714285714285,0.05447342357213601,A75,A152,female,old,good +A14,0.14285714285714285,0.02569604930119952,A75,A152,male,old,good +A12,0.19642857142857142,0.0695499064597777,A73,A152,male,young,good +A14,0.19642857142857142,0.08688235941454825,A75,A152,male,old,good +A14,0.14285714285714285,0.02145922746781116,A73,A152,male,old,good +A12,0.5714285714285714,0.39517992736876856,A73,A151,female,old,good +A14,0.14285714285714285,0.0710905689446462,A74,A152,male,old,good +A11,0.5714285714285714,0.5160669087707714,A74,A152,male,young,bad +A14,0.03571428571428571,0.00968416419060196,A75,A152,male,old,good +A14,0.19642857142857142,0.24061846594035435,A73,A152,male,old,bad +A11,0.24999999999999997,0.45234951028942444,A73,A151,female,young,good +A12,0.5714285714285714,0.1904368878617806,A73,A152,male,old,good +A14,0.7857142857142857,0.6877407285132606,A74,A152,male,old,good +A11,0.6785714285714286,0.4199405744470122,A74,A153,male,old,good +A14,1.0,0.5444040937603168,A74,A152,female,young,good +A12,0.3571428571428571,0.05480356553317926,A72,A152,male,young,bad +A14,0.5714285714285714,0.1840541432816111,A75,A152,male,old,good +A12,0.14285714285714285,0.18526466380543635,A75,A151,male,old,good +A14,0.14285714285714285,0.034995047870584356,A73,A152,female,young,good +A11,0.3571428571428571,0.07131066358534169,A74,A151,male,young,bad +A11,0.14285714285714285,0.10570045119401342,A73,A152,male,old,good +A14,0.5714285714285714,0.13002090899086607,A73,A152,male,old,good +A14,0.14285714285714285,0.04682513480796743,A73,A152,male,old,good +A12,0.24999999999999997,0.1513700891383295,A74,A151,female,old,good +A14,0.14285714285714285,0.1116430064927919,A73,A153,male,old,good +A12,0.10714285714285712,0.06993507208099482,A73,A152,male,old,good +A12,0.24999999999999997,0.04357873885770881,A73,A152,female,old,bad +A12,0.7857142857142857,0.3538571585781886,A73,A153,male,old,good +A11,0.5714285714285714,0.10426983602949268,A74,A152,male,young,bad +A11,0.3571428571428571,0.2512930560140861,A74,A152,male,old,bad +A12,0.7857142857142857,0.7797402883239792,A73,A152,male,young,bad +A11,0.14285714285714285,0.02426543413667877,A75,A152,male,old,bad +A12,0.24999999999999997,0.05441839991196214,A73,A153,male,old,good +A12,0.3035714285714286,0.1384945526576428,A75,A151,male,old,bad +A12,0.24999999999999997,0.33069219764498736,A74,A151,male,old,good +A11,0.24999999999999997,0.027511830086937385,A71,A152,female,old,bad +A14,0.14285714285714285,0.06894464619786508,A73,A151,male,old,good +A11,0.14285714285714285,0.024815670738417523,A73,A152,male,old,good +A14,0.125,0.04974138879718279,A71,A152,female,old,good +A14,0.3035714285714286,0.16011885110597557,A75,A152,male,old,good 
+A11,0.24999999999999997,0.10311433916584131,A73,A151,female,young,bad +A12,0.14285714285714285,0.02839220864971938,A73,A152,male,old,bad +A12,0.5714285714285714,0.3685484758446132,A73,A151,male,old,good +A13,0.08928571428571427,0.0482007263123143,A75,A152,male,old,good +A11,0.24999999999999997,0.05315285572796302,A72,A151,female,young,bad +A11,0.14285714285714285,0.10806646858149004,A73,A152,male,young,good +A14,0.3571428571428571,0.20864971937933313,A74,A152,male,old,good +A11,0.19642857142857142,0.04418399911962144,A72,A152,male,old,good +A12,0.3571428571428571,0.14559260482007264,A72,A152,male,old,good +A11,0.14285714285714285,0.327335754374381,A73,A151,male,old,bad +A12,0.3571428571428571,0.41311764058545175,A75,A151,female,old,good +A11,0.14285714285714285,0.06173654671508748,A74,A152,male,old,bad +A11,0.4642857142857143,0.33564432706063607,A75,A152,male,old,bad +A12,0.24999999999999997,0.2304941124683614,A75,A152,male,old,good +A12,0.7857142857142857,0.8485748872014967,A73,A152,male,young,bad +A14,0.14285714285714285,0.030483107736326624,A75,A152,male,old,good +A11,0.3035714285714286,0.07439198855507868,A75,A152,male,old,good +A12,0.14285714285714285,0.3421371189611533,A71,A152,male,old,bad +A14,0.24999999999999997,0.06729393639264884,A72,A152,male,old,good +A14,0.3571428571428571,0.2851876306811929,A71,A151,female,old,good +A14,0.03571428571428571,0.05436337625178826,A71,A152,male,old,good +A14,0.5178571428571429,0.13832948167712117,A73,A152,female,old,good +A12,0.14285714285714285,0.09199955981071861,A73,A152,male,old,bad +A13,0.4642857142857143,0.1522504677011115,A75,A152,male,old,good +A11,0.3571428571428571,0.052657642786398146,A72,A151,female,young,bad +A11,0.21428571428571427,0.13068119291295258,A75,A151,male,old,bad +A11,0.14285714285714285,0.023880268515461652,A73,A151,male,old,bad +A12,0.08928571428571427,0.07813359744690217,A72,A152,female,young,bad +A14,0.03571428571428571,0.02321998459337515,A71,A152,female,old,good +A14,0.3035714285714286,0.12820512820512822,A72,A152,male,old,bad +A14,0.3571428571428571,0.3336634752943766,A71,A152,male,old,good +A11,0.3571428571428571,0.1641906019588423,A72,A151,female,young,bad +A12,0.10714285714285712,0.3883569935072081,A71,A153,male,old,good +A12,0.1607142857142857,0.10184879498184218,A72,A152,female,young,good +A11,0.5714285714285714,0.32898646417959726,A72,A151,female,young,bad +A13,0.14285714285714285,0.15219544404093763,A73,A152,male,young,good +A14,0.14285714285714285,0.014966435567293938,A75,A152,male,old,good +A14,0.14285714285714285,0.0926598437328051,A75,A152,male,old,good +A14,0.7857142857142857,0.1376141740948608,A75,A152,male,old,good +A12,0.19642857142857142,0.06943985913942995,A73,A152,male,old,bad +A12,0.4642857142857143,0.12396830637173985,A75,A152,male,old,good +A14,0.3571428571428571,0.05705953560030813,A75,A152,female,old,good +A12,0.5714285714285714,0.6679322108506658,A73,A153,male,old,bad +A13,0.08928571428571427,0.05981071860900186,A72,A152,male,old,bad +A11,0.4107142857142857,0.12061186310113349,A75,A152,male,old,good +A14,0.10714285714285712,0.06129635743369649,A73,A152,female,old,good +A11,0.3571428571428571,0.11742049081104876,A75,A151,male,old,good +A11,0.08928571428571427,0.1402553097832068,A73,A151,male,old,good +A13,0.24999999999999997,0.06575327390778034,A74,A152,male,old,good +A14,0.03571428571428571,0.08215032463959503,A73,A152,male,old,good +A13,0.10714285714285712,0.06036095521074061,A74,A152,male,old,good +A11,0.3571428571428571,0.25420931000330144,A73,A153,male,old,bad 
+A14,0.14285714285714285,0.0817651590183779,A74,A152,female,old,good +A12,0.14285714285714285,0.027731924727632886,A75,A152,male,old,good +A14,0.5714285714285714,0.18075272367117862,A74,A152,male,old,good +A14,0.19642857142857142,0.0675690546935182,A73,A152,male,old,good +A14,0.4642857142857143,0.35721360184879497,A74,A152,male,old,good +A14,0.5714285714285714,0.37839771101573677,A72,A151,female,young,bad +A11,0.19642857142857142,0.030593155056674374,A73,A152,female,young,good +A13,0.3571428571428571,0.14537251017937713,A75,A153,male,old,good +A12,0.5714285714285714,0.30538131396500495,A73,A152,male,old,good +A14,0.3571428571428571,0.15709254979641246,A72,A152,male,young,good +A14,0.19642857142857142,0.06250687795752173,A73,A151,male,old,good +A14,0.24999999999999997,0.11852096401452623,A73,A152,female,old,good +A12,0.3571428571428571,0.19962583911081766,A72,A152,male,old,good +A12,0.3571428571428571,0.15637724221415208,A72,A151,male,young,bad +A14,0.4285714285714286,0.1371739848134698,A75,A152,male,old,good +A14,0.14285714285714285,0.07285132607021019,A73,A152,male,old,good +A11,0.3035714285714286,0.0872125013755915,A73,A152,female,young,bad +A12,0.3571428571428571,0.13579839330912294,A73,A151,female,young,bad +A14,0.7857142857142857,0.2527786948387807,A71,A151,male,old,bad +A11,0.3571428571428571,0.052217453505007144,A75,A152,male,old,bad +A11,0.17857142857142858,0.20485308682733577,A71,A153,male,young,good +A14,0.3571428571428571,0.1336524705623418,A72,A152,female,old,good +A14,0.14285714285714285,0.10360955210740619,A75,A153,female,old,good +A11,0.3571428571428571,0.05397821063057115,A75,A151,female,old,good +A14,0.3571428571428571,0.14427203697589966,A75,A152,male,old,good +A12,0.24999999999999997,0.11978650819852536,A75,A152,male,old,good +A11,1.0,0.3877517332452955,A75,A151,male,old,bad +A14,0.4642857142857143,0.40420380763728403,A75,A152,male,old,good +A11,0.625,0.7664245625619016,A74,A152,male,old,good +A12,0.08928571428571427,0.06883459887751733,A75,A152,female,old,bad +A12,0.7857142857142857,0.19775503466490593,A74,A153,male,old,bad +A14,0.6785714285714286,0.20864971937933313,A73,A152,male,old,good +A11,0.03571428571428571,0.06184659403543523,A71,A152,female,old,good +A14,0.3571428571428571,0.41619896555518876,A74,A152,male,old,good +A14,0.5714285714285714,0.03626059205458347,A75,A152,male,old,good +A14,0.24999999999999997,0.34158688235941453,A75,A152,male,old,bad +A14,0.3571428571428571,0.11962143721800374,A75,A152,male,old,good +A14,0.19642857142857142,0.13965004952129417,A74,A152,female,young,good +A11,0.14285714285714285,0.024595576097722022,A72,A152,male,old,bad +A14,0.3035714285714286,0.12248266754704522,A75,A152,male,old,good +A12,0.14285714285714285,0.0706503796632552,A72,A151,male,young,bad +A12,0.03571428571428571,0.04500935402222955,A75,A152,male,old,good +A12,0.14285714285714285,0.18284362275778587,A73,A152,female,young,good +A12,0.24999999999999997,0.05782986684274238,A75,A152,male,old,good +A14,0.6785714285714286,0.3805436337625179,A74,A151,male,old,good +A12,0.3571428571428571,0.09843732805106195,A72,A152,male,young,bad +A11,0.3571428571428571,0.12286783316826234,A73,A152,male,young,good +A13,0.3571428571428571,0.18526466380543635,A75,A151,male,young,good +A14,0.3571428571428571,0.09948277759436558,A73,A152,male,old,good +A11,0.3571428571428571,0.08281060856168151,A75,A152,female,old,good +A14,0.3571428571428571,0.07202597116760207,A74,A152,male,old,good +A11,0.3571428571428571,0.16017387476614944,A73,A151,male,old,bad 
+A14,0.7857142857142857,0.5486959392538792,A74,A152,male,old,good +A11,0.3571428571428571,0.062231759656652355,A73,A152,female,old,bad +A14,0.3035714285714286,0.10955210740618465,A75,A152,male,old,good +A14,0.3571428571428571,0.12589413447782546,A75,A152,male,old,bad +A11,0.5714285714285714,0.4300099042588313,A73,A152,female,young,bad +A12,0.7857142857142857,0.33927588863211183,A75,A151,female,old,bad +A12,0.3571428571428571,0.24683613954000222,A72,A152,female,young,bad +A14,0.14285714285714285,0.037416088918234836,A75,A152,male,old,good +A11,0.3571428571428571,0.1463629360625069,A74,A153,male,old,good +A12,0.24999999999999997,0.11158798283261802,A72,A152,female,old,bad +A14,0.5714285714285714,0.2318146803125344,A73,A152,male,old,bad +A14,0.5714285714285714,0.11797072741278751,A73,A152,female,young,good +A14,0.3571428571428571,0.1103224386486189,A72,A152,male,old,good +A14,0.625,0.4587872785297678,A75,A152,male,old,good +A12,0.3571428571428571,0.3022449653350941,A72,A153,female,young,good +A11,0.14285714285714285,0.05227247716518102,A73,A151,female,young,good +A14,0.4642857142857143,0.08897325850115549,A75,A152,male,old,good +A11,0.03571428571428571,0.0335644327060636,A75,A152,female,old,good +A11,0.3571428571428571,0.04258831297457907,A72,A152,male,old,bad +A11,1.0,0.36238582590513924,A75,A152,male,old,bad +A11,0.14285714285714285,0.07868383404864092,A75,A152,male,old,good +A14,0.3571428571428571,0.06399251678221635,A73,A152,male,old,good +A12,0.07142857142857142,0.03615054473423572,A72,A152,male,old,good +A11,0.3571428571428571,0.05188731154396391,A71,A151,female,old,bad +A11,0.14285714285714285,0.1280400572246066,A73,A152,male,old,good +A14,0.3571428571428571,0.2047980631671619,A74,A151,female,young,good +A12,0.3571428571428571,0.1930229998899527,A71,A151,female,young,good +A14,0.24999999999999997,0.11389897655992078,A71,A152,male,old,good +A14,0.3571428571428571,0.12176735996478487,A74,A152,male,old,good +A13,0.03571428571428571,0.06014086057004511,A75,A152,male,old,good +A14,0.4285714285714286,0.4167492021569275,A72,A151,male,old,good +A14,0.3035714285714286,0.16644657202597118,A75,A152,male,old,good +A12,0.24999999999999997,0.16474083856058105,A73,A152,female,old,good +A14,0.4107142857142857,0.46016287003411466,A73,A152,male,old,good +A11,0.5714285714285714,0.08759766699680863,A72,A152,female,old,bad +A14,0.3571428571428571,0.12991086167051832,A75,A152,male,old,good +A12,0.08928571428571427,0.09794211510949709,A74,A152,male,young,good +A14,0.3571428571428571,0.49669858038956755,A73,A153,male,old,good +A14,0.14285714285714285,0.045449543303620554,A73,A152,male,old,good +A14,0.10714285714285712,0.07131066358534169,A73,A152,male,old,good +A11,0.14285714285714285,0.09040387366567623,A73,A152,female,old,good +A14,0.14285714285714285,0.10404974138879718,A74,A152,male,old,good +A14,0.5714285714285714,0.425332893144052,A72,A151,male,old,bad +A14,0.14285714285714285,0.055793991416309016,A75,A151,male,old,good +A11,0.24999999999999997,0.10999229668757565,A74,A152,male,old,good +A11,0.7857142857142857,0.324254429404644,A75,A153,female,old,bad +A11,0.3035714285714286,0.1842742379223066,A74,A151,female,old,good +A12,0.19642857142857142,0.07081545064377683,A75,A152,male,old,good +A14,0.5714285714285714,0.590073731704633,A71,A152,female,old,bad +A12,0.24999999999999997,0.2226807527236712,A75,A152,male,old,bad +A11,0.24999999999999997,0.05172224056344228,A71,A153,female,old,bad +A11,0.14285714285714285,0.056344228018047754,A72,A152,female,old,bad 
+A14,0.08928571428571427,0.11863101133487398,A71,A152,male,old,good +A12,0.3571428571428571,0.33856058104985143,A72,A152,male,old,good +A14,0.10714285714285712,0.05705953560030813,A75,A152,male,old,good +A12,0.6785714285714286,0.3138549576317817,A74,A152,female,old,good +A11,0.03571428571428571,0.09392538791680423,A74,A152,female,old,good +A14,0.3571428571428571,0.07164080554638494,A74,A152,male,old,good +A14,0.3571428571428571,0.03780125453945196,A72,A152,male,old,good +A14,0.3571428571428571,0.09805216242984482,A75,A153,male,old,good +A14,0.5714285714285714,0.5846263893474194,A75,A152,male,old,good +A12,0.3571428571428571,0.2659843732805106,A75,A152,female,old,good +A11,0.5714285714285714,0.2712116209970287,A74,A152,male,old,bad +A12,0.03571428571428571,0.044018928139099814,A71,A152,male,old,good +A13,0.5714285714285714,0.2015516672169033,A73,A152,male,young,good +A13,0.24999999999999997,0.15401122482667548,A72,A152,female,old,good +A11,0.5714285714285714,0.44486629250577747,A75,A153,male,old,bad +A11,0.14285714285714285,0.08847804555959062,A72,A151,female,young,good +A11,0.14285714285714285,0.25272367117860683,A75,A151,male,old,bad +A14,0.24999999999999997,0.19423352041377795,A72,A152,male,old,good +A12,0.14285714285714285,0.09783206778914934,A73,A152,male,old,good +A14,0.3571428571428571,0.11191812479366127,A74,A152,male,old,good +A12,0.19642857142857142,0.1363486299108617,A74,A152,male,old,good +A11,0.4107142857142857,0.27748431825685044,A71,A152,male,old,bad +A12,1.0,0.4900957411687025,A73,A153,male,old,good +A14,0.24999999999999997,0.09315505667436998,A72,A152,female,young,bad +A11,0.3571428571428571,0.148949048090679,A75,A152,male,old,good +A11,0.24999999999999997,0.11769560911191813,A72,A152,female,old,good +A11,0.24999999999999997,0.08979861340376362,A73,A151,female,young,bad +A12,0.4642857142857143,0.08060966215472654,A73,A152,female,old,good +A11,0.7857142857142857,0.33459887751733247,A75,A153,male,old,bad +A14,0.19642857142857142,0.0586552217453505,A73,A152,male,old,good +A11,0.03571428571428571,0.8058765269065697,A75,A152,male,old,bad +A14,0.4642857142857143,0.15555188731154398,A75,A152,male,old,good +A14,0.0,0.07120061626499395,A74,A152,male,old,good +A12,0.14285714285714285,0.041652910751623196,A73,A152,male,young,good +A14,0.10714285714285712,0.10008803785627819,A73,A152,male,old,good +A12,0.24999999999999997,0.7002310993727302,A71,A153,female,old,bad +A11,0.24999999999999997,0.1570375261362386,A74,A152,male,old,good +A12,0.5714285714285714,0.20578848905029165,A72,A152,female,old,good +A11,0.14285714285714285,0.17877187190491914,A73,A152,female,old,bad +A12,0.08928571428571427,0.048750962914053037,A75,A153,male,old,bad +A13,0.19642857142857142,0.007813359744690218,A72,A151,female,young,good +A11,0.24999999999999997,0.048475844613183675,A71,A152,female,old,bad +A12,0.14285714285714285,0.06459777704412897,A74,A152,male,old,good +A11,0.6785714285714286,0.17299438758666227,A71,A152,male,old,good +A11,0.7857142857142857,0.3713546825134808,A74,A152,male,old,bad +A11,0.4642857142857143,0.6464179597226807,A72,A152,male,old,bad +A14,0.4642857142857143,0.14201606690877078,A73,A152,female,old,good +A12,0.3571428571428571,0.09436557719819522,A73,A151,female,old,good +A14,0.3571428571428571,0.07411687025420931,A75,A153,male,old,good +A12,0.03571428571428571,0.09975789589523495,A72,A151,male,old,good +A12,0.3928571428571428,0.42456256190161773,A72,A152,male,old,good +A12,0.7857142857142857,0.4034334763948498,A71,A153,male,old,good 
+A14,0.3571428571428571,0.24369979091009136,A74,A152,male,old,good +A14,0.14285714285714285,0.30543633762517886,A75,A151,male,old,good +A14,0.3571428571428571,0.1990756025090789,A75,A151,female,old,good +A14,0.14285714285714285,0.11775063277209201,A75,A152,male,old,good +A12,0.19642857142857142,0.3631561571475735,A71,A152,male,old,bad +A14,0.08928571428571427,0.0675690546935182,A74,A152,male,young,bad +A14,0.03571428571428571,0.10069329811819082,A73,A152,male,young,good +A12,0.03571428571428571,0.03747111257840871,A72,A152,female,old,bad +A14,0.08928571428571427,0.05469351821283151,A75,A151,female,old,good +A11,0.10714285714285712,0.10355452844723231,A72,A151,female,old,good +A14,0.24999999999999997,0.0935402222955871,A74,A152,male,old,good +A11,0.03571428571428571,0.06184659403543523,A73,A152,male,old,good +A12,0.03571428571428571,0.06597336854847584,A75,A152,male,old,good +A14,0.24999999999999997,0.07329151535160118,A75,A152,male,old,good diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE-test.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE-test.csv new file mode 100644 index 0000000..4fbf0ba --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE-test.csv @@ -0,0 +1,201 @@ +month,credit_amount,sex,age,credit,status=A11,status=A12,status=A13,status=A14,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153 +0.5714285714285714,0.30802244965335096,male,young,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.11290855067679102,male,old,bad,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.04693518212831517,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.0704302850225597,male,old,bad,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.08093980411576977,male,old,good,1,0,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.1389347419390338,male,old,bad,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.08974358974358974,female,old,good,0,0,1,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.15291075162319798,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.07741828986464179,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.3035714285714286,0.11571475734565863,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.13255199735886433,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.07384175195333993,female,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.09216463079124024,male,old,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.594475624518543,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.09447562451854297,female,young,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.13321228128095083,female,old,bad,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.17497523935292178,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.5714285714285714,0.23236491691427316,male,old,good,0,0,1,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.24347969626939586,female,young,good,0,0,0,1,0,1,0,0,0,1,0,0 +0.3571428571428571,0.041212721470232194,male,young,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.7857142857142857,0.5893584241223726,male,old,bad,0,1,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.05876526906569825,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.03571428571428571,0.019753494002421042,female,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.3035714285714286,0.1509298998569385,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +1.0,0.847529437658193,male,young,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.19642857142857142,0.17156377242214152,male,young,good,0,0,0,1,0,0,0,0,1,1,0,0 
+0.7857142857142857,0.24969736986904373,male,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.10982722570705403,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.08237041928029053,male,young,good,1,0,0,0,0,1,0,0,0,0,1,0 +0.7857142857142857,0.2666446572025971,female,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.034885000550236606,male,old,bad,0,1,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.32023770221195114,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.06228678331682623,male,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.02767690106745901,female,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.13464289644547156,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.01870804445911742,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.4642857142857143,0.09183448883019699,male,old,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.3035714285714286,0.07686805326290305,male,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.3090128755364807,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.3132496973698691,female,old,bad,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.026246285902938263,male,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.14097061736546715,female,old,good,1,0,0,0,0,0,0,0,1,1,0,0 +0.7857142857142857,0.22328601298558381,female,young,bad,1,0,0,0,0,1,0,0,0,1,0,0 +0.4642857142857143,0.3266754704522945,male,young,good,1,0,0,0,0,0,0,1,0,1,0,0 +0.24999999999999997,0.05749972488169913,female,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.3004842082095301,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.7857142857142857,0.32243864861890614,male,old,good,0,0,0,1,0,0,1,0,0,0,0,1 +0.4642857142857143,0.44767249917464513,male,old,bad,0,1,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.09414548255749973,female,young,good,0,0,1,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.04996148343787829,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06382744580169472,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.5178571428571429,0.385330692197645,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.03571428571428571,0.023825244855287777,female,old,good,0,0,1,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.06399251678221635,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.2817761637504127,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.2217453505007153,female,old,bad,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.06096621547265324,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.10714285714285712,0.0292175635523275,male,old,good,0,0,1,0,0,0,0,0,1,0,0,1 +0.0,0.17222405634422802,female,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.08928571428571427,0.045779685264663805,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.11483437878287663,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.053571428571428575,0.03279410146362936,male,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.7857142857142857,0.45322988885220644,female,young,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.19642857142857142,0.09106415758776273,male,old,good,0,0,1,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.08550676791020138,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.022119511389897654,female,young,good,1,0,0,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.059480576647958625,male,young,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.15566193463189174,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.1340926598437328,male,old,good,0,0,1,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.05942555298778475,male,old,good,0,0,1,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.05051172003961703,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 
+0.5714285714285714,0.17585561791570375,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.08928571428571427,0.052217453505007144,female,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.02701661714537251,female,young,bad,1,0,0,0,1,0,0,0,0,0,1,0 +0.24999999999999997,0.39198855507868385,male,old,good,0,1,0,0,1,0,0,0,0,0,1,0 +0.24999999999999997,0.10559040387366568,male,old,bad,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.05986574226917574,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.33944095961263343,female,old,good,1,0,0,0,0,0,0,0,1,0,0,1 +0.19642857142857142,0.039892153626059204,male,young,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.13392758886321118,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.23528117090348852,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.038571585781886214,female,old,bad,0,1,0,0,0,1,0,0,0,1,0,0 +0.8928571428571428,0.8635963464289644,male,old,bad,0,1,0,0,0,1,0,0,0,1,0,0 +0.19642857142857142,0.18190822053483,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.0,0.06630351050951909,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.09271486739297898,male,old,bad,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.1581379993397161,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.07114559260482008,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.14933421371189612,male,old,good,1,0,0,0,0,0,0,0,1,0,0,1 +0.24999999999999997,0.06558820292725871,male,old,bad,1,0,0,0,0,0,0,1,0,0,0,1 +1.0,0.7431495543083525,male,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.2857142857142857,0.3245295477055134,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.3336084516342027,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.41900517222405637,male,old,bad,1,0,0,0,0,0,0,0,1,0,0,1 +0.14285714285714285,0.05760977220204688,male,young,good,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.121712336304611,male,young,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.3582590513920986,male,old,bad,0,0,0,1,0,0,1,0,0,1,0,0 +0.5714285714285714,0.5129305601408606,male,old,bad,0,0,0,1,0,1,0,0,0,0,1,0 +0.5714285714285714,0.39391438318476946,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.5714285714285714,0.36519203257400684,male,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.18548475844613185,male,old,bad,0,0,0,1,0,0,0,0,1,0,1,0 +0.03571428571428571,0.06063607351160999,female,young,good,1,0,0,0,1,0,0,0,0,1,0,0 +0.14285714285714285,0.06767910201386595,male,old,good,0,0,1,0,1,0,0,0,0,0,0,1 +0.24999999999999997,0.042808407615274574,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.035875426433366345,male,young,bad,1,0,0,0,0,0,0,1,0,1,0,0 +0.10714285714285712,0.1136238582590514,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.08928571428571427,0.09519093210080334,female,young,bad,0,0,0,1,0,1,0,0,0,1,0,0 +0.3571428571428571,0.07169582920655881,female,young,good,0,1,0,0,0,0,0,1,0,1,0,0 +0.10714285714285712,0.05447342357213601,female,old,bad,0,0,1,0,0,0,0,0,1,0,0,1 +0.14285714285714285,0.05353802134918015,male,old,bad,0,1,0,0,0,0,0,0,1,1,0,0 +0.3035714285714286,0.12963574336964895,female,old,good,1,0,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.06377242214152085,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.07252118410916694,female,young,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.01785714285714285,0.1759656652360515,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.24999999999999997,0.1992956971497744,female,old,good,0,1,0,0,1,0,0,0,0,0,1,0 +0.14285714285714285,0.116430064927919,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 
+0.5714285714285714,0.4087157477715418,female,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.5033564432706064,male,old,bad,0,1,0,0,0,1,0,0,0,1,0,0 +0.5714285714285714,0.5126004181798173,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.0928799383735006,male,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.10448993067018818,male,old,bad,1,0,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.350170573346539,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.14355672939363928,male,old,good,0,0,0,1,1,0,0,0,0,0,1,0 +0.2857142857142857,0.13090128755364808,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.32761087267525035,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.18851105975569496,male,old,good,1,0,0,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.114229118520964,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.17244415098492352,male,old,bad,1,0,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.1546164850885881,male,old,bad,0,1,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.05771981952239463,male,old,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.06426763508308572,male,old,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.04104765048971058,female,young,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.2857142857142857,0.1780015406624849,male,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.24999999999999997,0.17211400902388027,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.08928571428571427,0.05359304500935402,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.08928571428571427,0.02723671178606801,female,old,bad,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.08858809287993837,male,old,good,0,1,0,0,1,0,0,0,0,0,1,0 +0.2857142857142857,0.2213051612193243,female,young,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.053571428571428575,0.11912622427643886,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.19406844943325632,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.4642857142857143,0.10625068779575217,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.239022779795312,male,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.028007043028502255,male,old,bad,1,0,0,0,0,0,0,1,0,0,1,0 +0.4642857142857143,0.23550126554418402,female,old,good,0,0,0,1,0,0,0,1,0,1,0,0 +0.10714285714285712,0.09210960713106636,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.17591064157587763,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +1.0,0.7580609662154726,male,old,bad,0,1,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.14383184769450866,male,old,bad,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.05529877847474414,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.6785714285714286,0.22669747991636405,male,old,bad,1,0,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.03719599427753935,female,old,good,0,0,0,1,1,0,0,0,0,0,1,0 +0.19642857142857142,0.030373060415978873,male,old,bad,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.04632992186640256,female,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.3035714285714286,0.07378672829316606,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.23131946737096953,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.023770221195113902,female,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.24999999999999997,0.3362495873225487,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.055463849455265765,female,young,good,0,0,0,1,0,1,0,0,0,1,0,0 +0.4107142857142857,0.12490370859469573,male,young,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.00484208209530098,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.2857142857142857,0.37454605480356556,male,old,good,0,1,0,0,0,0,0,1,0,1,0,0 
+0.2857142857142857,0.10922196544514141,male,old,bad,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.06459777704412897,female,old,good,0,0,1,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.6095521074061846,male,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.05496863651370089,male,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.08928571428571427,0.10278419720479806,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.05694948827996038,female,old,bad,1,0,0,0,0,0,0,1,0,1,0,0 +0.7857142857142857,0.18311874105865522,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.10714285714285712,0.14295146913172666,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.17932210850665786,female,young,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.07142857142857142,0.05029162539892153,male,old,good,1,0,0,0,0,0,0,0,1,0,0,1 +0.3035714285714286,0.2050181578078574,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.18498954550456698,female,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +1.2142857142857142,0.2941014636293606,male,young,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.025255860019808517,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.43903378452734676,male,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.3035714285714286,0.17409486079013978,male,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.6802024870694399,male,old,bad,0,1,0,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.08253549026081215,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.15555188731154398,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.2791900517222406,female,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.0,0.06844943325630021,male,old,good,0,0,1,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.12809508088478047,female,old,good,1,0,0,0,1,0,0,0,0,0,0,1 +0.053571428571428575,0.11439418950148562,female,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.022229558710245404,male,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.4337515131506548,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.19880048420820953,male,old,good,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.39738087377572356,female,young,good,0,1,0,0,1,0,0,0,0,1,0,0 +0.14285714285714285,0.07180587652690656,male,old,bad,0,0,0,1,0,0,0,0,1,0,0,1 +0.14285714285714285,0.055683944095961266,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.08928571428571427,0.02971277649389237,female,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.27423792230659183,male,old,bad,0,1,0,0,1,0,0,0,0,0,1,0 +0.4642857142857143,0.11863101133487398,female,young,bad,1,0,0,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.1496093320127655,female,young,bad,0,1,0,0,0,1,0,0,0,1,0,0 +0.3035714285714286,0.6839440959612633,male,old,bad,0,0,0,1,0,0,0,0,1,0,0,1 +0.14285714285714285,0.045779685264663805,male,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.4107142857142857,0.20166171453725104,male,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.19709475074281943,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.09232970177176185,male,old,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.5286123032904149,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE-train.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE-train.csv new file mode 100644 index 0000000..7fa0cc9 --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE-train.csv @@ -0,0 +1,501 @@ 
+month,credit_amount,sex,age,credit,status=A11,status=A12,status=A13,status=A14,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153 +0.5714285714285714,0.20408275558490152,male,old,good,1,0,0,0,1,0,0,0,0,0,1,0 +0.08928571428571427,0.18306371739848135,male,old,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.12462859029382635,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.09601628700341147,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +1.0,0.5566193463189171,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.39303400462198745,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.008748761967646089,female,old,good,0,0,1,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.12418840101243535,male,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.4642857142857143,0.25096291405304283,female,young,good,0,0,0,1,0,0,0,1,0,1,0,0 +0.7857142857142857,0.3471992956971498,male,young,bad,0,1,0,0,0,0,0,1,0,0,1,0 +0.5714285714285714,0.48332783096731596,male,old,bad,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.1816881258941345,male,old,bad,1,0,0,0,0,0,0,1,0,0,1,0 +0.2857142857142857,0.3421371189611533,male,old,good,0,1,0,0,1,0,0,0,0,0,1,0 +0.4642857142857143,0.1250137559150435,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.14141080664685815,male,old,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.029327610872675252,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3035714285714286,0.19329811819082207,male,young,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.4642857142857143,0.39809618135798397,female,old,bad,0,0,0,1,1,0,0,0,0,0,1,0 +0.19642857142857142,0.06344228018047761,female,old,good,1,0,0,0,0,0,1,0,0,1,0,0 +0.125,0.24876196764608782,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.20743919885550788,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.052327500825354895,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.7857142857142857,0.28232640035215145,male,old,good,0,1,0,0,1,0,0,0,0,0,0,1 +0.3571428571428571,0.28947947617475517,male,young,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.04666006382744581,female,old,good,0,0,0,1,1,0,0,0,0,0,1,0 +0.7857142857142857,0.32871134587872786,male,old,bad,0,1,0,0,0,0,0,0,1,0,0,1 +0.125,0.06289204357873886,female,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.08928571428571427,0.09183448883019699,male,old,good,0,1,0,0,0,0,0,1,0,1,0,0 +0.19642857142857142,0.2638384505337295,female,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.7857142857142857,0.3136898866512601,female,young,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.07142857142857142,0.026466380543633764,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.02085396720589854,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.08572686255089688,female,young,bad,0,0,0,1,0,0,0,1,0,0,1,0 +0.3035714285714286,0.05161219324309453,female,old,bad,0,1,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.16424562561901618,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.6223175965665235,female,young,bad,0,1,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.04974138879718279,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.19808517662594918,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.11604489930670188,male,old,bad,1,0,0,0,1,0,0,0,0,0,1,0 +0.3571428571428571,0.07059535600308132,female,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.5714285714285714,0.21789369428854408,male,old,bad,0,0,1,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.19847034224716628,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06289204357873886,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 
+0.24999999999999997,0.08528667326950588,male,young,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.1101023440079234,female,old,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.03224386486189061,female,young,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.06025090789039286,male,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.2500825354902608,female,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.025200836359634642,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.05716958292065588,male,young,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.08880818752063387,female,old,bad,0,0,0,1,0,0,1,0,0,0,1,0 +0.7857142857142857,0.5342797402883239,female,old,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.27797953119841534,male,old,good,1,0,0,0,0,0,0,0,1,0,0,1 +0.03571428571428571,0.08924837680202487,male,old,good,1,0,0,0,1,0,0,0,0,0,0,1 +0.3571428571428571,0.12044679212061186,female,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.06261692527786948,female,old,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.7857142857142857,0.6558270056124132,male,old,good,0,1,0,0,1,0,0,0,0,0,0,1 +0.24999999999999997,0.13271706833938596,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.1607142857142857,0.06377242214152085,female,old,good,0,0,0,1,1,0,0,0,0,0,1,0 +0.19642857142857142,0.1866952789699571,female,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.7857142857142857,0.358093980411577,female,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.0880928799383735,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.4107142857142857,0.17420490811048753,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.4107142857142857,0.2678001540662485,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.10714285714285712,0.07411687025420931,male,old,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.03571428571428571,0.06228678331682623,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.10714285714285712,0.05397821063057115,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.13101133487399583,female,old,bad,0,1,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.10454495433036205,female,young,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.1474634092659844,female,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.3582590513920986,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.3035714285714286,0.13728403213381754,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.11417409486079015,male,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.05249257180587652,male,old,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.04396390447892594,female,young,good,1,0,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.11235831407505227,male,old,bad,0,0,0,1,1,0,0,0,0,0,1,0 +0.08928571428571427,0.06597336854847584,female,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.24999999999999997,0.39952679652250467,male,old,bad,1,0,0,0,0,0,0,0,1,0,0,1 +0.08928571428571427,0.03675580499614835,female,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.1153295917244415,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.07444701221525256,female,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.125,0.2011114779355123,male,old,good,1,0,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.0482007263123143,female,young,good,0,0,0,1,0,1,0,0,0,1,0,0 +0.3035714285714286,0.14707824364476726,female,old,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.1620446792120612,female,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.24573566633652472,male,old,good,1,0,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.7871134587872785,male,young,bad,0,1,0,0,1,0,0,0,0,0,1,0 +0.5714285714285714,0.2884890502916254,male,old,good,1,0,0,0,0,0,0,0,1,0,0,1 
+0.3571428571428571,0.0734565863321228,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.18377902498074172,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.1380543633762518,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.10300429184549356,male,old,good,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.09640145262462858,male,old,good,0,1,0,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.09084406294706723,male,old,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.06839440959612633,female,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.03571428571428571,0.052767690106745896,male,old,bad,0,1,0,0,1,0,0,0,0,0,1,0 +0.7857142857142857,0.20518322878837902,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.4110817651590184,female,old,good,1,0,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.1870804445911742,female,young,good,1,0,0,0,0,1,0,0,0,1,0,0 +0.24999999999999997,0.08968856608341587,male,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.09304500935402223,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.034334763948497854,female,young,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.4107142857142857,0.2718168812589414,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.5714285714285714,0.5627269725982172,male,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.3571428571428571,0.22565203037306042,female,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.06195664135578298,female,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.4642857142857143,0.1786068009243975,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.6428571428571428,0.3162759986794322,male,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.4285714285714286,0.20666886761307363,male,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.1701331572576208,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.13051612193243095,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.11742049081104876,male,old,bad,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.07752833718498954,female,old,bad,1,0,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.1781666116430065,female,young,good,0,0,0,1,0,0,0,1,0,0,1,0 +1.0,0.39440959612633436,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.10938703642566304,female,young,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.059590623968306375,male,old,bad,1,0,0,0,1,0,0,0,0,0,0,1 +0.3214285714285714,0.05683944095961263,female,young,good,0,0,0,1,0,0,0,1,0,1,0,0 +0.03571428571428571,0.17981732144822274,male,old,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.018432926158248045,male,young,good,0,1,0,0,0,0,1,0,0,1,0,0 +0.625,0.25767580059425554,male,young,bad,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.056564322658743255,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.08352591614394189,male,young,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.17051832287883792,male,old,bad,1,0,0,0,0,1,0,0,0,0,0,1 +0.08928571428571427,0.0627819962583911,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.17453505007153078,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.10714285714285712,0.10432485968966655,female,young,good,0,0,0,1,0,1,0,0,0,1,0,0 +0.19642857142857142,0.06988004842082095,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.3627159678661825,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.09909761197314845,male,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.15109497083746012,male,young,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.18763068119291296,female,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.06558820292725871,female,young,bad,1,0,0,0,0,0,0,1,0,1,0,0 
+0.08928571428571427,0.04886101023440079,male,young,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.3037856278199626,female,young,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.24999999999999997,0.12930560140860572,male,old,bad,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.19252778694838782,female,old,good,0,0,1,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.15456146142841423,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.03571428571428571,0.022669747991636405,male,old,good,1,0,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.09298998569384835,male,old,good,1,0,0,0,0,1,0,0,0,0,0,1 +0.5714285714285714,0.1140640475404424,male,young,good,0,1,0,0,0,0,0,1,0,1,0,0 +0.7857142857142857,0.6577528337184989,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.28925938153405967,male,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.24999999999999997,0.010069329811819083,female,young,bad,0,0,0,1,1,0,0,0,0,1,0,0 +0.03571428571428571,0.23995818201826785,female,old,bad,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.13821943435677342,female,young,bad,0,1,0,0,0,0,0,0,1,0,1,0 +0.4642857142857143,0.2213051612193243,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.625,0.6333223286012986,male,old,good,0,1,0,0,0,0,0,1,0,1,0,0 +0.5714285714285714,0.10867172884340266,male,old,bad,0,1,0,0,0,0,0,0,1,0,0,1 +0.14285714285714285,0.06734896005282272,female,old,good,0,0,1,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.06338725652030373,female,old,good,0,0,0,1,0,0,0,1,0,1,0,0 +0.24999999999999997,0.09150434686915374,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.17857142857142858,0.030373060415978873,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.05617915703752613,male,old,bad,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.055463849455265765,female,old,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.18240343347639487,male,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.17255419830527127,male,old,bad,1,0,0,0,0,0,0,0,1,0,0,1 +0.14285714285714285,0.1098822493672279,female,old,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.30560140860570045,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.03906679872345109,female,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.24999999999999997,0.044349070100143065,male,old,bad,0,1,0,0,0,0,0,0,1,0,1,0 +0.6785714285714286,0.3322878837900297,male,old,good,0,0,1,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.13772422141520854,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.10714285714285712,0.10784637394079453,male,young,bad,0,0,0,1,0,0,1,0,0,1,0,0 +0.7857142857142857,0.5528227137669197,male,old,bad,1,0,0,0,0,0,0,1,0,0,0,1 +0.5714285714285714,0.7740728513260702,male,old,bad,0,1,0,0,0,0,0,0,1,0,0,1 +0.4642857142857143,0.2200396170353252,male,old,bad,0,1,0,0,1,0,0,0,0,0,1,0 +0.3571428571428571,0.03736106525806096,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.08928571428571427,0.039011775063277215,female,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.3035714285714286,0.13931990756025092,female,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.7857142857142857,0.44613183668977663,female,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.11285352701661715,female,young,good,0,0,0,1,0,1,0,0,0,1,0,0 +0.7857142857142857,0.18245845713656875,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.20446792120611865,female,old,bad,1,0,0,0,0,0,0,0,1,1,0,0 +0.03571428571428571,0.05430835259161439,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.03994717728623308,female,young,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.03659073401562672,female,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.03835149114119071,male,old,bad,0,0,1,0,0,0,0,1,0,0,0,1 
+0.3571428571428571,0.14168592494772753,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.3035714285714286,0.11213821943435677,female,young,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.10714285714285712,0.145867723120942,female,old,good,0,0,0,1,0,1,0,0,0,1,0,0 +0.7321428571428571,0.23803235391218225,male,old,good,0,1,0,0,1,0,0,0,0,0,1,0 +0.7857142857142857,0.3845053372950369,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.5714285714285714,0.13733905579399142,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.08215032463959503,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.08928571428571427,0.06140640475404424,female,young,bad,1,0,0,0,0,1,0,0,0,1,0,0 +0.24999999999999997,0.06905469351821283,male,old,good,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.005117200396170352,female,old,good,1,0,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.10856168152305491,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.06850445691647408,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.11428414218113787,female,young,bad,0,0,1,0,0,1,0,0,0,0,1,0 +0.7321428571428571,0.6364036535710355,male,old,bad,1,0,0,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.06789919665456146,male,young,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.4844833278309673,male,old,good,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.07730824254429404,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.06619346318917134,female,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.053813139650049524,female,young,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.037526136238582586,female,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.5714285714285714,0.4311654011224827,male,old,bad,0,1,0,0,0,0,0,0,1,0,1,0 +0.03571428571428571,0.06030593155056674,male,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.5714285714285714,0.1437218003741609,male,old,good,0,1,0,0,0,0,0,0,1,0,0,1 +0.3571428571428571,0.15808297567954221,female,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +1.0,0.3938593595245956,female,young,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.19642857142857142,0.055793991416309016,male,young,bad,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.07175085286673269,female,young,good,0,0,0,1,0,0,0,1,0,1,0,0 +0.3035714285714286,0.18719049191152196,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.7857142857142857,0.20254209310003302,male,old,bad,1,0,0,0,0,0,0,1,0,0,0,1 +0.03571428571428571,0.061131286453174866,male,old,good,1,0,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.052437548145702645,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.18031253438978762,male,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.5714285714285714,0.4184549356223176,female,young,bad,0,0,0,1,0,0,1,0,0,0,1,0 +0.3035714285714286,0.17095851216022892,female,old,good,1,0,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.10179377132166832,male,old,bad,0,0,1,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.0505667436997909,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.5714285714285714,0.47854077253218885,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.24999999999999997,0.09276989105315285,male,young,good,1,0,0,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.1794871794871795,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.3695939253879168,male,old,good,0,1,0,0,0,0,0,1,0,1,0,0 +0.4642857142857143,0.11461428414218112,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.09843732805106195,female,young,bad,1,0,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.49636843842852424,male,young,bad,1,0,0,0,0,0,0,1,0,0,1,0 +0.3035714285714286,0.07274127874986244,female,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.03571428571428571,0.011224826675470454,male,young,good,0,1,0,0,0,1,0,0,0,0,1,0 
+0.5714285714285714,0.33311323869263787,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.10713106635853417,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.7857142857142857,0.40601958842302194,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.10714285714285712,0.05639925167822163,female,young,good,0,0,1,0,0,1,0,0,0,0,1,0 +0.7857142857142857,0.37795752173434577,male,old,bad,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.6783867062837019,female,old,bad,0,1,0,0,0,0,0,0,1,0,0,1 +0.24999999999999997,0.1391548365797293,male,young,good,0,1,0,0,0,0,1,0,0,1,0,0 +0.0,0.06894464619786508,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.07142857142857142,0.05430835259161439,female,young,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.20160669087707717,male,old,bad,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.15472653240893586,male,old,good,0,0,0,1,0,0,0,0,1,1,0,0 +0.5714285714285714,0.09370529327610873,male,old,bad,0,1,0,0,0,0,0,0,1,0,0,1 +0.4642857142857143,0.19709475074281943,male,young,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.0708704743039507,female,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.03571428571428571,0.10223396060305931,male,old,good,0,0,0,1,0,0,0,1,0,1,0,0 +0.03571428571428571,0.00979421151094971,female,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.2313744910311434,male,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.3035714285714286,0.1138439528997469,male,old,bad,0,0,1,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.05023660173874765,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.08928571428571427,0.037746230879278087,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.12231759656652359,male,young,bad,1,0,0,0,1,0,0,0,0,0,1,0 +0.19642857142857142,0.07257620776934082,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.1522504677011115,female,old,good,0,1,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.11450423682183337,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.10157367668097282,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.04330362055683944,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.05425332893144051,male,old,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.21464729833828547,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.7857142857142857,0.18482447452404535,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06393749312204247,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.125,0.20298228238142402,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.0702101903818642,male,old,good,1,0,0,0,0,0,0,0,1,0,0,1 +0.3214285714285714,0.1334323759216463,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.4642857142857143,0.2184989545504567,female,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.14713326730494114,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.08622207549246176,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.10714285714285712,0.0536480686695279,male,old,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.08776273797733025,female,old,good,1,0,0,0,0,1,0,0,0,1,0,0 +0.03571428571428571,0.0,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.022559700671288655,male,young,good,0,0,0,1,0,0,0,1,0,1,0,0 +0.03571428571428571,0.025255860019808517,male,old,good,0,0,1,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.8587542643336634,male,old,good,1,0,0,0,1,0,0,0,0,0,1,0 +0.3571428571428571,0.08732254869593925,female,old,bad,0,1,0,0,0,0,0,1,0,0,0,1 +0.1607142857142857,0.08512160228898426,male,old,good,1,0,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.010894684714427203,female,young,bad,1,0,0,0,0,1,0,0,0,0,1,0 
+0.24999999999999997,0.16303510509519095,male,young,good,0,1,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.12815010454495435,male,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.05216242984483327,female,old,bad,1,0,0,0,0,0,0,0,1,0,0,1 +0.3035714285714286,0.26152745680642675,female,old,bad,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.11461428414218112,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.19642857142857142,0.055683944095961266,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.049796412457356665,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.10714285714285712,0.03543523715197534,female,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.16952789699570817,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.04748541873005392,female,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.02580609662154727,male,old,bad,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.09981291955540883,female,old,bad,0,1,0,0,1,0,0,0,0,0,1,0 +0.4642857142857143,0.18741058655221746,male,old,good,0,0,1,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.3502255970067129,male,old,good,1,0,0,0,1,0,0,0,0,0,0,1 +0.24999999999999997,0.16391548365797293,male,old,good,0,0,0,1,1,0,0,0,0,0,1,0 +0.19642857142857142,0.17018818091779467,male,old,good,0,0,0,1,0,0,1,0,0,0,0,1 +0.4642857142857143,0.1572576207769341,male,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.011444921316165951,male,young,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.06718388907230109,male,old,good,0,0,0,1,0,0,1,0,0,0,0,1 +0.125,0.05898536370639375,female,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.057114559260482006,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.025145812699460767,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.058215032463959496,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.08928571428571427,0.15538681633102236,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.12440849565313085,female,young,good,1,0,0,0,1,0,0,0,0,1,0,0 +0.03571428571428571,0.0819852536590734,male,old,good,0,0,0,1,0,0,0,0,1,1,0,0 +0.14285714285714285,0.04803565533179267,female,old,bad,0,0,0,1,0,0,1,0,0,1,0,0 +0.125,0.07301639705073182,female,young,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.7321428571428571,0.08776273797733025,male,young,bad,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09783206778914934,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.4107142857142857,0.4439308902828216,female,old,bad,0,1,0,0,0,0,0,0,1,0,0,1 +0.14285714285714285,0.05749972488169913,female,young,bad,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.6431165401122483,male,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.08633212281280951,male,old,bad,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.0675690546935182,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.06965995378012545,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.08928571428571427,0.1037746230879278,male,young,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.19599427753934193,female,young,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.24999999999999997,0.21541762958071972,male,old,bad,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.20947507428194126,male,old,bad,0,1,0,0,0,0,0,1,0,0,1,0 +0.7857142857142857,0.543468691537361,male,old,bad,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.012875536480686695,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +1.0,0.33223286012985587,male,old,bad,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.36436667767139874,male,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.19642857142857142,0.07054033234290745,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 
+0.08928571428571427,0.10366457576758006,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.04715527676901067,male,young,good,1,0,0,0,0,0,1,0,0,1,0,0 +0.2857142857142857,0.10795642126114229,male,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.19642857142857142,0.16154946627049632,female,young,good,0,0,0,1,0,0,0,1,0,1,0,0 +0.7678571428571428,0.5762077693408165,female,old,good,1,0,0,0,0,1,0,0,0,0,1,0 +0.053571428571428575,0.02641135688345989,male,old,good,0,0,0,1,0,0,0,0,1,1,0,0 +0.14285714285714285,0.10030813249697369,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3035714285714286,0.0176625949158138,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.19642857142857142,0.11609992296687575,male,old,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06019588423021899,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.6785714285714286,0.3809838230439089,female,old,bad,1,0,0,0,0,0,0,1,0,0,1,0 +0.6785714285714286,0.2501375591504347,male,old,good,0,0,1,0,0,0,0,0,1,0,0,1 +0.6785714285714286,0.20441289754594477,male,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.125,0.38395510069329813,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.05315285572796302,male,old,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.13744910311433917,female,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.6785714285714286,0.49702872235061074,male,old,good,0,1,0,0,1,0,0,0,0,0,0,1 +0.4107142857142857,0.23528117090348852,male,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.3035714285714286,0.2750082535490261,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.16127434796962695,female,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.02569604930119952,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.055683944095961266,male,young,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.08990866072411137,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.09210960713106636,female,old,bad,0,1,0,0,0,1,0,0,0,1,0,0 +0.7857142857142857,0.39760096841641906,male,old,good,1,0,0,0,0,0,0,1,0,0,0,1 +0.14285714285714285,0.03791130185979971,male,old,bad,0,0,1,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.03251898316275999,female,young,good,0,1,0,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.21255639925167824,female,old,bad,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.2697259821723341,male,young,good,0,0,1,0,0,0,0,1,0,0,1,0 +0.5178571428571429,0.22180037416088919,female,young,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.07246616044899307,female,young,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.004897105755474855,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.19642857142857142,0.05639925167822163,female,young,bad,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.021404203807637284,male,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.1127434796962694,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.086552217453505,male,old,bad,1,0,0,0,1,0,0,0,0,0,1,0 +0.7857142857142857,0.41339275888632115,male,old,bad,1,0,0,0,0,0,0,0,1,0,0,1 +0.03571428571428571,0.07400682293386156,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.007428194123473095,female,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.028942445251458126,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.15511169803015298,male,old,good,0,1,0,0,0,0,0,0,1,0,0,1 +0.08928571428571427,0.06129635743369649,male,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.0534279740288324,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.07142857142857142,0.17321448222735777,male,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.21475734565863322,male,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 
+0.19642857142857142,0.08407615274568064,female,old,bad,0,1,0,0,0,1,0,0,0,1,0,0 +0.08928571428571427,0.05425332893144051,female,young,good,0,0,0,1,0,1,0,0,0,1,0,0 +0.03571428571428571,0.08341586882359414,male,young,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.4642857142857143,0.43039506988004844,male,young,good,1,0,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.268460437988335,female,old,bad,0,1,0,0,0,0,0,0,1,0,0,1 +0.3571428571428571,0.4620886981402003,male,old,bad,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.21563772422141522,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.09271486739297898,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.11813579839330911,male,old,bad,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.06652360515021459,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.023935292175635527,male,young,bad,0,1,0,0,0,0,0,1,0,0,1,0 +0.7857142857142857,0.2396280400572246,male,young,bad,1,0,0,0,0,0,0,0,1,0,0,1 +0.125,0.10410476504897105,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.03851656212171234,male,old,bad,1,0,0,0,0,0,0,0,1,1,0,0 +0.21428571428571427,0.050896885660834154,male,old,good,0,1,0,0,1,0,0,0,0,0,0,1 +0.14285714285714285,0.06866952789699571,female,young,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.09541102674149884,male,old,good,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.19555408825795093,female,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.09557609772202047,male,young,bad,1,0,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.21398701441619897,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.024815670738417523,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.8928571428571428,0.5054473423572136,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.16176956091119182,female,young,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.1607142857142857,0.034774953229888855,male,young,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.13810938703642567,male,old,good,0,1,0,0,0,0,0,0,1,0,0,1 +0.19642857142857142,0.10845163420270716,female,young,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.020688896225376913,female,young,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,1.0,female,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.0532078793881369,male,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.053571428571428575,0.12798503356443272,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.13189171343677783,male,old,good,1,0,0,0,0,0,1,0,0,1,0,0 +0.4107142857142857,0.12765489160338947,female,young,bad,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.1594585671838891,male,old,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.022889842632331906,female,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.051447122262572906,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.4642857142857143,0.3138549576317817,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.7321428571428571,0.24738637614174097,male,young,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.19642857142857142,0.0529877847474414,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.03571428571428571,0.09067899196654561,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.7857142857142857,0.6239683063717398,female,young,bad,0,0,0,1,0,0,1,0,0,1,0,0 +0.0,0.01931330472103004,female,young,good,0,0,0,1,0,1,0,0,0,1,0,0 +0.24999999999999997,0.05474854187300539,male,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.30218994167492025,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.17857142857142858,0.06382744580169472,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.035765379113018594,male,young,bad,1,0,0,0,0,0,1,0,0,0,1,0 
+0.03571428571428571,0.0061626499394739735,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.09513590844062947,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.5714285714285714,0.17051832287883792,female,old,bad,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.030097942115109497,female,old,bad,0,0,0,1,0,0,0,0,1,0,1,0 +0.03571428571428571,0.014746340926598437,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.1129635743369649,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.1354682513480797,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.06734896005282272,male,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.24999999999999997,0.08638714647298339,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.09227467811158797,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.06509298998569385,female,young,good,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.03895675140310334,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.5714285714285714,0.19725982172334106,female,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.028227137669197756,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.15010454495433037,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.05727963024100363,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.0233300319137229,male,young,bad,1,0,0,0,0,0,0,1,0,0,1,0 +1.0,0.3453835149114119,male,old,good,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.03994717728623308,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.10714285714285712,0.20353251898316277,male,old,good,0,0,1,0,0,1,0,0,0,0,1,0 +0.10714285714285712,0.043908880818752064,male,young,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.13315725762077696,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.4642857142857143,0.23841751953339935,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.11692527786948388,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.056784417299438755,female,young,bad,1,0,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.19296797622977882,female,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.3571428571428571,0.2195994277539342,male,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.3482447452404534,male,old,good,1,0,0,0,1,0,0,0,0,0,0,1 +0.14285714285714285,0.10294926818531969,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.10570045119401342,female,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.19642857142857142,0.18399911962143722,female,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.5714285714285714,0.24408495653130846,male,old,good,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.2424892703862661,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.17857142857142858,0.48024650599757895,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.04429404643996919,female,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.7857142857142857,0.15412127214702323,male,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.17558049961483438,female,young,bad,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.26961593485198637,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.09425552987784747,male,old,good,0,0,0,1,0,0,0,1,0,1,0,0 +0.24999999999999997,0.18377902498074172,male,old,good,0,1,0,0,1,0,0,0,0,0,1,0 +0.5714285714285714,0.3076923076923077,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.06707384175195334,male,old,good,0,0,0,1,0,0,0,0,1,1,0,0 +0.3571428571428571,0.12947067238912735,female,old,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.18713546825134808,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.15951359084406297,male,young,good,1,0,0,0,0,1,0,0,0,0,0,1 
+0.3571428571428571,0.21239132827115662,male,young,bad,1,0,0,0,0,0,0,0,1,1,0,0 +0.08928571428571427,0.06531308462638935,male,old,bad,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.062011665015956854,female,old,good,0,0,1,0,0,0,0,0,1,0,0,1 +0.08928571428571427,0.06448772972378122,male,old,bad,1,0,0,0,0,1,0,0,0,0,0,1 +0.4642857142857143,0.18553978210630573,female,old,good,1,0,0,0,0,0,0,0,1,1,0,0 +0.3571428571428571,0.17029822823814242,male,old,bad,1,0,0,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.020633872565203038,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.040332342907450205,female,young,good,0,1,0,0,0,1,0,0,0,1,0,0 +0.4642857142857143,0.15081985253659075,female,young,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.049466270496313414,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.08928571428571427,0.001430615164520744,male,young,good,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.06168152305491362,female,young,bad,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.029987894794761747,female,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.056784417299438755,female,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.0064927919005172245,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.055463849455265765,male,young,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.19642857142857142,0.10652580609662154,female,old,good,0,0,0,1,0,0,0,1,0,1,0,0 +0.3571428571428571,0.14223616154946628,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.18609001870804448,female,young,good,1,0,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.05838010344448112,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.11543963904478925,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.010069329811819083,female,young,bad,0,1,0,0,0,1,0,0,0,1,0,0 +0.19642857142857142,0.20408275558490152,female,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.13101133487399583,female,young,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.12077693408165512,male,old,good,0,0,0,1,0,1,0,0,0,1,0,0 +0.10714285714285712,0.04335864421701331,male,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.7857142857142857,0.473643666776714,male,old,good,0,0,0,1,0,0,0,1,0,0,0,1 +0.3571428571428571,0.05639925167822163,male,old,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.3476394849785408,male,young,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.06019588423021899,male,old,bad,0,0,1,0,0,0,0,1,0,0,1,0 +0.4107142857142857,0.3144602178936943,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.020248706943985912,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.10714285714285712,0.026246285902938263,male,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.19642857142857142,0.06569825024760646,male,young,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.18542973478595798,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.10955210740618465,male,old,good,1,0,0,0,0,1,0,0,0,1,0,0 +1.0,0.799603829646748,female,old,bad,0,1,0,0,0,0,0,0,1,0,0,1 +0.3571428571428571,0.09750192582810609,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.019753494002421042,female,old,bad,0,0,1,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.15247056234180698,male,young,good,1,0,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.2272477165181028,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.05827005612413337,male,old,bad,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.14185099592824915,male,young,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.08501155496863651,female,old,good,0,1,0,0,0,0,0,0,1,1,0,0 
+0.24999999999999997,0.07059535600308132,male,old,bad,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.26702982282381427,male,old,good,0,0,0,1,0,1,0,0,0,0,0,1 +0.4642857142857143,0.5707604269836029,male,old,good,1,0,0,0,0,0,0,0,1,0,0,1 +0.14285714285714285,0.06250687795752173,female,old,bad,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.23885770881479035,male,old,bad,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.17326950588753165,male,old,good,0,0,1,0,0,0,0,0,1,0,1,0 +0.03571428571428571,0.04385385715857819,female,old,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.4091009133927589,female,old,bad,1,0,0,0,0,0,0,1,0,1,0,0 diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE-val.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE-val.csv new file mode 100644 index 0000000..3907229 --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE-val.csv @@ -0,0 +1,301 @@ +month,credit_amount,sex,age,credit,status=A11,status=A12,status=A13,status=A14,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153 +0.24999999999999997,0.07169582920655881,male,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.06107626279300099,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.07142857142857142,0.02806206668867613,female,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.14575767580059426,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.7321428571428571,0.15302079894354573,male,young,bad,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.24551557169582922,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.090073731704633,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.11131286453174864,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.035105095190932106,male,old,bad,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.07538241443820842,female,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.1352481567073842,male,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.24999999999999997,0.03389457466710685,male,young,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.07142857142857142,0.06404754044239022,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.05849015076482887,male,young,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.12809508088478047,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.1825685044569165,female,old,good,0,0,0,1,0,0,0,0,1,1,0,0 +0.3571428571428571,0.06289204357873886,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.10267414988445031,male,old,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.09695168922636734,female,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.09298998569384835,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.03571428571428571,0.05904038736656762,male,old,good,0,0,1,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.09502586112028172,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.047210300429184546,male,old,bad,1,0,0,0,0,0,0,1,0,0,1,0 +0.19642857142857142,0.05777484318256851,male,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.03571428571428571,0.0505667436997909,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.16501595686145043,male,old,good,1,0,0,0,0,0,0,1,0,0,0,1 +0.625,0.1275998679432156,male,young,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.08928571428571427,0.07147573456586331,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.024760647078243648,male,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.14285714285714285,0.32067789149334214,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 
+0.08928571428571427,0.05260261912622427,female,young,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.060801144492131615,female,young,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.15010454495433037,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.07015516672169032,female,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.03571428571428571,0.037526136238582586,female,young,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.4107142857142857,0.12534389787608674,female,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.15758776273797734,female,old,bad,1,0,0,0,0,1,0,0,0,1,0,0 +0.4642857142857143,0.09122922856828436,male,old,bad,0,0,1,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.05447342357213601,female,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.02569604930119952,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.0695499064597777,male,young,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.08688235941454825,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.02145922746781116,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.5714285714285714,0.39517992736876856,female,old,good,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.0710905689446462,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.5160669087707714,male,young,bad,1,0,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.00968416419060196,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.24061846594035435,male,old,bad,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.45234951028942444,female,young,good,1,0,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.1904368878617806,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.6877407285132606,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.6785714285714286,0.4199405744470122,male,old,good,1,0,0,0,0,0,0,1,0,0,0,1 +1.0,0.5444040937603168,female,young,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.05480356553317926,male,young,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.1840541432816111,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.18526466380543635,male,old,good,0,1,0,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.034995047870584356,female,young,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.07131066358534169,male,young,bad,1,0,0,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.10570045119401342,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.13002090899086607,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.04682513480796743,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.1513700891383295,female,old,good,0,1,0,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.1116430064927919,male,old,good,0,0,0,1,0,0,1,0,0,0,0,1 +0.10714285714285712,0.06993507208099482,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.04357873885770881,female,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.3538571585781886,male,old,good,0,1,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.10426983602949268,male,young,bad,1,0,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.2512930560140861,male,old,bad,1,0,0,0,0,0,0,1,0,0,1,0 +0.7857142857142857,0.7797402883239792,male,young,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.02426543413667877,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.05441839991196214,male,old,good,0,1,0,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.1384945526576428,male,old,bad,0,1,0,0,0,0,0,0,1,1,0,0 +0.24999999999999997,0.33069219764498736,male,old,good,0,1,0,0,0,0,0,1,0,1,0,0 +0.24999999999999997,0.027511830086937385,female,old,bad,1,0,0,0,1,0,0,0,0,0,1,0 +0.14285714285714285,0.06894464619786508,male,old,good,0,0,0,1,0,0,1,0,0,1,0,0 
+0.14285714285714285,0.024815670738417523,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.125,0.04974138879718279,female,old,good,0,0,0,1,1,0,0,0,0,0,1,0 +0.3035714285714286,0.16011885110597557,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.10311433916584131,female,young,bad,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.02839220864971938,male,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.3685484758446132,male,old,good,0,1,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.0482007263123143,male,old,good,0,0,1,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.05315285572796302,female,young,bad,1,0,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.10806646858149004,male,young,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.20864971937933313,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.19642857142857142,0.04418399911962144,male,old,good,1,0,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.14559260482007264,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.327335754374381,male,old,bad,1,0,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.41311764058545175,female,old,good,0,1,0,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.06173654671508748,male,old,bad,1,0,0,0,0,0,0,1,0,0,1,0 +0.4642857142857143,0.33564432706063607,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.2304941124683614,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.7857142857142857,0.8485748872014967,male,young,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.030483107736326624,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3035714285714286,0.07439198855507868,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.3421371189611533,male,old,bad,0,1,0,0,1,0,0,0,0,0,1,0 +0.24999999999999997,0.06729393639264884,male,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.2851876306811929,female,old,good,0,0,0,1,1,0,0,0,0,1,0,0 +0.03571428571428571,0.05436337625178826,male,old,good,0,0,0,1,1,0,0,0,0,0,1,0 +0.5178571428571429,0.13832948167712117,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.09199955981071861,male,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.1522504677011115,male,old,good,0,0,1,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.052657642786398146,female,young,bad,1,0,0,0,0,1,0,0,0,1,0,0 +0.21428571428571427,0.13068119291295258,male,old,bad,1,0,0,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.023880268515461652,male,old,bad,1,0,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.07813359744690217,female,young,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.02321998459337515,female,old,good,0,0,0,1,1,0,0,0,0,0,1,0 +0.3035714285714286,0.12820512820512822,male,old,bad,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.3336634752943766,male,old,good,0,0,0,1,1,0,0,0,0,0,1,0 +0.3571428571428571,0.1641906019588423,female,young,bad,1,0,0,0,0,1,0,0,0,1,0,0 +0.10714285714285712,0.3883569935072081,male,old,good,0,1,0,0,1,0,0,0,0,0,0,1 +0.1607142857142857,0.10184879498184218,female,young,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.32898646417959726,female,young,bad,1,0,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.15219544404093763,male,young,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.014966435567293938,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.0926598437328051,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.7857142857142857,0.1376141740948608,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.06943985913942995,male,old,bad,0,1,0,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.12396830637173985,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.05705953560030813,female,old,good,0,0,0,1,0,0,0,0,1,0,1,0 
+0.5714285714285714,0.6679322108506658,male,old,bad,0,1,0,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.05981071860900186,male,old,bad,0,0,1,0,0,1,0,0,0,0,1,0 +0.4107142857142857,0.12061186310113349,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.10714285714285712,0.06129635743369649,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.11742049081104876,male,old,good,1,0,0,0,0,0,0,0,1,1,0,0 +0.08928571428571427,0.1402553097832068,male,old,good,1,0,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.06575327390778034,male,old,good,0,0,1,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.08215032463959503,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.10714285714285712,0.06036095521074061,male,old,good,0,0,1,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.25420931000330144,male,old,bad,1,0,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.0817651590183779,female,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.027731924727632886,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.18075272367117862,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.19642857142857142,0.0675690546935182,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.4642857142857143,0.35721360184879497,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.37839771101573677,female,young,bad,0,0,0,1,0,1,0,0,0,1,0,0 +0.19642857142857142,0.030593155056674374,female,young,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.14537251017937713,male,old,good,0,0,1,0,0,0,0,0,1,0,0,1 +0.5714285714285714,0.30538131396500495,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.15709254979641246,male,young,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.19642857142857142,0.06250687795752173,male,old,good,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.11852096401452623,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.19962583911081766,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.15637724221415208,male,young,bad,0,1,0,0,0,1,0,0,0,1,0,0 +0.4285714285714286,0.1371739848134698,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.07285132607021019,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3035714285714286,0.0872125013755915,female,young,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.13579839330912294,female,young,bad,0,1,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.2527786948387807,male,old,bad,0,0,0,1,1,0,0,0,0,1,0,0 +0.3571428571428571,0.052217453505007144,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.17857142857142858,0.20485308682733577,male,young,good,1,0,0,0,1,0,0,0,0,0,0,1 +0.3571428571428571,0.1336524705623418,female,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.10360955210740619,female,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.3571428571428571,0.05397821063057115,female,old,good,1,0,0,0,0,0,0,0,1,1,0,0 +0.3571428571428571,0.14427203697589966,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.11978650819852536,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +1.0,0.3877517332452955,male,old,bad,1,0,0,0,0,0,0,0,1,1,0,0 +0.4642857142857143,0.40420380763728403,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.625,0.7664245625619016,male,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.08928571428571427,0.06883459887751733,female,old,bad,0,1,0,0,0,0,0,0,1,0,1,0 +0.7857142857142857,0.19775503466490593,male,old,bad,0,1,0,0,0,0,0,1,0,0,0,1 +0.6785714285714286,0.20864971937933313,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.06184659403543523,female,old,good,1,0,0,0,1,0,0,0,0,0,1,0 +0.3571428571428571,0.41619896555518876,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.03626059205458347,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 
+0.24999999999999997,0.34158688235941453,male,old,bad,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.11962143721800374,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.13965004952129417,female,young,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.024595576097722022,male,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.3035714285714286,0.12248266754704522,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.0706503796632552,male,young,bad,0,1,0,0,0,1,0,0,0,1,0,0 +0.03571428571428571,0.04500935402222955,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.18284362275778587,female,young,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.05782986684274238,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.6785714285714286,0.3805436337625179,male,old,good,0,0,0,1,0,0,0,1,0,1,0,0 +0.3571428571428571,0.09843732805106195,male,young,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.12286783316826234,male,young,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.18526466380543635,male,young,good,0,0,1,0,0,0,0,0,1,1,0,0 +0.3571428571428571,0.09948277759436558,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.08281060856168151,female,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.07202597116760207,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.16017387476614944,male,old,bad,1,0,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.5486959392538792,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.062231759656652355,female,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.3035714285714286,0.10955210740618465,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.12589413447782546,male,old,bad,0,0,0,1,0,0,0,0,1,0,1,0 +0.5714285714285714,0.4300099042588313,female,young,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.33927588863211183,female,old,bad,0,1,0,0,0,0,0,0,1,1,0,0 +0.3571428571428571,0.24683613954000222,female,young,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.037416088918234836,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.1463629360625069,male,old,good,1,0,0,0,0,0,0,1,0,0,0,1 +0.24999999999999997,0.11158798283261802,female,old,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.2318146803125344,male,old,bad,0,0,0,1,0,0,1,0,0,0,1,0 +0.5714285714285714,0.11797072741278751,female,young,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.1103224386486189,male,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.625,0.4587872785297678,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.3022449653350941,female,young,good,0,1,0,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.05227247716518102,female,young,good,1,0,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.08897325850115549,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.03571428571428571,0.0335644327060636,female,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.04258831297457907,male,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +1.0,0.36238582590513924,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.07868383404864092,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.06399251678221635,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.07142857142857142,0.03615054473423572,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.05188731154396391,female,old,bad,1,0,0,0,1,0,0,0,0,1,0,0 +0.14285714285714285,0.1280400572246066,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.2047980631671619,female,young,good,0,0,0,1,0,0,0,1,0,1,0,0 +0.3571428571428571,0.1930229998899527,female,young,good,0,1,0,0,1,0,0,0,0,1,0,0 +0.24999999999999997,0.11389897655992078,male,old,good,0,0,0,1,1,0,0,0,0,0,1,0 
+0.3571428571428571,0.12176735996478487,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.03571428571428571,0.06014086057004511,male,old,good,0,0,1,0,0,0,0,0,1,0,1,0 +0.4285714285714286,0.4167492021569275,male,old,good,0,0,0,1,0,1,0,0,0,1,0,0 +0.3035714285714286,0.16644657202597118,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.16474083856058105,female,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.4107142857142857,0.46016287003411466,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.5714285714285714,0.08759766699680863,female,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.12991086167051832,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.08928571428571427,0.09794211510949709,male,young,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.49669858038956755,male,old,good,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.045449543303620554,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.10714285714285712,0.07131066358534169,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.09040387366567623,female,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.10404974138879718,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.425332893144052,male,old,bad,0,0,0,1,0,1,0,0,0,1,0,0 +0.14285714285714285,0.055793991416309016,male,old,good,0,0,0,1,0,0,0,0,1,1,0,0 +0.24999999999999997,0.10999229668757565,male,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.7857142857142857,0.324254429404644,female,old,bad,1,0,0,0,0,0,0,0,1,0,0,1 +0.3035714285714286,0.1842742379223066,female,old,good,1,0,0,0,0,0,0,1,0,1,0,0 +0.19642857142857142,0.07081545064377683,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.590073731704633,female,old,bad,0,0,0,1,1,0,0,0,0,0,1,0 +0.24999999999999997,0.2226807527236712,male,old,bad,0,1,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.05172224056344228,female,old,bad,1,0,0,0,1,0,0,0,0,0,0,1 +0.14285714285714285,0.056344228018047754,female,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.11863101133487398,male,old,good,0,0,0,1,1,0,0,0,0,0,1,0 +0.3571428571428571,0.33856058104985143,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.10714285714285712,0.05705953560030813,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.6785714285714286,0.3138549576317817,female,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.09392538791680423,female,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.07164080554638494,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.03780125453945196,male,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.09805216242984482,male,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.5714285714285714,0.5846263893474194,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.2659843732805106,female,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.2712116209970287,male,old,bad,1,0,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.044018928139099814,male,old,good,0,1,0,0,1,0,0,0,0,0,1,0 +0.5714285714285714,0.2015516672169033,male,young,good,0,0,1,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.15401122482667548,female,old,good,0,0,1,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.44486629250577747,male,old,bad,1,0,0,0,0,0,0,0,1,0,0,1 +0.14285714285714285,0.08847804555959062,female,young,good,1,0,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.25272367117860683,male,old,bad,1,0,0,0,0,0,0,0,1,1,0,0 +0.24999999999999997,0.19423352041377795,male,old,good,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.09783206778914934,male,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.11191812479366127,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.19642857142857142,0.1363486299108617,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 
+0.4107142857142857,0.27748431825685044,male,old,bad,1,0,0,0,1,0,0,0,0,0,1,0 +1.0,0.4900957411687025,male,old,good,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.09315505667436998,female,young,bad,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.148949048090679,male,old,good,1,0,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.11769560911191813,female,old,good,1,0,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.08979861340376362,female,young,bad,1,0,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.08060966215472654,female,old,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.33459887751733247,male,old,bad,1,0,0,0,0,0,0,0,1,0,0,1 +0.19642857142857142,0.0586552217453505,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.8058765269065697,male,old,bad,1,0,0,0,0,0,0,0,1,0,1,0 +0.4642857142857143,0.15555188731154398,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.0,0.07120061626499395,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.041652910751623196,male,young,good,0,1,0,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.10008803785627819,male,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.7002310993727302,female,old,bad,0,1,0,0,1,0,0,0,0,0,0,1 +0.24999999999999997,0.1570375261362386,male,old,good,1,0,0,0,0,0,0,1,0,0,1,0 +0.5714285714285714,0.20578848905029165,female,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.17877187190491914,female,old,bad,1,0,0,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.048750962914053037,male,old,bad,0,1,0,0,0,0,0,0,1,0,0,1 +0.19642857142857142,0.007813359744690218,female,young,good,0,0,1,0,0,1,0,0,0,1,0,0 +0.24999999999999997,0.048475844613183675,female,old,bad,1,0,0,0,1,0,0,0,0,0,1,0 +0.14285714285714285,0.06459777704412897,male,old,good,0,1,0,0,0,0,0,1,0,0,1,0 +0.6785714285714286,0.17299438758666227,male,old,good,1,0,0,0,1,0,0,0,0,0,1,0 +0.7857142857142857,0.3713546825134808,male,old,bad,1,0,0,0,0,0,0,1,0,0,1,0 +0.4642857142857143,0.6464179597226807,male,old,bad,1,0,0,0,0,1,0,0,0,0,1,0 +0.4642857142857143,0.14201606690877078,female,old,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.09436557719819522,female,old,good,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.07411687025420931,male,old,good,0,0,0,1,0,0,0,0,1,0,0,1 +0.03571428571428571,0.09975789589523495,male,old,good,0,1,0,0,0,1,0,0,0,1,0,0 +0.3928571428571428,0.42456256190161773,male,old,good,0,1,0,0,0,1,0,0,0,0,1,0 +0.7857142857142857,0.4034334763948498,male,old,good,0,1,0,0,1,0,0,0,0,0,0,1 +0.3571428571428571,0.24369979091009136,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.30543633762517886,male,old,good,0,0,0,1,0,0,0,0,1,1,0,0 +0.3571428571428571,0.1990756025090789,female,old,good,0,0,0,1,0,0,0,0,1,1,0,0 +0.14285714285714285,0.11775063277209201,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.3631561571475735,male,old,bad,0,1,0,0,1,0,0,0,0,0,1,0 +0.08928571428571427,0.0675690546935182,male,young,bad,0,0,0,1,0,0,0,1,0,0,1,0 +0.03571428571428571,0.10069329811819082,male,young,good,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.03747111257840871,female,old,bad,0,1,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.05469351821283151,female,old,good,0,0,0,1,0,0,0,0,1,1,0,0 +0.10714285714285712,0.10355452844723231,female,old,good,1,0,0,0,0,1,0,0,0,1,0,0 +0.24999999999999997,0.0935402222955871,male,old,good,0,0,0,1,0,0,0,1,0,0,1,0 +0.03571428571428571,0.06184659403543523,male,old,good,1,0,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.06597336854847584,male,old,good,0,1,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.07329151535160118,male,old,good,0,0,0,1,0,0,0,0,1,0,1,0 diff --git 
a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME-test.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME-test.csv new file mode 100644 index 0000000..912f248 --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME-test.csv @@ -0,0 +1,201 @@ +month,credit_amount,sex,age,credit,status=A11,status=A12,status=A13,status=A14,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153 +0.5714285714285714,0.30802244965335096,1,0,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.11290855067679102,1,1,0,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.04693518212831517,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.0704302850225597,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.08093980411576977,1,1,1,1,0,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.1389347419390338,1,1,0,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.08974358974358974,0,1,1,0,0,1,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.15291075162319798,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.07741828986464179,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.3035714285714286,0.11571475734565863,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.13255199735886433,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.07384175195333993,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.09216463079124024,1,1,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.594475624518543,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.09447562451854297,0,0,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.13321228128095083,0,1,0,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.17497523935292178,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.5714285714285714,0.23236491691427316,1,1,1,0,0,1,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.24347969626939586,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0 +0.3571428571428571,0.041212721470232194,1,0,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.7857142857142857,0.5893584241223726,1,1,0,0,1,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.05876526906569825,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.03571428571428571,0.019753494002421042,0,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.3035714285714286,0.1509298998569385,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +1.0,0.847529437658193,1,0,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.19642857142857142,0.17156377242214152,1,0,1,0,0,0,1,0,0,0,0,1,1,0,0 +0.7857142857142857,0.24969736986904373,1,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.10982722570705403,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.08237041928029053,1,0,1,1,0,0,0,0,1,0,0,0,0,1,0 +0.7857142857142857,0.2666446572025971,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.034885000550236606,1,1,0,0,1,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.32023770221195114,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.06228678331682623,1,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.02767690106745901,0,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.13464289644547156,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.01870804445911742,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.4642857142857143,0.09183448883019699,1,1,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.3035714285714286,0.07686805326290305,1,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.3090128755364807,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.3132496973698691,0,1,0,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.026246285902938263,1,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.14097061736546715,0,1,1,1,0,0,0,0,0,0,0,1,1,0,0 +0.7857142857142857,0.22328601298558381,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0 
+0.4642857142857143,0.3266754704522945,1,0,1,1,0,0,0,0,0,0,1,0,1,0,0 +0.24999999999999997,0.05749972488169913,0,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.3004842082095301,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.7857142857142857,0.32243864861890614,1,1,1,0,0,0,1,0,0,1,0,0,0,0,1 +0.4642857142857143,0.44767249917464513,1,1,0,0,1,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.09414548255749973,0,0,1,0,0,1,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.04996148343787829,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06382744580169472,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.5178571428571429,0.385330692197645,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.03571428571428571,0.023825244855287777,0,1,1,0,0,1,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.06399251678221635,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.2817761637504127,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.2217453505007153,0,1,0,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.06096621547265324,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.10714285714285712,0.0292175635523275,1,1,1,0,0,1,0,0,0,0,0,1,0,0,1 +0.0,0.17222405634422802,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.08928571428571427,0.045779685264663805,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.11483437878287663,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.053571428571428575,0.03279410146362936,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.7857142857142857,0.45322988885220644,0,0,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.19642857142857142,0.09106415758776273,1,1,1,0,0,1,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.08550676791020138,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.022119511389897654,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.059480576647958625,1,0,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.15566193463189174,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.1340926598437328,1,1,1,0,0,1,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.05942555298778475,1,1,1,0,0,1,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.05051172003961703,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.17585561791570375,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.08928571428571427,0.052217453505007144,0,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.02701661714537251,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0 +0.24999999999999997,0.39198855507868385,1,1,1,0,1,0,0,1,0,0,0,0,0,1,0 +0.24999999999999997,0.10559040387366568,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.05986574226917574,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.33944095961263343,0,1,1,1,0,0,0,0,0,0,0,1,0,0,1 +0.19642857142857142,0.039892153626059204,1,0,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.13392758886321118,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.23528117090348852,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.038571585781886214,0,1,0,0,1,0,0,0,1,0,0,0,1,0,0 +0.8928571428571428,0.8635963464289644,1,1,0,0,1,0,0,0,1,0,0,0,1,0,0 +0.19642857142857142,0.18190822053483,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.0,0.06630351050951909,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.09271486739297898,1,1,0,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.1581379993397161,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.07114559260482008,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.14933421371189612,1,1,1,1,0,0,0,0,0,0,0,1,0,0,1 +0.24999999999999997,0.06558820292725871,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1 +1.0,0.7431495543083525,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.2857142857142857,0.3245295477055134,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.3336084516342027,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.41900517222405637,1,1,0,1,0,0,0,0,0,0,0,1,0,0,1 
+0.14285714285714285,0.05760977220204688,1,0,1,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.121712336304611,1,0,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.3582590513920986,1,1,0,0,0,0,1,0,0,1,0,0,1,0,0 +0.5714285714285714,0.5129305601408606,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0 +0.5714285714285714,0.39391438318476946,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.5714285714285714,0.36519203257400684,1,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.18548475844613185,1,1,0,0,0,0,1,0,0,0,0,1,0,1,0 +0.03571428571428571,0.06063607351160999,0,0,1,1,0,0,0,1,0,0,0,0,1,0,0 +0.14285714285714285,0.06767910201386595,1,1,1,0,0,1,0,1,0,0,0,0,0,0,1 +0.24999999999999997,0.042808407615274574,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.035875426433366345,1,0,0,1,0,0,0,0,0,0,1,0,1,0,0 +0.10714285714285712,0.1136238582590514,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.08928571428571427,0.09519093210080334,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0 +0.3571428571428571,0.07169582920655881,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0 +0.10714285714285712,0.05447342357213601,0,1,0,0,0,1,0,0,0,0,0,1,0,0,1 +0.14285714285714285,0.05353802134918015,1,1,0,0,1,0,0,0,0,0,0,1,1,0,0 +0.3035714285714286,0.12963574336964895,0,1,1,1,0,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.06377242214152085,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.07252118410916694,0,0,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.01785714285714285,0.1759656652360515,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.24999999999999997,0.1992956971497744,0,1,1,0,1,0,0,1,0,0,0,0,0,1,0 +0.14285714285714285,0.116430064927919,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.5714285714285714,0.4087157477715418,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.5033564432706064,1,1,0,0,1,0,0,0,1,0,0,0,1,0,0 +0.5714285714285714,0.5126004181798173,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.0928799383735006,1,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.10448993067018818,1,1,0,1,0,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.350170573346539,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.14355672939363928,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0 +0.2857142857142857,0.13090128755364808,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.32761087267525035,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.18851105975569496,1,1,1,1,0,0,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.114229118520964,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.17244415098492352,1,1,0,1,0,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.1546164850885881,1,1,0,0,1,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.05771981952239463,1,1,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.06426763508308572,1,1,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.04104765048971058,0,0,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.2857142857142857,0.1780015406624849,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.24999999999999997,0.17211400902388027,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.08928571428571427,0.05359304500935402,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.08928571428571427,0.02723671178606801,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.08858809287993837,1,1,1,0,1,0,0,1,0,0,0,0,0,1,0 +0.2857142857142857,0.2213051612193243,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.053571428571428575,0.11912622427643886,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.19406844943325632,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.4642857142857143,0.10625068779575217,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.239022779795312,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.028007043028502255,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0 +0.4642857142857143,0.23550126554418402,0,1,1,0,0,0,1,0,0,0,1,0,1,0,0 
+0.10714285714285712,0.09210960713106636,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.17591064157587763,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +1.0,0.7580609662154726,1,1,0,0,1,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.14383184769450866,1,1,0,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.05529877847474414,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.6785714285714286,0.22669747991636405,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.03719599427753935,0,1,1,0,0,0,1,1,0,0,0,0,0,1,0 +0.19642857142857142,0.030373060415978873,1,1,0,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.04632992186640256,0,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.3035714285714286,0.07378672829316606,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.23131946737096953,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.023770221195113902,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.24999999999999997,0.3362495873225487,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.055463849455265765,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0 +0.4107142857142857,0.12490370859469573,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.00484208209530098,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.2857142857142857,0.37454605480356556,1,1,1,0,1,0,0,0,0,0,1,0,1,0,0 +0.2857142857142857,0.10922196544514141,1,1,0,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.06459777704412897,0,1,1,0,0,1,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.6095521074061846,1,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.05496863651370089,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.08928571428571427,0.10278419720479806,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.05694948827996038,0,1,0,1,0,0,0,0,0,0,1,0,1,0,0 +0.7857142857142857,0.18311874105865522,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.10714285714285712,0.14295146913172666,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.17932210850665786,0,0,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.07142857142857142,0.05029162539892153,1,1,1,1,0,0,0,0,0,0,0,1,0,0,1 +0.3035714285714286,0.2050181578078574,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.18498954550456698,0,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +1.2142857142857142,0.2941014636293606,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.025255860019808517,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.43903378452734676,1,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.3035714285714286,0.17409486079013978,1,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.6802024870694399,1,1,0,0,1,0,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.08253549026081215,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.15555188731154398,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.2791900517222406,0,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.0,0.06844943325630021,1,1,1,0,0,1,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.12809508088478047,0,1,1,1,0,0,0,1,0,0,0,0,0,0,1 +0.053571428571428575,0.11439418950148562,0,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.022229558710245404,1,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.4337515131506548,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.19880048420820953,1,1,1,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.39738087377572356,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0 +0.14285714285714285,0.07180587652690656,1,1,0,0,0,0,1,0,0,0,0,1,0,0,1 +0.14285714285714285,0.055683944095961266,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.08928571428571427,0.02971277649389237,0,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.27423792230659183,1,1,0,0,1,0,0,1,0,0,0,0,0,1,0 +0.4642857142857143,0.11863101133487398,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.1496093320127655,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0 
+0.3035714285714286,0.6839440959612633,1,1,0,0,0,0,1,0,0,0,0,1,0,0,1 +0.14285714285714285,0.045779685264663805,1,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.4107142857142857,0.20166171453725104,1,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.19709475074281943,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.09232970177176185,1,1,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.5286123032904149,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME-train.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME-train.csv new file mode 100644 index 0000000..cd248b0 --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME-train.csv @@ -0,0 +1,501 @@ +month,credit_amount,sex,age,credit,status=A11,status=A12,status=A13,status=A14,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153 +0.5714285714285714,0.20408275558490152,1,1,1,1,0,0,0,1,0,0,0,0,0,1,0 +0.08928571428571427,0.18306371739848135,1,1,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.12462859029382635,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.09601628700341147,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +1.0,0.5566193463189171,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.39303400462198745,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.008748761967646089,0,1,1,0,0,1,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.12418840101243535,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.4642857142857143,0.25096291405304283,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0 +0.7857142857142857,0.3471992956971498,1,0,0,0,1,0,0,0,0,0,1,0,0,1,0 +0.5714285714285714,0.48332783096731596,1,1,0,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.1816881258941345,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0 +0.2857142857142857,0.3421371189611533,1,1,1,0,1,0,0,1,0,0,0,0,0,1,0 +0.4642857142857143,0.1250137559150435,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.14141080664685815,1,1,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.029327610872675252,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3035714285714286,0.19329811819082207,1,0,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.4642857142857143,0.39809618135798397,0,1,0,0,0,0,1,1,0,0,0,0,0,1,0 +0.19642857142857142,0.06344228018047761,0,1,1,1,0,0,0,0,0,1,0,0,1,0,0 +0.125,0.24876196764608782,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.20743919885550788,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.052327500825354895,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.7857142857142857,0.28232640035215145,1,1,1,0,1,0,0,1,0,0,0,0,0,0,1 +0.3571428571428571,0.28947947617475517,1,0,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.04666006382744581,0,1,1,0,0,0,1,1,0,0,0,0,0,1,0 +0.7857142857142857,0.32871134587872786,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1 +0.125,0.06289204357873886,0,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.08928571428571427,0.09183448883019699,1,1,1,0,1,0,0,0,0,0,1,0,1,0,0 +0.19642857142857142,0.2638384505337295,0,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.7857142857142857,0.3136898866512601,0,0,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.07142857142857142,0.026466380543633764,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.02085396720589854,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.08572686255089688,0,0,0,0,0,0,1,0,0,0,1,0,0,1,0 +0.3035714285714286,0.05161219324309453,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.16424562561901618,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.6223175965665235,0,0,0,0,1,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.04974138879718279,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 
+0.24999999999999997,0.19808517662594918,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.11604489930670188,1,1,0,1,0,0,0,1,0,0,0,0,0,1,0 +0.3571428571428571,0.07059535600308132,0,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.5714285714285714,0.21789369428854408,1,1,0,0,0,1,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.19847034224716628,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06289204357873886,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.08528667326950588,1,0,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.1101023440079234,0,1,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.03224386486189061,0,0,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.06025090789039286,1,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.2500825354902608,0,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.025200836359634642,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.05716958292065588,1,0,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.08880818752063387,0,1,0,0,0,0,1,0,0,1,0,0,0,1,0 +0.7857142857142857,0.5342797402883239,0,1,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.27797953119841534,1,1,1,1,0,0,0,0,0,0,0,1,0,0,1 +0.03571428571428571,0.08924837680202487,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1 +0.3571428571428571,0.12044679212061186,0,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.06261692527786948,0,1,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.7857142857142857,0.6558270056124132,1,1,1,0,1,0,0,1,0,0,0,0,0,0,1 +0.24999999999999997,0.13271706833938596,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.1607142857142857,0.06377242214152085,0,1,1,0,0,0,1,1,0,0,0,0,0,1,0 +0.19642857142857142,0.1866952789699571,0,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.7857142857142857,0.358093980411577,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.0880928799383735,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.4107142857142857,0.17420490811048753,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.4107142857142857,0.2678001540662485,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.10714285714285712,0.07411687025420931,1,1,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.03571428571428571,0.06228678331682623,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.10714285714285712,0.05397821063057115,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.13101133487399583,0,1,0,0,1,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.10454495433036205,0,0,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.1474634092659844,0,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.3582590513920986,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.3035714285714286,0.13728403213381754,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.11417409486079015,1,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.05249257180587652,1,1,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.04396390447892594,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.11235831407505227,1,1,0,0,0,0,1,1,0,0,0,0,0,1,0 +0.08928571428571427,0.06597336854847584,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.24999999999999997,0.39952679652250467,1,1,0,1,0,0,0,0,0,0,0,1,0,0,1 +0.08928571428571427,0.03675580499614835,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.1153295917244415,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.07444701221525256,0,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.125,0.2011114779355123,1,1,1,1,0,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.0482007263123143,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0 +0.3035714285714286,0.14707824364476726,0,1,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.1620446792120612,0,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.24573566633652472,1,1,1,1,0,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.7871134587872785,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0 
+0.5714285714285714,0.2884890502916254,1,1,1,1,0,0,0,0,0,0,0,1,0,0,1 +0.3571428571428571,0.0734565863321228,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.18377902498074172,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.1380543633762518,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.10300429184549356,1,1,1,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.09640145262462858,1,1,1,0,1,0,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.09084406294706723,1,1,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.06839440959612633,0,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.03571428571428571,0.052767690106745896,1,1,0,0,1,0,0,1,0,0,0,0,0,1,0 +0.7857142857142857,0.20518322878837902,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.4110817651590184,0,1,1,1,0,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.1870804445911742,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0 +0.24999999999999997,0.08968856608341587,1,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.09304500935402223,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.034334763948497854,0,0,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.4107142857142857,0.2718168812589414,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.5714285714285714,0.5627269725982172,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.3571428571428571,0.22565203037306042,0,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.06195664135578298,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.4642857142857143,0.1786068009243975,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.6428571428571428,0.3162759986794322,1,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.4285714285714286,0.20666886761307363,1,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.1701331572576208,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.13051612193243095,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.11742049081104876,1,1,0,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.07752833718498954,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.1781666116430065,0,0,1,0,0,0,1,0,0,0,1,0,0,1,0 +1.0,0.39440959612633436,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.10938703642566304,0,0,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.059590623968306375,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1 +0.3214285714285714,0.05683944095961263,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0 +0.03571428571428571,0.17981732144822274,1,1,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.018432926158248045,1,0,1,0,1,0,0,0,0,1,0,0,1,0,0 +0.625,0.25767580059425554,1,0,0,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.056564322658743255,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.08352591614394189,1,0,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.17051832287883792,1,1,0,1,0,0,0,0,1,0,0,0,0,0,1 +0.08928571428571427,0.0627819962583911,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.17453505007153078,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.10714285714285712,0.10432485968966655,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0 +0.19642857142857142,0.06988004842082095,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.3627159678661825,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.09909761197314845,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.15109497083746012,1,0,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.18763068119291296,0,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.06558820292725871,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0 +0.08928571428571427,0.04886101023440079,1,0,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.3037856278199626,0,0,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.24999999999999997,0.12930560140860572,1,1,0,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.19252778694838782,0,1,1,0,0,1,0,0,1,0,0,0,0,1,0 
+0.14285714285714285,0.15456146142841423,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.03571428571428571,0.022669747991636405,1,1,1,1,0,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.09298998569384835,1,1,1,1,0,0,0,0,1,0,0,0,0,0,1 +0.5714285714285714,0.1140640475404424,1,0,1,0,1,0,0,0,0,0,1,0,1,0,0 +0.7857142857142857,0.6577528337184989,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.28925938153405967,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.24999999999999997,0.010069329811819083,0,0,0,0,0,0,1,1,0,0,0,0,1,0,0 +0.03571428571428571,0.23995818201826785,0,1,0,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.13821943435677342,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0 +0.4642857142857143,0.2213051612193243,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.625,0.6333223286012986,1,1,1,0,1,0,0,0,0,0,1,0,1,0,0 +0.5714285714285714,0.10867172884340266,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1 +0.14285714285714285,0.06734896005282272,0,1,1,0,0,1,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.06338725652030373,0,1,1,0,0,0,1,0,0,0,1,0,1,0,0 +0.24999999999999997,0.09150434686915374,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.17857142857142858,0.030373060415978873,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.05617915703752613,1,1,0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.055463849455265765,0,1,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.18240343347639487,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.17255419830527127,1,1,0,1,0,0,0,0,0,0,0,1,0,0,1 +0.14285714285714285,0.1098822493672279,0,1,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.30560140860570045,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.03906679872345109,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.24999999999999997,0.044349070100143065,1,1,0,0,1,0,0,0,0,0,0,1,0,1,0 +0.6785714285714286,0.3322878837900297,1,1,1,0,0,1,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.13772422141520854,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.10714285714285712,0.10784637394079453,1,0,0,0,0,0,1,0,0,1,0,0,1,0,0 +0.7857142857142857,0.5528227137669197,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1 +0.5714285714285714,0.7740728513260702,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1 +0.4642857142857143,0.2200396170353252,1,1,0,0,1,0,0,1,0,0,0,0,0,1,0 +0.3571428571428571,0.03736106525806096,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.08928571428571427,0.039011775063277215,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.3035714285714286,0.13931990756025092,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.7857142857142857,0.44613183668977663,0,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.11285352701661715,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0 +0.7857142857142857,0.18245845713656875,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.20446792120611865,0,1,0,1,0,0,0,0,0,0,0,1,1,0,0 +0.03571428571428571,0.05430835259161439,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.03994717728623308,0,0,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.03659073401562672,0,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.03835149114119071,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1 +0.3571428571428571,0.14168592494772753,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.3035714285714286,0.11213821943435677,0,0,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.10714285714285712,0.145867723120942,0,1,1,0,0,0,1,0,1,0,0,0,1,0,0 +0.7321428571428571,0.23803235391218225,1,1,1,0,1,0,0,1,0,0,0,0,0,1,0 +0.7857142857142857,0.3845053372950369,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.5714285714285714,0.13733905579399142,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.08215032463959503,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.08928571428571427,0.06140640475404424,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0 +0.24999999999999997,0.06905469351821283,1,1,1,0,0,0,1,0,0,1,0,0,0,0,1 
+0.03571428571428571,0.005117200396170352,0,1,1,1,0,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.10856168152305491,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.06850445691647408,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.11428414218113787,0,0,0,0,0,1,0,0,1,0,0,0,0,1,0 +0.7321428571428571,0.6364036535710355,1,1,0,1,0,0,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.06789919665456146,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.4844833278309673,1,1,1,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.07730824254429404,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.06619346318917134,0,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.053813139650049524,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.037526136238582586,0,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.5714285714285714,0.4311654011224827,1,1,0,0,1,0,0,0,0,0,0,1,0,1,0 +0.03571428571428571,0.06030593155056674,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.5714285714285714,0.1437218003741609,1,1,1,0,1,0,0,0,0,0,0,1,0,0,1 +0.3571428571428571,0.15808297567954221,0,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +1.0,0.3938593595245956,0,0,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.19642857142857142,0.055793991416309016,1,0,0,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.07175085286673269,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0 +0.3035714285714286,0.18719049191152196,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.7857142857142857,0.20254209310003302,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1 +0.03571428571428571,0.061131286453174866,1,1,1,1,0,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.052437548145702645,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.18031253438978762,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.5714285714285714,0.4184549356223176,0,0,0,0,0,0,1,0,0,1,0,0,0,1,0 +0.3035714285714286,0.17095851216022892,0,1,1,1,0,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.10179377132166832,1,1,0,0,0,1,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.0505667436997909,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.5714285714285714,0.47854077253218885,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.24999999999999997,0.09276989105315285,1,0,1,1,0,0,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.1794871794871795,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.3695939253879168,1,1,1,0,1,0,0,0,0,0,1,0,1,0,0 +0.4642857142857143,0.11461428414218112,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.09843732805106195,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.49636843842852424,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0 +0.3035714285714286,0.07274127874986244,0,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.03571428571428571,0.011224826675470454,1,0,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.33311323869263787,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.10713106635853417,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.7857142857142857,0.40601958842302194,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.10714285714285712,0.05639925167822163,0,0,1,0,0,1,0,0,1,0,0,0,0,1,0 +0.7857142857142857,0.37795752173434577,1,1,0,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.6783867062837019,0,1,0,0,1,0,0,0,0,0,0,1,0,0,1 +0.24999999999999997,0.1391548365797293,1,0,1,0,1,0,0,0,0,1,0,0,1,0,0 +0.0,0.06894464619786508,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.07142857142857142,0.05430835259161439,0,0,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.20160669087707717,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.15472653240893586,1,1,1,0,0,0,1,0,0,0,0,1,1,0,0 +0.5714285714285714,0.09370529327610873,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1 +0.4642857142857143,0.19709475074281943,1,0,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.0708704743039507,0,1,1,0,0,0,1,0,1,0,0,0,0,1,0 
+0.03571428571428571,0.10223396060305931,1,1,1,0,0,0,1,0,0,0,1,0,1,0,0 +0.03571428571428571,0.00979421151094971,0,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.2313744910311434,1,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.3035714285714286,0.1138439528997469,1,1,0,0,0,1,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.05023660173874765,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.08928571428571427,0.037746230879278087,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.12231759656652359,1,0,0,1,0,0,0,1,0,0,0,0,0,1,0 +0.19642857142857142,0.07257620776934082,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.1522504677011115,0,1,1,0,1,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.11450423682183337,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.10157367668097282,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.04330362055683944,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.05425332893144051,1,1,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.21464729833828547,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.7857142857142857,0.18482447452404535,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06393749312204247,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.125,0.20298228238142402,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.0702101903818642,1,1,1,1,0,0,0,0,0,0,0,1,0,0,1 +0.3214285714285714,0.1334323759216463,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.4642857142857143,0.2184989545504567,0,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.14713326730494114,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.08622207549246176,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.10714285714285712,0.0536480686695279,1,1,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.08776273797733025,0,1,1,1,0,0,0,0,1,0,0,0,1,0,0 +0.03571428571428571,0.0,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.022559700671288655,1,0,1,0,0,0,1,0,0,0,1,0,1,0,0 +0.03571428571428571,0.025255860019808517,1,1,1,0,0,1,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.8587542643336634,1,1,1,1,0,0,0,1,0,0,0,0,0,1,0 +0.3571428571428571,0.08732254869593925,0,1,0,0,1,0,0,0,0,0,1,0,0,0,1 +0.1607142857142857,0.08512160228898426,1,1,1,1,0,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.010894684714427203,0,0,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.16303510509519095,1,0,1,0,1,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.12815010454495435,1,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.05216242984483327,0,1,0,1,0,0,0,0,0,0,0,1,0,0,1 +0.3035714285714286,0.26152745680642675,0,1,0,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.11461428414218112,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.19642857142857142,0.055683944095961266,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.049796412457356665,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.10714285714285712,0.03543523715197534,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.16952789699570817,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.04748541873005392,0,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.02580609662154727,1,1,0,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.09981291955540883,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0 +0.4642857142857143,0.18741058655221746,1,1,1,0,0,1,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.3502255970067129,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1 +0.24999999999999997,0.16391548365797293,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0 +0.19642857142857142,0.17018818091779467,1,1,1,0,0,0,1,0,0,1,0,0,0,0,1 +0.4642857142857143,0.1572576207769341,1,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.011444921316165951,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.06718388907230109,1,1,1,0,0,0,1,0,0,1,0,0,0,0,1 
+0.125,0.05898536370639375,0,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.057114559260482006,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.025145812699460767,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.058215032463959496,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.08928571428571427,0.15538681633102236,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.12440849565313085,0,0,1,1,0,0,0,1,0,0,0,0,1,0,0 +0.03571428571428571,0.0819852536590734,1,1,1,0,0,0,1,0,0,0,0,1,1,0,0 +0.14285714285714285,0.04803565533179267,0,1,0,0,0,0,1,0,0,1,0,0,1,0,0 +0.125,0.07301639705073182,0,0,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.7321428571428571,0.08776273797733025,1,0,0,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09783206778914934,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.4107142857142857,0.4439308902828216,0,1,0,0,1,0,0,0,0,0,0,1,0,0,1 +0.14285714285714285,0.05749972488169913,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.6431165401122483,1,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.08633212281280951,1,1,0,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.0675690546935182,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.06965995378012545,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.08928571428571427,0.1037746230879278,1,0,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.19599427753934193,0,0,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.24999999999999997,0.21541762958071972,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.20947507428194126,1,1,0,0,1,0,0,0,0,0,1,0,0,1,0 +0.7857142857142857,0.543468691537361,1,1,0,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.012875536480686695,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +1.0,0.33223286012985587,1,1,0,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.36436667767139874,1,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.19642857142857142,0.07054033234290745,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.08928571428571427,0.10366457576758006,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.04715527676901067,1,0,1,1,0,0,0,0,0,1,0,0,1,0,0 +0.2857142857142857,0.10795642126114229,1,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.19642857142857142,0.16154946627049632,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0 +0.7678571428571428,0.5762077693408165,0,1,1,1,0,0,0,0,1,0,0,0,0,1,0 +0.053571428571428575,0.02641135688345989,1,1,1,0,0,0,1,0,0,0,0,1,1,0,0 +0.14285714285714285,0.10030813249697369,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3035714285714286,0.0176625949158138,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.19642857142857142,0.11609992296687575,1,1,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06019588423021899,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.6785714285714286,0.3809838230439089,0,1,0,1,0,0,0,0,0,0,1,0,0,1,0 +0.6785714285714286,0.2501375591504347,1,1,1,0,0,1,0,0,0,0,0,1,0,0,1 +0.6785714285714286,0.20441289754594477,1,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.125,0.38395510069329813,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.05315285572796302,1,1,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.13744910311433917,0,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.6785714285714286,0.49702872235061074,1,1,1,0,1,0,0,1,0,0,0,0,0,0,1 +0.4107142857142857,0.23528117090348852,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.3035714285714286,0.2750082535490261,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.16127434796962695,0,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.02569604930119952,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.055683944095961266,1,0,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.08990866072411137,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.09210960713106636,0,1,0,0,1,0,0,0,1,0,0,0,1,0,0 +0.7857142857142857,0.39760096841641906,1,1,1,1,0,0,0,0,0,0,1,0,0,0,1 
+0.14285714285714285,0.03791130185979971,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.03251898316275999,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.21255639925167824,0,1,0,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.2697259821723341,1,0,1,0,0,1,0,0,0,0,1,0,0,1,0 +0.5178571428571429,0.22180037416088919,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.07246616044899307,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.004897105755474855,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.19642857142857142,0.05639925167822163,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.021404203807637284,1,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.1127434796962694,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.086552217453505,1,1,0,1,0,0,0,1,0,0,0,0,0,1,0 +0.7857142857142857,0.41339275888632115,1,1,0,1,0,0,0,0,0,0,0,1,0,0,1 +0.03571428571428571,0.07400682293386156,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.007428194123473095,0,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.028942445251458126,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.15511169803015298,1,1,1,0,1,0,0,0,0,0,0,1,0,0,1 +0.08928571428571427,0.06129635743369649,1,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.0534279740288324,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.07142857142857142,0.17321448222735777,1,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.21475734565863322,1,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.08407615274568064,0,1,0,0,1,0,0,0,1,0,0,0,1,0,0 +0.08928571428571427,0.05425332893144051,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0 +0.03571428571428571,0.08341586882359414,1,0,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.4642857142857143,0.43039506988004844,1,0,1,1,0,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.268460437988335,0,1,0,0,1,0,0,0,0,0,0,1,0,0,1 +0.3571428571428571,0.4620886981402003,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.21563772422141522,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.09271486739297898,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.11813579839330911,1,1,0,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.06652360515021459,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.023935292175635527,1,0,0,0,1,0,0,0,0,0,1,0,0,1,0 +0.7857142857142857,0.2396280400572246,1,0,0,1,0,0,0,0,0,0,0,1,0,0,1 +0.125,0.10410476504897105,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.03851656212171234,1,1,0,1,0,0,0,0,0,0,0,1,1,0,0 +0.21428571428571427,0.050896885660834154,1,1,1,0,1,0,0,1,0,0,0,0,0,0,1 +0.14285714285714285,0.06866952789699571,0,0,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.09541102674149884,1,1,1,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.19555408825795093,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.09557609772202047,1,0,0,1,0,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.21398701441619897,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.024815670738417523,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.8928571428571428,0.5054473423572136,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.16176956091119182,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.1607142857142857,0.034774953229888855,1,0,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.13810938703642567,1,1,1,0,1,0,0,0,0,0,0,1,0,0,1 +0.19642857142857142,0.10845163420270716,0,0,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.020688896225376913,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,1.0,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.0532078793881369,1,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.053571428571428575,0.12798503356443272,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 
+0.03571428571428571,0.13189171343677783,1,1,1,1,0,0,0,0,0,1,0,0,1,0,0 +0.4107142857142857,0.12765489160338947,0,0,0,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.1594585671838891,1,1,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.022889842632331906,0,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.051447122262572906,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.4642857142857143,0.3138549576317817,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.7321428571428571,0.24738637614174097,1,0,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.19642857142857142,0.0529877847474414,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.03571428571428571,0.09067899196654561,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.7857142857142857,0.6239683063717398,0,0,0,0,0,0,1,0,0,1,0,0,1,0,0 +0.0,0.01931330472103004,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0 +0.24999999999999997,0.05474854187300539,1,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.30218994167492025,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.17857142857142858,0.06382744580169472,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.035765379113018594,1,0,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.0061626499394739735,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.09513590844062947,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.5714285714285714,0.17051832287883792,0,1,0,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.030097942115109497,0,1,0,0,0,0,1,0,0,0,0,1,0,1,0 +0.03571428571428571,0.014746340926598437,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.1129635743369649,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.1354682513480797,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.06734896005282272,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.24999999999999997,0.08638714647298339,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.09227467811158797,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.19642857142857142,0.06509298998569385,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.03895675140310334,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.5714285714285714,0.19725982172334106,0,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.028227137669197756,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.15010454495433037,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.05727963024100363,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.0233300319137229,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0 +1.0,0.3453835149114119,1,1,1,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.03994717728623308,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.10714285714285712,0.20353251898316277,1,1,1,0,0,1,0,0,1,0,0,0,0,1,0 +0.10714285714285712,0.043908880818752064,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.13315725762077696,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.4642857142857143,0.23841751953339935,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.11692527786948388,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.056784417299438755,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.19296797622977882,0,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.3571428571428571,0.2195994277539342,1,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.3482447452404534,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1 +0.14285714285714285,0.10294926818531969,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.10570045119401342,0,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.19642857142857142,0.18399911962143722,0,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.5714285714285714,0.24408495653130846,1,1,1,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.2424892703862661,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.17857142857142858,0.48024650599757895,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.04429404643996919,0,1,1,0,0,0,1,0,1,0,0,0,0,1,0 
+0.7857142857142857,0.15412127214702323,1,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.17558049961483438,0,0,0,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.26961593485198637,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.09425552987784747,1,1,1,0,0,0,1,0,0,0,1,0,1,0,0 +0.24999999999999997,0.18377902498074172,1,1,1,0,1,0,0,1,0,0,0,0,0,1,0 +0.5714285714285714,0.3076923076923077,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.06707384175195334,1,1,1,0,0,0,1,0,0,0,0,1,1,0,0 +0.3571428571428571,0.12947067238912735,0,1,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.18713546825134808,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.15951359084406297,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.21239132827115662,1,0,0,1,0,0,0,0,0,0,0,1,1,0,0 +0.08928571428571427,0.06531308462638935,1,1,0,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.062011665015956854,0,1,1,0,0,1,0,0,0,0,0,1,0,0,1 +0.08928571428571427,0.06448772972378122,1,1,0,1,0,0,0,0,1,0,0,0,0,0,1 +0.4642857142857143,0.18553978210630573,0,1,1,1,0,0,0,0,0,0,0,1,1,0,0 +0.3571428571428571,0.17029822823814242,1,1,0,1,0,0,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.020633872565203038,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.040332342907450205,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0 +0.4642857142857143,0.15081985253659075,0,0,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.049466270496313414,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.08928571428571427,0.001430615164520744,1,0,1,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.06168152305491362,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.029987894794761747,0,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.056784417299438755,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.0064927919005172245,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.055463849455265765,1,0,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.19642857142857142,0.10652580609662154,0,1,1,0,0,0,1,0,0,0,1,0,1,0,0 +0.3571428571428571,0.14223616154946628,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.18609001870804448,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.05838010344448112,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.11543963904478925,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.010069329811819083,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0 +0.19642857142857142,0.20408275558490152,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.13101133487399583,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.12077693408165512,1,1,1,0,0,0,1,0,1,0,0,0,1,0,0 +0.10714285714285712,0.04335864421701331,1,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.7857142857142857,0.473643666776714,1,1,1,0,0,0,1,0,0,0,1,0,0,0,1 +0.3571428571428571,0.05639925167822163,1,1,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.3476394849785408,1,0,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.06019588423021899,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0 +0.4107142857142857,0.3144602178936943,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.020248706943985912,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.10714285714285712,0.026246285902938263,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.19642857142857142,0.06569825024760646,1,0,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.18542973478595798,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.10955210740618465,1,1,1,1,0,0,0,0,1,0,0,0,1,0,0 +1.0,0.799603829646748,0,1,0,0,1,0,0,0,0,0,0,1,0,0,1 +0.3571428571428571,0.09750192582810609,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.019753494002421042,0,1,0,0,0,1,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.15247056234180698,1,0,1,1,0,0,0,0,0,1,0,0,1,0,0 
+0.24999999999999997,0.2272477165181028,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.05827005612413337,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.14185099592824915,1,0,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.08501155496863651,0,1,1,0,1,0,0,0,0,0,0,1,1,0,0 +0.24999999999999997,0.07059535600308132,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.26702982282381427,1,1,1,0,0,0,1,0,1,0,0,0,0,0,1 +0.4642857142857143,0.5707604269836029,1,1,1,1,0,0,0,0,0,0,0,1,0,0,1 +0.14285714285714285,0.06250687795752173,0,1,0,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.23885770881479035,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.17326950588753165,1,1,1,0,0,1,0,0,0,0,0,1,0,1,0 +0.03571428571428571,0.04385385715857819,0,1,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.4091009133927589,0,1,0,1,0,0,0,0,0,0,1,0,1,0,0 diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME-val.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME-val.csv new file mode 100644 index 0000000..ac5e47f --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME-val.csv @@ -0,0 +1,301 @@ +month,credit_amount,sex,age,credit,status=A11,status=A12,status=A13,status=A14,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153 +0.24999999999999997,0.07169582920655881,1,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.06107626279300099,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.07142857142857142,0.02806206668867613,0,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.14575767580059426,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.7321428571428571,0.15302079894354573,1,0,0,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.24551557169582922,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.090073731704633,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.11131286453174864,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.14285714285714285,0.035105095190932106,1,1,0,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.07538241443820842,0,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.1352481567073842,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.24999999999999997,0.03389457466710685,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.07142857142857142,0.06404754044239022,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.05849015076482887,1,0,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.12809508088478047,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.1825685044569165,0,1,1,0,0,0,1,0,0,0,0,1,1,0,0 +0.3571428571428571,0.06289204357873886,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.10267414988445031,1,1,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.09695168922636734,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.09298998569384835,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.03571428571428571,0.05904038736656762,1,1,1,0,0,1,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.09502586112028172,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.047210300429184546,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0 +0.19642857142857142,0.05777484318256851,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.03571428571428571,0.0505667436997909,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.16501595686145043,1,1,1,1,0,0,0,0,0,0,1,0,0,0,1 +0.625,0.1275998679432156,1,0,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.08928571428571427,0.07147573456586331,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.024760647078243648,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.14285714285714285,0.32067789149334214,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 
+0.08928571428571427,0.05260261912622427,0,0,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.060801144492131615,0,0,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.15010454495433037,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.07015516672169032,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.03571428571428571,0.037526136238582586,0,0,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.4107142857142857,0.12534389787608674,0,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.15758776273797734,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0 +0.4642857142857143,0.09122922856828436,1,1,0,0,0,1,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.05447342357213601,0,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.02569604930119952,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.0695499064597777,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.08688235941454825,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.02145922746781116,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.5714285714285714,0.39517992736876856,0,1,1,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.0710905689446462,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.5160669087707714,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.00968416419060196,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.24061846594035435,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.45234951028942444,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.1904368878617806,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.6877407285132606,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.6785714285714286,0.4199405744470122,1,1,1,1,0,0,0,0,0,0,1,0,0,0,1 +1.0,0.5444040937603168,0,0,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.05480356553317926,1,0,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.1840541432816111,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.18526466380543635,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.034995047870584356,0,0,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.07131066358534169,1,0,0,1,0,0,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.10570045119401342,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.13002090899086607,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.04682513480796743,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.1513700891383295,0,1,1,0,1,0,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.1116430064927919,1,1,1,0,0,0,1,0,0,1,0,0,0,0,1 +0.10714285714285712,0.06993507208099482,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.04357873885770881,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.3538571585781886,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.10426983602949268,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.2512930560140861,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0 +0.7857142857142857,0.7797402883239792,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.02426543413667877,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.05441839991196214,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.1384945526576428,1,1,0,0,1,0,0,0,0,0,0,1,1,0,0 +0.24999999999999997,0.33069219764498736,1,1,1,0,1,0,0,0,0,0,1,0,1,0,0 +0.24999999999999997,0.027511830086937385,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0 +0.14285714285714285,0.06894464619786508,1,1,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.024815670738417523,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.125,0.04974138879718279,0,1,1,0,0,0,1,1,0,0,0,0,0,1,0 +0.3035714285714286,0.16011885110597557,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.10311433916584131,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.02839220864971938,1,1,0,0,1,0,0,0,0,1,0,0,0,1,0 
+0.5714285714285714,0.3685484758446132,1,1,1,0,1,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.0482007263123143,1,1,1,0,0,1,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.05315285572796302,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.10806646858149004,1,0,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.20864971937933313,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.19642857142857142,0.04418399911962144,1,1,1,1,0,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.14559260482007264,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.327335754374381,1,1,0,1,0,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.41311764058545175,0,1,1,0,1,0,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.06173654671508748,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0 +0.4642857142857143,0.33564432706063607,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.2304941124683614,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.7857142857142857,0.8485748872014967,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.030483107736326624,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3035714285714286,0.07439198855507868,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.3421371189611533,1,1,0,0,1,0,0,1,0,0,0,0,0,1,0 +0.24999999999999997,0.06729393639264884,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.2851876306811929,0,1,1,0,0,0,1,1,0,0,0,0,1,0,0 +0.03571428571428571,0.05436337625178826,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0 +0.5178571428571429,0.13832948167712117,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.09199955981071861,1,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.1522504677011115,1,1,1,0,0,1,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.052657642786398146,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0 +0.21428571428571427,0.13068119291295258,1,1,0,1,0,0,0,0,0,0,0,1,1,0,0 +0.14285714285714285,0.023880268515461652,1,1,0,1,0,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.07813359744690217,0,0,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.03571428571428571,0.02321998459337515,0,1,1,0,0,0,1,1,0,0,0,0,0,1,0 +0.3035714285714286,0.12820512820512822,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.3336634752943766,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0 +0.3571428571428571,0.1641906019588423,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0 +0.10714285714285712,0.3883569935072081,1,1,1,0,1,0,0,1,0,0,0,0,0,0,1 +0.1607142857142857,0.10184879498184218,0,0,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.32898646417959726,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.15219544404093763,1,0,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.014966435567293938,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.0926598437328051,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.7857142857142857,0.1376141740948608,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.06943985913942995,1,1,0,0,1,0,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.12396830637173985,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.05705953560030813,0,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.5714285714285714,0.6679322108506658,1,1,0,0,1,0,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.05981071860900186,1,1,0,0,0,1,0,0,1,0,0,0,0,1,0 +0.4107142857142857,0.12061186310113349,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.10714285714285712,0.06129635743369649,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.11742049081104876,1,1,1,1,0,0,0,0,0,0,0,1,1,0,0 +0.08928571428571427,0.1402553097832068,1,1,1,1,0,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.06575327390778034,1,1,1,0,0,1,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.08215032463959503,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.10714285714285712,0.06036095521074061,1,1,1,0,0,1,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.25420931000330144,1,1,0,1,0,0,0,0,0,1,0,0,0,0,1 
+0.14285714285714285,0.0817651590183779,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.027731924727632886,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.18075272367117862,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.19642857142857142,0.0675690546935182,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.4642857142857143,0.35721360184879497,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.37839771101573677,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0 +0.19642857142857142,0.030593155056674374,0,0,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.14537251017937713,1,1,1,0,0,1,0,0,0,0,0,1,0,0,1 +0.5714285714285714,0.30538131396500495,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.15709254979641246,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.19642857142857142,0.06250687795752173,1,1,1,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.11852096401452623,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.19962583911081766,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.15637724221415208,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0 +0.4285714285714286,0.1371739848134698,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.07285132607021019,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3035714285714286,0.0872125013755915,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.13579839330912294,0,0,0,0,1,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.2527786948387807,1,1,0,0,0,0,1,1,0,0,0,0,1,0,0 +0.3571428571428571,0.052217453505007144,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.17857142857142858,0.20485308682733577,1,0,1,1,0,0,0,1,0,0,0,0,0,0,1 +0.3571428571428571,0.1336524705623418,0,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.10360955210740619,0,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.3571428571428571,0.05397821063057115,0,1,1,1,0,0,0,0,0,0,0,1,1,0,0 +0.3571428571428571,0.14427203697589966,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.11978650819852536,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +1.0,0.3877517332452955,1,1,0,1,0,0,0,0,0,0,0,1,1,0,0 +0.4642857142857143,0.40420380763728403,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.625,0.7664245625619016,1,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.08928571428571427,0.06883459887751733,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0 +0.7857142857142857,0.19775503466490593,1,1,0,0,1,0,0,0,0,0,1,0,0,0,1 +0.6785714285714286,0.20864971937933313,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.06184659403543523,0,1,1,1,0,0,0,1,0,0,0,0,0,1,0 +0.3571428571428571,0.41619896555518876,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.03626059205458347,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.34158688235941453,1,1,0,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.11962143721800374,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.13965004952129417,0,0,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.024595576097722022,1,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.3035714285714286,0.12248266754704522,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.14285714285714285,0.0706503796632552,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0 +0.03571428571428571,0.04500935402222955,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.18284362275778587,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.05782986684274238,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.6785714285714286,0.3805436337625179,1,1,1,0,0,0,1,0,0,0,1,0,1,0,0 +0.3571428571428571,0.09843732805106195,1,0,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.12286783316826234,1,0,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.18526466380543635,1,0,1,0,0,1,0,0,0,0,0,1,1,0,0 +0.3571428571428571,0.09948277759436558,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.08281060856168151,0,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.07202597116760207,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 
+0.3571428571428571,0.16017387476614944,1,1,0,1,0,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.5486959392538792,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.062231759656652355,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.3035714285714286,0.10955210740618465,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.12589413447782546,1,1,0,0,0,0,1,0,0,0,0,1,0,1,0 +0.5714285714285714,0.4300099042588313,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.33927588863211183,0,1,0,0,1,0,0,0,0,0,0,1,1,0,0 +0.3571428571428571,0.24683613954000222,0,0,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.037416088918234836,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.1463629360625069,1,1,1,1,0,0,0,0,0,0,1,0,0,0,1 +0.24999999999999997,0.11158798283261802,0,1,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.2318146803125344,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0 +0.5714285714285714,0.11797072741278751,0,0,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.1103224386486189,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.625,0.4587872785297678,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.3022449653350941,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.05227247716518102,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.08897325850115549,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.03571428571428571,0.0335644327060636,0,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.04258831297457907,1,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +1.0,0.36238582590513924,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.14285714285714285,0.07868383404864092,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.3571428571428571,0.06399251678221635,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.07142857142857142,0.03615054473423572,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.05188731154396391,0,1,0,1,0,0,0,1,0,0,0,0,1,0,0 +0.14285714285714285,0.1280400572246066,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.2047980631671619,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0 +0.3571428571428571,0.1930229998899527,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0 +0.24999999999999997,0.11389897655992078,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0 +0.3571428571428571,0.12176735996478487,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.03571428571428571,0.06014086057004511,1,1,1,0,0,1,0,0,0,0,0,1,0,1,0 +0.4285714285714286,0.4167492021569275,1,1,1,0,0,0,1,0,1,0,0,0,1,0,0 +0.3035714285714286,0.16644657202597118,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.24999999999999997,0.16474083856058105,0,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.4107142857142857,0.46016287003411466,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.5714285714285714,0.08759766699680863,0,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.3571428571428571,0.12991086167051832,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.08928571428571427,0.09794211510949709,1,0,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.49669858038956755,1,1,1,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.045449543303620554,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.10714285714285712,0.07131066358534169,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.09040387366567623,0,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.10404974138879718,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.5714285714285714,0.425332893144052,1,1,0,0,0,0,1,0,1,0,0,0,1,0,0 +0.14285714285714285,0.055793991416309016,1,1,1,0,0,0,1,0,0,0,0,1,1,0,0 +0.24999999999999997,0.10999229668757565,1,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.7857142857142857,0.324254429404644,0,1,0,1,0,0,0,0,0,0,0,1,0,0,1 +0.3035714285714286,0.1842742379223066,0,1,1,1,0,0,0,0,0,0,1,0,1,0,0 +0.19642857142857142,0.07081545064377683,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.590073731704633,0,1,0,0,0,0,1,1,0,0,0,0,0,1,0 +0.24999999999999997,0.2226807527236712,1,1,0,0,1,0,0,0,0,0,0,1,0,1,0 
+0.24999999999999997,0.05172224056344228,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1 +0.14285714285714285,0.056344228018047754,0,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.11863101133487398,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0 +0.3571428571428571,0.33856058104985143,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.10714285714285712,0.05705953560030813,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.6785714285714286,0.3138549576317817,0,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.09392538791680423,0,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.07164080554638494,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.03780125453945196,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.09805216242984482,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.5714285714285714,0.5846263893474194,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.3571428571428571,0.2659843732805106,0,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.5714285714285714,0.2712116209970287,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0 +0.03571428571428571,0.044018928139099814,1,1,1,0,1,0,0,1,0,0,0,0,0,1,0 +0.5714285714285714,0.2015516672169033,1,0,1,0,0,1,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.15401122482667548,0,1,1,0,0,1,0,0,1,0,0,0,0,1,0 +0.5714285714285714,0.44486629250577747,1,1,0,1,0,0,0,0,0,0,0,1,0,0,1 +0.14285714285714285,0.08847804555959062,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.25272367117860683,1,1,0,1,0,0,0,0,0,0,0,1,1,0,0 +0.24999999999999997,0.19423352041377795,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.09783206778914934,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.11191812479366127,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.19642857142857142,0.1363486299108617,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.4107142857142857,0.27748431825685044,1,1,0,1,0,0,0,1,0,0,0,0,0,1,0 +1.0,0.4900957411687025,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.09315505667436998,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0 +0.3571428571428571,0.148949048090679,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.11769560911191813,0,1,1,1,0,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.08979861340376362,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.08060966215472654,0,1,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.33459887751733247,1,1,0,1,0,0,0,0,0,0,0,1,0,0,1 +0.19642857142857142,0.0586552217453505,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.8058765269065697,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0 +0.4642857142857143,0.15555188731154398,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.0,0.07120061626499395,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.041652910751623196,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.10008803785627819,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.24999999999999997,0.7002310993727302,0,1,0,0,1,0,0,1,0,0,0,0,0,0,1 +0.24999999999999997,0.1570375261362386,1,1,1,1,0,0,0,0,0,0,1,0,0,1,0 +0.5714285714285714,0.20578848905029165,0,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.17877187190491914,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.048750962914053037,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1 +0.19642857142857142,0.007813359744690218,0,0,1,0,0,1,0,0,1,0,0,0,1,0,0 +0.24999999999999997,0.048475844613183675,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0 +0.14285714285714285,0.06459777704412897,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0 +0.6785714285714286,0.17299438758666227,1,1,1,1,0,0,0,1,0,0,0,0,0,1,0 +0.7857142857142857,0.3713546825134808,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0 +0.4642857142857143,0.6464179597226807,1,1,0,1,0,0,0,0,1,0,0,0,0,1,0 +0.4642857142857143,0.14201606690877078,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.09436557719819522,0,1,1,0,1,0,0,0,0,1,0,0,1,0,0 
+0.3571428571428571,0.07411687025420931,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1 +0.03571428571428571,0.09975789589523495,1,1,1,0,1,0,0,0,1,0,0,0,1,0,0 +0.3928571428571428,0.42456256190161773,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0 +0.7857142857142857,0.4034334763948498,1,1,1,0,1,0,0,1,0,0,0,0,0,0,1 +0.3571428571428571,0.24369979091009136,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.14285714285714285,0.30543633762517886,1,1,1,0,0,0,1,0,0,0,0,1,1,0,0 +0.3571428571428571,0.1990756025090789,0,1,1,0,0,0,1,0,0,0,0,1,1,0,0 +0.14285714285714285,0.11775063277209201,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 +0.19642857142857142,0.3631561571475735,1,1,0,0,1,0,0,1,0,0,0,0,0,1,0 +0.08928571428571427,0.0675690546935182,1,0,0,0,0,0,1,0,0,0,1,0,0,1,0 +0.03571428571428571,0.10069329811819082,1,0,1,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.03747111257840871,0,1,0,0,1,0,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.05469351821283151,0,1,1,0,0,0,1,0,0,0,0,1,1,0,0 +0.10714285714285712,0.10355452844723231,0,1,1,1,0,0,0,0,1,0,0,0,1,0,0 +0.24999999999999997,0.0935402222955871,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0 +0.03571428571428571,0.06184659403543523,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.06597336854847584,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0 +0.24999999999999997,0.07329151535160118,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0 diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW-test.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW-test.csv new file mode 100644 index 0000000..2714c97 --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW-test.csv @@ -0,0 +1,201 @@ +month,credit_amount,sex,age,credit,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153,status=A11,status=A12,status=A13,status=A14 +0.5714285714285714,0.30802244965335096,1.0,0.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.11290855067679102,1.0,1.0,0.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.04693518212831517,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.0704302850225597,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.08093980411576977,1.0,1.0,1.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.1389347419390338,1.0,1.0,0.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.08974358974358974,0.0,1.0,1.0,0,0,1,0,0,1,0,0,0,0,1,0 +0.19642857142857142,0.15291075162319798,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.07741828986464179,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3035714285714286,0.11571475734565863,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.13255199735886433,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.07384175195333993,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09216463079124024,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.594475624518543,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09447562451854297,0.0,0.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.5714285714285714,0.13321228128095083,0.0,1.0,0.0,0,0,1,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.17497523935292178,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.23236491691427316,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,1,0 +0.14285714285714285,0.24347969626939586,0.0,0.0,1.0,0,1,0,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.041212721470232194,1.0,0.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.7857142857142857,0.5893584241223726,1.0,1.0,0.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.05876526906569825,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 
+0.03571428571428571,0.019753494002421042,0.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.3035714285714286,0.1509298998569385,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +1.0,0.847529437658193,1.0,0.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.17156377242214152,1.0,0.0,1.0,0,0,0,0,1,1,0,0,0,0,0,1 +0.7857142857142857,0.24969736986904373,1.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.14285714285714285,0.10982722570705403,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.08237041928029053,1.0,0.0,1.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.7857142857142857,0.2666446572025971,0.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.034885000550236606,1.0,1.0,0.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.32023770221195114,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.06228678331682623,1.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.02767690106745901,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.13464289644547156,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.01870804445911742,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.09183448883019699,1.0,1.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.3035714285714286,0.07686805326290305,1.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.3090128755364807,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.3132496973698691,0.0,1.0,0.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.026246285902938263,1.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.14097061736546715,0.0,1.0,1.0,0,0,0,0,1,1,0,0,1,0,0,0 +0.7857142857142857,0.22328601298558381,0.0,0.0,0.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.4642857142857143,0.3266754704522945,1.0,0.0,1.0,0,0,0,1,0,1,0,0,1,0,0,0 +0.24999999999999997,0.05749972488169913,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.3004842082095301,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.7857142857142857,0.32243864861890614,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,0,0,1 +0.4642857142857143,0.44767249917464513,1.0,1.0,0.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.09414548255749973,0.0,0.0,1.0,0,0,0,0,1,0,1,0,0,0,1,0 +0.14285714285714285,0.04996148343787829,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.06382744580169472,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.5178571428571429,0.385330692197645,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.023825244855287777,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06399251678221635,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.2817761637504127,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.4642857142857143,0.2217453505007153,0.0,1.0,0.0,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.06096621547265324,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.10714285714285712,0.0292175635523275,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,1,0 +0.0,0.17222405634422802,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.08928571428571427,0.045779685264663805,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.5714285714285714,0.11483437878287663,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.053571428571428575,0.03279410146362936,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.7857142857142857,0.45322988885220644,0.0,0.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.19642857142857142,0.09106415758776273,1.0,1.0,1.0,0,0,0,0,1,1,0,0,0,0,1,0 +0.14285714285714285,0.08550676791020138,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.022119511389897654,0.0,0.0,1.0,0,0,0,0,1,1,0,0,1,0,0,0 +0.14285714285714285,0.059480576647958625,1.0,0.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.15566193463189174,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 
+0.19642857142857142,0.1340926598437328,1.0,1.0,1.0,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.05942555298778475,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.05051172003961703,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.17585561791570375,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.08928571428571427,0.052217453505007144,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.02701661714537251,0.0,0.0,0.0,1,0,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.39198855507868385,1.0,1.0,1.0,1,0,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.10559040387366568,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.05986574226917574,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.33944095961263343,0.0,1.0,1.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.19642857142857142,0.039892153626059204,1.0,0.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.13392758886321118,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.23528117090348852,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.038571585781886214,0.0,1.0,0.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.8928571428571428,0.8635963464289644,1.0,1.0,0.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.19642857142857142,0.18190822053483,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.0,0.06630351050951909,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09271486739297898,1.0,1.0,0.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.1581379993397161,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.07114559260482008,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.14933421371189612,1.0,1.0,1.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.24999999999999997,0.06558820292725871,1.0,1.0,0.0,0,0,0,1,0,0,0,1,1,0,0,0 +1.0,0.7431495543083525,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.2857142857142857,0.3245295477055134,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.3336084516342027,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.41900517222405637,1.0,1.0,0.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.14285714285714285,0.05760977220204688,1.0,0.0,1.0,0,0,1,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.121712336304611,1.0,0.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.3582590513920986,1.0,1.0,0.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.5714285714285714,0.5129305601408606,1.0,1.0,0.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.39391438318476946,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.36519203257400684,1.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.18548475844613185,1.0,1.0,0.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.06063607351160999,0.0,0.0,1.0,1,0,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.06767910201386595,1.0,1.0,1.0,1,0,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.042808407615274574,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.035875426433366345,1.0,0.0,0.0,0,0,0,1,0,1,0,0,1,0,0,0 +0.10714285714285712,0.1136238582590514,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.08928571428571427,0.09519093210080334,0.0,0.0,0.0,0,1,0,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.07169582920655881,0.0,0.0,1.0,0,0,0,1,0,1,0,0,0,1,0,0 +0.10714285714285712,0.05447342357213601,0.0,1.0,0.0,0,0,0,0,1,0,0,1,0,0,1,0 +0.14285714285714285,0.05353802134918015,1.0,1.0,0.0,0,0,0,0,1,1,0,0,0,1,0,0 +0.3035714285714286,0.12963574336964895,0.0,1.0,1.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.06377242214152085,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.07252118410916694,0.0,0.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.01785714285714285,0.1759656652360515,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 
+0.24999999999999997,0.1992956971497744,0.0,1.0,1.0,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.116430064927919,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.5714285714285714,0.4087157477715418,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.5033564432706064,1.0,1.0,0.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.5714285714285714,0.5126004181798173,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.0928799383735006,1.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.10448993067018818,1.0,1.0,0.0,0,0,1,0,0,0,0,1,1,0,0,0 +0.5714285714285714,0.350170573346539,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.14355672939363928,1.0,1.0,1.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.2857142857142857,0.13090128755364808,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.32761087267525035,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.18851105975569496,1.0,1.0,1.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.19642857142857142,0.114229118520964,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.17244415098492352,1.0,1.0,0.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.7857142857142857,0.1546164850885881,1.0,1.0,0.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.03571428571428571,0.05771981952239463,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.06426763508308572,1.0,1.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.04104765048971058,0.0,0.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.2857142857142857,0.1780015406624849,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.17211400902388027,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.05359304500935402,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.02723671178606801,0.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.08858809287993837,1.0,1.0,1.0,1,0,0,0,0,0,1,0,0,1,0,0 +0.2857142857142857,0.2213051612193243,0.0,0.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.053571428571428575,0.11912622427643886,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.19406844943325632,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.4642857142857143,0.10625068779575217,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.239022779795312,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.028007043028502255,1.0,1.0,0.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.4642857142857143,0.23550126554418402,0.0,1.0,1.0,0,0,0,1,0,1,0,0,0,0,0,1 +0.10714285714285712,0.09210960713106636,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.17591064157587763,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +1.0,0.7580609662154726,1.0,1.0,0.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.14383184769450866,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.05529877847474414,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.6785714285714286,0.22669747991636405,1.0,1.0,0.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.14285714285714285,0.03719599427753935,0.0,1.0,1.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.030373060415978873,1.0,1.0,0.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.04632992186640256,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3035714285714286,0.07378672829316606,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.23131946737096953,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.023770221195113902,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.3362495873225487,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.055463849455265765,0.0,0.0,1.0,0,1,0,0,0,1,0,0,0,0,0,1 +0.4107142857142857,0.12490370859469573,1.0,0.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.00484208209530098,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 
+0.2857142857142857,0.37454605480356556,1.0,1.0,1.0,0,0,0,1,0,1,0,0,0,1,0,0 +0.2857142857142857,0.10922196544514141,1.0,1.0,0.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.06459777704412897,0.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,1,0 +0.3571428571428571,0.6095521074061846,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.05496863651370089,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.10278419720479806,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.05694948827996038,0.0,1.0,0.0,0,0,0,1,0,1,0,0,1,0,0,0 +0.7857142857142857,0.18311874105865522,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.14295146913172666,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.17932210850665786,0.0,0.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.07142857142857142,0.05029162539892153,1.0,1.0,1.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.3035714285714286,0.2050181578078574,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.18498954550456698,0.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +1.2142857142857142,0.2941014636293606,1.0,0.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.025255860019808517,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.43903378452734676,1.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3035714285714286,0.17409486079013978,1.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.6802024870694399,1.0,1.0,0.0,0,0,1,0,0,0,0,1,0,1,0,0 +0.03571428571428571,0.08253549026081215,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.15555188731154398,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.2791900517222406,0.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.0,0.06844943325630021,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.12809508088478047,0.0,1.0,1.0,1,0,0,0,0,0,0,1,1,0,0,0 +0.053571428571428575,0.11439418950148562,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.022229558710245404,1.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.4337515131506548,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.19880048420820953,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,0,0,1 +0.14285714285714285,0.39738087377572356,0.0,0.0,1.0,1,0,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.07180587652690656,1.0,1.0,0.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.14285714285714285,0.055683944095961266,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.08928571428571427,0.02971277649389237,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.27423792230659183,1.0,1.0,0.0,1,0,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.11863101133487398,0.0,0.0,0.0,0,0,0,1,0,1,0,0,1,0,0,0 +0.14285714285714285,0.1496093320127655,0.0,0.0,0.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.3035714285714286,0.6839440959612633,1.0,1.0,0.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.14285714285714285,0.045779685264663805,1.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.4107142857142857,0.20166171453725104,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.19709475074281943,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.09232970177176185,1.0,1.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.5286123032904149,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW-train.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW-train.csv new file mode 100644 index 0000000..9e3b099 --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW-train.csv @@ -0,0 +1,501 @@ 
+month,credit_amount,sex,age,credit,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153,status=A11,status=A12,status=A13,status=A14 +0.5714285714285714,0.20408275558490152,1.0,1.0,1.0,1,0,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.18306371739848135,1.0,1.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.24999999999999997,0.12462859029382635,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.09601628700341147,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +1.0,0.5566193463189171,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.39303400462198745,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.008748761967646089,0.0,1.0,1.0,0,0,1,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.12418840101243535,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.4642857142857143,0.25096291405304283,0.0,0.0,1.0,0,0,0,1,0,1,0,0,0,0,0,1 +0.7857142857142857,0.3471992956971498,1.0,0.0,0.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.5714285714285714,0.48332783096731596,1.0,1.0,0.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.1816881258941345,1.0,1.0,0.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.2857142857142857,0.3421371189611533,1.0,1.0,1.0,1,0,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.1250137559150435,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.14141080664685815,1.0,1.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.029327610872675252,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.19329811819082207,1.0,0.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.4642857142857143,0.39809618135798397,0.0,1.0,0.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.06344228018047761,0.0,1.0,1.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.125,0.24876196764608782,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.20743919885550788,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.052327500825354895,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.28232640035215145,1.0,1.0,1.0,1,0,0,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.28947947617475517,1.0,0.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.04666006382744581,0.0,1.0,1.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.32871134587872786,1.0,1.0,0.0,0,0,0,0,1,0,0,1,0,1,0,0 +0.125,0.06289204357873886,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.09183448883019699,1.0,1.0,1.0,0,0,0,1,0,1,0,0,0,1,0,0 +0.19642857142857142,0.2638384505337295,0.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.7857142857142857,0.3136898866512601,0.0,0.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.07142857142857142,0.026466380543633764,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.02085396720589854,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.08572686255089688,0.0,0.0,0.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.3035714285714286,0.05161219324309453,0.0,1.0,0.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.16424562561901618,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.6223175965665235,0.0,0.0,0.0,0,0,1,0,0,1,0,0,0,1,0,0 +0.08928571428571427,0.04974138879718279,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.19808517662594918,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.11604489930670188,1.0,1.0,0.0,1,0,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.07059535600308132,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.21789369428854408,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.19847034224716628,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.06289204357873886,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.08528667326950588,1.0,0.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 
+0.14285714285714285,0.1101023440079234,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.03224386486189061,0.0,0.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.06025090789039286,1.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.2500825354902608,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.025200836359634642,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.05716958292065588,1.0,0.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.08880818752063387,0.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.5342797402883239,0.0,1.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.27797953119841534,1.0,1.0,1.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.03571428571428571,0.08924837680202487,1.0,1.0,1.0,1,0,0,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.12044679212061186,0.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.06261692527786948,0.0,1.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.7857142857142857,0.6558270056124132,1.0,1.0,1.0,1,0,0,0,0,0,0,1,0,1,0,0 +0.24999999999999997,0.13271706833938596,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.1607142857142857,0.06377242214152085,0.0,1.0,1.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.1866952789699571,0.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.7857142857142857,0.358093980411577,0.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.0880928799383735,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.4107142857142857,0.17420490811048753,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.4107142857142857,0.2678001540662485,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.10714285714285712,0.07411687025420931,1.0,1.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.03571428571428571,0.06228678331682623,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.05397821063057115,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.13101133487399583,0.0,1.0,0.0,0,0,1,0,0,1,0,0,0,1,0,0 +0.4642857142857143,0.10454495433036205,0.0,0.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.1474634092659844,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.03571428571428571,0.3582590513920986,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.3035714285714286,0.13728403213381754,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.11417409486079015,1.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.05249257180587652,1.0,1.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.24999999999999997,0.04396390447892594,0.0,0.0,1.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.11235831407505227,1.0,1.0,0.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.06597336854847584,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.39952679652250467,1.0,1.0,0.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.08928571428571427,0.03675580499614835,0.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.1153295917244415,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.07444701221525256,0.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.125,0.2011114779355123,1.0,1.0,1.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.0482007263123143,0.0,0.0,1.0,0,1,0,0,0,1,0,0,0,0,0,1 +0.3035714285714286,0.14707824364476726,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.1620446792120612,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.24573566633652472,1.0,1.0,1.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.7871134587872785,1.0,0.0,0.0,1,0,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.2884890502916254,1.0,1.0,1.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.3571428571428571,0.0734565863321228,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.18377902498074172,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 
+0.14285714285714285,0.1380543633762518,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.10300429184549356,1.0,1.0,1.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.09640145262462858,1.0,1.0,1.0,0,0,0,1,0,1,0,0,0,1,0,0 +0.3571428571428571,0.09084406294706723,1.0,1.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.06839440959612633,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.052767690106745896,1.0,1.0,0.0,1,0,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.20518322878837902,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.4110817651590184,0.0,1.0,1.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.1870804445911742,0.0,0.0,1.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.08968856608341587,1.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.24999999999999997,0.09304500935402223,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.034334763948497854,0.0,0.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.4107142857142857,0.2718168812589414,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.5627269725982172,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.3571428571428571,0.22565203037306042,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.06195664135578298,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.4642857142857143,0.1786068009243975,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.6428571428571428,0.3162759986794322,1.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.4285714285714286,0.20666886761307363,1.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.1701331572576208,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.13051612193243095,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.11742049081104876,1.0,1.0,0.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.07752833718498954,0.0,1.0,0.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.1781666116430065,0.0,0.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +1.0,0.39440959612633436,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.10938703642566304,0.0,0.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.059590623968306375,1.0,1.0,0.0,1,0,0,0,0,0,0,1,1,0,0,0 +0.3214285714285714,0.05683944095961263,0.0,0.0,1.0,0,0,0,1,0,1,0,0,0,0,0,1 +0.03571428571428571,0.17981732144822274,1.0,1.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.018432926158248045,1.0,0.0,1.0,0,0,1,0,0,1,0,0,0,1,0,0 +0.625,0.25767580059425554,1.0,0.0,0.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.056564322658743255,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.08352591614394189,1.0,0.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.17051832287883792,1.0,1.0,0.0,0,1,0,0,0,0,0,1,1,0,0,0 +0.08928571428571427,0.0627819962583911,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.17453505007153078,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.10432485968966655,0.0,0.0,1.0,0,1,0,0,0,1,0,0,0,0,0,1 +0.19642857142857142,0.06988004842082095,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.3627159678661825,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.09909761197314845,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.15109497083746012,1.0,0.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.18763068119291296,0.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.06558820292725871,0.0,0.0,0.0,0,0,0,1,0,1,0,0,1,0,0,0 +0.08928571428571427,0.04886101023440079,1.0,0.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.4642857142857143,0.3037856278199626,0.0,0.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.12930560140860572,1.0,1.0,0.0,0,0,1,0,0,0,0,1,1,0,0,0 
+0.3571428571428571,0.19252778694838782,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.15456146142841423,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.022669747991636405,1.0,1.0,1.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.09298998569384835,1.0,1.0,1.0,0,1,0,0,0,0,0,1,1,0,0,0 +0.5714285714285714,0.1140640475404424,1.0,0.0,1.0,0,0,0,1,0,1,0,0,0,1,0,0 +0.7857142857142857,0.6577528337184989,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.28925938153405967,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.24999999999999997,0.010069329811819083,0.0,0.0,0.0,1,0,0,0,0,1,0,0,0,0,0,1 +0.03571428571428571,0.23995818201826785,0.0,1.0,0.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.13821943435677342,0.0,0.0,0.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.4642857142857143,0.2213051612193243,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.625,0.6333223286012986,1.0,1.0,1.0,0,0,0,1,0,1,0,0,0,1,0,0 +0.5714285714285714,0.10867172884340266,1.0,1.0,0.0,0,0,0,0,1,0,0,1,0,1,0,0 +0.14285714285714285,0.06734896005282272,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06338725652030373,0.0,1.0,1.0,0,0,0,1,0,1,0,0,0,0,0,1 +0.24999999999999997,0.09150434686915374,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.17857142857142858,0.030373060415978873,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.05617915703752613,1.0,1.0,0.0,0,0,1,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.055463849455265765,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.18240343347639487,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.17255419830527127,1.0,1.0,0.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.14285714285714285,0.1098822493672279,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.30560140860570045,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.03906679872345109,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.044349070100143065,1.0,1.0,0.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.6785714285714286,0.3322878837900297,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.13772422141520854,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.10784637394079453,1.0,0.0,0.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.7857142857142857,0.5528227137669197,1.0,1.0,0.0,0,0,0,1,0,0,0,1,1,0,0,0 +0.5714285714285714,0.7740728513260702,1.0,1.0,0.0,0,0,0,0,1,0,0,1,0,1,0,0 +0.4642857142857143,0.2200396170353252,1.0,1.0,0.0,1,0,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.03736106525806096,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.08928571428571427,0.039011775063277215,0.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3035714285714286,0.13931990756025092,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.7857142857142857,0.44613183668977663,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.11285352701661715,0.0,0.0,1.0,0,1,0,0,0,1,0,0,0,0,0,1 +0.7857142857142857,0.18245845713656875,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.20446792120611865,0.0,1.0,0.0,0,0,0,0,1,1,0,0,1,0,0,0 +0.03571428571428571,0.05430835259161439,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.03994717728623308,0.0,0.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.03659073401562672,0.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.03835149114119071,1.0,1.0,0.0,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.14168592494772753,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.3035714285714286,0.11213821943435677,0.0,0.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.145867723120942,0.0,1.0,1.0,0,1,0,0,0,1,0,0,0,0,0,1 +0.7321428571428571,0.23803235391218225,1.0,1.0,1.0,1,0,0,0,0,0,1,0,0,1,0,0 
+0.7857142857142857,0.3845053372950369,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.13733905579399142,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.08215032463959503,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.08928571428571427,0.06140640475404424,0.0,0.0,0.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.06905469351821283,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,0,0,1 +0.03571428571428571,0.005117200396170352,0.0,1.0,1.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.10856168152305491,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.06850445691647408,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.11428414218113787,0.0,0.0,0.0,0,1,0,0,0,0,1,0,0,0,1,0 +0.7321428571428571,0.6364036535710355,1.0,1.0,0.0,0,0,0,0,1,1,0,0,1,0,0,0 +0.14285714285714285,0.06789919665456146,1.0,0.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.4844833278309673,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,0,0,1 +0.14285714285714285,0.07730824254429404,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.06619346318917134,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.053813139650049524,0.0,0.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.037526136238582586,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.5714285714285714,0.4311654011224827,1.0,1.0,0.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.03571428571428571,0.06030593155056674,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.5714285714285714,0.1437218003741609,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,1,0,0 +0.3571428571428571,0.15808297567954221,0.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +1.0,0.3938593595245956,0.0,0.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.055793991416309016,1.0,0.0,0.0,0,0,1,0,0,1,0,0,0,1,0,0 +0.03571428571428571,0.07175085286673269,0.0,0.0,1.0,0,0,0,1,0,1,0,0,0,0,0,1 +0.3035714285714286,0.18719049191152196,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.7857142857142857,0.20254209310003302,1.0,1.0,0.0,0,0,0,1,0,0,0,1,1,0,0,0 +0.03571428571428571,0.061131286453174866,1.0,1.0,1.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.052437548145702645,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.18031253438978762,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.4184549356223176,0.0,0.0,0.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.17095851216022892,0.0,1.0,1.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.10179377132166832,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.0505667436997909,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.47854077253218885,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.09276989105315285,1.0,0.0,1.0,0,0,0,1,0,1,0,0,1,0,0,0 +0.3571428571428571,0.1794871794871795,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.3695939253879168,1.0,1.0,1.0,0,0,0,1,0,1,0,0,0,1,0,0 +0.4642857142857143,0.11461428414218112,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.09843732805106195,0.0,0.0,0.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.5714285714285714,0.49636843842852424,1.0,0.0,0.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.3035714285714286,0.07274127874986244,0.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.011224826675470454,1.0,0.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.33311323869263787,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.10713106635853417,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.7857142857142857,0.40601958842302194,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.05639925167822163,0.0,0.0,1.0,0,1,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.37795752173434577,1.0,1.0,0.0,0,0,1,0,0,0,0,1,1,0,0,0 
+0.3571428571428571,0.6783867062837019,0.0,1.0,0.0,0,0,0,0,1,0,0,1,0,1,0,0 +0.24999999999999997,0.1391548365797293,1.0,0.0,1.0,0,0,1,0,0,1,0,0,0,1,0,0 +0.0,0.06894464619786508,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.07142857142857142,0.05430835259161439,0.0,0.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.20160669087707717,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.15472653240893586,1.0,1.0,1.0,0,0,0,0,1,1,0,0,0,0,0,1 +0.5714285714285714,0.09370529327610873,1.0,1.0,0.0,0,0,0,0,1,0,0,1,0,1,0,0 +0.4642857142857143,0.19709475074281943,1.0,0.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.0708704743039507,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.10223396060305931,1.0,1.0,1.0,0,0,0,1,0,1,0,0,0,0,0,1 +0.03571428571428571,0.00979421151094971,0.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.2313744910311434,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3035714285714286,0.1138439528997469,1.0,1.0,0.0,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.05023660173874765,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.037746230879278087,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.12231759656652359,1.0,0.0,0.0,1,0,0,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.07257620776934082,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.1522504677011115,0.0,1.0,1.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.11450423682183337,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.10157367668097282,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.04330362055683944,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.03571428571428571,0.05425332893144051,1.0,1.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.21464729833828547,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.18482447452404535,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.06393749312204247,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.125,0.20298228238142402,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.0702101903818642,1.0,1.0,1.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.3214285714285714,0.1334323759216463,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.4642857142857143,0.2184989545504567,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.14713326730494114,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.08622207549246176,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.0536480686695279,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.08776273797733025,0.0,1.0,1.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.03571428571428571,0.0,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.022559700671288655,1.0,0.0,1.0,0,0,0,1,0,1,0,0,0,0,0,1 +0.03571428571428571,0.025255860019808517,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.8587542643336634,1.0,1.0,1.0,1,0,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.08732254869593925,0.0,1.0,0.0,0,0,0,1,0,0,0,1,0,1,0,0 +0.1607142857142857,0.08512160228898426,1.0,1.0,1.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.010894684714427203,0.0,0.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.16303510509519095,1.0,0.0,1.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.12815010454495435,1.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.05216242984483327,0.0,1.0,0.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.3035714285714286,0.26152745680642675,0.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.11461428414218112,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.055683944095961266,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.049796412457356665,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 
+0.10714285714285712,0.03543523715197534,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.16952789699570817,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.04748541873005392,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.02580609662154727,1.0,1.0,0.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.09981291955540883,0.0,1.0,0.0,1,0,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.18741058655221746,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,1,0 +0.3571428571428571,0.3502255970067129,1.0,1.0,1.0,1,0,0,0,0,0,0,1,1,0,0,0 +0.24999999999999997,0.16391548365797293,1.0,1.0,1.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.17018818091779467,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,0,0,1 +0.4642857142857143,0.1572576207769341,1.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.011444921316165951,1.0,0.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.06718388907230109,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,0,0,1 +0.125,0.05898536370639375,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.057114559260482006,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.025145812699460767,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.058215032463959496,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.08928571428571427,0.15538681633102236,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.12440849565313085,0.0,0.0,1.0,1,0,0,0,0,1,0,0,1,0,0,0 +0.03571428571428571,0.0819852536590734,1.0,1.0,1.0,0,0,0,0,1,1,0,0,0,0,0,1 +0.14285714285714285,0.04803565533179267,0.0,1.0,0.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.125,0.07301639705073182,0.0,0.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.7321428571428571,0.08776273797733025,1.0,0.0,0.0,0,0,1,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.09783206778914934,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.4107142857142857,0.4439308902828216,0.0,1.0,0.0,0,0,0,0,1,0,0,1,0,1,0,0 +0.14285714285714285,0.05749972488169913,0.0,0.0,0.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.6431165401122483,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.08633212281280951,1.0,1.0,0.0,0,0,1,0,0,0,0,1,0,0,0,1 +0.19642857142857142,0.0675690546935182,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.06965995378012545,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.1037746230879278,1.0,0.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.19599427753934193,0.0,0.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.21541762958071972,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.20947507428194126,1.0,1.0,0.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.7857142857142857,0.543468691537361,1.0,1.0,0.0,0,0,1,0,0,0,0,1,0,0,0,1 +0.03571428571428571,0.012875536480686695,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +1.0,0.33223286012985587,1.0,1.0,0.0,0,0,1,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.36436667767139874,1.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.07054033234290745,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.10366457576758006,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.04715527676901067,1.0,0.0,1.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.2857142857142857,0.10795642126114229,1.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.19642857142857142,0.16154946627049632,0.0,0.0,1.0,0,0,0,1,0,1,0,0,0,0,0,1 +0.7678571428571428,0.5762077693408165,0.0,1.0,1.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.053571428571428575,0.02641135688345989,1.0,1.0,1.0,0,0,0,0,1,1,0,0,0,0,0,1 +0.14285714285714285,0.10030813249697369,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.0176625949158138,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.19642857142857142,0.11609992296687575,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 
+0.14285714285714285,0.06019588423021899,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.6785714285714286,0.3809838230439089,0.0,1.0,0.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.6785714285714286,0.2501375591504347,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,1,0 +0.6785714285714286,0.20441289754594477,1.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.125,0.38395510069329813,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.05315285572796302,1.0,1.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.13744910311433917,0.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.6785714285714286,0.49702872235061074,1.0,1.0,1.0,1,0,0,0,0,0,0,1,0,1,0,0 +0.4107142857142857,0.23528117090348852,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.2750082535490261,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.16127434796962695,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.02569604930119952,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.055683944095961266,1.0,0.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.08990866072411137,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.09210960713106636,0.0,1.0,0.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.7857142857142857,0.39760096841641906,1.0,1.0,1.0,0,0,0,1,0,0,0,1,1,0,0,0 +0.14285714285714285,0.03791130185979971,1.0,1.0,0.0,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.03251898316275999,0.0,0.0,1.0,0,0,0,1,0,1,0,0,0,1,0,0 +0.3571428571428571,0.21255639925167824,0.0,1.0,0.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.2697259821723341,1.0,0.0,1.0,0,0,0,1,0,0,1,0,0,0,1,0 +0.5178571428571429,0.22180037416088919,0.0,0.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.07246616044899307,0.0,0.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.004897105755474855,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.19642857142857142,0.05639925167822163,0.0,0.0,0.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.021404203807637284,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.1127434796962694,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.086552217453505,1.0,1.0,0.0,1,0,0,0,0,0,1,0,1,0,0,0 +0.7857142857142857,0.41339275888632115,1.0,1.0,0.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.03571428571428571,0.07400682293386156,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.007428194123473095,0.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.14285714285714285,0.028942445251458126,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.15511169803015298,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,1,0,0 +0.08928571428571427,0.06129635743369649,1.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.0534279740288324,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.07142857142857142,0.17321448222735777,1.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.24999999999999997,0.21475734565863322,1.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.08407615274568064,0.0,1.0,0.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.08928571428571427,0.05425332893144051,0.0,0.0,1.0,0,1,0,0,0,1,0,0,0,0,0,1 +0.03571428571428571,0.08341586882359414,1.0,0.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.4642857142857143,0.43039506988004844,1.0,0.0,1.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.268460437988335,0.0,1.0,0.0,0,0,0,0,1,0,0,1,0,1,0,0 +0.3571428571428571,0.4620886981402003,1.0,1.0,0.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.21563772422141522,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.09271486739297898,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.11813579839330911,1.0,1.0,0.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.06652360515021459,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 
+0.14285714285714285,0.023935292175635527,1.0,0.0,0.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.7857142857142857,0.2396280400572246,1.0,0.0,0.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.125,0.10410476504897105,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.03851656212171234,1.0,1.0,0.0,0,0,0,0,1,1,0,0,1,0,0,0 +0.21428571428571427,0.050896885660834154,1.0,1.0,1.0,1,0,0,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.06866952789699571,0.0,0.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.09541102674149884,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,0,0,1 +0.5714285714285714,0.19555408825795093,0.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.09557609772202047,1.0,0.0,0.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.21398701441619897,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.024815670738417523,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.8928571428571428,0.5054473423572136,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.16176956091119182,0.0,0.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.1607142857142857,0.034774953229888855,1.0,0.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.13810938703642567,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,1,0,0 +0.19642857142857142,0.10845163420270716,0.0,0.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.020688896225376913,0.0,0.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.7857142857142857,1.0,0.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.0532078793881369,1.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.053571428571428575,0.12798503356443272,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.13189171343677783,1.0,1.0,1.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.4107142857142857,0.12765489160338947,0.0,0.0,0.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.1594585671838891,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.022889842632331906,0.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.14285714285714285,0.051447122262572906,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.4642857142857143,0.3138549576317817,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.7321428571428571,0.24738637614174097,1.0,0.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.0529877847474414,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.09067899196654561,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.6239683063717398,0.0,0.0,0.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.0,0.01931330472103004,0.0,0.0,1.0,0,1,0,0,0,1,0,0,0,0,0,1 +0.24999999999999997,0.05474854187300539,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.30218994167492025,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.17857142857142858,0.06382744580169472,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.035765379113018594,1.0,0.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.0061626499394739735,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.09513590844062947,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.17051832287883792,0.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.030097942115109497,0.0,1.0,0.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.014746340926598437,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.1129635743369649,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.1354682513480797,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.06734896005282272,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.08638714647298339,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09227467811158797,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.06509298998569385,0.0,0.0,1.0,0,0,1,0,0,1,0,0,1,0,0,0 
+0.14285714285714285,0.03895675140310334,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.5714285714285714,0.19725982172334106,0.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.028227137669197756,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.15010454495433037,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.05727963024100363,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.0233300319137229,1.0,0.0,0.0,0,0,0,1,0,0,1,0,1,0,0,0 +1.0,0.3453835149114119,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,0,0,1 +0.14285714285714285,0.03994717728623308,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.20353251898316277,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.043908880818752064,1.0,0.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.13315725762077696,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.4642857142857143,0.23841751953339935,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.11692527786948388,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.056784417299438755,0.0,0.0,0.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.19296797622977882,0.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.3571428571428571,0.2195994277539342,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.3482447452404534,1.0,1.0,1.0,1,0,0,0,0,0,0,1,1,0,0,0 +0.14285714285714285,0.10294926818531969,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.10570045119401342,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.18399911962143722,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.24408495653130846,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,0,0,1 +0.19642857142857142,0.2424892703862661,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.17857142857142858,0.48024650599757895,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.04429404643996919,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.15412127214702323,1.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.4642857142857143,0.17558049961483438,0.0,0.0,0.0,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.26961593485198637,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.09425552987784747,1.0,1.0,1.0,0,0,0,1,0,1,0,0,0,0,0,1 +0.24999999999999997,0.18377902498074172,1.0,1.0,1.0,1,0,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.3076923076923077,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.06707384175195334,1.0,1.0,1.0,0,0,0,0,1,1,0,0,0,0,0,1 +0.3571428571428571,0.12947067238912735,0.0,1.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.18713546825134808,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.15951359084406297,1.0,0.0,1.0,0,1,0,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.21239132827115662,1.0,0.0,0.0,0,0,0,0,1,1,0,0,1,0,0,0 +0.08928571428571427,0.06531308462638935,1.0,1.0,0.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.062011665015956854,0.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,1,0 +0.08928571428571427,0.06448772972378122,1.0,1.0,0.0,0,1,0,0,0,0,0,1,1,0,0,0 +0.4642857142857143,0.18553978210630573,0.0,1.0,1.0,0,0,0,0,1,1,0,0,1,0,0,0 +0.3571428571428571,0.17029822823814242,1.0,1.0,0.0,0,0,0,0,1,1,0,0,1,0,0,0 +0.14285714285714285,0.020633872565203038,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.040332342907450205,0.0,0.0,1.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.4642857142857143,0.15081985253659075,0.0,0.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.049466270496313414,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.001430615164520744,1.0,0.0,1.0,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.06168152305491362,0.0,0.0,0.0,0,0,1,0,0,1,0,0,1,0,0,0 
+0.14285714285714285,0.029987894794761747,0.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.056784417299438755,0.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.0064927919005172245,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.055463849455265765,1.0,0.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.10652580609662154,0.0,1.0,1.0,0,0,0,1,0,1,0,0,0,0,0,1 +0.3571428571428571,0.14223616154946628,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.18609001870804448,0.0,0.0,1.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.05838010344448112,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.11543963904478925,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.010069329811819083,0.0,0.0,0.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.19642857142857142,0.20408275558490152,0.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.13101133487399583,0.0,0.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.12077693408165512,1.0,1.0,1.0,0,1,0,0,0,1,0,0,0,0,0,1 +0.10714285714285712,0.04335864421701331,1.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.7857142857142857,0.473643666776714,1.0,1.0,1.0,0,0,0,1,0,0,0,1,0,0,0,1 +0.3571428571428571,0.05639925167822163,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.3476394849785408,1.0,0.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.06019588423021899,1.0,1.0,0.0,0,0,0,1,0,0,1,0,0,0,1,0 +0.4107142857142857,0.3144602178936943,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.020248706943985912,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.026246285902938263,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.19642857142857142,0.06569825024760646,1.0,0.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.18542973478595798,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.10714285714285712,0.10955210740618465,1.0,1.0,1.0,0,1,0,0,0,1,0,0,1,0,0,0 +1.0,0.799603829646748,0.0,1.0,0.0,0,0,0,0,1,0,0,1,0,1,0,0 +0.3571428571428571,0.09750192582810609,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.019753494002421042,0.0,1.0,0.0,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.15247056234180698,1.0,0.0,1.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.2272477165181028,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.10714285714285712,0.05827005612413337,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.14185099592824915,1.0,0.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.08501155496863651,0.0,1.0,1.0,0,0,0,0,1,1,0,0,0,1,0,0 +0.24999999999999997,0.07059535600308132,1.0,1.0,0.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.26702982282381427,1.0,1.0,1.0,0,1,0,0,0,0,0,1,0,0,0,1 +0.4642857142857143,0.5707604269836029,1.0,1.0,1.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.14285714285714285,0.06250687795752173,0.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.23885770881479035,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.17326950588753165,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,1,0 +0.03571428571428571,0.04385385715857819,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.4091009133927589,0.0,1.0,0.0,0,0,0,1,0,1,0,0,1,0,0,0 diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW-val.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW-val.csv new file mode 100644 index 0000000..d26a4e0 --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW-val.csv @@ -0,0 +1,301 @@ 
+month,credit_amount,sex,age,credit,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153,status=A11,status=A12,status=A13,status=A14 +0.24999999999999997,0.07169582920655881,1.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.06107626279300099,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.07142857142857142,0.02806206668867613,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.14575767580059426,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.7321428571428571,0.15302079894354573,1.0,0.0,0.0,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.24551557169582922,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.090073731704633,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.11131286453174864,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.035105095190932106,1.0,1.0,0.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.07538241443820842,0.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.1352481567073842,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.03389457466710685,1.0,0.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.07142857142857142,0.06404754044239022,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.05849015076482887,1.0,0.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.12809508088478047,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.1825685044569165,0.0,1.0,1.0,0,0,0,0,1,1,0,0,0,0,0,1 +0.3571428571428571,0.06289204357873886,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.10267414988445031,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.09695168922636734,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09298998569384835,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.05904038736656762,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,1,0 +0.5714285714285714,0.09502586112028172,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.047210300429184546,1.0,1.0,0.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.19642857142857142,0.05777484318256851,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.03571428571428571,0.0505667436997909,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.16501595686145043,1.0,1.0,1.0,0,0,0,1,0,0,0,1,1,0,0,0 +0.625,0.1275998679432156,1.0,0.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.07147573456586331,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.024760647078243648,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.14285714285714285,0.32067789149334214,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.08928571428571427,0.05260261912622427,0.0,0.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.060801144492131615,0.0,0.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.15010454495433037,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.07015516672169032,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.037526136238582586,0.0,0.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.4107142857142857,0.12534389787608674,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.15758776273797734,0.0,1.0,0.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.4642857142857143,0.09122922856828436,1.0,1.0,0.0,0,0,0,0,1,0,1,0,0,0,1,0 +0.14285714285714285,0.05447342357213601,0.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.02569604930119952,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.0695499064597777,1.0,0.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.08688235941454825,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.02145922746781116,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.39517992736876856,0.0,1.0,1.0,0,0,1,0,0,1,0,0,0,1,0,0 
+0.14285714285714285,0.0710905689446462,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.5160669087707714,1.0,0.0,0.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.00968416419060196,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.24061846594035435,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.45234951028942444,0.0,0.0,1.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.5714285714285714,0.1904368878617806,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.6877407285132606,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.6785714285714286,0.4199405744470122,1.0,1.0,1.0,0,0,0,1,0,0,0,1,1,0,0,0 +1.0,0.5444040937603168,0.0,0.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.05480356553317926,1.0,0.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.1840541432816111,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.18526466380543635,1.0,1.0,1.0,0,0,0,0,1,1,0,0,0,1,0,0 +0.14285714285714285,0.034995047870584356,0.0,0.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.07131066358534169,1.0,0.0,0.0,0,0,0,1,0,1,0,0,1,0,0,0 +0.14285714285714285,0.10570045119401342,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.13002090899086607,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.04682513480796743,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.1513700891383295,0.0,1.0,1.0,0,0,0,1,0,1,0,0,0,1,0,0 +0.14285714285714285,0.1116430064927919,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,0,0,1 +0.10714285714285712,0.06993507208099482,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.04357873885770881,0.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.3538571585781886,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,1,0,0 +0.5714285714285714,0.10426983602949268,1.0,0.0,0.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.3571428571428571,0.2512930560140861,1.0,1.0,0.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.7857142857142857,0.7797402883239792,1.0,0.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.02426543413667877,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.05441839991196214,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,1,0,0 +0.3035714285714286,0.1384945526576428,1.0,1.0,0.0,0,0,0,0,1,1,0,0,0,1,0,0 +0.24999999999999997,0.33069219764498736,1.0,1.0,1.0,0,0,0,1,0,1,0,0,0,1,0,0 +0.24999999999999997,0.027511830086937385,0.0,1.0,0.0,1,0,0,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.06894464619786508,1.0,1.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.024815670738417523,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.125,0.04974138879718279,0.0,1.0,1.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.16011885110597557,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.10311433916584131,0.0,0.0,0.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.02839220864971938,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.3685484758446132,1.0,1.0,1.0,0,0,1,0,0,1,0,0,0,1,0,0 +0.08928571428571427,0.0482007263123143,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,1,0 +0.24999999999999997,0.05315285572796302,0.0,0.0,0.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.10806646858149004,1.0,0.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.20864971937933313,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.04418399911962144,1.0,1.0,1.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.14559260482007264,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.327335754374381,1.0,1.0,0.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.41311764058545175,0.0,1.0,1.0,0,0,0,0,1,1,0,0,0,1,0,0 +0.14285714285714285,0.06173654671508748,1.0,1.0,0.0,0,0,0,1,0,0,1,0,1,0,0,0 
+0.4642857142857143,0.33564432706063607,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.2304941124683614,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.7857142857142857,0.8485748872014967,1.0,0.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.030483107736326624,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3035714285714286,0.07439198855507868,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.3421371189611533,1.0,1.0,0.0,1,0,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.06729393639264884,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.2851876306811929,0.0,1.0,1.0,1,0,0,0,0,1,0,0,0,0,0,1 +0.03571428571428571,0.05436337625178826,1.0,1.0,1.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.5178571428571429,0.13832948167712117,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.09199955981071861,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.1522504677011115,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,1,0 +0.3571428571428571,0.052657642786398146,0.0,0.0,0.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.21428571428571427,0.13068119291295258,1.0,1.0,0.0,0,0,0,0,1,1,0,0,1,0,0,0 +0.14285714285714285,0.023880268515461652,1.0,1.0,0.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.08928571428571427,0.07813359744690217,0.0,0.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.02321998459337515,0.0,1.0,1.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.12820512820512822,1.0,1.0,0.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.3336634752943766,1.0,1.0,1.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.1641906019588423,0.0,0.0,0.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.10714285714285712,0.3883569935072081,1.0,1.0,1.0,1,0,0,0,0,0,0,1,0,1,0,0 +0.1607142857142857,0.10184879498184218,0.0,0.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.32898646417959726,0.0,0.0,0.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.15219544404093763,1.0,0.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.014966435567293938,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.0926598437328051,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.7857142857142857,0.1376141740948608,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.06943985913942995,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.12396830637173985,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.05705953560030813,0.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.6679322108506658,1.0,1.0,0.0,0,0,1,0,0,0,0,1,0,1,0,0 +0.08928571428571427,0.05981071860900186,1.0,1.0,0.0,0,1,0,0,0,0,1,0,0,0,1,0 +0.4107142857142857,0.12061186310113349,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.10714285714285712,0.06129635743369649,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.11742049081104876,1.0,1.0,1.0,0,0,0,0,1,1,0,0,1,0,0,0 +0.08928571428571427,0.1402553097832068,1.0,1.0,1.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.06575327390778034,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.08215032463959503,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.06036095521074061,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.25420931000330144,1.0,1.0,0.0,0,0,1,0,0,0,0,1,1,0,0,0 +0.14285714285714285,0.0817651590183779,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.027731924727632886,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.5714285714285714,0.18075272367117862,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.0675690546935182,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.4642857142857143,0.35721360184879497,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.37839771101573677,0.0,0.0,0.0,0,1,0,0,0,1,0,0,0,0,0,1 
+0.19642857142857142,0.030593155056674374,0.0,0.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.14537251017937713,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,1,0 +0.5714285714285714,0.30538131396500495,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.15709254979641246,1.0,0.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.06250687795752173,1.0,1.0,1.0,0,0,1,0,0,1,0,0,0,0,0,1 +0.24999999999999997,0.11852096401452623,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.19962583911081766,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.15637724221415208,1.0,0.0,0.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.4285714285714286,0.1371739848134698,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.07285132607021019,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.0872125013755915,0.0,0.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.13579839330912294,0.0,0.0,0.0,0,0,1,0,0,1,0,0,0,1,0,0 +0.7857142857142857,0.2527786948387807,1.0,1.0,0.0,1,0,0,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.052217453505007144,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.17857142857142858,0.20485308682733577,1.0,0.0,1.0,1,0,0,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.1336524705623418,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.10360955210740619,0.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.3571428571428571,0.05397821063057115,0.0,1.0,1.0,0,0,0,0,1,1,0,0,1,0,0,0 +0.3571428571428571,0.14427203697589966,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.11978650819852536,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +1.0,0.3877517332452955,1.0,1.0,0.0,0,0,0,0,1,1,0,0,1,0,0,0 +0.4642857142857143,0.40420380763728403,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.625,0.7664245625619016,1.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.08928571428571427,0.06883459887751733,0.0,1.0,0.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.7857142857142857,0.19775503466490593,1.0,1.0,0.0,0,0,0,1,0,0,0,1,0,1,0,0 +0.6785714285714286,0.20864971937933313,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.06184659403543523,0.0,1.0,1.0,1,0,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.41619896555518876,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.03626059205458347,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.34158688235941453,1.0,1.0,0.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.11962143721800374,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.13965004952129417,0.0,0.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.024595576097722022,1.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.3035714285714286,0.12248266754704522,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.0706503796632552,1.0,0.0,0.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.03571428571428571,0.04500935402222955,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.18284362275778587,0.0,0.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.05782986684274238,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.6785714285714286,0.3805436337625179,1.0,1.0,1.0,0,0,0,1,0,1,0,0,0,0,0,1 +0.3571428571428571,0.09843732805106195,1.0,0.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.12286783316826234,1.0,0.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.18526466380543635,1.0,0.0,1.0,0,0,0,0,1,1,0,0,0,0,1,0 +0.3571428571428571,0.09948277759436558,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.08281060856168151,0.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.07202597116760207,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.16017387476614944,1.0,1.0,0.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.7857142857142857,0.5486959392538792,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 
+0.3571428571428571,0.062231759656652355,0.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3035714285714286,0.10955210740618465,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.12589413447782546,1.0,1.0,0.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.4300099042588313,0.0,0.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.7857142857142857,0.33927588863211183,0.0,1.0,0.0,0,0,0,0,1,1,0,0,0,1,0,0 +0.3571428571428571,0.24683613954000222,0.0,0.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.037416088918234836,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.1463629360625069,1.0,1.0,1.0,0,0,0,1,0,0,0,1,1,0,0,0 +0.24999999999999997,0.11158798283261802,0.0,1.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.2318146803125344,1.0,1.0,0.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.11797072741278751,0.0,0.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.1103224386486189,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.625,0.4587872785297678,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.3022449653350941,0.0,0.0,1.0,0,1,0,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.05227247716518102,0.0,0.0,1.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.4642857142857143,0.08897325850115549,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.0335644327060636,0.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.04258831297457907,1.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +1.0,0.36238582590513924,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.07868383404864092,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.06399251678221635,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.07142857142857142,0.03615054473423572,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.05188731154396391,0.0,1.0,0.0,1,0,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.1280400572246066,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.2047980631671619,0.0,0.0,1.0,0,0,0,1,0,1,0,0,0,0,0,1 +0.3571428571428571,0.1930229998899527,0.0,0.0,1.0,1,0,0,0,0,1,0,0,0,1,0,0 +0.24999999999999997,0.11389897655992078,1.0,1.0,1.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.12176735996478487,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.06014086057004511,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,1,0 +0.4285714285714286,0.4167492021569275,1.0,1.0,1.0,0,1,0,0,0,1,0,0,0,0,0,1 +0.3035714285714286,0.16644657202597118,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.16474083856058105,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.4107142857142857,0.46016287003411466,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.08759766699680863,0.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.12991086167051832,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.08928571428571427,0.09794211510949709,1.0,0.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.49669858038956755,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,0,0,1 +0.14285714285714285,0.045449543303620554,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.07131066358534169,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.09040387366567623,0.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.10404974138879718,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.425332893144052,1.0,1.0,0.0,0,1,0,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.055793991416309016,1.0,1.0,1.0,0,0,0,0,1,1,0,0,0,0,0,1 +0.24999999999999997,0.10999229668757565,1.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.7857142857142857,0.324254429404644,0.0,1.0,0.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.3035714285714286,0.1842742379223066,0.0,1.0,1.0,0,0,0,1,0,1,0,0,1,0,0,0 +0.19642857142857142,0.07081545064377683,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 
+0.5714285714285714,0.590073731704633,0.0,1.0,0.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.2226807527236712,1.0,1.0,0.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.05172224056344228,0.0,1.0,0.0,1,0,0,0,0,0,0,1,1,0,0,0 +0.14285714285714285,0.056344228018047754,0.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.11863101133487398,1.0,1.0,1.0,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.33856058104985143,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.10714285714285712,0.05705953560030813,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.6785714285714286,0.3138549576317817,0.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.03571428571428571,0.09392538791680423,0.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.3571428571428571,0.07164080554638494,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.03780125453945196,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09805216242984482,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.5714285714285714,0.5846263893474194,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.2659843732805106,0.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.5714285714285714,0.2712116209970287,1.0,1.0,0.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.044018928139099814,1.0,1.0,1.0,1,0,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.2015516672169033,1.0,0.0,1.0,0,0,1,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.15401122482667548,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.44486629250577747,1.0,1.0,0.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.14285714285714285,0.08847804555959062,0.0,0.0,1.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.25272367117860683,1.0,1.0,0.0,0,0,0,0,1,1,0,0,1,0,0,0 +0.24999999999999997,0.19423352041377795,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.09783206778914934,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.11191812479366127,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.1363486299108617,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.4107142857142857,0.27748431825685044,1.0,1.0,0.0,1,0,0,0,0,0,1,0,1,0,0,0 +1.0,0.4900957411687025,1.0,1.0,1.0,0,0,1,0,0,0,0,1,0,1,0,0 +0.24999999999999997,0.09315505667436998,0.0,0.0,0.0,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.148949048090679,1.0,1.0,1.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.11769560911191813,0.0,1.0,1.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.08979861340376362,0.0,0.0,0.0,0,0,1,0,0,1,0,0,1,0,0,0 +0.4642857142857143,0.08060966215472654,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.33459887751733247,1.0,1.0,0.0,0,0,0,0,1,0,0,1,1,0,0,0 +0.19642857142857142,0.0586552217453505,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.8058765269065697,1.0,1.0,0.0,0,0,0,0,1,0,1,0,1,0,0,0 +0.4642857142857143,0.15555188731154398,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.0,0.07120061626499395,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.041652910751623196,1.0,0.0,1.0,0,0,1,0,0,0,1,0,0,1,0,0 +0.10714285714285712,0.10008803785627819,1.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.7002310993727302,0.0,1.0,0.0,1,0,0,0,0,0,0,1,0,1,0,0 +0.24999999999999997,0.1570375261362386,1.0,1.0,1.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.5714285714285714,0.20578848905029165,0.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.17877187190491914,0.0,1.0,0.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.048750962914053037,1.0,1.0,0.0,0,0,0,0,1,0,0,1,0,1,0,0 +0.19642857142857142,0.007813359744690218,0.0,0.0,1.0,0,1,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.048475844613183675,0.0,1.0,0.0,1,0,0,0,0,0,1,0,1,0,0,0 
+0.14285714285714285,0.06459777704412897,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,1,0,0 +0.6785714285714286,0.17299438758666227,1.0,1.0,1.0,1,0,0,0,0,0,1,0,1,0,0,0 +0.7857142857142857,0.3713546825134808,1.0,1.0,0.0,0,0,0,1,0,0,1,0,1,0,0,0 +0.4642857142857143,0.6464179597226807,1.0,1.0,0.0,0,1,0,0,0,0,1,0,1,0,0,0 +0.4642857142857143,0.14201606690877078,0.0,1.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09436557719819522,0.0,1.0,1.0,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.07411687025420931,1.0,1.0,1.0,0,0,0,0,1,0,0,1,0,0,0,1 +0.03571428571428571,0.09975789589523495,1.0,1.0,1.0,0,1,0,0,0,1,0,0,0,1,0,0 +0.3928571428571428,0.42456256190161773,1.0,1.0,1.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.4034334763948498,1.0,1.0,1.0,1,0,0,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.24369979091009136,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.30543633762517886,1.0,1.0,1.0,0,0,0,0,1,1,0,0,0,0,0,1 +0.3571428571428571,0.1990756025090789,0.0,1.0,1.0,0,0,0,0,1,1,0,0,0,0,0,1 +0.14285714285714285,0.11775063277209201,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.3631561571475735,1.0,1.0,0.0,1,0,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.0675690546935182,1.0,0.0,0.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.10069329811819082,1.0,0.0,1.0,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.03747111257840871,0.0,1.0,0.0,0,1,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.05469351821283151,0.0,1.0,1.0,0,0,0,0,1,1,0,0,0,0,0,1 +0.10714285714285712,0.10355452844723231,0.0,1.0,1.0,0,1,0,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.0935402222955871,1.0,1.0,1.0,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.06184659403543523,1.0,1.0,1.0,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.06597336854847584,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.07329151535160118,1.0,1.0,1.0,0,0,0,0,1,0,1,0,0,0,0,1 diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR-test.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR-test.csv new file mode 100644 index 0000000..5e83fb1 --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR-test.csv @@ -0,0 +1,201 @@ +month,credit_amount,sex,age,credit,pred_credit,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153,status=A11,status=A12,status=A13,status=A14 +0.5714285714285714,0.30802244965335096,1.0,0.0,1.0,0.31967729971945585,0,0,1,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.11290855067679102,1.0,1.0,0.0,0.23796927750170668,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.04693518212831517,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.0704302850225597,1.0,1.0,0.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.08093980411576977,1.0,1.0,1.0,0.4768786443351435,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.1389347419390338,1.0,1.0,0.0,0.9234069094768464,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.08974358974358974,0.0,1.0,1.0,0.5785653349165971,0,0,1,0,0,1,0,0,0,0,1,0 +0.19642857142857142,0.15291075162319798,1.0,1.0,1.0,0.9318352763505735,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.07741828986464179,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.3035714285714286,0.11571475734565863,1.0,1.0,1.0,0.5855097720668199,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.13255199735886433,1.0,1.0,1.0,0.3989479019699039,0,0,1,0,0,0,1,0,1,0,0,0 
+0.14285714285714285,0.07384175195333993,0.0,1.0,1.0,0.9086619979671742,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09216463079124024,1.0,1.0,1.0,0.5640737884995887,0,0,1,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.594475624518543,1.0,1.0,1.0,0.7918122026495104,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09447562451854297,0.0,0.0,1.0,0.3676314741249905,0,0,0,0,1,0,1,0,0,1,0,0 +0.5714285714285714,0.13321228128095083,0.0,1.0,0.0,0.2581995981513079,0,0,1,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.17497523935292178,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.23236491691427316,1.0,1.0,1.0,0.48667382453412633,0,0,0,0,1,0,1,0,0,0,1,0 +0.14285714285714285,0.24347969626939586,0.0,0.0,1.0,0.8290700279000807,0,1,0,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.041212721470232194,1.0,0.0,1.0,0.8205202256524424,0,0,0,0,1,0,1,0,0,0,0,1 +0.7857142857142857,0.5893584241223726,1.0,1.0,0.0,0.40471109118578136,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.05876526906569825,1.0,1.0,1.0,0.7138311294496358,0,0,0,0,1,0,1,0,0,1,0,0 +0.03571428571428571,0.019753494002421042,0.0,1.0,1.0,0.5122497385912862,0,0,0,1,0,0,1,0,1,0,0,0 +0.3035714285714286,0.1509298998569385,1.0,1.0,1.0,0.8769881149919502,0,0,1,0,0,0,1,0,0,0,0,1 +1.0,0.847529437658193,1.0,0.0,1.0,0.5547639995361563,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.17156377242214152,1.0,0.0,1.0,0.8695372347753674,0,0,0,0,1,1,0,0,0,0,0,1 +0.7857142857142857,0.24969736986904373,1.0,1.0,1.0,0.21980151898449196,0,0,0,1,0,0,1,0,1,0,0,0 +0.14285714285714285,0.10982722570705403,1.0,1.0,0.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.08237041928029053,1.0,0.0,1.0,0.2729209608367981,0,1,0,0,0,0,1,0,1,0,0,0 +0.7857142857142857,0.2666446572025971,0.0,1.0,0.0,0.22634832395437568,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.034885000550236606,1.0,1.0,0.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.32023770221195114,1.0,1.0,1.0,0.9073342365921735,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.06228678331682623,1.0,1.0,1.0,0.43499565515022004,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.02767690106745901,0.0,1.0,1.0,0.6295469835412176,0,0,1,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.13464289644547156,1.0,1.0,1.0,0.921786795969523,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.01870804445911742,1.0,1.0,1.0,0.7623117828116139,0,1,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.09183448883019699,1.0,1.0,0.0,0.539932226665866,0,1,0,0,0,0,1,0,0,1,0,0 +0.3035714285714286,0.07686805326290305,1.0,1.0,0.0,0.3692311661992841,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.3090128755364807,1.0,1.0,1.0,0.6156438955835984,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.3132496973698691,0.0,1.0,0.0,0.8301501720360537,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.026246285902938263,1.0,1.0,0.0,0.5082778240326536,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.14097061736546715,0.0,1.0,1.0,0.28637876253981037,0,0,0,0,1,1,0,0,1,0,0,0 +0.7857142857142857,0.22328601298558381,0.0,0.0,0.0,0.0810126268631418,0,1,0,0,0,1,0,0,1,0,0,0 +0.4642857142857143,0.3266754704522945,1.0,0.0,1.0,0.26441754960248154,0,0,0,1,0,1,0,0,1,0,0,0 +0.24999999999999997,0.05749972488169913,0.0,1.0,1.0,0.5545926067069147,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.3004842082095301,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.7857142857142857,0.32243864861890614,1.0,1.0,1.0,0.6234592356361524,0,0,1,0,0,0,0,1,0,0,0,1 +0.4642857142857143,0.44767249917464513,1.0,1.0,0.0,0.591004897644727,0,0,0,1,0,0,1,0,0,1,0,0 
+0.24999999999999997,0.09414548255749973,0.0,0.0,1.0,0.43705682731209555,0,0,0,0,1,0,1,0,0,0,1,0 +0.14285714285714285,0.04996148343787829,1.0,1.0,1.0,0.6731420559668704,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.06382744580169472,1.0,1.0,1.0,0.6731420559668704,0,0,1,0,0,0,1,0,0,1,0,0 +0.5178571428571429,0.385330692197645,1.0,1.0,1.0,0.8654407578963966,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.023825244855287777,0.0,1.0,1.0,0.681321561513423,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06399251678221635,1.0,1.0,1.0,0.9393970480266846,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.2817761637504127,1.0,1.0,1.0,0.23796927750170668,0,0,1,0,0,0,1,0,1,0,0,0 +0.4642857142857143,0.2217453505007153,0.0,1.0,0.0,0.383421164823506,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.06096621547265324,1.0,1.0,0.0,0.38472306870888867,0,0,0,0,1,0,1,0,1,0,0,0 +0.10714285714285712,0.0292175635523275,1.0,1.0,1.0,0.6696986103919891,0,0,0,0,1,0,0,1,0,0,1,0 +0.0,0.17222405634422802,0.0,1.0,1.0,0.9329250944686631,0,0,0,1,0,0,1,0,0,0,0,1 +0.08928571428571427,0.045779685264663805,1.0,1.0,1.0,0.7387979215486421,0,0,0,0,1,0,1,0,0,1,0,0 +0.5714285714285714,0.11483437878287663,1.0,1.0,1.0,0.47720039923434754,0,0,0,0,1,0,1,0,0,1,0,0 +0.053571428571428575,0.03279410146362936,1.0,1.0,1.0,0.9178375942401232,0,0,0,0,1,0,0,1,0,0,0,1 +0.7857142857142857,0.45322988885220644,0.0,0.0,1.0,0.20756498401823725,0,0,0,1,0,0,1,0,0,1,0,0 +0.19642857142857142,0.09106415758776273,1.0,1.0,1.0,0.6955811084767616,0,0,0,0,1,1,0,0,0,0,1,0 +0.14285714285714285,0.08550676791020138,1.0,1.0,1.0,0.7138311294496358,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.022119511389897654,0.0,0.0,1.0,0.2848197012851972,0,0,0,0,1,1,0,0,1,0,0,0 +0.14285714285714285,0.059480576647958625,1.0,0.0,0.0,0.5995907180616916,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.15566193463189174,1.0,1.0,1.0,0.7918122026495104,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.1340926598437328,1.0,1.0,1.0,0.7377669250251128,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.05942555298778475,1.0,1.0,1.0,0.7215189623993398,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.05051172003961703,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.17585561791570375,1.0,1.0,0.0,0.27443988356010096,0,0,0,0,1,0,1,0,1,0,0,0 +0.08928571428571427,0.052217453505007144,0.0,1.0,1.0,0.6908920489455583,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.02701661714537251,0.0,0.0,0.0,0.2844193852648953,1,0,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.39198855507868385,1.0,1.0,1.0,0.6594326055741871,1,0,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.10559040387366568,1.0,1.0,0.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.05986574226917574,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.33944095961263343,0.0,1.0,1.0,0.22408792958769466,0,0,0,0,1,0,0,1,1,0,0,0 +0.19642857142857142,0.039892153626059204,1.0,0.0,1.0,0.311205543841052,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.13392758886321118,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.23528117090348852,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.038571585781886214,0.0,1.0,0.0,0.6155200677110325,0,1,0,0,0,1,0,0,0,1,0,0 +0.8928571428571428,0.8635963464289644,1.0,1.0,0.0,0.3004289224837474,0,1,0,0,0,1,0,0,0,1,0,0 +0.19642857142857142,0.18190822053483,1.0,1.0,1.0,0.9016367060401769,0,0,1,0,0,0,1,0,0,0,0,1 +0.0,0.06630351050951909,1.0,1.0,1.0,0.9558919804312949,0,0,0,1,0,0,1,0,0,0,0,1 
+0.3571428571428571,0.09271486739297898,1.0,1.0,0.0,0.6014238410536501,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.1581379993397161,1.0,1.0,1.0,0.7138311294496358,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.07114559260482008,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.14933421371189612,1.0,1.0,1.0,0.3103438400976657,0,0,0,0,1,0,0,1,1,0,0,0 +0.24999999999999997,0.06558820292725871,1.0,1.0,0.0,0.41602094847665755,0,0,0,1,0,0,0,1,1,0,0,0 +1.0,0.7431495543083525,1.0,1.0,1.0,0.5481571705989746,0,0,0,0,1,0,0,1,0,0,0,1 +0.2857142857142857,0.3245295477055134,1.0,1.0,1.0,0.6408270671508309,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.3336084516342027,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.41900517222405637,1.0,1.0,0.0,0.42657246837101115,0,0,0,0,1,0,0,1,1,0,0,0 +0.14285714285714285,0.05760977220204688,1.0,0.0,1.0,0.5621939327570208,0,0,1,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.121712336304611,1.0,0.0,0.0,0.2849263027670955,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.3582590513920986,1.0,1.0,0.0,0.8899154955245911,0,0,1,0,0,1,0,0,0,0,0,1 +0.5714285714285714,0.5129305601408606,1.0,1.0,0.0,0.8216427748498599,0,1,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.39391438318476946,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.36519203257400684,1.0,1.0,0.0,0.23796927750170668,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.18548475844613185,1.0,1.0,0.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.06063607351160999,0.0,0.0,1.0,0.3382029230298863,1,0,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.06767910201386595,1.0,1.0,1.0,0.650463907744803,1,0,0,0,0,0,0,1,0,0,1,0 +0.24999999999999997,0.042808407615274574,0.0,1.0,1.0,0.8384025299994181,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.035875426433366345,1.0,0.0,0.0,0.43311856998291953,0,0,0,1,0,1,0,0,1,0,0,0 +0.10714285714285712,0.1136238582590514,1.0,1.0,1.0,0.5291896437274354,0,0,0,0,1,0,1,0,1,0,0,0 +0.08928571428571427,0.09519093210080334,0.0,0.0,0.0,0.8461497236114719,0,1,0,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.07169582920655881,0.0,0.0,1.0,0.41718500620577564,0,0,0,1,0,1,0,0,0,1,0,0 +0.10714285714285712,0.05447342357213601,0.0,1.0,0.0,0.5654556515140055,0,0,0,0,1,0,0,1,0,0,1,0 +0.14285714285714285,0.05353802134918015,1.0,1.0,0.0,0.7138311294496358,0,0,0,0,1,1,0,0,0,1,0,0 +0.3035714285714286,0.12963574336964895,0.0,1.0,1.0,0.31273337858185585,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.06377242214152085,1.0,1.0,1.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.07252118410916694,0.0,0.0,1.0,0.7569602202607998,0,0,1,0,0,1,0,0,0,0,0,1 +0.01785714285714285,0.1759656652360515,1.0,1.0,1.0,0.9540917374927895,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.1992956971497744,0.0,1.0,1.0,0.5541068942854244,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.116430064927919,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,0,1,0,0,1,0,0 +0.5714285714285714,0.4087157477715418,0.0,1.0,1.0,0.7844990820387668,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.5033564432706064,1.0,1.0,0.0,0.47720039923434754,0,1,0,0,0,1,0,0,0,1,0,0 +0.5714285714285714,0.5126004181798173,0.0,1.0,1.0,0.709384236480772,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.0928799383735006,1.0,1.0,0.0,0.38472306870888867,0,1,0,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.10448993067018818,1.0,1.0,0.0,0.38048671184038707,0,0,1,0,0,0,0,1,1,0,0,0 +0.5714285714285714,0.350170573346539,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 
+0.14285714285714285,0.14355672939363928,1.0,1.0,1.0,0.9262782795387031,1,0,0,0,0,0,1,0,0,0,0,1 +0.2857142857142857,0.13090128755364808,1.0,1.0,1.0,0.5956378735010521,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.32761087267525035,1.0,1.0,1.0,0.6156438955835984,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.18851105975569496,1.0,1.0,1.0,0.5231864157229879,0,0,1,0,0,1,0,0,1,0,0,0 +0.19642857142857142,0.114229118520964,1.0,1.0,1.0,0.6449158361571704,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.17244415098492352,1.0,1.0,0.0,0.5231864157229879,0,0,1,0,0,1,0,0,1,0,0,0 +0.7857142857142857,0.1546164850885881,1.0,1.0,0.0,0.40471109118578136,0,0,0,1,0,0,1,0,0,1,0,0 +0.03571428571428571,0.05771981952239463,1.0,1.0,1.0,0.7333540005028889,0,0,1,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.06426763508308572,1.0,1.0,1.0,0.9187129412749173,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.04104765048971058,0.0,0.0,1.0,0.8565716032957927,0,0,0,1,0,0,1,0,0,0,0,1 +0.2857142857142857,0.1780015406624849,1.0,1.0,1.0,0.9000458966532298,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.17211400902388027,1.0,1.0,1.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.05359304500935402,1.0,1.0,1.0,0.921786795969523,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.02723671178606801,0.0,1.0,0.0,0.6088670652522917,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.08858809287993837,1.0,1.0,1.0,0.7134293371692706,1,0,0,0,0,0,1,0,0,1,0,0 +0.2857142857142857,0.2213051612193243,0.0,0.0,1.0,0.22169998970541557,0,0,0,0,1,0,1,0,1,0,0,0 +0.053571428571428575,0.11912622427643886,1.0,1.0,1.0,0.7174552542410282,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.19406844943325632,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.4642857142857143,0.10625068779575217,1.0,1.0,1.0,0.539932226665866,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.239022779795312,1.0,1.0,1.0,0.9073342365921735,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.028007043028502255,1.0,1.0,0.0,0.5600008807692614,0,0,0,1,0,0,1,0,1,0,0,0 +0.4642857142857143,0.23550126554418402,0.0,1.0,1.0,0.8239601442576517,0,0,0,1,0,1,0,0,0,0,0,1 +0.10714285714285712,0.09210960713106636,1.0,1.0,1.0,0.9187129412749173,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.17591064157587763,0.0,1.0,1.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1 +1.0,0.7580609662154726,1.0,1.0,0.0,0.29141195522842805,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.14383184769450866,1.0,1.0,0.0,0.624582242671011,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.05529877847474414,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.6785714285714286,0.22669747991636405,1.0,1.0,0.0,0.26590645487362585,0,0,0,1,0,0,1,0,1,0,0,0 +0.14285714285714285,0.03719599427753935,0.0,1.0,1.0,0.8896713869108571,1,0,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.030373060415978873,1.0,1.0,0.0,0.6874873703107746,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.04632992186640256,0.0,1.0,1.0,0.5692869095347953,0,0,1,0,0,0,1,0,0,1,0,0 +0.3035714285714286,0.07378672829316606,1.0,1.0,1.0,0.9140325439012884,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.23131946737096953,0.0,1.0,1.0,0.709384236480772,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.023770221195113902,0.0,1.0,1.0,0.9086619979671742,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.3362495873225487,1.0,1.0,1.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.055463849455265765,0.0,0.0,1.0,0.8290700279000807,0,1,0,0,0,1,0,0,0,0,0,1 +0.4107142857142857,0.12490370859469573,1.0,0.0,0.0,0.39742586223642357,0,0,1,0,0,0,1,0,0,1,0,0 
+0.03571428571428571,0.00484208209530098,1.0,1.0,1.0,0.5706358388636316,0,0,0,0,1,0,1,0,1,0,0,0 +0.2857142857142857,0.37454605480356556,1.0,1.0,1.0,0.6871883067965017,0,0,0,1,0,1,0,0,0,1,0,0 +0.2857142857142857,0.10922196544514141,1.0,1.0,0.0,0.3790392890575462,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.06459777704412897,0.0,1.0,1.0,0.6244595378782976,0,0,0,0,1,0,1,0,0,0,1,0 +0.3571428571428571,0.6095521074061846,1.0,1.0,0.0,0.5547218618582729,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.05496863651370089,1.0,1.0,1.0,0.8839295784323556,0,1,0,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.10278419720479806,1.0,1.0,1.0,0.7001676841419655,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.05694948827996038,0.0,1.0,0.0,0.3307077506353231,0,0,0,1,0,1,0,0,1,0,0,0 +0.7857142857142857,0.18311874105865522,1.0,1.0,1.0,0.7359170768632337,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.14295146913172666,1.0,1.0,1.0,0.9187129412749173,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.17932210850665786,0.0,0.0,1.0,0.276086333823588,0,0,0,1,0,0,1,0,1,0,0,0 +0.07142857142857142,0.05029162539892153,1.0,1.0,1.0,0.4679701801537487,0,0,0,0,1,0,0,1,1,0,0,0 +0.3035714285714286,0.2050181578078574,1.0,1.0,1.0,0.678114089607729,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.18498954550456698,0.0,1.0,1.0,0.5545926067069147,0,0,0,0,1,0,1,0,0,1,0,0 +1.2142857142857142,0.2941014636293606,1.0,0.0,0.0,0.09102457054520846,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.025255860019808517,1.0,1.0,0.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.43903378452734676,1.0,1.0,0.0,0.23796927750170668,0,0,1,0,0,0,1,0,1,0,0,0 +0.3035714285714286,0.17409486079013978,1.0,1.0,0.0,0.4148665750517137,0,1,0,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.6802024870694399,1.0,1.0,0.0,0.3516352721472709,0,0,1,0,0,0,0,1,0,1,0,0 +0.03571428571428571,0.08253549026081215,1.0,1.0,1.0,0.9418146463783325,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.15555188731154398,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.2791900517222406,0.0,1.0,1.0,0.8368159662459218,0,0,0,0,1,0,0,1,0,0,0,1 +0.0,0.06844943325630021,1.0,1.0,1.0,0.7836588733899705,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.12809508088478047,0.0,1.0,1.0,0.32271911415921684,1,0,0,0,0,0,0,1,1,0,0,0 +0.053571428571428575,0.11439418950148562,0.0,1.0,1.0,0.6637429098283977,0,1,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.022229558710245404,1.0,1.0,0.0,0.4917873581105606,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.4337515131506548,0.0,1.0,1.0,0.709384236480772,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.19880048420820953,1.0,1.0,1.0,0.8189994124469444,0,0,1,0,0,0,0,1,0,0,0,1 +0.14285714285714285,0.39738087377572356,0.0,0.0,1.0,0.48957791029126796,1,0,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.07180587652690656,1.0,1.0,0.0,0.9005973681731101,0,0,0,0,1,0,0,1,0,0,0,1 +0.14285714285714285,0.055683944095961266,1.0,1.0,1.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0 +0.08928571428571427,0.02971277649389237,0.0,1.0,1.0,0.599794302613941,0,0,1,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.27423792230659183,1.0,1.0,0.0,0.5394438034973912,1,0,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.11863101133487398,0.0,0.0,0.0,0.18745666815478274,0,0,0,1,0,1,0,0,1,0,0,0 +0.14285714285714285,0.1496093320127655,0.0,0.0,0.0,0.49006922670215497,0,1,0,0,0,1,0,0,0,1,0,0 +0.3035714285714286,0.6839440959612633,1.0,1.0,0.0,0.8613899174477632,0,0,0,0,1,0,0,1,0,0,0,1 +0.14285714285714285,0.045779685264663805,1.0,1.0,0.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 
+0.4107142857142857,0.20166171453725104,1.0,1.0,0.0,0.5235081475203799,0,0,1,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.19709475074281943,1.0,1.0,1.0,0.9345335095933344,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.09232970177176185,1.0,1.0,0.0,0.6598740118142182,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.5286123032904149,1.0,1.0,1.0,0.5291642625196091,0,0,0,1,0,0,1,0,0,1,0,0 diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR-train.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR-train.csv new file mode 100644 index 0000000..1627cd1 --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR-train.csv @@ -0,0 +1,501 @@ +month,credit_amount,sex,age,credit,pred_credit,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153,status=A11,status=A12,status=A13,status=A14 +0.5714285714285714,0.20408275558490152,1.0,1.0,1.0,0.2740485661497681,1,0,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.18306371739848135,1.0,1.0,1.0,0.921786795969523,0,0,1,0,0,1,0,0,0,0,0,1 +0.24999999999999997,0.12462859029382635,1.0,1.0,1.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.09601628700341147,1.0,1.0,1.0,0.7138311294496358,0,1,0,0,0,0,1,0,0,1,0,0 +1.0,0.5566193463189171,1.0,1.0,1.0,0.6276600176708876,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.39303400462198745,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.008748761967646089,0.0,1.0,1.0,0.5785653349165971,0,0,1,0,0,1,0,0,0,0,1,0 +0.08928571428571427,0.12418840101243535,1.0,1.0,1.0,0.9112945990675128,0,0,0,0,1,0,0,1,0,0,0,1 +0.4642857142857143,0.25096291405304283,0.0,0.0,1.0,0.737517899393192,0,0,0,1,0,1,0,0,0,0,0,1 +0.7857142857142857,0.3471992956971498,1.0,0.0,0.0,0.28983662188342285,0,0,0,1,0,0,1,0,0,1,0,0 +0.5714285714285714,0.48332783096731596,1.0,1.0,0.0,0.47720039923434754,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.1816881258941345,1.0,1.0,0.0,0.43499565515022004,0,0,0,1,0,0,1,0,1,0,0,0 +0.2857142857142857,0.3421371189611533,1.0,1.0,1.0,0.6403744137297571,1,0,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.1250137559150435,1.0,1.0,1.0,0.32719918796985015,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.14141080664685815,1.0,1.0,0.0,0.47720039923434754,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.029327610872675252,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.19329811819082207,1.0,0.0,1.0,0.34386083170097576,0,0,0,1,0,0,1,0,1,0,0,0 +0.4642857142857143,0.39809618135798397,0.0,1.0,0.0,0.7914015454381981,1,0,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.06344228018047761,0.0,1.0,1.0,0.3257054908520209,0,0,1,0,0,1,0,0,1,0,0,0 +0.125,0.24876196764608782,1.0,1.0,1.0,0.7620595412443067,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.20743919885550788,1.0,1.0,1.0,0.34047407781017125,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.052327500825354895,1.0,1.0,1.0,0.6014238410536501,0,1,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.28232640035215145,1.0,1.0,1.0,0.2839698095777251,1,0,0,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.28947947617475517,1.0,0.0,1.0,0.7905497651969635,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.04666006382744581,0.0,1.0,1.0,0.862481750443303,1,0,0,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.32871134587872786,1.0,1.0,0.0,0.28436974428843736,0,0,0,0,1,0,0,1,0,1,0,0 +0.125,0.06289204357873886,0.0,1.0,1.0,0.8939030026966095,0,1,0,0,0,0,1,0,0,0,0,1 
+0.08928571428571427,0.09183448883019699,1.0,1.0,1.0,0.7769155728885463,0,0,0,1,0,1,0,0,0,1,0,0 +0.19642857142857142,0.2638384505337295,0.0,1.0,1.0,0.8769316531253295,0,0,0,0,1,0,1,0,0,0,0,1 +0.7857142857142857,0.3136898866512601,0.0,0.0,0.0,0.1493953955906259,0,0,1,0,0,0,1,0,0,1,0,0 +0.07142857142857142,0.026466380543633764,1.0,1.0,1.0,0.5499994958063575,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.02085396720589854,1.0,1.0,1.0,0.9073342365921735,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.08572686255089688,0.0,0.0,0.0,0.8228490355418768,0,0,0,1,0,0,1,0,0,0,0,1 +0.3035714285714286,0.05161219324309453,0.0,1.0,0.0,0.523377616709452,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.16424562561901618,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.6223175965665235,0.0,0.0,0.0,0.3243121077691882,0,0,1,0,0,1,0,0,0,1,0,0 +0.08928571428571427,0.04974138879718279,1.0,1.0,1.0,0.7387979215486421,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.19808517662594918,1.0,1.0,1.0,0.9234069094768464,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.11604489930670188,1.0,1.0,0.0,0.3842577810457014,1,0,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.07059535600308132,0.0,1.0,1.0,0.8301501720360537,0,1,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.21789369428854408,1.0,1.0,0.0,0.4390666360957615,0,0,1,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.19847034224716628,1.0,1.0,1.0,0.2864857137967765,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.06289204357873886,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.08528667326950588,1.0,0.0,1.0,0.8291441697520612,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.1101023440079234,0.0,1.0,1.0,0.5785653349165971,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.03224386486189061,0.0,0.0,0.0,0.49006922670215497,0,1,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.06025090789039286,1.0,1.0,0.0,0.3989479019699039,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.2500825354902608,0.0,1.0,1.0,0.3694096935983138,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.025200836359634642,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.05716958292065588,1.0,0.0,1.0,0.3387603186528358,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.08880818752063387,0.0,1.0,0.0,0.8384025299994181,0,0,1,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.5342797402883239,0.0,1.0,0.0,0.26164945103668286,0,1,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.27797953119841534,1.0,1.0,1.0,0.36651873073537256,0,0,0,0,1,0,0,1,1,0,0,0 +0.03571428571428571,0.08924837680202487,1.0,1.0,1.0,0.48838112309751835,1,0,0,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.12044679212061186,0.0,1.0,0.0,0.28637876253981037,0,1,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.06261692527786948,0.0,1.0,1.0,0.8832307883744815,0,0,1,0,0,1,0,0,0,0,0,1 +0.7857142857142857,0.6558270056124132,1.0,1.0,1.0,0.2839698095777251,1,0,0,0,0,0,0,1,0,1,0,0 +0.24999999999999997,0.13271706833938596,1.0,1.0,1.0,0.9234069094768464,0,0,0,1,0,0,1,0,0,0,0,1 +0.1607142857142857,0.06377242214152085,0.0,1.0,1.0,0.8854921566472022,1,0,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.1866952789699571,0.0,1.0,1.0,0.3691093043872932,0,0,0,0,1,0,1,0,1,0,0,0 +0.7857142857142857,0.358093980411577,0.0,1.0,0.0,0.10812898709422081,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.0880928799383735,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.4107142857142857,0.17420490811048753,1.0,1.0,1.0,0.3128458574189485,0,0,1,0,0,0,1,0,1,0,0,0 +0.4107142857142857,0.2678001540662485,1.0,1.0,1.0,0.8921185284956621,0,0,0,1,0,0,1,0,0,0,0,1 
+0.10714285714285712,0.07411687025420931,1.0,1.0,1.0,0.9187129412749173,0,0,1,0,0,1,0,0,0,0,0,1 +0.03571428571428571,0.06228678331682623,0.0,1.0,1.0,0.895580236194126,0,0,1,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.05397821063057115,1.0,1.0,1.0,0.9319236160108605,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.13101133487399583,0.0,1.0,0.0,0.5382443052921533,0,0,1,0,0,1,0,0,0,1,0,0 +0.4642857142857143,0.10454495433036205,0.0,0.0,0.0,0.27183024502308256,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.1474634092659844,0.0,1.0,1.0,0.6634322478338307,0,0,0,1,0,0,1,0,0,1,0,0 +0.03571428571428571,0.3582590513920986,1.0,1.0,1.0,0.9522216912358249,0,0,0,1,0,0,1,0,0,0,0,1 +0.3035714285714286,0.13728403213381754,1.0,1.0,1.0,0.678114089607729,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.11417409486079015,1.0,1.0,1.0,0.43499565515022004,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.05249257180587652,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,1,0,0,0,0,0,1 +0.24999999999999997,0.04396390447892594,0.0,0.0,1.0,0.2364918904353026,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.11235831407505227,1.0,1.0,0.0,0.9262782795387031,1,0,0,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.06597336854847584,0.0,1.0,1.0,0.9185698239945864,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.39952679652250467,1.0,1.0,0.0,0.36651873073537256,0,0,0,0,1,0,0,1,1,0,0,0 +0.08928571428571427,0.03675580499614835,0.0,1.0,0.0,0.599794302613941,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.1153295917244415,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.07444701221525256,0.0,1.0,1.0,0.28637876253981037,0,0,0,0,1,0,1,0,1,0,0,0 +0.125,0.2011114779355123,1.0,1.0,1.0,0.4708753344299414,0,0,1,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.0482007263123143,0.0,0.0,1.0,0.7904631079537381,0,1,0,0,0,1,0,0,0,0,0,1 +0.3035714285714286,0.14707824364476726,0.0,1.0,1.0,0.4849768887446002,0,0,1,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.1620446792120612,0.0,1.0,1.0,0.599794302613941,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.24573566633652472,1.0,1.0,1.0,0.5706358388636316,0,1,0,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.7871134587872785,1.0,0.0,0.0,0.6577144367789873,1,0,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.2884890502916254,1.0,1.0,1.0,0.21396727904669746,0,0,0,0,1,0,0,1,1,0,0,0 +0.3571428571428571,0.0734565863321228,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.18377902498074172,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.1380543633762518,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.10300429184549356,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.09640145262462858,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,1,0,0,0,1,0,0 +0.3571428571428571,0.09084406294706723,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.06839440959612633,0.0,1.0,1.0,0.8898642220971337,0,1,0,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.052767690106745896,1.0,1.0,0.0,0.7619553600617908,1,0,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.20518322878837902,1.0,1.0,1.0,0.40471109118578136,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.4110817651590184,0.0,1.0,1.0,0.28637876253981037,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.1870804445911742,0.0,0.0,1.0,0.2364918904353026,0,1,0,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.08968856608341587,1.0,1.0,1.0,0.49745855352837404,0,0,0,1,0,0,1,0,1,0,0,0 +0.24999999999999997,0.09304500935402223,1.0,1.0,1.0,0.6156438955835984,0,0,1,0,0,0,1,0,0,1,0,0 
+0.19642857142857142,0.034334763948497854,0.0,0.0,1.0,0.8105188297717469,0,1,0,0,0,0,1,0,0,0,0,1 +0.4107142857142857,0.2718168812589414,1.0,1.0,1.0,0.8704016314829519,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.5627269725982172,1.0,1.0,1.0,0.7682673905629588,0,0,0,0,1,0,0,1,0,0,0,1 +0.3571428571428571,0.22565203037306042,0.0,1.0,1.0,0.4443019547874373,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.06195664135578298,0.0,1.0,1.0,0.8575078279322482,0,0,0,1,0,0,1,0,0,0,0,1 +0.4642857142857143,0.1786068009243975,1.0,1.0,1.0,0.49210970933040776,0,0,1,0,0,0,1,0,0,1,0,0 +0.6428571428571428,0.3162759986794322,1.0,1.0,0.0,0.20892794417015212,0,0,1,0,0,0,1,0,1,0,0,0 +0.4285714285714286,0.20666886761307363,1.0,1.0,0.0,0.30391227130738524,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.1701331572576208,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.13051612193243095,1.0,1.0,1.0,0.6156438955835984,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.11742049081104876,1.0,1.0,0.0,0.47720039923434754,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.07752833718498954,0.0,1.0,0.0,0.28637876253981037,0,1,0,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.1781666116430065,0.0,0.0,1.0,0.7832044401604136,0,0,0,1,0,0,1,0,0,0,0,1 +1.0,0.39440959612633436,1.0,1.0,1.0,0.21615402523359528,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.10938703642566304,0.0,0.0,1.0,0.7569602202607998,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.059590623968306375,1.0,1.0,0.0,0.30992319586753697,1,0,0,0,0,0,0,1,1,0,0,0 +0.3214285714285714,0.05683944095961263,0.0,0.0,1.0,0.7970917912232778,0,0,0,1,0,1,0,0,0,0,0,1 +0.03571428571428571,0.17981732144822274,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.018432926158248045,1.0,0.0,1.0,0.5528336477984951,0,0,1,0,0,1,0,0,0,1,0,0 +0.625,0.25767580059425554,1.0,0.0,0.0,0.3730439023791205,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.056564322658743255,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.08352591614394189,1.0,0.0,1.0,0.8618695563470995,0,0,1,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.17051832287883792,1.0,1.0,0.0,0.3103438400976657,0,1,0,0,0,0,0,1,1,0,0,0 +0.08928571428571427,0.0627819962583911,1.0,1.0,1.0,0.7001676841419655,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.17453505007153078,1.0,1.0,1.0,0.9073342365921735,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.10432485968966655,0.0,0.0,1.0,0.8406172417730042,0,1,0,0,0,1,0,0,0,0,0,1 +0.19642857142857142,0.06988004842082095,1.0,1.0,1.0,0.9173729937673434,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.3627159678661825,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.09909761197314845,1.0,1.0,1.0,0.9073342365921735,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.15109497083746012,1.0,0.0,0.0,0.23658638755348427,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.18763068119291296,0.0,1.0,1.0,0.2488657768966351,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.06558820292725871,0.0,0.0,0.0,0.2287663808190244,0,0,0,1,0,1,0,0,1,0,0,0 +0.08928571428571427,0.04886101023440079,1.0,0.0,1.0,0.36745330533116055,0,0,1,0,0,0,1,0,1,0,0,0 +0.4642857142857143,0.3037856278199626,0.0,0.0,1.0,0.737517899393192,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.12930560140860572,1.0,1.0,0.0,0.3232634558617386,0,0,1,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.19252778694838782,0.0,1.0,1.0,0.501464322292528,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.15456146142841423,1.0,1.0,1.0,0.9393970480266846,0,0,0,1,0,0,1,0,0,0,0,1 
+0.03571428571428571,0.022669747991636405,1.0,1.0,1.0,0.5706358388636316,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.09298998569384835,1.0,1.0,1.0,0.36651873073537256,0,1,0,0,0,0,0,1,1,0,0,0 +0.5714285714285714,0.1140640475404424,1.0,0.0,1.0,0.4028715067363472,0,0,0,1,0,1,0,0,0,1,0,0 +0.7857142857142857,0.6577528337184989,1.0,1.0,1.0,0.3131231838824953,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.28925938153405967,1.0,1.0,1.0,0.8456935742394528,0,0,0,0,1,0,0,1,0,0,0,1 +0.24999999999999997,0.010069329811819083,0.0,0.0,0.0,0.7901372777915644,1,0,0,0,0,1,0,0,0,0,0,1 +0.03571428571428571,0.23995818201826785,0.0,1.0,0.0,0.9121910841333913,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.13821943435677342,0.0,0.0,0.0,0.49006922670215497,0,0,0,0,1,0,1,0,0,1,0,0 +0.4642857142857143,0.2213051612193243,1.0,1.0,1.0,0.8302239413120919,0,0,1,0,0,0,1,0,0,0,0,1 +0.625,0.6333223286012986,1.0,1.0,1.0,0.49778098104998286,0,0,0,1,0,1,0,0,0,1,0,0 +0.5714285714285714,0.10867172884340266,1.0,1.0,0.0,0.39646283586920644,0,0,0,0,1,0,0,1,0,1,0,0 +0.14285714285714285,0.06734896005282272,0.0,1.0,1.0,0.6244595378782976,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06338725652030373,0.0,1.0,1.0,0.9086619979671742,0,0,0,1,0,1,0,0,0,0,0,1 +0.24999999999999997,0.09150434686915374,1.0,1.0,1.0,0.6598740118142182,0,1,0,0,0,0,1,0,0,1,0,0 +0.17857142857142858,0.030373060415978873,1.0,1.0,1.0,0.9052897264392974,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.05617915703752613,1.0,1.0,0.0,0.5758465012701344,0,0,1,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.055463849455265765,0.0,1.0,1.0,0.45368904432201734,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.18240343347639487,1.0,1.0,1.0,0.9264124244522206,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.17255419830527127,1.0,1.0,0.0,0.42657246837101115,0,0,0,0,1,0,0,1,1,0,0,0 +0.14285714285714285,0.1098822493672279,0.0,1.0,1.0,0.5785653349165971,0,0,1,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.30560140860570045,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.03906679872345109,0.0,1.0,1.0,0.8976829391418685,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.044349070100143065,1.0,1.0,0.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.6785714285714286,0.3322878837900297,1.0,1.0,1.0,0.42442143235694413,0,1,0,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.13772422141520854,1.0,1.0,1.0,0.9345335095933344,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.10784637394079453,1.0,0.0,0.0,0.8715443022869169,0,0,1,0,0,1,0,0,0,0,0,1 +0.7857142857142857,0.5528227137669197,1.0,1.0,0.0,0.16857126089732433,0,0,0,1,0,0,0,1,1,0,0,0 +0.5714285714285714,0.7740728513260702,1.0,1.0,0.0,0.39646283586920644,0,0,0,0,1,0,0,1,0,1,0,0 +0.4642857142857143,0.2200396170353252,1.0,1.0,0.0,0.5394438034973912,1,0,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.03736106525806096,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.08928571428571427,0.039011775063277215,0.0,1.0,0.0,0.599794302613941,0,0,1,0,0,0,1,0,0,1,0,0 +0.3035714285714286,0.13931990756025092,0.0,1.0,1.0,0.8721840349758069,0,0,0,1,0,0,1,0,0,0,0,1 +0.7857142857142857,0.44613183668977663,0.0,1.0,1.0,0.26164945103668286,0,1,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.11285352701661715,0.0,0.0,1.0,0.8461497236114719,0,1,0,0,0,1,0,0,0,0,0,1 +0.7857142857142857,0.18245845713656875,1.0,1.0,1.0,0.40471109118578136,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.20446792120611865,0.0,1.0,0.0,0.3403565857713484,0,0,0,0,1,1,0,0,1,0,0,0 +0.03571428571428571,0.05430835259161439,0.0,1.0,1.0,0.895580236194126,0,0,1,0,0,0,1,0,0,0,0,1 
+0.24999999999999997,0.03994717728623308,0.0,0.0,0.0,0.2364918904353026,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.03659073401562672,0.0,1.0,0.0,0.28637876253981037,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.03835149114119071,1.0,1.0,0.0,0.5813781424290714,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.14168592494772753,1.0,1.0,1.0,0.6500940471086876,0,0,0,1,0,0,1,0,0,1,0,0 +0.3035714285714286,0.11213821943435677,0.0,0.0,1.0,0.7688898730617176,0,1,0,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.145867723120942,0.0,1.0,1.0,0.8978106855080499,0,1,0,0,0,1,0,0,0,0,0,1 +0.7321428571428571,0.23803235391218225,1.0,1.0,1.0,0.38456298371763664,1,0,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.3845053372950369,1.0,1.0,1.0,0.7359170768632337,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.13733905579399142,1.0,1.0,0.0,0.27443988356010096,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.08215032463959503,1.0,1.0,1.0,0.6014238410536501,0,0,0,0,1,0,1,0,0,1,0,0 +0.08928571428571427,0.06140640475404424,0.0,0.0,0.0,0.31109338775758766,0,1,0,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.06905469351821283,1.0,1.0,1.0,0.8533241696172105,0,0,1,0,0,0,0,1,0,0,0,1 +0.03571428571428571,0.005117200396170352,0.0,1.0,1.0,0.4603229620470974,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.10856168152305491,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.06850445691647408,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.11428414218113787,0.0,0.0,0.0,0.4681806327003752,0,1,0,0,0,0,1,0,0,0,1,0 +0.7321428571428571,0.6364036535710355,1.0,1.0,0.0,0.20599972812749537,0,0,0,0,1,1,0,0,1,0,0,0 +0.14285714285714285,0.06789919665456146,1.0,0.0,0.0,0.5528336477984951,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.4844833278309673,1.0,1.0,1.0,0.7324169714111652,0,0,1,0,0,0,0,1,0,0,0,1 +0.14285714285714285,0.07730824254429404,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.06619346318917134,0.0,1.0,1.0,0.8627147751385414,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.053813139650049524,0.0,0.0,0.0,0.24744012641724394,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.037526136238582586,0.0,1.0,1.0,0.7170669202424681,0,0,0,1,0,0,1,0,0,1,0,0 +0.5714285714285714,0.4311654011224827,1.0,1.0,0.0,0.47720039923434754,0,0,0,0,1,0,1,0,0,1,0,0 +0.03571428571428571,0.06030593155056674,1.0,1.0,1.0,0.9209417015586914,0,0,0,0,1,0,0,1,0,0,0,1 +0.5714285714285714,0.1437218003741609,1.0,1.0,1.0,0.39646283586920644,0,0,0,0,1,0,0,1,0,1,0,0 +0.3571428571428571,0.15808297567954221,0.0,1.0,0.0,0.28637876253981037,0,1,0,0,0,0,1,0,1,0,0,0 +1.0,0.3938593595245956,0.0,0.0,0.0,0.1140139540448384,0,1,0,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.055793991416309016,1.0,0.0,0.0,0.5216017218323117,0,0,1,0,0,1,0,0,0,1,0,0 +0.03571428571428571,0.07175085286673269,0.0,0.0,1.0,0.8847736592891744,0,0,0,1,0,1,0,0,0,0,0,1 +0.3035714285714286,0.18719049191152196,1.0,1.0,1.0,0.678114089607729,0,0,0,1,0,0,1,0,0,1,0,0 +0.7857142857142857,0.20254209310003302,1.0,1.0,0.0,0.16857126089732433,0,0,0,1,0,0,0,1,1,0,0,0 +0.03571428571428571,0.061131286453174866,1.0,1.0,1.0,0.5706358388636316,0,1,0,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.052437548145702645,1.0,1.0,1.0,0.5706358388636316,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.18031253438978762,1.0,1.0,1.0,0.9264124244522206,0,1,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.4184549356223176,0.0,0.0,0.0,0.594377341023918,0,0,1,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.17095851216022892,0.0,1.0,1.0,0.31273337858185585,0,1,0,0,0,0,1,0,1,0,0,0 
+0.24999999999999997,0.10179377132166832,1.0,1.0,0.0,0.624582242671011,0,0,1,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.0505667436997909,1.0,1.0,1.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.47854077253218885,1.0,1.0,1.0,0.8501233211422209,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.09276989105315285,1.0,0.0,1.0,0.3727423047912524,0,0,0,1,0,1,0,0,1,0,0,0 +0.3571428571428571,0.1794871794871795,1.0,1.0,1.0,0.6500940471086876,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.3695939253879168,1.0,1.0,1.0,0.6500940471086876,0,0,0,1,0,1,0,0,0,1,0,0 +0.4642857142857143,0.11461428414218112,1.0,1.0,1.0,0.85555465817234,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.09843732805106195,0.0,0.0,0.0,0.20364878828289232,0,0,1,0,0,1,0,0,1,0,0,0 +0.5714285714285714,0.49636843842852424,1.0,0.0,0.0,0.2184939341622545,0,0,0,1,0,0,1,0,1,0,0,0 +0.3035714285714286,0.07274127874986244,0.0,1.0,1.0,0.8471418368374785,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.011224826675470454,1.0,0.0,1.0,0.6581569155182169,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.33311323869263787,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.10713106635853417,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.7857142857142857,0.40601958842302194,1.0,1.0,1.0,0.7359170768632337,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.05639925167822163,0.0,0.0,1.0,0.5204868269574562,0,1,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.37795752173434577,1.0,1.0,0.0,0.11967916113567616,0,0,1,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.6783867062837019,0.0,1.0,0.0,0.4107052500869476,0,0,0,0,1,0,0,1,0,1,0,0 +0.24999999999999997,0.1391548365797293,1.0,0.0,1.0,0.4901999941597845,0,0,1,0,0,1,0,0,0,1,0,0 +0.0,0.06894464619786508,1.0,1.0,1.0,0.9558919804312949,0,0,0,1,0,0,1,0,0,0,0,1 +0.07142857142857142,0.05430835259161439,0.0,0.0,0.0,0.4840535155941271,0,0,1,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.20160669087707717,1.0,1.0,0.0,0.6970354972308322,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.15472653240893586,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,1,0,0,0,0,0,1 +0.5714285714285714,0.09370529327610873,1.0,1.0,0.0,0.39646283586920644,0,0,0,0,1,0,0,1,0,1,0,0 +0.4642857142857143,0.19709475074281943,1.0,0.0,1.0,0.4133255669397803,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.0708704743039507,0.0,1.0,1.0,0.9121910841333913,0,1,0,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.10223396060305931,1.0,1.0,1.0,0.9522216912358249,0,0,0,1,0,1,0,0,0,0,0,1 +0.03571428571428571,0.00979421151094971,0.0,1.0,1.0,0.4603229620470974,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.2313744910311434,1.0,1.0,0.0,0.4297441010936714,0,0,1,0,0,0,1,0,0,1,0,0 +0.3035714285714286,0.1138439528997469,1.0,1.0,0.0,0.6399187311966064,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.05023660173874765,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.037746230879278087,1.0,1.0,1.0,0.9345335095933344,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.12231759656652359,1.0,0.0,0.0,0.3250877505238663,1,0,0,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.07257620776934082,1.0,1.0,1.0,0.9173729937673434,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.1522504677011115,0.0,1.0,1.0,0.6155200677110325,0,1,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.11450423682183337,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.10157367668097282,1.0,1.0,1.0,0.9393970480266846,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.04330362055683944,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,0,1,0,0,1,0,0 
+0.03571428571428571,0.05425332893144051,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.21464729833828547,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.18482447452404535,0.0,1.0,1.0,0.5962182584447656,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.06393749312204247,0.0,1.0,1.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1 +0.125,0.20298228238142402,1.0,1.0,1.0,0.4708753344299414,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.0702101903818642,1.0,1.0,1.0,0.42657246837101115,0,0,0,0,1,0,0,1,1,0,0,0 +0.3214285714285714,0.1334323759216463,1.0,1.0,1.0,0.8922523883382062,0,0,0,0,1,0,1,0,0,0,0,1 +0.4642857142857143,0.2184989545504567,0.0,1.0,1.0,0.383421164823506,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.14713326730494114,1.0,1.0,1.0,0.34047407781017125,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.08622207549246176,0.0,1.0,1.0,0.8384025299994181,0,0,1,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.0536480686695279,1.0,1.0,1.0,0.6993390090705507,0,0,1,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.08776273797733025,0.0,1.0,1.0,0.3691093043872932,0,1,0,0,0,1,0,0,1,0,0,0 +0.03571428571428571,0.0,0.0,1.0,1.0,0.895580236194126,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.022559700671288655,1.0,0.0,1.0,0.9228647175807431,0,0,0,1,0,1,0,0,0,0,0,1 +0.03571428571428571,0.025255860019808517,1.0,1.0,1.0,0.7691184526170824,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.8587542643336634,1.0,1.0,1.0,0.2740485661497681,1,0,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.08732254869593925,0.0,1.0,0.0,0.46182459874957915,0,0,0,1,0,0,0,1,0,1,0,0 +0.1607142857142857,0.08512160228898426,1.0,1.0,1.0,0.4978064484847755,0,1,0,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.010894684714427203,0.0,0.0,0.0,0.3386431143464047,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.16303510509519095,1.0,0.0,1.0,0.5380335258497126,0,1,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.12815010454495435,1.0,1.0,0.0,0.5082778240326536,0,1,0,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.05216242984483327,0.0,1.0,0.0,0.3803633753089816,0,0,0,0,1,0,0,1,1,0,0,0 +0.3035714285714286,0.26152745680642675,0.0,1.0,0.0,0.8206450815998698,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.11461428414218112,1.0,1.0,1.0,0.6014238410536501,0,1,0,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.055683944095961266,1.0,1.0,1.0,0.9318352763505735,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.049796412457356665,1.0,1.0,1.0,0.7138311294496358,0,0,0,0,1,0,1,0,0,1,0,0 +0.10714285714285712,0.03543523715197534,0.0,1.0,1.0,0.9153811453083084,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.16952789699570817,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.04748541873005392,0.0,1.0,1.0,0.5069022744695886,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.02580609662154727,1.0,1.0,0.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.09981291955540883,0.0,1.0,0.0,0.49148753840738346,1,0,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.18741058655221746,1.0,1.0,1.0,0.5493424244597425,0,0,0,0,1,0,1,0,0,0,1,0 +0.3571428571428571,0.3502255970067129,1.0,1.0,1.0,0.30992319586753697,1,0,0,0,0,0,0,1,1,0,0,0 +0.24999999999999997,0.16391548365797293,1.0,1.0,1.0,0.9071687984489271,1,0,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.17018818091779467,1.0,1.0,1.0,0.868365131914781,0,0,1,0,0,0,0,1,0,0,0,1 +0.4642857142857143,0.1572576207769341,1.0,1.0,0.0,0.32719918796985015,0,1,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.011444921316165951,1.0,0.0,1.0,0.5836541255891218,0,0,1,0,0,0,1,0,0,1,0,0 
+0.19642857142857142,0.06718388907230109,1.0,1.0,1.0,0.868365131914781,0,0,1,0,0,0,0,1,0,0,0,1 +0.125,0.05898536370639375,0.0,1.0,1.0,0.5795267454297757,0,0,1,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.057114559260482006,1.0,1.0,1.0,0.5396118319655506,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.025145812699460767,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.058215032463959496,1.0,1.0,1.0,0.6874873703107746,0,0,0,0,1,0,1,0,0,1,0,0 +0.08928571428571427,0.15538681633102236,1.0,1.0,1.0,0.921786795969523,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.12440849565313085,0.0,0.0,1.0,0.259549729145002,1,0,0,0,0,1,0,0,1,0,0,0 +0.03571428571428571,0.0819852536590734,1.0,1.0,1.0,0.9418146463783325,0,0,0,0,1,1,0,0,0,0,0,1 +0.14285714285714285,0.04803565533179267,0.0,1.0,0.0,0.8696334095426655,0,0,1,0,0,1,0,0,0,0,0,1 +0.125,0.07301639705073182,0.0,0.0,1.0,0.5005400663825238,0,1,0,0,0,0,1,0,0,1,0,0 +0.7321428571428571,0.08776273797733025,1.0,0.0,0.0,0.08470200286988733,0,0,1,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.09783206778914934,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.4107142857142857,0.4439308902828216,0.0,1.0,0.0,0.38066739707842107,0,0,0,0,1,0,0,1,0,1,0,0 +0.14285714285714285,0.05749972488169913,0.0,0.0,0.0,0.49006922670215497,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.6431165401122483,1.0,1.0,0.0,0.5547218618582729,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.08633212281280951,1.0,1.0,0.0,0.7324169714111652,0,0,1,0,0,0,0,1,0,0,0,1 +0.19642857142857142,0.0675690546935182,1.0,1.0,1.0,0.4768786443351435,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.06965995378012545,0.0,1.0,1.0,0.8013985500332685,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.1037746230879278,1.0,0.0,1.0,0.36745330533116055,0,0,1,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.19599427753934193,0.0,0.0,1.0,0.8105188297717469,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.21541762958071972,1.0,1.0,0.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.20947507428194126,1.0,1.0,0.0,0.6500940471086876,0,0,0,1,0,0,1,0,0,1,0,0 +0.7857142857142857,0.543468691537361,1.0,1.0,0.0,0.6234592356361524,0,0,1,0,0,0,0,1,0,0,0,1 +0.03571428571428571,0.012875536480686695,1.0,1.0,1.0,0.7979365203848136,0,0,0,1,0,0,1,0,0,1,0,0 +1.0,0.33223286012985587,1.0,1.0,0.0,0.16559346885680104,0,0,1,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.36436667767139874,1.0,1.0,0.0,0.38472306870888867,0,1,0,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.07054033234290745,0.0,1.0,1.0,0.8547131179732448,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.10366457576758006,1.0,1.0,1.0,0.921786795969523,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.04715527676901067,1.0,0.0,1.0,0.3387603186528358,0,0,1,0,0,1,0,0,1,0,0,0 +0.2857142857142857,0.10795642126114229,1.0,1.0,1.0,0.47653149674170675,0,0,0,1,0,0,1,0,1,0,0,0 +0.19642857142857142,0.16154946627049632,0.0,0.0,1.0,0.8404307054775538,0,0,0,1,0,1,0,0,0,0,0,1 +0.7678571428571428,0.5762077693408165,0.0,1.0,1.0,0.13279434507188115,0,1,0,0,0,0,1,0,1,0,0,0 +0.053571428571428575,0.02641135688345989,1.0,1.0,1.0,0.9394762243447705,0,0,0,0,1,1,0,0,0,0,0,1 +0.14285714285714285,0.10030813249697369,0.0,1.0,1.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.0176625949158138,1.0,1.0,1.0,0.4148665750517137,0,0,0,0,1,0,1,0,1,0,0,0 +0.19642857142857142,0.11609992296687575,1.0,1.0,1.0,0.6535565228181367,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06019588423021899,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 
+0.6785714285714286,0.3809838230439089,0.0,1.0,0.0,0.18862334019529733,0,0,0,1,0,0,1,0,1,0,0,0 +0.6785714285714286,0.2501375591504347,1.0,1.0,1.0,0.3466918762732373,0,0,0,0,1,0,0,1,0,0,1,0 +0.6785714285714286,0.20441289754594477,1.0,1.0,0.0,0.22731351394237334,0,1,0,0,0,0,1,0,1,0,0,0 +0.125,0.38395510069329813,1.0,1.0,1.0,0.915529351477635,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.05315285572796302,1.0,1.0,0.0,0.6014238410536501,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.13744910311433917,0.0,1.0,1.0,0.8532586635277519,0,0,0,0,1,0,0,1,0,0,0,1 +0.6785714285714286,0.49702872235061074,1.0,1.0,1.0,0.33770845427133345,1,0,0,0,0,0,0,1,0,1,0,0 +0.4107142857142857,0.23528117090348852,1.0,1.0,1.0,0.8704016314829519,0,1,0,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.2750082535490261,1.0,1.0,1.0,0.8769881149919502,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.16127434796962695,0.0,1.0,1.0,0.8301501720360537,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.02569604930119952,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.055683944095961266,1.0,0.0,1.0,0.8618695563470995,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.08990866072411137,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.09210960713106636,0.0,1.0,0.0,0.5545926067069147,0,1,0,0,0,1,0,0,0,1,0,0 +0.7857142857142857,0.39760096841641906,1.0,1.0,1.0,0.16857126089732433,0,0,0,1,0,0,0,1,1,0,0,0 +0.14285714285714285,0.03791130185979971,1.0,1.0,0.0,0.7613436169716516,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.03251898316275999,0.0,0.0,1.0,0.5419816737959281,0,0,0,1,0,1,0,0,0,1,0,0 +0.3571428571428571,0.21255639925167824,0.0,1.0,0.0,0.491978924064202,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.2697259821723341,1.0,0.0,1.0,0.5367070201327678,0,0,0,1,0,0,1,0,0,0,1,0 +0.5178571428571429,0.22180037416088919,0.0,0.0,0.0,0.12004837083863333,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.07246616044899307,0.0,0.0,1.0,0.44241615651549826,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.004897105755474855,1.0,1.0,1.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0 +0.19642857142857142,0.05639925167822163,0.0,0.0,0.0,0.2247881972110395,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.021404203807637284,1.0,1.0,0.0,0.6731420559668704,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.1127434796962694,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.086552217453505,1.0,1.0,0.0,0.3842577810457014,1,0,0,0,0,0,1,0,1,0,0,0 +0.7857142857142857,0.41339275888632115,1.0,1.0,0.0,0.14138446961748627,0,0,0,0,1,0,0,1,1,0,0,0 +0.03571428571428571,0.07400682293386156,1.0,1.0,1.0,0.9522216912358249,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.007428194123473095,0.0,1.0,1.0,0.44959115198776406,0,0,0,1,0,0,1,0,1,0,0,0 +0.14285714285714285,0.028942445251458126,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.15511169803015298,1.0,1.0,1.0,0.520597760213557,0,0,0,0,1,0,0,1,0,1,0,0 +0.08928571428571427,0.06129635743369649,1.0,1.0,1.0,0.590693107733845,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.0534279740288324,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 +0.07142857142857142,0.17321448222735777,1.0,1.0,1.0,0.6007809114857595,0,0,0,1,0,0,1,0,1,0,0,0 +0.24999999999999997,0.21475734565863322,1.0,1.0,0.0,0.3989479019699039,0,0,1,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.08407615274568064,0.0,1.0,0.0,0.5853827741356347,0,1,0,0,0,1,0,0,0,1,0,0 +0.08928571428571427,0.05425332893144051,0.0,0.0,1.0,0.8461497236114719,0,1,0,0,0,1,0,0,0,0,0,1 
+0.03571428571428571,0.08341586882359414,1.0,0.0,1.0,0.8891646790727171,0,0,1,0,0,1,0,0,0,0,0,1 +0.4642857142857143,0.43039506988004844,1.0,0.0,1.0,0.22597418414948542,0,1,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.268460437988335,0.0,1.0,0.0,0.5664245567595892,0,0,0,0,1,0,0,1,0,1,0,0 +0.3571428571428571,0.4620886981402003,1.0,1.0,0.0,0.8839295784323556,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.21563772422141522,1.0,1.0,1.0,0.34047407781017125,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.09271486739297898,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.11813579839330911,1.0,1.0,0.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.06652360515021459,0.0,1.0,1.0,0.8547131179732448,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.023935292175635527,1.0,0.0,0.0,0.6483538884899454,0,0,0,1,0,0,1,0,0,1,0,0 +0.7857142857142857,0.2396280400572246,1.0,0.0,0.0,0.08995843040536927,0,0,0,0,1,0,0,1,1,0,0,0 +0.125,0.10410476504897105,1.0,1.0,1.0,0.9292175567150299,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.03851656212171234,1.0,1.0,0.0,0.4768786443351435,0,0,0,0,1,1,0,0,1,0,0,0 +0.21428571428571427,0.050896885660834154,1.0,1.0,1.0,0.602425169638959,1,0,0,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.06866952789699571,0.0,0.0,1.0,0.24744012641724394,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.09541102674149884,1.0,1.0,1.0,0.8533241696172105,0,0,1,0,0,0,0,1,0,0,0,1 +0.5714285714285714,0.19555408825795093,0.0,1.0,0.0,0.3259888088958532,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.09557609772202047,1.0,0.0,0.0,0.23658638755348427,0,0,1,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.21398701441619897,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.024815670738417523,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.8928571428571428,0.5054473423572136,1.0,1.0,1.0,0.6415019923339554,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.16176956091119182,0.0,0.0,0.0,0.20364878828289232,0,0,1,0,0,0,1,0,1,0,0,0 +0.1607142857142857,0.034774953229888855,1.0,0.0,1.0,0.5894933892681636,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.13810938703642567,1.0,1.0,1.0,0.520597760213557,0,0,0,0,1,0,0,1,0,1,0,0 +0.19642857142857142,0.10845163420270716,0.0,0.0,1.0,0.7793274409005319,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.020688896225376913,0.0,0.0,0.0,0.24744012641724394,0,0,1,0,0,0,1,0,1,0,0,0 +0.7857142857142857,1.0,0.0,1.0,0.0,0.22634832395437568,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.0532078793881369,1.0,1.0,0.0,0.3989479019699039,0,0,1,0,0,0,1,0,1,0,0,0 +0.053571428571428575,0.12798503356443272,1.0,1.0,1.0,0.7174552542410282,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.13189171343677783,1.0,1.0,1.0,0.5231864157229879,0,0,1,0,0,1,0,0,1,0,0,0 +0.4107142857142857,0.12765489160338947,0.0,0.0,0.0,0.6811557639743221,0,0,1,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.1594585671838891,1.0,1.0,1.0,0.5640737884995887,0,0,1,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.022889842632331906,0.0,1.0,1.0,0.5122497385912862,0,0,0,1,0,0,1,0,1,0,0,0 +0.14285714285714285,0.051447122262572906,0.0,1.0,1.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1 +0.4642857142857143,0.3138549576317817,1.0,1.0,1.0,0.8794147982207655,0,0,0,1,0,0,1,0,0,0,0,1 +0.7321428571428571,0.24738637614174097,1.0,0.0,0.0,0.2731769653496512,0,1,0,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.0529877847474414,1.0,1.0,1.0,0.9173729937673434,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.09067899196654561,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 
+0.7857142857142857,0.6239683063717398,0.0,0.0,0.0,0.46989347577510143,0,0,1,0,0,1,0,0,0,0,0,1 +0.0,0.01931330472103004,0.0,0.0,1.0,0.8714857078947565,0,1,0,0,0,1,0,0,0,0,0,1 +0.24999999999999997,0.05474854187300539,1.0,1.0,0.0,0.6156438955835984,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.30218994167492025,1.0,1.0,1.0,0.8501233211422209,0,0,0,1,0,0,1,0,0,0,0,1 +0.17857142857142858,0.06382744580169472,1.0,1.0,1.0,0.6964156448050416,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.035765379113018594,1.0,0.0,0.0,0.3387603186528358,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.0061626499394739735,0.0,1.0,1.0,0.895580236194126,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.09513590844062947,1.0,1.0,1.0,0.9173729937673434,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.17051832287883792,0.0,1.0,0.0,0.709384236480772,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.030097942115109497,0.0,1.0,0.0,0.8898642220971337,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.014746340926598437,0.0,1.0,1.0,0.895580236194126,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.1129635743369649,1.0,1.0,0.0,0.38472306870888867,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.1354682513480797,1.0,1.0,0.0,0.27443988356010096,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.06734896005282272,1.0,1.0,1.0,0.8839295784323556,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.08638714647298339,1.0,1.0,1.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09227467811158797,0.0,1.0,1.0,0.8013985500332685,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.06509298998569385,0.0,0.0,1.0,0.2247881972110395,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.03895675140310334,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,0,1,0,0,1,0,0 +0.5714285714285714,0.19725982172334106,0.0,1.0,1.0,0.7472556622006505,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.028227137669197756,0.0,1.0,1.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.15010454495433037,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.05727963024100363,0.0,1.0,1.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.0233300319137229,1.0,0.0,0.0,0.43311856998291953,0,0,0,1,0,0,1,0,1,0,0,0 +1.0,0.3453835149114119,1.0,1.0,1.0,0.5003985149708152,0,0,1,0,0,0,0,1,0,0,0,1 +0.14285714285714285,0.03994717728623308,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.20353251898316277,1.0,1.0,1.0,0.7380360665955924,0,1,0,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.043908880818752064,1.0,0.0,1.0,0.5734408207763663,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.13315725762077696,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.4642857142857143,0.23841751953339935,1.0,1.0,1.0,0.2864857137967765,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.11692527786948388,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.056784417299438755,0.0,0.0,0.0,0.24744012641724394,0,0,1,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.19296797622977882,0.0,1.0,1.0,0.7786348012503493,0,0,0,0,1,0,0,1,0,0,0,1 +0.3571428571428571,0.2195994277539342,1.0,1.0,0.0,0.5547218618582729,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.3482447452404534,1.0,1.0,1.0,0.30992319586753697,1,0,0,0,0,0,0,1,1,0,0,0 +0.14285714285714285,0.10294926818531969,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.10570045119401342,0.0,1.0,1.0,0.8898642220971337,0,1,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.18399911962143722,0.0,1.0,1.0,0.8769316531253295,0,1,0,0,0,0,1,0,0,0,0,1 
+0.5714285714285714,0.24408495653130846,1.0,1.0,1.0,0.7324169714111652,0,0,1,0,0,0,0,1,0,0,0,1 +0.19642857142857142,0.2424892703862661,1.0,1.0,1.0,0.9016367060401769,0,0,1,0,0,0,1,0,0,0,0,1 +0.17857142857142858,0.48024650599757895,1.0,1.0,0.0,0.4873369967986317,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.04429404643996919,0.0,1.0,1.0,0.8627147751385414,0,1,0,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.15412127214702323,1.0,1.0,0.0,0.15889026804756212,0,0,1,0,0,0,1,0,1,0,0,0 +0.4642857142857143,0.17558049961483438,0.0,0.0,0.0,0.27183024502308256,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.26961593485198637,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.09425552987784747,1.0,1.0,1.0,0.9393970480266846,0,0,0,1,0,1,0,0,0,0,0,1 +0.24999999999999997,0.18377902498074172,1.0,1.0,1.0,0.6594326055741871,1,0,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.3076923076923077,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.06707384175195334,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,1,0,0,0,0,0,1 +0.3571428571428571,0.12947067238912735,0.0,1.0,1.0,0.8013985500332685,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.18713546825134808,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.15951359084406297,1.0,0.0,1.0,0.2126849403417187,0,1,0,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.21239132827115662,1.0,0.0,0.0,0.2729209608367981,0,0,0,0,1,1,0,0,1,0,0,0 +0.08928571428571427,0.06531308462638935,1.0,1.0,0.0,0.7769155728885463,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.062011665015956854,0.0,1.0,1.0,0.419918901878422,0,0,0,0,1,0,0,1,0,0,1,0 +0.08928571428571427,0.06448772972378122,1.0,1.0,0.0,0.45755650252851443,0,1,0,0,0,0,0,1,1,0,0,0 +0.4642857142857143,0.18553978210630573,0.0,1.0,1.0,0.23787440014014885,0,0,0,0,1,1,0,0,1,0,0,0 +0.3571428571428571,0.17029822823814242,1.0,1.0,0.0,0.38472306870888867,0,0,0,0,1,1,0,0,1,0,0,0 +0.14285714285714285,0.020633872565203038,1.0,1.0,1.0,0.7138311294496358,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.040332342907450205,0.0,0.0,1.0,0.49006922670215497,0,1,0,0,0,1,0,0,0,1,0,0 +0.4642857142857143,0.15081985253659075,0.0,0.0,1.0,0.31136986465762495,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.049466270496313414,1.0,1.0,1.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.001430615164520744,1.0,0.0,1.0,0.5836541255891218,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.06168152305491362,0.0,0.0,0.0,0.16589905516842904,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.029987894794761747,0.0,1.0,0.0,0.39882243364250436,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.056784417299438755,0.0,1.0,0.0,0.2488657768966351,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.0064927919005172245,1.0,1.0,1.0,0.7623117828116139,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.055463849455265765,1.0,0.0,1.0,0.8491471130930849,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.10652580609662154,0.0,1.0,1.0,0.8976829391418685,0,0,0,1,0,1,0,0,0,0,0,1 +0.3571428571428571,0.14223616154946628,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.18609001870804448,0.0,0.0,1.0,0.16589905516842904,0,0,1,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.05838010344448112,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.11543963904478925,1.0,1.0,1.0,0.23796927750170668,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.010069329811819083,0.0,0.0,0.0,0.5527042865014291,0,1,0,0,0,1,0,0,0,1,0,0 +0.19642857142857142,0.20408275558490152,0.0,1.0,0.0,0.3257054908520209,0,0,1,0,0,0,1,0,1,0,0,0 
+0.19642857142857142,0.13101133487399583,0.0,0.0,1.0,0.41167927397905774,0,0,1,0,0,0,1,0,0,1,0,0
+0.14285714285714285,0.12077693408165512,1.0,1.0,1.0,0.9264124244522206,0,1,0,0,0,1,0,0,0,0,0,1
+0.10714285714285712,0.04335864421701331,1.0,1.0,1.0,0.5805283757278361,0,0,0,1,0,0,1,0,1,0,0,0
+0.7857142857142857,0.473643666776714,1.0,1.0,1.0,0.7117589057670359,0,0,0,1,0,0,0,1,0,0,0,1
+0.3571428571428571,0.05639925167822163,1.0,1.0,1.0,0.5640737884995887,0,0,1,0,0,0,1,0,0,0,1,0
+0.3571428571428571,0.3476394849785408,1.0,0.0,1.0,0.23658638755348427,0,0,1,0,0,0,1,0,1,0,0,0
+0.3571428571428571,0.06019588423021899,1.0,1.0,0.0,0.6586754162607802,0,0,0,1,0,0,1,0,0,0,1,0
+0.4107142857142857,0.3144602178936943,1.0,1.0,1.0,0.5709518098205263,0,0,0,0,1,0,1,0,0,1,0,0
+0.14285714285714285,0.020248706943985912,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1
+0.10714285714285712,0.026246285902938263,1.0,1.0,1.0,0.9078496135630285,0,0,0,0,1,0,0,1,0,0,0,1
+0.19642857142857142,0.06569825024760646,1.0,0.0,1.0,0.5690789555483984,0,1,0,0,0,0,1,0,0,1,0,0
+0.5714285714285714,0.18542973478595798,1.0,1.0,1.0,0.23796927750170668,0,0,1,0,0,0,1,0,1,0,0,0
+0.10714285714285712,0.10955210740618465,1.0,1.0,1.0,0.5291896437274354,0,1,0,0,0,1,0,0,1,0,0,0
+1.0,0.799603829646748,0.0,1.0,0.0,0.13365287548774402,0,0,0,0,1,0,0,1,0,1,0,0
+0.3571428571428571,0.09750192582810609,0.0,1.0,1.0,0.8013985500332685,0,0,1,0,0,0,1,0,0,0,0,1
+0.14285714285714285,0.019753494002421042,0.0,1.0,0.0,0.6244595378782976,0,1,0,0,0,0,1,0,0,0,1,0
+0.3571428571428571,0.15247056234180698,1.0,0.0,1.0,0.23658638755348427,0,0,1,0,0,1,0,0,1,0,0,0
+0.24999999999999997,0.2272477165181028,1.0,1.0,1.0,0.3989479019699039,0,0,1,0,0,0,1,0,1,0,0,0
+0.10714285714285712,0.05827005612413337,1.0,1.0,0.0,0.9187129412749173,0,0,1,0,0,0,1,0,0,0,0,1
+0.3571428571428571,0.14185099592824915,1.0,0.0,1.0,0.23658638755348427,0,0,1,0,0,0,1,0,1,0,0,0
+0.24999999999999997,0.08501155496863651,0.0,1.0,1.0,0.5545926067069147,0,0,0,0,1,1,0,0,0,1,0,0
+0.24999999999999997,0.07059535600308132,1.0,1.0,0.0,0.9073342365921735,0,1,0,0,0,0,1,0,0,0,0,1
+0.3571428571428571,0.26702982282381427,1.0,1.0,1.0,0.8456935742394528,0,1,0,0,0,0,0,1,0,0,0,1
+0.4642857142857143,0.5707604269836029,1.0,1.0,1.0,0.2592550937927965,0,0,0,0,1,0,0,1,1,0,0,0
+0.14285714285714285,0.06250687795752173,0.0,1.0,0.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1
+0.3571428571428571,0.23885770881479035,1.0,1.0,0.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1
+0.14285714285714285,0.17326950588753165,1.0,1.0,1.0,0.7215189623993398,0,0,0,0,1,0,1,0,0,0,1,0
+0.03571428571428571,0.04385385715857819,0.0,1.0,1.0,0.6383519380348411,0,0,1,0,0,0,1,0,0,0,1,0
+0.7857142857142857,0.4091009133927589,0.0,1.0,0.0,0.15312328184273788,0,0,0,1,0,1,0,0,1,0,0,0
diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR-val.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR-val.csv
new file mode 100644
index 0000000..4856718
--- /dev/null
+++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR-val.csv
@@ -0,0 +1,301 @@
+month,credit_amount,sex,age,credit,pred_credit,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153,status=A11,status=A12,status=A13,status=A14
+0.24999999999999997,0.07169582920655881,1.0,1.0,0.0,0.3989479019699039,0,0,1,0,0,0,1,0,1,0,0,0
+0.19642857142857142,0.06107626279300099,1.0,1.0,1.0,0.9016367060401769,0,0,1,0,0,0,1,0,0,0,0,1
+0.07142857142857142,0.02806206668867613,0.0,1.0,1.0,0.6997655328413775,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.14575767580059426,1.0,1.0,1.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.7321428571428571,0.15302079894354573,1.0,0.0,0.0,0.23681941072883386,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.24551557169582922,1.0,1.0,1.0,0.5547218618582729,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.090073731704633,1.0,1.0,1.0,0.6156438955835984,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.11131286453174864,1.0,1.0,1.0,0.5291642625196091,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.035105095190932106,1.0,1.0,0.0,0.7138311294496358,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.07538241443820842,0.0,1.0,1.0,0.3538845096386129,0,0,1,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.1352481567073842,1.0,1.0,1.0,0.9173729937673434,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.03389457466710685,1.0,0.0,1.0,0.4901999941597845,0,0,1,0,0,0,1,0,0,1,0,0 +0.07142857142857142,0.06404754044239022,1.0,1.0,1.0,0.7088870911194225,0,0,1,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.05849015076482887,1.0,0.0,1.0,0.8955009029182879,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.12809508088478047,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.1825685044569165,0.0,1.0,1.0,0.8769316531253295,0,0,0,0,1,1,0,0,0,0,0,1 +0.3571428571428571,0.06289204357873886,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.10267414988445031,1.0,1.0,1.0,0.7333540005028889,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.09695168922636734,0.0,1.0,1.0,0.9086619979671742,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09298998569384835,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.05904038736656762,1.0,1.0,1.0,0.7691184526170824,0,0,0,0,1,0,1,0,0,0,1,0 +0.5714285714285714,0.09502586112028172,1.0,1.0,0.0,0.27443988356010096,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.047210300429184546,1.0,1.0,0.0,0.5600008807692614,0,0,0,1,0,0,1,0,1,0,0,0 +0.19642857142857142,0.05777484318256851,1.0,1.0,1.0,0.8887675866521803,0,0,0,0,1,0,0,1,0,0,0,1 +0.03571428571428571,0.0505667436997909,1.0,1.0,1.0,0.5706358388636316,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.16501595686145043,1.0,1.0,1.0,0.2510304635302003,0,0,0,1,0,0,0,1,1,0,0,0 +0.625,0.1275998679432156,1.0,0.0,1.0,0.6681686332576849,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.07147573456586331,1.0,1.0,1.0,0.7387979215486421,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.024760647078243648,1.0,1.0,1.0,0.9209417015586914,0,0,0,0,1,0,0,1,0,0,0,1 +0.14285714285714285,0.32067789149334214,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,0,1,0,0,1,0,0 +0.08928571428571427,0.05260261912622427,0.0,0.0,1.0,0.5214711461502933,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.060801144492131615,0.0,0.0,0.0,0.3676314741249905,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.15010454495433037,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.07015516672169032,0.0,1.0,1.0,0.8575078279322482,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.037526136238582586,0.0,0.0,1.0,0.8373645845436015,0,0,1,0,0,0,1,0,0,0,0,1 +0.4107142857142857,0.12534389787608674,0.0,1.0,1.0,0.46064338284484674,0,1,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.15758776273797734,0.0,1.0,0.0,0.3403565857713484,0,1,0,0,0,1,0,0,1,0,0,0 +0.4642857142857143,0.09122922856828436,1.0,1.0,0.0,0.5493424244597425,0,0,0,0,1,0,1,0,0,0,1,0 +0.14285714285714285,0.05447342357213601,0.0,1.0,1.0,0.8898642220971337,0,0,0,0,1,0,1,0,0,0,0,1 
+0.14285714285714285,0.02569604930119952,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.0695499064597777,1.0,0.0,1.0,0.5216017218323117,0,0,1,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.08688235941454825,1.0,1.0,1.0,0.9173729937673434,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.02145922746781116,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.39517992736876856,0.0,1.0,1.0,0.3259888088958532,0,0,1,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.0710905689446462,1.0,1.0,1.0,0.9393970480266846,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.5160669087707714,1.0,0.0,0.0,0.2184939341622545,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.00968416419060196,1.0,1.0,1.0,0.9418146463783325,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.24061846594035435,1.0,1.0,0.0,0.9016367060401769,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.45234951028942444,0.0,0.0,1.0,0.20364878828289232,0,0,1,0,0,1,0,0,1,0,0,0 +0.5714285714285714,0.1904368878617806,1.0,1.0,1.0,0.4297441010936714,0,0,1,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.6877407285132606,1.0,1.0,1.0,0.7743267172453298,0,0,0,1,0,0,1,0,0,0,0,1 +0.6785714285714286,0.4199405744470122,1.0,1.0,1.0,0.20677830560523824,0,0,0,1,0,0,0,1,1,0,0,0 +1.0,0.5444040937603168,0.0,0.0,1.0,0.4443440748703878,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.05480356553317926,1.0,0.0,0.0,0.4752943973393717,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.1840541432816111,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.18526466380543635,1.0,1.0,1.0,0.7138311294496358,0,0,0,0,1,1,0,0,0,1,0,0 +0.14285714285714285,0.034995047870584356,0.0,0.0,1.0,0.8001795737339381,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.07131066358534169,1.0,0.0,0.0,0.3160895023001086,0,0,0,1,0,1,0,0,1,0,0,0 +0.14285714285714285,0.10570045119401342,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.13002090899086607,1.0,1.0,1.0,0.7918122026495104,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.04682513480796743,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.1513700891383295,0.0,1.0,1.0,0.6052277358503451,0,0,0,1,0,1,0,0,0,1,0,0 +0.14285714285714285,0.1116430064927919,1.0,1.0,1.0,0.8820768558276499,0,0,1,0,0,0,0,1,0,0,0,1 +0.10714285714285712,0.06993507208099482,1.0,1.0,1.0,0.6913008425849491,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.04357873885770881,0.0,1.0,0.0,0.5069022744695886,0,0,1,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.3538571585781886,1.0,1.0,1.0,0.24702882446640287,0,0,1,0,0,0,0,1,0,1,0,0 +0.5714285714285714,0.10426983602949268,1.0,0.0,0.0,0.2184939341622545,0,0,0,1,0,0,1,0,1,0,0,0 +0.3571428571428571,0.2512930560140861,1.0,1.0,0.0,0.43499565515022004,0,0,0,1,0,0,1,0,1,0,0,0 +0.7857142857142857,0.7797402883239792,1.0,0.0,0.0,0.21486216341529946,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.02426543413667877,1.0,1.0,0.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.05441839991196214,1.0,1.0,1.0,0.5354741988477034,0,0,1,0,0,0,0,1,0,1,0,0 +0.3035714285714286,0.1384945526576428,1.0,1.0,0.0,0.6311298802642114,0,0,0,0,1,1,0,0,0,1,0,0 +0.24999999999999997,0.33069219764498736,1.0,1.0,1.0,0.70490883351029,0,0,0,1,0,1,0,0,0,1,0,0 +0.24999999999999997,0.027511830086937385,0.0,1.0,0.0,0.33991531191906393,1,0,0,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.06894464619786508,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.024815670738417523,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 
+0.125,0.04974138879718279,0.0,1.0,1.0,0.8937163943553702,1,0,0,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.16011885110597557,1.0,1.0,1.0,0.8962138068279807,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.10311433916584131,0.0,0.0,0.0,0.20364878828289232,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.02839220864971938,1.0,1.0,0.0,0.6731420559668704,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.3685484758446132,1.0,1.0,1.0,0.4297441010936714,0,0,1,0,0,1,0,0,0,1,0,0 +0.08928571428571427,0.0482007263123143,1.0,1.0,1.0,0.7460536409951634,0,0,0,0,1,0,1,0,0,0,1,0 +0.24999999999999997,0.05315285572796302,0.0,0.0,0.0,0.2364918904353026,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.10806646858149004,1.0,0.0,1.0,0.3387603186528358,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.20864971937933313,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.04418399911962144,1.0,1.0,1.0,0.4768786443351435,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.14559260482007264,1.0,1.0,1.0,0.6014238410536501,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.327335754374381,1.0,1.0,0.0,0.46045295935712555,0,0,1,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.41311764058545175,0.0,1.0,1.0,0.491978924064202,0,0,0,0,1,1,0,0,0,1,0,0 +0.14285714285714285,0.06173654671508748,1.0,1.0,0.0,0.5600008807692614,0,0,0,1,0,0,1,0,1,0,0,0 +0.4642857142857143,0.33564432706063607,1.0,1.0,0.0,0.32719918796985015,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.2304941124683614,1.0,1.0,1.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.7857142857142857,0.8485748872014967,1.0,0.0,0.0,0.21486216341529946,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.030483107736326624,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.3035714285714286,0.07439198855507868,1.0,1.0,1.0,0.4148665750517137,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.3421371189611533,1.0,1.0,0.0,0.7134293371692706,1,0,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.06729393639264884,1.0,1.0,1.0,0.9073342365921735,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.2851876306811929,0.0,1.0,1.0,0.8298727730544322,1,0,0,0,0,1,0,0,0,0,0,1 +0.03571428571428571,0.05436337625178826,1.0,1.0,1.0,0.9417068119041873,1,0,0,0,0,0,1,0,0,0,0,1 +0.5178571428571429,0.13832948167712117,0.0,1.0,1.0,0.7345947491674498,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.09199955981071861,1.0,1.0,0.0,0.6731420559668704,0,0,1,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.1522504677011115,1.0,1.0,1.0,0.5493424244597425,0,0,0,0,1,0,1,0,0,0,1,0 +0.3571428571428571,0.052657642786398146,0.0,0.0,0.0,0.1941383022647156,0,1,0,0,0,1,0,0,1,0,0,0 +0.21428571428571427,0.13068119291295258,1.0,1.0,0.0,0.4664405275790275,0,0,0,0,1,1,0,0,1,0,0,0 +0.14285714285714285,0.023880268515461652,1.0,1.0,0.0,0.46045295935712555,0,0,1,0,0,1,0,0,1,0,0,0 +0.08928571428571427,0.07813359744690217,0.0,0.0,0.0,0.5214711461502933,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.02321998459337515,0.0,1.0,1.0,0.9120334763614909,1,0,0,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.12820512820512822,1.0,1.0,0.0,0.8962138068279807,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.3336634752943766,1.0,1.0,1.0,0.8837277100655102,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.1641906019588423,0.0,0.0,0.0,0.1941383022647156,0,1,0,0,0,1,0,0,1,0,0,0 +0.10714285714285712,0.3883569935072081,1.0,1.0,1.0,0.6608113136649546,1,0,0,0,0,0,0,1,0,1,0,0 +0.1607142857142857,0.10184879498184218,0.0,0.0,1.0,0.4796070935730555,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.32898646417959726,0.0,0.0,0.0,0.1271934988531199,0,1,0,0,0,1,0,0,1,0,0,0 
+0.14285714285714285,0.15219544404093763,1.0,0.0,1.0,0.5621939327570208,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.014966435567293938,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.0926598437328051,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.7857142857142857,0.1376141740948608,1.0,1.0,1.0,0.7359170768632337,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.06943985913942995,1.0,1.0,0.0,0.6449158361571704,0,0,1,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.12396830637173985,1.0,1.0,1.0,0.539932226665866,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.05705953560030813,0.0,1.0,1.0,0.8301501720360537,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.6679322108506658,1.0,1.0,0.0,0.3516352721472709,0,0,1,0,0,0,0,1,0,1,0,0 +0.08928571428571427,0.05981071860900186,1.0,1.0,0.0,0.7460536409951634,0,1,0,0,0,0,1,0,0,0,1,0 +0.4107142857142857,0.12061186310113349,1.0,1.0,1.0,0.3554393160412404,0,0,0,0,1,0,1,0,1,0,0,0 +0.10714285714285712,0.06129635743369649,0.0,1.0,1.0,0.8788408139676869,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.11742049081104876,1.0,1.0,1.0,0.38472306870888867,0,0,0,0,1,1,0,0,1,0,0,0 +0.08928571428571427,0.1402553097832068,1.0,1.0,1.0,0.4917873581105606,0,0,1,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.06575327390778034,1.0,1.0,1.0,0.712739903075119,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.08215032463959503,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.06036095521074061,1.0,1.0,1.0,0.7762312226178342,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.25420931000330144,1.0,1.0,0.0,0.27088325935678026,0,0,1,0,0,0,0,1,1,0,0,0 +0.14285714285714285,0.0817651590183779,0.0,1.0,1.0,0.9086619979671742,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.027731924727632886,1.0,1.0,1.0,0.7138311294496358,0,0,0,0,1,0,1,0,0,1,0,0 +0.5714285714285714,0.18075272367117862,1.0,1.0,1.0,0.8501233211422209,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.0675690546935182,1.0,1.0,1.0,0.9016367060401769,0,0,1,0,0,0,1,0,0,0,0,1 +0.4642857142857143,0.35721360184879497,1.0,1.0,1.0,0.8794147982207655,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.37839771101573677,0.0,0.0,0.0,0.6396221742784345,0,1,0,0,0,1,0,0,0,0,0,1 +0.19642857142857142,0.030593155056674374,0.0,0.0,1.0,0.2247881972110395,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.14537251017937713,1.0,1.0,1.0,0.530059222287824,0,0,0,0,1,0,0,1,0,0,1,0 +0.5714285714285714,0.30538131396500495,1.0,1.0,1.0,0.4297441010936714,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.15709254979641246,1.0,0.0,1.0,0.8205202256524424,0,1,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.06250687795752173,1.0,1.0,1.0,0.9016367060401769,0,0,1,0,0,1,0,0,0,0,0,1 +0.24999999999999997,0.11852096401452623,0.0,1.0,1.0,0.8384025299994181,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.19962583911081766,1.0,1.0,1.0,0.6014238410536501,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.15637724221415208,1.0,0.0,0.0,0.4752943973393717,0,1,0,0,0,1,0,0,0,1,0,0 +0.4285714285714286,0.1371739848134698,1.0,1.0,1.0,0.8656027328560056,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.07285132607021019,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.0872125013755915,0.0,0.0,0.0,0.1840254387203246,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.13579839330912294,0.0,0.0,0.0,0.3243121077691882,0,0,1,0,0,1,0,0,0,1,0,0 +0.7857142857142857,0.2527786948387807,1.0,1.0,0.0,0.7355348054855501,1,0,0,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.052217453505007144,1.0,1.0,0.0,0.38472306870888867,0,0,0,0,1,0,1,0,1,0,0,0 
+0.17857142857142858,0.20485308682733577,1.0,0.0,1.0,0.2907185455724763,1,0,0,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.1336524705623418,0.0,1.0,1.0,0.8301501720360537,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.10360955210740619,0.0,1.0,1.0,0.8532586635277519,0,0,0,0,1,0,0,1,0,0,0,1 +0.3571428571428571,0.05397821063057115,0.0,1.0,1.0,0.28637876253981037,0,0,0,0,1,1,0,0,1,0,0,0 +0.3571428571428571,0.14427203697589966,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.11978650819852536,1.0,1.0,1.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +1.0,0.3877517332452955,1.0,1.0,0.0,0.121581563557335,0,0,0,0,1,1,0,0,1,0,0,0 +0.4642857142857143,0.40420380763728403,1.0,1.0,1.0,0.85555465817234,0,0,0,0,1,0,1,0,0,0,0,1 +0.625,0.7664245625619016,1.0,1.0,1.0,0.2911457072868785,0,0,0,1,0,0,1,0,1,0,0,0 +0.08928571428571427,0.06883459887751733,0.0,1.0,0.0,0.6447959977442328,0,0,0,0,1,0,1,0,0,1,0,0 +0.7857142857142857,0.19775503466490593,1.0,1.0,0.0,0.3285309193360387,0,0,0,1,0,0,0,1,0,1,0,0 +0.6785714285714286,0.20864971937933313,1.0,1.0,1.0,0.7473544772820154,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.06184659403543523,0.0,1.0,1.0,0.45983457538120076,1,0,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.41619896555518876,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.03626059205458347,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.34158688235941453,1.0,1.0,0.0,0.9073342365921735,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.11962143721800374,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.13965004952129417,0.0,0.0,1.0,0.8404307054775538,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.024595576097722022,1.0,1.0,0.0,0.5082778240326536,0,1,0,0,0,0,1,0,1,0,0,0 +0.3035714285714286,0.12248266754704522,1.0,1.0,1.0,0.8962138068279807,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.0706503796632552,1.0,0.0,0.0,0.5995907180616916,0,1,0,0,0,1,0,0,0,1,0,0 +0.03571428571428571,0.04500935402222955,1.0,1.0,1.0,0.7623117828116139,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.18284362275778587,0.0,0.0,1.0,0.44241615651549826,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.05782986684274238,1.0,1.0,1.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.6785714285714286,0.3805436337625179,1.0,1.0,1.0,0.8152117172112082,0,0,0,1,0,1,0,0,0,0,0,1 +0.3571428571428571,0.09843732805106195,1.0,0.0,0.0,0.4752943973393717,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.12286783316826234,1.0,0.0,1.0,0.23658638755348427,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.18526466380543635,1.0,0.0,1.0,0.4847650698535884,0,0,0,0,1,1,0,0,0,0,1,0 +0.3571428571428571,0.09948277759436558,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.08281060856168151,0.0,1.0,1.0,0.28637876253981037,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.07202597116760207,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.16017387476614944,1.0,1.0,0.0,0.34047407781017125,0,0,1,0,0,1,0,0,1,0,0,0 +0.7857142857142857,0.5486959392538792,1.0,1.0,1.0,0.7743267172453298,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.062231759656652355,0.0,1.0,0.0,0.2488657768966351,0,0,1,0,0,0,1,0,1,0,0,0 +0.3035714285714286,0.10955210740618465,1.0,1.0,1.0,0.8962138068279807,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.12589413447782546,1.0,1.0,0.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.4300099042588313,0.0,0.0,0.0,0.1073942894895514,0,0,1,0,0,0,1,0,1,0,0,0 
+0.7857142857142857,0.33927588863211183,0.0,1.0,0.0,0.26164945103668286,0,0,0,0,1,1,0,0,0,1,0,0 +0.3571428571428571,0.24683613954000222,0.0,0.0,0.0,0.3676314741249905,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.037416088918234836,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.1463629360625069,1.0,1.0,1.0,0.3565290863994199,0,0,0,1,0,0,0,1,1,0,0,0 +0.24999999999999997,0.11158798283261802,0.0,1.0,0.0,0.5545926067069147,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.2318146803125344,1.0,1.0,0.0,0.7918122026495104,0,0,1,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.11797072741278751,0.0,0.0,1.0,0.594377341023918,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.1103224386486189,1.0,1.0,1.0,0.8839295784323556,0,1,0,0,0,0,1,0,0,0,0,1 +0.625,0.4587872785297678,1.0,1.0,1.0,0.8024775365103721,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.3022449653350941,0.0,0.0,1.0,0.2949724434247011,0,1,0,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.05227247716518102,0.0,0.0,1.0,0.24744012641724394,0,0,1,0,0,1,0,0,1,0,0,0 +0.4642857142857143,0.08897325850115549,1.0,1.0,1.0,0.85555465817234,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.0335644327060636,0.0,1.0,1.0,0.4603229620470974,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.04258831297457907,1.0,1.0,0.0,0.38472306870888867,0,1,0,0,0,0,1,0,1,0,0,0 +1.0,0.36238582590513924,1.0,1.0,0.0,0.121581563557335,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.07868383404864092,1.0,1.0,1.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.06399251678221635,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.07142857142857142,0.03615054473423572,1.0,1.0,1.0,0.7468001783284491,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.05188731154396391,0.0,1.0,0.0,0.28597713146577547,1,0,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.1280400572246066,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.2047980631671619,0.0,0.0,1.0,0.7832044401604136,0,0,0,1,0,1,0,0,0,0,0,1 +0.3571428571428571,0.1930229998899527,0.0,0.0,1.0,0.3671745216659698,1,0,0,0,0,1,0,0,0,1,0,0 +0.24999999999999997,0.11389897655992078,1.0,1.0,1.0,0.9071687984489271,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.12176735996478487,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.06014086057004511,1.0,1.0,1.0,0.7691184526170824,0,0,0,0,1,0,1,0,0,0,1,0 +0.4285714285714286,0.4167492021569275,1.0,1.0,1.0,0.8656027328560056,0,1,0,0,0,1,0,0,0,0,0,1 +0.3035714285714286,0.16644657202597118,1.0,1.0,1.0,0.8962138068279807,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.16474083856058105,0.0,1.0,1.0,0.5069022744695886,0,0,1,0,0,0,1,0,0,1,0,0 +0.4107142857142857,0.46016287003411466,1.0,1.0,1.0,0.8472095845527893,0,0,1,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.08759766699680863,0.0,1.0,0.0,0.19533656176753816,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.12991086167051832,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.08928571428571427,0.09794211510949709,1.0,0.0,1.0,0.676443925470061,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.49669858038956755,1.0,1.0,1.0,0.8189994124469444,0,0,1,0,0,0,0,1,0,0,0,1 +0.14285714285714285,0.045449543303620554,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.07131066358534169,1.0,1.0,1.0,0.9187129412749173,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.09040387366567623,0.0,1.0,1.0,0.3538845096386129,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.10404974138879718,1.0,1.0,1.0,0.9393970480266846,0,0,0,1,0,0,1,0,0,0,0,1 
+0.5714285714285714,0.425332893144052,1.0,1.0,0.0,0.8216427748498599,0,1,0,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.055793991416309016,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,1,0,0,0,0,0,1 +0.24999999999999997,0.10999229668757565,1.0,1.0,1.0,0.49745855352837404,0,0,0,1,0,0,1,0,1,0,0,0 +0.7857142857142857,0.324254429404644,0.0,1.0,0.0,0.0955803074919803,0,0,0,0,1,0,0,1,1,0,0,0 +0.3035714285714286,0.1842742379223066,0.0,1.0,1.0,0.35908907651024774,0,0,0,1,0,1,0,0,1,0,0,0 +0.19642857142857142,0.07081545064377683,1.0,1.0,1.0,0.6874873703107746,0,0,0,0,1,0,1,0,0,1,0,0 +0.5714285714285714,0.590073731704633,0.0,1.0,0.0,0.7468841586935758,1,0,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.2226807527236712,1.0,1.0,0.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.05172224056344228,0.0,1.0,0.0,0.27039187876105036,1,0,0,0,0,0,0,1,1,0,0,0 +0.14285714285714285,0.056344228018047754,0.0,1.0,0.0,0.39882243364250436,0,1,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.11863101133487398,1.0,1.0,1.0,0.9344131207554249,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.33856058104985143,1.0,1.0,1.0,0.6014238410536501,0,1,0,0,0,0,1,0,0,1,0,0 +0.10714285714285712,0.05705953560030813,1.0,1.0,1.0,0.9319236160108605,0,0,0,0,1,0,1,0,0,0,0,1 +0.6785714285714286,0.3138549576317817,0.0,1.0,1.0,0.3593859563426322,0,0,0,1,0,0,1,0,0,1,0,0 +0.03571428571428571,0.09392538791680423,0.0,1.0,1.0,0.5122497385912862,0,0,0,1,0,0,1,0,1,0,0,0 +0.3571428571428571,0.07164080554638494,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.03780125453945196,1.0,1.0,1.0,0.8839295784323556,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09805216242984482,1.0,1.0,1.0,0.8456935742394528,0,0,0,0,1,0,0,1,0,0,0,1 +0.5714285714285714,0.5846263893474194,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.2659843732805106,0.0,1.0,1.0,0.491978924064202,0,0,0,0,1,0,1,0,0,1,0,0 +0.5714285714285714,0.2712116209970287,1.0,1.0,0.0,0.31774368328346525,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.044018928139099814,1.0,1.0,1.0,0.7619553600617908,1,0,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.2015516672169033,1.0,0.0,1.0,0.31967729971945585,0,0,1,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.15401122482667548,0.0,1.0,1.0,0.5639451140754361,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.44486629250577747,1.0,1.0,0.0,0.21396727904669746,0,0,0,0,1,0,0,1,1,0,0,0 +0.14285714285714285,0.08847804555959062,0.0,0.0,1.0,0.2848197012851972,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.25272367117860683,1.0,1.0,0.0,0.5082778240326536,0,0,0,0,1,1,0,0,1,0,0,0 +0.24999999999999997,0.19423352041377795,1.0,1.0,1.0,0.9073342365921735,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.09783206778914934,1.0,1.0,1.0,0.6731420559668704,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.11191812479366127,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.1363486299108617,1.0,1.0,1.0,0.7303600113933694,0,0,0,1,0,0,1,0,0,1,0,0 +0.4107142857142857,0.27748431825685044,1.0,1.0,0.0,0.3549890108481963,1,0,0,0,0,0,1,0,1,0,0,0 +1.0,0.4900957411687025,1.0,1.0,1.0,0.16559346885680104,0,0,1,0,0,0,0,1,0,1,0,0 +0.24999999999999997,0.09315505667436998,0.0,0.0,0.0,0.7904631079537381,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.148949048090679,1.0,1.0,1.0,0.38472306870888867,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.11769560911191813,0.0,1.0,1.0,0.3403565857713484,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.08979861340376362,0.0,0.0,0.0,0.20364878828289232,0,0,1,0,0,1,0,0,1,0,0,0 
+0.4642857142857143,0.08060966215472654,0.0,1.0,1.0,0.383421164823506,0,0,1,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.33459887751733247,1.0,1.0,0.0,0.14138446961748627,0,0,0,0,1,0,0,1,1,0,0,0 +0.19642857142857142,0.0586552217453505,1.0,1.0,1.0,0.9016367060401769,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.8058765269065697,1.0,1.0,0.0,0.5706358388636316,0,0,0,0,1,0,1,0,1,0,0,0 +0.4642857142857143,0.15555188731154398,1.0,1.0,1.0,0.85555465817234,0,0,0,0,1,0,1,0,0,0,0,1 +0.0,0.07120061626499395,1.0,1.0,1.0,0.9558919804312949,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.041652910751623196,1.0,0.0,1.0,0.5528336477984951,0,0,1,0,0,0,1,0,0,1,0,0 +0.10714285714285712,0.10008803785627819,1.0,1.0,1.0,0.9187129412749173,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.7002310993727302,0.0,1.0,0.0,0.4721076735930411,1,0,0,0,0,0,0,1,0,1,0,0 +0.24999999999999997,0.1570375261362386,1.0,1.0,1.0,0.49745855352837404,0,0,0,1,0,0,1,0,1,0,0,0 +0.5714285714285714,0.20578848905029165,0.0,1.0,1.0,0.3694096935983138,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.17877187190491914,0.0,1.0,0.0,0.3538845096386129,0,0,1,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.048750962914053037,1.0,1.0,0.0,0.6705704969262621,0,0,0,0,1,0,0,1,0,1,0,0 +0.19642857142857142,0.007813359744690218,0.0,0.0,1.0,0.4681806327003752,0,1,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.048475844613183675,0.0,1.0,0.0,0.33991531191906393,1,0,0,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.06459777704412897,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,0,1,0,0,1,0,0 +0.6785714285714286,0.17299438758666227,1.0,1.0,1.0,0.22696837267036857,1,0,0,0,0,0,1,0,1,0,0,0 +0.7857142857142857,0.3713546825134808,1.0,1.0,0.0,0.21980151898449196,0,0,0,1,0,0,1,0,1,0,0,0 +0.4642857142857143,0.6464179597226807,1.0,1.0,0.0,0.32719918796985015,0,1,0,0,0,0,1,0,1,0,0,0 +0.4642857142857143,0.14201606690877078,0.0,1.0,1.0,0.7583632325020305,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09436557719819522,0.0,1.0,1.0,0.4443019547874373,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.07411687025420931,1.0,1.0,1.0,0.8456935742394528,0,0,0,0,1,0,0,1,0,0,0,1 +0.03571428571428571,0.09975789589523495,1.0,1.0,1.0,0.7623117828116139,0,1,0,0,0,1,0,0,0,1,0,0 +0.3928571428571428,0.42456256190161773,1.0,1.0,1.0,0.5811811839164577,0,1,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.4034334763948498,1.0,1.0,1.0,0.2839698095777251,1,0,0,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.24369979091009136,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.30543633762517886,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,1,0,0,0,0,0,1 +0.3571428571428571,0.1990756025090789,0.0,1.0,1.0,0.8301501720360537,0,0,0,0,1,1,0,0,0,0,0,1 +0.14285714285714285,0.11775063277209201,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.3631561571475735,1.0,1.0,0.0,0.6870648053224668,1,0,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.0675690546935182,1.0,0.0,0.0,0.9134302134303278,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.10069329811819082,1.0,0.0,1.0,0.8891646790727171,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.03747111257840871,0.0,1.0,0.0,0.6730269139352237,0,1,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.05469351821283151,0.0,1.0,1.0,0.9015902882244003,0,0,0,0,1,1,0,0,0,0,0,1 +0.10714285714285712,0.10355452844723231,0.0,1.0,1.0,0.41906888316914087,0,1,0,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.0935402222955871,1.0,1.0,1.0,0.9234069094768464,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.06184659403543523,1.0,1.0,1.0,0.5231864157229879,0,0,1,0,0,0,1,0,1,0,0,0 
+0.03571428571428571,0.06597336854847584,1.0,1.0,1.0,0.7623117828116139,0,0,0,0,1,0,1,0,0,1,0,0
+0.24999999999999997,0.07329151535160118,1.0,1.0,1.0,0.9073342365921735,0,0,0,0,1,0,1,0,0,0,0,1
diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-test.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-test.csv
new file mode 100644
index 0000000..5e83fb1
--- /dev/null
+++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-test.csv
@@ -0,0 +1,201 @@
+month,credit_amount,sex,age,credit,pred_credit,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153,status=A11,status=A12,status=A13,status=A14
+0.5714285714285714,0.30802244965335096,1.0,0.0,1.0,0.31967729971945585,0,0,1,0,0,0,1,0,0,0,1,0
+0.5714285714285714,0.11290855067679102,1.0,1.0,0.0,0.23796927750170668,0,0,1,0,0,1,0,0,1,0,0,0
+0.14285714285714285,0.04693518212831517,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,0,1,0,0,1,0,0
+0.24999999999999997,0.0704302850225597,1.0,1.0,0.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1
+0.19642857142857142,0.08093980411576977,1.0,1.0,1.0,0.4768786443351435,0,1,0,0,0,0,1,0,1,0,0,0
+0.24999999999999997,0.1389347419390338,1.0,1.0,0.0,0.9234069094768464,0,0,0,1,0,0,1,0,0,0,0,1
+0.14285714285714285,0.08974358974358974,0.0,1.0,1.0,0.5785653349165971,0,0,1,0,0,1,0,0,0,0,1,0
+0.19642857142857142,0.15291075162319798,1.0,1.0,1.0,0.9318352763505735,0,0,0,1,0,0,1,0,0,0,0,1
+0.14285714285714285,0.07741828986464179,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0
+0.3035714285714286,0.11571475734565863,1.0,1.0,1.0,0.5855097720668199,0,0,1,0,0,0,1,0,0,1,0,0
+0.24999999999999997,0.13255199735886433,1.0,1.0,1.0,0.3989479019699039,0,0,1,0,0,0,1,0,1,0,0,0
+0.14285714285714285,0.07384175195333993,0.0,1.0,1.0,0.9086619979671742,0,0,0,1,0,0,1,0,0,0,0,1
+0.3571428571428571,0.09216463079124024,1.0,1.0,1.0,0.5640737884995887,0,0,1,0,0,0,1,0,0,0,1,0
+0.5714285714285714,0.594475624518543,1.0,1.0,1.0,0.7918122026495104,0,0,1,0,0,0,1,0,0,0,0,1
+0.3571428571428571,0.09447562451854297,0.0,0.0,1.0,0.3676314741249905,0,0,0,0,1,0,1,0,0,1,0,0
+0.5714285714285714,0.13321228128095083,0.0,1.0,0.0,0.2581995981513079,0,0,1,0,0,0,0,1,0,1,0,0
+0.3571428571428571,0.17497523935292178,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1
+0.5714285714285714,0.23236491691427316,1.0,1.0,1.0,0.48667382453412633,0,0,0,0,1,0,1,0,0,0,1,0
+0.14285714285714285,0.24347969626939586,0.0,0.0,1.0,0.8290700279000807,0,1,0,0,0,1,0,0,0,0,0,1
+0.3571428571428571,0.041212721470232194,1.0,0.0,1.0,0.8205202256524424,0,0,0,0,1,0,1,0,0,0,0,1
+0.7857142857142857,0.5893584241223726,1.0,1.0,0.0,0.40471109118578136,0,0,0,1,0,0,1,0,0,1,0,0
+0.14285714285714285,0.05876526906569825,1.0,1.0,1.0,0.7138311294496358,0,0,0,0,1,0,1,0,0,1,0,0
+0.03571428571428571,0.019753494002421042,0.0,1.0,1.0,0.5122497385912862,0,0,0,1,0,0,1,0,1,0,0,0
+0.3035714285714286,0.1509298998569385,1.0,1.0,1.0,0.8769881149919502,0,0,1,0,0,0,1,0,0,0,0,1
+1.0,0.847529437658193,1.0,0.0,1.0,0.5547639995361563,0,0,0,1,0,0,1,0,0,0,0,1
+0.19642857142857142,0.17156377242214152,1.0,0.0,1.0,0.8695372347753674,0,0,0,0,1,1,0,0,0,0,0,1
+0.7857142857142857,0.24969736986904373,1.0,1.0,1.0,0.21980151898449196,0,0,0,1,0,0,1,0,1,0,0,0
+0.14285714285714285,0.10982722570705403,1.0,1.0,0.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0
+0.3571428571428571,0.08237041928029053,1.0,0.0,1.0,0.2729209608367981,0,1,0,0,0,0,1,0,1,0,0,0
+0.7857142857142857,0.2666446572025971,0.0,1.0,0.0,0.22634832395437568,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.034885000550236606,1.0,1.0,0.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.32023770221195114,1.0,1.0,1.0,0.9073342365921735,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.06228678331682623,1.0,1.0,1.0,0.43499565515022004,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.02767690106745901,0.0,1.0,1.0,0.6295469835412176,0,0,1,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.13464289644547156,1.0,1.0,1.0,0.921786795969523,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.01870804445911742,1.0,1.0,1.0,0.7623117828116139,0,1,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.09183448883019699,1.0,1.0,0.0,0.539932226665866,0,1,0,0,0,0,1,0,0,1,0,0 +0.3035714285714286,0.07686805326290305,1.0,1.0,0.0,0.3692311661992841,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.3090128755364807,1.0,1.0,1.0,0.6156438955835984,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.3132496973698691,0.0,1.0,0.0,0.8301501720360537,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.026246285902938263,1.0,1.0,0.0,0.5082778240326536,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.14097061736546715,0.0,1.0,1.0,0.28637876253981037,0,0,0,0,1,1,0,0,1,0,0,0 +0.7857142857142857,0.22328601298558381,0.0,0.0,0.0,0.0810126268631418,0,1,0,0,0,1,0,0,1,0,0,0 +0.4642857142857143,0.3266754704522945,1.0,0.0,1.0,0.26441754960248154,0,0,0,1,0,1,0,0,1,0,0,0 +0.24999999999999997,0.05749972488169913,0.0,1.0,1.0,0.5545926067069147,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.3004842082095301,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.7857142857142857,0.32243864861890614,1.0,1.0,1.0,0.6234592356361524,0,0,1,0,0,0,0,1,0,0,0,1 +0.4642857142857143,0.44767249917464513,1.0,1.0,0.0,0.591004897644727,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.09414548255749973,0.0,0.0,1.0,0.43705682731209555,0,0,0,0,1,0,1,0,0,0,1,0 +0.14285714285714285,0.04996148343787829,1.0,1.0,1.0,0.6731420559668704,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.06382744580169472,1.0,1.0,1.0,0.6731420559668704,0,0,1,0,0,0,1,0,0,1,0,0 +0.5178571428571429,0.385330692197645,1.0,1.0,1.0,0.8654407578963966,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.023825244855287777,0.0,1.0,1.0,0.681321561513423,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06399251678221635,1.0,1.0,1.0,0.9393970480266846,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.2817761637504127,1.0,1.0,1.0,0.23796927750170668,0,0,1,0,0,0,1,0,1,0,0,0 +0.4642857142857143,0.2217453505007153,0.0,1.0,0.0,0.383421164823506,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.06096621547265324,1.0,1.0,0.0,0.38472306870888867,0,0,0,0,1,0,1,0,1,0,0,0 +0.10714285714285712,0.0292175635523275,1.0,1.0,1.0,0.6696986103919891,0,0,0,0,1,0,0,1,0,0,1,0 +0.0,0.17222405634422802,0.0,1.0,1.0,0.9329250944686631,0,0,0,1,0,0,1,0,0,0,0,1 +0.08928571428571427,0.045779685264663805,1.0,1.0,1.0,0.7387979215486421,0,0,0,0,1,0,1,0,0,1,0,0 +0.5714285714285714,0.11483437878287663,1.0,1.0,1.0,0.47720039923434754,0,0,0,0,1,0,1,0,0,1,0,0 +0.053571428571428575,0.03279410146362936,1.0,1.0,1.0,0.9178375942401232,0,0,0,0,1,0,0,1,0,0,0,1 +0.7857142857142857,0.45322988885220644,0.0,0.0,1.0,0.20756498401823725,0,0,0,1,0,0,1,0,0,1,0,0 +0.19642857142857142,0.09106415758776273,1.0,1.0,1.0,0.6955811084767616,0,0,0,0,1,1,0,0,0,0,1,0 +0.14285714285714285,0.08550676791020138,1.0,1.0,1.0,0.7138311294496358,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.022119511389897654,0.0,0.0,1.0,0.2848197012851972,0,0,0,0,1,1,0,0,1,0,0,0 
+0.14285714285714285,0.059480576647958625,1.0,0.0,0.0,0.5995907180616916,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.15566193463189174,1.0,1.0,1.0,0.7918122026495104,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.1340926598437328,1.0,1.0,1.0,0.7377669250251128,0,0,0,1,0,1,0,0,0,0,1,0 +0.14285714285714285,0.05942555298778475,1.0,1.0,1.0,0.7215189623993398,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.05051172003961703,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.17585561791570375,1.0,1.0,0.0,0.27443988356010096,0,0,0,0,1,0,1,0,1,0,0,0 +0.08928571428571427,0.052217453505007144,0.0,1.0,1.0,0.6908920489455583,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.02701661714537251,0.0,0.0,0.0,0.2844193852648953,1,0,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.39198855507868385,1.0,1.0,1.0,0.6594326055741871,1,0,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.10559040387366568,1.0,1.0,0.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.05986574226917574,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.33944095961263343,0.0,1.0,1.0,0.22408792958769466,0,0,0,0,1,0,0,1,1,0,0,0 +0.19642857142857142,0.039892153626059204,1.0,0.0,1.0,0.311205543841052,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.13392758886321118,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.23528117090348852,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.038571585781886214,0.0,1.0,0.0,0.6155200677110325,0,1,0,0,0,1,0,0,0,1,0,0 +0.8928571428571428,0.8635963464289644,1.0,1.0,0.0,0.3004289224837474,0,1,0,0,0,1,0,0,0,1,0,0 +0.19642857142857142,0.18190822053483,1.0,1.0,1.0,0.9016367060401769,0,0,1,0,0,0,1,0,0,0,0,1 +0.0,0.06630351050951909,1.0,1.0,1.0,0.9558919804312949,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09271486739297898,1.0,1.0,0.0,0.6014238410536501,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.1581379993397161,1.0,1.0,1.0,0.7138311294496358,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.07114559260482008,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.14933421371189612,1.0,1.0,1.0,0.3103438400976657,0,0,0,0,1,0,0,1,1,0,0,0 +0.24999999999999997,0.06558820292725871,1.0,1.0,0.0,0.41602094847665755,0,0,0,1,0,0,0,1,1,0,0,0 +1.0,0.7431495543083525,1.0,1.0,1.0,0.5481571705989746,0,0,0,0,1,0,0,1,0,0,0,1 +0.2857142857142857,0.3245295477055134,1.0,1.0,1.0,0.6408270671508309,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.3336084516342027,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.41900517222405637,1.0,1.0,0.0,0.42657246837101115,0,0,0,0,1,0,0,1,1,0,0,0 +0.14285714285714285,0.05760977220204688,1.0,0.0,1.0,0.5621939327570208,0,0,1,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.121712336304611,1.0,0.0,0.0,0.2849263027670955,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.3582590513920986,1.0,1.0,0.0,0.8899154955245911,0,0,1,0,0,1,0,0,0,0,0,1 +0.5714285714285714,0.5129305601408606,1.0,1.0,0.0,0.8216427748498599,0,1,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.39391438318476946,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.36519203257400684,1.0,1.0,0.0,0.23796927750170668,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.18548475844613185,1.0,1.0,0.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.06063607351160999,0.0,0.0,1.0,0.3382029230298863,1,0,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.06767910201386595,1.0,1.0,1.0,0.650463907744803,1,0,0,0,0,0,0,1,0,0,1,0 
+0.24999999999999997,0.042808407615274574,0.0,1.0,1.0,0.8384025299994181,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.035875426433366345,1.0,0.0,0.0,0.43311856998291953,0,0,0,1,0,1,0,0,1,0,0,0 +0.10714285714285712,0.1136238582590514,1.0,1.0,1.0,0.5291896437274354,0,0,0,0,1,0,1,0,1,0,0,0 +0.08928571428571427,0.09519093210080334,0.0,0.0,0.0,0.8461497236114719,0,1,0,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.07169582920655881,0.0,0.0,1.0,0.41718500620577564,0,0,0,1,0,1,0,0,0,1,0,0 +0.10714285714285712,0.05447342357213601,0.0,1.0,0.0,0.5654556515140055,0,0,0,0,1,0,0,1,0,0,1,0 +0.14285714285714285,0.05353802134918015,1.0,1.0,0.0,0.7138311294496358,0,0,0,0,1,1,0,0,0,1,0,0 +0.3035714285714286,0.12963574336964895,0.0,1.0,1.0,0.31273337858185585,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.06377242214152085,1.0,1.0,1.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.07252118410916694,0.0,0.0,1.0,0.7569602202607998,0,0,1,0,0,1,0,0,0,0,0,1 +0.01785714285714285,0.1759656652360515,1.0,1.0,1.0,0.9540917374927895,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.1992956971497744,0.0,1.0,1.0,0.5541068942854244,1,0,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.116430064927919,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,0,1,0,0,1,0,0 +0.5714285714285714,0.4087157477715418,0.0,1.0,1.0,0.7844990820387668,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.5033564432706064,1.0,1.0,0.0,0.47720039923434754,0,1,0,0,0,1,0,0,0,1,0,0 +0.5714285714285714,0.5126004181798173,0.0,1.0,1.0,0.709384236480772,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.0928799383735006,1.0,1.0,0.0,0.38472306870888867,0,1,0,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.10448993067018818,1.0,1.0,0.0,0.38048671184038707,0,0,1,0,0,0,0,1,1,0,0,0 +0.5714285714285714,0.350170573346539,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.14355672939363928,1.0,1.0,1.0,0.9262782795387031,1,0,0,0,0,0,1,0,0,0,0,1 +0.2857142857142857,0.13090128755364808,1.0,1.0,1.0,0.5956378735010521,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.32761087267525035,1.0,1.0,1.0,0.6156438955835984,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.18851105975569496,1.0,1.0,1.0,0.5231864157229879,0,0,1,0,0,1,0,0,1,0,0,0 +0.19642857142857142,0.114229118520964,1.0,1.0,1.0,0.6449158361571704,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.17244415098492352,1.0,1.0,0.0,0.5231864157229879,0,0,1,0,0,1,0,0,1,0,0,0 +0.7857142857142857,0.1546164850885881,1.0,1.0,0.0,0.40471109118578136,0,0,0,1,0,0,1,0,0,1,0,0 +0.03571428571428571,0.05771981952239463,1.0,1.0,1.0,0.7333540005028889,0,0,1,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.06426763508308572,1.0,1.0,1.0,0.9187129412749173,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.04104765048971058,0.0,0.0,1.0,0.8565716032957927,0,0,0,1,0,0,1,0,0,0,0,1 +0.2857142857142857,0.1780015406624849,1.0,1.0,1.0,0.9000458966532298,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.17211400902388027,1.0,1.0,1.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.05359304500935402,1.0,1.0,1.0,0.921786795969523,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.02723671178606801,0.0,1.0,0.0,0.6088670652522917,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.08858809287993837,1.0,1.0,1.0,0.7134293371692706,1,0,0,0,0,0,1,0,0,1,0,0 +0.2857142857142857,0.2213051612193243,0.0,0.0,1.0,0.22169998970541557,0,0,0,0,1,0,1,0,1,0,0,0 +0.053571428571428575,0.11912622427643886,1.0,1.0,1.0,0.7174552542410282,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.19406844943325632,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 
+0.4642857142857143,0.10625068779575217,1.0,1.0,1.0,0.539932226665866,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.239022779795312,1.0,1.0,1.0,0.9073342365921735,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.028007043028502255,1.0,1.0,0.0,0.5600008807692614,0,0,0,1,0,0,1,0,1,0,0,0 +0.4642857142857143,0.23550126554418402,0.0,1.0,1.0,0.8239601442576517,0,0,0,1,0,1,0,0,0,0,0,1 +0.10714285714285712,0.09210960713106636,1.0,1.0,1.0,0.9187129412749173,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.17591064157587763,0.0,1.0,1.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1 +1.0,0.7580609662154726,1.0,1.0,0.0,0.29141195522842805,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.14383184769450866,1.0,1.0,0.0,0.624582242671011,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.05529877847474414,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.6785714285714286,0.22669747991636405,1.0,1.0,0.0,0.26590645487362585,0,0,0,1,0,0,1,0,1,0,0,0 +0.14285714285714285,0.03719599427753935,0.0,1.0,1.0,0.8896713869108571,1,0,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.030373060415978873,1.0,1.0,0.0,0.6874873703107746,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.04632992186640256,0.0,1.0,1.0,0.5692869095347953,0,0,1,0,0,0,1,0,0,1,0,0 +0.3035714285714286,0.07378672829316606,1.0,1.0,1.0,0.9140325439012884,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.23131946737096953,0.0,1.0,1.0,0.709384236480772,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.023770221195113902,0.0,1.0,1.0,0.9086619979671742,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.3362495873225487,1.0,1.0,1.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.055463849455265765,0.0,0.0,1.0,0.8290700279000807,0,1,0,0,0,1,0,0,0,0,0,1 +0.4107142857142857,0.12490370859469573,1.0,0.0,0.0,0.39742586223642357,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.00484208209530098,1.0,1.0,1.0,0.5706358388636316,0,0,0,0,1,0,1,0,1,0,0,0 +0.2857142857142857,0.37454605480356556,1.0,1.0,1.0,0.6871883067965017,0,0,0,1,0,1,0,0,0,1,0,0 +0.2857142857142857,0.10922196544514141,1.0,1.0,0.0,0.3790392890575462,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.06459777704412897,0.0,1.0,1.0,0.6244595378782976,0,0,0,0,1,0,1,0,0,0,1,0 +0.3571428571428571,0.6095521074061846,1.0,1.0,0.0,0.5547218618582729,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.05496863651370089,1.0,1.0,1.0,0.8839295784323556,0,1,0,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.10278419720479806,1.0,1.0,1.0,0.7001676841419655,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.05694948827996038,0.0,1.0,0.0,0.3307077506353231,0,0,0,1,0,1,0,0,1,0,0,0 +0.7857142857142857,0.18311874105865522,1.0,1.0,1.0,0.7359170768632337,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.14295146913172666,1.0,1.0,1.0,0.9187129412749173,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.17932210850665786,0.0,0.0,1.0,0.276086333823588,0,0,0,1,0,0,1,0,1,0,0,0 +0.07142857142857142,0.05029162539892153,1.0,1.0,1.0,0.4679701801537487,0,0,0,0,1,0,0,1,1,0,0,0 +0.3035714285714286,0.2050181578078574,1.0,1.0,1.0,0.678114089607729,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.18498954550456698,0.0,1.0,1.0,0.5545926067069147,0,0,0,0,1,0,1,0,0,1,0,0 +1.2142857142857142,0.2941014636293606,1.0,0.0,0.0,0.09102457054520846,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.025255860019808517,1.0,1.0,0.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.43903378452734676,1.0,1.0,0.0,0.23796927750170668,0,0,1,0,0,0,1,0,1,0,0,0 +0.3035714285714286,0.17409486079013978,1.0,1.0,0.0,0.4148665750517137,0,1,0,0,0,0,1,0,1,0,0,0 
+0.5714285714285714,0.6802024870694399,1.0,1.0,0.0,0.3516352721472709,0,0,1,0,0,0,0,1,0,1,0,0
+0.03571428571428571,0.08253549026081215,1.0,1.0,1.0,0.9418146463783325,0,0,0,0,1,0,1,0,0,0,0,1
+0.14285714285714285,0.15555188731154398,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1
+0.19642857142857142,0.2791900517222406,0.0,1.0,1.0,0.8368159662459218,0,0,0,0,1,0,0,1,0,0,0,1
+0.0,0.06844943325630021,1.0,1.0,1.0,0.7836588733899705,0,1,0,0,0,0,1,0,0,0,1,0
+0.14285714285714285,0.12809508088478047,0.0,1.0,1.0,0.32271911415921684,1,0,0,0,0,0,0,1,1,0,0,0
+0.053571428571428575,0.11439418950148562,0.0,1.0,1.0,0.6637429098283977,0,1,0,0,0,0,1,0,0,1,0,0
+0.08928571428571427,0.022229558710245404,1.0,1.0,0.0,0.4917873581105606,0,0,1,0,0,0,1,0,1,0,0,0
+0.5714285714285714,0.4337515131506548,0.0,1.0,1.0,0.709384236480772,0,0,1,0,0,0,1,0,0,0,0,1
+0.3571428571428571,0.19880048420820953,1.0,1.0,1.0,0.8189994124469444,0,0,1,0,0,0,0,1,0,0,0,1
+0.14285714285714285,0.39738087377572356,0.0,0.0,1.0,0.48957791029126796,1,0,0,0,0,1,0,0,0,1,0,0
+0.14285714285714285,0.07180587652690656,1.0,1.0,0.0,0.9005973681731101,0,0,0,0,1,0,0,1,0,0,0,1
+0.14285714285714285,0.055683944095961266,1.0,1.0,1.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0
+0.08928571428571427,0.02971277649389237,0.0,1.0,1.0,0.599794302613941,0,0,1,0,0,0,1,0,0,1,0,0
+0.4642857142857143,0.27423792230659183,1.0,1.0,0.0,0.5394438034973912,1,0,0,0,0,0,1,0,0,1,0,0
+0.4642857142857143,0.11863101133487398,0.0,0.0,0.0,0.18745666815478274,0,0,0,1,0,1,0,0,1,0,0,0
+0.14285714285714285,0.1496093320127655,0.0,0.0,0.0,0.49006922670215497,0,1,0,0,0,1,0,0,0,1,0,0
+0.3035714285714286,0.6839440959612633,1.0,1.0,0.0,0.8613899174477632,0,0,0,0,1,0,0,1,0,0,0,1
+0.14285714285714285,0.045779685264663805,1.0,1.0,0.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0
+0.4107142857142857,0.20166171453725104,1.0,1.0,0.0,0.5235081475203799,0,0,1,0,0,0,1,0,0,1,0,0
+0.08928571428571427,0.19709475074281943,1.0,1.0,1.0,0.9345335095933344,0,0,0,0,1,0,1,0,0,0,0,1
+0.24999999999999997,0.09232970177176185,1.0,1.0,0.0,0.6598740118142182,0,1,0,0,0,0,1,0,0,1,0,0
+0.5714285714285714,0.5286123032904149,1.0,1.0,1.0,0.5291642625196091,0,0,0,1,0,0,1,0,0,1,0,0
diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-train.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-train.csv
new file mode 100644
index 0000000..1627cd1
--- /dev/null
+++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-train.csv
@@ -0,0 +1,501 @@
+month,credit_amount,sex,age,credit,pred_credit,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153,status=A11,status=A12,status=A13,status=A14
+0.5714285714285714,0.20408275558490152,1.0,1.0,1.0,0.2740485661497681,1,0,0,0,0,0,1,0,1,0,0,0
+0.08928571428571427,0.18306371739848135,1.0,1.0,1.0,0.921786795969523,0,0,1,0,0,1,0,0,0,0,0,1
+0.24999999999999997,0.12462859029382635,1.0,1.0,1.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1
+0.14285714285714285,0.09601628700341147,1.0,1.0,1.0,0.7138311294496358,0,1,0,0,0,0,1,0,0,1,0,0
+1.0,0.5566193463189171,1.0,1.0,1.0,0.6276600176708876,0,0,0,0,1,0,1,0,0,0,0,1
+0.3571428571428571,0.39303400462198745,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1
+0.14285714285714285,0.008748761967646089,0.0,1.0,1.0,0.5785653349165971,0,0,1,0,0,1,0,0,0,0,1,0
+0.08928571428571427,0.12418840101243535,1.0,1.0,1.0,0.9112945990675128,0,0,0,0,1,0,0,1,0,0,0,1
+0.4642857142857143,0.25096291405304283,0.0,0.0,1.0,0.737517899393192,0,0,0,1,0,1,0,0,0,0,0,1 +0.7857142857142857,0.3471992956971498,1.0,0.0,0.0,0.28983662188342285,0,0,0,1,0,0,1,0,0,1,0,0 +0.5714285714285714,0.48332783096731596,1.0,1.0,0.0,0.47720039923434754,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.1816881258941345,1.0,1.0,0.0,0.43499565515022004,0,0,0,1,0,0,1,0,1,0,0,0 +0.2857142857142857,0.3421371189611533,1.0,1.0,1.0,0.6403744137297571,1,0,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.1250137559150435,1.0,1.0,1.0,0.32719918796985015,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.14141080664685815,1.0,1.0,0.0,0.47720039923434754,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.029327610872675252,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.19329811819082207,1.0,0.0,1.0,0.34386083170097576,0,0,0,1,0,0,1,0,1,0,0,0 +0.4642857142857143,0.39809618135798397,0.0,1.0,0.0,0.7914015454381981,1,0,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.06344228018047761,0.0,1.0,1.0,0.3257054908520209,0,0,1,0,0,1,0,0,1,0,0,0 +0.125,0.24876196764608782,1.0,1.0,1.0,0.7620595412443067,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.20743919885550788,1.0,1.0,1.0,0.34047407781017125,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.052327500825354895,1.0,1.0,1.0,0.6014238410536501,0,1,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.28232640035215145,1.0,1.0,1.0,0.2839698095777251,1,0,0,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.28947947617475517,1.0,0.0,1.0,0.7905497651969635,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.04666006382744581,0.0,1.0,1.0,0.862481750443303,1,0,0,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.32871134587872786,1.0,1.0,0.0,0.28436974428843736,0,0,0,0,1,0,0,1,0,1,0,0 +0.125,0.06289204357873886,0.0,1.0,1.0,0.8939030026966095,0,1,0,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.09183448883019699,1.0,1.0,1.0,0.7769155728885463,0,0,0,1,0,1,0,0,0,1,0,0 +0.19642857142857142,0.2638384505337295,0.0,1.0,1.0,0.8769316531253295,0,0,0,0,1,0,1,0,0,0,0,1 +0.7857142857142857,0.3136898866512601,0.0,0.0,0.0,0.1493953955906259,0,0,1,0,0,0,1,0,0,1,0,0 +0.07142857142857142,0.026466380543633764,1.0,1.0,1.0,0.5499994958063575,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.02085396720589854,1.0,1.0,1.0,0.9073342365921735,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.08572686255089688,0.0,0.0,0.0,0.8228490355418768,0,0,0,1,0,0,1,0,0,0,0,1 +0.3035714285714286,0.05161219324309453,0.0,1.0,0.0,0.523377616709452,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.16424562561901618,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.6223175965665235,0.0,0.0,0.0,0.3243121077691882,0,0,1,0,0,1,0,0,0,1,0,0 +0.08928571428571427,0.04974138879718279,1.0,1.0,1.0,0.7387979215486421,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.19808517662594918,1.0,1.0,1.0,0.9234069094768464,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.11604489930670188,1.0,1.0,0.0,0.3842577810457014,1,0,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.07059535600308132,0.0,1.0,1.0,0.8301501720360537,0,1,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.21789369428854408,1.0,1.0,0.0,0.4390666360957615,0,0,1,0,0,0,1,0,0,0,1,0 +0.4642857142857143,0.19847034224716628,1.0,1.0,1.0,0.2864857137967765,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.06289204357873886,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.08528667326950588,1.0,0.0,1.0,0.8291441697520612,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.1101023440079234,0.0,1.0,1.0,0.5785653349165971,0,0,1,0,0,0,1,0,0,0,1,0 
+0.14285714285714285,0.03224386486189061,0.0,0.0,0.0,0.49006922670215497,0,1,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.06025090789039286,1.0,1.0,0.0,0.3989479019699039,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.2500825354902608,0.0,1.0,1.0,0.3694096935983138,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.025200836359634642,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.05716958292065588,1.0,0.0,1.0,0.3387603186528358,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.08880818752063387,0.0,1.0,0.0,0.8384025299994181,0,0,1,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.5342797402883239,0.0,1.0,0.0,0.26164945103668286,0,1,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.27797953119841534,1.0,1.0,1.0,0.36651873073537256,0,0,0,0,1,0,0,1,1,0,0,0 +0.03571428571428571,0.08924837680202487,1.0,1.0,1.0,0.48838112309751835,1,0,0,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.12044679212061186,0.0,1.0,0.0,0.28637876253981037,0,1,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.06261692527786948,0.0,1.0,1.0,0.8832307883744815,0,0,1,0,0,1,0,0,0,0,0,1 +0.7857142857142857,0.6558270056124132,1.0,1.0,1.0,0.2839698095777251,1,0,0,0,0,0,0,1,0,1,0,0 +0.24999999999999997,0.13271706833938596,1.0,1.0,1.0,0.9234069094768464,0,0,0,1,0,0,1,0,0,0,0,1 +0.1607142857142857,0.06377242214152085,0.0,1.0,1.0,0.8854921566472022,1,0,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.1866952789699571,0.0,1.0,1.0,0.3691093043872932,0,0,0,0,1,0,1,0,1,0,0,0 +0.7857142857142857,0.358093980411577,0.0,1.0,0.0,0.10812898709422081,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.0880928799383735,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.4107142857142857,0.17420490811048753,1.0,1.0,1.0,0.3128458574189485,0,0,1,0,0,0,1,0,1,0,0,0 +0.4107142857142857,0.2678001540662485,1.0,1.0,1.0,0.8921185284956621,0,0,0,1,0,0,1,0,0,0,0,1 +0.10714285714285712,0.07411687025420931,1.0,1.0,1.0,0.9187129412749173,0,0,1,0,0,1,0,0,0,0,0,1 +0.03571428571428571,0.06228678331682623,0.0,1.0,1.0,0.895580236194126,0,0,1,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.05397821063057115,1.0,1.0,1.0,0.9319236160108605,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.13101133487399583,0.0,1.0,0.0,0.5382443052921533,0,0,1,0,0,1,0,0,0,1,0,0 +0.4642857142857143,0.10454495433036205,0.0,0.0,0.0,0.27183024502308256,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.1474634092659844,0.0,1.0,1.0,0.6634322478338307,0,0,0,1,0,0,1,0,0,1,0,0 +0.03571428571428571,0.3582590513920986,1.0,1.0,1.0,0.9522216912358249,0,0,0,1,0,0,1,0,0,0,0,1 +0.3035714285714286,0.13728403213381754,1.0,1.0,1.0,0.678114089607729,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.11417409486079015,1.0,1.0,1.0,0.43499565515022004,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.05249257180587652,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,1,0,0,0,0,0,1 +0.24999999999999997,0.04396390447892594,0.0,0.0,1.0,0.2364918904353026,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.11235831407505227,1.0,1.0,0.0,0.9262782795387031,1,0,0,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.06597336854847584,0.0,1.0,1.0,0.9185698239945864,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.39952679652250467,1.0,1.0,0.0,0.36651873073537256,0,0,0,0,1,0,0,1,1,0,0,0 +0.08928571428571427,0.03675580499614835,0.0,1.0,0.0,0.599794302613941,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.1153295917244415,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.07444701221525256,0.0,1.0,1.0,0.28637876253981037,0,0,0,0,1,0,1,0,1,0,0,0 +0.125,0.2011114779355123,1.0,1.0,1.0,0.4708753344299414,0,0,1,0,0,1,0,0,1,0,0,0 
+0.24999999999999997,0.0482007263123143,0.0,0.0,1.0,0.7904631079537381,0,1,0,0,0,1,0,0,0,0,0,1 +0.3035714285714286,0.14707824364476726,0.0,1.0,1.0,0.4849768887446002,0,0,1,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.1620446792120612,0.0,1.0,1.0,0.599794302613941,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.24573566633652472,1.0,1.0,1.0,0.5706358388636316,0,1,0,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.7871134587872785,1.0,0.0,0.0,0.6577144367789873,1,0,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.2884890502916254,1.0,1.0,1.0,0.21396727904669746,0,0,0,0,1,0,0,1,1,0,0,0 +0.3571428571428571,0.0734565863321228,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.18377902498074172,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.1380543633762518,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.10300429184549356,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.09640145262462858,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,1,0,0,0,1,0,0 +0.3571428571428571,0.09084406294706723,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.06839440959612633,0.0,1.0,1.0,0.8898642220971337,0,1,0,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.052767690106745896,1.0,1.0,0.0,0.7619553600617908,1,0,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.20518322878837902,1.0,1.0,1.0,0.40471109118578136,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.4110817651590184,0.0,1.0,1.0,0.28637876253981037,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.1870804445911742,0.0,0.0,1.0,0.2364918904353026,0,1,0,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.08968856608341587,1.0,1.0,1.0,0.49745855352837404,0,0,0,1,0,0,1,0,1,0,0,0 +0.24999999999999997,0.09304500935402223,1.0,1.0,1.0,0.6156438955835984,0,0,1,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.034334763948497854,0.0,0.0,1.0,0.8105188297717469,0,1,0,0,0,0,1,0,0,0,0,1 +0.4107142857142857,0.2718168812589414,1.0,1.0,1.0,0.8704016314829519,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.5627269725982172,1.0,1.0,1.0,0.7682673905629588,0,0,0,0,1,0,0,1,0,0,0,1 +0.3571428571428571,0.22565203037306042,0.0,1.0,1.0,0.4443019547874373,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.06195664135578298,0.0,1.0,1.0,0.8575078279322482,0,0,0,1,0,0,1,0,0,0,0,1 +0.4642857142857143,0.1786068009243975,1.0,1.0,1.0,0.49210970933040776,0,0,1,0,0,0,1,0,0,1,0,0 +0.6428571428571428,0.3162759986794322,1.0,1.0,0.0,0.20892794417015212,0,0,1,0,0,0,1,0,1,0,0,0 +0.4285714285714286,0.20666886761307363,1.0,1.0,0.0,0.30391227130738524,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.1701331572576208,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.13051612193243095,1.0,1.0,1.0,0.6156438955835984,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.11742049081104876,1.0,1.0,0.0,0.47720039923434754,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.07752833718498954,0.0,1.0,0.0,0.28637876253981037,0,1,0,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.1781666116430065,0.0,0.0,1.0,0.7832044401604136,0,0,0,1,0,0,1,0,0,0,0,1 +1.0,0.39440959612633436,1.0,1.0,1.0,0.21615402523359528,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.10938703642566304,0.0,0.0,1.0,0.7569602202607998,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.059590623968306375,1.0,1.0,0.0,0.30992319586753697,1,0,0,0,0,0,0,1,1,0,0,0 +0.3214285714285714,0.05683944095961263,0.0,0.0,1.0,0.7970917912232778,0,0,0,1,0,1,0,0,0,0,0,1 +0.03571428571428571,0.17981732144822274,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,1,0,0,0,0,0,1 
+0.14285714285714285,0.018432926158248045,1.0,0.0,1.0,0.5528336477984951,0,0,1,0,0,1,0,0,0,1,0,0 +0.625,0.25767580059425554,1.0,0.0,0.0,0.3730439023791205,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.056564322658743255,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.08352591614394189,1.0,0.0,1.0,0.8618695563470995,0,0,1,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.17051832287883792,1.0,1.0,0.0,0.3103438400976657,0,1,0,0,0,0,0,1,1,0,0,0 +0.08928571428571427,0.0627819962583911,1.0,1.0,1.0,0.7001676841419655,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.17453505007153078,1.0,1.0,1.0,0.9073342365921735,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.10432485968966655,0.0,0.0,1.0,0.8406172417730042,0,1,0,0,0,1,0,0,0,0,0,1 +0.19642857142857142,0.06988004842082095,1.0,1.0,1.0,0.9173729937673434,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.3627159678661825,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.09909761197314845,1.0,1.0,1.0,0.9073342365921735,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.15109497083746012,1.0,0.0,0.0,0.23658638755348427,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.18763068119291296,0.0,1.0,1.0,0.2488657768966351,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.06558820292725871,0.0,0.0,0.0,0.2287663808190244,0,0,0,1,0,1,0,0,1,0,0,0 +0.08928571428571427,0.04886101023440079,1.0,0.0,1.0,0.36745330533116055,0,0,1,0,0,0,1,0,1,0,0,0 +0.4642857142857143,0.3037856278199626,0.0,0.0,1.0,0.737517899393192,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.12930560140860572,1.0,1.0,0.0,0.3232634558617386,0,0,1,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.19252778694838782,0.0,1.0,1.0,0.501464322292528,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.15456146142841423,1.0,1.0,1.0,0.9393970480266846,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.022669747991636405,1.0,1.0,1.0,0.5706358388636316,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.09298998569384835,1.0,1.0,1.0,0.36651873073537256,0,1,0,0,0,0,0,1,1,0,0,0 +0.5714285714285714,0.1140640475404424,1.0,0.0,1.0,0.4028715067363472,0,0,0,1,0,1,0,0,0,1,0,0 +0.7857142857142857,0.6577528337184989,1.0,1.0,1.0,0.3131231838824953,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.28925938153405967,1.0,1.0,1.0,0.8456935742394528,0,0,0,0,1,0,0,1,0,0,0,1 +0.24999999999999997,0.010069329811819083,0.0,0.0,0.0,0.7901372777915644,1,0,0,0,0,1,0,0,0,0,0,1 +0.03571428571428571,0.23995818201826785,0.0,1.0,0.0,0.9121910841333913,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.13821943435677342,0.0,0.0,0.0,0.49006922670215497,0,0,0,0,1,0,1,0,0,1,0,0 +0.4642857142857143,0.2213051612193243,1.0,1.0,1.0,0.8302239413120919,0,0,1,0,0,0,1,0,0,0,0,1 +0.625,0.6333223286012986,1.0,1.0,1.0,0.49778098104998286,0,0,0,1,0,1,0,0,0,1,0,0 +0.5714285714285714,0.10867172884340266,1.0,1.0,0.0,0.39646283586920644,0,0,0,0,1,0,0,1,0,1,0,0 +0.14285714285714285,0.06734896005282272,0.0,1.0,1.0,0.6244595378782976,0,1,0,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06338725652030373,0.0,1.0,1.0,0.9086619979671742,0,0,0,1,0,1,0,0,0,0,0,1 +0.24999999999999997,0.09150434686915374,1.0,1.0,1.0,0.6598740118142182,0,1,0,0,0,0,1,0,0,1,0,0 +0.17857142857142858,0.030373060415978873,1.0,1.0,1.0,0.9052897264392974,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.05617915703752613,1.0,1.0,0.0,0.5758465012701344,0,0,1,0,0,0,0,1,0,0,1,0 +0.3571428571428571,0.055463849455265765,0.0,1.0,1.0,0.45368904432201734,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.18240343347639487,1.0,1.0,1.0,0.9264124244522206,0,1,0,0,0,0,1,0,0,0,0,1 
+0.14285714285714285,0.17255419830527127,1.0,1.0,0.0,0.42657246837101115,0,0,0,0,1,0,0,1,1,0,0,0 +0.14285714285714285,0.1098822493672279,0.0,1.0,1.0,0.5785653349165971,0,0,1,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.30560140860570045,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.03906679872345109,0.0,1.0,1.0,0.8976829391418685,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.044349070100143065,1.0,1.0,0.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.6785714285714286,0.3322878837900297,1.0,1.0,1.0,0.42442143235694413,0,1,0,0,0,0,1,0,0,0,1,0 +0.08928571428571427,0.13772422141520854,1.0,1.0,1.0,0.9345335095933344,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.10784637394079453,1.0,0.0,0.0,0.8715443022869169,0,0,1,0,0,1,0,0,0,0,0,1 +0.7857142857142857,0.5528227137669197,1.0,1.0,0.0,0.16857126089732433,0,0,0,1,0,0,0,1,1,0,0,0 +0.5714285714285714,0.7740728513260702,1.0,1.0,0.0,0.39646283586920644,0,0,0,0,1,0,0,1,0,1,0,0 +0.4642857142857143,0.2200396170353252,1.0,1.0,0.0,0.5394438034973912,1,0,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.03736106525806096,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.08928571428571427,0.039011775063277215,0.0,1.0,0.0,0.599794302613941,0,0,1,0,0,0,1,0,0,1,0,0 +0.3035714285714286,0.13931990756025092,0.0,1.0,1.0,0.8721840349758069,0,0,0,1,0,0,1,0,0,0,0,1 +0.7857142857142857,0.44613183668977663,0.0,1.0,1.0,0.26164945103668286,0,1,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.11285352701661715,0.0,0.0,1.0,0.8461497236114719,0,1,0,0,0,1,0,0,0,0,0,1 +0.7857142857142857,0.18245845713656875,1.0,1.0,1.0,0.40471109118578136,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.20446792120611865,0.0,1.0,0.0,0.3403565857713484,0,0,0,0,1,1,0,0,1,0,0,0 +0.03571428571428571,0.05430835259161439,0.0,1.0,1.0,0.895580236194126,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.03994717728623308,0.0,0.0,0.0,0.2364918904353026,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.03659073401562672,0.0,1.0,0.0,0.28637876253981037,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.03835149114119071,1.0,1.0,0.0,0.5813781424290714,0,0,0,1,0,0,0,1,0,0,1,0 +0.3571428571428571,0.14168592494772753,1.0,1.0,1.0,0.6500940471086876,0,0,0,1,0,0,1,0,0,1,0,0 +0.3035714285714286,0.11213821943435677,0.0,0.0,1.0,0.7688898730617176,0,1,0,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.145867723120942,0.0,1.0,1.0,0.8978106855080499,0,1,0,0,0,1,0,0,0,0,0,1 +0.7321428571428571,0.23803235391218225,1.0,1.0,1.0,0.38456298371763664,1,0,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.3845053372950369,1.0,1.0,1.0,0.7359170768632337,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.13733905579399142,1.0,1.0,0.0,0.27443988356010096,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.08215032463959503,1.0,1.0,1.0,0.6014238410536501,0,0,0,0,1,0,1,0,0,1,0,0 +0.08928571428571427,0.06140640475404424,0.0,0.0,0.0,0.31109338775758766,0,1,0,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.06905469351821283,1.0,1.0,1.0,0.8533241696172105,0,0,1,0,0,0,0,1,0,0,0,1 +0.03571428571428571,0.005117200396170352,0.0,1.0,1.0,0.4603229620470974,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.10856168152305491,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.06850445691647408,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.11428414218113787,0.0,0.0,0.0,0.4681806327003752,0,1,0,0,0,0,1,0,0,0,1,0 +0.7321428571428571,0.6364036535710355,1.0,1.0,0.0,0.20599972812749537,0,0,0,0,1,1,0,0,1,0,0,0 +0.14285714285714285,0.06789919665456146,1.0,0.0,0.0,0.5528336477984951,0,0,1,0,0,0,1,0,0,1,0,0 
+0.5714285714285714,0.4844833278309673,1.0,1.0,1.0,0.7324169714111652,0,0,1,0,0,0,0,1,0,0,0,1 +0.14285714285714285,0.07730824254429404,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.06619346318917134,0.0,1.0,1.0,0.8627147751385414,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.053813139650049524,0.0,0.0,0.0,0.24744012641724394,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.037526136238582586,0.0,1.0,1.0,0.7170669202424681,0,0,0,1,0,0,1,0,0,1,0,0 +0.5714285714285714,0.4311654011224827,1.0,1.0,0.0,0.47720039923434754,0,0,0,0,1,0,1,0,0,1,0,0 +0.03571428571428571,0.06030593155056674,1.0,1.0,1.0,0.9209417015586914,0,0,0,0,1,0,0,1,0,0,0,1 +0.5714285714285714,0.1437218003741609,1.0,1.0,1.0,0.39646283586920644,0,0,0,0,1,0,0,1,0,1,0,0 +0.3571428571428571,0.15808297567954221,0.0,1.0,0.0,0.28637876253981037,0,1,0,0,0,0,1,0,1,0,0,0 +1.0,0.3938593595245956,0.0,0.0,0.0,0.1140139540448384,0,1,0,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.055793991416309016,1.0,0.0,0.0,0.5216017218323117,0,0,1,0,0,1,0,0,0,1,0,0 +0.03571428571428571,0.07175085286673269,0.0,0.0,1.0,0.8847736592891744,0,0,0,1,0,1,0,0,0,0,0,1 +0.3035714285714286,0.18719049191152196,1.0,1.0,1.0,0.678114089607729,0,0,0,1,0,0,1,0,0,1,0,0 +0.7857142857142857,0.20254209310003302,1.0,1.0,0.0,0.16857126089732433,0,0,0,1,0,0,0,1,1,0,0,0 +0.03571428571428571,0.061131286453174866,1.0,1.0,1.0,0.5706358388636316,0,1,0,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.052437548145702645,1.0,1.0,1.0,0.5706358388636316,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.18031253438978762,1.0,1.0,1.0,0.9264124244522206,0,1,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.4184549356223176,0.0,0.0,0.0,0.594377341023918,0,0,1,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.17095851216022892,0.0,1.0,1.0,0.31273337858185585,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.10179377132166832,1.0,1.0,0.0,0.624582242671011,0,0,1,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.0505667436997909,1.0,1.0,1.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.47854077253218885,1.0,1.0,1.0,0.8501233211422209,0,0,0,1,0,0,1,0,0,0,0,1 +0.24999999999999997,0.09276989105315285,1.0,0.0,1.0,0.3727423047912524,0,0,0,1,0,1,0,0,1,0,0,0 +0.3571428571428571,0.1794871794871795,1.0,1.0,1.0,0.6500940471086876,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.3695939253879168,1.0,1.0,1.0,0.6500940471086876,0,0,0,1,0,1,0,0,0,1,0,0 +0.4642857142857143,0.11461428414218112,1.0,1.0,1.0,0.85555465817234,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.09843732805106195,0.0,0.0,0.0,0.20364878828289232,0,0,1,0,0,1,0,0,1,0,0,0 +0.5714285714285714,0.49636843842852424,1.0,0.0,0.0,0.2184939341622545,0,0,0,1,0,0,1,0,1,0,0,0 +0.3035714285714286,0.07274127874986244,0.0,1.0,1.0,0.8471418368374785,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.011224826675470454,1.0,0.0,1.0,0.6581569155182169,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.33311323869263787,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.10713106635853417,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.7857142857142857,0.40601958842302194,1.0,1.0,1.0,0.7359170768632337,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.05639925167822163,0.0,0.0,1.0,0.5204868269574562,0,1,0,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.37795752173434577,1.0,1.0,0.0,0.11967916113567616,0,0,1,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.6783867062837019,0.0,1.0,0.0,0.4107052500869476,0,0,0,0,1,0,0,1,0,1,0,0 +0.24999999999999997,0.1391548365797293,1.0,0.0,1.0,0.4901999941597845,0,0,1,0,0,1,0,0,0,1,0,0 
+0.0,0.06894464619786508,1.0,1.0,1.0,0.9558919804312949,0,0,0,1,0,0,1,0,0,0,0,1 +0.07142857142857142,0.05430835259161439,0.0,0.0,0.0,0.4840535155941271,0,0,1,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.20160669087707717,1.0,1.0,0.0,0.6970354972308322,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.15472653240893586,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,1,0,0,0,0,0,1 +0.5714285714285714,0.09370529327610873,1.0,1.0,0.0,0.39646283586920644,0,0,0,0,1,0,0,1,0,1,0,0 +0.4642857142857143,0.19709475074281943,1.0,0.0,1.0,0.4133255669397803,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.0708704743039507,0.0,1.0,1.0,0.9121910841333913,0,1,0,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.10223396060305931,1.0,1.0,1.0,0.9522216912358249,0,0,0,1,0,1,0,0,0,0,0,1 +0.03571428571428571,0.00979421151094971,0.0,1.0,1.0,0.4603229620470974,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.2313744910311434,1.0,1.0,0.0,0.4297441010936714,0,0,1,0,0,0,1,0,0,1,0,0 +0.3035714285714286,0.1138439528997469,1.0,1.0,0.0,0.6399187311966064,0,1,0,0,0,1,0,0,0,0,1,0 +0.14285714285714285,0.05023660173874765,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.037746230879278087,1.0,1.0,1.0,0.9345335095933344,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.12231759656652359,1.0,0.0,0.0,0.3250877505238663,1,0,0,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.07257620776934082,1.0,1.0,1.0,0.9173729937673434,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.1522504677011115,0.0,1.0,1.0,0.6155200677110325,0,1,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.11450423682183337,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.10157367668097282,1.0,1.0,1.0,0.9393970480266846,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.04330362055683944,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,0,1,0,0,1,0,0 +0.03571428571428571,0.05425332893144051,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.21464729833828547,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.18482447452404535,0.0,1.0,1.0,0.5962182584447656,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.06393749312204247,0.0,1.0,1.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1 +0.125,0.20298228238142402,1.0,1.0,1.0,0.4708753344299414,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.0702101903818642,1.0,1.0,1.0,0.42657246837101115,0,0,0,0,1,0,0,1,1,0,0,0 +0.3214285714285714,0.1334323759216463,1.0,1.0,1.0,0.8922523883382062,0,0,0,0,1,0,1,0,0,0,0,1 +0.4642857142857143,0.2184989545504567,0.0,1.0,1.0,0.383421164823506,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.14713326730494114,1.0,1.0,1.0,0.34047407781017125,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.08622207549246176,0.0,1.0,1.0,0.8384025299994181,0,0,1,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.0536480686695279,1.0,1.0,1.0,0.6993390090705507,0,0,1,0,0,0,1,0,0,0,1,0 +0.19642857142857142,0.08776273797733025,0.0,1.0,1.0,0.3691093043872932,0,1,0,0,0,1,0,0,1,0,0,0 +0.03571428571428571,0.0,0.0,1.0,1.0,0.895580236194126,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.022559700671288655,1.0,0.0,1.0,0.9228647175807431,0,0,0,1,0,1,0,0,0,0,0,1 +0.03571428571428571,0.025255860019808517,1.0,1.0,1.0,0.7691184526170824,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.8587542643336634,1.0,1.0,1.0,0.2740485661497681,1,0,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.08732254869593925,0.0,1.0,0.0,0.46182459874957915,0,0,0,1,0,0,0,1,0,1,0,0 +0.1607142857142857,0.08512160228898426,1.0,1.0,1.0,0.4978064484847755,0,1,0,0,0,0,1,0,1,0,0,0 
+0.03571428571428571,0.010894684714427203,0.0,0.0,0.0,0.3386431143464047,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.16303510509519095,1.0,0.0,1.0,0.5380335258497126,0,1,0,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.12815010454495435,1.0,1.0,0.0,0.5082778240326536,0,1,0,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.05216242984483327,0.0,1.0,0.0,0.3803633753089816,0,0,0,0,1,0,0,1,1,0,0,0 +0.3035714285714286,0.26152745680642675,0.0,1.0,0.0,0.8206450815998698,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.11461428414218112,1.0,1.0,1.0,0.6014238410536501,0,1,0,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.055683944095961266,1.0,1.0,1.0,0.9318352763505735,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.049796412457356665,1.0,1.0,1.0,0.7138311294496358,0,0,0,0,1,0,1,0,0,1,0,0 +0.10714285714285712,0.03543523715197534,0.0,1.0,1.0,0.9153811453083084,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.16952789699570817,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.04748541873005392,0.0,1.0,1.0,0.5069022744695886,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.02580609662154727,1.0,1.0,0.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.09981291955540883,0.0,1.0,0.0,0.49148753840738346,1,0,0,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.18741058655221746,1.0,1.0,1.0,0.5493424244597425,0,0,0,0,1,0,1,0,0,0,1,0 +0.3571428571428571,0.3502255970067129,1.0,1.0,1.0,0.30992319586753697,1,0,0,0,0,0,0,1,1,0,0,0 +0.24999999999999997,0.16391548365797293,1.0,1.0,1.0,0.9071687984489271,1,0,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.17018818091779467,1.0,1.0,1.0,0.868365131914781,0,0,1,0,0,0,0,1,0,0,0,1 +0.4642857142857143,0.1572576207769341,1.0,1.0,0.0,0.32719918796985015,0,1,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.011444921316165951,1.0,0.0,1.0,0.5836541255891218,0,0,1,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.06718388907230109,1.0,1.0,1.0,0.868365131914781,0,0,1,0,0,0,0,1,0,0,0,1 +0.125,0.05898536370639375,0.0,1.0,1.0,0.5795267454297757,0,0,1,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.057114559260482006,1.0,1.0,1.0,0.5396118319655506,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.025145812699460767,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.058215032463959496,1.0,1.0,1.0,0.6874873703107746,0,0,0,0,1,0,1,0,0,1,0,0 +0.08928571428571427,0.15538681633102236,1.0,1.0,1.0,0.921786795969523,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.12440849565313085,0.0,0.0,1.0,0.259549729145002,1,0,0,0,0,1,0,0,1,0,0,0 +0.03571428571428571,0.0819852536590734,1.0,1.0,1.0,0.9418146463783325,0,0,0,0,1,1,0,0,0,0,0,1 +0.14285714285714285,0.04803565533179267,0.0,1.0,0.0,0.8696334095426655,0,0,1,0,0,1,0,0,0,0,0,1 +0.125,0.07301639705073182,0.0,0.0,1.0,0.5005400663825238,0,1,0,0,0,0,1,0,0,1,0,0 +0.7321428571428571,0.08776273797733025,1.0,0.0,0.0,0.08470200286988733,0,0,1,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.09783206778914934,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.4107142857142857,0.4439308902828216,0.0,1.0,0.0,0.38066739707842107,0,0,0,0,1,0,0,1,0,1,0,0 +0.14285714285714285,0.05749972488169913,0.0,0.0,0.0,0.49006922670215497,0,1,0,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.6431165401122483,1.0,1.0,0.0,0.5547218618582729,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.08633212281280951,1.0,1.0,0.0,0.7324169714111652,0,0,1,0,0,0,0,1,0,0,0,1 +0.19642857142857142,0.0675690546935182,1.0,1.0,1.0,0.4768786443351435,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.06965995378012545,0.0,1.0,1.0,0.8013985500332685,0,0,1,0,0,0,1,0,0,0,0,1 
+0.08928571428571427,0.1037746230879278,1.0,0.0,1.0,0.36745330533116055,0,0,1,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.19599427753934193,0.0,0.0,1.0,0.8105188297717469,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.21541762958071972,1.0,1.0,0.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.20947507428194126,1.0,1.0,0.0,0.6500940471086876,0,0,0,1,0,0,1,0,0,1,0,0 +0.7857142857142857,0.543468691537361,1.0,1.0,0.0,0.6234592356361524,0,0,1,0,0,0,0,1,0,0,0,1 +0.03571428571428571,0.012875536480686695,1.0,1.0,1.0,0.7979365203848136,0,0,0,1,0,0,1,0,0,1,0,0 +1.0,0.33223286012985587,1.0,1.0,0.0,0.16559346885680104,0,0,1,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.36436667767139874,1.0,1.0,0.0,0.38472306870888867,0,1,0,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.07054033234290745,0.0,1.0,1.0,0.8547131179732448,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.10366457576758006,1.0,1.0,1.0,0.921786795969523,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.04715527676901067,1.0,0.0,1.0,0.3387603186528358,0,0,1,0,0,1,0,0,1,0,0,0 +0.2857142857142857,0.10795642126114229,1.0,1.0,1.0,0.47653149674170675,0,0,0,1,0,0,1,0,1,0,0,0 +0.19642857142857142,0.16154946627049632,0.0,0.0,1.0,0.8404307054775538,0,0,0,1,0,1,0,0,0,0,0,1 +0.7678571428571428,0.5762077693408165,0.0,1.0,1.0,0.13279434507188115,0,1,0,0,0,0,1,0,1,0,0,0 +0.053571428571428575,0.02641135688345989,1.0,1.0,1.0,0.9394762243447705,0,0,0,0,1,1,0,0,0,0,0,1 +0.14285714285714285,0.10030813249697369,0.0,1.0,1.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.0176625949158138,1.0,1.0,1.0,0.4148665750517137,0,0,0,0,1,0,1,0,1,0,0,0 +0.19642857142857142,0.11609992296687575,1.0,1.0,1.0,0.6535565228181367,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.06019588423021899,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.6785714285714286,0.3809838230439089,0.0,1.0,0.0,0.18862334019529733,0,0,0,1,0,0,1,0,1,0,0,0 +0.6785714285714286,0.2501375591504347,1.0,1.0,1.0,0.3466918762732373,0,0,0,0,1,0,0,1,0,0,1,0 +0.6785714285714286,0.20441289754594477,1.0,1.0,0.0,0.22731351394237334,0,1,0,0,0,0,1,0,1,0,0,0 +0.125,0.38395510069329813,1.0,1.0,1.0,0.915529351477635,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.05315285572796302,1.0,1.0,0.0,0.6014238410536501,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.13744910311433917,0.0,1.0,1.0,0.8532586635277519,0,0,0,0,1,0,0,1,0,0,0,1 +0.6785714285714286,0.49702872235061074,1.0,1.0,1.0,0.33770845427133345,1,0,0,0,0,0,0,1,0,1,0,0 +0.4107142857142857,0.23528117090348852,1.0,1.0,1.0,0.8704016314829519,0,1,0,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.2750082535490261,1.0,1.0,1.0,0.8769881149919502,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.16127434796962695,0.0,1.0,1.0,0.8301501720360537,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.02569604930119952,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.055683944095961266,1.0,0.0,1.0,0.8618695563470995,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.08990866072411137,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.09210960713106636,0.0,1.0,0.0,0.5545926067069147,0,1,0,0,0,1,0,0,0,1,0,0 +0.7857142857142857,0.39760096841641906,1.0,1.0,1.0,0.16857126089732433,0,0,0,1,0,0,0,1,1,0,0,0 +0.14285714285714285,0.03791130185979971,1.0,1.0,0.0,0.7613436169716516,0,0,0,1,0,0,1,0,0,0,1,0 +0.14285714285714285,0.03251898316275999,0.0,0.0,1.0,0.5419816737959281,0,0,0,1,0,1,0,0,0,1,0,0 +0.3571428571428571,0.21255639925167824,0.0,1.0,0.0,0.491978924064202,0,1,0,0,0,1,0,0,0,1,0,0 
+0.3571428571428571,0.2697259821723341,1.0,0.0,1.0,0.5367070201327678,0,0,0,1,0,0,1,0,0,0,1,0 +0.5178571428571429,0.22180037416088919,0.0,0.0,0.0,0.12004837083863333,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.07246616044899307,0.0,0.0,1.0,0.44241615651549826,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.004897105755474855,1.0,1.0,1.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0 +0.19642857142857142,0.05639925167822163,0.0,0.0,0.0,0.2247881972110395,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.021404203807637284,1.0,1.0,0.0,0.6731420559668704,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.1127434796962694,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.086552217453505,1.0,1.0,0.0,0.3842577810457014,1,0,0,0,0,0,1,0,1,0,0,0 +0.7857142857142857,0.41339275888632115,1.0,1.0,0.0,0.14138446961748627,0,0,0,0,1,0,0,1,1,0,0,0 +0.03571428571428571,0.07400682293386156,1.0,1.0,1.0,0.9522216912358249,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.007428194123473095,0.0,1.0,1.0,0.44959115198776406,0,0,0,1,0,0,1,0,1,0,0,0 +0.14285714285714285,0.028942445251458126,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.15511169803015298,1.0,1.0,1.0,0.520597760213557,0,0,0,0,1,0,0,1,0,1,0,0 +0.08928571428571427,0.06129635743369649,1.0,1.0,1.0,0.590693107733845,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.0534279740288324,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 +0.07142857142857142,0.17321448222735777,1.0,1.0,1.0,0.6007809114857595,0,0,0,1,0,0,1,0,1,0,0,0 +0.24999999999999997,0.21475734565863322,1.0,1.0,0.0,0.3989479019699039,0,0,1,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.08407615274568064,0.0,1.0,0.0,0.5853827741356347,0,1,0,0,0,1,0,0,0,1,0,0 +0.08928571428571427,0.05425332893144051,0.0,0.0,1.0,0.8461497236114719,0,1,0,0,0,1,0,0,0,0,0,1 +0.03571428571428571,0.08341586882359414,1.0,0.0,1.0,0.8891646790727171,0,0,1,0,0,1,0,0,0,0,0,1 +0.4642857142857143,0.43039506988004844,1.0,0.0,1.0,0.22597418414948542,0,1,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.268460437988335,0.0,1.0,0.0,0.5664245567595892,0,0,0,0,1,0,0,1,0,1,0,0 +0.3571428571428571,0.4620886981402003,1.0,1.0,0.0,0.8839295784323556,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.21563772422141522,1.0,1.0,1.0,0.34047407781017125,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.09271486739297898,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.11813579839330911,1.0,1.0,0.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.06652360515021459,0.0,1.0,1.0,0.8547131179732448,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.023935292175635527,1.0,0.0,0.0,0.6483538884899454,0,0,0,1,0,0,1,0,0,1,0,0 +0.7857142857142857,0.2396280400572246,1.0,0.0,0.0,0.08995843040536927,0,0,0,0,1,0,0,1,1,0,0,0 +0.125,0.10410476504897105,1.0,1.0,1.0,0.9292175567150299,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.03851656212171234,1.0,1.0,0.0,0.4768786443351435,0,0,0,0,1,1,0,0,1,0,0,0 +0.21428571428571427,0.050896885660834154,1.0,1.0,1.0,0.602425169638959,1,0,0,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.06866952789699571,0.0,0.0,1.0,0.24744012641724394,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.09541102674149884,1.0,1.0,1.0,0.8533241696172105,0,0,1,0,0,0,0,1,0,0,0,1 +0.5714285714285714,0.19555408825795093,0.0,1.0,0.0,0.3259888088958532,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.09557609772202047,1.0,0.0,0.0,0.23658638755348427,0,0,1,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.21398701441619897,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 
+0.14285714285714285,0.024815670738417523,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.8928571428571428,0.5054473423572136,1.0,1.0,1.0,0.6415019923339554,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.16176956091119182,0.0,0.0,0.0,0.20364878828289232,0,0,1,0,0,0,1,0,1,0,0,0 +0.1607142857142857,0.034774953229888855,1.0,0.0,1.0,0.5894933892681636,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.13810938703642567,1.0,1.0,1.0,0.520597760213557,0,0,0,0,1,0,0,1,0,1,0,0 +0.19642857142857142,0.10845163420270716,0.0,0.0,1.0,0.7793274409005319,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.020688896225376913,0.0,0.0,0.0,0.24744012641724394,0,0,1,0,0,0,1,0,1,0,0,0 +0.7857142857142857,1.0,0.0,1.0,0.0,0.22634832395437568,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.0532078793881369,1.0,1.0,0.0,0.3989479019699039,0,0,1,0,0,0,1,0,1,0,0,0 +0.053571428571428575,0.12798503356443272,1.0,1.0,1.0,0.7174552542410282,0,0,1,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.13189171343677783,1.0,1.0,1.0,0.5231864157229879,0,0,1,0,0,1,0,0,1,0,0,0 +0.4107142857142857,0.12765489160338947,0.0,0.0,0.0,0.6811557639743221,0,0,1,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.1594585671838891,1.0,1.0,1.0,0.5640737884995887,0,0,1,0,0,0,1,0,0,0,1,0 +0.03571428571428571,0.022889842632331906,0.0,1.0,1.0,0.5122497385912862,0,0,0,1,0,0,1,0,1,0,0,0 +0.14285714285714285,0.051447122262572906,0.0,1.0,1.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1 +0.4642857142857143,0.3138549576317817,1.0,1.0,1.0,0.8794147982207655,0,0,0,1,0,0,1,0,0,0,0,1 +0.7321428571428571,0.24738637614174097,1.0,0.0,0.0,0.2731769653496512,0,1,0,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.0529877847474414,1.0,1.0,1.0,0.9173729937673434,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.09067899196654561,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.6239683063717398,0.0,0.0,0.0,0.46989347577510143,0,0,1,0,0,1,0,0,0,0,0,1 +0.0,0.01931330472103004,0.0,0.0,1.0,0.8714857078947565,0,1,0,0,0,1,0,0,0,0,0,1 +0.24999999999999997,0.05474854187300539,1.0,1.0,0.0,0.6156438955835984,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.30218994167492025,1.0,1.0,1.0,0.8501233211422209,0,0,0,1,0,0,1,0,0,0,0,1 +0.17857142857142858,0.06382744580169472,1.0,1.0,1.0,0.6964156448050416,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.035765379113018594,1.0,0.0,0.0,0.3387603186528358,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.0061626499394739735,0.0,1.0,1.0,0.895580236194126,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.09513590844062947,1.0,1.0,1.0,0.9173729937673434,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.17051832287883792,0.0,1.0,0.0,0.709384236480772,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.030097942115109497,0.0,1.0,0.0,0.8898642220971337,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.014746340926598437,0.0,1.0,1.0,0.895580236194126,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.1129635743369649,1.0,1.0,0.0,0.38472306870888867,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.1354682513480797,1.0,1.0,0.0,0.27443988356010096,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.06734896005282272,1.0,1.0,1.0,0.8839295784323556,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.08638714647298339,1.0,1.0,1.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09227467811158797,0.0,1.0,1.0,0.8013985500332685,0,0,1,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.06509298998569385,0.0,0.0,1.0,0.2247881972110395,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.03895675140310334,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,0,1,0,0,1,0,0 
+0.5714285714285714,0.19725982172334106,0.0,1.0,1.0,0.7472556622006505,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.028227137669197756,0.0,1.0,1.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.15010454495433037,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.05727963024100363,0.0,1.0,1.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.0233300319137229,1.0,0.0,0.0,0.43311856998291953,0,0,0,1,0,0,1,0,1,0,0,0 +1.0,0.3453835149114119,1.0,1.0,1.0,0.5003985149708152,0,0,1,0,0,0,0,1,0,0,0,1 +0.14285714285714285,0.03994717728623308,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.20353251898316277,1.0,1.0,1.0,0.7380360665955924,0,1,0,0,0,0,1,0,0,0,1,0 +0.10714285714285712,0.043908880818752064,1.0,0.0,1.0,0.5734408207763663,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.13315725762077696,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.4642857142857143,0.23841751953339935,1.0,1.0,1.0,0.2864857137967765,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.11692527786948388,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.056784417299438755,0.0,0.0,0.0,0.24744012641724394,0,0,1,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.19296797622977882,0.0,1.0,1.0,0.7786348012503493,0,0,0,0,1,0,0,1,0,0,0,1 +0.3571428571428571,0.2195994277539342,1.0,1.0,0.0,0.5547218618582729,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.3482447452404534,1.0,1.0,1.0,0.30992319586753697,1,0,0,0,0,0,0,1,1,0,0,0 +0.14285714285714285,0.10294926818531969,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.10570045119401342,0.0,1.0,1.0,0.8898642220971337,0,1,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.18399911962143722,0.0,1.0,1.0,0.8769316531253295,0,1,0,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.24408495653130846,1.0,1.0,1.0,0.7324169714111652,0,0,1,0,0,0,0,1,0,0,0,1 +0.19642857142857142,0.2424892703862661,1.0,1.0,1.0,0.9016367060401769,0,0,1,0,0,0,1,0,0,0,0,1 +0.17857142857142858,0.48024650599757895,1.0,1.0,0.0,0.4873369967986317,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.04429404643996919,0.0,1.0,1.0,0.8627147751385414,0,1,0,0,0,0,1,0,0,0,0,1 +0.7857142857142857,0.15412127214702323,1.0,1.0,0.0,0.15889026804756212,0,0,1,0,0,0,1,0,1,0,0,0 +0.4642857142857143,0.17558049961483438,0.0,0.0,0.0,0.27183024502308256,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.26961593485198637,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.09425552987784747,1.0,1.0,1.0,0.9393970480266846,0,0,0,1,0,1,0,0,0,0,0,1 +0.24999999999999997,0.18377902498074172,1.0,1.0,1.0,0.6594326055741871,1,0,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.3076923076923077,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.06707384175195334,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,1,0,0,0,0,0,1 +0.3571428571428571,0.12947067238912735,0.0,1.0,1.0,0.8013985500332685,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.18713546825134808,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.15951359084406297,1.0,0.0,1.0,0.2126849403417187,0,1,0,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.21239132827115662,1.0,0.0,0.0,0.2729209608367981,0,0,0,0,1,1,0,0,1,0,0,0 +0.08928571428571427,0.06531308462638935,1.0,1.0,0.0,0.7769155728885463,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.062011665015956854,0.0,1.0,1.0,0.419918901878422,0,0,0,0,1,0,0,1,0,0,1,0 +0.08928571428571427,0.06448772972378122,1.0,1.0,0.0,0.45755650252851443,0,1,0,0,0,0,0,1,1,0,0,0 
+0.4642857142857143,0.18553978210630573,0.0,1.0,1.0,0.23787440014014885,0,0,0,0,1,1,0,0,1,0,0,0 +0.3571428571428571,0.17029822823814242,1.0,1.0,0.0,0.38472306870888867,0,0,0,0,1,1,0,0,1,0,0,0 +0.14285714285714285,0.020633872565203038,1.0,1.0,1.0,0.7138311294496358,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.040332342907450205,0.0,0.0,1.0,0.49006922670215497,0,1,0,0,0,1,0,0,0,1,0,0 +0.4642857142857143,0.15081985253659075,0.0,0.0,1.0,0.31136986465762495,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.049466270496313414,1.0,1.0,1.0,0.8899154955245911,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.001430615164520744,1.0,0.0,1.0,0.5836541255891218,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.06168152305491362,0.0,0.0,0.0,0.16589905516842904,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.029987894794761747,0.0,1.0,0.0,0.39882243364250436,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.056784417299438755,0.0,1.0,0.0,0.2488657768966351,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.0064927919005172245,1.0,1.0,1.0,0.7623117828116139,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.055463849455265765,1.0,0.0,1.0,0.8491471130930849,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.10652580609662154,0.0,1.0,1.0,0.8976829391418685,0,0,0,1,0,1,0,0,0,0,0,1 +0.3571428571428571,0.14223616154946628,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.18609001870804448,0.0,0.0,1.0,0.16589905516842904,0,0,1,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.05838010344448112,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.11543963904478925,1.0,1.0,1.0,0.23796927750170668,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.010069329811819083,0.0,0.0,0.0,0.5527042865014291,0,1,0,0,0,1,0,0,0,1,0,0 +0.19642857142857142,0.20408275558490152,0.0,1.0,0.0,0.3257054908520209,0,0,1,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.13101133487399583,0.0,0.0,1.0,0.41167927397905774,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.12077693408165512,1.0,1.0,1.0,0.9264124244522206,0,1,0,0,0,1,0,0,0,0,0,1 +0.10714285714285712,0.04335864421701331,1.0,1.0,1.0,0.5805283757278361,0,0,0,1,0,0,1,0,1,0,0,0 +0.7857142857142857,0.473643666776714,1.0,1.0,1.0,0.7117589057670359,0,0,0,1,0,0,0,1,0,0,0,1 +0.3571428571428571,0.05639925167822163,1.0,1.0,1.0,0.5640737884995887,0,0,1,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.3476394849785408,1.0,0.0,1.0,0.23658638755348427,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.06019588423021899,1.0,1.0,0.0,0.6586754162607802,0,0,0,1,0,0,1,0,0,0,1,0 +0.4107142857142857,0.3144602178936943,1.0,1.0,1.0,0.5709518098205263,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.020248706943985912,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.10714285714285712,0.026246285902938263,1.0,1.0,1.0,0.9078496135630285,0,0,0,0,1,0,0,1,0,0,0,1 +0.19642857142857142,0.06569825024760646,1.0,0.0,1.0,0.5690789555483984,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.18542973478595798,1.0,1.0,1.0,0.23796927750170668,0,0,1,0,0,0,1,0,1,0,0,0 +0.10714285714285712,0.10955210740618465,1.0,1.0,1.0,0.5291896437274354,0,1,0,0,0,1,0,0,1,0,0,0 +1.0,0.799603829646748,0.0,1.0,0.0,0.13365287548774402,0,0,0,0,1,0,0,1,0,1,0,0 +0.3571428571428571,0.09750192582810609,0.0,1.0,1.0,0.8013985500332685,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.019753494002421042,0.0,1.0,0.0,0.6244595378782976,0,1,0,0,0,0,1,0,0,0,1,0 +0.3571428571428571,0.15247056234180698,1.0,0.0,1.0,0.23658638755348427,0,0,1,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.2272477165181028,1.0,1.0,1.0,0.3989479019699039,0,0,1,0,0,0,1,0,1,0,0,0 
+0.10714285714285712,0.05827005612413337,1.0,1.0,0.0,0.9187129412749173,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.14185099592824915,1.0,0.0,1.0,0.23658638755348427,0,0,1,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.08501155496863651,0.0,1.0,1.0,0.5545926067069147,0,0,0,0,1,1,0,0,0,1,0,0 +0.24999999999999997,0.07059535600308132,1.0,1.0,0.0,0.9073342365921735,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.26702982282381427,1.0,1.0,1.0,0.8456935742394528,0,1,0,0,0,0,0,1,0,0,0,1 +0.4642857142857143,0.5707604269836029,1.0,1.0,1.0,0.2592550937927965,0,0,0,0,1,0,0,1,1,0,0,0 +0.14285714285714285,0.06250687795752173,0.0,1.0,0.0,0.8696334095426655,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.23885770881479035,1.0,1.0,0.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.17326950588753165,1.0,1.0,1.0,0.7215189623993398,0,0,0,0,1,0,1,0,0,0,1,0 +0.03571428571428571,0.04385385715857819,0.0,1.0,1.0,0.6383519380348411,0,0,1,0,0,0,1,0,0,0,1,0 +0.7857142857142857,0.4091009133927589,0.0,1.0,0.0,0.15312328184273788,0,0,0,1,0,1,0,0,1,0,0,0
diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-val.csv b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-val.csv
new file mode 100644
index 0000000..4856718
--- /dev/null
+++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/RS_NS_NI_MS_NB_HE_ME_RW_OLR_NP-val.csv
@@ -0,0 +1,301 @@
+month,credit_amount,sex,age,credit,pred_credit,employment=A71,employment=A72,employment=A73,employment=A74,employment=A75,housing=A151,housing=A152,housing=A153,status=A11,status=A12,status=A13,status=A14
+0.24999999999999997,0.07169582920655881,1.0,1.0,0.0,0.3989479019699039,0,0,1,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.06107626279300099,1.0,1.0,1.0,0.9016367060401769,0,0,1,0,0,0,1,0,0,0,0,1 +0.07142857142857142,0.02806206668867613,0.0,1.0,1.0,0.6997655328413775,0,0,0,1,0,0,1,0,0,1,0,0 +0.24999999999999997,0.14575767580059426,1.0,1.0,1.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.7321428571428571,0.15302079894354573,1.0,0.0,0.0,0.23681941072883386,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.24551557169582922,1.0,1.0,1.0,0.5547218618582729,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.090073731704633,1.0,1.0,1.0,0.6156438955835984,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.11131286453174864,1.0,1.0,1.0,0.5291642625196091,0,0,0,1,0,0,1,0,0,1,0,0 +0.14285714285714285,0.035105095190932106,1.0,1.0,0.0,0.7138311294496358,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.07538241443820842,0.0,1.0,1.0,0.3538845096386129,0,0,1,0,0,0,1,0,1,0,0,0 +0.19642857142857142,0.1352481567073842,1.0,1.0,1.0,0.9173729937673434,0,1,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.03389457466710685,1.0,0.0,1.0,0.4901999941597845,0,0,1,0,0,0,1,0,0,1,0,0 +0.07142857142857142,0.06404754044239022,1.0,1.0,1.0,0.7088870911194225,0,0,1,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.05849015076482887,1.0,0.0,1.0,0.8955009029182879,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.12809508088478047,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.1825685044569165,0.0,1.0,1.0,0.8769316531253295,0,0,0,0,1,1,0,0,0,0,0,1 +0.3571428571428571,0.06289204357873886,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.10267414988445031,1.0,1.0,1.0,0.7333540005028889,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.09695168922636734,0.0,1.0,1.0,0.9086619979671742,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09298998569384835,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1
+0.03571428571428571,0.05904038736656762,1.0,1.0,1.0,0.7691184526170824,0,0,0,0,1,0,1,0,0,0,1,0 +0.5714285714285714,0.09502586112028172,1.0,1.0,0.0,0.27443988356010096,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.047210300429184546,1.0,1.0,0.0,0.5600008807692614,0,0,0,1,0,0,1,0,1,0,0,0 +0.19642857142857142,0.05777484318256851,1.0,1.0,1.0,0.8887675866521803,0,0,0,0,1,0,0,1,0,0,0,1 +0.03571428571428571,0.0505667436997909,1.0,1.0,1.0,0.5706358388636316,0,0,0,0,1,0,1,0,1,0,0,0 +0.5714285714285714,0.16501595686145043,1.0,1.0,1.0,0.2510304635302003,0,0,0,1,0,0,0,1,1,0,0,0 +0.625,0.1275998679432156,1.0,0.0,1.0,0.6681686332576849,0,0,1,0,0,0,1,0,0,0,0,1 +0.08928571428571427,0.07147573456586331,1.0,1.0,1.0,0.7387979215486421,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.024760647078243648,1.0,1.0,1.0,0.9209417015586914,0,0,0,0,1,0,0,1,0,0,0,1 +0.14285714285714285,0.32067789149334214,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,0,1,0,0,1,0,0 +0.08928571428571427,0.05260261912622427,0.0,0.0,1.0,0.5214711461502933,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.060801144492131615,0.0,0.0,0.0,0.3676314741249905,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.15010454495433037,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.07015516672169032,0.0,1.0,1.0,0.8575078279322482,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.037526136238582586,0.0,0.0,1.0,0.8373645845436015,0,0,1,0,0,0,1,0,0,0,0,1 +0.4107142857142857,0.12534389787608674,0.0,1.0,1.0,0.46064338284484674,0,1,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.15758776273797734,0.0,1.0,0.0,0.3403565857713484,0,1,0,0,0,1,0,0,1,0,0,0 +0.4642857142857143,0.09122922856828436,1.0,1.0,0.0,0.5493424244597425,0,0,0,0,1,0,1,0,0,0,1,0 +0.14285714285714285,0.05447342357213601,0.0,1.0,1.0,0.8898642220971337,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.02569604930119952,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.0695499064597777,1.0,0.0,1.0,0.5216017218323117,0,0,1,0,0,0,1,0,0,1,0,0 +0.19642857142857142,0.08688235941454825,1.0,1.0,1.0,0.9173729937673434,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.02145922746781116,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.39517992736876856,0.0,1.0,1.0,0.3259888088958532,0,0,1,0,0,1,0,0,0,1,0,0 +0.14285714285714285,0.0710905689446462,1.0,1.0,1.0,0.9393970480266846,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.5160669087707714,1.0,0.0,0.0,0.2184939341622545,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.00968416419060196,1.0,1.0,1.0,0.9418146463783325,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.24061846594035435,1.0,1.0,0.0,0.9016367060401769,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.45234951028942444,0.0,0.0,1.0,0.20364878828289232,0,0,1,0,0,1,0,0,1,0,0,0 +0.5714285714285714,0.1904368878617806,1.0,1.0,1.0,0.4297441010936714,0,0,1,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.6877407285132606,1.0,1.0,1.0,0.7743267172453298,0,0,0,1,0,0,1,0,0,0,0,1 +0.6785714285714286,0.4199405744470122,1.0,1.0,1.0,0.20677830560523824,0,0,0,1,0,0,0,1,1,0,0,0 +1.0,0.5444040937603168,0.0,0.0,1.0,0.4443440748703878,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.05480356553317926,1.0,0.0,0.0,0.4752943973393717,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.1840541432816111,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.18526466380543635,1.0,1.0,1.0,0.7138311294496358,0,0,0,0,1,1,0,0,0,1,0,0 +0.14285714285714285,0.034995047870584356,0.0,0.0,1.0,0.8001795737339381,0,0,1,0,0,0,1,0,0,0,0,1 
+0.3571428571428571,0.07131066358534169,1.0,0.0,0.0,0.3160895023001086,0,0,0,1,0,1,0,0,1,0,0,0 +0.14285714285714285,0.10570045119401342,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.5714285714285714,0.13002090899086607,1.0,1.0,1.0,0.7918122026495104,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.04682513480796743,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.1513700891383295,0.0,1.0,1.0,0.6052277358503451,0,0,0,1,0,1,0,0,0,1,0,0 +0.14285714285714285,0.1116430064927919,1.0,1.0,1.0,0.8820768558276499,0,0,1,0,0,0,0,1,0,0,0,1 +0.10714285714285712,0.06993507208099482,1.0,1.0,1.0,0.6913008425849491,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.04357873885770881,0.0,1.0,0.0,0.5069022744695886,0,0,1,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.3538571585781886,1.0,1.0,1.0,0.24702882446640287,0,0,1,0,0,0,0,1,0,1,0,0 +0.5714285714285714,0.10426983602949268,1.0,0.0,0.0,0.2184939341622545,0,0,0,1,0,0,1,0,1,0,0,0 +0.3571428571428571,0.2512930560140861,1.0,1.0,0.0,0.43499565515022004,0,0,0,1,0,0,1,0,1,0,0,0 +0.7857142857142857,0.7797402883239792,1.0,0.0,0.0,0.21486216341529946,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.02426543413667877,1.0,1.0,0.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.05441839991196214,1.0,1.0,1.0,0.5354741988477034,0,0,1,0,0,0,0,1,0,1,0,0 +0.3035714285714286,0.1384945526576428,1.0,1.0,0.0,0.6311298802642114,0,0,0,0,1,1,0,0,0,1,0,0 +0.24999999999999997,0.33069219764498736,1.0,1.0,1.0,0.70490883351029,0,0,0,1,0,1,0,0,0,1,0,0 +0.24999999999999997,0.027511830086937385,0.0,1.0,0.0,0.33991531191906393,1,0,0,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.06894464619786508,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.024815670738417523,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.125,0.04974138879718279,0.0,1.0,1.0,0.8937163943553702,1,0,0,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.16011885110597557,1.0,1.0,1.0,0.8962138068279807,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.10311433916584131,0.0,0.0,0.0,0.20364878828289232,0,0,1,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.02839220864971938,1.0,1.0,0.0,0.6731420559668704,0,0,1,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.3685484758446132,1.0,1.0,1.0,0.4297441010936714,0,0,1,0,0,1,0,0,0,1,0,0 +0.08928571428571427,0.0482007263123143,1.0,1.0,1.0,0.7460536409951634,0,0,0,0,1,0,1,0,0,0,1,0 +0.24999999999999997,0.05315285572796302,0.0,0.0,0.0,0.2364918904353026,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.10806646858149004,1.0,0.0,1.0,0.3387603186528358,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.20864971937933313,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.04418399911962144,1.0,1.0,1.0,0.4768786443351435,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.14559260482007264,1.0,1.0,1.0,0.6014238410536501,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.327335754374381,1.0,1.0,0.0,0.46045295935712555,0,0,1,0,0,1,0,0,1,0,0,0 +0.3571428571428571,0.41311764058545175,0.0,1.0,1.0,0.491978924064202,0,0,0,0,1,1,0,0,0,1,0,0 +0.14285714285714285,0.06173654671508748,1.0,1.0,0.0,0.5600008807692614,0,0,0,1,0,0,1,0,1,0,0,0 +0.4642857142857143,0.33564432706063607,1.0,1.0,0.0,0.32719918796985015,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.2304941124683614,1.0,1.0,1.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.7857142857142857,0.8485748872014967,1.0,0.0,0.0,0.21486216341529946,0,0,1,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.030483107736326624,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 
+0.3035714285714286,0.07439198855507868,1.0,1.0,1.0,0.4148665750517137,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.3421371189611533,1.0,1.0,0.0,0.7134293371692706,1,0,0,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.06729393639264884,1.0,1.0,1.0,0.9073342365921735,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.2851876306811929,0.0,1.0,1.0,0.8298727730544322,1,0,0,0,0,1,0,0,0,0,0,1 +0.03571428571428571,0.05436337625178826,1.0,1.0,1.0,0.9417068119041873,1,0,0,0,0,0,1,0,0,0,0,1 +0.5178571428571429,0.13832948167712117,0.0,1.0,1.0,0.7345947491674498,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.09199955981071861,1.0,1.0,0.0,0.6731420559668704,0,0,1,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.1522504677011115,1.0,1.0,1.0,0.5493424244597425,0,0,0,0,1,0,1,0,0,0,1,0 +0.3571428571428571,0.052657642786398146,0.0,0.0,0.0,0.1941383022647156,0,1,0,0,0,1,0,0,1,0,0,0 +0.21428571428571427,0.13068119291295258,1.0,1.0,0.0,0.4664405275790275,0,0,0,0,1,1,0,0,1,0,0,0 +0.14285714285714285,0.023880268515461652,1.0,1.0,0.0,0.46045295935712555,0,0,1,0,0,1,0,0,1,0,0,0 +0.08928571428571427,0.07813359744690217,0.0,0.0,0.0,0.5214711461502933,0,1,0,0,0,0,1,0,0,1,0,0 +0.03571428571428571,0.02321998459337515,0.0,1.0,1.0,0.9120334763614909,1,0,0,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.12820512820512822,1.0,1.0,0.0,0.8962138068279807,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.3336634752943766,1.0,1.0,1.0,0.8837277100655102,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.1641906019588423,0.0,0.0,0.0,0.1941383022647156,0,1,0,0,0,1,0,0,1,0,0,0 +0.10714285714285712,0.3883569935072081,1.0,1.0,1.0,0.6608113136649546,1,0,0,0,0,0,0,1,0,1,0,0 +0.1607142857142857,0.10184879498184218,0.0,0.0,1.0,0.4796070935730555,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.32898646417959726,0.0,0.0,0.0,0.1271934988531199,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.15219544404093763,1.0,0.0,1.0,0.5621939327570208,0,0,1,0,0,0,1,0,0,0,1,0 +0.14285714285714285,0.014966435567293938,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.0926598437328051,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.7857142857142857,0.1376141740948608,1.0,1.0,1.0,0.7359170768632337,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.06943985913942995,1.0,1.0,0.0,0.6449158361571704,0,0,1,0,0,0,1,0,0,1,0,0 +0.4642857142857143,0.12396830637173985,1.0,1.0,1.0,0.539932226665866,0,0,0,0,1,0,1,0,0,1,0,0 +0.3571428571428571,0.05705953560030813,0.0,1.0,1.0,0.8301501720360537,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.6679322108506658,1.0,1.0,0.0,0.3516352721472709,0,0,1,0,0,0,0,1,0,1,0,0 +0.08928571428571427,0.05981071860900186,1.0,1.0,0.0,0.7460536409951634,0,1,0,0,0,0,1,0,0,0,1,0 +0.4107142857142857,0.12061186310113349,1.0,1.0,1.0,0.3554393160412404,0,0,0,0,1,0,1,0,1,0,0,0 +0.10714285714285712,0.06129635743369649,0.0,1.0,1.0,0.8788408139676869,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.11742049081104876,1.0,1.0,1.0,0.38472306870888867,0,0,0,0,1,1,0,0,1,0,0,0 +0.08928571428571427,0.1402553097832068,1.0,1.0,1.0,0.4917873581105606,0,0,1,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.06575327390778034,1.0,1.0,1.0,0.712739903075119,0,0,0,1,0,0,1,0,0,0,1,0 +0.03571428571428571,0.08215032463959503,1.0,1.0,1.0,0.9303800002041727,0,0,1,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.06036095521074061,1.0,1.0,1.0,0.7762312226178342,0,0,0,1,0,0,1,0,0,0,1,0 +0.3571428571428571,0.25420931000330144,1.0,1.0,0.0,0.27088325935678026,0,0,1,0,0,0,0,1,1,0,0,0 +0.14285714285714285,0.0817651590183779,0.0,1.0,1.0,0.9086619979671742,0,0,0,1,0,0,1,0,0,0,0,1 
+0.14285714285714285,0.027731924727632886,1.0,1.0,1.0,0.7138311294496358,0,0,0,0,1,0,1,0,0,1,0,0 +0.5714285714285714,0.18075272367117862,1.0,1.0,1.0,0.8501233211422209,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.0675690546935182,1.0,1.0,1.0,0.9016367060401769,0,0,1,0,0,0,1,0,0,0,0,1 +0.4642857142857143,0.35721360184879497,1.0,1.0,1.0,0.8794147982207655,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.37839771101573677,0.0,0.0,0.0,0.6396221742784345,0,1,0,0,0,1,0,0,0,0,0,1 +0.19642857142857142,0.030593155056674374,0.0,0.0,1.0,0.2247881972110395,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.14537251017937713,1.0,1.0,1.0,0.530059222287824,0,0,0,0,1,0,0,1,0,0,1,0 +0.5714285714285714,0.30538131396500495,1.0,1.0,1.0,0.4297441010936714,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.15709254979641246,1.0,0.0,1.0,0.8205202256524424,0,1,0,0,0,0,1,0,0,0,0,1 +0.19642857142857142,0.06250687795752173,1.0,1.0,1.0,0.9016367060401769,0,0,1,0,0,1,0,0,0,0,0,1 +0.24999999999999997,0.11852096401452623,0.0,1.0,1.0,0.8384025299994181,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.19962583911081766,1.0,1.0,1.0,0.6014238410536501,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.15637724221415208,1.0,0.0,0.0,0.4752943973393717,0,1,0,0,0,1,0,0,0,1,0,0 +0.4285714285714286,0.1371739848134698,1.0,1.0,1.0,0.8656027328560056,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.07285132607021019,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.3035714285714286,0.0872125013755915,0.0,0.0,0.0,0.1840254387203246,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.13579839330912294,0.0,0.0,0.0,0.3243121077691882,0,0,1,0,0,1,0,0,0,1,0,0 +0.7857142857142857,0.2527786948387807,1.0,1.0,0.0,0.7355348054855501,1,0,0,0,0,1,0,0,0,0,0,1 +0.3571428571428571,0.052217453505007144,1.0,1.0,0.0,0.38472306870888867,0,0,0,0,1,0,1,0,1,0,0,0 +0.17857142857142858,0.20485308682733577,1.0,0.0,1.0,0.2907185455724763,1,0,0,0,0,0,0,1,1,0,0,0 +0.3571428571428571,0.1336524705623418,0.0,1.0,1.0,0.8301501720360537,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.10360955210740619,0.0,1.0,1.0,0.8532586635277519,0,0,0,0,1,0,0,1,0,0,0,1 +0.3571428571428571,0.05397821063057115,0.0,1.0,1.0,0.28637876253981037,0,0,0,0,1,1,0,0,1,0,0,0 +0.3571428571428571,0.14427203697589966,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.11978650819852536,1.0,1.0,1.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +1.0,0.3877517332452955,1.0,1.0,0.0,0.121581563557335,0,0,0,0,1,1,0,0,1,0,0,0 +0.4642857142857143,0.40420380763728403,1.0,1.0,1.0,0.85555465817234,0,0,0,0,1,0,1,0,0,0,0,1 +0.625,0.7664245625619016,1.0,1.0,1.0,0.2911457072868785,0,0,0,1,0,0,1,0,1,0,0,0 +0.08928571428571427,0.06883459887751733,0.0,1.0,0.0,0.6447959977442328,0,0,0,0,1,0,1,0,0,1,0,0 +0.7857142857142857,0.19775503466490593,1.0,1.0,0.0,0.3285309193360387,0,0,0,1,0,0,0,1,0,1,0,0 +0.6785714285714286,0.20864971937933313,1.0,1.0,1.0,0.7473544772820154,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.06184659403543523,0.0,1.0,1.0,0.45983457538120076,1,0,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.41619896555518876,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.03626059205458347,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.34158688235941453,1.0,1.0,0.0,0.9073342365921735,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.11962143721800374,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.13965004952129417,0.0,0.0,1.0,0.8404307054775538,0,0,0,1,0,0,1,0,0,0,0,1 
+0.14285714285714285,0.024595576097722022,1.0,1.0,0.0,0.5082778240326536,0,1,0,0,0,0,1,0,1,0,0,0 +0.3035714285714286,0.12248266754704522,1.0,1.0,1.0,0.8962138068279807,0,0,0,0,1,0,1,0,0,0,0,1 +0.14285714285714285,0.0706503796632552,1.0,0.0,0.0,0.5995907180616916,0,1,0,0,0,1,0,0,0,1,0,0 +0.03571428571428571,0.04500935402222955,1.0,1.0,1.0,0.7623117828116139,0,0,0,0,1,0,1,0,0,1,0,0 +0.14285714285714285,0.18284362275778587,0.0,0.0,1.0,0.44241615651549826,0,0,1,0,0,0,1,0,0,1,0,0 +0.24999999999999997,0.05782986684274238,1.0,1.0,1.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.6785714285714286,0.3805436337625179,1.0,1.0,1.0,0.8152117172112082,0,0,0,1,0,1,0,0,0,0,0,1 +0.3571428571428571,0.09843732805106195,1.0,0.0,0.0,0.4752943973393717,0,1,0,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.12286783316826234,1.0,0.0,1.0,0.23658638755348427,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.18526466380543635,1.0,0.0,1.0,0.4847650698535884,0,0,0,0,1,1,0,0,0,0,1,0 +0.3571428571428571,0.09948277759436558,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.08281060856168151,0.0,1.0,1.0,0.28637876253981037,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.07202597116760207,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.16017387476614944,1.0,1.0,0.0,0.34047407781017125,0,0,1,0,0,1,0,0,1,0,0,0 +0.7857142857142857,0.5486959392538792,1.0,1.0,1.0,0.7743267172453298,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.062231759656652355,0.0,1.0,0.0,0.2488657768966351,0,0,1,0,0,0,1,0,1,0,0,0 +0.3035714285714286,0.10955210740618465,1.0,1.0,1.0,0.8962138068279807,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.12589413447782546,1.0,1.0,0.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.5714285714285714,0.4300099042588313,0.0,0.0,0.0,0.1073942894895514,0,0,1,0,0,0,1,0,1,0,0,0 +0.7857142857142857,0.33927588863211183,0.0,1.0,0.0,0.26164945103668286,0,0,0,0,1,1,0,0,0,1,0,0 +0.3571428571428571,0.24683613954000222,0.0,0.0,0.0,0.3676314741249905,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.037416088918234836,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.1463629360625069,1.0,1.0,1.0,0.3565290863994199,0,0,0,1,0,0,0,1,1,0,0,0 +0.24999999999999997,0.11158798283261802,0.0,1.0,0.0,0.5545926067069147,0,1,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.2318146803125344,1.0,1.0,0.0,0.7918122026495104,0,0,1,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.11797072741278751,0.0,0.0,1.0,0.594377341023918,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.1103224386486189,1.0,1.0,1.0,0.8839295784323556,0,1,0,0,0,0,1,0,0,0,0,1 +0.625,0.4587872785297678,1.0,1.0,1.0,0.8024775365103721,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.3022449653350941,0.0,0.0,1.0,0.2949724434247011,0,1,0,0,0,0,0,1,0,1,0,0 +0.14285714285714285,0.05227247716518102,0.0,0.0,1.0,0.24744012641724394,0,0,1,0,0,1,0,0,1,0,0,0 +0.4642857142857143,0.08897325850115549,1.0,1.0,1.0,0.85555465817234,0,0,0,0,1,0,1,0,0,0,0,1 +0.03571428571428571,0.0335644327060636,0.0,1.0,1.0,0.4603229620470974,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.04258831297457907,1.0,1.0,0.0,0.38472306870888867,0,1,0,0,0,0,1,0,1,0,0,0 +1.0,0.36238582590513924,1.0,1.0,0.0,0.121581563557335,0,0,0,0,1,0,1,0,1,0,0,0 +0.14285714285714285,0.07868383404864092,1.0,1.0,1.0,0.5082778240326536,0,0,0,0,1,0,1,0,1,0,0,0 +0.3571428571428571,0.06399251678221635,1.0,1.0,1.0,0.8627767388444526,0,0,1,0,0,0,1,0,0,0,0,1 +0.07142857142857142,0.03615054473423572,1.0,1.0,1.0,0.7468001783284491,0,1,0,0,0,0,1,0,0,1,0,0 
+0.3571428571428571,0.05188731154396391,0.0,1.0,0.0,0.28597713146577547,1,0,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.1280400572246066,1.0,1.0,1.0,0.46045295935712555,0,0,1,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.2047980631671619,0.0,0.0,1.0,0.7832044401604136,0,0,0,1,0,1,0,0,0,0,0,1 +0.3571428571428571,0.1930229998899527,0.0,0.0,1.0,0.3671745216659698,1,0,0,0,0,1,0,0,0,1,0,0 +0.24999999999999997,0.11389897655992078,1.0,1.0,1.0,0.9071687984489271,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.12176735996478487,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.06014086057004511,1.0,1.0,1.0,0.7691184526170824,0,0,0,0,1,0,1,0,0,0,1,0 +0.4285714285714286,0.4167492021569275,1.0,1.0,1.0,0.8656027328560056,0,1,0,0,0,1,0,0,0,0,0,1 +0.3035714285714286,0.16644657202597118,1.0,1.0,1.0,0.8962138068279807,0,0,0,0,1,0,1,0,0,0,0,1 +0.24999999999999997,0.16474083856058105,0.0,1.0,1.0,0.5069022744695886,0,0,1,0,0,0,1,0,0,1,0,0 +0.4107142857142857,0.46016287003411466,1.0,1.0,1.0,0.8472095845527893,0,0,1,0,0,0,1,0,0,0,0,1 +0.5714285714285714,0.08759766699680863,0.0,1.0,0.0,0.19533656176753816,0,1,0,0,0,0,1,0,1,0,0,0 +0.3571428571428571,0.12991086167051832,1.0,1.0,1.0,0.8839295784323556,0,0,0,0,1,0,1,0,0,0,0,1 +0.08928571428571427,0.09794211510949709,1.0,0.0,1.0,0.676443925470061,0,0,0,1,0,0,1,0,0,1,0,0 +0.3571428571428571,0.49669858038956755,1.0,1.0,1.0,0.8189994124469444,0,0,1,0,0,0,0,1,0,0,0,1 +0.14285714285714285,0.045449543303620554,1.0,1.0,1.0,0.9122329885255572,0,0,1,0,0,0,1,0,0,0,0,1 +0.10714285714285712,0.07131066358534169,1.0,1.0,1.0,0.9187129412749173,0,0,1,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.09040387366567623,0.0,1.0,1.0,0.3538845096386129,0,0,1,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.10404974138879718,1.0,1.0,1.0,0.9393970480266846,0,0,0,1,0,0,1,0,0,0,0,1 +0.5714285714285714,0.425332893144052,1.0,1.0,0.0,0.8216427748498599,0,1,0,0,0,1,0,0,0,0,0,1 +0.14285714285714285,0.055793991416309016,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,1,0,0,0,0,0,1 +0.24999999999999997,0.10999229668757565,1.0,1.0,1.0,0.49745855352837404,0,0,0,1,0,0,1,0,1,0,0,0 +0.7857142857142857,0.324254429404644,0.0,1.0,0.0,0.0955803074919803,0,0,0,0,1,0,0,1,1,0,0,0 +0.3035714285714286,0.1842742379223066,0.0,1.0,1.0,0.35908907651024774,0,0,0,1,0,1,0,0,1,0,0,0 +0.19642857142857142,0.07081545064377683,1.0,1.0,1.0,0.6874873703107746,0,0,0,0,1,0,1,0,0,1,0,0 +0.5714285714285714,0.590073731704633,0.0,1.0,0.0,0.7468841586935758,1,0,0,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.2226807527236712,1.0,1.0,0.0,0.6598740118142182,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.05172224056344228,0.0,1.0,0.0,0.27039187876105036,1,0,0,0,0,0,0,1,1,0,0,0 +0.14285714285714285,0.056344228018047754,0.0,1.0,0.0,0.39882243364250436,0,1,0,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.11863101133487398,1.0,1.0,1.0,0.9344131207554249,1,0,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.33856058104985143,1.0,1.0,1.0,0.6014238410536501,0,1,0,0,0,0,1,0,0,1,0,0 +0.10714285714285712,0.05705953560030813,1.0,1.0,1.0,0.9319236160108605,0,0,0,0,1,0,1,0,0,0,0,1 +0.6785714285714286,0.3138549576317817,0.0,1.0,1.0,0.3593859563426322,0,0,0,1,0,0,1,0,0,1,0,0 +0.03571428571428571,0.09392538791680423,0.0,1.0,1.0,0.5122497385912862,0,0,0,1,0,0,1,0,1,0,0,0 +0.3571428571428571,0.07164080554638494,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.3571428571428571,0.03780125453945196,1.0,1.0,1.0,0.8839295784323556,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09805216242984482,1.0,1.0,1.0,0.8456935742394528,0,0,0,0,1,0,0,1,0,0,0,1 
+0.5714285714285714,0.5846263893474194,1.0,1.0,1.0,0.8216427748498599,0,0,0,0,1,0,1,0,0,0,0,1 +0.3571428571428571,0.2659843732805106,0.0,1.0,1.0,0.491978924064202,0,0,0,0,1,0,1,0,0,1,0,0 +0.5714285714285714,0.2712116209970287,1.0,1.0,0.0,0.31774368328346525,0,0,0,1,0,0,1,0,1,0,0,0 +0.03571428571428571,0.044018928139099814,1.0,1.0,1.0,0.7619553600617908,1,0,0,0,0,0,1,0,0,1,0,0 +0.5714285714285714,0.2015516672169033,1.0,0.0,1.0,0.31967729971945585,0,0,1,0,0,0,1,0,0,0,1,0 +0.24999999999999997,0.15401122482667548,0.0,1.0,1.0,0.5639451140754361,0,1,0,0,0,0,1,0,0,0,1,0 +0.5714285714285714,0.44486629250577747,1.0,1.0,0.0,0.21396727904669746,0,0,0,0,1,0,0,1,1,0,0,0 +0.14285714285714285,0.08847804555959062,0.0,0.0,1.0,0.2848197012851972,0,1,0,0,0,1,0,0,1,0,0,0 +0.14285714285714285,0.25272367117860683,1.0,1.0,0.0,0.5082778240326536,0,0,0,0,1,1,0,0,1,0,0,0 +0.24999999999999997,0.19423352041377795,1.0,1.0,1.0,0.9073342365921735,0,1,0,0,0,0,1,0,0,0,0,1 +0.14285714285714285,0.09783206778914934,1.0,1.0,1.0,0.6731420559668704,0,0,1,0,0,0,1,0,0,1,0,0 +0.3571428571428571,0.11191812479366127,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.19642857142857142,0.1363486299108617,1.0,1.0,1.0,0.7303600113933694,0,0,0,1,0,0,1,0,0,1,0,0 +0.4107142857142857,0.27748431825685044,1.0,1.0,0.0,0.3549890108481963,1,0,0,0,0,0,1,0,1,0,0,0 +1.0,0.4900957411687025,1.0,1.0,1.0,0.16559346885680104,0,0,1,0,0,0,0,1,0,1,0,0 +0.24999999999999997,0.09315505667436998,0.0,0.0,0.0,0.7904631079537381,0,1,0,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.148949048090679,1.0,1.0,1.0,0.38472306870888867,0,0,0,0,1,0,1,0,1,0,0,0 +0.24999999999999997,0.11769560911191813,0.0,1.0,1.0,0.3403565857713484,0,1,0,0,0,0,1,0,1,0,0,0 +0.24999999999999997,0.08979861340376362,0.0,0.0,0.0,0.20364878828289232,0,0,1,0,0,1,0,0,1,0,0,0 +0.4642857142857143,0.08060966215472654,0.0,1.0,1.0,0.383421164823506,0,0,1,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.33459887751733247,1.0,1.0,0.0,0.14138446961748627,0,0,0,0,1,0,0,1,1,0,0,0 +0.19642857142857142,0.0586552217453505,1.0,1.0,1.0,0.9016367060401769,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.8058765269065697,1.0,1.0,0.0,0.5706358388636316,0,0,0,0,1,0,1,0,1,0,0,0 +0.4642857142857143,0.15555188731154398,1.0,1.0,1.0,0.85555465817234,0,0,0,0,1,0,1,0,0,0,0,1 +0.0,0.07120061626499395,1.0,1.0,1.0,0.9558919804312949,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.041652910751623196,1.0,0.0,1.0,0.5528336477984951,0,0,1,0,0,0,1,0,0,1,0,0 +0.10714285714285712,0.10008803785627819,1.0,1.0,1.0,0.9187129412749173,0,0,1,0,0,0,1,0,0,0,0,1 +0.24999999999999997,0.7002310993727302,0.0,1.0,0.0,0.4721076735930411,1,0,0,0,0,0,0,1,0,1,0,0 +0.24999999999999997,0.1570375261362386,1.0,1.0,1.0,0.49745855352837404,0,0,0,1,0,0,1,0,1,0,0,0 +0.5714285714285714,0.20578848905029165,0.0,1.0,1.0,0.3694096935983138,0,1,0,0,0,0,1,0,0,1,0,0 +0.14285714285714285,0.17877187190491914,0.0,1.0,0.0,0.3538845096386129,0,0,1,0,0,0,1,0,1,0,0,0 +0.08928571428571427,0.048750962914053037,1.0,1.0,0.0,0.6705704969262621,0,0,0,0,1,0,0,1,0,1,0,0 +0.19642857142857142,0.007813359744690218,0.0,0.0,1.0,0.4681806327003752,0,1,0,0,0,1,0,0,0,0,1,0 +0.24999999999999997,0.048475844613183675,0.0,1.0,0.0,0.33991531191906393,1,0,0,0,0,0,1,0,1,0,0,0 +0.14285714285714285,0.06459777704412897,1.0,1.0,1.0,0.7543809345848546,0,0,0,1,0,0,1,0,0,1,0,0 +0.6785714285714286,0.17299438758666227,1.0,1.0,1.0,0.22696837267036857,1,0,0,0,0,0,1,0,1,0,0,0 +0.7857142857142857,0.3713546825134808,1.0,1.0,0.0,0.21980151898449196,0,0,0,1,0,0,1,0,1,0,0,0 
+0.4642857142857143,0.6464179597226807,1.0,1.0,0.0,0.32719918796985015,0,1,0,0,0,0,1,0,1,0,0,0 +0.4642857142857143,0.14201606690877078,0.0,1.0,1.0,0.7583632325020305,0,0,1,0,0,0,1,0,0,0,0,1 +0.3571428571428571,0.09436557719819522,0.0,1.0,1.0,0.4443019547874373,0,0,1,0,0,1,0,0,0,1,0,0 +0.3571428571428571,0.07411687025420931,1.0,1.0,1.0,0.8456935742394528,0,0,0,0,1,0,0,1,0,0,0,1 +0.03571428571428571,0.09975789589523495,1.0,1.0,1.0,0.7623117828116139,0,1,0,0,0,1,0,0,0,1,0,0 +0.3928571428571428,0.42456256190161773,1.0,1.0,1.0,0.5811811839164577,0,1,0,0,0,0,1,0,0,1,0,0 +0.7857142857142857,0.4034334763948498,1.0,1.0,1.0,0.2839698095777251,1,0,0,0,0,0,0,1,0,1,0,0 +0.3571428571428571,0.24369979091009136,1.0,1.0,1.0,0.903630573225354,0,0,0,1,0,0,1,0,0,0,0,1 +0.14285714285714285,0.30543633762517886,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,1,0,0,0,0,0,1 +0.3571428571428571,0.1990756025090789,0.0,1.0,1.0,0.8301501720360537,0,0,0,0,1,1,0,0,0,0,0,1 +0.14285714285714285,0.11775063277209201,1.0,1.0,1.0,0.9264124244522206,0,0,0,0,1,0,1,0,0,0,0,1 +0.19642857142857142,0.3631561571475735,1.0,1.0,0.0,0.6870648053224668,1,0,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.0675690546935182,1.0,0.0,0.0,0.9134302134303278,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.10069329811819082,1.0,0.0,1.0,0.8891646790727171,0,0,1,0,0,0,1,0,0,0,0,1 +0.03571428571428571,0.03747111257840871,0.0,1.0,0.0,0.6730269139352237,0,1,0,0,0,0,1,0,0,1,0,0 +0.08928571428571427,0.05469351821283151,0.0,1.0,1.0,0.9015902882244003,0,0,0,0,1,1,0,0,0,0,0,1 +0.10714285714285712,0.10355452844723231,0.0,1.0,1.0,0.41906888316914087,0,1,0,0,0,1,0,0,1,0,0,0 +0.24999999999999997,0.0935402222955871,1.0,1.0,1.0,0.9234069094768464,0,0,0,1,0,0,1,0,0,0,0,1 +0.03571428571428571,0.06184659403543523,1.0,1.0,1.0,0.5231864157229879,0,0,1,0,0,0,1,0,1,0,0,0 +0.03571428571428571,0.06597336854847584,1.0,1.0,1.0,0.7623117828116139,0,0,0,0,1,0,1,0,0,1,0,0 +0.24999999999999997,0.07329151535160118,1.0,1.0,1.0,0.9073342365921735,0,0,0,0,1,0,1,0,0,0,0,1 diff --git a/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/german_AIF_test.json b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/german_AIF_test.json new file mode 100644 index 0000000..1e56c7a --- /dev/null +++ b/pipeline/logs/2020-07-30_18-09-09-739__german_AIF_test/german_AIF_test.json @@ -0,0 +1 @@ +{"size": 1000, "features": 8, "categorical features": ["credit", "age", "employment", "housing", "sex", "status"], "numerical features": ["credit_amount", "month"], "domain": {"status": [4, ["A11", "A12", "A14", "A13"]], "month": [4, 72], "credit_amount": [250, 18424], "employment": [5, ["A75", "A73", "A74", "A71", "A72"]], "housing": [3, ["A152", "A153", "A151"]], "sex": [2, ["male", "female"]], "age": [2, ["old", "young"]], "credit": [2, ["good", "bad"]]}} \ No newline at end of file diff --git a/pipeline/model/classifiers.py b/pipeline/model/classifiers.py index 248311f..27e514b 100644 --- a/pipeline/model/classifiers.py +++ b/pipeline/model/classifiers.py @@ -3,85 +3,252 @@ """ import pandas as pd +import numpy as np +from aif360.datasets import BinaryLabelDataset from sklearn.linear_model import SGDClassifier from sklearn.tree import DecisionTreeClassifier -from pipeline.model.inprocessor import Model +from sklearn.model_selection import GridSearchCV +from sklearn.pipeline import Pipeline +from pipeline.step import Step -class SK_LogisticRegression(Model): - def __init__(self, df, target_col, loss_func="log", instance_weights=[], seed=0): + +class SK_LogisticRegression(Step): + def __init__(self, 
target_col, seed, loss_func="log", instance_weights=[], target_positive=1): """ - :param df: pandas dataframe, stores the data to fit the classifier. :param target_col: str, the name of the target variable in above data. + :param seed: integer, the seed for random state. :param loss_func: str, the name of the loss function used in linear model. Same as the loss parameter in sklearn.linear_model.SGDClassifier. The possible options are ‘hinge’, ‘log’, ‘modified_huber’, ‘squared_hinge’, ‘perceptron’, or a regression loss: ‘squared_loss’, ‘huber’, ‘epsilon_insensitive’, or ‘squared_epsilon_insensitive’. :param instance_weights: list of float, each number represents the weight of the sample in above data. - :param seed: integer, the seed for random state. + :param target_positive: integer, 0 or 1, represents the positive value of the target attribute. Default is 1. + """ + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name + self.seed = seed + self.loss_func = loss_func + self.instance_weights = instance_weights + self.target_positive = target_positive + self.fitted_step = None + + def fit(self, df): + if len(self.instance_weights) == 0: + self.instance_weights = [1 for _ in range(1, df.shape[0] + 1)] + + self.fitted_step = SGDClassifier(loss=self.loss_func, random_state=self.seed).fit(np.array(df.drop(columns=[self.target_col])), np.array(df[self.target_col]), sample_weight=self.instance_weights) + + return self + + def apply(self, df): + aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[]) + after_df, _ = aif_pred_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) + + favorable_class_idx = list(self.fitted_step.classes_).index(self.target_positive) + after_df[self.pred_target_col] = [x[favorable_class_idx] for x in self.fitted_step.predict_proba(np.array(df.drop(columns=[self.target_col])))] + + return after_df + + def name(self): + return "SK_LogisticRegression" + + def abbr_name(self): + return "LR" + + def step_name(self): + return "Model" + + def input_encoded_data(self): + return True + + def output_encoded_data(self): + return False - cur_step = SGDClassifier(loss=loss_func, random_state=seed) - super().__init__("@".join(["SK_LogisticRegression", target_col]), cur_step, df, target_col, instance_weights=instance_weights) + def fit_only_on_train(self): + return True -class SK_DecisionTree(Model): - def __init__(self, df, target_col, instance_weights=[], seed=0): +class SK_DecisionTree(Step): + def __init__(self, target_col, seed, instance_weights=[], target_positive=1): """ - :param df: pandas dataframe, stores the data to fit the classifier. :param target_col: str, the name of the target variable in above data. - :param instance_weights: list of float, each number represents the weight of the sample in above data. :param seed: integer, the seed for random state. + :param instance_weights: list of float, each number represents the weight of the sample in above data. + :param target_positive: integer, 0 or 1, represents the positive value of the target attribute. Default is 1. 
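+
+        Illustrative usage (a sketch assuming an encoded dataframe ``df`` with a
+        binary "credit" target, mirroring the __main__ block below):
+
+            clf = SK_DecisionTree("credit", seed=0)
+            clf.fit(df)
+            scored = clf.apply(df)  # adds the "pred_credit" probability column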
+ """ - cur_step = DecisionTreeClassifier(random_state=seed) - super().__init__("@".join(["SK_DecisionTree", target_col]), cur_step, df, target_col, instance_weights=instance_weights) + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name + self.seed = seed + self.instance_weights = instance_weights + self.target_positive = target_positive + self.fitted_step = None + + def fit(self, df): + if len(self.instance_weights) == 0: + self.instance_weights = [1 for _ in range(1, df.shape[0] + 1)] + + self.fitted_step = DecisionTreeClassifier(random_state=self.seed).fit(np.array(df.drop(columns=[self.target_col])), np.array(df[self.target_col]), sample_weight=self.instance_weights) + + return self + + def apply(self, df): + aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[]) + after_df, _ = aif_pred_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) + + favorable_class_idx = list(self.fitted_step.classes_).index(self.target_positive) + after_df[self.pred_target_col] = [x[favorable_class_idx] for x in self.fitted_step.predict_proba(np.array(df.drop(columns=[self.target_col])))] + + return after_df + + def name(self): + return "SK_DecisionTree" + + def abbr_name(self): + return "DT" + + def step_name(self): + return "Model" + + def input_encoded_data(self): + return True + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True -class OPT_LogisticRegression(Model): - def __init__(self, df, target_col, loss_func="log", max_iter=1000, instance_weights=[], seed=0): +class OPT_LogisticRegression(Step): + def __init__(self, target_col, seed, loss_func="log", max_iter=1000, instance_weights=[], target_positive=1): """ - :param df: pandas dataframe, stores the data to fit the classifier. :param target_col: str, the name of the target variable in above data. + :param seed: integer, random seed. :param loss_func: str, the name of the loss function used in linear model. Same as the loss parameter in sklearn.linear_model.SGDClassifier. The possible options are ‘hinge’, ‘log’, ‘modified_huber’, ‘squared_hinge’, ‘perceptron’, or a regression loss: ‘squared_loss’, ‘huber’, ‘epsilon_insensitive’, or ‘squared_epsilon_insensitive’. :param max_iter: integer, max number of iterations of the model. :param instance_weights: list of float, each number represents the weight of the sample in above data. - :param seed: integer, random seed. + :param target_positive: integer, 0 or 1, represents the positive value of the target attribute. Default is 1. 
""" - # Update below parameters according to the loss function used - param_grid = { + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name + self.seed = seed + self.loss_func = loss_func + self.max_iter = max_iter + self.instance_weights = instance_weights + self.target_positive = target_positive + self.fitted_step = None + + self.param_grid = { 'learner__loss': [loss_func], 'learner__penalty': ['l2', 'l1', 'elasticnet'], 'learner__alpha': [0.00005, 0.0001, 0.005, 0.001] } - cur_step = SGDClassifier(max_iter=max_iter, random_state=seed) - super().__init__("@".join(["OPT_LogisticRegression", target_col]), cur_step, df, target_col, instance_weights=instance_weights, hyper_tune=True, param_grid=param_grid) -class OPT_DecisionTree(Model): - def __init__(self, df, target_col, instance_weights=[], seed=0): + def fit(self, df): + if len(self.instance_weights) == 0: + self.instance_weights = [1 for _ in range(1, df.shape[0] + 1)] + + search = GridSearchCV(Pipeline([('learner', SGDClassifier(max_iter=self.max_iter, random_state=self.seed))]), self.param_grid, scoring='roc_auc', cv=5, verbose=1, n_jobs=-1) + self.fitted_step = search.fit(np.array(df.drop(columns=[self.target_col])), np.array(df[self.target_col]), None, + **{'learner__sample_weight': self.instance_weights}) + + return self + + def apply(self, df): + aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[]) + after_df, _ = aif_pred_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) + + favorable_class_idx = list(self.fitted_step.classes_).index(self.target_positive) + after_df[self.pred_target_col] = [x[favorable_class_idx] for x in self.fitted_step.predict_proba(np.array(df.drop(columns=[self.target_col])))] + + return after_df + + def name(self): + return "OPT_LogisticRegression" + + def abbr_name(self): + return "OLR" + + def step_name(self): + return "Model" + + def input_encoded_data(self): + return True + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True + +class OPT_DecisionTree(Step): + def __init__(self, target_col, seed, instance_weights=[], target_positive=1): """ - :param df: pandas dataframe, stores the data to fit the classifier. :param target_col: str, the name of the target variable in above data. - :param instance_weights: list of float, each number represents the weight of the sample in above data. :param seed: integer, random seed. + :param instance_weights: list of float, each number represents the weight of the sample in above data. + :param target_positive: integer, 0 or 1, represents the positive value of the target attribute. Default is 1. 
""" - param_grid = { + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name + self.seed = seed + self.instance_weights = instance_weights + self.target_positive = target_positive + self.fitted_step = None + + self.param_grid = { 'learner__min_samples_split': range(20, 500, 10), 'learner__max_depth': range(15, 30, 2), 'learner__min_samples_leaf': [3, 4, 5, 10], "learner__criterion": ["gini", "entropy"] } - cur_step = DecisionTreeClassifier(random_state=seed) - super().__init__("@".join(["OPT_DecisionTree", target_col]), cur_step, df, target_col, instance_weights=instance_weights, hyper_tune=True, param_grid=param_grid) + def fit(self, df): + if len(self.instance_weights) == 0: + self.instance_weights = [1 for _ in range(1, df.shape[0] + 1)] + + search = GridSearchCV(Pipeline([('learner', DecisionTreeClassifier(random_state=self.seed))]), self.param_grid, scoring='roc_auc', cv=5, verbose=1, n_jobs=-1) + self.fitted_step = search.fit(np.array(df.drop(columns=[self.target_col])), np.array(df[self.target_col]), None, + **{'learner__sample_weight': self.instance_weights}) + + return self + + def apply(self, df): + aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[]) + after_df, _ = aif_pred_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) + + favorable_class_idx = list(self.fitted_step.classes_).index(self.target_positive) + after_df[self.pred_target_col] = [x[favorable_class_idx] for x in self.fitted_step.predict_proba(np.array(df.drop(columns=[self.target_col])))] + return after_df + + def name(self): + return "OPT_DecisionTree" + + def abbr_name(self): + return "ODT" + + def step_name(self): + return "Model" + + def input_encoded_data(self): + return True + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True if __name__ == '__main__': - data = pd.read_csv("../../data/adult_pre_reweigh.csv") - cur_o = SK_LogisticRegression(data, "income-per-year") - # cur_o = SK_DecisionTree(data, "income-per-year") - # cur_o = OPT_LogisticRegression(data, "income-per-year") - # cur_o = OPT_DecisionTree(data, "income-per-year") + data = pd.read_csv("../../data/german_pre_encoded.csv") + # cur_o = SK_LogisticRegression("credit", 0) + cur_o = SK_DecisionTree("credit", 0) + # cur_o = OPT_LogisticRegression("credit", 0) + # cur_o = OPT_DecisionTree("credit", 0) + cur_o.fit(data) after_data = cur_o.apply(data) - after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False) - - print(cur_o.get_name()) \ No newline at end of file + after_data.to_csv("../../data/german_after_" + cur_o.name() + ".csv", index=False) diff --git a/pipeline/model/fair_classifiers.py b/pipeline/model/fair_classifiers.py index 085e968..f92c5ad 100644 --- a/pipeline/model/fair_classifiers.py +++ b/pipeline/model/fair_classifiers.py @@ -3,35 +3,72 @@ """ import pandas as pd +from aif360.datasets import BinaryLabelDataset from aif360.algorithms.inprocessing import AdversarialDebiasing from aif360.algorithms.inprocessing import MetaFairClassifier from aif360.algorithms.inprocessing import PrejudiceRemover -from pipeline.model.inprocessor import Model +from pipeline.step import Step + import warnings warnings.filterwarnings("ignore") -class AIF_AdversarialDebiasing(Model): +class AIF_AdversarialDebiasing(Step): - def __init__(self, df, target_col, sensitive_att, seed=0): + def __init__(self, target_col, sensitive_att, seed): """ 
- :param df: pandas dataframe, stores the data to fit the fair classifier. - :param target_col: str, the name of the target variable in above data. + :param target_col: str, the name of the target variable in above data. Assume 1 represents the favorable class. :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. - :param seed: integer, random seed. - + :param seed: integer, the seed for random process. """ import tensorflow as tf - sess = tf.Session() - cur_step = AdversarialDebiasing(unprivileged_groups=[{sensitive_att: 0}], privileged_groups=[{sensitive_att: 1}], scope_name='debiased_classifier', debias=True, sess=sess, seed=seed) - super().__init__("@".join(["AIF_AdversarialDebiasing", sensitive_att]), cur_step, df, target_col, sensitive_att=sensitive_att, fair_aware=True) + self.sess = tf.Session() + + self.sensitive_att = sensitive_att + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name + self.seed = seed + self.fitted_step = None + + def fit(self, df): + # wrap the input dataframe with AIF 360 object "BinaryLabelDataset" + aif_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[self.sensitive_att]) + self.fitted_step = AdversarialDebiasing(unprivileged_groups=[{self.sensitive_att: 0}], privileged_groups=[{self.sensitive_att: 1}], scope_name='debiased_classifier', debias=True, sess=self.sess, seed=self.seed).fit(aif_df) + + return self + + def apply(self, df): + aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[self.sensitive_att]) + + after_aif_df = self.fitted_step.predict(aif_pred_df) + + after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) + after_df[self.pred_target_col] = after_aif_df.labels + + return after_df + + def name(self): + return "AdversarialDebiasing" + def abbr_name(self): + return "AD" -class AIF_MetaFairClassifier(Model): + def step_name(self): + return "FairInprocessor" - def __init__(self, df, target_col, sensitive_att, fairness_penalty=0.8, fair_metric="sr"): + def input_encoded_data(self): + return True + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True + +class AIF_MetaFairClassifier(Step): + + def __init__(self, target_col, sensitive_att, fairness_penalty=0.8, fair_metric="sr"): """ - :param df: pandas dataframe, stores the data to fit the fair classifier. :param target_col: str, the name of the target variable in above data. :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. :param fairness_penalty: float in [0,1], fairness penalty parameter. default is 0.8. The same parameter in aif360.algorithms.inprocessing.MetaFairClassifier. @@ -39,31 +76,112 @@ def __init__(self, df, target_col, sensitive_att, fairness_penalty=0.8, fair_met The same parameter in aif360.algorithms.inprocessing.MetaFairClassifier. 
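+        Note: per the AIF360 documentation, "sr" optimizes statistical rate and
+        "fdr" optimizes false discovery rate; these are the only two values
+        MetaFairClassifier supports.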
""" - cur_step = MetaFairClassifier(tau=fairness_penalty, sensitive_attr=sensitive_att, type=fair_metric) - super().__init__("@".join(["AIF_MetaFairClassifier", sensitive_att]), cur_step, df, target_col, sensitive_att=sensitive_att, fair_aware=True) + self.sensitive_att = sensitive_att + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name + self.fairness_penalty = fairness_penalty + self.fair_metric = fair_metric + + self.fitted_step = None + + + def fit(self, df): + # wrap the input dataframe with AIF 360 object "BinaryLabelDataset" + aif_df = BinaryLabelDataset(df=df, label_names=[self.target_col], + protected_attribute_names=[self.sensitive_att]) + self.fitted_step = MetaFairClassifier(tau=self.fairness_penalty, sensitive_attr=self.sensitive_att, type=self.fair_metric).fit(aif_df) + + return self + + def apply(self, df): + aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], + protected_attribute_names=[self.sensitive_att]) + + after_aif_df = self.fitted_step.predict(aif_pred_df) -class AIF_PrejudiceRemover(Model): + after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) + after_df[self.pred_target_col] = after_aif_df.labels - def __init__(self, df, target_col, sensitive_att, fairness_penalty=1.0): + return after_df + + def name(self): + return "MetaFairClassifier" + + def abbr_name(self): + return "MFC" + + def step_name(self): + return "FairInprocessor" + + def input_encoded_data(self): + return True + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True + +class AIF_PrejudiceRemover(Step): + + def __init__(self, target_col, sensitive_att, fairness_penalty=1.0): """ - :param df: pandas dataframe, stores the data to fit the fair classifier. :param target_col: str, the name of the target variable in above data. :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. :param fairness_penalty: float in [0,1], fairness penalty parameter. default is 1. The same parameter in aif360.algorithms.inprocessing.PrejudiceRemover. 
""" # TODO: fix the bug that cannot import lib of 'getoutput' - cur_step = PrejudiceRemover(eta=fairness_penalty, sensitive_attr=sensitive_att, class_attr=target_col) - super().__init__("@".join(["AIF_PrejudiceRemover", sensitive_att]), cur_step, df, target_col, sensitive_att=sensitive_att, fair_aware=True) + self.sensitive_att = sensitive_att + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name + self.fairness_penalty = fairness_penalty + + self.fitted_step = None + + def fit(self, df): + # wrap the input dataframe with AIF 360 object "BinaryLabelDataset" + aif_df = BinaryLabelDataset(df=df, label_names=[self.target_col], + protected_attribute_names=[self.sensitive_att]) + self.fitted_step = PrejudiceRemover(eta=self.fairness_penalty, sensitive_attr=self.sensitive_att, class_attr=self.target_col).fit(aif_df) + + return self + def apply(self, df): + aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], + protected_attribute_names=[self.sensitive_att]) + + after_aif_df = self.fitted_step.predict(aif_pred_df) + + after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) + after_df[self.pred_target_col] = after_aif_df.labels + + return after_df + + def name(self): + return "PrejudiceRemover" + + def abbr_name(self): + return "PR" + + def step_name(self): + return "FairInprocessor" + + def input_encoded_data(self): + return True + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True if __name__ == '__main__': - data = pd.read_csv("../../data/adult_pre_reweigh.csv") - cur_o = AIF_AdversarialDebiasing(data, "income-per-year", "sex") - # cur_o = AIF_MetaFairClassifier(data, "income-per-year", "sex") - # cur_o = AIF_PrejudiceRemover(data, "income-per-year", "sex") + data = pd.read_csv("../../data/german_pre_encoded.csv") + # cur_o = AIF_AdversarialDebiasing("credit", "sex") + cur_o = AIF_MetaFairClassifier("credit", "sex") + # cur_o = AIF_PrejudiceRemover("credit", "sex") # not working TODO: fix + cur_o.fit(data) after_data = cur_o.apply(data) - after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False) - - print(cur_o.get_name()) \ No newline at end of file + after_data.to_csv("../../data/german_after_" + cur_o.name() + ".csv", index=False) diff --git a/pipeline/model/inprocessor.py b/pipeline/model/inprocessor.py deleted file mode 100644 index 3df3909..0000000 --- a/pipeline/model/inprocessor.py +++ /dev/null @@ -1,74 +0,0 @@ -""" - Super class for all the supported classifier classes including fair-classifiers. -""" -import numpy as np -from aif360.datasets import BinaryLabelDataset -from sklearn.model_selection import GridSearchCV -from sklearn.pipeline import Pipeline -from pipeline.step import Step - -class Model(Step): - def __init__(self, step_name, step, df, target_col, instance_weights=[], hyper_tune=False, param_grid={}, sensitive_att=None, fair_aware=False, target_positive=1): - """ - :param step_name: str, name of the current input step. - :param step: object of the initialized class. - :param df: pandas dataframe, stores the data. - :param target_col: str, the name of the target attribute. - :param instance_weights: list of float in [0,1], each float represents the weight of the sample in above data. - :param hyper_tune: boolean, whether to tune the hyper-parameter. Default is False. - :param param_grid: dict, stores the search range of the hyper-parameter. 
When hyper_tune is True, this must be provided. - :param sensitive_att: str, the name of a sensitive attribute. - :param fair_aware: boolean, whether the model is fair-aware. Default is False. - :param target_positive: integer, 0 or 1, represents the positive value of the target attribute. Default is 1. - """ - - super().__init__(step_name=step_name, df=df, sensitive_att=sensitive_att, target_col=target_col) - # assume the data set has been encoded to numerical values - if fair_aware: # fair classifiers - # intitialize a binary label dataset from AIF 360 - aif_df = BinaryLabelDataset(df=df, label_names=[target_col], protected_attribute_names=[sensitive_att]) - fitted_step = step.fit(aif_df) - input_score = False - else: # regular classifiers - if len(instance_weights) == 0: - instance_weights = [1 for _ in range(1, df.shape[0] + 1)] - if hyper_tune: # grid search for best hyper parameters - if not param_grid: - print("Need to specify the search range of the hyper parameters - 'param_grid' is empty!") - raise ValueError - - search = GridSearchCV(Pipeline([('learner', step)]), param_grid, scoring='roc_auc', cv=5, verbose=1, n_jobs=-1) - fitted_step = search.fit(np.array(df.drop(columns=[target_col])), np.array(df[target_col]), None, **{'learner__sample_weight': instance_weights}) - else: - fitted_step = step.fit(np.array(df.drop(columns=[target_col])), np.array(df[target_col]), sample_weight=instance_weights) - input_score = True - - self.input_score = input_score - self.step = fitted_step - self.target_positive = target_positive - - - def apply(self, df): - """ - :param df: pandas dataframe, stores the data to apply the learned discretizer. - :return: pandas dataframe, stores the data after discretize. - """ - - # initialize AIF360 BinaryLabelDataset - - if self.input_score: # for regular model, generate score prediction - aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[]) - after_df, _ = aif_pred_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) - - favorable_class_idx = list(self.step.classes_).index(self.target_positive) - after_df[self.pred_target_col] = [x[favorable_class_idx] for x in self.step.predict_proba(np.array(df.drop(columns=[self.target_col])))] - - else: # for fair model, generate label prediction - aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], - protected_attribute_names=[self.sensitive_att]) - - after_aif_df = self.step.predict(aif_pred_df) - after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) - after_df[self.pred_target_col] = after_aif_df.labels - - return after_df diff --git a/pipeline/postprocess/fair_postprocessors.py b/pipeline/postprocess/fair_postprocessors.py index efa5b1c..412fdc6 100644 --- a/pipeline/postprocess/fair_postprocessors.py +++ b/pipeline/postprocess/fair_postprocessors.py @@ -3,64 +3,217 @@ """ import pandas as pd +import numpy as np +from aif360.datasets import BinaryLabelDataset from aif360.algorithms.postprocessing import CalibratedEqOddsPostprocessing from aif360.algorithms.postprocessing import EqOddsPostprocessing from aif360.algorithms.postprocessing import RejectOptionClassification -from pipeline.postprocess.postprocessor import Postprocessor +from pipeline.step import Step -class AIF_EqOddsPostprocessing(Postprocessor): +class NoFairPostprocessor(Step): + def __init__(self): + self.fitted_step = None - def __init__(self, df, target_col, sensitive_att, threshold=0.5, seed=0): + def fit(self, df): + 
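+        # no-op: this pass-through postprocessor has nothing to learn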
pass + + def apply(self, df): + return df + + def name(self): + return "NoPostprocessor" + + def abbr_name(self): + return "NP" + + def step_name(self): + return "FairPostprocessor" + + def input_encoded_data(self): + return False + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return False + +class AIF_EqOddsPostprocessing(Step): + + def __init__(self, target_col, sensitive_att, seed, threshold=0.5): """ - :param df: pandas dataframe, stores the data to fit the postprocessor. :param target_col: str, the name of the target variable in above data. Assume 1 represents the favorable class. :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. + :param seed: integer, the seed for random process. :param threshold: float in [0, 1], the classification threshold to generate the predicted class label. - :param seed: integer, the seed for random state. """ + self.sensitive_att = sensitive_att + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name + self.seed = seed + self.clf_threshold = threshold + self.fitted_step = None + + + def fit(self, df): + # wrap the input dataframe with AIF 360 object "BinaryLabelDataset" + aif_true_df = BinaryLabelDataset(df=df.drop(columns=[self.pred_target_col]), label_names=[self.target_col], + protected_attribute_names=[self.sensitive_att]) + + aif_pred_df = aif_true_df.copy() + aif_pred_df.labels = np.array([int(x >= self.clf_threshold) for x in df[self.pred_target_col]]) + + self.fitted_step = EqOddsPostprocessing([{self.sensitive_att: 0}], [{self.sensitive_att: 1}], self.seed).fit(aif_true_df, aif_pred_df) + + return self + + def apply(self, df): + # wrap the input dataframe with AIF 360 object "BinaryLabelDataset" + df["pred_label_" + self.target_col] = [int(x >= self.clf_threshold) for x in df[self.pred_target_col]] + aif_pred_df = BinaryLabelDataset(df=df.drop(columns=[self.pred_target_col]), + label_names=["pred_label_" + self.target_col], + protected_attribute_names=[self.sensitive_att]) + + after_aif_df = self.fitted_step.predict(aif_pred_df) + + after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) + after_df[self.pred_target_col] = after_aif_df.labels + + return after_df + + def name(self): + return "Eq_odds" + + def abbr_name(self): + return "EQ" + + def step_name(self): + return "FairPostprocessor" + + def input_encoded_data(self): + return True - cur_step = EqOddsPostprocessing([{sensitive_att: 0}], [{sensitive_att: 1}], seed) - super().__init__("@".join(["AIF_EqOddsPostprocessing", sensitive_att]), cur_step, df, sensitive_att, target_col, input_score=False, clf_threshold=threshold) + def output_encoded_data(self): + return False + def fit_only_on_train(self): + return True -class AIF_CalibratedEqOddsPostprocessing(Postprocessor): +class AIF_CalibratedEqOddsPostprocessing(Step): - def __init__(self, df, target_col, sensitive_att, threshold=0.5, seed=0, cost_constraint='weighted'): + def __init__(self, target_col, sensitive_att, seed, cost_constraint='weighted'): """ :param df: pandas dataframe, stores the data to fit the postprocessor. :param target_col: str, the name of the target variable in above data. Assume 1 represents the favorable class. :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. 
- :param threshold: float in [0, 1], the classification threshold to generate the predicted class label. - :param seed: integer, the seed for random state. + :param seed: integer, the seed for random process. :param cost_constraint: str, the fairness constraints format, value from [fpr, fnr, weighted]. The same parameter as in aif360.algorithms.postprocessing.CalibratedEqOddsPostprocessing. """ + self.sensitive_att = sensitive_att + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name + self.seed = seed + self.cost_constraint = cost_constraint + self.fitted_step = None + + def fit(self, df): + + aif_true_df = BinaryLabelDataset(df=df.drop(columns=[self.pred_target_col]), label_names=[self.target_col], + protected_attribute_names=[self.sensitive_att]) + aif_pred_df = aif_true_df.copy() + aif_pred_df.scores = df[self.pred_target_col] + self.fitted_step = CalibratedEqOddsPostprocessing([{self.sensitive_att: 0}], [{self.sensitive_att: 1}], cost_constraint=self.cost_constraint, seed=self.seed).fit(aif_true_df, aif_pred_df) + return self + + def apply(self, df): + aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], scores_names=[self.pred_target_col], + protected_attribute_names=[self.sensitive_att]) + + after_aif_df = self.fitted_step.predict(aif_pred_df) + after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) + after_df[self.pred_target_col] = after_aif_df.labels + + return after_df - cur_step = CalibratedEqOddsPostprocessing([{sensitive_att: 0}], [{sensitive_att: 1}], cost_constraint=cost_constraint, seed=seed) - super().__init__("@".join(["AIF_CalibratedEqOddsPostprocessing", sensitive_att]), cur_step, df, sensitive_att, target_col, input_score=True, clf_threshold=threshold) + def name(self): + return "Calibrated_eq_odds" -class AIF_RejectOptionPostprocessing(Postprocessor): + def abbr_name(self): + return "CEQ" - def __init__(self, df, target_col, sensitive_att, threshold=0.5): + def step_name(self): + return "FairPostprocessor" + + def input_encoded_data(self): + return True + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True + +class AIF_RejectOptionPostprocessing(Step): + + def __init__(self, target_col, sensitive_att): """ :param df: pandas dataframe, stores the data to fit the postprocessor. :param target_col: str, the name of the target variable in above data. Assume 1 represents the favorable class. :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. - :param threshold: float in [0, 1], the classification threshold to generate the predicted class label. 
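+
+        Intended usage, sketched (assumes ``df`` holds both the true "credit"
+        label and a "pred_credit" score from an upstream model step; see the
+        TODO below about an outstanding bug):
+
+            post = AIF_RejectOptionPostprocessing("credit", "sex")
+            post.fit(df)
+            adjusted = post.apply(df)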
""" - # TODO: fix the bug that reject option doesn't return results - cur_step = RejectOptionClassification([{sensitive_att: 0}], [{sensitive_att: 1}]) - super().__init__("@".join(["AIF_RejectOptionClassification", sensitive_att]), cur_step, df, sensitive_att, target_col, input_score=True, clf_threshold=threshold) + # TODO: fix the bug that reject option outputs error + self.sensitive_att = sensitive_att + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name + self.fitted_step = None + + def fit(self, df): + aif_true_df = BinaryLabelDataset(df=df.drop(columns=[self.pred_target_col]), label_names=[self.target_col], + protected_attribute_names=[self.sensitive_att]) + aif_pred_df = aif_true_df.copy() + aif_pred_df.scores = df[self.pred_target_col] + self.fitted_step = RejectOptionClassification([{self.sensitive_att: 0}], [{self.sensitive_att: 1}]).fit(aif_true_df, aif_pred_df) + return self + + def apply(self, df): + aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], scores_names=[self.pred_target_col], + protected_attribute_names=[self.sensitive_att]) + + after_aif_df = self.fitted_step.predict(aif_pred_df) + after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) + after_df[self.pred_target_col] = after_aif_df.labels + + return after_df + + def name(self): + return "Reject_option" + + def abbr_name(self): + return "RO" + + def step_name(self): + return "FairPostprocessor" + + def input_encoded_data(self): + return True + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True if __name__ == '__main__': - data = pd.read_csv("../../data/adult_post.csv") - # cur_o = AIF_RejectOptionPostprocessing(data, "income-per-year", "sex") - cur_o = AIF_EqOddsPostprocessing(data, "income-per-year", "sex") - # cur_o = AIF_CalibratedEqOddsPostprocessing(data, "income-per-year", "sex") + data = pd.read_csv("../../data/german_pre_encoded_after_model.csv") + # cur_o = AIF_RejectOptionPostprocessing("credit", "sex") + # cur_o = AIF_EqOddsPostprocessing("credit", "sex", 0) + cur_o = AIF_CalibratedEqOddsPostprocessing("credit", "sex", 0) + + cur_o.fit(data) after_data = cur_o.apply(data) - after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False) - print(cur_o.get_name()) \ No newline at end of file + after_data.to_csv("../../data/german_after_" + cur_o.name() + ".csv", index=False) diff --git a/pipeline/postprocess/postprocessor.py b/pipeline/postprocess/postprocessor.py deleted file mode 100644 index f10494c..0000000 --- a/pipeline/postprocess/postprocessor.py +++ /dev/null @@ -1,59 +0,0 @@ -""" - Super class for all the supported postprocessor classes. -""" -import numpy as np -from aif360.datasets import BinaryLabelDataset -from pipeline.step import Step - -class Postprocessor(Step): - def __init__(self, step_name, step, df, sensitive_att, target_col, input_score=True, clf_threshold=0.5): - """ - :param step_name: str, name of the current input step. - :param step: object of the initialized class. - :param df: pandas dataframe, stores the data. - :param sensitive_att: str, the name of a sensitive attribute. - :param target_col: str, the name of the target attribute. - :param input_score: boolean, represent whether the post-processor takes predicted score as input. Default is True. - :param clf_threshold: float in [0, 1], represents the threshold to categorize class labels from predicted scores. 
- """ - if "pred_"+target_col not in df.columns: - print("Require the predictions for ",target_col, " existing in the data!") - raise ValueError - super().__init__(step_name=step_name, df=df, sensitive_att=sensitive_att, target_col=target_col) - # assume the data set has been encoded to numerical values, - # intitialize a BinaryLabelDataset from AIF 360 - aif_true_df = BinaryLabelDataset(df=df.drop(columns=["pred_"+target_col]), label_names=[target_col], protected_attribute_names=[sensitive_att]) - - aif_pred_df = aif_true_df.copy() - - if input_score: - aif_pred_df.scores = df["pred_"+target_col] - else: - aif_pred_df.labels = np.array([int(x >= clf_threshold) for x in df["pred_"+target_col]]) - self.input_score = input_score - self.step = step.fit(aif_true_df, aif_pred_df) - self.clf_threshold = clf_threshold - - - - def apply(self, df): - """ - :param df: pandas dataframe, stores the data to apply the learned discretizer. - :return: pandas dataframe, stores the data after discretize. - """ - - # initialize AIF360 BinaryLabelDataset - - if self.input_score: # use score prediction to fit model, e.g. RejectOptionClassification, CalibratedEqOddsPostprocessing - aif_pred_df = BinaryLabelDataset(df=df, label_names=[self.target_col], scores_names=[self.pred_target_col], - protected_attribute_names=[self.sensitive_att]) - else: # use label prediction to fit model, e.g. EqOddsPostprocessing - df["pred_label_"+self.target_col] = [int(x >= self.clf_threshold) for x in df[self.pred_target_col]] - aif_pred_df = BinaryLabelDataset(df=df.drop(columns=[self.pred_target_col]), label_names=["pred_label_"+self.target_col], - protected_attribute_names=[self.sensitive_att]) - - after_aif_df = self.step.predict(aif_pred_df) - after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) - after_df[self.pred_target_col] = after_aif_df.labels - - return after_df \ No newline at end of file diff --git a/pipeline/preprocess/categorizers.py b/pipeline/preprocess/categorizers.py index 83d8d90..f0afb99 100644 --- a/pipeline/preprocess/categorizers.py +++ b/pipeline/preprocess/categorizers.py @@ -2,50 +2,138 @@ Classes to discretize numerical attributes into categorical attributes. """ import pandas as pd +import numpy as np from sklearn.preprocessing import KBinsDiscretizer from sklearn.preprocessing import Binarizer -from pipeline.preprocess.preprocessor import Preprocessor +from pipeline.step import Step +class NoBinarizer(Step): + def __init__(self): + self.fitted_step = None -class SK_Discretizer(Preprocessor): - def __init__(self, df, num_atts, bin_size, encode='ordinal', strategy='kmeans'): + def fit(self, df): + pass + + def apply(self, df): + return df + + def name(self): + return "NoBinarizer" + + def abbr_name(self): + return "NB" + + def step_name(self): + return "Categorizer" + + def input_encoded_data(self): + return False + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return False + +class SK_Discretizer(Step): + def __init__(self, num_atts, bin_size, encode='ordinal', strategy='kmeans'): """ - :param df: pandas dataframe, stores the data to fit the discretizer. :param num_atts: list of str, each str represents the name of a numerical attribute in above data. :param bin_size: list of integer, each integer represents the number of bins to categorize the corresponding numerical attribute. 
:param encode: same parameter with sklearn KBinsDiscretizer :param strategy: same parameter with sklearn KBinsDiscretizer """ - cur_step = {} - for idx, ai in enumerate(num_atts): - cur_step[ai] = KBinsDiscretizer(n_bins=bin_size[idx], encode=encode, strategy=strategy) + self.focus_atts = num_atts self.bin_size = bin_size - super().__init__("@".join([strategy+"Categorizer"]+num_atts), df, step=cur_step, focus_atts=num_atts, fit_flag=True) + self.encode = encode + self.strategy = strategy + + self.fitted_step = None + + def fit(self, df): + fitted_step = {} + for idx, ai in enumerate(self.focus_atts): + fitted_step[ai] = KBinsDiscretizer(n_bins=self.bin_size[idx], encode=self.encode, strategy=self.strategy).fit(np.array(df[ai]).reshape(-1, 1)) + self.fitted_step = fitted_step + return self + + def apply(self, df): + after_df = df.copy() + for ai in self.focus_atts: + after_df[ai] = self.fitted_step[ai].transform(np.array(after_df[ai]).reshape(-1, 1)) + + return after_df + + def name(self): + return "SK_Discretizer" + + def abbr_name(self): + return "DS" + + def step_name(self): + return "Categorizer" + def input_encoded_data(self): + return False -class SK_Binarizer(Preprocessor): - def __init__(self, df, num_atts, bin_thresholds, copy=True): + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True + +class SK_Binarizer(Step): + def __init__(self, num_atts, bin_thresholds, copy=True): """ - :param df: pandas dataframe, stores the data to fit the binarizer. :param num_atts: list of str, each str represents the name of a numerical attribute in above data. :param bin_thresholds: list of float, each float represents the value to binarize the corresponding numerical attributes. Values below or equal to this threshold are replaced by 0, above it by 1. 
:param copy: same parameter with sklearn Binarizer
        """
-        cur_step = {}
-        for idx, ai in enumerate(num_atts):
-            cur_step[ai] = Binarizer(threshold=bin_thresholds[idx], copy=copy)
-
+        self.focus_atts = num_atts
         self.bin_thresholds = bin_thresholds
-        super().__init__("@".join(["BinaryCategorizer"]+num_atts), df, step=cur_step, focus_atts=num_atts, fit_flag=True)
+        self.copy = copy
+
+        self.fitted_step = None
+
+    def fit(self, df):
+        fitted_step = {}
+        for idx, ai in enumerate(self.focus_atts):
+            fitted_step[ai] = Binarizer(threshold=self.bin_thresholds[idx], copy=self.copy).fit(np.array(df[ai]).reshape(-1, 1))
+        self.fitted_step = fitted_step
+        return self
+
+    def apply(self, df):
+        after_df = df.copy()
+        for ai in self.focus_atts:
+            after_df[ai] = self.fitted_step[ai].transform(np.array(after_df[ai]).reshape(-1, 1))
+
+        return after_df
+    def name(self):
+        return "SK_Binarizer"
+
+    def abbr_name(self):
+        return "BI"
+
+    def step_name(self):
+        return "Categorizer"
+
+    def input_encoded_data(self):
+        return False
+
+    def output_encoded_data(self):
+        return False
+
+    def fit_only_on_train(self):
+        return True

 if __name__ == '__main__':
-    data = pd.read_csv("../../data/adult.csv")
-    cur_o = SK_Discretizer(data, ["fnlwgt", "age"], [2, 3])
-    # cur_o = SK_Binarizer(data, ["fnlwgt", "age"], [100000, 30])
-    after_data = cur_o.apply(data)
-    after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False)
+    data = pd.read_csv("../../data/german.csv")
+    # cur_o = SK_Discretizer(["month", "age"], [2, 3])
+    cur_o = SK_Binarizer(["month", "age"], [24, 30])

-    print(cur_o.get_name())
\ No newline at end of file
+    cur_o.fit(data)
+    after_data = cur_o.apply(data)
+    after_data.to_csv("../../data/german_"+cur_o.name()+".csv", index=False)
diff --git a/pipeline/preprocess/encoders.py b/pipeline/preprocess/encoders.py
index b2a6712..e81ab60 100644
--- a/pipeline/preprocess/encoders.py
+++ b/pipeline/preprocess/encoders.py
@@ -1,98 +1,144 @@
 """
     Classes to encode the string values for categorical attributes.
 """
-"""
-    Classes to discretize numerical attributes into categorical attributes.
-"""
+
 import numpy as np
 import pandas as pd
 from sklearn.preprocessing import OrdinalEncoder
-from sklearn.preprocessing import OneHotEncoder
-from pipeline.preprocess.preprocessor import Preprocessor
+from pipeline.step import Step

-class SK_OrdinalEncoder(Preprocessor):
-    def __init__(self, df, cate_atts, sort_label, sort_positive_value):
+class SK_OrdinalEncoder(Step):
+    def __init__(self, cate_atts, sort_label, sort_positive_value):
         """
-        :param df: pandas dataframe, stores the data to fit the encoder.
         :param cate_atts: list of str, each str represents the name of a categorical attribute in above data.
-        :param encode_order_dict: dict, key (str) represents the name of categorical attribute, value is a list of str, representing the ordered categories of each each categorical attribute.
         :param sort_label: str, name of the target variable to determine the order of ordinal encodings.
+        :param sort_positive_value: str, value of the target variable to determine the order of ordinal encodings.
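+
+        Illustrative example: with sort_label="credit" and
+        sort_positive_value="good", the categories of each focus attribute are
+        ranked by how many of their rows have credit == "good", so the category
+        with the fewest positive rows is encoded as 0.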
+ """ - cur_step = {} - for ci in cate_atts: - value_counts = {} - for vi in df[ci].unique(): - value_counts[vi] = df[(df[ci] == vi) & (df[sort_label] == sort_positive_value)].shape[0] - value_orders = sorted(value_counts.keys(), key=lambda x: value_counts[x]) - cur_step[ci] = OrdinalEncoder(categories=[value_orders]) + self.focus_atts = cate_atts self.sort_label = sort_label self.sort_positive_value = sort_positive_value - super().__init__("@".join(["OrdinalEncoder"]+cate_atts), df, step=cur_step, focus_atts=cate_atts, fit_flag=True) + self.fitted_step = None + + def fit(self, df): + fitted_step = {} + for idx, ai in enumerate(self.focus_atts): + value_counts = {} + for vi in df[ai].unique(): + value_counts[vi] = df[(df[ai] == vi) & (df[self.sort_label] == self.sort_positive_value)].shape[0] + value_orders = sorted(value_counts.keys(), key=lambda x: value_counts[x]) + fitted_step[ai] = OrdinalEncoder(categories=[value_orders]).fit(np.array(df[ai]).reshape(-1, 1)) + self.fitted_step = fitted_step + return self + + def apply(self, df): + after_df = df.copy() + for ai in self.focus_atts: + after_df[ai] = self.fitted_step[ai].transform(np.array(after_df[ai]).reshape(-1, 1)) + + return after_df + + def name(self): + return "SK_OrdinalEncoder" + + def abbr_name(self): + return "OE" + + def step_name(self): + return "Encoder" -class SK_OneHotEncoder(Preprocessor): - def __init__(self, df, cate_atts): + def input_encoded_data(self): + return True + + def output_encoded_data(self): + return True + + def fit_only_on_train(self): + return True + + +class OneHotEncoder(Step): + def __init__(self, cate_atts): """ - :param df: pandas dataframe, stores the data to fit the encoder. :param cate_atts: list of str, each str represents the name of a categorical attribute in above data. - :param encode_order_dict: dict, key (str) represents the name of categorical attribute, value is a list of str, representing the ordered categories of each each categorical attribute. - :param sort_label: str, name of the target variable to determine the order of ordinal encodings. """ - # TODO: fix the bug that sklearn one-hot encoder change the dimension - # cur_step = {} - # for ci in cate_atts: - # cur_step[ci] = OneHotEncoder() - super().__init__("@".join(["OneHotEncoder"]+cate_atts), df, step=None, focus_atts=cate_atts, fit_flag=False) + self.focus_atts = cate_atts + + def fit(self, df): + pass def apply(self, df): - """ - :param df: pandas dataframe, stores the data to apply the learned encoder. - :return: pandas dataframe, stores the data after encode. - """ after_df = pd.get_dummies(df, columns=self.focus_atts, prefix_sep='=') - # after_df = df[list(set(df.columns).difference(self.focus_atts))] - # for ci in self.focus_atts: - # ci_encode_array = self.step[ci].transform(np.array(df[ci]).reshape(-1, 1)).toarray() - # ci_encode_df = pd.DataFrame(ci_encode_array, columns=[ci+"="+x for x in self.step[ci].categories_[0]]) - # after_df = pd.concat([after_df, ci_encode_df], axis=1) return after_df -class CustomCateAttsEncoder(Preprocessor): - def __init__(self, df, sensitive_atts, protected_values): - """ To encode sensitive attribute and target feature. - :param df: pandas dataframe, stores the data to fit the encoder. - :param sensitive_atts: list of str, each str represents the name of a sensitive attribute. - :param protected_values: dict, key is the str in sensitive_atts, value is a list of str, each str represent the protected values for the key sensitive attribute. 
+    def name(self):
+        return "OneHotEncoder"
+
+    def abbr_name(self):
+        return "HE"
+
+    def step_name(self):
+        return "Encoder"
+
+    def input_encoded_data(self):
+        return False
+
+    def output_encoded_data(self):
+        return True
+
+    def fit_only_on_train(self):
+        return False
+
+# Intended only for encoding sensitive attributes and the target column (replaces CustomCateAttsEncoder)
+class MappingEncoder(Step):
+    def __init__(self, focus_atts, mapping_dict):
+        """ Encode sensitive attributes and the target feature by mapping strings to numerical values according to the input dictionary.
+        :param focus_atts: list of str, each str represents the name of an attribute.
+        :param mapping_dict: dict, key is a value (str) of an attribute in focus_atts, value is the int that encodes it. E.g. {"female": 0, "male": 1} for the values of attribute gender.
         """
-        super().__init__("@".join(["SensitiveAttEncoder"]+sensitive_atts), df, step=None, focus_atts=sensitive_atts, fit_flag=False)
-        for x in sensitive_atts:
-            if sum([vi not in df[x].unique() for vi in protected_values[x]]) > 0:
-                print("Some input values of sensitive attribute ", x, " are not valid!")
-                raise ValueError
-        self.protected_values = protected_values
+
+        self.focus_atts = focus_atts
+        self.mapping_dict = mapping_dict
+
+    def fit(self, df):
+        pass
+
     def apply(self, df):
-        """
-        :param df: pandas dataframe, stores the data to apply the encoder.
-        :return: pandas dataframe, stores the data after encode.
-        """
         after_df = df.copy()
         for si in self.focus_atts:
-            after_df[si] = after_df[si].apply(lambda x: int(x not in self.protected_values[si]))
+            after_df[si] = after_df[si].apply(lambda x: self.mapping_dict[x])
         return after_df

+    def name(self):
+        return "MappingEncoder"
+
+    def abbr_name(self):
+        return "ME"
+
+    def step_name(self):
+        return "SpecialEncoder"
+
+    def input_encoded_data(self):
+        return True
+
+    def output_encoded_data(self):
+        return True
+
+    def fit_only_on_train(self):
+        return False

 if __name__ == '__main__':
-    data = pd.read_csv("../data/train/adult__Imputer.csv")
-    # data = pd.read_csv("../../data/adult_pre_RandomSampler_1000.csv")
-    # data = pd.read_csv("../../data/adult_pre_SensitiveAttsEncoder_sex_race_income-per-year.csv")
-    # cur_o = SK_OrdinalEncoder(data, ["sex", "race"], "income-per-year", ">50K")
-    cur_o = SK_OneHotEncoder(data, ["workclass", "education", "marital-status", "occupation", "relationship", "native-country"])
-    # cur_o = CustomCateAttsEncoder(data, ["sex", "race", "income-per-year"], {"sex": ["Female"], "race": ["Black"], "income-per-year": ["<=50K"]})
+    data = pd.read_csv("../../data/german_AIF.csv")

-    after_data = cur_o.apply(data)
-    after_data.to_csv("../data/adult_"+cur_o.get_name()+".csv", index=False)
+    cur_o = SK_OrdinalEncoder(["sex"], "credit", "good")
+    # cur_o = OneHotEncoder(["sex", "credit", "age"])

-    print(cur_o.get_name())
\ No newline at end of file
+    # cur_o = MappingEncoder(["sex", "credit"], {"female": 0, "male": 1, "good": 1, "bad": 0})
+
+    cur_o.fit(data)
+    after_data = cur_o.apply(data)
+    after_data.to_csv("../../data/german_AIF_"+cur_o.name()+".csv", index=False)
diff --git a/pipeline/preprocess/fair_preprocessors.py b/pipeline/preprocess/fair_preprocessors.py
index aeb519f..908ad19 100644
--- a/pipeline/preprocess/fair_preprocessors.py
+++ b/pipeline/preprocess/fair_preprocessors.py
@@ -2,39 +2,75 @@
     Class of fairness preprocessing interventions
 """
-import numpy as np
 import pandas as pd
 from aif360.datasets import BinaryLabelDataset
 from aif360.algorithms.preprocessing import Reweighing as Reweighing
 from
aif360.algorithms.preprocessing import LFR as LFR from aif360.algorithms.preprocessing import DisparateImpactRemover -from pipeline.preprocess.preprocessor import Preprocessor +from pipeline.step import Step -class AIF_Reweighing(Preprocessor): +class AIF_Reweighing(Step): - def __init__(self, df, target_col, sensitive_att): + def __init__(self, target_col, sensitive_att): """ - :param df: pandas dataframe, stores the data to fit the scaler. :param target_col: str, the name of the target variable in above data. - :param target_positive_value: str, the value of above target variable that represents positive outcome. default is 1. - :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. + :param sensitive_att: str, the name of a sensitive attribute in above data. """ + self.sensitive_att = sensitive_att + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name - cur_step = Reweighing([{sensitive_att: 0}], [{sensitive_att: 1}]) - super().__init__("@".join(["AIF_Reweighing", sensitive_att]), df, step=cur_step, fit_flag=True, weight_flag=True, sensitive_att=sensitive_att, target_col=target_col, fair_aware=True) + self.fitted_step = None + def fit(self, df): + # wrap the input dataframe with AIF 360 object "BinaryLabelDataset" + aif_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[self.sensitive_att]) + self.fitted_step = Reweighing([{self.sensitive_att: 0}], [{self.sensitive_att: 1}]).fit(aif_df) + return self -class AIF_DIRemover(Preprocessor): + def apply(self, df): + # wrap the input dataframe with AIF 360 object "BinaryLabelDataset" + aif_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[self.sensitive_att]) + after_aif_df = self.fitted_step.transform(aif_df) + # TODO: double check whether to return weights + preprocessed_weights = after_aif_df.instance_weights - def __init__(self, df, target_col, sensitive_att, repair_level): + after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) + + return after_df + + def name(self): + return "Reweighing" + + def abbr_name(self): + return "RW" + + def step_name(self): + return "FairPreprocessor" + + def input_encoded_data(self): + return True + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True + +class AIF_DIRemover(Step): + def __init__(self, target_col, sensitive_att, repair_level): """ - :param df: pandas dataframe, stores the data to fit the scaler. :param target_col: str, the name of the target variable in above data. - :param target_positive_value: str, the value of above target variable that represents positive outcome. default is 1. - :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected. + :param sensitive_att: str, the name of a sensitive attribute in above data. """ + self.sensitive_att = sensitive_att + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name + + self.fitted_step = None + if repair_level is None or not isinstance(repair_level, float): print("Input repair_level is not valid! 
Should be float within [0,1]!")
             raise ValueError
@@ -42,41 +78,109 @@ def __init__(self, df, target_col, sensitive_att, repair_level):
         if repair_level < 0 or repair_level > 1:
             print("Input repair_level is not valid! Should be float within [0,1]!")
             raise ValueError
+        self.repair_level = repair_level

-        cur_step = DisparateImpactRemover(repair_level=repair_level, sensitive_attribute=sensitive_att)
-        super().__init__("@".join(["AIF_DIRemover", sensitive_att]), df, step=cur_step, fit_flag=False, sensitive_att=sensitive_att, target_col=target_col, fair_aware=True)
+    def fit(self, df):
+        # TODO: Rewrite the AIF DIRemover to support fitting separately on train, validation, and test data.
+        # aif_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[self.sensitive_att])
+        # self.fitted_step = DisparateImpactRemover(repair_level=self.repair_level, sensitive_attribute=self.sensitive_att).fit(aif_df)
+        #
+        pass
+
+    def apply(self, df):
+        # wrap the input dataframe with AIF 360 object "BinaryLabelDataset"
+        aif_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[self.sensitive_att])
+        after_aif_df = DisparateImpactRemover(repair_level=self.repair_level, sensitive_attribute=self.sensitive_att).fit_transform(aif_df)
+
+        after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True)
+        return after_df
+
+    def name(self):
+        return "DIRemover"
+
+    def abbr_name(self):
+        return "RI"

-class AIF_LFR(Preprocessor):
+    def step_name(self):
+        return "FairPreprocessor"

-    def __init__(self, df, target_col, sensitive_att):
+    def input_encoded_data(self):
+        return True
+
+    def output_encoded_data(self):
+        return False
+
+    def fit_only_on_train(self):
+        return False
+
+class AIF_LFR(Step):
+
+    def __init__(self, target_col, sensitive_att):
         """ NOTE: very sensitive to input data, refer to the example in AIF 360 for this preprocessor
-        :param df: pandas dataframe, stores the data to fit the scaler.
         :param target_col: str, the name of the target variable in above data.
-        :param target_positive_value: str, the value of above target variable that represents positive outcome. default is 1.
-        :param sensitive_att: str, the name of a sensitive attribute in above data. If none, call auto_detection to update. Value 0 represent protected.
+        :param sensitive_att: str, the name of a sensitive attribute in above data.
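+
+        Illustrative usage sketch (assumes a fully numeric, already-encoded
+        dataframe; column names follow the german example in __main__ below):
+            lfr = AIF_LFR("credit", "sex")
+            lfr.fit(encoded_train_df)
+            repaired_df = lfr.apply(encoded_test_df)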
""" # TODO: fix the bug of LFR for not returning categorical atts # TODO: experiment with the same data used by AIF360 tutorial to compare whether the categorical atts are returned - cur_step = LFR([{sensitive_att: 0}], [{sensitive_att: 1}]) - super().__init__("@".join(["AIF_LFR", sensitive_att]), df, step=cur_step, fit_flag=True, sensitive_att=sensitive_att, target_col=target_col, fair_aware=True) + self.sensitive_att = sensitive_att + self.target_col = target_col + self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name + self.fitted_step = None + def fit(self, df): + # wrap the input dataframe with AIF 360 object "BinaryLabelDataset" + aif_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[self.sensitive_att]) + self.fitted_step = LFR([{self.sensitive_att: 0}], [{self.sensitive_att: 1}]).fit(aif_df) + return self -if __name__ == '__main__': - data = pd.read_csv("../../data/adult_pre_reweigh.csv") - # cur_o = AIF_Reweighing(data, "income-per-year", "sex") - # cur_o = AIF_LFR(data, "income-per-year", "sex") - cur_o = AIF_DIRemover(data, "income-per-year", "sex", 0.8) + def apply(self, df): + # wrap the input dataframe with AIF 360 object "BinaryLabelDataset" + aif_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[self.sensitive_att]) + after_aif_df = self.fitted_step.transform(aif_df) + + after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) + return after_df + + def name(self): + return "LFR" + def abbr_name(self): + return "LR" + + def step_name(self): + return "FairPreprocessor" + + def input_encoded_data(self): + return True + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True + +if __name__ == '__main__': + data = pd.read_csv("../../data/german_pre_encoded.csv") + # name_mapping = {"female": 0, "male": 1, "young": 0, "old": 1, "bad": 0, "good": 1} + # for atti in ["credit", "sex", "age"]: + # data[atti] = data[atti].apply(lambda x: name_mapping[x]) + # + # data.to_csv("../../data/german_pre_encoded.csv") + + # cur_o = AIF_Reweighing("credit", "sex") + # cur_o = AIF_LFR("credit", "sex") # TODO: bug not working for this dataset, test another dataset + cur_o = AIF_DIRemover("credit", "sex", 0.8) + cur_o.fit(data) after_data = cur_o.apply(data) - # for Reweighing + # # for Reweighing # after_data, new_weights = cur_o.apply(data) - after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False) + after_data.to_csv("../../data/german_after_"+cur_o.name()+".csv", index=False) - print(cur_o.get_name()) + # print(cur_o.get_name()) # for Reweighing # print(len(new_weights)) # print(new_weights) \ No newline at end of file diff --git a/pipeline/preprocess/filters.py b/pipeline/preprocess/filters.py index b2a4426..264deb4 100644 --- a/pipeline/preprocess/filters.py +++ b/pipeline/preprocess/filters.py @@ -2,68 +2,93 @@ Classes to filter slice of data. 
""" import pandas as pd -from pipeline.preprocess.preprocessor import Preprocessor - -# utility functions -def wrap_filter(att, value): - if isinstance(value, str): # string or null - if value in ["?", ""]: # for null - return '{}!={}'.format(att, att) - else: - return '{}=="{}"'.format(att, value) - else: # numerical value - return '{}=={}'.format(att, value) +from pipeline.step import Step + # TODO: add multiple filter class -class RowFilter(Preprocessor): +class RowFilter(Step): - def __init__(self, df, column, value): + def __init__(self, column, value): """ - :param column: str, name of the column name to be filtered. - :param value: str, integer, float, the value of the column to be filtered. + :param column: str, name of the column name to be focused. + :param value: str, integer, float, the value of the column to keep. """ - if column is None or value is None: - print("Need to specify column and value to create filter!") - raise ValueError - if column not in df.columns: - print("Need to specify valid column!") - raise ValueError - if value not in df[column].unique(): - print("Need to specify valid value!") - raise ValueError + self.column = column self.value = value - super().__init__(step_name="@".join(["RowFilter", column, str(value)]), df=df, focus_atts=[column], fit_flag=False) + + def fit(self, df): + pass + + # utility functions to wrap the input filter to a df query + def wrap_filter(self): + if isinstance(self.value, str): # string or null + if self.value in ["?", ""]: # for null + return '{}!={}'.format(self.column, self.column) + else: + return '{}=="{}"'.format(self.column, self.value) + else: # numerical value + return '{}=={}'.format(self.column, self.value) def apply(self, df): - """ - :param df: pandas dataframe, stores the data to be filtered. - :return: pandas dataframe, stores the data after filter. - """ - return df.query(wrap_filter(self.column, self.value)) + return df.query(self.wrap_filter()) + + def name(self): + return "RowFilter" + + def abbr_name(self): + return "RF" + + def step_name(self): + return "Filter" + + def input_encoded_data(self): + return False -class RemoveColumnFilter(Preprocessor): - def __init__(self, df, exclude_cols): + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return False + +class ColumnFilter(Step): + def __init__(self, remove_cols): """ - :param exclude_cols: list of string, each string represents the name of the column to be excluded. + :param exclude_cols: list of string, each string represents the name of the column to be filtered out. """ + self.remove_cols = remove_cols - super().__init__(step_name="RemoveColumnFilter", df=df, focus_atts=exclude_cols, fit_flag=False) + def fit(self, df): + pass def apply(self, df): - """ - :param df: pandas dataframe, stores the data to be filtered. - :return: pandas dataframe, stores the data after filter. 
- """ - return df.drop(columns=self.focus_atts) + return df.drop(columns=self.remove_cols) + + def name(self): + return "ColumnFilter" + + def abbr_name(self): + return "CF" + + def step_name(self): + return "Filter" + + def input_encoded_data(self): + return False + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return False if __name__ == '__main__': - data = pd.read_csv("../../data/adult_pre_reweigh.csv") - # cur_o = RowFilter(data, "sex", 0) - cur_o = RemoveColumnFilter(data, ["sex","race"]) + data = pd.read_csv("../../data/german_AIF.csv") + cur_o = RowFilter("sex", "female") + # cur_o = ColumnFilter(["sex", "age"]) + cur_o.fit(data) after_data = cur_o.apply(data) - after_data.to_csv("../../data/adult_" + cur_o.get_name() + ".csv", index=False) + after_data.to_csv("../../data/german_AIF_" + cur_o.name() + ".csv", index=False) - print(cur_o.get_name()) diff --git a/pipeline/preprocess/imputers.py b/pipeline/preprocess/imputers.py index 36aee84..66cfc57 100644 --- a/pipeline/preprocess/imputers.py +++ b/pipeline/preprocess/imputers.py @@ -4,65 +4,143 @@ import numpy as np import pandas as pd import datawig -from pipeline.preprocess.preprocessor import Preprocessor from sklearn.impute import SimpleImputer +from pipeline.step import Step -class DropNAImputer(Preprocessor): - def __init__(self, df, na_mark=None): +class NoImputer(Step): + def __init__(self): + self.fitted_step = None + + def fit(self, df): + pass + + def apply(self, df): + return df + + def name(self): + return "NoImputer" + + def abbr_name(self): + return "NI" + + def step_name(self): + return "Imputer" + + def input_encoded_data(self): + return False + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return False + +class DropNAImputer(Step): + def __init__(self, na_mark=None): """ - :param df: pandas dataframe, stores the data to fit the imputer. :param na_mark: str, represents the symbol of missing values. Default is None, i.e. NaN represents the missing values. """ - super().__init__("DropNAImputer", df=df, fit_flag=False, na_mark=na_mark) + self.na_mark = na_mark + + def fit(self, df): + pass def apply(self, df): - """ - :param df: pandas dataframe, stores the data to impute. - :return: pandas dataframe, stores the data after impute. - """ if self.na_mark: df = df.replace({self.na_mark:np.nan}) return df.dropna() -class ModeImputer(Preprocessor): - def __init__(self, df, num_atts, cate_atts, na_mark=None): + def name(self): + return "DropNAImputer" + + def abbr_name(self): + return "DN" + + def step_name(self): + return "Imputer" + + def input_encoded_data(self): + return False + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return False + +class ModeImputer(Step): + def __init__(self, num_atts, cate_atts, na_mark=None): """ - :param df: pandas dataframe, stores the data to fit the imputer. :param num_atts: list of str, each str represents the name of numerical column to be imputed using the mean value. :param cate_atts: list of str, each str represents the name of categorical column to be imputed using the most frequent value. :param na_mark: str, represents the symbol of missing values. Default is None, i.e. NaN represents the missing values. 
""" - if len(set(num_atts).intersection(cate_atts)) > 0: - print("Some attributes are both in num_atts and cate_atts!") - raise ValueError + self.num_atts = num_atts + self.cate_atts = cate_atts + self.na_mark = na_mark + + self.fitted_step = None - cur_step = {} - if len(cate_atts) > 0: - for ci in cate_atts: - cur_step[ci] = SimpleImputer(strategy='most_frequent') - if len(num_atts) > 0: - for ni in num_atts: - cur_step[ni] = SimpleImputer(strategy='mean') + def fit(self, df): + fitted_step = {} + if len(self.cate_atts) > 0: + for ci in self.cate_atts: + fitted_step[ci] = SimpleImputer(strategy='most_frequent').fit(np.array(df[ci]).reshape(-1, 1)) + if len(self.num_atts) > 0: + for ni in self.num_atts: + fitted_step[ni] = SimpleImputer(strategy='mean').fit(np.array(df[ni]).reshape(-1, 1)) + self.fitted_step = fitted_step - super().__init__("@".join(["ModeImputer"]+num_atts+cate_atts), df, step=cur_step, focus_atts=cate_atts+num_atts, fit_flag=True, na_mark=na_mark) + return self + def apply(self, df): + if self.na_mark: + df = df.replace({self.na_mark:np.nan}) + after_df = df.copy() + for ai in self.num_atts + self.cate_atts: + after_df[ai] = self.fitted_step[ai].transform(np.array(after_df[ai]).reshape(-1, 1)) + + return after_df + + def name(self): + return "SK_ModeImputer" + + def abbr_name(self): + return "MI" + + def step_name(self): + return "Imputer" + + def input_encoded_data(self): + return False + + def output_encoded_data(self): + return False -class DatawigImputer(Preprocessor): - def __init__(self, df, impute_atts, na_mark=None, output_path="datawig/", num_epochs=50): + def fit_only_on_train(self): + return True + +class DatawigImputer(Step): + def __init__(self, impute_atts, na_mark=None, output_path="datawig/", num_epochs=50): """ - :param df: pandas dataframe, stores the data to fit the imputer. :param impute_atts: list of str, each str represents the name of column to be imputed using datawig model. Column can be categorical or numerical. :param na_mark: str, represents the symbol of missing values. Default is None, i.e. NaN represents the missing values. :param output_path: str, the path to store the learned datawig model. :param num_epochs: integer, the maximum iteration of datawig model. 
""" - super().__init__("@".join(["DatawigImputer"] + impute_atts), df, focus_atts=impute_atts, fit_flag=False, na_mark=na_mark) + self.focus_atts = impute_atts + self.na_mark = na_mark + self.output_path = output_path + self.num_epochs = num_epochs + + self.fitted_step = None + def fit(self, df): learned_imputers = {} - for ai in impute_atts: - learned_imputers[ai] = datawig.SimpleImputer(input_columns=list(set(df.columns).difference(ai)), - output_column=ai, output_path=output_path).fit(train_df=df, num_epochs=num_epochs) - self.step = learned_imputers + for ai in self.focus_atts: + learned_imputers[ai] = datawig.SimpleImputer(input_columns=list(set(df.columns).difference(ai)), output_column=ai, output_path=self.output_path).fit(train_df=df, num_epochs=self.num_epochs) + self.fitted_step = learned_imputers + return self def apply(self, df): """ @@ -73,16 +151,33 @@ def apply(self, df): df = df.replace({self.na_mark:np.nan}) after_df = df.copy() for ai in self.focus_atts: - after_df[ai] = self.step[ai].predict(df)[ai + '_imputed'] + after_df[ai] = self.fitted_step[ai].predict(df)[ai + '_imputed'] return after_df + def name(self): + return "DatawigImputer" + + def abbr_name(self): + return "DW" + + def step_name(self): + return "Imputer" + + def input_encoded_data(self): + return False + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True + if __name__ == '__main__': - data = pd.read_csv("../../data/adult_pre_RandomSampler_1000.csv") - cur_o = DropNAImputer(data, na_mark="?") - # cur_o = ModeImputer(data, ["fnlwgt"], ["workclass"], na_mark="?") - # cur_o = DatawigImputer(data, ["workclass"], na_mark="?") + data = pd.read_csv("../../data/adult_AIF.csv") + # cur_o = DropNAImputer(na_mark="?") + cur_o = ModeImputer([],["workclass"], na_mark="?") + # cur_o = DatawigImputer(["workclass"], na_mark="?") # TODO: test after fix dependency issue + cur_o.fit(data) after_data = cur_o.apply(data) - after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False) - - print(cur_o.get_name()) \ No newline at end of file + after_data.to_csv("../../data/adult_AIF_"+cur_o.name()+".csv", index=False) diff --git a/pipeline/preprocess/preprocessor.py b/pipeline/preprocess/preprocessor.py deleted file mode 100644 index 6c5ffae..0000000 --- a/pipeline/preprocess/preprocessor.py +++ /dev/null @@ -1,71 +0,0 @@ -""" - Super class for all the supported preprocessor classes. -""" -import numpy as np -from aif360.datasets import BinaryLabelDataset -from pipeline.step import Step - -class Preprocessor(Step): - def __init__(self, step_name, df, step=None, focus_atts=[], fit_flag=True, weight_flag=False, sensitive_att=None, target_col=None, fair_aware=False, na_mark=None): - """ - :param step_name: str, name of the current input step. - :param df: pandas dataframe, stores the data. - :param step: object of the initialized class. If none, initialize here. - :param focus_atts: lisf of str, each str represents the name of a column in above data that will be pre-processed. - :param fit_flag: boolean, whether to initialize step object here. - :param weight_flag: boolean, whether to output extra sample weight after fair-preprocessor. - :param sensitive_att: str, the name of a sensitive attribute. - :param target_col: str, the name of the target attribute. - :param fair_aware: boolean, whether the preprocessor is fair-aware. Default is False. If true, sensitive_att and target_col can not be null. 
- """ - super().__init__(step_name=step_name, df=df, focus_atts=focus_atts, sensitive_att=sensitive_att, target_col=target_col) - - if len(focus_atts) > 0 and fit_flag: - fitted_step = {} - for idx, ai in enumerate(focus_atts): - fitted_step[ai] = step[ai].fit(np.array(df[ai]).reshape(-1, 1)) - self.step = fitted_step - elif fair_aware and fit_flag: # for fair-preprocessors - aif_df = BinaryLabelDataset(df=df, label_names=[target_col], protected_attribute_names=[sensitive_att]) - self.step = step.fit(aif_df) - else: - if step is not None: - self.step = step - - # address different encoding of missing values - if na_mark is not None: - self.na_mark = na_mark - else: - self.na_mark = None - self.fair_aware = fair_aware - self.fit_flag = fit_flag - self.weight_flag = weight_flag - - - def apply(self, df): - """ - :param df: pandas dataframe, stores the data to apply the learned discretizer. - :return: pandas dataframe, stores the data after discretize. - """ - if self.na_mark: - df = df.replace({self.na_mark:np.nan}) - if self.fair_aware: # fair-preprocessor - aif_df = BinaryLabelDataset(df=df, label_names=[self.target_col], protected_attribute_names=[self.sensitive_att]) - if self.fit_flag: # fit has been initialized - after_aif_df = self.step.transform(aif_df) - else: # fit and transform is combined, e.g. DisparateImpactRemover - after_aif_df = self.step.fit_transform(aif_df) - - after_df, _ = after_aif_df.convert_to_dataframe(de_dummy_code=True, sep='=', set_category=True) - if self.weight_flag: - preprocessed_weights = after_aif_df.instance_weights - - else: # regular preprocessor - after_df = df.copy() - for ai in self.focus_atts: - after_df[ai] = self.step[ai].transform(np.array(after_df[ai]).reshape(-1, 1)) - - if self.weight_flag: # for the preprocessor that updates weights, e.g. Reweighing - return after_df, preprocessed_weights - else: - return after_df \ No newline at end of file diff --git a/pipeline/preprocess/samplers.py b/pipeline/preprocess/samplers.py index e95ea2d..d19e390 100644 --- a/pipeline/preprocess/samplers.py +++ b/pipeline/preprocess/samplers.py @@ -4,58 +4,92 @@ import numpy as np import pandas as pd -from pipeline.preprocess.preprocessor import Preprocessor +from pipeline.step import Step -class RandomSampler(Preprocessor): - def __init__(self, df, sample_n, random_state=0): + +class NoSampler(Step): + def __init__(self): + self.fitted_step = None + + def fit(self, df): + pass + + def apply(self, df): + return df + + def name(self): + return "NoSampler" + + def abbr_name(self): + return "NS" + + def step_name(self): + return "Sampler" + + def input_encoded_data(self): + return False + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return False + +class RandomSampler(Step): + def __init__(self, sample_n, seed): """ :param sample_n: integer, the size of the sampled subset of data + :param seed: integer, the seed for random process. """ - if not sample_n: - print("Need to specify a size greater than 0!") - raise ValueError self.sample_n = sample_n - self.random_state = random_state - super().__init__("RandomSampler@"+str(sample_n), df=df, fit_flag=False) + self.seed = seed + + def fit(self, df): + pass def apply(self, df): - """ - :param df: pandas dataframe, stores the data to be sampled. - :return: pandas dataframe, stores the data after sample. 
- """ - return df.sample(n=self.sample_n, random_state=self.random_state) + return df.sample(n=self.sample_n, random_state=self.seed) + + def name(self): + return "RandomSampler" + + def abbr_name(self): + return "RS" + + def step_name(self): + return "Sampler" -class BalancePopulationSampler(Preprocessor): - def __init__(self, df, sample_n, balance_col, random_state=0): + def input_encoded_data(self): + return False + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return False + + +class BalancePopulationSampler(Step): + def __init__(self, sample_n, balance_col, seed): """ :param sample_n: integer, the size of the sampled subset of data :param balance_col: str, the name of a categorical column that the population of groups within this column are balanced in the sampled subset. - :param random_state: integer, the seed for random process, same as random_state in pandas.DataFrame.sample. + :param seed: integer, the seed for random process. """ - if not sample_n: - print("Need to specify a size greater than 0!") - raise ValueError - if not balance_col: - print("Need to specify the name of a column to perform balance sampling within this column!") - raise ValueError - if balance_col not in df.columns: - print("Need to specify a valid column to perform balance sampling within this column!") - raise ValueError + self.sample_n = sample_n self.balance_col = balance_col - self.random_state = random_state - super().__init__("@".join(["BalanceSampler", balance_col, str(sample_n)]), df=df, fit_flag=False) + self.seed = seed + + def fit(self, df): + pass def apply(self, df): - """ - :param df: pandas dataframe, stores the data to be sampled. - :return: pandas dataframe, stores the data after sample. - """ - # TODO: update to minimum sample set + # TODO: update to minimum sample set and remove print statement balance_groups = list(df[self.balance_col].unique()) n_group = int(np.ceil(self.sample_n/len(balance_groups))) - # print(n_group) + sampled_df = {} small_groups = [] for gi in balance_groups: @@ -64,7 +98,7 @@ def apply(self, df): sampled_df[gi] = gi_data small_groups.append(gi) else: - sampled_df[gi] = df[df[self.balance_col]==gi].sample(n=n_group, random_state=self.random_state) + sampled_df[gi] = df[df[self.balance_col]==gi].sample(n=n_group, random_state=self.seed) after_df = pd.DataFrame() if not self.sample_n % len(balance_groups): # for even groups @@ -86,11 +120,31 @@ def apply(self, df): print(after_df.groupby(self.balance_col).count()) return after_df + def name(self): + return "BalanceSampler" + + def abbr_name(self): + return "BS" + + def step_name(self): + return "Sampler" + + def input_encoded_data(self): + return False + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return False + if __name__ == '__main__': - # cur_o = BalancePopulationSampler(1000, "marital-status") - data = pd.read_csv("../../data/adult.csv") - cur_o = RandomSampler(data, 1000) + data = pd.read_csv("../../data/german_AIF.csv") + # cur_o = RandomSampler(200, 0) + + cur_o = BalancePopulationSampler(200, "sex", 0) + + cur_o.fit(data) after_data = cur_o.apply(data) - after_data.to_csv("../../data/adult_" + cur_o.get_name() + ".csv", index=False) - print(cur_o.get_name()) \ No newline at end of file + after_data.to_csv("../../data/german_AIF_" + cur_o.name() + ".csv", index=False) \ No newline at end of file diff --git a/pipeline/preprocess/scalers.py b/pipeline/preprocess/scalers.py index 7a1e80f..8ad585a 100644 --- 
a/pipeline/preprocess/scalers.py +++ b/pipeline/preprocess/scalers.py @@ -2,46 +2,113 @@ Classes to scale data. """ import pandas as pd +import numpy as np from sklearn.preprocessing import StandardScaler, MinMaxScaler -from pipeline.preprocess.preprocessor import Preprocessor +from pipeline.step import Step -class SK_StandardScaler(Preprocessor): +class SK_StandardScaler(Step): - def __init__(self, df, num_atts, copy=True, with_mean=True, with_std=True): + def __init__(self, num_atts, copy=True, with_mean=True, with_std=True): """ - :param df: pandas dataframe, stores the data to fit the scaler. :param num_atts: list of str, each str represents the name of a numerical attribute in above data. :param copy: same parameter with sklearn StandardScaler :param with_mean: same parameter with sklearn StandardScaler :param with_std: same parameter with sklearn StandardScaler """ - cur_step = {} - for ai in num_atts: - cur_step[ai] = StandardScaler(copy=copy, with_mean=with_mean, with_std=with_std) + self.focus_atts = num_atts + self.copy = copy + self.with_mean = with_mean + self.with_std = with_std - super().__init__("@".join(["StandardScaler"]+num_atts), df, step=cur_step, focus_atts=num_atts, fit_flag=True) + self.fitted_step = None + def fit(self, df): + fitted_step = {} + for ai in self.focus_atts: + fitted_step[ai] = StandardScaler(copy=self.copy, with_mean=self.with_mean, with_std=self.with_std).fit(np.array(df[ai]).reshape(-1, 1)) -class SK_MinMaxScaler(Preprocessor): - def __init__(self, df, num_atts, feature_range=(0, 1), copy=True): + self.fitted_step = fitted_step + + return self + + def apply(self, df): + after_df = df.copy() + for ai in self.focus_atts: + after_df[ai] = self.fitted_step[ai].transform(np.array(after_df[ai]).reshape(-1, 1)) + + return after_df + + def name(self): + return "StandardScaler" + + def abbr_name(self): + return "SS" + + def step_name(self): + return "Scaler" + + def input_encoded_data(self): + return False + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True + + +class SK_MinMaxScaler(Step): + def __init__(self, num_atts, feature_range=(0, 1), copy=True): """ - :param df: pandas dataframe, stores the data to fit the scaler. :param num_atts: list of str, each str represents the name of a numerical attribute in above data. 
:param feature_range: same parameter with sklearn MinMaxScaler :param copy: same parameter with sklearn MinMaxScaler """ - cur_step = {} - for ai in num_atts: - cur_step[ai] = MinMaxScaler(feature_range=feature_range, copy=copy) - super().__init__("@".join(["MinMaxScaler"]+num_atts), df, step=cur_step, focus_atts=num_atts, fit_flag=True) + self.focus_atts = num_atts + self.feature_range = feature_range + self.copy = copy + + self.fitted_step = None + def fit(self, df): + fitted_step = {} + for ai in self.focus_atts: + fitted_step[ai] = MinMaxScaler(feature_range=self.feature_range, copy=self.copy).fit(np.array(df[ai]).reshape(-1, 1)) + + self.fitted_step = fitted_step + + return self + + def apply(self, df): + after_df = df.copy() + for ai in self.focus_atts: + after_df[ai] = self.fitted_step[ai].transform(np.array(after_df[ai]).reshape(-1, 1)) + + return after_df + + def name(self): + return "MinMaxScaler" + + def abbr_name(self): + return "MS" + + def step_name(self): + return "Scaler" + + def input_encoded_data(self): + return False + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True if __name__ == '__main__': - data = pd.read_csv("../../data/adult_pre_RandomSampler_1000.csv") - cur_o = SK_StandardScaler(data, ["fnlwgt", "age"]) - # cur_o = SK_MinMaxScaler(data, ["fnlwgt", "age"]) + data = pd.read_csv("../../data/german_AIF.csv") + # cur_o = SK_StandardScaler(["month", "credit_amount"]) + cur_o = SK_MinMaxScaler(["month", "credit_amount"]) + cur_o.fit(data) after_data = cur_o.apply(data) - after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False) - - print(cur_o.get_name()) \ No newline at end of file + after_data.to_csv("../../data/german_AIF_"+cur_o.name()+".csv", index=False) diff --git a/pipeline/preprocess/splitters.py b/pipeline/preprocess/splitters.py index d1ed315..9a725dc 100644 --- a/pipeline/preprocess/splitters.py +++ b/pipeline/preprocess/splitters.py @@ -1,23 +1,40 @@ """ Classes to split data into train, validation, and test set. 
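+    Illustrative usage sketch (a 50/30/20 split with seed 0, mirroring the
+    __main__ example below):
+        splitter = RandomSplitter([0.5, 0.3, 0.2], 0)
+        splitter.fit(data)   # validates the split ratio
+        train_df, val_df, test_df = splitter.apply(data)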
""" -from pipeline.preprocess.preprocessor import Preprocessor import numpy as np import pandas as pd from sklearn.model_selection import StratifiedShuffleSplit - -def valid_split_ratio(input_ratios): - if input_ratios is None: +from pipeline.step import Step + +# TODO: optimize the below function +def split_to_df(splitter, input_df, y_label): + after_df_1 = pd.DataFrame() + after_df_2 = pd.DataFrame() + X = np.array(input_df.drop(columns=[y_label])) + y = np.array(input_df[y_label]) + for index_1, index_2 in splitter.split(X, y): + X_1, X_2 = X[index_1], X[index_2] + y_1, y_2 = y[index_1], y[index_2] + + after_df_1 = pd.concat( + [after_df_1, pd.DataFrame(data=np.hstack((X_1, y_1.reshape(-1, 1))), columns=input_df.columns)]) + after_df_2 = pd.concat( + [after_df_2, pd.DataFrame(data=np.hstack((X_2, y_2.reshape(-1, 1))), columns=input_df.columns)]) + return after_df_1, after_df_2 + +def valid_split_ratio(split_ratio): + if split_ratio is None: print("Need to specify split_ratio!") raise ValueError else: - if len(input_ratios) == 1: # not valid, at least two for train and test - print("split_ratio should have at least 2 values for train and test sets and at most 3 values for train, validation and test sets!") + if len(split_ratio) == 1: # not valid, at least two for train and test + print( + "split_ratio should have at least 2 values for train and test sets and at most 3 values for train, validation and test sets!") raise ValueError - if sum([not isinstance(x, float) for x in input_ratios]) > 0: + if sum([not isinstance(x, float) for x in split_ratio]) > 0: print("split_ratio includes non float value!") raise ValueError - for x in input_ratios: + for x in split_ratio: if not isinstance(x, float): print("split_ratio includes non float value!") raise ValueError @@ -25,108 +42,125 @@ def valid_split_ratio(input_ratios): if x < 0 or x > 1: print("split_ratio includes not valid value! Value should between 0 and 1.") raise ValueError - if sum(input_ratios) != 1: + if sum(split_ratio) != 1: print("The sum of split_ratio does not equal to 1!") raise ValueError return True -class BalanceTargetSplitter(Preprocessor): - def __init__(self, df, split_ratio, target_col, seed=0): +class BalanceTargetSplitter(Step): + # TODO: fix the bug of not returning enough item + def __init__(self, split_ratio, target_col, seed): """ - :param df: pandas dataframe, stores the data to split. :param split_ratio: list of float, each float represents the size-ratio of splitted data. Corresponding order maps to the size of the train, [validataion], and test set. Value ranges in [0,1]. Sum of the values in this list should be equal to 1. e.g. [0.7, 0.2, 0.1] means 70% train, 20% validation, and 10% test set. :param target_col: str, the name of the target variable in above data. :param seed: integer, seed to be used to generate random state. Same as 'random_state' in sklearn.model_selection.StratifiedKFold. Default is 0. 
""" - super().__init__("@".join(["BalanceTargetSplitter", str(len(split_ratio))]), df=df, fit_flag=False, target_col=target_col) - if valid_split_ratio(split_ratio): - if len(split_ratio) == 2: # train and test - train_size = split_ratio[0] - test_size = split_ratio[1] - self.splitters = [StratifiedShuffleSplit(n_splits=1, test_size=test_size, train_size=train_size, random_state=seed)] - else: # train, validation and test - train_size = split_ratio[0] - validation_size = split_ratio[1] - test_size = split_ratio[2] - - self.splitters = [StratifiedShuffleSplit(n_splits=1, test_size=test_size+validation_size, train_size=train_size, random_state=seed), - StratifiedShuffleSplit(n_splits=1, test_size=test_size, train_size=validation_size, random_state=seed)] - self.split_ratio = split_ratio + self.target_col = target_col self.seed = seed + self.fitted_step = None + + + def fit(self, df): + if valid_split_ratio(self.split_ratio): + train_size = self.split_ratio[0] + validation_size = self.split_ratio[1] + test_size = self.split_ratio[2] + + self.fitted_step = [StratifiedShuffleSplit(n_splits=1, test_size=test_size+validation_size, train_size=train_size, random_state=self.seed), + StratifiedShuffleSplit(n_splits=1, test_size=test_size, train_size=validation_size, random_state=self.seed)] + else: + print("Invalid inputs!") + raise ValueError + return self + def apply(self, df): + after_train_df, rest_df = split_to_df(self.fitted_step[0], df, self.target_col) + after_val_df, after_test_df = split_to_df(self.fitted_step[1], rest_df, self.target_col) + return after_train_df, after_val_df, after_test_df + + def name(self): + return "BalanceTargetSplitter" + + def abbr_name(self): + return "BS" + + def step_name(self): + return "Splitter" + + def input_encoded_data(self): + return False + + def output_encoded_data(self): + return False + + def fit_only_on_train(self): + return True + +class RandomSplitter(Step): + def __init__(self, split_ratio, seed): """ - :param df: pandas dataframe, stores the data to apply the learned splitter. - :return: pandas dataframe, stores the data after split. - """ - def split_to_df(splitter, input_df, y_label): - after_df_1 = pd.DataFrame() - after_df_2 = pd.DataFrame() - X = np.array(df.drop(columns=[y_label])) - y = np.array(df[y_label]) - for index_1, index_2 in splitter.split(X, y): - X_1, X_2 = X[index_1], X[index_2] - y_1, y_2 = y[index_1], y[index_2] - - after_df_1 = pd.concat([after_df_1, pd.DataFrame(data=np.hstack((X_1, y_1.reshape(-1,1))) ,columns=input_df.columns)]) - after_df_2 = pd.concat([after_df_2, pd.DataFrame(data=np.hstack((X_2, y_2.reshape(-1,1))), columns=input_df.columns)]) - return after_df_1, after_df_2 - - if len(self.split_ratio) == 2: # without validation set - return split_to_df(self.splitters[0], df, self.target_col) - else: # with validation set - after_train_df, rest_df = split_to_df(self.splitters[0], df, self.target_col) - after_val_df, after_test_df = split_to_df(self.splitters[1], rest_df, self.target_col) - return after_train_df, after_val_df, after_test_df - -class RandomSplitter(Preprocessor): - def __init__(self, df, split_ratio, seed=0): - """ - :param df: pandas dataframe, stores the data to split. :param split_ratio: list of float, each float represents the size-ratio of splitted data. Corresponding order maps to the size of the train, [validataion], and test set. Value ranges in [0,1]. Sum of the values in this list should be equal to 1. e.g. [0.7, 0.2, 0.1] means 70% train, 20% validation, and 10% test set. 
:param seed: integer, seed to be used to generate random state. Same as 'random_state' in sklearn.model_selection.StratifiedKFold. Default is 0. """ - super().__init__("@".join(["RandomSplitter", str(len(split_ratio))]), df=df, fit_flag=False) - - if valid_split_ratio(split_ratio): - self.split_ratio = split_ratio + self.split_ratio = split_ratio self.seed = seed + self.fitted_step = None + + def fit(self, df): + if valid_split_ratio(self.split_ratio): + pass + else: + print("Invalid inputs!") + raise ValueError def apply(self, df): - """ - :param df: pandas dataframe, stores the data to apply the learned splitter. - :return: pandas dataframe, stores the data after split. - """ df = df.sample(frac=1, random_state=self.seed).reset_index(drop=True) - if len(self.split_ratio) == 2: # without validation set - split_idx = int(self.split_ratio[0]*df.shape[0]) - return df[:split_idx], df[split_idx:] - else: # with validation set - split_idx_1 = int(self.split_ratio[0] * df.shape[0]) - split_idx_2 = split_idx_1 + int(self.split_ratio[1] * df.shape[0]) + split_idx_1 = int(self.split_ratio[0] * df.shape[0]) + split_idx_2 = split_idx_1 + int(self.split_ratio[1] * df.shape[0]) + + return df.iloc[:split_idx_1], df.iloc[split_idx_1:split_idx_2], df.iloc[split_idx_2:] + + def name(self): + return "RandomSplitter" + + def abbr_name(self): + return "RS" + + def step_name(self): + return "Splitter" + + def input_encoded_data(self): + return False - return df[:split_idx_1], df[split_idx_1:split_idx_2], df[split_idx_2:] + def output_encoded_data(self): + return False + def fit_only_on_train(self): + return False if __name__ == '__main__': - data = pd.read_csv("../../data/adult_pre_RandomSampler_1000.csv") - # cur_o = BalanceTargetSplitter(data, [0.7, 0.3], "income-per-year") - cur_o = RandomSplitter(data, [0.5, 0.3, 0.2]) + data = pd.read_csv("../../data/german_AIF.csv") + print(data.shape) + # cur_o = BalanceTargetSplitter([0.5, 0.3, 0.2], "credit", 0) # bug: returned splitted data's total size does not equal to the input data + cur_o = RandomSplitter([0.5, 0.3, 0.2], 0) + cur_o.fit(data) after_train, after_val, after_test = cur_o.apply(data) # after_train, after_test = cur_o.apply(data) print(after_train.shape) print(after_val.shape) print(after_test.shape) - # after_data.to_csv("../../data/adult_"+cur_o.get_name()+".csv", index=False) - print(cur_o.get_name()) + after_train.to_csv("../../data/german_AIF_train_"+cur_o.name()+".csv", index=False) + after_val.to_csv("../../data/german_AIF_val_"+cur_o.name()+".csv", index=False) + after_test.to_csv("../../data/german_AIF_test_"+cur_o.name()+".csv", index=False) diff --git a/pipeline/step.py b/pipeline/step.py index afd4aa5..e353bfa 100644 --- a/pipeline/step.py +++ b/pipeline/step.py @@ -1,49 +1,55 @@ """ Base abstract class for every step supported in this system. 
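+    Illustrative sketch of the contract a concrete step satisfies
+    (IdentityStep is a hypothetical example, not part of this patch):
+
+        class IdentityStep(Step):
+            def fit(self, df):
+                return self                   # nothing to learn
+            def apply(self, df):
+                return df.copy()              # pass the data through unchanged
+            def name(self): return "IdentityStep"
+            def abbr_name(self): return "ID"
+            def step_name(self): return "Identity"
+            def input_encoded_data(self): return False
+            def output_encoded_data(self): return False
+            def fit_only_on_train(self): return False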
""" -STEP_NAMES = {"Sampler": "SA"} +METHOD_NAME_MAPPING = {"RandomSplitter": "RSP", "BalanceTargetSplitter": "BTSP", + "RandomSampler": "RSM", "BalancePopulationSampler": "BPSM", + "DropNAImputer": "DNIM", "ModeImputer": "MIM", "DatawigImputer": "DWIM", + "SK_StandardScaler": "SSC", "SK_MinMaxScaler": "MMSC", + "SK_Discretizer": "DCA", "SK_Binarizer": "BCA", + "SK_OrdinalEncoder": "OREN", "SK_OneHotEncoder": "OHEN", + "CustomCateAttsEncoder": "CCSN", + "AIF_Reweighing": "RWFB", "AIF_DIRemover": "DIRFB", + "SK_LogisticRegression": "LRRM", "SK_DecisionTree": "DTRM", "OPT_LogisticRegression": "OLRRM", "OPT_DecisionTree": "ODTRM", "AIF_AdversarialDebiasing": "ADFM", + "AIF_EqOddsPostprocessing": "EQFA", "AIF_CalibratedEqOddsPostprocessing": "CEQFA"} + +SUPPORT_STEPS = {"SP": "Splitter", "SM": "Sampler", "IM": "Imputer", "SC": "Scaler", + "CA": "Categorizer", "EN": "Encoder", "SN": "SensitiveAttEncoder", + "FB": "FairPreprocessor", "RM": "model", "FM": "model", "RA": "FairPostprocessor"} class Step(): - def __init__(self, step_name, df=None, focus_atts=[], sensitive_att=None, target_col=None): - - if df is None: - print("Input data is empty!") - raise ValueError - - if sensitive_att: - if sensitive_att not in df.columns: - print("Need to specify a valid sensitive attribute!") - raise ValueError - self.sensitive_att = sensitive_att - if target_col is not None: - if target_col not in df.columns: - print("Need to specify a valid target attribute to be predicted!") - raise ValueError - if len(df[target_col].unique()) != 2: - print("Only support binary target feature now!") - raise ValueError - self.target_col = target_col - self.pred_target_col = "pred_" + target_col # store the predicted score (probability) column using this fixed name - if len(focus_atts) > 0: - if sum([x not in df.columns for x in focus_atts]) > 0: - print("Some specified attributes do not appear in the data!") - raise ValueError - self.focus_atts = focus_atts - self.name = step_name - # self.input_data = df - - + def fit(self, df): + """ + :param df: pandas dataframe, stores the data to learn the step. + :return: self: the fitted step is updated. + """ + raise NotImplementedError def apply(self, df): """ - :param df: pandas dataframe, stores the data to apply the learned discretizer. - :return: pandas dataframe, stores the data after discretize. + :param df: pandas dataframe, stores the data to apply the step. + :return: pandas dataframe, stores the data after the step. 
""" raise NotImplementedError + def name(self): + raise NotImplementedError + + def abbr_name(self): + raise NotImplementedError + + def step_name(self): + raise NotImplementedError + + def input_encoded_data(self): + # use to detect the change of the dimension of the dataset + raise NotImplementedError + + def output_encoded_data(self): + # use to detect the change of the dimension of the dataset + raise NotImplementedError + + def fit_only_on_train(self): + # indicate whether the step fit on the input data (return False) or use the fitted model (return True) + raise NotImplementedError - def get_name(self): # return full name to print out - return self.name - def get_abbr_name(self): # return abbreviated name used in the file name of data - return STEP_NAMES[self.name] From 46e1f3f093a63ac2f611ea2c9a3f54795b56193e Mon Sep 17 00:00:00 2001 From: Ke Yang Date: Fri, 31 Jul 2020 09:26:47 -0400 Subject: [PATCH 7/7] minor for generator function --- pipeline/fairprep.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/pipeline/fairprep.py b/pipeline/fairprep.py index a4dcc61..32f629b 100644 --- a/pipeline/fairprep.py +++ b/pipeline/fairprep.py @@ -181,20 +181,17 @@ def get_meta_information(self): return self - def iter_steps(self, steps, input_df): + def iter_steps(self, steps): + # TODO: optimize with the input data. """ Generator function to iterate steps. :param steps: list of objects that represent the steps user want to perform on the input data. Supported steps are listed in STEPS.md. :return: the pandas dataframes that are returned by applying a step on the input data. """ - for idx, stepi in enumerate(islice(steps, 0, len(steps))): - # if the current step is the encoder, feed inputdata's metadata so that the encoded data can have the domain of the raw dataset - stepi.fit(input_df) - return_df = stepi.apply(input_df) - if len(return_df) == 2: # special heck for the step that return weights - return_df = return_df[0] - yield idx, stepi.fitted_step, return_df + # islice(steps, 0, len(steps)) + for idx, stepi in enumerate(steps): + yield idx, stepi def validate_input_steps(self, steps): @@ -241,7 +238,8 @@ def run_pipeline(self, steps, save_interdata=False): print(PRINT_SPLIT) # for step_idx, train_fitted_step, train_df in self.iter_steps(steps, train_df): - for step_idx, stepi in enumerate(steps): + # for step_idx, stepi in enumerate(steps): + for step_idx, stepi in self.iter_steps(steps): if step_idx == 0: continue