From 652ba4ad2c8a8f01713545d6b231a84dd3f2307a Mon Sep 17 00:00:00 2001
From: Caroline DE POURTALES <cdepourt@montana.irit.fr>
Date: Wed, 8 Jun 2022 10:59:00 +0200
Subject: [PATCH] update random forest for faster, cleaner callbacks

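Add the RFxp Random Forest explainer (RFxp.py, data.py, options.py and
the xrf package) under pages/RFxp/, together with the pima.csv dataset
and a pretrained model (50 estimators, maximum depth 3) so that the
explainer can be driven from the page callbacks.

For reference, a typical command-line workflow looks as follows (run
from pages/RFxp/; the option values and the instance passed to -x are
illustrative, the instance being the first row of the bundled pima.csv):

    # train a forest of 50 trees with depth 3 and pickle the model
    python3 RFxp.py -t -n 50 -d 3 -o Classifiers pima.csv

    # compute an abductive explanation for one instance with the saved model
    python3 RFxp.py -v -X abd -x '4,117,62,12,0,29.7,0.38,30' \
        pima.csv Classifiers/RF2001/pima/pima_nbestim_50_maxdepth_3.mod.pkl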
---
 .../pima/pima_nbestim_50_maxdepth_3.mod.pkl   | Bin 0 -> 70012 bytes
 pages/RFxp/RFxp.py                            | 148 +++
 pages/RFxp/data.py                            | 168 ++++
 pages/RFxp/options.py                         | 154 +++
 pages/RFxp/pima.csv                           | 769 +++++++++++++++
 pages/RFxp/xrf/__init__.py                    |   3 +
 pages/RFxp/xrf/rndmforest.py                  | 137 +++
 pages/RFxp/xrf/tree.py                        | 174 ++++
 pages/RFxp/xrf/xforest.py                     | 874 ++++++++++++++++++
 9 files changed, 2427 insertions(+)
 create mode 100644 pages/RFxp/Classifiers/RF2001/pima/pima_nbestim_50_maxdepth_3.mod.pkl
 create mode 100755 pages/RFxp/RFxp.py
 create mode 100644 pages/RFxp/data.py
 create mode 100644 pages/RFxp/options.py
 create mode 100644 pages/RFxp/pima.csv
 create mode 100644 pages/RFxp/xrf/__init__.py
 create mode 100644 pages/RFxp/xrf/rndmforest.py
 create mode 100644 pages/RFxp/xrf/tree.py
 create mode 100644 pages/RFxp/xrf/xforest.py

diff --git a/pages/RFxp/Classifiers/RF2001/pima/pima_nbestim_50_maxdepth_3.mod.pkl b/pages/RFxp/Classifiers/RF2001/pima/pima_nbestim_50_maxdepth_3.mod.pkl
new file mode 100644
index 0000000000000000000000000000000000000000..ac92a2373a6e27d1bf1b9537fcb42235914f452f
GIT binary patch
literal 70012
zcmZo*nVQGQ$N&O8d=*7$dPR9DxoP=Dsl_Ezdf0;8j0_A6r%axr**K+kibf9`Oh%$O
zJ0~@<C{HgnuQ)X~DJNAg9wsFgl$e*2pX&xT!#O9hxHvN{Gqng|2wzfSacX>OaY<%w
zVo83{lpax-VI@VWse19rAU&zYQ+mW)Qj;@_GxPI8ic(XtnZ%h~lv$Eml$oD5rH3Uw
zGcR*W4@Yr9PG(6-YSEM)mL#wzICB##;!{!!N;0PS^$6x>=EWB$<`(3n7RQ5?PVr{y
z5kMBrNli?f;?3A20TL`v%}mcIiBBs^OfJdH&x;33x`RRsgnM{EW~ZelmXsEy7Ekf(
z;REqOYU1<qQ@|oTMc~kgFD^+eNuA=?BMLG#Gq<3$D6^z8J|#7|C^fM-bqbP^VDHC6
zES}=m!<n315TBS+kdcU_NFW{>WbtLGMW7Ix(!*+~XP{>^r8KFBCodizxW!Yvjd}#&
z-1vgTqQu<dDH<7q8KN0t8R8id8Il=N88R8t8FD34dN`Bv^Gk|LiV_Q^bo6lM=O@J%
zC+8QXPU-An%Zty-Pb!|`mm%%-^CJsG4|`c^QBr<!>J)E=9<K7lqFj&{i%O<+X2|;W
zaD&1D8q8DtG6Z|rQ}arS@(U`bWQck*XNY+-Wr%w-W=O!jogvx7l2}@jKP5xTFGB_)
zEC*-y@a4tBLOMP(FMf(QM-NwCe12(3L1{^GJSfIEz(JQ<96zN;G_N$bpi&Ryc)i@x
zoRZAMqN2pgDLp*#MXAa8dBr6~rO72zCQs>M1uLA=!=9G{Q8IaoH$&?bXU3!{?NfrL
zXm~SvGqq01FzR7VDXA<-1!-YQGnvxa0g+|yVYHdz=jZ43|NsC0V8WZBWJ*$}GY2Sr
zFfcUOPbry_1ac_cZQe{h{HQq~eo6*^4>vrLisPrmPSMEVgXFmk!3?bo;S3R+0hgf-
z3NvZ1<_*6Ek%LawF9Q*J89Lq^8M@w#8G0Fp873efW|%>I0P&x74^x^2!dDsg*!&~_
z@e_>v0QOskbB2BoTXAw?P9i9eXIS?zWfCySnS%*x#+0@xK~s8oL8%2Ew^O{CdRRdv
zLHv{+A!st!1M&6ZLFAMk7Ep08W%3kn4otr=wob_i??keoWJ;1ZBgm(<Q#3$9*uw?N
zneoZ_rFkV&y!m@rLEPdgm?rVJPRX$DVGc7jLj+3?W6cyljUKL?)U=ZL<c!Rmlqo&j
zMc@(w%43ImV@eNaNk&m>aYjB!4F@d4PXVQ3kd3f1XiASbB&VmQ#OHwv4Y*7RsKm`E
zAtpe*8QP{~lxEa>bF@v#D9fn#7HFH2QJ&%NEzve5qawrKTcK@AMrB65w?^BPjH(QO
zZ-cferQR0ajNSsE#Omb1%)kJt0~i>f2^ShcFp9;YPfTG2yXiaoHkd+?ltHrtEGk(+
zd<F&vHYm-&z)&#p?G<|+mCnHF887TNA;cLN8VVhlp?nrJ8q7UW>Co^vbY}UB2llRD
z0Vv@R>j27V$oL<W166UrSHXnU?B#WPXQ%*}IuK1zA6Oj&0|U%I5Wa*1%s=c90eCop
zxewwU`u=R;PWt`YJ{K$iB@$8y+V^gifl12zNA_M&Enw<E3_*Q0s;@q-O?qpu4AuZ6
zJRCToOdco=iw_0{h6`tStR1qB-U^=k^}YQSs63c@P~yM^W-u^tLur^l!Q2Dx4q@v<
zF8x091kFBgg6^CWAS&>6?|pk&ut6}wouEF5$`AIiaEI^}9C#rN20kc_<Ua<61V;z&
z)v4aqx8K`ugvx`dhC+h&EzS!%G5Pd+`+TqlDB+MwP@n1QW;tGixAsm@Enuo4+5ueN
zvy~<0l%`@WRzMX>`;-j-PUjC0Efo%6`am{>c7W0h{t*6wNC?fK1EJx31qdIk8AM1x
z_y=MjG=m3(J`e$+84S>92MGT_DuiZ8fzS<Z5Sl>=LN~ZTXa*4|Er3cZBtZBNk|4B0
zIFt^C&<suxx*-T0G^I%y@)-)HNhDWqC+hpS$*$h~JDqV=ULhIbNYw?XKFWykX3B`o
zNFb|z$;k5N&&bAJofLHvQPN5|MD_JQI1u^PzLP*ni!9BsAjY9?&4!>;`(E1{BTGD}
z#VBb}O1J}o4&NX9IchRqw_gBCMKG+8PcR4EuKmGw((Ne8C7*HpWVdOvAKM$l>;lmZ
zq4-PMfMkb{d$>(Ly?Jjx8KfSD7vvCh=LD5b(Lk&B_USM^Ao@WvL4A{=Hh<!fdTy@+
zQV+uof%r;V1LI<Ahp)*q^&bCvZyyHJ1Ev!(N?N26%)q$V++myaWYLRK=j>5asue+Z
z`YzD4+$i<H9#)frtz%Fks1Kr!fdN)yK=_9EN?HTsViyPDEj`Qot>4?bK{P<f2EsM$
z%#Q{|66fFB+e36f$O92XyA!2mVo(DYCmG#XvKo33yPy(U1Z6^Khf*k=2%#Brp|l}{
zevk~I8K6a-LlBg&1*M}Qv_l+(ZU~3a4gnAvR#Y?iK==)TP#Pw03E?w*fCdGuD2MT(
z?tp|40|Tr`hM5oJGk8PIfhmN93E4$#?0Tc};aS8Ymxv4x;vL@2f5szy^R0a=vILgK
zEK(LtNPA}G&}YW#mVfT4{U=z6gXjn47)2~d7=e`?PX2sd?ZkZ*HOnv%Zp?Ofbv(H9
z@tC~}!Y~E~A{w)>q6?*{XAr?(#3n>K6d2sQs3Z8&ehspH3knEk(X6tW9ur#b+WR5v
zI}k@uU(C}W#^8r<?c<R3JxIh?#3rOYvvmlQoT0n3>m6!Uz>tek#3D5!1(>w&+y6~0
z)7QNA8nrRoK)5kGXG`h6gOlIeizB<U!HJ;%V9HU8SR;%g7N!nD3ptqGJ#|9w_bdAX
zhya9CNF!)pNZjo-jjwO*9U(d(q(c-zeTn9~ZlvvbY_AW|0VN?VHlm7H251p`APZ6?
zO(=%Y4$z{{0a|1-$UwvyV0;cJA6E21i`)iiQ4Mbe!_;d))Wcet44x4Fflvso0Bwaf
zI79gGR<9`<-w4WwdKK1MhP7rJpsiL0sQVezAnF_Zz|{`fMeKo9{o*5|h`lpMhh<*E
z8~c6&MJ!4Y<q+gh_i@^X;;={d-(WEVqCXHSV$~gX-w%Fk`|_6ke2{t=Rv=t-{k|#j
zFr)g0{alzH5dDB~>ldU9gi+kjz=2W3B2^(5&h%J2{N8xa|3|?~`*4tE7<Py!m_=bq
z`C$w&Z9!1qr7y3~cZt5S_W-Mh5)P60idY5)2Rnzh8z$=yguS<)3Dp9o9^_yYvB=TO
z(2(Sy^E;QFNAZQdDp&)I@FVEXf9+@L)h6Gumw;&j(G9Ky^+hf?RoeCXy*+Gv0Hlq9
z0kahg8*c*3H&{8GGUI);b@^-i60iW2NFdzrKcv61N&d$>dn2e82o+4wov<nyWxU`(
zIJh_=s)*fC1u23O(joMQ5(v%U1)&>YtynWOzA=OkaVP@=1GLCJ0ByB0*g@qjpfptB
z0a(!olZOr@Jb-ri8{DDlp{-(gkqa%t8DOni*gyhwK%xQKYKGejGY96*U~rg_UBrIv
zUKBSpidfzZECU6+ty40JI-PDXVGhZnBw+_fhqh^6rE=+S?W>UUAeL4vQY)xHsQHS$
zjo<{eg9l&Rt0L<YsKaQ*qI4)HR6BI&_x4$xd}dz*O(9@vK_<bZKP9f|>Ew*3_ElgF
zFoLi?m`c<l7PA!#QwO109JY0D*62}uXTK6603jU+4+T!(;aHWS^w=Ig=LFJ8#E`T5
zf+wfcf4#R)0;z}L2g&dv7B<9+R248V9I$nGHMPEK)&KYQp)frldO-rg@B}FXVU+k`
zkZ^zv;lf&*$k7Yx_00K{eUtOud;36;W*9y|xatv76J!@S`P5z=rUyhf5H8Z&7FVw6
z3wvd+1yT>g4FTZnjjhPzMReRRfR#^xwkj7uTZ0MEj(Y;M<GuhoX2}2@qlEDr9HB)M
z*c1i^*x02)8blmECJGx{g?6|dpktN|Ay9SD4toQXh8f2I8*7D$`$5#hTBESmA_LUi
z1`n{G$u837E?4jz8bunZW1mPx(*~KSEB3ICc>{9V#Zsi9CVe@F^#3mf^FO?>pAC;9
z1_p))c^E|+vat*S$qo~=TxKs^@y0$GS>FP}V}niE#TJvNJhKl*)<;Au?y6l}XqUqa
zv@t#hj8-Fx{}x0#1aw~Ap}Xm}y#=y;4PitV71GH17~BZz`}F*$(wvZ|7_&F%vrH()
zGDtbR%er+sxaGb5Q)Gz`W%yf-58@rvnbc#vbl%x-L)JH;h@d+q`op&`V0~g=j;t?%
za7Q^IMld<p`z`8NWJ9b2tQ85HWkO1y7Z$qSvY+zhJlpztuTeY74S@vR*|7b!`ro$q
z_OS4SI9G?DKCi%3!}Wij+p9q|KuI5Pae}P~<wq2ukm)7#j`;y->#+ekHVNy9H$Yow
z4zMB-)*6I$*r8*Vu%Z#(x`Ry@L7m0`>v%(3e-6+Afd$YpRhaz;q98>kY}$yy2EvE6
zA`d`|Vg?h4IJ{_unhPs}8K46K3~t~sA-jmxSgzzcJd0SAB2OXSVGGOl(>k-?+7}^9
zU@2l@5elJs9Ku)bi(D4;#Qr`+07_zZ%u$N22cZtOCzllRAG%}T2-N|m7Gx1j`iwEQ
z2j^N}LY+ogkVsJ9s)Xa+N@t(iYlAI<5kVM5EJ|yP!P+5juk6m7T(3~aA`=K#1+Xz%
zF_={#nuxJMqmvA>o0;CCcFY@M;jKnkD-Nj$I^b=7)m|gBnfdJlw6V_vc^E}3lK&>m
z-09)~n|?6|xetyBS0QX?(+poGzOy%k>j5zccX&anKp4sW3=9k!7)30SI~f=b*f>=5
z2Kj45zOj!1X@=nggyZ2zKv4go+}9XYD|CGboI+nPQeY)o^8WubdmorxAi5zQT$~V9
z#0EfHsS41dE&w|A+29K)x)d@XG^{8)0PR4-#zJB07@!?#hFFMtSWyU@L5EKlL0hNr
zq88dQKL8#3gpFmwTE%caw1@@y4~$_&C$#8qfQ_-5L+ppG0APR?{SJ}fFd@5$wGe*o
zJ~WD0Qak2IN!;MB$`gC<x>d!wjc*CAl|n9o84MB~&IDAN9DDo9UJF@b0p<)ovNXek
zNQd06gt-b5kL@3UqZLX#C?i-@dbgzi-*WPTeKu4Jm}(&0dRp%F$KAa9C2Fh7Ast^4
zd*MuugM*kzU4U;E+E^q5;hAGUHpM^Hf8N{6gB=JZ8e9na&qZ>#t<2`9_L@*FVCp~s
zMiC1ZhZAxR_eAu%xaYk`ErJ>_XYk=#zzi0L73W)Y?;m}ST9gnmU01cheda92H)tyj
z2zRiB-*mk%)p}>I0Ja}ScoGa3m`c=@2DaerjjhPT=$IEki@XQWj`;#;$D9FLw85r-
z8en6Suti4D*4qMT2Oc()-2hvR1e<AxP1nF$f6$_;0XpN|0PS$YT8S_|td-c{3N6B5
z(>yR5Y7Z;`7$ANoyGX0KIjLf36ltV(%u$P&c!wiW%qIWOytS`ImLS+_RCY)#s!-m<
z`5bjRN1+m<V~(=8qruE!qf2aRio{dYj$}bO(Mj6`9>oj{3<rV;>XZ6rp~wF2IqLGk
z14a0XGzJC+Ylm3DEl+h!-rEZz+s9x{&_0-Qlrdcf0fPGW&0Nmd9rwl_)<K1sV~$az
zp^TXx2zJO?tEL!H^Ul5<q5(oSWMQ-#QR3l%zr!M-hA(ypUZRdsHV~fK?@)|oiY<A8
zI@U<USSoCq1f>_jAcDWu$l&7Or8hsz{^EPoX&E90NMIdvlq!OV4!x3={PW<DOK2T>
z6P!gTM#o$LT7)iuwi*>+(>c)AT>-Qe$p9T=JOC>iAyLNwpYDOm!^a$9>pWr8Fi`cd
z)+TI599DELfEL;C=@{6wkQJl|JrD+=VT++)tx(v20epQZY?=r<7Rvye&Vg(bAiId2
z>Y0-;Jd0SAtkz)XU{vw)-Tnh_?VAV`u_#&NK&XRyPQ<Y%+n%AW?EX-KQN*GY2@Dzz
zN9I3CsAG6w-vUb^AX<TN$J}X7&wbsWZ|z~z4Ipix8CQ4@3(7$6X!WfO$>E=W+rAQ_
z0ZL|I6tSq`V(9=|@g4xx0j3%VPh0G3IS~0a?geVc)q!xw_0uAsr=5S%Tbr0G?2)4h
zG`;dS=7jOfckk`jfgK1X3JUQTu?}7iL1!m%p5cFsI?%yDc%b7D^Ob`dG4D_Z5Qr#Z
zAKg~rKFN6*tqL*5C}N>LfKu8Hw|T>AcE!B5kARARsRxAJ*`V?Mo8QCNs8xspVSQlp
z85kH)t4Imri&zC{5!(PQVjmPhiZa+bN(bmzC<Cm+3ti*`8(Vw;YqdhBaTuVBj2K|8
zOK2+;HqFBT8#9G=m>Zx)YXYoe4l7!rW2OzzA|0c(3R?^XElS}_{-JyaXonvbKn##D
zA-jm(v*>8y@GN3cib{iI2f4QvU&2cWFRh1%ESMqX!15~n^qYmR?Cp{B`~?Ct<_x+H
zup;O?*c>P!P(iSjrxNpeN@(S4`%b79FhxYs9B}6S_Fo~7?eoAIU_=^55sMNo3DFLr
z+-y5Vqu-+zB}8=0VPk{Pb<7Y84}=r+A8fh~bvi@~qZNxB+zcC193J^jb2wW54t3L%
zLmWmCi#n!g<q&#io8ICd@9oVXE`*Q{p#<FtQHZjsje&tdg`mC_nNx2Roq2C>05bqY
z9|*!IVvz@q*cw0Cvp;aQ?=CoQF9K2z$HcEQ6@u#lF&qiH6QqiP;e$PD(}02aBDMiq
z#2$bS1Z*gS7O~Kx??4fh4=p-jW0bHlPZ*y8+S+V@?s|d^3@|{8Son-Sbny~{3dCHv
zI@oj(tZ0Q6l`wS>k1{YYKvxF9TdmNd7H&RFAygf#waY+ZE0%M%XY%kYVo|avXlcFW
z-xDuBytOAZgO8L&4U9iqIS6c8XRCDYu|0I!5)uzYEUg!MbL?tC%YL*mH^NKnr%k!F
z@{i6Yw4#KtK8V9GvkWXN!&<MX;S%bQe{5m~lhO<OB@hE3WC7vzsfVw*y6luXWUmO(
z0VN5K@tN>l_?$cAm3=c*2bfxrg;B(!#)GZHg#OKkrt-eEp8(bXBk(S*zfiXCvHi0U
z=HM4~Z|vn@TEMgy!6FDYorSvYlknIs*m%_a#taM$Y8XW<N<0)G)~6Q13;@wYjCoJ%
zRuft9;w|a`0};ImWn&w+j=2x)A>|dwGSGk&WGV&QSS!Xr!2{?9Q3mM7Py^^d0jvmh
zfKC@NFhH$<@u5W<QV|BW9adDsRsg^{@-TI<nR?jnCD_0NTpYGE9yZVbZSBGa7T^OB
zVA~iNVAEKzF<%C2s5_xc_ZgsLy9dI+)ehOM*v3{#kKtLw!ip9K28Ia|ED!8qTk)Hb
z(>9i|SY&AifdmJ)NiS9{KKsg^30cB{&{(Xc!}{_EztZPDK<(HPu`39g02oj@?hJ%G
z?nO<a#V;*ipw7e}NW^HxA{AW;X>5)TPB(WgNZRn;J_Kelh;|^{(e%2sdF$?*?@*_S
z7zmFU!`3&VtTbrAoL)k5XF*z;y~Cy+VbP8&?@?DoIAD&&BI%njbEc((oM&U63GZ8b
zlo@;?Hc-MgmZG)}32!V_UL<nSzw$lmQho=_RxC<yVneP2pL54v`<7RzRVxwGV4H*6
zen`r_K`km92=^wg1s^=iv*L<9Y@IDA+Cc+Spsn~s6|oJ_A{Mp>zX956g)NRsfGw?u
zIV~Pqgh6Zq?Z|_$pz^TkDA;;Z2574kw#W%KU;r<Ap&j}L=-&MUurXcObQP?Kg%#<r
z0SKsl47L#Wz!zshXZqpmR$=-KC@W$u=CXtg&mtDqY6C}OgPlW5V{)(XQ^GU&sQN4%
zVCyE&f(?Qa3YcTDNLibKVS$4~eB`FY)D;iV_R<sHjbad6Z5biC-`*E$6_|1$JUxSJ
z<``Dh!2E?=Q-H=eiyYN`Y@%P<Cx9)25)YCHw(?@GA9a}$^ayo;p@DE~n<GV4Pvpxx
zv|T|C7_C?ocOHm$c)NUknApYl_B)|Y1XB)07y|`haX6vrU~=Kv>jJ;G_K|Qg5QA_P
z@~PM-XY;8ysAIS!EagWX<K)0-#UiDb2{UinIvf(tzi0mB6>2;*5MCr@a_>h&1N&Qh
zQINOb*q&hgsh(Rc;`;1~y*FGB0|SEt35%pYK#SN1(6QJB(6LxpD-*sB6guO70M?3y
zwQ6BSA#^4me#{B9HQE4Mw+d^O!WL6O&4W#I!S?pUrnO)L2rxbatZ0RK6gJ=h6^9kk
z46qdn(18dzKZ>#<c7wHD{qQVeQCbNP65&N`HFDm?(uze&`Ub|IZ5)btR1J?;yg^+g
zrGPmW3yVMqt>`c*p-ONU%N>&URL2-}oejHi%{~=k5QHS+fTwBFI#Vu*y+fNO)4|t@
zWng&V>hPRBf5ww3@9lR%Y=V#i#RRixtan#ag8X~?)es#Jas%OAPqS{Fn6%{aQ+w2n
zrDhmKENT&D<e*^Hwt2}vwB1Av2^d8ztSW&x$ibnfOe;Mr`4QTNO*4Z2gXm{qKyBq2
z5!ClMc|zCUuP^OU-7kPq#G<&f!OkJWviK2i;(OGy7l=6Kq%lwB+@Yhd&=#u^-f{t3
z`Jf8(D2R5zy;}-npa8vyeE=IMfKERtz;;W)ib&{^e0YZ)x=|ChTWSGx`U^IVr2ykY
zTdD9B0nj2DHmwG4y~0)iz=}|)I*`A>7}hF<$-_2+LR-JEMPaZ30{Hq^FUpG8CGWVh
zhG!8A4plfI<j|uaw@;JfoqZ$mMeKx`H?19hai%11ka}(Z6>b&-1A{>Y#)<-DX$FQL
z_DWBM*1BpRLmi7H;+PYdIVefL!G~bhepf8KSh4>S+Qvj*j3O2_Yg;?0m91pix&m#3
z=m)}MvA>L#%Xqwhj=Ec{f$&*>dXllG#UAfakAFE3g;B(!y3@viXTtHL-~YY0?|}sn
zh)%%VgO5_R9!PS4Elxr$(oG14C&+jZhV6m^u^AW$7qJj^A26noVCKLUtD(5_LWYA>
z9Cyjb2k-4`AqGIm2ZT2USAE!VIq&Ct)M-v4#&CV1CuO0Ai#517A!?xD0JIegJEU(y
zA++NS-Oc0xD;l9i8f=Ue;xh&YSZf!y2cH32q{3$S;m40ar=MVBr?6v4;H_SmJ+LAc
zR+Qo@Vqx;ofrbOnrT(yERA9%az};m54imD+VyCR$Za+MWSftiyLfSKH2jiJXU*1=G
zYhOX2h(&LGv_$KuzY@P|UkFbcpbk|U!6LSMY3Q9R6Cc~_qUa+$RtZy%+Ik|q0dfEA
z<FY?Yo}rzXgjvL*WbFfq4qRCuIejdzpthcfIEkow@|iVmW=GJ*I0?_x!`7R^iX4c4
zjWCK><Y)p7XejNSxt}Hdy*)d!ghMq(D;72|0;&iNIUd+QIhyv@)aIrAJcvaQl87oK
z>NMkbo;OcX<DDUcVA1R(v~FKQ<U7=OKM;)3ibaWs548^Vi7(c>2fen>gt!nwE+AZG
zZJFbo^JUvB)I$>;2<wAbfPRcF2e>#Ps)&66T~WXQT~PpUwZax*L01UCTdmM@QDGLt
zcRN8BbHVreL-*`EKvyEbr^{gaq3dNA<U@+|2hg(uU|Squ>u_PyVDJ?KuxTvl7%T&{
z6$=}{fGzHV@!>@*0|RJ$n(QL>nSX)*@GN4HvYNu}pAYP}C@(2m7Vw5(D;Bl&Wa+T2
zBJWb~?-%y};YkBDfri<NMb&5OaO~?o#_X8OsAG8!#2?FnI+IO!@y?vvEw7qZ+_1Mp
zcBex)Mk^Mj>QS?CSRyNs?ELx->M<uotSFE%-^R55@inyFM!p1#ptqW*55IZ!4z;z`
zkcm;mqEvMaRu1Qm9F&UaesAA|?7s!s7)30~A|!=ShdHue%oX3hMy*1K=uPO|6%6Xg
zeuWyIL@Z{54HO`yKL!Q{1&ksVCA}PQaWHwr=9`uB9@Rb~s=AK1i*iCAyhmN1+z>)A
z9$>+UTGg2oU&J1Oo+JfZ|M~zr-31#1JpenQ3A*<mei$Cq<_1_13YAZQ7L~B|v9MiQ
z@BsqYbQyGOLj#l#JAn#5Z3S)hGC<YAr^{etz|bNbHk}5WW`ptJ8)BjEWPpST*+uM@
zH%(Q;vxtQS5QLV3pNHCq+zP@n7K@Vf8!R18EZ)2-SoDp(9<n|M%nhr^S!&hCYxbG#
zOT&ZZp4!(z8~`B|2)BY5l&0^sSa!kQ5~2e_9w5Aj-D&oW9i{!RQ7;o{h{Pyjk-Y&r
zQu|t2ijc(dclH|~HbF=O!egqBcFftPz2*VxbQcjb;15~*8N#Q&x6gps1R)zzFp5}|
zR-Qw&Lp0Zfs0qK{qfQ4oBw-Y>DDmgu>0thC&Hrm9Xaf^O6kUtvKe-cl>b<=c#D!3j
za4S7D&Uu>t+KcwEvkf2`O)-jCr1Wy(%$X1NCnILrJIOt?mx5@3kVI?%hHZ6_gy?{h
z&O{fnDCvQLgjOv2u`3gxW3jN4tKg^YL#MUi9dKwX6n^A1v{eZodxZ`(G(bD@urX2C
zVSVrpJ@oKC*y61Q*qAEpcot|Y6<%aQr^#T8z+hv*@UsVCMKE-_jasePiXs=rp;5$=
zdL{rgtwAXvhrYLIOI)73v+qVuQ&@^vq@)i%5liw2OIm2(Yt#+22QZIaK{l45AjLua
zQex4o-Up~f0uf`CG8P_@Yv$dwkAk`!Oc61|zw(Li#s7ESqutw~g};bZh<8vBnxLlc
z@!ozK*diE_O)!fx$5;ohGJcA>tB8n=sg>JLRySL|Mm-|AAsnNKMJXx|_&Ufv|2lWp
zy!ZA~VNL|m4w&m=QR3l2l!H=|pjWTcd;4CHdN?M0YeCPvQ?5L6uTc-rWAG;!E;6fb
zUA=eqHR>2O2_1LT`+rn$wkk2s1V|`{betLTpfq%B6t;*8b~H0=V<@~Rgs%UD?Ye?(
zR6PJ~B|1Pipu$h4g3Y``w+z6>Mqw)eVAE9$(A`!rK5V8RzW54S1j5HuVKey@cFeyu
zA9o%aMH;DR0w5Jl3+BGRZx34})sCFzu@q^@(hLEK4l&xbv#gU}+y91V2hfse%#Jxq
zhmgV1p=Y%(lZNMG`vb`OHV{4}4z|7%HVpyMPIy|Qzwe#y_umguudbR<h*6}Ww8j=h
zI`nb3+~n+gV{Z+y2tqm#zUTtB7aet}KjCSxClZS*^(`J_w2olAt6;4yl<-stb#Ob9
zvbp~=+VqkGX2%>Q{sfX7N)~tS*P8d<z6|0s2uZ}Dd$;#R?q)yz+CCbh140t9KKGpO
zhstQ}8}=xN7&f?Iv>G7>!pI-?u#Lm0Cvy>Q{cYZ{xKN4fCPq~T-Pa7K&^I{4szB6Z
zRjhCpp#*2}FF=b>*mTVU=-4E@!wVhbgcX?$(4rUKVTbO?htJ5vHmE{dov<PoHkJur
z+yrf9!iwAm=<X_bs}r`n3OdaNZ$-iuUBQZG*t8R@h=<WS;4mS(WB%fE&k#8ifY??e
zQqc-azYGivJPv=(gq**&{H=XGavnq~Vqs%#NMnNx3=NhJu<4M~NTxAtAiRhOrW|#N
zd6)yNh=bV!<Ac>3WINaui^WH*d1F5XEC3@2w{jLA+VE&u`zzEl2oeYn1e|oKIvKI|
zE!tskwhpjX8*G{jIoKE$L_6&A{S+Yo;=TPAm=i(th9U=8s|QwOf`k!R+hKz3^&a^d
z5A0Enm1!W{;oY+DwcxHTw@~l8IzapwABy`KI53WQMoKpqzG~dD?+f9$`Sk8P`v!#L
z85oE-Y>`P?<KmhN?@`xx5^;eG*Q#?rI*i}gn<CrS0NKfdZMp}e)#w17?tz~Z3M;~3
z9aw1V4`#jtbkP#*P9+BD;-&`JGz_d22b~#*t@mtzo;bw-Ey~~<Lt%XASSHLKSO@n2
zbS%;Vx}g;|gAXrKp}UzH{3$Eap2tlerhD*VMFwc{W#&yA__|L*yO-cG0cMCfc)a5O
z6<>B6<Mer05eO@iknCe%IAG@Bzp(2^ljs%uc(6H8;sN2M(omHQD0luaI1ucBmC9@7
z$}YH$Hue{f(P~6;XF}RDWrvchA&Qcho?#rh3~z2g8R)GJm^)F63I>AibmMvLmTC3G
zUIu0Wn0CYH2&0($Al~6h2-6v!A80e?6Dl#r8d3BmM8cP>qg>zTkW0{=2c~IUPI>td
zw$%mfN`?c3?=*_I?AfGp@*V0C-3Q_^T8&6WsKCv0x9t@IbZhyQUZP$M@__JGj7f<p
zi*H|kYi|Q_A(SM%d+E$sbE!8Q?xP;<eINu}oM0<L3AP$x7lbfC7X!if*u&O$!dib&
zCpN&A+C$~xtw$&ystmL~1<ZuYLyJt<@hS_T=bR=$7d655^25d?Vb|ipTa&PJN1?4r
z__?Ss_d)mm!_Fb4^ajzyg{dXOvttg97C0g006Tw_&`DA#S@c1?gMkENQ;hFx)TQ+S
zm`m$XI+6{34p(I|`U1B;v_A@W0|NuY2g0q*CrXXGjV&&cyx523ZAp`?(-ZqXWcwI$
zF^X6e|6NFNa9v~e_Sy9Js7v_|5MI>c?jsQP^fubLo(zOn1cdKA7#!Vj(;j{L0oKZd
zb<9!RxggQO;Y;g=*X!S-p0uQZxwIas==yL$>9+m5lad?X%Dh7jPY1%S&2{-)O0%n;
zq3zu#eD@Np!;9j-24{>S7A2Y(6di;XEB@5Jbr0=K0}p~#f#5_5|0~63(2hs3A*c_w
zv>tWFoP@*r(9b)406p&%b{A3tw4)9?Rs_~+gw22_K#N9r(Fr^G2|7mU0B!BU_VmMA
zpYV=3?6iKUKG>a4u#KOvV@=?v@<WS$_^u@A<;}3gNbqSMXzLZ`P6i4$h`z6n4;!9E
zEUa}2PRb5(4o#2$%?;lB#-7mFCyG7>T?dBH=SvU&du1<;oaYx*6Il1D=-?u5v*$(s
zJNtcL2S5o0!egIXri;WB$-cE;4Alar67mRUnSz|=Utdk%p^dq@<1b<rLL6os@bQxP
zece75Y!Qqg+}eg+tO4t|Lv#{epXwC5vu9!3CDc})1Lh5)5c6Opi$j*v<9Eds@9lG7
zLLj;!6{8i4RKzlXRxXq*`?_Gtx)-R&fG`jq2smRiwPdZ(MUr<txlQYv`}6#3)IInN
z{uo6pa&iKllr=$v>9pvXcW5*8gjY%!|NI=E@%cUKSqKgR1k(#B_&^x-99+l(9HLsW
z0nkNBu+#G4=b=IuBQ1a~GJ@SC+5p|n1UttRHmw6Y#}t0t2yBcLHZTC4VNZZ{=%KAx
z*g8?z%sXs16KuK)UX;SdG+}%CVMQ%$@e=fiYIu>1+Hql^bWzgL7y-xOS;V3w;SY%p
z`-FJ*EcJS4KLuF=Y3vi$frgFwfs!9XgPp^&S?S{2GGE#opy;c?C}LsyK}))gH@i8M
zrhQy>&g>ED#>)eQcLn9GRyzH={DJ*eumhk(0AYPQEbR{be)q^8$sG&~3^Eu+EK(6v
zkd}7NKIZ32)$Wh)P^VuW5I(gZc6}eIi)dJ4ww9G2Lpu%s0A>-3?9PdAuh^F@pZWdE
zuGjV{&>#U*2`TuB*amykOZIl(d1Lkdy+>^w60sW$R^*}hkBGajVADM)>4-rKqlg8&
zA5KU)tW!t|Pvm%N?+h1XU|?t<JTS4~lWnmh!%Nf)Y8(jPAvNdaEzi2F%V=jII1xWk
zkO1Ai#E=7-p)Y{8Vqr7%46tKPpo@GSK-IyDRCucqx(*XIEyVzBmBKe{LQnaJb@X9N
z=ixVo!Ui0mi<{u<JYh@xVXacwbP=q`g%!21F=6ONR@f~AuqFKsj^HpMdwNNhd-)LA
zy+rEt5=v{dAlcy^^sv4Tl%lfP0oK8Xjjf_&jRsQ(p{%D57H)rSZ-%VzLLJeq1eA+B
z7(6kimyn9Y383RH>~nMPHGO?!FOF>AhH8TL-CMbOw;=O<`(orQx`1#|xohjJ!=GNh
zM?I`Bp#Wog2^KyO<Bc4CtmLV8yZ+8T2qFL>9pVT~FY!3Qssfa7W{@H1KZpek3@GtR
z#4Q4_nQ>J2<DFhIFyy#me@k>?kdVeZ)M=uI7=rF}T5(xZ{@!b}TM`IgUJctzkFw5`
zl<6h(!}C5si@XD{=^)s&5i}$(z?Q;8<sG1{Ojs)rwrB~qL>^Y8!H#l<&8WkRN@!~n
zYCL=l6($cGFo3l>;jLI`s}y#mH1yU~vWv7AN*uhyv(<=_Wg2YYW1nTnMFo~tBT^A_
z;mjEuhsU3*xfs%(+3!F^6$8VCVvJTJYL*dkNVs&tKAmS1+R}2uCmTs^Juy?|&v~?C
zHwb45nDbDMGH$@TTj;_W9}fo=)^xRR{BKZ;!Uu$B+VyI*k0*M+LOYo%onV$1oxgPT
z&+1!fi)b`3rk7CS&%xi}f#bKOssG=j4s=YwJh=%udKn(XIlOhS_>}SPz5Qlb0D)*C
zmg*a-o|RZs`3CjSzy`u6KFyPysqk>kOSEal1dLWAtSSXLhoK?VK~696$99D~Xve~k
zd;AIUd-_qri-CkAp3%oX1z;yPL8oWn=a0f#X|U7uq5KBu7$-OYKrLiw(Fz;ugcYgq
zjhj$?unnHD=_}YN`mmXJ*m_dfY51_QO;{@wwjLCA?H;Ukh~UFq1fwZyHEyc?Wi&jC
zSoETD#qudl_vGH$*CD4VEJZ9VTp=`zga7#$b~CGos5AHpnHWVZN|DfD<KSccAzp6I
zanyknB4&Ug_A)S_EM{ULJR=L6E`UwbK+Ms@=$NBa1r1IPLb3;1x^KQfn*k=g?lWpj
zwYJOC=cwyNi8%KX;w}aT<Y)l3FktS6O)nvJ%mtXVAKDw-wSGF`!+X?oU^igS;G@Qa
zmBaG_`z+6^Xvd}yF+c(fPt+=l@L1$LUXd#vKi{KPSq^C!MJ!6|cS4ziQm{_iO{aIL
zhxie3Q3=d{DCfr>AUw?niwC59!oa|w1TId98v9%T?U*+}k9c-~j&;Ir-eiET$AnD_
z!IrkeEQX(u1YM5_Z;hHkI{I)vY?lvQBSao{v^0F03fAg{*$eI9!&Vf)?1ygzg<kUq
z+dvANo`P@egxLoP6S7;eXRBm~>7I9_qys*I>6lQ=DTh$PCpV#FeTPH`gMCd(`wl!r
zJ><?I9ixavvhTtf9&3kN{I7Q>G+(e!g2fbwRv<hx{?ckg>Vn=4_9#a@ClJmuup$BV
zNMbvTA{HfUGlV!iQesTn_yPU;J4=GC(I-O3MEP^xqqVw>3F-p{0|=uO^$ZdityrY+
zEJ#apanOic`Cs?ddwW6T2s%)OG4=^E8-c|fX0A_RK3)CZ{wYF`fx&?AO|GuLYRb9}
zzO`S3tWP1Ip#NBBO}UbD?j6P$CTyl0wq*jT3JEapJZaxhI9W&X2-=OM4nYL%J9k`K
zf{o=KTCafcg(|T1%xcK~gRZ$Gs)!AM7O@J@F->^K9=aqRwyqSmlpfZ~gmu(m9dURE
zA3DYf@36!8u<0h4L!o1(2~c|#pvRgtKxx>yrwPy^7gnUg2L_-UH{o|w!3Hc~i?d*@
zW#};`4IT~@9V^oF=PdW|Y{kMN1VT%}568nb_6f_<FdDLc)!KpK^Q>QtEN|>hkhAs%
z%tca2`V!L8%pIPsj}X?JcGW%{;s7W~_~>M)LI#v|nhk`{lZ16NQFb96h`=afQCoR7
z4%24r+H>ILJNvsZi$U~;Qi54Kzi-tE4*l1tmqIHL9w>OSw<~t)$M^PWAbVigfxw>k
z2T=~TCz`jY8=#%`e*o`YPzz?xbapuB>2j;<)qAuv1PCvZdh+VG@TN;>MIHm;OPXE&
z8gBJ^@DOe0-yEYAixLk9oE%zRCSEw-{1|lw1_R-XQMv-9Ri18pZx7qj00~khg7LS>
z*zSFr*fo1$hz2NW4=zrKDq_(O$Aeu84eO}G7E8g`bHZlOp;k4(rgfl2TLSc`Xa?wj
z0c?FJY^gu&bba`VPSBzeK9&kKA9jcytVo58<uX89%kTjKXi*KHxrdG2LKjcL2M{Q{
zG}<fPMtOJ^u_#Hn!4iIHbSH9AiDjSwrIpuE?ZEWJlGh>Q5$a{d3or)?P+DaN9331b
zRK&OZdw_c95fNjYRhu$x{>I-zJ(Nv>@RD!XGzy_sEVB0*6jI?gnxgodL4sfwg=Q26
zlrcpH!b|<J>l47&iUl8TIRA}^p}{G%fdVW`{Zaf;kmykL=4aRS+()Pb8bn+R2x}c$
zz#I&siMYfW)|)^{5BM*QzHo-e!{OI2KmK1Q-`gJt*#pB12(MfIzqa(qx5w}855n|-
zXd;g21}Ot!q}DWP0|gkDM#BaQpcj6?Z!CqjY7?NlrC>|%;YA&6F%;}r4_HwL<AYp@
zjA8PyRxI@9RCos;y7AKix?2iX#KT+9Q2nrJw+84I0T>_F`h{Cb;XuJm8-b|dS;QhG
z9q{oW@8&<_k-kalMJ09)i$YZ<ck;eOJuN<=0Arv4wMaL0(9(Vspq+XjwY5gVZYtEH
zcM0D?<oi+atM}4}sIBG&2^g(dc*+4YG#%LDKJZ&Ly+@svI}k~*h~;%!sHn>E-rfmp
z4wPshJcbKZiFR4B2u2Z$8lKh;8zyYlJw5Ba{cV^5V7d%rpa7{i0iLexo5Eqc#o;CD
z9a#;8dlP>a+GafbbIM);Y!8ee+=>m8oT0n3>mBMLcMM?|tym;?8W<N_J6L?+nDH+c
z?TF_F!bd#A&W}a8sHB1Lji#)V&108sdxLs{mO?7=tyuJvo8TRG=yVsX$bxtLt)OG5
zu&{(JZh|gmf=zS5_Uyy>uvRHd8|=iT2hgno3ech(cBC|HD*%Hp%svPWJ8Ta=?FBuY
z54NidCJt-8!WT=$g2RODvDi>&f1lx5#G+<V8~CNsHOS34EUj3Sr2pC0A<=x-jkG<F
zQP+hs6k!yx$i^}_Bs!?y`mt4E*&ftIL?lc@c)^ntXttH`T~J}`LoWS3^8~HKsDV+$
zqPDKY9E!gAX2|lQpEN{xsXwgM3>#a7IM<Y5(LC$gY_HNo&rwf&T9ANI#3IE5cr5*!
z_e1{U|Iu1`2_+asEK=1faDDLw`>H1b8)`}(qn?v`fbdfI33Fa?KHYR0<7@)x{#`hQ
zzLB?YwSd(E*OzD;axr&PA-U5dN#KD!8|%vTTYTQz&xQmfgft+${`iAQ#k1RA-=m%s
zMMSTssPfsFDW{*KZdk>$C<|l5D*C0-uwzT$MI>x#KD0I30Nnxr@fr96ZYUE{1Tw%*
z!-w9{1RKMJwR#<3(^$|}GHmMrYz!EFN<XYs3{?kjy~5V1LI)-ez|4ad(G1Xm3wY}n
zdiKD9K+1~PmN3)W;aS8&qYp|UE-IOdTow{h#Aap84Ub}dZ7+nZuL1K$Q)Fodg=B}0
z8#Nkrskc!VH8BvL?sD4Uz31nxOQ@%*I}k4NpdLkArzwF^#3E(w10|Ec*sn|KG@ShO
zo&8dn0brW&v<};{l(I~LclL9@>Y;=J;l1|v&cCcEn0w70wt@hnQx9Je%fR4Z?eOpO
z!MCE{-=hw^9LU5dVo{4`O9$>~-=L4Y&+Jh)kTMWHp$T^V9qMi)!o3NIe;620$3`{q
zwPG)v(Q$Oxb?(etPmTBXSug{@G~ofFZ7;;`z57V|5zkiL{GZ-kevLXt-4F^cPRJUI
zod8?c3cL6NdTa{3h=lQB(^y~+GBCiVyI|8)u*FueMOpBTu4vOxu-#Pfq7t@v2ez&i
zT13K&YS>bLn7z;qu?)~_{osqcVCR=wLW=$dKX90k-HLU#*pWRnidbTo`ooeoN^8x*
z(SdJX)$Vf!Z|!FwmxWl0SY&Aifh31Z+|PNMR=>8lN0t!4JKbeq{MpLkiCmb9Ud2=U
zf3O$@(HF`Iw(_c)S3X;vzX^3k!2!Zc%r9ogb5-7biE&9atVo5m>XC{d1LI%^hq~jZ
z>nEH+I}enB@IXOG)8wd=pWmY$*Ftz&PCp}ee_p|TdlgW4z_AZT5esuS0|UbXDgDp(
zCS99*=D&ZBx~ORZ=9vJ<(FD5S#Otf%Zbo;sYx)ilUY`s*6F>*<Mo@2p@cE->Hq<vZ
zOnQs9VU_S9hA?Yj=@-I>92AGG$ip~E>H_p6DFx`vI&62719bXmK{2GrfSud~KkN=x
zbV2pOj!K5@t%n`044;9Aoj(ejE`m-w!A_)t9eV=j!;U$Ds)H}qf);`BGXWqzW?+E$
znd~A>SgPG&XcTFro<9o9!r*{vu!3(8twc`qSc){1=@N%fhy7<CORnX4YCj2V5R`b3
zhtV-dDk>7vp4mA#Z*!WxzxyHDv=rf^idS8G)IYaiBii+EggfThE6;pV(M2no8Zc+@
zk-ZPf@~~q$xS?KyP=s4Yu<Ox~{0*L-fcIFT4D^m`PpzSZior{?MLmWXMH;F*;~awG
z1zyei|K9#N#DNg<1KzRE1v772Iozsx)3M9_C2DJI0pZpd!~zBeIT!;>dl9UXl;+51
zG4;Piz1Gkn4x`nGQspujIQZ52hjj_PM!hh_f$)|K@t;R$lq$bNJ!7CDn4mi~+)9ph
z@xMUboy7nd=fhTnVsy;WH;68P7KyM+kzsrDVH-B#9dl?A3NI?rH*i81C&7;=fvSfe
zI|7|PQh-h?9e_@2!H;f+o)86FR|<|d5CNTTg3av1#x~)5|6v_@SkVh@H6DN*P)K$W
zd(D2i$M7s-QHsih0EcG^&d&T<@9arCtWU}TcCKe5D3)M2Ap>6#%fN8J$stM9(Bqui
zGt?#D4+wY6#s589?aKQIZCZ-(bO`L+QrLP&h*J&lb<7je(vI1~jw?YO!z6qX5yX53
zv}q#38$DqcXP_48rWi#mY7r#h@Zw0&Oot4#8GHlGb*LyseuI?*i&lzy)Q|V5W6%kN
zcSXT2+<<L8fcTg2I$I@y<0g(4?@_Br2FxNBwF-%cUtofAK}!SS3tFDO*_&e<^#rvF
zVIW+Ez|xBw#A8r02HXiHs)#j!u0w^b2gNwQ6xv#4fOeSS$CSX9?!y)%!Pc3=I_xlc
zSO**GH24@MR6YTEzA6K>h=sL=VSD)DOZ=flbOW^Y3NPBB`e0iU;6*L8$cIf+IRt~l
zgzO@A;&QKHdXf}+E4EKeVFkPCJNs7Tw2h?|i_{^!;BEfN{$B&{WS_*R_H$t|4Wb+J
zF^X7}BB8;`;dr^u`(3{u*`u7pxq$H4V9mmB&zcrrN4*wr0pTJ6cCDH%$bE2(*@{If
zf-amnV+~&<g?b{C9>G=+%nFpMjEFt)u=TR29Z)rlA{NP=6J|zQJACa-OilRr-u?y5
zVi3ImvlWZ7Na{hDL;KO=mfP~4piVC_5IzRPH1uL`e(?jeORovf;KObgKyp9$xEWa0
z2dm0Z-02YQki})|>Ne@U{T`6NU^s#B$z!ooJWJ00e~-Ezn21(7thMb5vkOE!#DR+w
zqKeoCXc2n=x|@mtwvH6K9y9^gs)e4|1TSJ?(?77?PS8bEu$g)Ik<rk_OdxlGF|??J
z%R^hSuw7j6=_Oc^4LczUR#d~*p+e1tU3~HYZA-ub*vvkxH4Hf;mh2+-@f&vG;n|8s
zDk>SY+t1k>RlIz+|G-;<tyq+z`GBKCm!#sve;rRy??PITh*89%WEq2G2NSL**8FF#
z*`sWDbRfLp@k58|`+eKapxy{c^l9-Q&`y%VJH2GEY3^}*c@|wZA>FrVW08bA#Ov;=
z{bmz-kG52t@GdMh%@Zec6K|j$17eL)#3I*(paG3pi;YzN^S`&(M3$IPi!r@~5`PUg
z4sSlOJiqt-1?mMZL=;_eTt7npsoX@}4M#-l&{DLizhuFC)UoRZ%p<3fir@(|Z#p=5
z)lAA16M2hTR1$Ha%FK@jMH1)VquwsyKzN{lFQ_9eg6|3H&8iKN#22v&(6Lt7(a*5O
zMzAqb__Ppgj1<~YhcCH@6?sq^)@p<=s)7~8dXO<&_(oXhihu*q^HvwYidyKpR0im@
z5q#Y%bYKD2dW9{Pf-RzQfUPWmt{i~719q|$WXg@~BGyrtEo68Wv8Y+i!eI-`_R~7E
z-`ZCrXHhJzSY&C2fCL9I5r?1tcK1<d;uF&F7qJPk4)=E0{+VZc5%n%12EsG(jWMtM
zlI!-NjgiF@Y*j`HF#K;^`4nwYj}68^0aDg3NLyy-0Nag&x<$a6pnc$m1p@;El6l~T
zFVG=aIE6kI8}TPWYWerqXuFLFjm5@0c%7Zo?!NH7Js)xe8B}8wv8e8`c5vF6Y4ySO
zCF=EkMD!+L{!@Z6Kr|7D{2k5x+ZQ?i4eIIq4VY&FAT<FRraB+Cm-B4QGvR$}UjecQ
zh7S-Pi-jpgGLNWuaEt3!Et>t-z6fRjh;B$HzK9Kg7O@Jj)+@9%2;02`+f@adL5GjU
zLW@XPQ4DXr!irMZOgpTI1vw0aVFL?L`32B{f&<VZ7IywB>;e>cs}`gegrV2<!Q^3w
z{=s%_fs}wT%wE{W)_8E3kX^(EwCWiTjUtxR8GPiN2x<kvPW2}=7K@Zc!87p!w?60x
z1U#|-0uFvCVSstv9%_+p=Fo4mZZ*IEMSD-E76=thbkT)!aY};|L4Ec{>KPKP*HNcE
z4g_Kpv8e8}boi9M=g+11=crp53J8w@CaAiyTnM|2dR&VG;p1UEs@y(vN58huhXe?O
zbV!G{VqpUoC<71;t`2HU$@=Gw-r4s<bU?@i!gpjr6f!WN7eTPGVAxnIN_x4F;-K{Z
zqr~&s@9aBa27qV<!qb)8F6!NT_5y7~E)k344sBu$)i-)*uMDyWh8ujq*&AEO9b*O`
z{iLY?=%vXF{*YGS0q9~NSdoT4a}J-mhZbe90RVVW1zo2J(+@A2pvRZMb~C|?R+uNC
zMHWmRwviQfxiWkMDy&2A3HCGDMcV(_PZftokw)qaK2p&HKGzd={wSd%o?#IIp;;VU
z6gf&u?H{1FIu2xGv>H*zK5ZN>9QnGd@x>8)e~3X4(t&X6Z<*iDKc^>dLcP<8h;uJz
z-8wO8$>XQ?C`A^78b*<ZQdBU6Iap>Le^aylg}pn(g%Fa68$@A;(z!r%Ku97sP(l=9
zPD{bofnqIU9>hC5sLan%|MMPokyk?nMynAe9vI{txU}mWdVjsJ?}AwjrU@5Ku$x>_
ziWmlOf>jIeVWH#28=u;zgYAJ54k;Kj_((;pLhI|R_98o)%j4Leqqa5~2%jGdyPi-J
zY88a?CFsss?28`7Onr#jJ82+c1|R*rQ`ntMunsi5l?d&4!*&<Jr-`6T++n+gU_~4Q
z^du=*2OG|Z6?qUJGQbvJJ%F|v55QWj&>496ZY1a;ss*s6{?H}%unstUS_wLB1;1ew
z+KN2@8LS|?h+Wxw&SKaXu?cBu)()_-&niY}6oDu#9dnc-=s=)@19wNPY5Y~x4TcU$
z7)2~nkq$md>dP~Ot4iw+LZ>f5mVqvJCcK^!s*(ZaxDp~-W3av7NbUf4&|p~^R`eo!
zpTQu(p-GcHac<KS)H{zF2;X^hENG)}pYTb0)YdKGYXM<nrZD$F{Hp?QHNuKmlz4Cm
zbU1T%{f6Ju-`mR|d%yv6>=Rj<;X$l}_r{QqzuVuS#UJ676tF$`=r?-8M;xFG^nr&r
zOEoVq?nS#egdqr{h((E~1n3>?3?ER>dsQI32j7M7{MnoD(Qb16K)BT(;=YsVl;C}P
zSSuY8560jY8@5&|#v&>7u}=lqj6JmIgk72pD?;Jd-$86*fG>iAb=0A!?ZZ0q46y6(
zVAD#_X(8BB{0Y$WSK-rGu-#A4A{M?23F;o$N&?s-DcHtR7!6z64?AWB=57W<aF~!?
z#PZBLqB=B+Sl*2648sf)Z-&+>8D{NMf~IJAGkWv3PRS_hbh^QWIA0T1q`)E)LbEu)
z_TV=n=Rqt*EV4A{j-IM(UpM5Pc#gVc-JlSoh(*zt5b3aOPI`*V$0w+#(-U#w2zTqd
zX`gd1qV8=c;vCgk4oOQc$iG6J4k2OJ6KYn|!8k=9W%|q^#vwq?t*`Rdd;80f0EUnT
zr5LB^qv&H$bGR89?3jG-9qNjR1B7==rJQv8zVyI*`+A5?5Hf-A9(C9>3d(xY22YGj
znjr?lNGXSXE4bgNIK4%i9wdB+)RyU%$xptcU5r9R>lb#i6UttC2HYn*VRXzf_Qb<x
z)}edR6<~bm3_JYjWmrcXT0~*&d55kGeE?ll)BxR337fuw?IMD0@H_zRz{7R}!HZJp
zI!|~<AGRkSDh@BwC_K3-(4%3PcFa+Vm<RC=ODwPTO<{X$PiW6OioOLg@QW-yz*0Mi
z7Qh^9MA4TJ>~Kr>>7BE-ckQ=<)Wfg=;YB7erKm}paBJ-6=Ub&47u>gB2r~dg8x&x4
z%u(E_5bI!}WGOGa{x#~gYD65>4BNX78?ysh$G|{%UF1J8@m^=2C-(Xf4G{7`5JsyJ
zA^{_%99|zPd&JcD&b|jG1fmz@V06q;TB8U29BO-Z&dXZ;-aZke9)^iH-bGF$ozI5l
zJ=!rXgcpaw&h<pCB8)Ivji~N)a=0eV_2&PD7pQkAH4t7r1@Y1cw6hKfUo!|>FN!+w
zfO~8cqt(a&z25Es^mva2(6LR}jC29CRR>#I4{rrRT+9H!iwRn!!Pjj<*ImMw(nCAQ
zuw6*7-A1r;MIS(0tFYZk4$vYMHf;kR%Y+uqaC4zE;tc4EmtX?`(AH*yJ7uj#)z&tl
z;n`|L>iAqZ(_;(2dkNRNPuQ3lO44tza)@*I5W=1G05xkj5I>FJ0Zmb0sv!pB?j@8W
z??IFUleQ!4$&)Y87K;$xkg0Yh{pekfgQ$DE8wf8h+FvDRankNRTGsY+fQ19Bs6c9s
zU#NKbz&?qUsdCc4_x3-*9)l7J75H0?3o;!J>91^(|MAX#4O9!5T0r>tj73)^<wVud
z_SicRzF8Hv9##Ns5sa|FXf>j=ZVQqfV0S3_!nA;C!n??9Gfd6ePQ0_%1FMG;L==^<
zst5h9F3|D9*gDD>tw!{*NC()uOK9s1z5x^JCfG3=3{ZLaZXZ}j7}imS?&4{H7NxKm
zY6a+6BkZ^jcq<aRZW7jkhAnZ2jWxoHJZMph(Ykb{tVlE0c^*A9iZoKEb5N3KLaYO9
zDSJP18G@xFjG~W0$zk(!ZKZ<tH>j5)f2hLf2qR^A2Og%=_Szf7UvGc<&i*hg1%YTH
z*7w223{l3o90+&7R@`3@CI9k0>b>&{Ss1NGq@v!yIM~zS-Q@7B>0ck(M}XW1!wQ63
zM?BB^el?$cfZD2hK=^chm=(y;fbC)s6!`-w4l5ow^iKQz9(B6oK^jJ@5mq&U?P)M{
zfE^AO2Nr-4gm+)TRKmtSA$%i((<HE^>?qglVOeL1;!Xxb2i7?)VqqQk?8P7!LC6Ne
z$BjS~A_jsWd;x;)gs5X+K<S-8Z>OC43wjhHwz3qX`PhKArhNhQJP+9XNdmM7eE{0i
zhR@SLPoaZd)!7gMol1wDQU|?K^8kzwYv#e64jrU~UE&05F2c`hhBXJF=D@YV*3iS`
zp@W#PG8f*gglm8>pn*g7pyQi3XTuB`__S$;tY3y4f~Vunk)i9&n4yO`+{e^9CBwhd
z8MRVo_+SsN?VK}0GQyoXU_<j$+NK0e$%yf0%81QK@aDiYjInh}MtCQZhLR~s-i)A;
z#o8$v8KU0I8Cl+>mbIv9S-^ou`pu<Tf8N@+AZKDMWi3is^x4*7Yv|@Dd%nI$U6{sz
zxib^i+yci-gRR3OzV{cSK0ZTRYfSjq^gR;U%r~{Kqt2Hw*b>Y;up@p@j=g8_!YFHD
zkpgxngN1{9wAbR8V{h#3Ap#Kc0P(Aqkm?u)&{+)7=?*xBKJW2QGf0=$;2r8wJq{5V
zJ#?ga0G~?t=6e4mP2M-CM^`0a?#x7vUWSGwhX%)G`$cbDKy8jQ5N?i7%D%KZPx?68
zO&`RE3yM1!7%+P1DDn^D9XQ@zZx^}n&i))E1|Vbt;dvc9cFQLPJ0IIOKy*OJ1B9;;
z@L3mldTRSidq0Q{C`rP$p$pI=7Cx8>TWu5$DKg;~!o!M4Xmb%hxClF%2-<Uo7j?l9
z{ZJQy(iE5pmxr!yf*sN109{B4KRO9EhzdJ(4mx!YHUvyS%!f6D8K7(0Va;LKIVLcg
z(#`LxUSUqdvxr44^6VVWv3a=fjDKg}gq*gq6tPG}hQfxT3-;F^>U}>Q{>=U&ICh}K
zgc6MAEK~+cu{cy09{8SWaT@j5bRxD5K@DI)J=2$PPg}0`v~2tDo2WNTG&o@tu}J>g
z@cHi_`(4w2hJ^1&Tl398xaT`1rh#dfz)SlnFb9L^2Esc}LCQcFwW!B?d_DM_kPD9D
z`QIzv+iwJEhT#i^7|mJa=w&cSahP*lRq;9hQ?#W4?gXnWm=!1&(38--MC}$SU=*=P
zRfvJHu(QLxgHjEuyWgVj<UK$*JQvq3UoY4H26X`|5w~KW6}`E&;?jH6?P7Qqcw)3-
z9iT<519ZF22H2n{v<QR^Qo^?hK^JDi0v3KV2eb%;tx1QSsSM}CTAeVB(AFnxyASLD
zQ22ZlbQ3+C57P&uVe-&zOt5(>_@F7Y$cB$xK<3t9t=QBI`3!~9q#m}i{F2PP^eGt(
zJse?RW{}&I$x}2Nr_@f-=;2B&F3HSIEXgk_o)SAnqlYP_WJ-p6+mx)GYzzz;lVFTZ
zFvcYq;}evT1v&*2#x`PPV1TiMkk~~?>`6%MO-SrZNbFBYY#}CunMO$LAS8AX5_=L7
zdlM4-5)%6p5?hEF$$TVs5E8oxi9HF4y$OkZ35op)i7mu}WIhr*2#H;U#GZu2-h{-y
zgv9=Y#1>*jG9QT@gv2gFVoyS1Z$e^ULSlbHVhgb$nUBN{LSh#ou_uAqsac!Y7#Mn3
zG7^hYru49vr)H*SloU_#%TV*{5lqWTEGbFNi!Uik%qvdIFUp<Lk)ht1;Tk(dBf~vo
zQpTo?OBtWQ2`dYfqOw3qDGQW_vOtL^3zTZIK*=Qwluoih2_y@YGO|EPA`6rjvOtL-
z3l#fVplHtm#dj7cqO**c85puaQJV#d(=1SAW`SZc3lx1>pm@syMOc;*3rHP^0L4uf
zC{nUOv5^IehAdF{XMqAf%ZL@E4n%;$I13cGS)fqO0tIIlC>*mu0hndP22uwiKw*^y
z3Z&E?=A6{{DLpd9**U3+MR|G!MX3cv`N^rp#hH2Odht1lNvSzgdU$+5v};~+eoAT)
iJmcc(P1<!jLyr<@fb=54g+PX0)(<uYhSH?cBs~DCFVLv~

literal 0
HcmV?d00001

diff --git a/pages/RFxp/RFxp.py b/pages/RFxp/RFxp.py
new file mode 100755
index 0000000..5557e04
--- /dev/null
+++ b/pages/RFxp/RFxp.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python3
+#-*- coding:utf-8 -*-
+##
+## RFxp.py
+##
+##  Created on: Oct 08, 2020
+##      Author: Yacine Izza
+##      E-mail: yacine.izza@univ-toulouse.fr
+##
+
+#
+#==============================================================================
+from __future__ import print_function
+from data import Data
+from options import Options
+import os
+import sys
+import pickle
+import resource
+
+
+from xrf import XRF, RF2001, Dataset
+import numpy as np
+
+
+
+#
+#==============================================================================
+def show_info():
+    """
+        Print info message.
+    """
+    print("c RFxp: Random Forest explainer.")
+    print('c')
+
+    
+#
+#==============================================================================
+def pickle_save_file(filename, data):
+    try:
+        f = open(filename, "wb")
+        pickle.dump(data, f)
+        f.close()
+    except Exception:
+        print("Cannot save to file", filename)
+        sys.exit(1)
+
+def pickle_load_file(filename):
+    try:
+        f = open(filename, "rb")
+        data = pickle.load(f)
+        f.close()
+        return data
+    except Exception as e:
+        print(e)
+        print("Cannot load from file", filename)
+        sys.exit(1)
+        
+    
+#
+#==============================================================================
+if __name__ == '__main__':
+    # parsing command-line options
+    options = Options(sys.argv)
+    
+    # making output unbuffered
+    if sys.version_info.major == 2:
+        sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
+
+    # showing head
+    show_info()
+
+        
+        
+    if options.files:
+        cls = None
+        xrf = None
+        
+        print("loading data ...")
+        data = Dataset(filename=options.files[0], 
+                    separator=options.separator, use_categorical = options.use_categorical)
+            
+        if options.train:
+            '''
+            data = Dataset(filename=options.files[0], mapfile=options.mapfile,
+                    separator=options.separator,
+                    use_categorical = options.use_categorical)
+            '''        
+            params = {'n_trees': options.n_estimators,
+                        'depth': options.maxdepth}
+            cls = RF2001(**params)
+            train_accuracy, test_accuracy = cls.train(data)
+            
+            if options.verb == 1:
+                print("----------------------")
+                print("Train accuracy: {0:.2f}".format(100. * train_accuracy))
+                print("Test accuracy: {0:.2f}".format(100. * test_accuracy))
+                print("----------------------")           
+            
+            xrf = XRF(cls, data.feature_names, data.target_name, options.verb)
+            #xrf.test_tree_ensemble()          
+            
+            bench_name = os.path.basename(options.files[0])
+            assert (bench_name.endswith('.csv'))
+            bench_name = os.path.splitext(bench_name)[0]
+            bench_dir_name = options.output + "/RF2001/" + bench_name
+            try:
+                os.stat(bench_dir_name)
+            except OSError:
+                os.makedirs(bench_dir_name)
+
+            basename = (os.path.join(bench_dir_name, bench_name +
+                            "_nbestim_" + str(options.n_estimators) +
+                            "_maxdepth_" + str(options.maxdepth)))
+
+            modfile =  basename + '.mod.pkl'
+            print("saving model to", modfile)
+            pickle_save_file(modfile, cls)        
+
+
+        # read a sample from options.explain
+        if options.explain:
+            options.explain = [float(v.strip()) for v in options.explain.split(',')]
+            
+            if not xrf:
+                print("loading model ...")
+                cls = pickle_load_file(options.files[1])
+                #print()
+                #print("class skl:",cls.forest.classes_)
+                #print("feat names:",data.feature_names)
+                #print("extended name:",data.extended_feature_names_as_array_strings)
+                #print("target:",data.target_name)
+                #print()
+                xrf = XRF(cls, data.feature_names, data.target_name, options.verb)
+                if options.verb:
+                    # print test accuracy of the RF model
+                    _, X_test, _, y_test = data.train_test_split()
+                    X_test = data.transform(X_test) 
+                    cls.print_accuracy(X_test, y_test) 
+            
+            expl = xrf.explain(options.explain, options.xtype)
+            
+            print(f"expl len: {len(expl)}")
+            
+            del xrf.enc
+            del xrf.x            
+          
+            
\ No newline at end of file
diff --git a/pages/RFxp/data.py b/pages/RFxp/data.py
new file mode 100644
index 0000000..6c1546d
--- /dev/null
+++ b/pages/RFxp/data.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+##
+## data.py
+##
+##  Created on: Sep 20, 2017
+##      Author: Alexey Ignatiev, Nina Narodytska
+##      E-mail: aignatiev@ciencias.ulisboa.pt, narodytska@vmware.com
+##
+
+#
+#==============================================================================
+from __future__ import print_function
+import collections
+import itertools
+import os, pickle
+import six
+from six.moves import range
+import numpy as np
+
+
+#
+#==============================================================================
+class Data(object):
+    """
+        Class for representing data (transactions).
+    """
+
+    def __init__(self, filename=None, fpointer=None, mapfile=None,
+            separator=',', use_categorical = False):
+        """
+            Constructor and parser.
+        """
+
+        self.names = None
+        self.nm2id = None
+        self.samps = None
+        self.wghts = None
+        self.feats = None
+        self.fvmap = None
+        self.ovmap = {}
+        self.fvars = None
+        self.fname = filename
+        self.mname = mapfile
+        self.deleted = set([])
+
+        if filename:
+            with open(filename, 'r') as fp:
+                self.parse(fp, separator)
+        elif fpointer:
+            self.parse(fpointer, separator)
+
+        if self.mname:
+            self.read_orig_values()
+
+        # check if we have extra info about categorical_features
+
+        if (use_categorical):
+            extra_file = filename+".pkl"
+            try:
+                f =  open(extra_file, "rb")
+                print("Attempt: loading extra data from ", extra_file)
+                extra_info = pickle.load(f)
+                print("loaded")
+                f.close()
+                self.categorical_features = extra_info["categorical_features"]
+                self.categorical_names = extra_info["categorical_names"]
+                self.class_names = extra_info["class_names"]
+                self.categorical_onehot_names  = extra_info["categorical_names"].copy()
+
+                for i, name in enumerate(self.class_names):
+                    self.class_names[i] = str(name).replace("b'","'")
+                for c in self.categorical_names.items():
+                    clean_feature_names = []
+                    for i, name in enumerate(c[1]):
+                        name = str(name).replace("b'","'")
+                        clean_feature_names.append(name)
+                    self.categorical_names[c[0]] = clean_feature_names
+
+            except Exception as e:
+                # f may be undefined here if open() itself failed, so do not close it
+                print("Please provide info about categorical features or omit option -c", e)
+                exit()
+
+    def parse(self, fp, separator):
+        """
+            Parse input file.
+        """
+
+        # reading data set from file
+        lines = fp.readlines()
+
+        # reading preamble
+        self.names = lines[0].strip().split(separator)
+        self.feats = [set([]) for n in self.names]
+        del(lines[0])
+
+        # filling name to id mapping
+        self.nm2id = {name: i for i, name in enumerate(self.names)}
+
+        self.nonbin2bin = {}
+        for name in self.nm2id:
+            spl = name.rsplit(':',1)
+            if (spl[0] not in self.nonbin2bin):
+                self.nonbin2bin[spl[0]] = [name]
+            else:
+                self.nonbin2bin[spl[0]].append(name)
+
+        # reading training samples
+        self.samps, self.wghts = [], []
+
+        for line, w in six.iteritems(collections.Counter(lines)):
+            sample = line.strip().split(separator)
+            for i, f in enumerate(sample):
+                if f:
+                    self.feats[i].add(f)
+            self.samps.append(sample)
+            self.wghts.append(w)
+
+        # direct and opposite mappings for items
+        idpool = itertools.count(start=0)
+        FVMap = collections.namedtuple('FVMap', ['dir', 'opp'])
+        self.fvmap = FVMap(dir={}, opp={})
+
+        # mapping features to ids
+        for i in range(len(self.names) - 1):
+            feats = sorted(list(self.feats[i]), reverse=True)
+            if len(feats) > 2:
+                for l in feats:
+                    self.fvmap.dir[(self.names[i], l)] = l
+            else:
+                self.fvmap.dir[(self.names[i], feats[0])] = 1
+                if len(feats) == 2:
+                    self.fvmap.dir[(self.names[i], feats[1])] = 0
+
+        # opposite mapping
+        for key, val in six.iteritems(self.fvmap.dir):
+            self.fvmap.opp[val] = key
+
+        # determining feature variables (excluding class variables)
+        for v, pair in six.iteritems(self.fvmap.opp):
+            if pair[0] == self.names[-1]:
+                self.fvars = v - 1
+                break
+
+    def read_orig_values(self):
+        """
+            Read original values for all the features.
+            (from a separate CSV file)
+        """
+
+        self.ovmap = {}
+
+        for line in open(self.mname, 'r'):
+            featval, bits = line.strip().split(',')
+            feat, val = featval.split(':')
+
+            for i, b in enumerate(bits):
+                f = '{0}:b{1}'.format(feat, i + 1)
+                v = self.fvmap.dir[(f, '1')]
+
+                if v not in self.ovmap:
+                    self.ovmap[v] = [feat]
+
+                if -v not in self.ovmap:
+                    self.ovmap[-v] = [feat]
+
+                self.ovmap[v if b == '1' else -v].append(val)
diff --git a/pages/RFxp/options.py b/pages/RFxp/options.py
new file mode 100644
index 0000000..446eb71
--- /dev/null
+++ b/pages/RFxp/options.py
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+##
+## options.py
+##
+##  Created on: Dec 7, 2018
+##      Author: Alexey Ignatiev, Nina Narodytska
+##      E-mail: aignatiev@ciencias.ulisboa.pt, narodytska@vmware.com
+##
+
+#
+#==============================================================================
+from __future__ import print_function
+import getopt
+import math
+import os
+import sys
+
+
+#
+#==============================================================================
+class Options(object):
+    """
+        Class for representing command-line options.
+    """
+
+    def __init__(self, command):
+        """
+            Constructor.
+        """
+
+        # actions
+        self.train = False
+        self.encode = 'none'
+        self.explain = ''
+        self.xtype = 'abd'
+        self.use_categorical = False
+
+        # training options
+        self.accmin = 0.95
+        self.n_estimators = 100
+        self.maxdepth = 3
+        self.testsplit = 0.2
+        self.seed = 7
+
+        # other options
+        self.files = None
+        self.output = 'Classifiers'
+        self.mapfile = None
+        self.separator = ','
+        self.smallest = False
+        self.solver = 'g3'
+        self.verb = 0
+
+        
+        if command:
+            self.parse(command)
+
+    def parse(self, command):
+        """
+            Parser.
+        """
+
+        self.command = command
+
+        try:
+            opts, args = getopt.getopt(command[1:],
+                                    'a:e:hc:d:Mn:o:s:tvx:X:',
+                                    ['accmin=', 'encode=', 'help', 'use-categorical=',
+                                     'maxdepth=', 'minimum', 'nbestims=',
+                                     'output=', 'seed=', 'sep=', 'solver=', 'testsplit=',
+                                     'train', 'verbose', 'explain=', 'xtype='])
+        except getopt.GetoptError as err:
+            sys.stderr.write(str(err).capitalize() + '\n')
+            self.usage()
+            sys.exit(1)
+
+        for opt, arg in opts:
+            if opt in ('-a', '--accmin'):
+                self.accmin = float(arg)
+            elif opt in ('-c', '--use-categorical'):
+                self.use_categorical = True
+            elif opt in ('-d', '--maxdepth'):
+                self.maxdepth = int(arg)
+            elif opt in ('-e', '--encode'):
+                self.encode = str(arg)
+            elif opt in ('-h', '--help'):
+                self.usage()
+                sys.exit(0)
+
+            elif opt in ('-M', '--minimum'):
+                self.smallest = True
+            elif opt in ('-n', '--nbestims'):
+                self.n_estimators = int(arg)
+            elif opt in ('-o', '--output'):
+                self.output = str(arg)
+    
+            elif opt == '--seed':
+                self.seed = int(arg)
+            elif opt == '--sep':
+                self.separator = str(arg)
+            elif opt in ('-s', '--solver'):
+                self.solver = str(arg)
+            elif opt == '--testsplit':
+                self.testsplit = float(arg)
+            elif opt in ('-t', '--train'):
+                self.train = True
+            elif opt in ('-v', '--verbose'):
+                self.verb += 1
+            elif opt in ('-x', '--explain'):
+                self.explain = str(arg)
+            elif opt in ('-X', '--xtype'):
+                self.xtype = str(arg)
+            else:
+                assert False, 'Unhandled option: {0} {1}'.format(opt, arg)
+
+        if self.encode == 'none':
+            self.encode = None
+
+        self.files = args
+
+    def usage(self):
+        """
+            Print usage message.
+        """
+
+        print('Usage: ' + os.path.basename(self.command[0]) + ' [options] input-file')
+        print('Options:')
+        #print('        -a, --accmin=<float>       Minimal accuracy')
+        #print('                                   Available values: [0.0, 1.0] (default = 0.95)')
+        #print('        -c, --use-categorical      Treat categorical features as categorical (with categorical features info if available)')
+        print('        -d, --maxdepth=<int>       Maximal depth of a tree')
+        print('                                   Available values: [1, INT_MAX] (default = 3)')
+        #print('        -e, --encode=<smt>         Encode a previously trained model')
+        #print('                                   Available values: sat, maxsat, none (default = none)')
+        print('        -h, --help                 Show this message')
+  
+        #print('        -m, --map-file=<string>    Path to a file containing a mapping to original feature values. (default: none)')
+        #print('        -M, --minimum              Compute a smallest size explanation (instead of a subset-minimal one)')
+        print('        -n, --nbestims=<int>       Number of trees in the ensemble')
+        print('                                   Available values: [1, INT_MAX] (default = 100)')
+        print('        -o, --output=<string>      Directory where output files will be stored (default: \'Classifiers\')')
+       
+        print('        --seed=<int>               Seed for random splitting')
+        print('                                   Available values: [1, INT_MAX] (default = 7)')
+        print('        --sep=<string>             Field separator used in input file (default = \',\')')
+        print('        -s, --solver=<string>      A SAT oracle to use')
+        print('                                   Available values: glucose3, minisat (default = g3)')
+        print('        -t, --train                Train a model of a given dataset')
+        print('        --testsplit=<float>        Training and test sets split')
+        print('                                   Available values: [0.0, 1.0] (default = 0.2)')
+        print('        -v, --verbose              Increase verbosity level')
+        print('        -x, --explain=<string>     Explain a decision for a given comma-separated sample (default: none)')
+        print('        -X, --xtype=<string>       Type of explanation to compute: abductive or contrastive')
diff --git a/pages/RFxp/pima.csv b/pages/RFxp/pima.csv
new file mode 100644
index 0000000..f3fac60
--- /dev/null
+++ b/pages/RFxp/pima.csv
@@ -0,0 +1,769 @@
+Pregnant,plasma glucose,Diastolic blood pressure,Triceps skin fold thickness,2-Hour serum insulin,Body mass index,Diabetes pedigree function,Age,target
+4.0,117.0,62.0,12.0,0.0,29.7,0.38,30.0,1
+4.0,158.0,78.0,0.0,0.0,32.9,0.8029999999999999,31.0,1
+2.0,118.0,80.0,0.0,0.0,42.9,0.693,21.0,1
+13.0,129.0,0.0,30.0,0.0,39.9,0.569,44.0,1
+5.0,162.0,104.0,0.0,0.0,37.7,0.151,52.0,1
+7.0,114.0,64.0,0.0,0.0,27.4,0.732,34.0,1
+6.0,102.0,82.0,0.0,0.0,30.8,0.18,36.0,1
+1.0,196.0,76.0,36.0,249.0,36.5,0.875,29.0,1
+9.0,102.0,76.0,37.0,0.0,32.9,0.665,46.0,1
+7.0,161.0,86.0,0.0,0.0,30.4,0.165,47.0,1
+7.0,114.0,66.0,0.0,0.0,32.8,0.258,42.0,1
+4.0,184.0,78.0,39.0,277.0,37.0,0.264,31.0,1
+0.0,137.0,40.0,35.0,168.0,43.1,2.2880000000000003,33.0,1
+6.0,125.0,76.0,0.0,0.0,33.8,0.121,54.0,1
+11.0,155.0,76.0,28.0,150.0,33.3,1.3530000000000002,51.0,1
+7.0,187.0,50.0,33.0,392.0,33.9,0.826,34.0,1
+7.0,178.0,84.0,0.0,0.0,39.9,0.331,41.0,1
+0.0,180.0,66.0,39.0,0.0,42.0,1.893,25.0,1
+8.0,120.0,86.0,0.0,0.0,28.4,0.259,22.0,1
+2.0,105.0,80.0,45.0,191.0,33.7,0.711,29.0,1
+0.0,118.0,84.0,47.0,230.0,45.8,0.551,31.0,1
+7.0,150.0,78.0,29.0,126.0,35.2,0.6920000000000001,54.0,1
+1.0,149.0,68.0,29.0,127.0,29.3,0.349,42.0,1
+8.0,188.0,78.0,0.0,0.0,47.9,0.13699999999999998,43.0,1
+3.0,173.0,78.0,39.0,185.0,33.8,0.97,31.0,1
+0.0,189.0,104.0,25.0,0.0,34.3,0.435,41.0,1
+9.0,164.0,84.0,21.0,0.0,30.8,0.831,32.0,1
+4.0,131.0,68.0,21.0,166.0,33.1,0.16,28.0,0
+6.0,85.0,78.0,0.0,0.0,31.2,0.382,42.0,0
+5.0,143.0,78.0,0.0,0.0,45.0,0.19,47.0,0
+4.0,110.0,66.0,0.0,0.0,31.9,0.47100000000000003,29.0,0
+10.0,115.0,0.0,0.0,0.0,35.3,0.134,29.0,0
+5.0,73.0,60.0,0.0,0.0,26.8,0.268,27.0,0
+7.0,106.0,92.0,18.0,0.0,22.7,0.235,48.0,0
+0.0,98.0,82.0,15.0,84.0,25.2,0.299,22.0,0
+2.0,88.0,58.0,26.0,16.0,28.4,0.7659999999999999,22.0,0
+1.0,73.0,50.0,10.0,0.0,23.0,0.248,21.0,0
+6.0,144.0,72.0,27.0,228.0,33.9,0.255,40.0,0
+5.0,122.0,86.0,0.0,0.0,34.7,0.29,33.0,0
+1.0,107.0,72.0,30.0,82.0,30.8,0.821,24.0,0
+0.0,101.0,64.0,17.0,0.0,21.0,0.252,21.0,0
+6.0,80.0,66.0,30.0,0.0,26.2,0.313,41.0,0
+0.0,173.0,78.0,32.0,265.0,46.5,1.159,58.0,0
+2.0,122.0,76.0,27.0,200.0,35.9,0.483,26.0,0
+2.0,99.0,52.0,15.0,94.0,24.6,0.637,21.0,0
+1.0,151.0,60.0,0.0,0.0,26.1,0.179,22.0,0
+6.0,105.0,70.0,32.0,68.0,30.8,0.122,37.0,0
+1.0,119.0,44.0,47.0,63.0,35.5,0.28,25.0,0
+4.0,132.0,86.0,31.0,0.0,28.0,0.419,63.0,0
+10.0,129.0,76.0,28.0,122.0,35.9,0.28,39.0,0
+2.0,106.0,56.0,27.0,165.0,29.0,0.426,22.0,0
+4.0,127.0,88.0,11.0,155.0,34.5,0.598,28.0,0
+1.0,157.0,72.0,21.0,168.0,25.6,0.12300000000000001,24.0,0
+0.0,101.0,76.0,0.0,0.0,35.7,0.198,26.0,0
+6.0,125.0,68.0,30.0,120.0,30.0,0.46399999999999997,32.0,0
+2.0,82.0,52.0,22.0,115.0,28.5,1.699,25.0,0
+0.0,113.0,80.0,16.0,0.0,31.0,0.8740000000000001,21.0,0
+0.0,100.0,70.0,26.0,50.0,30.8,0.597,21.0,0
+2.0,120.0,76.0,37.0,105.0,39.7,0.215,29.0,0
+6.0,183.0,94.0,0.0,0.0,40.8,1.4609999999999999,45.0,0
+0.0,125.0,96.0,0.0,0.0,22.5,0.262,21.0,0
+1.0,126.0,56.0,29.0,152.0,28.7,0.8009999999999999,21.0,0
+9.0,89.0,62.0,0.0,0.0,22.5,0.142,33.0,0
+3.0,84.0,68.0,30.0,106.0,31.9,0.591,25.0,0
+2.0,122.0,60.0,18.0,106.0,29.8,0.7170000000000001,22.0,0
+2.0,117.0,90.0,19.0,71.0,25.2,0.313,21.0,0
+2.0,89.0,90.0,30.0,0.0,33.5,0.292,42.0,0
+1.0,91.0,54.0,25.0,100.0,25.2,0.23399999999999999,23.0,0
+6.0,102.0,90.0,39.0,0.0,35.7,0.674,28.0,0
+12.0,106.0,80.0,0.0,0.0,23.6,0.13699999999999998,44.0,0
+4.0,129.0,86.0,20.0,270.0,35.1,0.231,23.0,0
+6.0,129.0,90.0,7.0,326.0,19.6,0.5820000000000001,60.0,0
+9.0,134.0,74.0,33.0,60.0,25.9,0.46,81.0,0
+3.0,111.0,90.0,12.0,78.0,28.4,0.495,29.0,0
+1.0,128.0,82.0,17.0,183.0,27.5,0.115,22.0,0
+1.0,71.0,62.0,0.0,0.0,21.8,0.41600000000000004,26.0,0
+7.0,142.0,60.0,33.0,190.0,28.8,0.687,61.0,0
+4.0,115.0,72.0,0.0,0.0,28.9,0.376,46.0,1
+9.0,165.0,88.0,0.0,0.0,30.4,0.302,49.0,1
+13.0,152.0,90.0,33.0,29.0,26.8,0.731,43.0,1
+13.0,126.0,90.0,0.0,0.0,43.4,0.583,42.0,1
+6.0,194.0,78.0,0.0,0.0,23.5,0.129,59.0,1
+4.0,146.0,78.0,0.0,0.0,38.5,0.52,67.0,1
+3.0,129.0,92.0,49.0,155.0,36.4,0.968,32.0,1
+2.0,108.0,80.0,0.0,0.0,27.0,0.259,52.0,1
+0.0,123.0,72.0,0.0,0.0,36.3,0.258,52.0,1
+14.0,175.0,62.0,30.0,0.0,33.6,0.212,38.0,1
+3.0,107.0,62.0,13.0,48.0,22.9,0.6779999999999999,23.0,1
+8.0,143.0,66.0,0.0,0.0,34.9,0.129,41.0,1
+17.0,163.0,72.0,41.0,114.0,40.9,0.8170000000000001,47.0,1
+11.0,135.0,0.0,0.0,0.0,52.3,0.578,40.0,1
+9.0,156.0,86.0,28.0,155.0,34.3,1.189,42.0,1
+3.0,176.0,86.0,27.0,156.0,33.3,1.1540000000000001,52.0,1
+5.0,85.0,74.0,22.0,0.0,29.0,1.224,32.0,1
+3.0,173.0,84.0,33.0,474.0,35.7,0.258,22.0,1
+6.0,147.0,80.0,0.0,0.0,29.5,0.17800000000000002,50.0,1
+6.0,195.0,70.0,0.0,0.0,30.9,0.32799999999999996,31.0,1
+10.0,108.0,66.0,0.0,0.0,32.4,0.272,42.0,1
+9.0,140.0,94.0,0.0,0.0,32.7,0.7340000000000001,45.0,1
+6.0,0.0,68.0,41.0,0.0,39.0,0.727,41.0,1
+2.0,155.0,74.0,17.0,96.0,26.6,0.433,27.0,1
+7.0,181.0,84.0,21.0,192.0,35.9,0.586,51.0,1
+9.0,156.0,86.0,0.0,0.0,24.8,0.23,53.0,1
+7.0,109.0,80.0,31.0,0.0,35.9,1.127,43.0,1
+2.0,71.0,70.0,27.0,0.0,28.0,0.586,22.0,0
+10.0,92.0,62.0,0.0,0.0,25.9,0.16699999999999998,31.0,0
+12.0,88.0,74.0,40.0,54.0,35.3,0.37799999999999995,48.0,0
+2.0,128.0,64.0,42.0,0.0,40.0,1.101,24.0,0
+10.0,115.0,98.0,0.0,0.0,24.0,1.022,34.0,0
+1.0,79.0,60.0,42.0,48.0,43.5,0.6779999999999999,23.0,0
+1.0,100.0,74.0,12.0,46.0,19.5,0.149,28.0,0
+1.0,119.0,88.0,41.0,170.0,45.3,0.507,26.0,0
+9.0,72.0,78.0,25.0,0.0,31.6,0.28,38.0,0
+8.0,194.0,80.0,0.0,0.0,26.1,0.551,67.0,0
+13.0,153.0,88.0,37.0,140.0,40.6,1.1740000000000002,39.0,0
+2.0,119.0,0.0,0.0,0.0,19.6,0.8320000000000001,72.0,0
+2.0,88.0,74.0,19.0,53.0,29.0,0.22899999999999998,22.0,0
+2.0,130.0,96.0,0.0,0.0,22.6,0.268,21.0,0
+0.0,94.0,0.0,0.0,0.0,0.0,0.256,25.0,0
+8.0,110.0,76.0,0.0,0.0,27.8,0.237,58.0,0
+2.0,92.0,76.0,20.0,0.0,24.2,1.6980000000000002,28.0,0
+0.0,101.0,62.0,0.0,0.0,21.9,0.336,25.0,0
+2.0,122.0,70.0,27.0,0.0,36.8,0.34,27.0,0
+0.0,125.0,68.0,0.0,0.0,24.7,0.20600000000000002,21.0,0
+4.0,117.0,64.0,27.0,120.0,33.2,0.23,24.0,0
+1.0,85.0,66.0,29.0,0.0,26.6,0.35100000000000003,31.0,0
+2.0,108.0,62.0,10.0,278.0,25.3,0.8809999999999999,22.0,0
+2.0,74.0,0.0,0.0,0.0,0.0,0.102,22.0,0
+7.0,136.0,90.0,0.0,0.0,29.9,0.21,50.0,0
+3.0,115.0,66.0,39.0,140.0,38.1,0.15,28.0,0
+10.0,133.0,68.0,0.0,0.0,27.0,0.245,36.0,0
+1.0,139.0,46.0,19.0,83.0,28.7,0.654,22.0,0
+11.0,127.0,106.0,0.0,0.0,39.0,0.19,51.0,0
+4.0,99.0,68.0,38.0,0.0,32.8,0.145,33.0,0
+5.0,77.0,82.0,41.0,42.0,35.8,0.156,35.0,0
+1.0,139.0,62.0,41.0,480.0,40.7,0.536,21.0,0
+2.0,115.0,64.0,22.0,0.0,30.8,0.42100000000000004,21.0,0
+4.0,137.0,84.0,0.0,0.0,31.2,0.252,30.0,0
+2.0,100.0,54.0,28.0,105.0,37.8,0.498,24.0,0
+1.0,93.0,56.0,11.0,0.0,22.5,0.41700000000000004,22.0,0
+0.0,165.0,76.0,43.0,255.0,47.9,0.259,26.0,0
+2.0,129.0,0.0,0.0,0.0,38.5,0.304,41.0,0
+0.0,141.0,84.0,26.0,0.0,32.4,0.433,22.0,0
+0.0,101.0,65.0,28.0,0.0,24.6,0.237,22.0,0
+5.0,126.0,78.0,27.0,22.0,29.6,0.439,40.0,0
+3.0,82.0,70.0,0.0,0.0,21.1,0.389,25.0,0
+1.0,83.0,68.0,0.0,0.0,18.2,0.624,27.0,0
+9.0,106.0,52.0,0.0,0.0,31.2,0.38,42.0,0
+3.0,116.0,0.0,0.0,0.0,23.5,0.187,23.0,0
+4.0,110.0,76.0,20.0,100.0,28.4,0.11800000000000001,27.0,0
+3.0,111.0,56.0,39.0,0.0,30.1,0.557,30.0,0
+4.0,85.0,58.0,22.0,49.0,27.8,0.306,28.0,0
+0.0,118.0,64.0,23.0,89.0,0.0,1.7309999999999999,21.0,0
+5.0,147.0,78.0,0.0,0.0,33.7,0.218,65.0,0
+0.0,131.0,0.0,0.0,0.0,43.2,0.27,26.0,1
+4.0,123.0,62.0,0.0,0.0,32.0,0.226,35.0,1
+9.0,152.0,78.0,34.0,171.0,34.2,0.893,33.0,1
+2.0,155.0,52.0,27.0,540.0,38.7,0.24,25.0,1
+0.0,104.0,64.0,37.0,64.0,33.6,0.51,22.0,1
+9.0,112.0,82.0,24.0,0.0,28.2,1.2819999999999998,50.0,1
+8.0,155.0,62.0,26.0,495.0,34.0,0.5429999999999999,46.0,1
+5.0,115.0,76.0,0.0,0.0,31.2,0.34299999999999997,44.0,1
+5.0,189.0,64.0,33.0,325.0,31.2,0.583,29.0,1
+0.0,162.0,76.0,36.0,0.0,49.6,0.364,26.0,1
+5.0,158.0,84.0,41.0,210.0,39.4,0.395,29.0,1
+3.0,187.0,70.0,22.0,200.0,36.4,0.408,36.0,1
+7.0,103.0,66.0,32.0,0.0,39.1,0.344,31.0,1
+0.0,198.0,66.0,32.0,274.0,41.3,0.502,28.0,1
+10.0,168.0,74.0,0.0,0.0,38.0,0.537,34.0,1
+0.0,140.0,65.0,26.0,130.0,42.6,0.431,24.0,1
+3.0,169.0,74.0,19.0,125.0,29.9,0.268,31.0,1
+9.0,164.0,78.0,0.0,0.0,32.8,0.14800000000000002,45.0,1
+5.0,109.0,62.0,41.0,129.0,35.8,0.514,25.0,1
+0.0,131.0,66.0,40.0,0.0,34.3,0.196,22.0,1
+14.0,100.0,78.0,25.0,184.0,36.6,0.41200000000000003,46.0,1
+0.0,167.0,0.0,0.0,0.0,32.3,0.8390000000000001,30.0,1
+8.0,167.0,106.0,46.0,231.0,37.6,0.165,43.0,1
+2.0,174.0,88.0,37.0,120.0,44.5,0.6459999999999999,24.0,1
+0.0,138.0,60.0,35.0,167.0,34.6,0.534,21.0,1
+8.0,181.0,68.0,36.0,495.0,30.1,0.615,60.0,1
+2.0,102.0,86.0,36.0,120.0,45.5,0.127,23.0,1
+3.0,150.0,76.0,0.0,0.0,21.0,0.207,37.0,0
+7.0,179.0,95.0,31.0,0.0,34.2,0.16399999999999998,60.0,0
+0.0,102.0,78.0,40.0,90.0,34.5,0.23800000000000002,24.0,0
+1.0,96.0,64.0,27.0,87.0,33.2,0.289,21.0,0
+3.0,116.0,74.0,15.0,105.0,26.3,0.107,24.0,0
+1.0,164.0,82.0,43.0,67.0,32.8,0.341,50.0,0
+1.0,130.0,70.0,13.0,105.0,25.9,0.47200000000000003,22.0,0
+2.0,91.0,62.0,0.0,0.0,27.3,0.525,22.0,0
+0.0,114.0,80.0,34.0,285.0,44.2,0.16699999999999998,27.0,0
+6.0,114.0,0.0,0.0,0.0,0.0,0.18899999999999997,26.0,0
+12.0,121.0,78.0,17.0,0.0,26.5,0.259,62.0,0
+4.0,92.0,80.0,0.0,0.0,42.2,0.237,29.0,0
+1.0,90.0,68.0,8.0,0.0,24.5,1.138,36.0,0
+1.0,109.0,38.0,18.0,120.0,23.1,0.40700000000000003,26.0,0
+10.0,75.0,82.0,0.0,0.0,33.3,0.263,38.0,0
+1.0,143.0,74.0,22.0,61.0,26.2,0.256,21.0,0
+10.0,162.0,84.0,0.0,0.0,27.7,0.182,54.0,0
+7.0,150.0,66.0,42.0,342.0,34.7,0.718,42.0,0
+0.0,117.0,0.0,0.0,0.0,33.8,0.932,44.0,0
+8.0,65.0,72.0,23.0,0.0,32.0,0.6,42.0,0
+3.0,99.0,62.0,19.0,74.0,21.8,0.27899999999999997,26.0,0
+3.0,96.0,78.0,39.0,0.0,37.3,0.23800000000000002,40.0,0
+7.0,62.0,78.0,0.0,0.0,32.6,0.391,41.0,0
+5.0,128.0,80.0,0.0,0.0,34.6,0.14400000000000002,45.0,0
+5.0,110.0,68.0,0.0,0.0,26.0,0.292,30.0,0
+2.0,75.0,64.0,24.0,55.0,29.7,0.37,33.0,0
+2.0,108.0,64.0,0.0,0.0,30.8,0.158,21.0,0
+2.0,87.0,0.0,23.0,0.0,28.9,0.773,25.0,0
+7.0,119.0,0.0,0.0,0.0,25.2,0.209,37.0,0
+0.0,102.0,86.0,17.0,105.0,29.3,0.695,27.0,0
+0.0,126.0,84.0,29.0,215.0,30.7,0.52,24.0,0
+0.0,132.0,78.0,0.0,0.0,32.4,0.39299999999999996,21.0,0
+1.0,108.0,88.0,19.0,0.0,27.1,0.4,24.0,0
+0.0,123.0,88.0,37.0,0.0,35.2,0.19699999999999998,29.0,0
+5.0,88.0,66.0,21.0,23.0,24.4,0.342,30.0,0
+8.0,112.0,72.0,0.0,0.0,23.6,0.84,58.0,0
+0.0,84.0,82.0,31.0,125.0,38.2,0.233,23.0,0
+2.0,83.0,65.0,28.0,66.0,36.8,0.629,24.0,0
+7.0,137.0,90.0,41.0,0.0,32.0,0.391,39.0,0
+2.0,68.0,62.0,13.0,15.0,20.1,0.257,23.0,0
+0.0,117.0,66.0,31.0,188.0,30.8,0.493,22.0,0
+0.0,93.0,60.0,0.0,0.0,35.3,0.263,25.0,0
+3.0,100.0,68.0,23.0,81.0,31.6,0.9490000000000001,28.0,0
+4.0,112.0,78.0,40.0,0.0,39.4,0.23600000000000002,38.0,0
+1.0,143.0,84.0,23.0,310.0,42.4,1.0759999999999998,22.0,0
+6.0,92.0,92.0,0.0,0.0,19.9,0.188,28.0,0
+2.0,127.0,58.0,24.0,275.0,27.7,1.6,25.0,0
+2.0,94.0,68.0,18.0,76.0,26.0,0.561,21.0,0
+0.0,78.0,88.0,29.0,40.0,36.9,0.434,21.0,0
+0.0,152.0,82.0,39.0,272.0,41.5,0.27,27.0,0
+6.0,134.0,70.0,23.0,130.0,35.4,0.542,29.0,1
+11.0,136.0,84.0,35.0,130.0,28.3,0.26,42.0,1
+5.0,139.0,80.0,35.0,160.0,31.6,0.361,25.0,1
+3.0,158.0,70.0,30.0,328.0,35.5,0.344,35.0,1
+0.0,188.0,82.0,14.0,185.0,32.0,0.682,22.0,1
+6.0,104.0,74.0,18.0,156.0,29.9,0.722,41.0,1
+6.0,119.0,50.0,22.0,176.0,27.1,1.318,33.0,1
+8.0,124.0,76.0,24.0,600.0,28.7,0.687,52.0,1
+0.0,119.0,0.0,0.0,0.0,32.4,0.141,24.0,1
+1.0,88.0,30.0,42.0,99.0,55.0,0.496,26.0,1
+7.0,142.0,90.0,24.0,480.0,30.4,0.128,43.0,1
+10.0,101.0,86.0,37.0,0.0,45.6,1.136,38.0,1
+0.0,145.0,0.0,0.0,0.0,44.2,0.63,31.0,1
+10.0,90.0,85.0,32.0,0.0,34.9,0.825,56.0,1
+1.0,117.0,88.0,24.0,145.0,34.5,0.40299999999999997,40.0,1
+5.0,115.0,98.0,0.0,0.0,52.9,0.209,28.0,1
+0.0,179.0,90.0,27.0,0.0,44.1,0.6859999999999999,23.0,1
+7.0,129.0,68.0,49.0,125.0,38.5,0.439,43.0,1
+0.0,138.0,0.0,0.0,0.0,36.3,0.9329999999999999,25.0,1
+3.0,129.0,64.0,29.0,115.0,26.4,0.21899999999999997,28.0,1
+3.0,162.0,52.0,38.0,0.0,37.2,0.652,24.0,1
+9.0,184.0,85.0,15.0,0.0,30.0,1.213,49.0,1
+6.0,124.0,72.0,0.0,0.0,27.6,0.368,29.0,1
+4.0,171.0,72.0,0.0,0.0,43.6,0.479,26.0,1
+3.0,128.0,72.0,25.0,190.0,32.4,0.5489999999999999,27.0,1
+0.0,131.0,88.0,0.0,0.0,31.6,0.743,32.0,1
+10.0,115.0,0.0,0.0,0.0,0.0,0.261,30.0,1
+13.0,106.0,72.0,54.0,0.0,36.6,0.17800000000000002,45.0,0
+0.0,74.0,52.0,10.0,36.0,27.8,0.26899999999999996,22.0,0
+5.0,109.0,75.0,26.0,0.0,36.0,0.546,60.0,0
+2.0,83.0,66.0,23.0,50.0,32.2,0.4970000000000001,22.0,0
+4.0,154.0,62.0,31.0,284.0,32.8,0.237,23.0,0
+1.0,90.0,62.0,18.0,59.0,25.1,1.268,25.0,0
+6.0,111.0,64.0,39.0,0.0,34.2,0.26,24.0,0
+0.0,126.0,86.0,27.0,120.0,27.4,0.515,21.0,0
+1.0,96.0,122.0,0.0,0.0,22.4,0.207,27.0,0
+5.0,99.0,74.0,27.0,0.0,29.0,0.203,32.0,0
+1.0,108.0,60.0,46.0,178.0,35.5,0.415,24.0,0
+4.0,120.0,68.0,0.0,0.0,29.6,0.7090000000000001,34.0,0
+6.0,107.0,88.0,0.0,0.0,36.8,0.727,31.0,0
+4.0,114.0,65.0,0.0,0.0,21.9,0.4320000000000001,37.0,0
+2.0,94.0,76.0,18.0,66.0,31.6,0.649,23.0,0
+0.0,102.0,75.0,23.0,0.0,0.0,0.5720000000000001,21.0,0
+1.0,91.0,64.0,24.0,0.0,29.2,0.192,21.0,0
+1.0,0.0,74.0,20.0,23.0,27.7,0.299,21.0,0
+11.0,103.0,68.0,40.0,0.0,46.2,0.126,42.0,0
+1.0,135.0,54.0,0.0,0.0,26.7,0.687,62.0,0
+2.0,100.0,64.0,23.0,0.0,29.7,0.368,21.0,0
+2.0,110.0,74.0,29.0,125.0,32.4,0.698,27.0,0
+0.0,137.0,68.0,14.0,148.0,24.8,0.14300000000000002,21.0,0
+0.0,104.0,76.0,0.0,0.0,18.4,0.5820000000000001,27.0,0
+4.0,147.0,74.0,25.0,293.0,34.9,0.385,30.0,0
+0.0,104.0,64.0,23.0,116.0,27.8,0.45399999999999996,23.0,0
+2.0,105.0,58.0,40.0,94.0,34.9,0.225,25.0,0
+3.0,102.0,44.0,20.0,94.0,30.8,0.4,26.0,0
+2.0,141.0,58.0,34.0,128.0,25.4,0.6990000000000001,24.0,0
+1.0,95.0,66.0,13.0,38.0,19.6,0.33399999999999996,25.0,0
+3.0,106.0,72.0,0.0,0.0,25.8,0.207,27.0,0
+2.0,106.0,64.0,35.0,119.0,30.5,1.4,34.0,0
+3.0,148.0,66.0,25.0,0.0,32.5,0.256,22.0,0
+5.0,139.0,64.0,35.0,140.0,28.6,0.41100000000000003,26.0,0
+4.0,99.0,76.0,15.0,51.0,23.2,0.223,21.0,0
+1.0,111.0,62.0,13.0,182.0,24.0,0.138,23.0,0
+6.0,165.0,68.0,26.0,168.0,33.6,0.631,49.0,0
+3.0,125.0,58.0,0.0,0.0,31.6,0.151,24.0,0
+2.0,81.0,72.0,15.0,76.0,30.1,0.547,25.0,0
+6.0,117.0,96.0,0.0,0.0,28.7,0.157,30.0,0
+2.0,68.0,70.0,32.0,66.0,25.0,0.187,25.0,0
+1.0,97.0,70.0,40.0,0.0,38.1,0.218,30.0,0
+0.0,91.0,68.0,32.0,210.0,39.9,0.381,25.0,0
+1.0,95.0,74.0,21.0,73.0,25.9,0.6729999999999999,36.0,0
+3.0,81.0,86.0,16.0,66.0,27.5,0.306,22.0,0
+8.0,95.0,72.0,0.0,0.0,36.8,0.485,57.0,0
+6.0,99.0,60.0,19.0,54.0,26.9,0.4970000000000001,32.0,0
+5.0,105.0,72.0,29.0,325.0,36.9,0.159,28.0,0
+2.0,101.0,58.0,35.0,90.0,21.8,0.155,22.0,0
+7.0,124.0,70.0,33.0,215.0,25.5,0.161,37.0,0
+0.0,135.0,68.0,42.0,250.0,42.3,0.365,24.0,1
+5.0,166.0,76.0,0.0,0.0,45.7,0.34,27.0,1
+7.0,97.0,76.0,32.0,91.0,40.9,0.871,32.0,1
+7.0,184.0,84.0,33.0,0.0,35.5,0.355,41.0,1
+8.0,176.0,90.0,34.0,300.0,33.7,0.467,58.0,1
+3.0,171.0,72.0,33.0,135.0,33.3,0.19899999999999998,24.0,1
+8.0,133.0,72.0,0.0,0.0,32.9,0.27,39.0,1
+1.0,122.0,64.0,32.0,156.0,35.1,0.6920000000000001,30.0,1
+9.0,122.0,56.0,0.0,0.0,33.3,1.114,33.0,1
+4.0,145.0,82.0,18.0,0.0,32.5,0.235,70.0,1
+10.0,148.0,84.0,48.0,237.0,37.6,1.001,51.0,1
+2.0,93.0,64.0,32.0,160.0,38.0,0.674,23.0,1
+11.0,143.0,94.0,33.0,146.0,36.6,0.254,51.0,1
+10.0,111.0,70.0,27.0,0.0,27.5,0.141,40.0,1
+6.0,162.0,62.0,0.0,0.0,24.3,0.17800000000000002,50.0,1
+8.0,154.0,78.0,32.0,0.0,32.4,0.44299999999999995,45.0,1
+8.0,183.0,64.0,0.0,0.0,23.3,0.672,32.0,1
+7.0,100.0,0.0,0.0,0.0,30.0,0.484,32.0,1
+8.0,186.0,90.0,35.0,225.0,34.5,0.423,37.0,1
+5.0,112.0,66.0,0.0,0.0,37.8,0.261,41.0,1
+4.0,183.0,0.0,0.0,0.0,28.4,0.212,36.0,1
+3.0,174.0,58.0,22.0,194.0,32.9,0.593,36.0,1
+0.0,121.0,66.0,30.0,165.0,34.3,0.203,33.0,1
+12.0,84.0,72.0,31.0,0.0,29.7,0.297,46.0,1
+7.0,107.0,74.0,0.0,0.0,29.6,0.254,31.0,1
+5.0,137.0,108.0,0.0,0.0,48.8,0.22699999999999998,37.0,1
+5.0,187.0,76.0,27.0,207.0,43.6,1.034,53.0,1
+4.0,103.0,60.0,33.0,192.0,24.0,0.966,33.0,0
+1.0,131.0,64.0,14.0,415.0,23.7,0.389,21.0,0
+1.0,120.0,80.0,48.0,200.0,38.9,1.162,41.0,0
+4.0,95.0,70.0,32.0,0.0,32.1,0.612,24.0,0
+5.0,117.0,86.0,30.0,105.0,39.1,0.251,42.0,0
+2.0,90.0,60.0,0.0,0.0,23.5,0.191,25.0,0
+10.0,139.0,80.0,0.0,0.0,27.1,1.4409999999999998,57.0,0
+1.0,146.0,56.0,0.0,0.0,29.7,0.564,29.0,0
+7.0,133.0,84.0,0.0,0.0,40.2,0.696,37.0,0
+0.0,102.0,64.0,46.0,78.0,40.6,0.496,21.0,0
+2.0,112.0,66.0,22.0,0.0,25.0,0.307,24.0,0
+4.0,116.0,72.0,12.0,87.0,22.1,0.46299999999999997,37.0,0
+0.0,93.0,100.0,39.0,72.0,43.4,1.021,35.0,0
+0.0,102.0,52.0,0.0,0.0,25.1,0.078,21.0,0
+7.0,81.0,78.0,40.0,48.0,46.7,0.261,42.0,0
+0.0,100.0,88.0,60.0,110.0,46.8,0.9620000000000001,31.0,0
+3.0,122.0,78.0,0.0,0.0,23.0,0.254,40.0,0
+4.0,90.0,0.0,0.0,0.0,28.0,0.61,31.0,0
+2.0,100.0,70.0,52.0,57.0,40.5,0.677,25.0,0
+2.0,98.0,60.0,17.0,120.0,34.7,0.198,22.0,0
+3.0,130.0,64.0,0.0,0.0,23.1,0.314,22.0,0
+1.0,119.0,54.0,13.0,50.0,22.3,0.205,24.0,0
+1.0,136.0,74.0,50.0,204.0,37.4,0.39899999999999997,24.0,0
+1.0,81.0,72.0,18.0,40.0,26.6,0.28300000000000003,24.0,0
+1.0,125.0,70.0,24.0,110.0,24.3,0.221,25.0,0
+0.0,105.0,64.0,41.0,142.0,41.5,0.17300000000000001,22.0,0
+1.0,100.0,72.0,12.0,70.0,25.3,0.6579999999999999,28.0,0
+4.0,118.0,70.0,0.0,0.0,44.5,0.904,26.0,0
+7.0,125.0,86.0,0.0,0.0,37.6,0.304,51.0,0
+2.0,139.0,75.0,0.0,0.0,25.6,0.16699999999999998,29.0,0
+2.0,112.0,86.0,42.0,160.0,38.4,0.24600000000000002,28.0,0
+3.0,106.0,54.0,21.0,158.0,30.9,0.292,24.0,0
+1.0,124.0,60.0,32.0,0.0,35.8,0.514,21.0,0
+1.0,97.0,70.0,15.0,0.0,18.2,0.147,21.0,0
+1.0,100.0,66.0,15.0,56.0,23.6,0.6659999999999999,26.0,0
+5.0,99.0,54.0,28.0,83.0,34.0,0.499,30.0,0
+5.0,147.0,75.0,0.0,0.0,29.9,0.434,28.0,0
+0.0,117.0,80.0,31.0,53.0,45.2,0.08900000000000001,24.0,0
+2.0,125.0,60.0,20.0,140.0,33.8,0.08800000000000001,31.0,0
+2.0,85.0,65.0,0.0,0.0,39.6,0.93,27.0,0
+3.0,83.0,58.0,31.0,18.0,34.3,0.336,25.0,0
+3.0,99.0,54.0,19.0,86.0,25.6,0.154,24.0,0
+1.0,79.0,75.0,30.0,0.0,32.0,0.396,22.0,0
+4.0,146.0,85.0,27.0,100.0,28.9,0.18899999999999997,27.0,0
+3.0,74.0,68.0,28.0,45.0,29.7,0.293,23.0,0
+11.0,85.0,74.0,0.0,0.0,30.1,0.3,35.0,0
+1.0,97.0,66.0,15.0,140.0,23.2,0.48700000000000004,22.0,0
+4.0,84.0,90.0,23.0,56.0,39.5,0.159,25.0,0
+6.0,154.0,78.0,41.0,140.0,46.1,0.5710000000000001,27.0,0
+1.0,99.0,72.0,30.0,18.0,38.6,0.41200000000000003,21.0,0
+8.0,197.0,74.0,0.0,0.0,25.9,1.1909999999999998,39.0,1
+0.0,181.0,88.0,44.0,510.0,43.3,0.222,26.0,1
+3.0,141.0,0.0,0.0,0.0,30.0,0.7609999999999999,27.0,1
+0.0,107.0,62.0,30.0,74.0,36.6,0.757,25.0,1
+4.0,109.0,64.0,44.0,99.0,34.8,0.905,26.0,1
+2.0,146.0,70.0,38.0,360.0,28.0,0.337,29.0,1
+4.0,125.0,80.0,0.0,0.0,32.3,0.536,27.0,1
+3.0,182.0,74.0,0.0,0.0,30.5,0.345,29.0,1
+12.0,92.0,62.0,7.0,258.0,27.6,0.9259999999999999,44.0,1
+1.0,102.0,74.0,0.0,0.0,39.5,0.293,42.0,1
+1.0,113.0,64.0,35.0,0.0,33.6,0.5429999999999999,21.0,1
+1.0,167.0,74.0,17.0,144.0,23.4,0.447,33.0,1
+2.0,128.0,78.0,37.0,182.0,43.3,1.224,31.0,1
+9.0,171.0,110.0,24.0,240.0,45.4,0.721,54.0,1
+10.0,125.0,70.0,26.0,115.0,31.1,0.205,41.0,1
+0.0,146.0,70.0,0.0,0.0,37.9,0.33399999999999996,28.0,1
+0.0,141.0,0.0,0.0,0.0,42.4,0.205,29.0,1
+2.0,197.0,70.0,99.0,0.0,34.7,0.575,62.0,1
+1.0,125.0,50.0,40.0,167.0,33.3,0.9620000000000001,28.0,1
+9.0,112.0,82.0,32.0,175.0,34.2,0.26,36.0,1
+1.0,180.0,0.0,0.0,0.0,43.3,0.282,41.0,1
+2.0,124.0,68.0,28.0,205.0,32.9,0.875,30.0,1
+1.0,168.0,88.0,29.0,0.0,35.0,0.905,52.0,1
+3.0,121.0,52.0,0.0,0.0,36.0,0.127,25.0,1
+8.0,100.0,74.0,40.0,215.0,39.4,0.6609999999999999,43.0,1
+7.0,160.0,54.0,32.0,175.0,30.5,0.588,39.0,1
+8.0,120.0,0.0,0.0,0.0,30.0,0.183,38.0,1
+3.0,124.0,80.0,33.0,130.0,33.2,0.305,26.0,0
+13.0,145.0,82.0,19.0,110.0,22.2,0.245,57.0,0
+1.0,71.0,78.0,50.0,45.0,33.2,0.42200000000000004,21.0,0
+6.0,151.0,62.0,31.0,120.0,35.5,0.6920000000000001,28.0,0
+3.0,108.0,62.0,24.0,0.0,26.0,0.223,25.0,0
+3.0,90.0,78.0,0.0,0.0,42.7,0.5589999999999999,21.0,0
+1.0,0.0,68.0,35.0,0.0,32.0,0.389,22.0,0
+13.0,76.0,60.0,0.0,0.0,32.8,0.18,41.0,0
+2.0,87.0,58.0,16.0,52.0,32.7,0.166,25.0,0
+0.0,67.0,76.0,0.0,0.0,45.3,0.19399999999999998,46.0,0
+5.0,108.0,72.0,43.0,75.0,36.1,0.263,33.0,0
+9.0,124.0,70.0,33.0,402.0,35.4,0.282,34.0,0
+2.0,105.0,75.0,0.0,0.0,23.3,0.56,53.0,0
+3.0,126.0,88.0,41.0,235.0,39.3,0.7040000000000001,27.0,0
+10.0,122.0,78.0,31.0,0.0,27.6,0.512,45.0,0
+13.0,106.0,70.0,0.0,0.0,34.2,0.251,52.0,0
+6.0,154.0,74.0,32.0,193.0,29.3,0.8390000000000001,39.0,0
+0.0,91.0,80.0,0.0,0.0,32.4,0.601,27.0,0
+5.0,88.0,78.0,30.0,0.0,27.6,0.258,37.0,0
+7.0,102.0,74.0,40.0,105.0,37.2,0.204,45.0,0
+3.0,88.0,58.0,11.0,54.0,24.8,0.267,22.0,0
+4.0,189.0,110.0,31.0,0.0,28.5,0.68,37.0,0
+1.0,90.0,62.0,12.0,43.0,27.2,0.58,24.0,0
+2.0,122.0,52.0,43.0,158.0,36.2,0.816,28.0,0
+1.0,103.0,30.0,38.0,83.0,43.3,0.183,33.0,0
+9.0,123.0,70.0,44.0,94.0,33.1,0.374,40.0,0
+2.0,101.0,58.0,17.0,265.0,24.2,0.614,23.0,0
+2.0,84.0,50.0,23.0,76.0,30.4,0.968,21.0,0
+6.0,103.0,66.0,0.0,0.0,24.3,0.249,29.0,0
+7.0,94.0,64.0,25.0,79.0,33.3,0.738,41.0,0
+0.0,93.0,60.0,25.0,92.0,28.7,0.532,22.0,0
+1.0,153.0,82.0,42.0,485.0,40.6,0.687,23.0,0
+10.0,101.0,76.0,48.0,180.0,32.9,0.171,63.0,0
+4.0,129.0,60.0,12.0,231.0,27.5,0.527,31.0,0
+0.0,161.0,50.0,0.0,0.0,21.9,0.254,65.0,0
+8.0,99.0,84.0,0.0,0.0,35.4,0.38799999999999996,50.0,0
+4.0,110.0,92.0,0.0,0.0,37.6,0.191,30.0,0
+0.0,106.0,70.0,37.0,148.0,39.4,0.605,22.0,0
+8.0,120.0,78.0,0.0,0.0,25.0,0.409,64.0,0
+0.0,99.0,0.0,0.0,0.0,25.0,0.253,22.0,0
+1.0,111.0,86.0,19.0,0.0,30.1,0.14300000000000002,23.0,0
+1.0,97.0,68.0,21.0,0.0,27.2,1.095,22.0,0
+1.0,97.0,64.0,19.0,82.0,18.2,0.299,21.0,0
+6.0,109.0,60.0,27.0,0.0,25.0,0.20600000000000002,27.0,0
+1.0,87.0,78.0,27.0,32.0,34.6,0.10099999999999999,22.0,0
+1.0,107.0,50.0,19.0,0.0,28.3,0.18100000000000002,29.0,0
+5.0,104.0,74.0,0.0,0.0,28.8,0.153,48.0,0
+3.0,84.0,72.0,32.0,0.0,37.2,0.267,28.0,0
+8.0,91.0,82.0,0.0,0.0,35.6,0.5870000000000001,68.0,0
+2.0,90.0,70.0,17.0,0.0,27.3,0.085,22.0,0
+3.0,173.0,82.0,48.0,465.0,38.4,2.137,25.0,1
+0.0,113.0,76.0,0.0,0.0,33.3,0.278,23.0,1
+4.0,111.0,72.0,47.0,207.0,37.1,1.39,56.0,1
+2.0,197.0,70.0,45.0,543.0,30.5,0.158,53.0,1
+8.0,105.0,100.0,36.0,0.0,43.3,0.239,45.0,1
+13.0,104.0,72.0,0.0,0.0,31.2,0.465,38.0,1
+8.0,196.0,76.0,29.0,280.0,37.5,0.605,57.0,1
+1.0,119.0,86.0,39.0,220.0,45.6,0.8079999999999999,29.0,1
+4.0,136.0,70.0,0.0,0.0,31.2,1.182,22.0,1
+5.0,0.0,80.0,32.0,0.0,41.0,0.34600000000000003,37.0,1
+1.0,181.0,64.0,30.0,180.0,34.1,0.32799999999999996,38.0,1
+8.0,151.0,78.0,32.0,210.0,42.9,0.516,36.0,1
+7.0,168.0,88.0,42.0,321.0,38.2,0.787,40.0,1
+4.0,95.0,64.0,0.0,0.0,32.0,0.161,31.0,1
+1.0,133.0,102.0,28.0,140.0,32.8,0.23399999999999999,45.0,1
+4.0,132.0,0.0,0.0,0.0,32.9,0.302,23.0,1
+15.0,136.0,70.0,32.0,110.0,37.1,0.153,43.0,1
+10.0,161.0,68.0,23.0,132.0,25.5,0.326,47.0,1
+2.0,100.0,66.0,20.0,90.0,32.9,0.867,28.0,1
+3.0,130.0,78.0,23.0,79.0,28.4,0.32299999999999995,34.0,1
+2.0,146.0,0.0,0.0,0.0,27.5,0.24,28.0,1
+5.0,130.0,82.0,0.0,0.0,39.1,0.956,37.0,1
+5.0,168.0,64.0,0.0,0.0,32.9,0.135,41.0,1
+5.0,136.0,84.0,41.0,88.0,35.0,0.28600000000000003,35.0,1
+1.0,128.0,88.0,39.0,110.0,36.5,1.057,37.0,1
+5.0,144.0,82.0,26.0,285.0,32.0,0.452,58.0,1
+6.0,134.0,80.0,37.0,370.0,46.2,0.23800000000000002,46.0,1
+0.0,147.0,85.0,54.0,0.0,42.8,0.375,24.0,0
+6.0,123.0,72.0,45.0,230.0,33.6,0.733,34.0,0
+0.0,84.0,64.0,22.0,66.0,35.8,0.545,21.0,0
+5.0,136.0,82.0,0.0,0.0,0.0,0.64,69.0,0
+0.0,134.0,58.0,20.0,291.0,26.4,0.35200000000000004,21.0,0
+9.0,120.0,72.0,22.0,56.0,20.8,0.733,48.0,0
+1.0,99.0,58.0,10.0,0.0,25.4,0.551,21.0,0
+10.0,94.0,72.0,18.0,0.0,23.1,0.595,56.0,0
+1.0,121.0,78.0,39.0,74.0,39.0,0.261,28.0,0
+10.0,179.0,70.0,0.0,0.0,35.1,0.2,37.0,0
+7.0,105.0,0.0,0.0,0.0,0.0,0.305,24.0,0
+1.0,193.0,50.0,16.0,375.0,25.9,0.655,24.0,0
+2.0,114.0,68.0,22.0,0.0,28.7,0.092,25.0,0
+5.0,95.0,72.0,33.0,0.0,37.7,0.37,27.0,0
+4.0,154.0,72.0,29.0,126.0,31.3,0.33799999999999997,37.0,0
+4.0,91.0,70.0,32.0,88.0,33.1,0.446,22.0,0
+1.0,116.0,78.0,29.0,180.0,36.1,0.496,25.0,0
+2.0,175.0,88.0,0.0,0.0,22.9,0.326,22.0,0
+6.0,105.0,80.0,28.0,0.0,32.5,0.878,26.0,0
+11.0,138.0,76.0,0.0,0.0,33.2,0.42,35.0,0
+4.0,151.0,90.0,38.0,0.0,29.7,0.294,36.0,0
+7.0,133.0,88.0,15.0,155.0,32.4,0.262,37.0,0
+1.0,112.0,80.0,45.0,132.0,34.8,0.217,24.0,0
+1.0,79.0,80.0,25.0,37.0,25.4,0.583,22.0,0
+1.0,87.0,68.0,34.0,77.0,37.6,0.401,24.0,0
+1.0,0.0,48.0,20.0,0.0,24.7,0.14,22.0,0
+3.0,123.0,100.0,35.0,240.0,57.3,0.88,22.0,0
+8.0,126.0,74.0,38.0,75.0,25.9,0.162,39.0,0
+0.0,137.0,84.0,27.0,0.0,27.3,0.231,59.0,0
+0.0,127.0,80.0,37.0,210.0,36.3,0.804,23.0,0
+10.0,68.0,106.0,23.0,49.0,35.5,0.285,47.0,0
+0.0,111.0,65.0,0.0,0.0,24.6,0.66,31.0,0
+5.0,106.0,82.0,30.0,0.0,39.5,0.28600000000000003,38.0,0
+1.0,105.0,58.0,0.0,0.0,24.3,0.187,21.0,0
+3.0,102.0,74.0,0.0,0.0,29.5,0.121,32.0,0
+8.0,126.0,88.0,36.0,108.0,38.5,0.349,49.0,0
+1.0,112.0,72.0,30.0,176.0,34.4,0.528,25.0,0
+1.0,80.0,74.0,11.0,60.0,30.0,0.527,22.0,0
+0.0,119.0,64.0,18.0,92.0,34.9,0.725,23.0,0
+2.0,99.0,60.0,17.0,160.0,36.6,0.45299999999999996,21.0,0
+1.0,116.0,70.0,28.0,0.0,27.4,0.204,21.0,0
+2.0,109.0,92.0,0.0,0.0,42.7,0.845,54.0,0
+0.0,95.0,64.0,39.0,105.0,44.6,0.366,22.0,0
+5.0,103.0,108.0,37.0,0.0,39.2,0.305,65.0,0
+7.0,83.0,78.0,26.0,71.0,29.3,0.767,36.0,0
+8.0,74.0,70.0,40.0,49.0,35.3,0.705,39.0,0
+1.0,89.0,24.0,19.0,25.0,27.8,0.5589999999999999,21.0,0
+3.0,142.0,80.0,15.0,0.0,32.4,0.2,63.0,0
+2.0,142.0,82.0,18.0,64.0,24.7,0.7609999999999999,21.0,0
+2.0,129.0,84.0,0.0,0.0,28.0,0.284,27.0,0
+9.0,145.0,80.0,46.0,130.0,37.9,0.637,40.0,1
+0.0,179.0,50.0,36.0,159.0,37.8,0.455,22.0,1
+0.0,151.0,90.0,46.0,0.0,42.1,0.371,21.0,1
+1.0,173.0,74.0,0.0,0.0,36.8,0.08800000000000001,38.0,1
+3.0,139.0,54.0,0.0,0.0,25.6,0.402,22.0,1
+6.0,190.0,92.0,0.0,0.0,35.5,0.278,66.0,1
+11.0,138.0,74.0,26.0,144.0,36.1,0.557,50.0,1
+7.0,152.0,88.0,44.0,0.0,50.0,0.337,36.0,1
+3.0,80.0,82.0,31.0,70.0,34.2,1.2919999999999998,27.0,1
+0.0,95.0,85.0,25.0,36.0,37.4,0.247,24.0,1
+0.0,129.0,110.0,46.0,130.0,67.1,0.319,26.0,1
+4.0,142.0,86.0,0.0,0.0,44.0,0.645,22.0,1
+8.0,108.0,70.0,0.0,0.0,30.5,0.955,33.0,1
+1.0,128.0,48.0,45.0,194.0,40.5,0.613,24.0,1
+3.0,132.0,80.0,0.0,0.0,34.4,0.402,44.0,1
+9.0,145.0,88.0,34.0,165.0,30.3,0.7709999999999999,53.0,1
+7.0,147.0,76.0,0.0,0.0,39.4,0.257,43.0,1
+0.0,124.0,70.0,20.0,0.0,27.4,0.254,36.0,1
+3.0,193.0,70.0,31.0,0.0,34.9,0.24100000000000002,25.0,1
+3.0,163.0,70.0,18.0,105.0,31.6,0.268,28.0,1
+12.0,151.0,70.0,40.0,271.0,41.8,0.742,38.0,1
+1.0,128.0,98.0,41.0,58.0,32.0,1.321,33.0,1
+1.0,181.0,78.0,42.0,293.0,40.0,1.258,22.0,1
+0.0,177.0,60.0,29.0,478.0,34.6,1.072,21.0,1
+1.0,122.0,90.0,51.0,220.0,49.7,0.325,31.0,1
+1.0,189.0,60.0,23.0,846.0,30.1,0.39799999999999996,59.0,1
+11.0,111.0,84.0,40.0,0.0,46.8,0.925,45.0,1
+3.0,120.0,70.0,30.0,135.0,42.9,0.452,30.0,0
+12.0,100.0,84.0,33.0,105.0,30.0,0.488,46.0,0
+1.0,71.0,48.0,18.0,76.0,20.4,0.32299999999999995,22.0,0
+3.0,87.0,60.0,18.0,0.0,21.8,0.444,21.0,0
+2.0,107.0,74.0,30.0,100.0,33.6,0.40399999999999997,23.0,0
+6.0,80.0,80.0,36.0,0.0,39.8,0.177,28.0,0
+1.0,118.0,58.0,36.0,94.0,33.3,0.261,23.0,0
+0.0,73.0,0.0,0.0,0.0,21.1,0.342,25.0,0
+1.0,88.0,78.0,29.0,76.0,32.0,0.365,29.0,0
+3.0,80.0,0.0,0.0,0.0,0.0,0.174,22.0,0
+1.0,107.0,68.0,19.0,0.0,26.5,0.165,24.0,0
+3.0,89.0,74.0,16.0,85.0,30.4,0.551,38.0,0
+5.0,123.0,74.0,40.0,77.0,34.1,0.26899999999999996,28.0,0
+0.0,97.0,64.0,36.0,100.0,36.8,0.6,25.0,0
+3.0,78.0,70.0,0.0,0.0,32.5,0.27,39.0,0
+0.0,107.0,76.0,0.0,0.0,45.3,0.6859999999999999,24.0,0
+6.0,92.0,62.0,32.0,126.0,32.0,0.085,46.0,0
+1.0,101.0,50.0,15.0,36.0,24.2,0.526,26.0,0
+6.0,114.0,88.0,0.0,0.0,27.8,0.247,66.0,0
+0.0,165.0,90.0,33.0,680.0,52.3,0.4270000000000001,23.0,0
+1.0,109.0,56.0,21.0,135.0,25.2,0.833,23.0,0
+2.0,157.0,74.0,35.0,440.0,39.4,0.134,30.0,0
+1.0,124.0,74.0,36.0,0.0,27.8,0.1,30.0,0
+2.0,96.0,68.0,13.0,49.0,21.1,0.647,26.0,0
+3.0,61.0,82.0,28.0,0.0,34.4,0.243,46.0,0
+1.0,130.0,60.0,23.0,170.0,28.6,0.6920000000000001,21.0,0
+4.0,83.0,86.0,19.0,0.0,29.3,0.317,34.0,0
+1.0,114.0,66.0,36.0,200.0,38.1,0.289,21.0,0
+2.0,92.0,52.0,0.0,0.0,30.1,0.141,22.0,0
+2.0,108.0,52.0,26.0,63.0,32.5,0.318,22.0,0
+6.0,93.0,50.0,30.0,64.0,28.7,0.35600000000000004,23.0,0
+2.0,111.0,60.0,0.0,0.0,26.2,0.34299999999999997,23.0,0
+1.0,138.0,82.0,0.0,0.0,40.1,0.23600000000000002,28.0,0
+1.0,88.0,62.0,24.0,44.0,29.9,0.42200000000000004,23.0,0
+3.0,99.0,80.0,11.0,64.0,19.3,0.284,30.0,0
+5.0,86.0,68.0,28.0,71.0,30.2,0.364,24.0,0
+4.0,197.0,70.0,39.0,744.0,36.7,2.329,31.0,0
+2.0,123.0,48.0,32.0,165.0,42.1,0.52,26.0,0
+10.0,122.0,68.0,0.0,0.0,31.2,0.258,41.0,0
+0.0,139.0,62.0,17.0,210.0,22.1,0.207,21.0,0
+1.0,103.0,80.0,11.0,82.0,19.4,0.491,22.0,0
+8.0,100.0,76.0,0.0,0.0,38.7,0.19,42.0,0
+2.0,121.0,70.0,32.0,95.0,39.1,0.8859999999999999,23.0,0
+2.0,146.0,76.0,35.0,194.0,38.2,0.32899999999999996,29.0,0
+0.0,86.0,68.0,32.0,0.0,35.8,0.23800000000000002,25.0,0
+8.0,118.0,72.0,19.0,0.0,23.1,1.476,46.0,0
+4.0,122.0,68.0,0.0,0.0,35.0,0.39399999999999996,29.0,0
+0.0,94.0,70.0,27.0,115.0,43.5,0.34700000000000003,21.0,0
+7.0,159.0,64.0,0.0,0.0,27.4,0.294,40.0,0
+5.0,121.0,72.0,23.0,112.0,26.2,0.245,30.0,0
+5.0,116.0,74.0,29.0,0.0,32.3,0.66,35.0,1
+8.0,179.0,72.0,42.0,130.0,32.7,0.7190000000000001,36.0,1
+5.0,124.0,74.0,0.0,0.0,34.0,0.22,38.0,1
+0.0,128.0,68.0,19.0,180.0,30.5,1.391,25.0,1
+2.0,90.0,68.0,42.0,0.0,38.2,0.503,27.0,1
+3.0,170.0,64.0,37.0,225.0,34.5,0.35600000000000004,30.0,1
+12.0,140.0,82.0,43.0,325.0,39.2,0.528,58.0,1
+0.0,162.0,76.0,56.0,100.0,53.2,0.759,25.0,1
+7.0,106.0,60.0,24.0,0.0,26.5,0.29600000000000004,29.0,1
+6.0,125.0,78.0,31.0,0.0,27.6,0.565,49.0,1
+7.0,195.0,70.0,33.0,145.0,25.1,0.163,55.0,1
+4.0,146.0,92.0,0.0,0.0,31.2,0.539,61.0,1
+0.0,180.0,78.0,63.0,14.0,59.4,2.42,25.0,1
+13.0,158.0,114.0,0.0,0.0,42.3,0.257,44.0,1
+9.0,170.0,74.0,31.0,0.0,44.0,0.40299999999999997,43.0,1
+8.0,109.0,76.0,39.0,114.0,27.9,0.64,31.0,1
+1.0,147.0,94.0,41.0,0.0,49.3,0.358,27.0,1
+3.0,112.0,74.0,30.0,0.0,31.6,0.19699999999999998,25.0,1
+3.0,78.0,50.0,32.0,88.0,31.0,0.248,26.0,1
+9.0,130.0,70.0,0.0,0.0,34.2,0.652,45.0,1
+7.0,194.0,68.0,28.0,0.0,35.9,0.745,41.0,1
+4.0,148.0,60.0,27.0,318.0,30.9,0.15,29.0,1
+1.0,144.0,82.0,46.0,180.0,46.1,0.335,46.0,1
+5.0,166.0,72.0,19.0,175.0,25.8,0.5870000000000001,51.0,1
+2.0,144.0,58.0,33.0,135.0,31.6,0.42200000000000004,25.0,1
+3.0,158.0,76.0,36.0,245.0,31.6,0.851,28.0,1
+0.0,105.0,68.0,22.0,0.0,20.0,0.23600000000000002,22.0,0
+4.0,144.0,58.0,28.0,140.0,29.5,0.287,37.0,0
+1.0,95.0,60.0,18.0,58.0,23.9,0.26,22.0,0
+1.0,100.0,66.0,29.0,196.0,32.0,0.444,42.0,0
+5.0,111.0,72.0,28.0,0.0,23.9,0.40700000000000003,27.0,0
+2.0,108.0,62.0,32.0,56.0,25.2,0.128,21.0,0
+2.0,56.0,56.0,28.0,45.0,24.2,0.332,22.0,0
+1.0,84.0,64.0,23.0,115.0,36.9,0.47100000000000003,28.0,0
+5.0,44.0,62.0,0.0,0.0,25.0,0.5870000000000001,36.0,0
+0.0,135.0,94.0,46.0,145.0,40.6,0.284,26.0,0
+6.0,98.0,58.0,33.0,190.0,34.0,0.43,43.0,0
+2.0,129.0,74.0,26.0,205.0,33.2,0.591,25.0,0
+3.0,103.0,72.0,30.0,152.0,27.6,0.73,27.0,0
+1.0,82.0,64.0,13.0,95.0,21.2,0.415,23.0,0
+0.0,137.0,70.0,38.0,0.0,33.2,0.17,22.0,0
+1.0,140.0,74.0,26.0,180.0,24.1,0.828,23.0,0
+5.0,158.0,70.0,0.0,0.0,29.8,0.207,63.0,0
+4.0,97.0,60.0,23.0,0.0,28.2,0.44299999999999995,22.0,0
+2.0,84.0,0.0,0.0,0.0,0.0,0.304,21.0,0
+1.0,106.0,76.0,0.0,0.0,37.5,0.19699999999999998,26.0,0
+0.0,146.0,82.0,0.0,0.0,40.5,1.781,44.0,0
+1.0,86.0,66.0,52.0,65.0,41.3,0.917,29.0,0
+5.0,78.0,48.0,0.0,0.0,33.7,0.654,25.0,0
+0.0,119.0,66.0,27.0,0.0,38.8,0.259,22.0,0
+1.0,117.0,60.0,23.0,106.0,33.8,0.466,27.0,0
+2.0,90.0,80.0,14.0,55.0,24.4,0.249,24.0,0
+5.0,117.0,92.0,0.0,0.0,34.1,0.337,38.0,0
+5.0,155.0,84.0,44.0,545.0,38.7,0.619,34.0,0
+3.0,180.0,64.0,25.0,70.0,34.0,0.271,26.0,0
+7.0,114.0,76.0,17.0,110.0,23.8,0.466,31.0,0
+5.0,114.0,74.0,0.0,0.0,24.9,0.7440000000000001,57.0,0
+6.0,103.0,72.0,32.0,190.0,37.7,0.324,55.0,0
+4.0,96.0,56.0,17.0,49.0,20.8,0.34,26.0,0
+9.0,57.0,80.0,37.0,0.0,32.8,0.096,41.0,0
+2.0,112.0,78.0,50.0,140.0,39.4,0.175,24.0,0
+2.0,95.0,54.0,14.0,88.0,26.1,0.748,22.0,0
+4.0,114.0,64.0,0.0,0.0,28.9,0.126,24.0,0
+1.0,92.0,62.0,25.0,41.0,19.5,0.48200000000000004,25.0,0
+4.0,90.0,88.0,47.0,54.0,37.7,0.36200000000000004,29.0,0
+0.0,129.0,80.0,0.0,0.0,31.2,0.703,29.0,0
+8.0,107.0,80.0,0.0,0.0,24.6,0.856,34.0,0
+1.0,106.0,70.0,28.0,135.0,34.2,0.142,22.0,0
+1.0,87.0,60.0,37.0,75.0,37.2,0.509,22.0,0
+3.0,191.0,68.0,15.0,130.0,30.9,0.299,34.0,0
+1.0,89.0,66.0,23.0,94.0,28.1,0.16699999999999998,21.0,0
+5.0,96.0,74.0,18.0,67.0,33.6,0.997,43.0,0
+8.0,84.0,74.0,31.0,0.0,38.3,0.457,39.0,0
+9.0,154.0,78.0,30.0,100.0,30.9,0.16399999999999998,45.0,0
+6.0,87.0,80.0,0.0,0.0,23.2,0.084,32.0,0
+0.0,105.0,90.0,0.0,0.0,29.6,0.19699999999999998,46.0,0
+4.0,125.0,70.0,18.0,122.0,28.9,1.1440000000000001,45.0,1
+4.0,156.0,75.0,0.0,0.0,48.3,0.23800000000000002,32.0,1
+0.0,180.0,90.0,26.0,90.0,36.5,0.314,35.0,1
+1.0,163.0,72.0,0.0,0.0,39.0,1.222,33.0,1
+2.0,158.0,90.0,0.0,0.0,31.6,0.805,66.0,1
+5.0,97.0,76.0,27.0,0.0,35.6,0.37799999999999995,52.0,1
+8.0,125.0,96.0,0.0,0.0,0.0,0.23199999999999998,54.0,1
+1.0,95.0,82.0,25.0,180.0,35.0,0.233,43.0,1
+4.0,134.0,72.0,0.0,0.0,23.8,0.27699999999999997,60.0,1
+4.0,144.0,82.0,32.0,0.0,38.5,0.5539999999999999,37.0,1
+4.0,173.0,70.0,14.0,168.0,29.7,0.361,33.0,1
+0.0,105.0,84.0,0.0,0.0,27.9,0.741,62.0,1
+10.0,129.0,62.0,36.0,0.0,41.2,0.441,38.0,1
+1.0,199.0,76.0,43.0,0.0,42.9,1.3940000000000001,22.0,1
+0.0,109.0,88.0,30.0,0.0,32.5,0.855,38.0,1
+7.0,196.0,90.0,0.0,0.0,39.8,0.451,41.0,1
+7.0,159.0,66.0,0.0,0.0,30.4,0.38299999999999995,36.0,1
+1.0,115.0,70.0,30.0,96.0,34.6,0.529,32.0,1
+1.0,172.0,68.0,49.0,579.0,42.4,0.7020000000000001,28.0,1
+11.0,120.0,80.0,37.0,150.0,42.3,0.785,48.0,1
+2.0,134.0,70.0,0.0,0.0,28.9,0.542,23.0,1
+6.0,148.0,72.0,35.0,0.0,33.6,0.627,50.0,1
+1.0,126.0,60.0,0.0,0.0,30.1,0.349,47.0,1
+7.0,187.0,68.0,39.0,304.0,37.7,0.254,41.0,1
+9.0,119.0,80.0,35.0,0.0,29.0,0.263,29.0,1
+6.0,115.0,60.0,39.0,0.0,33.7,0.245,40.0,1
+7.0,136.0,74.0,26.0,135.0,26.0,0.647,51.0,0
+0.0,120.0,74.0,18.0,63.0,30.5,0.285,26.0,0
+5.0,116.0,74.0,0.0,0.0,25.6,0.201,30.0,0
+4.0,128.0,70.0,0.0,0.0,34.3,0.303,24.0,0
+6.0,96.0,0.0,0.0,0.0,23.7,0.19,28.0,0
+2.0,127.0,46.0,21.0,335.0,34.4,0.17600000000000002,22.0,0
+4.0,76.0,62.0,0.0,0.0,34.0,0.391,25.0,0
+3.0,96.0,56.0,34.0,115.0,24.7,0.9440000000000001,39.0,0
+6.0,137.0,61.0,0.0,0.0,24.2,0.151,55.0,0
+3.0,111.0,58.0,31.0,44.0,29.5,0.43,22.0,0
+2.0,81.0,60.0,22.0,0.0,27.7,0.29,25.0,0
+1.0,77.0,56.0,30.0,56.0,33.3,1.251,24.0,0
+3.0,111.0,62.0,0.0,0.0,22.6,0.142,21.0,0
+6.0,166.0,74.0,0.0,0.0,26.6,0.304,66.0,0
+1.0,143.0,86.0,30.0,330.0,30.1,0.892,23.0,0
+0.0,107.0,60.0,25.0,0.0,26.4,0.133,23.0,0
+2.0,99.0,70.0,16.0,44.0,20.4,0.235,27.0,0
+2.0,100.0,68.0,25.0,71.0,38.5,0.324,26.0,0
+2.0,120.0,54.0,0.0,0.0,26.8,0.455,27.0,0
+1.0,111.0,94.0,0.0,0.0,32.8,0.265,45.0,0
+6.0,108.0,44.0,20.0,130.0,24.0,0.813,35.0,0
+3.0,113.0,50.0,10.0,85.0,29.5,0.626,25.0,0
+4.0,141.0,74.0,0.0,0.0,27.6,0.244,40.0,0
+2.0,99.0,0.0,0.0,0.0,22.2,0.10800000000000001,23.0,0
+8.0,85.0,55.0,20.0,0.0,24.4,0.136,42.0,0
+1.0,89.0,76.0,34.0,37.0,31.2,0.192,23.0,0
+1.0,109.0,58.0,18.0,116.0,28.5,0.21899999999999997,22.0,0
+1.0,93.0,70.0,31.0,0.0,30.4,0.315,23.0,0
+12.0,140.0,85.0,33.0,0.0,37.4,0.244,41.0,0
+1.0,80.0,55.0,0.0,0.0,19.1,0.258,21.0,0
+4.0,99.0,72.0,17.0,0.0,25.6,0.294,28.0,0
+1.0,109.0,60.0,8.0,182.0,25.4,0.9470000000000001,21.0,0
+3.0,113.0,44.0,13.0,0.0,22.4,0.14,22.0,0
+0.0,95.0,80.0,45.0,92.0,36.5,0.33,26.0,0
+4.0,123.0,80.0,15.0,176.0,32.0,0.44299999999999995,34.0,0
+2.0,112.0,75.0,32.0,0.0,35.7,0.14800000000000002,21.0,0
+2.0,92.0,62.0,28.0,0.0,31.6,0.13,24.0,0
+1.0,144.0,82.0,40.0,0.0,41.3,0.607,28.0,0
+6.0,91.0,0.0,0.0,0.0,29.8,0.501,31.0,0
+0.0,124.0,56.0,13.0,105.0,21.8,0.452,21.0,0
+5.0,132.0,80.0,0.0,0.0,26.8,0.18600000000000005,69.0,0
+9.0,91.0,68.0,0.0,0.0,24.2,0.2,58.0,0
+3.0,128.0,78.0,0.0,0.0,21.1,0.268,55.0,0
+0.0,108.0,68.0,20.0,0.0,27.3,0.787,32.0,0
+2.0,112.0,68.0,22.0,94.0,34.1,0.315,26.0,0
+1.0,81.0,74.0,41.0,57.0,46.3,1.0959999999999999,32.0,0
+4.0,94.0,65.0,22.0,0.0,24.7,0.14800000000000002,21.0,0
+3.0,158.0,64.0,13.0,387.0,31.2,0.295,24.0,0
+0.0,57.0,60.0,0.0,0.0,21.7,0.735,67.0,0
+4.0,95.0,60.0,32.0,0.0,35.4,0.284,28.0,0
diff --git a/pages/RFxp/xrf/__init__.py b/pages/RFxp/xrf/__init__.py
new file mode 100644
index 0000000..9f52257
--- /dev/null
+++ b/pages/RFxp/xrf/__init__.py
@@ -0,0 +1,3 @@
+#from .tree import *
+from .rndmforest import *
+from .xforest import *
\ No newline at end of file
diff --git a/pages/RFxp/xrf/rndmforest.py b/pages/RFxp/xrf/rndmforest.py
new file mode 100644
index 0000000..62dd80f
--- /dev/null
+++ b/pages/RFxp/xrf/rndmforest.py
@@ -0,0 +1,137 @@
+from sklearn.ensemble._voting import VotingClassifier
+from sklearn.ensemble import RandomForestClassifier
+from sklearn.preprocessing import OneHotEncoder, LabelEncoder
+from sklearn.model_selection import train_test_split
+from sklearn.metrics import accuracy_score
+import numpy as np
+import sys
+import os
+import resource
+
+import collections
+from itertools import combinations
+from six.moves import range
+import six
+import math
+
+
+
+#
+#==============================================================================
+class VotingRF(VotingClassifier):
+    """
+        Majority rule classifier
+    """
+    
+    def fit(self, X, y, sample_weight=None):
+        self.estimators_ = []
+        for _, est in self.estimators:
+            self.estimators_.append(est)
+            
+        self.le_ = LabelEncoder().fit(y)
+        self.classes_ = self.le_.classes_   
+        
+            
+    def predict(self, X):
+        """Predict class labels for X.
+        Parameters
+        ----------
+        X : {array-like, sparse matrix} of shape (n_samples, n_features)
+            The input samples.
+        Returns
+        -------
+        maj : array-like of shape (n_samples,)
+            Predicted class labels.
+        """
+        #check_is_fitted(self)
+        
+        # 'hard' voting
+        predictions = self._predict(X)
+        predictions =  np.asarray(predictions, np.int64) #NEED TO BE CHECKED
+        maj = np.apply_along_axis(
+            lambda x: np.argmax(
+                np.bincount(x, weights=self._weights_not_none)),
+            axis=1, arr=predictions)
+   
+        maj = self.le_.inverse_transform(maj)
+
+        return maj
+    
+        
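+# A quick sanity check of the hard-voting rule above (toy, hand-picked votes;
+# not part of the RFxp pipeline): with unit weights, bincount/argmax recovers
+# the majority class per sample exactly as VotingRF.predict does.
+#
+#     import numpy as np
+#     votes = np.array([[1, 0, 1]])                      # 1 sample, 3 trees
+#     maj = np.apply_along_axis(lambda x: np.argmax(np.bincount(x)),
+#                               axis=1, arr=votes)
+#     assert maj.tolist() == [1]                         # class 1 wins 2-to-1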
+#
+#==============================================================================
+class RF2001(object):
+    """
+        The main class to train Random Forest Classifier (RFC).
+    """
+
+    def __init__(self, **options):
+        """
+            Constructor.
+        """    
+        self.forest = None
+        self.voting = None
+              
+        param_dist = {'n_estimators':options['n_trees'],
+                      'max_depth':options['depth'],
+                      'criterion':'entropy',
+                      'random_state':324089}
+        
+        self.forest = RandomForestClassifier(**param_dist)
+        
+    def fit(self, X_train, y_train):
+        """
+            Build a Breiman'01 random forest from an already encoded
+            training set (similar to the train(dataset) function below).
+        """
+        self.forest.fit(X_train,y_train)
+        rtrees = [ ('dt', dt) for i, dt in enumerate(self.forest.estimators_)]
+        self.voting = VotingRF(estimators=rtrees)
+        self.voting.fit(X_train,y_train)
+        
+        return self
+        
+        
+    def train(self, dataset, verb=0):
+        """
+            Train a random forest.
+        """
+        
+        X_train, X_test, y_train, y_test = dataset.train_test_split()
+            
+        X_train = dataset.transform(X_train)
+        X_test = dataset.transform(X_test)
+        
+        print("Build a random forest.")
+        self.forest.fit(X_train,y_train)
+        
+        rtrees = [ ('dt', dt) for i, dt in enumerate(self.forest.estimators_)]
+        self.voting = VotingRF(estimators=rtrees)
+        self.voting.fit(X_train,y_train)
+        
+        train_acc = accuracy_score(self.predict(X_train), y_train)
+        test_acc = accuracy_score(self.predict(X_test), y_test)
+
+        if verb > 1:
+            self.print_acc_vote(X_train, X_test, y_train, y_test)
+            self.print_acc_prob(X_train, X_test, y_train, y_test)
+        
+        return train_acc, test_acc
+    
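+    # Minimal training sketch (hypothetical hyper-parameters; `dataset` is
+    # assumed to be a Dataset instance as defined in xrf/xforest.py):
+    #
+    #     cls = RF2001(n_trees=50, depth=3)
+    #     train_acc, test_acc = cls.train(dataset)
+    #     y_pred = cls.predict(dataset.transform(dataset.X[:5]))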
+    def predict(self, X):
+        return self.voting.predict(X)
+    
+    def predict_prob(self, X):
+        # return class probability estimates from the underlying sklearn forest
+        return self.forest.predict_proba(X)
+        
+    def estimators(self):
+        assert(self.forest.estimators_ is not None)
+        return self.forest.estimators_
+        
+    def n_estimators(self):
+        return self.forest.n_estimators
+    
+    def print_accuracy(self, X_test, y_test):  
+        test_acc = accuracy_score(self.predict(X_test), y_test)
+        print("c Model accuracy: {0:.2f}".format(100. * test_acc))
+        #print("----------------------")  
\ No newline at end of file
diff --git a/pages/RFxp/xrf/tree.py b/pages/RFxp/xrf/tree.py
new file mode 100644
index 0000000..5fddabd
--- /dev/null
+++ b/pages/RFxp/xrf/tree.py
@@ -0,0 +1,174 @@
+#
+#==============================================================================
+from anytree import Node, RenderTree,AsciiStyle
+import json
+import numpy as np
+import math
+import os
+
+
+#
+#==============================================================================
+class dt_node(Node):
+    def __init__(self, id, parent = None):
+        Node.__init__(self, id, parent)
+        self.id = id  # The node value
+        self.name = None
+        self.left_node_id = -1   #  Left child
+        self.right_node_id = -1  # Right child
+
+        self.feature = -1
+        self.threshold = None
+        self.values = -1 
+        #iai
+        #self.split = None
+
+    def __str__(self):
+        pref = ' ' * self.depth
+        if (len(self.children) == 0):
+            return (pref+ "leaf: {}  {}".format(self.id, self.values))
+        else:
+            if(self.name is None):
+                return (pref+ "{} f{}<{}".format(self.id, self.feature, self.threshold))
+            else:
+                return (pref+ "{} \"{}\"<{}".format(self.id, self.name, self.threshold))
+
+
+#==============================================================================
+def build_tree(tree_, feature_names = None):
+    ##  
+    feature = tree_.feature
+    threshold = tree_.threshold
+    values = tree_.value
+    n_nodes = tree_.node_count
+    children_left = tree_.children_left
+    children_right = tree_.children_right
+    node_depth = np.zeros(shape=n_nodes, dtype=np.int64)
+    is_leaf = np.zeros(shape=n_nodes, dtype=bool)
+    stack = [(0, -1)]  # seed is the root node id and its parent depth
+    while len(stack) > 0:
+        node_id, parent_depth = stack.pop()
+        node_depth[node_id] = parent_depth + 1
+    
+        # If we have a test node
+        if (children_left[node_id] != children_right[node_id]):
+            stack.append((children_left[node_id], parent_depth + 1))
+            stack.append((children_right[node_id], parent_depth + 1))
+        else:
+            is_leaf[node_id] = True    
+    ##        
+    
+    m = tree_.node_count  
+    assert (m > 0), "Empty tree"
+    
+    def extract_data(idx, root = None, feature_names = None):
+        i = idx
+        assert (i < m), "Error index node"
+        if (root is None):
+            node = dt_node(i)
+        else:
+            node = dt_node(i, parent = root)
+        #node.cover = json_node["cover"]
+        if is_leaf[i]:
+            node.values = np.argmax(values[i])
+            #if(inverse):
+            #    node.values = -node.values
+        else:
+            node.feature = feature[i]
+            if (feature_names is not None):
+                node.name = feature_names[feature[i]]
+            node.threshold = threshold[i]
+            node.left_node_id = children_left[i]
+            node.right_node_id = children_right[i]
+            extract_data(node.left_node_id, node, feature_names) #feat < threshold ( < 0.5 False)
+            extract_data(node.right_node_id, node, feature_names) #feat >= threshold ( >= 0.5 True)            
+
+        return node
+    
+    root = extract_data(0, None, feature_names)
+    
+    return root
+
+
+#==============================================================================
+def walk_tree(node):
+    if (len(node.children) == 0):
+        # leaf
+        print(node)
+    else:
+        print(node)
+        walk_tree(node.children[0])
+        walk_tree(node.children[1])
+
+def count_nodes(root):
+    def count(node):
+        if len(node.children):
+            return sum([1+count(n) for n in node.children])
+        else:
+            return 0
+    m = count(root) + 1
+    return m
+
+#
+#==============================================================================
+def predict_tree(node, sample):
+    if (len(node.children) == 0):
+        # leaf
+        return node.values
+    else:
+        feature_branch = node.feature
+        sample_value = sample[feature_branch]
+        assert(sample_value is not None)
+        if(sample_value < node.threshold):
+            return predict_tree(node.children[0], sample)
+        else:
+            return predict_tree(node.children[1], sample)
+
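+# Conversion sketch for a single sklearn tree (assumptions: `dt` is a fitted
+# sklearn.tree.DecisionTreeClassifier and `x` is one feature vector):
+#
+#     root = build_tree(dt.tree_, feature_names=["f0", "f1", "f2"])
+#     walk_tree(root)                    # print the extracted nodes
+#     cls_idx = predict_tree(root, x)    # '<' goes left, '>=' goes right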
+            
+#
+#==============================================================================
+class Forest:
+    """ An ensemble of decision trees.
+
+    This object provides a common interface to many different types of models.
+    """
+    def __init__(self, rf, feature_names = None):
+        #self.rf = rf
+        self.trees = [ build_tree(dt.tree_, feature_names) for dt in rf.estimators()]
+        self.sz = sum([dt.tree_.node_count for dt in rf.estimators()])
+        self.md = max([dt.tree_.max_depth for dt in rf.estimators()])
+        ####
+        nb_nodes = [dt.tree_.node_count for dt in rf.estimators()]
+        print("min: {0} | max: {1}".format(min(nb_nodes), max(nb_nodes)))
+        assert([dt.tree_.node_count for dt in rf.estimators()] == [count_nodes(dt) for dt in self.trees])
+        #self.print_trees()
+        
+    def print_trees(self):
+        for i,t in enumerate(self.trees):
+            print("tree number: ", i)
+            walk_tree(t)
+
+    def predict_inst(self, inst):
+        scores = [predict_tree(dt, inst) for dt in self.trees]
+        scores = np.asarray(scores)
+        maj = np.argmax(np.bincount(scores))
+        return maj
+        
+        
+    def predict(self, samples):       
+        predictions = []
+        print("#Trees: ", len(self.trees))
+        for sample in np.asarray(samples):
+            scores = []
+            for i,t in enumerate(self.trees):
+                s = predict_tree(t, sample)
+                scores.append((s))
+            scores = np.asarray(scores)
+            predictions.append(scores)
+        predictions = np.asarray(predictions)    
+        #print(predictions)    
+        #np.bincount(x, weights=self._weights_not_none)
+        maj = np.apply_along_axis(lambda x: np.argmax(np.bincount(x)), axis=1, arr=predictions)
+            
+        return maj   
+
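+# Ensemble sketch (assumptions: `rf` is a trained RF2001 instance with 8
+# features and `X_test` an already transformed feature matrix):
+#
+#     forest = Forest(rf, feature_names=["f{}".format(i) for i in range(8)])
+#     y_hat = forest.predict(X_test)          # per-sample majority over trees
+#     one = forest.predict_inst(X_test[0])    # single-sample variant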
diff --git a/pages/RFxp/xrf/xforest.py b/pages/RFxp/xrf/xforest.py
new file mode 100644
index 0000000..b2bc978
--- /dev/null
+++ b/pages/RFxp/xrf/xforest.py
@@ -0,0 +1,874 @@
+
+#from sklearn.ensemble._voting import VotingClassifier
+#from sklearn.ensemble import RandomForestClassifier
+from sklearn.preprocessing import OneHotEncoder, LabelEncoder
+from sklearn.model_selection import train_test_split
+#from sklearn.metrics import accuracy_score
+import numpy as np
+import sys
+import os
+import resource
+
+import collections
+from itertools import combinations
+from six.moves import range
+import six
+import math
+
+from data import Data
+from .rndmforest import RF2001, VotingRF
+from .tree import Forest, predict_tree
+
+#from .encode import SATEncoder
+from pysat.formula import CNF, WCNF, IDPool
+from pysat.solvers import Solver
+from pysat.card import CardEnc, EncType
+from pysat.examples.lbx import LBX
+from pysat.examples.mcsls import MCSls
+from pysat.examples.rc2 import RC2
+
+
+    
+
+#
+#==============================================================================
+class Dataset(Data):
+    """
+        Class for representing a dataset (a table of transactions/samples).
+    """
+    def __init__(self, filename=None, fpointer=None, mapfile=None,
+            separator=' ', use_categorical = False):
+        super().__init__(filename, fpointer, mapfile, separator, use_categorical)
+        
+        # split data into X and y
+        self.feature_names = self.names[:-1]
+        self.nb_features = len(self.feature_names)
+        self.use_categorical = use_categorical
+        
+        samples = np.asarray(self.samps)
+        if not all(c.isnumeric() for c in samples[:, -1]):            
+            le = LabelEncoder()
+            le.fit(samples[:, -1])
+            samples[:, -1]= le.transform(samples[:, -1])
+            self.class_names = le.classes_ 
+            print(le.classes_)
+            print(samples[1:4, :])
+        
+        samples = np.asarray(samples, dtype=np.float32)
+        self.X = samples[:, 0: self.nb_features]
+        self.y = samples[:, self.nb_features]
+        self.num_class = len(set(self.y))
+        self.target_name = list(range(self.num_class))          
+        
+        print("c nof features: {0}".format(self.nb_features))
+        print("c nof classes: {0}".format(self.num_class))
+        print("c nof samples: {0}".format(len(self.samps)))
+        
+        # check if we have info about categorical features
+        if (self.use_categorical):
+            self.target_name = self.class_names            
+            
+            self.binarizer = {}
+            for i in self.categorical_features:
+                self.binarizer.update({i: OneHotEncoder(categories='auto', sparse=False)})
+                self.binarizer[i].fit(self.X[:,[i]])
+        else:
+            self.categorical_features = []
+            self.categorical_names = []            
+            self.binarizer = []           
+        #feat map
+        self.mapping_features()        
+        
+        
+            
+    def train_test_split(self, test_size=0.2, seed=0):
+        return train_test_split(self.X, self.y, test_size=test_size, random_state=seed)
+           
+
+    def transform(self, x):
+        if(len(x) == 0):
+            return x
+        if (len(x.shape) == 1):
+            x = np.expand_dims(x, axis=0)
+        if (self.use_categorical):
+            assert(self.binarizer != [])
+            tx = []
+            for i in range(self.nb_features):
+                #self.binarizer[i].drop = None
+                if (i in self.categorical_features):
+                    self.binarizer[i].drop = None
+                    tx_aux = self.binarizer[i].transform(x[:,[i]])
+                    tx_aux = np.vstack(tx_aux)
+                    tx.append(tx_aux)
+                else:
+                    tx.append(x[:,[i]])
+            tx = np.hstack(tx)
+            return tx
+        else:
+            return x
+
+    def transform_inverse(self, x):
+        if(len(x) == 0):
+            return x
+        if (len(x.shape) == 1):
+            x = np.expand_dims(x, axis=0)
+        if (self.use_categorical):
+            assert(self.binarizer != [])
+            inverse_x = []
+            for i, xi in enumerate(x):
+                inverse_xi = np.zeros(self.nb_features)
+                for f in range(self.nb_features):
+                    if f in self.categorical_features:
+                        nb_values = len(self.categorical_names[f])
+                        v = xi[:nb_values]
+                        v = np.expand_dims(v, axis=0)
+                        iv = self.binarizer[f].inverse_transform(v)
+                        inverse_xi[f] =iv
+                        xi = xi[nb_values:]
+
+                    else:
+                        inverse_xi[f] = xi[0]
+                        xi = xi[1:]
+                inverse_x.append(inverse_xi)
+            return inverse_x
+        else:
+            return x
+
+    def transform_inverse_by_index(self, idx):
+        if (idx in self.extended_feature_names):
+            return self.extended_feature_names[idx]
+        else:
+            print("Warning there is no feature {} in the internal mapping".format(idx))
+            return None
+
+    def transform_by_value(self, feat_value_pair):
+        if (feat_value_pair in self.extended_feature_names.values()):
+            keys = (list(self.extended_feature_names.keys())[list( self.extended_feature_names.values()).index(feat_value_pair)])
+            return keys
+        else:
+            print("Warning there is no value {} in the internal mapping".format(feat_value_pair))
+            return None
+
+    def mapping_features(self):
+        self.extended_feature_names = {}
+        self.extended_feature_names_as_array_strings = []
+        counter = 0
+        if (self.use_categorical):
+            for i in range(self.nb_features):
+                if (i in self.categorical_features):
+                    for j, _ in enumerate(self.binarizer[i].categories_[0]):
+                        self.extended_feature_names.update({counter:  (self.feature_names[i], j)})
+                        self.extended_feature_names_as_array_strings.append("f{}_{}".format(i,j)) # str(self.feature_names[i]), j))
+                        counter = counter + 1
+                else:
+                    self.extended_feature_names.update({counter: (self.feature_names[i], None)})
+                    self.extended_feature_names_as_array_strings.append("f{}".format(i)) #(self.feature_names[i])
+                    counter = counter + 1
+        else:
+            for i in range(self.nb_features):
+                self.extended_feature_names.update({counter: (self.feature_names[i], None)})
+                self.extended_feature_names_as_array_strings.append("f{}".format(i))#(self.feature_names[i])
+                counter = counter + 1
+
+    def readable_sample(self, x):
+        readable_x = []
+        for i, v in enumerate(x):
+            if (i in self.categorical_features):
+                readable_x.append(self.categorical_names[i][int(v)])
+            else:
+                readable_x.append(v)
+        return np.asarray(readable_x)
+
+    
+    def test_encoding_transformes(self, X_train):
+        # test encoding
+
+        X = X_train[[0],:]
+
+        print("Sample of length", len(X[0])," : ", X)
+        enc_X = self.transform(X)
+        print("Encoded sample of length", len(enc_X[0])," : ", enc_X)
+        inv_X = self.transform_inverse(enc_X)
+        print("Back to sample", inv_X)
+        print("Readable sample", self.readable_sample(inv_X[0]))
+        assert((inv_X == X).all())
+
+        '''
+        for i in range(len(self.extended_feature_names)):
+            print(i, self.transform_inverse_by_index(i))
+        for key, value in self.extended_feature_names.items():
+            print(value, self.transform_by_value(value))   
+        '''       
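+# Loading sketch (file name and separator are assumptions; the data rows added
+# by this patch are comma-separated, hence separator=','):
+#
+#     data = Dataset(filename="pima.csv", separator=',')
+#     X_train, X_test, y_train, y_test = data.train_test_split(test_size=0.2)
+#     X_train = data.transform(X_train)       # one-hot only if use_categorical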
+#
+#==============================================================================
+class XRF(object):
+    """
+        Class to encode and explain Random Forest classifiers.
+    """
+    
+    def __init__(self, model, feature_names, class_names, verb=0):
+        self.cls = model
+        #self.data = dataset
+        self.verbose = verb
+        self.feature_names = feature_names
+        self.class_names = class_names
+        self.fnames = [f'f{i}' for i in range(len(feature_names))]
+        self.f = Forest(model, self.fnames)
+        
+        if self.verbose > 2:
+            self.f.print_trees()
+        if self.verbose:    
+            print("c RF sz:", self.f.sz)
+            print('c max-depth:', self.f.md)
+            print('c nof DTs:', len(self.f.trees))
+        
+    def __del__(self):
+        if 'enc' in dir(self):
+            del self.enc
+        if 'x' in dir(self):
+            if self.x.slv is not None:
+                self.x.slv.delete()
+            del self.x
+        del self.f
+        self.f = None
+        del self.cls
+        self.cls = None
+        
+    def encode(self, inst):
+        """
+            Encode a tree ensemble trained previously.
+        """
+        if 'f' not in dir(self):
+            self.f = Forest(self.cls, self.fnames)
+            #self.f.print_tree()
+            
+        time = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
+                resource.getrusage(resource.RUSAGE_SELF).ru_utime            
+            
+        self.enc = SATEncoder(self.f, self.feature_names, len(self.class_names), self.fnames)
+        
+        #inst = self.data.transform(np.array(inst))[0]
+        formula, _, _, _ = self.enc.encode(np.array(inst))
+        
+        time = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
+                resource.getrusage(resource.RUSAGE_SELF).ru_utime - time        
+        
+        if self.verbose:
+            print('c nof vars:', formula.nv) # number of variables 
+            print('c nof clauses:', len(formula.clauses)) # number of clauses    
+            print('c encoding time: {0:.3f}'.format(time))            
+        
+    def explain(self, inst, xtype='abd'):
+        """
+            Explain a prediction made for a given sample with a previously
+            trained RF.
+        """
+        
+        time = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
+                resource.getrusage(resource.RUSAGE_SELF).ru_utime          
+        
+        if 'enc' not in dir(self):
+            self.encode(inst)
+        
+        #inpvals = self.data.readable_sample(inst)
+        inpvals = np.asarray(inst)
+        preamble = []
+        for f, v in zip(self.feature_names, inpvals):
+            if f not in str(v):
+                preamble.append('{0} = {1}'.format(f, v))
+            else:
+                preamble.append(v)
+                    
+        inps = self.fnames # input (feature value) variables
+        #print("inps: {0}".format(inps))
+            
+        self.x = SATExplainer(self.enc, inps, preamble, self.class_names, verb=self.verbose)
+        #inst = self.data.transform(np.array(inst))[0]
+        expl = self.x.explain(np.array(inst), xtype)
+
+        time = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
+                resource.getrusage(resource.RUSAGE_SELF).ru_utime - time 
+        
+        if self.verbose:
+            print("c Total time: {0:.3f}".format(time))
+            
+        return expl
+    
+    def enumerate(self, inst, xtype='con', smallest=True):
+        """
+            Enumerate all explanations (XPs) of the requested type.
+        """
+        if 'enc' not in dir(self):
+            self.encode(inst)
+            
+        if 'x' not in dir(self):
+            inpvals = np.asarray(inst)
+            preamble = []
+            for f, v in zip(self.feature_names, inpvals):
+                if f not in str(v):
+                    preamble.append('{0} = {1}'.format(f, v))
+                else:
+                    preamble.append(v)
+                    
+            inps = self.fnames
+            self.x = SATExplainer(self.enc, inps, preamble, self.class_names)
+            
+        for expl in self.x.enumerate(np.array(inst), xtype, smallest):
+            yield expl
+        
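+# End-to-end sketch (assumptions: `cls` is a trained RF2001, `data` the Dataset
+# it was trained on, `inst` one raw sample, and the class names are passed
+# explicitly since Dataset only stores them for non-numeric labels):
+#
+#     xrf = XRF(cls, data.feature_names, ['0', '1'], verb=1)
+#     axp = xrf.explain(inst, xtype='abd')        # abductive explanation
+#     for cxp in xrf.enumerate(inst, xtype='con'):
+#         print(cxp)                              # contrastive explanations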
+#
+#==============================================================================
+class SATEncoder(object):
+    """
+        Encoder of Random Forest classifier into SAT.
+    """
+    
+    def __init__(self, forest, feats, nof_classes, extended_feature_names,  from_file=None):
+        self.forest = forest
+        #self.feats = {f: i for i, f in enumerate(feats)}
+        self.num_class = nof_classes
+        self.vpool = IDPool()
+        self.extended_feature_names = extended_feature_names
+        
+        #encoding formula
+        self.cnf = None
+
+        # for interval-based encoding
+        self.intvs, self.imaps, self.ivars, self.thvars = None, None, None, None
+       
+        
+    def newVar(self, name):
+        """
+            If a variable named 'name' already exists then
+            return its id; otherwise create a new var
+        """
+        if name in self.vpool.obj2id: # var has already been created
+            return self.vpool.obj2id[name]
+        var = self.vpool.id('{0}'.format(name))
+        return var
+    
+    def nameVar(self, vid):
+        """
+            input a var id and return a var name
+        """
+        return self.vpool.obj(abs(vid))
+    
+    def printLits(self, lits):
+        print(["{0}{1}".format("-" if p<0 else "",self.vpool.obj(abs(p))) for p in lits])
+    
+    def traverse(self, tree, k, clause):
+        """
+            Traverse a tree and encode each node.
+        """
+
+        if tree.children:
+            f = tree.name
+            v = tree.threshold
+            pos = neg = []
+            if f in self.intvs:
+                d = self.imaps[f][v]
+                pos, neg = self.thvars[f][d], -self.thvars[f][d]
+            else:
+                var = self.newVar(tree.name)
+                pos, neg = var, -var
+                #print("{0} => {1}".format(tree.name, var))
+                
+            assert (pos and neg)
+            self.traverse(tree.children[0], k, clause + [-neg])
+            self.traverse(tree.children[1], k, clause + [-pos])            
+        else:  # leaf node
+            cvar = self.newVar('class{0}_tr{1}'.format(tree.values,k))
+            self.cnf.append(clause + [cvar])
+            #self.printLits(clause + [cvar])
+
+    def compute_intervals(self):
+        """
+            Traverse all trees in the ensemble and extract intervals for each
+            feature.
+
+            At this point, the method only works for numerical datasets!
+        """
+
+        def traverse_intervals(tree):
+            """
+                Auxiliary function. Recursive tree traversal.
+            """
+
+            if tree.children:
+                f = tree.name
+                v = tree.threshold
+                if f in self.intvs:
+                    self.intvs[f].add(v)
+
+                traverse_intervals(tree.children[0])
+                traverse_intervals(tree.children[1])
+
+        # initializing the intervals
+        self.intvs = {'{0}'.format(f): set([]) for f in self.extended_feature_names if '_' not in f}
+
+        for tree in self.forest.trees:
+            traverse_intervals(tree)
+                
+        # OK, we got all intervals; let's sort the values
+        self.intvs = {f: sorted(self.intvs[f]) + ([math.inf] if len(self.intvs[f]) else []) for f in six.iterkeys(self.intvs)}
+
+        self.imaps, self.ivars = {}, {}
+        self.thvars = {}
+        for feat, intvs in six.iteritems(self.intvs):
+            self.imaps[feat] = {}
+            self.ivars[feat] = []
+            self.thvars[feat] = []
+            for i, ub in enumerate(intvs):
+                self.imaps[feat][ub] = i
+
+                ivar = self.newVar('{0}_intv{1}'.format(feat, i))
+                self.ivars[feat].append(ivar)
+                #print('{0}_intv{1}'.format(feat, i))
+                
+                if ub != math.inf:
+                    #assert(i < len(intvs)-1)
+                    thvar = self.newVar('{0}_th{1}'.format(feat, i))
+                    self.thvars[feat].append(thvar)
+                    #print('{0}_th{1}'.format(feat, i))
+
+
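+    # Worked example (hypothetical thresholds): if feature 'f2' is split on
+    # 64.0 and 80.0 somewhere in the forest, then
+    #     self.intvs['f2']  == [64.0, 80.0, inf]
+    #     self.ivars['f2']  -> f2_intv0 (f2 < 64), f2_intv1 (64 <= f2 < 80),
+    #                          f2_intv2 (f2 >= 80); exactly one holds per sample
+    #     self.thvars['f2'] -> one ordering variable per finite threshold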
+
+    def encode(self, sample):
+        """
+            Encode the forest and the prediction for the given sample into CNF.
+        """
+        
+        ###print('Encode RF into SAT ...')
+
+        self.cnf = CNF()
+        # getting a tree ensemble
+        #self.forest = Forest(self.model, self.extended_feature_names)
+        num_tree = len(self.forest.trees)
+        self.forest.predict_inst(sample)
+
+        #introducing class variables
+        #cvars = [self.newVar('class{0}'.format(i)) for i in range(self.num_class)]
+        
+        # define Tautology var
+        vtaut = self.newVar('Tautology')
+        self.cnf.append([vtaut])
+            
+        # introducing class-tree variables
+        ctvars = [[] for t in range(num_tree)]
+        for k in range(num_tree):
+            for j in range(self.num_class):
+                var = self.newVar('class{0}_tr{1}'.format(j,k))
+                ctvars[k].append(var)       
+
+        # traverse all trees and extract all possible intervals
+        # for each feature
+        ###print("compute intervarls ...")
+        self.compute_intervals()
+        
+        #print(self.intvs)
+        #print([len(self.intvs[f]) for f in self.intvs])
+        #print(self.imaps) 
+        #print(self.ivars)
+        #print(self.thvars)
+        #print(ctvars)
+        
+        
+        ##print("encode trees ...")
+        # traversing and encoding each tree
+        for k, tree in enumerate(self.forest.trees):
+            #print("Encode tree#{0}".format(k))
+            # encoding the tree     
+            self.traverse(tree, k, [])
+            # exactly one class var is true
+            #self.printLits(ctvars[k])
+            card = CardEnc.atmost(lits=ctvars[k], vpool=self.vpool,encoding=EncType.cardnetwrk) 
+            self.cnf.extend(card.clauses)
+        
+        
+            
+        # calculate the majority class   
+        self.cmaj = self.forest.predict_inst(sample)       
+        
+        ##print("encode majority class ...")                
+        # cardinality constraints requiring that some class other than cmaj can collect a winning number of tree votes
+        
+        if(self.num_class == 2):
+            rhs = math.floor(num_tree / 2) + 1
+            if(self.cmaj==1 and not num_tree%2):
+                rhs = math.floor(num_tree / 2)      
+            lhs = [ctvars[k][1 - self.cmaj] for k in range(num_tree)]
+            atls = CardEnc.atleast(lits = lhs, bound = rhs, vpool=self.vpool, encoding=EncType.cardnetwrk)
+            self.cnf.extend(atls)
+        else: 
+            zvars = []
+            zvars.append([self.newVar('z_0_{0}'.format(k)) for k in range (num_tree) ])
+            zvars.append([self.newVar('z_1_{0}'.format(k)) for k in range (num_tree) ])
+            ##
+            rhs = num_tree
+            lhs0 = zvars[0] + [ - ctvars[k][self.cmaj] for k in range(num_tree)]
+            ##self.printLits(lhs0)
+            atls = CardEnc.atleast(lits = lhs0, bound = rhs, vpool=self.vpool, encoding=EncType.cardnetwrk)
+            self.cnf.extend(atls)
+            ##
+            #rhs = num_tree - 1
+            rhs = num_tree + 1
+            ###########
+            lhs1 =  zvars[1] + [ - ctvars[k][self.cmaj] for k in range(num_tree)]
+            ##self.printLits(lhs1)
+            atls = CardEnc.atleast(lits = lhs1, bound = rhs, vpool=self.vpool, encoding=EncType.cardnetwrk)
+            self.cnf.extend(atls)            
+            #
+            pvars = [self.newVar('p_{0}'.format(k)) for k in range(self.num_class + 1)]
+            ##self.printLits(pvars)
+            for k,p in enumerate(pvars):
+                for i in range(num_tree):
+                    if k == 0:
+                        z = zvars[0][i]
+                        #self.cnf.append([-p, -z, vtaut])
+                        self.cnf.append([-p, z, -vtaut])       
+                        #self.printLits([-p, z, -vtaut])
+                        #print()
+                    elif k == self.cmaj+1:
+                        z = zvars[1][i]
+                        self.cnf.append([-p, z, -vtaut])       
+                        
+                        #self.printLits([-p, z, -vtaut])
+                        #print()                       
+                        
+                    else:
+                        z = zvars[0][i] if (k<self.cmaj+1) else zvars[1][i]
+                        self.cnf.append([-p, -z, ctvars[i][k-1] ])
+                        self.cnf.append([-p, z, -ctvars[i][k-1] ])  
+                        
+                        #self.printLits([-p, -z, ctvars[i][k-1] ])
+                        #self.printLits([-p, z, -ctvars[i][k-1] ])
+                        #print()
+                        
+            #
+            self.cnf.append([-pvars[0], -pvars[self.cmaj+1]])
+            ##
+            lhs1 =  pvars[:(self.cmaj+1)]
+            ##self.printLits(lhs1)
+            eqls = CardEnc.equals(lits = lhs1, bound = 1, vpool=self.vpool, encoding=EncType.cardnetwrk)
+            self.cnf.extend(eqls)
+            
+            
+            lhs2 = pvars[(self.cmaj + 1):]
+            ##self.printLits(lhs2)
+            eqls = CardEnc.equals(lits = lhs2, bound = 1, vpool=self.vpool, encoding=EncType.cardnetwrk)
+            self.cnf.extend(eqls)
+                
+        
+            
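+        # Worked check of the binary branch above (hypothetical forest of 50
+        # trees): with cmaj == 0, rhs = 26, i.e. the formula asserts that class 1
+        # collects a strict majority of tree votes; with cmaj == 1 the even-tie
+        # correction sets rhs = 25, since a 25-25 tie already makes
+        # argmax(bincount) fall back to class 0.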
+        ##print("exactly-one feat const ...")
+        # enforce exactly one of the feature values to be chosen
+        # (for categorical features)
+        categories = collections.defaultdict(lambda: [])
+        for f in self.extended_feature_names:
+            if '_' in f:
+                categories[f.split('_')[0]].append(self.newVar(f))        
+        for c, feats in six.iteritems(categories):
+            # exactly-one feat is True
+            self.cnf.append(feats)
+            card = CardEnc.atmost(lits=feats, vpool=self.vpool, encoding=EncType.cardnetwrk)
+            self.cnf.extend(card.clauses)
+        # lits of intervals   
+        for f, intvs in six.iteritems(self.ivars):
+            if not len(intvs):
+                continue
+            self.cnf.append(intvs) 
+            card = CardEnc.atmost(lits=intvs, vpool=self.vpool, encoding=EncType.cardnetwrk)
+            self.cnf.extend(card.clauses)
+            #self.printLits(intvs)
+        
+            
+        
+        for f, threshold in six.iteritems(self.thvars):
+            for j, thvar in enumerate(threshold):
+                d = j+1
+                pos, neg = self.ivars[f][d:], self.ivars[f][:d] 
+                
+                if j == 0:
+                    assert(len(neg) == 1)
+                    self.cnf.append([thvar, neg[-1]])
+                    self.cnf.append([-thvar, -neg[-1]])
+                else:
+                    self.cnf.append([thvar, neg[-1], -threshold[j-1]])
+                    self.cnf.append([-thvar, threshold[j-1]])
+                    self.cnf.append([-thvar, -neg[-1]])
+                
+                if j == len(threshold) - 1:
+                    assert(len(pos) == 1)
+                    self.cnf.append([-thvar, pos[0]])
+                    self.cnf.append([thvar, -pos[0]])
+                else:
+                    self.cnf.append([-thvar, pos[0], threshold[j+1]])
+                    self.cnf.append([thvar, -pos[0]])
+                    self.cnf.append([thvar, -threshold[j+1]])
+          
+
+        
+        return self.cnf, self.intvs, self.imaps, self.ivars
+
+
+#
+#==============================================================================
+class SATExplainer(object):
+    """
+        A SAT-based minimal explanation extractor for Random Forest models.
+    """
+
+    def __init__(self, sat_enc, inps, preamble, target_name, verb=1):
+        """
+            Constructor.
+        """
+        self.enc = sat_enc
+        self.inps = inps  # input (feature value) variables
+        self.target_name = target_name
+        self.preamble = preamble
+        self.verbose = verb
+        self.slv = None    
+      
+    def prepare_selectors(self, sample):
+        # adapt the solver to deal with the current sample
+        #self.csel = []
+        self.assums = []  # var selectors to be used as assumptions
+        self.sel2fid = {}  # selectors to original feature ids
+        self.sel2vid = {}  # selectors to categorical feature ids
+        self.sel2v = {} # selectors to (categorical/interval) values
+        
+        #for i in range(self.enc.num_class):
+        #    self.csel.append(self.enc.newVar('class{0}'.format(i)))
+        #self.csel = self.enc.newVar('class{0}'.format(self.enc.cmaj))
+               
+        # preparing the selectors
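+        # one selector variable selv is created per original feature: assuming
+        # selv fixes that feature to its value in the sample (via the implication
+        # clauses added below), while dropping selv from the assumptions frees it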
+        for i, (inp, val) in enumerate(zip(self.inps, sample), 1):
+            if '_' in inp:
+                # binarized (OHE) features
+                assert (inp not in self.enc.intvs)
+                
+                feat = inp.split('_')[0]
+                selv = self.enc.newVar('selv_{0}'.format(feat))
+            
+                self.assums.append(selv)   
+                if selv not in self.sel2fid:
+                    self.sel2fid[selv] = int(feat[1:])
+                    self.sel2vid[selv] = [i - 1]
+                else:
+                    self.sel2vid[selv].append(i - 1)
+                    
+                p = self.enc.newVar(inp) 
+                if not val:
+                    p = -p
+                else:
+                    self.sel2v[selv] = p
+                    
+                self.enc.cnf.append([-selv, p])
+                #self.enc.printLits([-selv, p])
+                    
+            elif len(self.enc.intvs[inp]):
+                v = next((intv for intv in self.enc.intvs[inp] if intv > val), None)     
+                assert(v is not None)
+                
+                selv = self.enc.newVar('selv_{0}'.format(inp))     
+                self.assums.append(selv)  
+                
+                assert (selv not in self.sel2fid)
+                self.sel2fid[selv] = int(inp[1:])
+                self.sel2vid[selv] = [i - 1]
+                            
+                for j,p in enumerate(self.enc.ivars[inp]):
+                    cl = [-selv]
+                    if j == self.enc.imaps[inp][v]:
+                        cl += [p]
+                        self.sel2v[selv] = p
+                    else:
+                        cl += [-p]
+                    
+                    self.enc.cnf.append(cl)
+                    #self.enc.printLits(cl)
+
+        
+    
+    def explain(self, sample, xtype='abd', smallest=False):
+        """
+            Hypotheses minimization.
+        """
+        if self.verbose:
+            print('  explaining:  "IF {0} THEN {1}"'.format(' AND '.join(self.preamble), self.target_name[self.enc.cmaj]))
+                    
+        
+        self.time = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
+                resource.getrusage(resource.RUSAGE_SELF).ru_utime
+        
+        self.prepare_selectors(sample)
+        
+        if xtype == 'abd':
+            # abductive (PI-) explanation
+            expl = self.compute_axp() 
+        else:
+            # contrastive explanation
+            expl = self.compute_cxp()
+ 
+        self.time = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
+                resource.getrusage(resource.RUSAGE_SELF).ru_utime - self.time
+    
+        # delete sat solver
+        self.slv.delete()
+        self.slv = None
+        
+        if self.verbose:
+            print('  time: {0:.3f}'.format(self.time))
+
+        return expl    
+
+    def compute_axp(self, smallest=False):
+        """
+            Compute an Abductive eXplanation
+        """         
+        self.assums = sorted(set(self.assums))
+        if self.verbose:
+            print('  # hypos:', len(self.assums))   
+        
+        #create a SAT solver
+        self.slv = Solver(name="glucose3")
+        
+        # pass a CNF formula
+        self.slv.append_formula(self.enc.cnf)    
+
+        def minimal():
+            vtaut = self.enc.newVar('Tautology')
+            # simple deletion-based linear search
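+            # at each step, feature p is tentatively freed (and forced away from
+            # its sample value); if the encoding becomes unsatisfiable, the
+            # prediction is still entailed without p, so p is dropped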
+            for i, p in enumerate(self.assums):
+                to_test = [vtaut] + self.assums[:i] + self.assums[(i + 1):] + [-p, -self.sel2v[p]]
+                sat = self.slv.solve(assumptions=to_test)
+                if not sat:
+                    self.assums[i] = -p         
+            return
+        
+        if not smallest:
+            minimal()
+        else:
+            raise NotImplementedError('Smallest explanation is not yet implemented.')
+            #self.compute_smallest()
+
+        expl = sorted([self.sel2fid[h] for h in self.assums if h>0 ])
+        assert len(expl), 'Abductive explanation cannot be an empty set; otherwise the RF function is constant, i.e. it always predicts the same class'
+        
+        if self.verbose:
+            print("expl-selctors: ", expl)
+            preamble = [self.preamble[i] for i in expl]
+            print('  explanation: "IF {0} THEN {1}"'.format(' AND '.join(preamble), self.target_name[self.enc.cmaj]))
+            print('  # hypos left:', len(expl))
+            
+        return expl
+        
+    def compute_cxp(self, smallest=True):
+        """
+            Compute a Contrastive eXplanation
+        """         
+        self.assums = sorted(set(self.assums))
+        if self.verbose:
+            print('  # hypos:', len(self.assums))   
+    
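+        # hard clauses: the RF encoding; soft unit clauses: one selector per
+        # feature; the selectors falsified in an optimal (or minimal-correction)
+        # solution form the contrastive explanation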
+        wcnf = WCNF()
+        for cl in self.enc.cnf:
+            wcnf.append(cl)    
+        for p in self.assums:
+            wcnf.append([p], weight=1)
+            
+        if not smallest:
+            # mcs solver
+            self.slv = LBX(wcnf, use_cld=True, solver_name='g3')
+            mcs = self.slv.compute()
+            expl = sorted([self.sel2fid[self.assums[i-1]] for i in mcs])
+        else:
+            # mxsat solver
+            self.slv = RC2(wcnf)
+            model = self.slv.compute()
+            model = [p for p in model if abs(p) in self.assums]            
+            expl = sorted([self.sel2fid[-p] for p in model if p<0 ])
+       
+        assert len(expl), 'Contrastive explanation cannot be an empty set!'
+        if self.verbose:
+            print("expl-selctors: ", expl)
+            preamble = [self.preamble[i] for i in expl]
+            pred = self.target_name[self.enc.cmaj]
+            print(f'  explanation: "IF {" AND ".join([f"!({p})" for p in preamble])} THEN !(class = {pred})"')
+            
+        return expl    
+    
+    def enumerate(self, sample, xtype='con', smallest=True):
+        """
+            Enumerate all CXp's (enumeration of AXp's is not yet implemented).
+        """
+        if xtype == 'abd':
+            raise NotImplementedError('Enumeration of abductive explanations is not yet implemented.')
+        time = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
+                resource.getrusage(resource.RUSAGE_SELF).ru_utime
+        
+        if not hasattr(self, 'assums'):
+            self.prepare_selectors(sample)
+            self.assums = sorted(set(self.assums))
+            #
+            
+        # compute CXp's/AE's    
+        if self.slv is None:    
+            wcnf = WCNF()
+            for cl in self.enc.cnf:
+                wcnf.append(cl)    
+            for p in self.assums:
+                wcnf.append([p], weight=1)
+            if smallest:    
+                # incremental maxsat solver    
+                self.slv = RC2(wcnf, adapt=True, exhaust=True, minz=True)
+            else:
+                # mcs solver
+                self.slv = LBX(wcnf, use_cld=True, solver_name='g3')
+                #self.slv = MCSls(wcnf, use_cld=True, solver_name='g3')                
+                
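+        # each enumerated solution yields the set of freed features (cxp_feats)
+        # together with advx: for every freed feature, the interval literal that
+        # is true in the corresponding model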
+        if smallest:    
+            print('smallest')
+            for model in self.slv.enumerate(block=-1):
+                #model = [p for p in model if abs(p) in self.assums]
+                expl = sorted([self.sel2fid[-p] for p in model if (p<0 and (-p in self.assums))])
+                cxp_feats = [f'f{j}' for j in expl]
+                advx = []
+                for f in cxp_feats:
+                    ps = [p for p in model if (p>0 and (p in self.enc.ivars[f]))]
+                    assert(len(ps) == 1)
+                    advx.append(tuple([f,self.enc.nameVar(ps[0])]))   
+                #yield expl
+                print(cxp_feats, advx)
+                yield advx
+        else:
+            print('LBX')
+            for mcs in self.slv.enumerate():
+                expl = sorted([self.sel2fid[self.assums[i-1]] for i in mcs])
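+                # fix all selectors outside the MCS and relax those inside it,
+                # then query the SAT oracle for a concrete model realizing this
+                # contrastive explanation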
+                assumptions = [-p if(i in mcs) else p for i,p in enumerate(self.assums, 1)]
+                #for k, model in enumerate(self.slv.oracle.enum_models(assumptions), 1):
+                assert (self.slv.oracle.solve(assumptions))
+                model = self.slv.oracle.get_model()
+                cxp_feats = [f'f{j}' for j in expl]
+                advx = []
+                for f in cxp_feats:
+                    ps = [p for p in model if (p>0 and (p in self.enc.ivars[f]))]
+                    assert(len(ps) == 1)
+                    advx.append(tuple([f,self.enc.nameVar(ps[0])]))
+                yield advx
+                self.slv.block(mcs)
+                #yield expl
+                
+                
+        time = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
+                resource.getrusage(resource.RUSAGE_SELF).ru_utime - time 
+        if self.verbose:
+            print('c expl time: {0:.3f}'.format(time))
+        #
+        self.slv.delete()
+        self.slv = None
\ No newline at end of file
-- 
GitLab