diff --git a/pages/RFxp/Classifiers/RF2001/pima/pima_nbestim_50_maxdepth_3.mod.pkl b/pages/RFxp/Classifiers/RF2001/pima/pima_nbestim_50_maxdepth_3.mod.pkl
new file mode 100644
index 0000000000000000000000000000000000000000..ac92a2373a6e27d1bf1b9537fcb42235914f452f
Binary files /dev/null and b/pages/RFxp/Classifiers/RF2001/pima/pima_nbestim_50_maxdepth_3.mod.pkl differ
diff --git a/pages/RFxp/RFxp.py b/pages/RFxp/RFxp.py
new file mode 100755
index 0000000000000000000000000000000000000000..5557e04fe0418a8eae77f70635a9849e46cc829b
--- /dev/null
+++ b/pages/RFxp/RFxp.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python3
+#-*- coding:utf-8 -*-
+##
+## RFxp.py
+##
+##  Created on: Oct 08, 2020
+##      Author: Yacine Izza
+##      E-mail: yacine.izza@univ-toulouse.fr
+##
+
+#
+#==============================================================================
+from __future__ import print_function
+from data import Data
+from options import Options
+import os
+import sys
+import pickle
+import resource
+
+
+from xrf import XRF, RF2001, Dataset
+import numpy as np
+
+
+
+#
+#==============================================================================
+def show_info():
+    """
+        Print info message.
+    """
+    print("c RFxp: Random Forest explainer.")
+    print('c')
+
+    
+#
+#==============================================================================
+def pickle_save_file(filename, data):
+    try:
+        f =  open(filename, "wb")
+        pickle.dump(data, f)
+        f.close()
+    except:
+        print("Cannot save to file", filename)
+        exit()
+
+def pickle_load_file(filename):
+    try:
+        f =  open(filename, "rb")
+        data = pickle.load(f)
+        f.close()
+        return data
+    except Exception as e:
+        print(e)
+        print("Cannot load from file", filename)
+        exit()    
+        
+    
+#
+#==============================================================================
+if __name__ == '__main__':
+    # parsing command-line options
+    options = Options(sys.argv)
+    
+    # making output unbuffered
+    if sys.version_info.major == 2:
+        sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
+
+    # showing head
+    show_info()
+
+        
+        
+    if options.files:
+        cls = None
+        xrf = None
+        
+        print("loading data ...")
+        data = Dataset(filename=options.files[0], 
+                    separator=options.separator, use_categorical = options.use_categorical)
+            
+        if options.train:
+            '''
+            data = Dataset(filename=options.files[0], mapfile=options.mapfile,
+                    separator=options.separator,
+                    use_categorical = options.use_categorical)
+            '''        
+            params = {'n_trees': options.n_estimators,
+                        'depth': options.maxdepth}
+            cls = RF2001(**params)
+            train_accuracy, test_accuracy = cls.train(data)
+            
+            if options.verb == 1:
+                print("----------------------")
+                print("Train accuracy: {0:.2f}".format(100. * train_accuracy))
+                print("Test accuracy: {0:.2f}".format(100. * test_accuracy))
+                print("----------------------")           
+            
+            xrf = XRF(cls, data.feature_names, data.target_name, options.verb)
+            #xrf.test_tree_ensemble()          
+            
+            bench_name = os.path.basename(options.files[0])
+            assert (bench_name.endswith('.csv'))
+            bench_name = os.path.splitext(bench_name)[0]
+            bench_dir_name = options.output + "/RF2001/" + bench_name
+            try:
+                os.stat(bench_dir_name)
+            except:
+                os.makedirs(bench_dir_name)
+
+            basename = (os.path.join(bench_dir_name, bench_name +
+                            "_nbestim_" + str(options.n_estimators) +
+                            "_maxdepth_" + str(options.maxdepth)))
+
+            modfile =  basename + '.mod.pkl'
+            print("saving  model to ", modfile)
+            pickle_save_file(modfile, cls)        
+
+
+        # read a sample from options.explain
+        if options.explain:
+            options.explain = [float(v.strip()) for v in options.explain.split(',')]
+            
+            if not xrf:
+                print("loading model ...")
+                cls = pickle_load_file(options.files[1])
+                #print()
+                #print("class skl:",cls.forest.classes_)
+                #print("feat names:",data.feature_names)
+                #print("extended name:",data.extended_feature_names_as_array_strings)
+                #print("target:",data.target_name)
+                #print()
+                xrf = XRF(cls, data.feature_names, data.target_name, options.verb)
+                if options.verb:
+                    # print test accuracy of the RF model
+                    _, X_test, _, y_test = data.train_test_split()
+                    X_test = data.transform(X_test) 
+                    cls.print_accuracy(X_test, y_test) 
+            
+            expl = xrf.explain(options.explain, options.xtype)
+            
+            print(f"expl len: {len(expl)}")
+            
+            del xrf.enc
+            del xrf.x            
+          
+            
\ No newline at end of file
diff --git a/pages/RFxp/data.py b/pages/RFxp/data.py
new file mode 100644
index 0000000000000000000000000000000000000000..6c1546db94bb0fc26706bd197392c6babc40f114
--- /dev/null
+++ b/pages/RFxp/data.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+##
+## data.py
+##
+##  Created on: Sep 20, 2017
+##      Author: Alexey Ignatiev, Nina Narodytska
+##      E-mail: aignatiev@ciencias.ulisboa.pt, narodytska@vmware.com
+##
+
+#
+#==============================================================================
+from __future__ import print_function
+import collections
+import itertools
+import os, pickle
+import six
+from six.moves import range
+import numpy as np
+
+
+#
+#==============================================================================
+class Data(object):
+    """
+        Class for representing data (transactions).
+    """
+
+    def __init__(self, filename=None, fpointer=None, mapfile=None,
+            separator=',', use_categorical = False):
+        """
+            Constructor and parser.
+        """
+
+        self.names = None
+        self.nm2id = None
+        self.samps = None
+        self.wghts = None
+        self.feats = None
+        self.fvmap = None
+        self.ovmap = {}
+        self.fvars = None
+        self.fname = filename
+        self.mname = mapfile
+        self.deleted = set([])
+
+        if filename:
+            with open(filename, 'r') as fp:
+                self.parse(fp, separator)
+        elif fpointer:
+            self.parse(fpointer, separator)
+
+        if self.mname:
+            self.read_orig_values()
+
+        # check if we have extra info about categorical_features
+
+        if (use_categorical):
+            extra_file = filename+".pkl"
+            try:
+                f =  open(extra_file, "rb")
+                print("Attempt: loading extra data from ", extra_file)
+                extra_info = pickle.load(f)
+                print("loaded")
+                f.close()
+                self.categorical_features = extra_info["categorical_features"]
+                self.categorical_names = extra_info["categorical_names"]
+                self.class_names = extra_info["class_names"]
+                self.categorical_onehot_names  = extra_info["categorical_names"].copy()
+
+                for i, name in enumerate(self.class_names):
+                    self.class_names[i] = str(name).replace("b'","'")
+                for c in self.categorical_names.items():
+                    clean_feature_names = []
+                    for i, name in enumerate(c[1]):
+                        name = str(name).replace("b'","'")
+                        clean_feature_names.append(name)
+                    self.categorical_names[c[0]] = clean_feature_names
+
+            except Exception as e:
+                f.close()
+                print("Please provide info about categorical features or omit option -c", e)
+                exit()
+
+    def parse(self, fp, separator):
+        """
+            Parse input file.
+        """
+
+        # reading data set from file
+        lines = fp.readlines()
+
+        # reading preamble
+        self.names = lines[0].strip().split(separator)
+        self.feats = [set([]) for n in self.names]
+        del(lines[0])
+
+        # filling name to id mapping
+        self.nm2id = {name: i for i, name in enumerate(self.names)}
+
+        self.nonbin2bin = {}
+        for name in self.nm2id:
+            spl = name.rsplit(':',1)
+            if (spl[0] not in self.nonbin2bin):
+                self.nonbin2bin[spl[0]] = [name]
+            else:
+                self.nonbin2bin[spl[0]].append(name)
+
+        # reading training samples
+        self.samps, self.wghts = [], []
+
+        for line, w in six.iteritems(collections.Counter(lines)):
+            sample = line.strip().split(separator)
+            for i, f in enumerate(sample):
+                if f:
+                    self.feats[i].add(f)
+            self.samps.append(sample)
+            self.wghts.append(w)
+
+        # direct and opposite mappings for items
+        idpool = itertools.count(start=0)
+        FVMap = collections.namedtuple('FVMap', ['dir', 'opp'])
+        self.fvmap = FVMap(dir={}, opp={})
+
+        # mapping features to ids
+        for i in range(len(self.names) - 1):
+            feats = sorted(list(self.feats[i]), reverse=True)
+            if len(feats) > 2:
+                for l in feats:
+                    self.fvmap.dir[(self.names[i], l)] = l
+            else:
+                self.fvmap.dir[(self.names[i], feats[0])] = 1
+                if len(feats) == 2:
+                    self.fvmap.dir[(self.names[i], feats[1])] = 0
+
+        # opposite mapping
+        for key, val in six.iteritems(self.fvmap.dir):
+            self.fvmap.opp[val] = key
+
+        # determining feature variables (excluding class variables)
+        for v, pair in six.iteritems(self.fvmap.opp):
+            if pair[0] == self.names[-1]:
+                self.fvars = v - 1
+                break
+
+    def read_orig_values(self):
+        """
+            Read original values for all the features.
+            (from a separate CSV file)
+        """
+
+        self.ovmap = {}
+
+        for line in open(self.mname, 'r'):
+            featval, bits = line.strip().split(',')
+            feat, val = featval.split(':')
+
+            for i, b in enumerate(bits):
+                f = '{0}:b{1}'.format(feat, i + 1)
+                v = self.fvmap.dir[(f, '1')]
+
+                if v not in self.ovmap:
+                    self.ovmap[v] = [feat]
+
+                if -v not in self.ovmap:
+                    self.ovmap[-v] = [feat]
+
+                self.ovmap[v if b == '1' else -v].append(val)
diff --git a/pages/RFxp/options.py b/pages/RFxp/options.py
new file mode 100644
index 0000000000000000000000000000000000000000..446eb71702f91876d0f8bef64fd90d048dd95282
--- /dev/null
+++ b/pages/RFxp/options.py
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+##
+## options.py
+##
+##  Created on: Dec 7, 2018
+##      Author: Alexey Ignatiev, Nina Narodytska
+##      E-mail: aignatiev@ciencias.ulisboa.pt, narodytska@vmware.com
+##
+
+#
+#==============================================================================
+from __future__ import print_function
+import getopt
+import math
+import os
+import sys
+
+
+#
+#==============================================================================
+class Options(object):
+    """
+        Class for representing command-line options.
+    """
+
+    def __init__(self, command):
+        """
+            Constructor.
+        """
+
+        # actions
+        self.train = False
+        self.encode = 'none'
+        self.explain = ''
+        self.xtype = 'abd'
+        self.use_categorical = False
+
+        # training options
+        self.accmin = 0.95
+        self.n_estimators = 100
+        self.maxdepth = 3
+        self.testsplit = 0.2
+        self.seed = 7
+
+        # other options
+        self.files = None
+        self.output = 'Classifiers'
+        self.mapfile = None
+        self.separator = ','
+        self.smallest = False
+        self.solver = 'g3'
+        self.verb = 0
+
+        
+        if command:
+            self.parse(command)
+
+    def parse(self, command):
+        """
+            Parser.
+        """
+
+        self.command = command
+
+        try:
+            opts, args = getopt.getopt(command[1:],
+                                    'e:hc:d:Mn:o:s:tvx:X:',
+                                    ['encode=', 'help', 'use-categorical=',
+                                     'maxdepth=', 'minimum', 'nbestims=',
+                                     'output=', 'seed=', 'solver=', 'testsplit=',
+                                     'train', 'verbose', 'explain=', 'xtype=' ])
+        except getopt.GetoptError as err:
+            sys.stderr.write(str(err).capitalize())
+            self.usage()
+            sys.exit(1)
+
+        for opt, arg in opts:
+            if opt in ('-a', '--accmin'):
+                self.accmin = float(arg)
+            elif opt in ('-c', '--use-categorical'):
+                self.use_categorical = True
+            elif opt in ('-d', '--maxdepth'):
+                self.maxdepth = int(arg)
+            elif opt in ('-e', '--encode'):
+                self.encode = str(arg)
+            elif opt in ('-h', '--help'):
+                self.usage()
+                sys.exit(0)
+
+            elif opt in ('-M', '--minimum'):
+                self.smallest = True
+            elif opt in ('-n', '--nbestims'):
+                self.n_estimators = int(arg)
+            elif opt in ('-o', '--output'):
+                self.output = str(arg)
+    
+            elif opt == '--seed':
+                self.seed = int(arg)
+            elif opt == '--sep':
+                self.separator = str(arg)
+            elif opt in ('-s', '--solver'):
+                self.solver = str(arg)
+            elif opt == '--testsplit':
+                self.testsplit = float(arg)
+            elif opt in ('-t', '--train'):
+                self.train = True
+            elif opt in ('-v', '--verbose'):
+                self.verb += 1
+            elif opt in ('-x', '--explain'):
+                self.explain = str(arg)
+            elif opt in ('-X', '--xtype'):
+                self.xtype = str(arg)
+            else:
+                assert False, 'Unhandled option: {0} {1}'.format(opt, arg)
+
+        if self.encode == 'none':
+            self.encode = None
+
+        self.files = args
+
+    def usage(self):
+        """
+            Print usage message.
+        """
+
+        print('Usage: ' + os.path.basename(self.command[0]) + ' [options] input-file')
+        print('Options:')
+        #print('        -a, --accmin=<float>       Minimal accuracy')
+        #print('                                   Available values: [0.0, 1.0] (default = 0.95)')
+        #print('        -c, --use-categorical      Treat categorical features as categorical (with categorical features info if available)')
+        print('        -d, --maxdepth=<int>       Maximal depth of a tree')
+        print('                                   Available values: [1, INT_MAX] (default = 3)')
+        #print('        -e, --encode=<smt>         Encode a previously trained model')
+        #print('                                   Available values: sat, maxsat, none (default = none)')
+        print('        -h, --help                 Show this message')
+  
+        #print('        -m, --map-file=<string>    Path to a file containing a mapping to original feature values. (default: none)')
+        #print('        -M, --minimum              Compute a smallest size explanation (instead of a subset-minimal one)')
+        print('        -n, --nbestims=<int>       Number of trees in the ensemble')
+        print('                                   Available values: [1, INT_MAX] (default = 100)')
+        print('        -o, --output=<string>      Directory where output files will be stored (default: \'Classifiers\')')
+       
+        print('        --seed=<int>               Seed for random splitting')
+        print('                                   Available values: [1, INT_MAX] (default = 7)')
+        print('        --sep=<string>             Field separator used in input file (default = \',\')')
+        print('        -s, --solver=<string>      A SAT oracle to use')
+        print('                                   Available values: glucose3, minisat (default = g3)')
+        print('        -t, --train                Train a model of a given dataset')
+        print('        --testsplit=<float>        Training and test sets split')
+        print('                                   Available values: [0.0, 1.0] (default = 0.2)')
+        print('        -v, --verbose              Increase verbosity level')
+        print('        -x, --explain=<string>     Explain a decision for a given comma-separated sample (default: none)')
+        print('        -X, --xtype=<string>       Type of explanation to compute: abductive or contrastive')
diff --git a/pages/RFxp/pima.csv b/pages/RFxp/pima.csv
new file mode 100644
index 0000000000000000000000000000000000000000..f3fac60936efb97c6c201c1d29b858c362e3d189
--- /dev/null
+++ b/pages/RFxp/pima.csv
@@ -0,0 +1,769 @@
+Pregnant,plasma glucose,Diastolic blood pressure,Triceps skin fold thickness,2-Hour serum insulin,Body mass index,Diabetes pedigree function,Age,target
+4.0,117.0,62.0,12.0,0.0,29.7,0.38,30.0,1
+4.0,158.0,78.0,0.0,0.0,32.9,0.8029999999999999,31.0,1
+2.0,118.0,80.0,0.0,0.0,42.9,0.693,21.0,1
+13.0,129.0,0.0,30.0,0.0,39.9,0.569,44.0,1
+5.0,162.0,104.0,0.0,0.0,37.7,0.151,52.0,1
+7.0,114.0,64.0,0.0,0.0,27.4,0.732,34.0,1
+6.0,102.0,82.0,0.0,0.0,30.8,0.18,36.0,1
+1.0,196.0,76.0,36.0,249.0,36.5,0.875,29.0,1
+9.0,102.0,76.0,37.0,0.0,32.9,0.665,46.0,1
+7.0,161.0,86.0,0.0,0.0,30.4,0.165,47.0,1
+7.0,114.0,66.0,0.0,0.0,32.8,0.258,42.0,1
+4.0,184.0,78.0,39.0,277.0,37.0,0.264,31.0,1
+0.0,137.0,40.0,35.0,168.0,43.1,2.2880000000000003,33.0,1
+6.0,125.0,76.0,0.0,0.0,33.8,0.121,54.0,1
+11.0,155.0,76.0,28.0,150.0,33.3,1.3530000000000002,51.0,1
+7.0,187.0,50.0,33.0,392.0,33.9,0.826,34.0,1
+7.0,178.0,84.0,0.0,0.0,39.9,0.331,41.0,1
+0.0,180.0,66.0,39.0,0.0,42.0,1.893,25.0,1
+8.0,120.0,86.0,0.0,0.0,28.4,0.259,22.0,1
+2.0,105.0,80.0,45.0,191.0,33.7,0.711,29.0,1
+0.0,118.0,84.0,47.0,230.0,45.8,0.551,31.0,1
+7.0,150.0,78.0,29.0,126.0,35.2,0.6920000000000001,54.0,1
+1.0,149.0,68.0,29.0,127.0,29.3,0.349,42.0,1
+8.0,188.0,78.0,0.0,0.0,47.9,0.13699999999999998,43.0,1
+3.0,173.0,78.0,39.0,185.0,33.8,0.97,31.0,1
+0.0,189.0,104.0,25.0,0.0,34.3,0.435,41.0,1
+9.0,164.0,84.0,21.0,0.0,30.8,0.831,32.0,1
+4.0,131.0,68.0,21.0,166.0,33.1,0.16,28.0,0
+6.0,85.0,78.0,0.0,0.0,31.2,0.382,42.0,0
+5.0,143.0,78.0,0.0,0.0,45.0,0.19,47.0,0
+4.0,110.0,66.0,0.0,0.0,31.9,0.47100000000000003,29.0,0
+10.0,115.0,0.0,0.0,0.0,35.3,0.134,29.0,0
+5.0,73.0,60.0,0.0,0.0,26.8,0.268,27.0,0
+7.0,106.0,92.0,18.0,0.0,22.7,0.235,48.0,0
+0.0,98.0,82.0,15.0,84.0,25.2,0.299,22.0,0
+2.0,88.0,58.0,26.0,16.0,28.4,0.7659999999999999,22.0,0
+1.0,73.0,50.0,10.0,0.0,23.0,0.248,21.0,0
+6.0,144.0,72.0,27.0,228.0,33.9,0.255,40.0,0
+5.0,122.0,86.0,0.0,0.0,34.7,0.29,33.0,0
+1.0,107.0,72.0,30.0,82.0,30.8,0.821,24.0,0
+0.0,101.0,64.0,17.0,0.0,21.0,0.252,21.0,0
+6.0,80.0,66.0,30.0,0.0,26.2,0.313,41.0,0
+0.0,173.0,78.0,32.0,265.0,46.5,1.159,58.0,0
+2.0,122.0,76.0,27.0,200.0,35.9,0.483,26.0,0
+2.0,99.0,52.0,15.0,94.0,24.6,0.637,21.0,0
+1.0,151.0,60.0,0.0,0.0,26.1,0.179,22.0,0
+6.0,105.0,70.0,32.0,68.0,30.8,0.122,37.0,0
+1.0,119.0,44.0,47.0,63.0,35.5,0.28,25.0,0
+4.0,132.0,86.0,31.0,0.0,28.0,0.419,63.0,0
+10.0,129.0,76.0,28.0,122.0,35.9,0.28,39.0,0
+2.0,106.0,56.0,27.0,165.0,29.0,0.426,22.0,0
+4.0,127.0,88.0,11.0,155.0,34.5,0.598,28.0,0
+1.0,157.0,72.0,21.0,168.0,25.6,0.12300000000000001,24.0,0
+0.0,101.0,76.0,0.0,0.0,35.7,0.198,26.0,0
+6.0,125.0,68.0,30.0,120.0,30.0,0.46399999999999997,32.0,0
+2.0,82.0,52.0,22.0,115.0,28.5,1.699,25.0,0
+0.0,113.0,80.0,16.0,0.0,31.0,0.8740000000000001,21.0,0
+0.0,100.0,70.0,26.0,50.0,30.8,0.597,21.0,0
+2.0,120.0,76.0,37.0,105.0,39.7,0.215,29.0,0
+6.0,183.0,94.0,0.0,0.0,40.8,1.4609999999999999,45.0,0
+0.0,125.0,96.0,0.0,0.0,22.5,0.262,21.0,0
+1.0,126.0,56.0,29.0,152.0,28.7,0.8009999999999999,21.0,0
+9.0,89.0,62.0,0.0,0.0,22.5,0.142,33.0,0
+3.0,84.0,68.0,30.0,106.0,31.9,0.591,25.0,0
+2.0,122.0,60.0,18.0,106.0,29.8,0.7170000000000001,22.0,0
+2.0,117.0,90.0,19.0,71.0,25.2,0.313,21.0,0
+2.0,89.0,90.0,30.0,0.0,33.5,0.292,42.0,0
+1.0,91.0,54.0,25.0,100.0,25.2,0.23399999999999999,23.0,0
+6.0,102.0,90.0,39.0,0.0,35.7,0.674,28.0,0
+12.0,106.0,80.0,0.0,0.0,23.6,0.13699999999999998,44.0,0
+4.0,129.0,86.0,20.0,270.0,35.1,0.231,23.0,0
+6.0,129.0,90.0,7.0,326.0,19.6,0.5820000000000001,60.0,0
+9.0,134.0,74.0,33.0,60.0,25.9,0.46,81.0,0
+3.0,111.0,90.0,12.0,78.0,28.4,0.495,29.0,0
+1.0,128.0,82.0,17.0,183.0,27.5,0.115,22.0,0
+1.0,71.0,62.0,0.0,0.0,21.8,0.41600000000000004,26.0,0
+7.0,142.0,60.0,33.0,190.0,28.8,0.687,61.0,0
+4.0,115.0,72.0,0.0,0.0,28.9,0.376,46.0,1
+9.0,165.0,88.0,0.0,0.0,30.4,0.302,49.0,1
+13.0,152.0,90.0,33.0,29.0,26.8,0.731,43.0,1
+13.0,126.0,90.0,0.0,0.0,43.4,0.583,42.0,1
+6.0,194.0,78.0,0.0,0.0,23.5,0.129,59.0,1
+4.0,146.0,78.0,0.0,0.0,38.5,0.52,67.0,1
+3.0,129.0,92.0,49.0,155.0,36.4,0.968,32.0,1
+2.0,108.0,80.0,0.0,0.0,27.0,0.259,52.0,1
+0.0,123.0,72.0,0.0,0.0,36.3,0.258,52.0,1
+14.0,175.0,62.0,30.0,0.0,33.6,0.212,38.0,1
+3.0,107.0,62.0,13.0,48.0,22.9,0.6779999999999999,23.0,1
+8.0,143.0,66.0,0.0,0.0,34.9,0.129,41.0,1
+17.0,163.0,72.0,41.0,114.0,40.9,0.8170000000000001,47.0,1
+11.0,135.0,0.0,0.0,0.0,52.3,0.578,40.0,1
+9.0,156.0,86.0,28.0,155.0,34.3,1.189,42.0,1
+3.0,176.0,86.0,27.0,156.0,33.3,1.1540000000000001,52.0,1
+5.0,85.0,74.0,22.0,0.0,29.0,1.224,32.0,1
+3.0,173.0,84.0,33.0,474.0,35.7,0.258,22.0,1
+6.0,147.0,80.0,0.0,0.0,29.5,0.17800000000000002,50.0,1
+6.0,195.0,70.0,0.0,0.0,30.9,0.32799999999999996,31.0,1
+10.0,108.0,66.0,0.0,0.0,32.4,0.272,42.0,1
+9.0,140.0,94.0,0.0,0.0,32.7,0.7340000000000001,45.0,1
+6.0,0.0,68.0,41.0,0.0,39.0,0.727,41.0,1
+2.0,155.0,74.0,17.0,96.0,26.6,0.433,27.0,1
+7.0,181.0,84.0,21.0,192.0,35.9,0.586,51.0,1
+9.0,156.0,86.0,0.0,0.0,24.8,0.23,53.0,1
+7.0,109.0,80.0,31.0,0.0,35.9,1.127,43.0,1
+2.0,71.0,70.0,27.0,0.0,28.0,0.586,22.0,0
+10.0,92.0,62.0,0.0,0.0,25.9,0.16699999999999998,31.0,0
+12.0,88.0,74.0,40.0,54.0,35.3,0.37799999999999995,48.0,0
+2.0,128.0,64.0,42.0,0.0,40.0,1.101,24.0,0
+10.0,115.0,98.0,0.0,0.0,24.0,1.022,34.0,0
+1.0,79.0,60.0,42.0,48.0,43.5,0.6779999999999999,23.0,0
+1.0,100.0,74.0,12.0,46.0,19.5,0.149,28.0,0
+1.0,119.0,88.0,41.0,170.0,45.3,0.507,26.0,0
+9.0,72.0,78.0,25.0,0.0,31.6,0.28,38.0,0
+8.0,194.0,80.0,0.0,0.0,26.1,0.551,67.0,0
+13.0,153.0,88.0,37.0,140.0,40.6,1.1740000000000002,39.0,0
+2.0,119.0,0.0,0.0,0.0,19.6,0.8320000000000001,72.0,0
+2.0,88.0,74.0,19.0,53.0,29.0,0.22899999999999998,22.0,0
+2.0,130.0,96.0,0.0,0.0,22.6,0.268,21.0,0
+0.0,94.0,0.0,0.0,0.0,0.0,0.256,25.0,0
+8.0,110.0,76.0,0.0,0.0,27.8,0.237,58.0,0
+2.0,92.0,76.0,20.0,0.0,24.2,1.6980000000000002,28.0,0
+0.0,101.0,62.0,0.0,0.0,21.9,0.336,25.0,0
+2.0,122.0,70.0,27.0,0.0,36.8,0.34,27.0,0
+0.0,125.0,68.0,0.0,0.0,24.7,0.20600000000000002,21.0,0
+4.0,117.0,64.0,27.0,120.0,33.2,0.23,24.0,0
+1.0,85.0,66.0,29.0,0.0,26.6,0.35100000000000003,31.0,0
+2.0,108.0,62.0,10.0,278.0,25.3,0.8809999999999999,22.0,0
+2.0,74.0,0.0,0.0,0.0,0.0,0.102,22.0,0
+7.0,136.0,90.0,0.0,0.0,29.9,0.21,50.0,0
+3.0,115.0,66.0,39.0,140.0,38.1,0.15,28.0,0
+10.0,133.0,68.0,0.0,0.0,27.0,0.245,36.0,0
+1.0,139.0,46.0,19.0,83.0,28.7,0.654,22.0,0
+11.0,127.0,106.0,0.0,0.0,39.0,0.19,51.0,0
+4.0,99.0,68.0,38.0,0.0,32.8,0.145,33.0,0
+5.0,77.0,82.0,41.0,42.0,35.8,0.156,35.0,0
+1.0,139.0,62.0,41.0,480.0,40.7,0.536,21.0,0
+2.0,115.0,64.0,22.0,0.0,30.8,0.42100000000000004,21.0,0
+4.0,137.0,84.0,0.0,0.0,31.2,0.252,30.0,0
+2.0,100.0,54.0,28.0,105.0,37.8,0.498,24.0,0
+1.0,93.0,56.0,11.0,0.0,22.5,0.41700000000000004,22.0,0
+0.0,165.0,76.0,43.0,255.0,47.9,0.259,26.0,0
+2.0,129.0,0.0,0.0,0.0,38.5,0.304,41.0,0
+0.0,141.0,84.0,26.0,0.0,32.4,0.433,22.0,0
+0.0,101.0,65.0,28.0,0.0,24.6,0.237,22.0,0
+5.0,126.0,78.0,27.0,22.0,29.6,0.439,40.0,0
+3.0,82.0,70.0,0.0,0.0,21.1,0.389,25.0,0
+1.0,83.0,68.0,0.0,0.0,18.2,0.624,27.0,0
+9.0,106.0,52.0,0.0,0.0,31.2,0.38,42.0,0
+3.0,116.0,0.0,0.0,0.0,23.5,0.187,23.0,0
+4.0,110.0,76.0,20.0,100.0,28.4,0.11800000000000001,27.0,0
+3.0,111.0,56.0,39.0,0.0,30.1,0.557,30.0,0
+4.0,85.0,58.0,22.0,49.0,27.8,0.306,28.0,0
+0.0,118.0,64.0,23.0,89.0,0.0,1.7309999999999999,21.0,0
+5.0,147.0,78.0,0.0,0.0,33.7,0.218,65.0,0
+0.0,131.0,0.0,0.0,0.0,43.2,0.27,26.0,1
+4.0,123.0,62.0,0.0,0.0,32.0,0.226,35.0,1
+9.0,152.0,78.0,34.0,171.0,34.2,0.893,33.0,1
+2.0,155.0,52.0,27.0,540.0,38.7,0.24,25.0,1
+0.0,104.0,64.0,37.0,64.0,33.6,0.51,22.0,1
+9.0,112.0,82.0,24.0,0.0,28.2,1.2819999999999998,50.0,1
+8.0,155.0,62.0,26.0,495.0,34.0,0.5429999999999999,46.0,1
+5.0,115.0,76.0,0.0,0.0,31.2,0.34299999999999997,44.0,1
+5.0,189.0,64.0,33.0,325.0,31.2,0.583,29.0,1
+0.0,162.0,76.0,36.0,0.0,49.6,0.364,26.0,1
+5.0,158.0,84.0,41.0,210.0,39.4,0.395,29.0,1
+3.0,187.0,70.0,22.0,200.0,36.4,0.408,36.0,1
+7.0,103.0,66.0,32.0,0.0,39.1,0.344,31.0,1
+0.0,198.0,66.0,32.0,274.0,41.3,0.502,28.0,1
+10.0,168.0,74.0,0.0,0.0,38.0,0.537,34.0,1
+0.0,140.0,65.0,26.0,130.0,42.6,0.431,24.0,1
+3.0,169.0,74.0,19.0,125.0,29.9,0.268,31.0,1
+9.0,164.0,78.0,0.0,0.0,32.8,0.14800000000000002,45.0,1
+5.0,109.0,62.0,41.0,129.0,35.8,0.514,25.0,1
+0.0,131.0,66.0,40.0,0.0,34.3,0.196,22.0,1
+14.0,100.0,78.0,25.0,184.0,36.6,0.41200000000000003,46.0,1
+0.0,167.0,0.0,0.0,0.0,32.3,0.8390000000000001,30.0,1
+8.0,167.0,106.0,46.0,231.0,37.6,0.165,43.0,1
+2.0,174.0,88.0,37.0,120.0,44.5,0.6459999999999999,24.0,1
+0.0,138.0,60.0,35.0,167.0,34.6,0.534,21.0,1
+8.0,181.0,68.0,36.0,495.0,30.1,0.615,60.0,1
+2.0,102.0,86.0,36.0,120.0,45.5,0.127,23.0,1
+3.0,150.0,76.0,0.0,0.0,21.0,0.207,37.0,0
+7.0,179.0,95.0,31.0,0.0,34.2,0.16399999999999998,60.0,0
+0.0,102.0,78.0,40.0,90.0,34.5,0.23800000000000002,24.0,0
+1.0,96.0,64.0,27.0,87.0,33.2,0.289,21.0,0
+3.0,116.0,74.0,15.0,105.0,26.3,0.107,24.0,0
+1.0,164.0,82.0,43.0,67.0,32.8,0.341,50.0,0
+1.0,130.0,70.0,13.0,105.0,25.9,0.47200000000000003,22.0,0
+2.0,91.0,62.0,0.0,0.0,27.3,0.525,22.0,0
+0.0,114.0,80.0,34.0,285.0,44.2,0.16699999999999998,27.0,0
+6.0,114.0,0.0,0.0,0.0,0.0,0.18899999999999997,26.0,0
+12.0,121.0,78.0,17.0,0.0,26.5,0.259,62.0,0
+4.0,92.0,80.0,0.0,0.0,42.2,0.237,29.0,0
+1.0,90.0,68.0,8.0,0.0,24.5,1.138,36.0,0
+1.0,109.0,38.0,18.0,120.0,23.1,0.40700000000000003,26.0,0
+10.0,75.0,82.0,0.0,0.0,33.3,0.263,38.0,0
+1.0,143.0,74.0,22.0,61.0,26.2,0.256,21.0,0
+10.0,162.0,84.0,0.0,0.0,27.7,0.182,54.0,0
+7.0,150.0,66.0,42.0,342.0,34.7,0.718,42.0,0
+0.0,117.0,0.0,0.0,0.0,33.8,0.932,44.0,0
+8.0,65.0,72.0,23.0,0.0,32.0,0.6,42.0,0
+3.0,99.0,62.0,19.0,74.0,21.8,0.27899999999999997,26.0,0
+3.0,96.0,78.0,39.0,0.0,37.3,0.23800000000000002,40.0,0
+7.0,62.0,78.0,0.0,0.0,32.6,0.391,41.0,0
+5.0,128.0,80.0,0.0,0.0,34.6,0.14400000000000002,45.0,0
+5.0,110.0,68.0,0.0,0.0,26.0,0.292,30.0,0
+2.0,75.0,64.0,24.0,55.0,29.7,0.37,33.0,0
+2.0,108.0,64.0,0.0,0.0,30.8,0.158,21.0,0
+2.0,87.0,0.0,23.0,0.0,28.9,0.773,25.0,0
+7.0,119.0,0.0,0.0,0.0,25.2,0.209,37.0,0
+0.0,102.0,86.0,17.0,105.0,29.3,0.695,27.0,0
+0.0,126.0,84.0,29.0,215.0,30.7,0.52,24.0,0
+0.0,132.0,78.0,0.0,0.0,32.4,0.39299999999999996,21.0,0
+1.0,108.0,88.0,19.0,0.0,27.1,0.4,24.0,0
+0.0,123.0,88.0,37.0,0.0,35.2,0.19699999999999998,29.0,0
+5.0,88.0,66.0,21.0,23.0,24.4,0.342,30.0,0
+8.0,112.0,72.0,0.0,0.0,23.6,0.84,58.0,0
+0.0,84.0,82.0,31.0,125.0,38.2,0.233,23.0,0
+2.0,83.0,65.0,28.0,66.0,36.8,0.629,24.0,0
+7.0,137.0,90.0,41.0,0.0,32.0,0.391,39.0,0
+2.0,68.0,62.0,13.0,15.0,20.1,0.257,23.0,0
+0.0,117.0,66.0,31.0,188.0,30.8,0.493,22.0,0
+0.0,93.0,60.0,0.0,0.0,35.3,0.263,25.0,0
+3.0,100.0,68.0,23.0,81.0,31.6,0.9490000000000001,28.0,0
+4.0,112.0,78.0,40.0,0.0,39.4,0.23600000000000002,38.0,0
+1.0,143.0,84.0,23.0,310.0,42.4,1.0759999999999998,22.0,0
+6.0,92.0,92.0,0.0,0.0,19.9,0.188,28.0,0
+2.0,127.0,58.0,24.0,275.0,27.7,1.6,25.0,0
+2.0,94.0,68.0,18.0,76.0,26.0,0.561,21.0,0
+0.0,78.0,88.0,29.0,40.0,36.9,0.434,21.0,0
+0.0,152.0,82.0,39.0,272.0,41.5,0.27,27.0,0
+6.0,134.0,70.0,23.0,130.0,35.4,0.542,29.0,1
+11.0,136.0,84.0,35.0,130.0,28.3,0.26,42.0,1
+5.0,139.0,80.0,35.0,160.0,31.6,0.361,25.0,1
+3.0,158.0,70.0,30.0,328.0,35.5,0.344,35.0,1
+0.0,188.0,82.0,14.0,185.0,32.0,0.682,22.0,1
+6.0,104.0,74.0,18.0,156.0,29.9,0.722,41.0,1
+6.0,119.0,50.0,22.0,176.0,27.1,1.318,33.0,1
+8.0,124.0,76.0,24.0,600.0,28.7,0.687,52.0,1
+0.0,119.0,0.0,0.0,0.0,32.4,0.141,24.0,1
+1.0,88.0,30.0,42.0,99.0,55.0,0.496,26.0,1
+7.0,142.0,90.0,24.0,480.0,30.4,0.128,43.0,1
+10.0,101.0,86.0,37.0,0.0,45.6,1.136,38.0,1
+0.0,145.0,0.0,0.0,0.0,44.2,0.63,31.0,1
+10.0,90.0,85.0,32.0,0.0,34.9,0.825,56.0,1
+1.0,117.0,88.0,24.0,145.0,34.5,0.40299999999999997,40.0,1
+5.0,115.0,98.0,0.0,0.0,52.9,0.209,28.0,1
+0.0,179.0,90.0,27.0,0.0,44.1,0.6859999999999999,23.0,1
+7.0,129.0,68.0,49.0,125.0,38.5,0.439,43.0,1
+0.0,138.0,0.0,0.0,0.0,36.3,0.9329999999999999,25.0,1
+3.0,129.0,64.0,29.0,115.0,26.4,0.21899999999999997,28.0,1
+3.0,162.0,52.0,38.0,0.0,37.2,0.652,24.0,1
+9.0,184.0,85.0,15.0,0.0,30.0,1.213,49.0,1
+6.0,124.0,72.0,0.0,0.0,27.6,0.368,29.0,1
+4.0,171.0,72.0,0.0,0.0,43.6,0.479,26.0,1
+3.0,128.0,72.0,25.0,190.0,32.4,0.5489999999999999,27.0,1
+0.0,131.0,88.0,0.0,0.0,31.6,0.743,32.0,1
+10.0,115.0,0.0,0.0,0.0,0.0,0.261,30.0,1
+13.0,106.0,72.0,54.0,0.0,36.6,0.17800000000000002,45.0,0
+0.0,74.0,52.0,10.0,36.0,27.8,0.26899999999999996,22.0,0
+5.0,109.0,75.0,26.0,0.0,36.0,0.546,60.0,0
+2.0,83.0,66.0,23.0,50.0,32.2,0.4970000000000001,22.0,0
+4.0,154.0,62.0,31.0,284.0,32.8,0.237,23.0,0
+1.0,90.0,62.0,18.0,59.0,25.1,1.268,25.0,0
+6.0,111.0,64.0,39.0,0.0,34.2,0.26,24.0,0
+0.0,126.0,86.0,27.0,120.0,27.4,0.515,21.0,0
+1.0,96.0,122.0,0.0,0.0,22.4,0.207,27.0,0
+5.0,99.0,74.0,27.0,0.0,29.0,0.203,32.0,0
+1.0,108.0,60.0,46.0,178.0,35.5,0.415,24.0,0
+4.0,120.0,68.0,0.0,0.0,29.6,0.7090000000000001,34.0,0
+6.0,107.0,88.0,0.0,0.0,36.8,0.727,31.0,0
+4.0,114.0,65.0,0.0,0.0,21.9,0.4320000000000001,37.0,0
+2.0,94.0,76.0,18.0,66.0,31.6,0.649,23.0,0
+0.0,102.0,75.0,23.0,0.0,0.0,0.5720000000000001,21.0,0
+1.0,91.0,64.0,24.0,0.0,29.2,0.192,21.0,0
+1.0,0.0,74.0,20.0,23.0,27.7,0.299,21.0,0
+11.0,103.0,68.0,40.0,0.0,46.2,0.126,42.0,0
+1.0,135.0,54.0,0.0,0.0,26.7,0.687,62.0,0
+2.0,100.0,64.0,23.0,0.0,29.7,0.368,21.0,0
+2.0,110.0,74.0,29.0,125.0,32.4,0.698,27.0,0
+0.0,137.0,68.0,14.0,148.0,24.8,0.14300000000000002,21.0,0
+0.0,104.0,76.0,0.0,0.0,18.4,0.5820000000000001,27.0,0
+4.0,147.0,74.0,25.0,293.0,34.9,0.385,30.0,0
+0.0,104.0,64.0,23.0,116.0,27.8,0.45399999999999996,23.0,0
+2.0,105.0,58.0,40.0,94.0,34.9,0.225,25.0,0
+3.0,102.0,44.0,20.0,94.0,30.8,0.4,26.0,0
+2.0,141.0,58.0,34.0,128.0,25.4,0.6990000000000001,24.0,0
+1.0,95.0,66.0,13.0,38.0,19.6,0.33399999999999996,25.0,0
+3.0,106.0,72.0,0.0,0.0,25.8,0.207,27.0,0
+2.0,106.0,64.0,35.0,119.0,30.5,1.4,34.0,0
+3.0,148.0,66.0,25.0,0.0,32.5,0.256,22.0,0
+5.0,139.0,64.0,35.0,140.0,28.6,0.41100000000000003,26.0,0
+4.0,99.0,76.0,15.0,51.0,23.2,0.223,21.0,0
+1.0,111.0,62.0,13.0,182.0,24.0,0.138,23.0,0
+6.0,165.0,68.0,26.0,168.0,33.6,0.631,49.0,0
+3.0,125.0,58.0,0.0,0.0,31.6,0.151,24.0,0
+2.0,81.0,72.0,15.0,76.0,30.1,0.547,25.0,0
+6.0,117.0,96.0,0.0,0.0,28.7,0.157,30.0,0
+2.0,68.0,70.0,32.0,66.0,25.0,0.187,25.0,0
+1.0,97.0,70.0,40.0,0.0,38.1,0.218,30.0,0
+0.0,91.0,68.0,32.0,210.0,39.9,0.381,25.0,0
+1.0,95.0,74.0,21.0,73.0,25.9,0.6729999999999999,36.0,0
+3.0,81.0,86.0,16.0,66.0,27.5,0.306,22.0,0
+8.0,95.0,72.0,0.0,0.0,36.8,0.485,57.0,0
+6.0,99.0,60.0,19.0,54.0,26.9,0.4970000000000001,32.0,0
+5.0,105.0,72.0,29.0,325.0,36.9,0.159,28.0,0
+2.0,101.0,58.0,35.0,90.0,21.8,0.155,22.0,0
+7.0,124.0,70.0,33.0,215.0,25.5,0.161,37.0,0
+0.0,135.0,68.0,42.0,250.0,42.3,0.365,24.0,1
+5.0,166.0,76.0,0.0,0.0,45.7,0.34,27.0,1
+7.0,97.0,76.0,32.0,91.0,40.9,0.871,32.0,1
+7.0,184.0,84.0,33.0,0.0,35.5,0.355,41.0,1
+8.0,176.0,90.0,34.0,300.0,33.7,0.467,58.0,1
+3.0,171.0,72.0,33.0,135.0,33.3,0.19899999999999998,24.0,1
+8.0,133.0,72.0,0.0,0.0,32.9,0.27,39.0,1
+1.0,122.0,64.0,32.0,156.0,35.1,0.6920000000000001,30.0,1
+9.0,122.0,56.0,0.0,0.0,33.3,1.114,33.0,1
+4.0,145.0,82.0,18.0,0.0,32.5,0.235,70.0,1
+10.0,148.0,84.0,48.0,237.0,37.6,1.001,51.0,1
+2.0,93.0,64.0,32.0,160.0,38.0,0.674,23.0,1
+11.0,143.0,94.0,33.0,146.0,36.6,0.254,51.0,1
+10.0,111.0,70.0,27.0,0.0,27.5,0.141,40.0,1
+6.0,162.0,62.0,0.0,0.0,24.3,0.17800000000000002,50.0,1
+8.0,154.0,78.0,32.0,0.0,32.4,0.44299999999999995,45.0,1
+8.0,183.0,64.0,0.0,0.0,23.3,0.672,32.0,1
+7.0,100.0,0.0,0.0,0.0,30.0,0.484,32.0,1
+8.0,186.0,90.0,35.0,225.0,34.5,0.423,37.0,1
+5.0,112.0,66.0,0.0,0.0,37.8,0.261,41.0,1
+4.0,183.0,0.0,0.0,0.0,28.4,0.212,36.0,1
+3.0,174.0,58.0,22.0,194.0,32.9,0.593,36.0,1
+0.0,121.0,66.0,30.0,165.0,34.3,0.203,33.0,1
+12.0,84.0,72.0,31.0,0.0,29.7,0.297,46.0,1
+7.0,107.0,74.0,0.0,0.0,29.6,0.254,31.0,1
+5.0,137.0,108.0,0.0,0.0,48.8,0.22699999999999998,37.0,1
+5.0,187.0,76.0,27.0,207.0,43.6,1.034,53.0,1
+4.0,103.0,60.0,33.0,192.0,24.0,0.966,33.0,0
+1.0,131.0,64.0,14.0,415.0,23.7,0.389,21.0,0
+1.0,120.0,80.0,48.0,200.0,38.9,1.162,41.0,0
+4.0,95.0,70.0,32.0,0.0,32.1,0.612,24.0,0
+5.0,117.0,86.0,30.0,105.0,39.1,0.251,42.0,0
+2.0,90.0,60.0,0.0,0.0,23.5,0.191,25.0,0
+10.0,139.0,80.0,0.0,0.0,27.1,1.4409999999999998,57.0,0
+1.0,146.0,56.0,0.0,0.0,29.7,0.564,29.0,0
+7.0,133.0,84.0,0.0,0.0,40.2,0.696,37.0,0
+0.0,102.0,64.0,46.0,78.0,40.6,0.496,21.0,0
+2.0,112.0,66.0,22.0,0.0,25.0,0.307,24.0,0
+4.0,116.0,72.0,12.0,87.0,22.1,0.46299999999999997,37.0,0
+0.0,93.0,100.0,39.0,72.0,43.4,1.021,35.0,0
+0.0,102.0,52.0,0.0,0.0,25.1,0.078,21.0,0
+7.0,81.0,78.0,40.0,48.0,46.7,0.261,42.0,0
+0.0,100.0,88.0,60.0,110.0,46.8,0.9620000000000001,31.0,0
+3.0,122.0,78.0,0.0,0.0,23.0,0.254,40.0,0
+4.0,90.0,0.0,0.0,0.0,28.0,0.61,31.0,0
+2.0,100.0,70.0,52.0,57.0,40.5,0.677,25.0,0
+2.0,98.0,60.0,17.0,120.0,34.7,0.198,22.0,0
+3.0,130.0,64.0,0.0,0.0,23.1,0.314,22.0,0
+1.0,119.0,54.0,13.0,50.0,22.3,0.205,24.0,0
+1.0,136.0,74.0,50.0,204.0,37.4,0.39899999999999997,24.0,0
+1.0,81.0,72.0,18.0,40.0,26.6,0.28300000000000003,24.0,0
+1.0,125.0,70.0,24.0,110.0,24.3,0.221,25.0,0
+0.0,105.0,64.0,41.0,142.0,41.5,0.17300000000000001,22.0,0
+1.0,100.0,72.0,12.0,70.0,25.3,0.6579999999999999,28.0,0
+4.0,118.0,70.0,0.0,0.0,44.5,0.904,26.0,0
+7.0,125.0,86.0,0.0,0.0,37.6,0.304,51.0,0
+2.0,139.0,75.0,0.0,0.0,25.6,0.16699999999999998,29.0,0
+2.0,112.0,86.0,42.0,160.0,38.4,0.24600000000000002,28.0,0
+3.0,106.0,54.0,21.0,158.0,30.9,0.292,24.0,0
+1.0,124.0,60.0,32.0,0.0,35.8,0.514,21.0,0
+1.0,97.0,70.0,15.0,0.0,18.2,0.147,21.0,0
+1.0,100.0,66.0,15.0,56.0,23.6,0.6659999999999999,26.0,0
+5.0,99.0,54.0,28.0,83.0,34.0,0.499,30.0,0
+5.0,147.0,75.0,0.0,0.0,29.9,0.434,28.0,0
+0.0,117.0,80.0,31.0,53.0,45.2,0.08900000000000001,24.0,0
+2.0,125.0,60.0,20.0,140.0,33.8,0.08800000000000001,31.0,0
+2.0,85.0,65.0,0.0,0.0,39.6,0.93,27.0,0
+3.0,83.0,58.0,31.0,18.0,34.3,0.336,25.0,0
+3.0,99.0,54.0,19.0,86.0,25.6,0.154,24.0,0
+1.0,79.0,75.0,30.0,0.0,32.0,0.396,22.0,0
+4.0,146.0,85.0,27.0,100.0,28.9,0.18899999999999997,27.0,0
+3.0,74.0,68.0,28.0,45.0,29.7,0.293,23.0,0
+11.0,85.0,74.0,0.0,0.0,30.1,0.3,35.0,0
+1.0,97.0,66.0,15.0,140.0,23.2,0.48700000000000004,22.0,0
+4.0,84.0,90.0,23.0,56.0,39.5,0.159,25.0,0
+6.0,154.0,78.0,41.0,140.0,46.1,0.5710000000000001,27.0,0
+1.0,99.0,72.0,30.0,18.0,38.6,0.41200000000000003,21.0,0
+8.0,197.0,74.0,0.0,0.0,25.9,1.1909999999999998,39.0,1
+0.0,181.0,88.0,44.0,510.0,43.3,0.222,26.0,1
+3.0,141.0,0.0,0.0,0.0,30.0,0.7609999999999999,27.0,1
+0.0,107.0,62.0,30.0,74.0,36.6,0.757,25.0,1
+4.0,109.0,64.0,44.0,99.0,34.8,0.905,26.0,1
+2.0,146.0,70.0,38.0,360.0,28.0,0.337,29.0,1
+4.0,125.0,80.0,0.0,0.0,32.3,0.536,27.0,1
+3.0,182.0,74.0,0.0,0.0,30.5,0.345,29.0,1
+12.0,92.0,62.0,7.0,258.0,27.6,0.9259999999999999,44.0,1
+1.0,102.0,74.0,0.0,0.0,39.5,0.293,42.0,1
+1.0,113.0,64.0,35.0,0.0,33.6,0.5429999999999999,21.0,1
+1.0,167.0,74.0,17.0,144.0,23.4,0.447,33.0,1
+2.0,128.0,78.0,37.0,182.0,43.3,1.224,31.0,1
+9.0,171.0,110.0,24.0,240.0,45.4,0.721,54.0,1
+10.0,125.0,70.0,26.0,115.0,31.1,0.205,41.0,1
+0.0,146.0,70.0,0.0,0.0,37.9,0.33399999999999996,28.0,1
+0.0,141.0,0.0,0.0,0.0,42.4,0.205,29.0,1
+2.0,197.0,70.0,99.0,0.0,34.7,0.575,62.0,1
+1.0,125.0,50.0,40.0,167.0,33.3,0.9620000000000001,28.0,1
+9.0,112.0,82.0,32.0,175.0,34.2,0.26,36.0,1
+1.0,180.0,0.0,0.0,0.0,43.3,0.282,41.0,1
+2.0,124.0,68.0,28.0,205.0,32.9,0.875,30.0,1
+1.0,168.0,88.0,29.0,0.0,35.0,0.905,52.0,1
+3.0,121.0,52.0,0.0,0.0,36.0,0.127,25.0,1
+8.0,100.0,74.0,40.0,215.0,39.4,0.6609999999999999,43.0,1
+7.0,160.0,54.0,32.0,175.0,30.5,0.588,39.0,1
+8.0,120.0,0.0,0.0,0.0,30.0,0.183,38.0,1
+3.0,124.0,80.0,33.0,130.0,33.2,0.305,26.0,0
+13.0,145.0,82.0,19.0,110.0,22.2,0.245,57.0,0
+1.0,71.0,78.0,50.0,45.0,33.2,0.42200000000000004,21.0,0
+6.0,151.0,62.0,31.0,120.0,35.5,0.6920000000000001,28.0,0
+3.0,108.0,62.0,24.0,0.0,26.0,0.223,25.0,0
+3.0,90.0,78.0,0.0,0.0,42.7,0.5589999999999999,21.0,0
+1.0,0.0,68.0,35.0,0.0,32.0,0.389,22.0,0
+13.0,76.0,60.0,0.0,0.0,32.8,0.18,41.0,0
+2.0,87.0,58.0,16.0,52.0,32.7,0.166,25.0,0
+0.0,67.0,76.0,0.0,0.0,45.3,0.19399999999999998,46.0,0
+5.0,108.0,72.0,43.0,75.0,36.1,0.263,33.0,0
+9.0,124.0,70.0,33.0,402.0,35.4,0.282,34.0,0
+2.0,105.0,75.0,0.0,0.0,23.3,0.56,53.0,0
+3.0,126.0,88.0,41.0,235.0,39.3,0.7040000000000001,27.0,0
+10.0,122.0,78.0,31.0,0.0,27.6,0.512,45.0,0
+13.0,106.0,70.0,0.0,0.0,34.2,0.251,52.0,0
+6.0,154.0,74.0,32.0,193.0,29.3,0.8390000000000001,39.0,0
+0.0,91.0,80.0,0.0,0.0,32.4,0.601,27.0,0
+5.0,88.0,78.0,30.0,0.0,27.6,0.258,37.0,0
+7.0,102.0,74.0,40.0,105.0,37.2,0.204,45.0,0
+3.0,88.0,58.0,11.0,54.0,24.8,0.267,22.0,0
+4.0,189.0,110.0,31.0,0.0,28.5,0.68,37.0,0
+1.0,90.0,62.0,12.0,43.0,27.2,0.58,24.0,0
+2.0,122.0,52.0,43.0,158.0,36.2,0.816,28.0,0
+1.0,103.0,30.0,38.0,83.0,43.3,0.183,33.0,0
+9.0,123.0,70.0,44.0,94.0,33.1,0.374,40.0,0
+2.0,101.0,58.0,17.0,265.0,24.2,0.614,23.0,0
+2.0,84.0,50.0,23.0,76.0,30.4,0.968,21.0,0
+6.0,103.0,66.0,0.0,0.0,24.3,0.249,29.0,0
+7.0,94.0,64.0,25.0,79.0,33.3,0.738,41.0,0
+0.0,93.0,60.0,25.0,92.0,28.7,0.532,22.0,0
+1.0,153.0,82.0,42.0,485.0,40.6,0.687,23.0,0
+10.0,101.0,76.0,48.0,180.0,32.9,0.171,63.0,0
+4.0,129.0,60.0,12.0,231.0,27.5,0.527,31.0,0
+0.0,161.0,50.0,0.0,0.0,21.9,0.254,65.0,0
+8.0,99.0,84.0,0.0,0.0,35.4,0.38799999999999996,50.0,0
+4.0,110.0,92.0,0.0,0.0,37.6,0.191,30.0,0
+0.0,106.0,70.0,37.0,148.0,39.4,0.605,22.0,0
+8.0,120.0,78.0,0.0,0.0,25.0,0.409,64.0,0
+0.0,99.0,0.0,0.0,0.0,25.0,0.253,22.0,0
+1.0,111.0,86.0,19.0,0.0,30.1,0.14300000000000002,23.0,0
+1.0,97.0,68.0,21.0,0.0,27.2,1.095,22.0,0
+1.0,97.0,64.0,19.0,82.0,18.2,0.299,21.0,0
+6.0,109.0,60.0,27.0,0.0,25.0,0.20600000000000002,27.0,0
+1.0,87.0,78.0,27.0,32.0,34.6,0.10099999999999999,22.0,0
+1.0,107.0,50.0,19.0,0.0,28.3,0.18100000000000002,29.0,0
+5.0,104.0,74.0,0.0,0.0,28.8,0.153,48.0,0
+3.0,84.0,72.0,32.0,0.0,37.2,0.267,28.0,0
+8.0,91.0,82.0,0.0,0.0,35.6,0.5870000000000001,68.0,0
+2.0,90.0,70.0,17.0,0.0,27.3,0.085,22.0,0
+3.0,173.0,82.0,48.0,465.0,38.4,2.137,25.0,1
+0.0,113.0,76.0,0.0,0.0,33.3,0.278,23.0,1
+4.0,111.0,72.0,47.0,207.0,37.1,1.39,56.0,1
+2.0,197.0,70.0,45.0,543.0,30.5,0.158,53.0,1
+8.0,105.0,100.0,36.0,0.0,43.3,0.239,45.0,1
+13.0,104.0,72.0,0.0,0.0,31.2,0.465,38.0,1
+8.0,196.0,76.0,29.0,280.0,37.5,0.605,57.0,1
+1.0,119.0,86.0,39.0,220.0,45.6,0.8079999999999999,29.0,1
+4.0,136.0,70.0,0.0,0.0,31.2,1.182,22.0,1
+5.0,0.0,80.0,32.0,0.0,41.0,0.34600000000000003,37.0,1
+1.0,181.0,64.0,30.0,180.0,34.1,0.32799999999999996,38.0,1
+8.0,151.0,78.0,32.0,210.0,42.9,0.516,36.0,1
+7.0,168.0,88.0,42.0,321.0,38.2,0.787,40.0,1
+4.0,95.0,64.0,0.0,0.0,32.0,0.161,31.0,1
+1.0,133.0,102.0,28.0,140.0,32.8,0.23399999999999999,45.0,1
+4.0,132.0,0.0,0.0,0.0,32.9,0.302,23.0,1
+15.0,136.0,70.0,32.0,110.0,37.1,0.153,43.0,1
+10.0,161.0,68.0,23.0,132.0,25.5,0.326,47.0,1
+2.0,100.0,66.0,20.0,90.0,32.9,0.867,28.0,1
+3.0,130.0,78.0,23.0,79.0,28.4,0.32299999999999995,34.0,1
+2.0,146.0,0.0,0.0,0.0,27.5,0.24,28.0,1
+5.0,130.0,82.0,0.0,0.0,39.1,0.956,37.0,1
+5.0,168.0,64.0,0.0,0.0,32.9,0.135,41.0,1
+5.0,136.0,84.0,41.0,88.0,35.0,0.28600000000000003,35.0,1
+1.0,128.0,88.0,39.0,110.0,36.5,1.057,37.0,1
+5.0,144.0,82.0,26.0,285.0,32.0,0.452,58.0,1
+6.0,134.0,80.0,37.0,370.0,46.2,0.23800000000000002,46.0,1
+0.0,147.0,85.0,54.0,0.0,42.8,0.375,24.0,0
+6.0,123.0,72.0,45.0,230.0,33.6,0.733,34.0,0
+0.0,84.0,64.0,22.0,66.0,35.8,0.545,21.0,0
+5.0,136.0,82.0,0.0,0.0,0.0,0.64,69.0,0
+0.0,134.0,58.0,20.0,291.0,26.4,0.35200000000000004,21.0,0
+9.0,120.0,72.0,22.0,56.0,20.8,0.733,48.0,0
+1.0,99.0,58.0,10.0,0.0,25.4,0.551,21.0,0
+10.0,94.0,72.0,18.0,0.0,23.1,0.595,56.0,0
+1.0,121.0,78.0,39.0,74.0,39.0,0.261,28.0,0
+10.0,179.0,70.0,0.0,0.0,35.1,0.2,37.0,0
+7.0,105.0,0.0,0.0,0.0,0.0,0.305,24.0,0
+1.0,193.0,50.0,16.0,375.0,25.9,0.655,24.0,0
+2.0,114.0,68.0,22.0,0.0,28.7,0.092,25.0,0
+5.0,95.0,72.0,33.0,0.0,37.7,0.37,27.0,0
+4.0,154.0,72.0,29.0,126.0,31.3,0.33799999999999997,37.0,0
+4.0,91.0,70.0,32.0,88.0,33.1,0.446,22.0,0
+1.0,116.0,78.0,29.0,180.0,36.1,0.496,25.0,0
+2.0,175.0,88.0,0.0,0.0,22.9,0.326,22.0,0
+6.0,105.0,80.0,28.0,0.0,32.5,0.878,26.0,0
+11.0,138.0,76.0,0.0,0.0,33.2,0.42,35.0,0
+4.0,151.0,90.0,38.0,0.0,29.7,0.294,36.0,0
+7.0,133.0,88.0,15.0,155.0,32.4,0.262,37.0,0
+1.0,112.0,80.0,45.0,132.0,34.8,0.217,24.0,0
+1.0,79.0,80.0,25.0,37.0,25.4,0.583,22.0,0
+1.0,87.0,68.0,34.0,77.0,37.6,0.401,24.0,0
+1.0,0.0,48.0,20.0,0.0,24.7,0.14,22.0,0
+3.0,123.0,100.0,35.0,240.0,57.3,0.88,22.0,0
+8.0,126.0,74.0,38.0,75.0,25.9,0.162,39.0,0
+0.0,137.0,84.0,27.0,0.0,27.3,0.231,59.0,0
+0.0,127.0,80.0,37.0,210.0,36.3,0.804,23.0,0
+10.0,68.0,106.0,23.0,49.0,35.5,0.285,47.0,0
+0.0,111.0,65.0,0.0,0.0,24.6,0.66,31.0,0
+5.0,106.0,82.0,30.0,0.0,39.5,0.28600000000000003,38.0,0
+1.0,105.0,58.0,0.0,0.0,24.3,0.187,21.0,0
+3.0,102.0,74.0,0.0,0.0,29.5,0.121,32.0,0
+8.0,126.0,88.0,36.0,108.0,38.5,0.349,49.0,0
+1.0,112.0,72.0,30.0,176.0,34.4,0.528,25.0,0
+1.0,80.0,74.0,11.0,60.0,30.0,0.527,22.0,0
+0.0,119.0,64.0,18.0,92.0,34.9,0.725,23.0,0
+2.0,99.0,60.0,17.0,160.0,36.6,0.45299999999999996,21.0,0
+1.0,116.0,70.0,28.0,0.0,27.4,0.204,21.0,0
+2.0,109.0,92.0,0.0,0.0,42.7,0.845,54.0,0
+0.0,95.0,64.0,39.0,105.0,44.6,0.366,22.0,0
+5.0,103.0,108.0,37.0,0.0,39.2,0.305,65.0,0
+7.0,83.0,78.0,26.0,71.0,29.3,0.767,36.0,0
+8.0,74.0,70.0,40.0,49.0,35.3,0.705,39.0,0
+1.0,89.0,24.0,19.0,25.0,27.8,0.5589999999999999,21.0,0
+3.0,142.0,80.0,15.0,0.0,32.4,0.2,63.0,0
+2.0,142.0,82.0,18.0,64.0,24.7,0.7609999999999999,21.0,0
+2.0,129.0,84.0,0.0,0.0,28.0,0.284,27.0,0
+9.0,145.0,80.0,46.0,130.0,37.9,0.637,40.0,1
+0.0,179.0,50.0,36.0,159.0,37.8,0.455,22.0,1
+0.0,151.0,90.0,46.0,0.0,42.1,0.371,21.0,1
+1.0,173.0,74.0,0.0,0.0,36.8,0.08800000000000001,38.0,1
+3.0,139.0,54.0,0.0,0.0,25.6,0.402,22.0,1
+6.0,190.0,92.0,0.0,0.0,35.5,0.278,66.0,1
+11.0,138.0,74.0,26.0,144.0,36.1,0.557,50.0,1
+7.0,152.0,88.0,44.0,0.0,50.0,0.337,36.0,1
+3.0,80.0,82.0,31.0,70.0,34.2,1.2919999999999998,27.0,1
+0.0,95.0,85.0,25.0,36.0,37.4,0.247,24.0,1
+0.0,129.0,110.0,46.0,130.0,67.1,0.319,26.0,1
+4.0,142.0,86.0,0.0,0.0,44.0,0.645,22.0,1
+8.0,108.0,70.0,0.0,0.0,30.5,0.955,33.0,1
+1.0,128.0,48.0,45.0,194.0,40.5,0.613,24.0,1
+3.0,132.0,80.0,0.0,0.0,34.4,0.402,44.0,1
+9.0,145.0,88.0,34.0,165.0,30.3,0.7709999999999999,53.0,1
+7.0,147.0,76.0,0.0,0.0,39.4,0.257,43.0,1
+0.0,124.0,70.0,20.0,0.0,27.4,0.254,36.0,1
+3.0,193.0,70.0,31.0,0.0,34.9,0.24100000000000002,25.0,1
+3.0,163.0,70.0,18.0,105.0,31.6,0.268,28.0,1
+12.0,151.0,70.0,40.0,271.0,41.8,0.742,38.0,1
+1.0,128.0,98.0,41.0,58.0,32.0,1.321,33.0,1
+1.0,181.0,78.0,42.0,293.0,40.0,1.258,22.0,1
+0.0,177.0,60.0,29.0,478.0,34.6,1.072,21.0,1
+1.0,122.0,90.0,51.0,220.0,49.7,0.325,31.0,1
+1.0,189.0,60.0,23.0,846.0,30.1,0.39799999999999996,59.0,1
+11.0,111.0,84.0,40.0,0.0,46.8,0.925,45.0,1
+3.0,120.0,70.0,30.0,135.0,42.9,0.452,30.0,0
+12.0,100.0,84.0,33.0,105.0,30.0,0.488,46.0,0
+1.0,71.0,48.0,18.0,76.0,20.4,0.32299999999999995,22.0,0
+3.0,87.0,60.0,18.0,0.0,21.8,0.444,21.0,0
+2.0,107.0,74.0,30.0,100.0,33.6,0.40399999999999997,23.0,0
+6.0,80.0,80.0,36.0,0.0,39.8,0.177,28.0,0
+1.0,118.0,58.0,36.0,94.0,33.3,0.261,23.0,0
+0.0,73.0,0.0,0.0,0.0,21.1,0.342,25.0,0
+1.0,88.0,78.0,29.0,76.0,32.0,0.365,29.0,0
+3.0,80.0,0.0,0.0,0.0,0.0,0.174,22.0,0
+1.0,107.0,68.0,19.0,0.0,26.5,0.165,24.0,0
+3.0,89.0,74.0,16.0,85.0,30.4,0.551,38.0,0
+5.0,123.0,74.0,40.0,77.0,34.1,0.26899999999999996,28.0,0
+0.0,97.0,64.0,36.0,100.0,36.8,0.6,25.0,0
+3.0,78.0,70.0,0.0,0.0,32.5,0.27,39.0,0
+0.0,107.0,76.0,0.0,0.0,45.3,0.6859999999999999,24.0,0
+6.0,92.0,62.0,32.0,126.0,32.0,0.085,46.0,0
+1.0,101.0,50.0,15.0,36.0,24.2,0.526,26.0,0
+6.0,114.0,88.0,0.0,0.0,27.8,0.247,66.0,0
+0.0,165.0,90.0,33.0,680.0,52.3,0.4270000000000001,23.0,0
+1.0,109.0,56.0,21.0,135.0,25.2,0.833,23.0,0
+2.0,157.0,74.0,35.0,440.0,39.4,0.134,30.0,0
+1.0,124.0,74.0,36.0,0.0,27.8,0.1,30.0,0
+2.0,96.0,68.0,13.0,49.0,21.1,0.647,26.0,0
+3.0,61.0,82.0,28.0,0.0,34.4,0.243,46.0,0
+1.0,130.0,60.0,23.0,170.0,28.6,0.6920000000000001,21.0,0
+4.0,83.0,86.0,19.0,0.0,29.3,0.317,34.0,0
+1.0,114.0,66.0,36.0,200.0,38.1,0.289,21.0,0
+2.0,92.0,52.0,0.0,0.0,30.1,0.141,22.0,0
+2.0,108.0,52.0,26.0,63.0,32.5,0.318,22.0,0
+6.0,93.0,50.0,30.0,64.0,28.7,0.35600000000000004,23.0,0
+2.0,111.0,60.0,0.0,0.0,26.2,0.34299999999999997,23.0,0
+1.0,138.0,82.0,0.0,0.0,40.1,0.23600000000000002,28.0,0
+1.0,88.0,62.0,24.0,44.0,29.9,0.42200000000000004,23.0,0
+3.0,99.0,80.0,11.0,64.0,19.3,0.284,30.0,0
+5.0,86.0,68.0,28.0,71.0,30.2,0.364,24.0,0
+4.0,197.0,70.0,39.0,744.0,36.7,2.329,31.0,0
+2.0,123.0,48.0,32.0,165.0,42.1,0.52,26.0,0
+10.0,122.0,68.0,0.0,0.0,31.2,0.258,41.0,0
+0.0,139.0,62.0,17.0,210.0,22.1,0.207,21.0,0
+1.0,103.0,80.0,11.0,82.0,19.4,0.491,22.0,0
+8.0,100.0,76.0,0.0,0.0,38.7,0.19,42.0,0
+2.0,121.0,70.0,32.0,95.0,39.1,0.8859999999999999,23.0,0
+2.0,146.0,76.0,35.0,194.0,38.2,0.32899999999999996,29.0,0
+0.0,86.0,68.0,32.0,0.0,35.8,0.23800000000000002,25.0,0
+8.0,118.0,72.0,19.0,0.0,23.1,1.476,46.0,0
+4.0,122.0,68.0,0.0,0.0,35.0,0.39399999999999996,29.0,0
+0.0,94.0,70.0,27.0,115.0,43.5,0.34700000000000003,21.0,0
+7.0,159.0,64.0,0.0,0.0,27.4,0.294,40.0,0
+5.0,121.0,72.0,23.0,112.0,26.2,0.245,30.0,0
+5.0,116.0,74.0,29.0,0.0,32.3,0.66,35.0,1
+8.0,179.0,72.0,42.0,130.0,32.7,0.7190000000000001,36.0,1
+5.0,124.0,74.0,0.0,0.0,34.0,0.22,38.0,1
+0.0,128.0,68.0,19.0,180.0,30.5,1.391,25.0,1
+2.0,90.0,68.0,42.0,0.0,38.2,0.503,27.0,1
+3.0,170.0,64.0,37.0,225.0,34.5,0.35600000000000004,30.0,1
+12.0,140.0,82.0,43.0,325.0,39.2,0.528,58.0,1
+0.0,162.0,76.0,56.0,100.0,53.2,0.759,25.0,1
+7.0,106.0,60.0,24.0,0.0,26.5,0.29600000000000004,29.0,1
+6.0,125.0,78.0,31.0,0.0,27.6,0.565,49.0,1
+7.0,195.0,70.0,33.0,145.0,25.1,0.163,55.0,1
+4.0,146.0,92.0,0.0,0.0,31.2,0.539,61.0,1
+0.0,180.0,78.0,63.0,14.0,59.4,2.42,25.0,1
+13.0,158.0,114.0,0.0,0.0,42.3,0.257,44.0,1
+9.0,170.0,74.0,31.0,0.0,44.0,0.40299999999999997,43.0,1
+8.0,109.0,76.0,39.0,114.0,27.9,0.64,31.0,1
+1.0,147.0,94.0,41.0,0.0,49.3,0.358,27.0,1
+3.0,112.0,74.0,30.0,0.0,31.6,0.19699999999999998,25.0,1
+3.0,78.0,50.0,32.0,88.0,31.0,0.248,26.0,1
+9.0,130.0,70.0,0.0,0.0,34.2,0.652,45.0,1
+7.0,194.0,68.0,28.0,0.0,35.9,0.745,41.0,1
+4.0,148.0,60.0,27.0,318.0,30.9,0.15,29.0,1
+1.0,144.0,82.0,46.0,180.0,46.1,0.335,46.0,1
+5.0,166.0,72.0,19.0,175.0,25.8,0.5870000000000001,51.0,1
+2.0,144.0,58.0,33.0,135.0,31.6,0.42200000000000004,25.0,1
+3.0,158.0,76.0,36.0,245.0,31.6,0.851,28.0,1
+0.0,105.0,68.0,22.0,0.0,20.0,0.23600000000000002,22.0,0
+4.0,144.0,58.0,28.0,140.0,29.5,0.287,37.0,0
+1.0,95.0,60.0,18.0,58.0,23.9,0.26,22.0,0
+1.0,100.0,66.0,29.0,196.0,32.0,0.444,42.0,0
+5.0,111.0,72.0,28.0,0.0,23.9,0.40700000000000003,27.0,0
+2.0,108.0,62.0,32.0,56.0,25.2,0.128,21.0,0
+2.0,56.0,56.0,28.0,45.0,24.2,0.332,22.0,0
+1.0,84.0,64.0,23.0,115.0,36.9,0.47100000000000003,28.0,0
+5.0,44.0,62.0,0.0,0.0,25.0,0.5870000000000001,36.0,0
+0.0,135.0,94.0,46.0,145.0,40.6,0.284,26.0,0
+6.0,98.0,58.0,33.0,190.0,34.0,0.43,43.0,0
+2.0,129.0,74.0,26.0,205.0,33.2,0.591,25.0,0
+3.0,103.0,72.0,30.0,152.0,27.6,0.73,27.0,0
+1.0,82.0,64.0,13.0,95.0,21.2,0.415,23.0,0
+0.0,137.0,70.0,38.0,0.0,33.2,0.17,22.0,0
+1.0,140.0,74.0,26.0,180.0,24.1,0.828,23.0,0
+5.0,158.0,70.0,0.0,0.0,29.8,0.207,63.0,0
+4.0,97.0,60.0,23.0,0.0,28.2,0.44299999999999995,22.0,0
+2.0,84.0,0.0,0.0,0.0,0.0,0.304,21.0,0
+1.0,106.0,76.0,0.0,0.0,37.5,0.19699999999999998,26.0,0
+0.0,146.0,82.0,0.0,0.0,40.5,1.781,44.0,0
+1.0,86.0,66.0,52.0,65.0,41.3,0.917,29.0,0
+5.0,78.0,48.0,0.0,0.0,33.7,0.654,25.0,0
+0.0,119.0,66.0,27.0,0.0,38.8,0.259,22.0,0
+1.0,117.0,60.0,23.0,106.0,33.8,0.466,27.0,0
+2.0,90.0,80.0,14.0,55.0,24.4,0.249,24.0,0
+5.0,117.0,92.0,0.0,0.0,34.1,0.337,38.0,0
+5.0,155.0,84.0,44.0,545.0,38.7,0.619,34.0,0
+3.0,180.0,64.0,25.0,70.0,34.0,0.271,26.0,0
+7.0,114.0,76.0,17.0,110.0,23.8,0.466,31.0,0
+5.0,114.0,74.0,0.0,0.0,24.9,0.7440000000000001,57.0,0
+6.0,103.0,72.0,32.0,190.0,37.7,0.324,55.0,0
+4.0,96.0,56.0,17.0,49.0,20.8,0.34,26.0,0
+9.0,57.0,80.0,37.0,0.0,32.8,0.096,41.0,0
+2.0,112.0,78.0,50.0,140.0,39.4,0.175,24.0,0
+2.0,95.0,54.0,14.0,88.0,26.1,0.748,22.0,0
+4.0,114.0,64.0,0.0,0.0,28.9,0.126,24.0,0
+1.0,92.0,62.0,25.0,41.0,19.5,0.48200000000000004,25.0,0
+4.0,90.0,88.0,47.0,54.0,37.7,0.36200000000000004,29.0,0
+0.0,129.0,80.0,0.0,0.0,31.2,0.703,29.0,0
+8.0,107.0,80.0,0.0,0.0,24.6,0.856,34.0,0
+1.0,106.0,70.0,28.0,135.0,34.2,0.142,22.0,0
+1.0,87.0,60.0,37.0,75.0,37.2,0.509,22.0,0
+3.0,191.0,68.0,15.0,130.0,30.9,0.299,34.0,0
+1.0,89.0,66.0,23.0,94.0,28.1,0.16699999999999998,21.0,0
+5.0,96.0,74.0,18.0,67.0,33.6,0.997,43.0,0
+8.0,84.0,74.0,31.0,0.0,38.3,0.457,39.0,0
+9.0,154.0,78.0,30.0,100.0,30.9,0.16399999999999998,45.0,0
+6.0,87.0,80.0,0.0,0.0,23.2,0.084,32.0,0
+0.0,105.0,90.0,0.0,0.0,29.6,0.19699999999999998,46.0,0
+4.0,125.0,70.0,18.0,122.0,28.9,1.1440000000000001,45.0,1
+4.0,156.0,75.0,0.0,0.0,48.3,0.23800000000000002,32.0,1
+0.0,180.0,90.0,26.0,90.0,36.5,0.314,35.0,1
+1.0,163.0,72.0,0.0,0.0,39.0,1.222,33.0,1
+2.0,158.0,90.0,0.0,0.0,31.6,0.805,66.0,1
+5.0,97.0,76.0,27.0,0.0,35.6,0.37799999999999995,52.0,1
+8.0,125.0,96.0,0.0,0.0,0.0,0.23199999999999998,54.0,1
+1.0,95.0,82.0,25.0,180.0,35.0,0.233,43.0,1
+4.0,134.0,72.0,0.0,0.0,23.8,0.27699999999999997,60.0,1
+4.0,144.0,82.0,32.0,0.0,38.5,0.5539999999999999,37.0,1
+4.0,173.0,70.0,14.0,168.0,29.7,0.361,33.0,1
+0.0,105.0,84.0,0.0,0.0,27.9,0.741,62.0,1
+10.0,129.0,62.0,36.0,0.0,41.2,0.441,38.0,1
+1.0,199.0,76.0,43.0,0.0,42.9,1.3940000000000001,22.0,1
+0.0,109.0,88.0,30.0,0.0,32.5,0.855,38.0,1
+7.0,196.0,90.0,0.0,0.0,39.8,0.451,41.0,1
+7.0,159.0,66.0,0.0,0.0,30.4,0.38299999999999995,36.0,1
+1.0,115.0,70.0,30.0,96.0,34.6,0.529,32.0,1
+1.0,172.0,68.0,49.0,579.0,42.4,0.7020000000000001,28.0,1
+11.0,120.0,80.0,37.0,150.0,42.3,0.785,48.0,1
+2.0,134.0,70.0,0.0,0.0,28.9,0.542,23.0,1
+6.0,148.0,72.0,35.0,0.0,33.6,0.627,50.0,1
+1.0,126.0,60.0,0.0,0.0,30.1,0.349,47.0,1
+7.0,187.0,68.0,39.0,304.0,37.7,0.254,41.0,1
+9.0,119.0,80.0,35.0,0.0,29.0,0.263,29.0,1
+6.0,115.0,60.0,39.0,0.0,33.7,0.245,40.0,1
+7.0,136.0,74.0,26.0,135.0,26.0,0.647,51.0,0
+0.0,120.0,74.0,18.0,63.0,30.5,0.285,26.0,0
+5.0,116.0,74.0,0.0,0.0,25.6,0.201,30.0,0
+4.0,128.0,70.0,0.0,0.0,34.3,0.303,24.0,0
+6.0,96.0,0.0,0.0,0.0,23.7,0.19,28.0,0
+2.0,127.0,46.0,21.0,335.0,34.4,0.17600000000000002,22.0,0
+4.0,76.0,62.0,0.0,0.0,34.0,0.391,25.0,0
+3.0,96.0,56.0,34.0,115.0,24.7,0.9440000000000001,39.0,0
+6.0,137.0,61.0,0.0,0.0,24.2,0.151,55.0,0
+3.0,111.0,58.0,31.0,44.0,29.5,0.43,22.0,0
+2.0,81.0,60.0,22.0,0.0,27.7,0.29,25.0,0
+1.0,77.0,56.0,30.0,56.0,33.3,1.251,24.0,0
+3.0,111.0,62.0,0.0,0.0,22.6,0.142,21.0,0
+6.0,166.0,74.0,0.0,0.0,26.6,0.304,66.0,0
+1.0,143.0,86.0,30.0,330.0,30.1,0.892,23.0,0
+0.0,107.0,60.0,25.0,0.0,26.4,0.133,23.0,0
+2.0,99.0,70.0,16.0,44.0,20.4,0.235,27.0,0
+2.0,100.0,68.0,25.0,71.0,38.5,0.324,26.0,0
+2.0,120.0,54.0,0.0,0.0,26.8,0.455,27.0,0
+1.0,111.0,94.0,0.0,0.0,32.8,0.265,45.0,0
+6.0,108.0,44.0,20.0,130.0,24.0,0.813,35.0,0
+3.0,113.0,50.0,10.0,85.0,29.5,0.626,25.0,0
+4.0,141.0,74.0,0.0,0.0,27.6,0.244,40.0,0
+2.0,99.0,0.0,0.0,0.0,22.2,0.10800000000000001,23.0,0
+8.0,85.0,55.0,20.0,0.0,24.4,0.136,42.0,0
+1.0,89.0,76.0,34.0,37.0,31.2,0.192,23.0,0
+1.0,109.0,58.0,18.0,116.0,28.5,0.21899999999999997,22.0,0
+1.0,93.0,70.0,31.0,0.0,30.4,0.315,23.0,0
+12.0,140.0,85.0,33.0,0.0,37.4,0.244,41.0,0
+1.0,80.0,55.0,0.0,0.0,19.1,0.258,21.0,0
+4.0,99.0,72.0,17.0,0.0,25.6,0.294,28.0,0
+1.0,109.0,60.0,8.0,182.0,25.4,0.9470000000000001,21.0,0
+3.0,113.0,44.0,13.0,0.0,22.4,0.14,22.0,0
+0.0,95.0,80.0,45.0,92.0,36.5,0.33,26.0,0
+4.0,123.0,80.0,15.0,176.0,32.0,0.44299999999999995,34.0,0
+2.0,112.0,75.0,32.0,0.0,35.7,0.14800000000000002,21.0,0
+2.0,92.0,62.0,28.0,0.0,31.6,0.13,24.0,0
+1.0,144.0,82.0,40.0,0.0,41.3,0.607,28.0,0
+6.0,91.0,0.0,0.0,0.0,29.8,0.501,31.0,0
+0.0,124.0,56.0,13.0,105.0,21.8,0.452,21.0,0
+5.0,132.0,80.0,0.0,0.0,26.8,0.18600000000000005,69.0,0
+9.0,91.0,68.0,0.0,0.0,24.2,0.2,58.0,0
+3.0,128.0,78.0,0.0,0.0,21.1,0.268,55.0,0
+0.0,108.0,68.0,20.0,0.0,27.3,0.787,32.0,0
+2.0,112.0,68.0,22.0,94.0,34.1,0.315,26.0,0
+1.0,81.0,74.0,41.0,57.0,46.3,1.0959999999999999,32.0,0
+4.0,94.0,65.0,22.0,0.0,24.7,0.14800000000000002,21.0,0
+3.0,158.0,64.0,13.0,387.0,31.2,0.295,24.0,0
+0.0,57.0,60.0,0.0,0.0,21.7,0.735,67.0,0
+4.0,95.0,60.0,32.0,0.0,35.4,0.284,28.0,0
diff --git a/pages/RFxp/xrf/__init__.py b/pages/RFxp/xrf/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..9f52257095bc5c4ad22ff8810d2db39830109b31
--- /dev/null
+++ b/pages/RFxp/xrf/__init__.py
@@ -0,0 +1,3 @@
+#from .tree import *
+from .rndmforest import *
+from .xforest import *
\ No newline at end of file
diff --git a/pages/RFxp/xrf/rndmforest.py b/pages/RFxp/xrf/rndmforest.py
new file mode 100644
index 0000000000000000000000000000000000000000..62dd80f373fc971f0414fbe825c0058d6f6149c2
--- /dev/null
+++ b/pages/RFxp/xrf/rndmforest.py
@@ -0,0 +1,137 @@
+from sklearn.ensemble._voting import VotingClassifier
+from sklearn.ensemble import RandomForestClassifier
+from sklearn.preprocessing import OneHotEncoder, LabelEncoder
+from sklearn.model_selection import train_test_split
+from sklearn.metrics import accuracy_score
+import numpy as np
+import sys
+import os
+import resource
+
+import collections
+from itertools import combinations
+from six.moves import range
+import six
+import math
+
+
+
+#
+#==============================================================================
+class VotingRF(VotingClassifier):
+    """
+        Majority rule classifier
+
+        Thin wrapper around sklearn's VotingClassifier that reuses a set of
+        ALREADY-FITTED estimators and combines them by hard majority voting.
+    """
+    
+    def fit(self, X, y, sample_weight=None):
+        # Unlike VotingClassifier.fit, this does NOT retrain the base
+        # estimators: it only registers the (already fitted) trees given in
+        # `self.estimators` and fits the label encoder on y.
+        self.estimators_ = []
+        for _, est in self.estimators:
+            self.estimators_.append(est)
+            
+        self.le_ = LabelEncoder().fit(y)
+        self.classes_ = self.le_.classes_   
+        
+            
+    def predict(self, X):
+        """Predict class labels for X.
+        Parameters
+        ----------
+        X : {array-like, sparse matrix} of shape (n_samples, n_features)
+            The input samples.
+        Returns
+        -------
+        maj : array-like of shape (n_samples,)
+            Predicted class labels.
+        """
+        #check_is_fitted(self)
+        
+        # 'hard' voting: collect each base tree's predicted class per sample
+        predictions = self._predict(X)
+        # np.bincount requires non-negative integers; the cast assumes the
+        # encoded class labels are representable as int64 -- TODO confirm
+        predictions =  np.asarray(predictions, np.int64) #NEED TO BE CHECKED
+        # per-sample majority: most frequent (weighted) class index
+        maj = np.apply_along_axis(
+            lambda x: np.argmax(
+                np.bincount(x, weights=self._weights_not_none)),
+            axis=1, arr=predictions)
+   
+        # map majority class indices back to the original label space
+        maj = self.le_.inverse_transform(maj)
+
+        return maj
+    
+        
+#
+#==============================================================================
+class RF2001(object):
+    """
+        The main class to train Random Forest Classifier (RFC).
+    """
+
+    def __init__(self, **options):
+        """
+            Constructor.
+        """    
+        self.forest = None
+        self.voting = None
+              
+        param_dist = {'n_estimators':options['n_trees'],
+                      'max_depth':options['depth'],
+                      'criterion':'entropy',
+                      'random_state':324089}
+        
+        self.forest = RandomForestClassifier(**param_dist)
+        
+    def fit(self, X_train, y_train):
+        """
+            building Breiman'01 Random Forest 
+            (similar to train(dataset) fnc) 
+        """
+        self.forest.fit(X_train,y_train)
+        rtrees = [ ('dt', dt) for i, dt in enumerate(self.forest.estimators_)]
+        self.voting = VotingRF(estimators=rtrees)
+        self.voting.fit(X_train,y_train)
+        
+        return self
+        
+        
+    def train(self, dataset, verb=0):
+        """
+            Train a random forest.
+        """
+        
+        X_train, X_test, y_train, y_test = dataset.train_test_split()
+            
+        X_train = dataset.transform(X_train)
+        X_test = dataset.transform(X_test)
+        
+        print("Build a random forest.")
+        self.forest.fit(X_train,y_train)
+        
+        rtrees = [ ('dt', dt) for i, dt in enumerate(self.forest.estimators_)]
+        self.voting = VotingRF(estimators=rtrees)
+        self.voting.fit(X_train,y_train)
+        
+        train_acc = accuracy_score(self.predict(X_train), y_train)
+        test_acc = accuracy_score(self.predict(X_test), y_test)
+
+        if verb > 1:
+            self.print_acc_vote(X_train, X_test, y_train, y_test)
+            self.print_acc_prob(X_train, X_test, y_train, y_test)
+        
+        return train_acc, test_acc
+    
+    def predict(self, X):
+        return self.voting.predict(X)
+    
+    def predict_prob(self, X):
+        self.forest.predict(X)
+        
+    def estimators(self):
+        assert(self.forest.estimators_ is not None)
+        return self.forest.estimators_
+        
+    def n_estimators(self):
+        return self.forest.n_estimators
+    
+    def print_accuracy(self, X_test, y_test):  
+        test_acc = accuracy_score(self.predict(X_test), y_test)
+        print("c Model accuracy: {0:.2f}".format(100. * test_acc))
+        #print("----------------------")  
\ No newline at end of file
diff --git a/pages/RFxp/xrf/tree.py b/pages/RFxp/xrf/tree.py
new file mode 100644
index 0000000000000000000000000000000000000000..5fddabd0dc27b0be6672903cbdc6085fbbcaf898
--- /dev/null
+++ b/pages/RFxp/xrf/tree.py
@@ -0,0 +1,174 @@
+#
+#==============================================================================
+from anytree import Node, RenderTree,AsciiStyle
+import json
+import numpy as np
+import math
+import os
+
+
+#
+#==============================================================================
+class dt_node(Node):
+    # A single node of a binary decision tree, backed by anytree.Node so
+    # that parent/children/depth bookkeeping comes for free.
+    def __init__(self, id, parent = None):
+        Node.__init__(self, id, parent)
+        self.id = id  # The node value
+        self.name = None  # optional human-readable feature name
+        self.left_node_id = -1   #  Left child
+        self.right_node_id = -1  # Right child
+
+        self.feature = -1  # index of the feature tested here (-1 for leaves)
+        self.threshold = None  # split threshold (None for leaves)
+        self.values = -1 
+        #iai
+        #self.split = None
+
+    def __str__(self):
+        # Indent by depth; leaves show their predicted value, internal
+        # nodes show the split test "feature < threshold".
+        pref = ' ' * self.depth
+        if (len(self.children) == 0):
+            return (pref+ "leaf: {}  {}".format(self.id, self.values))
+        else:
+            if(self.name is None):
+                return (pref+ "{} f{}<{}".format(self.id, self.feature, self.threshold))
+            else:
+                return (pref+ "{} \"{}\"<{}".format(self.id, self.name, self.threshold))
+
+
+#==============================================================================
+def build_tree(tree_, feature_names = None):
+    ##  
+    # Convert a fitted sklearn `tree_` structure (parallel arrays indexed
+    # by node id) into a dt_node tree; returns the root dt_node.
+    feature = tree_.feature
+    threshold = tree_.threshold
+    values = tree_.value
+    n_nodes = tree_.node_count
+    children_left = tree_.children_left
+    children_right = tree_.children_right
+    node_depth = np.zeros(shape=n_nodes, dtype=np.int64)
+    is_leaf = np.zeros(shape=n_nodes, dtype=bool)
+    # DFS over the arrays to mark leaves (sklearn encodes a leaf as a node
+    # whose left and right child ids are equal).
+    stack = [(0, -1)]  # seed is the root node id and its parent depth
+    while len(stack) > 0:
+        node_id, parent_depth = stack.pop()
+        node_depth[node_id] = parent_depth + 1
+    
+        # If we have a test node
+        if (children_left[node_id] != children_right[node_id]):
+            stack.append((children_left[node_id], parent_depth + 1))
+            stack.append((children_right[node_id], parent_depth + 1))
+        else:
+            is_leaf[node_id] = True    
+    ##        
+    
+    m = tree_.node_count  
+    assert (m > 0), "Empty tree"
+    
+    def extract_data(idx, root = None, feature_names = None):
+        # Recursively build the dt_node for node `idx`, attaching it to
+        # `root` (its parent) when given.
+        i = idx
+        assert (i < m), "Error index node"
+        if (root is None):
+            node = dt_node(i)
+        else:
+            node = dt_node(i, parent = root)
+        #node.cover = json_node["cover"]
+        if is_leaf[i]:
+            # leaf: store the majority class index of this node
+            node.values = np.argmax(values[i])
+            #if(inverse):
+            #    node.values = -node.values
+        else:
+            node.feature = feature[i]
+            if (feature_names is not None):
+                node.name = feature_names[feature[i]]
+            node.threshold = threshold[i]
+            node.left_node_id = children_left[i]
+            node.right_node_id = children_right[i]
+            extract_data(node.left_node_id, node, feature_names) #feat < threshold ( < 0.5 False)
+            extract_data(node.right_node_id, node, feature_names) #feat >= threshold ( >= 0.5 True)            
+
+        return node
+    
+    root = extract_data(0, None, feature_names)
+    
+    return root
+
+
+#==============================================================================
+def walk_tree(node):
+    # Pre-order print of the (binary) subtree rooted at `node`,
+    # relying on dt_node.__str__ for per-node formatting.
+    if (len(node.children) == 0):
+        # leaf
+        print(node)
+    else:
+        print(node)
+        walk_tree(node.children[0])
+        walk_tree(node.children[1])
+
+def count_nodes(root):
+    # Return the total number of nodes in the tree rooted at `root`
+    # (including `root` itself).
+    def count(node):
+        # number of descendants of `node` (excluding `node`)
+        if len(node.children):
+            return sum([1+count(n) for n in node.children])
+        else:
+            return 0
+    m = count(root) + 1
+    return m
+
+#
+#==============================================================================
+def predict_tree(node, sample):
+    # Route `sample` (indexable by feature id) down the tree rooted at
+    # `node` and return the reached leaf's class index.
+    if (len(node.children) == 0):
+        # leaf
+        return node.values
+    else:
+        feature_branch = node.feature
+        sample_value = sample[feature_branch]
+        assert(sample_value is not None)
+        # convention matches build_tree: left = (feat < threshold),
+        # right = (feat >= threshold)
+        if(sample_value < node.threshold):
+            return predict_tree(node.children[0], sample)
+        else:
+            return predict_tree(node.children[1], sample)
+
+            
+#
+#==============================================================================
+class Forest:
+    """ An ensemble of decision trees.
+
+    This object provides a common interface to many different types of models.
+    """
+    def __init__(self, rf, feature_names = None):
+        #self.rf = rf
+        # Mirror each sklearn tree of `rf` as a dt_node tree.
+        self.trees = [ build_tree(dt.tree_, feature_names) for dt in rf.estimators()]
+        self.sz = sum([dt.tree_.node_count for dt in rf.estimators()])  # total node count
+        self.md = max([dt.tree_.max_depth for dt in rf.estimators()])  # deepest tree
+        ####
+        nb_nodes = [dt.tree_.node_count for dt in rf.estimators()]
+        print("min: {0} | max: {1}".format(min(nb_nodes), max(nb_nodes)))
+        # sanity check: rebuilt trees match sklearn's node counts exactly
+        assert([dt.tree_.node_count for dt in rf.estimators()] == [count_nodes(dt) for dt in self.trees])
+        #self.print_trees()
+        
+    def print_trees(self):
+        # Dump every tree of the forest (debug helper).
+        for i,t in enumerate(self.trees):
+            print("tree number: ", i)
+            walk_tree(t)
+
+    def predict_inst(self, inst):
+        # Hard majority vote of all trees on a single instance;
+        # returns the winning class index.
+        scores = [predict_tree(dt, inst) for dt in self.trees]
+        scores = np.asarray(scores)
+        maj = np.argmax(np.bincount(scores))
+        return maj
+        
+        
+    def predict(self, samples):       
+        # Hard majority vote of all trees on a batch of samples;
+        # returns an array of winning class indices, one per sample.
+        predictions = []
+        print("#Trees: ", len(self.trees))
+        for sample in np.asarray(samples):
+            scores = []
+            for i,t in enumerate(self.trees):
+                s = predict_tree(t, sample)
+                scores.append((s))
+            scores = np.asarray(scores)
+            predictions.append(scores)
+        predictions = np.asarray(predictions)    
+        #print(predictions)    
+        #np.bincount(x, weights=self._weights_not_none)
+        maj = np.apply_along_axis(lambda x: np.argmax(np.bincount(x)), axis=1, arr=predictions)
+            
+        return maj   
+
diff --git a/pages/RFxp/xrf/xforest.py b/pages/RFxp/xrf/xforest.py
new file mode 100644
index 0000000000000000000000000000000000000000..b2bc978ce683396d578b5db3859de272640139c5
--- /dev/null
+++ b/pages/RFxp/xrf/xforest.py
@@ -0,0 +1,874 @@
+
+#from sklearn.ensemble._voting import VotingClassifier
+#from sklearn.ensemble import RandomForestClassifier
+from sklearn.preprocessing import OneHotEncoder, LabelEncoder
+from sklearn.model_selection import train_test_split
+#from sklearn.metrics import accuracy_score
+import numpy as np
+import sys
+import os
+import resource
+
+import collections
+from itertools import combinations
+from six.moves import range
+import six
+import math
+
+from data import Data
+from .rndmforest import RF2001, VotingRF
+from .tree import Forest, predict_tree
+
+#from .encode import SATEncoder
+from pysat.formula import CNF, WCNF, IDPool
+from pysat.solvers import Solver
+from pysat.card import CardEnc, EncType
+from pysat.examples.lbx import LBX
+from pysat.examples.mcsls import MCSls
+from pysat.examples.rc2 import RC2
+
+
+    
+
+#
+#==============================================================================
class Dataset(Data):
    """
        Class for representing dataset (transactions).

        Wraps the project-level ``Data`` loader and exposes the samples as
        numpy arrays ``X`` (features) and ``y`` (labels), together with the
        feature mapping consumed by the SAT encoder.
    """
    def __init__(self, filename=None, fpointer=None, mapfile=None,
            separator=' ', use_categorical = False):
        super().__init__(filename, fpointer, mapfile, separator, use_categorical)
        
        # split data into X and y
        self.feature_names = self.names[:-1]
        self.nb_features = len(self.feature_names)
        self.use_categorical = use_categorical
        
        samples = np.asarray(self.samps)
        # If the label column is not purely numeric, map labels to integers.
        if not all(c.isnumeric() for c in samples[:, -1]):
            le = LabelEncoder()
            le.fit(samples[:, -1])
            samples[:, -1]= le.transform(samples[:, -1])
            # NOTE(review): class_names is only assigned on this branch, but
            # the categorical branch below reads it unconditionally — confirm
            # that numeric-label datasets are never used with use_categorical.
            self.class_names = le.classes_
            print(le.classes_)
            print(samples[1:4, :])
        
        samples = np.asarray(samples, dtype=np.float32)
        self.X = samples[:, 0: self.nb_features]
        self.y = samples[:, self.nb_features]
        self.num_class = len(set(self.y))
        self.target_name = list(range(self.num_class))
        
        print("c nof features: {0}".format(self.nb_features))
        print("c nof classes: {0}".format(self.num_class))
        print("c nof samples: {0}".format(len(self.samps)))
        
        # check if we have info about categorical features
        if (self.use_categorical):
            self.target_name = self.class_names
            
            # One OneHotEncoder per categorical feature, fitted on its column.
            # NOTE(review): `sparse=False` was renamed to `sparse_output` in
            # scikit-learn >= 1.2 — confirm the pinned sklearn version.
            self.binarizer = {}
            for i in self.categorical_features:
                self.binarizer.update({i: OneHotEncoder(categories='auto', sparse=False)})#,
                self.binarizer[i].fit(self.X[:,[i]])
        else:
            self.categorical_features = []
            self.categorical_names = []
            self.binarizer = []
        #feat map
        self.mapping_features()
        
        
            
    def train_test_split(self, test_size=0.2, seed=0):
        """Split X/y into train and test parts (thin sklearn wrapper)."""
        return train_test_split(self.X, self.y, test_size=test_size, random_state=seed)
           

    def transform(self, x):
        """
            One-hot encode the categorical columns of ``x``; numerical
            columns pass through unchanged. A no-op when the dataset is
            not categorical. A 1-D input is promoted to a batch of one.
        """
        if(len(x) == 0):
            return x
        if (len(x.shape) == 1):
            x = np.expand_dims(x, axis=0)
        if (self.use_categorical):
            assert(self.binarizer != [])
            tx = []
            for i in range(self.nb_features):
                #self.binarizer[i].drop = None
                if (i in self.categorical_features):
                    self.binarizer[i].drop = None
                    tx_aux = self.binarizer[i].transform(x[:,[i]])
                    tx_aux = np.vstack(tx_aux)
                    tx.append(tx_aux)
                else:
                    tx.append(x[:,[i]])
            tx = np.hstack(tx)
            return tx
        else:
            return x

    def transform_inverse(self, x):
        """
            Inverse of :meth:`transform`: collapse one-hot groups back to
            category values, consuming ``xi`` left-to-right per feature.
        """
        if(len(x) == 0):
            return x
        if (len(x.shape) == 1):
            x = np.expand_dims(x, axis=0)
        if (self.use_categorical):
            assert(self.binarizer != [])
            inverse_x = []
            for i, xi in enumerate(x):
                inverse_xi = np.zeros(self.nb_features)
                for f in range(self.nb_features):
                    if f in self.categorical_features:
                        # A categorical feature occupies one slot per category.
                        nb_values = len(self.categorical_names[f])
                        v = xi[:nb_values]
                        v = np.expand_dims(v, axis=0)
                        iv = self.binarizer[f].inverse_transform(v)
                        inverse_xi[f] =iv
                        xi = xi[nb_values:]

                    else:
                        inverse_xi[f] = xi[0]
                        xi = xi[1:]
                inverse_x.append(inverse_xi)
            return inverse_x
        else:
            return x

    def transform_inverse_by_index(self, idx):
        """Return the (feature, value) pair for extended-feature index ``idx``."""
        if (idx in self.extended_feature_names):
            return self.extended_feature_names[idx]
        else:
            print("Warning there is no feature {} in the internal mapping".format(idx))
            return None

    def transform_by_value(self, feat_value_pair):
        """Return the extended-feature index for a (feature, value) pair."""
        if (feat_value_pair in self.extended_feature_names.values()):
            keys = (list(self.extended_feature_names.keys())[list( self.extended_feature_names.values()).index(feat_value_pair)])
            return keys
        else:
            print("Warning there is no value {} in the internal mapping".format(feat_value_pair))
            return None

    def mapping_features(self):
        """
            Build the extended-feature mapping: index -> (feature, category)
            for one-hot slots, or (feature, None) for plain features, plus
            the parallel string names 'f{i}' / 'f{i}_{j}' used by the encoder.
        """
        self.extended_feature_names = {}
        self.extended_feature_names_as_array_strings = []
        counter = 0
        if (self.use_categorical):
            for i in range(self.nb_features):
                if (i in self.categorical_features):
                    for j, _ in enumerate(self.binarizer[i].categories_[0]):
                        self.extended_feature_names.update({counter:  (self.feature_names[i], j)})
                        self.extended_feature_names_as_array_strings.append("f{}_{}".format(i,j)) # str(self.feature_names[i]), j))
                        counter = counter + 1
                else:
                    self.extended_feature_names.update({counter: (self.feature_names[i], None)})
                    self.extended_feature_names_as_array_strings.append("f{}".format(i)) #(self.feature_names[i])
                    counter = counter + 1
        else:
            for i in range(self.nb_features):
                self.extended_feature_names.update({counter: (self.feature_names[i], None)})
                self.extended_feature_names_as_array_strings.append("f{}".format(i))#(self.feature_names[i])
                counter = counter + 1

    def readable_sample(self, x):
        """Replace categorical codes in ``x`` with their human-readable names."""
        readable_x = []
        for i, v in enumerate(x):
            if (i in self.categorical_features):
                readable_x.append(self.categorical_names[i][int(v)])
            else:
                readable_x.append(v)
        return np.asarray(readable_x)

    
    def test_encoding_transformes(self, X_train):
        """Round-trip check: transform then inverse-transform the first row."""
        # test encoding

        X = X_train[[0],:]

        print("Sample of length", len(X[0])," : ", X)
        enc_X = self.transform(X)
        print("Encoded sample of length", len(enc_X[0])," : ", enc_X)
        inv_X = self.transform_inverse(enc_X)
        print("Back to sample", inv_X)
        print("Readable sample", self.readable_sample(inv_X[0]))
        assert((inv_X == X).all())

        '''
        for i in range(len(self.extended_feature_names)):
            print(i, self.transform_inverse_by_index(i))
        for key, value in self.extended_feature_names.items():
            print(value, self.transform_by_value(value))   
        '''
+#
+#==============================================================================
class XRF(object):
    """
        class to encode and explain Random Forest classifiers.
    """

    def __init__(self, model, feature_names, class_names, verb=0):
        # Keep a handle on the trained classifier and its metadata.
        self.cls = model
        self.verbose = verb
        self.feature_names = feature_names
        self.class_names = class_names
        # Internal feature identifiers: 'f0', 'f1', ...
        self.fnames = ['f{0}'.format(idx) for idx in range(len(feature_names))]
        # Convert the sklearn model into the internal Forest representation.
        self.f = Forest(model, self.fnames)

        if self.verbose > 2:
            self.f.print_trees()
        if self.verbose:
            print("c RF sz:", self.f.sz)
            print('c max-depth:', self.f.md)
            print('c nof DTs:', len(self.f.trees))

    def __del__(self):
        # Release the encoder, then the explainer (and its SAT solver),
        # then the forest and the classifier.
        if hasattr(self, 'enc'):
            del self.enc
        if hasattr(self, 'x'):
            if self.x.slv is not None:
                self.x.slv.delete()
            del self.x
        del self.f
        self.f = None
        del self.cls
        self.cls = None

    def encode(self, inst):
        """
            Encode a tree ensemble trained previously.
        """
        if not hasattr(self, 'f'):
            self.f = Forest(self.cls, self.fnames)

        start = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
                resource.getrusage(resource.RUSAGE_SELF).ru_utime

        self.enc = SATEncoder(self.f, self.feature_names, len(self.class_names), self.fnames)

        formula, _, _, _ = self.enc.encode(np.array(inst))

        elapsed = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
                resource.getrusage(resource.RUSAGE_SELF).ru_utime - start

        if self.verbose:
            print('c nof vars:', formula.nv) # number of variables
            print('c nof clauses:', len(formula.clauses)) # number of clauses
            print('c encoding time: {0:.3f}'.format(elapsed))

    def explain(self, inst, xtype='abd'):
        """
            Explain a prediction made for a given sample with a previously
            trained RF.
        """
        start = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
                resource.getrusage(resource.RUSAGE_SELF).ru_utime

        if not hasattr(self, 'enc'):
            self.encode(inst)

        # Build the human-readable premise "feature = value" for each input.
        inpvals = np.asarray(inst)
        preamble = []
        for fname, fval in zip(self.feature_names, inpvals):
            if fname not in str(fval):
                preamble.append('{0} = {1}'.format(fname, fval))
            else:
                preamble.append(fval)

        inps = self.fnames # input (feature value) variables

        self.x = SATExplainer(self.enc, inps, preamble, self.class_names, verb=self.verbose)
        expl = self.x.explain(np.array(inst), xtype)

        elapsed = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
                resource.getrusage(resource.RUSAGE_SELF).ru_utime - start

        if self.verbose:
            print("c Total time: {0:.3f}".format(elapsed))

        return expl

    def enumerate(self, inst, xtype='con', smallest=True):
        """
            list all XPs
        """
        if not hasattr(self, 'enc'):
            self.encode(inst)

        if not hasattr(self, 'x'):
            # Same premise construction as in explain().
            inpvals = np.asarray(inst)
            preamble = []
            for fname, fval in zip(self.feature_names, inpvals):
                if fname not in str(fval):
                    preamble.append('{0} = {1}'.format(fname, fval))
                else:
                    preamble.append(fval)

            inps = self.fnames
            self.x = SATExplainer(self.enc, inps, preamble, self.class_names)

        yield from self.x.enumerate(np.array(inst), xtype, smallest)
+        
+#
+#==============================================================================
class SATEncoder(object):
    """
        Encoder of Random Forest classifier into SAT.

        Builds a CNF formula asserting that the forest predicts a class
        other than the majority class for the given sample; the formula is
        later queried under assumptions by SATExplainer.
    """
    
    def __init__(self, forest, feats, nof_classes, extended_feature_names,  from_file=None):
        # NOTE(review): `feats` and `from_file` are unused here — confirm
        # they are kept only for interface compatibility.
        self.forest = forest
        #self.feats = {f: i for i, f in enumerate(feats)}
        self.num_class = nof_classes
        self.vpool = IDPool()  # name <-> SAT variable id pool
        self.extended_feature_names = extended_feature_names
        
        #encoding formula
        self.cnf = None

        # for interval-based encoding
        self.intvs, self.imaps, self.ivars, self.thvars = None, None, None, None
       
        
    def newVar(self, name):
        """
            If a variable named 'name' already exists then
            return its id; otherwise create a new var
        """
        if name in self.vpool.obj2id: #var has been already created 
            return self.vpool.obj2id[name]
        var = self.vpool.id('{0}'.format(name))
        return var
    
    def nameVar(self, vid):
        """
            input a var id and return a var name
        """
        return self.vpool.obj(abs(vid))
    
    def printLits(self, lits):
        """Debug helper: print literal names, '-' marking negation."""
        print(["{0}{1}".format("-" if p<0 else "",self.vpool.obj(abs(p))) for p in lits])
    
    def traverse(self, tree, k, clause):
        """
            Traverse a tree and encode each node.

            Recursively collects, in `clause`, the negated branch literals
            on the path from the root; at a leaf of tree `k`, emits the
            clause "path implies the leaf's class-tree variable".
        """

        if tree.children:
            f = tree.name
            v = tree.threshold
            pos = neg = []
            if f in self.intvs:
                # numerical feature: use the threshold variable of interval v
                d = self.imaps[f][v]
                pos, neg = self.thvars[f][d], -self.thvars[f][d]
            else:
                # categorical/binary feature: one variable per node name
                var = self.newVar(tree.name)
                pos, neg = var, -var
                #print("{0} => {1}".format(tree.name, var))
                
            assert (pos and neg)
            self.traverse(tree.children[0], k, clause + [-neg])
            self.traverse(tree.children[1], k, clause + [-pos])            
        else:  # leaf node
            cvar = self.newVar('class{0}_tr{1}'.format(tree.values,k))
            self.cnf.append(clause + [cvar])
            #self.printLits(clause + [cvar])

    def compute_intervals(self):
        """
            Traverse all trees in the ensemble and extract intervals for each
            feature.

            At this point, the method only works for numerical datasets!
        """

        def traverse_intervals(tree):
            """
                Auxiliary function. Recursive tree traversal.
            """

            if tree.children:
                f = tree.name
                v = tree.threshold
                if f in self.intvs:
                    self.intvs[f].add(v)

                traverse_intervals(tree.children[0])
                traverse_intervals(tree.children[1])

        # initializing the intervals; names containing '_' are one-hot slots
        # of categorical features and get no interval set
        self.intvs = {'{0}'.format(f): set([]) for f in self.extended_feature_names if '_' not in f}

        for tree in self.forest.trees:
            traverse_intervals(tree)
                
        # OK, we got all intervals; let's sort the values
        # (math.inf closes the last, unbounded interval)
        self.intvs = {f: sorted(self.intvs[f]) + ([math.inf] if len(self.intvs[f]) else []) for f in six.iterkeys(self.intvs)}

        self.imaps, self.ivars = {}, {}
        self.thvars = {}
        for feat, intvs in six.iteritems(self.intvs):
            self.imaps[feat] = {}   # upper bound -> interval index
            self.ivars[feat] = []   # one indicator var per interval
            self.thvars[feat] = []  # one threshold var per finite bound
            for i, ub in enumerate(intvs):
                self.imaps[feat][ub] = i

                ivar = self.newVar('{0}_intv{1}'.format(feat, i))
                self.ivars[feat].append(ivar)
                #print('{0}_intv{1}'.format(feat, i))
                
                if ub != math.inf:
                    #assert(i < len(intvs)-1)
                    thvar = self.newVar('{0}_th{1}'.format(feat, i))
                    self.thvars[feat].append(thvar)
                    #print('{0}_th{1}'.format(feat, i))



    def encode(self, sample):
        """
            Do the job.

            Returns (cnf, intvs, imaps, ivars) and stores the majority
            class in self.cmaj.
        """
        
        ###print('Encode RF into SAT ...')

        self.cnf = CNF()
        # getting a tree ensemble
        #self.forest = Forest(self.model, self.extended_feature_names)
        num_tree = len(self.forest.trees)
        self.forest.predict_inst(sample)

        #introducing class variables
        #cvars = [self.newVar('class{0}'.format(i)) for i in range(self.num_class)]
        
        # define Tautology var
        vtaut = self.newVar('Tautology')
        self.cnf.append([vtaut])
            
        # introducing class-tree variables: ctvars[k][j] <=> tree k votes class j
        ctvars = [[] for t in range(num_tree)]
        for k in range(num_tree):
            for j in range(self.num_class):
                var = self.newVar('class{0}_tr{1}'.format(j,k))
                ctvars[k].append(var)       

        # traverse all trees and extract all possible intervals
        # for each feature
        ###print("compute intervarls ...")
        self.compute_intervals()
        
        #print(self.intvs)
        #print([len(self.intvs[f]) for f in self.intvs])
        #print(self.imaps) 
        #print(self.ivars)
        #print(self.thvars)
        #print(ctvars)
        
        
        ##print("encode trees ...")
        # traversing and encoding each tree
        for k, tree in enumerate(self.forest.trees):
            #print("Encode tree#{0}".format(k))
            # encoding the tree     
            self.traverse(tree, k, [])
            # exactly one class var is true
            # (atmost-1 here; the leaf clauses from traverse() force
            # at least one vote per tree)
            #self.printLits(ctvars[k])
            card = CardEnc.atmost(lits=ctvars[k], vpool=self.vpool,encoding=EncType.cardnetwrk) 
            self.cnf.extend(card.clauses)
        
        
            
        # calculate the majority class   
        self.cmaj = self.forest.predict_inst(sample)       
        
        ##print("encode majority class ...")                
        #Cardinality constraint AtMostK to capture a j_th class
        
        if(self.num_class == 2):
            # binary case: the other class must gather a strict majority
            # (ties go to the lower class index, hence the parity tweak)
            rhs = math.floor(num_tree / 2) + 1
            if(self.cmaj==1 and not num_tree%2):
                rhs = math.floor(num_tree / 2)      
            lhs = [ctvars[k][1 - self.cmaj] for k in range(num_tree)]
            atls = CardEnc.atleast(lits = lhs, bound = rhs, vpool=self.vpool, encoding=EncType.cardnetwrk)
            self.cnf.extend(atls)
        else: 
            # multi-class case: compare the vote count of cmaj against each
            # other class via the auxiliary z/p variables below.
            # NOTE(review): this mirrors the RFxp paper's majority encoding —
            # confirm against the published construction.
            zvars = []
            zvars.append([self.newVar('z_0_{0}'.format(k)) for k in range (num_tree) ])
            zvars.append([self.newVar('z_1_{0}'.format(k)) for k in range (num_tree) ])
            ##
            rhs = num_tree
            lhs0 = zvars[0] + [ - ctvars[k][self.cmaj] for k in range(num_tree)]
            ##self.printLits(lhs0)
            atls = CardEnc.atleast(lits = lhs0, bound = rhs, vpool=self.vpool, encoding=EncType.cardnetwrk)
            self.cnf.extend(atls)
            ##
            #rhs = num_tree - 1
            rhs = num_tree + 1
            ###########
            lhs1 =  zvars[1] + [ - ctvars[k][self.cmaj] for k in range(num_tree)]
            ##self.printLits(lhs1)
            atls = CardEnc.atleast(lits = lhs1, bound = rhs, vpool=self.vpool, encoding=EncType.cardnetwrk)
            self.cnf.extend(atls)            
            #
            # pvars[j] selects which class is compared against cmaj
            pvars = [self.newVar('p_{0}'.format(k)) for k in range(self.num_class + 1)]
            ##self.printLits(pvars)
            for k,p in enumerate(pvars):
                for i in range(num_tree):
                    if k == 0:
                        z = zvars[0][i]
                        #self.cnf.append([-p, -z, vtaut])
                        self.cnf.append([-p, z, -vtaut])       
                        #self.printLits([-p, z, -vtaut])
                        #print()
                    elif k == self.cmaj+1:
                        z = zvars[1][i]
                        self.cnf.append([-p, z, -vtaut])       
                        
                        #self.printLits([-p, z, -vtaut])
                        #print()                       
                        
                    else:
                        z = zvars[0][i] if (k<self.cmaj+1) else zvars[1][i]
                        self.cnf.append([-p, -z, ctvars[i][k-1] ])
                        self.cnf.append([-p, z, -ctvars[i][k-1] ])  
                        
                        #self.printLits([-p, -z, ctvars[i][k-1] ])
                        #self.printLits([-p, z, -ctvars[i][k-1] ])
                        #print()
                        
            #
            self.cnf.append([-pvars[0], -pvars[self.cmaj+1]])
            ##
            # exactly one selector on each side of cmaj
            lhs1 =  pvars[:(self.cmaj+1)]
            ##self.printLits(lhs1)
            eqls = CardEnc.equals(lits = lhs1, bound = 1, vpool=self.vpool, encoding=EncType.cardnetwrk)
            self.cnf.extend(eqls)
            
            
            lhs2 = pvars[(self.cmaj + 1):]
            ##self.printLits(lhs2)
            eqls = CardEnc.equals(lits = lhs2, bound = 1, vpool=self.vpool, encoding=EncType.cardnetwrk)
            self.cnf.extend(eqls)
                
        
            
        ##print("exactly-one feat const ...")
        # enforce exactly one of the feature values to be chosen
        # (for categorical features)
        categories = collections.defaultdict(lambda: [])
        for f in self.extended_feature_names:
            if '_' in f:
                categories[f.split('_')[0]].append(self.newVar(f))        
        for c, feats in six.iteritems(categories):
            # exactly-one feat is True
            self.cnf.append(feats)
            card = CardEnc.atmost(lits=feats, vpool=self.vpool, encoding=EncType.cardnetwrk)
            self.cnf.extend(card.clauses)
        # lits of intervals   
        # exactly one interval per numerical feature is active
        for f, intvs in six.iteritems(self.ivars):
            if not len(intvs):
                continue
            self.cnf.append(intvs) 
            card = CardEnc.atmost(lits=intvs, vpool=self.vpool, encoding=EncType.cardnetwrk)
            self.cnf.extend(card.clauses)
            #self.printLits(intvs)
        
            
        
        # link threshold vars to interval vars: thvar j is true iff the
        # active interval lies at or below bound j (chain consistency)
        for f, threshold in six.iteritems(self.thvars):
            for j, thvar in enumerate(threshold):
                d = j+1
                pos, neg = self.ivars[f][d:], self.ivars[f][:d] 
                
                if j == 0:
                    assert(len(neg) == 1)
                    self.cnf.append([thvar, neg[-1]])
                    self.cnf.append([-thvar, -neg[-1]])
                else:
                    self.cnf.append([thvar, neg[-1], -threshold[j-1]])
                    self.cnf.append([-thvar, threshold[j-1]])
                    self.cnf.append([-thvar, -neg[-1]])
                
                if j == len(threshold) - 1:
                    assert(len(pos) == 1)
                    self.cnf.append([-thvar, pos[0]])
                    self.cnf.append([thvar, -pos[0]])
                else:
                    self.cnf.append([-thvar, pos[0], threshold[j+1]])
                    self.cnf.append([thvar, -pos[0]])
                    self.cnf.append([thvar, -threshold[j+1]])
          

        
        return self.cnf, self.intvs, self.imaps, self.ivars
+
+
+#
+#==============================================================================
+class SATExplainer(object):
+    """
+        An SAT-inspired minimal explanation extractor for Random Forest models.
+    """
+
+    def __init__(self, sat_enc, inps, preamble, target_name, verb=1):
+        """
+            Constructor.
+        """
+        self.enc = sat_enc
+        self.inps = inps  # input (feature value) variables
+        self.target_name = target_name
+        self.preamble = preamble
+        self.verbose = verb
+        self.slv = None    
+      
+    def prepare_selectors(self, sample):
+        # adapt the solver to deal with the current sample
+        #self.csel = []
+        self.assums = []  # var selectors to be used as assumptions
+        self.sel2fid = {}  # selectors to original feature ids
+        self.sel2vid = {}  # selectors to categorical feature ids
+        self.sel2v = {} # selectors to (categorical/interval) values
+        
+        #for i in range(self.enc.num_class):
+        #    self.csel.append(self.enc.newVar('class{0}'.format(i)))
+        #self.csel = self.enc.newVar('class{0}'.format(self.enc.cmaj))
+               
+        # preparing the selectors
+        for i, (inp, val) in enumerate(zip(self.inps, sample), 1):
+            if '_' in inp:
+                # binarized (OHE) features
+                assert (inp not in self.enc.intvs)
+                
+                feat = inp.split('_')[0]
+                selv = self.enc.newVar('selv_{0}'.format(feat))
+            
+                self.assums.append(selv)   
+                if selv not in self.sel2fid:
+                    self.sel2fid[selv] = int(feat[1:])
+                    self.sel2vid[selv] = [i - 1]
+                else:
+                    self.sel2vid[selv].append(i - 1)
+                    
+                p = self.enc.newVar(inp) 
+                if not val:
+                    p = -p
+                else:
+                    self.sel2v[selv] = p
+                    
+                self.enc.cnf.append([-selv, p])
+                #self.enc.printLits([-selv, p])
+                    
+            elif len(self.enc.intvs[inp]):
+                #v = None
+                #for intv in self.enc.intvs[inp]:
+                #    if intv > val:
+                #        v = intv
+                #        break         
+                v = next((intv for intv in self.enc.intvs[inp] if intv > val), None)     
+                assert(v is not None)
+                
+                selv = self.enc.newVar('selv_{0}'.format(inp))     
+                self.assums.append(selv)  
+                
+                assert (selv not in self.sel2fid)
+                self.sel2fid[selv] = int(inp[1:])
+                self.sel2vid[selv] = [i - 1]
+                            
+                for j,p in enumerate(self.enc.ivars[inp]):
+                    cl = [-selv]
+                    if j == self.enc.imaps[inp][v]:
+                        cl += [p]
+                        self.sel2v[selv] = p
+                    else:
+                        cl += [-p]
+                    
+                    self.enc.cnf.append(cl)
+                    #self.enc.printLits(cl)
+
+        
+    
+    def explain(self, sample, xtype='abd', smallest=False):
+        """
+            Hypotheses minimization.
+        """
+        if self.verbose:
+            print('  explaining:  "IF {0} THEN {1}"'.format(' AND '.join(self.preamble), self.target_name[self.enc.cmaj]))
+                    
+        
+        self.time = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
+                resource.getrusage(resource.RUSAGE_SELF).ru_utime
+        
+        self.prepare_selectors(sample)
+        
+        if xtype == 'abd':
+            # abductive (PI-) explanation
+            expl = self.compute_axp() 
+        else:
+            # contrastive explanation
+            expl = self.compute_cxp()
+ 
+        self.time = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
+                resource.getrusage(resource.RUSAGE_SELF).ru_utime - self.time
+    
+        # delete sat solver
+        self.slv.delete()
+        self.slv = None
+        
+        if self.verbose:
+            print('  time: {0:.3f}'.format(self.time))
+
+        return expl    
+
+    def compute_axp(self, smallest=False):
+        """
+            Compute an Abductive eXplanation
+        """         
+        self.assums = sorted(set(self.assums))
+        if self.verbose:
+            print('  # hypos:', len(self.assums))   
+        
+        #create a SAT solver
+        self.slv = Solver(name="glucose3")
+        
+        # pass a CNF formula
+        self.slv.append_formula(self.enc.cnf)    
+
+        def minimal():
+            vtaut = self.enc.newVar('Tautology')
+            # simple deletion-based linear search
+            for i, p in enumerate(self.assums):
+                to_test = [vtaut] + self.assums[:i] + self.assums[(i + 1):] + [-p, -self.sel2v[p]]
+                sat = self.slv.solve(assumptions=to_test)
+                if not sat:
+                    self.assums[i] = -p         
+            return
+        
+        if not smallest:
+            minimal()
+        else:
+            raise NotImplementedError('Smallest explanation is not yet implemented.')
+            #self.compute_smallest()
+
+        expl = sorted([self.sel2fid[h] for h in self.assums if h>0 ])
+        assert len(expl), 'Abductive explanation cannot be an empty-set! otherwise RF fcn is const, i.e. predicts only one class'
+        
+        if self.verbose:
+            print("expl-selctors: ", expl)
+            preamble = [self.preamble[i] for i in expl]
+            print('  explanation: "IF {0} THEN {1}"'.format(' AND '.join(preamble), self.target_name[self.enc.cmaj]))
+            print('  # hypos left:', len(expl))
+            
+        return expl
+        
+    def compute_cxp(self, smallest=True):
+        """
+            Compute a Contrastive eXplanation
+        """         
+        self.assums = sorted(set(self.assums))
+        if self.verbose:
+            print('  # hypos:', len(self.assums))   
+    
+        wcnf = WCNF()
+        for cl in self.enc.cnf:
+            wcnf.append(cl)    
+        for p in self.assums:
+            wcnf.append([p], weight=1)
+            
+        if not smallest:
+            # mcs solver
+            self.slv = LBX(wcnf, use_cld=True, solver_name='g3')
+            mcs = self.slv.compute()
+            expl = sorted([self.sel2fid[self.assums[i-1]] for i in mcs])
+        else:
+            # mxsat solver
+            self.slv = RC2(wcnf)
+            model = self.slv.compute()
+            model = [p for p in model if abs(p) in self.assums]            
+            expl = sorted([self.sel2fid[-p] for p in model if p<0 ])
+       
+        assert len(expl), 'Contrastive explanation cannot be an empty-set!'         
+        if self.verbose:
+            print("expl-selctors: ", expl)
+            preamble = [self.preamble[i] for i in expl]
+            pred = self.target_name[self.enc.cmaj]
+            print(f'  explanation: "IF {" AND ".join([f"!({p})" for p in preamble])} THEN !(class = {pred})"')
+            
+        return expl    
+    
+    def enumerate(self, sample, xtype='con', smallest=True):
+        """
+            Enumerate contrastive explanations (CXp's) for *sample*,
+            yielding one adversarial example per explanation: a list of
+            (feature-name, value-name) tuples describing the feature
+            assignments that flip the forest's prediction.
+
+            :param sample: instance to explain (passed through to
+                prepare_selectors on first use).
+            :param xtype: explanation type; only 'con' (contrastive) is
+                implemented — 'abd' raises NotImplementedError.
+            :param smallest: if True, enumerate via an incremental MaxSAT
+                solver (RC2), so models correspond to smallest-cost
+                corrections; if False, enumerate subset-minimal MCSes
+                with LBX.
+
+            NOTE(review): solver state (self.slv, self.assums) is cached
+            on the instance; if the caller abandons this generator before
+            exhaustion, the final delete()/reset below never runs — the
+            solver is only disposed of when enumeration completes.
+        """
+        if xtype == 'abd':
+            raise NotImplementedError('Enumerate abductive explanations is not yet implemented.')
+        # start user-time accounting (self + any child processes)
+        time = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
+                resource.getrusage(resource.RUSAGE_SELF).ru_utime
+        
+        # lazily build the selector literals for this sample on first call;
+        # dedupe/sort so positions are stable for the 1-based MCS indexing below
+        if 'assums' not in dir(self):
+            self.prepare_selectors(sample)
+            self.assums = sorted(set(self.assums))
+            #
+            
+        # compute CXp's/AE's    
+        if self.slv is None:    
+            # hard clauses: the RF encoding; soft unit clauses: one
+            # selector per feature, each with weight 1
+            wcnf = WCNF()
+            for cl in self.enc.cnf:
+                wcnf.append(cl)    
+            for p in self.assums:
+                wcnf.append([p], weight=1)
+            if smallest:    
+                # incremental maxsat solver    
+                self.slv = RC2(wcnf, adapt=True, exhaust=True, minz=True)
+            else:
+                # mcs solver
+                self.slv = LBX(wcnf, use_cld=True, solver_name='g3')
+                #self.slv = MCSls(wcnf, use_cld=True, solver_name='g3')                
+                
+        if smallest:    
+            # NOTE(review): debug print — consider gating on self.verbose
+            print('smallest')
+            # block=-1 makes RC2 block each model so enumeration advances
+            for model in self.slv.enumerate(block=-1):
+                #model = [p for p in model if abs(p) in self.assums]
+                # falsified selectors (negative literals) name the features
+                # that had to change to flip the prediction
+                expl = sorted([self.sel2fid[-p] for p in model if (p<0 and (-p in self.assums))])
+                cxp_feats = [f'f{j}' for j in expl]
+                advx = []
+                for f in cxp_feats:
+                    # exactly one interval/value variable per feature is
+                    # true in a model; it names the adversarial value
+                    ps = [p for p in model if (p>0 and (p in self.enc.ivars[f]))]
+                    assert(len(ps) == 1)
+                    advx.append(tuple([f,self.enc.nameVar(ps[0])]))   
+                #yield expl
+                # NOTE(review): debug print — consider gating on self.verbose
+                print(cxp_feats, advx)
+                yield advx
+        else:
+            # NOTE(review): debug print — consider gating on self.verbose
+            print('LBX')
+            for mcs in self.slv.enumerate():
+                # LBX returns 1-based indices into the soft clauses,
+                # hence the i-1 offset into self.assums
+                expl = sorted([self.sel2fid[self.assums[i-1]] for i in mcs])
+                # flip exactly the MCS selectors, keep the rest asserted,
+                # then query the underlying SAT oracle for a witness model
+                assumptions = [-p if(i in mcs) else p for i,p in enumerate(self.assums, 1)]
+                #for k, model in enumerate(self.slv.oracle.enum_models(assumptions), 1):
+                assert (self.slv.oracle.solve(assumptions))
+                model = self.slv.oracle.get_model()
+                cxp_feats = [f'f{j}' for j in expl]
+                advx = []
+                for f in cxp_feats:
+                    # same single-true-value invariant as the MaxSAT branch
+                    ps = [p for p in model if (p>0 and (p in self.enc.ivars[f]))]
+                    assert(len(ps) == 1)
+                    advx.append(tuple([f,self.enc.nameVar(ps[0])]))
+                yield advx
+                # block this MCS so LBX moves on to the next one
+                self.slv.block(mcs)
+                #yield expl
+                
+                
+        # elapsed user time for the whole enumeration
+        time = resource.getrusage(resource.RUSAGE_CHILDREN).ru_utime + \
+                resource.getrusage(resource.RUSAGE_SELF).ru_utime - time 
+        if self.verbose:
+            print('c expl time: {0:.3f}'.format(time))
+        #
+        # dispose of the solver and re-arm lazy construction for the next call
+        self.slv.delete()
+        self.slv = None
\ No newline at end of file