# HG changeset patch
# User bgruening
# Date 1618346693 0
# Node ID fe627c026dc62c065b40ca4adb59bdd9ce9a4a19
# Parent  daece0f271089344cd37783775810c75b319bd59
"planemo upload for repository https://github.com/bgruening/galaxytools/tree/master/tools/sklearn commit 208a8d348e7c7a182cfbe1b6f17868146428a7e2"
diff -r daece0f27108 -r fe627c026dc6 fitted_model_eval.py
--- a/fitted_model_eval.py	Wed Mar 11 17:11:13 2020 +0000
+++ b/fitted_model_eval.py	Tue Apr 13 20:44:53 2021 +0000
@@ -1,17 +1,17 @@
 import argparse
 import json
-import pandas as pd
 import warnings
 
+import pandas as pd
+from galaxy_ml.utils import get_scoring, load_model, read_columns
 from scipy.io import mmread
-from sklearn.pipeline import Pipeline
 from sklearn.metrics.scorer import _check_multimetric_scoring
 from sklearn.model_selection._validation import _score
-from galaxy_ml.utils import get_scoring, load_model, read_columns
+from sklearn.pipeline import Pipeline
 
 
 def _get_X_y(params, infile1, infile2):
-    """ read from inputs and output X and y
+    """read from inputs and output X and y
 
     Parameters
     ----------
@@ -26,35 +26,40 @@
     # store read dataframe object
     loaded_df = {}
 
-    input_type = params['input_options']['selected_input']
+    input_type = params["input_options"]["selected_input"]
     # tabular input
-    if input_type == 'tabular':
-        header = 'infer' if params['input_options']['header1'] else None
-        column_option = (params['input_options']['column_selector_options_1']
-                         ['selected_column_selector_option'])
-        if column_option in ['by_index_number', 'all_but_by_index_number',
-                             'by_header_name', 'all_but_by_header_name']:
-            c = params['input_options']['column_selector_options_1']['col1']
+    if input_type == "tabular":
+        header = "infer" if params["input_options"]["header1"] else None
+        column_option = params["input_options"]["column_selector_options_1"]["selected_column_selector_option"]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = params["input_options"]["column_selector_options_1"]["col1"]
         else:
             c = None
 
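+        # cache the parsed dataframe by path + header mode; the infile2 lookup below reuses it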
         df_key = infile1 + repr(header)
-        df = pd.read_csv(infile1, sep='\t', header=header,
-                         parse_dates=True)
+        df = pd.read_csv(infile1, sep="\t", header=header, parse_dates=True)
         loaded_df[df_key] = df
 
         X = read_columns(df, c=c, c_option=column_option).astype(float)
     # sparse input
-    elif input_type == 'sparse':
-        X = mmread(open(infile1, 'r'))
+    elif input_type == "sparse":
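+        # scipy's mmread parses Matrix Market (.mtx) formatted sparse input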
+        X = mmread(open(infile1, "r"))
 
     # Get target y
-    header = 'infer' if params['input_options']['header2'] else None
-    column_option = (params['input_options']['column_selector_options_2']
-                     ['selected_column_selector_option2'])
-    if column_option in ['by_index_number', 'all_but_by_index_number',
-                         'by_header_name', 'all_but_by_header_name']:
-        c = params['input_options']['column_selector_options_2']['col2']
+    header = "infer" if params["input_options"]["header2"] else None
+    column_option = params["input_options"]["column_selector_options_2"]["selected_column_selector_option2"]
+    if column_option in [
+        "by_index_number",
+        "all_but_by_index_number",
+        "by_header_name",
+        "all_but_by_header_name",
+    ]:
+        c = params["input_options"]["column_selector_options_2"]["col2"]
     else:
         c = None
 
@@ -62,26 +67,24 @@
     if df_key in loaded_df:
         infile2 = loaded_df[df_key]
     else:
-        infile2 = pd.read_csv(infile2, sep='\t',
-                              header=header, parse_dates=True)
+        infile2 = pd.read_csv(infile2, sep="\t", header=header, parse_dates=True)
         loaded_df[df_key] = infile2
 
-    y = read_columns(
-            infile2,
-            c=c,
-            c_option=column_option,
-            sep='\t',
-            header=header,
-            parse_dates=True)
+    y = read_columns(infile2, c=c, c_option=column_option, sep="\t", header=header, parse_dates=True)
     if len(y.shape) == 2 and y.shape[1] == 1:
         y = y.ravel()
 
     return X, y
 
 
-def main(inputs, infile_estimator, outfile_eval,
-         infile_weights=None, infile1=None,
-         infile2=None):
+def main(
+    inputs,
+    infile_estimator,
+    outfile_eval,
+    infile_weights=None,
+    infile1=None,
+    infile2=None,
+):
     """
     Parameter
     ---------
@@ -103,49 +106,55 @@
     infile2 : str
         File path to dataset containing target values
     """
-    warnings.filterwarnings('ignore')
+    warnings.filterwarnings("ignore")
 
-    with open(inputs, 'r') as param_handler:
+    with open(inputs, "r") as param_handler:
         params = json.load(param_handler)
 
     X_test, y_test = _get_X_y(params, infile1, infile2)
 
     # load model
-    with open(infile_estimator, 'rb') as est_handler:
+    with open(infile_estimator, "rb") as est_handler:
         estimator = load_model(est_handler)
 
     main_est = estimator
     if isinstance(estimator, Pipeline):
         main_est = estimator.steps[-1][-1]
-    if hasattr(main_est, 'config') and hasattr(main_est, 'load_weights'):
-        if not infile_weights or infile_weights == 'None':
-            raise ValueError("The selected model skeleton asks for weights, "
-                             "but no dataset for weights was provided!")
+    if hasattr(main_est, "config") and hasattr(main_est, "load_weights"):
+        if not infile_weights or infile_weights == "None":
+            raise ValueError(
+                "The selected model skeleton asks for weights, but no dataset for weights was provided!"
+            )
         main_est.load_weights(infile_weights)
 
     # handle scorer, convert to scorer dict
-    scoring = params['scoring']
+    # Check if scoring is specified
+    scoring = params["scoring"]
+    if scoring is not None:
+        # get_scoring() expects secondary_scoring to be a comma separated string (not a list)
+        # Check if secondary_scoring is specified
+        secondary_scoring = scoring.get("secondary_scoring", None)
+        if secondary_scoring is not None:
+            # If secondary_scoring is specified, convert the list into a comma separated string
+            scoring["secondary_scoring"] = ",".join(scoring["secondary_scoring"])
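+            # e.g. ["precision", "recall"] becomes "precision,recall" (illustrative values)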
+
     scorer = get_scoring(scoring)
     scorer, _ = _check_multimetric_scoring(estimator, scoring=scorer)
 
-    if hasattr(estimator, 'evaluate'):
-        scores = estimator.evaluate(X_test, y_test=y_test,
-                                    scorer=scorer,
-                                    is_multimetric=True)
+    if hasattr(estimator, "evaluate"):
+        scores = estimator.evaluate(X_test, y_test=y_test, scorer=scorer, is_multimetric=True)
     else:
-        scores = _score(estimator, X_test, y_test, scorer,
-                        is_multimetric=True)
+        scores = _score(estimator, X_test, y_test, scorer, is_multimetric=True)
 
     # handle output
     for name, score in scores.items():
         scores[name] = [score]
     df = pd.DataFrame(scores)
     df = df[sorted(df.columns)]
-    df.to_csv(path_or_buf=outfile_eval, sep='\t',
-              header=True, index=False)
+    df.to_csv(path_or_buf=outfile_eval, sep="\t", header=True, index=False)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     aparser = argparse.ArgumentParser()
     aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
     aparser.add_argument("-e", "--infile_estimator", dest="infile_estimator")
@@ -155,6 +164,11 @@
     aparser.add_argument("-O", "--outfile_eval", dest="outfile_eval")
     args = aparser.parse_args()
 
-    main(args.inputs, args.infile_estimator, args.outfile_eval,
-         infile_weights=args.infile_weights, infile1=args.infile1,
-         infile2=args.infile2)
+    main(
+        args.inputs,
+        args.infile_estimator,
+        args.outfile_eval,
+        infile_weights=args.infile_weights,
+        infile1=args.infile1,
+        infile2=args.infile2,
+    )
diff -r daece0f27108 -r fe627c026dc6 keras_deep_learning.py
--- a/keras_deep_learning.py	Wed Mar 11 17:11:13 2020 +0000
+++ b/keras_deep_learning.py	Tue Apr 13 20:44:53 2021 +0000
@@ -1,14 +1,14 @@
 import argparse
 import json
+import pickle
+import warnings
+from ast import literal_eval
+
 import keras
 import pandas as pd
-import pickle
 import six
-import warnings
-
-from ast import literal_eval
-from keras.models import Sequential, Model
-from galaxy_ml.utils import try_get_attr, get_search_params, SafeEval
+from galaxy_ml.utils import get_search_params, SafeEval, try_get_attr
+from keras.models import Model, Sequential
 
 
 safe_eval = SafeEval()
@@ -177,11 +177,11 @@
         # merge layers
         if 'merging_layers' in options:
             idxs = literal_eval(options.pop('merging_layers'))
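+            # layer indices in the tool config are 1-based, hence the i - 1 shift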
-            merging_layers = [all_layers[i-1] for i in idxs]
+            merging_layers = [all_layers[i - 1] for i in idxs]
             new_layer = klass(**options)(merging_layers)
         # non-input layers
         elif inbound_nodes is not None:
-            new_layer = klass(**options)(all_layers[inbound_nodes-1])
+            new_layer = klass(**options)(all_layers[inbound_nodes - 1])
         # input layers
         else:
             new_layer = klass(**options)
@@ -189,10 +189,10 @@
         all_layers.append(new_layer)
 
     input_indexes = _handle_shape(config['input_layers'])
-    input_layers = [all_layers[i-1] for i in input_indexes]
+    input_layers = [all_layers[i - 1] for i in input_indexes]
 
     output_indexes = _handle_shape(config['output_layers'])
-    output_layers = [all_layers[i-1] for i in output_indexes]
+    output_layers = [all_layers[i - 1] for i in output_indexes]
 
     return Model(inputs=input_layers, outputs=output_layers)
 
@@ -300,8 +300,7 @@
         options.update((inputs['mode_selection']['compile_params']
                         ['optimizer_selection']['optimizer_options']))
 
-        train_metrics = (inputs['mode_selection']['compile_params']
-                         ['metrics']).split(',')
+        train_metrics = inputs['mode_selection']['compile_params']['metrics']
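+        # metrics now arrives as a list rather than a comma separated string; a trailing 'none' placeholder is dropped below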
         if train_metrics[-1] == 'none':
             train_metrics = train_metrics[:-1]
         options['metrics'] = train_metrics
diff -r daece0f27108 -r fe627c026dc6 keras_model_config.xml
--- a/keras_model_config.xml	Wed Mar 11 17:11:13 2020 +0000
+++ b/keras_model_config.xml	Tue Apr 13 20:44:53 2021 +0000
@@ -1,780 +1,778 @@
[diff body for keras_model_config.xml not recoverable: the XML markup was stripped during extraction, leaving only indentation and stray text fragments ("using Keras", "main_macros.xml", "keras_macros.xml", echo "@KERAS_VERSION@"). The surviving whitespace indicates the tool wrapper was re-indented from 2-space to 4-space nesting; no functional changes are discernible.]
diff -r daece0f27108 -r fe627c026dc6 keras_train_and_eval.py
--- a/keras_train_and_eval.py	Wed Mar 11 17:11:13 2020 +0000
+++ b/keras_train_and_eval.py	Tue Apr 13 20:44:53 2021 +0000
@@ -1,56 +1,65 @@
 import argparse
-import joblib
 import json
-import numpy as np
 import os
-import pandas as pd
 import pickle
 import warnings
 from itertools import chain
+
+import joblib
+import numpy as np
+import pandas as pd
+from galaxy_ml.externals.selene_sdk.utils import compute_score
+from galaxy_ml.keras_galaxy_models import _predict_generator
+from galaxy_ml.model_validations import train_test_split
+from galaxy_ml.utils import (
+    clean_params,
+    get_main_estimator,
+    get_module,
+    get_scoring,
+    load_model,
+    read_columns,
+    SafeEval,
+    try_get_attr,
+)
 from scipy.io import mmread
-from sklearn.pipeline import Pipeline
 from sklearn.metrics.scorer import _check_multimetric_scoring
-from sklearn import model_selection
+from sklearn.model_selection import _search, _validation
 from sklearn.model_selection._validation import _score
-from sklearn.model_selection import _search, _validation
+from sklearn.pipeline import Pipeline
 from sklearn.utils import indexable, safe_indexing
 
-from galaxy_ml.externals.selene_sdk.utils import compute_score
-from galaxy_ml.model_validations import train_test_split
-from galaxy_ml.keras_galaxy_models import _predict_generator
-from galaxy_ml.utils import (SafeEval, get_scoring, load_model,
-                             read_columns, try_get_attr, get_module,
-                             clean_params, get_main_estimator)
 
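+# Swap sklearn's private _fit_and_score for galaxy_ml's implementation in both
+# the search and validation modules, so grid search and cross-validation share it.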
+_fit_and_score = try_get_attr("galaxy_ml.model_validations", "_fit_and_score")
+setattr(_search, "_fit_and_score", _fit_and_score)
+setattr(_validation, "_fit_and_score", _fit_and_score)
 
-_fit_and_score = try_get_attr('galaxy_ml.model_validations', '_fit_and_score')
-setattr(_search, '_fit_and_score', _fit_and_score)
-setattr(_validation, '_fit_and_score', _fit_and_score)
-
-N_JOBS = int(os.environ.get('GALAXY_SLOTS', 1))
-CACHE_DIR = os.path.join(os.getcwd(), 'cached')
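+# GALAXY_SLOTS holds the CPU count Galaxy allocates to the job; default to 1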
+N_JOBS = int(os.environ.get("GALAXY_SLOTS", 1))
+CACHE_DIR = os.path.join(os.getcwd(), "cached")
 del os
-NON_SEARCHABLE = ('n_jobs', 'pre_dispatch', 'memory', '_path',
-                  'nthread', 'callbacks')
-ALLOWED_CALLBACKS = ('EarlyStopping', 'TerminateOnNaN', 'ReduceLROnPlateau',
-                     'CSVLogger', 'None')
+NON_SEARCHABLE = ("n_jobs", "pre_dispatch", "memory", "_path", "nthread", "callbacks")
+ALLOWED_CALLBACKS = (
+    "EarlyStopping",
+    "TerminateOnNaN",
+    "ReduceLROnPlateau",
+    "CSVLogger",
+    "None",
+)
 
 
 def _eval_swap_params(params_builder):
     swap_params = {}
 
-    for p in params_builder['param_set']:
-        swap_value = p['sp_value'].strip()
-        if swap_value == '':
+    for p in params_builder["param_set"]:
+        swap_value = p["sp_value"].strip()
+        if swap_value == "":
             continue
 
-        param_name = p['sp_name']
+        param_name = p["sp_name"]
         if param_name.lower().endswith(NON_SEARCHABLE):
-            warnings.warn("Warning: `%s` is not eligible for search and was "
-                          "omitted!" % param_name)
+            warnings.warn("Warning: `%s` is not eligible for search and was omitted!" % param_name)
             continue
 
-        if not swap_value.startswith(':'):
+        if not swap_value.startswith(":"):
             safe_eval = SafeEval(load_scipy=True, load_numpy=True)
             ev = safe_eval(swap_value)
         else:
@@ -77,23 +86,20 @@
         else:
             new_arrays.append(arr)
 
-    if kwargs['shuffle'] == 'None':
-        kwargs['shuffle'] = None
+    if kwargs["shuffle"] == "None":
+        kwargs["shuffle"] = None
 
-    group_names = kwargs.pop('group_names', None)
+    group_names = kwargs.pop("group_names", None)
 
     if group_names is not None and group_names.strip():
-        group_names = [name.strip() for name in
-                       group_names.split(',')]
+        group_names = [name.strip() for name in group_names.split(",")]
         new_arrays = indexable(*new_arrays)
-        groups = kwargs['labels']
+        groups = kwargs["labels"]
         n_samples = new_arrays[0].shape[0]
         index_arr = np.arange(n_samples)
         test = index_arr[np.isin(groups, group_names)]
         train = index_arr[~np.isin(groups, group_names)]
-        rval = list(chain.from_iterable(
-            (safe_indexing(a, train),
-             safe_indexing(a, test)) for a in new_arrays))
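+        # interleave per-array splits: [a_train, a_test, b_train, b_test, ...]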
+        rval = list(chain.from_iterable((safe_indexing(a, train), safe_indexing(a, test)) for a in new_arrays))
     else:
         rval = train_test_split(*new_arrays, **kwargs)
 
@@ -104,7 +110,7 @@
 
 
 def _evaluate(y_true, pred_probas, scorer, is_multimetric=True):
-    """ output scores based on input scorer
+    """output scores based on input scorer
 
     Parameters
     ----------
@@ -118,52 +124,55 @@
     """
     if y_true.ndim == 1 or y_true.shape[-1] == 1:
         pred_probas = pred_probas.ravel()
-        pred_labels = (pred_probas > 0.5).astype('int32')
-        targets = y_true.ravel().astype('int32')
+        pred_labels = (pred_probas > 0.5).astype("int32")
+        targets = y_true.ravel().astype("int32")
         if not is_multimetric:
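+            # _PredictScorer consumes hard labels; other scorers (e.g. roc_auc) score the raw probabilities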
-            preds = pred_labels if scorer.__class__.__name__ == \
-                '_PredictScorer' else pred_probas
+            preds = pred_labels if scorer.__class__.__name__ == "_PredictScorer" else pred_probas
             score = scorer._score_func(targets, preds, **scorer._kwargs)
 
             return score
         else:
             scores = {}
             for name, one_scorer in scorer.items():
-                preds = pred_labels if one_scorer.__class__.__name__\
-                    == '_PredictScorer' else pred_probas
-                score = one_scorer._score_func(targets, preds,
-                                               **one_scorer._kwargs)
+                preds = pred_labels if one_scorer.__class__.__name__ == "_PredictScorer" else pred_probas
+                score = one_scorer._score_func(targets, preds, **one_scorer._kwargs)
                 scores[name] = score
 
     # TODO: multi-class metrics
     # multi-label
     else:
-        pred_labels = (pred_probas > 0.5).astype('int32')
-        targets = y_true.astype('int32')
+        pred_labels = (pred_probas > 0.5).astype("int32")
+        targets = y_true.astype("int32")
         if not is_multimetric:
-            preds = pred_labels if scorer.__class__.__name__ == \
-                '_PredictScorer' else pred_probas
-            score, _ = compute_score(preds, targets,
-                                     scorer._score_func)
+            preds = pred_labels if scorer.__class__.__name__ == "_PredictScorer" else pred_probas
+            score, _ = compute_score(preds, targets, scorer._score_func)
             return score
         else:
             scores = {}
             for name, one_scorer in scorer.items():
-                preds = pred_labels if one_scorer.__class__.__name__\
-                    == '_PredictScorer' else pred_probas
-                score, _ = compute_score(preds, targets,
-                                         one_scorer._score_func)
+                preds = pred_labels if one_scorer.__class__.__name__ == "_PredictScorer" else pred_probas
+                score, _ = compute_score(preds, targets, one_scorer._score_func)
                 scores[name] = score
 
     return scores
 
 
-def main(inputs, infile_estimator, infile1, infile2,
-         outfile_result, outfile_object=None,
-         outfile_weights=None, outfile_y_true=None,
-         outfile_y_preds=None, groups=None,
-         ref_seq=None, intervals=None, targets=None,
-         fasta_path=None):
+def main(
+    inputs,
+    infile_estimator,
+    infile1,
+    infile2,
+    outfile_result,
+    outfile_object=None,
+    outfile_weights=None,
+    outfile_y_true=None,
+    outfile_y_preds=None,
+    groups=None,
+    ref_seq=None,
+    intervals=None,
+    targets=None,
+    fasta_path=None,
+):
     """
     Parameter
     ---------
@@ -209,19 +218,19 @@
     fasta_path : str
         File path to dataset containing fasta file
     """
-    warnings.simplefilter('ignore')
+    warnings.simplefilter("ignore")
 
-    with open(inputs, 'r') as param_handler:
+    with open(inputs, "r") as param_handler:
         params = json.load(param_handler)
 
     #  load estimator
-    with open(infile_estimator, 'rb') as estimator_handler:
+    with open(infile_estimator, "rb") as estimator_handler:
         estimator = load_model(estimator_handler)
 
     estimator = clean_params(estimator)
 
     # swap hyperparameter
-    swapping = params['experiment_schemes']['hyperparams_swapping']
+    swapping = params["experiment_schemes"]["hyperparams_swapping"]
     swap_params = _eval_swap_params(swapping)
     estimator.set_params(**swap_params)
 
@@ -230,38 +239,39 @@
     # store read dataframe object
     loaded_df = {}
 
-    input_type = params['input_options']['selected_input']
+    input_type = params["input_options"]["selected_input"]
     # tabular input
-    if input_type == 'tabular':
-        header = 'infer' if params['input_options']['header1'] else None
-        column_option = (params['input_options']['column_selector_options_1']
-                         ['selected_column_selector_option'])
-        if column_option in ['by_index_number', 'all_but_by_index_number',
-                             'by_header_name', 'all_but_by_header_name']:
-            c = params['input_options']['column_selector_options_1']['col1']
+    if input_type == "tabular":
+        header = "infer" if params["input_options"]["header1"] else None
+        column_option = params["input_options"]["column_selector_options_1"]["selected_column_selector_option"]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = params["input_options"]["column_selector_options_1"]["col1"]
         else:
             c = None
 
         df_key = infile1 + repr(header)
-        df = pd.read_csv(infile1, sep='\t', header=header,
-                         parse_dates=True)
+        df = pd.read_csv(infile1, sep="\t", header=header, parse_dates=True)
         loaded_df[df_key] = df
 
         X = read_columns(df, c=c, c_option=column_option).astype(float)
     # sparse input
-    elif input_type == 'sparse':
-        X = mmread(open(infile1, 'r'))
+    elif input_type == "sparse":
+        X = mmread(open(infile1, "r"))
 
     # fasta_file input
-    elif input_type == 'seq_fasta':
-        pyfaidx = get_module('pyfaidx')
+    elif input_type == "seq_fasta":
+        pyfaidx = get_module("pyfaidx")
         sequences = pyfaidx.Fasta(fasta_path)
         n_seqs = len(sequences.keys())
         X = np.arange(n_seqs)[:, np.newaxis]
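+        # for/else: the else clause raises only when no parameter ending in "fasta_path" is found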
         for param in estimator_params.keys():
-            if param.endswith('fasta_path'):
-                estimator.set_params(
-                    **{param: fasta_path})
+            if param.endswith("fasta_path"):
+                estimator.set_params(**{param: fasta_path})
                 break
         else:
             raise ValueError(
@@ -270,25 +280,29 @@
                 "KerasGBatchClassifier with "
                 "FastaDNABatchGenerator/FastaProteinBatchGenerator "
                 "or having GenomeOneHotEncoder/ProteinOneHotEncoder "
-                "in pipeline!")
+                "in pipeline!"
+            )
 
-    elif input_type == 'refseq_and_interval':
+    elif input_type == "refseq_and_interval":
         path_params = {
-            'data_batch_generator__ref_genome_path': ref_seq,
-            'data_batch_generator__intervals_path': intervals,
-            'data_batch_generator__target_path': targets
+            "data_batch_generator__ref_genome_path": ref_seq,
+            "data_batch_generator__intervals_path": intervals,
+            "data_batch_generator__target_path": targets,
         }
         estimator.set_params(**path_params)
         n_intervals = sum(1 for line in open(intervals))
         X = np.arange(n_intervals)[:, np.newaxis]
 
     # Get target y
-    header = 'infer' if params['input_options']['header2'] else None
-    column_option = (params['input_options']['column_selector_options_2']
-                     ['selected_column_selector_option2'])
-    if column_option in ['by_index_number', 'all_but_by_index_number',
-                         'by_header_name', 'all_but_by_header_name']:
-        c = params['input_options']['column_selector_options_2']['col2']
+    header = "infer" if params["input_options"]["header2"] else None
+    column_option = params["input_options"]["column_selector_options_2"]["selected_column_selector_option2"]
+    if column_option in [
+        "by_index_number",
+        "all_but_by_index_number",
+        "by_header_name",
+        "all_but_by_header_name",
+    ]:
+        c = params["input_options"]["column_selector_options_2"]["col2"]
     else:
         c = None
 
@@ -296,37 +310,35 @@
     if df_key in loaded_df:
         infile2 = loaded_df[df_key]
     else:
-        infile2 = pd.read_csv(infile2, sep='\t',
-                              header=header, parse_dates=True)
+        infile2 = pd.read_csv(infile2, sep="\t", header=header, parse_dates=True)
         loaded_df[df_key] = infile2
 
-    y = read_columns(
-            infile2,
-            c=c,
-            c_option=column_option,
-            sep='\t',
-            header=header,
-            parse_dates=True)
+    y = read_columns(infile2, c=c, c_option=column_option, sep="\t", header=header, parse_dates=True)
     if len(y.shape) == 2 and y.shape[1] == 1:
         y = y.ravel()
-    if input_type == 'refseq_and_interval':
-        estimator.set_params(
-            data_batch_generator__features=y.ravel().tolist())
+    if input_type == "refseq_and_interval":
+        estimator.set_params(data_batch_generator__features=y.ravel().tolist())
         y = None
     # end y
 
     # load groups
     if groups:
-        groups_selector = (params['experiment_schemes']['test_split']
-                                 ['split_algos']).pop('groups_selector')
+        groups_selector = params["experiment_schemes"]["test_split"]["split_algos"].pop("groups_selector")
 
-        header = 'infer' if groups_selector['header_g'] else None
-        column_option = \
-            (groups_selector['column_selector_options_g']
-                            ['selected_column_selector_option_g'])
-        if column_option in ['by_index_number', 'all_but_by_index_number',
-                             'by_header_name', 'all_but_by_header_name']:
-            c = groups_selector['column_selector_options_g']['col_g']
+        header = "infer" if groups_selector["header_g"] else None
+        column_option = groups_selector["column_selector_options_g"]["selected_column_selector_option_g"]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = groups_selector["column_selector_options_g"]["col_g"]
         else:
             c = None
 
@@ -334,13 +346,12 @@
         if df_key in loaded_df:
             groups = loaded_df[df_key]
 
-        groups = read_columns(
-                groups,
-                c=c,
-                c_option=column_option,
-                sep='\t',
-                header=header,
-                parse_dates=True)
+        groups = read_columns(
+            groups,
+            c=c,
+            c_option=column_option,
+            sep="\t",
+            header=header,
+            parse_dates=True,
+        )
         groups = groups.ravel()
 
     # del loaded_df
@@ -349,86 +360,99 @@
     # cache iraps_core fits could increase search speed significantly
     memory = joblib.Memory(location=CACHE_DIR, verbose=0)
     main_est = get_main_estimator(estimator)
-    if main_est.__class__.__name__ == 'IRAPSClassifier':
+    if main_est.__class__.__name__ == "IRAPSClassifier":
         main_est.set_params(memory=memory)
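(Editorial note: the memoization above pays off because hyperparameter search refits `IRAPSClassifier` repeatedly on identical inputs. A self-contained illustration of the `joblib.Memory` pattern; the cached function and the cache directory are invented for the demo:

    from joblib import Memory

    memory = Memory(location="./cache_demo", verbose=0)

    @memory.cache
    def slow_square_sum(n):
        return sum(i * i for i in range(n))

    slow_square_sum(10000)  # computed and written to the on-disk cache
    slow_square_sum(10000)  # identical call, answered from the cache
)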
 
     # handle scorer, convert to scorer dict
     scoring = params['experiment_schemes']['metrics']['scoring']
+    if scoring is not None:
+        # get_scoring() expects secondary_scoring to be a comma-separated string (not a list)
+        # Check whether secondary_scoring is specified
+        secondary_scoring = scoring.get("secondary_scoring", None)
+        if secondary_scoring is not None:
+            # If secondary_scoring is specified, convert the list into a comma-separated string
+            scoring["secondary_scoring"] = ",".join(scoring["secondary_scoring"])
+
     scorer = get_scoring(scoring)
     scorer, _ = _check_multimetric_scoring(estimator, scoring=scorer)
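(Editorial note: a worked example of the conversion above, with a hypothetical `scoring` dict shaped the way the Galaxy form serializes it, a list, versus the comma-separated string `get_scoring()` expects:

    scoring = {
        "primary_scoring": "accuracy",
        "secondary_scoring": ["f1_macro", "recall_macro"],  # as serialized
    }
    if scoring.get("secondary_scoring") is not None:
        scoring["secondary_scoring"] = ",".join(scoring["secondary_scoring"])
    assert scoring["secondary_scoring"] == "f1_macro,recall_macro"
)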
 
     # handle test (first) split
-    test_split_options = (params['experiment_schemes']
-                                ['test_split']['split_algos'])
+    test_split_options = params["experiment_schemes"]["test_split"]["split_algos"]
 
-    if test_split_options['shuffle'] == 'group':
-        test_split_options['labels'] = groups
-    if test_split_options['shuffle'] == 'stratified':
+    if test_split_options["shuffle"] == "group":
+        test_split_options["labels"] = groups
+    if test_split_options["shuffle"] == "stratified":
         if y is not None:
-            test_split_options['labels'] = y
+            test_split_options["labels"] = y
         else:
-            raise ValueError("Stratified shuffle split is not "
-                             "applicable on empty target values!")
+            raise ValueError("Stratified shuffle split is not " "applicable on empty target values!")
 
-    X_train, X_test, y_train, y_test, groups_train, groups_test = \
-        train_test_split_none(X, y, groups, **test_split_options)
+    (
+        X_train,
+        X_test,
+        y_train,
+        y_test,
+        groups_train,
+        _groups_test,
+    ) = train_test_split_none(X, y, groups, **test_split_options)
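(Editorial note: `train_test_split_none` is a galaxy_ml helper; judging from this call site, it behaves like sklearn's `train_test_split` extended to pass `None` inputs, for example a missing `groups` array, through unchanged and to always return the six arrays unpacked above. That reading is an inference, not the helper's documentation. The plain sklearn analogue, for orientation:

    import numpy as np
    from sklearn.model_selection import train_test_split

    X = np.arange(20).reshape(10, 2)
    y = np.arange(10)
    # train_test_split returns one train/test pair per input array; with
    # X and y that is four arrays, mirroring the first four names above.
    X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, random_state=0)
)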
 
-    exp_scheme = params['experiment_schemes']['selected_exp_scheme']
+    exp_scheme = params["experiment_schemes"]["selected_exp_scheme"]
 
     # handle validation (second) split
-    if exp_scheme == 'train_val_test':
-        val_split_options = (params['experiment_schemes']
-                                   ['val_split']['split_algos'])
+    if exp_scheme == "train_val_test":
+        val_split_options = params["experiment_schemes"]["val_split"]["split_algos"]
 
-        if val_split_options['shuffle'] == 'group':
-            val_split_options['labels'] = groups_train
-        if val_split_options['shuffle'] == 'stratified':
+        if val_split_options["shuffle"] == "group":
+            val_split_options["labels"] = groups_train
+        if val_split_options["shuffle"] == "stratified":
             if y_train is not None:
-                val_split_options['labels'] = y_train
+                val_split_options["labels"] = y_train
             else:
-                raise ValueError("Stratified shuffle split is not "
-                                 "applicable on empty target values!")
+                raise ValueError("Stratified shuffle split is not applicable on empty target values!")
 
-        X_train, X_val, y_train, y_val, groups_train, groups_val = \
-            train_test_split_none(X_train, y_train, groups_train,
-                                  **val_split_options)
+        (
+            X_train,
+            X_val,
+            y_train,
+            y_val,
+            groups_train,
+            _groups_val,
+        ) = train_test_split_none(X_train, y_train, groups_train, **val_split_options)
 
     # train and eval
-    if hasattr(estimator, 'validation_data'):
-        if exp_scheme == 'train_val_test':
-            estimator.fit(X_train, y_train,
-                          validation_data=(X_val, y_val))
+    if hasattr(estimator, "validation_data"):
+        if exp_scheme == "train_val_test":
+            estimator.fit(X_train, y_train, validation_data=(X_val, y_val))
         else:
-            estimator.fit(X_train, y_train,
-                          validation_data=(X_test, y_test))
+            estimator.fit(X_train, y_train, validation_data=(X_test, y_test))
     else:
         estimator.fit(X_train, y_train)
 
-    if hasattr(estimator, 'evaluate'):
+    if hasattr(estimator, "evaluate"):
         steps = estimator.prediction_steps
         batch_size = estimator.batch_size
-        generator = estimator.data_generator_.flow(X_test, y=y_test,
-                                                   batch_size=batch_size)
-        predictions, y_true = _predict_generator(estimator.model_, generator,
-                                                 steps=steps)
+        generator = estimator.data_generator_.flow(X_test, y=y_test, batch_size=batch_size)
+        predictions, y_true = _predict_generator(estimator.model_, generator, steps=steps)
         scores = _evaluate(y_true, predictions, scorer, is_multimetric=True)
 
     else:
-        if hasattr(estimator, 'predict_proba'):
+        if hasattr(estimator, "predict_proba"):
             predictions = estimator.predict_proba(X_test)
         else:
             predictions = estimator.predict(X_test)
 
         y_true = y_test
-        scores = _score(estimator, X_test, y_test, scorer,
-                        is_multimetric=True)
+        scores = _score(estimator, X_test, y_test, scorer, is_multimetric=True)
     if outfile_y_true:
         try:
-            pd.DataFrame(y_true).to_csv(outfile_y_true, sep='\t',
-                                        index=False)
+            pd.DataFrame(y_true).to_csv(outfile_y_true, sep="\t", index=False)
             pd.DataFrame(predictions).astype(np.float32).to_csv(
-                outfile_y_preds, sep='\t', index=False,
-                float_format='%g', chunksize=10000)
+                outfile_y_preds,
+                sep="\t",
+                index=False,
+                float_format="%g",
+                chunksize=10000,
+            )
         except Exception as e:
             print("Error in saving predictions: %s" % e)
 
@@ -437,8 +461,7 @@
         scores[name] = [score]
     df = pd.DataFrame(scores)
     df = df[sorted(df.columns)]
-    df.to_csv(path_or_buf=outfile_result, sep='\t',
-              header=True, index=False)
+    df.to_csv(path_or_buf=outfile_result, sep="\t", header=True, index=False)
 
     memory.clear(warn=False)
 
@@ -447,23 +470,22 @@
         if isinstance(estimator, Pipeline):
             main_est = estimator.steps[-1][-1]
 
-        if hasattr(main_est, 'model_') \
-                and hasattr(main_est, 'save_weights'):
+        if hasattr(main_est, "model_") and hasattr(main_est, "save_weights"):
             if outfile_weights:
                 main_est.save_weights(outfile_weights)
             del main_est.model_
             del main_est.fit_params
             del main_est.model_class_
-            del main_est.validation_data
-            if getattr(main_est, 'data_generator_', None):
+            if getattr(main_est, "validation_data", None):
+                del main_est.validation_data
+            if getattr(main_est, "data_generator_", None):
                 del main_est.data_generator_
 
-        with open(outfile_object, 'wb') as output_handler:
-            pickle.dump(estimator, output_handler,
-                        pickle.HIGHEST_PROTOCOL)
+        with open(outfile_object, "wb") as output_handler:
+            pickle.dump(estimator, output_handler, pickle.HIGHEST_PROTOCOL)
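(Editorial note: the attribute deletions above exist because a live Keras backend (`model_`, `data_generator_`, and friends) cannot be pickled; the weights go to a separate file via `save_weights`, and only the stripped wrapper is dumped. A schematic of that order of operations; the `Wrapper` class is a stand-in, not the tool's estimator:

    import pickle

    class Wrapper:
        pass

    est = Wrapper()
    est.model_ = object()  # stands in for an unpicklable backend model
    # ... save the weights to their own file first ...
    del est.model_         # strip what pickle cannot handle
    with open("estimator.pkl", "wb") as fh:
        pickle.dump(est, fh, pickle.HIGHEST_PROTOCOL)
)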
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     aparser = argparse.ArgumentParser()
     aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
     aparser.add_argument("-e", "--estimator", dest="infile_estimator")
@@ -481,11 +503,19 @@
     aparser.add_argument("-f", "--fasta_path", dest="fasta_path")
     args = aparser.parse_args()
 
-    main(args.inputs, args.infile_estimator, args.infile1, args.infile2,
-         args.outfile_result, outfile_object=args.outfile_object,
-         outfile_weights=args.outfile_weights,
-         outfile_y_true=args.outfile_y_true,
-         outfile_y_preds=args.outfile_y_preds,
-         groups=args.groups,
-         ref_seq=args.ref_seq, intervals=args.intervals,
-         targets=args.targets, fasta_path=args.fasta_path)
+    main(
+        args.inputs,
+        args.infile_estimator,
+        args.infile1,
+        args.infile2,
+        args.outfile_result,
+        outfile_object=args.outfile_object,
+        outfile_weights=args.outfile_weights,
+        outfile_y_true=args.outfile_y_true,
+        outfile_y_preds=args.outfile_y_preds,
+        groups=args.groups,
+        ref_seq=args.ref_seq,
+        intervals=args.intervals,
+        targets=args.targets,
+        fasta_path=args.fasta_path,
+    )
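(Editorial note: for orientation, a hypothetical skeleton of the `inputs` JSON this script consumes, reconstructed purely from the keys accessed above. Every value is illustrative, and "train_test" as the name of the non-"train_val_test" scheme is an assumption:

    params = {
        "input_options": {
            "selected_input": "tabular",
            "header1": True,
            "header2": True,
            "column_selector_options_1": {
                "selected_column_selector_option": "all_but_by_header_name",
                "col1": ["target"],
            },
            "column_selector_options_2": {
                "selected_column_selector_option2": "by_header_name",
                "col2": ["target"],
            },
        },
        "experiment_schemes": {
            "selected_exp_scheme": "train_test",  # assumed value
            "metrics": {"scoring": {"primary_scoring": "accuracy",
                                    "secondary_scoring": None}},
            "test_split": {"split_algos": {"shuffle": "stratified"}},
        },
    }
)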
diff -r daece0f27108 -r fe627c026dc6 main_macros.xml
--- a/main_macros.xml	Wed Mar 11 17:11:13 2020 +0000
+++ b/main_macros.xml	Tue Apr 13 20:44:53 2021 +0000
@@ -1,1952 +1,1940 @@
[Note: the body of this hunk, and the follow-on hunks @@ -1954,12 +1942,12 @@, @@ -1977,11 +1965,11 @@, @@ -1999,11 +1987,11 @@ and @@ -2014,22 +2002,14 @@, is tool-wrapper XML whose markup was lost in extraction; only stray text fragments survive, so the diff cannot be reproduced here. The recoverable changes: the version token is bumped from 1.0.8.2 to 1.0.8.3; the macro file is reindented from two-space to four-space indentation throughout; the Galaxy-ML requirement, the output filters on selected_tasks['selected_task'] ('load'/'train'), the Zenodo DOI 10.5281/zenodo.15094, and the BibTeX citation blocks (scikit-learn, SciPy, Urbanowicz et al., XGBoost, imbalanced-learn, and Selene) are re-wrapped and reindented without substantive change.]
 
diff -r daece0f27108 -r fe627c026dc6 ml_visualization_ex.py
--- a/ml_visualization_ex.py	Wed Mar 11 17:11:13 2020 +0000
+++ b/ml_visualization_ex.py	Tue Apr 13 20:44:53 2021 +0000
@@ -1,37 +1,36 @@
 import argparse
 import json
+import os
+import warnings
+
 import matplotlib
 import matplotlib.pyplot as plt
 import numpy as np
-import os
 import pandas as pd
 import plotly
 import plotly.graph_objs as go
-import warnings
-
+from galaxy_ml.utils import load_model, read_columns, SafeEval
 from keras.models import model_from_json
 from keras.utils import plot_model
 from sklearn.feature_selection.base import SelectorMixin
-from sklearn.metrics import precision_recall_curve, average_precision_score
-from sklearn.metrics import roc_curve, auc, confusion_matrix
+from sklearn.metrics import auc, average_precision_score, confusion_matrix, precision_recall_curve, roc_curve
 from sklearn.pipeline import Pipeline
-from galaxy_ml.utils import load_model, read_columns, SafeEval
 
 
 safe_eval = SafeEval()
 
 # plotly default colors
 default_colors = [
-    '#1f77b4',  # muted blue
-    '#ff7f0e',  # safety orange
-    '#2ca02c',  # cooked asparagus green
-    '#d62728',  # brick red
-    '#9467bd',  # muted purple
-    '#8c564b',  # chestnut brown
-    '#e377c2',  # raspberry yogurt pink
-    '#7f7f7f',  # middle gray
-    '#bcbd22',  # curry yellow-green
-    '#17becf'   # blue-teal
+    "#1f77b4",  # muted blue
+    "#ff7f0e",  # safety orange
+    "#2ca02c",  # cooked asparagus green
+    "#d62728",  # brick red
+    "#9467bd",  # muted purple
+    "#8c564b",  # chestnut brown
+    "#e377c2",  # raspberry yogurt pink
+    "#7f7f7f",  # middle gray
+    "#bcbd22",  # curry yellow-green
+    "#17becf",  # blue-teal
 ]
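(Editorial note: this palette is always indexed as `default_colors[idx % len(default_colors)]` in this module, so a figure with more than ten curves wraps back to the first color instead of indexing out of range:

    # Relies on the default_colors list defined directly above.
    colors = [default_colors[i % len(default_colors)] for i in range(12)]
    assert colors[10] == default_colors[0]  # curve 10 reuses muted blue
)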
 
 
@@ -52,46 +51,31 @@
         y_true = df1.iloc[:, idx].values
         y_score = df2.iloc[:, idx].values
 
-        precision, recall, _ = precision_recall_curve(
-            y_true, y_score, pos_label=pos_label)
-        ap = average_precision_score(
-            y_true, y_score, pos_label=pos_label or 1)
+        precision, recall, _ = precision_recall_curve(y_true, y_score, pos_label=pos_label)
+        ap = average_precision_score(y_true, y_score, pos_label=pos_label or 1)
 
         trace = go.Scatter(
             x=recall,
             y=precision,
-            mode='lines',
-            marker=dict(
-                color=default_colors[idx % len(default_colors)]
-            ),
-            name='%s (area = %.3f)' % (idx, ap)
+            mode="lines",
+            marker=dict(color=default_colors[idx % len(default_colors)]),
+            name="%s (area = %.3f)" % (idx, ap),
         )
         data.append(trace)
 
     layout = go.Layout(
-        xaxis=dict(
-            title='Recall',
-            linecolor='lightslategray',
-            linewidth=1
-        ),
-        yaxis=dict(
-            title='Precision',
-            linecolor='lightslategray',
-            linewidth=1
-        ),
+        xaxis=dict(title="Recall", linecolor="lightslategray", linewidth=1),
+        yaxis=dict(title="Precision", linecolor="lightslategray", linewidth=1),
         title=dict(
-            text=title or 'Precision-Recall Curve',
+            text=title or "Precision-Recall Curve",
             x=0.5,
             y=0.92,
-            xanchor='center',
-            yanchor='top'
+            xanchor="center",
+            yanchor="top",
         ),
-        font=dict(
-            family="sans-serif",
-            size=11
-        ),
+        font=dict(family="sans-serif", size=11),
         # control background colors
-        plot_bgcolor='rgba(255,255,255,0)'
+        plot_bgcolor="rgba(255,255,255,0)",
     )
     """
     legend=dict(
@@ -112,45 +96,47 @@
 
     plotly.offline.plot(fig, filename="output.html", auto_open=False)
     # to be discovered by `from_work_dir`
-    os.rename('output.html', 'output')
+    os.rename("output.html", "output")
 
 
 def visualize_pr_curve_matplotlib(df1, df2, pos_label, title=None):
-    """visualize pr-curve using matplotlib and output svg image
-    """
+    """visualize pr-curve using matplotlib and output svg image"""
     backend = matplotlib.get_backend()
     if "inline" not in backend:
         matplotlib.use("SVG")
-    plt.style.use('seaborn-colorblind')
+    plt.style.use("seaborn-colorblind")
     plt.figure()
 
     for idx in range(df1.shape[1]):
         y_true = df1.iloc[:, idx].values
         y_score = df2.iloc[:, idx].values
 
-        precision, recall, _ = precision_recall_curve(
-            y_true, y_score, pos_label=pos_label)
-        ap = average_precision_score(
-            y_true, y_score, pos_label=pos_label or 1)
+        precision, recall, _ = precision_recall_curve(y_true, y_score, pos_label=pos_label)
+        ap = average_precision_score(y_true, y_score, pos_label=pos_label or 1)
 
-        plt.step(recall, precision, 'r-', color="black", alpha=0.3,
-                 lw=1, where="post", label='%s (area = %.3f)' % (idx, ap))
+        plt.step(
+            recall,
+            precision,
+            "r-",
+            color="black",
+            alpha=0.3,
+            lw=1,
+            where="post",
+            label="%s (area = %.3f)" % (idx, ap),
+        )
 
     plt.xlim([0.0, 1.0])
     plt.ylim([0.0, 1.05])
-    plt.xlabel('Recall')
-    plt.ylabel('Precision')
-    title = title or 'Precision-Recall Curve'
+    plt.xlabel("Recall")
+    plt.ylabel("Precision")
+    title = title or "Precision-Recall Curve"
     plt.title(title)
     folder = os.getcwd()
     plt.savefig(os.path.join(folder, "output.svg"), format="svg")
-    os.rename(os.path.join(folder, "output.svg"),
-              os.path.join(folder, "output"))
+    os.rename(os.path.join(folder, "output.svg"), os.path.join(folder, "output"))
 
 
-def visualize_roc_curve_plotly(df1, df2, pos_label,
-                               drop_intermediate=True,
-                               title=None):
+def visualize_roc_curve_plotly(df1, df2, pos_label, drop_intermediate=True, title=None):
     """output roc-curve in html using plotly
 
     df1 : pandas.DataFrame
@@ -169,45 +155,31 @@
         y_true = df1.iloc[:, idx].values
         y_score = df2.iloc[:, idx].values
 
-        fpr, tpr, _ = roc_curve(y_true, y_score, pos_label=pos_label,
-                                drop_intermediate=drop_intermediate)
+        fpr, tpr, _ = roc_curve(y_true, y_score, pos_label=pos_label, drop_intermediate=drop_intermediate)
         roc_auc = auc(fpr, tpr)
 
         trace = go.Scatter(
             x=fpr,
             y=tpr,
-            mode='lines',
-            marker=dict(
-                color=default_colors[idx % len(default_colors)]
-            ),
-            name='%s (area = %.3f)' % (idx, roc_auc)
+            mode="lines",
+            marker=dict(color=default_colors[idx % len(default_colors)]),
+            name="%s (area = %.3f)" % (idx, roc_auc),
         )
         data.append(trace)
 
     layout = go.Layout(
-        xaxis=dict(
-            title='False Positive Rate',
-            linecolor='lightslategray',
-            linewidth=1
-        ),
-        yaxis=dict(
-            title='True Positive Rate',
-            linecolor='lightslategray',
-            linewidth=1
-        ),
+        xaxis=dict(title="False Positive Rate", linecolor="lightslategray", linewidth=1),
+        yaxis=dict(title="True Positive Rate", linecolor="lightslategray", linewidth=1),
         title=dict(
-            text=title or 'Receiver Operating Characteristic (ROC) Curve',
+            text=title or "Receiver Operating Characteristic (ROC) Curve",
             x=0.5,
             y=0.92,
-            xanchor='center',
-            yanchor='top'
+            xanchor="center",
+            yanchor="top",
         ),
-        font=dict(
-            family="sans-serif",
-            size=11
-        ),
+        font=dict(family="sans-serif", size=11),
         # control background colors
-        plot_bgcolor='rgba(255,255,255,0)'
+        plot_bgcolor="rgba(255,255,255,0)",
     )
     """
     # legend=dict(
@@ -229,66 +201,84 @@
 
     plotly.offline.plot(fig, filename="output.html", auto_open=False)
     # to be discovered by `from_work_dir`
-    os.rename('output.html', 'output')
+    os.rename("output.html", "output")
 
 
-def visualize_roc_curve_matplotlib(df1, df2, pos_label,
-                                   drop_intermediate=True,
-                                   title=None):
-    """visualize roc-curve using matplotlib and output svg image
-    """
+def visualize_roc_curve_matplotlib(df1, df2, pos_label, drop_intermediate=True, title=None):
+    """visualize roc-curve using matplotlib and output svg image"""
     backend = matplotlib.get_backend()
     if "inline" not in backend:
         matplotlib.use("SVG")
-    plt.style.use('seaborn-colorblind')
+    plt.style.use("seaborn-colorblind")
     plt.figure()
 
     for idx in range(df1.shape[1]):
         y_true = df1.iloc[:, idx].values
         y_score = df2.iloc[:, idx].values
 
-        fpr, tpr, _ = roc_curve(y_true, y_score, pos_label=pos_label,
-                                drop_intermediate=drop_intermediate)
+        fpr, tpr, _ = roc_curve(y_true, y_score, pos_label=pos_label, drop_intermediate=drop_intermediate)
         roc_auc = auc(fpr, tpr)
 
-        plt.step(fpr, tpr, 'r-', color="black", alpha=0.3, lw=1,
-                 where="post", label='%s (area = %.3f)' % (idx, roc_auc))
+        plt.step(
+            fpr,
+            tpr,
+            "r-",
+            color="black",
+            alpha=0.3,
+            lw=1,
+            where="post",
+            label="%s (area = %.3f)" % (idx, roc_auc),
+        )
 
     plt.xlim([0.0, 1.0])
     plt.ylim([0.0, 1.05])
-    plt.xlabel('False Positive Rate')
-    plt.ylabel('True Positive Rate')
-    title = title or 'Receiver Operating Characteristic (ROC) Curve'
+    plt.xlabel("False Positive Rate")
+    plt.ylabel("True Positive Rate")
+    title = title or "Receiver Operating Characteristic (ROC) Curve"
     plt.title(title)
     folder = os.getcwd()
     plt.savefig(os.path.join(folder, "output.svg"), format="svg")
-    os.rename(os.path.join(folder, "output.svg"),
-              os.path.join(folder, "output"))
+    os.rename(os.path.join(folder, "output.svg"), os.path.join(folder, "output"))
 
 
 def get_dataframe(file_path, plot_selection, header_name, column_name):
-    header = 'infer' if plot_selection[header_name] else None
+    header = "infer" if plot_selection[header_name] else None
     column_option = plot_selection[column_name]["selected_column_selector_option"]
-    if column_option in ["by_index_number", "all_but_by_index_number", "by_header_name", "all_but_by_header_name"]:
+    if column_option in [
+        "by_index_number",
+        "all_but_by_index_number",
+        "by_header_name",
+        "all_but_by_header_name",
+    ]:
         col = plot_selection[column_name]["col1"]
     else:
         col = None
     _, input_df = read_columns(file_path, c=col,
-                                   c_option=column_option,
-                                   return_df=True,
-                                   sep='\t', header=header,
-                                   parse_dates=True)
+                               c_option=column_option,
+                               return_df=True,
+                               sep="\t", header=header,
+                               parse_dates=True)
     return input_df
 
 
-def main(inputs, infile_estimator=None, infile1=None,
-         infile2=None, outfile_result=None,
-         outfile_object=None, groups=None,
-         ref_seq=None, intervals=None,
-         targets=None, fasta_path=None,
-         model_config=None, true_labels=None,
-         predicted_labels=None, plot_color=None,
-         title=None):
+def main(
+    inputs,
+    infile_estimator=None,
+    infile1=None,
+    infile2=None,
+    outfile_result=None,
+    outfile_object=None,
+    groups=None,
+    ref_seq=None,
+    intervals=None,
+    targets=None,
+    fasta_path=None,
+    model_config=None,
+    true_labels=None,
+    predicted_labels=None,
+    plot_color=None,
+    title=None,
+):
     """
     Parameters
     ----------
@@ -341,34 +331,39 @@
     title : str, default is None
         Title of the confusion matrix heatmap
     """
-    warnings.simplefilter('ignore')
+    warnings.simplefilter("ignore")
 
-    with open(inputs, 'r') as param_handler:
+    with open(inputs, "r") as param_handler:
         params = json.load(param_handler)
 
-    title = params['plotting_selection']['title'].strip()
-    plot_type = params['plotting_selection']['plot_type']
-    plot_format = params['plotting_selection']['plot_format']
+    title = params["plotting_selection"]["title"].strip()
+    plot_type = params["plotting_selection"]["plot_type"]
+    plot_format = params["plotting_selection"]["plot_format"]
 
-    if plot_type == 'feature_importances':
-        with open(infile_estimator, 'rb') as estimator_handler:
+    if plot_type == "feature_importances":
+        with open(infile_estimator, "rb") as estimator_handler:
             estimator = load_model(estimator_handler)
 
-        column_option = (params['plotting_selection']
-                               ['column_selector_options']
-                               ['selected_column_selector_option'])
-        if column_option in ['by_index_number', 'all_but_by_index_number',
-                             'by_header_name', 'all_but_by_header_name']:
-            c = (params['plotting_selection']
-                       ['column_selector_options']['col1'])
+        column_option = params["plotting_selection"]["column_selector_options"]["selected_column_selector_option"]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = params["plotting_selection"]["column_selector_options"]["col1"]
         else:
             c = None
 
-        _, input_df = read_columns(infile1, c=c,
-                                   c_option=column_option,
-                                   return_df=True,
-                                   sep='\t', header='infer',
-                                   parse_dates=True)
+        _, input_df = read_columns(
+            infile1,
+            c=c,
+            c_option=column_option,
+            return_df=True,
+            sep="\t",
+            header="infer",
+            parse_dates=True,
+        )
 
         feature_names = input_df.columns.values
 
@@ -379,16 +374,14 @@
                     feature_names = feature_names[mask]
             estimator = estimator.steps[-1][-1]
 
-        if hasattr(estimator, 'coef_'):
+        if hasattr(estimator, "coef_"):
             coefs = estimator.coef_
         else:
-            coefs = getattr(estimator, 'feature_importances_', None)
+            coefs = getattr(estimator, "feature_importances_", None)
         if coefs is None:
-            raise RuntimeError('The classifier does not expose '
-                               '"coef_" or "feature_importances_" '
-                               'attributes')
+            raise RuntimeError("The classifier does not expose " '"coef_" or "feature_importances_" ' "attributes")
 
-        threshold = params['plotting_selection']['threshold']
+        threshold = params["plotting_selection"]["threshold"]
         if threshold is not None:
             mask = (coefs > threshold) | (coefs < -threshold)
             coefs = coefs[mask]
@@ -397,80 +390,74 @@
         # sort features by coefficient value, descending
         indices = np.argsort(coefs)[::-1]
 
-        trace = go.Bar(x=feature_names[indices],
-                       y=coefs[indices])
+        trace = go.Bar(x=feature_names[indices], y=coefs[indices])
         layout = go.Layout(title=title or "Feature Importances")
         fig = go.Figure(data=[trace], layout=layout)
 
-        plotly.offline.plot(fig, filename="output.html",
-                            auto_open=False)
+        plotly.offline.plot(fig, filename="output.html", auto_open=False)
         # to be discovered by `from_work_dir`
-        os.rename('output.html', 'output')
+        os.rename("output.html", "output")
 
         return 0
 
-    elif plot_type in ('pr_curve', 'roc_curve'):
-        df1 = pd.read_csv(infile1, sep='\t', header='infer')
-        df2 = pd.read_csv(infile2, sep='\t', header='infer').astype(np.float32)
+    elif plot_type in ("pr_curve", "roc_curve"):
+        df1 = pd.read_csv(infile1, sep="\t", header="infer")
+        df2 = pd.read_csv(infile2, sep="\t", header="infer").astype(np.float32)
 
-        minimum = params['plotting_selection']['report_minimum_n_positives']
+        minimum = params["plotting_selection"]["report_minimum_n_positives"]
         # filter out columns whose n_positives is below the threshold
         if minimum:
             mask = df1.sum(axis=0) >= minimum
             df1 = df1.loc[:, mask]
             df2 = df2.loc[:, mask]
 
-        pos_label = params['plotting_selection']['pos_label'].strip() \
-            or None
+        pos_label = params["plotting_selection"]["pos_label"].strip() or None
 
-        if plot_type == 'pr_curve':
-            if plot_format == 'plotly_html':
+        if plot_type == "pr_curve":
+            if plot_format == "plotly_html":
                 visualize_pr_curve_plotly(df1, df2, pos_label, title=title)
             else:
                 visualize_pr_curve_matplotlib(df1, df2, pos_label, title)
-        else:          # 'roc_curve'
-            drop_intermediate = (params['plotting_selection']
-                                       ['drop_intermediate'])
-            if plot_format == 'plotly_html':
-                visualize_roc_curve_plotly(df1, df2, pos_label,
-                                           drop_intermediate=drop_intermediate,
-                                           title=title)
+        else:  # 'roc_curve'
+            drop_intermediate = params["plotting_selection"]["drop_intermediate"]
+            if plot_format == "plotly_html":
+                visualize_roc_curve_plotly(
+                    df1,
+                    df2,
+                    pos_label,
+                    drop_intermediate=drop_intermediate,
+                    title=title,
+                )
             else:
                 visualize_roc_curve_matplotlib(
-                    df1, df2, pos_label,
+                    df1,
+                    df2,
+                    pos_label,
                     drop_intermediate=drop_intermediate,
-                    title=title)
+                    title=title,
+                )
 
         return 0
 
-    elif plot_type == 'rfecv_gridscores':
-        input_df = pd.read_csv(infile1, sep='\t', header='infer')
+    elif plot_type == "rfecv_gridscores":
+        input_df = pd.read_csv(infile1, sep="\t", header="infer")
         scores = input_df.iloc[:, 0]
-        steps = params['plotting_selection']['steps'].strip()
+        steps = params["plotting_selection"]["steps"].strip()
         steps = safe_eval(steps)
 
         data = go.Scatter(
             x=list(range(len(scores))),
             y=scores,
             text=[str(_) for _ in steps] if steps else None,
-            mode='lines'
+            mode="lines",
         )
         layout = go.Layout(
             xaxis=dict(title="Number of features selected"),
             yaxis=dict(title="Cross validation score"),
-            title=dict(
-                text=title or None,
-                x=0.5,
-                y=0.92,
-                xanchor='center',
-                yanchor='top'
-            ),
-            font=dict(
-                family="sans-serif",
-                size=11
-            ),
+            title=dict(text=title or None, x=0.5, y=0.92, xanchor="center", yanchor="top"),
+            font=dict(family="sans-serif", size=11),
             # control background colors
-            plot_bgcolor='rgba(255,255,255,0)'
+            plot_bgcolor="rgba(255,255,255,0)",
         )
         """
         # legend=dict(
@@ -489,55 +476,43 @@
         """
 
         fig = go.Figure(data=[data], layout=layout)
-        plotly.offline.plot(fig, filename="output.html",
-                            auto_open=False)
+        plotly.offline.plot(fig, filename="output.html", auto_open=False)
         # to be discovered by `from_work_dir`
-        os.rename('output.html', 'output')
+        os.rename("output.html", "output")
 
         return 0
 
-    elif plot_type == 'learning_curve':
-        input_df = pd.read_csv(infile1, sep='\t', header='infer')
-        plot_std_err = params['plotting_selection']['plot_std_err']
+    elif plot_type == "learning_curve":
+        input_df = pd.read_csv(infile1, sep="\t", header="infer")
+        plot_std_err = params["plotting_selection"]["plot_std_err"]
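+        # one trace each for train and test scores; std errors render as error bars when enabled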
         data1 = go.Scatter(
-            x=input_df['train_sizes_abs'],
-            y=input_df['mean_train_scores'],
-            error_y=dict(
-                array=input_df['std_train_scores']
-            ) if plot_std_err else None,
-            mode='lines',
+            x=input_df["train_sizes_abs"],
+            y=input_df["mean_train_scores"],
+            error_y=dict(array=input_df["std_train_scores"]) if plot_std_err else None,
+            mode="lines",
             name="Train Scores",
         )
         data2 = go.Scatter(
-            x=input_df['train_sizes_abs'],
-            y=input_df['mean_test_scores'],
-            error_y=dict(
-                array=input_df['std_test_scores']
-            ) if plot_std_err else None,
-            mode='lines',
+            x=input_df["train_sizes_abs"],
+            y=input_df["mean_test_scores"],
+            error_y=dict(array=input_df["std_test_scores"]) if plot_std_err else None,
+            mode="lines",
             name="Test Scores",
         )
         layout = dict(
-            xaxis=dict(
-                title='No. of samples'
-            ),
-            yaxis=dict(
-                title='Performance Score'
-            ),
+            xaxis=dict(title="No. of samples"),
+            yaxis=dict(title="Performance Score"),
             # modify these configurations to customize the image
             title=dict(
-                text=title or 'Learning Curve',
+                text=title or "Learning Curve",
                 x=0.5,
                 y=0.92,
-                xanchor='center',
-                yanchor='top'
+                xanchor="center",
+                yanchor="top",
             ),
-            font=dict(
-                family="sans-serif",
-                size=11
-            ),
+            font=dict(family="sans-serif", size=11),
             # control background colors
-            plot_bgcolor='rgba(255,255,255,0)'
+            plot_bgcolor="rgba(255,255,255,0)",
         )
         """
         # legend=dict(
@@ -556,27 +531,26 @@
         """
 
         fig = go.Figure(data=[data1, data2], layout=layout)
-        plotly.offline.plot(fig, filename="output.html",
-                            auto_open=False)
+        plotly.offline.plot(fig, filename="output.html", auto_open=False)
         # to be discovered by `from_work_dir`
-        os.rename('output.html', 'output')
+        os.rename("output.html", "output")
 
         return 0
 
-    elif plot_type == 'keras_plot_model':
-        with open(model_config, 'r') as f:
+    elif plot_type == "keras_plot_model":
+        with open(model_config, "r") as f:
             model_str = f.read()
         model = model_from_json(model_str)
         plot_model(model, to_file="output.png")
-        os.rename('output.png', 'output')
+        os.rename("output.png", "output")
 
         return 0
 
-    elif plot_type == 'classification_confusion_matrix':
+    elif plot_type == "classification_confusion_matrix":
         plot_selection = params["plotting_selection"]
         input_true = get_dataframe(true_labels, plot_selection, "header_true", "column_selector_options_true")
-        header_predicted = 'infer' if plot_selection["header_predicted"] else None
-        input_predicted = pd.read_csv(predicted_labels, sep='\t', parse_dates=True, header=header_predicted)
+        header_predicted = "infer" if plot_selection["header_predicted"] else None
+        input_predicted = pd.read_csv(predicted_labels, sep="\t", parse_dates=True, header=header_predicted)
         true_classes = input_true.iloc[:, -1].copy()
         predicted_classes = input_predicted.iloc[:, -1].copy()
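+        # unique class labels, reused as tick positions on both axes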
         axis_labels = list(set(true_classes))
@@ -586,15 +560,15 @@
         for i in range(len(c_matrix)):
             for j in range(len(c_matrix)):
                 ax.text(j, i, c_matrix[i, j], ha="center", va="center", color="k")
-        ax.set_ylabel('True class labels')
-        ax.set_xlabel('Predicted class labels')
+        ax.set_ylabel("True class labels")
+        ax.set_xlabel("Predicted class labels")
         ax.set_title(title)
         ax.set_xticks(axis_labels)
         ax.set_yticks(axis_labels)
         fig.colorbar(im, ax=ax)
         fig.tight_layout()
         plt.savefig("output.png", dpi=125)
-        os.rename('output.png', 'output')
+        os.rename("output.png", "output")
 
         return 0
 
@@ -603,7 +577,7 @@
     # fig.write_image("image.pdf", format='pdf', width=340*2, height=226*2)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     aparser = argparse.ArgumentParser()
     aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
     aparser.add_argument("-e", "--estimator", dest="infile_estimator")
@@ -623,11 +597,21 @@
     aparser.add_argument("-pt", "--title", dest="title")
     args = aparser.parse_args()
 
-    main(args.inputs, args.infile_estimator, args.infile1, args.infile2,
-         args.outfile_result, outfile_object=args.outfile_object,
-         groups=args.groups, ref_seq=args.ref_seq, intervals=args.intervals,
-         targets=args.targets, fasta_path=args.fasta_path,
-         model_config=args.model_config, true_labels=args.true_labels,
-         predicted_labels=args.predicted_labels,
-         plot_color=args.plot_color,
-         title=args.title)
+    main(
+        args.inputs,
+        args.infile_estimator,
+        args.infile1,
+        args.infile2,
+        args.outfile_result,
+        outfile_object=args.outfile_object,
+        groups=args.groups,
+        ref_seq=args.ref_seq,
+        intervals=args.intervals,
+        targets=args.targets,
+        fasta_path=args.fasta_path,
+        model_config=args.model_config,
+        true_labels=args.true_labels,
+        predicted_labels=args.predicted_labels,
+        plot_color=args.plot_color,
+        title=args.title,
+    )
diff -r daece0f27108 -r fe627c026dc6 model_prediction.py
--- a/model_prediction.py	Wed Mar 11 17:11:13 2020 +0000
+++ b/model_prediction.py	Tue Apr 13 20:44:53 2021 +0000
@@ -1,23 +1,26 @@
 import argparse
 import json
+import warnings
+
 import numpy as np
 import pandas as pd
-import warnings
-
+from galaxy_ml.utils import get_module, load_model, read_columns, try_get_attr
 from scipy.io import mmread
 from sklearn.pipeline import Pipeline
 
-from galaxy_ml.utils import (load_model, read_columns,
-                             get_module, try_get_attr)
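+# number of parallel jobs, taken from Galaxy's GALAXY_SLOTS allocation (defaults to 1)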
+N_JOBS = int(__import__("os").environ.get("GALAXY_SLOTS", 1))
 
 
-N_JOBS = int(__import__('os').environ.get('GALAXY_SLOTS', 1))
-
-
-def main(inputs, infile_estimator, outfile_predict,
-         infile_weights=None, infile1=None,
-         fasta_path=None, ref_seq=None,
-         vcf_path=None):
+def main(
+    inputs,
+    infile_estimator,
+    outfile_predict,
+    infile_weights=None,
+    infile1=None,
+    fasta_path=None,
+    ref_seq=None,
+    vcf_path=None,
+):
     """
     Parameters
     ----------
@@ -45,96 +48,94 @@
     vcf_path : str
         File path to dataset containing variants info.
     """
-    warnings.filterwarnings('ignore')
+    warnings.filterwarnings("ignore")
 
-    with open(inputs, 'r') as param_handler:
+    with open(inputs, "r") as param_handler:
         params = json.load(param_handler)
 
     # load model
-    with open(infile_estimator, 'rb') as est_handler:
+    with open(infile_estimator, "rb") as est_handler:
         estimator = load_model(est_handler)
 
     main_est = estimator
     if isinstance(estimator, Pipeline):
         main_est = estimator.steps[-1][-1]
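+    # model skeletons exposing `config` and `load_weights` store their weights in a separate dataset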
-    if hasattr(main_est, 'config') and hasattr(main_est, 'load_weights'):
-        if not infile_weights or infile_weights == 'None':
-            raise ValueError("The selected model skeleton asks for weights, "
-                             "but dataset for weights wan not selected!")
+    if hasattr(main_est, "config") and hasattr(main_est, "load_weights"):
+        if not infile_weights or infile_weights == "None":
+            raise ValueError(
+                "The selected model skeleton asks for weights, " "but dataset for weights wan not selected!"
+            )
         main_est.load_weights(infile_weights)
 
     # handle data input
-    input_type = params['input_options']['selected_input']
+    input_type = params["input_options"]["selected_input"]
     # tabular input
-    if input_type == 'tabular':
-        header = 'infer' if params['input_options']['header1'] else None
-        column_option = (params['input_options']
-                               ['column_selector_options_1']
-                               ['selected_column_selector_option'])
-        if column_option in ['by_index_number', 'all_but_by_index_number',
-                             'by_header_name', 'all_but_by_header_name']:
-            c = params['input_options']['column_selector_options_1']['col1']
+    if input_type == "tabular":
+        header = "infer" if params["input_options"]["header1"] else None
+        column_option = params["input_options"]["column_selector_options_1"]["selected_column_selector_option"]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = params["input_options"]["column_selector_options_1"]["col1"]
         else:
             c = None
 
-        df = pd.read_csv(infile1, sep='\t', header=header, parse_dates=True)
+        df = pd.read_csv(infile1, sep="\t", header=header, parse_dates=True)
 
         X = read_columns(df, c=c, c_option=column_option).astype(float)
 
-        if params['method'] == 'predict':
+        if params["method"] == "predict":
             preds = estimator.predict(X)
         else:
             preds = estimator.predict_proba(X)
 
     # sparse input
-    elif input_type == 'sparse':
-        X = mmread(open(infile1, 'r'))
-        if params['method'] == 'predict':
+    elif input_type == "sparse":
+        X = mmread(open(infile1, "r"))
+        if params["method"] == "predict":
             preds = estimator.predict(X)
         else:
             preds = estimator.predict_proba(X)
 
     # fasta input
-    elif input_type == 'seq_fasta':
-        if not hasattr(estimator, 'data_batch_generator'):
+    elif input_type == "seq_fasta":
+        if not hasattr(estimator, "data_batch_generator"):
             raise ValueError(
                 "To do prediction on sequences in fasta input, "
                 "the estimator must be a `KerasGBatchClassifier`"
-                "equipped with data_batch_generator!")
-        pyfaidx = get_module('pyfaidx')
+                "equipped with data_batch_generator!"
+            )
+        pyfaidx = get_module("pyfaidx")
         sequences = pyfaidx.Fasta(fasta_path)
         n_seqs = len(sequences.keys())
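+        # X carries only row indices; the batch generator yields the encoded sequences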
         X = np.arange(n_seqs)[:, np.newaxis]
         seq_length = estimator.data_batch_generator.seq_length
-        batch_size = getattr(estimator, 'batch_size', 32)
+        batch_size = getattr(estimator, "batch_size", 32)
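+        # ceiling division: number of batches needed to cover all sequences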
         steps = (n_seqs + batch_size - 1) // batch_size
 
-        seq_type = params['input_options']['seq_type']
-        klass = try_get_attr(
-            'galaxy_ml.preprocessors', seq_type)
+        seq_type = params["input_options"]["seq_type"]
+        klass = try_get_attr("galaxy_ml.preprocessors", seq_type)
 
-        pred_data_generator = klass(
-            fasta_path, seq_length=seq_length)
+        pred_data_generator = klass(fasta_path, seq_length=seq_length)
 
-        if params['method'] == 'predict':
-            preds = estimator.predict(
-                X, data_generator=pred_data_generator, steps=steps)
+        if params["method"] == "predict":
+            preds = estimator.predict(X, data_generator=pred_data_generator, steps=steps)
         else:
-            preds = estimator.predict_proba(
-                X, data_generator=pred_data_generator, steps=steps)
+            preds = estimator.predict_proba(X, data_generator=pred_data_generator, steps=steps)
 
     # vcf input
-    elif input_type == 'variant_effect':
-        klass = try_get_attr('galaxy_ml.preprocessors',
-                             'GenomicVariantBatchGenerator')
+    elif input_type == "variant_effect":
+        klass = try_get_attr("galaxy_ml.preprocessors", "GenomicVariantBatchGenerator")
 
-        options = params['input_options']
-        options.pop('selected_input')
-        if options['blacklist_regions'] == 'none':
-            options['blacklist_regions'] = None
+        options = params["input_options"]
+        options.pop("selected_input")
+        if options["blacklist_regions"] == "none":
+            options["blacklist_regions"] = None
 
-        pred_data_generator = klass(
-            ref_genome_path=ref_seq, vcf_path=vcf_path, **options)
+        pred_data_generator = klass(ref_genome_path=ref_seq, vcf_path=vcf_path, **options)
 
         pred_data_generator.set_processing_attrs()
 
@@ -143,9 +144,8 @@
         # predict 1600 samples at a time, then write to file
         gen_flow = pred_data_generator.flow(batch_size=1600)
 
-        file_writer = open(outfile_predict, 'w')
-        header_row = '\t'.join(['chrom', 'pos', 'name', 'ref',
-                                'alt', 'strand'])
+        file_writer = open(outfile_predict, "w")
+        header_row = "\t".join(["chrom", "pos", "name", "ref", "alt", "strand"])
         file_writer.write(header_row)
         header_done = False
 
@@ -155,23 +155,24 @@
         try:
             while steps_done < len(gen_flow):
                 index_array = next(gen_flow.index_generator)
-                batch_X = gen_flow._get_batches_of_transformed_samples(
-                    index_array)
+                batch_X = gen_flow._get_batches_of_transformed_samples(index_array)
 
-                if params['method'] == 'predict':
+                if params["method"] == "predict":
                     batch_preds = estimator.predict(
                         batch_X,
                         # The presence of `pred_data_generator` below is to
                         # override model carrying data_generator if there
                         # is any.
-                        data_generator=pred_data_generator)
+                        data_generator=pred_data_generator,
+                    )
                 else:
                     batch_preds = estimator.predict_proba(
                         batch_X,
                         # The presence of `pred_data_generator` below is to
                         # override model carrying data_generator if there
                         # is any.
-                        data_generator=pred_data_generator)
+                        data_generator=pred_data_generator,
+                    )
 
                 if batch_preds.ndim == 1:
                     batch_preds = batch_preds[:, np.newaxis]
@@ -181,12 +182,12 @@
 
                 if not header_done:
                     heads = np.arange(batch_preds.shape[-1]).astype(str)
-                    heads_str = '\t'.join(heads)
+                    heads_str = "\t".join(heads)
                     file_writer.write("\t%s\n" % heads_str)
                     header_done = True
 
                 for row in batch_out:
-                    row_str = '\t'.join(row)
+                    row_str = "\t".join(row)
                     file_writer.write("%s\n" % row_str)
 
                 steps_done += 1
@@ -200,14 +201,14 @@
 
     # output
     if len(preds.shape) == 1:
-        rval = pd.DataFrame(preds, columns=['Predicted'])
+        rval = pd.DataFrame(preds, columns=["Predicted"])
     else:
         rval = pd.DataFrame(preds)
 
-    rval.to_csv(outfile_predict, sep='\t', header=True, index=False)
+    rval.to_csv(outfile_predict, sep="\t", header=True, index=False)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     aparser = argparse.ArgumentParser()
     aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
     aparser.add_argument("-e", "--infile_estimator", dest="infile_estimator")
@@ -219,7 +220,13 @@
     aparser.add_argument("-v", "--vcf_path", dest="vcf_path")
     args = aparser.parse_args()
 
-    main(args.inputs, args.infile_estimator, args.outfile_predict,
-         infile_weights=args.infile_weights, infile1=args.infile1,
-         fasta_path=args.fasta_path, ref_seq=args.ref_seq,
-         vcf_path=args.vcf_path)
+    main(
+        args.inputs,
+        args.infile_estimator,
+        args.outfile_predict,
+        infile_weights=args.infile_weights,
+        infile1=args.infile1,
+        fasta_path=args.fasta_path,
+        ref_seq=args.ref_seq,
+        vcf_path=args.vcf_path,
+    )
diff -r daece0f27108 -r fe627c026dc6 pca.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pca.py	Tue Apr 13 20:44:53 2021 +0000
@@ -0,0 +1,186 @@
+import argparse
+
+import numpy as np
+from galaxy_ml.utils import read_columns
+from sklearn.decomposition import IncrementalPCA, KernelPCA, PCA
+
+
+def main():
+    parser = argparse.ArgumentParser(description="Perform principal component analysis on tabular data")
+    parser.add_argument("-i", "--infile", help="Input file")
+    parser.add_argument(
+        "--header", action="store_true", help="Include the header row or skip it"
+    )
+    parser.add_argument(
+        "-c",
+        "--columns",
+        type=str.lower,
+        default="all",
+        choices=[
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+            "all_columns",
+        ],
+        help="Choose to select all columns, or exclude/include some",
+    )
+    parser.add_argument(
+        "-ci",
+        "--column_indices",
+        type=str.lower,
+        help="Choose to select all columns, or exclude/include some",
+    )
+    parser.add_argument(
+        "-n",
+        "--number",
+        nargs="?",
+        type=int,
+        default=None,
+        help="Number of components to keep. If not set, all components are kept",
+    )
+    parser.add_argument("--whiten", action="store_true", help="Whiten the components")
+    parser.add_argument(
+        "-t",
+        "--pca_type",
+        type=str.lower,
+        default="classical",
+        choices=["classical", "incremental", "kernel"],
+        help="Choose which flavour of PCA to use",
+    )
+    parser.add_argument(
+        "-s",
+        "--svd_solver",
+        type=str.lower,
+        default="auto",
+        choices=["auto", "full", "arpack", "randomized"],
+        help="Choose the type of svd solver.",
+    )
+    parser.add_argument(
+        "-b",
+        "--batch_size",
+        nargs="?",
+        type=int,
+        default=None,
+        help="The number of samples to use for each batch",
+    )
+    parser.add_argument(
+        "-k",
+        "--kernel",
+        type=str.lower,
+        default="linear",
+        choices=["linear", "poly", "rbf", "sigmoid", "cosine", "precomputed"],
+        help="Choose the type of kernel.",
+    )
+    parser.add_argument(
+        "-g",
+        "--gamma",
+        nargs="?",
+        type=float,
+        default=None,
+        help="Kernel coefficient for rbf, poly and sigmoid kernels. Ignored by other kernels",
+    )
+    parser.add_argument(
+        "-tol",
+        "--tolerance",
+        type=float,
+        default=0.0,
+        help="Convergence tolerance for arpack. If 0, optimal value will be chosen by arpack",
+    )
+    parser.add_argument(
+        "-mi",
+        "--max_iter",
+        nargs="?",
+        type=int,
+        default=None,
+        help="Maximum number of iterations for arpack",
+    )
+    parser.add_argument(
+        "-d",
+        "--degree",
+        type=int,
+        default=3,
+        help="Degree for poly kernels. Ignored by other kernels",
+    )
+    parser.add_argument(
+        "-cf",
+        "--coef0",
+        type=float,
+        default=1.0,
+        help="Independent term in poly and sigmoid kernels",
+    )
+    parser.add_argument(
+        "-e",
+        "--eigen_solver",
+        type=str.lower,
+        default="auto",
+        choices=["auto", "dense", "arpack"],
+        help="Choose the type of eigen solver.",
+    )
+    parser.add_argument(
+        "-o", "--outfile", help="Base name for output file (no extension)."
+    )
+    args = parser.parse_args()
+
+    usecols = None
+    pca_params = {}
+
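+    # index-based selectors expect comma-separated integers; header-based ones take the names as given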
+    if args.columns == "by_index_number" or args.columns == "all_but_by_index_number":
+        usecols = [int(i) for i in args.column_indices.split(",")]
+    elif args.columns == "by_header_name" or args.columns == "all_but_by_header_name":
+        usecols = args.column_indices
+
+    header = "infer" if args.header else None
+
+    pca_input = read_columns(
+        f=args.infile,
+        c=usecols,
+        c_option=args.columns,
+        sep="\t",
+        header=header,
+        parse_dates=True,
+        encoding=None,
+        index_col=None,
+    )
+
+    pca_params.update({"n_components": args.number})
+
+    if args.pca_type == "classical":
+        pca_params.update({"svd_solver": args.svd_solver, "whiten": args.whiten})
+        if args.svd_solver == "arpack":
+            pca_params.update({"tol": args.tolerance})
+        pca = PCA()
+
+    elif args.pca_type == "incremental":
+        pca_params.update({"batch_size": args.batch_size, "whiten": args.whiten})
+        pca = IncrementalPCA()
+
+    elif args.pca_type == "kernel":
+        pca_params.update(
+            {
+                "kernel": args.kernel,
+                "eigen_solver": args.eigen_solver,
+                "gamma": args.gamma,
+            }
+        )
+
+        if args.kernel == "poly":
+            pca_params.update({"degree": args.degree, "coef0": args.coef0})
+        elif args.kernel == "sigmoid":
+            pca_params.update({"coef0": args.coef0})
+        elif args.kernel == "precomputed":
+            pca_input = np.dot(pca_input, pca_input.T)
+
+        if args.eigen_solver == "arpack":
+            pca_params.update({"tol": args.tolerance, "max_iter": args.max_iter})
+
+        pca = KernelPCA()
+
+    print(pca_params)
+    pca.set_params(**pca_params)
+    pca_output = pca.fit_transform(pca_input)
+    np.savetxt(fname=args.outfile, X=pca_output, fmt="%.4f", delimiter="\t")
+
+
+if __name__ == "__main__":
+    main()
diff -r daece0f27108 -r fe627c026dc6 search_model_validation.py
--- a/search_model_validation.py	Wed Mar 11 17:11:13 2020 +0000
+++ b/search_model_validation.py	Tue Apr 13 20:44:53 2021 +0000
@@ -1,55 +1,66 @@
 import argparse
 import collections
-import imblearn
-import joblib
 import json
-import numpy as np
 import os
-import pandas as pd
 import pickle
-import skrebate
 import sys
 import warnings
+
+import imblearn
+import joblib
+import numpy as np
+import pandas as pd
+import skrebate
+from galaxy_ml.utils import (
+    clean_params,
+    get_cv,
+    get_main_estimator,
+    get_module,
+    get_scoring,
+    load_model,
+    read_columns,
+    SafeEval,
+    try_get_attr,
+)
 from scipy.io import mmread
-from sklearn import (cluster, decomposition, feature_selection,
-                     kernel_approximation, model_selection, preprocessing)
+from sklearn import (
+    cluster,
+    decomposition,
+    feature_selection,
+    kernel_approximation,
+    model_selection,
+    preprocessing,
+)
 from sklearn.exceptions import FitFailedWarning
-from sklearn.model_selection._validation import _score, cross_validate
 from sklearn.model_selection import _search, _validation
-from sklearn.pipeline import Pipeline
-
-from galaxy_ml.utils import (SafeEval, get_cv, get_scoring, load_model,
-                             read_columns, try_get_attr, get_module,
-                             clean_params, get_main_estimator)
+from sklearn.model_selection._validation import _score, cross_validate
 
 
-_fit_and_score = try_get_attr('galaxy_ml.model_validations', '_fit_and_score')
-setattr(_search, '_fit_and_score', _fit_and_score)
-setattr(_validation, '_fit_and_score', _fit_and_score)
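+# patch sklearn's search/validation internals with galaxy_ml's extended _fit_and_score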
+_fit_and_score = try_get_attr("galaxy_ml.model_validations", "_fit_and_score")
+setattr(_search, "_fit_and_score", _fit_and_score)
+setattr(_validation, "_fit_and_score", _fit_and_score)
 
-N_JOBS = int(os.environ.get('GALAXY_SLOTS', 1))
+N_JOBS = int(os.environ.get("GALAXY_SLOTS", 1))
 # handle disk cache
-CACHE_DIR = os.path.join(os.getcwd(), 'cached')
+CACHE_DIR = os.path.join(os.getcwd(), "cached")
 del os
-NON_SEARCHABLE = ('n_jobs', 'pre_dispatch', 'memory', '_path',
-                  'nthread', 'callbacks')
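+# parameters that tune resources or runtime rather than the model, hence excluded from search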
+NON_SEARCHABLE = ("n_jobs", "pre_dispatch", "memory", "_path", "nthread", "callbacks")
 
 
 def _eval_search_params(params_builder):
     search_params = {}
 
-    for p in params_builder['param_set']:
-        search_list = p['sp_list'].strip()
-        if search_list == '':
+    for p in params_builder["param_set"]:
+        search_list = p["sp_list"].strip()
+        if search_list == "":
             continue
 
-        param_name = p['sp_name']
+        param_name = p["sp_name"]
         if param_name.lower().endswith(NON_SEARCHABLE):
-            print("Warning: `%s` is not eligible for search and was "
-                  "omitted!" % param_name)
+            print("Warning: `%s` is not eligible for search and was " "omitted!" % param_name)
             continue
 
-        if not search_list.startswith(':'):
+        if not search_list.startswith(":"):
             safe_eval = SafeEval(load_scipy=True, load_numpy=True)
             ev = safe_eval(search_list)
             search_params[param_name] = ev
@@ -60,26 +71,27 @@
             # TODO maybe add regular express check
             ev = safe_eval_es(search_list)
             preprocessings = (
-                preprocessing.StandardScaler(), preprocessing.Binarizer(),
+                preprocessing.StandardScaler(),
+                preprocessing.Binarizer(),
                 preprocessing.MaxAbsScaler(),
-                preprocessing.Normalizer(), preprocessing.MinMaxScaler(),
+                preprocessing.Normalizer(),
+                preprocessing.MinMaxScaler(),
                 preprocessing.PolynomialFeatures(),
-                preprocessing.RobustScaler(), feature_selection.SelectKBest(),
+                preprocessing.RobustScaler(),
+                feature_selection.SelectKBest(),
                 feature_selection.GenericUnivariateSelect(),
                 feature_selection.SelectPercentile(),
-                feature_selection.SelectFpr(), feature_selection.SelectFdr(),
+                feature_selection.SelectFpr(),
+                feature_selection.SelectFdr(),
                 feature_selection.SelectFwe(),
                 feature_selection.VarianceThreshold(),
                 decomposition.FactorAnalysis(random_state=0),
                 decomposition.FastICA(random_state=0),
                 decomposition.IncrementalPCA(),
                 decomposition.KernelPCA(random_state=0, n_jobs=N_JOBS),
-                decomposition.LatentDirichletAllocation(
-                    random_state=0, n_jobs=N_JOBS),
-                decomposition.MiniBatchDictionaryLearning(
-                    random_state=0, n_jobs=N_JOBS),
-                decomposition.MiniBatchSparsePCA(
-                    random_state=0, n_jobs=N_JOBS),
+                decomposition.LatentDirichletAllocation(random_state=0, n_jobs=N_JOBS),
+                decomposition.MiniBatchDictionaryLearning(random_state=0, n_jobs=N_JOBS),
+                decomposition.MiniBatchSparsePCA(random_state=0, n_jobs=N_JOBS),
                 decomposition.NMF(random_state=0),
                 decomposition.PCA(random_state=0),
                 decomposition.SparsePCA(random_state=0, n_jobs=N_JOBS),
@@ -94,59 +106,48 @@
                 skrebate.SURFstar(n_jobs=N_JOBS),
                 skrebate.MultiSURF(n_jobs=N_JOBS),
                 skrebate.MultiSURFstar(n_jobs=N_JOBS),
-                imblearn.under_sampling.ClusterCentroids(
-                    random_state=0, n_jobs=N_JOBS),
-                imblearn.under_sampling.CondensedNearestNeighbour(
-                    random_state=0, n_jobs=N_JOBS),
-                imblearn.under_sampling.EditedNearestNeighbours(
-                    random_state=0, n_jobs=N_JOBS),
-                imblearn.under_sampling.RepeatedEditedNearestNeighbours(
-                    random_state=0, n_jobs=N_JOBS),
+                imblearn.under_sampling.ClusterCentroids(random_state=0, n_jobs=N_JOBS),
+                imblearn.under_sampling.CondensedNearestNeighbour(random_state=0, n_jobs=N_JOBS),
+                imblearn.under_sampling.EditedNearestNeighbours(random_state=0, n_jobs=N_JOBS),
+                imblearn.under_sampling.RepeatedEditedNearestNeighbours(random_state=0, n_jobs=N_JOBS),
                 imblearn.under_sampling.AllKNN(random_state=0, n_jobs=N_JOBS),
-                imblearn.under_sampling.InstanceHardnessThreshold(
-                    random_state=0, n_jobs=N_JOBS),
-                imblearn.under_sampling.NearMiss(
-                    random_state=0, n_jobs=N_JOBS),
-                imblearn.under_sampling.NeighbourhoodCleaningRule(
-                    random_state=0, n_jobs=N_JOBS),
-                imblearn.under_sampling.OneSidedSelection(
-                    random_state=0, n_jobs=N_JOBS),
-                imblearn.under_sampling.RandomUnderSampler(
-                    random_state=0),
-                imblearn.under_sampling.TomekLinks(
-                    random_state=0, n_jobs=N_JOBS),
+                imblearn.under_sampling.InstanceHardnessThreshold(random_state=0, n_jobs=N_JOBS),
+                imblearn.under_sampling.NearMiss(random_state=0, n_jobs=N_JOBS),
+                imblearn.under_sampling.NeighbourhoodCleaningRule(random_state=0, n_jobs=N_JOBS),
+                imblearn.under_sampling.OneSidedSelection(random_state=0, n_jobs=N_JOBS),
+                imblearn.under_sampling.RandomUnderSampler(random_state=0),
+                imblearn.under_sampling.TomekLinks(random_state=0, n_jobs=N_JOBS),
                 imblearn.over_sampling.ADASYN(random_state=0, n_jobs=N_JOBS),
                 imblearn.over_sampling.RandomOverSampler(random_state=0),
                 imblearn.over_sampling.SMOTE(random_state=0, n_jobs=N_JOBS),
                 imblearn.over_sampling.SVMSMOTE(random_state=0, n_jobs=N_JOBS),
-                imblearn.over_sampling.BorderlineSMOTE(
-                    random_state=0, n_jobs=N_JOBS),
-                imblearn.over_sampling.SMOTENC(
-                    categorical_features=[], random_state=0, n_jobs=N_JOBS),
+                imblearn.over_sampling.BorderlineSMOTE(random_state=0, n_jobs=N_JOBS),
+                imblearn.over_sampling.SMOTENC(categorical_features=[], random_state=0, n_jobs=N_JOBS),
                 imblearn.combine.SMOTEENN(random_state=0),
-                imblearn.combine.SMOTETomek(random_state=0))
+                imblearn.combine.SMOTETomek(random_state=0),
+            )
             newlist = []
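+            # expand shorthand aliases (e.g. "fs_all") into slices of the `preprocessings` tuple above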
             for obj in ev:
                 if obj is None:
                     newlist.append(None)
-                elif obj == 'all_0':
+                elif obj == "all_0":
                     newlist.extend(preprocessings[0:35])
-                elif obj == 'sk_prep_all':      # no KernalCenter()
+                elif obj == "sk_prep_all":  # no KernalCenter()
                     newlist.extend(preprocessings[0:7])
-                elif obj == 'fs_all':
+                elif obj == "fs_all":
                     newlist.extend(preprocessings[7:14])
-                elif obj == 'decomp_all':
+                elif obj == "decomp_all":
                     newlist.extend(preprocessings[14:25])
-                elif obj == 'k_appr_all':
+                elif obj == "k_appr_all":
                     newlist.extend(preprocessings[25:29])
-                elif obj == 'reb_all':
+                elif obj == "reb_all":
                     newlist.extend(preprocessings[30:35])
-                elif obj == 'imb_all':
+                elif obj == "imb_all":
                     newlist.extend(preprocessings[35:54])
                 elif type(obj) is int and -1 < obj < len(preprocessings):
                     newlist.append(preprocessings[obj])
-                elif hasattr(obj, 'get_params'):       # user uploaded object
-                    if 'n_jobs' in obj.get_params():
+                elif hasattr(obj, "get_params"):  # user uploaded object
+                    if "n_jobs" in obj.get_params():
                         newlist.append(obj.set_params(n_jobs=N_JOBS))
                     else:
                         newlist.append(obj)
@@ -158,9 +159,17 @@
     return search_params
 
 
-def _handle_X_y(estimator, params, infile1, infile2, loaded_df={},
-                ref_seq=None, intervals=None, targets=None,
-                fasta_path=None):
+def _handle_X_y(
+    estimator,
+    params,
+    infile1,
+    infile2,
+    loaded_df={},  # note: mutable default dict is shared across calls
+    ref_seq=None,
+    intervals=None,
+    targets=None,
+    fasta_path=None,
+):
     """read inputs
 
     Params
@@ -192,15 +201,18 @@
     """
     estimator_params = estimator.get_params()
 
-    input_type = params['input_options']['selected_input']
+    input_type = params["input_options"]["selected_input"]
     # tabular input
-    if input_type == 'tabular':
-        header = 'infer' if params['input_options']['header1'] else None
-        column_option = (params['input_options']['column_selector_options_1']
-                         ['selected_column_selector_option'])
-        if column_option in ['by_index_number', 'all_but_by_index_number',
-                             'by_header_name', 'all_but_by_header_name']:
-            c = params['input_options']['column_selector_options_1']['col1']
+    if input_type == "tabular":
+        header = "infer" if params["input_options"]["header1"] else None
+        column_option = params["input_options"]["column_selector_options_1"]["selected_column_selector_option"]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = params["input_options"]["column_selector_options_1"]["col1"]
         else:
             c = None
 
@@ -209,25 +221,23 @@
         if df_key in loaded_df:
             infile1 = loaded_df[df_key]
 
-        df = pd.read_csv(infile1, sep='\t', header=header,
-                         parse_dates=True)
+        df = pd.read_csv(infile1, sep="\t", header=header, parse_dates=True)
         loaded_df[df_key] = df
 
         X = read_columns(df, c=c, c_option=column_option).astype(float)
     # sparse input
-    elif input_type == 'sparse':
-        X = mmread(open(infile1, 'r'))
+    elif input_type == "sparse":
+        X = mmread(open(infile1, "r"))
 
     # fasta_file input
-    elif input_type == 'seq_fasta':
-        pyfaidx = get_module('pyfaidx')
+    elif input_type == "seq_fasta":
+        pyfaidx = get_module("pyfaidx")
         sequences = pyfaidx.Fasta(fasta_path)
         n_seqs = len(sequences.keys())
         X = np.arange(n_seqs)[:, np.newaxis]
         for param in estimator_params.keys():
-            if param.endswith('fasta_path'):
-                estimator.set_params(
-                    **{param: fasta_path})
+            if param.endswith("fasta_path"):
+                estimator.set_params(**{param: fasta_path})
                 break
         else:
             raise ValueError(
@@ -236,25 +246,29 @@
                 "KerasGBatchClassifier with "
                 "FastaDNABatchGenerator/FastaProteinBatchGenerator "
                 "or having GenomeOneHotEncoder/ProteinOneHotEncoder "
-                "in pipeline!")
+                "in pipeline!"
+            )
 
-    elif input_type == 'refseq_and_interval':
+    elif input_type == "refseq_and_interval":
         path_params = {
-            'data_batch_generator__ref_genome_path': ref_seq,
-            'data_batch_generator__intervals_path': intervals,
-            'data_batch_generator__target_path': targets
+            "data_batch_generator__ref_genome_path": ref_seq,
+            "data_batch_generator__intervals_path": intervals,
+            "data_batch_generator__target_path": targets,
         }
         estimator.set_params(**path_params)
         n_intervals = sum(1 for line in open(intervals))
         X = np.arange(n_intervals)[:, np.newaxis]
 
     # Get target y
-    header = 'infer' if params['input_options']['header2'] else None
-    column_option = (params['input_options']['column_selector_options_2']
-                     ['selected_column_selector_option2'])
-    if column_option in ['by_index_number', 'all_but_by_index_number',
-                         'by_header_name', 'all_but_by_header_name']:
-        c = params['input_options']['column_selector_options_2']['col2']
+    header = "infer" if params["input_options"]["header2"] else None
+    column_option = params["input_options"]["column_selector_options_2"]["selected_column_selector_option2"]
+    if column_option in [
+        "by_index_number",
+        "all_but_by_index_number",
+        "by_header_name",
+        "all_but_by_header_name",
+    ]:
+        c = params["input_options"]["column_selector_options_2"]["col2"]
     else:
         c = None
 
@@ -262,30 +276,21 @@
     if df_key in loaded_df:
         infile2 = loaded_df[df_key]
     else:
-        infile2 = pd.read_csv(infile2, sep='\t',
-                              header=header, parse_dates=True)
+        infile2 = pd.read_csv(infile2, sep="\t", header=header, parse_dates=True)
         loaded_df[df_key] = infile2
 
-    y = read_columns(
-            infile2,
-            c=c,
-            c_option=column_option,
-            sep='\t',
-            header=header,
-            parse_dates=True)
+    y = read_columns(infile2, c=c, c_option=column_option, sep="\t", header=header, parse_dates=True)
     if len(y.shape) == 2 and y.shape[1] == 1:
         y = y.ravel()
-    if input_type == 'refseq_and_interval':
-        estimator.set_params(
-            data_batch_generator__features=y.ravel().tolist())
+    if input_type == "refseq_and_interval":
+        estimator.set_params(data_batch_generator__features=y.ravel().tolist())
         y = None
     # end y
 
     return estimator, X, y
 
 
-def _do_outer_cv(searcher, X, y, outer_cv, scoring, error_score='raise',
-                 outfile=None):
+def _do_outer_cv(searcher, X, y, outer_cv, scoring, error_score="raise", outfile=None):
     """Do outer cross-validation for nested CV
 
     Parameters
@@ -305,21 +310,31 @@
     outfile : str
         File path to store the results
     """
-    if error_score == 'raise':
+    if error_score == "raise":
         rval = cross_validate(
-            searcher, X, y, scoring=scoring,
-            cv=outer_cv, n_jobs=N_JOBS, verbose=0,
-            error_score=error_score)
+            searcher,
+            X,
+            y,
+            scoring=scoring,
+            cv=outer_cv,
+            n_jobs=N_JOBS,
+            verbose=0,
+            error_score=error_score,
+        )
     else:
-        warnings.simplefilter('always', FitFailedWarning)
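+        # record failed fits as warnings instead of aborting the whole cross-validation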
+        warnings.simplefilter("always", FitFailedWarning)
         with warnings.catch_warnings(record=True) as w:
             try:
                 rval = cross_validate(
-                    searcher, X, y,
+                    searcher,
+                    X,
+                    y,
                     scoring=scoring,
-                    cv=outer_cv, n_jobs=N_JOBS,
+                    cv=outer_cv,
+                    n_jobs=N_JOBS,
                     verbose=0,
-                    error_score=error_score)
+                    error_score=error_score,
+                )
             except ValueError:
                 pass
             for warning in w:
@@ -327,55 +342,57 @@
 
     keys = list(rval.keys())
     for k in keys:
-        if k.startswith('test'):
-            rval['mean_' + k] = np.mean(rval[k])
-            rval['std_' + k] = np.std(rval[k])
-        if k.endswith('time'):
+        if k.startswith("test"):
+            rval["mean_" + k] = np.mean(rval[k])
+            rval["std_" + k] = np.std(rval[k])
+        if k.endswith("time"):
             rval.pop(k)
     rval = pd.DataFrame(rval)
     rval = rval[sorted(rval.columns)]
-    rval.to_csv(path_or_buf=outfile, sep='\t', header=True, index=False)
+    rval.to_csv(path_or_buf=outfile, sep="\t", header=True, index=False)
 
 
-def _do_train_test_split_val(searcher, X, y, params, error_score='raise',
-                             primary_scoring=None, groups=None,
-                             outfile=None):
-    """ do train test split, searchCV validates on the train and then use
+def _do_train_test_split_val(
+    searcher,
+    X,
+    y,
+    params,
+    error_score="raise",
+    primary_scoring=None,
+    groups=None,
+    outfile=None,
+):
+    """do train test split, searchCV validates on the train and then use
     the best_estimator_ to evaluate on the test
 
     Returns
     -------
     Fitted SearchCV object
     """
-    train_test_split = try_get_attr(
-        'galaxy_ml.model_validations', 'train_test_split')
-    split_options = params['outer_split']
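+    # galaxy_ml's train_test_split provides the stratified and group shuffle modes handled below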
+    train_test_split = try_get_attr("galaxy_ml.model_validations", "train_test_split")
+    split_options = params["outer_split"]
 
     # splits
-    if split_options['shuffle'] == 'stratified':
-        split_options['labels'] = y
+    if split_options["shuffle"] == "stratified":
+        split_options["labels"] = y
         X, X_test, y, y_test = train_test_split(X, y, **split_options)
-    elif split_options['shuffle'] == 'group':
+    elif split_options["shuffle"] == "group":
         if groups is None:
-            raise ValueError("No group based CV option was choosen for "
-                             "group shuffle!")
-        split_options['labels'] = groups
+            raise ValueError("No group based CV option was choosen for " "group shuffle!")
+        split_options["labels"] = groups
         if y is None:
-            X, X_test, groups, _ =\
-                train_test_split(X, groups, **split_options)
+            X, X_test, groups, _ = train_test_split(X, groups, **split_options)
         else:
-            X, X_test, y, y_test, groups, _ =\
-                train_test_split(X, y, groups, **split_options)
+            X, X_test, y, y_test, groups, _ = train_test_split(X, y, groups, **split_options)
     else:
-        if split_options['shuffle'] == 'None':
-            split_options['shuffle'] = None
-        X, X_test, y, y_test =\
-            train_test_split(X, y, **split_options)
+        if split_options["shuffle"] == "None":
+            split_options["shuffle"] = None
+        X, X_test, y, y_test = train_test_split(X, y, **split_options)
 
-    if error_score == 'raise':
+    if error_score == "raise":
         searcher.fit(X, y, groups=groups)
     else:
-        warnings.simplefilter('always', FitFailedWarning)
+        warnings.simplefilter("always", FitFailedWarning)
         with warnings.catch_warnings(record=True) as w:
             try:
                 searcher.fit(X, y, groups=groups)
@@ -390,33 +407,38 @@
     else:
         is_multimetric = False
 
-    best_estimator_ = getattr(searcher, 'best_estimator_')
+    best_estimator_ = getattr(searcher, "best_estimator_")
 
     # TODO Solve deep learning models in pipeline
-    if best_estimator_.__class__.__name__ == 'KerasGBatchClassifier':
-        test_score = best_estimator_.evaluate(
-            X_test, scorer=scorer_, is_multimetric=is_multimetric)
+    if best_estimator_.__class__.__name__ == "KerasGBatchClassifier":
+        test_score = best_estimator_.evaluate(X_test, scorer=scorer_, is_multimetric=is_multimetric)
     else:
-        test_score = _score(best_estimator_, X_test,
-                            y_test, scorer_,
-                            is_multimetric=is_multimetric)
+        test_score = _score(best_estimator_, X_test, y_test, scorer_, is_multimetric=is_multimetric)
 
     if not is_multimetric:
         test_score = {primary_scoring: test_score}
     for key, value in test_score.items():
         test_score[key] = [value]
     result_df = pd.DataFrame(test_score)
-    result_df.to_csv(path_or_buf=outfile, sep='\t', header=True,
-                     index=False)
+    result_df.to_csv(path_or_buf=outfile, sep="\t", header=True, index=False)
 
     return searcher
 
 
-def main(inputs, infile_estimator, infile1, infile2,
-         outfile_result, outfile_object=None,
-         outfile_weights=None, groups=None,
-         ref_seq=None, intervals=None, targets=None,
-         fasta_path=None):
+def main(
+    inputs,
+    infile_estimator,
+    infile1,
+    infile2,
+    outfile_result,
+    outfile_object=None,
+    outfile_weights=None,
+    groups=None,
+    ref_seq=None,
+    intervals=None,
+    targets=None,
+    fasta_path=None,
+):
     """
     Parameters
     ----------
@@ -456,154 +478,174 @@
     fasta_path : str
         File path to dataset containing fasta file
     """
-    warnings.simplefilter('ignore')
+    warnings.simplefilter("ignore")
 
     # store read dataframe object
     loaded_df = {}
 
-    with open(inputs, 'r') as param_handler:
+    with open(inputs, "r") as param_handler:
         params = json.load(param_handler)
 
     # Override the refit parameter
-    params['search_schemes']['options']['refit'] = True \
-        if params['save'] != 'nope' else False
+    params["search_schemes"]["options"]["refit"] = True if params["save"] != "nope" else False
 
-    with open(infile_estimator, 'rb') as estimator_handler:
+    with open(infile_estimator, "rb") as estimator_handler:
         estimator = load_model(estimator_handler)
 
-    optimizer = params['search_schemes']['selected_search_scheme']
+    optimizer = params["search_schemes"]["selected_search_scheme"]
     optimizer = getattr(model_selection, optimizer)
 
     # handle gridsearchcv options
-    options = params['search_schemes']['options']
+    options = params["search_schemes"]["options"]
 
     if groups:
-        header = 'infer' if (options['cv_selector']['groups_selector']
-                                    ['header_g']) else None
-        column_option = (options['cv_selector']['groups_selector']
-                                ['column_selector_options_g']
-                                ['selected_column_selector_option_g'])
-        if column_option in ['by_index_number', 'all_but_by_index_number',
-                             'by_header_name', 'all_but_by_header_name']:
-            c = (options['cv_selector']['groups_selector']
-                        ['column_selector_options_g']['col_g'])
+        header = "infer" if (options["cv_selector"]["groups_selector"]["header_g"]) else None
+        column_option = options["cv_selector"]["groups_selector"]["column_selector_options_g"][
+            "selected_column_selector_option_g"
+        ]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = options["cv_selector"]["groups_selector"]["column_selector_options_g"]["col_g"]
         else:
             c = None
 
         df_key = groups + repr(header)
 
-        groups = pd.read_csv(groups, sep='\t', header=header,
-                             parse_dates=True)
+        groups = pd.read_csv(groups, sep="\t", header=header, parse_dates=True)
         loaded_df[df_key] = groups
 
         groups = read_columns(
-                groups,
-                c=c,
-                c_option=column_option,
-                sep='\t',
-                header=header,
-                parse_dates=True)
+            groups,
+            c=c,
+            c_option=column_option,
+            sep="\t",
+            header=header,
+            parse_dates=True,
+        )
         groups = groups.ravel()
-        options['cv_selector']['groups_selector'] = groups
+        options["cv_selector"]["groups_selector"] = groups
 
-    splitter, groups = get_cv(options.pop('cv_selector'))
-    options['cv'] = splitter
-    primary_scoring = options['scoring']['primary_scoring']
-    options['scoring'] = get_scoring(options['scoring'])
-    if options['error_score']:
-        options['error_score'] = 'raise'
+    splitter, groups = get_cv(options.pop("cv_selector"))
+    options["cv"] = splitter
+    primary_scoring = options["scoring"]["primary_scoring"]
+    # get_scoring() expects secondary_scoring to be a comma-separated string (not a list)
+    # Check if secondary_scoring is specified
+    secondary_scoring = options["scoring"].get("secondary_scoring", None)
+    if secondary_scoring is not None:
+        # If secondary_scoring is specified, convert the list into a comma-separated string
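+        # e.g. ["f1_macro", "roc_auc"] -> "f1_macro,roc_auc" (illustrative values)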
+        options["scoring"]["secondary_scoring"] = ",".join(options["scoring"]["secondary_scoring"])
+    options["scoring"] = get_scoring(options["scoring"])
+    if options["error_score"]:
+        options["error_score"] = "raise"
     else:
-        options['error_score'] = np.NaN
-    if options['refit'] and isinstance(options['scoring'], dict):
-        options['refit'] = primary_scoring
-    if 'pre_dispatch' in options and options['pre_dispatch'] == '':
-        options['pre_dispatch'] = None
+        options["error_score"] = np.NaN
+    if options["refit"] and isinstance(options["scoring"], dict):
+        options["refit"] = primary_scoring
+    if "pre_dispatch" in options and options["pre_dispatch"] == "":
+        options["pre_dispatch"] = None
 
-    params_builder = params['search_schemes']['search_params_builder']
+    params_builder = params["search_schemes"]["search_params_builder"]
     param_grid = _eval_search_params(params_builder)
 
     estimator = clean_params(estimator)
 
     # save the SearchCV object without fit
-    if params['save'] == 'save_no_fit':
+    if params["save"] == "save_no_fit":
         searcher = optimizer(estimator, param_grid, **options)
         print(searcher)
-        with open(outfile_object, 'wb') as output_handler:
-            pickle.dump(searcher, output_handler,
-                        pickle.HIGHEST_PROTOCOL)
+        with open(outfile_object, "wb") as output_handler:
+            pickle.dump(searcher, output_handler, pickle.HIGHEST_PROTOCOL)
         return 0
 
     # read inputs and load new attributes, like paths
-    estimator, X, y = _handle_X_y(estimator, params, infile1, infile2,
-                                  loaded_df=loaded_df, ref_seq=ref_seq,
-                                  intervals=intervals, targets=targets,
-                                  fasta_path=fasta_path)
+    estimator, X, y = _handle_X_y(
+        estimator,
+        params,
+        infile1,
+        infile2,
+        loaded_df=loaded_df,
+        ref_seq=ref_seq,
+        intervals=intervals,
+        targets=targets,
+        fasta_path=fasta_path,
+    )
 
     # caching iraps_core fits can increase search speed significantly
     memory = joblib.Memory(location=CACHE_DIR, verbose=0)
     main_est = get_main_estimator(estimator)
-    if main_est.__class__.__name__ == 'IRAPSClassifier':
+    if main_est.__class__.__name__ == "IRAPSClassifier":
         main_est.set_params(memory=memory)
 
     searcher = optimizer(estimator, param_grid, **options)
 
-    split_mode = params['outer_split'].pop('split_mode')
+    split_mode = params["outer_split"].pop("split_mode")
 
-    if split_mode == 'nested_cv':
+    if split_mode == "nested_cv":
         # make sure refit is chosen
         # this could be True for sklearn models, but not the case for
         # deep learning models
-        if not options['refit'] and \
-                not all(hasattr(estimator, attr)
-                        for attr in ('config', 'model_type')):
+        if not options["refit"] and not all(hasattr(estimator, attr) for attr in ("config", "model_type")):
             warnings.warn("Refit is change to `True` for nested validation!")
-            setattr(searcher, 'refit', True)
+            setattr(searcher, "refit", True)
 
-        outer_cv, _ = get_cv(params['outer_split']['cv_selector'])
+        outer_cv, _ = get_cv(params["outer_split"]["cv_selector"])
         # nested CV, outer cv using cross_validate
-        if options['error_score'] == 'raise':
+        if options["error_score"] == "raise":
             rval = cross_validate(
-                searcher, X, y, scoring=options['scoring'],
-                cv=outer_cv, n_jobs=N_JOBS,
-                verbose=options['verbose'],
-                return_estimator=(params['save'] == 'save_estimator'),
-                error_score=options['error_score'],
-                return_train_score=True)
+                searcher,
+                X,
+                y,
+                scoring=options["scoring"],
+                cv=outer_cv,
+                n_jobs=N_JOBS,
+                verbose=options["verbose"],
+                return_estimator=(params["save"] == "save_estimator"),
+                error_score=options["error_score"],
+                return_train_score=True,
+            )
         else:
-            warnings.simplefilter('always', FitFailedWarning)
+            warnings.simplefilter("always", FitFailedWarning)
             with warnings.catch_warnings(record=True) as w:
                 try:
                     rval = cross_validate(
-                        searcher, X, y,
-                        scoring=options['scoring'],
-                        cv=outer_cv, n_jobs=N_JOBS,
-                        verbose=options['verbose'],
-                        return_estimator=(params['save'] == 'save_estimator'),
-                        error_score=options['error_score'],
-                        return_train_score=True)
+                        searcher,
+                        X,
+                        y,
+                        scoring=options["scoring"],
+                        cv=outer_cv,
+                        n_jobs=N_JOBS,
+                        verbose=options["verbose"],
+                        return_estimator=(params["save"] == "save_estimator"),
+                        error_score=options["error_score"],
+                        return_train_score=True,
+                    )
                 except ValueError:
                     pass
                 for warning in w:
                     print(repr(warning.message))
 
-        fitted_searchers = rval.pop('estimator', [])
+        fitted_searchers = rval.pop("estimator", [])
         if fitted_searchers:
             import os
+
             pwd = os.getcwd()
-            save_dir = os.path.join(pwd, 'cv_results_in_folds')
+            save_dir = os.path.join(pwd, "cv_results_in_folds")
             try:
                 os.mkdir(save_dir)
                 for idx, obj in enumerate(fitted_searchers):
-                    target_name = 'cv_results_' + '_' + 'split%d' % idx
+                    target_name = "cv_results_" + "_" + "split%d" % idx
                     target_path = os.path.join(pwd, save_dir, target_name)
-                    cv_results_ = getattr(obj, 'cv_results_', None)
+                    cv_results_ = getattr(obj, "cv_results_", None)
                     if not cv_results_:
                         print("%s is not available" % target_name)
                         continue
                     cv_results_ = pd.DataFrame(cv_results_)
                     cv_results_ = cv_results_[sorted(cv_results_.columns)]
-                    cv_results_.to_csv(target_path, sep='\t', header=True,
-                                       index=False)
+                    cv_results_.to_csv(target_path, sep="\t", header=True, index=False)
             except Exception as e:
                 print(e)
             finally:
@@ -611,18 +653,14 @@
 
         keys = list(rval.keys())
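+        # add mean/std summary columns for each test score and drop the timing columns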
         for k in keys:
-            if k.startswith('test'):
-                rval['mean_' + k] = np.mean(rval[k])
-                rval['std_' + k] = np.std(rval[k])
-            if k.endswith('time'):
+            if k.startswith("test"):
+                rval["mean_" + k] = np.mean(rval[k])
+                rval["std_" + k] = np.std(rval[k])
+            if k.endswith("time"):
                 rval.pop(k)
         rval = pd.DataFrame(rval)
         rval = rval[sorted(rval.columns)]
-        rval.to_csv(path_or_buf=outfile_result, sep='\t', header=True,
-                    index=False)
-
-        return 0
-
+        rval.to_csv(path_or_buf=outfile_result, sep="\t", header=True, index=False)
         # deprecated train test split mode
         """searcher = _do_train_test_split_val(
             searcher, X, y, params,
@@ -630,14 +668,15 @@
             error_score=options['error_score'],
             groups=groups,
             outfile=outfile_result)"""
+        return 0
 
     # no outer split
     else:
         searcher.set_params(n_jobs=N_JOBS)
-        if options['error_score'] == 'raise':
+        if options["error_score"] == "raise":
             searcher.fit(X, y, groups=groups)
         else:
-            warnings.simplefilter('always', FitFailedWarning)
+            warnings.simplefilter("always", FitFailedWarning)
             with warnings.catch_warnings(record=True) as w:
                 try:
                     searcher.fit(X, y, groups=groups)
@@ -648,18 +687,19 @@
 
         cv_results = pd.DataFrame(searcher.cv_results_)
         cv_results = cv_results[sorted(cv_results.columns)]
-        cv_results.to_csv(path_or_buf=outfile_result, sep='\t',
-                          header=True, index=False)
+        cv_results.to_csv(path_or_buf=outfile_result, sep="\t", header=True, index=False)
 
     memory.clear(warn=False)
 
     # output best estimator, and weights if applicable
     if outfile_object:
-        best_estimator_ = getattr(searcher, 'best_estimator_', None)
+        best_estimator_ = getattr(searcher, "best_estimator_", None)
         if not best_estimator_:
-            warnings.warn("GridSearchCV object has no attribute "
-                          "'best_estimator_', because either it's "
-                          "nested gridsearch or `refit` is False!")
+            warnings.warn(
+                "GridSearchCV object has no attribute "
+                "'best_estimator_', because either it's "
+                "nested gridsearch or `refit` is False!"
+            )
             return
 
         # clean params
@@ -667,24 +707,22 @@
 
         main_est = get_main_estimator(best_estimator_)
 
-        if hasattr(main_est, 'model_') \
-                and hasattr(main_est, 'save_weights'):
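+        # Keras-based wrappers: weights are saved separately and the live model
+        # attributes, which are typically not picklable, are dropped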
+        if hasattr(main_est, "model_") and hasattr(main_est, "save_weights"):
             if outfile_weights:
                 main_est.save_weights(outfile_weights)
             del main_est.model_
             del main_est.fit_params
             del main_est.model_class_
             del main_est.validation_data
-            if getattr(main_est, 'data_generator_', None):
+            if getattr(main_est, "data_generator_", None):
                 del main_est.data_generator_
 
-        with open(outfile_object, 'wb') as output_handler:
+        with open(outfile_object, "wb") as output_handler:
             print("Best estimator is saved: %s " % repr(best_estimator_))
-            pickle.dump(best_estimator_, output_handler,
-                        pickle.HIGHEST_PROTOCOL)
+            pickle.dump(best_estimator_, output_handler, pickle.HIGHEST_PROTOCOL)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     aparser = argparse.ArgumentParser()
     aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
     aparser.add_argument("-e", "--estimator", dest="infile_estimator")
@@ -700,8 +738,17 @@
     aparser.add_argument("-f", "--fasta_path", dest="fasta_path")
     args = aparser.parse_args()
 
-    main(args.inputs, args.infile_estimator, args.infile1, args.infile2,
-         args.outfile_result, outfile_object=args.outfile_object,
-         outfile_weights=args.outfile_weights, groups=args.groups,
-         ref_seq=args.ref_seq, intervals=args.intervals,
-         targets=args.targets, fasta_path=args.fasta_path)
+    main(
+        args.inputs,
+        args.infile_estimator,
+        args.infile1,
+        args.infile2,
+        args.outfile_result,
+        outfile_object=args.outfile_object,
+        outfile_weights=args.outfile_weights,
+        groups=args.groups,
+        ref_seq=args.ref_seq,
+        intervals=args.intervals,
+        targets=args.targets,
+        fasta_path=args.fasta_path,
+    )
diff -r daece0f27108 -r fe627c026dc6 simple_model_fit.py
--- a/simple_model_fit.py	Wed Mar 11 17:11:13 2020 +0000
+++ b/simple_model_fit.py	Tue Apr 13 20:44:53 2021 +0000
@@ -1,13 +1,14 @@
 import argparse
 import json
-import pandas as pd
 import pickle
 
+import pandas as pd
 from galaxy_ml.utils import load_model, read_columns
+from scipy.io import mmread
 from sklearn.pipeline import Pipeline
 
 
-N_JOBS = int(__import__('os').environ.get('GALAXY_SLOTS', 1))
+N_JOBS = int(__import__("os").environ.get("GALAXY_SLOTS", 1))
 
 
 # TODO import from galaxy_ml.utils in future versions
@@ -20,33 +21,35 @@
     ------
     Cleaned estimator object
     """
-    ALLOWED_CALLBACKS = ('EarlyStopping', 'TerminateOnNaN',
-                         'ReduceLROnPlateau', 'CSVLogger', 'None')
+    ALLOWED_CALLBACKS = (
+        "EarlyStopping",
+        "TerminateOnNaN",
+        "ReduceLROnPlateau",
+        "CSVLogger",
+        "None",
+    )
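+    # any callback type outside this whitelist is rejected below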
 
     estimator_params = estimator.get_params()
 
     for name, p in estimator_params.items():
         # all potential unauthorized file writes
-        if name == 'memory' or name.endswith('__memory') \
-                or name.endswith('_path'):
+        if name == "memory" or name.endswith("__memory") or name.endswith("_path"):
             new_p = {name: None}
             estimator.set_params(**new_p)
-        elif n_jobs is not None and (name == 'n_jobs' or
-                                     name.endswith('__n_jobs')):
+        elif n_jobs is not None and (name == "n_jobs" or name.endswith("__n_jobs")):
             new_p = {name: n_jobs}
             estimator.set_params(**new_p)
-        elif name.endswith('callbacks'):
+        elif name.endswith("callbacks"):
             for cb in p:
-                cb_type = cb['callback_selection']['callback_type']
+                cb_type = cb["callback_selection"]["callback_type"]
                 if cb_type not in ALLOWED_CALLBACKS:
-                    raise ValueError(
-                        "Prohibited callback type: %s!" % cb_type)
+                    raise ValueError("Prohibited callback type: %s!" % cb_type)
 
     return estimator
 
 
 def _get_X_y(params, infile1, infile2):
-    """ read from inputs and output X and y
+    """read from inputs and output X and y
 
     Parameters
     ----------
@@ -61,35 +64,40 @@
     # store read dataframe object
     loaded_df = {}
 
-    input_type = params['input_options']['selected_input']
+    input_type = params["input_options"]["selected_input"]
     # tabular input
-    if input_type == 'tabular':
-        header = 'infer' if params['input_options']['header1'] else None
-        column_option = (params['input_options']['column_selector_options_1']
-                         ['selected_column_selector_option'])
-        if column_option in ['by_index_number', 'all_but_by_index_number',
-                             'by_header_name', 'all_but_by_header_name']:
-            c = params['input_options']['column_selector_options_1']['col1']
+    if input_type == "tabular":
+        header = "infer" if params["input_options"]["header1"] else None
+        column_option = params["input_options"]["column_selector_options_1"]["selected_column_selector_option"]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = params["input_options"]["column_selector_options_1"]["col1"]
         else:
             c = None
 
         df_key = infile1 + repr(header)
-        df = pd.read_csv(infile1, sep='\t', header=header,
-                         parse_dates=True)
+        df = pd.read_csv(infile1, sep="\t", header=header, parse_dates=True)
         loaded_df[df_key] = df
 
         X = read_columns(df, c=c, c_option=column_option).astype(float)
     # sparse input
-    elif input_type == 'sparse':
-        X = mmread(open(infile1, 'r'))
+    elif input_type == "sparse":
+        X = mmread(open(infile1, "r"))
 
     # Get target y
-    header = 'infer' if params['input_options']['header2'] else None
-    column_option = (params['input_options']['column_selector_options_2']
-                     ['selected_column_selector_option2'])
-    if column_option in ['by_index_number', 'all_but_by_index_number',
-                         'by_header_name', 'all_but_by_header_name']:
-        c = params['input_options']['column_selector_options_2']['col2']
+    header = "infer" if params["input_options"]["header2"] else None
+    column_option = params["input_options"]["column_selector_options_2"]["selected_column_selector_option2"]
+    if column_option in [
+        "by_index_number",
+        "all_but_by_index_number",
+        "by_header_name",
+        "all_but_by_header_name",
+    ]:
+        c = params["input_options"]["column_selector_options_2"]["col2"]
     else:
         c = None
 
@@ -97,26 +105,23 @@
     if df_key in loaded_df:
         infile2 = loaded_df[df_key]
     else:
-        infile2 = pd.read_csv(infile2, sep='\t',
-                              header=header, parse_dates=True)
+        infile2 = pd.read_csv(infile2, sep="\t", header=header, parse_dates=True)
         loaded_df[df_key] = infile2
 
-    y = read_columns(
-            infile2,
-            c=c,
-            c_option=column_option,
-            sep='\t',
-            header=header,
-            parse_dates=True)
+    y = read_columns(
+        infile2,
+        c=c,
+        c_option=column_option,
+        sep="\t",
+        header=header,
+        parse_dates=True,
+    )
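+    # flatten a single-column target into a 1-D array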
     if len(y.shape) == 2 and y.shape[1] == 1:
         y = y.ravel()
 
     return X, y
 
 
-def main(inputs, infile_estimator, infile1, infile2, out_object,
-         out_weights=None):
-    """ main
+def main(inputs, infile_estimator, infile1, infile2, out_object, out_weights=None):
+    """main
 
     Parameters
     ----------
@@ -139,38 +144,37 @@
         File path for output of weights
 
     """
-    with open(inputs, 'r') as param_handler:
+    with open(inputs, "r") as param_handler:
         params = json.load(param_handler)
 
     # load model
-    with open(infile_estimator, 'rb') as est_handler:
+    with open(infile_estimator, "rb") as est_handler:
         estimator = load_model(est_handler)
     estimator = clean_params(estimator, n_jobs=N_JOBS)
 
     X_train, y_train = _get_X_y(params, infile1, infile2)
 
     estimator.fit(X_train, y_train)
-    
+
     main_est = estimator
     if isinstance(main_est, Pipeline):
         main_est = main_est.steps[-1][-1]
-    if hasattr(main_est, 'model_') \
-            and hasattr(main_est, 'save_weights'):
+    if hasattr(main_est, "model_") and hasattr(main_est, "save_weights"):
         if out_weights:
             main_est.save_weights(out_weights)
         del main_est.model_
         del main_est.fit_params
         del main_est.model_class_
-        del main_est.validation_data
-        if getattr(main_est, 'data_generator_', None):
+        if getattr(main_est, "validation_data", None):
+            del main_est.validation_data
+        if getattr(main_est, "data_generator_", None):
             del main_est.data_generator_
 
-    with open(out_object, 'wb') as output_handler:
-        pickle.dump(estimator, output_handler,
-                    pickle.HIGHEST_PROTOCOL)
+    with open(out_object, "wb") as output_handler:
+        pickle.dump(estimator, output_handler, pickle.HIGHEST_PROTOCOL)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     aparser = argparse.ArgumentParser()
     aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
     aparser.add_argument("-X", "--infile_estimator", dest="infile_estimator")
@@ -180,5 +184,11 @@
     aparser.add_argument("-t", "--out_weights", dest="out_weights")
     args = aparser.parse_args()
 
-    main(args.inputs, args.infile_estimator, args.infile1,
-         args.infile2, args.out_object, args.out_weights)
+    main(
+        args.inputs,
+        args.infile_estimator,
+        args.infile1,
+        args.infile2,
+        args.out_object,
+        args.out_weights,
+    )
diff -r daece0f27108 -r fe627c026dc6 stacking_ensembles.py
--- a/stacking_ensembles.py	Wed Mar 11 17:11:13 2020 +0000
+++ b/stacking_ensembles.py	Tue Apr 13 20:44:53 2021 +0000
@@ -1,26 +1,22 @@
 import argparse
 import ast
 import json
-import mlxtend.regressor
-import mlxtend.classifier
-import pandas as pd
 import pickle
-import sklearn
 import sys
 import warnings
-from sklearn import ensemble
 
-from galaxy_ml.utils import (load_model, get_cv, get_estimator,
-                             get_search_params)
+import mlxtend.classifier
+import mlxtend.regressor
+import pandas as pd
+from galaxy_ml.utils import get_cv, get_estimator, get_search_params, load_model
 
 
-warnings.filterwarnings('ignore')
+warnings.filterwarnings("ignore")
 
-N_JOBS = int(__import__('os').environ.get('GALAXY_SLOTS', 1))
+N_JOBS = int(__import__("os").environ.get("GALAXY_SLOTS", 1))
 
 
-def main(inputs_path, output_obj, base_paths=None, meta_path=None,
-         outfile_params=None):
+def main(inputs_path, output_obj, base_paths=None, meta_path=None, outfile_params=None):
     """
     Parameters
     ----------
@@ -39,87 +35,79 @@
     outfile_params : str
         File path for params output
     """
-    with open(inputs_path, 'r') as param_handler:
+    with open(inputs_path, "r") as param_handler:
         params = json.load(param_handler)
 
-    estimator_type = params['algo_selection']['estimator_type']
+    estimator_type = params["algo_selection"]["estimator_type"]
     # get base estimators
     base_estimators = []
-    for idx, base_file in enumerate(base_paths.split(',')):
-        if base_file and base_file != 'None':
-            with open(base_file, 'rb') as handler:
+    for idx, base_file in enumerate(base_paths.split(",")):
+        if base_file and base_file != "None":
+            with open(base_file, "rb") as handler:
                 model = load_model(handler)
         else:
-            estimator_json = (params['base_est_builder'][idx]
-                              ['estimator_selector'])
+            estimator_json = params["base_est_builder"][idx]["estimator_selector"]
             model = get_estimator(estimator_json)
 
-        if estimator_type.startswith('sklearn'):
+        if estimator_type.startswith("sklearn"):
             named = model.__class__.__name__.lower()
-            named = 'base_%d_%s' % (idx, named)
+            named = "base_%d_%s" % (idx, named)
             base_estimators.append((named, model))
         else:
             base_estimators.append(model)
 
     # get meta estimator, if applicable
-    if estimator_type.startswith('mlxtend'):
+    if estimator_type.startswith("mlxtend"):
         if meta_path:
-            with open(meta_path, 'rb') as f:
+            with open(meta_path, "rb") as f:
                 meta_estimator = load_model(f)
         else:
-            estimator_json = (params['algo_selection']
-                              ['meta_estimator']['estimator_selector'])
+            estimator_json = params["algo_selection"]["meta_estimator"]["estimator_selector"]
             meta_estimator = get_estimator(estimator_json)
 
-    options = params['algo_selection']['options']
+    options = params["algo_selection"]["options"]
 
-    cv_selector = options.pop('cv_selector', None)
+    cv_selector = options.pop("cv_selector", None)
     if cv_selector:
-        splitter, groups = get_cv(cv_selector)
-        options['cv'] = splitter
+        splitter, _groups = get_cv(cv_selector)
+        options["cv"] = splitter
         # set n_jobs
-        options['n_jobs'] = N_JOBS
+        options["n_jobs"] = N_JOBS
 
-    weights = options.pop('weights', None)
+    weights = options.pop("weights", None)
     if weights:
         weights = ast.literal_eval(weights)
         if weights:
-            options['weights'] = weights
+            options["weights"] = weights
 
-    mod_and_name = estimator_type.split('_')
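+    # estimator_type is expected to look like "<module path>_<ClassName>"
+    # (e.g. "mlxtend.classifier_StackingCVClassifier"); resolve it dynamically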
+    mod_and_name = estimator_type.split("_")
     mod = sys.modules[mod_and_name[0]]
     klass = getattr(mod, mod_and_name[1])
 
-    if estimator_type.startswith('sklearn'):
-        options['n_jobs'] = N_JOBS
+    if estimator_type.startswith("sklearn"):
+        options["n_jobs"] = N_JOBS
         ensemble_estimator = klass(base_estimators, **options)
 
     elif mod == mlxtend.classifier:
-        ensemble_estimator = klass(
-            classifiers=base_estimators,
-            meta_classifier=meta_estimator,
-            **options)
+        ensemble_estimator = klass(classifiers=base_estimators, meta_classifier=meta_estimator, **options)
 
     else:
-        ensemble_estimator = klass(
-            regressors=base_estimators,
-            meta_regressor=meta_estimator,
-            **options)
+        ensemble_estimator = klass(regressors=base_estimators, meta_regressor=meta_estimator, **options)
 
     print(ensemble_estimator)
     for base_est in base_estimators:
         print(base_est)
 
-    with open(output_obj, 'wb') as out_handler:
+    with open(output_obj, "wb") as out_handler:
         pickle.dump(ensemble_estimator, out_handler, pickle.HIGHEST_PROTOCOL)
 
-    if params['get_params'] and outfile_params:
+    if params["get_params"] and outfile_params:
         results = get_search_params(ensemble_estimator)
-        df = pd.DataFrame(results, columns=['', 'Parameter', 'Value'])
-        df.to_csv(outfile_params, sep='\t', index=False)
+        df = pd.DataFrame(results, columns=["", "Parameter", "Value"])
+        df.to_csv(outfile_params, sep="\t", index=False)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     aparser = argparse.ArgumentParser()
     aparser.add_argument("-b", "--bases", dest="bases")
     aparser.add_argument("-m", "--meta", dest="meta")
@@ -128,5 +116,10 @@
     aparser.add_argument("-p", "--outfile_params", dest="outfile_params")
     args = aparser.parse_args()
 
-    main(args.inputs, args.outfile, base_paths=args.bases,
-         meta_path=args.meta, outfile_params=args.outfile_params)
+    main(
+        args.inputs,
+        args.outfile,
+        base_paths=args.bases,
+        meta_path=args.meta,
+        outfile_params=args.outfile_params,
+    )
diff -r daece0f27108 -r fe627c026dc6 test-data/keras_batch_params01.tabular
--- a/test-data/keras_batch_params01.tabular	Wed Mar 11 17:11:13 2020 +0000
+++ b/test-data/keras_batch_params01.tabular	Tue Apr 13 20:44:53 2021 +0000
@@ -27,7 +27,7 @@
 @	schedule_decay	schedule_decay: None
 @	seed	seed: None
 @	steps_per_epoch	steps_per_epoch: None
-@	validation_data	validation_data: None
+@	validation_fraction	validation_fraction: 0.1
 @	validation_steps	validation_steps: None
 @	verbose	verbose: 0
 *	data_batch_generator__fasta_path	data_batch_generator__fasta_path: 'to_be_determined'
diff -r daece0f27108 -r fe627c026dc6 test-data/keras_batch_params04.tabular
--- a/test-data/keras_batch_params04.tabular	Wed Mar 11 17:11:13 2020 +0000
+++ b/test-data/keras_batch_params04.tabular	Tue Apr 13 20:44:53 2021 +0000
@@ -26,7 +26,7 @@
 @	schedule_decay	schedule_decay: None
 @	seed	seed: None
 @	steps_per_epoch	steps_per_epoch: None
-@	validation_data	validation_data: None
+@	validation_fraction	validation_fraction: 0.1
 @	validation_steps	validation_steps: None
 @	verbose	verbose: 0
 *	layers_0_Dense__class_name	layers_0_Dense__class_name: 'Dense'
diff -r daece0f27108 -r fe627c026dc6 test-data/keras_model01
Binary file test-data/keras_model01 has changed
diff -r daece0f27108 -r fe627c026dc6 test-data/keras_model02
Binary file test-data/keras_model02 has changed
diff -r daece0f27108 -r fe627c026dc6 test-data/keras_model04
Binary file test-data/keras_model04 has changed
diff -r daece0f27108 -r fe627c026dc6 test-data/keras_params04.tabular
--- a/test-data/keras_params04.tabular	Wed Mar 11 17:11:13 2020 +0000
+++ b/test-data/keras_params04.tabular	Tue Apr 13 20:44:53 2021 +0000
@@ -22,7 +22,7 @@
 @	schedule_decay	schedule_decay: None
 @	seed	seed: 42
 @	steps_per_epoch	steps_per_epoch: None
-@	validation_data	validation_data: None
+@	validation_fraction	validation_fraction: 0.1
 @	validation_steps	validation_steps: None
 @	verbose	verbose: 0
 *	layers_0_Dense__class_name	layers_0_Dense__class_name: 'Dense'
diff -r daece0f27108 -r fe627c026dc6 test-data/ohe_in_w_header.tabular
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ohe_in_w_header.tabular	Tue Apr 13 20:44:53 2021 +0000
@@ -0,0 +1,9 @@
+Label
+0
+1
+2
+3
+3
+2
+1
+0
diff -r daece0f27108 -r fe627c026dc6 test-data/ohe_in_wo_header.tabular
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ohe_in_wo_header.tabular	Tue Apr 13 20:44:53 2021 +0000
@@ -0,0 +1,8 @@
+0
+1
+2
+3
+3
+2
+1
+0
diff -r daece0f27108 -r fe627c026dc6 test-data/ohe_out_4.tabular
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ohe_out_4.tabular	Tue Apr 13 20:44:53 2021 +0000
@@ -0,0 +1,8 @@
+1	0	0	0
+0	1	0	0
+0	0	1	0
+0	0	0	1
+0	0	0	1
+0	0	1	0
+0	1	0	0
+1	0	0	0
diff -r daece0f27108 -r fe627c026dc6 test-data/ohe_out_5.tabular
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ohe_out_5.tabular	Tue Apr 13 20:44:53 2021 +0000
@@ -0,0 +1,8 @@
+1	0	0	0	0
+0	1	0	0	0
+0	0	1	0	0
+0	0	0	1	0
+0	0	0	1	0
+0	0	1	0	0
+0	1	0	0	0
+1	0	0	0	0
diff -r daece0f27108 -r fe627c026dc6 test-data/pca_classical_header_names_output.dat
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pca_classical_header_names_output.dat	Tue Apr 13 20:44:53 2021 +0000
@@ -0,0 +1,300 @@
+-3.9779	1.8818	2.6506	-1.1628	-0.0983
+-2.3700	0.6756	-1.6186	-1.4164	1.0327
+-3.0925	2.4577	-1.8726	-0.4041	1.5016
+-4.0151	2.3686	0.3256	-0.0685	-0.2168
+-4.7065	3.1394	2.3137	0.1907	-1.6303
+-4.9613	4.6941	1.2908	1.0298	-0.8817
+-4.8364	5.2936	0.8793	1.3212	0.0849
+-5.0224	2.7565	4.6351	-0.5397	-1.3489
+-2.9068	1.6461	-1.9066	-0.5100	-0.6820
+-4.7713	4.0171	2.1659	0.5434	-1.4630
+-2.7521	-1.9309	-0.9117	-3.5728	1.7161
+-3.7407	1.3845	0.0141	-1.5357	1.3068
+-4.3751	1.0682	2.3906	-1.5298	0.9064
+-2.8532	0.2430	-0.8937	-1.7428	-1.1825
+-2.4091	-0.1393	-0.0986	-2.0343	1.5498
+-3.8699	2.7278	0.9261	-0.4924	0.7933
+-4.6180	3.2535	2.4650	-0.0468	0.8258
+-2.3362	3.2961	-2.7247	0.9321	-0.1294
+-1.7741	0.3277	-3.2195	-1.6272	1.9721
+-2.9188	3.5196	-1.5959	0.6981	0.3541
+-4.0500	2.5696	1.0924	-0.1582	-1.7489
+-5.3435	6.4383	1.7066	1.9754	0.9120
+-4.1454	6.1296	-0.3939	1.8899	0.9309
+-2.4452	-0.6422	0.2580	-2.5687	0.0322
+-5.2047	7.4266	2.4849	2.8623	-1.3975
+-2.5237	-2.4427	-1.5498	-3.1847	-1.6248
+-1.7613	1.6919	-4.6466	-0.5331	1.3921
+-1.1236	2.5796	-4.4469	0.3771	0.0692
+-2.5892	3.4039	-1.3071	0.2542	0.1349
+-3.5099	-0.9352	1.4462	-2.5959	-0.3994
+-3.9546	6.3431	0.5939	2.3052	-0.2344
+-2.0819	-1.6617	-1.0140	-3.0790	1.1571
+-2.6320	0.0703	-0.0526	-1.5092	-1.8043
+-1.8865	0.2234	-2.0628	-1.9632	2.3486
+-4.5803	3.1525	2.6718	-0.3104	0.7210
+-6.5473	2.5731	6.0056	-0.6369	-1.2620
+-5.8360	4.1304	4.6222	0.5425	-0.1996
+-1.1136	3.4820	-5.1375	0.4626	3.1966
+-4.7698	0.9659	3.3912	-1.9008	-0.0558
+-2.4391	-0.7627	-1.6123	-1.9380	-0.8111
+-4.8859	4.2069	1.6117	0.6831	0.8832
+-2.2466	-1.0655	-1.2645	-2.4867	-0.1625
+-4.4644	5.0000	1.1159	1.6869	-0.4784
+-3.7358	0.9995	1.6597	-1.3727	-1.7457
+-5.8271	3.6580	5.8537	-0.0174	-0.9750
+-4.7392	4.2895	2.1369	0.6907	-0.6801
+-1.5432	2.7403	-5.0837	0.0664	2.6107
+-5.7249	7.7343	3.2678	3.5103	-2.0555
+-4.2566	5.3778	1.2450	1.4052	0.2429
+-5.7932	5.3928	4.6237	1.0285	1.0814
+-2.9574	-1.1660	1.2251	-2.5803	-0.5026
+-2.0365	4.7362	-3.8569	1.8582	-0.6083
+-5.1883	6.2608	1.6921	2.1737	0.9110
+-5.5934	1.2903	5.3088	-1.4372	0.2000
+-1.4178	0.5340	-3.0765	-1.4210	1.9659
+-5.1568	4.3100	2.6279	0.8400	-0.4656
+-4.2551	3.3395	1.2265	-0.0344	-0.0296
+-6.4636	3.6525	5.4351	-0.1493	1.1392
+-4.0271	-0.6214	2.0667	-2.5704	0.5389
+-3.2885	2.2421	0.4406	-0.5508	0.4760
+-3.2320	3.1264	0.1610	0.0045	-0.3199
+-2.6003	5.2398	-2.1366	1.6829	0.7428
+-4.3207	1.7506	1.6012	-0.9072	-1.5917
+-1.9287	2.7030	-3.8706	0.1751	0.9751
+-4.6549	5.5519	2.1315	1.7555	-0.4025
+-2.4743	1.5111	-1.6381	-0.8537	-0.4237
+-1.2837	3.5483	-5.9098	0.8155	0.5023
+-3.9514	5.4703	0.2135	1.6665	0.0226
+-3.1575	3.1697	-2.0242	-0.1906	2.4084
+-6.7971	3.1578	6.8243	-0.5140	-0.4121
+-5.9999	3.1135	6.0259	0.1711	-2.0321
+-2.3450	1.9814	-1.1103	-0.7338	0.6581
+-1.5478	0.3095	-3.1375	-1.9311	2.3145
+-3.6067	1.2237	-0.4271	-1.2399	-0.0987
+-3.0574	-0.0303	-1.0815	-1.5251	-1.7385
+-3.7608	4.9627	0.5748	1.3373	1.6977
+-3.3834	2.2529	-1.4015	-0.3531	-0.8381
+-5.3297	2.0845	4.0157	-1.0934	0.1069
+-4.6415	5.6565	1.0886	1.6713	-0.3536
+-4.7611	4.6882	1.0939	0.9883	1.7929
+-1.7499	1.7738	-2.6457	-0.0629	-0.5751
+-5.1579	7.5589	1.1299	3.1680	-0.8202
+-3.4019	-1.4226	0.3991	-2.5729	-0.9099
+-1.6689	1.3580	-3.7300	-0.7291	1.5630
+-5.5132	6.6256	3.6086	1.9423	0.3727
+-4.4010	7.0180	1.1796	2.6417	0.9847
+-2.1174	3.1273	-2.6107	-0.2004	2.3541
+-2.2818	-0.7861	-1.5672	-1.8685	-1.2308
+-4.2055	4.8158	-0.1348	1.2570	-0.2039
+-2.2741	1.1907	-1.5868	-1.0998	-0.5999
+-3.0433	3.1513	-1.8017	0.1704	0.3636
+-5.3872	1.7330	5.6772	-1.1538	-0.2345
+-3.5773	2.5712	-0.8771	0.2747	-1.2405
+-3.0843	1.4711	-0.1928	-1.2214	1.2785
+-1.9572	3.5730	-4.2197	0.3158	2.0016
+-2.3444	4.7106	-3.7159	1.0094	1.7919
+-3.4024	1.1605	0.5845	-1.1358	-0.6689
+-3.2321	4.3272	-1.2592	1.0365	-0.4073
+-5.0553	5.8588	3.0041	1.9760	-0.7261
+-3.6706	1.0101	1.8198	-1.8471	1.1714
+8.9574	3.4341	-0.6861	1.5391	0.1971
+10.4081	3.3686	0.6688	0.9791	2.2503
+8.8871	0.8254	3.9087	-0.9576	1.5038
+2.1683	4.0337	-3.6737	1.1996	0.8734
+5.7699	3.5062	-1.2833	0.8577	2.4657
+11.3777	1.8612	2.4649	0.1035	0.6817
+10.2968	3.5175	1.6948	1.6534	-1.0812
+7.4297	-0.8987	3.2658	-1.7151	-1.7543
+0.3252	3.7494	-4.2495	1.5590	-1.5370
+10.1331	4.8737	0.4858	2.3869	1.3120
+9.7602	2.5946	1.2877	0.8963	-0.9431
+6.3294	0.2420	0.0602	-0.3493	-2.5248
+-1.0055	-0.1004	-1.8691	-2.0088	1.7743
+7.0020	0.1138	2.6978	-1.2615	0.7305
+-0.8476	3.5869	-4.4656	1.2269	-1.3229
+1.5347	3.8958	-2.1649	0.5727	2.8613
+-1.1039	-1.9065	-1.5394	-2.3618	-2.7727
+0.8179	-1.7488	-0.4802	-2.6368	-1.4583
+1.4527	1.0742	-2.4115	-0.3672	-1.6999
+2.5720	-0.8562	0.7638	-2.6926	1.6123
+6.1515	-1.1095	3.4326	-2.4435	1.2520
+1.0178	1.8481	-3.4738	-0.0814	0.4915
+4.8837	2.0690	-1.0561	0.4931	-2.2182
+6.0339	2.9506	0.3218	0.6312	1.5915
+9.3950	-1.1761	4.1952	-2.3244	1.4185
+8.9403	4.0562	1.0275	1.7812	0.6851
+2.9249	-1.4130	2.0969	-2.9718	0.8180
+5.1324	2.3888	-0.7332	0.2684	1.1412
+0.9835	-1.6716	0.8378	-3.0465	0.5331
+1.1708	1.4905	-1.3652	-0.1570	-2.2617
+-1.8828	2.0223	-3.2202	-0.0543	-1.9808
+0.5994	3.1729	-4.3744	0.9607	-0.7149
+7.1452	-0.1955	3.9804	-1.6216	0.4437
+8.1279	2.8675	1.1357	0.9555	-0.2832
+4.7280	2.5837	-0.5679	-0.0291	2.4888
+10.7502	1.9831	2.6421	0.5508	-1.3455
+3.4919	0.5382	-0.2017	-1.4828	1.4970
+1.6535	-0.3138	-0.6716	-1.4892	-1.0583
+-0.6194	-1.1388	-2.0794	-2.2235	-1.8344
+-1.6807	2.3272	-4.5348	0.4232	-1.1923
+-0.0933	2.2712	-4.4117	0.2252	-1.4704
+0.9280	1.2142	-1.1817	-0.4388	-1.7023
+7.6300	2.3460	-0.7224	0.8452	-1.0681
+4.8565	4.5339	-1.3839	1.3020	2.7231
+6.4818	2.3348	0.5405	0.3241	0.3556
+8.7788	4.0387	-0.5764	2.0785	-1.5655
+7.3565	2.8117	1.2035	0.7784	0.3995
+4.5695	1.5832	-1.9081	0.1707	-1.8627
+8.6604	3.4046	-0.1895	1.9467	-2.1641
+7.4043	2.7787	-1.2558	1.0864	-0.3961
+1.5577	3.6391	-3.9646	0.8673	0.8518
+2.5617	-1.7256	-0.1226	-2.3352	-1.2180
+3.9498	0.7254	0.1543	-0.6388	-0.7131
+5.4952	2.8144	-1.4037	0.7478	0.5767
+7.4705	4.1598	-2.0259	1.8600	-0.0993
+0.0598	0.8416	-1.8465	-1.1915	1.1069
+3.1833	2.4826	-1.7316	0.3751	-0.1741
+-0.8004	-1.5851	-0.3839	-3.2690	1.6516
+2.8531	0.6398	0.3016	-1.1929	0.3837
+8.7954	4.0057	0.5143	1.4523	0.8167
+7.6619	1.6910	0.6441	0.3695	-0.7369
+2.1000	1.4302	-0.4868	-0.3345	-1.5018
+3.2123	2.3107	-2.2237	0.0569	1.4980
+9.7191	1.9952	2.9522	-0.2021	1.3635
+6.7689	0.8412	0.7453	-0.4112	-1.8253
+-0.2193	2.5655	-4.8300	0.4753	-0.3690
+4.1903	1.7643	-2.3111	0.2086	-1.7388
+0.2129	2.0407	-2.9543	0.2723	-1.4070
+0.3409	-1.3345	-1.3839	-2.5782	0.0244
+10.9350	1.1854	3.4640	-0.0883	-0.3700
+5.8165	0.6966	0.3014	-0.4095	-1.0547
+1.1058	1.6680	-2.1525	-0.5345	0.1171
+5.9936	-1.3469	1.5152	-1.8738	-2.3045
+-1.8666	-1.9038	-1.3508	-2.8457	-0.8487
+4.7057	3.0603	-2.0698	1.1172	-1.2828
+4.8382	2.7905	-2.4100	1.0784	-1.8450
+7.7583	-0.1178	3.4031	-1.0477	-1.6762
+-1.0944	-1.7645	0.1859	-2.5096	-2.9060
+6.7410	-0.4974	3.0210	-1.5827	0.0429
+2.8538	0.5411	0.4132	-1.6065	1.2372
+7.7713	1.7433	2.3259	-0.0797	0.8617
+4.9205	-1.1654	0.4683	-1.8680	-2.2668
+3.9175	0.5662	0.1128	-1.3169	1.9494
+-0.8118	-0.8348	-2.4830	-2.1239	-1.3528
+5.1125	-1.0569	2.7024	-2.3925	1.2405
+-0.3321	3.5314	-2.5750	0.4625	2.4769
+6.0636	-0.2583	1.0393	-1.1765	-2.0102
+10.2001	4.4529	0.5351	2.4796	-2.2048
+6.0224	0.4141	3.0444	-1.6036	1.3780
+9.5469	2.9457	1.8870	1.5290	-1.8539
+1.2694	0.9305	-1.2315	-0.5435	-1.8862
+7.3444	3.6803	-1.8285	1.8223	-0.6988
+0.8728	-1.0305	-1.6295	-1.8965	-1.8792
+-0.8348	2.7660	-5.3706	0.4722	-0.2320
+7.9119	2.9566	-0.1448	0.7199	1.3050
+1.2929	-1.3724	0.0347	-3.0890	1.5365
+6.4039	0.7545	2.1608	-1.1811	0.7715
+-1.7913	-1.2703	-0.6435	-2.7504	0.4447
+0.0868	-0.4073	-1.2184	-1.6489	-2.2725
+4.9808	-1.5720	2.3203	-2.7613	1.7252
+-2.5810	-5.5079	0.0561	2.7450	-1.9096
+0.0370	-2.1438	-0.4775	-3.6280	1.6759
+1.0997	-3.5629	1.4259	1.7291	-1.2305
+-1.3638	-2.9435	-0.1034	-1.4507	-0.9311
+-0.7078	-4.3400	-1.1441	0.4367	-1.7198
+-0.5174	-3.5645	0.2025	0.4664	0.8917
+-2.6934	-6.0138	-1.9617	2.8409	-1.2291
+-0.9761	-2.4423	-1.2032	-2.4114	-2.0578
+0.4854	-5.7924	0.7961	2.7641	0.1446
+0.2325	-3.0261	-0.5759	-1.7790	0.5984
+1.4671	-1.9240	0.8327	-1.8960	-0.4687
+-2.5020	-5.9264	-1.0911	2.3968	0.3954
+0.5726	-3.6055	1.0491	-0.0200	1.7178
+1.2348	-2.2932	1.4641	-1.1600	0.3434
+1.5601	-1.7994	0.7362	-1.6966	0.7891
+-1.2684	-4.2416	-1.5761	0.0715	-1.2373
+-1.2743	-1.4687	0.5370	-2.5136	1.0946
+-0.1360	-4.4661	-0.0391	0.5382	2.6005
+-0.3517	-4.6240	-0.4723	1.6165	-2.1852
+1.4760	-2.5560	0.2351	-0.9202	0.4155
+-1.7783	-5.3517	0.7277	3.2291	1.2118
+1.6356	-3.7406	0.1185	1.1998	-2.1563
+-1.5925	-4.0036	-0.4063	-0.3156	1.4120
+0.5318	-6.1624	1.3086	4.4905	-1.6720
+-2.1599	-2.1987	-1.7868	-2.7058	-0.8553
+-0.1707	-3.3282	0.2381	-0.0381	0.8412
+0.2620	-2.7657	-0.3002	-2.0651	-1.3784
+-1.4631	-2.2472	-0.7439	-2.3037	1.4133
+-0.6767	-2.6672	0.4721	-0.4983	-0.3085
+0.8239	-2.9922	0.8695	1.0520	-1.3905
+-2.8612	-4.9086	-0.2330	1.8419	-2.0691
+0.9513	-4.1058	-0.0948	0.4003	0.9779
+-0.7449	-2.5754	0.1638	-0.3664	-1.0395
+-2.8790	-6.2301	-1.0229	3.2147	-1.2238
+0.4482	-6.0936	0.7186	2.6087	1.1673
+-0.8581	-4.9450	1.2239	2.8168	1.9155
+-1.9585	-5.5486	0.1892	2.2741	0.9021
+-0.7563	-3.0605	0.3737	-1.2120	-0.5686
+-1.2176	-5.9017	-0.7336	3.0326	-1.7117
+-0.3342	-6.3764	-0.2172	4.3070	-1.8445
+-1.8562	-1.9825	0.1295	-1.7173	1.1371
+-0.2782	-4.6277	1.0273	1.3582	1.5796
+-2.8011	-3.9132	-0.0397	-0.1702	0.0073
+0.4691	-5.7114	1.0759	2.0051	2.1326
+-0.3390	-3.6590	-0.0755	-0.2534	1.4111
+-1.6579	-2.2403	0.1202	-2.8767	1.5805
+-1.9985	-1.4155	-0.0361	-2.4219	1.1876
+-0.7262	-2.5969	-0.9169	-1.8642	-1.8831
+0.2857	-6.9537	-0.1326	4.3486	-0.2990
+0.3086	-4.2096	1.1225	0.8869	2.5561
+-1.0663	-4.8302	-0.2336	1.1157	1.8525
+1.8021	-2.9988	0.2358	-0.4046	0.6294
+-0.6291	-4.2600	-0.6726	1.3688	-0.7486
+-0.3361	-2.0217	-0.1530	-1.7010	-2.3760
+1.2746	-3.1963	1.5362	-0.5552	0.5358
+1.3034	-4.6254	1.0466	0.2558	2.0886
+-1.8983	-4.7895	-0.5294	2.0973	-1.6399
+0.4626	-5.0153	1.1064	3.2728	-0.2280
+0.7110	-3.3192	-0.2483	-0.9576	0.0827
+-0.1416	-5.5202	1.0561	2.5331	0.7097
+-0.7655	-3.2629	0.7123	0.0562	1.2078
+-1.3523	-4.5564	0.2855	1.3386	-2.0493
+-1.7088	-2.2341	0.0825	-2.0599	0.8335
+-3.1236	-5.9987	-0.0811	3.0773	1.9977
+1.2663	-4.4825	1.5603	1.2249	1.6582
+-0.6526	-5.8705	0.9230	2.7318	1.7473
+0.3953	-2.5365	-0.0945	-3.0766	1.7426
+-0.1625	-3.4281	0.1297	-0.4754	1.6961
+-2.2140	-5.0133	-0.0794	0.9143	2.5583
+-0.4613	-6.2075	1.2719	4.1384	2.7730
+-1.4160	-7.0144	-0.4855	3.6068	0.5113
+0.6889	-3.8158	1.3029	0.6781	1.8137
+0.9439	-4.3010	0.8782	0.0062	2.0116
+-1.8597	-3.7948	-1.2024	-0.3578	-2.7539
+-1.6633	-3.2509	-1.2846	-1.4958	0.0063
+-1.6431	-2.0504	-0.6246	-2.3438	-2.7969
+-2.3001	-4.4964	-0.8472	-0.0680	-0.8250
+-1.5611	-4.4606	0.2359	0.9168	-0.6425
+1.3137	-1.1202	1.7077	-2.6874	1.5428
+-1.6780	-5.6718	0.0590	2.3780	-1.0233
+-2.5783	-6.3740	-0.3193	3.5583	3.0596
+-0.6061	-2.7607	-1.2368	-1.6965	-2.5497
+-2.6190	-3.5589	0.1136	0.0072	2.0735
+-1.0030	-6.4175	0.5149	3.7496	0.7330
+-1.7912	-5.1464	-1.0372	1.4546	-0.7956
+-1.5792	-4.4416	0.1851	0.9401	-1.6688
+0.2980	-4.1670	-0.0238	0.2527	-0.5423
+-1.1631	-3.4712	0.8661	-0.5976	1.4833
+-0.2476	-5.4577	-0.6047	2.7129	-1.6734
+0.2774	-6.7209	0.9426	4.4414	-0.2085
+-2.3883	-2.2052	-1.6941	-2.3223	-1.2636
+-1.3874	-4.1603	0.0421	0.9842	1.4190
+-2.8690	-5.6070	0.0897	3.4202	1.5811
+-0.6246	-6.5834	0.4792	3.6943	1.5019
+0.0832	-2.1370	-1.5266	-2.4836	-2.9471
+0.4752	-5.1350	1.0637	3.0672	-1.3305
+-1.1580	-4.1051	0.2756	1.9264	-1.8723
+-1.5835	-4.6959	-0.7588	-0.2192	0.3629
+-2.3501	-1.8305	-1.6013	-2.4534	-2.6362
+-0.9423	-3.9026	-0.2186	0.8125	-2.2811
diff -r daece0f27108 -r fe627c026dc6 test-data/pca_classical_output.dat
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pca_classical_output.dat	Tue Apr 13 20:44:53 2021 +0000
@@ -0,0 +1,300 @@
+-3.9779	1.8818	2.6506	-1.1628	-0.0983
+-2.3700	0.6756	-1.6186	-1.4164	1.0327
+-3.0925	2.4577	-1.8726	-0.4041	1.5016
+-4.0151	2.3686	0.3256	-0.0685	-0.2168
+-4.7065	3.1394	2.3137	0.1907	-1.6303
+-4.9613	4.6941	1.2908	1.0298	-0.8817
+-4.8364	5.2936	0.8793	1.3212	0.0849
+-5.0224	2.7565	4.6351	-0.5397	-1.3489
+-2.9068	1.6461	-1.9066	-0.5100	-0.6820
+-4.7713	4.0171	2.1659	0.5434	-1.4630
+-2.7521	-1.9309	-0.9117	-3.5728	1.7161
+-3.7407	1.3845	0.0141	-1.5357	1.3068
+-4.3751	1.0682	2.3906	-1.5298	0.9064
+-2.8532	0.2430	-0.8937	-1.7428	-1.1825
+-2.4091	-0.1393	-0.0986	-2.0343	1.5498
+-3.8699	2.7278	0.9261	-0.4924	0.7933
+-4.6180	3.2535	2.4650	-0.0468	0.8258
+-2.3362	3.2961	-2.7247	0.9321	-0.1294
+-1.7741	0.3277	-3.2195	-1.6272	1.9721
+-2.9188	3.5196	-1.5959	0.6981	0.3541
+-4.0500	2.5696	1.0924	-0.1582	-1.7489
+-5.3435	6.4383	1.7066	1.9754	0.9120
+-4.1454	6.1296	-0.3939	1.8899	0.9309
+-2.4452	-0.6422	0.2580	-2.5687	0.0322
+-5.2047	7.4266	2.4849	2.8623	-1.3975
+-2.5237	-2.4427	-1.5498	-3.1847	-1.6248
+-1.7613	1.6919	-4.6466	-0.5331	1.3921
+-1.1236	2.5796	-4.4469	0.3771	0.0692
+-2.5892	3.4039	-1.3071	0.2542	0.1349
+-3.5099	-0.9352	1.4462	-2.5959	-0.3994
+-3.9546	6.3431	0.5939	2.3052	-0.2344
+-2.0819	-1.6617	-1.0140	-3.0790	1.1571
+-2.6320	0.0703	-0.0526	-1.5092	-1.8043
+-1.8865	0.2234	-2.0628	-1.9632	2.3486
+-4.5803	3.1525	2.6718	-0.3104	0.7210
+-6.5473	2.5731	6.0056	-0.6369	-1.2620
+-5.8360	4.1304	4.6222	0.5425	-0.1996
+-1.1136	3.4820	-5.1375	0.4626	3.1966
+-4.7698	0.9659	3.3912	-1.9008	-0.0558
+-2.4391	-0.7627	-1.6123	-1.9380	-0.8111
+-4.8859	4.2069	1.6117	0.6831	0.8832
+-2.2466	-1.0655	-1.2645	-2.4867	-0.1625
+-4.4644	5.0000	1.1159	1.6869	-0.4784
+-3.7358	0.9995	1.6597	-1.3727	-1.7457
+-5.8271	3.6580	5.8537	-0.0174	-0.9750
+-4.7392	4.2895	2.1369	0.6907	-0.6801
+-1.5432	2.7403	-5.0837	0.0664	2.6107
+-5.7249	7.7343	3.2678	3.5103	-2.0555
+-4.2566	5.3778	1.2450	1.4052	0.2429
+-5.7932	5.3928	4.6237	1.0285	1.0814
+-2.9574	-1.1660	1.2251	-2.5803	-0.5026
+-2.0365	4.7362	-3.8569	1.8582	-0.6083
+-5.1883	6.2608	1.6921	2.1737	0.9110
+-5.5934	1.2903	5.3088	-1.4372	0.2000
+-1.4178	0.5340	-3.0765	-1.4210	1.9659
+-5.1568	4.3100	2.6279	0.8400	-0.4656
+-4.2551	3.3395	1.2265	-0.0344	-0.0296
+-6.4636	3.6525	5.4351	-0.1493	1.1392
+-4.0271	-0.6214	2.0667	-2.5704	0.5389
+-3.2885	2.2421	0.4406	-0.5508	0.4760
+-3.2320	3.1264	0.1610	0.0045	-0.3199
+-2.6003	5.2398	-2.1366	1.6829	0.7428
+-4.3207	1.7506	1.6012	-0.9072	-1.5917
+-1.9287	2.7030	-3.8706	0.1751	0.9751
+-4.6549	5.5519	2.1315	1.7555	-0.4025
+-2.4743	1.5111	-1.6381	-0.8537	-0.4237
+-1.2837	3.5483	-5.9098	0.8155	0.5023
+-3.9514	5.4703	0.2135	1.6665	0.0226
+-3.1575	3.1697	-2.0242	-0.1906	2.4084
+-6.7971	3.1578	6.8243	-0.5140	-0.4121
+-5.9999	3.1135	6.0259	0.1711	-2.0321
+-2.3450	1.9814	-1.1103	-0.7338	0.6581
+-1.5478	0.3095	-3.1375	-1.9311	2.3145
+-3.6067	1.2237	-0.4271	-1.2399	-0.0987
+-3.0574	-0.0303	-1.0815	-1.5251	-1.7385
+-3.7608	4.9627	0.5748	1.3373	1.6977
+-3.3834	2.2529	-1.4015	-0.3531	-0.8381
+-5.3297	2.0845	4.0157	-1.0934	0.1069
+-4.6415	5.6565	1.0886	1.6713	-0.3536
+-4.7611	4.6882	1.0939	0.9883	1.7929
+-1.7499	1.7738	-2.6457	-0.0629	-0.5751
+-5.1579	7.5589	1.1299	3.1680	-0.8202
+-3.4019	-1.4226	0.3991	-2.5729	-0.9099
+-1.6689	1.3580	-3.7300	-0.7291	1.5630
+-5.5132	6.6256	3.6086	1.9423	0.3727
+-4.4010	7.0180	1.1796	2.6417	0.9847
+-2.1174	3.1273	-2.6107	-0.2004	2.3541
+-2.2818	-0.7861	-1.5672	-1.8685	-1.2308
+-4.2055	4.8158	-0.1348	1.2570	-0.2039
+-2.2741	1.1907	-1.5868	-1.0998	-0.5999
+-3.0433	3.1513	-1.8017	0.1704	0.3636
+-5.3872	1.7330	5.6772	-1.1538	-0.2345
+-3.5773	2.5712	-0.8771	0.2747	-1.2405
+-3.0843	1.4711	-0.1928	-1.2214	1.2785
+-1.9572	3.5730	-4.2197	0.3158	2.0016
+-2.3444	4.7106	-3.7159	1.0094	1.7919
+-3.4024	1.1605	0.5845	-1.1358	-0.6689
+-3.2321	4.3272	-1.2592	1.0365	-0.4073
+-5.0553	5.8588	3.0041	1.9760	-0.7261
+-3.6706	1.0101	1.8198	-1.8471	1.1714
+8.9574	3.4341	-0.6861	1.5391	0.1971
+10.4081	3.3686	0.6688	0.9791	2.2503
+8.8871	0.8254	3.9087	-0.9576	1.5038
+2.1683	4.0337	-3.6737	1.1996	0.8734
+5.7699	3.5062	-1.2833	0.8577	2.4657
+11.3777	1.8612	2.4649	0.1035	0.6817
+10.2968	3.5175	1.6948	1.6534	-1.0812
+7.4297	-0.8987	3.2658	-1.7151	-1.7543
+0.3252	3.7494	-4.2495	1.5590	-1.5370
+10.1331	4.8737	0.4858	2.3869	1.3120
+9.7602	2.5946	1.2877	0.8963	-0.9431
+6.3294	0.2420	0.0602	-0.3493	-2.5248
+-1.0055	-0.1004	-1.8691	-2.0088	1.7743
+7.0020	0.1138	2.6978	-1.2615	0.7305
+-0.8476	3.5869	-4.4656	1.2269	-1.3229
+1.5347	3.8958	-2.1649	0.5727	2.8613
+-1.1039	-1.9065	-1.5394	-2.3618	-2.7727
+0.8179	-1.7488	-0.4802	-2.6368	-1.4583
+1.4527	1.0742	-2.4115	-0.3672	-1.6999
+2.5720	-0.8562	0.7638	-2.6926	1.6123
+6.1515	-1.1095	3.4326	-2.4435	1.2520
+1.0178	1.8481	-3.4738	-0.0814	0.4915
+4.8837	2.0690	-1.0561	0.4931	-2.2182
+6.0339	2.9506	0.3218	0.6312	1.5915
+9.3950	-1.1761	4.1952	-2.3244	1.4185
+8.9403	4.0562	1.0275	1.7812	0.6851
+2.9249	-1.4130	2.0969	-2.9718	0.8180
+5.1324	2.3888	-0.7332	0.2684	1.1412
+0.9835	-1.6716	0.8378	-3.0465	0.5331
+1.1708	1.4905	-1.3652	-0.1570	-2.2617
+-1.8828	2.0223	-3.2202	-0.0543	-1.9808
+0.5994	3.1729	-4.3744	0.9607	-0.7149
+7.1452	-0.1955	3.9804	-1.6216	0.4437
+8.1279	2.8675	1.1357	0.9555	-0.2832
+4.7280	2.5837	-0.5679	-0.0291	2.4888
+10.7502	1.9831	2.6421	0.5508	-1.3455
+3.4919	0.5382	-0.2017	-1.4828	1.4970
+1.6535	-0.3138	-0.6716	-1.4892	-1.0583
+-0.6194	-1.1388	-2.0794	-2.2235	-1.8344
+-1.6807	2.3272	-4.5348	0.4232	-1.1923
+-0.0933	2.2712	-4.4117	0.2252	-1.4704
+0.9280	1.2142	-1.1817	-0.4388	-1.7023
+7.6300	2.3460	-0.7224	0.8452	-1.0681
+4.8565	4.5339	-1.3839	1.3020	2.7231
+6.4818	2.3348	0.5405	0.3241	0.3556
+8.7788	4.0387	-0.5764	2.0785	-1.5655
+7.3565	2.8117	1.2035	0.7784	0.3995
+4.5695	1.5832	-1.9081	0.1707	-1.8627
+8.6604	3.4046	-0.1895	1.9467	-2.1641
+7.4043	2.7787	-1.2558	1.0864	-0.3961
+1.5577	3.6391	-3.9646	0.8673	0.8518
+2.5617	-1.7256	-0.1226	-2.3352	-1.2180
+3.9498	0.7254	0.1543	-0.6388	-0.7131
+5.4952	2.8144	-1.4037	0.7478	0.5767
+7.4705	4.1598	-2.0259	1.8600	-0.0993
+0.0598	0.8416	-1.8465	-1.1915	1.1069
+3.1833	2.4826	-1.7316	0.3751	-0.1741
+-0.8004	-1.5851	-0.3839	-3.2690	1.6516
+2.8531	0.6398	0.3016	-1.1929	0.3837
+8.7954	4.0057	0.5143	1.4523	0.8167
+7.6619	1.6910	0.6441	0.3695	-0.7369
+2.1000	1.4302	-0.4868	-0.3345	-1.5018
+3.2123	2.3107	-2.2237	0.0569	1.4980
+9.7191	1.9952	2.9522	-0.2021	1.3635
+6.7689	0.8412	0.7453	-0.4112	-1.8253
+-0.2193	2.5655	-4.8300	0.4753	-0.3690
+4.1903	1.7643	-2.3111	0.2086	-1.7388
+0.2129	2.0407	-2.9543	0.2723	-1.4070
+0.3409	-1.3345	-1.3839	-2.5782	0.0244
+10.9350	1.1854	3.4640	-0.0883	-0.3700
+5.8165	0.6966	0.3014	-0.4095	-1.0547
+1.1058	1.6680	-2.1525	-0.5345	0.1171
+5.9936	-1.3469	1.5152	-1.8738	-2.3045
+-1.8666	-1.9038	-1.3508	-2.8457	-0.8487
+4.7057	3.0603	-2.0698	1.1172	-1.2828
+4.8382	2.7905	-2.4100	1.0784	-1.8450
+7.7583	-0.1178	3.4031	-1.0477	-1.6762
+-1.0944	-1.7645	0.1859	-2.5096	-2.9060
+6.7410	-0.4974	3.0210	-1.5827	0.0429
+2.8538	0.5411	0.4132	-1.6065	1.2372
+7.7713	1.7433	2.3259	-0.0797	0.8617
+4.9205	-1.1654	0.4683	-1.8680	-2.2668
+3.9175	0.5662	0.1128	-1.3169	1.9494
+-0.8118	-0.8348	-2.4830	-2.1239	-1.3528
+5.1125	-1.0569	2.7024	-2.3925	1.2405
+-0.3321	3.5314	-2.5750	0.4625	2.4769
+6.0636	-0.2583	1.0393	-1.1765	-2.0102
+10.2001	4.4529	0.5351	2.4796	-2.2048
+6.0224	0.4141	3.0444	-1.6036	1.3780
+9.5469	2.9457	1.8870	1.5290	-1.8539
+1.2694	0.9305	-1.2315	-0.5435	-1.8862
+7.3444	3.6803	-1.8285	1.8223	-0.6988
+0.8728	-1.0305	-1.6295	-1.8965	-1.8792
+-0.8348	2.7660	-5.3706	0.4722	-0.2320
+7.9119	2.9566	-0.1448	0.7199	1.3050
+1.2929	-1.3724	0.0347	-3.0890	1.5365
+6.4039	0.7545	2.1608	-1.1811	0.7715
+-1.7913	-1.2703	-0.6435	-2.7504	0.4447
+0.0868	-0.4073	-1.2184	-1.6489	-2.2725
+4.9808	-1.5720	2.3203	-2.7613	1.7252
+-2.5810	-5.5079	0.0561	2.7450	-1.9096
+0.0370	-2.1438	-0.4775	-3.6280	1.6759
+1.0997	-3.5629	1.4259	1.7291	-1.2305
+-1.3638	-2.9435	-0.1034	-1.4507	-0.9311
+-0.7078	-4.3400	-1.1441	0.4367	-1.7198
+-0.5174	-3.5645	0.2025	0.4664	0.8917
+-2.6934	-6.0138	-1.9617	2.8409	-1.2291
+-0.9761	-2.4423	-1.2032	-2.4114	-2.0578
+0.4854	-5.7924	0.7961	2.7641	0.1446
+0.2325	-3.0261	-0.5759	-1.7790	0.5984
+1.4671	-1.9240	0.8327	-1.8960	-0.4687
+-2.5020	-5.9264	-1.0911	2.3968	0.3954
+0.5726	-3.6055	1.0491	-0.0200	1.7178
+1.2348	-2.2932	1.4641	-1.1600	0.3434
+1.5601	-1.7994	0.7362	-1.6966	0.7891
+-1.2684	-4.2416	-1.5761	0.0715	-1.2373
+-1.2743	-1.4687	0.5370	-2.5136	1.0946
+-0.1360	-4.4661	-0.0391	0.5382	2.6005
+-0.3517	-4.6240	-0.4723	1.6165	-2.1852
+1.4760	-2.5560	0.2351	-0.9202	0.4155
+-1.7783	-5.3517	0.7277	3.2291	1.2118
+1.6356	-3.7406	0.1185	1.1998	-2.1563
+-1.5925	-4.0036	-0.4063	-0.3156	1.4120
+0.5318	-6.1624	1.3086	4.4905	-1.6720
+-2.1599	-2.1987	-1.7868	-2.7058	-0.8553
+-0.1707	-3.3282	0.2381	-0.0381	0.8412
+0.2620	-2.7657	-0.3002	-2.0651	-1.3784
+-1.4631	-2.2472	-0.7439	-2.3037	1.4133
+-0.6767	-2.6672	0.4721	-0.4983	-0.3085
+0.8239	-2.9922	0.8695	1.0520	-1.3905
+-2.8612	-4.9086	-0.2330	1.8419	-2.0691
+0.9513	-4.1058	-0.0948	0.4003	0.9779
+-0.7449	-2.5754	0.1638	-0.3664	-1.0395
+-2.8790	-6.2301	-1.0229	3.2147	-1.2238
+0.4482	-6.0936	0.7186	2.6087	1.1673
+-0.8581	-4.9450	1.2239	2.8168	1.9155
+-1.9585	-5.5486	0.1892	2.2741	0.9021
+-0.7563	-3.0605	0.3737	-1.2120	-0.5686
+-1.2176	-5.9017	-0.7336	3.0326	-1.7117
+-0.3342	-6.3764	-0.2172	4.3070	-1.8445
+-1.8562	-1.9825	0.1295	-1.7173	1.1371
+-0.2782	-4.6277	1.0273	1.3582	1.5796
+-2.8011	-3.9132	-0.0397	-0.1702	0.0073
+0.4691	-5.7114	1.0759	2.0051	2.1326
+-0.3390	-3.6590	-0.0755	-0.2534	1.4111
+-1.6579	-2.2403	0.1202	-2.8767	1.5805
+-1.9985	-1.4155	-0.0361	-2.4219	1.1876
+-0.7262	-2.5969	-0.9169	-1.8642	-1.8831
+0.2857	-6.9537	-0.1326	4.3486	-0.2990
+0.3086	-4.2096	1.1225	0.8869	2.5561
+-1.0663	-4.8302	-0.2336	1.1157	1.8525
+1.8021	-2.9988	0.2358	-0.4046	0.6294
+-0.6291	-4.2600	-0.6726	1.3688	-0.7486
+-0.3361	-2.0217	-0.1530	-1.7010	-2.3760
+1.2746	-3.1963	1.5362	-0.5552	0.5358
+1.3034	-4.6254	1.0466	0.2558	2.0886
+-1.8983	-4.7895	-0.5294	2.0973	-1.6399
+0.4626	-5.0153	1.1064	3.2728	-0.2280
+0.7110	-3.3192	-0.2483	-0.9576	0.0827
+-0.1416	-5.5202	1.0561	2.5331	0.7097
+-0.7655	-3.2629	0.7123	0.0562	1.2078
+-1.3523	-4.5564	0.2855	1.3386	-2.0493
+-1.7088	-2.2341	0.0825	-2.0599	0.8335
+-3.1236	-5.9987	-0.0811	3.0773	1.9977
+1.2663	-4.4825	1.5603	1.2249	1.6582
+-0.6526	-5.8705	0.9230	2.7318	1.7473
+0.3953	-2.5365	-0.0945	-3.0766	1.7426
+-0.1625	-3.4281	0.1297	-0.4754	1.6961
+-2.2140	-5.0133	-0.0794	0.9143	2.5583
+-0.4613	-6.2075	1.2719	4.1384	2.7730
+-1.4160	-7.0144	-0.4855	3.6068	0.5113
+0.6889	-3.8158	1.3029	0.6781	1.8137
+0.9439	-4.3010	0.8782	0.0062	2.0116
+-1.8597	-3.7948	-1.2024	-0.3578	-2.7539
+-1.6633	-3.2509	-1.2846	-1.4958	0.0063
+-1.6431	-2.0504	-0.6246	-2.3438	-2.7969
+-2.3001	-4.4964	-0.8472	-0.0680	-0.8250
+-1.5611	-4.4606	0.2359	0.9168	-0.6425
+1.3137	-1.1202	1.7077	-2.6874	1.5428
+-1.6780	-5.6718	0.0590	2.3780	-1.0233
+-2.5783	-6.3740	-0.3193	3.5583	3.0596
+-0.6061	-2.7607	-1.2368	-1.6965	-2.5497
+-2.6190	-3.5589	0.1136	0.0072	2.0735
+-1.0030	-6.4175	0.5149	3.7496	0.7330
+-1.7912	-5.1464	-1.0372	1.4546	-0.7956
+-1.5792	-4.4416	0.1851	0.9401	-1.6688
+0.2980	-4.1670	-0.0238	0.2527	-0.5423
+-1.1631	-3.4712	0.8661	-0.5976	1.4833
+-0.2476	-5.4577	-0.6047	2.7129	-1.6734
+0.2774	-6.7209	0.9426	4.4414	-0.2085
+-2.3883	-2.2052	-1.6941	-2.3223	-1.2636
+-1.3874	-4.1603	0.0421	0.9842	1.4190
+-2.8690	-5.6070	0.0897	3.4202	1.5811
+-0.6246	-6.5834	0.4792	3.6943	1.5019
+0.0832	-2.1370	-1.5266	-2.4836	-2.9471
+0.4752	-5.1350	1.0637	3.0672	-1.3305
+-1.1580	-4.1051	0.2756	1.9264	-1.8723
+-1.5835	-4.6959	-0.7588	-0.2192	0.3629
+-2.3501	-1.8305	-1.6013	-2.4534	-2.6362
+-0.9423	-3.9026	-0.2186	0.8125	-2.2811
diff -r daece0f27108 -r fe627c026dc6 test-data/pca_incremental_header_names_output.dat
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pca_incremental_header_names_output.dat	Tue Apr 13 20:44:53 2021 +0000
@@ -0,0 +1,300 @@
+-4.5041	1.2787	-0.9244	-0.6417	2.4742	-0.6790	-2.2376
+-2.3058	0.3190	2.0467	-1.5089	-1.7879	-2.8580	2.3471
+-3.6298	1.9778	0.5695	3.2844	-2.1532	1.0649	0.2268
+-4.9371	1.9948	-1.1517	-0.7883	0.1443	0.3281	-1.6340
+-5.1251	2.1148	1.0578	-1.2113	2.7850	-0.4718	-0.5876
+-5.2311	3.1236	2.4521	-0.1645	2.0950	1.3657	-1.2221
+-5.2736	3.9401	3.1613	-0.4221	1.4421	0.6351	1.4170
+-5.8843	2.3108	-0.9970	-2.5925	4.5634	-1.7216	-0.6943
+-3.0847	0.9348	1.2297	-0.1673	-1.5983	-0.3054	-0.8598
+-5.0726	2.7358	2.1948	0.6672	2.8109	0.2199	0.2954
+-3.0489	-1.4621	-1.4039	2.2051	-2.1125	-3.5712	1.8233
+-3.9828	0.4789	-0.4798	3.1414	-0.2706	1.0323	-3.0673
+-4.7769	0.5637	-0.6189	2.4042	1.8522	-0.8892	0.1666
+-2.8177	-0.2411	1.1106	-1.6952	-0.7266	-2.7600	-0.0377
+-3.3395	0.6116	-2.4104	0.4482	-1.2702	-2.2185	1.2082
+-3.7895	1.3834	1.9512	0.4855	1.2103	0.2934	-1.2998
+-5.6850	2.9597	-0.6627	-1.3921	1.9849	-0.8803	0.8756
+-2.3684	1.9853	2.1848	1.9063	-1.9759	2.9765	-2.6155
+-2.7658	1.1415	-1.5911	-2.5974	-4.2491	-2.4650	1.0467
+-3.4687	2.6724	0.7399	-2.5064	-1.2477	1.3807	-2.8392
+-4.1960	1.4928	1.5341	3.2301	1.6231	0.5496	-0.1931
+-6.8974	5.6555	-0.4728	-0.2712	1.6418	2.8355	-1.2524
+-5.4610	5.7033	1.6135	-2.4726	-0.3671	0.4024	2.5259
+-3.6199	0.4197	-3.3176	0.7440	-0.9565	-3.1444	1.3050
+-6.9755	6.9895	0.8969	-1.2372	2.7908	1.0709	2.8530
+-2.4744	-2.4406	-1.2029	1.7749	-1.8143	-2.5760	-1.4119
+-2.2670	1.4244	0.3150	-0.3939	-4.8261	0.5653	-1.5405
+-1.2997	1.8646	1.6713	0.9233	-3.9371	1.9971	-2.1376
+-2.9030	2.7832	2.8769	-2.1591	-0.9938	-1.4089	2.2805
+-4.0193	-0.7911	-2.3511	-2.6334	0.8198	-3.1752	-1.7134
+-4.6438	5.1543	2.9044	-3.5749	1.3863	1.1820	0.2145
+-3.1379	-0.3608	-3.4466	2.6176	-2.5651	-3.1252	2.2431
+-3.3334	0.1605	-2.3825	-0.8492	-0.2283	-0.9548	-3.0823
+-1.8428	-0.3384	-0.2287	1.0854	-2.4548	0.2911	-3.1773
+-5.7377	2.9593	-1.3065	2.8244	2.0424	0.2718	0.8933
+-7.5818	1.9024	-2.0175	1.7637	5.6534	-0.4313	-0.7429
+-6.7842	3.2537	-0.5172	1.7928	4.5232	1.0479	-0.2527
+-2.6542	4.2982	-0.6507	-0.7068	-6.0759	0.4216	2.2052
+-4.7066	-0.0723	0.5610	2.7188	3.2642	-1.3372	-0.2390
+-3.1046	-0.4176	-2.2528	3.1483	-2.1742	-0.7089	-0.9199
+-5.1411	2.5961	1.4787	2.5253	1.9749	2.6935	-2.1514
+-3.0358	-0.4673	-3.0178	-0.1183	-2.0737	-1.9430	-1.7089
+-4.8013	3.3868	2.2209	-2.8917	1.9785	1.7562	-2.4000
+-4.8966	1.5345	-1.9107	1.6206	1.0586	-2.4528	2.2876
+-6.4597	2.5302	-0.0772	3.0385	5.9297	0.5882	-0.3289
+-5.9218	3.7935	-0.2556	3.3005	2.0236	1.1245	1.1257
+-2.6684	3.1373	-0.3467	-3.5191	-5.6729	-0.1143	-0.0563
+-6.6356	5.9313	2.3230	-4.2617	4.5651	2.7706	-2.2094
+-5.6003	4.9145	-0.0899	0.9846	1.1772	1.7796	0.3640
+-7.1334	4.7398	-0.3982	-1.1846	4.2620	0.3247	0.9009
+-3.7706	-0.4166	-2.7645	3.4809	0.1927	-2.6134	1.7933
+-2.5829	3.6848	2.0472	0.9575	-3.0212	3.5198	-2.3061
+-5.9636	4.7293	1.7384	-3.3395	2.2166	2.2397	-1.9459
+-5.6726	0.3054	0.5239	-1.8098	5.0980	-2.8714	0.6939
+-1.6695	0.2400	-0.7679	2.4374	-3.4909	1.2909	-3.0332
+-5.3286	2.8034	2.9264	-1.9200	3.2485	-0.4062	0.7878
+-4.1365	1.8401	2.8304	-2.6400	1.8543	-0.6305	-0.9748
+-7.3517	2.9113	-0.3355	-1.6157	4.9779	-1.2069	1.0193
+-4.1967	-0.9651	-1.0766	-0.8147	1.4782	-2.8124	-0.7958
+-3.3859	1.1867	0.7010	1.8196	0.6206	1.2032	-2.4485
+-4.8094	3.7086	-1.5026	-1.1737	-0.4332	-1.3418	2.0650
+-3.6897	4.9698	1.8855	1.2100	-2.0871	1.2348	3.0630
+-4.6351	1.0548	0.6174	-1.1733	1.8384	-1.7480	-0.2812
+-2.5884	2.4750	0.7398	1.9273	-3.9507	1.3361	0.0575
+-4.9906	4.0057	3.2665	-2.0939	2.9379	0.7270	0.5404
+-3.2473	1.8445	0.2434	-0.5658	-1.9457	-2.2797	2.6466
+-2.3835	3.6974	0.5424	0.1868	-5.9273	1.4366	0.1764
+-4.4944	4.1873	2.5387	2.3093	0.7909	2.4807	0.3451
+-4.0672	3.1357	0.7866	0.0835	-2.6209	-0.6464	2.6246
+-7.7257	2.4335	-0.7824	4.0495	6.3356	-0.7156	2.2593
+-7.7345	3.3520	-3.0476	3.4232	5.3875	-0.3854	2.2324
+-2.5709	1.6372	2.0053	0.0627	-1.1963	-1.7860	2.6362
+-1.4913	0.2742	2.0702	-1.8860	-3.6079	-3.4077	2.9305
+-3.8738	0.4747	-0.3085	0.2335	-0.4071	0.0756	-3.0254
+-4.1544	0.8081	-1.6855	-1.7544	-1.6783	-3.7309	2.5241
+-4.8867	4.3285	0.0026	0.5021	0.3675	2.2662	-0.5320
+-3.4362	1.1688	1.6552	-1.2709	-0.8309	0.0186	-2.1603
+-5.7805	1.1703	-0.7154	-1.9988	3.8637	-1.0734	-2.3152
+-6.2709	5.2044	-1.2410	2.2091	1.0452	3.1747	-1.3561
+-5.3901	3.6522	1.9170	0.3815	1.0644	0.8535	1.5809
+-2.3241	1.7176	0.5907	0.3335	-2.6000	-0.2806	0.7917
+-6.0218	6.1509	3.7438	-0.9355	2.0688	1.9320	2.2931
+-3.8865	-1.2257	-2.1561	2.3774	-0.2732	-2.2308	-0.1288
+-1.8827	1.0374	1.1856	0.3511	-3.8963	-0.1608	0.1192
+-6.2846	5.1593	2.3278	-0.6188	4.0817	1.5186	0.6472
+-5.2141	5.7113	2.8258	-0.2090	1.7114	2.6014	0.9906
+-2.5811	2.8134	1.9097	-2.6507	-2.8032	-0.9899	1.3097
+-3.0815	-0.4578	-3.1189	2.2872	-2.0861	-0.2196	-2.7690
+-6.1254	5.2751	-1.0343	-1.0472	-0.6193	-0.2364	2.9693
+-2.8232	1.3373	0.5949	-0.7927	-1.7707	-2.4858	2.1362
+-3.4122	2.3597	1.8698	0.2527	-1.5604	0.3673	0.1838
+-6.4323	1.4615	-2.8697	1.1520	4.9814	-0.7643	-1.1231
+-4.0745	1.7887	0.4200	0.5898	-0.4801	1.1216	-1.8534
+-3.2629	1.1370	1.8248	-1.1693	-0.5124	-2.8929	3.1263
+-2.0028	2.5104	3.1459	-2.4141	-3.8226	0.7029	-1.0988
+-2.7361	3.6728	2.8194	-0.5496	-3.3320	1.9027	-0.7531
+-3.5250	0.4456	0.3977	0.7169	0.7105	-0.6279	-1.2819
+-3.9791	3.6674	1.5133	0.6258	-0.9034	1.2040	0.3977
+-6.2804	5.0438	0.4767	-2.3433	3.3174	1.0917	-0.0308
+-3.7420	0.1593	-0.3654	-2.0204	1.6037	-1.1253	-3.2746
+9.5151	3.7062	3.8697	1.3868	0.2586	1.7338	1.0834
+8.5812	6.2908	-4.1399	-1.4445	-0.4607	1.1125	0.1729
+7.6440	3.5377	-4.4023	0.3615	2.5846	-1.2183	1.0357
+2.5921	3.1502	4.9758	-2.6982	-2.6638	0.6784	0.1647
+4.4183	5.1293	-1.9198	-1.9122	-2.0053	1.0818	-0.1255
+11.5799	3.0356	0.0777	3.1485	2.6135	1.7393	-1.2853
+10.6151	4.2457	2.7079	-2.5390	2.7172	0.4983	0.1686
+6.5518	1.4131	-4.3284	3.0582	2.4753	-1.4287	0.0280
+0.6110	2.7018	4.8455	1.9160	-2.9669	1.7329	1.0903
+8.8862	6.7098	-1.3650	0.0695	0.2906	3.0926	-0.2188
+8.3516	5.1600	-3.1937	-2.1437	0.9068	0.1059	-0.0776
+6.5749	1.0591	1.0860	-2.3285	0.6754	-1.9909	0.3649
+-1.3536	0.0963	-1.1881	-3.1040	-2.4649	-1.7361	-2.2365
+5.7554	2.5612	-4.7030	2.6323	1.4049	-0.6845	0.5814
+-2.5682	4.3850	-1.4615	1.9775	-4.6265	1.8620	0.5593
+1.5869	3.3199	3.0094	0.0874	-1.9313	1.4984	-0.1249
+0.0019	-2.7678	2.7422	0.3182	-0.7239	-2.9330	-0.4725
+2.3421	-2.9454	3.1008	3.8667	0.3177	-1.2439	-1.8251
+1.1281	1.5666	0.9091	-2.7407	-2.1087	-2.0198	0.9724
+1.5235	0.8024	-4.4624	1.8661	-0.6945	-1.5766	-0.8688
+7.4384	-0.9510	2.5739	0.7133	3.4514	-3.3296	1.7562
+1.9243	0.6118	4.2924	0.0358	-2.5054	0.8895	-1.6169
+4.9183	2.4368	1.8274	-0.6084	-0.2458	-0.1625	0.0769
+6.5999	2.8745	3.4413	-1.3241	0.8891	0.3147	0.4983
+10.2333	-0.1155	0.1643	4.0222	3.8264	-1.4876	0.6248
+6.7711	6.9607	-4.0865	0.3893	0.0512	1.4734	1.8907
+2.8361	-0.5592	-2.3155	0.2024	1.3347	-2.3676	-1.8030
+3.6792	4.2825	-2.5657	0.8910	-1.6217	0.2821	1.5377
+1.1960	-1.2069	-0.3808	2.9584	0.2167	-2.9221	1.3256
+2.5793	-0.1009	5.5918	0.3863	0.2953	-0.1268	-1.0559
+-2.5483	1.9288	0.6106	1.7118	-2.9326	0.0715	0.4489
+1.0182	2.3331	5.2430	-2.6561	-3.2526	-0.6230	1.7928
+6.2254	2.0926	-3.7485	2.8778	2.7909	-1.7703	1.8600
+7.7947	3.9599	0.3899	2.8628	1.3342	1.5191	0.8549
+5.9341	1.6811	5.0814	-1.7636	0.2261	0.0923	-0.4983
+8.9422	5.1988	-5.1226	-1.4216	1.8601	-0.2483	0.0129
+3.7559	0.7134	0.5774	2.1676	-0.3686	-0.1396	-0.9686
+1.2647	0.2938	-1.6141	-1.4450	-0.8494	-1.4320	-2.0799
+0.8697	-2.3541	4.8819	1.1583	-1.0850	-3.0040	1.1390
+-2.4243	2.2533	0.6132	-0.4352	-4.2540	0.4717	-0.2835
+0.8691	0.8276	5.7945	3.0405	-3.0247	0.7777	0.7993
+-0.0306	2.1994	-1.3816	-2.7338	-1.3169	-1.7597	0.1966
+6.8891	3.7843	-1.1987	-0.7267	-0.5589	1.1499	-1.3620
+4.9555	4.3737	3.2795	-2.9743	-0.9193	1.2818	-0.3394
+4.6003	4.9106	-4.1796	1.8274	-0.5694	0.3444	1.7707
+6.5173	6.8499	-4.3867	0.6973	-1.0706	2.3430	0.0319
+5.9594	4.7200	-3.3220	1.7297	0.5827	1.8873	-0.7937
+4.8011	1.7963	2.2601	-0.6781	-1.1054	-0.4366	0.0862
+9.4941	3.2988	4.8488	-1.5745	1.4571	0.8634	0.3784
+5.9275	5.0975	-1.9945	-1.7249	-1.7025	-0.2676	2.1382
+2.2637	2.3526	5.0792	2.9659	-2.8853	2.6294	-0.5906
+2.1362	-0.5662	-2.8698	-1.5406	-0.6178	-2.4869	-1.9779
+3.4238	1.6207	-2.0697	0.5927	0.0162	0.5742	-2.6645
+4.6129	4.0668	-0.4489	-0.0917	-1.6317	0.5627	1.0477
+5.6344	6.3936	-2.3886	-2.1122	-2.4168	1.2380	0.6516
+0.6899	-0.0045	2.5945	2.0729	-1.5520	0.0329	-0.7707
+2.1954	3.4315	-1.1342	3.2093	-1.9723	1.5515	0.1762
+-0.2728	-1.8801	0.5443	2.9489	-0.9169	-2.2882	-0.0039
+2.2464	1.6663	-1.2627	2.9359	-0.3243	-0.8919	1.4302
+7.8136	5.7358	-0.3169	-2.0148	0.3495	0.3957	1.5594
+7.2521	2.9262	-0.9792	-1.4871	0.7949	0.4403	-1.6491
+1.7315	1.7501	-0.2721	-1.8382	-0.2053	-0.5254	-1.7868
+4.5382	0.9630	5.3634	-1.6540	-1.0877	0.7456	-1.9428
+8.4922	4.3959	-4.0657	-1.0685	2.0394	0.3027	-1.5177
+5.8208	2.8653	-2.3174	3.0490	0.3595	-0.5681	1.6879
+-1.5657	3.4389	-0.8766	-0.7644	-5.0871	0.1973	0.8505
+2.9262	3.2748	-2.6100	-3.4518	-2.3979	-0.1388	-2.1841
+1.5202	0.3470	6.4457	-0.8811	-1.3563	-0.4038	0.2652
+0.2160	-0.8406	-1.2438	-1.9606	-1.8069	-2.7155	-1.5833
+9.9773	3.8447	-3.0340	-2.7374	2.8639	-1.6783	0.8315
+6.8182	0.6939	3.8276	0.0235	1.1482	-1.6252	1.8372
+0.4703	2.0718	-0.5704	0.1399	-2.3166	0.1054	-1.0614
+7.0703	-1.1055	1.8762	0.1576	2.2093	-2.5649	-0.3973
+-0.4376	-3.2665	3.1890	3.8191	-0.7461	-1.6104	-0.9830
+5.6321	2.2267	5.3601	2.1586	-0.6273	1.4508	0.5460
+4.3783	3.6338	1.5671	-0.9206	-1.8438	-0.2069	1.8630
+6.6664	2.3012	-4.4266	-2.9484	2.7377	-2.3245	-0.9019
+-0.7160	-1.9051	0.3750	1.8277	0.4336	-2.9009	0.0458
+6.8161	0.6358	-1.6462	-2.7192	2.7982	-1.8084	-2.3482
+3.6202	0.2021	2.5073	-2.1632	0.6444	-2.1642	-0.4466
+7.9950	2.4238	0.6365	-1.3418	2.4997	-0.0271	-1.1315
+6.3963	-1.4724	3.6385	2.0221	1.4533	-2.2412	0.4731
+5.1654	-0.2758	3.2312	0.1821	0.6037	-0.1159	-2.1626
+1.0380	-2.7407	5.4789	-0.6575	-1.1424	-2.0608	-1.9043
+3.8743	1.3289	-5.4026	2.9636	1.0426	-1.6882	0.5271
+0.8322	1.4722	6.1285	3.0091	-1.5825	2.4138	-0.5868
+4.6150	2.2866	-4.7298	1.6730	0.1353	-1.5406	0.7652
+8.7039	6.6432	-2.1439	-3.2018	0.8334	1.6967	-1.1076
+6.5330	0.6754	0.1167	1.6453	2.9371	-0.1888	-2.1854
+8.5000	4.9495	-1.9358	0.5401	1.9684	1.3168	0.1780
+0.8515	1.5139	0.3372	-2.5937	-1.0518	-2.3114	1.0306
+5.3179	6.1706	-3.1955	1.6488	-2.4046	1.8903	1.6645
+2.2969	-1.9570	4.6150	1.9233	-0.6721	-2.5942	1.5362
+-2.0710	3.3407	-0.3858	1.4794	-5.5546	1.0321	0.8251
+7.4623	4.0395	-0.3940	-1.0081	-0.1398	1.6219	-1.8175
+3.2457	-2.7162	5.0956	1.9736	0.5175	-2.9152	0.7246
+6.5881	1.7251	0.7603	-0.1360	1.9830	-2.0959	1.7091
+-0.0582	-3.1106	4.1112	3.1836	0.0464	-1.1565	-1.7583
+0.0513	-0.2091	0.3769	3.7662	-1.0985	-1.4892	1.0789
+6.2977	-1.5534	2.1208	-0.9169	2.2985	-3.2413	0.0615
+-1.4394	-5.9147	-0.1542	-0.8409	0.6219	1.6478	1.5854
+-0.4367	-1.3437	-3.8063	-0.3593	-1.6685	-2.1481	-3.4694
+2.2828	-3.9801	0.3987	-0.3808	2.0643	1.5617	-0.4308
+-0.6420	-3.2305	0.4085	-1.0408	0.0016	-2.2239	0.0187
+1.3416	-5.6774	3.4891	-1.2119	0.0337	-0.3871	0.0198
+-0.1420	-3.2416	-1.2991	0.0960	-0.2367	0.0961	1.4603
+-2.8178	-5.0957	-3.8157	-2.7428	-2.3669	1.3045	1.8838
+-1.3617	-1.8720	-3.0155	-0.5861	-1.5562	-1.9727	-2.8636
+0.2850	-4.3563	-5.1819	1.6696	-0.0587	3.0056	1.6736
+0.3583	-2.5959	-2.0437	3.6266	-1.2475	-0.3279	-0.5564
+0.5513	-0.3854	-4.3652	3.4373	-0.2906	-1.1745	0.2896
+-0.5884	-7.0655	1.7511	-1.6051	-0.4407	1.8331	0.9006
+0.3935	-2.6931	-4.0109	2.5334	0.0102	1.4028	-0.3750
+1.3565	-1.8875	-2.3231	0.5957	1.0166	-0.1657	-2.0777
+3.7613	-3.4500	4.5430	3.8266	1.6193	0.0156	-1.3127
+-1.6208	-3.4157	-4.4769	0.3612	-2.0838	1.1314	-2.2429
+-0.4794	-2.1701	0.9510	-1.3035	0.4743	-2.2567	-1.8181
+2.5691	-6.1575	4.8003	-2.6638	0.7564	-0.1210	0.6823
+-0.2381	-4.0798	-3.3514	1.4065	-0.4906	2.3233	-1.0396
+2.3010	-2.6211	0.2977	-2.3013	0.2922	-1.3277	-0.9939
+-1.5917	-4.6139	-3.1039	-3.1579	0.1047	1.6446	2.6565
+1.4751	-2.4914	-2.8961	0.4489	-0.1023	0.2975	1.6799
+0.7286	-5.8054	3.9412	-2.0106	0.3808	-0.4134	-0.8919
+1.3794	-5.6867	-1.9577	-0.1912	1.6085	3.3771	2.5230
+-1.1581	-3.1117	1.8577	3.4481	-1.4872	-1.6354	-0.7326
+-0.2917	-2.6622	-3.6147	-1.3967	-0.4247	0.6621	-2.0543
+2.4622	-4.3923	4.6318	3.2286	0.8290	-1.3138	-0.7092
+-0.3768	-3.3689	0.9185	3.4446	-0.7548	0.1917	-2.8933
+-1.5571	-1.2636	-4.4107	-0.4740	-0.5614	-1.1182	0.8865
+0.7017	-2.2599	-3.0353	-3.5220	0.7490	0.2524	-1.4167
+-2.0500	-5.2108	-0.5936	2.8702	0.0865	1.7500	1.8663
+3.0137	-5.0641	3.4237	3.1012	0.4598	0.6318	2.2810
+-1.5219	-1.6394	-4.8442	2.7113	-0.5852	1.1910	-1.9068
+-2.3981	-6.0576	-2.8749	-2.3686	-1.0002	2.5079	0.2015
+1.8921	-6.3274	-0.7450	-2.7750	0.8666	2.2501	-0.2021
+1.5814	-6.5836	3.2901	0.9566	1.9993	3.1253	1.3281
+-0.9830	-5.8270	-1.5955	-1.7203	0.1380	2.4005	-0.5153
+0.2115	-3.6162	0.4305	1.7197	0.5603	-0.5445	-0.9444
+-0.9981	-5.0750	-2.8310	-3.8982	-0.8326	0.7471	2.2110
+1.8743	-7.4837	2.1129	1.0955	1.0676	3.9861	1.5601
+-0.2173	-3.4647	3.5740	-0.5041	0.5984	-1.8010	-0.3018
+-0.8708	-3.0747	-5.4424	-0.2924	-0.2948	1.1786	1.4711
+-2.1813	-4.1987	-0.6857	-2.5649	-0.1683	-1.1085	0.0401
+1.0261	-4.8342	-2.5095	-3.2447	0.3524	0.5019	2.4819
+1.3312	-4.7451	1.9789	-1.0770	0.2929	0.0568	-1.0795
+-0.4084	-3.3231	1.7444	-2.2754	0.1706	-2.6516	-2.3484
+0.1271	-3.6357	5.1002	-0.8028	0.8378	-1.9041	-2.1182
+-0.3108	-2.5467	0.0172	0.5997	-0.8420	-2.4078	0.2985
+2.3229	-7.7318	0.5990	1.5900	0.6791	4.9678	0.7111
+2.2490	-5.2254	2.0891	-0.4591	1.4099	1.2195	0.1653
+0.8997	-5.9620	2.1464	-3.1994	0.2412	0.6705	-0.2757
+1.6351	-2.0544	-3.3725	2.7118	-0.5164	0.9859	-0.7674
+-0.3884	-3.7377	-2.0037	-3.7865	-0.8058	-0.1159	0.5169
+-1.2854	-0.6815	-3.7704	-1.2835	-0.8277	-2.8829	0.1127
+2.7532	-3.6849	1.9801	-1.1850	1.8530	-1.4601	0.8846
+1.3367	-3.5217	-4.2259	-1.7081	0.0284	0.6012	-0.8891
+0.2084	-6.3242	3.0772	-2.9331	0.7925	1.0186	-0.2763
+0.4991	-4.0107	-3.9909	-4.2000	0.7601	2.0399	0.2795
+2.0422	-3.9227	1.1896	0.2482	0.1053	-0.3852	-1.3664
+1.3943	-5.9124	0.3596	-2.3189	1.3754	1.5829	1.1913
+0.0278	-3.4528	-0.0648	-0.0516	0.5044	-0.1313	0.8862
+-1.1295	-4.1677	-2.8210	1.7549	0.2425	1.7050	-0.0711
+-0.8205	-2.8645	1.3685	1.3576	-0.0320	-2.0074	0.6672
+-2.3925	-5.9955	-2.2835	-4.1743	-0.4680	2.2192	0.7221
+2.4329	-4.5049	-0.5699	-3.2655	1.5086	0.7033	-0.3623
+0.0935	-5.5951	-2.6666	-0.0202	0.4672	3.1178	0.7036
+2.0608	-3.6542	2.5894	0.5949	0.1042	-1.9610	-2.1251
+1.3668	-4.3150	1.5058	-0.8470	0.3603	0.1046	-1.2965
+-0.7108	-5.7729	0.8471	-2.1552	-0.1474	0.4989	0.8110
+2.0865	-7.5741	2.4725	-3.2358	1.9803	3.5518	1.0200
+-0.7413	-6.5902	-3.3497	-4.0118	-0.7169	2.8734	-0.0154
+1.9231	-4.1669	0.6324	3.6388	1.1516	1.4119	1.9045
+0.9972	-3.3835	-3.8478	0.4535	-0.1122	0.9351	-0.6707
+-1.4850	-3.7556	-0.8861	2.0163	-1.0464	-0.5987	0.6890
+-0.6859	-4.0281	0.3683	3.6759	-1.1975	0.3557	-1.6469
+-1.8927	-1.7090	-2.2045	-2.1649	-0.6862	-2.7007	-2.6949
+-2.6846	-3.5555	-3.8040	-2.2653	-1.5920	-1.2491	0.6841
+0.3020	-5.6926	2.2922	-1.7143	1.0921	0.3534	-0.3102
+0.6378	0.0965	-3.4219	1.3310	0.4472	-2.1190	-0.2407
+-1.9094	-4.6694	-5.0032	-0.7066	-0.5111	2.2129	0.1366
+-0.5069	-7.3869	2.1287	-3.7626	-0.0722	1.8540	3.3739
+-0.8111	-2.1449	-2.0660	1.3755	-1.4613	-1.8106	-0.2669
+-2.1210	-3.7767	-1.4218	-1.0966	-0.4062	0.3971	-0.5842
+-1.0161	-5.3783	-5.2064	-4.0163	-0.1682	2.9910	0.1047
+-1.0107	-5.1148	-0.6561	0.8090	-1.0136	0.7724	2.2620
+0.3727	-5.6904	3.2177	3.2668	1.1544	0.6645	2.2839
+1.8337	-4.6784	1.9878	1.5645	0.4593	-0.2257	1.9580
+-0.1967	-4.0023	-0.2283	-1.0764	0.7112	-0.1925	-1.3203
+0.2941	-5.0048	-2.0272	2.6678	-0.5445	2.7349	1.9569
+2.5823	-7.5935	2.0663	0.1052	1.8888	3.5327	2.8774
+-2.5822	-2.1414	-2.1563	3.1156	-2.0672	-0.7239	-2.1677
+-1.2405	-3.6815	-2.5594	1.7052	-0.7032	1.3623	1.5195
+-2.8369	-5.1205	-4.5254	-0.1156	-0.6598	4.1633	0.1587
+0.3149	-6.2694	-2.0587	-0.3757	0.1556	3.2380	2.4606
+2.0357	-3.8799	4.1492	4.3973	-0.1287	-0.6609	-2.9551
+1.4539	-5.0928	-1.1201	1.5114	1.4226	3.0672	1.1131
+0.0900	-4.8785	0.4034	1.7989	1.0459	2.5805	-0.6281
+0.0903	-5.5698	2.3322	-1.8054	-0.3871	-1.4925	1.4869
+-3.2787	-0.9351	-3.4811	2.8220	-2.2307	-1.9613	-0.5324
+-0.8901	-3.3226	-2.1998	-3.3552	-0.2246	-0.9037	0.3637
diff -r daece0f27108 -r fe627c026dc6 test-data/pca_incremental_output.dat
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pca_incremental_output.dat	Tue Apr 13 20:44:53 2021 +0000
@@ -0,0 +1,300 @@
+-4.5041	1.2787	-0.9244	-0.6417	2.4742	-0.6790	-2.2376
+-2.3058	0.3190	2.0467	-1.5089	-1.7879	-2.8580	2.3471
+-3.6298	1.9778	0.5695	3.2844	-2.1532	1.0649	0.2268
+-4.9371	1.9948	-1.1517	-0.7883	0.1443	0.3281	-1.6340
+-5.1251	2.1148	1.0578	-1.2113	2.7850	-0.4718	-0.5876
+-5.2311	3.1236	2.4521	-0.1645	2.0950	1.3657	-1.2221
+-5.2736	3.9401	3.1613	-0.4221	1.4421	0.6351	1.4170
+-5.8843	2.3108	-0.9970	-2.5925	4.5634	-1.7216	-0.6943
+-3.0847	0.9348	1.2297	-0.1673	-1.5983	-0.3054	-0.8598
+-5.0726	2.7358	2.1948	0.6672	2.8109	0.2199	0.2954
+-3.0489	-1.4621	-1.4039	2.2051	-2.1125	-3.5712	1.8233
+-3.9828	0.4789	-0.4798	3.1414	-0.2706	1.0323	-3.0673
+-4.7769	0.5637	-0.6189	2.4042	1.8522	-0.8892	0.1666
+-2.8177	-0.2411	1.1106	-1.6952	-0.7266	-2.7600	-0.0377
+-3.3395	0.6116	-2.4104	0.4482	-1.2702	-2.2185	1.2082
+-3.7895	1.3834	1.9512	0.4855	1.2103	0.2934	-1.2998
+-5.6850	2.9597	-0.6627	-1.3921	1.9849	-0.8803	0.8756
+-2.3684	1.9853	2.1848	1.9063	-1.9759	2.9765	-2.6155
+-2.7658	1.1415	-1.5911	-2.5974	-4.2491	-2.4650	1.0467
+-3.4687	2.6724	0.7399	-2.5064	-1.2477	1.3807	-2.8392
+-4.1960	1.4928	1.5341	3.2301	1.6231	0.5496	-0.1931
+-6.8974	5.6555	-0.4728	-0.2712	1.6418	2.8355	-1.2524
+-5.4610	5.7033	1.6135	-2.4726	-0.3671	0.4024	2.5259
+-3.6199	0.4197	-3.3176	0.7440	-0.9565	-3.1444	1.3050
+-6.9755	6.9895	0.8969	-1.2372	2.7908	1.0709	2.8530
+-2.4744	-2.4406	-1.2029	1.7749	-1.8143	-2.5760	-1.4119
+-2.2670	1.4244	0.3150	-0.3939	-4.8261	0.5653	-1.5405
+-1.2997	1.8646	1.6713	0.9233	-3.9371	1.9971	-2.1376
+-2.9030	2.7832	2.8769	-2.1591	-0.9938	-1.4089	2.2805
+-4.0193	-0.7911	-2.3511	-2.6334	0.8198	-3.1752	-1.7134
+-4.6438	5.1543	2.9044	-3.5749	1.3863	1.1820	0.2145
+-3.1379	-0.3608	-3.4466	2.6176	-2.5651	-3.1252	2.2431
+-3.3334	0.1605	-2.3825	-0.8492	-0.2283	-0.9548	-3.0823
+-1.8428	-0.3384	-0.2287	1.0854	-2.4548	0.2911	-3.1773
+-5.7377	2.9593	-1.3065	2.8244	2.0424	0.2718	0.8933
+-7.5818	1.9024	-2.0175	1.7637	5.6534	-0.4313	-0.7429
+-6.7842	3.2537	-0.5172	1.7928	4.5232	1.0479	-0.2527
+-2.6542	4.2982	-0.6507	-0.7068	-6.0759	0.4216	2.2052
+-4.7066	-0.0723	0.5610	2.7188	3.2642	-1.3372	-0.2390
+-3.1046	-0.4176	-2.2528	3.1483	-2.1742	-0.7089	-0.9199
+-5.1411	2.5961	1.4787	2.5253	1.9749	2.6935	-2.1514
+-3.0358	-0.4673	-3.0178	-0.1183	-2.0737	-1.9430	-1.7089
+-4.8013	3.3868	2.2209	-2.8917	1.9785	1.7562	-2.4000
+-4.8966	1.5345	-1.9107	1.6206	1.0586	-2.4528	2.2876
+-6.4597	2.5302	-0.0772	3.0385	5.9297	0.5882	-0.3289
+-5.9218	3.7935	-0.2556	3.3005	2.0236	1.1245	1.1257
+-2.6684	3.1373	-0.3467	-3.5191	-5.6729	-0.1143	-0.0563
+-6.6356	5.9313	2.3230	-4.2617	4.5651	2.7706	-2.2094
+-5.6003	4.9145	-0.0899	0.9846	1.1772	1.7796	0.3640
+-7.1334	4.7398	-0.3982	-1.1846	4.2620	0.3247	0.9009
+-3.7706	-0.4166	-2.7645	3.4809	0.1927	-2.6134	1.7933
+-2.5829	3.6848	2.0472	0.9575	-3.0212	3.5198	-2.3061
+-5.9636	4.7293	1.7384	-3.3395	2.2166	2.2397	-1.9459
+-5.6726	0.3054	0.5239	-1.8098	5.0980	-2.8714	0.6939
+-1.6695	0.2400	-0.7679	2.4374	-3.4909	1.2909	-3.0332
+-5.3286	2.8034	2.9264	-1.9200	3.2485	-0.4062	0.7878
+-4.1365	1.8401	2.8304	-2.6400	1.8543	-0.6305	-0.9748
+-7.3517	2.9113	-0.3355	-1.6157	4.9779	-1.2069	1.0193
+-4.1967	-0.9651	-1.0766	-0.8147	1.4782	-2.8124	-0.7958
+-3.3859	1.1867	0.7010	1.8196	0.6206	1.2032	-2.4485
+-4.8094	3.7086	-1.5026	-1.1737	-0.4332	-1.3418	2.0650
+-3.6897	4.9698	1.8855	1.2100	-2.0871	1.2348	3.0630
+-4.6351	1.0548	0.6174	-1.1733	1.8384	-1.7480	-0.2812
+-2.5884	2.4750	0.7398	1.9273	-3.9507	1.3361	0.0575
+-4.9906	4.0057	3.2665	-2.0939	2.9379	0.7270	0.5404
+-3.2473	1.8445	0.2434	-0.5658	-1.9457	-2.2797	2.6466
+-2.3835	3.6974	0.5424	0.1868	-5.9273	1.4366	0.1764
+-4.4944	4.1873	2.5387	2.3093	0.7909	2.4807	0.3451
+-4.0672	3.1357	0.7866	0.0835	-2.6209	-0.6464	2.6246
+-7.7257	2.4335	-0.7824	4.0495	6.3356	-0.7156	2.2593
+-7.7345	3.3520	-3.0476	3.4232	5.3875	-0.3854	2.2324
+-2.5709	1.6372	2.0053	0.0627	-1.1963	-1.7860	2.6362
+-1.4913	0.2742	2.0702	-1.8860	-3.6079	-3.4077	2.9305
+-3.8738	0.4747	-0.3085	0.2335	-0.4071	0.0756	-3.0254
+-4.1544	0.8081	-1.6855	-1.7544	-1.6783	-3.7309	2.5241
+-4.8867	4.3285	0.0026	0.5021	0.3675	2.2662	-0.5320
+-3.4362	1.1688	1.6552	-1.2709	-0.8309	0.0186	-2.1603
+-5.7805	1.1703	-0.7154	-1.9988	3.8637	-1.0734	-2.3152
+-6.2709	5.2044	-1.2410	2.2091	1.0452	3.1747	-1.3561
+-5.3901	3.6522	1.9170	0.3815	1.0644	0.8535	1.5809
+-2.3241	1.7176	0.5907	0.3335	-2.6000	-0.2806	0.7917
+-6.0218	6.1509	3.7438	-0.9355	2.0688	1.9320	2.2931
+-3.8865	-1.2257	-2.1561	2.3774	-0.2732	-2.2308	-0.1288
+-1.8827	1.0374	1.1856	0.3511	-3.8963	-0.1608	0.1192
+-6.2846	5.1593	2.3278	-0.6188	4.0817	1.5186	0.6472
+-5.2141	5.7113	2.8258	-0.2090	1.7114	2.6014	0.9906
+-2.5811	2.8134	1.9097	-2.6507	-2.8032	-0.9899	1.3097
+-3.0815	-0.4578	-3.1189	2.2872	-2.0861	-0.2196	-2.7690
+-6.1254	5.2751	-1.0343	-1.0472	-0.6193	-0.2364	2.9693
+-2.8232	1.3373	0.5949	-0.7927	-1.7707	-2.4858	2.1362
+-3.4122	2.3597	1.8698	0.2527	-1.5604	0.3673	0.1838
+-6.4323	1.4615	-2.8697	1.1520	4.9814	-0.7643	-1.1231
+-4.0745	1.7887	0.4200	0.5898	-0.4801	1.1216	-1.8534
+-3.2629	1.1370	1.8248	-1.1693	-0.5124	-2.8929	3.1263
+-2.0028	2.5104	3.1459	-2.4141	-3.8226	0.7029	-1.0988
+-2.7361	3.6728	2.8194	-0.5496	-3.3320	1.9027	-0.7531
+-3.5250	0.4456	0.3977	0.7169	0.7105	-0.6279	-1.2819
+-3.9791	3.6674	1.5133	0.6258	-0.9034	1.2040	0.3977
+-6.2804	5.0438	0.4767	-2.3433	3.3174	1.0917	-0.0308
+-3.7420	0.1593	-0.3654	-2.0204	1.6037	-1.1253	-3.2746
+9.5151	3.7062	3.8697	1.3868	0.2586	1.7338	1.0834
+8.5812	6.2908	-4.1399	-1.4445	-0.4607	1.1125	0.1729
+7.6440	3.5377	-4.4023	0.3615	2.5846	-1.2183	1.0357
+2.5921	3.1502	4.9758	-2.6982	-2.6638	0.6784	0.1647
+4.4183	5.1293	-1.9198	-1.9122	-2.0053	1.0818	-0.1255
+11.5799	3.0356	0.0777	3.1485	2.6135	1.7393	-1.2853
+10.6151	4.2457	2.7079	-2.5390	2.7172	0.4983	0.1686
+6.5518	1.4131	-4.3284	3.0582	2.4753	-1.4287	0.0280
+0.6110	2.7018	4.8455	1.9160	-2.9669	1.7329	1.0903
+8.8862	6.7098	-1.3650	0.0695	0.2906	3.0926	-0.2188
+8.3516	5.1600	-3.1937	-2.1437	0.9068	0.1059	-0.0776
+6.5749	1.0591	1.0860	-2.3285	0.6754	-1.9909	0.3649
+-1.3536	0.0963	-1.1881	-3.1040	-2.4649	-1.7361	-2.2365
+5.7554	2.5612	-4.7030	2.6323	1.4049	-0.6845	0.5814
+-2.5682	4.3850	-1.4615	1.9775	-4.6265	1.8620	0.5593
+1.5869	3.3199	3.0094	0.0874	-1.9313	1.4984	-0.1249
+0.0019	-2.7678	2.7422	0.3182	-0.7239	-2.9330	-0.4725
+2.3421	-2.9454	3.1008	3.8667	0.3177	-1.2439	-1.8251
+1.1281	1.5666	0.9091	-2.7407	-2.1087	-2.0198	0.9724
+1.5235	0.8024	-4.4624	1.8661	-0.6945	-1.5766	-0.8688
+7.4384	-0.9510	2.5739	0.7133	3.4514	-3.3296	1.7562
+1.9243	0.6118	4.2924	0.0358	-2.5054	0.8895	-1.6169
+4.9183	2.4368	1.8274	-0.6084	-0.2458	-0.1625	0.0769
+6.5999	2.8745	3.4413	-1.3241	0.8891	0.3147	0.4983
+10.2333	-0.1155	0.1643	4.0222	3.8264	-1.4876	0.6248
+6.7711	6.9607	-4.0865	0.3893	0.0512	1.4734	1.8907
+2.8361	-0.5592	-2.3155	0.2024	1.3347	-2.3676	-1.8030
+3.6792	4.2825	-2.5657	0.8910	-1.6217	0.2821	1.5377
+1.1960	-1.2069	-0.3808	2.9584	0.2167	-2.9221	1.3256
+2.5793	-0.1009	5.5918	0.3863	0.2953	-0.1268	-1.0559
+-2.5483	1.9288	0.6106	1.7118	-2.9326	0.0715	0.4489
+1.0182	2.3331	5.2430	-2.6561	-3.2526	-0.6230	1.7928
+6.2254	2.0926	-3.7485	2.8778	2.7909	-1.7703	1.8600
+7.7947	3.9599	0.3899	2.8628	1.3342	1.5191	0.8549
+5.9341	1.6811	5.0814	-1.7636	0.2261	0.0923	-0.4983
+8.9422	5.1988	-5.1226	-1.4216	1.8601	-0.2483	0.0129
+3.7559	0.7134	0.5774	2.1676	-0.3686	-0.1396	-0.9686
+1.2647	0.2938	-1.6141	-1.4450	-0.8494	-1.4320	-2.0799
+0.8697	-2.3541	4.8819	1.1583	-1.0850	-3.0040	1.1390
+-2.4243	2.2533	0.6132	-0.4352	-4.2540	0.4717	-0.2835
+0.8691	0.8276	5.7945	3.0405	-3.0247	0.7777	0.7993
+-0.0306	2.1994	-1.3816	-2.7338	-1.3169	-1.7597	0.1966
+6.8891	3.7843	-1.1987	-0.7267	-0.5589	1.1499	-1.3620
+4.9555	4.3737	3.2795	-2.9743	-0.9193	1.2818	-0.3394
+4.6003	4.9106	-4.1796	1.8274	-0.5694	0.3444	1.7707
+6.5173	6.8499	-4.3867	0.6973	-1.0706	2.3430	0.0319
+5.9594	4.7200	-3.3220	1.7297	0.5827	1.8873	-0.7937
+4.8011	1.7963	2.2601	-0.6781	-1.1054	-0.4366	0.0862
+9.4941	3.2988	4.8488	-1.5745	1.4571	0.8634	0.3784
+5.9275	5.0975	-1.9945	-1.7249	-1.7025	-0.2676	2.1382
+2.2637	2.3526	5.0792	2.9659	-2.8853	2.6294	-0.5906
+2.1362	-0.5662	-2.8698	-1.5406	-0.6178	-2.4869	-1.9779
+3.4238	1.6207	-2.0697	0.5927	0.0162	0.5742	-2.6645
+4.6129	4.0668	-0.4489	-0.0917	-1.6317	0.5627	1.0477
+5.6344	6.3936	-2.3886	-2.1122	-2.4168	1.2380	0.6516
+0.6899	-0.0045	2.5945	2.0729	-1.5520	0.0329	-0.7707
+2.1954	3.4315	-1.1342	3.2093	-1.9723	1.5515	0.1762
+-0.2728	-1.8801	0.5443	2.9489	-0.9169	-2.2882	-0.0039
+2.2464	1.6663	-1.2627	2.9359	-0.3243	-0.8919	1.4302
+7.8136	5.7358	-0.3169	-2.0148	0.3495	0.3957	1.5594
+7.2521	2.9262	-0.9792	-1.4871	0.7949	0.4403	-1.6491
+1.7315	1.7501	-0.2721	-1.8382	-0.2053	-0.5254	-1.7868
+4.5382	0.9630	5.3634	-1.6540	-1.0877	0.7456	-1.9428
+8.4922	4.3959	-4.0657	-1.0685	2.0394	0.3027	-1.5177
+5.8208	2.8653	-2.3174	3.0490	0.3595	-0.5681	1.6879
+-1.5657	3.4389	-0.8766	-0.7644	-5.0871	0.1973	0.8505
+2.9262	3.2748	-2.6100	-3.4518	-2.3979	-0.1388	-2.1841
+1.5202	0.3470	6.4457	-0.8811	-1.3563	-0.4038	0.2652
+0.2160	-0.8406	-1.2438	-1.9606	-1.8069	-2.7155	-1.5833
+9.9773	3.8447	-3.0340	-2.7374	2.8639	-1.6783	0.8315
+6.8182	0.6939	3.8276	0.0235	1.1482	-1.6252	1.8372
+0.4703	2.0718	-0.5704	0.1399	-2.3166	0.1054	-1.0614
+7.0703	-1.1055	1.8762	0.1576	2.2093	-2.5649	-0.3973
+-0.4376	-3.2665	3.1890	3.8191	-0.7461	-1.6104	-0.9830
+5.6321	2.2267	5.3601	2.1586	-0.6273	1.4508	0.5460
+4.3783	3.6338	1.5671	-0.9206	-1.8438	-0.2069	1.8630
+6.6664	2.3012	-4.4266	-2.9484	2.7377	-2.3245	-0.9019
+-0.7160	-1.9051	0.3750	1.8277	0.4336	-2.9009	0.0458
+6.8161	0.6358	-1.6462	-2.7192	2.7982	-1.8084	-2.3482
+3.6202	0.2021	2.5073	-2.1632	0.6444	-2.1642	-0.4466
+7.9950	2.4238	0.6365	-1.3418	2.4997	-0.0271	-1.1315
+6.3963	-1.4724	3.6385	2.0221	1.4533	-2.2412	0.4731
+5.1654	-0.2758	3.2312	0.1821	0.6037	-0.1159	-2.1626
+1.0380	-2.7407	5.4789	-0.6575	-1.1424	-2.0608	-1.9043
+3.8743	1.3289	-5.4026	2.9636	1.0426	-1.6882	0.5271
+0.8322	1.4722	6.1285	3.0091	-1.5825	2.4138	-0.5868
+4.6150	2.2866	-4.7298	1.6730	0.1353	-1.5406	0.7652
+8.7039	6.6432	-2.1439	-3.2018	0.8334	1.6967	-1.1076
+6.5330	0.6754	0.1167	1.6453	2.9371	-0.1888	-2.1854
+8.5000	4.9495	-1.9358	0.5401	1.9684	1.3168	0.1780
+0.8515	1.5139	0.3372	-2.5937	-1.0518	-2.3114	1.0306
+5.3179	6.1706	-3.1955	1.6488	-2.4046	1.8903	1.6645
+2.2969	-1.9570	4.6150	1.9233	-0.6721	-2.5942	1.5362
+-2.0710	3.3407	-0.3858	1.4794	-5.5546	1.0321	0.8251
+7.4623	4.0395	-0.3940	-1.0081	-0.1398	1.6219	-1.8175
+3.2457	-2.7162	5.0956	1.9736	0.5175	-2.9152	0.7246
+6.5881	1.7251	0.7603	-0.1360	1.9830	-2.0959	1.7091
+-0.0582	-3.1106	4.1112	3.1836	0.0464	-1.1565	-1.7583
+0.0513	-0.2091	0.3769	3.7662	-1.0985	-1.4892	1.0789
+6.2977	-1.5534	2.1208	-0.9169	2.2985	-3.2413	0.0615
+-1.4394	-5.9147	-0.1542	-0.8409	0.6219	1.6478	1.5854
+-0.4367	-1.3437	-3.8063	-0.3593	-1.6685	-2.1481	-3.4694
+2.2828	-3.9801	0.3987	-0.3808	2.0643	1.5617	-0.4308
+-0.6420	-3.2305	0.4085	-1.0408	0.0016	-2.2239	0.0187
+1.3416	-5.6774	3.4891	-1.2119	0.0337	-0.3871	0.0198
+-0.1420	-3.2416	-1.2991	0.0960	-0.2367	0.0961	1.4603
+-2.8178	-5.0957	-3.8157	-2.7428	-2.3669	1.3045	1.8838
+-1.3617	-1.8720	-3.0155	-0.5861	-1.5562	-1.9727	-2.8636
+0.2850	-4.3563	-5.1819	1.6696	-0.0587	3.0056	1.6736
+0.3583	-2.5959	-2.0437	3.6266	-1.2475	-0.3279	-0.5564
+0.5513	-0.3854	-4.3652	3.4373	-0.2906	-1.1745	0.2896
+-0.5884	-7.0655	1.7511	-1.6051	-0.4407	1.8331	0.9006
+0.3935	-2.6931	-4.0109	2.5334	0.0102	1.4028	-0.3750
+1.3565	-1.8875	-2.3231	0.5957	1.0166	-0.1657	-2.0777
+3.7613	-3.4500	4.5430	3.8266	1.6193	0.0156	-1.3127
+-1.6208	-3.4157	-4.4769	0.3612	-2.0838	1.1314	-2.2429
+-0.4794	-2.1701	0.9510	-1.3035	0.4743	-2.2567	-1.8181
+2.5691	-6.1575	4.8003	-2.6638	0.7564	-0.1210	0.6823
+-0.2381	-4.0798	-3.3514	1.4065	-0.4906	2.3233	-1.0396
+2.3010	-2.6211	0.2977	-2.3013	0.2922	-1.3277	-0.9939
+-1.5917	-4.6139	-3.1039	-3.1579	0.1047	1.6446	2.6565
+1.4751	-2.4914	-2.8961	0.4489	-0.1023	0.2975	1.6799
+0.7286	-5.8054	3.9412	-2.0106	0.3808	-0.4134	-0.8919
+1.3794	-5.6867	-1.9577	-0.1912	1.6085	3.3771	2.5230
+-1.1581	-3.1117	1.8577	3.4481	-1.4872	-1.6354	-0.7326
+-0.2917	-2.6622	-3.6147	-1.3967	-0.4247	0.6621	-2.0543
+2.4622	-4.3923	4.6318	3.2286	0.8290	-1.3138	-0.7092
+-0.3768	-3.3689	0.9185	3.4446	-0.7548	0.1917	-2.8933
+-1.5571	-1.2636	-4.4107	-0.4740	-0.5614	-1.1182	0.8865
+0.7017	-2.2599	-3.0353	-3.5220	0.7490	0.2524	-1.4167
+-2.0500	-5.2108	-0.5936	2.8702	0.0865	1.7500	1.8663
+3.0137	-5.0641	3.4237	3.1012	0.4598	0.6318	2.2810
+-1.5219	-1.6394	-4.8442	2.7113	-0.5852	1.1910	-1.9068
+-2.3981	-6.0576	-2.8749	-2.3686	-1.0002	2.5079	0.2015
+1.8921	-6.3274	-0.7450	-2.7750	0.8666	2.2501	-0.2021
+1.5814	-6.5836	3.2901	0.9566	1.9993	3.1253	1.3281
+-0.9830	-5.8270	-1.5955	-1.7203	0.1380	2.4005	-0.5153
+0.2115	-3.6162	0.4305	1.7197	0.5603	-0.5445	-0.9444
+-0.9981	-5.0750	-2.8310	-3.8982	-0.8326	0.7471	2.2110
+1.8743	-7.4837	2.1129	1.0955	1.0676	3.9861	1.5601
+-0.2173	-3.4647	3.5740	-0.5041	0.5984	-1.8010	-0.3018
+-0.8708	-3.0747	-5.4424	-0.2924	-0.2948	1.1786	1.4711
+-2.1813	-4.1987	-0.6857	-2.5649	-0.1683	-1.1085	0.0401
+1.0261	-4.8342	-2.5095	-3.2447	0.3524	0.5019	2.4819
+1.3312	-4.7451	1.9789	-1.0770	0.2929	0.0568	-1.0795
+-0.4084	-3.3231	1.7444	-2.2754	0.1706	-2.6516	-2.3484
+0.1271	-3.6357	5.1002	-0.8028	0.8378	-1.9041	-2.1182
+-0.3108	-2.5467	0.0172	0.5997	-0.8420	-2.4078	0.2985
+2.3229	-7.7318	0.5990	1.5900	0.6791	4.9678	0.7111
+2.2490	-5.2254	2.0891	-0.4591	1.4099	1.2195	0.1653
+0.8997	-5.9620	2.1464	-3.1994	0.2412	0.6705	-0.2757
+1.6351	-2.0544	-3.3725	2.7118	-0.5164	0.9859	-0.7674
+-0.3884	-3.7377	-2.0037	-3.7865	-0.8058	-0.1159	0.5169
+-1.2854	-0.6815	-3.7704	-1.2835	-0.8277	-2.8829	0.1127
+2.7532	-3.6849	1.9801	-1.1850	1.8530	-1.4601	0.8846
+1.3367	-3.5217	-4.2259	-1.7081	0.0284	0.6012	-0.8891
+0.2084	-6.3242	3.0772	-2.9331	0.7925	1.0186	-0.2763
+0.4991	-4.0107	-3.9909	-4.2000	0.7601	2.0399	0.2795
+2.0422	-3.9227	1.1896	0.2482	0.1053	-0.3852	-1.3664
+1.3943	-5.9124	0.3596	-2.3189	1.3754	1.5829	1.1913
+0.0278	-3.4528	-0.0648	-0.0516	0.5044	-0.1313	0.8862
+-1.1295	-4.1677	-2.8210	1.7549	0.2425	1.7050	-0.0711
+-0.8205	-2.8645	1.3685	1.3576	-0.0320	-2.0074	0.6672
+-2.3925	-5.9955	-2.2835	-4.1743	-0.4680	2.2192	0.7221
+2.4329	-4.5049	-0.5699	-3.2655	1.5086	0.7033	-0.3623
+0.0935	-5.5951	-2.6666	-0.0202	0.4672	3.1178	0.7036
+2.0608	-3.6542	2.5894	0.5949	0.1042	-1.9610	-2.1251
+1.3668	-4.3150	1.5058	-0.8470	0.3603	0.1046	-1.2965
+-0.7108	-5.7729	0.8471	-2.1552	-0.1474	0.4989	0.8110
+2.0865	-7.5741	2.4725	-3.2358	1.9803	3.5518	1.0200
+-0.7413	-6.5902	-3.3497	-4.0118	-0.7169	2.8734	-0.0154
+1.9231	-4.1669	0.6324	3.6388	1.1516	1.4119	1.9045
+0.9972	-3.3835	-3.8478	0.4535	-0.1122	0.9351	-0.6707
+-1.4850	-3.7556	-0.8861	2.0163	-1.0464	-0.5987	0.6890
+-0.6859	-4.0281	0.3683	3.6759	-1.1975	0.3557	-1.6469
+-1.8927	-1.7090	-2.2045	-2.1649	-0.6862	-2.7007	-2.6949
+-2.6846	-3.5555	-3.8040	-2.2653	-1.5920	-1.2491	0.6841
+0.3020	-5.6926	2.2922	-1.7143	1.0921	0.3534	-0.3102
+0.6378	0.0965	-3.4219	1.3310	0.4472	-2.1190	-0.2407
+-1.9094	-4.6694	-5.0032	-0.7066	-0.5111	2.2129	0.1366
+-0.5069	-7.3869	2.1287	-3.7626	-0.0722	1.8540	3.3739
+-0.8111	-2.1449	-2.0660	1.3755	-1.4613	-1.8106	-0.2669
+-2.1210	-3.7767	-1.4218	-1.0966	-0.4062	0.3971	-0.5842
+-1.0161	-5.3783	-5.2064	-4.0163	-0.1682	2.9910	0.1047
+-1.0107	-5.1148	-0.6561	0.8090	-1.0136	0.7724	2.2620
+0.3727	-5.6904	3.2177	3.2668	1.1544	0.6645	2.2839
+1.8337	-4.6784	1.9878	1.5645	0.4593	-0.2257	1.9580
+-0.1967	-4.0023	-0.2283	-1.0764	0.7112	-0.1925	-1.3203
+0.2941	-5.0048	-2.0272	2.6678	-0.5445	2.7349	1.9569
+2.5823	-7.5935	2.0663	0.1052	1.8888	3.5327	2.8774
+-2.5822	-2.1414	-2.1563	3.1156	-2.0672	-0.7239	-2.1677
+-1.2405	-3.6815	-2.5594	1.7052	-0.7032	1.3623	1.5195
+-2.8369	-5.1205	-4.5254	-0.1156	-0.6598	4.1633	0.1587
+0.3149	-6.2694	-2.0587	-0.3757	0.1556	3.2380	2.4606
+2.0357	-3.8799	4.1492	4.3973	-0.1287	-0.6609	-2.9551
+1.4539	-5.0928	-1.1201	1.5114	1.4226	3.0672	1.1131
+0.0900	-4.8785	0.4034	1.7989	1.0459	2.5805	-0.6281
+0.0903	-5.5698	2.3322	-1.8054	-0.3871	-1.4925	1.4869
+-3.2787	-0.9351	-3.4811	2.8220	-2.2307	-1.9613	-0.5324
+-0.8901	-3.3226	-2.1998	-3.3552	-0.2246	-0.9037	0.3637
diff -r daece0f27108 -r fe627c026dc6 test-data/pca_input.dat
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pca_input.dat	Tue Apr 13 20:44:53 2021 +0000
@@ -0,0 +1,300 @@
+9.579515262220434924e-01	3.726315038026500881e-02	1.192922318396479886e+00	2.208390853256675612e+00	1.545465138214503487e+00	6.587709093437858598e+00	2.676346016926679283e+00	2.002623182927643519e+00	1.960111203222609788e+00	1.244000891424809074e-01
+5.456295681254983432e-01	3.130646354641937745e-01	6.007098680445335681e+00	3.644816120915847169e+00	1.043692024581632793e+00	1.970255599020762194e+00	7.768759398827200791e-01	6.435887304171391543e-01	4.933655623778216537e+00	5.543893932919085055e-01
+9.341619128268041639e-02	2.558875163847860179e-01	3.752555601842988420e+00	5.559464534602225783e+00	1.010576057771932490e+00	3.077425253345768663e+00	6.807691255479467962e+00	2.571623094724099445e+00	3.153422029415700933e+00	3.550137612924495478e-01
+1.471186662367064413e-01	4.878675188546680030e-01	1.721359340060558907e+00	4.167324499252111458e+00	1.985766726491912326e+00	5.316541920919662267e+00	2.962330081397085202e+00	6.305674360296442682e-02	1.008632361074792705e+00	1.954028793571292999e-01
+4.517113041288302044e-01	2.082857695479499172e-01	2.670306166999409037e+00	3.583860843014433861e+00	3.162463432948562581e+00	7.681858694197329029e+00	1.942922465406477395e+00	1.098113345375552274e+00	3.412374063138972069e+00	4.177369511025994520e-01
+7.303789721527453871e-02	1.033872090825091461e-01	1.795276419298532655e+00	5.719768891948289635e+00	2.977579113876869243e+00	7.844204854679616368e+00	3.384744044988120759e+00	2.998214211517472805e+00	4.270749911114063657e+00	4.257092154152972707e-01
+6.359175475607969918e-02	1.242103944058722265e-01	4.586547825302377923e+00	6.614372405184473891e+00	1.923730124171568079e+00	7.681919225138223339e+00	2.793831391806054665e+00	1.406817959154060160e+00	4.716223047352296582e+00	6.211888780251716424e-01
+9.886982415066914998e-01	4.584180816802013725e-02	2.793304094637673707e+00	1.871823089629541581e+00	2.607924236602056745e+00	9.206004712207825236e+00	4.623226894276893928e-01	2.966776892612695615e+00	1.485603429563761679e+00	4.654181765444357355e-01
+1.741783097973695904e-01	2.446343910749277373e-01	2.534641383481000876e+00	4.584056834431557093e+00	2.890133078107056441e+00	2.823965747627366518e+00	3.034991597984873835e+00	6.600338090755860643e-01	3.752675891901568583e+00	2.970984388064664694e-01
+4.690204907875814566e-01	2.929759132721354575e-02	3.548495454913231484e+00	4.482718753445549709e+00	3.203674397180960920e+00	8.065801814409903514e+00	3.773297073513870004e+00	2.113193009064737282e+00	4.579511767735440664e+00	4.024397631591818403e-01
+6.941135206286030979e-03	2.133769823942091026e-01	6.135694255460425239e+00	5.395845311332483352e-01	1.504596129252289138e-01	9.253872174123642935e-01	4.483882842463830620e+00	2.321382684831085008e+00	2.576937740611303873e+00	1.109827593740932983e-01
+2.746007390787119640e-03	6.638140727681796083e-02	4.108407001279307247e-01	3.212637467080699416e+00	4.920639575099698959e-01	4.018583101004429281e+00	6.796866753550522056e+00	1.316040638035593568e+00	2.620935479341985896e+00	2.976887894006831070e-01
+3.557722260573822348e-01	3.727028444011896702e-01	3.811343797909652054e+00	1.715026071489426762e+00	5.294113011251582179e-01	5.980858755297242979e+00	5.404241675653790544e+00	1.825392885196229997e+00	2.835734218533411788e+00	3.200816860194626301e-01
+3.642510923301112147e-01	7.309428690756680780e-03	3.666945761684001326e+00	2.430979500086832612e+00	3.031996394197797429e+00	2.708093900045675184e+00	7.623775896209878944e-01	7.865319376558289610e-01	4.100162854521766320e+00	8.307551984431076342e-01
+9.927215581748555229e-01	4.537144790675278760e-01	5.145060290158723681e+00	2.151991198713361086e+00	4.862387339994040936e-01	2.589672936803951053e+00	3.398039583724480561e+00	2.809787771375323651e+00	8.864381880832911120e-01	3.331137683083518208e-01
+5.057807499542814611e-01	1.402717130943404999e-01	1.883175574051066725e+00	4.122193241585520695e+00	1.035989381539633492e+00	5.670773882751129591e+00	3.687442345139384958e+00	1.684532121504235480e+00	4.642108569673514928e+00	9.305441677815040613e-01
+5.042353053817258957e-01	3.547819755513703099e-01	4.381967339967321351e+00	3.851122946472437469e+00	5.426308260180099374e-01	7.445193485365448893e+00	1.869875173163507931e+00	8.437445017751898924e-03	1.444658255941634994e+00	1.636723834558136970e-01
+6.808167520848328325e-01	4.889279161325942868e-01	1.800574817853715093e-01	6.987701665160759745e+00	2.781923379343957503e+00	2.993730387785709102e+00	5.946279606388386263e+00	9.945453378009785350e-01	4.532192935084170315e+00	8.927165058296504530e-01
+3.935353669966316081e-01	3.534471776807462939e-01	4.974393735263212157e+00	4.310249343469166661e+00	5.149149535110453257e-01	2.179395632453644538e-01	4.312162502099264882e-01	1.470934059506852742e+00	6.326429352635798420e-01	7.652243996922046820e-01
+5.817478561516450375e-01	3.650895867034674724e-01	1.106136061048209607e-01	6.499764055829857945e+00	2.050813058661709487e+00	4.066454208593111019e+00	1.490387097633062208e+00	1.061815166735420313e+00	2.302831003165218604e+00	2.174825207514023973e-01
+5.048471030130237125e-01	1.082304090000890673e-01	3.175827458076348364e+00	3.693216070028284115e+00	3.700472412642747955e+00	6.197372734372899927e+00	6.374064687621854297e+00	2.768340513420041837e+00	4.661995130609337679e+00	8.584541060279672342e-01
+9.485485718647990172e-02	2.363243089061691227e-01	1.837943546562954555e+00	7.414016988668096886e+00	1.058065255181876108e+00	9.078408743915952428e+00	4.183782154307690959e+00	1.516463707584718712e+00	3.901861889845664733e-01	2.074547211064202701e-01
+3.134106819658126764e-01	1.286339499547863308e-01	5.864729791423263450e+00	8.300357839112765035e+00	1.438336911915669702e+00	6.851092469972995147e+00	1.034029113386152776e+00	1.494799843839271336e+00	2.277085020013195127e+00	3.018026684870551701e-01
+9.974747480148854484e-01	7.417608355979210533e-02	5.576900855119989231e+00	1.085027230184690827e+00	1.610568082058991379e+00	2.681858695518825719e+00	3.483337744351994392e+00	1.340344201458989248e+00	1.785155307177410666e-01	6.064608937230153707e-02
+7.698707959570197934e-01	5.946693939209857227e-02	6.140872236681682139e+00	7.794500275667249234e+00	3.329936732740133021e+00	1.053683186798398808e+01	2.442474922407471993e+00	1.618248604822617676e+00	1.645175154595579059e+00	1.577878407070486544e-01
+2.921978170383732198e-02	2.098764262909763589e-01	2.571849891653310571e+00	1.701437788960805664e-01	3.606271296781285773e+00	6.151967513580181013e-01	4.407426394075528719e+00	2.238674479981308085e+00	2.918338395342272129e+00	2.203405589927943664e-01
+8.185553514857757396e-02	2.904901153328537933e-01	1.884207868724713020e+00	6.379742850556673339e+00	1.316060594347697510e+00	5.742231747444348500e-02	3.293024161175864162e+00	2.070570602227302937e-01	2.323330232077761792e+00	9.345412483474558885e-01
+9.870953766246871597e-01	2.838968068710498449e-01	8.984979565988782113e-01	7.228894867998849172e+00	3.009018931955541376e+00	7.470177361465970556e-01	4.791497852877925290e+00	2.138893706603099965e+00	3.957478162078499118e+00	9.206372464557524005e-01
+9.246986296046750819e-01	2.657540764872973194e-02	5.696958733461305435e+00	6.037947545874237321e+00	2.054547403001988126e+00	4.010012556963714836e+00	5.387264573805446988e-01	4.198151587672391827e-01	4.855865220608975186e+00	3.421872218672002974e-01
+4.891436182199244698e-01	2.957137425921102514e-01	2.092865426966933118e+00	1.304751064715160558e-01	1.857399876731623767e+00	3.929329864582863596e+00	1.890510040380741774e-01	1.808418679622973757e+00	8.962596057614125034e-01	5.071164143964516091e-01
+9.883745014565588072e-01	2.023820478627288422e-01	3.026225500297741178e+00	8.018625010860999325e+00	2.639151190842266015e+00	7.866860471267914434e+00	5.692475791922957384e-02	2.619732708394978626e+00	3.791192865048754079e+00	8.510442750449997984e-01
+6.766386709986900172e-01	3.163942649407911478e-01	6.650890489805712846e+00	9.981354386720189265e-01	8.895093378732830125e-01	8.880441813445409949e-01	5.269510439686068359e+00	2.800971666890863521e+00	4.336136086833519698e-01	6.151838148712087673e-01
+9.395548338899438745e-01	2.274826251714922587e-01	4.841436257692705025e-01	1.951295574220930362e+00	3.688485459922960619e+00	3.319068635438701342e+00	2.622624513963639714e+00	1.773842110030804609e+00	7.017709220203099951e-01	7.273521464765183575e-01
+7.131396196120692865e-01	2.829891666225929359e-01	1.809204461229397820e-01	3.578992630164364641e+00	5.190853847632848073e-02	9.799965411266482462e-01	4.614272826173850817e+00	2.684041190366065699e+00	2.984828522093719183e+00	2.167818654123231381e-01
+6.173854727816793231e-01	1.961753927292946287e-01	4.555255586921068023e+00	3.578807812709182024e+00	8.425821534786881450e-01	7.518481958157543943e+00	6.262313649648420188e+00	2.000433960754317564e+00	1.421954930513873006e+00	8.019504843363266522e-01
+6.671808236429033201e-02	2.702227356574862793e-01	2.867725189486065585e+00	8.406696908068906904e-01	1.816665326608355890e+00	1.069991981228995215e+01	5.100300539865704863e+00	1.022140401782990482e-01	9.935128054149117149e-01	2.217478442404806627e-01
+4.675882156850276195e-01	3.994470533120373923e-01	3.013415758827279856e+00	3.472047851101398130e+00	1.579805042782022717e+00	1.025374950194446200e+01	5.424839982873136179e+00	2.661207445309309083e+00	1.974765823248681329e+00	8.490805254939938207e-01
+7.440465128212200296e-01	3.185879639684230935e-01	5.875377909342210714e+00	8.735053206610791321e+00	1.922533667640191624e-02	2.664137099658425711e-01	3.070429299965248493e+00	1.797362708842066059e+00	5.848813121558199546e-01	4.383864302200134455e-01
+3.550604742260795588e-01	6.083111274854280381e-02	3.404146882195189239e+00	7.733962066765833754e-01	1.176797582398654285e+00	6.871446754170301929e+00	5.361716911609923919e+00	1.576687106248415571e+00	4.355077426059827062e+00	8.607241356371005958e-01
+3.559318532806881041e-01	4.015285662241686593e-01	2.944936440066225458e+00	2.111335278696990336e+00	2.963059944865208628e+00	1.476942511987013873e+00	6.450386812630387823e+00	1.907611126769427123e+00	1.381468577271076636e+00	3.983676385111964979e-01
+1.352222544842187624e-01	3.963201476435085358e-01	7.711539919790504349e-01	5.328381207561713850e+00	1.055305792258381725e+00	7.535148204080714507e+00	6.478550147632057055e+00	2.298107587130767904e+00	3.835975596160178469e+00	7.784889119545324032e-02
+5.656300588103846394e-01	2.408672177953232185e-01	2.222118682804348655e+00	1.584476418959711363e+00	2.205997974729819955e+00	1.320044695544450697e+00	3.093339610296233388e+00	2.078381944707535123e+00	2.709900781033619532e-01	1.998783060846459625e-01
+5.064693576216054982e-01	4.755666724189564487e-01	2.460782122272777706e-01	6.394893884651652982e+00	2.393279335645917882e+00	7.720168766472102817e+00	9.946162988780055114e-01	4.727471826960147894e-01	3.624628763819681243e+00	7.939552686881024002e-01
+7.126511753385316172e-01	1.251084009980807377e-02	6.424546400284762093e+00	1.732715732319492385e+00	3.291228557764222185e+00	5.494632343972354782e+00	4.309920997430734424e+00	1.785238040943090132e+00	1.319325481408312184e+00	3.987762049958972765e-01
+8.564779126009015586e-01	1.551694340859950394e-01	3.000664164177824667e+00	2.121531733172477452e+00	1.929201507138485816e+00	1.091606713354398295e+01	6.352850505658992120e+00	1.942089264915284774e+00	3.099275474026703225e+00	6.349421349719974739e-02
+4.875235919705348531e-01	1.165471825543921702e-01	4.659789198761613349e+00	4.857039597115840657e+00	2.268900869752328298e+00	8.073884519013418526e+00	6.867532374199831757e+00	7.110477929957825793e-01	2.139511718029329490e+00	3.270336697362830058e-01
+2.836462637303897028e-01	3.526582320725804021e-01	3.433435023383186291e+00	7.878111173527013733e+00	5.846439909687783931e-01	1.334584322193443917e-01	2.430764258810069744e-01	2.294882067517025170e+00	6.943377361908681378e-01	6.597104416060772802e-01
+7.138680788986382364e-01	3.866838295049366270e-01	1.938054711638698135e-01	7.776057762563468145e+00	3.843531806920502003e+00	1.169843574403325803e+01	1.317073001264212895e-02	9.974273699007062399e-01	2.805794644206359933e+00	2.846508994495189349e-02
+7.616522995605032875e-01	1.525341083280188914e-01	3.649857814806184653e+00	6.626064326264721416e+00	1.848485344378381079e+00	7.772079961576015883e+00	4.922747697985218274e+00	2.183176272993367029e+00	1.523463352631002188e+00	3.483587959766206721e-01
+6.174650468503004586e-01	2.890466460728707276e-01	4.198548001307910837e+00	4.756162259128787539e+00	1.215478403072798308e-01	1.073564948073302894e+01	2.393378431835311737e+00	7.963541702214833595e-01	1.221126147878501822e+00	3.627422424390945865e-01
+9.041286748679164820e-01	3.692704713445955078e-01	5.969223946468604325e+00	1.400042822911425899e-01	2.167809064247096185e+00	3.466765141422927687e+00	6.104187126619349257e+00	2.815584690378426735e+00	1.472968857439669765e+00	6.815901346942815353e-01
+7.420584650906367763e-01	2.629350119859111268e-01	5.157415509545635324e-01	8.936951265754993656e+00	3.499514727363592037e+00	2.929864868198977401e+00	5.289656243356750664e+00	3.794701199783089285e-01	3.570594754264058146e+00	5.454360810847372898e-01
+2.133902786741366242e-01	4.793775958545365423e-01	7.161025461060221975e-01	7.379345463324211707e+00	9.195588975890780503e-01	8.953799627402657890e+00	8.199588910676803843e-01	1.539214057143141190e-01	2.517268342599753961e+00	8.642672068937873320e-03
+4.189544227294224044e-01	4.677740958591244946e-01	4.169315152475023822e+00	2.198113856746841677e-01	4.442481038742749355e-01	8.853442692179621432e+00	5.871239829017361744e-01	1.642555038182410554e-01	3.779048982357357644e+00	5.353051674182843778e-01
+8.373426461385590214e-01	3.895842710949837251e-01	3.079787615886646623e-01	4.540155876800062273e+00	7.099227726704704367e-01	3.400668992101794075e-01	6.292240435961995004e+00	2.673955045372806705e+00	2.378211157475177195e+00	5.342130779135090268e-01
+3.068274218964279765e-01	3.192479805504804813e-01	3.859914935882651221e+00	4.687340409378260908e+00	1.900386926452752867e+00	8.623061138373001810e+00	1.033494255619878288e+00	8.997549532223314550e-02	4.911877241989344611e+00	9.510774047315817548e-01
+3.988512541926614308e-01	8.003053540870219695e-02	2.122999256011599556e+00	4.455738292615722962e+00	1.825496679610708828e+00	6.531267574926014063e+00	3.480291480026090900e-01	1.582969106163566453e+00	4.938475735461747007e+00	1.906205929699261725e-01
+7.111720075879690572e-02	4.221578019099042622e-01	4.465731829112144169e+00	2.564360848739634768e+00	2.152069261549716117e-02	1.058526283280879277e+01	1.462724588046307783e+00	2.852823949099446033e+00	1.829748887076950048e+00	2.844587557581582660e-01
+2.570944338116505357e-01	3.670273280771254032e-01	2.950100530191855697e+00	1.367357621902863807e-01	6.536517332253071189e-01	4.623777977996077482e+00	1.830565500886062402e+00	6.876079879205642120e-01	2.463356978321902702e+00	1.515963114524383482e-01
+8.196773403442627615e-01	2.439155028749898646e-01	6.976779897465351654e-01	4.021827899216881441e+00	1.629083009450637665e+00	4.895113507734013325e+00	5.420567352065293498e+00	2.923116514593426807e+00	3.675539915878816899e+00	4.820681859745293840e-01
+8.912757210535459107e-01	4.287706082586761891e-02	5.914097580762159190e+00	4.887219470061346271e+00	2.297300243872694381e+00	5.258879986696362252e+00	2.055176485482270277e+00	1.247382108420278080e+00	4.091406116197054965e-01	2.863708277926955148e-01
+9.165363924681566576e-01	2.077777304803083980e-01	6.425359611836583440e+00	8.532229331139205897e+00	1.890984676596261238e+00	4.482242194093222665e+00	4.719123490932191523e+00	9.368865626180848949e-01	3.424737866108997775e+00	3.692933466222642069e-01
+2.844432761355554229e-01	3.487910856226295042e-02	3.288851974081316243e+00	2.493792851872620453e+00	3.245659443981551373e+00	6.119401284154618814e+00	1.629426639698323909e+00	2.146211397792365361e+00	3.323215307906694616e+00	9.689245472658591929e-02
+4.605981453808145609e-01	2.951104911370585060e-01	3.466807508548188643e+00	7.015908445063255172e+00	2.012464416109721377e+00	1.411425132174343400e+00	5.640313420420241641e+00	2.551472185597204234e+00	2.921468481145677476e+00	5.154477507183952145e-02
+7.916776772020003294e-01	3.104882171066662977e-01	3.388406047132061616e+00	6.308076145350703712e+00	2.141204968913200002e+00	8.815894689517723037e+00	1.216772248282482316e+00	5.490792235288116663e-01	4.864033877707442777e+00	3.120795424356880288e-01
+6.520543319958096440e-01	7.498529269624598648e-02	6.546870924616779774e+00	4.313660942542439258e+00	2.842979234248417697e+00	2.708025294292042418e+00	2.101565155836826015e+00	2.779757313928136409e+00	2.766295492770797892e+00	8.242938770130925352e-02
+3.916403442843127136e-01	9.189287972434972973e-02	3.658241649007774488e+00	8.892285269585453733e+00	2.872944719502671163e+00	1.926067804146072149e-01	4.113071146818407797e+00	2.033786884589388499e+00	1.858529211147021964e+00	3.601164330247360601e-01
+6.658899523715963165e-01	1.901452620071845678e-01	3.358563768423928231e+00	7.307834854097906785e+00	2.230102878474681027e+00	6.994441846960493869e+00	6.046922242667236880e+00	1.681346319512073695e+00	4.453953342081854672e+00	7.760263753123383124e-01
+3.544087352095481869e-02	1.736442680483894496e-01	6.358750483131432318e+00	6.362553657877171531e+00	9.438518061698975004e-02	3.241545692161897474e+00	3.216563179949617268e+00	2.048445371027631801e+00	2.592256358497992519e+00	5.871829363638558785e-02
+2.278297361077044059e-01	2.521532626334166971e-01	6.006560048470274360e+00	1.036460639857805344e+00	9.596384576758123508e-01	1.161754935200590388e+01	6.921319460991003147e+00	9.492023907384942172e-01	2.570754313364566723e+00	9.898393847822013480e-01
+7.720727707112930993e-01	4.625201799694809712e-01	6.030038351299404020e+00	1.576843309640901802e+00	3.047360805551709184e+00	1.105183421427205381e+01	6.765794291140618810e+00	2.437125901810530237e+00	4.785718495907587133e-02	8.643252994269234879e-02
+9.816465867962677061e-01	1.040135248373981747e-01	6.269766387849492695e+00	4.587664995870221141e+00	1.471595298862088175e+00	3.137698622087241596e+00	2.585776241619418325e+00	1.094148950727448000e+00	4.787651271957346744e+00	8.537352255282423252e-01
+5.927286689665087760e-01	1.635250692112610871e-01	6.763777563834705298e+00	4.190158816469162772e+00	1.671137433875959921e-01	7.145892561514077812e-02	2.697194722513650555e-01	1.826917897949313918e+00	4.855283909060112535e+00	2.338052822860668956e-01
+1.591435657354911548e-02	1.309457494027614910e-01	4.027555471388495345e-01	3.269356767055083601e+00	2.135361695566277085e+00	3.812187332815673013e+00	3.745269592969274086e+00	2.239168581407309233e+00	2.403445557045992054e+00	4.127895397263747768e-02
+1.138459964891592735e-01	2.477562449655215659e-01	6.711903366433036311e+00	2.328903691994430591e+00	3.619135648902618296e+00	2.643113253632744630e+00	7.573805393623883608e-01	5.638560832013681523e-01	9.866348317250567268e-01	5.321998283901712146e-01
+8.335543816544467299e-01	4.852511675363395671e-01	2.522771010620390442e+00	6.865591808220169234e+00	2.614984821499222178e-01	6.661308755886223842e+00	4.631632858625027716e+00	2.016686176070514591e-02	1.670651928613386517e+00	8.001889580730393581e-01
+3.506629367480429416e-02	7.513357347200738667e-02	1.091955136302876994e+00	4.788485228215831668e+00	3.088440213543260704e+00	3.737602435514551402e+00	2.060091420371281679e+00	1.664114145344917528e+00	3.853021369729443979e+00	3.981246066772909931e-01
+2.941809570016090669e-01	1.951063404831046011e-01	1.053511941997986590e+00	1.626573601758511067e+00	1.073068181672781840e+00	8.246102825125259272e+00	1.229799882190759863e+00	2.046485654174841073e+00	1.887314229128450593e+00	8.477832457224251383e-01
+4.288411318977621844e-01	1.344940483989709445e-01	1.890175735159078219e+00	6.909579416417008346e+00	2.559618997696785136e+00	8.076451885637920824e+00	6.709931254318012073e+00	2.711755694833509533e+00	2.197259152647867975e-01	5.229458751166887742e-01
+7.962785959698204241e-02	4.791664160968215902e-01	4.775534443309384258e+00	6.195144705514182171e+00	1.095816602161203690e-01	7.253215415683056833e+00	3.785863733799192499e+00	9.032344566438499367e-01	3.712953563991834294e+00	3.658749193043641101e-01
+9.925977090507452916e-01	4.226472934729750275e-01	4.233597679026229521e+00	5.373501445983512426e+00	3.029885997105019158e+00	1.948856366491487524e+00	3.580389683767193798e+00	6.128625096828327345e-01	3.195613215016547937e+00	1.498067324162460912e-01
+3.211542228342814109e-01	2.435050065055133772e-01	5.191492939326351141e+00	8.811443367963034845e+00	3.119307887468464990e+00	9.566604729350618896e+00	2.662534653489017522e+00	2.048270692218978617e+00	4.513252133993818305e+00	1.534045164632015235e-01
+1.169841130722715228e-01	4.278805785103458326e-01	3.871981713139157399e+00	2.151432657451758379e-01	2.353074788516758442e+00	2.925683050900422355e+00	5.164172309356634294e+00	3.209672061804553778e-01	1.806778866741765022e+00	7.377245404193294442e-01
+4.920318247135287981e-01	3.980350958273896622e-01	3.553262242278445449e+00	5.660089801227229600e+00	1.130914505340574028e+00	5.111374270657123020e-01	3.594580615302541915e+00	1.377803570003424127e+00	3.733415954722649754e+00	3.790748149557219282e-01
+7.141555225002506058e-01	1.431060744802945539e-01	3.598062535886497493e+00	6.486014070157204436e+00	1.222357165551017477e+00	1.071797470084096382e+01	2.966727560848184453e+00	1.433401409283499239e+00	3.761066588446803749e+00	4.577426600413826430e-01
+8.846645270352224699e-01	3.643370058500688402e-01	3.776149232858763138e+00	8.514771398046235618e+00	1.333408164935339979e+00	8.719464634044745921e+00	3.680392243042417633e+00	2.323141187069376112e+00	3.968683511008049170e+00	5.273710580573882645e-01
+7.563601909975703474e-01	7.110020268898137319e-02	4.754320885232382388e+00	6.715112169672266873e+00	3.900410613846503338e-01	2.374351342693231892e+00	3.949776593352507525e-01	2.793668022917081828e+00	3.590934831061467047e+00	2.297057403281292931e-01
+4.973937884442386492e-01	3.879138870002525330e-01	9.799015387413885225e-01	2.012207080170396623e+00	3.118810053878664501e+00	1.476679627040943465e+00	5.927098696212524054e+00	1.896055595509993363e-02	3.766273251675572586e-01	9.605494500779653277e-01
+1.977451097083424703e-01	1.856512361107952036e-01	6.724615885357570377e+00	6.767753598078477317e+00	2.460427469322048477e+00	6.474778192244762032e+00	2.497838703773406444e+00	1.783570854432153130e+00	5.300044222306288244e-02	4.451008306808179604e-01
+7.777282859813858806e-01	1.108325048061198359e-02	5.994453580967894268e+00	3.907200072756264575e+00	2.802655265583420707e+00	2.467799246467401009e+00	1.767975654554970966e+00	1.542521176133627048e+00	3.301251420424628602e+00	2.703716625531150664e-01
+2.877553566109145589e-01	1.420816672999475339e-01	3.531815135282850626e+00	6.071634467360012444e+00	1.950296822580346312e+00	3.667561567003036149e+00	3.564813794526681789e+00	8.193521748791241510e-01	3.969846661030343782e+00	9.642674274532847534e-02
+8.461631661621418221e-01	3.874198608202865657e-01	2.460697897082127827e+00	4.483937646731980253e-01	9.536408659656969711e-01	9.374214254598758345e+00	4.523934853618548857e+00	8.998483173209120833e-01	4.534728615909888738e-01	1.316674336548497859e-01
+1.751881832985308085e-01	3.840511548841616940e-01	1.356205442152841334e+00	4.975298518310518503e+00	3.475732058856605633e+00	4.548907307536070199e+00	4.318674447424278640e+00	1.546572097412000968e+00	2.738768725851110197e+00	2.581125520633765769e-01
+5.541597656676272576e-01	2.055046901470117260e-01	6.866760560523623269e+00	3.572427800023941113e+00	5.759362217394889960e-01	3.707695882166028856e+00	1.083565015241491736e+00	8.888805063168140341e-01	4.589028250324568958e+00	6.712978422710076387e-01
+3.399810580604246679e-01	6.778009844025034702e-02	1.990157231926872106e+00	7.987721068254113987e+00	8.333111074045302935e-01	1.434155759051158885e+00	1.063680632215532995e+00	8.682505419899785304e-01	4.553780840671912244e+00	6.153621080682639688e-01
+3.805973074910944387e-01	1.809291374261368102e-02	2.326724448899936348e+00	8.808775644776478231e+00	1.071772823118059659e+00	2.671050593238351034e+00	3.236405416201733942e+00	1.156638793116922637e+00	4.089621847663170939e+00	8.958752205306643734e-01
+6.236783129330455422e-01	2.034130988847053789e-01	2.149437338793216412e+00	2.709264986875024039e+00	2.581735349094827381e+00	4.559785061212902946e+00	3.811076674546835186e+00	2.506501802065709494e+00	3.558610317643628562e+00	2.953998470582006597e-01
+5.786616826695738514e-01	1.092680018344108772e-01	3.673824620483585601e+00	6.935907382658493603e+00	2.924092204393669459e+00	4.994297253991534902e+00	4.216415579314912065e+00	2.221035033808330006e+00	3.308866108190522404e+00	6.005834101043915352e-01
+8.445682020932365752e-01	3.477257696376142015e-01	2.977918003630122712e+00	6.125943756092539161e+00	2.412136776594560494e+00	9.871992950448490234e+00	1.471692508678064337e+00	1.186189240533216616e+00	1.731159415996974538e+00	5.019757339821060427e-01
+7.262920712271674351e-01	1.070865338500862629e-01	6.515623171275430892e-02	1.910429663217140916e+00	3.785981980891470755e-01	5.136662978344108232e+00	1.190639365883919165e+00	2.126433858821366041e+00	2.551490600585693436e+00	2.282190611495389376e-01
+1.181796487183683730e+01	2.516406960282593364e-01	2.887277780206054967e+00	7.424704929314782653e+00	2.733699169495314329e+00	1.326499604911062224e-01	4.472085075329708559e+00	1.841612256174257833e+00	9.588965759771213015e+00	6.074890083495527726e-01
+1.355685210334684321e+01	1.476043055963610906e-01	2.536588592058465874e+00	6.894815252046440968e+00	4.629665076865330242e-01	1.591874408544442510e-01	2.659816619928975889e+00	1.992412304973402826e+00	9.409822587181350118e-01	8.428860916139044868e-01
+1.302344932128066368e+01	1.877957892610864488e-01	3.771232365671751907e+00	2.334982689796312627e+00	3.407718706286316790e-01	1.716052424590376324e+00	3.548651514000740193e+00	2.374591648505322450e+00	2.134503293655584777e+00	2.543046511567058632e-01
+4.451805618740952042e+00	6.764221264325200045e-02	2.669906521408719868e+00	8.632453926650279996e+00	2.033894446413802903e+00	7.142031037304861218e-01	4.194277576482712266e-01	1.402693657818623318e-01	7.586905366769697068e+00	1.843381480000478856e-01
+8.570192823591918696e+00	2.306084812036459097e-01	2.522438257729497479e+00	7.522008507552547130e+00	3.713341144773252278e-01	5.716961257420709597e-01	2.079090539304101704e+00	1.766024213797281917e+00	1.662647314050246683e+00	6.384753099614368654e-01
+1.492700367152876417e+01	5.956069900446444437e-02	5.193239326553306112e-01	4.348155739698301936e+00	1.498622145721807408e+00	4.168028931133043180e-01	6.514105873948607517e+00	1.300249280108312089e+00	7.509216655975119892e+00	6.466771718100665645e-01
+1.401469528149594801e+01	1.318450715802988726e-01	1.755366307435912709e+00	6.268538614231175643e+00	3.594158033755705084e+00	1.596063791967876844e+00	4.538357107641011190e-01	1.578349416671345651e+00	8.605308976007654209e+00	8.431350039872285151e-01
+1.125897718091157707e+01	7.088415129913086532e-02	3.073048649925616438e+00	4.169572134840827671e-01	3.391473123826589831e+00	1.253820676407349843e+00	6.033094418983614204e+00	1.669529100255978893e+00	2.743440461537412389e+00	3.447717414063620200e-01
+2.618236120635488717e+00	2.221707698733976499e-01	3.880148546665934006e+00	8.346365071605847064e+00	4.485232175994370074e+00	1.273339902009563929e+00	5.126040370747679376e+00	5.002442564550735327e-02	7.652504234144801742e+00	9.045218812960378330e-01
+1.352791799195179934e+01	2.958099853113912436e-01	1.620323493963710115e+00	8.514270244110930363e+00	1.405394051171777736e+00	1.228874034324174414e+00	4.366837956110266816e+00	4.681693532389841206e-01	3.658910928887952885e+00	5.685286335634343047e-01
+1.321105522112276098e+01	7.596543795040429370e-02	2.288656765287575112e+00	5.454334126686442019e+00	3.540974541852515589e+00	9.136761954539687203e-01	1.531916431358365749e+00	2.600489131884043736e+00	2.186958478364557035e+00	6.152510639313788499e-01
+9.248327943582943078e+00	2.953301427161937331e-01	2.912841777561403500e+00	3.210770144852208574e+00	4.678430187034543941e+00	1.903313079890405213e-01	1.852621718826671371e-01	1.901873807226989177e-01	6.598474242893042074e+00	1.863584605124068982e-01
+1.561511613665458764e+00	2.791101130788958273e-01	1.230729125078247499e+00	3.214729791326091046e+00	5.760499802570256955e-01	6.791526464572399391e-01	1.164793484277019120e-01	2.436090334891392928e+00	1.684437394931304732e+00	5.939045414290095026e-01
+1.074199933120756256e+01	2.768105466336150400e-01	3.600337787752637908e+00	1.988419733930615330e+00	1.121183222910840938e+00	1.262741873855612873e+00	5.955023194942483222e+00	1.775781023586198160e+00	1.635523073587350051e+00	9.451325867881940068e-01
+1.439453349318703390e+00	1.572125366345192565e-01	4.116939260327882266e+00	8.143883513662194318e+00	4.436336490499172669e+00	1.431008029759604838e+00	6.129158351904355762e+00	1.627229601340490550e+00	3.544342525406199984e-01	9.537385422254009981e-01
+4.385485861700014532e+00	1.072544990386351815e-01	2.555081167553841404e+00	7.797049674127350727e+00	6.437428060323546752e-03	1.690033945941410209e+00	3.546703381942077016e+00	2.355037673948323640e+00	6.000098633768423007e+00	5.634219751061730896e-02
+1.478941841002847202e+00	2.819342154043811277e-01	2.946748707794077937e+00	8.674573772230408375e-01	4.831892645548560239e+00	6.117546232929613748e-01	2.336129800349750152e+00	1.662089735231696785e+00	7.288065418938156625e+00	5.079145336272458122e-01
+3.566330915119718004e+00	1.054092958488348081e-01	1.262772079536855330e+00	6.632582037177616741e-01	3.155614409429076339e+00	4.560221336860896812e-01	6.107444411033736920e+00	9.724400924370590893e-02	8.869125512095319408e+00	4.531451318109073201e-01
+3.931188712123208084e+00	1.648487385228547819e-01	4.230745858304009843e+00	4.740462911360358689e+00	4.302603206380042344e+00	5.923369016635324780e-01	1.350657156605095111e-03	1.610503856182457394e+00	4.214531575126196472e+00	1.188686805020251480e-01
+5.690049848746455474e+00	1.611771331430286031e-02	2.797114322332588721e+00	1.326664870101148530e+00	2.184236794465954645e-01	7.244826847041867524e-01	5.088788284324481381e+00	1.892888045267941521e+00	5.251528732452677151e-01	8.270932539527531269e-01
+9.971187065128873428e+00	1.922234878798162738e-01	4.399243530319325579e+00	1.690861779292817113e-01	3.026193132922350149e-02	1.247811934568532566e+00	2.287373749222342489e+00	7.123297811357809373e-02	9.782325221036309415e+00	4.025349245250368080e-01
+3.166794098033590465e+00	2.895927629971295492e-01	1.010943063067509806e+00	6.246940915716239218e+00	2.296626417112780594e+00	1.124916422624346879e-01	3.155063467415336387e+00	1.122794863214901628e+00	7.747118569384047859e+00	8.813094613105451058e-01
+7.776074009241417784e+00	4.884943113850780116e-02	2.665091138087455569e+00	5.396374948611239120e+00	4.807416439179393208e+00	9.311196524679428155e-01	2.370568997313627246e+00	1.497537386829888018e+00	6.410622802993913183e+00	1.959147447247133744e-01
+9.386902307633294029e+00	2.918525147335055969e-01	2.580633210860679316e+00	6.117570487861421213e+00	9.440652787891057329e-01	1.478204798622005711e+00	1.583143507835594432e+00	1.878469714268893753e+00	8.341600309722448259e+00	2.296329479872928347e-01
+1.325226074192375592e+01	1.660088044261155971e-01	3.032911943104431352e+00	1.544500510963358053e-01	1.444444693560564819e-01	5.144081121761716613e-01	6.228472420827102063e+00	2.002195300328763139e+00	8.619120730699705746e+00	5.986701025311890190e-01
+1.253702247039758255e+01	2.815208299886055587e-01	4.418741917269815467e+00	7.187649403919402324e+00	1.857768480706825098e+00	1.693270109415251579e+00	4.500029768156658783e+00	9.386766274226390205e-01	7.712460417143821578e-01	1.490837705279541847e-01
+6.470192866248365426e+00	2.538814476100020867e-02	1.482709093692339941e+00	4.810511422160657080e-02	7.397535485801826072e-01	1.366029201822366090e+00	2.938463177620382449e+00	1.905659482665870819e+00	3.274297555223312450e+00	5.946455225965689984e-01
+8.046767686244354678e+00	2.379545842279764956e-01	4.589316043001465673e+00	5.892088873366180479e+00	1.308019335424309704e+00	7.484233826378567933e-01	4.495389998435046053e+00	6.202227507857822619e-01	1.707552168782031199e+00	9.367630937108080147e-01
+4.192176901095245434e+00	1.304693968976899099e-01	4.998027091565603541e+00	2.456526465425167283e-01	1.205473478694641942e+00	1.109994403062972701e+00	5.125196171178717286e+00	2.206741883831745188e+00	5.115846166892562330e+00	1.104691714503143185e-01
+4.157525610946958494e+00	9.298038433732908059e-02	1.491518671460473344e+00	4.496224394499284571e+00	4.648879436827211720e+00	1.793268724253979762e+00	2.896904069484845579e+00	1.356276322086461761e+00	9.923020114331025709e+00	3.558734125538330906e-01
+6.979832709755873221e-01	3.599810987684147479e-02	4.064902498699765943e+00	5.597103986278889387e+00	4.496807253190716835e+00	1.851319113294662699e+00	4.995032507818135237e+00	6.680000375419122838e-01	3.176705087670492755e+00	6.164391597089758923e-01
+2.680618751278343481e+00	1.681546055962608033e-01	4.674994237591107549e+00	7.872155819890408246e+00	3.635532819810061156e+00	5.723527244541317671e-01	1.228843659393086263e-02	1.624022154584328081e-01	7.634482806401528876e+00	9.621470740939815913e-01
+1.129656603051103581e+01	1.754676764823895063e-01	4.925438368822037560e+00	9.090625284174426168e-01	9.872898867967799807e-01	1.967411347408282829e+00	5.647085320216579163e+00	8.040378544211923550e-01	3.157643996826393629e+00	6.705813685367211230e-01
+1.166598416610384703e+01	1.866541563727542041e-01	3.082745872586427183e+00	5.684997481806196795e+00	2.688708007208003181e+00	1.498993448192174283e+00	6.225488991661570992e+00	1.314065400514044191e+00	6.343644934019415871e+00	5.416797703578821732e-01
+7.720230714264369354e+00	1.523894939496055478e-01	1.620746553117432542e+00	6.005901827039864749e+00	4.717729383985791447e-02	9.075246998741233195e-01	9.360149124843241131e-01	1.754815849294320884e+00	9.666726609856759467e+00	6.581401514679491305e-01
+1.453127659928862059e+01	7.703415326140243546e-02	2.479862457434094392e+00	4.182212439636490586e+00	3.488676768426026253e+00	1.206066251747953011e+00	2.285902497850169190e+00	1.194028499060063808e+00	9.286786278550043239e-01	8.091147826884519390e-01
+6.417666940507571915e+00	7.132128551135483163e-02	1.942224670162717270e+00	3.433069127251919817e+00	7.564280838361731041e-01	5.516799087701549364e-01	5.176162075754822567e+00	2.178815429942681181e+00	5.722924693718432820e+00	4.158454834199132311e-02
+4.502867570496368543e+00	2.239682077280483463e-01	1.141094924815538203e+00	2.453579665440670521e+00	2.992162924469959684e+00	8.370073651298797213e-01	1.718538404749463400e+00	4.075217867922253934e-02	2.539196137099058781e+00	4.990272000661035712e-01
+1.769506443642104854e+00	5.077985666082200644e-02	4.536839079078856862e+00	1.926622825410988948e+00	4.021468856985962681e+00	2.704122777555590940e-01	2.804453666907950282e+00	1.607921780184728977e+00	9.484877651296452328e+00	3.670233106390000666e-01
+4.295020083244110998e-01	2.868652070133119181e-01	3.134264451081448577e+00	6.836230003013163525e+00	4.042883960895656159e+00	9.348262365209494273e-01	3.157788950792046023e+00	6.497342539434567410e-01	2.517529722233489586e+00	8.540150270825374035e-01
+1.912602820186987129e+00	6.362962639611402689e-03	3.790830335966000497e+00	6.753767120657978573e+00	4.454347298520647591e+00	3.597830171502631735e-01	5.746913394800006181e+00	1.664073075055900741e+00	9.213646990669767334e+00	8.780134214441304152e-01
+3.963149975944498138e+00	1.436768738550251445e-01	3.590235964594930884e+00	4.171865940500004477e+00	4.196989506671150671e+00	1.790899951828085479e+00	3.907987905963472564e-01	2.629171659041773967e+00	1.771918908979907226e+00	1.659282155546771831e-01
+1.044847711025325054e+01	1.867999171686929583e-01	9.742152202794096549e-01	6.029364879715696901e+00	3.842755370236281998e+00	1.457930089422849740e-01	3.050806799572168160e+00	1.928860746757870892e+00	3.715892327997140310e+00	2.654349553692562091e-01
+7.856135199120164891e+00	1.212882676824621475e-01	1.801474098171824689e+00	8.484064353156798077e+00	2.754740315340553636e-01	1.410439285738713577e+00	4.796002926524840504e-01	2.610915875359472338e+00	6.670148976341948810e+00	6.486294231998313009e-03
+9.804395866994216391e+00	1.639075098111162643e-01	4.837423823790375721e+00	5.260179690540038422e+00	2.017636134845651075e+00	1.254612394194728031e+00	5.546452640168771708e+00	1.375458824455512818e+00	7.174505857330657488e-01	4.918004847159316295e-01
+1.183986689177626417e+01	7.872326102861289254e-02	2.582301908063294960e+00	7.740411186780283614e+00	4.407066198851386396e+00	8.853556556234460917e-01	5.162877457710448148e+00	1.109660738036303940e+00	1.268125593567879239e-01	8.662130880451797132e-01
+1.093683742604408948e+01	2.447064137767908099e-01	1.696855866075519081e+00	5.548678621604610228e+00	1.820809627873263281e+00	1.700607366429271350e+00	5.830452128040040982e+00	3.764385212451778484e-01	1.898685064749314089e+00	5.451812220747674909e-01
+7.056150651686926345e+00	8.818414996117814042e-02	2.747955950871696196e+00	5.338892801584529835e+00	4.473322648271785162e+00	7.333467400342130738e-02	2.177902358616423673e+00	9.320526616437706524e-01	6.762834017402112430e+00	7.935462801762755980e-01
+1.180908873504107781e+01	2.574753599584344022e-01	1.987005140477280385e+00	6.918063648604911187e+00	4.785668471712723360e+00	9.696239217014788192e-01	1.280135659543590876e+00	1.881727503129584367e-01	1.035012278208573377e+01	8.059150064856590578e-02
+1.008412809573388103e+01	2.343166128881368770e-01	4.905764759825019361e+00	6.797230994413787641e+00	3.285521074639472960e+00	3.493548680683078445e-03	1.756029501961107187e+00	1.794157670035024577e+00	2.319307927793950785e+00	1.193434686169847492e-02
+3.749280090904193230e+00	4.960933223025080357e-02	1.971450801044384304e+00	8.330916700800358399e+00	2.349270312429754259e+00	5.221738760240273169e-01	6.366458140784689412e+00	2.387898312417887414e+00	8.441503152405894994e+00	4.979337584731783339e-01
+5.343103528797895407e+00	2.723330372377434361e-01	1.378635711838508415e+00	8.671386126448166198e-01	3.147037907155366021e+00	8.963585668163664977e-02	1.405277975206570940e+00	1.309205332617405571e+00	1.965588001155148978e+00	8.059321310558243967e-01
+7.149160280820787250e+00	2.764868013404646607e-01	1.096825467691220357e-01	3.503192650439979960e+00	3.131184989945668740e+00	1.194188630214327640e+00	4.274505420449012050e+00	2.968756266528913557e+00	2.749295301370240363e+00	7.370923727332389097e-01
+8.210295738041565983e+00	2.272029223367369455e-01	3.779242145994847757e+00	6.686181645391151562e+00	2.011271144842419201e+00	4.622895905968951169e-01	3.398981903852160702e+00	2.343123357207267388e-01	3.729889267099015360e+00	5.792152207898214611e-01
+1.002328179326305246e+01	8.185470612146233949e-02	3.269943977541031721e+00	8.539125023520108826e+00	3.035522647556629483e+00	1.401749737447430810e-01	1.982996845708496103e+00	3.745616727807693502e-01	1.201117123994490221e+00	1.638310756436510696e-01
+2.710850844140515914e+00	2.242145718096996698e-01	2.296536249652671557e+00	4.220260338748655471e+00	1.230213951484597334e+00	9.427382488180584552e-01	4.976262280555096673e+00	1.332924357337322174e+00	6.600361386208043690e+00	9.862980787604007649e-01
+5.949570342069224971e+00	1.418784555555818727e-01	3.232287503680208562e+00	6.141638030385319524e+00	2.842720729387286482e+00	1.023630011169686105e+00	6.974549437024968768e+00	1.541945738455003934e+00	2.913259034713350015e+00	4.512992539354583332e-01
+2.028585498043759650e+00	1.398763410169154009e-01	3.691098756081625076e+00	7.976451317237516214e-01	4.435499859432201486e-02	7.467345392685726946e-01	5.281840013916209386e+00	9.546775874932222905e-01	5.365778369266787529e+00	7.770900276041966537e-01
+6.077535052205729649e+00	1.395387528894324602e-01	4.775093402342934112e+00	3.132020266567483890e+00	1.580718289061279869e+00	1.416317099261457102e+00	5.862579332019776324e+00	7.318328790735707523e-01	3.709729378713444436e+00	6.670468053137402764e-01
+1.214513163540950025e+01	4.365179876861721014e-02	3.838251055066958628e+00	7.238524315230883133e+00	1.660270397218274852e+00	1.239458019962108137e+00	1.401234933150055095e+00	2.814065416790874075e-01	4.456870379094779899e+00	2.891479005157764348e-01
+1.087941705836024830e+01	2.995998122378682460e-01	5.777085219163696772e-01	4.703046220800023214e+00	3.086014168894017917e+00	7.044434508496584257e-01	2.037881492641501424e+00	1.019327349906296476e+00	4.393779773589142401e+00	2.305255684467343302e-01
+5.290012626145627728e+00	9.008901407316972920e-02	1.142983813855805719e+00	4.118316186532029022e+00	3.637703293797282633e+00	1.906330254257075252e+00	1.445500604367967679e+00	5.164490469163260933e-01	3.532786407019917974e+00	8.270890916952341465e-01
+5.697086356845923127e+00	2.235572514757046270e-01	2.266800826831066873e-01	6.383456423897621690e+00	1.264197247239626343e+00	2.912491491275546185e-01	1.306548262207717137e+00	1.684488621935003927e+00	9.359893288902904729e+00	6.328523844944041743e-01
+1.359123925577359593e+01	2.715219536985591462e-03	8.374487623417636861e-01	4.033547555739824375e+00	7.378921961934747920e-01	1.372002555673720758e+00	2.694288461006839164e+00	2.082180871032943692e+00	1.911434796530308899e+00	7.656913496191556368e-01
+9.992533308249194945e+00	4.104749241998207482e-02	4.724075616116481413e+00	3.461796948861655121e+00	4.204372983561600385e+00	7.310738374531249306e-01	6.107621094624161806e+00	2.638927091445617990e+00	3.624613744624816114e+00	4.700163600933867647e-01
+1.685245491179172550e+00	2.115428105359661026e-01	4.407417050425523897e+00	7.476567774422387735e+00	3.634488913192286841e+00	1.698280219311323336e-01	2.945428761291103648e+00	2.713451434553051822e+00	1.012624145292138067e+00	3.648699049547410311e-02
+6.589428437578542130e+00	4.446906542749364671e-02	7.810041614180479597e-01	5.691285012262346399e+00	4.554086499149781808e+00	1.277799954679892380e-02	4.026680491475270740e-01	1.953473174748952212e+00	7.152482187616325815e-01	2.086430282341091846e-01
+2.720849865844793936e+00	2.440203981068089778e-01	2.917763308083296092e+00	5.899884531261626819e+00	3.940367037369739034e+00	1.211422445527540859e+00	1.567556955778588890e+00	5.600673976327497083e-02	9.876156837696910884e+00	1.599751919864411676e-01
+2.834482533918473379e+00	1.520034844725705325e-01	1.978602545261327617e+00	1.676165260704382654e+00	2.194029998012147153e+00	2.492932602456554569e-02	8.120351889658737887e-01	2.560447673664525858e+00	2.686589650575758270e+00	4.039721790527054379e-01
+1.484413911807410891e+01	1.998020163214577760e-01	3.141873601255305015e+00	3.066415597754779565e+00	2.205093406956332380e+00	1.116117348869391623e+00	2.615679339050218655e-01	4.804557806332970804e-01	3.540690054736377768e+00	9.294949005824936217e-01
+8.903890154201720364e+00	2.893997238796244265e-01	4.277785905487593077e+00	3.596708777222128894e+00	3.332199664018714813e+00	6.094561931515880548e-01	2.121001953072600532e+00	1.426684687927117112e+00	9.708702533734449247e+00	5.724400833134991595e-01
+3.704414716601320379e+00	3.534656415192761331e-02	2.191170133339663817e+00	5.192464390336396285e+00	2.340092081088205767e+00	9.268383918699469870e-01	3.615953197841254241e+00	1.025661142784032753e+00	2.734380318250539954e+00	8.545976971860628213e-01
+9.213691025538279789e+00	1.023114031986859657e-01	2.236053896606724933e+00	6.731642119286943560e-01	4.131381102030507790e+00	3.423592126394250368e-01	2.177072569545674519e+00	1.414220327358206131e+00	8.610263975317616669e+00	4.954105311458011363e-01
+7.882621005926893076e-01	2.756438662864029721e-01	2.440056720240608268e+00	8.095898270821609843e-01	2.982700784281210993e+00	7.367102799018900861e-01	6.005049730578022604e+00	2.935825349713905741e+00	8.105806119374374674e+00	7.343900173346469273e-01
+7.332899839919692297e+00	1.012310325009186385e-01	2.786199548500924550e+00	6.996915193838988856e+00	3.958649628430874756e+00	6.984966471313347469e-01	5.034280993433526419e+00	1.175980210067817389e-01	1.010859759006517855e+01	6.006376844057623732e-01
+7.335639593623159094e+00	1.035066387333896804e-01	4.632911160270253959e+00	6.920712126173365952e+00	4.840386095819143364e+00	3.536787893048165543e-01	2.116621517341860326e+00	2.028873912140862767e+00	5.484712972130116881e+00	8.008724869574028737e-01
+1.173589800901229019e+01	1.675454624679203752e-01	1.857307229807804827e+00	1.233541929072509369e+00	3.229707875128986050e+00	1.703661647359659526e+00	1.399653313244334507e-01	4.231632680835958338e-01	1.622735163775802736e+00	1.411886133281192723e-02
+2.170542045884753080e+00	1.649943693366306696e-01	3.719614864008544686e+00	2.628731378385484874e-02	4.659863887863504139e+00	1.953928332749399477e+00	4.030765489154215508e+00	1.884789318558428350e+00	5.218984001391103966e+00	3.659090632923160946e-01
+1.056590580826244263e+01	2.862455750574361968e-01	9.503624324612436336e-02	1.127223921843998111e+00	1.774286440361028117e+00	1.354483262491954498e+00	1.915978523368969677e-01	2.412656854678067564e+00	4.504271337259460495e+00	2.537074724100759626e-01
+6.043982956924061511e+00	6.522158466617089707e-03	2.382223597003132909e+00	2.938800347127498824e+00	6.293807419434965800e-01	1.252993652180068862e+00	1.825306399812175462e-01	5.606017677912336072e-01	7.204927649424960556e+00	3.160779924731268453e-01
+1.156234306026847136e+01	2.625389283153459208e-01	9.188016061419135294e-01	3.956596395967132196e+00	1.157796919074650654e+00	1.652334566517626113e+00	1.741466103683401379e+00	1.069893612030600405e+00	6.560230433252789872e+00	7.063619376068841493e-01
+7.867290877658208004e+00	3.150568657612348028e-02	3.208635889352626780e+00	1.260684268789601381e+00	4.422562013782658674e+00	8.540116238383288483e-02	3.831517191823027968e+00	2.753952785268408032e+00	1.024925082683439470e+01	5.986830995941015976e-01
+6.908975429542424607e+00	2.465841896031346625e-01	2.002845859032076348e-01	3.450171987307428356e+00	5.654276553583914566e-02	5.888214584539321894e-01	2.954089858527504830e+00	3.998447426773437119e-01	8.565177675858345552e+00	2.149680205088763119e-01
+1.440579812308176999e+00	2.654515414198045306e-02	1.135165258992570925e+00	2.417274957088451526e+00	3.433882671585630764e+00	1.266388978164296741e-01	1.387836349982975115e+00	2.496160923818382393e-01	9.660716199882918076e+00	9.465672103847755370e-02
+8.782530926087384060e+00	2.393484704828878762e-01	3.968161715697180902e+00	5.364616924126802688e-01	3.825267183065478305e-01	1.180409496794958990e+00	6.081207848146038764e+00	1.796741281632131981e+00	8.120621615047823871e-01	4.720230640647659071e-01
+2.467121281489119689e+00	2.669775602143918958e-01	1.920947605339624609e+00	7.436975019510371432e+00	1.718195053919568949e-01	1.974195934046878431e+00	6.198510534051498766e+00	8.831966827916776230e-01	9.463134124520076185e+00	6.678848326528338264e-01
+9.247156276444975731e+00	7.001843581120269222e-02	4.115753916944164814e+00	2.041110834822566389e+00	3.904491445485749335e+00	5.587411667288546546e-01	4.848061119307658373e+00	5.410753659731812881e-01	1.022435181440595420e+00	2.642349075966086147e-01
+1.366546319900654893e+01	2.924851229239786368e-02	8.435137812387760725e-01	7.697721002565737436e+00	4.954980248261398046e+00	1.503394228824324497e+00	9.547207340156207422e-01	1.149510349323715541e+00	2.550282953574176226e+00	4.858288944991739911e-03
+9.937610741476689213e+00	2.759728949218902619e-02	2.479287724279272132e-01	1.845578662935752234e+00	1.148725236157810414e-01	1.874378505644074044e+00	4.577898001225015001e+00	3.157179950007004132e-01	6.572428732526478967e+00	9.817710114629516482e-01
+1.335840810844957005e+01	2.901657393525319262e-01	2.291037384835683088e+00	5.523192339535359707e+00	4.263209114304837399e+00	1.850890820836459616e+00	4.248656287099825235e+00	1.437415418687666868e+00	3.969903039271164946e+00	6.444202791760184068e-01
+4.192252523941295550e+00	1.550577732890167648e-01	4.335142520595742788e+00	3.915362307244128814e+00	4.174447531315469639e+00	1.464441590023846773e+00	7.659465632505535471e-02	1.119809765001355650e+00	3.868034385081745352e+00	9.191814708483783125e-01
+9.950321370165964296e+00	2.416678064761437206e-01	4.452588664798807550e+00	7.980962757329372437e+00	3.576862137763775706e+00	1.878820577509221490e-01	5.810484732429623378e+00	2.818609720584823730e-01	1.043698983315975104e+00	1.320478448637341096e-02
+3.344531274564604306e+00	1.638261678427317125e-01	4.752068092969680357e+00	2.056633949183845012e+00	4.059152604022060906e+00	1.323178175655694844e-01	3.634674435700454787e+00	1.332248033612655647e+00	9.750778647413559597e+00	6.571876845461107619e-01
+9.313380312895419566e-01	1.327820969301612697e-01	4.452531935545533059e+00	7.853262599622130580e+00	3.589473016958418139e+00	8.655756972988859488e-02	5.281257933398629589e+00	2.878400204002617091e+00	1.530027895964270801e+00	1.410499062384990010e-01
+1.093728606033430673e+01	1.496850021362609306e-01	3.004629347358933611e-01	6.530279224244562641e+00	1.505697907373285682e+00	4.359977775952421641e-01	2.822161658968949549e+00	2.723579655675674616e+00	4.517358372113638332e+00	4.762740241746520331e-01
+4.126188688687098427e+00	1.785762566399805643e-02	3.827008942348112619e+00	9.799394438157742471e-01	1.190849990888681598e-01	3.586378183201568426e-01	3.473362514860903971e+00	4.907195619794019192e-01	1.082846597286479273e+01	7.338269154915928461e-01
+1.007171327046501119e+01	5.302731525826676236e-03	4.442829328707308179e+00	2.714799527596614137e+00	1.292804717399506087e+00	1.414685076132262687e+00	2.183756095362196170e+00	2.808301856380406747e+00	6.780760473974004832e+00	3.843291080113009350e-01
+1.167399330294518034e+00	2.464035477017099129e-01	1.402426005668689912e+00	1.150248779473895366e+00	1.522194921445518379e+00	1.381162689722999160e+00	5.435001694932432237e+00	2.392787620207044608e+00	8.988227846318018521e+00	2.269266068514101953e-01
+2.894390818868963322e+00	3.501963832568973783e-02	4.740386916137786066e+00	2.285802360415889467e+00	4.429435669874964177e+00	1.149591990037637013e+00	6.309683324067085763e+00	1.754139509852320478e+00	4.996119690460893992e+00	9.633675391347560346e-01
+8.428267793741513358e+00	2.975519105867783232e-01	2.738097388061362825e+00	2.713125296218298343e-01	3.983255273469377133e-02	5.832842143111387490e-01	9.222455190282240478e-01	2.660850484958854789e+00	8.646005308057572947e+00	7.723493728139844139e-01
+1.060844902950677238e+00	6.766848820457482816e+00	2.571544822122694196e+00	1.301410412620856993e-01	4.939075861095859388e+00	1.944985184657319843e+00	3.071352746565616521e+00	2.952877773172982057e+00	5.413409368896327400e+00	7.835805833878990434e-01
+2.676604664379357956e+00	1.015453292439209010e-01	2.796370894748441138e-01	3.783589975482610779e-01	1.615586647489880567e-01	1.462415378364156915e-02	2.849926147228915241e+00	1.990766760230136434e+00	4.608386640197815742e-01	4.913530386323300414e-01
+4.928622521604909323e+00	4.776267137399662133e+00	6.738922765189601094e-01	6.737755331040837792e-01	3.523416780856081232e+00	1.987539862409304803e+00	3.322034856620849741e+00	2.129310622060661684e-01	6.600863177109800084e+00	3.085759141198198829e-01
+1.785331680394218745e+00	2.003420991178347599e+00	2.900766376377190259e+00	1.569593908723631026e-01	2.959800314520040221e+00	1.332160482584163530e+00	1.574742777579251296e+00	1.474370607477436312e+00	5.161994254879935795e+00	3.573259702589637321e-01
+2.060566256973487498e+00	4.064024192901496413e+00	1.638291229583957342e+00	5.291965806117688853e-01	4.147376271759576127e+00	1.871711279692187535e-01	1.592167092938667761e+00	9.832800064235791559e-02	9.093859685572962093e+00	2.871171512211987409e-01
+2.802432727066052731e+00	4.152681619178160588e+00	3.519899751123976461e+00	8.767373897844914499e-01	1.497015974178587117e+00	1.178656320463950191e+00	3.590298473162238935e+00	1.121258186897473141e+00	3.999535611626285458e+00	8.235737108516272276e-02
+7.793345816216601385e-02	7.119616496871206124e+00	3.263514453318660635e+00	8.767708034167319697e-01	4.367621895504575136e+00	1.584033235097142001e-01	1.753016637889057261e+00	8.806782211544889050e-01	7.660523694529250971e-01	2.421807599200342942e-02
+1.650807741747343904e+00	6.676864939234765650e-01	7.158398866250768044e-01	4.208260033098576436e-01	4.020367360514349286e+00	4.706317984761432260e-01	2.612129318099994713e+00	1.171799316797829782e+00	1.085072636057884621e+00	9.463861023983399789e-02
+4.058767043920196826e+00	7.095671728991746363e+00	2.700297896325542180e+00	1.613772848781617197e-01	2.851637748082545443e+00	8.849171780970555012e-01	6.469348385074413343e+00	2.409711450205083150e+00	1.227804052323222539e+00	7.245949939817655228e-01
+2.952642653678108964e+00	1.886946931087596369e+00	2.473081195542755850e+00	5.362099416315594391e-01	1.419640415165426717e+00	1.903766283029795225e-02	6.887126590983250196e+00	5.688537003635961709e-01	3.423639520476965625e+00	4.938406145277395298e-01
+4.737499561105624757e+00	9.968754726664579291e-01	3.774680347781333989e+00	4.881731108025820776e-01	2.226755900717518610e+00	1.171985346633335423e+00	6.671227239296362299e+00	7.695013938605947956e-01	1.042858080409883925e+00	8.496226110071675164e-01
+5.325414666879468850e-01	7.018918782479677709e+00	1.608471156325481655e+00	5.862836927685144417e-01	2.672078926166417290e+00	4.874633009746880408e-01	2.209336683548580815e+00	1.743210228627345870e+00	7.300458180698917943e+00	3.180482670802708967e-01
+4.057375329048528378e+00	3.890521688612440965e+00	1.762635121494908752e+00	4.315974671518035821e-01	5.740273295456138625e-01	1.149928423510549580e+00	6.645380061650246084e+00	1.716175648811826715e+00	1.817203695888137016e+00	2.279386936010485698e-04
+4.819371354809093511e+00	1.986334992808725719e+00	4.583363789373673391e-01	3.992089559255137532e-01	1.511460483775759922e+00	1.660344970547767085e+00	4.073543600444102353e+00	1.064209283618703594e+00	3.163140007477470483e+00	6.883692634412575950e-01
+4.822861817330296397e+00	1.286480435582614712e+00	9.370740372064094714e-01	9.673040603625950862e-01	1.291594883259390425e+00	1.038459954771715044e+00	6.225515359923287306e+00	2.457650031437864868e+00	1.095606761972162424e+01	7.337655625192263109e-01
+1.388473219412195148e+00	3.816243179861769086e+00	1.442597880348657213e-01	7.222240964942758801e-01	4.062612762646490516e+00	2.874594600573110448e-02	4.710301168894129198e+00	2.970156004203755362e+00	1.950104669992388517e-01	1.213572941978990904e-03
+2.095978035147009866e+00	6.873067538223646133e-01	1.319090499678944095e+00	6.916304149708443205e-01	7.253452473503479991e-01	1.948495719716487162e+00	1.265245191334455122e+00	2.196421971177734633e+00	5.301304620523421995e+00	1.584176397116228152e-01
+2.979233187060020605e+00	4.956587004286280518e+00	1.688392721619113690e+00	7.532059070534440348e-01	1.887662047040422619e-01	2.805001211605031575e-01	9.824130470675018767e-02	2.962504536056278504e+00	1.072663709115177610e+01	5.013157111486933326e-01
+2.788962486475272673e+00	5.115971710107624304e+00	5.660005924878737105e-01	5.560222984350132736e-01	4.858087680745722281e+00	7.725785191495309068e-01	5.845998556520539680e+00	9.987118052417467418e-01	2.295433607375663598e+00	1.326889295282640546e-01
+4.557887825045138008e+00	2.247326501433015267e+00	1.308921194395065601e+00	9.753282383497733354e-01	1.730029373381708480e+00	5.701790289472219442e-01	6.352823959792196495e-01	1.147755410972870260e+00	5.520736993109069957e+00	9.774412642381167249e-01
+2.045438792050340293e+00	7.561046060398181723e+00	3.522811502396113070e+00	6.976548466048726471e-01	1.767378331045376783e+00	1.924976381949984106e+00	1.266202415138989235e+00	2.046343295450062438e+00	2.076108124751949724e+00	9.755719638581734632e-01
+4.868031211031395245e+00	4.182997028123785377e+00	3.669697829412847945e+00	8.683837530035829477e-01	4.809874760362171209e+00	7.146311160463343359e-01	4.140379287730196367e+00	1.871204810554052189e+00	2.927063933787202199e+00	1.781360315397091387e-01
+1.447543209935399089e+00	3.894063908911108207e+00	7.167099377268382199e-01	4.816816841673178251e-01	9.437790189709754918e-01	7.136790168415698687e-01	7.984261778454281311e-01	1.267997907136523494e+00	9.241262771031886203e+00	2.264187036508533701e-01
+4.492110383454359201e+00	8.423313033097198144e+00	2.601809612520743276e+00	1.877570508378717529e-01	4.720029746759013101e+00	1.700776811458877491e+00	4.379722879784793577e+00	1.286338778406133088e+00	4.897457407195897616e+00	1.463406370506259435e-02
+3.397516181353549269e-01	5.474743776680605079e-01	2.792881561416644676e+00	8.613177248107457551e-01	3.084547950087940915e+00	4.295939916996971419e-01	5.853057398141740642e+00	2.886701648573187562e+00	6.513812374119662962e+00	3.683927726861278007e-01
+3.123566792546268012e+00	3.586380650299095585e+00	8.986175900527371141e-02	8.341062939442902557e-01	1.688951864196345243e+00	1.079977094532845516e+00	2.714109110128215541e+00	2.839337530931991704e+00	1.218807949172431293e+00	7.915582761215033125e-01
+3.141700155842215736e+00	1.204667402016855737e+00	1.916351797648172095e+00	1.536958008100480777e-01	3.425634957816583004e+00	5.173225973429682600e-01	5.259306090770171416e+00	1.917625582619049140e+00	1.075917707540842727e+01	7.474619317582414935e-01
+1.334327177064201120e+00	1.275405518944317640e+00	2.568022248402845875e-02	8.801880626520913742e-01	4.297141956880407010e-01	6.992528373701238920e-01	6.553594825126268120e+00	2.898861772480366694e-01	5.921191598075131957e+00	7.429605987984524429e-01
+2.771024127637877310e+00	2.706234632094140391e+00	3.850437535134872746e+00	7.057112116190219497e-01	2.533276847739223125e+00	1.771636964438744632e+00	3.080408803160064934e+00	2.070444129927007104e+00	1.189083402543695822e-01	8.109099497493107433e-01
+4.429566490061738193e+00	3.870904287199198102e+00	3.908477669001917398e-01	9.583973851447378234e-01	3.595638219458183737e+00	1.789184903519657199e+00	4.609156216292533692e-01	5.237161367552056745e-02	1.898196986141101217e+00	4.720040556194841086e-01
+6.236410129144742687e-01	5.677002709722877682e+00	3.495932245976291153e+00	1.724677152701515892e-01	4.797077834826430021e+00	1.896003719678501565e+00	6.666830956711757850e+00	1.917567880520230972e+00	5.078350320183913524e+00	1.619340143708598578e-01
+3.981295223585228360e+00	4.293445223367310604e+00	3.800375528545455239e+00	7.832850811253453394e-01	1.697773609172191778e+00	1.107703012941383403e-01	5.882009120179721329e+00	2.377183558175493872e+00	1.016804229228126033e+01	7.498926347264457393e-01
+2.600915763215731591e+00	2.624416429045383747e+00	8.727133245527625149e-01	8.321007865109071311e-01	3.120145967108394647e+00	1.706568507429746751e+00	6.924345627676093073e+00	6.502709038408825970e-01	3.057969519658665369e-02	1.561628673971688919e-01
+2.990086920642259782e-01	7.612290167957059595e+00	1.048504608494758017e+00	4.243228873493485320e-01	4.221314528605337735e+00	9.278772379775166446e-01	2.349514943790474764e+00	5.255177442909533259e-01	2.143073036231040618e+00	6.074576627365183557e-01
+3.899808502523676168e+00	7.299461473479367690e+00	1.095057277835178589e-01	4.079961930720432672e-02	1.621607684387396553e+00	5.256243374272819846e-01	1.486732148961382727e+00	8.027452089786548672e-01	5.594994509646422642e+00	2.574106532041128270e-01
+3.023696564487597271e+00	7.074713638764114521e+00	1.522391750286943957e+00	6.952922812547067677e-01	7.386888415687059561e-01	1.904294326226209977e+00	4.712216395755660869e+00	6.501349564540930270e-01	9.969785446062145340e+00	7.550945275312674898e-01
+1.543379307984599214e+00	6.829479885114219329e+00	3.204292732322415738e-01	2.897902351539538568e-01	1.968018159493589270e+00	1.312508995789594346e+00	2.708863147028642615e+00	1.956603963514276678e+00	3.858471150102140257e+00	7.226613324983187647e-01
+2.530459391858276952e+00	2.304294321666098710e+00	1.613466033140124711e+00	6.299550659672248365e-02	2.557517824305648446e+00	1.403315613527134742e+00	4.690924014732587466e+00	1.213893307285142553e+00	5.916330575871066699e+00	7.792844130394782631e-02
+1.928958533970001055e+00	7.096037593440583713e+00	3.299214390196449909e+00	4.754507764571461736e-01	4.595032873457040701e+00	6.269257509934247352e-01	2.676009205554832349e-01	7.111427300413641905e-02	2.311387519441321370e+00	7.607719658377676275e-01
+3.049750470993188411e+00	8.335532589038272988e+00	1.373761433026730483e+00	5.780521456251697110e-01	4.958750575379713865e+00	7.704485760265957772e-01	5.295783844382295769e+00	3.106174373836714242e-01	9.119442286621413984e+00	1.454365312917409447e-01
+1.446643143411851629e+00	1.641242803584559207e+00	2.373684364313463746e+00	8.733542221566478192e-01	8.350946657808716589e-01	1.827085120550399511e+00	1.796517849664760247e+00	1.705076201114264389e+00	8.296752770476137684e+00	7.700551493773489797e-01
+3.366516415093608217e+00	5.615004677240882813e+00	3.229775535441422818e+00	3.310459399464378283e-01	1.088020205681265562e+00	1.380181017003498711e+00	4.110524683994838391e+00	2.481465439558942165e+00	1.115804813826559716e-02	1.616539324263285282e-01
+5.339002830872147110e-01	3.839053681064814594e+00	2.333489534725208259e+00	8.432771114289439041e-02	2.102522728545276109e+00	1.709023161576461725e+00	6.917442654625217680e-01	3.462567460748395831e-01	3.782775783847899476e+00	1.071179999213418554e-01
+4.038277759901608199e+00	6.782157613841707011e+00	3.464511868539065453e+00	3.371376637740397531e-02	7.730806184225119670e-01	7.252544893421604133e-01	6.872132022199162193e-01	2.814389010091199683e+00	3.447296041223141039e+00	1.893497025003773038e-01
+2.731398993279879228e+00	3.676456867956779728e+00	6.413375855037339868e-01	6.707794732330785159e-01	7.635992572959304781e-01	6.062167860722607049e-01	2.063055943432477779e+00	7.336308576363859135e-02	7.472848003733477462e+00	1.287928899982984099e-01
+1.453840364993632228e+00	9.062482719258549979e-01	6.882205879952656424e-01	2.182327096949714385e-01	2.024410040526736232e-01	1.295642963260670966e+00	1.330150396283942271e-01	1.736145709990954078e+00	6.112044371929087561e+00	7.027845512380351822e-01
+1.210119312939017533e+00	7.630467468868430414e-01	6.359188990088751048e-01	9.883815275234506537e-01	5.934758597976730332e-01	1.831489821220492642e+00	1.310176158219893416e+00	1.258981371172946284e+00	9.653015748557789877e+00	5.710674551489269835e-01
+2.041872616241314109e+00	1.223997932765245711e+00	3.592523080817271008e+00	4.957374306008986986e-01	3.949335508277540985e+00	6.291860827029358560e-01	3.113717329409727341e+00	1.389609354520767059e+00	4.805115070835719315e+00	1.445284193078527624e-01
+3.575248649915818255e+00	8.905544750152357025e+00	2.966905439089235763e-01	4.679521298887630021e-01	3.547669603089278034e+00	1.578738230960443634e-01	6.270181915307466269e+00	7.487332787197820139e-01	7.886412615362010214e+00	8.133212834800832569e-01
+3.898698540050224981e+00	5.123757154565923067e+00	1.178107462557977492e+00	5.520746813096077954e-01	4.592951289518920621e-02	1.171909103767588389e+00	2.927267136454773322e+00	2.552935710110848078e+00	8.342615367499133683e+00	8.906236190999595737e-01
+2.108021535094918519e+00	5.560002888746932825e+00	7.214745149145316283e-01	6.765900158851164692e-01	1.023718975037019030e+00	5.744441677430218718e-01	2.485628740508086798e-01	2.832571092367738075e+00	7.596538198565406574e+00	5.914159707314822212e-01
+4.869067486253129573e+00	2.921814738200520623e+00	1.610700494896931900e+00	9.811040652599956058e-01	1.628599890640177072e+00	3.374886126447058654e-01	6.583180705038420122e+00	1.022109354202180187e+00	2.448534262177565424e+00	4.087328272831077802e-01
+2.420585719788033607e+00	4.954656265767008350e+00	2.231237042402161030e+00	9.790433090249440751e-01	3.332677810332588031e+00	6.561609460180721776e-01	8.270420803686340250e-03	3.528899693185899578e-01	2.741096663606271022e+00	1.378071713616224603e-01
+2.754906745122323031e+00	9.676710641503563082e-01	3.754240456912557367e+00	5.250193878563741423e-01	4.142397075465397371e+00	1.381722150840025476e+00	1.687280922291270358e+00	1.067933655487581790e-01	4.015412949008595467e-01	2.082096208576281748e-01
+4.888597998415178836e+00	3.014616594535894478e+00	2.837447468287521257e+00	8.929321846889970438e-02	1.593208978592462355e+00	1.432010333523069168e+00	1.362472022175857500e+00	2.023735351705774121e+00	8.040906539490745075e+00	5.333388283334727920e-02
+4.672702450324211831e+00	4.692525988250669577e+00	8.114199473892678149e-01	6.638642607029177078e-03	4.763672448857547703e-01	4.352861108378174304e-01	2.409652310337002223e+00	2.792543818597630345e+00	1.519095639879380233e+00	9.479421782536426866e-01
+1.390626568565362930e+00	5.795403282043417370e+00	5.780808828259962340e-01	6.537836869813318241e-01	4.342143903361439072e+00	1.328515786636720097e+00	5.020182370459501664e-01	7.291349036975974807e-01	8.463574453670117848e+00	2.741521287108358296e-01
+4.274877195091104198e+00	7.028513230726042060e+00	9.529844002173715189e-01	7.084183854011977255e-01	3.053184380355488781e+00	1.621308385572049371e+00	5.106963486979936295e-01	1.138778733677962673e+00	1.513533548987051613e+00	4.128356930393282109e-01
+3.624662754171856349e+00	2.618325768156440247e+00	8.047122068257333005e-01	5.662174560363387732e-01	2.262254632401385024e+00	2.208252441574161118e-01	3.245715851864902746e+00	2.088639737645078043e+00	6.869899939030401192e+00	3.833214771865146586e-01
+3.575490259832633022e+00	6.910866830500633995e+00	1.717562694591180605e+00	1.419119188604517356e-01	2.158793574868946408e+00	1.330907174346497746e+00	1.533175421100802627e+00	2.256864015809196822e+00	6.590486673228634018e+00	3.476457046878822510e-01
+2.763065587192560457e+00	3.755485876415471047e+00	2.937747270412279121e+00	6.776105276205796057e-01	1.102731743682296539e+00	1.671531786654884888e+00	3.194714296103120077e+00	1.832857589884217653e+00	5.267733310304743455e+00	9.498442020616374437e-01
+2.180692874176391349e+00	4.999495552064338710e+00	1.702888717024749976e+00	6.344642518347354265e-02	4.735873322905289839e+00	1.719599405515690238e+00	5.875922789677373359e+00	2.656004507669272208e+00	2.799792892846766357e+00	9.027899878643840248e-01
+1.498261411619749683e+00	1.440449178719120038e+00	3.793107505633872112e+00	5.092674831952860304e-01	1.034353051067818274e+00	1.569113426588875537e+00	3.771789988063644294e+00	1.328199351063930056e+00	6.225317682380070217e+00	7.271156352129748424e-02
+4.315942201307298998e-01	7.981247257459226141e+00	1.336073327630646901e+00	6.032379115696278049e-01	1.219125442838360662e+00	1.456458998083272505e+00	4.742675091300004864e-01	2.983575448822485932e+00	2.556602655333460650e+00	3.088219887595959889e-01
+4.976840216515537207e+00	5.358430906005960281e+00	6.245523297173987132e-01	2.545868317479035303e-01	9.742520666157300502e-01	1.182300247768848678e+00	4.654689808343711599e-01	2.709837605689326967e+00	5.452250849671616884e+00	6.515934524029750596e-01
+3.027576266208495070e+00	7.465991597741851393e+00	1.312320289625385605e+00	1.808157433154811145e-01	1.175791304316830654e+00	1.202757075039505796e+00	4.623859408182577901e+00	2.068219070878894339e+00	3.538760796420987731e+00	3.191906695569177721e-01
+3.186056897890198414e+00	7.744541371714031008e-01	7.520333736299331662e-01	2.346946453265875698e-01	1.939963125002858169e-01	1.102878394022341890e-01	2.892927495768838675e+00	2.226458507448537905e+00	8.040128531690038827e+00	2.426324783043983802e-01
+3.010003224219015117e+00	3.421867958770924645e+00	5.220586116504088992e-01	7.415249008538514763e-01	7.940706222571397577e-01	7.440532342345227423e-01	2.342240626863175024e+00	2.838160438547582931e+00	6.965119358930052407e+00	7.994713698433695281e-01
+1.073150862609050904e+00	5.708596454024827516e+00	2.122872605431856297e+00	3.879631915218560989e-01	2.315027253640028482e-01	9.412461989804450369e-01	1.359498791221620095e+00	2.799047819563208250e+00	6.092966540584811241e+00	6.446687874293053344e-01
+3.458319946541093426e+00	8.965823126867210391e+00	3.472581333838808959e-01	6.522372110328005235e-01	4.295269312487254298e-01	1.454437017783624775e+00	1.026918893392381404e+00	2.262804025523970530e+00	9.073155138943480580e+00	5.447998210661599838e-01
+1.800815055028895939e+00	8.579365052829048821e+00	2.541477396290954260e-01	2.397851878036376672e-01	2.759387911051200515e+00	2.947509094624349846e-01	9.515331736896013304e-01	1.743545886323498051e+00	2.156608211313451928e+00	4.566652586160673266e-01
+4.296243379948193386e+00	4.589758336708962716e+00	3.455005162367942528e+00	4.939154835666571541e-01	3.524083380370812701e-01	1.332555812949300478e+00	6.993865493780931253e+00	8.776996462395436627e-02	7.306362098970477170e+00	9.464560745296662914e-01
+4.270913173204858637e+00	4.311969821782867029e+00	1.235273021108498348e+00	1.363559640515099591e-01	4.297344674974395984e-01	5.399230520058395655e-01	4.519230441177060520e+00	2.223130570132609840e+00	2.003741389920957516e+00	3.000032742213165626e-03
+9.696108658633073585e-01	3.029563706398431311e+00	3.435820656827302866e+00	2.808855835523164091e-01	4.964076077685161792e+00	7.870171799195373463e-01	5.160598369656576700e+00	1.615991618531589236e-01	4.168851015871288368e+00	7.207503227396369061e-01
+9.857942554944587599e-01	2.241766796766214931e+00	1.094309073772436580e+00	6.171782099877928252e-01	2.164628407777784425e+00	2.967518925643408689e-01	6.897409292902939804e+00	9.171811935351321665e-01	5.578640625010033105e+00	4.324319825361930825e-01
+1.359323138290471800e+00	4.701967131801875199e-01	9.050689110105434310e-01	2.965795773291597381e-01	4.828973782133643589e+00	1.476756345192362208e+00	7.663619972606046105e-01	2.684843417506681096e+00	1.573201667699820838e+00	9.645271733524597435e-01
+6.432401347291311078e-01	4.025095134154975440e+00	3.309291976114321798e+00	3.338009834108435214e-02	3.084159326974440951e+00	7.597149110462917410e-01	1.345625764516446221e+00	1.543092560770717991e-01	5.232739162356885299e-01	5.344642145090121588e-01
+1.893475424698252141e+00	4.846915016695094280e+00	1.017577622922591463e+00	1.454379078590283259e-01	3.217212279193671698e+00	1.534170379797259010e+00	1.500675184724801436e+00	2.195608560978177248e+00	7.840082621731490065e+00	9.423202685727883887e-01
+4.916089799921347314e+00	3.077404880415298694e-01	3.341398797528096321e+00	5.790895913656423089e-01	2.109983576554563811e-01	1.849697544503176694e+00	4.269817133289845579e+00	2.772678110702025744e+00	1.597462715869091276e+00	2.066184857660853247e-01
+1.781292690950635027e+00	6.637468747977988137e+00	1.464153733648121847e+00	2.712309115523448000e-02	3.857947122149120389e+00	1.296349898299295811e+00	4.044166349775594860e+00	1.645944211560824044e+00	3.273391468292933260e-01	8.630632196811667267e-01
+8.171982958624174342e-01	8.696289780825402715e+00	3.455462363279165672e+00	8.945385963649863381e-01	2.164679436470390295e-01	8.639775191003977728e-01	9.436930182918934484e-02	2.039374760416071020e+00	7.722056720400328089e+00	7.888445971939340717e-01
+2.023302408059274970e+00	1.310618978377030786e+00	3.155456554057674889e+00	5.010819036675714377e-01	4.538509162145537346e+00	3.704064878007766737e-01	4.328283814895621973e+00	2.633257692097203950e-01	2.643788071556784747e+00	2.432016064719373372e-01
+8.102427541700596425e-01	4.125234289775202789e+00	1.517297146146757925e+00	7.389238601698774334e-01	4.067571276286313653e-01	1.735885673249011996e+00	2.664598954585144064e+00	2.602674375990287281e+00	3.116895494600373961e+00	1.762468820849705242e-01
+2.621132195703196111e+00	8.425316661290198184e+00	4.994480734649351383e-01	2.844736464195067871e-01	2.275884645154663488e+00	1.142221825149852377e+00	1.212554029681834233e+00	9.240986804861089920e-01	2.170291307332780084e-01	6.849869128634944460e-03
+1.174342088277471774e+00	5.577399746499002831e+00	3.951771200670569417e+00	5.006730198987506819e-01	3.596816929922009187e+00	4.994925384211443831e-01	4.413151350909869208e+00	1.418601174288871114e+00	4.839712146933547565e+00	9.760601828982439221e-01
+1.878213562249231749e+00	4.684555617152866169e+00	3.889299363513806895e+00	2.336972704101780707e-02	4.196904367361877775e+00	1.636484945202322683e+00	6.120210824336769662e+00	1.961635110777701918e+00	9.569470113865142125e+00	2.306199899979174406e-01
+3.431803129291879362e+00	4.012460279071989433e+00	3.786995576138590280e+00	3.401138113223747261e-01	3.171776225702972241e+00	5.533791211625698203e-01	4.380475363051796300e+00	2.758179724465671612e+00	8.238428864941681695e+00	1.611340397700082860e-01
+2.349694539920080150e+00	3.425362999949447929e+00	7.467879539582895632e-01	1.309946362139290388e-01	5.805916871698890702e-01	1.657140208444111584e+00	2.281415527558649625e+00	1.158152241249966297e+00	4.945420862933350925e+00	7.787419149878671565e-02
+2.871323242818877297e+00	6.534521291113140506e+00	3.096514924644943711e+00	6.667111185354958058e-01	4.581752374807475547e+00	4.565652293776831794e-01	6.933935392164753964e+00	7.018283743141698894e-01	4.265168364923495936e+00	6.901545552296244335e-01
+4.035690704229409675e+00	8.932606707872219332e+00	2.440117375879147676e+00	1.749555891357814641e-01	3.388836934377787546e+00	1.096540733110390020e+00	4.167898589768252116e+00	1.510982178698893286e+00	9.444730460473605760e+00	1.685190174932924556e-01
+1.895368854776446899e-01	8.111771833719306768e-01	1.496918109897968030e+00	8.739513189132178672e-01	3.271337369168346054e+00	7.030233822128264531e-01	6.401423581208871560e+00	1.029249503237427366e+00	2.009554833626514103e+00	6.565754492988090663e-01
+1.951751457247066224e+00	5.044712160044664273e+00	3.421432883480826970e+00	8.109581787816579901e-01	1.128731846702503816e+00	1.209060606591956688e+00	5.696978360178965417e+00	1.330520244258292406e+00	2.811362781860978100e+00	4.875439923021647193e-01
+7.946496095974958651e-01	7.959850102243462011e+00	8.886633323027690601e-01	8.912678301211978127e-01	1.506339073727302580e+00	1.772468056111744072e+00	5.209003007333696367e+00	1.910008906957938679e+00	6.469615179120823401e-01	4.738444478905338153e-01
+2.923037361500433029e+00	8.570876002749072242e+00	2.708321601975356430e+00	3.282691449572501252e-01	1.618103487581618039e+00	7.910655433987572316e-01	4.227704764846127006e+00	1.475926172111246215e+00	4.356475937759435091e+00	6.465232347181339989e-01
+2.522665440307211071e+00	1.963912550509897548e-01	1.722541801969912356e-02	7.082969529076817983e-01	4.976037482905154796e+00	2.468844590379837278e-02	6.706997326725380404e+00	1.309674497144936556e+00	9.860787708680341268e+00	8.357747879146570913e-02
+4.255818572678702338e+00	6.744878797873571585e+00	1.760033311394217925e+00	3.676055726983046279e-01	4.092141947481376718e+00	1.612470654985766583e+00	5.778498425542084149e+00	1.079514954339773070e+00	5.568696898336217060e+00	8.545812171197980645e-01
+2.446430878144840282e+00	5.234567351584125561e+00	5.715867278950135244e-01	7.003532329827356628e-01	4.568505639054738054e+00	1.943174193938479233e+00	5.764774523003604934e+00	1.937713945979894881e+00	6.298501537697791086e+00	9.026276578818063223e-01
+1.273163398740040364e+00	4.162415703791141119e+00	3.455865592048522394e+00	3.656764736071405064e-02	1.954721520949571190e+00	2.734474567634836983e-01	8.281588142887611470e-01	4.484584054747960957e-01	7.652386767723299954e+00	6.708336181638869800e-01
+3.272573882698126546e-01	2.771858331744477821e-01	3.650874191969583293e+00	8.313793690203402642e-01	4.721314279438468375e+00	1.073305358961462286e+00	5.882774132732964567e+00	2.199287336285580263e+00	3.798152586004966969e-01	5.333129895506141249e-01
+2.307814819672060480e+00	4.104095493932964800e+00	2.489576320635186413e+00	4.496836765063628727e-01	4.578631912903595946e+00	1.329946843468508222e+00	1.683934112573501896e-01	7.002919109156724442e-02	2.471193668340454508e+00	5.351430311502515247e-01
diff -r daece0f27108 -r fe627c026dc6 test-data/pca_input_with_headers.dat
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pca_input_with_headers.dat	Tue Apr 13 20:44:53 2021 +0000
@@ -0,0 +1,301 @@
+col_1	col_2	col_3	col_4	col_5	col_6	col_7	col_8	col_9	col_10
+9.579515262220434924e-01	3.726315038026500881e-02	1.192922318396479886e+00	2.208390853256675612e+00	1.545465138214503487e+00	6.587709093437858598e+00	2.676346016926679283e+00	2.002623182927643519e+00	1.960111203222609788e+00	1.244000891424809074e-01
+5.456295681254983432e-01	3.130646354641937745e-01	6.007098680445335681e+00	3.644816120915847169e+00	1.043692024581632793e+00	1.970255599020762194e+00	7.768759398827200791e-01	6.435887304171391543e-01	4.933655623778216537e+00	5.543893932919085055e-01
+9.341619128268041639e-02	2.558875163847860179e-01	3.752555601842988420e+00	5.559464534602225783e+00	1.010576057771932490e+00	3.077425253345768663e+00	6.807691255479467962e+00	2.571623094724099445e+00	3.153422029415700933e+00	3.550137612924495478e-01
+1.471186662367064413e-01	4.878675188546680030e-01	1.721359340060558907e+00	4.167324499252111458e+00	1.985766726491912326e+00	5.316541920919662267e+00	2.962330081397085202e+00	6.305674360296442682e-02	1.008632361074792705e+00	1.954028793571292999e-01
+4.517113041288302044e-01	2.082857695479499172e-01	2.670306166999409037e+00	3.583860843014433861e+00	3.162463432948562581e+00	7.681858694197329029e+00	1.942922465406477395e+00	1.098113345375552274e+00	3.412374063138972069e+00	4.177369511025994520e-01
+7.303789721527453871e-02	1.033872090825091461e-01	1.795276419298532655e+00	5.719768891948289635e+00	2.977579113876869243e+00	7.844204854679616368e+00	3.384744044988120759e+00	2.998214211517472805e+00	4.270749911114063657e+00	4.257092154152972707e-01
+6.359175475607969918e-02	1.242103944058722265e-01	4.586547825302377923e+00	6.614372405184473891e+00	1.923730124171568079e+00	7.681919225138223339e+00	2.793831391806054665e+00	1.406817959154060160e+00	4.716223047352296582e+00	6.211888780251716424e-01
+9.886982415066914998e-01	4.584180816802013725e-02	2.793304094637673707e+00	1.871823089629541581e+00	2.607924236602056745e+00	9.206004712207825236e+00	4.623226894276893928e-01	2.966776892612695615e+00	1.485603429563761679e+00	4.654181765444357355e-01
+1.741783097973695904e-01	2.446343910749277373e-01	2.534641383481000876e+00	4.584056834431557093e+00	2.890133078107056441e+00	2.823965747627366518e+00	3.034991597984873835e+00	6.600338090755860643e-01	3.752675891901568583e+00	2.970984388064664694e-01
+4.690204907875814566e-01	2.929759132721354575e-02	3.548495454913231484e+00	4.482718753445549709e+00	3.203674397180960920e+00	8.065801814409903514e+00	3.773297073513870004e+00	2.113193009064737282e+00	4.579511767735440664e+00	4.024397631591818403e-01
+6.941135206286030979e-03	2.133769823942091026e-01	6.135694255460425239e+00	5.395845311332483352e-01	1.504596129252289138e-01	9.253872174123642935e-01	4.483882842463830620e+00	2.321382684831085008e+00	2.576937740611303873e+00	1.109827593740932983e-01
+2.746007390787119640e-03	6.638140727681796083e-02	4.108407001279307247e-01	3.212637467080699416e+00	4.920639575099698959e-01	4.018583101004429281e+00	6.796866753550522056e+00	1.316040638035593568e+00	2.620935479341985896e+00	2.976887894006831070e-01
+3.557722260573822348e-01	3.727028444011896702e-01	3.811343797909652054e+00	1.715026071489426762e+00	5.294113011251582179e-01	5.980858755297242979e+00	5.404241675653790544e+00	1.825392885196229997e+00	2.835734218533411788e+00	3.200816860194626301e-01
+3.642510923301112147e-01	7.309428690756680780e-03	3.666945761684001326e+00	2.430979500086832612e+00	3.031996394197797429e+00	2.708093900045675184e+00	7.623775896209878944e-01	7.865319376558289610e-01	4.100162854521766320e+00	8.307551984431076342e-01
+9.927215581748555229e-01	4.537144790675278760e-01	5.145060290158723681e+00	2.151991198713361086e+00	4.862387339994040936e-01	2.589672936803951053e+00	3.398039583724480561e+00	2.809787771375323651e+00	8.864381880832911120e-01	3.331137683083518208e-01
+5.057807499542814611e-01	1.402717130943404999e-01	1.883175574051066725e+00	4.122193241585520695e+00	1.035989381539633492e+00	5.670773882751129591e+00	3.687442345139384958e+00	1.684532121504235480e+00	4.642108569673514928e+00	9.305441677815040613e-01
+5.042353053817258957e-01	3.547819755513703099e-01	4.381967339967321351e+00	3.851122946472437469e+00	5.426308260180099374e-01	7.445193485365448893e+00	1.869875173163507931e+00	8.437445017751898924e-03	1.444658255941634994e+00	1.636723834558136970e-01
+6.808167520848328325e-01	4.889279161325942868e-01	1.800574817853715093e-01	6.987701665160759745e+00	2.781923379343957503e+00	2.993730387785709102e+00	5.946279606388386263e+00	9.945453378009785350e-01	4.532192935084170315e+00	8.927165058296504530e-01
+3.935353669966316081e-01	3.534471776807462939e-01	4.974393735263212157e+00	4.310249343469166661e+00	5.149149535110453257e-01	2.179395632453644538e-01	4.312162502099264882e-01	1.470934059506852742e+00	6.326429352635798420e-01	7.652243996922046820e-01
+5.817478561516450375e-01	3.650895867034674724e-01	1.106136061048209607e-01	6.499764055829857945e+00	2.050813058661709487e+00	4.066454208593111019e+00	1.490387097633062208e+00	1.061815166735420313e+00	2.302831003165218604e+00	2.174825207514023973e-01
+5.048471030130237125e-01	1.082304090000890673e-01	3.175827458076348364e+00	3.693216070028284115e+00	3.700472412642747955e+00	6.197372734372899927e+00	6.374064687621854297e+00	2.768340513420041837e+00	4.661995130609337679e+00	8.584541060279672342e-01
+9.485485718647990172e-02	2.363243089061691227e-01	1.837943546562954555e+00	7.414016988668096886e+00	1.058065255181876108e+00	9.078408743915952428e+00	4.183782154307690959e+00	1.516463707584718712e+00	3.901861889845664733e-01	2.074547211064202701e-01
+3.134106819658126764e-01	1.286339499547863308e-01	5.864729791423263450e+00	8.300357839112765035e+00	1.438336911915669702e+00	6.851092469972995147e+00	1.034029113386152776e+00	1.494799843839271336e+00	2.277085020013195127e+00	3.018026684870551701e-01
+9.974747480148854484e-01	7.417608355979210533e-02	5.576900855119989231e+00	1.085027230184690827e+00	1.610568082058991379e+00	2.681858695518825719e+00	3.483337744351994392e+00	1.340344201458989248e+00	1.785155307177410666e-01	6.064608937230153707e-02
+7.698707959570197934e-01	5.946693939209857227e-02	6.140872236681682139e+00	7.794500275667249234e+00	3.329936732740133021e+00	1.053683186798398808e+01	2.442474922407471993e+00	1.618248604822617676e+00	1.645175154595579059e+00	1.577878407070486544e-01
+2.921978170383732198e-02	2.098764262909763589e-01	2.571849891653310571e+00	1.701437788960805664e-01	3.606271296781285773e+00	6.151967513580181013e-01	4.407426394075528719e+00	2.238674479981308085e+00	2.918338395342272129e+00	2.203405589927943664e-01
+8.185553514857757396e-02	2.904901153328537933e-01	1.884207868724713020e+00	6.379742850556673339e+00	1.316060594347697510e+00	5.742231747444348500e-02	3.293024161175864162e+00	2.070570602227302937e-01	2.323330232077761792e+00	9.345412483474558885e-01
+9.870953766246871597e-01	2.838968068710498449e-01	8.984979565988782113e-01	7.228894867998849172e+00	3.009018931955541376e+00	7.470177361465970556e-01	4.791497852877925290e+00	2.138893706603099965e+00	3.957478162078499118e+00	9.206372464557524005e-01
+9.246986296046750819e-01	2.657540764872973194e-02	5.696958733461305435e+00	6.037947545874237321e+00	2.054547403001988126e+00	4.010012556963714836e+00	5.387264573805446988e-01	4.198151587672391827e-01	4.855865220608975186e+00	3.421872218672002974e-01
+4.891436182199244698e-01	2.957137425921102514e-01	2.092865426966933118e+00	1.304751064715160558e-01	1.857399876731623767e+00	3.929329864582863596e+00	1.890510040380741774e-01	1.808418679622973757e+00	8.962596057614125034e-01	5.071164143964516091e-01
+9.883745014565588072e-01	2.023820478627288422e-01	3.026225500297741178e+00	8.018625010860999325e+00	2.639151190842266015e+00	7.866860471267914434e+00	5.692475791922957384e-02	2.619732708394978626e+00	3.791192865048754079e+00	8.510442750449997984e-01
+6.766386709986900172e-01	3.163942649407911478e-01	6.650890489805712846e+00	9.981354386720189265e-01	8.895093378732830125e-01	8.880441813445409949e-01	5.269510439686068359e+00	2.800971666890863521e+00	4.336136086833519698e-01	6.151838148712087673e-01
+9.395548338899438745e-01	2.274826251714922587e-01	4.841436257692705025e-01	1.951295574220930362e+00	3.688485459922960619e+00	3.319068635438701342e+00	2.622624513963639714e+00	1.773842110030804609e+00	7.017709220203099951e-01	7.273521464765183575e-01
+7.131396196120692865e-01	2.829891666225929359e-01	1.809204461229397820e-01	3.578992630164364641e+00	5.190853847632848073e-02	9.799965411266482462e-01	4.614272826173850817e+00	2.684041190366065699e+00	2.984828522093719183e+00	2.167818654123231381e-01
+6.173854727816793231e-01	1.961753927292946287e-01	4.555255586921068023e+00	3.578807812709182024e+00	8.425821534786881450e-01	7.518481958157543943e+00	6.262313649648420188e+00	2.000433960754317564e+00	1.421954930513873006e+00	8.019504843363266522e-01
+6.671808236429033201e-02	2.702227356574862793e-01	2.867725189486065585e+00	8.406696908068906904e-01	1.816665326608355890e+00	1.069991981228995215e+01	5.100300539865704863e+00	1.022140401782990482e-01	9.935128054149117149e-01	2.217478442404806627e-01
+4.675882156850276195e-01	3.994470533120373923e-01	3.013415758827279856e+00	3.472047851101398130e+00	1.579805042782022717e+00	1.025374950194446200e+01	5.424839982873136179e+00	2.661207445309309083e+00	1.974765823248681329e+00	8.490805254939938207e-01
+7.440465128212200296e-01	3.185879639684230935e-01	5.875377909342210714e+00	8.735053206610791321e+00	1.922533667640191624e-02	2.664137099658425711e-01	3.070429299965248493e+00	1.797362708842066059e+00	5.848813121558199546e-01	4.383864302200134455e-01
+3.550604742260795588e-01	6.083111274854280381e-02	3.404146882195189239e+00	7.733962066765833754e-01	1.176797582398654285e+00	6.871446754170301929e+00	5.361716911609923919e+00	1.576687106248415571e+00	4.355077426059827062e+00	8.607241356371005958e-01
+3.559318532806881041e-01	4.015285662241686593e-01	2.944936440066225458e+00	2.111335278696990336e+00	2.963059944865208628e+00	1.476942511987013873e+00	6.450386812630387823e+00	1.907611126769427123e+00	1.381468577271076636e+00	3.983676385111964979e-01
+1.352222544842187624e-01	3.963201476435085358e-01	7.711539919790504349e-01	5.328381207561713850e+00	1.055305792258381725e+00	7.535148204080714507e+00	6.478550147632057055e+00	2.298107587130767904e+00	3.835975596160178469e+00	7.784889119545324032e-02
+5.656300588103846394e-01	2.408672177953232185e-01	2.222118682804348655e+00	1.584476418959711363e+00	2.205997974729819955e+00	1.320044695544450697e+00	3.093339610296233388e+00	2.078381944707535123e+00	2.709900781033619532e-01	1.998783060846459625e-01
+5.064693576216054982e-01	4.755666724189564487e-01	2.460782122272777706e-01	6.394893884651652982e+00	2.393279335645917882e+00	7.720168766472102817e+00	9.946162988780055114e-01	4.727471826960147894e-01	3.624628763819681243e+00	7.939552686881024002e-01
+7.126511753385316172e-01	1.251084009980807377e-02	6.424546400284762093e+00	1.732715732319492385e+00	3.291228557764222185e+00	5.494632343972354782e+00	4.309920997430734424e+00	1.785238040943090132e+00	1.319325481408312184e+00	3.987762049958972765e-01
+8.564779126009015586e-01	1.551694340859950394e-01	3.000664164177824667e+00	2.121531733172477452e+00	1.929201507138485816e+00	1.091606713354398295e+01	6.352850505658992120e+00	1.942089264915284774e+00	3.099275474026703225e+00	6.349421349719974739e-02
+4.875235919705348531e-01	1.165471825543921702e-01	4.659789198761613349e+00	4.857039597115840657e+00	2.268900869752328298e+00	8.073884519013418526e+00	6.867532374199831757e+00	7.110477929957825793e-01	2.139511718029329490e+00	3.270336697362830058e-01
+2.836462637303897028e-01	3.526582320725804021e-01	3.433435023383186291e+00	7.878111173527013733e+00	5.846439909687783931e-01	1.334584322193443917e-01	2.430764258810069744e-01	2.294882067517025170e+00	6.943377361908681378e-01	6.597104416060772802e-01
+7.138680788986382364e-01	3.866838295049366270e-01	1.938054711638698135e-01	7.776057762563468145e+00	3.843531806920502003e+00	1.169843574403325803e+01	1.317073001264212895e-02	9.974273699007062399e-01	2.805794644206359933e+00	2.846508994495189349e-02
+7.616522995605032875e-01	1.525341083280188914e-01	3.649857814806184653e+00	6.626064326264721416e+00	1.848485344378381079e+00	7.772079961576015883e+00	4.922747697985218274e+00	2.183176272993367029e+00	1.523463352631002188e+00	3.483587959766206721e-01
+6.174650468503004586e-01	2.890466460728707276e-01	4.198548001307910837e+00	4.756162259128787539e+00	1.215478403072798308e-01	1.073564948073302894e+01	2.393378431835311737e+00	7.963541702214833595e-01	1.221126147878501822e+00	3.627422424390945865e-01
+9.041286748679164820e-01	3.692704713445955078e-01	5.969223946468604325e+00	1.400042822911425899e-01	2.167809064247096185e+00	3.466765141422927687e+00	6.104187126619349257e+00	2.815584690378426735e+00	1.472968857439669765e+00	6.815901346942815353e-01
+7.420584650906367763e-01	2.629350119859111268e-01	5.157415509545635324e-01	8.936951265754993656e+00	3.499514727363592037e+00	2.929864868198977401e+00	5.289656243356750664e+00	3.794701199783089285e-01	3.570594754264058146e+00	5.454360810847372898e-01
+2.133902786741366242e-01	4.793775958545365423e-01	7.161025461060221975e-01	7.379345463324211707e+00	9.195588975890780503e-01	8.953799627402657890e+00	8.199588910676803843e-01	1.539214057143141190e-01	2.517268342599753961e+00	8.642672068937873320e-03
+4.189544227294224044e-01	4.677740958591244946e-01	4.169315152475023822e+00	2.198113856746841677e-01	4.442481038742749355e-01	8.853442692179621432e+00	5.871239829017361744e-01	1.642555038182410554e-01	3.779048982357357644e+00	5.353051674182843778e-01
+8.373426461385590214e-01	3.895842710949837251e-01	3.079787615886646623e-01	4.540155876800062273e+00	7.099227726704704367e-01	3.400668992101794075e-01	6.292240435961995004e+00	2.673955045372806705e+00	2.378211157475177195e+00	5.342130779135090268e-01
+3.068274218964279765e-01	3.192479805504804813e-01	3.859914935882651221e+00	4.687340409378260908e+00	1.900386926452752867e+00	8.623061138373001810e+00	1.033494255619878288e+00	8.997549532223314550e-02	4.911877241989344611e+00	9.510774047315817548e-01
+3.988512541926614308e-01	8.003053540870219695e-02	2.122999256011599556e+00	4.455738292615722962e+00	1.825496679610708828e+00	6.531267574926014063e+00	3.480291480026090900e-01	1.582969106163566453e+00	4.938475735461747007e+00	1.906205929699261725e-01
+7.111720075879690572e-02	4.221578019099042622e-01	4.465731829112144169e+00	2.564360848739634768e+00	2.152069261549716117e-02	1.058526283280879277e+01	1.462724588046307783e+00	2.852823949099446033e+00	1.829748887076950048e+00	2.844587557581582660e-01
+2.570944338116505357e-01	3.670273280771254032e-01	2.950100530191855697e+00	1.367357621902863807e-01	6.536517332253071189e-01	4.623777977996077482e+00	1.830565500886062402e+00	6.876079879205642120e-01	2.463356978321902702e+00	1.515963114524383482e-01
+8.196773403442627615e-01	2.439155028749898646e-01	6.976779897465351654e-01	4.021827899216881441e+00	1.629083009450637665e+00	4.895113507734013325e+00	5.420567352065293498e+00	2.923116514593426807e+00	3.675539915878816899e+00	4.820681859745293840e-01
+8.912757210535459107e-01	4.287706082586761891e-02	5.914097580762159190e+00	4.887219470061346271e+00	2.297300243872694381e+00	5.258879986696362252e+00	2.055176485482270277e+00	1.247382108420278080e+00	4.091406116197054965e-01	2.863708277926955148e-01
+9.165363924681566576e-01	2.077777304803083980e-01	6.425359611836583440e+00	8.532229331139205897e+00	1.890984676596261238e+00	4.482242194093222665e+00	4.719123490932191523e+00	9.368865626180848949e-01	3.424737866108997775e+00	3.692933466222642069e-01
+2.844432761355554229e-01	3.487910856226295042e-02	3.288851974081316243e+00	2.493792851872620453e+00	3.245659443981551373e+00	6.119401284154618814e+00	1.629426639698323909e+00	2.146211397792365361e+00	3.323215307906694616e+00	9.689245472658591929e-02
+4.605981453808145609e-01	2.951104911370585060e-01	3.466807508548188643e+00	7.015908445063255172e+00	2.012464416109721377e+00	1.411425132174343400e+00	5.640313420420241641e+00	2.551472185597204234e+00	2.921468481145677476e+00	5.154477507183952145e-02
+7.916776772020003294e-01	3.104882171066662977e-01	3.388406047132061616e+00	6.308076145350703712e+00	2.141204968913200002e+00	8.815894689517723037e+00	1.216772248282482316e+00	5.490792235288116663e-01	4.864033877707442777e+00	3.120795424356880288e-01
+6.520543319958096440e-01	7.498529269624598648e-02	6.546870924616779774e+00	4.313660942542439258e+00	2.842979234248417697e+00	2.708025294292042418e+00	2.101565155836826015e+00	2.779757313928136409e+00	2.766295492770797892e+00	8.242938770130925352e-02
+3.916403442843127136e-01	9.189287972434972973e-02	3.658241649007774488e+00	8.892285269585453733e+00	2.872944719502671163e+00	1.926067804146072149e-01	4.113071146818407797e+00	2.033786884589388499e+00	1.858529211147021964e+00	3.601164330247360601e-01
+6.658899523715963165e-01	1.901452620071845678e-01	3.358563768423928231e+00	7.307834854097906785e+00	2.230102878474681027e+00	6.994441846960493869e+00	6.046922242667236880e+00	1.681346319512073695e+00	4.453953342081854672e+00	7.760263753123383124e-01
+3.544087352095481869e-02	1.736442680483894496e-01	6.358750483131432318e+00	6.362553657877171531e+00	9.438518061698975004e-02	3.241545692161897474e+00	3.216563179949617268e+00	2.048445371027631801e+00	2.592256358497992519e+00	5.871829363638558785e-02
+2.278297361077044059e-01	2.521532626334166971e-01	6.006560048470274360e+00	1.036460639857805344e+00	9.596384576758123508e-01	1.161754935200590388e+01	6.921319460991003147e+00	9.492023907384942172e-01	2.570754313364566723e+00	9.898393847822013480e-01
+7.720727707112930993e-01	4.625201799694809712e-01	6.030038351299404020e+00	1.576843309640901802e+00	3.047360805551709184e+00	1.105183421427205381e+01	6.765794291140618810e+00	2.437125901810530237e+00	4.785718495907587133e-02	8.643252994269234879e-02
+9.816465867962677061e-01	1.040135248373981747e-01	6.269766387849492695e+00	4.587664995870221141e+00	1.471595298862088175e+00	3.137698622087241596e+00	2.585776241619418325e+00	1.094148950727448000e+00	4.787651271957346744e+00	8.537352255282423252e-01
+5.927286689665087760e-01	1.635250692112610871e-01	6.763777563834705298e+00	4.190158816469162772e+00	1.671137433875959921e-01	7.145892561514077812e-02	2.697194722513650555e-01	1.826917897949313918e+00	4.855283909060112535e+00	2.338052822860668956e-01
+1.591435657354911548e-02	1.309457494027614910e-01	4.027555471388495345e-01	3.269356767055083601e+00	2.135361695566277085e+00	3.812187332815673013e+00	3.745269592969274086e+00	2.239168581407309233e+00	2.403445557045992054e+00	4.127895397263747768e-02
+1.138459964891592735e-01	2.477562449655215659e-01	6.711903366433036311e+00	2.328903691994430591e+00	3.619135648902618296e+00	2.643113253632744630e+00	7.573805393623883608e-01	5.638560832013681523e-01	9.866348317250567268e-01	5.321998283901712146e-01
+8.335543816544467299e-01	4.852511675363395671e-01	2.522771010620390442e+00	6.865591808220169234e+00	2.614984821499222178e-01	6.661308755886223842e+00	4.631632858625027716e+00	2.016686176070514591e-02	1.670651928613386517e+00	8.001889580730393581e-01
+3.506629367480429416e-02	7.513357347200738667e-02	1.091955136302876994e+00	4.788485228215831668e+00	3.088440213543260704e+00	3.737602435514551402e+00	2.060091420371281679e+00	1.664114145344917528e+00	3.853021369729443979e+00	3.981246066772909931e-01
+2.941809570016090669e-01	1.951063404831046011e-01	1.053511941997986590e+00	1.626573601758511067e+00	1.073068181672781840e+00	8.246102825125259272e+00	1.229799882190759863e+00	2.046485654174841073e+00	1.887314229128450593e+00	8.477832457224251383e-01
+4.288411318977621844e-01	1.344940483989709445e-01	1.890175735159078219e+00	6.909579416417008346e+00	2.559618997696785136e+00	8.076451885637920824e+00	6.709931254318012073e+00	2.711755694833509533e+00	2.197259152647867975e-01	5.229458751166887742e-01
+7.962785959698204241e-02	4.791664160968215902e-01	4.775534443309384258e+00	6.195144705514182171e+00	1.095816602161203690e-01	7.253215415683056833e+00	3.785863733799192499e+00	9.032344566438499367e-01	3.712953563991834294e+00	3.658749193043641101e-01
+9.925977090507452916e-01	4.226472934729750275e-01	4.233597679026229521e+00	5.373501445983512426e+00	3.029885997105019158e+00	1.948856366491487524e+00	3.580389683767193798e+00	6.128625096828327345e-01	3.195613215016547937e+00	1.498067324162460912e-01
+3.211542228342814109e-01	2.435050065055133772e-01	5.191492939326351141e+00	8.811443367963034845e+00	3.119307887468464990e+00	9.566604729350618896e+00	2.662534653489017522e+00	2.048270692218978617e+00	4.513252133993818305e+00	1.534045164632015235e-01
+1.169841130722715228e-01	4.278805785103458326e-01	3.871981713139157399e+00	2.151432657451758379e-01	2.353074788516758442e+00	2.925683050900422355e+00	5.164172309356634294e+00	3.209672061804553778e-01	1.806778866741765022e+00	7.377245404193294442e-01
+4.920318247135287981e-01	3.980350958273896622e-01	3.553262242278445449e+00	5.660089801227229600e+00	1.130914505340574028e+00	5.111374270657123020e-01	3.594580615302541915e+00	1.377803570003424127e+00	3.733415954722649754e+00	3.790748149557219282e-01
+7.141555225002506058e-01	1.431060744802945539e-01	3.598062535886497493e+00	6.486014070157204436e+00	1.222357165551017477e+00	1.071797470084096382e+01	2.966727560848184453e+00	1.433401409283499239e+00	3.761066588446803749e+00	4.577426600413826430e-01
+8.846645270352224699e-01	3.643370058500688402e-01	3.776149232858763138e+00	8.514771398046235618e+00	1.333408164935339979e+00	8.719464634044745921e+00	3.680392243042417633e+00	2.323141187069376112e+00	3.968683511008049170e+00	5.273710580573882645e-01
+7.563601909975703474e-01	7.110020268898137319e-02	4.754320885232382388e+00	6.715112169672266873e+00	3.900410613846503338e-01	2.374351342693231892e+00	3.949776593352507525e-01	2.793668022917081828e+00	3.590934831061467047e+00	2.297057403281292931e-01
+4.973937884442386492e-01	3.879138870002525330e-01	9.799015387413885225e-01	2.012207080170396623e+00	3.118810053878664501e+00	1.476679627040943465e+00	5.927098696212524054e+00	1.896055595509993363e-02	3.766273251675572586e-01	9.605494500779653277e-01
+1.977451097083424703e-01	1.856512361107952036e-01	6.724615885357570377e+00	6.767753598078477317e+00	2.460427469322048477e+00	6.474778192244762032e+00	2.497838703773406444e+00	1.783570854432153130e+00	5.300044222306288244e-02	4.451008306808179604e-01
+7.777282859813858806e-01	1.108325048061198359e-02	5.994453580967894268e+00	3.907200072756264575e+00	2.802655265583420707e+00	2.467799246467401009e+00	1.767975654554970966e+00	1.542521176133627048e+00	3.301251420424628602e+00	2.703716625531150664e-01
+2.877553566109145589e-01	1.420816672999475339e-01	3.531815135282850626e+00	6.071634467360012444e+00	1.950296822580346312e+00	3.667561567003036149e+00	3.564813794526681789e+00	8.193521748791241510e-01	3.969846661030343782e+00	9.642674274532847534e-02
+8.461631661621418221e-01	3.874198608202865657e-01	2.460697897082127827e+00	4.483937646731980253e-01	9.536408659656969711e-01	9.374214254598758345e+00	4.523934853618548857e+00	8.998483173209120833e-01	4.534728615909888738e-01	1.316674336548497859e-01
+1.751881832985308085e-01	3.840511548841616940e-01	1.356205442152841334e+00	4.975298518310518503e+00	3.475732058856605633e+00	4.548907307536070199e+00	4.318674447424278640e+00	1.546572097412000968e+00	2.738768725851110197e+00	2.581125520633765769e-01
+5.541597656676272576e-01	2.055046901470117260e-01	6.866760560523623269e+00	3.572427800023941113e+00	5.759362217394889960e-01	3.707695882166028856e+00	1.083565015241491736e+00	8.888805063168140341e-01	4.589028250324568958e+00	6.712978422710076387e-01
+3.399810580604246679e-01	6.778009844025034702e-02	1.990157231926872106e+00	7.987721068254113987e+00	8.333111074045302935e-01	1.434155759051158885e+00	1.063680632215532995e+00	8.682505419899785304e-01	4.553780840671912244e+00	6.153621080682639688e-01
+3.805973074910944387e-01	1.809291374261368102e-02	2.326724448899936348e+00	8.808775644776478231e+00	1.071772823118059659e+00	2.671050593238351034e+00	3.236405416201733942e+00	1.156638793116922637e+00	4.089621847663170939e+00	8.958752205306643734e-01
+6.236783129330455422e-01	2.034130988847053789e-01	2.149437338793216412e+00	2.709264986875024039e+00	2.581735349094827381e+00	4.559785061212902946e+00	3.811076674546835186e+00	2.506501802065709494e+00	3.558610317643628562e+00	2.953998470582006597e-01
+5.786616826695738514e-01	1.092680018344108772e-01	3.673824620483585601e+00	6.935907382658493603e+00	2.924092204393669459e+00	4.994297253991534902e+00	4.216415579314912065e+00	2.221035033808330006e+00	3.308866108190522404e+00	6.005834101043915352e-01
+8.445682020932365752e-01	3.477257696376142015e-01	2.977918003630122712e+00	6.125943756092539161e+00	2.412136776594560494e+00	9.871992950448490234e+00	1.471692508678064337e+00	1.186189240533216616e+00	1.731159415996974538e+00	5.019757339821060427e-01
+7.262920712271674351e-01	1.070865338500862629e-01	6.515623171275430892e-02	1.910429663217140916e+00	3.785981980891470755e-01	5.136662978344108232e+00	1.190639365883919165e+00	2.126433858821366041e+00	2.551490600585693436e+00	2.282190611495389376e-01
+1.181796487183683730e+01	2.516406960282593364e-01	2.887277780206054967e+00	7.424704929314782653e+00	2.733699169495314329e+00	1.326499604911062224e-01	4.472085075329708559e+00	1.841612256174257833e+00	9.588965759771213015e+00	6.074890083495527726e-01
+1.355685210334684321e+01	1.476043055963610906e-01	2.536588592058465874e+00	6.894815252046440968e+00	4.629665076865330242e-01	1.591874408544442510e-01	2.659816619928975889e+00	1.992412304973402826e+00	9.409822587181350118e-01	8.428860916139044868e-01
+1.302344932128066368e+01	1.877957892610864488e-01	3.771232365671751907e+00	2.334982689796312627e+00	3.407718706286316790e-01	1.716052424590376324e+00	3.548651514000740193e+00	2.374591648505322450e+00	2.134503293655584777e+00	2.543046511567058632e-01
+4.451805618740952042e+00	6.764221264325200045e-02	2.669906521408719868e+00	8.632453926650279996e+00	2.033894446413802903e+00	7.142031037304861218e-01	4.194277576482712266e-01	1.402693657818623318e-01	7.586905366769697068e+00	1.843381480000478856e-01
+8.570192823591918696e+00	2.306084812036459097e-01	2.522438257729497479e+00	7.522008507552547130e+00	3.713341144773252278e-01	5.716961257420709597e-01	2.079090539304101704e+00	1.766024213797281917e+00	1.662647314050246683e+00	6.384753099614368654e-01
+1.492700367152876417e+01	5.956069900446444437e-02	5.193239326553306112e-01	4.348155739698301936e+00	1.498622145721807408e+00	4.168028931133043180e-01	6.514105873948607517e+00	1.300249280108312089e+00	7.509216655975119892e+00	6.466771718100665645e-01
+1.401469528149594801e+01	1.318450715802988726e-01	1.755366307435912709e+00	6.268538614231175643e+00	3.594158033755705084e+00	1.596063791967876844e+00	4.538357107641011190e-01	1.578349416671345651e+00	8.605308976007654209e+00	8.431350039872285151e-01
+1.125897718091157707e+01	7.088415129913086532e-02	3.073048649925616438e+00	4.169572134840827671e-01	3.391473123826589831e+00	1.253820676407349843e+00	6.033094418983614204e+00	1.669529100255978893e+00	2.743440461537412389e+00	3.447717414063620200e-01
+2.618236120635488717e+00	2.221707698733976499e-01	3.880148546665934006e+00	8.346365071605847064e+00	4.485232175994370074e+00	1.273339902009563929e+00	5.126040370747679376e+00	5.002442564550735327e-02	7.652504234144801742e+00	9.045218812960378330e-01
+1.352791799195179934e+01	2.958099853113912436e-01	1.620323493963710115e+00	8.514270244110930363e+00	1.405394051171777736e+00	1.228874034324174414e+00	4.366837956110266816e+00	4.681693532389841206e-01	3.658910928887952885e+00	5.685286335634343047e-01
+1.321105522112276098e+01	7.596543795040429370e-02	2.288656765287575112e+00	5.454334126686442019e+00	3.540974541852515589e+00	9.136761954539687203e-01	1.531916431358365749e+00	2.600489131884043736e+00	2.186958478364557035e+00	6.152510639313788499e-01
+9.248327943582943078e+00	2.953301427161937331e-01	2.912841777561403500e+00	3.210770144852208574e+00	4.678430187034543941e+00	1.903313079890405213e-01	1.852621718826671371e-01	1.901873807226989177e-01	6.598474242893042074e+00	1.863584605124068982e-01
+1.561511613665458764e+00	2.791101130788958273e-01	1.230729125078247499e+00	3.214729791326091046e+00	5.760499802570256955e-01	6.791526464572399391e-01	1.164793484277019120e-01	2.436090334891392928e+00	1.684437394931304732e+00	5.939045414290095026e-01
+1.074199933120756256e+01	2.768105466336150400e-01	3.600337787752637908e+00	1.988419733930615330e+00	1.121183222910840938e+00	1.262741873855612873e+00	5.955023194942483222e+00	1.775781023586198160e+00	1.635523073587350051e+00	9.451325867881940068e-01
+1.439453349318703390e+00	1.572125366345192565e-01	4.116939260327882266e+00	8.143883513662194318e+00	4.436336490499172669e+00	1.431008029759604838e+00	6.129158351904355762e+00	1.627229601340490550e+00	3.544342525406199984e-01	9.537385422254009981e-01
+4.385485861700014532e+00	1.072544990386351815e-01	2.555081167553841404e+00	7.797049674127350727e+00	6.437428060323546752e-03	1.690033945941410209e+00	3.546703381942077016e+00	2.355037673948323640e+00	6.000098633768423007e+00	5.634219751061730896e-02
+1.478941841002847202e+00	2.819342154043811277e-01	2.946748707794077937e+00	8.674573772230408375e-01	4.831892645548560239e+00	6.117546232929613748e-01	2.336129800349750152e+00	1.662089735231696785e+00	7.288065418938156625e+00	5.079145336272458122e-01
+3.566330915119718004e+00	1.054092958488348081e-01	1.262772079536855330e+00	6.632582037177616741e-01	3.155614409429076339e+00	4.560221336860896812e-01	6.107444411033736920e+00	9.724400924370590893e-02	8.869125512095319408e+00	4.531451318109073201e-01
+3.931188712123208084e+00	1.648487385228547819e-01	4.230745858304009843e+00	4.740462911360358689e+00	4.302603206380042344e+00	5.923369016635324780e-01	1.350657156605095111e-03	1.610503856182457394e+00	4.214531575126196472e+00	1.188686805020251480e-01
+5.690049848746455474e+00	1.611771331430286031e-02	2.797114322332588721e+00	1.326664870101148530e+00	2.184236794465954645e-01	7.244826847041867524e-01	5.088788284324481381e+00	1.892888045267941521e+00	5.251528732452677151e-01	8.270932539527531269e-01
+9.971187065128873428e+00	1.922234878798162738e-01	4.399243530319325579e+00	1.690861779292817113e-01	3.026193132922350149e-02	1.247811934568532566e+00	2.287373749222342489e+00	7.123297811357809373e-02	9.782325221036309415e+00	4.025349245250368080e-01
+3.166794098033590465e+00	2.895927629971295492e-01	1.010943063067509806e+00	6.246940915716239218e+00	2.296626417112780594e+00	1.124916422624346879e-01	3.155063467415336387e+00	1.122794863214901628e+00	7.747118569384047859e+00	8.813094613105451058e-01
+7.776074009241417784e+00	4.884943113850780116e-02	2.665091138087455569e+00	5.396374948611239120e+00	4.807416439179393208e+00	9.311196524679428155e-01	2.370568997313627246e+00	1.497537386829888018e+00	6.410622802993913183e+00	1.959147447247133744e-01
+9.386902307633294029e+00	2.918525147335055969e-01	2.580633210860679316e+00	6.117570487861421213e+00	9.440652787891057329e-01	1.478204798622005711e+00	1.583143507835594432e+00	1.878469714268893753e+00	8.341600309722448259e+00	2.296329479872928347e-01
+1.325226074192375592e+01	1.660088044261155971e-01	3.032911943104431352e+00	1.544500510963358053e-01	1.444444693560564819e-01	5.144081121761716613e-01	6.228472420827102063e+00	2.002195300328763139e+00	8.619120730699705746e+00	5.986701025311890190e-01
+1.253702247039758255e+01	2.815208299886055587e-01	4.418741917269815467e+00	7.187649403919402324e+00	1.857768480706825098e+00	1.693270109415251579e+00	4.500029768156658783e+00	9.386766274226390205e-01	7.712460417143821578e-01	1.490837705279541847e-01
+6.470192866248365426e+00	2.538814476100020867e-02	1.482709093692339941e+00	4.810511422160657080e-02	7.397535485801826072e-01	1.366029201822366090e+00	2.938463177620382449e+00	1.905659482665870819e+00	3.274297555223312450e+00	5.946455225965689984e-01
+8.046767686244354678e+00	2.379545842279764956e-01	4.589316043001465673e+00	5.892088873366180479e+00	1.308019335424309704e+00	7.484233826378567933e-01	4.495389998435046053e+00	6.202227507857822619e-01	1.707552168782031199e+00	9.367630937108080147e-01
+4.192176901095245434e+00	1.304693968976899099e-01	4.998027091565603541e+00	2.456526465425167283e-01	1.205473478694641942e+00	1.109994403062972701e+00	5.125196171178717286e+00	2.206741883831745188e+00	5.115846166892562330e+00	1.104691714503143185e-01
+4.157525610946958494e+00	9.298038433732908059e-02	1.491518671460473344e+00	4.496224394499284571e+00	4.648879436827211720e+00	1.793268724253979762e+00	2.896904069484845579e+00	1.356276322086461761e+00	9.923020114331025709e+00	3.558734125538330906e-01
+6.979832709755873221e-01	3.599810987684147479e-02	4.064902498699765943e+00	5.597103986278889387e+00	4.496807253190716835e+00	1.851319113294662699e+00	4.995032507818135237e+00	6.680000375419122838e-01	3.176705087670492755e+00	6.164391597089758923e-01
+2.680618751278343481e+00	1.681546055962608033e-01	4.674994237591107549e+00	7.872155819890408246e+00	3.635532819810061156e+00	5.723527244541317671e-01	1.228843659393086263e-02	1.624022154584328081e-01	7.634482806401528876e+00	9.621470740939815913e-01
+1.129656603051103581e+01	1.754676764823895063e-01	4.925438368822037560e+00	9.090625284174426168e-01	9.872898867967799807e-01	1.967411347408282829e+00	5.647085320216579163e+00	8.040378544211923550e-01	3.157643996826393629e+00	6.705813685367211230e-01
+1.166598416610384703e+01	1.866541563727542041e-01	3.082745872586427183e+00	5.684997481806196795e+00	2.688708007208003181e+00	1.498993448192174283e+00	6.225488991661570992e+00	1.314065400514044191e+00	6.343644934019415871e+00	5.416797703578821732e-01
+7.720230714264369354e+00	1.523894939496055478e-01	1.620746553117432542e+00	6.005901827039864749e+00	4.717729383985791447e-02	9.075246998741233195e-01	9.360149124843241131e-01	1.754815849294320884e+00	9.666726609856759467e+00	6.581401514679491305e-01
+1.453127659928862059e+01	7.703415326140243546e-02	2.479862457434094392e+00	4.182212439636490586e+00	3.488676768426026253e+00	1.206066251747953011e+00	2.285902497850169190e+00	1.194028499060063808e+00	9.286786278550043239e-01	8.091147826884519390e-01
+6.417666940507571915e+00	7.132128551135483163e-02	1.942224670162717270e+00	3.433069127251919817e+00	7.564280838361731041e-01	5.516799087701549364e-01	5.176162075754822567e+00	2.178815429942681181e+00	5.722924693718432820e+00	4.158454834199132311e-02
+4.502867570496368543e+00	2.239682077280483463e-01	1.141094924815538203e+00	2.453579665440670521e+00	2.992162924469959684e+00	8.370073651298797213e-01	1.718538404749463400e+00	4.075217867922253934e-02	2.539196137099058781e+00	4.990272000661035712e-01
+1.769506443642104854e+00	5.077985666082200644e-02	4.536839079078856862e+00	1.926622825410988948e+00	4.021468856985962681e+00	2.704122777555590940e-01	2.804453666907950282e+00	1.607921780184728977e+00	9.484877651296452328e+00	3.670233106390000666e-01
+4.295020083244110998e-01	2.868652070133119181e-01	3.134264451081448577e+00	6.836230003013163525e+00	4.042883960895656159e+00	9.348262365209494273e-01	3.157788950792046023e+00	6.497342539434567410e-01	2.517529722233489586e+00	8.540150270825374035e-01
+1.912602820186987129e+00	6.362962639611402689e-03	3.790830335966000497e+00	6.753767120657978573e+00	4.454347298520647591e+00	3.597830171502631735e-01	5.746913394800006181e+00	1.664073075055900741e+00	9.213646990669767334e+00	8.780134214441304152e-01
+3.963149975944498138e+00	1.436768738550251445e-01	3.590235964594930884e+00	4.171865940500004477e+00	4.196989506671150671e+00	1.790899951828085479e+00	3.907987905963472564e-01	2.629171659041773967e+00	1.771918908979907226e+00	1.659282155546771831e-01
+1.044847711025325054e+01	1.867999171686929583e-01	9.742152202794096549e-01	6.029364879715696901e+00	3.842755370236281998e+00	1.457930089422849740e-01	3.050806799572168160e+00	1.928860746757870892e+00	3.715892327997140310e+00	2.654349553692562091e-01
+7.856135199120164891e+00	1.212882676824621475e-01	1.801474098171824689e+00	8.484064353156798077e+00	2.754740315340553636e-01	1.410439285738713577e+00	4.796002926524840504e-01	2.610915875359472338e+00	6.670148976341948810e+00	6.486294231998313009e-03
+9.804395866994216391e+00	1.639075098111162643e-01	4.837423823790375721e+00	5.260179690540038422e+00	2.017636134845651075e+00	1.254612394194728031e+00	5.546452640168771708e+00	1.375458824455512818e+00	7.174505857330657488e-01	4.918004847159316295e-01
+1.183986689177626417e+01	7.872326102861289254e-02	2.582301908063294960e+00	7.740411186780283614e+00	4.407066198851386396e+00	8.853556556234460917e-01	5.162877457710448148e+00	1.109660738036303940e+00	1.268125593567879239e-01	8.662130880451797132e-01
+1.093683742604408948e+01	2.447064137767908099e-01	1.696855866075519081e+00	5.548678621604610228e+00	1.820809627873263281e+00	1.700607366429271350e+00	5.830452128040040982e+00	3.764385212451778484e-01	1.898685064749314089e+00	5.451812220747674909e-01
+7.056150651686926345e+00	8.818414996117814042e-02	2.747955950871696196e+00	5.338892801584529835e+00	4.473322648271785162e+00	7.333467400342130738e-02	2.177902358616423673e+00	9.320526616437706524e-01	6.762834017402112430e+00	7.935462801762755980e-01
+1.180908873504107781e+01	2.574753599584344022e-01	1.987005140477280385e+00	6.918063648604911187e+00	4.785668471712723360e+00	9.696239217014788192e-01	1.280135659543590876e+00	1.881727503129584367e-01	1.035012278208573377e+01	8.059150064856590578e-02
+1.008412809573388103e+01	2.343166128881368770e-01	4.905764759825019361e+00	6.797230994413787641e+00	3.285521074639472960e+00	3.493548680683078445e-03	1.756029501961107187e+00	1.794157670035024577e+00	2.319307927793950785e+00	1.193434686169847492e-02
+3.749280090904193230e+00	4.960933223025080357e-02	1.971450801044384304e+00	8.330916700800358399e+00	2.349270312429754259e+00	5.221738760240273169e-01	6.366458140784689412e+00	2.387898312417887414e+00	8.441503152405894994e+00	4.979337584731783339e-01
+5.343103528797895407e+00	2.723330372377434361e-01	1.378635711838508415e+00	8.671386126448166198e-01	3.147037907155366021e+00	8.963585668163664977e-02	1.405277975206570940e+00	1.309205332617405571e+00	1.965588001155148978e+00	8.059321310558243967e-01
+7.149160280820787250e+00	2.764868013404646607e-01	1.096825467691220357e-01	3.503192650439979960e+00	3.131184989945668740e+00	1.194188630214327640e+00	4.274505420449012050e+00	2.968756266528913557e+00	2.749295301370240363e+00	7.370923727332389097e-01
+8.210295738041565983e+00	2.272029223367369455e-01	3.779242145994847757e+00	6.686181645391151562e+00	2.011271144842419201e+00	4.622895905968951169e-01	3.398981903852160702e+00	2.343123357207267388e-01	3.729889267099015360e+00	5.792152207898214611e-01
+1.002328179326305246e+01	8.185470612146233949e-02	3.269943977541031721e+00	8.539125023520108826e+00	3.035522647556629483e+00	1.401749737447430810e-01	1.982996845708496103e+00	3.745616727807693502e-01	1.201117123994490221e+00	1.638310756436510696e-01
+2.710850844140515914e+00	2.242145718096996698e-01	2.296536249652671557e+00	4.220260338748655471e+00	1.230213951484597334e+00	9.427382488180584552e-01	4.976262280555096673e+00	1.332924357337322174e+00	6.600361386208043690e+00	9.862980787604007649e-01
+5.949570342069224971e+00	1.418784555555818727e-01	3.232287503680208562e+00	6.141638030385319524e+00	2.842720729387286482e+00	1.023630011169686105e+00	6.974549437024968768e+00	1.541945738455003934e+00	2.913259034713350015e+00	4.512992539354583332e-01
+2.028585498043759650e+00	1.398763410169154009e-01	3.691098756081625076e+00	7.976451317237516214e-01	4.435499859432201486e-02	7.467345392685726946e-01	5.281840013916209386e+00	9.546775874932222905e-01	5.365778369266787529e+00	7.770900276041966537e-01
+6.077535052205729649e+00	1.395387528894324602e-01	4.775093402342934112e+00	3.132020266567483890e+00	1.580718289061279869e+00	1.416317099261457102e+00	5.862579332019776324e+00	7.318328790735707523e-01	3.709729378713444436e+00	6.670468053137402764e-01
+1.214513163540950025e+01	4.365179876861721014e-02	3.838251055066958628e+00	7.238524315230883133e+00	1.660270397218274852e+00	1.239458019962108137e+00	1.401234933150055095e+00	2.814065416790874075e-01	4.456870379094779899e+00	2.891479005157764348e-01
+1.087941705836024830e+01	2.995998122378682460e-01	5.777085219163696772e-01	4.703046220800023214e+00	3.086014168894017917e+00	7.044434508496584257e-01	2.037881492641501424e+00	1.019327349906296476e+00	4.393779773589142401e+00	2.305255684467343302e-01
+5.290012626145627728e+00	9.008901407316972920e-02	1.142983813855805719e+00	4.118316186532029022e+00	3.637703293797282633e+00	1.906330254257075252e+00	1.445500604367967679e+00	5.164490469163260933e-01	3.532786407019917974e+00	8.270890916952341465e-01
+5.697086356845923127e+00	2.235572514757046270e-01	2.266800826831066873e-01	6.383456423897621690e+00	1.264197247239626343e+00	2.912491491275546185e-01	1.306548262207717137e+00	1.684488621935003927e+00	9.359893288902904729e+00	6.328523844944041743e-01
+1.359123925577359593e+01	2.715219536985591462e-03	8.374487623417636861e-01	4.033547555739824375e+00	7.378921961934747920e-01	1.372002555673720758e+00	2.694288461006839164e+00	2.082180871032943692e+00	1.911434796530308899e+00	7.656913496191556368e-01
+9.992533308249194945e+00	4.104749241998207482e-02	4.724075616116481413e+00	3.461796948861655121e+00	4.204372983561600385e+00	7.310738374531249306e-01	6.107621094624161806e+00	2.638927091445617990e+00	3.624613744624816114e+00	4.700163600933867647e-01
+1.685245491179172550e+00	2.115428105359661026e-01	4.407417050425523897e+00	7.476567774422387735e+00	3.634488913192286841e+00	1.698280219311323336e-01	2.945428761291103648e+00	2.713451434553051822e+00	1.012624145292138067e+00	3.648699049547410311e-02
+6.589428437578542130e+00	4.446906542749364671e-02	7.810041614180479597e-01	5.691285012262346399e+00	4.554086499149781808e+00	1.277799954679892380e-02	4.026680491475270740e-01	1.953473174748952212e+00	7.152482187616325815e-01	2.086430282341091846e-01
+2.720849865844793936e+00	2.440203981068089778e-01	2.917763308083296092e+00	5.899884531261626819e+00	3.940367037369739034e+00	1.211422445527540859e+00	1.567556955778588890e+00	5.600673976327497083e-02	9.876156837696910884e+00	1.599751919864411676e-01
+2.834482533918473379e+00	1.520034844725705325e-01	1.978602545261327617e+00	1.676165260704382654e+00	2.194029998012147153e+00	2.492932602456554569e-02	8.120351889658737887e-01	2.560447673664525858e+00	2.686589650575758270e+00	4.039721790527054379e-01
+1.484413911807410891e+01	1.998020163214577760e-01	3.141873601255305015e+00	3.066415597754779565e+00	2.205093406956332380e+00	1.116117348869391623e+00	2.615679339050218655e-01	4.804557806332970804e-01	3.540690054736377768e+00	9.294949005824936217e-01
+8.903890154201720364e+00	2.893997238796244265e-01	4.277785905487593077e+00	3.596708777222128894e+00	3.332199664018714813e+00	6.094561931515880548e-01	2.121001953072600532e+00	1.426684687927117112e+00	9.708702533734449247e+00	5.724400833134991595e-01
+3.704414716601320379e+00	3.534656415192761331e-02	2.191170133339663817e+00	5.192464390336396285e+00	2.340092081088205767e+00	9.268383918699469870e-01	3.615953197841254241e+00	1.025661142784032753e+00	2.734380318250539954e+00	8.545976971860628213e-01
+9.213691025538279789e+00	1.023114031986859657e-01	2.236053896606724933e+00	6.731642119286943560e-01	4.131381102030507790e+00	3.423592126394250368e-01	2.177072569545674519e+00	1.414220327358206131e+00	8.610263975317616669e+00	4.954105311458011363e-01
+7.882621005926893076e-01	2.756438662864029721e-01	2.440056720240608268e+00	8.095898270821609843e-01	2.982700784281210993e+00	7.367102799018900861e-01	6.005049730578022604e+00	2.935825349713905741e+00	8.105806119374374674e+00	7.343900173346469273e-01
+7.332899839919692297e+00	1.012310325009186385e-01	2.786199548500924550e+00	6.996915193838988856e+00	3.958649628430874756e+00	6.984966471313347469e-01	5.034280993433526419e+00	1.175980210067817389e-01	1.010859759006517855e+01	6.006376844057623732e-01
+7.335639593623159094e+00	1.035066387333896804e-01	4.632911160270253959e+00	6.920712126173365952e+00	4.840386095819143364e+00	3.536787893048165543e-01	2.116621517341860326e+00	2.028873912140862767e+00	5.484712972130116881e+00	8.008724869574028737e-01
+1.173589800901229019e+01	1.675454624679203752e-01	1.857307229807804827e+00	1.233541929072509369e+00	3.229707875128986050e+00	1.703661647359659526e+00	1.399653313244334507e-01	4.231632680835958338e-01	1.622735163775802736e+00	1.411886133281192723e-02
+2.170542045884753080e+00	1.649943693366306696e-01	3.719614864008544686e+00	2.628731378385484874e-02	4.659863887863504139e+00	1.953928332749399477e+00	4.030765489154215508e+00	1.884789318558428350e+00	5.218984001391103966e+00	3.659090632923160946e-01
+1.056590580826244263e+01	2.862455750574361968e-01	9.503624324612436336e-02	1.127223921843998111e+00	1.774286440361028117e+00	1.354483262491954498e+00	1.915978523368969677e-01	2.412656854678067564e+00	4.504271337259460495e+00	2.537074724100759626e-01
+6.043982956924061511e+00	6.522158466617089707e-03	2.382223597003132909e+00	2.938800347127498824e+00	6.293807419434965800e-01	1.252993652180068862e+00	1.825306399812175462e-01	5.606017677912336072e-01	7.204927649424960556e+00	3.160779924731268453e-01
+1.156234306026847136e+01	2.625389283153459208e-01	9.188016061419135294e-01	3.956596395967132196e+00	1.157796919074650654e+00	1.652334566517626113e+00	1.741466103683401379e+00	1.069893612030600405e+00	6.560230433252789872e+00	7.063619376068841493e-01
+7.867290877658208004e+00	3.150568657612348028e-02	3.208635889352626780e+00	1.260684268789601381e+00	4.422562013782658674e+00	8.540116238383288483e-02	3.831517191823027968e+00	2.753952785268408032e+00	1.024925082683439470e+01	5.986830995941015976e-01
+6.908975429542424607e+00	2.465841896031346625e-01	2.002845859032076348e-01	3.450171987307428356e+00	5.654276553583914566e-02	5.888214584539321894e-01	2.954089858527504830e+00	3.998447426773437119e-01	8.565177675858345552e+00	2.149680205088763119e-01
+1.440579812308176999e+00	2.654515414198045306e-02	1.135165258992570925e+00	2.417274957088451526e+00	3.433882671585630764e+00	1.266388978164296741e-01	1.387836349982975115e+00	2.496160923818382393e-01	9.660716199882918076e+00	9.465672103847755370e-02
+8.782530926087384060e+00	2.393484704828878762e-01	3.968161715697180902e+00	5.364616924126802688e-01	3.825267183065478305e-01	1.180409496794958990e+00	6.081207848146038764e+00	1.796741281632131981e+00	8.120621615047823871e-01	4.720230640647659071e-01
+2.467121281489119689e+00	2.669775602143918958e-01	1.920947605339624609e+00	7.436975019510371432e+00	1.718195053919568949e-01	1.974195934046878431e+00	6.198510534051498766e+00	8.831966827916776230e-01	9.463134124520076185e+00	6.678848326528338264e-01
+9.247156276444975731e+00	7.001843581120269222e-02	4.115753916944164814e+00	2.041110834822566389e+00	3.904491445485749335e+00	5.587411667288546546e-01	4.848061119307658373e+00	5.410753659731812881e-01	1.022435181440595420e+00	2.642349075966086147e-01
+1.366546319900654893e+01	2.924851229239786368e-02	8.435137812387760725e-01	7.697721002565737436e+00	4.954980248261398046e+00	1.503394228824324497e+00	9.547207340156207422e-01	1.149510349323715541e+00	2.550282953574176226e+00	4.858288944991739911e-03
+9.937610741476689213e+00	2.759728949218902619e-02	2.479287724279272132e-01	1.845578662935752234e+00	1.148725236157810414e-01	1.874378505644074044e+00	4.577898001225015001e+00	3.157179950007004132e-01	6.572428732526478967e+00	9.817710114629516482e-01
+1.335840810844957005e+01	2.901657393525319262e-01	2.291037384835683088e+00	5.523192339535359707e+00	4.263209114304837399e+00	1.850890820836459616e+00	4.248656287099825235e+00	1.437415418687666868e+00	3.969903039271164946e+00	6.444202791760184068e-01
+4.192252523941295550e+00	1.550577732890167648e-01	4.335142520595742788e+00	3.915362307244128814e+00	4.174447531315469639e+00	1.464441590023846773e+00	7.659465632505535471e-02	1.119809765001355650e+00	3.868034385081745352e+00	9.191814708483783125e-01
+9.950321370165964296e+00	2.416678064761437206e-01	4.452588664798807550e+00	7.980962757329372437e+00	3.576862137763775706e+00	1.878820577509221490e-01	5.810484732429623378e+00	2.818609720584823730e-01	1.043698983315975104e+00	1.320478448637341096e-02
+3.344531274564604306e+00	1.638261678427317125e-01	4.752068092969680357e+00	2.056633949183845012e+00	4.059152604022060906e+00	1.323178175655694844e-01	3.634674435700454787e+00	1.332248033612655647e+00	9.750778647413559597e+00	6.571876845461107619e-01
+9.313380312895419566e-01	1.327820969301612697e-01	4.452531935545533059e+00	7.853262599622130580e+00	3.589473016958418139e+00	8.655756972988859488e-02	5.281257933398629589e+00	2.878400204002617091e+00	1.530027895964270801e+00	1.410499062384990010e-01
+1.093728606033430673e+01	1.496850021362609306e-01	3.004629347358933611e-01	6.530279224244562641e+00	1.505697907373285682e+00	4.359977775952421641e-01	2.822161658968949549e+00	2.723579655675674616e+00	4.517358372113638332e+00	4.762740241746520331e-01
+4.126188688687098427e+00	1.785762566399805643e-02	3.827008942348112619e+00	9.799394438157742471e-01	1.190849990888681598e-01	3.586378183201568426e-01	3.473362514860903971e+00	4.907195619794019192e-01	1.082846597286479273e+01	7.338269154915928461e-01
+1.007171327046501119e+01	5.302731525826676236e-03	4.442829328707308179e+00	2.714799527596614137e+00	1.292804717399506087e+00	1.414685076132262687e+00	2.183756095362196170e+00	2.808301856380406747e+00	6.780760473974004832e+00	3.843291080113009350e-01
+1.167399330294518034e+00	2.464035477017099129e-01	1.402426005668689912e+00	1.150248779473895366e+00	1.522194921445518379e+00	1.381162689722999160e+00	5.435001694932432237e+00	2.392787620207044608e+00	8.988227846318018521e+00	2.269266068514101953e-01
+2.894390818868963322e+00	3.501963832568973783e-02	4.740386916137786066e+00	2.285802360415889467e+00	4.429435669874964177e+00	1.149591990037637013e+00	6.309683324067085763e+00	1.754139509852320478e+00	4.996119690460893992e+00	9.633675391347560346e-01
+8.428267793741513358e+00	2.975519105867783232e-01	2.738097388061362825e+00	2.713125296218298343e-01	3.983255273469377133e-02	5.832842143111387490e-01	9.222455190282240478e-01	2.660850484958854789e+00	8.646005308057572947e+00	7.723493728139844139e-01
+1.060844902950677238e+00	6.766848820457482816e+00	2.571544822122694196e+00	1.301410412620856993e-01	4.939075861095859388e+00	1.944985184657319843e+00	3.071352746565616521e+00	2.952877773172982057e+00	5.413409368896327400e+00	7.835805833878990434e-01
+2.676604664379357956e+00	1.015453292439209010e-01	2.796370894748441138e-01	3.783589975482610779e-01	1.615586647489880567e-01	1.462415378364156915e-02	2.849926147228915241e+00	1.990766760230136434e+00	4.608386640197815742e-01	4.913530386323300414e-01
+4.928622521604909323e+00	4.776267137399662133e+00	6.738922765189601094e-01	6.737755331040837792e-01	3.523416780856081232e+00	1.987539862409304803e+00	3.322034856620849741e+00	2.129310622060661684e-01	6.600863177109800084e+00	3.085759141198198829e-01
+1.785331680394218745e+00	2.003420991178347599e+00	2.900766376377190259e+00	1.569593908723631026e-01	2.959800314520040221e+00	1.332160482584163530e+00	1.574742777579251296e+00	1.474370607477436312e+00	5.161994254879935795e+00	3.573259702589637321e-01
+2.060566256973487498e+00	4.064024192901496413e+00	1.638291229583957342e+00	5.291965806117688853e-01	4.147376271759576127e+00	1.871711279692187535e-01	1.592167092938667761e+00	9.832800064235791559e-02	9.093859685572962093e+00	2.871171512211987409e-01
+2.802432727066052731e+00	4.152681619178160588e+00	3.519899751123976461e+00	8.767373897844914499e-01	1.497015974178587117e+00	1.178656320463950191e+00	3.590298473162238935e+00	1.121258186897473141e+00	3.999535611626285458e+00	8.235737108516272276e-02
+7.793345816216601385e-02	7.119616496871206124e+00	3.263514453318660635e+00	8.767708034167319697e-01	4.367621895504575136e+00	1.584033235097142001e-01	1.753016637889057261e+00	8.806782211544889050e-01	7.660523694529250971e-01	2.421807599200342942e-02
+1.650807741747343904e+00	6.676864939234765650e-01	7.158398866250768044e-01	4.208260033098576436e-01	4.020367360514349286e+00	4.706317984761432260e-01	2.612129318099994713e+00	1.171799316797829782e+00	1.085072636057884621e+00	9.463861023983399789e-02
+4.058767043920196826e+00	7.095671728991746363e+00	2.700297896325542180e+00	1.613772848781617197e-01	2.851637748082545443e+00	8.849171780970555012e-01	6.469348385074413343e+00	2.409711450205083150e+00	1.227804052323222539e+00	7.245949939817655228e-01
+2.952642653678108964e+00	1.886946931087596369e+00	2.473081195542755850e+00	5.362099416315594391e-01	1.419640415165426717e+00	1.903766283029795225e-02	6.887126590983250196e+00	5.688537003635961709e-01	3.423639520476965625e+00	4.938406145277395298e-01
+4.737499561105624757e+00	9.968754726664579291e-01	3.774680347781333989e+00	4.881731108025820776e-01	2.226755900717518610e+00	1.171985346633335423e+00	6.671227239296362299e+00	7.695013938605947956e-01	1.042858080409883925e+00	8.496226110071675164e-01
+5.325414666879468850e-01	7.018918782479677709e+00	1.608471156325481655e+00	5.862836927685144417e-01	2.672078926166417290e+00	4.874633009746880408e-01	2.209336683548580815e+00	1.743210228627345870e+00	7.300458180698917943e+00	3.180482670802708967e-01
+4.057375329048528378e+00	3.890521688612440965e+00	1.762635121494908752e+00	4.315974671518035821e-01	5.740273295456138625e-01	1.149928423510549580e+00	6.645380061650246084e+00	1.716175648811826715e+00	1.817203695888137016e+00	2.279386936010485698e-04
+4.819371354809093511e+00	1.986334992808725719e+00	4.583363789373673391e-01	3.992089559255137532e-01	1.511460483775759922e+00	1.660344970547767085e+00	4.073543600444102353e+00	1.064209283618703594e+00	3.163140007477470483e+00	6.883692634412575950e-01
+4.822861817330296397e+00	1.286480435582614712e+00	9.370740372064094714e-01	9.673040603625950862e-01	1.291594883259390425e+00	1.038459954771715044e+00	6.225515359923287306e+00	2.457650031437864868e+00	1.095606761972162424e+01	7.337655625192263109e-01
+1.388473219412195148e+00	3.816243179861769086e+00	1.442597880348657213e-01	7.222240964942758801e-01	4.062612762646490516e+00	2.874594600573110448e-02	4.710301168894129198e+00	2.970156004203755362e+00	1.950104669992388517e-01	1.213572941978990904e-03
+2.095978035147009866e+00	6.873067538223646133e-01	1.319090499678944095e+00	6.916304149708443205e-01	7.253452473503479991e-01	1.948495719716487162e+00	1.265245191334455122e+00	2.196421971177734633e+00	5.301304620523421995e+00	1.584176397116228152e-01
+2.979233187060020605e+00	4.956587004286280518e+00	1.688392721619113690e+00	7.532059070534440348e-01	1.887662047040422619e-01	2.805001211605031575e-01	9.824130470675018767e-02	2.962504536056278504e+00	1.072663709115177610e+01	5.013157111486933326e-01
+2.788962486475272673e+00	5.115971710107624304e+00	5.660005924878737105e-01	5.560222984350132736e-01	4.858087680745722281e+00	7.725785191495309068e-01	5.845998556520539680e+00	9.987118052417467418e-01	2.295433607375663598e+00	1.326889295282640546e-01
+4.557887825045138008e+00	2.247326501433015267e+00	1.308921194395065601e+00	9.753282383497733354e-01	1.730029373381708480e+00	5.701790289472219442e-01	6.352823959792196495e-01	1.147755410972870260e+00	5.520736993109069957e+00	9.774412642381167249e-01
+2.045438792050340293e+00	7.561046060398181723e+00	3.522811502396113070e+00	6.976548466048726471e-01	1.767378331045376783e+00	1.924976381949984106e+00	1.266202415138989235e+00	2.046343295450062438e+00	2.076108124751949724e+00	9.755719638581734632e-01
+4.868031211031395245e+00	4.182997028123785377e+00	3.669697829412847945e+00	8.683837530035829477e-01	4.809874760362171209e+00	7.146311160463343359e-01	4.140379287730196367e+00	1.871204810554052189e+00	2.927063933787202199e+00	1.781360315397091387e-01
+1.447543209935399089e+00	3.894063908911108207e+00	7.167099377268382199e-01	4.816816841673178251e-01	9.437790189709754918e-01	7.136790168415698687e-01	7.984261778454281311e-01	1.267997907136523494e+00	9.241262771031886203e+00	2.264187036508533701e-01
+4.492110383454359201e+00	8.423313033097198144e+00	2.601809612520743276e+00	1.877570508378717529e-01	4.720029746759013101e+00	1.700776811458877491e+00	4.379722879784793577e+00	1.286338778406133088e+00	4.897457407195897616e+00	1.463406370506259435e-02
+3.397516181353549269e-01	5.474743776680605079e-01	2.792881561416644676e+00	8.613177248107457551e-01	3.084547950087940915e+00	4.295939916996971419e-01	5.853057398141740642e+00	2.886701648573187562e+00	6.513812374119662962e+00	3.683927726861278007e-01
+3.123566792546268012e+00	3.586380650299095585e+00	8.986175900527371141e-02	8.341062939442902557e-01	1.688951864196345243e+00	1.079977094532845516e+00	2.714109110128215541e+00	2.839337530931991704e+00	1.218807949172431293e+00	7.915582761215033125e-01
+3.141700155842215736e+00	1.204667402016855737e+00	1.916351797648172095e+00	1.536958008100480777e-01	3.425634957816583004e+00	5.173225973429682600e-01	5.259306090770171416e+00	1.917625582619049140e+00	1.075917707540842727e+01	7.474619317582414935e-01
+1.334327177064201120e+00	1.275405518944317640e+00	2.568022248402845875e-02	8.801880626520913742e-01	4.297141956880407010e-01	6.992528373701238920e-01	6.553594825126268120e+00	2.898861772480366694e-01	5.921191598075131957e+00	7.429605987984524429e-01
+2.771024127637877310e+00	2.706234632094140391e+00	3.850437535134872746e+00	7.057112116190219497e-01	2.533276847739223125e+00	1.771636964438744632e+00	3.080408803160064934e+00	2.070444129927007104e+00	1.189083402543695822e-01	8.109099497493107433e-01
+4.429566490061738193e+00	3.870904287199198102e+00	3.908477669001917398e-01	9.583973851447378234e-01	3.595638219458183737e+00	1.789184903519657199e+00	4.609156216292533692e-01	5.237161367552056745e-02	1.898196986141101217e+00	4.720040556194841086e-01
+6.236410129144742687e-01	5.677002709722877682e+00	3.495932245976291153e+00	1.724677152701515892e-01	4.797077834826430021e+00	1.896003719678501565e+00	6.666830956711757850e+00	1.917567880520230972e+00	5.078350320183913524e+00	1.619340143708598578e-01
+3.981295223585228360e+00	4.293445223367310604e+00	3.800375528545455239e+00	7.832850811253453394e-01	1.697773609172191778e+00	1.107703012941383403e-01	5.882009120179721329e+00	2.377183558175493872e+00	1.016804229228126033e+01	7.498926347264457393e-01
+2.600915763215731591e+00	2.624416429045383747e+00	8.727133245527625149e-01	8.321007865109071311e-01	3.120145967108394647e+00	1.706568507429746751e+00	6.924345627676093073e+00	6.502709038408825970e-01	3.057969519658665369e-02	1.561628673971688919e-01
+2.990086920642259782e-01	7.612290167957059595e+00	1.048504608494758017e+00	4.243228873493485320e-01	4.221314528605337735e+00	9.278772379775166446e-01	2.349514943790474764e+00	5.255177442909533259e-01	2.143073036231040618e+00	6.074576627365183557e-01
+3.899808502523676168e+00	7.299461473479367690e+00	1.095057277835178589e-01	4.079961930720432672e-02	1.621607684387396553e+00	5.256243374272819846e-01	1.486732148961382727e+00	8.027452089786548672e-01	5.594994509646422642e+00	2.574106532041128270e-01
+3.023696564487597271e+00	7.074713638764114521e+00	1.522391750286943957e+00	6.952922812547067677e-01	7.386888415687059561e-01	1.904294326226209977e+00	4.712216395755660869e+00	6.501349564540930270e-01	9.969785446062145340e+00	7.550945275312674898e-01
+1.543379307984599214e+00	6.829479885114219329e+00	3.204292732322415738e-01	2.897902351539538568e-01	1.968018159493589270e+00	1.312508995789594346e+00	2.708863147028642615e+00	1.956603963514276678e+00	3.858471150102140257e+00	7.226613324983187647e-01
+2.530459391858276952e+00	2.304294321666098710e+00	1.613466033140124711e+00	6.299550659672248365e-02	2.557517824305648446e+00	1.403315613527134742e+00	4.690924014732587466e+00	1.213893307285142553e+00	5.916330575871066699e+00	7.792844130394782631e-02
+1.928958533970001055e+00	7.096037593440583713e+00	3.299214390196449909e+00	4.754507764571461736e-01	4.595032873457040701e+00	6.269257509934247352e-01	2.676009205554832349e-01	7.111427300413641905e-02	2.311387519441321370e+00	7.607719658377676275e-01
+3.049750470993188411e+00	8.335532589038272988e+00	1.373761433026730483e+00	5.780521456251697110e-01	4.958750575379713865e+00	7.704485760265957772e-01	5.295783844382295769e+00	3.106174373836714242e-01	9.119442286621413984e+00	1.454365312917409447e-01
+1.446643143411851629e+00	1.641242803584559207e+00	2.373684364313463746e+00	8.733542221566478192e-01	8.350946657808716589e-01	1.827085120550399511e+00	1.796517849664760247e+00	1.705076201114264389e+00	8.296752770476137684e+00	7.700551493773489797e-01
+3.366516415093608217e+00	5.615004677240882813e+00	3.229775535441422818e+00	3.310459399464378283e-01	1.088020205681265562e+00	1.380181017003498711e+00	4.110524683994838391e+00	2.481465439558942165e+00	1.115804813826559716e-02	1.616539324263285282e-01
+5.339002830872147110e-01	3.839053681064814594e+00	2.333489534725208259e+00	8.432771114289439041e-02	2.102522728545276109e+00	1.709023161576461725e+00	6.917442654625217680e-01	3.462567460748395831e-01	3.782775783847899476e+00	1.071179999213418554e-01
+4.038277759901608199e+00	6.782157613841707011e+00	3.464511868539065453e+00	3.371376637740397531e-02	7.730806184225119670e-01	7.252544893421604133e-01	6.872132022199162193e-01	2.814389010091199683e+00	3.447296041223141039e+00	1.893497025003773038e-01
+2.731398993279879228e+00	3.676456867956779728e+00	6.413375855037339868e-01	6.707794732330785159e-01	7.635992572959304781e-01	6.062167860722607049e-01	2.063055943432477779e+00	7.336308576363859135e-02	7.472848003733477462e+00	1.287928899982984099e-01
+1.453840364993632228e+00	9.062482719258549979e-01	6.882205879952656424e-01	2.182327096949714385e-01	2.024410040526736232e-01	1.295642963260670966e+00	1.330150396283942271e-01	1.736145709990954078e+00	6.112044371929087561e+00	7.027845512380351822e-01
+1.210119312939017533e+00	7.630467468868430414e-01	6.359188990088751048e-01	9.883815275234506537e-01	5.934758597976730332e-01	1.831489821220492642e+00	1.310176158219893416e+00	1.258981371172946284e+00	9.653015748557789877e+00	5.710674551489269835e-01
+2.041872616241314109e+00	1.223997932765245711e+00	3.592523080817271008e+00	4.957374306008986986e-01	3.949335508277540985e+00	6.291860827029358560e-01	3.113717329409727341e+00	1.389609354520767059e+00	4.805115070835719315e+00	1.445284193078527624e-01
+3.575248649915818255e+00	8.905544750152357025e+00	2.966905439089235763e-01	4.679521298887630021e-01	3.547669603089278034e+00	1.578738230960443634e-01	6.270181915307466269e+00	7.487332787197820139e-01	7.886412615362010214e+00	8.133212834800832569e-01
+3.898698540050224981e+00	5.123757154565923067e+00	1.178107462557977492e+00	5.520746813096077954e-01	4.592951289518920621e-02	1.171909103767588389e+00	2.927267136454773322e+00	2.552935710110848078e+00	8.342615367499133683e+00	8.906236190999595737e-01
+2.108021535094918519e+00	5.560002888746932825e+00	7.214745149145316283e-01	6.765900158851164692e-01	1.023718975037019030e+00	5.744441677430218718e-01	2.485628740508086798e-01	2.832571092367738075e+00	7.596538198565406574e+00	5.914159707314822212e-01
+4.869067486253129573e+00	2.921814738200520623e+00	1.610700494896931900e+00	9.811040652599956058e-01	1.628599890640177072e+00	3.374886126447058654e-01	6.583180705038420122e+00	1.022109354202180187e+00	2.448534262177565424e+00	4.087328272831077802e-01
+2.420585719788033607e+00	4.954656265767008350e+00	2.231237042402161030e+00	9.790433090249440751e-01	3.332677810332588031e+00	6.561609460180721776e-01	8.270420803686340250e-03	3.528899693185899578e-01	2.741096663606271022e+00	1.378071713616224603e-01
+2.754906745122323031e+00	9.676710641503563082e-01	3.754240456912557367e+00	5.250193878563741423e-01	4.142397075465397371e+00	1.381722150840025476e+00	1.687280922291270358e+00	1.067933655487581790e-01	4.015412949008595467e-01	2.082096208576281748e-01
+4.888597998415178836e+00	3.014616594535894478e+00	2.837447468287521257e+00	8.929321846889970438e-02	1.593208978592462355e+00	1.432010333523069168e+00	1.362472022175857500e+00	2.023735351705774121e+00	8.040906539490745075e+00	5.333388283334727920e-02
+4.672702450324211831e+00	4.692525988250669577e+00	8.114199473892678149e-01	6.638642607029177078e-03	4.763672448857547703e-01	4.352861108378174304e-01	2.409652310337002223e+00	2.792543818597630345e+00	1.519095639879380233e+00	9.479421782536426866e-01
+1.390626568565362930e+00	5.795403282043417370e+00	5.780808828259962340e-01	6.537836869813318241e-01	4.342143903361439072e+00	1.328515786636720097e+00	5.020182370459501664e-01	7.291349036975974807e-01	8.463574453670117848e+00	2.741521287108358296e-01
+4.274877195091104198e+00	7.028513230726042060e+00	9.529844002173715189e-01	7.084183854011977255e-01	3.053184380355488781e+00	1.621308385572049371e+00	5.106963486979936295e-01	1.138778733677962673e+00	1.513533548987051613e+00	4.128356930393282109e-01
+3.624662754171856349e+00	2.618325768156440247e+00	8.047122068257333005e-01	5.662174560363387732e-01	2.262254632401385024e+00	2.208252441574161118e-01	3.245715851864902746e+00	2.088639737645078043e+00	6.869899939030401192e+00	3.833214771865146586e-01
+3.575490259832633022e+00	6.910866830500633995e+00	1.717562694591180605e+00	1.419119188604517356e-01	2.158793574868946408e+00	1.330907174346497746e+00	1.533175421100802627e+00	2.256864015809196822e+00	6.590486673228634018e+00	3.476457046878822510e-01
+2.763065587192560457e+00	3.755485876415471047e+00	2.937747270412279121e+00	6.776105276205796057e-01	1.102731743682296539e+00	1.671531786654884888e+00	3.194714296103120077e+00	1.832857589884217653e+00	5.267733310304743455e+00	9.498442020616374437e-01
+2.180692874176391349e+00	4.999495552064338710e+00	1.702888717024749976e+00	6.344642518347354265e-02	4.735873322905289839e+00	1.719599405515690238e+00	5.875922789677373359e+00	2.656004507669272208e+00	2.799792892846766357e+00	9.027899878643840248e-01
+1.498261411619749683e+00	1.440449178719120038e+00	3.793107505633872112e+00	5.092674831952860304e-01	1.034353051067818274e+00	1.569113426588875537e+00	3.771789988063644294e+00	1.328199351063930056e+00	6.225317682380070217e+00	7.271156352129748424e-02
+4.315942201307298998e-01	7.981247257459226141e+00	1.336073327630646901e+00	6.032379115696278049e-01	1.219125442838360662e+00	1.456458998083272505e+00	4.742675091300004864e-01	2.983575448822485932e+00	2.556602655333460650e+00	3.088219887595959889e-01
+4.976840216515537207e+00	5.358430906005960281e+00	6.245523297173987132e-01	2.545868317479035303e-01	9.742520666157300502e-01	1.182300247768848678e+00	4.654689808343711599e-01	2.709837605689326967e+00	5.452250849671616884e+00	6.515934524029750596e-01
+3.027576266208495070e+00	7.465991597741851393e+00	1.312320289625385605e+00	1.808157433154811145e-01	1.175791304316830654e+00	1.202757075039505796e+00	4.623859408182577901e+00	2.068219070878894339e+00	3.538760796420987731e+00	3.191906695569177721e-01
+3.186056897890198414e+00	7.744541371714031008e-01	7.520333736299331662e-01	2.346946453265875698e-01	1.939963125002858169e-01	1.102878394022341890e-01	2.892927495768838675e+00	2.226458507448537905e+00	8.040128531690038827e+00	2.426324783043983802e-01
+3.010003224219015117e+00	3.421867958770924645e+00	5.220586116504088992e-01	7.415249008538514763e-01	7.940706222571397577e-01	7.440532342345227423e-01	2.342240626863175024e+00	2.838160438547582931e+00	6.965119358930052407e+00	7.994713698433695281e-01
+1.073150862609050904e+00	5.708596454024827516e+00	2.122872605431856297e+00	3.879631915218560989e-01	2.315027253640028482e-01	9.412461989804450369e-01	1.359498791221620095e+00	2.799047819563208250e+00	6.092966540584811241e+00	6.446687874293053344e-01
+3.458319946541093426e+00	8.965823126867210391e+00	3.472581333838808959e-01	6.522372110328005235e-01	4.295269312487254298e-01	1.454437017783624775e+00	1.026918893392381404e+00	2.262804025523970530e+00	9.073155138943480580e+00	5.447998210661599838e-01
+1.800815055028895939e+00	8.579365052829048821e+00	2.541477396290954260e-01	2.397851878036376672e-01	2.759387911051200515e+00	2.947509094624349846e-01	9.515331736896013304e-01	1.743545886323498051e+00	2.156608211313451928e+00	4.566652586160673266e-01
+4.296243379948193386e+00	4.589758336708962716e+00	3.455005162367942528e+00	4.939154835666571541e-01	3.524083380370812701e-01	1.332555812949300478e+00	6.993865493780931253e+00	8.776996462395436627e-02	7.306362098970477170e+00	9.464560745296662914e-01
+4.270913173204858637e+00	4.311969821782867029e+00	1.235273021108498348e+00	1.363559640515099591e-01	4.297344674974395984e-01	5.399230520058395655e-01	4.519230441177060520e+00	2.223130570132609840e+00	2.003741389920957516e+00	3.000032742213165626e-03
+9.696108658633073585e-01	3.029563706398431311e+00	3.435820656827302866e+00	2.808855835523164091e-01	4.964076077685161792e+00	7.870171799195373463e-01	5.160598369656576700e+00	1.615991618531589236e-01	4.168851015871288368e+00	7.207503227396369061e-01
+9.857942554944587599e-01	2.241766796766214931e+00	1.094309073772436580e+00	6.171782099877928252e-01	2.164628407777784425e+00	2.967518925643408689e-01	6.897409292902939804e+00	9.171811935351321665e-01	5.578640625010033105e+00	4.324319825361930825e-01
+1.359323138290471800e+00	4.701967131801875199e-01	9.050689110105434310e-01	2.965795773291597381e-01	4.828973782133643589e+00	1.476756345192362208e+00	7.663619972606046105e-01	2.684843417506681096e+00	1.573201667699820838e+00	9.645271733524597435e-01
+6.432401347291311078e-01	4.025095134154975440e+00	3.309291976114321798e+00	3.338009834108435214e-02	3.084159326974440951e+00	7.597149110462917410e-01	1.345625764516446221e+00	1.543092560770717991e-01	5.232739162356885299e-01	5.344642145090121588e-01
+1.893475424698252141e+00	4.846915016695094280e+00	1.017577622922591463e+00	1.454379078590283259e-01	3.217212279193671698e+00	1.534170379797259010e+00	1.500675184724801436e+00	2.195608560978177248e+00	7.840082621731490065e+00	9.423202685727883887e-01
+4.916089799921347314e+00	3.077404880415298694e-01	3.341398797528096321e+00	5.790895913656423089e-01	2.109983576554563811e-01	1.849697544503176694e+00	4.269817133289845579e+00	2.772678110702025744e+00	1.597462715869091276e+00	2.066184857660853247e-01
+1.781292690950635027e+00	6.637468747977988137e+00	1.464153733648121847e+00	2.712309115523448000e-02	3.857947122149120389e+00	1.296349898299295811e+00	4.044166349775594860e+00	1.645944211560824044e+00	3.273391468292933260e-01	8.630632196811667267e-01
+8.171982958624174342e-01	8.696289780825402715e+00	3.455462363279165672e+00	8.945385963649863381e-01	2.164679436470390295e-01	8.639775191003977728e-01	9.436930182918934484e-02	2.039374760416071020e+00	7.722056720400328089e+00	7.888445971939340717e-01
+2.023302408059274970e+00	1.310618978377030786e+00	3.155456554057674889e+00	5.010819036675714377e-01	4.538509162145537346e+00	3.704064878007766737e-01	4.328283814895621973e+00	2.633257692097203950e-01	2.643788071556784747e+00	2.432016064719373372e-01
+8.102427541700596425e-01	4.125234289775202789e+00	1.517297146146757925e+00	7.389238601698774334e-01	4.067571276286313653e-01	1.735885673249011996e+00	2.664598954585144064e+00	2.602674375990287281e+00	3.116895494600373961e+00	1.762468820849705242e-01
+2.621132195703196111e+00	8.425316661290198184e+00	4.994480734649351383e-01	2.844736464195067871e-01	2.275884645154663488e+00	1.142221825149852377e+00	1.212554029681834233e+00	9.240986804861089920e-01	2.170291307332780084e-01	6.849869128634944460e-03
+1.174342088277471774e+00	5.577399746499002831e+00	3.951771200670569417e+00	5.006730198987506819e-01	3.596816929922009187e+00	4.994925384211443831e-01	4.413151350909869208e+00	1.418601174288871114e+00	4.839712146933547565e+00	9.760601828982439221e-01
+1.878213562249231749e+00	4.684555617152866169e+00	3.889299363513806895e+00	2.336972704101780707e-02	4.196904367361877775e+00	1.636484945202322683e+00	6.120210824336769662e+00	1.961635110777701918e+00	9.569470113865142125e+00	2.306199899979174406e-01
+3.431803129291879362e+00	4.012460279071989433e+00	3.786995576138590280e+00	3.401138113223747261e-01	3.171776225702972241e+00	5.533791211625698203e-01	4.380475363051796300e+00	2.758179724465671612e+00	8.238428864941681695e+00	1.611340397700082860e-01
+2.349694539920080150e+00	3.425362999949447929e+00	7.467879539582895632e-01	1.309946362139290388e-01	5.805916871698890702e-01	1.657140208444111584e+00	2.281415527558649625e+00	1.158152241249966297e+00	4.945420862933350925e+00	7.787419149878671565e-02
+2.871323242818877297e+00	6.534521291113140506e+00	3.096514924644943711e+00	6.667111185354958058e-01	4.581752374807475547e+00	4.565652293776831794e-01	6.933935392164753964e+00	7.018283743141698894e-01	4.265168364923495936e+00	6.901545552296244335e-01
+4.035690704229409675e+00	8.932606707872219332e+00	2.440117375879147676e+00	1.749555891357814641e-01	3.388836934377787546e+00	1.096540733110390020e+00	4.167898589768252116e+00	1.510982178698893286e+00	9.444730460473605760e+00	1.685190174932924556e-01
+1.895368854776446899e-01	8.111771833719306768e-01	1.496918109897968030e+00	8.739513189132178672e-01	3.271337369168346054e+00	7.030233822128264531e-01	6.401423581208871560e+00	1.029249503237427366e+00	2.009554833626514103e+00	6.565754492988090663e-01
+1.951751457247066224e+00	5.044712160044664273e+00	3.421432883480826970e+00	8.109581787816579901e-01	1.128731846702503816e+00	1.209060606591956688e+00	5.696978360178965417e+00	1.330520244258292406e+00	2.811362781860978100e+00	4.875439923021647193e-01
+7.946496095974958651e-01	7.959850102243462011e+00	8.886633323027690601e-01	8.912678301211978127e-01	1.506339073727302580e+00	1.772468056111744072e+00	5.209003007333696367e+00	1.910008906957938679e+00	6.469615179120823401e-01	4.738444478905338153e-01
+2.923037361500433029e+00	8.570876002749072242e+00	2.708321601975356430e+00	3.282691449572501252e-01	1.618103487581618039e+00	7.910655433987572316e-01	4.227704764846127006e+00	1.475926172111246215e+00	4.356475937759435091e+00	6.465232347181339989e-01
+2.522665440307211071e+00	1.963912550509897548e-01	1.722541801969912356e-02	7.082969529076817983e-01	4.976037482905154796e+00	2.468844590379837278e-02	6.706997326725380404e+00	1.309674497144936556e+00	9.860787708680341268e+00	8.357747879146570913e-02
+4.255818572678702338e+00	6.744878797873571585e+00	1.760033311394217925e+00	3.676055726983046279e-01	4.092141947481376718e+00	1.612470654985766583e+00	5.778498425542084149e+00	1.079514954339773070e+00	5.568696898336217060e+00	8.545812171197980645e-01
+2.446430878144840282e+00	5.234567351584125561e+00	5.715867278950135244e-01	7.003532329827356628e-01	4.568505639054738054e+00	1.943174193938479233e+00	5.764774523003604934e+00	1.937713945979894881e+00	6.298501537697791086e+00	9.026276578818063223e-01
+1.273163398740040364e+00	4.162415703791141119e+00	3.455865592048522394e+00	3.656764736071405064e-02	1.954721520949571190e+00	2.734474567634836983e-01	8.281588142887611470e-01	4.484584054747960957e-01	7.652386767723299954e+00	6.708336181638869800e-01
+3.272573882698126546e-01	2.771858331744477821e-01	3.650874191969583293e+00	8.313793690203402642e-01	4.721314279438468375e+00	1.073305358961462286e+00	5.882774132732964567e+00	2.199287336285580263e+00	3.798152586004966969e-01	5.333129895506141249e-01
+2.307814819672060480e+00	4.104095493932964800e+00	2.489576320635186413e+00	4.496836765063628727e-01	4.578631912903595946e+00	1.329946843468508222e+00	1.683934112573501896e-01	7.002919109156724442e-02	2.471193668340454508e+00	5.351430311502515247e-01
diff -r daece0f27108 -r fe627c026dc6 test-data/pipeline_params05.tabular
--- a/test-data/pipeline_params05.tabular	Wed Mar 11 17:11:13 2020 +0000
+++ b/test-data/pipeline_params05.tabular	Tue Apr 13 20:44:53 2021 +0000
@@ -13,6 +13,6 @@
 *	n_jobs	n_jobs: 1
 @	oob_score	oob_score: False
 @	random_state	random_state: 42
-*	verbose	verbose: 0
+@	verbose	verbose: 0
 @	warm_start	warm_start: False
 	Note:	@, params eligible for search in searchcv tool.
diff -r daece0f27108 -r fe627c026dc6 test-data/pipeline_params18
--- a/test-data/pipeline_params18	Wed Mar 11 17:11:13 2020 +0000
+++ b/test-data/pipeline_params18	Tue Apr 13 20:44:53 2021 +0000
@@ -47,7 +47,7 @@
                                                            output_distribution='uniform',
                                                            random_state=10,
                                                            subsample=100000))"
-*	verbose	verbose: False
+@	verbose	verbose: False
 @	powertransformer__copy	powertransformer__copy: True
 @	powertransformer__method	powertransformer__method: 'yeo-johnson'
 @	powertransformer__standardize	powertransformer__standardize: True
@@ -75,7 +75,7 @@
 *	transformedtargetregressor__regressor__n_jobs	transformedtargetregressor__regressor__n_jobs: 1
 @	transformedtargetregressor__regressor__oob_score	transformedtargetregressor__regressor__oob_score: False
 @	transformedtargetregressor__regressor__random_state	transformedtargetregressor__regressor__random_state: 10
-*	transformedtargetregressor__regressor__verbose	transformedtargetregressor__regressor__verbose: 0
+@	transformedtargetregressor__regressor__verbose	transformedtargetregressor__regressor__verbose: 0
 @	transformedtargetregressor__regressor__warm_start	transformedtargetregressor__regressor__warm_start: False
 @	transformedtargetregressor__transformer	"transformedtargetregressor__transformer: QuantileTransformer(copy=True, ignore_implicit_zeros=False, n_quantiles=1000,
                     output_distribution='uniform', random_state=10,
diff -r daece0f27108 -r fe627c026dc6 test-data/train_test_eval_model01
Binary file test-data/train_test_eval_model01 has changed
diff -r daece0f27108 -r fe627c026dc6 test-data/train_test_eval_weights01.h5
Binary file test-data/train_test_eval_weights01.h5 has changed
diff -r daece0f27108 -r fe627c026dc6 test-data/train_test_eval_weights02.h5
Binary file test-data/train_test_eval_weights02.h5 has changed
diff -r daece0f27108 -r fe627c026dc6 to_categorical.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/to_categorical.py	Tue Apr 13 20:44:53 2021 +0000
@@ -0,0 +1,50 @@
+import argparse
+import json
+import warnings
+
+import numpy as np
+import pandas as pd
+from keras.utils import to_categorical
+
+
+def main(inputs, infile, outfile, num_classes=None):
+    """
+    Parameters
+    ----------
+    inputs : str
+        File path to galaxy tool parameter
+
+    infile : str
+        File path of input vector
+
+    outfile : str
+        File path to output matrix
+
+    num_classes : int, optional
+        Total number of classes. If None, it is inferred as (largest number in y) + 1
+
+    """
+    warnings.simplefilter("ignore")
+
+    with open(inputs, "r") as param_handler:
+        params = json.load(param_handler)
+
+    input_header = params["header0"]
+    header = "infer" if input_header else None
+
+    input_vector = pd.read_csv(infile, sep="\t", header=header)
+
+    output_matrix = to_categorical(input_vector, num_classes=num_classes)
+
+    np.savetxt(outfile, output_matrix, fmt="%d", delimiter="\t")
+
+
+if __name__ == "__main__":
+    aparser = argparse.ArgumentParser()
+    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
+    aparser.add_argument("-y", "--infile", dest="infile")
+    aparser.add_argument("-n", "--num_classes", dest="num_classes", type=int, default=None)
+    aparser.add_argument("-o", "--outfile", dest="outfile")
+    args = aparser.parse_args()
+
+    main(args.inputs, args.infile, args.outfile, args.num_classes)
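
For reviewers, the new script is essentially a thin wrapper around keras.utils.to_categorical; a minimal sketch of the underlying call, assuming Keras is installed (the label values below are illustrative):

    import numpy as np
    from keras.utils import to_categorical

    # with num_classes=None the matrix width is inferred as max(y) + 1
    y = np.array([0, 2, 1, 3])
    print(to_categorical(y).astype(int))
    # [[1 0 0 0]
    #  [0 0 1 0]
    #  [0 1 0 0]
    #  [0 0 0 1]]
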
diff -r daece0f27108 -r fe627c026dc6 train_test_eval.py
--- a/train_test_eval.py	Wed Mar 11 17:11:13 2020 +0000
+++ b/train_test_eval.py	Tue Apr 13 20:44:53 2021 +0000
@@ -1,59 +1,64 @@
 import argparse
-import joblib
 import json
-import numpy as np
 import os
-import pandas as pd
 import pickle
 import warnings
 from itertools import chain
+
+import joblib
+import numpy as np
+import pandas as pd
+from galaxy_ml.model_validations import train_test_split
+from galaxy_ml.utils import (
+    get_module,
+    get_scoring,
+    load_model,
+    read_columns,
+    SafeEval,
+    try_get_attr,
+)
 from scipy.io import mmread
-from sklearn.base import clone
-from sklearn import (cluster, compose, decomposition, ensemble,
-                     feature_extraction, feature_selection,
-                     gaussian_process, kernel_approximation, metrics,
-                     model_selection, naive_bayes, neighbors,
-                     pipeline, preprocessing, svm, linear_model,
-                     tree, discriminant_analysis)
-from sklearn.exceptions import FitFailedWarning
+from sklearn import pipeline
 from sklearn.metrics.scorer import _check_multimetric_scoring
-from sklearn.model_selection._validation import _score, cross_validate
 from sklearn.model_selection import _search, _validation
+from sklearn.model_selection._validation import _score
 from sklearn.utils import indexable, safe_indexing
 
-from galaxy_ml.model_validations import train_test_split
-from galaxy_ml.utils import (SafeEval, get_scoring, load_model,
-                             read_columns, try_get_attr, get_module)
 
+_fit_and_score = try_get_attr("galaxy_ml.model_validations", "_fit_and_score")
+setattr(_search, "_fit_and_score", _fit_and_score)
+setattr(_validation, "_fit_and_score", _fit_and_score)
 
-_fit_and_score = try_get_attr('galaxy_ml.model_validations', '_fit_and_score')
-setattr(_search, '_fit_and_score', _fit_and_score)
-setattr(_validation, '_fit_and_score', _fit_and_score)
-
-N_JOBS = int(os.environ.get('GALAXY_SLOTS', 1))
-CACHE_DIR = os.path.join(os.getcwd(), 'cached')
+N_JOBS = int(os.environ.get("GALAXY_SLOTS", 1))
+CACHE_DIR = os.path.join(os.getcwd(), "cached")
 del os
-NON_SEARCHABLE = ('n_jobs', 'pre_dispatch', 'memory', '_path',
-                  'nthread', 'callbacks')
-ALLOWED_CALLBACKS = ('EarlyStopping', 'TerminateOnNaN', 'ReduceLROnPlateau',
-                     'CSVLogger', 'None')
+NON_SEARCHABLE = ("n_jobs", "pre_dispatch", "memory", "_path", "nthread", "callbacks")
+ALLOWED_CALLBACKS = (
+    "EarlyStopping",
+    "TerminateOnNaN",
+    "ReduceLROnPlateau",
+    "CSVLogger",
+    "None",
+)
 
 
 def _eval_swap_params(params_builder):
     swap_params = {}
 
-    for p in params_builder['param_set']:
-        swap_value = p['sp_value'].strip()
-        if swap_value == '':
+    for p in params_builder["param_set"]:
+        swap_value = p["sp_value"].strip()
+        if swap_value == "":
             continue
 
-        param_name = p['sp_name']
+        param_name = p["sp_name"]
         if param_name.lower().endswith(NON_SEARCHABLE):
-            warnings.warn("Warning: `%s` is not eligible for search and was "
-                          "omitted!" % param_name)
+            warnings.warn(
+                "Warning: `%s` is not eligible for search and was "
+                "omitted!" % param_name
+            )
             continue
 
-        if not swap_value.startswith(':'):
+        if not swap_value.startswith(":"):
             safe_eval = SafeEval(load_scipy=True, load_numpy=True)
             ev = safe_eval(swap_value)
         else:
@@ -80,23 +85,24 @@
         else:
             new_arrays.append(arr)
 
-    if kwargs['shuffle'] == 'None':
-        kwargs['shuffle'] = None
+    if kwargs["shuffle"] == "None":
+        kwargs["shuffle"] = None
 
-    group_names = kwargs.pop('group_names', None)
+    group_names = kwargs.pop("group_names", None)
 
     if group_names is not None and group_names.strip():
-        group_names = [name.strip() for name in
-                       group_names.split(',')]
+        group_names = [name.strip() for name in group_names.split(",")]
         new_arrays = indexable(*new_arrays)
-        groups = kwargs['labels']
+        groups = kwargs["labels"]
         n_samples = new_arrays[0].shape[0]
         index_arr = np.arange(n_samples)
         test = index_arr[np.isin(groups, group_names)]
         train = index_arr[~np.isin(groups, group_names)]
-        rval = list(chain.from_iterable(
-            (safe_indexing(a, train),
-             safe_indexing(a, test)) for a in new_arrays))
+        rval = list(
+            chain.from_iterable(
+                (safe_indexing(a, train), safe_indexing(a, test)) for a in new_arrays
+            )
+        )
     else:
         rval = train_test_split(*new_arrays, **kwargs)
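
The group branch above reduces to boolean indexing over the group labels; a minimal sketch with plain numpy (array contents are illustrative, and fancy indexing stands in for sklearn's safe_indexing):

    import numpy as np
    from itertools import chain

    X = np.arange(10).reshape(5, 2)
    y = np.array([0, 1, 0, 1, 0])
    groups = np.array(["a", "a", "b", "b", "c"])

    # hold out every sample whose group is named "b"
    idx = np.arange(len(groups))
    test = idx[np.isin(groups, ["b"])]
    train = idx[~np.isin(groups, ["b"])]
    X_train, X_test, y_train, y_test = chain.from_iterable(
        (a[train], a[test]) for a in (X, y)
    )
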
 
@@ -106,11 +112,20 @@
     return rval
 
 
-def main(inputs, infile_estimator, infile1, infile2,
-         outfile_result, outfile_object=None,
-         outfile_weights=None, groups=None,
-         ref_seq=None, intervals=None, targets=None,
-         fasta_path=None):
+def main(
+    inputs,
+    infile_estimator,
+    infile1,
+    infile2,
+    outfile_result,
+    outfile_object=None,
+    outfile_weights=None,
+    groups=None,
+    ref_seq=None,
+    intervals=None,
+    targets=None,
+    fasta_path=None,
+):
     """
     Parameters
     ----------
@@ -150,17 +165,17 @@
     fasta_path : str
         File path to dataset containing fasta file
     """
-    warnings.simplefilter('ignore')
+    warnings.simplefilter("ignore")
 
-    with open(inputs, 'r') as param_handler:
+    with open(inputs, "r") as param_handler:
         params = json.load(param_handler)
 
     #  load estimator
-    with open(infile_estimator, 'rb') as estimator_handler:
+    with open(infile_estimator, "rb") as estimator_handler:
         estimator = load_model(estimator_handler)
 
     # swap hyperparameter
-    swapping = params['experiment_schemes']['hyperparams_swapping']
+    swapping = params["experiment_schemes"]["hyperparams_swapping"]
     swap_params = _eval_swap_params(swapping)
     estimator.set_params(**swap_params)
 
@@ -169,38 +184,41 @@
     # store read dataframe object
     loaded_df = {}
 
-    input_type = params['input_options']['selected_input']
+    input_type = params["input_options"]["selected_input"]
     # tabular input
-    if input_type == 'tabular':
-        header = 'infer' if params['input_options']['header1'] else None
-        column_option = (params['input_options']['column_selector_options_1']
-                         ['selected_column_selector_option'])
-        if column_option in ['by_index_number', 'all_but_by_index_number',
-                             'by_header_name', 'all_but_by_header_name']:
-            c = params['input_options']['column_selector_options_1']['col1']
+    if input_type == "tabular":
+        header = "infer" if params["input_options"]["header1"] else None
+        column_option = params["input_options"]["column_selector_options_1"][
+            "selected_column_selector_option"
+        ]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = params["input_options"]["column_selector_options_1"]["col1"]
         else:
             c = None
 
         df_key = infile1 + repr(header)
-        df = pd.read_csv(infile1, sep='\t', header=header,
-                         parse_dates=True)
+        df = pd.read_csv(infile1, sep="\t", header=header, parse_dates=True)
         loaded_df[df_key] = df
 
         X = read_columns(df, c=c, c_option=column_option).astype(float)
     # sparse input
-    elif input_type == 'sparse':
-        X = mmread(open(infile1, 'r'))
+    elif input_type == "sparse":
+        X = mmread(open(infile1, "r"))
 
     # fasta_file input
-    elif input_type == 'seq_fasta':
-        pyfaidx = get_module('pyfaidx')
+    elif input_type == "seq_fasta":
+        pyfaidx = get_module("pyfaidx")
         sequences = pyfaidx.Fasta(fasta_path)
         n_seqs = len(sequences.keys())
         X = np.arange(n_seqs)[:, np.newaxis]
         for param in estimator_params.keys():
-            if param.endswith('fasta_path'):
-                estimator.set_params(
-                    **{param: fasta_path})
+            if param.endswith("fasta_path"):
+                estimator.set_params(**{param: fasta_path})
                 break
         else:
             raise ValueError(
@@ -209,25 +227,31 @@
                 "KerasGBatchClassifier with "
                 "FastaDNABatchGenerator/FastaProteinBatchGenerator "
                 "or having GenomeOneHotEncoder/ProteinOneHotEncoder "
-                "in pipeline!")
+                "in pipeline!"
+            )
 
-    elif input_type == 'refseq_and_interval':
+    elif input_type == "refseq_and_interval":
         path_params = {
-            'data_batch_generator__ref_genome_path': ref_seq,
-            'data_batch_generator__intervals_path': intervals,
-            'data_batch_generator__target_path': targets
+            "data_batch_generator__ref_genome_path": ref_seq,
+            "data_batch_generator__intervals_path": intervals,
+            "data_batch_generator__target_path": targets,
         }
         estimator.set_params(**path_params)
         n_intervals = sum(1 for line in open(intervals))
         X = np.arange(n_intervals)[:, np.newaxis]
 
     # Get target y
-    header = 'infer' if params['input_options']['header2'] else None
-    column_option = (params['input_options']['column_selector_options_2']
-                     ['selected_column_selector_option2'])
-    if column_option in ['by_index_number', 'all_but_by_index_number',
-                         'by_header_name', 'all_but_by_header_name']:
-        c = params['input_options']['column_selector_options_2']['col2']
+    header = "infer" if params["input_options"]["header2"] else None
+    column_option = params["input_options"]["column_selector_options_2"][
+        "selected_column_selector_option2"
+    ]
+    if column_option in [
+        "by_index_number",
+        "all_but_by_index_number",
+        "by_header_name",
+        "all_but_by_header_name",
+    ]:
+        c = params["input_options"]["column_selector_options_2"]["col2"]
     else:
         c = None
 
@@ -235,37 +259,39 @@
     if df_key in loaded_df:
         infile2 = loaded_df[df_key]
     else:
-        infile2 = pd.read_csv(infile2, sep='\t',
-                              header=header, parse_dates=True)
+        infile2 = pd.read_csv(infile2, sep="\t", header=header, parse_dates=True)
         loaded_df[df_key] = infile2
 
-    y = read_columns(
-            infile2,
-            c=c,
-            c_option=column_option,
-            sep='\t',
-            header=header,
-            parse_dates=True)
+    y = read_columns(
+        infile2,
+        c=c,
+        c_option=column_option,
+        sep="\t",
+        header=header,
+        parse_dates=True,
+    )
     if len(y.shape) == 2 and y.shape[1] == 1:
         y = y.ravel()
-    if input_type == 'refseq_and_interval':
-        estimator.set_params(
-            data_batch_generator__features=y.ravel().tolist())
+    if input_type == "refseq_and_interval":
+        estimator.set_params(data_batch_generator__features=y.ravel().tolist())
         y = None
     # end y
 
     # load groups
     if groups:
-        groups_selector = (params['experiment_schemes']['test_split']
-                                 ['split_algos']).pop('groups_selector')
+        groups_selector = (
+            params["experiment_schemes"]["test_split"]["split_algos"]
+        ).pop("groups_selector")
 
-        header = 'infer' if groups_selector['header_g'] else None
-        column_option = \
-            (groups_selector['column_selector_options_g']
-                            ['selected_column_selector_option_g'])
-        if column_option in ['by_index_number', 'all_but_by_index_number',
-                             'by_header_name', 'all_but_by_header_name']:
-            c = groups_selector['column_selector_options_g']['col_g']
+        header = "infer" if groups_selector["header_g"] else None
+        column_option = groups_selector["column_selector_options_g"][
+            "selected_column_selector_option_g"
+        ]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = groups_selector["column_selector_options_g"]["col_g"]
         else:
             c = None
 
@@ -273,13 +299,12 @@
         if df_key in loaded_df:
             groups = loaded_df[df_key]
 
-        groups = read_columns(
-                groups,
-                c=c,
-                c_option=column_option,
-                sep='\t',
-                header=header,
-                parse_dates=True)
+        groups = read_columns(
+            groups,
+            c=c,
+            c_option=column_option,
+            sep="\t",
+            header=header,
+            parse_dates=True,
+        )
         groups = groups.ravel()
 
     # del loaded_df
@@ -288,15 +313,15 @@
     # handle memory
     memory = joblib.Memory(location=CACHE_DIR, verbose=0)
     # caching iraps_core fits can increase search speed significantly
-    if estimator.__class__.__name__ == 'IRAPSClassifier':
+    if estimator.__class__.__name__ == "IRAPSClassifier":
         estimator.set_params(memory=memory)
     else:
         # For iraps buried in pipeline
         new_params = {}
         for p, v in estimator_params.items():
-            if p.endswith('memory'):
+            if p.endswith("memory"):
                 # for case of `__irapsclassifier__memory`
-                if len(p) > 8 and p[:-8].endswith('irapsclassifier'):
+                if len(p) > 8 and p[:-8].endswith("irapsclassifier"):
                     # caching iraps_core fits can increase search
                     # speed significantly
                     new_params[p] = memory
@@ -305,88 +330,98 @@
                 elif v:
                     new_params[p] = None
             # handle n_jobs
-            elif p.endswith('n_jobs'):
+            elif p.endswith("n_jobs"):
                 # For now, 1 CPU is suggested for irapsclassifier
-                if len(p) > 8 and p[:-8].endswith('irapsclassifier'):
+                if len(p) > 8 and p[:-8].endswith("irapsclassifier"):
                     new_params[p] = 1
                 else:
                     new_params[p] = N_JOBS
             # for security reasons, types of callbacks are limited
-            elif p.endswith('callbacks'):
+            elif p.endswith("callbacks"):
                 for cb in v:
-                    cb_type = cb['callback_selection']['callback_type']
+                    cb_type = cb["callback_selection"]["callback_type"]
                     if cb_type not in ALLOWED_CALLBACKS:
-                        raise ValueError(
-                            "Prohibited callback type: %s!" % cb_type)
+                        raise ValueError("Prohibited callback type: %s!" % cb_type)
 
         estimator.set_params(**new_params)
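
The caching logic above is standard joblib memoization wired into nested pipeline parameters; a minimal sketch of the same idea on a plain scikit-learn pipeline (the estimator choice is illustrative):

    from tempfile import mkdtemp

    import joblib
    from sklearn.linear_model import LogisticRegression
    from sklearn.pipeline import Pipeline
    from sklearn.preprocessing import StandardScaler

    # fitted transformers are cached on disk, so repeated fits during a
    # parameter search can reuse earlier results instead of refitting
    memory = joblib.Memory(location=mkdtemp(), verbose=0)
    pipe = Pipeline(
        [("scale", StandardScaler()), ("clf", LogisticRegression())],
        memory=memory,
    )
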
 
     # handle scorer, convert to scorer dict
-    scoring = params['experiment_schemes']['metrics']['scoring']
+    # Check if scoring is specified
+    scoring = params["experiment_schemes"]["metrics"].get("scoring", None)
+    if scoring is not None:
+        # get_scoring() expects secondary_scoring to be a comma-separated string (not a list)
+        # Check if secondary_scoring is specified
+        secondary_scoring = scoring.get("secondary_scoring", None)
+        if secondary_scoring is not None:
+            # If secondary_scoring is specified, convert the list into a comma-separated string
+            scoring["secondary_scoring"] = ",".join(secondary_scoring)
     scorer = get_scoring(scoring)
     scorer, _ = _check_multimetric_scoring(estimator, scoring=scorer)
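
The secondary_scoring handling above is only a list-to-string join performed before get_scoring() is called; for example (parameter values here are illustrative):

    scoring = {
        "primary_scoring": "r2",
        "secondary_scoring": ["neg_mean_absolute_error", "neg_mean_squared_error"],
    }
    if isinstance(scoring.get("secondary_scoring"), list):
        scoring["secondary_scoring"] = ",".join(scoring["secondary_scoring"])
    # -> "neg_mean_absolute_error,neg_mean_squared_error"
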
 
     # handle test (first) split
-    test_split_options = (params['experiment_schemes']
-                                ['test_split']['split_algos'])
+    test_split_options = params["experiment_schemes"]["test_split"]["split_algos"]
 
-    if test_split_options['shuffle'] == 'group':
-        test_split_options['labels'] = groups
-    if test_split_options['shuffle'] == 'stratified':
+    if test_split_options["shuffle"] == "group":
+        test_split_options["labels"] = groups
+    if test_split_options["shuffle"] == "stratified":
         if y is not None:
-            test_split_options['labels'] = y
+            test_split_options["labels"] = y
         else:
-            raise ValueError("Stratified shuffle split is not "
-                             "applicable on empty target values!")
+            raise ValueError(
+                "Stratified shuffle split is not applicable on empty target values!"
+            )
 
-    X_train, X_test, y_train, y_test, groups_train, groups_test = \
-        train_test_split_none(X, y, groups, **test_split_options)
+    X_train, X_test, y_train, y_test, groups_train, _groups_test = train_test_split_none(
+        X, y, groups, **test_split_options
+    )
 
-    exp_scheme = params['experiment_schemes']['selected_exp_scheme']
+    exp_scheme = params["experiment_schemes"]["selected_exp_scheme"]
 
     # handle validation (second) split
-    if exp_scheme == 'train_val_test':
-        val_split_options = (params['experiment_schemes']
-                                   ['val_split']['split_algos'])
+    if exp_scheme == "train_val_test":
+        val_split_options = params["experiment_schemes"]["val_split"]["split_algos"]
 
-        if val_split_options['shuffle'] == 'group':
-            val_split_options['labels'] = groups_train
-        if val_split_options['shuffle'] == 'stratified':
+        if val_split_options["shuffle"] == "group":
+            val_split_options["labels"] = groups_train
+        if val_split_options["shuffle"] == "stratified":
             if y_train is not None:
-                val_split_options['labels'] = y_train
+                val_split_options["labels"] = y_train
             else:
-                raise ValueError("Stratified shuffle split is not "
-                                 "applicable on empty target values!")
+                raise ValueError(
+                    "Stratified shuffle split is not "
+                    "applicable on empty target values!"
+                )
 
-        X_train, X_val, y_train, y_val, groups_train, groups_val = \
-            train_test_split_none(X_train, y_train, groups_train,
-                                  **val_split_options)
+        (
+            X_train,
+            X_val,
+            y_train,
+            y_val,
+            groups_train,
+            _groups_val,
+        ) = train_test_split_none(X_train, y_train, groups_train, **val_split_options)
 
     # train and eval
-    if hasattr(estimator, 'validation_data'):
-        if exp_scheme == 'train_val_test':
-            estimator.fit(X_train, y_train,
-                          validation_data=(X_val, y_val))
+    if hasattr(estimator, "validation_data"):
+        if exp_scheme == "train_val_test":
+            estimator.fit(X_train, y_train, validation_data=(X_val, y_val))
         else:
-            estimator.fit(X_train, y_train,
-                          validation_data=(X_test, y_test))
+            estimator.fit(X_train, y_train, validation_data=(X_test, y_test))
     else:
         estimator.fit(X_train, y_train)
 
-    if hasattr(estimator, 'evaluate'):
-        scores = estimator.evaluate(X_test, y_test=y_test,
-                                    scorer=scorer,
-                                    is_multimetric=True)
+    if hasattr(estimator, "evaluate"):
+        scores = estimator.evaluate(
+            X_test, y_test=y_test, scorer=scorer, is_multimetric=True
+        )
     else:
-        scores = _score(estimator, X_test, y_test, scorer,
-                        is_multimetric=True)
+        scores = _score(estimator, X_test, y_test, scorer, is_multimetric=True)
     # handle output
     for name, score in scores.items():
         scores[name] = [score]
     df = pd.DataFrame(scores)
     df = df[sorted(df.columns)]
-    df.to_csv(path_or_buf=outfile_result, sep='\t',
-              header=True, index=False)
+    df.to_csv(path_or_buf=outfile_result, sep="\t", header=True, index=False)
 
     memory.clear(warn=False)
 
@@ -395,23 +430,25 @@
         if isinstance(estimator, pipeline.Pipeline):
             main_est = estimator.steps[-1][-1]
 
-        if hasattr(main_est, 'model_') \
-                and hasattr(main_est, 'save_weights'):
+        if hasattr(main_est, "model_") and hasattr(main_est, "save_weights"):
             if outfile_weights:
                 main_est.save_weights(outfile_weights)
-            del main_est.model_
-            del main_est.fit_params
-            del main_est.model_class_
-            del main_est.validation_data
-            if getattr(main_est, 'data_generator_', None):
+            if getattr(main_est, "model_", None):
+                del main_est.model_
+            if getattr(main_est, "fit_params", None):
+                del main_est.fit_params
+            if getattr(main_est, "model_class_", None):
+                del main_est.model_class_
+            if getattr(main_est, "validation_data", None):
+                del main_est.validation_data
+            if getattr(main_est, "data_generator_", None):
                 del main_est.data_generator_
 
-        with open(outfile_object, 'wb') as output_handler:
-            pickle.dump(estimator, output_handler,
-                        pickle.HIGHEST_PROTOCOL)
+        with open(outfile_object, "wb") as output_handler:
+            pickle.dump(estimator, output_handler, pickle.HIGHEST_PROTOCOL)
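
The guarded deletions above exist because live Keras objects attached to the wrapper (the compiled model, data generators, validation data) are generally not picklable. A compact equivalent of the same cleanup, as a sketch (attribute names are taken from the code above; the truthiness checks are simplified to is-not-None guards):

    for attr in ("model_", "fit_params", "model_class_",
                 "validation_data", "data_generator_"):
        if getattr(main_est, attr, None) is not None:
            delattr(main_est, attr)
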
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     aparser = argparse.ArgumentParser()
     aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
     aparser.add_argument("-e", "--estimator", dest="infile_estimator")
@@ -427,8 +464,17 @@
     aparser.add_argument("-f", "--fasta_path", dest="fasta_path")
     args = aparser.parse_args()
 
-    main(args.inputs, args.infile_estimator, args.infile1, args.infile2,
-         args.outfile_result, outfile_object=args.outfile_object,
-         outfile_weights=args.outfile_weights, groups=args.groups,
-         ref_seq=args.ref_seq, intervals=args.intervals,
-         targets=args.targets, fasta_path=args.fasta_path)
+    main(
+        args.inputs,
+        args.infile_estimator,
+        args.infile1,
+        args.infile2,
+        args.outfile_result,
+        outfile_object=args.outfile_object,
+        outfile_weights=args.outfile_weights,
+        groups=args.groups,
+        ref_seq=args.ref_seq,
+        intervals=args.intervals,
+        targets=args.targets,
+        fasta_path=args.fasta_path,
+    )
diff -r daece0f27108 -r fe627c026dc6 train_test_split.py
--- a/train_test_split.py	Wed Mar 11 17:11:13 2020 +0000
+++ b/train_test_split.py	Tue Apr 13 20:44:53 2021 +0000
@@ -1,15 +1,14 @@
 import argparse
 import json
-import pandas as pd
 import warnings
 
+import pandas as pd
 from galaxy_ml.model_validations import train_test_split
 from galaxy_ml.utils import get_cv, read_columns
 
 
-def _get_single_cv_split(params, array, infile_labels=None,
-                         infile_groups=None):
-    """ output (train, test) subset from a cv splitter
+def _get_single_cv_split(params, array, infile_labels=None, infile_groups=None):
+    """output (train, test) subset from a cv splitter
 
     Parameters
     ----------
@@ -25,45 +24,50 @@
     y = None
     groups = None
 
-    nth_split = params['mode_selection']['nth_split']
+    nth_split = params["mode_selection"]["nth_split"]
 
     # read groups
     if infile_groups:
-        header = 'infer' if (params['mode_selection']['cv_selector']
-                             ['groups_selector']['header_g']) else None
-        column_option = (params['mode_selection']['cv_selector']
-                         ['groups_selector']['column_selector_options_g']
-                         ['selected_column_selector_option_g'])
-        if column_option in ['by_index_number', 'all_but_by_index_number',
-                             'by_header_name', 'all_but_by_header_name']:
-            c = (params['mode_selection']['cv_selector']['groups_selector']
-                 ['column_selector_options_g']['col_g'])
+        groups_selector = params["mode_selection"]["cv_selector"]["groups_selector"]
+        header = "infer" if groups_selector["header_g"] else None
+        column_option = groups_selector["column_selector_options_g"][
+            "selected_column_selector_option_g"
+        ]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = groups_selector["column_selector_options_g"]["col_g"]
         else:
             c = None
 
-        groups = read_columns(infile_groups, c=c, c_option=column_option,
-                              sep='\t', header=header, parse_dates=True)
+        groups = read_columns(
+            infile_groups,
+            c=c,
+            c_option=column_option,
+            sep="\t",
+            header=header,
+            parse_dates=True,
+        )
         groups = groups.ravel()
 
-        params['mode_selection']['cv_selector']['groups_selector'] = groups
+        params["mode_selection"]["cv_selector"]["groups_selector"] = groups
 
     # read labels
     if infile_labels:
-        target_input = (params['mode_selection']
-                        ['cv_selector'].pop('target_input'))
-        header = 'infer' if target_input['header1'] else None
-        col_index = target_input['col'][0] - 1
-        df = pd.read_csv(infile_labels, sep='\t', header=header,
-                         parse_dates=True)
+        target_input = params["mode_selection"]["cv_selector"].pop("target_input")
+        header = "infer" if target_input["header1"] else None
+        col_index = target_input["col"][0] - 1
+        df = pd.read_csv(infile_labels, sep="\t", header=header, parse_dates=True)
         y = df.iloc[:, col_index].values
 
     # construct the cv splitter object
-    splitter, groups = get_cv(params['mode_selection']['cv_selector'])
+    splitter, groups = get_cv(params["mode_selection"]["cv_selector"])
 
     total_n_splits = splitter.get_n_splits(array.values, y=y, groups=groups)
     if nth_split > total_n_splits:
-        raise ValueError("Total number of splits is {}, but got `nth_split` "
-                         "= {}".format(total_n_splits, nth_split))
+        raise ValueError(
+            "Total number of splits is {}, but got `nth_split` = {}".format(
+                total_n_splits, nth_split
+            )
+        )
 
     i = 1
     for train_index, test_index in splitter.split(array.values, y=y, groups=groups):
@@ -79,8 +83,14 @@
     return train, test
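
The nth-split extraction above follows the usual splitter protocol; a minimal sketch with plain scikit-learn, assuming a KFold splitter (the splitter choice and fold count are illustrative):

    import numpy as np
    from sklearn.model_selection import KFold

    array = np.arange(20).reshape(10, 2)
    splitter = KFold(n_splits=5)
    nth_split = 3

    i = 1
    for train_index, test_index in splitter.split(array):
        if i == nth_split:
            train, test = array[train_index], array[test_index]
            break
        i += 1
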
 
 
-def main(inputs, infile_array, outfile_train, outfile_test,
-         infile_labels=None, infile_groups=None):
+def main(
+    inputs,
+    infile_array,
+    outfile_train,
+    outfile_test,
+    infile_labels=None,
+    infile_groups=None,
+):
     """
     Parameters
     ----------
@@ -102,45 +112,41 @@
     outfile_test : str
         File path to dataset containing test split
     """
-    warnings.simplefilter('ignore')
+    warnings.simplefilter("ignore")
 
-    with open(inputs, 'r') as param_handler:
+    with open(inputs, "r") as param_handler:
         params = json.load(param_handler)
 
-    input_header = params['header0']
-    header = 'infer' if input_header else None
-    array = pd.read_csv(infile_array, sep='\t', header=header,
-                        parse_dates=True)
+    input_header = params["header0"]
+    header = "infer" if input_header else None
+    array = pd.read_csv(infile_array, sep="\t", header=header, parse_dates=True)
 
     # train test split
-    if params['mode_selection']['selected_mode'] == 'train_test_split':
-        options = params['mode_selection']['options']
-        shuffle_selection = options.pop('shuffle_selection')
-        options['shuffle'] = shuffle_selection['shuffle']
+    if params["mode_selection"]["selected_mode"] == "train_test_split":
+        options = params["mode_selection"]["options"]
+        shuffle_selection = options.pop("shuffle_selection")
+        options["shuffle"] = shuffle_selection["shuffle"]
         if infile_labels:
-            header = 'infer' if shuffle_selection['header1'] else None
-            col_index = shuffle_selection['col'][0] - 1
-            df = pd.read_csv(infile_labels, sep='\t', header=header,
-                             parse_dates=True)
+            header = "infer" if shuffle_selection["header1"] else None
+            col_index = shuffle_selection["col"][0] - 1
+            df = pd.read_csv(infile_labels, sep="\t", header=header, parse_dates=True)
             labels = df.iloc[:, col_index].values
-            options['labels'] = labels
+            options["labels"] = labels
 
         train, test = train_test_split(array, **options)
 
     # cv splitter
     else:
-        train, test = _get_single_cv_split(params, array,
-                                           infile_labels=infile_labels,
-                                           infile_groups=infile_groups)
+        train, test = _get_single_cv_split(
+            params, array, infile_labels=infile_labels, infile_groups=infile_groups
+        )
 
     print("Input shape: %s" % repr(array.shape))
     print("Train shape: %s" % repr(train.shape))
     print("Test shape: %s" % repr(test.shape))
-    train.to_csv(outfile_train, sep='\t', header=input_header, index=False)
-    test.to_csv(outfile_test, sep='\t', header=input_header, index=False)
+    train.to_csv(outfile_train, sep="\t", header=input_header, index=False)
+    test.to_csv(outfile_test, sep="\t", header=input_header, index=False)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     aparser = argparse.ArgumentParser()
     aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
     aparser.add_argument("-X", "--infile_array", dest="infile_array")
@@ -150,5 +156,11 @@
     aparser.add_argument("-t", "--outfile_test", dest="outfile_test")
     args = aparser.parse_args()
 
-    main(args.inputs, args.infile_array, args.outfile_train,
-         args.outfile_test, args.infile_labels, args.infile_groups)
+    main(
+        args.inputs,
+        args.infile_array,
+        args.outfile_train,
+        args.outfile_test,
+        args.infile_labels,
+        args.infile_groups,
+    )