changeset 101:4235fcb5b090 draft

Uploaded
author bgruening
date Sun, 02 Mar 2014 11:49:08 -0500
parents d2a6bbeeb474
children 5f4562dc0890
files MiClip.xml MiClip_wrapper.sh cca.py cca.xml cor.py cor.xml gsummary.py gsummary.xml histogram.py histogram.xml kcca.py kcca.xml kpca.py kpca.xml linear_regression.py linear_regression.xml logistic_regression_vif.py logistic_regression_vif.xml partialR_square.py partialR_square.xml pca.py pca.xml readme.rst scatterplot.py scatterplot.xml short_reads_figure_score.py short_reads_figure_score.xml statistic_tools_macros.xml test-data/1.bed test-data/454.qual test-data/454Score.png test-data/cca_out1.tabular test-data/cca_out2.pdf test-data/cor.tabular test-data/cor_out.txt test-data/gsummary_out1.tabular test-data/histogram_in1.tabular test-data/histogram_out1.pdf test-data/histooutold.pdf test-data/iris.tabular test-data/kcca_out1.tabular test-data/kcca_out2.tabular test-data/kpca_out1.tabular test-data/kpca_out2.pdf test-data/logreg_inp.tabular test-data/logreg_out2.tabular test-data/partialR_result.tabular test-data/pca_out1.tabular test-data/pca_out2.pdf test-data/pca_out3.tabular test-data/pca_out4.pdf test-data/pca_out5.tabular test-data/pca_out6.pdf test-data/regr_inp.tabular test-data/regr_out.pdf test-data/regr_out.tabular test-data/scatterplot_in1.tabular test-data/scatterplot_out1.pdf test-data/solexa.qual test-data/solexaScore.png tool_dependencies.xml
diffstat 61 files changed, 115 insertions(+), 5607 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MiClip.xml	Sun Mar 02 11:49:08 2014 -0500
@@ -0,0 +1,89 @@
+<tool id="mi_clip" name="MiClip">
+    <description>Identify Binding Sites in CLIP-Seq Data</description>
+    <requirements>
+        <requirement type="binary">Rscript</requirement>
+        <requirement type="package" version="3.0.1">R_3_0_1</requirement>
+        <requirement type="package" version="1.2">miclip</requirement>
+        <requirement type="package" version="5.18.1">perl</requirement>
+        <requirement type="set_environment">MICLIP_SCRIPT_PATH</requirement>
+    </requirements>
+    <command interpreter="sh">
+        MiClip_wrapper.sh $input_file $control_file $mutation $paired $suffix $step $maxbin $model_cut $max_iter $convergence $emperical $output
+    </command>
+    <inputs>
+        <param name="input_file" type="data" format="sam" label="Input File" help="Input SAM File. Use Bam to Sam converter if Input file is in Bam format."/>
+        <param name="control_file" type="data" format="sam" optional="true" label="Control File" help="ex: Control experiment without crosslinking for distinguishing SNPs."/> 
+        <param name="mutation" type="select" label="Mutation Type" display="checkboxes" multiple="true" help="Select One or Multiple Mutations.">
+            <option value="T2C" selected="true">T-&gt;C</option>
+            <option value="T2A">T-&gt;A</option>
+            <option value="T2G">T-&gt;G</option>
+            <option value="C2A">C-&gt;A</option>
+            <option value="C2T">C-&gt;T</option>
+            <option value="C2G">C-&gt;G</option>
+            <option value="A2T">A-&gt;T</option>
+            <option value="A2G">A-&gt;G</option>
+            <option value="A2C">A-&gt;C</option>
+            <option value="G2A">G-&gt;A</option>
+            <option value="G2C">G-&gt;C</option>
+            <option value="G2T">G-&gt;T</option>
+            <option value="Ins">Ins</option>
+            <option value="Del">Del</option>
+        </param>
+
+        <param name="paired" type="boolean" checked="no" truevalue="TRUE" falsevalue="FALSE" label="Sequence is Pair-End" help=""/>
+        <param name="suffix" type="text" size="20" value="Forward,Backward" label="Suffix of Paired-End Read" help="Change only if Paired-End Reads. See Manual for more details."/>
+        <param name="step" type="integer" value="5" label="Bin Step Size" help="In the first HMM, all clusters will be divided into bins of the same length of step bp."/>
+        <param name="maxbin" type="integer" value="100" label="Max Number of Reads per Bin" help="Maximum number of Reads in a Bin or on a Base."/>
+        <param name="emperical" type="text" value="auto" label="Emperical" help="Used in model fitting in the first HMM. Default is 'auto'."/> 
+        <param name="model_cut" type="float" value="0.2" min="0.0" max="1.0" label="Mixure Model Cutoff" help="Cutoff for Fitting the mixture model in the Second HMM."/>
+        <param name="max_iter" type="integer" value="20" label="Max Number of HMM Iterations" help="The Maximum number of Iterations for both HMM iterations."/>
+        <param name="convergence" type="float" value="0.01" label="Convergence Cutoff" help="The Cutoff for reaching Convergence."/>
+    </inputs>
+    <outputs>
+        <data format="zip" name="output" />
+    </outputs>
+    <help>
+
+**Description**
+
+Cross-linking immunoprecipitation coupled with high-throughput sequencing (CLIP-Seq) has made it possible to identify the target sites of RNA-binding proteins in various cell culture systems and tissue types on a genome-wide scale. Here we present MiClip, a novel model-based approach to identify high-confidence protein-RNA binding sites in CLIP-Seq datasets. This approach assigns a confidence value to each binding site on a probabilistic basis. The MiClip package can be flexibly applied to analyze both HITS-CLIP and PAR-CLIP data.
+
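+Under the hood, this Galaxy tool hands all of its parameters to a small shell wrapper (MiClip_wrapper.sh), which runs the MiClip R script and packages the results into the output zip archive (the run log log.txt plus the CSV result tables, including clusters.csv). The call below only illustrates that interface; the file names are placeholders::
+
+    # roughly what Galaxy generates from the tool's default settings
+    sh MiClip_wrapper.sh input.sam control.sam T2C FALSE Forward,Backward 5 100 0.2 20 0.01 auto output.zip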
+
+------
+
+**Manual**
+
+GalaxyManual_
+
+.. _GalaxyManual: http://galaxy.swmed.edu/galaxy/static/galaxy_manual.pdf
+
+MiniClipDemo_
+
+.. _MiniClipDemo: http://galaxy.swmed.edu/galaxy/u/tpers1/p/miniclipwalkthrough
+
+Vignette.pdf_ 
+
+.. _Vignette.pdf: http://galaxy.swmed.edu/galaxy/static/MiClip_vignette.pdf
+
+Manual.pdf_ 
+
+.. _Manual.pdf: http://galaxy.swmed.edu/galaxy/static/MiClip-manual.pdf
+
+
+------
+
+**Source**
+
+MiClip_R_Package_
+
+.. _MiClip_R_Package: http://galaxy.swmed.edu/galaxy/static/MiClip.tar.gz
+
+------
+
+**Author** 
+
+Tao Wang.
+For any suggestions or inquiries, please contact Tao.Wang@UTSouthwestern.edu
+
+
+    </help>
+</tool>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MiClip_wrapper.sh	Sun Mar 02 11:49:08 2014 -0500
@@ -0,0 +1,10 @@
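+# MiClip_wrapper.sh -- called from MiClip.xml as:
+#   MiClip_wrapper.sh <input_sam> <control_sam> <mutation> <paired> <suffix> <step> <maxbin> <model_cut> <max_iter> <convergence> <emperical> <output_zip>
+# Arguments $1-${11} are forwarded to the MiClip R script; ${12} receives the zipped results.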
+# MICLIP_SCRIPT_PATH is set by the tool's set_environment requirement and is
+# assumed to point at the directory containing MiClip.R.
+R --vanilla --slave --args $1 $2 $3 $4 $5 $6 $7 $8 $9 ${10} ${11} < "${MICLIP_SCRIPT_PATH}/MiClip.R" > dump
+if [ -f ${__tool_data_path__}clusters.csv ]
+then
+    # Success: bundle the run log and all CSV result tables into the output zip.
+    zip temp ${__tool_data_path__}log.txt
+    zip temp ${__tool_data_path__}*.csv
+    mv temp.zip "${12}"
+else
+    # Failure: forward the R output to stderr so Galaxy reports the error.
+    cat dump >&2
+fi
+
--- a/cca.py	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,219 +0,0 @@
-#!/usr/bin/env python
-
-import sys, string
-#from rpy import *
-import rpy2.robjects as robjects
-import rpy2.rlike.container as rlc
-from rpy2.robjects.packages import importr
-r = robjects.r
-grdevices = importr('grDevices')
-import numpy
-
-
-def stop_err(msg):
-    sys.stderr.write(msg)
-    sys.exit()
-
-infile = sys.argv[1]
-x_cols = sys.argv[2].split(',')
-y_cols = sys.argv[3].split(',')
-
-x_scale = x_center = "FALSE"
-if sys.argv[4] == 'both':
-    x_scale = x_center = "TRUE"
-elif sys.argv[4] == 'center':
-    x_center = "TRUE"
-elif sys.argv[4] == 'scale':
-    x_scale = "TRUE"
-    
-y_scale = y_center = "FALSE"
-if sys.argv[5] == 'both':
-    y_scale = y_center = "TRUE"
-elif sys.argv[5] == 'center':
-    y_center = "TRUE"
-elif sys.argv[5] == 'scale':
-    y_scale = "TRUE"
-
-std_scores = "FALSE"   
-if sys.argv[6] == "yes":
-    std_scores = "TRUE"
-    
-outfile = sys.argv[7]
-outfile2 = sys.argv[8]
-
-fout = open(outfile,'w')
-elems = []
-for i, line in enumerate( file ( infile )):
-    line = line.rstrip('\r\n')
-    if len( line )>0 and not line.startswith( '#' ):
-        elems = line.split( '\t' )
-        break 
-    if i == 30:
-        break # Hopefully we'll never get here...
-
-if len( elems )<1:
-    stop_err( "The data in your input dataset is either missing or not formatted properly." )
-
-x_vals = []
-
-for k,col in enumerate(x_cols):
-    x_cols[k] = int(col)-1
-    #x_vals.append([])
-
-y_vals = []
-
-for k,col in enumerate(y_cols):
-    y_cols[k] = int(col)-1
-    #y_vals.append([])
-
-skipped = 0
-for ind,line in enumerate( file( infile )):
-    if line and not line.startswith( '#' ):
-        try:
-            fields = line.strip().split("\t")
-            valid_line = True
-            for col in x_cols+y_cols:
-                try:
-                    assert float(fields[col])
-                except:
-                    skipped += 1
-                    valid_line = False
-                    break
-            if valid_line:
-                for k,col in enumerate(x_cols):
-                    try:
-                        xval = float(fields[col])
-                    except:
-                        xval = NaN#
-                    #x_vals[k].append(xval)
-                    x_vals.append(xval)
-                for k,col in enumerate(y_cols):
-                    try:
-                        yval = float(fields[col])
-                    except:
-                        yval = NaN#
-                    #y_vals[k].append(yval)
-                    y_vals.append(yval)
-        except:
-            skipped += 1
-
-#x_vals1 = numpy.asarray(x_vals).transpose()
-#y_vals1 = numpy.asarray(y_vals).transpose()
-
-#x_dat= r.list(array(x_vals1))
-#y_dat= r.list(array(y_vals1))
-
-x_dat = r['matrix'](robjects.FloatVector(x_vals),ncol=len(x_cols),byrow=True)
-y_dat = r['matrix'](robjects.FloatVector(y_vals),ncol=len(y_cols),byrow=True)
-
-try:
-    r.suppressWarnings(r.library("yacca"))
-except:
-    stop_err("Missing R library yacca.")
-    
-#set_default_mode(NO_CONVERSION)
-try:
-    xcolnames = ["c%d" %(el+1) for el in x_cols]
-    ycolnames = ["c%d" %(el+1) for el in y_cols]
-    #cc = r.cca(x=x_dat, y=y_dat, xlab=xcolnames, ylab=ycolnames, xcenter=r(x_center), ycenter=r(y_center), xscale=r(x_scale), yscale=r(y_scale), standardize_scores=r(std_scores))
-    cc = r.cca(x=x_dat, y=y_dat, xlab=xcolnames, ylab=ycolnames, xcenter=r(x_center), ycenter=r(y_center), xscale=r(x_scale), yscale=r(y_scale), **{'standardize.scores':r(std_scores)})
-    #ftest = r.F_test_cca(cc)
-    ftest = r['F.test.cca'](cc)
-except RException, rex:
-    stop_err("Encountered error while performing CCA on the input data: %s" %(rex))
-
-#set_default_mode(BASIC_CONVERSION)
-summary = r.summary(cc)
-
-#ncomps = len(summary['corr'])
-ncomps = len(summary.rx2('corr'))
-#comps = summary['corr'].keys()
-#comps = summary.rx2('corr').names
-comps = (','.join(summary.rx2('corr').names)).split(',')
-#corr = summary['corr'].values()
-corr = summary.rx2('corr')
-#xlab = summary['xlab']
-xlab = summary.rx2('xlab')
-#ylab = summary['ylab']
-ylab = summary.rx2('ylab')
-
-for i in range(ncomps):
-    corr[comps.index('CV %s' %(i+1))] = summary.rx2('corr')[i]
-    #corr[comps.index('CV %s' %(i+1))] = summary['corr'].values()[i]
-
-#ftest=ftest.as_py()
-print >>fout, "#Component\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-print >>fout, "#Correlation\t%s" %("\t".join(["%.4g" % el for el in corr]))
-#print >>fout, "#F-statistic\t%s" %("\t".join(["%.4g" % el for el in ftest['statistic']]))
-print >>fout, "#F-statistic\t%s" %("\t".join(["%.4g" % el for el in ftest.rx2('statistic')]))
-#print >>fout, "#p-value\t%s" %("\t".join(["%.4g" % el for el in ftest['p.value']]))
-print >>fout, "#p-value\t%s" %("\t".join(["%.4g" % el for el in ftest.rx2('p.value')]))
-
-
-print >>fout, "#X-Coefficients\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-#for i,val in enumerate(summary['xcoef']):
-#    print >>fout, "%s\t%s" %(xlab[i], "\t".join(["%.4g" % el for el in val]))
-vm = summary.rx2('xcoef')
-for i in range(vm.nrow):
-    vals = []
-    for j in range(vm.ncol):
-       vals.append("%.4g" % vm.rx2(i+1,j+1)[0])
-    print >>fout, "%s\t%s" %(xlab[i][0], "\t".join(vals))
-
-print >>fout, "#Y-Coefficients\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-#for i,val in enumerate(summary['ycoef']):
-#    print >>fout, "%s\t%s" %(ylab[i], "\t".join(["%.4g" % el for el in val]))
-vm = summary.rx2('ycoef')
-for i in range(vm.nrow):
-    vals = []
-    for j in range(vm.ncol):
-       vals.append("%.4g" % vm.rx2(i+1,j+1)[0])
-    print >>fout, "%s\t%s" %(ylab[i][0], "\t".join(vals))
-
-print >>fout, "#X-Loadings\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-#for i,val in enumerate(summary['xstructcorr']):
-#    print >>fout, "%s\t%s" %(xlab[i], "\t".join(["%.4g" % el for el in val]))
-vm = summary.rx2('xstructcorr')
-for i in range(vm.nrow):
-    vals = []
-    for j in range(vm.ncol):
-       vals.append("%.4g" % vm.rx2(i+1,j+1)[0])
-    print >>fout, "%s\t%s" %(xlab[i][0], "\t".join(vals))
-
-print >>fout, "#Y-Loadings\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-#for i,val in enumerate(summary['ystructcorr']):
-#    print >>fout, "%s\t%s" %(ylab[i], "\t".join(["%.4g" % el for el in val]))
-vm = summary.rx2('ystructcorr')
-for i in range(vm.nrow):
-    vals = []
-    for j in range(vm.ncol):
-       vals.append("%.4g" % vm.rx2(i+1,j+1)[0])
-    print >>fout, "%s\t%s" %(ylab[i][0], "\t".join(vals))
-
-print >>fout, "#X-CrossLoadings\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-#for i,val in enumerate(summary['xcrosscorr']):
-#    print >>fout, "%s\t%s" %(xlab[i], "\t".join(["%.4g" % el for el in val]))
-vm = summary.rx2('xcrosscorr')
-for i in range(vm.nrow):
-    vals = []
-    for j in range(vm.ncol):
-       vals.append("%.4g" % vm.rx2(i+1,j+1)[0])
-    print >>fout, "%s\t%s" %(xlab[i][0], "\t".join(vals))
-
-print >>fout, "#Y-CrossLoadings\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-#for i,val in enumerate(summary['ycrosscorr']):
-#    print >>fout, "%s\t%s" %(ylab[i], "\t".join(["%.4g" % el for el in val]))
-vm = summary.rx2('ycrosscorr')
-for i in range(vm.nrow):
-    vals = []
-    for j in range(vm.ncol):
-       vals.append("%.4g" % vm.rx2(i+1,j+1)[0])
-    print >>fout, "%s\t%s" %(ylab[i][0], "\t".join(vals))
-
-r.pdf( outfile2, 8, 8 )
-#r.plot(cc)
-for i in range(ncomps):
-    r['helio.plot'](cc, cv = i+1, main = r.paste("Explained Variance for CV",i+1), type = "variance")
-#r.dev_off()
-grdevices.dev_off()
-
--- a/cca.xml	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,97 +0,0 @@
-<tool id="cca1" name="Canonical Correlation Analysis" version="1.1.0">
-    <description> </description>
-    <expand macro="requirements" />
-    <macros>
-        <import>statistic_tools_macros.xml</import>
-    </macros>
-    <command interpreter="python">
-        cca.py 
-          $input1
-          $x_cols
-          $y_cols
-          $x_scale
-          $y_scale
-          $std_scores
-          $out_file1
-          $out_file2
-    </command>
-  <inputs>
-    <param format="tabular" name="input1" type="data" label="Select data" help="Dataset missing? See TIP below."/>
-    <param name="x_cols" label="Select columns containing X variables " type="data_column" data_ref="input1" numerical="True" multiple="true" >
-        <validator type="no_options" message="Please select at least one column."/>
-    </param>
-    <param name="y_cols" label="Select columns containing Y variables " type="data_column" data_ref="input1" numerical="True" multiple="true" >
-        <validator type="no_options" message="Please select at least one column."/>
-    </param>
-    <param name="x_scale" type="select" label="Type of Scaling for X variables" help="Can be used to center and/or scale variables">
-        <option value="none" selected="true">None</option>
-        <option value="center">Center only</option>
-        <option value="scale">Scale only</option>
-        <option value="both">Center and Scale</option>
-    </param>
-    <param name="y_scale" type="select" label="Type of Scaling for Y variables" help="Can be used to center and/or scale variables">
-        <option value="none" selected="true">None</option>
-        <option value="center">Center only</option>
-        <option value="scale">Scale only</option>
-        <option value="both">Center and Scale</option>
-    </param>
-    <param name="std_scores" type="select" label="Report standardized scores?" help="Selecting 'Yes' will rescale scores (and coefficients) to produce scores of unit variance">
-        <option value="no" selected="true">No</option>
-        <option value="yes">Yes</option>
-    </param>
-  </inputs>
-  <outputs>
-    <data format="input" name="out_file1" metadata_source="input1" />
-    <data format="pdf" name="out_file2" />
-  </outputs>
-  <tests>
-    <test>
-        <param name="input1" value="iris.tabular"/>
-        <param name="x_cols" value="3,4"/>
-        <param name="y_cols" value="1,2"/>
-        <param name="x_scale" value="both"/>
-        <param name="y_scale" value="scale"/>
-        <param name="std_scores" value="yes"/>
-        <output name="out_file1" file="cca_out1.tabular"/>
-        <output name="out_file2" file="cca_out2.pdf"/>
-    </test>
-  </tests>
-  <help>
-
-
-.. class:: infomark
-
-**TIP:** If your data is not TAB delimited, use *Edit Datasets-&gt;Convert characters*
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool uses functions from 'yacca' library from R statistical package to perform Canonical Correlation Analysis (CCA) on the input data. 
-It outputs two files, one containing the summary statistics of the performed CCA, and the other containing helioplots, which display structural loadings of X and Y variables on different canonical components.   
-
-*Carter T. Butts (2009). yacca: Yet Another Canonical Correlation Analysis Package. R package version 1.1.*
-
------
-
-.. class:: warningmark
-
-**Note**
-
-- This tool currently treats all predictor and response variables as continuous numeric variables. Running the tool on categorical variables might result in incorrect results.
-
-- Rows containing non-numeric (or missing) data in any of the chosen columns will be skipped from the analysis.
-
-- The summary statistics in the output are described below:
-
-  - correlation: Canonical correlation between the canonical variates (i.e. transformed variables)
-  - F-statistic: F-value obtained from F Test for Canonical Correlations Using Rao's Approximation
-  - p-value: denotes significance of canonical correlations
-  - Coefficients: represent the coefficients of X and Y variables on each canonical variate
-  - Loadings: represent the correlations between the original variables in each set and their respective canonical variates 
-  - CrossLoadings: represent the correlations between the original variables in each set and the opposite canonical variates 
-  
-  </help>
-</tool>
--- a/cor.py	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,102 +0,0 @@
-#!/usr/bin/env python
-#Greg Von Kuster
-"""
-Calculate correlations between numeric columns in a tab delim file.
-usage: %prog infile output.txt columns method
-"""
-
-import sys
-#from rpy import *
-import rpy2.robjects as robjects
-r = robjects.r
-
-def stop_err(msg):
-    sys.stderr.write(msg)
-    sys.exit()
-    
-def main():
-    method = sys.argv[4]
-    assert method in ( "pearson", "kendall", "spearman" )
-
-    try:
-        columns = map( int, sys.argv[3].split( ',' ) )
-    except:
-        stop_err( "Problem determining columns, perhaps your query does not contain a column of numerical data." )
-    
-    matrix = []
-    skipped_lines = 0
-    first_invalid_line = 0
-    invalid_value = ''
-    invalid_column = 0
-
-    for i, line in enumerate( file( sys.argv[1] ) ):
-        valid = True
-        line = line.rstrip('\n\r')
-
-        if line and not line.startswith( '#' ): 
-            # Extract values and convert to floats
-            row = []
-            for column in columns:
-                column -= 1
-                fields = line.split( "\t" )
-                if len( fields ) <= column:
-                    valid = False
-                else:
-                    val = fields[column]
-                    if val.lower() == "na": 
-                        row.append( float( "nan" ) )
-                    else:
-                        try:
-                            row.append( float( fields[column] ) )
-                        except:
-                            valid = False
-                            skipped_lines += 1
-                            if not first_invalid_line:
-                                first_invalid_line = i+1
-                                invalid_value = fields[column]
-                                invalid_column = column+1
-        else:
-            valid = False
-            skipped_lines += 1
-            if not first_invalid_line:
-                first_invalid_line = i+1
-
-        if valid:
-            # matrix.append( row )
-            matrix += row 
-
-    if skipped_lines < i:
-        try:
-            out = open( sys.argv[2], "w" )
-        except:
-            stop_err( "Unable to open output file" )
-
-        # Run correlation
-        # print >> sys.stderr, "matrix: %s" % matrix
-        # print >> sys.stderr, "array: %s" % array( matrix )
-        try:
-            # value = r.cor( array( matrix ), use="pairwise.complete.obs", method=method )
-            fv = robjects.FloatVector(matrix)
-            m = r['matrix'](fv, ncol=len(columns),byrow=True)
-            rslt_mat = r.cor(m, use="pairwise.complete.obs", method=method )
-            value = []
-            for ri in range(1, rslt_mat.nrow + 1):
-                row = []
-                for ci in range(1, rslt_mat.ncol + 1):
-		    row.append(rslt_mat.rx(ri,ci)[0])
-                value.append(row)
-        except Exception, exc:
-            out.close()
-            stop_err("%s" %str( exc ))
-        for row in value:
-            print >> out, "\t".join( map( str, row ) )
-        out.close()
-
-    if skipped_lines > 0:
-        msg = "..Skipped %d lines starting with line #%d. " %( skipped_lines, first_invalid_line )
-        if invalid_value and invalid_column > 0:
-            msg += "Value '%s' in column %d is not numeric." % ( invalid_value, invalid_column )
-        print msg
-
-if __name__ == "__main__":
-    main()
--- a/cor.xml	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,102 +0,0 @@
-<tool id="cor2" name="Correlation" version="1.1.0">
-    <description>for numeric columns</description>
-    <expand macro="requirements" />
-    <macros>
-        <import>statistic_tools_macros.xml</import>
-    </macros>
-    <command interpreter="python">cor.py $input1 $out_file1 $numeric_columns $method</command>
-    <inputs>
-        <param format="tabular" name="input1" type="data" label="Dataset" help="Dataset missing? See TIP below"/>
-        <param name="numeric_columns" label="Numerical columns" type="data_column" numerical="True" multiple="True" data_ref="input1" help="Multi-select list - hold the appropriate key while clicking to select multiple columns" />
-        <param name="method" type="select" label="Method">
-            <option value="pearson">Pearson</option>
-            <option value="kendall">Kendall rank</option>
-            <option value="spearman">Spearman rank</option>
-        </param>
-    </inputs>
-  <outputs>
-    <data format="txt" name="out_file1" />
-  </outputs>
-  <tests>
-    <!--
-    Test a tabular input with the first line being a comment without a # character to start
-    -->
-    <test>
-      <param name="input1" value="cor.tabular" />
-      <param name="numeric_columns" value="2,3" />
-      <param name="method" value="pearson" />
-      <output name="out_file1" file="cor_out.txt" />
-    </test>
-  </tests>
-  <help>
-
-.. class:: infomark
-
-**TIP:** If your data is not TAB delimited, use *Text Manipulation-&gt;Convert*
-
-.. class:: warningmark
-
-Missing data ("nan") removed from each pairwise comparison
-
------
-
-**Syntax**
-
-This tool computes the matrix of correlation coefficients between numeric columns.
-
-- All invalid, blank and comment lines are skipped when performing computations.  The number of skipped lines is displayed in the resulting history item.
-
-- **Pearson's Correlation** reflects the degree of linear relationship between two variables. It ranges from +1 to -1. A correlation of +1 means that there is a perfect positive linear relationship between variables. The formula for Pearson's correlation is:
-
-    .. image:: $PATH_TO_IMAGES/pearson.png
-
-    where n is the number of items
-
-- **Kendall's rank correlation** is used to measure the degree of correspondence between two rankings and assessing the significance of this correspondence. The formula for Kendall's rank correlation is:
-
-    .. image:: $PATH_TO_IMAGES/kendall.png
-
-    where n is the number of items, and P is the sum.
-
-- **Spearman's rank correlation** assesses how well an arbitrary monotonic function could describe the relationship between two variables, without making any assumptions about the frequency distribution of the variables. The formula for Spearman's rank correlation is
-
-    .. image:: $PATH_TO_IMAGES/spearman.png
-
-    where D is the difference between the ranks of corresponding values of X and Y, and N is the number of pairs of values.
-
------
-
-**Example**
-
-- Input file::
-
-    #Person	Height	Self Esteem
-    1		68		4.1
-    2 		71 		4.6
-    3 		62 		3.8
-    4 		75 		4.4
-    5 		58 		3.2
-    6 		60 		3.1
-    7 		67 		3.8
-    8 		68 		4.1
-    9 		71 		4.3
-    10 		69 		3.7
-    11 		68 		3.5
-    12 		67 		3.2
-    13 		63 		3.7
-    14 		62 		3.3
-    15 		60 		3.4
-    16 		63 		4.0
-    17 		65 		4.1
-    18 		67 		3.8
-    19 		63 		3.4
-    20 		61 		3.6
-
-- Computing the correlation coefficients between columns 2 and 3 of the above file (using Pearson's Correlation), the output is::
-
-    1.0	0.730635686279
-    0.730635686279	1.0
-
-  So the correlation for our twenty cases is .73, which is a fairly strong positive relationship.
-  </help>
-</tool>
--- a/gsummary.py	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,117 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import re
-import tempfile
-#from rpy import *
-import rpy2.robjects as robjects
-r = robjects.r
-from rpy2.robjects.vectors import DataFrame
-
-assert sys.version_info[:2] >= ( 2, 4 )
-
-def stop_err( msg ):
-    sys.stderr.write( msg )
-    sys.exit()
-
-def S3_METHODS( all="key" ):
-    Group_Math =  [ "abs", "sign", "sqrt", "floor", "ceiling", "trunc", "round", "signif",
-        "exp", "log", "cos", "sin", "tan", "acos", "asin", "atan", "cosh", "sinh", "tanh",
-        "acosh", "asinh", "atanh", "lgamma", "gamma", "gammaCody", "digamma", "trigamma",
-        "cumsum", "cumprod", "cummax", "cummin", "c" ]
-    Group_Ops = [ "+", "-", "*", "/", "^", "%%", "%/%", "&", "|", "!", "==", "!=", "<", "<=", ">=", ">", "(", ")", "~", "," ]
-    if all is "key":
-        return { 'Math' : Group_Math, 'Ops' : Group_Ops }
-
-def main():
-    try:
-        datafile = sys.argv[1]
-        outfile_name = sys.argv[2]
-        expression = sys.argv[3]
-    except: 
-        stop_err( 'Usage: python gsummary.py input_file ouput_file expression' )
-
-    math_allowed = S3_METHODS()[ 'Math' ]
-    ops_allowed = S3_METHODS()[ 'Ops' ]
-
-    # Check for invalid expressions
-    for word in re.compile( '[a-zA-Z]+' ).findall( expression ):
-        if word and not word in math_allowed: 
-            stop_err( "Invalid expression '%s': term '%s' is not recognized or allowed" %( expression, word ) )
-    symbols = set()
-    for symbol in re.compile( '[^a-z0-9\s]+' ).findall( expression ):
-        if symbol and not symbol in ops_allowed:
-            stop_err( "Invalid expression '%s': operator '%s' is not recognized or allowed" % ( expression, symbol ) )
-        else:
-            symbols.add( symbol )
-    if len( symbols ) == 1 and ',' in symbols:
-        # User may have entered a comma-separated list r_data_frame columns
-        stop_err( "Invalid columns '%s': this tool requires a single column or expression" % expression )
-
-    # Find all column references in the expression
-    cols = []
-    for col in re.compile( 'c[0-9]+' ).findall( expression ):
-        try:
-            cols.append( int( col[1:] ) - 1 )
-        except:
-            pass
- 
-    tmp_file = tempfile.NamedTemporaryFile( 'w+b' )
-    # Write the R header row to the temporary file
-    hdr_str = "\t".join( "c%s" % str( col+1 ) for col in cols )
-    tmp_file.write( "%s\n" % hdr_str )
-    skipped_lines = 0
-    first_invalid_line = 0
-    i = 0
-    for i, line in enumerate( file( datafile ) ):
-        line = line.rstrip( '\r\n' )
-        if line and not line.startswith( '#' ):
-            valid = True
-            fields = line.split( '\t' )
-            # Write the R data row to the temporary file
-            for col in cols:
-                try:
-                    float( fields[ col ] )
-                except:
-                    skipped_lines += 1
-                    if not first_invalid_line:
-                        first_invalid_line = i + 1
-                    valid = False
-                    break
-            if valid:
-                data_str = "\t".join( fields[ col ] for col in cols )
-                tmp_file.write( "%s\n" % data_str )
-    tmp_file.flush()
-
-    if skipped_lines == i + 1:
-        stop_err( "Invalid column or column data values invalid for computation.  See tool tips and syntax for data requirements." )
-    else:
-        # summary function and return labels
-        summary_func = r( "function( x ) { c( sum=sum( as.numeric( x ), na.rm=T ), mean=mean( as.numeric( x ), na.rm=T ), stdev=sd( as.numeric( x ), na.rm=T ), quantile( as.numeric( x ), na.rm=TRUE ) ) }" )
-        headings = [ 'sum', 'mean', 'stdev', '0%', '25%', '50%', '75%', '100%' ]
-        headings_str = "\t".join( headings )
-        
-        #r.set_default_mode( NO_CONVERSION )
-        #r_data_frame = r.read_table( tmp_file.name, header=True, sep="\t" )
-        r_data_frame = DataFrame.from_csvfile( tmp_file.name, header=True, sep="\t" )
-        
-        outfile = open( outfile_name, 'w' )
-
-        for col in re.compile( 'c[0-9]+' ).findall( expression ):
-            r.assign( col, r[ "$" ]( r_data_frame, col ) )
-        try:
-            summary = summary_func( r( expression ) )
-        except RException, s:
-            outfile.close()
-            stop_err( "Computation resulted in the following error: %s" % str( s ) )
-        #summary = summary.as_py( BASIC_CONVERSION )
-        outfile.write( "#%s\n" % headings_str )
-        print summary
-        print summary.r_repr()
-        outfile.write( "%s\n" % "\t".join( [ "%g" % ( summary.rx2( k )[0] ) for k in headings ] ) )
-        outfile.close()
-
-        if skipped_lines:
-            print "Skipped %d invalid lines beginning with line #%d.  See tool tips for data requirements." % ( skipped_lines, first_invalid_line )        
-
-if __name__ == "__main__": main()
--- a/gsummary.xml	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,77 +0,0 @@
-<tool id="Summary_Statistics1" name="Summary Statistics" version="1.3.0">
-    <description>for any numerical column</description>
-    <expand macro="requirements" />
-    <macros>
-        <import>statistic_tools_macros.xml</import>
-    </macros>
-    <command interpreter="python">gsummary.py $input $out_file1 "$cond"</command>
-    <inputs>
-        <param format="tabular" name="input" type="data" label="Summary statistics on" help="Dataset missing? See TIP below"/>
-        <param name="cond" size="30" type="text" value="c5" label="Column or expression" help="See syntax below">
-            <validator type="empty_field" message="Enter a valid column or expression, see syntax below for examples"/>
-        </param>
-    </inputs>
-    <outputs>
-        <data format="tabular" name="out_file1" />
-    </outputs>
-    <tests>
-        <test>
-            <param name="input" value="1.bed"/>
-            <output name="out_file1" file="gsummary_out1.tabular"/>
-            <param name="cond" value="c2"/>
-        </test>
-    </tests>
-    <help>
-
-.. class:: warningmark
-
-This tool expects input datasets consisting of tab-delimited columns (blank or comment lines beginning with a # character are automatically skipped).
-
-.. class:: infomark
-
-**TIP:** If your data is not TAB delimited, use *Text Manipulation-&gt;Convert delimiters to TAB*
-
-.. class:: infomark
-
-**TIP:** Computing summary statistics may throw exceptions if the data value in every line of the columns being summarized is not numerical.  If a line is missing a value or contains a non-numerical value in the column being summarized, that line is skipped and the value is not included in the statistical computation.  The number of invalid skipped lines is documented in the resulting history item.
-
-.. class:: infomark
-
-**USING R FUNCTIONS:** Most functions (like *abs*) take only a single expression. *log* can take one or two parameters, like *log(expression,base)*
-
-Currently, these R functions are supported: *abs, sign, sqrt, floor, ceiling, trunc, round, signif, exp, log, cos, sin, tan, acos, asin, atan, cosh, sinh, tanh, acosh, asinh, atanh, lgamma, gamma, gammaCody, digamma, trigamma, cumsum, cumprod, cummax, cummin*
-
------
-
-**Syntax**
-
-This tool computes basic summary statistics on a given column, or on a valid expression containing one or more columns.
-
-- Columns are referenced with **c** and a **number**. For example, **c1** refers to the first column of a tab-delimited file.
-
-- For example:
-
-  - **log(c5)** calculates the summary statistics for the natural log of column 5
-  - **(c5 + c6 + c7) / 3** calculates the summary statistics on the average of columns 5-7
-  - **log(c5,10)** summary statistics of the base 10 log of column 5
-  - **sqrt(c5+c9)** summary statistics of the square root of column 5 + column 9
-
------
-
-**Examples**
-
-- Input Dataset::
-
-    c1      c2      c3      c4      c5              c6
-    586     chrX    161416  170887  41108_at        16990
-    73      chrX    505078  532318  35073_at        1700
-    595     chrX    1361578 1388460 33665_s_at      1960
-    74      chrX    1420620 1461919 1185_at         8600
-
-- Summary Statistics on column c6 of the above input dataset::
-
-    #sum       mean      stdev     0%        25%       50%       75%        100%
-    29250.000  7312.500  7198.636  1700.000  1895.000  5280.000  10697.500  16990.000
-
-    </help>
-</tool>
--- a/histogram.py	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-#Greg Von Kuster
-
-import sys
-#from rpy import *
-import rpy2.robjects as robjects
-from rpy2.robjects.packages import importr
-r = robjects.r
-grdevices = importr('grDevices')
-
-
-assert sys.version_info[:2] >= ( 2, 4 )
-
-def stop_err(msg):
-    sys.stderr.write(msg)
-    sys.exit()
-
-def main():
-
-    # Handle input params
-    in_fname = sys.argv[1]
-    out_fname = sys.argv[2] 
-    try:
-        column = int( sys.argv[3] ) - 1
-    except:
-        stop_err( "Column not specified, your query does not contain a column of numerical data." )
-    title = sys.argv[4]
-    xlab = sys.argv[5]
-    breaks = int( sys.argv[6] )
-    if breaks == 0:
-        breaks = "Sturges"
-    if sys.argv[7] == "true":
-        density = True
-    else: density = False
-    if len( sys.argv ) >= 9 and sys.argv[8] == "true":
-        frequency = True
-    else: frequency = False
-
-    matrix = []
-    skipped_lines = 0
-    first_invalid_line = 0
-    invalid_value = ''
-    i = 0
-    for i, line in enumerate( file( in_fname ) ):
-        valid = True
-        line = line.rstrip('\r\n')
-        # Skip comments
-        if line and not line.startswith( '#' ): 
-            # Extract values and convert to floats
-            row = []
-            try:
-                fields = line.split( "\t" )
-                val = fields[column]
-                if val.lower() == "na":
-                    row.append( float( "nan" ) )
-            except:
-                valid = False
-                skipped_lines += 1
-                if not first_invalid_line:
-                    first_invalid_line = i+1
-            else:
-                try:
-                    row.append( float( val ) )
-                except ValueError:
-                    valid = False
-                    skipped_lines += 1
-                    if not first_invalid_line:
-                        first_invalid_line = i+1
-                        invalid_value = fields[column]
-        else:
-            valid = False
-            skipped_lines += 1
-            if not first_invalid_line:
-                first_invalid_line = i+1
-
-        if valid:
-            matrix += row
-
-    if skipped_lines < i:
-        try:
-            #a = r.array( matrix )
-            fv = robjects.FloatVector(matrix)
-            a = r['matrix'](fv, ncol=1,byrow=True)
-            #r.pdf( out_fname, 8, 8 )
-            r.pdf( out_fname, 8, 8 )
-            histogram = r.hist( a, probability=not frequency, main=title, xlab=xlab, breaks=breaks )
-            if density:
-                density = r.density( a )
-                if frequency:
-                    scale_factor = len( matrix ) * ( histogram['mids'][1] - histogram['mids'][0] ) #uniform bandwidth taken from first 2 midpoints
-                    density[ 'y' ] = map( lambda x: x * scale_factor, density[ 'y' ] )
-                r.lines( density )
-            #r.dev_off()
-            grdevices.dev_off()   
-        except Exception, exc:
-            stop_err( "%s" %str( exc ) )
-    else:
-        if i == 0:
-            stop_err("Input dataset is empty.")
-        else:
-            stop_err( "All values in column %s are non-numeric." %sys.argv[3] )
-
-    print "Histogram of column %s. " %sys.argv[3]
-    if skipped_lines > 0:
-        print "Skipped %d invalid lines starting with line #%d, '%s'." % ( skipped_lines, first_invalid_line, invalid_value )
-
-    #r.quit( save="no" )
-    
-if __name__ == "__main__":
-    main()
--- a/histogram.xml	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,77 +0,0 @@
-<tool id="histogram_rpy" name="Histogram" version="1.1.0">
-  <description>of a numeric column</description>
-  <expand macro="requirements" />
-    <macros>
-        <import>statistic_tools_macros.xml</import>
-    </macros>
-  <command interpreter="python">histogram.py $input $out_file1 $numerical_column "$title" "$xlab" $breaks $density $frequency</command>
-  <inputs>
-    <param name="input" type="data" format="tabular" label="Dataset" help="Dataset missing? See TIP below"/>
-    <param name="numerical_column" type="data_column" data_ref="input" numerical="True" label="Numerical column for x axis" />
-    <param name="breaks" type="integer" size="4" value="0" label="Number of breaks (bars)"/>
-    <param name="title" type="text" size="30" value="Histogram" label="Plot title"/>
-    <param name="xlab" type="text" size="30" value="V1" label="Label for x axis"/>
-    <param name="density" type="boolean" checked="yes" label="Include smoothed density"/>
-    <param name="frequency" type="boolean" checked="no" label="Plot as frequency (counts)"/>
-  </inputs>
-  <outputs>
-    <data format="pdf" name="out_file1" />
-  </outputs>
-  <tests>
-    <test>
-      <param name="input" value="histogram_in1.tabular" ftype="tabular"/>
-      <param name="numerical_column" value="2"/>
-      <param name="breaks" value="0"/>
-      <param name="title" value="Histogram"/>
-      <param name="xlab" value="V1"/>
-      <param name="density" value="true"/>
-      <param name="frequency" value="false"/>
-      <output name="out_file1" lines_diff="10" file="histogram_out1.pdf"/>
-    </test>
-  </tests>
-  <help>
-
-.. class:: infomark
-
-**TIP:** To remove comment lines that do not begin with a *#* character, use *Text Manipulation-&gt;Remove beginning*
-
- .. class:: infomark
-
-**TIP:** If your data is not TAB delimited, use *Text Manipulation-&gt;Convert*
-
------
-
-**Syntax**
-
-This tool computes a histogram of the numerical values in a column of a dataset.
-
-- All invalid, blank and comment lines in the dataset are skipped.  The number of skipped lines is displayed in the resulting history item.
-- **Column for x axis** - only numerical columns are possible.
-- **Number of breaks(bars)** - breakpoints between histogram cells. Value of '0' will determine breaks automatically.
-- **Plot title** - the histogram title.
-- **Label for x axis** - the label of the x axis for the histogram.
-- **Include smoothed density** - if checked, the resulting graph will join the given corresponding points with line segments.
-
------
-
-**Example**
-
-- Input file::
-
-    1	68	4.1
-    2	71	4.6
-    3	62	3.8
-    4	75	4.4
-    5	58	3.2
-    6	60	3.1
-    7	67	3.8
-    8	68	4.1
-    9	71	4.3
-    10	69	3.7 
-
-- Create a histogram on column 2 of the above dataset. 
-
-.. image:: $PATH_TO_IMAGES/histogram2.png
-
-</help>
-</tool>
--- a/kcca.py	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,166 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Run kernel CCA using kcca() from R 'kernlab' package
-
-usage: %prog [options]
-   -i, --input=i: Input file
-   -o, --output1=o: Summary output
-   -x, --x_cols=x: X-Variable columns
-   -y, --y_cols=y: Y-Variable columns
-   -k, --kernel=k: Kernel function
-   -f, --features=f: Number of canonical components to return
-   -s, --sigma=s: sigma
-   -d, --degree=d: degree
-   -l, --scale=l: scale
-   -t, --offset=t: offset
-   -r, --order=r: order
-
-usage: %prog input output1 x_cols y_cols kernel features sigma(or_None) degree(or_None) scale(or_None) offset(or_None) order(or_None)
-"""
-
-from galaxy import eggs
-import sys, string
-#from rpy import *
-import rpy2.robjects as robjects
-import rpy2.rlike.container as rlc
-from rpy2.robjects.packages import importr
-r = robjects.r
-import numpy
-import pkg_resources; pkg_resources.require( "bx-python" )
-from bx.cookbook import doc_optparse
-
-
-def stop_err(msg):
-    sys.stderr.write(msg)
-    sys.exit()
-
-#Parse Command Line
-options, args = doc_optparse.parse( __doc__ )
-#{'options= kernel': 'rbfdot', 'var_cols': '1,2,3,4', 'degree': 'None', 'output2': '/afs/bx.psu.edu/home/gua110/workspace/galaxy_bitbucket/database/files/000/dataset_260.dat', 'output1': '/afs/bx.psu.edu/home/gua110/workspace/galaxy_bitbucket/database/files/000/dataset_259.dat', 'scale': 'None', 'offset': 'None', 'input': '/afs/bx.psu.edu/home/gua110/workspace/galaxy_bitbucket/database/files/000/dataset_256.dat', 'sigma': '1.0', 'order': 'None'}
-
-infile = options.input
-x_cols = options.x_cols.split(',')
-y_cols = options.y_cols.split(',')
-kernel = options.kernel
-outfile = options.output1
-ncomps = int(options.features)
-fout = open(outfile,'w')
-
-if ncomps < 1:
-    print "You chose to return '0' canonical components. Please try rerunning the tool with number of components = 1 or more."
-    sys.exit()
-elems = []
-for i, line in enumerate( file ( infile )):
-    line = line.rstrip('\r\n')
-    if len( line )>0 and not line.startswith( '#' ):
-        elems = line.split( '\t' )
-        break 
-    if i == 30:
-        break # Hopefully we'll never get here...
-
-if len( elems )<1:
-    stop_err( "The data in your input dataset is either missing or not formatted properly." )
-
-x_vals = []
-for k,col in enumerate(x_cols):
-    x_cols[k] = int(col)-1
-    #x_vals.append([])
-y_vals = []
-for k,col in enumerate(y_cols):
-    y_cols[k] = int(col)-1
-    #y_vals.append([])
-NA = 'NA'
-skipped = 0
-for ind,line in enumerate( file( infile )):
-    if line and not line.startswith( '#' ):
-        try:
-            fields = line.strip().split("\t")
-            valid_line = True
-            for col in x_cols+y_cols:
-                try:
-                    assert float(fields[col])
-                except:
-                    skipped += 1
-                    valid_line = False
-                    break
-            if valid_line:
-                for k,col in enumerate(x_cols):
-                    try:
-                        xval = float(fields[col])
-                    except:
-                        xval = NaN#
-                    #x_vals[k].append(xval)
-                    x_vals.append(xval)
-                for k,col in enumerate(y_cols):
-                    try:
-                        yval = float(fields[col])
-                    except:
-                        yval = NaN#
-                    #y_vals[k].append(yval)
-                    y_vals.append(yval)
-        except:
-            skipped += 1
-
-#x_vals1 = numpy.asarray(x_vals).transpose()
-#y_vals1 = numpy.asarray(y_vals).transpose()
-
-#x_dat= r.list(array(x_vals1))
-#y_dat= r.list(array(y_vals1))
-
-x_dat = r['matrix'](robjects.FloatVector(x_vals),ncol=len(x_cols),byrow=True)
-y_dat = r['matrix'](robjects.FloatVector(y_vals),ncol=len(y_cols),byrow=True)
-
-try:
-    r.suppressWarnings(r.library('kernlab'))
-except:
-    stop_err('Missing R library kernlab')
-            
-#set_default_mode(NO_CONVERSION)
-if kernel=="rbfdot" or kernel=="anovadot":
-    pars = r.list(sigma=float(options.sigma))
-elif kernel=="polydot":
-    pars = r.list(degree=float(options.degree),scale=float(options.scale),offset=float(options.offset))
-elif kernel=="tanhdot":
-    pars = r.list(scale=float(options.scale),offset=float(options.offset))
-elif kernel=="besseldot":
-    pars = r.list(degree=float(options.degree),sigma=float(options.sigma),order=float(options.order))
-elif kernel=="anovadot":
-    pars = r.list(degree=float(options.degree),sigma=float(options.sigma))
-else:
-    pars = rlist()
-    
-try:
-    kcc = r.kcca(x=x_dat, y=y_dat, kernel=kernel, kpar=pars, ncomps=ncomps)
-except RException, rex:
-    stop_err("Encountered error while performing kCCA on the input data: %s" %(rex))
-
-#set_default_mode(BASIC_CONVERSION)    
-kcor = r.kcor(kcc)
-if ncomps == 1:
-    kcor = [kcor]
-xcoef = r.xcoef(kcc)
-ycoef = r.ycoef(kcc)
-
-print >>fout, "#Component\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-
-print >>fout, "#Correlation\t%s" %("\t".join(["%.4g" % el for el in kcor]))
-    
-print >>fout, "#Estimated X-coefficients\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-#for obs,val in enumerate(xcoef):
-#    print >>fout, "%s\t%s" %(obs+1, "\t".join(["%.4g" % el for el in val]))
-for i in range(1,xcoef.nrow+1):
-    vals = []
-    for j in range(1,xcoef.ncol+1):
-       vals.append("%.4g" % xcoef.rx2(i,j)[0])
-    print >>fout, "%s\t%s" %(i, "\t".join(vals))
-
-
-print >>fout, "#Estimated Y-coefficients\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-#for obs,val in enumerate(ycoef):
-#    print >>fout, "%s\t%s" %(obs+1, "\t".join(["%.4g" % el for el in val]))
-for i in range(1,ycoef.nrow+1):
-    vals = []
-    for j in range(1,ycoef.ncol+1):
-       vals.append("%.4g" % ycoef.rx2(i,j)[0])
-    print >>fout, "%s\t%s" %(i, "\t".join(vals))
--- a/kcca.xml	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,151 +0,0 @@
-<tool id="kcca1" name="Kernel Canonical Correlation Analysis" version="1.1.0">
-  <description> </description>
-    <expand macro="requirements" />
-    <macros>
-        <import>statistic_tools_macros.xml</import>
-    </macros>
-  <command interpreter="python">
-    kcca.py 
-      --input=$input1
-      --output1=$out_file1
-      --x_cols=$x_cols
-      --y_cols=$y_cols
-      --kernel=$kernelChoice.kernel
-      --features=$features
-      #if $kernelChoice.kernel == "rbfdot" or $kernelChoice.kernel == "anovadot":
-      --sigma=$kernelChoice.sigma
-      --degree="None"
-      --scale="None"
-      --offset="None"
-      --order="None"
-      #elif $kernelChoice.kernel == "polydot":
-      --sigma="None"
-      --degree=$kernelChoice.degree
-      --scale=$kernelChoice.scale
-      --offset=$kernelChoice.offset
-      --order="None"
-      #elif $kernelChoice.kernel == "tanhdot":
-      --sigma="None"
-      --degree="None"
-      --scale=$kernelChoice.scale
-      --offset=$kernelChoice.offset
-      --order="None"
-      #elif $kernelChoice.kernel == "besseldot":
-      --sigma=$kernelChoice.sigma
-      --degree=$kernelChoice.degree
-      --scale="None"
-      --offset="None"
-      --order=$kernelChoice.order
-      #elif $kernelChoice.kernel == "anovadot":
-      --sigma=$kernelChoice.sigma
-      --degree=$kernelChoice.degree
-      --scale="None"
-      --offset="None"
-      --order="None"
-      #else:
-      --sigma="None"
-      --degree="None"
-      --scale="None"
-      --offset="None"
-      --order="None"
-      #end if
-  </command>
-  <inputs>
-    <param format="tabular" name="input1" type="data" label="Select data" help="Dataset missing? See TIP below."/>
-    <param name="x_cols" label="Select columns containing X variables " type="data_column" data_ref="input1" numerical="True" multiple="true" >
-        <validator type="no_options" message="Please select at least one column."/>
-    </param>
-    <param name="y_cols" label="Select columns containing Y variables " type="data_column" data_ref="input1" numerical="True" multiple="true" >
-        <validator type="no_options" message="Please select at least one column."/>
-    </param>
-    <param name="features" size="10" type="integer" value="2" label="Number of canonical components to return" help="Enter an integer value greater than 0"/>
-    <conditional name="kernelChoice">
-        <param name="kernel" type="select" label="Kernel function">
-            <option value="rbfdot" selected="true">Gaussian Radial Basis Function</option>
-            <option value="polydot">Polynomial</option>
-            <option value="vanilladot">Linear</option>
-            <option value="tanhdot">Hyperbolic</option>
-            <option value="laplacedot">Laplacian</option>
-            <option value="besseldot">Bessel</option>
-            <option value="anovadot">ANOVA Radial Basis Function</option>
-            <option value="splinedot">Spline</option>
-        </param>
-        <when value="vanilladot" />
-        <when value="splinedot" />
-        <when value="rbfdot">
-            <param name="sigma" size="10" type="float" value="1" label="sigma (inverse kernel width)" />
-        </when>
-        <when value="laplacedot">
-            <param name="sigma" size="10" type="float" value="1" label="sigma (inverse kernel width)" />
-        </when>
-        <when value="polydot">
-            <param name="degree" size="10" type="float" value="1" label="degree" />
-            <param name="scale" size="10" type="float" value="1" label="scale" />
-            <param name="offset" size="10" type="float" value="1" label="offset" />
-        </when>
-        <when value="tanhdot">
-            <param name="scale" size="10" type="float" value="1" label="scale" />
-            <param name="offset" size="10" type="float" value="1" label="offset" />
-        </when>
-        <when value="besseldot">
-            <param name="sigma" size="10" type="float" value="1" label="sigma" />
-            <param name="order" size="10" type="float" value="1" label="order" />
-            <param name="degree" size="10" type="float" value="1" label="degree" />
-        </when>
-        <when value="anovadot">
-            <param name="sigma" size="10" type="float" value="1" label="sigma" />
-            <param name="degree" size="10" type="float" value="1" label="degree" />
-        </when>
-    </conditional>    
-  </inputs>
-  <outputs>
-    <data format="input" name="out_file1" metadata_source="input1" />
-  </outputs>
-  <tests>
-    <test>
-        <param name="input1" value="iris.tabular"/>
-        <param name="x_cols" value="1,2"/>
-        <param name="y_cols" value="3,4"/>
-        <param name="kernel" value="anovadot"/>
-        <param name="features" value="4"/>
-        <param name="sigma" value="0.1"/>
-        <param name="degree" value="2"/>
-        <output name="out_file1" file="kcca_out1.tabular" compare="re_match"/>
-    </test>
-    <test>
-        <param name="input1" value="iris.tabular"/>
-        <param name="x_cols" value="3,4"/>
-        <param name="y_cols" value="1,2"/>
-        <param name="kernel" value="rbfdot"/>
-        <param name="features" value="2"/>
-        <param name="sigma" value="0.5"/>
-        <output name="out_file1" file="kcca_out2.tabular" compare="re_match"/>
-    </test>
-  </tests>
-  <help>
-
-
-.. class:: infomark
-
-**TIP:** If your data is not TAB delimited, use *Edit Datasets-&gt;Convert characters*
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool uses functions from 'kernlab' library from R statistical package to perform Kernel Canonical Correlation Analysis (kCCA) on the input data. 
-
-*Alexandros Karatzoglou, Alex Smola, Kurt Hornik, Achim Zeileis (2004). kernlab - An S4 Package for Kernel Methods in R. Journal of Statistical Software 11(9), 1-20. URL http://www.jstatsoft.org/v11/i09/*
-
------
-
-.. class:: warningmark
-
-**Note**
-
-This tool currently treats all variables as continuous numeric variables. Running the tool on categorical variables might result in incorrect results. Rows containing non-numeric (or missing) data in any of the chosen columns will be skipped from the analysis.
-
-  </help>
-</tool>
--- a/kpca.py	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,159 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Run kernel PCA using kpca() from R 'kernlab' package
-
-usage: %prog [options]
-   -i, --input=i: Input file
-   -o, --output1=o: Summary output
-   -p, --output2=p: Figures output
-   -c, --var_cols=c: Variable columns
-   -k, --kernel=k: Kernel function
-   -f, --features=f: Number of principal components to return
-   -s, --sigma=s: sigma
-   -d, --degree=d: degree
-   -l, --scale=l: scale
-   -t, --offset=t: offset
-   -r, --order=r: order
-
-usage: %prog input output1 output2 var_cols kernel features sigma(or_None) degree(or_None) scale(or_None) offset(or_None) order(or_None)
-"""
-
-from galaxy import eggs
-import sys, string
-#from rpy import *
-import rpy2.robjects as robjects
-import rpy2.rlike.container as rlc
-from rpy2.robjects.packages import importr
-r = robjects.r
-grdevices = importr('grDevices')
-import numpy
-import pkg_resources; pkg_resources.require( "bx-python" )
-from bx.cookbook import doc_optparse
-
-
-def stop_err(msg):
-    sys.stderr.write(msg)
-    sys.exit()
-
-#Parse Command Line
-options, args = doc_optparse.parse( __doc__ )
-#{'options= kernel': 'rbfdot', 'var_cols': '1,2,3,4', 'degree': 'None', 'output2': '/afs/bx.psu.edu/home/gua110/workspace/galaxy_bitbucket/database/files/000/dataset_260.dat', 'output1': '/afs/bx.psu.edu/home/gua110/workspace/galaxy_bitbucket/database/files/000/dataset_259.dat', 'scale': 'None', 'offset': 'None', 'input': '/afs/bx.psu.edu/home/gua110/workspace/galaxy_bitbucket/database/files/000/dataset_256.dat', 'sigma': '1.0', 'order': 'None'}
-
-infile = options.input
-x_cols = options.var_cols.split(',')
-kernel = options.kernel
-outfile = options.output1
-outfile2 = options.output2
-ncomps = int(options.features)
-fout = open(outfile,'w')
-
-elems = []
-for i, line in enumerate( file ( infile )):
-    line = line.rstrip('\r\n')
-    if len( line )>0 and not line.startswith( '#' ):
-        elems = line.split( '\t' )
-        break 
-    if i == 30:
-        break # Hopefully we'll never get here...
-
-if len( elems )<1:
-    stop_err( "The data in your input dataset is either missing or not formatted properly." )
-
-x_vals = []
-
-for k,col in enumerate(x_cols):
-    x_cols[k] = int(col)-1
-    #x_vals.append([])
-
-NA = 'NA'
-skipped = 0
-for ind,line in enumerate( file( infile )):
-    if line and not line.startswith( '#' ):
-        try:
-            fields = line.strip().split("\t")
-            for k,col in enumerate(x_cols):
-                try:
-                    xval = float(fields[col])
-                except:
-                    #xval = r('NA')
-                    xval = NaN#
-                #x_vals[k].append(xval)
-                x_vals.append(xval)
-        except:
-            skipped += 1
-
-#x_vals1 = numpy.asarray(x_vals).transpose()
-#dat= r.list(array(x_vals1))
-dat = r['matrix'](robjects.FloatVector(x_vals),ncol=len(x_cols),byrow=True)
-
-
-try:
-    r.suppressWarnings(r.library('kernlab'))
-except:
-    stop_err('Missing R library kernlab')
-            
-#set_default_mode(NO_CONVERSION)
-if kernel=="rbfdot" or kernel=="anovadot":
-    pars = r.list(sigma=float(options.sigma))
-elif kernel=="polydot":
-    pars = r.list(degree=float(options.degree),scale=float(options.scale),offset=float(options.offset))
-elif kernel=="tanhdot":
-    pars = r.list(scale=float(options.scale),offset=float(options.offset))
-elif kernel=="besseldot":
-    pars = r.list(degree=float(options.degree),sigma=float(options.sigma),order=float(options.order))
-elif kernel=="anovadot":
-    pars = r.list(degree=float(options.degree),sigma=float(options.sigma))
-else:
-    pars = r.list()
-    
-try:
-    #kpc = r.kpca(x=r.na_exclude(dat), kernel=kernel, kpar=pars, features=ncomps)
-    kpc = r.kpca(x=r['na.exclude'](dat), kernel=kernel, kpar=pars, features=ncomps)
-#except RException, rex:
-except Exception, rex:  # need to find rpy2 RException
-    stop_err("Encountered error while performing kPCA on the input data: %s" %(rex))
-#set_default_mode(BASIC_CONVERSION)
-    
-eig = r.eig(kpc)
-pcv = r.pcv(kpc)
-rotated = r.rotated(kpc)
-
-#comps = eig.keys()
-comps = eig.names
-#eigv = eig.values()
-#for i in range(ncomps):
-#    eigv[comps.index('Comp.%s' %(i+1))] = eig.values()[i]
-
-print >>fout, "#Component\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-
-#print >>fout, "#Eigenvalue\t%s" %("\t".join(["%.4g" % el for el in eig.values()]))
-print >>fout, "#Eigenvalue\t%s" %("\t".join(["%.4g" % el for el in eig]))
-print >>fout, "#Principal component vectors\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-#for obs,val in enumerate(pcv):
-#    print >>fout, "%s\t%s" %(obs+1, "\t".join(["%.4g" % el for el in val]))
-for i in range(1,pcv.nrow+1):
-    vals = []
-    for j in range(1,pcv.ncol+1):
-       vals.append("%.4g" % pcv.rx2(i,j)[0])
-    print >>fout, "%s\t%s" %(i, "\t".join(vals))
-
-
-print >>fout, "#Rotated values\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-#for obs,val in enumerate(rotated):
-#    print >>fout, "%s\t%s" %(obs+1, "\t".join(["%.4g" % el for el in val]))
-for i in range(1,rotated.nrow+1):
-    vals = []
-    for j in range(1,rotated.ncol+1):
-       vals.append("%.4g" % rotated.rx2(i,j)[0])
-    print >>fout, "%s\t%s" %(i, "\t".join(vals))
-
-r.pdf( outfile2, 8, 8 )
-if ncomps != 1:
-    #r.pairs(rotated,labels=r.list(range(1,ncomps+1)),main="Scatterplot of rotated values")
-    r.pairs(rotated,labels=robjects.StrVector([str(i) for i in range(1,ncomps+1)]),main="Scatterplot of rotated values")
-else:
-    r.plot(rotated, ylab='Comp.1', main="Scatterplot of rotated values")
-#r.dev_off()
-grdevices.dev_off()
-
--- a/kpca.xml	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,141 +0,0 @@
-<tool id="kpca1" name="Kernel Principal Component Analysis" version="1.1.0">
-  <description> </description>
-  <expand macro="requirements" />
-    <macros>
-        <import>statistic_tools_macros.xml</import>
-    </macros>
-  <command interpreter="python">
-    kpca.py 
-      --input=$input1
-      --output1=$out_file1
-      --output2=$out_file2
-      --var_cols=$var_cols
-      --kernel=$kernelChoice.kernel
-      --features=$features
-      #if $kernelChoice.kernel == "rbfdot" or $kernelChoice.kernel == "laplacedot":
-      --sigma=$kernelChoice.sigma
-      --degree="None"
-      --scale="None"
-      --offset="None"
-      --order="None"
-      #elif $kernelChoice.kernel == "polydot":
-      --sigma="None"
-      --degree=$kernelChoice.degree
-      --scale=$kernelChoice.scale
-      --offset=$kernelChoice.offset
-      --order="None"
-      #elif $kernelChoice.kernel == "tanhdot":
-      --sigma="None"
-      --degree="None"
-      --scale=$kernelChoice.scale
-      --offset=$kernelChoice.offset
-      --order="None"
-      #elif $kernelChoice.kernel == "besseldot":
-      --sigma=$kernelChoice.sigma
-      --degree=$kernelChoice.degree
-      --scale="None"
-      --offset="None"
-      --order=$kernelChoice.order
-      #elif $kernelChoice.kernel == "anovadot":
-      --sigma=$kernelChoice.sigma
-      --degree=$kernelChoice.degree
-      --scale="None"
-      --offset="None"
-      --order="None"
-      #else:
-      --sigma="None"
-      --degree="None"
-      --scale="None"
-      --offset="None"
-      --order="None"
-      #end if
-  </command>
-  <inputs>
-    <param format="tabular" name="input1" type="data" label="Select data" help="Dataset missing? See TIP below."/>
-    <param name="var_cols" label="Select columns containing input variables " type="data_column" data_ref="input1" numerical="True" multiple="true" >
-        <validator type="no_options" message="Please select at least one column."/>
-    </param>
-    <param name="features" size="10" type="integer" value="2" label="Number of principal components to return" help="To return all, enter 0"/>
-    <conditional name="kernelChoice">
-        <param name="kernel" type="select" label="Kernel function">
-            <option value="rbfdot" selected="true">Gaussian Radial Basis Function</option>
-            <option value="polydot">Polynomial</option>
-            <option value="vanilladot">Linear</option>
-            <option value="tanhdot">Hyperbolic</option>
-            <option value="laplacedot">Laplacian</option>
-            <option value="besseldot">Bessel</option>
-            <option value="anovadot">ANOVA Radial Basis Function</option>
-            <option value="splinedot">Spline</option>
-        </param>
-        <when value="vanilladot" />
-        <when value="splinedot" />
-        <when value="rbfdot">
-            <param name="sigma" size="10" type="float" value="1" label="sigma (inverse kernel width)" />
-        </when>
-        <when value="laplacedot">
-            <param name="sigma" size="10" type="float" value="1" label="sigma (inverse kernel width)" />
-        </when>
-        <when value="polydot">
-            <param name="degree" size="10" type="integer" value="1" label="degree" />
-            <param name="scale" size="10" type="integer" value="1" label="scale" />
-            <param name="offset" size="10" type="integer" value="1" label="offset" />
-        </when>
-        <when value="tanhdot">
-            <param name="scale" size="10" type="integer" value="1" label="scale" />
-            <param name="offset" size="10" type="integer" value="1" label="offset" />
-        </when>
-        <when value="besseldot">
-            <param name="sigma" size="10" type="integer" value="1" label="sigma" />
-            <param name="order" size="10" type="integer" value="1" label="order" />
-            <param name="degree" size="10" type="integer" value="1" label="degree" />
-        </when>
-        <when value="anovadot">
-            <param name="sigma" size="10" type="integer" value="1" label="sigma" />
-            <param name="degree" size="10" type="integer" value="1" label="degree" />
-        </when>
-    </conditional>    
-  </inputs>
-  <outputs>
-    <data format="input" name="out_file1" metadata_source="input1" />
-    <data format="pdf" name="out_file2" />
-  </outputs>
-  <tests>
-    <test>
-        <param name="input1" value="iris.tabular"/>
-        <param name="var_cols" value="1,2,3,4"/>
-        <param name="kernel" value="polydot"/>
-        <param name="features" value="2"/>
-        <param name="offset" value="0"/>
-        <param name="scale" value="1"/>
-        <param name="degree" value="2"/>
-        <output name="out_file1" file="kpca_out1.tabular"/>
-        <output name="out_file2" file="kpca_out2.pdf"/>
-    </test>
-  </tests>
-  <help>
-
-
-.. class:: infomark
-
-**TIP:** If your data is not TAB delimited, use *Edit Datasets-&gt;Convert characters*
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool uses functions from the R 'kernlab' package to perform Kernel Principal Component Analysis (kPCA) on the input data. It outputs two files: one containing the summary statistics of the performed kPCA, and the other containing a scatterplot matrix of the rotated values reported by kPCA.
-
-*Alexandros Karatzoglou, Alex Smola, Kurt Hornik, Achim Zeileis (2004). kernlab - An S4 Package for Kernel Methods in R. Journal of Statistical Software 11(9), 1-20. URL http://www.jstatsoft.org/v11/i09/*
-
------
-
-.. class:: warningmark
-
-**Note**
-
-This tool currently treats all variables as continuous numeric variables. Running it on categorical variables may produce incorrect results. Rows containing non-numeric (or missing) data in any of the chosen columns are excluded from the analysis.
-
-  </help>
-</tool>
--- a/linear_regression.py	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,158 +0,0 @@
-#!/usr/bin/env python
-
-import sys, string
-import rpy2.robjects as robjects
-import rpy2.rlike.container as rlc
-from rpy2.robjects.packages import importr
-r = robjects.r
-grdevices = importr('grDevices')
-#  from rpy import *
-import numpy
-
-
-def stop_err(msg):
-    sys.stderr.write(msg)
-    sys.exit()
-
-infile = sys.argv[1]
-y_col = int(sys.argv[2])-1
-x_cols = sys.argv[3].split(',')
-outfile = sys.argv[4]
-outfile2 = sys.argv[5]
-
-print "Predictor columns: %s; Response column: %d" %(x_cols,y_col+1)
-fout = open(outfile,'w')
-elems = []
-for i, line in enumerate( file ( infile )):
-    line = line.rstrip('\r\n')
-    if len( line )>0 and not line.startswith( '#' ):
-        elems = line.split( '\t' )
-        break 
-    if i == 30:
-        break # Hopefully we'll never get here...
-
-if len( elems )<1:
-    stop_err( "The data in your input dataset is either missing or not formatted properly." )
-
-y_vals = []
-x_vals = []
-
-for k,col in enumerate(x_cols):
-    x_cols[k] = int(col)-1
-    # x_vals.append([])
-
-NA = 'NA'
-for ind,line in enumerate( file( infile )):
-    if line and not line.startswith( '#' ):
-        try:
-            fields = line.split("\t")
-            try:
-                yval = float(fields[y_col])
-            except:
-                yval = r('NA')
-            y_vals.append(yval)
-            for k,col in enumerate(x_cols):
-                try:
-                    xval = float(fields[col])
-                except:
-                    xval = r('NA')
-                # x_vals[k].append(xval)
-                x_vals.append(xval)
-        except:
-            pass
-# x_vals1 = numpy.asarray(x_vals).transpose()
-# dat= r.list(x=array(x_vals1), y=y_vals)
-fv = robjects.FloatVector(x_vals)
-m = r['matrix'](fv, ncol=len(x_cols),byrow=True)
-# ensure order for generating formula
-od = rlc.OrdDict([('y',robjects.FloatVector(y_vals)),('x',m)])
-dat = robjects.DataFrame(od)
-# convert dat.names: ["y","x.1","x.2"] to formula string: 'y ~ x.1 + x.2'
-formula = ' + '.join(dat.names).replace('+','~',1)
-
-#set_default_mode(NO_CONVERSION)
-try:
-    #linear_model = r.lm(r("y ~ x"), data = r.na_exclude(dat))
-    linear_model = r.lm(formula,  data =  r['na.exclude'](dat))
-except Exception, rex:  # rpy2 has no RException; catch generic exceptions
-    stop_err("Error performing linear regression on the input data.\nEither the response column or one of the predictor columns contain only non-numeric or invalid values.")
-#set_default_mode(BASIC_CONVERSION)
-
-#coeffs=linear_model.as_py()['coefficients']
-#yintercept= coeffs['(Intercept)']
-coeffs=linear_model.rx2('coefficients')
-yintercept= coeffs.rx2('(Intercept)')[0]
-summary = r.summary(linear_model)
-
-#co = summary.get('coefficients', 'NA')
-co = summary.rx2("coefficients")
-
-"""
-if len(co) != len(x_vals)+1:
-    stop_err("Stopped performing linear regression on the input data, since one of the predictor columns contains only non-numeric or invalid values.")
-"""
-#print >>fout, "p-value (Y-intercept)\t%s" %(co[0][3])
-print >>fout, "p-value (Y-intercept)\t%s" %(co.rx(1,4)[0])
-
-if len(x_cols) == 1:    #Simple linear regression case with 1 predictor variable
-    try:
-        #slope = coeffs['x']
-        slope = r.round(float(coeffs.rx2('x')[0]), digits=10)[0]
-    except:
-        slope = 'NA'
-    try:
-        #pval = co[1][3]
-        pval = r.round(float(co.rx(2,4)[0]), digits=10)[0]
-    except:
-        pval = 'NA'
-    print >>fout, "Slope (c%d)\t%s" %(x_cols[0]+1,slope)
-    print >>fout, "p-value (c%d)\t%s" %(x_cols[0]+1,pval)
-else:    #Multiple regression case with >1 predictors
-    ind=1
-    #while ind < len(coeffs.keys()):
-    while ind < len(coeffs.names):
-        # print >>fout, "Slope (c%d)\t%s" %(x_cols[ind-1]+1,coeffs['x'+str(ind)])
-        print >>fout, "Slope (c%d)\t%s" %(x_cols[ind-1]+1,coeffs.rx2(coeffs.names[ind])[0])
-        try:
-            #pval = co[ind][3]
-            pval = r.round(float(co.rx(ind+1,4)[0]), digits=10)[0]
-        except:
-            pval = 'NA'
-        print >>fout, "p-value (c%d)\t%s" %(x_cols[ind-1]+1,pval)
-        ind+=1
-
-rsq = summary.rx2('r.squared')[0]
-adjrsq = summary.rx2('adj.r.squared')[0]
-fstat = summary.rx2('fstatistic').rx2('value')[0]
-sigma = summary.rx2('sigma')[0]
-
-try:
-    rsq = r.round(float(rsq), digits=5)[0]
-    adjrsq = r.round(float(adjrsq), digits=5)[0]
-    fstat = r.round(float(fstat), digits=5)[0]
-    sigma = r.round(float(sigma), digits=10)[0]
-except:
-    pass
-
-print >>fout, "R-squared\t%s" %(rsq)
-print >>fout, "Adjusted R-squared\t%s" %(adjrsq)
-print >>fout, "F-statistic\t%s" %(fstat)
-print >>fout, "Sigma\t%s" %(sigma)
-
-r.pdf( outfile2, 8, 8 )
-if len(x_cols) == 1:    #Simple linear regression case with 1 predictor variable
-    sub_title =  "Slope = %s; Y-int = %s" %(slope,yintercept)
-    try:
-        r.plot(x=robjects.FloatVector(x_vals), y=robjects.FloatVector(y_vals), xlab="X", ylab="Y", sub=sub_title, main="Scatterplot with regression")
-        r.abline(a=yintercept, b=slope, col="red")
-    except:
-        pass
-else:
-    r.pairs(dat, main="Scatterplot Matrix", col="blue")
-try:
-    r.plot(linear_model)
-except:
-    pass
-#r.dev_off()
-grdevices.dev_off()
--- a/linear_regression.xml	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,72 +0,0 @@
-<tool id="LinearRegression1" name="Perform Linear Regression" version="1.1.0">
-  <description> </description>
-  <expand macro="requirements" />
-    <macros>
-        <import>statistic_tools_macros.xml</import>
-    </macros>
-  <command interpreter="python">
-    linear_regression.py 
-      $input1
-      $response_col
-      $predictor_cols
-      $out_file1
-      $out_file2
-      1>/dev/null
-  </command>
-  <inputs>
-    <param format="tabular" name="input1" type="data" label="Select data" help="Dataset missing? See TIP below."/>
-    <param name="response_col" label="Response column (Y)" type="data_column" data_ref="input1" numerical="True"/>
-    <param name="predictor_cols" label="Predictor columns (X)" type="data_column" data_ref="input1" numerical="True" multiple="true" >
-        <validator type="no_options" message="Please select at least one column."/>
-    </param>
-  </inputs>
-  <outputs>
-    <data format="input" name="out_file1" metadata_source="input1" />
-    <data format="pdf" name="out_file2" />
-  </outputs>
-  <tests>
-    <test>
-        <param name="input1" value="regr_inp.tabular"/>
-        <param name="response_col" value="3"/>
-        <param name="predictor_cols" value="1,2"/>
-        <output name="out_file1" file="regr_out.tabular"/>
-        <output name="out_file2" file="regr_out.pdf"/>
-    </test>
-  </tests>
-  <help>
-
-
-.. class:: infomark
-
-**TIP:** If your data is not TAB delimited, use *Edit Datasets-&gt;Convert characters*
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool uses the 'lm' function from the R statistical package to perform linear regression on the input data. It outputs two files: one containing the summary statistics of the performed regression, and the other containing diagnostic plots to check whether the model assumptions are satisfied.
-
-*R Development Core Team (2009). R: A language and environment for statistical computing. R Foundation for Statistical Computing, Vienna, Austria. ISBN 3-900051-07-0, URL http://www.R-project.org.*
-
------
-
-.. class:: warningmark
-
-**Note**
-
-- This tool currently treats all predictor and response variables as continuous numeric variables. Running it on categorical variables may produce incorrect results.
-
-- Rows containing non-numeric (or missing) data in any of the chosen columns are excluded from the analysis.
-
-- The summary statistics in the output are described below; an illustrative sketch of how they are computed follows this list:
-
-  - sigma: the square root of the estimated variance of the random error (the standard error of the residuals)
-  - R-squared: the fraction of variance explained by the model
-  - Adjusted R-squared: the R-squared statistic adjusted (penalized) for the number of predictors (p)
-  - p-value: p-value for the t-test of the null hypothesis that the corresponding slope is equal to zero against the two-sided alternative.
-
-
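-As an illustration of how these summary statistics relate to an ordinary least-squares fit, here is a minimal sketch using numpy and made-up data (it is not part of the tool itself)::
-
-    import numpy as np
-
-    # toy data: response y and two predictor columns
-    X = np.array([[1.0, 2.0], [2.0, 1.0], [3.0, 4.0], [4.0, 3.0], [5.0, 6.0]])
-    y = np.array([2.1, 2.9, 5.2, 5.8, 8.1])
-
-    n, p = X.shape
-    Xd = np.column_stack([np.ones(n), X])            # add the intercept column
-    beta = np.linalg.lstsq(Xd, y)[0]                 # least-squares coefficients
-
-    resid = y - Xd.dot(beta)
-    sse = (resid ** 2).sum()                         # residual sum of squares
-    sst = ((y - y.mean()) ** 2).sum()                # total sum of squares
-
-    r_squared = 1.0 - sse / sst                      # fraction of variance explained
-    adj_r_squared = 1.0 - (1.0 - r_squared) * (n - 1.0) / (n - p - 1.0)
-    sigma = (sse / (n - p - 1.0)) ** 0.5             # residual standard error
-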
-  </help>
-</tool>
--- a/logistic_regression_vif.py	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,229 +0,0 @@
-#!/usr/bin/env python
-
-#from galaxy import eggs
-import sys, string
-#from rpy import *
-import rpy2.robjects as robjects
-import rpy2.rlike.container as rlc
-import rpy2.rinterface as ri
-r = robjects.r
-import numpy
-
-def stop_err(msg):
-    sys.stderr.write(msg)
-    sys.exit()
-
-infile = sys.argv[1]
-y_col = int(sys.argv[2])-1
-x_cols = sys.argv[3].split(',')
-outfile = sys.argv[4]
-
-
-print "Predictor columns: %s; Response column: %d" %(x_cols,y_col+1)
-fout = open(outfile,'w')
-elems = []
-for i, line in enumerate( file ( infile )):
-    line = line.rstrip('\r\n')
-    if len( line )>0 and not line.startswith( '#' ):
-        elems = line.split( '\t' )
-        break 
-    if i == 30:
-        break # Hopefully we'll never get here...
-
-if len( elems )<1:
-    stop_err( "The data in your input dataset is either missing or not formatted properly." )
-
-y_vals = []
-x_vals = []
-x_vector = []
-for k,col in enumerate(x_cols):
-    x_cols[k] = int(col)-1
-    x_vals.append([])
-
-NA = 'NA'
-for ind,line in enumerate( file( infile )):
-    if line and not line.startswith( '#' ):
-        try:
-            fields = line.split("\t")
-            try:
-                yval = float(fields[y_col])
-            except:
-                yval = r('NA')
-            y_vals.append(yval)
-            for k,col in enumerate(x_cols):
-                try:
-                    xval = float(fields[col])
-                except:
-                    xval = r('NA')
-                x_vals[k].append(xval)
-                x_vector.append(xval)
-        except Exception, e:
-            print e
-            pass
-
-#x_vals1 = numpy.asarray(x_vals).transpose()
-
-check1=0
-check0=0
-for i in y_vals:
-    if i == 1:
-        check1=1
-    if i == 0:
-        check0=1
-if check1==0 or check0==0:
-    sys.exit("Warning: logistic regression must have at least two classes")
-
-for i in y_vals:
-    if i not in [1,0,r('NA')]:
-        print >>fout, str(i)
-        sys.exit("Warning: the current version of this tool can run only with two classes and need to be labeled as 0 and 1.")
-    
-    
-#dat= r.list(x=array(x_vals1), y=y_vals)
-novif=0
-#set_default_mode(NO_CONVERSION)
-#try:
-#    linear_model = r.glm(r("y ~ x"), data = r.na_exclude(dat),family="binomial")
-#    #r('library(car)')
-#    #r.assign('dat',dat)
-#    #r.assign('ncols',len(x_cols))
-#    #r.vif(r('glm(dat$y ~ ., data = na.exclude(data.frame(as.matrix(dat$x,ncol=ncols))->datx),family="binomial")')).as_py()
-#   
-#except RException, rex:
-#    stop_err("Error performing logistic regression on the input data.\nEither the response column or one of the predictor columns contain only non-numeric or invalid values.")
-
-fv = robjects.FloatVector(x_vector)
-m = r['matrix'](fv, ncol=len(x_cols),byrow=True)
-# ensure order for generating formula
-od = rlc.OrdDict([('y',robjects.FloatVector(y_vals)),('x',m)])
-dat = robjects.DataFrame(od)
-# convert dat.names: ["y","x.1","x.2"] to formula string: 'y ~ x.1 + x.2'
-formula = ' + '.join(dat.names).replace('+','~',1)
-print formula
-try:
-    linear_model = r.glm(formula,  data =  r['na.exclude'](dat), family="binomial")
-except Exception, rex:
-    stop_err("Error performing linear regression on the input data.\nEither the response column or one of the predictor columns contain only non-numeric or invalid values.")
-
-if len(x_cols)>1:
-    try:
-        r('library(car)')
-        r.assign('dat',dat)
-        r.assign('ncols',len(x_cols))
-        #vif=r.vif(r('glm(dat$y ~ ., data = na.exclude(data.frame(as.matrix(dat$x,ncol=ncols))->datx),family="binomial")'))
-        od2 = rlc.OrdDict([('datx', m)])
-        glm_data_frame = robjects.DataFrame(od2)
-        glm_result = r.glm("dat$y ~ .", data = r['na.exclude'](glm_data_frame),family="binomial")
-        print 'Have glm'
-        vif = r.vif(glm_result)
-    except Exception, rex:        
-        print rex
-else:
-    novif=1
-    
-#set_default_mode(BASIC_CONVERSION)
-
-#coeffs=linear_model.as_py()['coefficients']
-coeffs=linear_model.rx2('coefficients')
-#null_deviance=linear_model.as_py()['null.deviance']
-null_deviance=linear_model.rx2('null.deviance')[0]
-#residual_deviance=linear_model.as_py()['deviance']
-residual_deviance=linear_model.rx2('deviance')[0]
-#yintercept= coeffs['(Intercept)']
-yintercept= coeffs.rx2('(Intercept)')[0]
-
-summary = r.summary(linear_model)
-#co = summary.get('coefficients', 'NA')
-co = summary.rx2("coefficients")
-print co
-"""
-if len(co) != len(x_vals)+1:
-    stop_err("Stopped performing logistic regression on the input data, since one of the predictor columns contains only non-numeric or invalid values.")
-"""
-
-try:
-    yintercept = r.round(float(yintercept), digits=10)[0]
-    #pvaly = r.round(float(co[0][3]), digits=10)
-    pvaly = r.round(float(co.rx(1,4)[0]), digits=10)[0]
-except Exception, e:
-    print str(e)
-    pass
-print >>fout, "response column\tc%d" %(y_col+1)
-tempP=[]
-for i in x_cols:
-    tempP.append('c'+str(i+1))
-tempP=','.join(tempP)
-print >>fout, "predictor column(s)\t%s" %(tempP)
-print >>fout, "Y-intercept\t%s" %(yintercept)
-print >>fout, "p-value (Y-intercept)\t%s" %(pvaly)
-
-print coeffs
-if len(x_vals) == 1:    #Simple linear  regression case with 1 predictor variable
-    try:
-        #slope = r.round(float(coeffs['x']), digits=10)
-        raw_slope = coeffs.rx2('x')[0]
-        slope = r.round(float(raw_slope), digits=10)[0] 
-    except:
-        slope = 'NA'
-    try:
-        #pval = r.round(float(co[1][3]), digits=10)
-        pval = r.round(float(co.rx2(2,4)[0]), digits=10)[0]
-    except:
-        pval = 'NA'
-    print >>fout, "Slope (c%d)\t%s" %(x_cols[0]+1,slope)
-    print >>fout, "p-value (c%d)\t%s" %(x_cols[0]+1,pval)
-else:    #Multiple regression case with >1 predictors
-    ind=1
-    #while ind < len(coeffs.keys()):
-    print len(coeffs.names)
-    while ind < len(coeffs.names):
-        try:
-            #slope = r.round(float(coeffs['x'+str(ind)]), digits=10)
-            raw_slope = coeffs.rx2('x.' + str(ind))[0]
-            slope = r.round(float(raw_slope), digits=10)[0]
-        except:
-            slope = 'NA'
-        print >>fout, "Slope (c%d)\t%s" %(x_cols[ind-1]+1,slope)
-
-        try:
-            #pval = r.round(float(co[ind][3]), digits=10)
-            pval = r.round(float(co.rx2(ind+1, 4)[0]), digits=10)[0]
-        except:
-            pval = 'NA'
-        print >>fout, "p-value (c%d)\t%s" %(x_cols[ind-1]+1,pval)
-        ind+=1
-
-#rsq = summary.get('r.squared','NA')
-rsq = summary.rx2('r.squared')
-if rsq == ri.RNULLType():
-    rsq = 'NA'
-else:
-    rsq = rsq[0]
-
-
-try:
-    #rsq= r.round(float((null_deviance-residual_deviance)/null_deviance), digits=5)
-    rsq= r.round(float((null_deviance-residual_deviance)/null_deviance), digits=5)[0]
-    #null_deviance= r.round(float(null_deviance), digits=5)
-    null_deviance= r.round(float(null_deviance), digits=5)[0]
-    #residual_deviance= r.round(float(residual_deviance), digits=5)
-    residual_deviance= r.round(float(residual_deviance), digits=5)[0]
-    
-except:
-    pass
-
-print >>fout, "Null deviance\t%s" %(null_deviance)
-
-print >>fout, "Residual deviance\t%s" %(residual_deviance)
-print >>fout, "pseudo R-squared\t%s" %(rsq)
-print >>fout, "\n"
-print >>fout, 'vif'
-
-if novif==0:
-    #py_vif=vif.as_py()
-    count=0
-    for i in sorted(vif.names):
-        print >>fout,'c'+str(x_cols[count]+1) ,str(vif.rx2(i)[0])
-        count+=1
-elif novif==1:
-    print >>fout, "vif can calculate only when model have more than 1 predictor"
--- a/logistic_regression_vif.xml	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,75 +0,0 @@
-<tool id="LogisticRegression" name="Perform Logistic Regression with vif" version="1.1.0">
-  <description> </description>
-  <expand macro="requirements" />
-    <macros>
-        <import>statistic_tools_macros.xml</import>
-    </macros>
-  <command interpreter="python">
-    logistic_regression_vif.py 
-      $input1
-      $response_col
-      $predictor_cols
-      $out_file1
-      1>/dev/null
-  </command>
-  <inputs>
-    <param format="tabular" name="input1" type="data" label="Select data" help="Dataset missing? See TIP below."/>
-    <param name="response_col" label="Response column (Y)" type="data_column" data_ref="input1" numerical="True"/>
-    <param name="predictor_cols" label="Predictor columns (X)" type="data_column" data_ref="input1" numerical="True" multiple="true" >
-        <validator type="no_options" message="Please select at least one column."/>
-    </param>
-  </inputs>
-  <outputs>
-    <data format="input" name="out_file1" metadata_source="input1" />
-
-  </outputs>
-  <tests>
-    <test>
-        <param name="input1" value="logreg_inp.tabular"/>
-        <param name="response_col" value="4"/>
-        <param name="predictor_cols" value="1,2,3"/>
-        <output name="out_file1" file="logreg_out2.tabular"/>
-
-    </test>
-  </tests>
-  <help>
-
-
-.. class:: infomark
-
-**TIP:** If your data is not TAB delimited, use *Edit Datasets-&gt;Convert characters*
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool uses the **'glm'** function from the R statistical package to perform logistic regression on the input data. It outputs one file containing the summary statistics of the performed regression. It also calculates the VIF (Variance Inflation Factor) with the **'vif'** function from the R 'car' library.
-
-
-*R Development Core Team (2010). R: A language and environment for statistical computing. R Foundation for Statistical Computing, Vienna, Austria. ISBN 3-900051-07-0, URL http://www.R-project.org.*
-
------
-
-.. class:: warningmark
-
-**Note**
-
-- This tool currently treats all predictor variables as continuous numeric variables and the response variable as a categorical variable. Currently, the response variable can have only two classes, labeled 0 and 1; the program takes 0 as the base class.
-
-- Rows containing non-numeric (or missing) data in any of the chosen columns are excluded from the analysis.
-
-- The summary statistics in the output are described below:
-
-- Pseudo R-squared: the proportion of improvement of the model over the null model
-- p-value: p-value for the z-test of the null hypothesis that the corresponding slope is equal to zero against the two-sided alternative.
-- Each coefficient relates to the log odds, i.e. log(probability of class 1 / probability of class 0)
-
-- This tool also reports the **Variance Inflation Factor (VIF)**, which quantifies the level of multicollinearity. VIF is generated automatically whenever the model has more than one predictor. The higher the VIF, the higher the multicollinearity. Multicollinearity inflates the standard error and reduces the significance level of the predictor; in the worst case, it can reverse the direction of the slope for highly correlated predictors when one of them is significant. A general rule of thumb is to use predictors with a VIF lower than 10 (or, more conservatively, 5).
-- **vif** is calculated by (see the sketch after this list):
-    - First, regressing each predictor on all other predictors and recording the R-squared of each regression.
-    - Second, computing VIF as 1 / (1 - R-squared).
-
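-A minimal sketch of this VIF computation, using numpy and made-up data (for illustration only; the tool itself relies on the 'vif' function from the R 'car' library)::
-
-    import numpy as np
-
-    def vif(X):
-        # one VIF value per column of the predictor matrix X
-        n, p = X.shape
-        vifs = []
-        for i in range(p):
-            y = X[:, i]                           # predictor i treated as the response
-            others = np.delete(X, i, axis=1)      # all remaining predictors
-            Xd = np.column_stack([np.ones(n), others])
-            beta = np.linalg.lstsq(Xd, y)[0]
-            resid = y - Xd.dot(beta)
-            r2 = 1.0 - (resid ** 2).sum() / ((y - y.mean()) ** 2).sum()
-            vifs.append(1.0 / (1.0 - r2))
-        return vifs
-
-    # the third predictor is nearly collinear with the first, so its VIF is large
-    X = np.array([[1.0, 2.0, 2.1], [2.0, 1.0, 3.9], [3.0, 4.0, 6.2],
-                  [4.0, 3.0, 8.1], [5.0, 6.0, 9.8]])
-    print vif(X)
-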
-  </help>
-</tool>
--- a/partialR_square.py	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,183 +0,0 @@
-#!/usr/bin/env python
-
-#from galaxy import eggs
-
-import sys, string
-#from rpy import *
-
-import rpy2.robjects as robjects
-import rpy2.rlike.container as rlc
-r = robjects.r
-import numpy
-
-#export PYTHONPATH=~/galaxy/lib/
-#running command python partialR_square.py reg_inp.tab 4 1,2,3 partialR_result.tabular
-
-def stop_err(msg):
-    sys.stderr.write(msg)
-    sys.exit()
-
-def sscombs(s):
-    if len(s) == 1:
-        return [s]
-    else:
-        ssc = sscombs(s[1:])
-        return [s[0]] + [s[0]+comb for comb in ssc] + ssc
-
-
-infile = sys.argv[1]
-y_col = int(sys.argv[2])-1
-x_cols = sys.argv[3].split(',')
-outfile = sys.argv[4]
-
-print "Predictor columns: %s; Response column: %d" %(x_cols,y_col+1)
-fout = open(outfile,'w')
-
-for i, line in enumerate( file ( infile )):
-    line = line.rstrip('\r\n')
-    if len( line )>0 and not line.startswith( '#' ):
-        elems = line.split( '\t' )
-        break 
-    if i == 30:
-        break # Hopefully we'll never get here...
-
-if len( elems )<1:
-    stop_err( "The data in your input dataset is either missing or not formatted properly." )
-
-y_vals = []
-x_vals = []
-x_vector = []
-for k,col in enumerate(x_cols):
-    x_cols[k] = int(col)-1
-    x_vals.append([])    
-    """
-    try:
-        float( elems[x_cols[k]] )
-    except:
-        try:
-            msg = "This operation cannot be performed on non-numeric column %d containing value '%s'." %( col, elems[x_cols[k]] )
-        except:
-            msg = "This operation cannot be performed on non-numeric data."
-        stop_err( msg )
-    """
-NA = 'NA'
-for ind,line in enumerate( file( infile )):
-    if line and not line.startswith( '#' ):
-        try:
-            fields = line.split("\t")
-            try:
-                yval = float(fields[y_col])
-            except Exception, ey:
-                yval = r('NA')
-                #print >>sys.stderr, "ey = %s" %ey
-            y_vals.append(yval)
-            for k,col in enumerate(x_cols):
-                try:
-                    xval = float(fields[col])
-                except Exception, ex:
-                    xval = r('NA')
-                    #print >>sys.stderr, "ex = %s" %ex
-                x_vals[k].append(xval)
-                x_vector.append(xval)
-        except:
-            pass
-
-#x_vals1 = numpy.asarray(x_vals).transpose()
-#dat= r.list(x=array(x_vals1), y=y_vals)
-
-#set_default_mode(NO_CONVERSION)
-#try:
-#    full = r.lm(r("y ~ x"), data= r.na_exclude(dat))    #full model includes all the predictor variables specified by the user
-#except RException, rex:
-#    stop_err("Error performing linear regression on the input data.\nEither the response column or one of the predictor columns contain no numeric values.")
-#set_default_mode(BASIC_CONVERSION)
-
-fv = robjects.FloatVector(x_vector)
-m = r['matrix'](fv, ncol=len(x_cols),byrow=True)
-# ensure order for generating formula
-od = rlc.OrdDict([('y',robjects.FloatVector(y_vals)),('x',m)])
-dat = robjects.DataFrame(od)
-# convert dat.names: ["y","x.1","x.2"] to formula string: 'y ~ x.1 + x.2'
-formula = ' + '.join(dat.names).replace('+','~',1)
-try:
-    full = r.lm(formula,  data =  r['na.exclude'](dat))
-except Exception, rex:  # rpy2 has no RException; catch generic exceptions
-    stop_err("Error performing linear regression on the input data.\nEither the response column or one of the predictor columns contain only non-numeric or invalid values.")
-
-
-
-summary = r.summary(full)
-#fullr2 = summary.get('r.squared','NA')
-fullr2 = summary.rx2('r.squared')[0]
-
-if fullr2 == 'NA':
-    stop_err("Error in linear regression")
-
-if len(x_vals) < 10:
-    s = ""
-    for ch in range(len(x_vals)):
-        s += str(ch)
-else:
-    stop_err("This tool only works with less than 10 predictors.")
-
-print >>fout, "#Model\tR-sq\tpartial_R_Terms\tpartial_R_Value"
-all_combos = sorted(sscombs(s), key=len)
-all_combos.reverse()
-for j,cols in enumerate(all_combos):
-    #if len(cols) == len(s):    #Same as the full model above
-    #    continue
-    if len(cols) == 1:
-        #x_vals1 = x_vals[int(cols)]
-        x_v = x_vals[int(cols)]        
-    else:
-        x_v = []
-        for col in cols:
-            #x_v.append(x_vals[int(col)])
-            x_v.extend(x_vals[int(col)])
-        #x_vals1 = numpy.asarray(x_v).transpose()
-    #dat= r.list(x=array(x_vals1), y=y_vals)
-    #set_default_mode(NO_CONVERSION)
-    #red = r.lm(r("y ~ x"), data= dat)    #Reduced model
-    #set_default_mode(BASIC_CONVERSION)
-    fv = robjects.FloatVector(x_v)
-    m = r['matrix'](fv, ncol=len(cols),byrow=False)
-    # ensure order for generating formula
-    od = rlc.OrdDict([('y',robjects.FloatVector(y_vals)),('x',m)])
-    dat = robjects.DataFrame(od)
-    # convert dat.names: ["y","x.1","x.2"] to formula string: 'y ~ x.1 + x.2'
-    formula = ' + '.join(dat.names).replace('+','~',1)
-    try:
-        red = r.lm(formula,  data =  r['na.exclude'](dat))
-    except Exception, rex:  # rpy2 has no RException; catch generic exceptions
-        stop_err("Error performing linear regression on the input data.\nEither the response column or one of the predictor columns contain only non-numeric or invalid values.")
-    
-
-    summary = r.summary(red)
-    #redr2 = summary.get('r.squared','NA')
-    redr2 = summary.rx2('r.squared')[0]
-
-    try:
-        partial_R = (float(fullr2)-float(redr2))/(1-float(redr2))
-    except:
-        partial_R = 'NA'
-    col_str = ""
-    for col in cols:
-        col_str = col_str + str(int(x_cols[int(col)]) + 1) + " "
-    col_str.strip()
-    partial_R_col_str = ""
-    for col in s:
-        if col not in cols:
-            partial_R_col_str = partial_R_col_str + str(int(x_cols[int(col)]) + 1) + " "
-    partial_R_col_str.strip()
-    if len(cols) == len(s):    #full model
-        partial_R_col_str = "-"
-        partial_R = "-"
-    try:
-        redr2 = "%.4f" %(float(redr2))
-    except:
-        pass
-    try:
-        partial_R = "%.4f" %(float(partial_R))
-    except:
-        pass
-    print >>fout, "%s\t%s\t%s\t%s" %(col_str,redr2,partial_R_col_str,partial_R)
--- a/partialR_square.xml	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,69 +0,0 @@
-<tool id="partialRsq" name="Compute partial R square" version="1.1.0">
-  <description> </description>
-  <expand macro="requirements" />
-    <macros>
-        <import>statistic_tools_macros.xml</import>
-    </macros>
-  <command interpreter="python">
-    partialR_square.py 
-      $input1
-      $response_col
-      $predictor_cols
-      $out_file1
-      1>/dev/null
-  </command>
-  <inputs>
-    <param format="tabular" name="input1" type="data" label="Select data" help="Dataset missing? See TIP below."/>
-    <param name="response_col" label="Response column (Y)" type="data_column" data_ref="input1" />
-    <param name="predictor_cols" label="Predictor columns (X)" type="data_column" data_ref="input1" multiple="true">
-        <validator type="no_options" message="Please select at least one column."/>
-    </param>
-  </inputs>
-  <outputs>
-    <data format="input" name="out_file1" metadata_source="input1" />
-  </outputs>
-  <tests>
-    <!-- Test data with valid values -->
-  	<test>
-      <param name="input1" value="regr_inp.tabular"/>
-      <param name="response_col" value="3"/>
-      <param name="predictor_cols" value="1,2"/>
-      <output name="out_file1" file="partialR_result.tabular"/>
-    </test>
-    
-  </tests>
-  <help>
-
-.. class:: infomark
-
-**TIP:** If your data is not TAB delimited, use *Edit Datasets-&gt;Convert characters*
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool computes the partial R squared for all possible subsets of the predictor variables using the following formula:
-
-**Partial R squared = [SSE(without i: 1,2,...,p-1) - SSE(full: 1,2,...,i,...,p-1)] / SSE(without i: 1,2,...,p-1)**
-
-which denotes the case where the 'i'th predictor is dropped, and where:
-
-- SSE(full: 1,2,...,i,...,p-1) = sum of squared errors left by the full set of predictors, i.e. SSE(X1, X2 … Xp)
-- SSE(without i: 1,2,...,p-1) = sum of squared errors left by the set of predictors excluding the 'i'th one; for example, if we omit the first predictor, it is SSE(X2 … Xp).
-
-A small sketch of this computation is shown below.
-
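-For illustration only, here is a minimal numpy sketch with made-up data (the tool itself fits the models with R's 'lm' function)::
-
-    import numpy as np
-
-    def r_squared(X, y):
-        # R-squared of an ordinary least-squares fit of y on the columns of X
-        n = len(y)
-        Xd = np.column_stack([np.ones(n), X])
-        beta = np.linalg.lstsq(Xd, y)[0]
-        resid = y - Xd.dot(beta)
-        return 1.0 - (resid ** 2).sum() / ((y - y.mean()) ** 2).sum()
-
-    X = np.array([[1.0, 2.0], [2.0, 1.0], [3.0, 4.0], [4.0, 3.0], [5.0, 6.0]])
-    y = np.array([2.1, 2.9, 5.2, 5.8, 8.1])
-
-    full_r2 = r_squared(X, y)                # model with all predictors
-    red_r2 = r_squared(X[:, [0]], y)         # reduced model without the second predictor
-    # equivalent to [SSE(reduced) - SSE(full)] / SSE(reduced)
-    partial_r2 = (full_r2 - red_r2) / (1.0 - red_r2)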
-
-The 4 columns in the output are described below:
-
-- Column 1 (Model): denotes the variables present in the model
-- Column 2 (R-sq): denotes the R-squared value corresponding to the model in Column 1
-- Column 3 (Partial R squared_Terms): denotes the variable/s for which Partial R squared is computed. These are the variables that are absent in the reduced model in Column 1. A '-' in this column indicates that the model in Column 1 is the Full model.
-- Column 4 (Partial R squared): denotes the Partial R squared value corresponding to the variable/s in Column 3. A '-' in this column indicates that the model in Column 1 is the Full model.
-
-*R Development Core Team (2010). R: A language and environment for statistical computing. R Foundation for Statistical Computing, Vienna, Austria. ISBN 3-900051-07-0, URL http://www.R-project.org.*  
-  
-  </help>
-</tool>
--- a/pca.py	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,164 +0,0 @@
-#!/usr/bin/env python
-
-import sys, string
-#from rpy import *
-import rpy2.robjects as robjects
-import rpy2.rlike.container as rlc
-from rpy2.robjects.packages import importr
-r = robjects.r
-grdevices = importr('grDevices')
-import numpy
-
-def stop_err(msg):
-    sys.stderr.write(msg)
-    sys.exit()
-
-infile = sys.argv[1]
-x_cols = sys.argv[2].split(',')
-method = sys.argv[3]
-outfile = sys.argv[4]
-outfile2 = sys.argv[5]
-
-if method == 'svd':
-    scale = center = "FALSE"
-    if sys.argv[6] == 'both':
-        scale = center = "TRUE"
-    elif sys.argv[6] == 'center':
-        center = "TRUE"
-    elif sys.argv[6] == 'scale':
-        scale = "TRUE"
-    
-fout = open(outfile,'w')
-elems = []
-for i, line in enumerate( file ( infile )):
-    line = line.rstrip('\r\n')
-    if len( line )>0 and not line.startswith( '#' ):
-        elems = line.split( '\t' )
-        break 
-    if i == 30:
-        break # Hopefully we'll never get here...
-
-if len( elems )<1:
-    stop_err( "The data in your input dataset is either missing or not formatted properly." )
-
-x_vals = []
-
-for k,col in enumerate(x_cols):
-    x_cols[k] = int(col)-1
-    # x_vals.append([])
-
-NA = 'NA'
-skipped = 0
-for ind,line in enumerate( file( infile )):
-    if line and not line.startswith( '#' ):
-        try:
-            fields = line.strip().split("\t")
-            valid_line = True
-            for k,col in enumerate(x_cols):
-                try:
-                    xval = float(fields[col])
-                except:
-                    skipped += 1 
-                    valid_line = False
-                    break
-            if valid_line:
-                for k,col in enumerate(x_cols):
-                    xval = float(fields[col])
-                    #x_vals[k].append(xval)
-                    x_vals.append(xval)
-        except:
-            skipped += 1
-
-#x_vals1 = numpy.asarray(x_vals).transpose()
-#dat= r.list(array(x_vals1))
-dat = r['matrix'](robjects.FloatVector(x_vals),ncol=len(x_cols),byrow=True)
-
-#set_default_mode(NO_CONVERSION)
-try:
-    if method == "cor":
-        #pc = r.princomp(r.na_exclude(dat), cor = r("TRUE"))
-        pc = r.princomp(r['na.exclude'](dat), cor = r("TRUE"))
-    elif method == "cov":
-        #pc = r.princomp(r.na_exclude(dat), cor = r("FALSE"))
-        pc = r.princomp(r['na.exclude'](dat), cor = r("FALSE"))
-    elif method=="svd":
-        #pc = r.prcomp(r.na_exclude(dat), center = r(center), scale = r(scale))
-        pc = r.prcomp(r['na.exclude'](dat), center = r(center), scale = r(scale))
-#except RException, rex:
-except Exception, rex:  # need to find rpy2 RException
-    stop_err("Encountered error while performing PCA on the input data: %s" %(rex))
-
-#set_default_mode(BASIC_CONVERSION)
-summary = r.summary(pc, loadings="TRUE")
-#ncomps = len(summary['sdev'])
-ncomps = len(summary.rx2('sdev'))
-
-#if type(summary['sdev']) == type({}):
-#    comps_unsorted = summary['sdev'].keys()
-#    comps=[]
-#    sd = summary['sdev'].values()
-#    for i in range(ncomps):
-#        sd[i] = summary['sdev'].values()[comps_unsorted.index('Comp.%s' %(i+1))]
-#        comps.append('Comp.%s' %(i+1))
-#elif type(summary['sdev']) == type([]):
-#    comps=[]
-#    for i in range(ncomps):
-#        comps.append('Comp.%s' %(i+1))
-#        sd = summary['sdev']
-
-comps=[]
-for i in range(ncomps):
-     comps.append('Comp.%s' %(i+1))
-sd = summary.rx2('sdev')
-
-print >>fout, "#Component\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-#print >>fout, "#Std. deviation\t%s" %("\t".join(["%.4g" % el for el in sd]))
-print >>fout, "#Std. deviation\t%s" %("\t".join(["%.4g" % el for el in sd]))
-total_var = 0
-vars = []
-for s in sd:
-    var = s*s
-    total_var += var
-    vars.append(var)
-for i,var in enumerate(vars):
-    vars[i] = vars[i]/total_var
-       
-print >>fout, "#Proportion of variance explained\t%s" %("\t".join(["%.4g" % el for el in vars]))
-
-print >>fout, "#Loadings\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-xcolnames = ["c%d" %(el+1) for el in x_cols]
-#if 'loadings' in summary: #in case of princomp
-if 'loadings' in summary.names: #in case of princomp
-    loadings = 'loadings'
-#elif 'rotation' in summary: #in case of prcomp
-elif 'rotation' in summary.names: #in case of prcomp
-    loadings = 'rotation'
-#for i,val in enumerate(summary[loadings]):
-#    print >>fout, "%s\t%s" %(xcolnames[i], "\t".join(["%.4g" % el for el in val]))
-vm = summary.rx2(loadings)
-for i in range(vm.nrow):
-    vals = []
-    for j in range(vm.ncol):
-       vals.append("%.4g" % vm.rx2(i+1,j+1)[0])
-    print >>fout, "%s\t%s" %(xcolnames[i], "\t".join(vals))
-
-print >>fout, "#Scores\t%s" %("\t".join(["%s" % el for el in range(1,ncomps+1)]))
-#if 'scores' in summary: #in case of princomp
-if 'scores' in summary.names: #in case of princomp
-    scores = 'scores'
-#elif 'x' in summary: #in case of prcomp
-elif 'x' in summary.names: #in case of prcomp
-    scores = 'x'
-#for obs,sc in enumerate(summary[scores]):
-#    print >>fout, "%s\t%s" %(obs+1, "\t".join(["%.4g" % el for el in sc]))
-vm = summary.rx2(scores)
-for i in range(vm.nrow):
-    vals = []
-    for j in range(vm.ncol):
-       vals.append("%.4g" % vm.rx2(i+1,j+1)[0])
-    print >>fout, "%s\t%s" %(i+1, "\t".join(vals))
-r.pdf( outfile2, 8, 8 )
-r.biplot(pc)
-#r.dev_off()
-grdevices.dev_off()
-
--- a/pca.xml	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,101 +0,0 @@
-<tool id="pca1" name="Principal Component Analysis" version="1.1.0">
-  <description> </description>
-  <expand macro="requirements" />
-    <macros>
-        <import>statistic_tools_macros.xml</import>
-    </macros>
-  <command interpreter="python">
-    pca.py 
-      $input1
-      $var_cols
-      $methodChoice.method
-      $out_file1
-      $out_file2
-      #if $methodChoice.method == "svd":
-      $methodChoice.scale
-      #end if
-  </command>
-  <inputs>
-    <param format="tabular" name="input1" type="data" label="Select data" help="Dataset missing? See TIP below."/>
-    <param name="var_cols" label="Select columns containing input variables " type="data_column" data_ref="input1" numerical="True" multiple="true" >
-        <validator type="no_options" message="Please select at least one column."/>
-    </param>
-    <conditional name="methodChoice">
-        <param name="method" type="select" label="Method" help="The correlation matrix can only be used if there are no constant variables">
-            <option value="cor" selected="true">Eigenvectors of Correlation (princomp)</option>
-            <option value="cov">Eigenvectors of Covariance (princomp)</option>
-            <option value="svd">Singular Value Decomposition (prcomp)</option>
-        </param>
-        <when value="cor" />
-        <when value="cov" />
-        <when value="svd">
-            <param name="scale" type="select" label="Centering and Scaling" help="Can be used to center and/or scale variables">
-                <option value="none" selected="true">None</option>
-                <option value="center">Center only</option>
-                <option value="scale">Scale only</option>
-                <option value="both">Center and Scale</option>
-            </param>        
-        </when>
-    </conditional>
-  </inputs>
-  <outputs>
-    <data format="input" name="out_file1" metadata_source="input1" />
-    <data format="pdf" name="out_file2" />
-  </outputs>
-  <tests>
-    <test>
-        <param name="input1" value="iris.tabular"/>
-        <param name="var_cols" value="1,2,3,4"/>
-        <param name="method" value="cor"/>
-        <output name="out_file1" file="pca_out1.tabular"/>
-        <output name="out_file2" file="pca_out2.pdf"/>
-    </test>
-    <test>
-        <param name="input1" value="iris.tabular"/>
-        <param name="var_cols" value="1,2,3,4"/>
-        <param name="method" value="cov"/>
-        <output name="out_file1" file="pca_out3.tabular"/>
-        <output name="out_file2" file="pca_out4.pdf"/>
-    </test>
-    <test>
-        <param name="input1" value="iris.tabular"/>
-        <param name="var_cols" value="1,2,3,4"/>
-        <param name="method" value="svd"/>
-        <param name="scale" value="both"/>
-        <output name="out_file1" file="pca_out5.tabular"/>
-        <output name="out_file2" file="pca_out6.pdf"/>
-    </test>
-  </tests>
-  <help>
-
-
-.. class:: infomark
-
-**TIP:** If your data is not TAB delimited, use *Edit Datasets-&gt;Convert characters*
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool performs Principal Component Analysis on the given numeric input data using functions from the R statistical package: 'princomp' (eigenvector-based solution) and 'prcomp' (singular-value-decomposition-based solution). It outputs two files: one containing the summary statistics of the PCA, and the other containing biplots of the observations and principal components.
-
-*R Development Core Team (2009). R: A language and environment for statistical computing. R Foundation for Statistical Computing, Vienna, Austria. ISBN 3-900051-07-0, URL http://www.R-project.org.*
-
------
-
-.. class:: warningmark
-
-**Note**
-
-- This tool currently treats all variables as continuous numeric variables. Running it on categorical variables may produce incorrect results. Rows containing non-numeric (or missing) data in any of the chosen columns are excluded from the analysis.
-
-- The summary statistics in the output are described below; a small illustrative sketch follows this list:
-
-  - Std. deviation: standard deviations of the principal components
-  - Loadings: a list of eigenvectors/variable loadings
-  - Scores: scores of the input data on the principal components
-
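-For illustration only, a minimal numpy sketch with made-up data showing how these quantities are related (the tool itself uses R's 'princomp'/'prcomp')::
-
-    import numpy as np
-
-    X = np.array([[2.5, 2.4], [0.5, 0.7], [2.2, 2.9], [1.9, 2.2],
-                  [3.1, 3.0], [2.3, 2.7], [2.0, 1.6], [1.0, 1.1]])
-    Xc = X - X.mean(axis=0)                       # center the variables
-    cov = np.cov(Xc, rowvar=False)
-
-    eigvals, eigvecs = np.linalg.eigh(cov)        # covariance-based PCA
-    order = np.argsort(eigvals)[::-1]             # sort components by variance
-    eigvals, eigvecs = eigvals[order], eigvecs[:, order]
-
-    sdev = np.sqrt(eigvals)                       # "Std. deviation" of each component
-    proportion = eigvals / eigvals.sum()          # proportion of variance explained
-    loadings = eigvecs                            # "Loadings" (eigenvectors)
-    scores = Xc.dot(eigvecs)                      # "Scores" of the observations
-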
-  </help>
-</tool>
--- a/readme.rst	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,61 +0,0 @@
-Galaxy wrapper using R and RPy2
-===============================
-
-These wrappers are based on the RPy1 versions included in Galaxy-Main
-and were ported to RPy2 by John Chilton. Please see the following mail
-from galaxy-dev:
-
-  http://lists.bx.psu.edu/pipermail/galaxy-dev/2013-May/014694.html
-
-Missing ports to RPy2:
-
-- rgenetics/rgQC.py
-- regVariation/rcve.py
-- ngs_simulation/ngs_simulation.py
-- metag_tools/short_reads_figure_high_quality_length.py
-- taxonomy/poisson2test.py
-
-ToDo:
-
-- add tool_dependencies.xml to RPy2 and R-3.0
-- testing
-- porting of missing tools
-
-
-============
-Installation
-============
-
-
-
-=======
-History
-=======
-
-
-  - v0.1: no release yet
-
-
-
-
-Wrapper Licence (MIT/BSD style)
-===============================
-
-Permission to use, copy, modify, and distribute this software and its
-documentation with or without modifications and for any purpose and
-without fee is hereby granted, provided that any copyright notices
-appear in all copies and that both those copyright notices and this
-permission notice appear in supporting documentation, and that the
-names of the contributors or copyright holders not be used in
-advertising or publicity pertaining to distribution of the software
-without specific prior permission.
-
-THE CONTRIBUTORS AND COPYRIGHT HOLDERS OF THIS SOFTWARE DISCLAIM ALL
-WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL THE
-CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT
-OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE
-OR PERFORMANCE OF THIS SOFTWARE.
-
--- a/scatterplot.py	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,88 +0,0 @@
-#!/usr/bin/env python
-#Greg Von Kuster
-
-import sys
-#from rpy import *
-import rpy2.robjects as robjects
-from rpy2.robjects.packages import importr
-r = robjects.r
-grdevices = importr('grDevices')
-
-def stop_err(msg):
-    sys.stderr.write(msg)
-    sys.exit()
-
-def main():
-
-    in_fname = sys.argv[1]
-    out_fname = sys.argv[2]
-    try:
-        columns = int( sys.argv[3] ) - 1, int( sys.argv[4] ) - 1
-    except:
-        stop_err( "Columns not specified, your query does not contain a column of numerical data." )
-    title = sys.argv[5]
-    xlab = sys.argv[6]
-    ylab = sys.argv[7]
-
-    matrix = []
-    skipped_lines = 0
-    first_invalid_line = 0
-    invalid_value = ''
-    invalid_column = 0
-    i = 0
-    for i, line in enumerate( file( in_fname ) ):
-        valid = True
-        line = line.rstrip( '\r\n' )
-        if line and not line.startswith( '#' ): 
-            row = []
-            fields = line.split( "\t" )
-            for column in columns:
-                try:
-                    val = fields[column]
-                    if val.lower() == "na": 
-                        row.append( float( "nan" ) )
-                    else:
-                        row.append( float( fields[column] ) )
-                except:
-                    valid = False
-                    skipped_lines += 1
-                    if not first_invalid_line:
-                        first_invalid_line = i + 1
-                        try:
-                            invalid_value = fields[column]
-                        except:
-                            invalid_value = ''
-                        invalid_column = column + 1
-                    break
-        else:
-            valid = False
-            skipped_lines += 1
-            if not first_invalid_line:
-                first_invalid_line = i+1
-
-        if valid:
-            #matrix.append( row )
-            matrix += row
-
-    if skipped_lines < i:
-        try:
-            fv = robjects.FloatVector(matrix)
-            m = r['matrix'](fv, ncol=len(columns),byrow=True)
-            r.pdf( out_fname, 8, 8 )
-            #r.plot( array( matrix ), type="p", main=title, xlab=xlab, ylab=ylab, col="blue", pch=19 )
-            r.plot( m, type="p", main=title, xlab=xlab, ylab=ylab, col="blue", pch=19 )
-            #r.dev_off()
-            grdevices.dev_off()
-        except Exception, exc:
-            stop_err( "%s" %str( exc ) )
-    else:
-        stop_err( "All values in both columns %s and %s are non-numeric or empty." % ( sys.argv[3], sys.argv[4] ) )
-
-    print "Scatter plot on columns %s, %s. " % ( sys.argv[3], sys.argv[4] )
-    if skipped_lines > 0:
-        print "Skipped %d lines starting with line #%d, value '%s' in column %d is not numeric." % ( skipped_lines, first_invalid_line, invalid_value, invalid_column )
-
-    #r.quit( save="no" )
-
-if __name__ == "__main__":
-    main()
--- a/scatterplot.xml	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,70 +0,0 @@
-<tool id="scatterplot_rpy" name="Scatterplot" version="1.1.0">
-  <description>of two numeric columns</description>
-  <expand macro="requirements" />
-    <macros>
-        <import>statistic_tools_macros.xml</import>
-    </macros>
-  <command interpreter="python">scatterplot.py $input $out_file1 $col1 $col2 "$title" "$xlab" "$ylab"</command>
-  <inputs>
-    <param name="input" type="data" format="tabular" label="Dataset" help="Dataset missing? See TIP below"/>
-    <param name="col1" type="data_column" data_ref="input" numerical="True" label="Numerical column for x axis" />
-    <param name="col2" type="data_column" data_ref="input" numerical="True" label="Numerical column for y axis" />
-    <param name="title" size="30" type="text" value="Scatterplot" label="Plot title"/>
-    <param name="xlab" size="30" type="text" value="V1" label="Label for x axis"/>
-    <param name="ylab" size="30" type="text" value="V2" label="Label for y axis"/>
-  </inputs>
-  <outputs>
-    <data format="pdf" name="out_file1" />
-  </outputs>
-
-  <tests>
-    <test>
-      <param name="input" value="scatterplot_in1.tabular" ftype="tabular"/>
-      <param name="col1" value="2"/>
-      <param name="col2" value="3"/>
-      <param name="title" value="Scatterplot"/>
-      <param name="xlab" value="V1"/>
-      <param name="ylab" value="V2"/>
-      <output name="out_file1" file="scatterplot_out1.pdf" />
-    </test>
-  </tests>
-  <help>
-
-.. class:: infomark
-
-**TIP:** If your data is not TAB delimited, use *Text Manipulation-&gt;Convert*
-
------
-
-**Syntax**
-
-This tool creates a simple scatter plot of two numeric columns from the selected dataset.
-
-- All invalid, blank and comment lines in the dataset are skipped.  The number of skipped lines is displayed in the resulting history item.
-
-- **Plot title** The scatterplot title
-- **Label for x axis** and **Label for y axis** The labels for x and y axis of the scatterplot.
-
------
-
-**Example**
-
-- Input file::
-
-    1   68  4.1
-    2   71  4.6
-    3   62  3.8
-    4   75  4.4
-    5   58  3.2
-    6   60  3.1
-    7   67  3.8
-    8   68  4.1
-    9   71  4.3
-    10  69  3.7 
-
-- Create a simple scatterplot between the variables in column 2 and column 3 of the above dataset.
-
-.. image:: $PATH_TO_IMAGES/images/scatterplot.png
-
-</help>
-</tool>
--- a/short_reads_figure_score.py	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,258 +0,0 @@
-#!/usr/bin/env python
-"""
-boxplot:
-- box: first quartile and third quartile
-- line inside the box: median
-- outlier: more than 1.5 IQR above the third quartile or more than 1.5 IQR below the first quartile,
-           where IQR = third quartile - first quartile
-- The smallest/largest value that is not an outlier is connected to the box by a horizontal line.
-"""
-
-import os, sys, math, tempfile, re
-#from rpy import *
-import rpy2.robjects as robjects
-import rpy2.rlike.container as rlc
-import rpy2.rinterface as ri
-r = robjects.r
-
-assert sys.version_info[:2] >= ( 2, 4 )
-
-def stop_err( msg ):
-    sys.stderr.write( "%s\n" % msg )
-    sys.exit()
-
-def merge_to_20_datapoints( score ):
-    number_of_points = 20
-    read_length = len( score )
-    step = int( math.floor( ( read_length - 1 ) * 1.0 / number_of_points ) )
-    scores = []
-    point = 1
-    point_sum = 0
-    step_average = 0
-    score_points = 0
-    
-    for i in xrange( 1, read_length ):
-        if i < ( point * step ):
-            point_sum += int( score[i] )
-            step_average += 1
-        else:
-            point_avg = point_sum * 1.0 / step_average
-            scores.append( point_avg )
-            point += 1
-            point_sum = 0
-            step_average = 0                       
-    if step_average > 0:
-        point_avg = point_sum * 1.0 / step_average
-        scores.append( point_avg )
-    if len( scores ) > number_of_points:
-        last_avg = 0
-        for j in xrange( number_of_points - 1, len( scores ) ):
-            last_avg += scores[j]
-        last_avg = last_avg / ( len(scores) - number_of_points + 1 )
-    else:    
-        last_avg = scores[-1]
-    score_points = []
-    for k in range( number_of_points - 1 ):
-        score_points.append( scores[k] )
-    score_points.append( last_avg )
-    return score_points
-
-def __main__():
-
-    invalid_lines = 0
-
-    infile_score_name = sys.argv[1].strip()
-    outfile_R_name = sys.argv[2].strip()
-
-    infile_name = infile_score_name
-
-    # Determine tabular or fasta format within the first 100 lines
-    seq_method = None
-    data_type = None
-    for i, line in enumerate( file( infile_name ) ):
-        line = line.rstrip( '\r\n' )
-        if not line or line.startswith( '#' ):
-            continue
-        if data_type == None:
-            if line.startswith( '>' ):
-                data_type = 'fasta'
-                continue
-            elif len( line.split( '\t' ) ) > 0:
-                fields = line.split()
-                for score in fields:
-                    try:
-                        int( score )
-                        data_type = 'tabular'
-                        seq_method = 'solexa'
-                        break
-                    except:
-                        break
-        elif data_type == 'fasta':
-            fields = line.split()
-            for score in fields:
-                try: 
-                    int( score )
-                    seq_method = '454'
-                    break
-                except:
-                    break
-        if i == 100:
-            break
-
-    if data_type is None:
-        stop_err( 'This tool can only use fasta data or tabular data.' ) 
-    if seq_method is None:
-        stop_err( 'Invalid data for fasta format.')
-
-    # Determine fixed length or variable length within the first 100 lines
-    read_length = 0
-    variable_length = False
-    if seq_method == 'solexa':
-        for i, line in enumerate( file( infile_name ) ):
-            line = line.rstrip( '\r\n' )
-            if not line or line.startswith( '#' ):
-                continue
-            scores = line.split('\t')
-            if read_length == 0:
-                read_length = len( scores )
-            if read_length != len( scores ):
-                variable_length = True
-                break
-            if i == 100:
-                break
-    elif seq_method == '454':
-        score = ''
-        for i, line in enumerate( file( infile_name ) ):
-            line = line.rstrip( '\r\n' )
-            if not line or line.startswith( '#' ):
-                continue
-            if line.startswith( '>' ):
-                if len( score ) > 0:
-                    score = score.split()
-                    if read_length == 0:
-                        read_length = len( score )
-                    if read_length != len( score ):
-                        variable_length = True
-                        break
-                score = ''
-            else:
-                score = score + ' ' + line
-            if i == 100:
-                break
-
-    if variable_length:
-        number_of_points = 20
-    else:
-        number_of_points = read_length
-    read_length_threshold = 100 # minimal read length for 454 file
-    score_points = []   
-    score_matrix = []
-    invalid_scores = 0   
-
-    if seq_method == 'solexa':
-        for i, line in enumerate( open( infile_name ) ):
-            line = line.rstrip( '\r\n' )
-            if not line or line.startswith( '#' ):
-                continue
-            tmp_array = []
-            scores = line.split( '\t' )
-            for bases in scores:
-                nuc_errors = bases.split()
-                try:
-                    nuc_errors[0] = int( nuc_errors[0] )
-                    nuc_errors[1] = int( nuc_errors[1] )
-                    nuc_errors[2] = int( nuc_errors[2] )
-                    nuc_errors[3] = int( nuc_errors[3] )
-                    big = max( nuc_errors )
-                except:
-                    #print 'Invalid numbers in the file. Skipped.'
-                    invalid_scores += 1
-                    big = 0
-                tmp_array.append( big )                        
-            score_points.append( tmp_array )
-    elif seq_method == '454':
-        # the final fasta record is processed after the loop below
-        score = ''
-        for i, line in enumerate( open( infile_name ) ):
-            line = line.rstrip( '\r\n' )
-            if not line or line.startswith( '#' ):
-                continue
-            if line.startswith( '>' ):
-                if len( score ) > 0:
-                    score = ['0'] + score.split()
-                    read_length = len( score )
-                    tmp_array = []
-                    if not variable_length:
-                        score.pop(0)
-                        score_points.append( score )
-                        tmp_array = score
-                    elif read_length > read_length_threshold:
-                        score_points_tmp = merge_to_20_datapoints( score )
-                        score_points.append( score_points_tmp )
-                        tmp_array = score_points_tmp
-                score = ''
-            else:
-                score = "%s %s" % ( score, line )
-        if len( score ) > 0:
-            score = ['0'] + score.split()
-            read_length = len( score )
-            if not variable_length:
-                score.pop(0)
-                score_points.append( score )
-            elif read_length > read_length_threshold:
-                score_points_tmp = merge_to_20_datapoints( score )
-                score_points.append( score_points_tmp )
-                tmp_array = score_points_tmp
-
-    # reverse the matrix, for R
-    for i in range( number_of_points - 1 ):
-        tmp_array = []
-        for j in range( len( score_points ) ):
-            try:
-                tmp_array.append( int( score_points[j][i] ) )
-            except:
-                invalid_lines += 1
-        score_matrix.append( tmp_array )
-
-    # generate pdf figures
-    #outfile_R_pdf = outfile_R_name 
-    #r.pdf( outfile_R_pdf )
-    outfile_R_png = outfile_R_name
-    print 'Writing bitmap'
-    r.bitmap( outfile_R_png )
-    
-    title = "boxplot of quality scores"
-    empty_score_matrix_columns = 0
-    for i, subset in enumerate( score_matrix ):
-        if not subset:
-            empty_score_matrix_columns += 1
-            score_matrix[i] = [0]
-            
-    if not variable_length:
-        print 'Creating fixed boxplot '
-        r.boxplot( score_matrix, xlab="location in read length", main=title )
-    else:
-        print 'Creating variable boxplot'
-        r.boxplot( score_matrix, xlab="position within read (% of total length)", xaxt="n", main=title )
-        x_old_range = []
-        x_new_range = []
-        step = read_length_threshold / number_of_points 
-        for i in xrange( 0, read_length_threshold, step ):
-            x_old_range.append( ( i / step ) )
-            x_new_range.append( i )
-        print 'Writing axis'
-        r.axis( 1, x_old_range, x_new_range )
-
-    print 'calling dev.off()'
-    r('dev.off()')
-
-    if invalid_scores > 0:
-        print 'Skipped %d invalid scores. ' % invalid_scores
-    if invalid_lines > 0:
-        print 'Skipped %d invalid lines. ' % invalid_lines
-    if empty_score_matrix_columns > 0:
-        print '%d missing scores in score_matrix. ' % empty_score_matrix_columns
-
-    #r.quit(save = "no")
-
-if __name__=="__main__":__main__()
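
For reference, the boxplot statistics described in the docstring at the top of this script can be sketched in pure Python as below. This is illustrative only: the deleted script delegates the actual computation to R's boxplot(), and the quartile rule shown here (medians of the lower and upper halves) is one common convention rather than R's default quantile algorithm::

    def quartiles(values):
        """Return (Q1, median, Q3) using medians of the lower/upper halves."""
        # assumes at least two values
        data = sorted(values)
        n = len(data)

        def median(xs):
            m = len(xs)
            mid = m // 2
            return xs[mid] if m % 2 else (xs[mid - 1] + xs[mid]) / 2.0

        q1 = median(data[:n // 2])        # lower half, median excluded for odd n
        q2 = median(data)
        q3 = median(data[(n + 1) // 2:])  # upper half, median excluded for odd n
        return q1, q2, q3

    def whisker_bounds(values):
        """Values outside these bounds are drawn as outliers (1.5 * IQR rule)."""
        q1, _, q3 = quartiles(values)
        iqr = q3 - q1
        return q1 - 1.5 * iqr, q3 + 1.5 * iqr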
--- a/short_reads_figure_score.xml	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,87 +0,0 @@
-<tool id="quality_score_distribution" name="Build base quality distribution" version="1.1.0">
-<description></description>
-<expand macro="requirements" />
-    <macros>
-        <import>statistic_tools_macros.xml</import>
-    </macros>
-<command interpreter="python">short_reads_figure_score.py $input1 $output1 </command>
-
-<inputs>
-<page>
-    <param name="input1" type="data" format="qualsolexa, qual454" label="Quality score file" help="No dataset? Read tip below"/>
-</page>
-</inputs>
-
-<outputs>
-  	<data name="output1" format="png" />
-</outputs> 
-<tests>
-	<test>
-		<param name="input1" value="solexa.qual" ftype="qualsolexa" />
-  		<output name="output1" file="solexaScore.png" ftype="png" />
-	</test>
-	<test>
-		<param name="input1" value="454.qual" ftype="qual454" />
-		<output name="output1" file="454Score.png" ftype="png" />
-	</test>
-</tests>
-<help>
-
-.. class:: warningmark
-
-To use this tool, your dataset needs to be in the *Quality Score* format. Click the pencil icon next to your dataset to set the datatype to *Quality Score* (see below for examples).
-
------
-
-**What it does**
-
-This tool takes quality files generated by Roche (454), Illumina (Solexa), or ABI SOLiD machines and builds a graph showing the score distribution, like the one below. Such a graph allows you to perform an initial evaluation of data quality in a single pass. (A small sketch of reading the 454-style quality format follows this tool definition.)
-
------
-
-**Examples of Quality Data**
-
-Roche (454) or ABI SOLiD data::
-
-	&gt;seq1
-	23 33 34 25 28 28 28 32 23 34 27 4 28 28 31 21 28
-
-Illumina (Solexa) data::
-
- 	-40 -40 40 -40	 -40 -40 -40 40	 
- 
------
-
-**Output example**
-
-Quality scores are summarized as boxplot (Roche 454 FLX data):
-
-.. image:: $PATH_TO_IMAGES/short_reads_boxplot.png
-
-where the **X-axis** is the coordinate along the read and the **Y-axis** is the quality score adjusted to comply with the Phred score metric. Units on the X-axis depend on whether your data comes from Roche (454) or from Illumina (Solexa) and ABI SOLiD machines:
-
-  - For Roche (454), the X-axis (shown above) indicates the **relative** position (in %) within reads, as this technology produces reads of different lengths;
-  - For Illumina (Solexa) and ABI SOLiD, the X-axis shows the **absolute** position in nucleotides within reads.
-  
-Every box on the plot shows the following values::
-
-       o     &lt;---- Outliers
-       o
-      -+-    &lt;---- Upper Extreme Value that is no more than
-       |           1.5 box lengths (1.5 IQR) away from the box
-       |
-    +--+--+  &lt;---- Upper Quartile
-    |     |
-    +-----+  &lt;---- Median
-    |     |
-    +--+--+  &lt;---- Lower Quartile 
-       |
-       |
-      -+-    &lt;---- Lower Extreme Value that is no more than
-                   1.5 box lengths (1.5 IQR) away from the box
-       o     &lt;---- Outlier
- 
- 
-     
-</help>
-</tool>
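
The help above shows what the two accepted quality formats look like. A minimal sketch of how a Roche 454 style .qual record (a '>' header line followed by one or more wrapped lines of space-separated integer scores) can be read is given below; this illustrates the format only, it is not the tool's own parser, and the function name is made up::

    def read_qual_records(path):
        """Yield (name, scores) pairs from a Roche 454 style .qual file."""
        name, scores = None, []
        for line in open(path):
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            if line.startswith('>'):
                if name is not None:
                    yield name, scores
                # the record name is the first token of the header line
                name, scores = line[1:].split()[0], []
            else:
                scores.extend(int(v) for v in line.split())
        if name is not None:
            yield name, scores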
--- a/statistic_tools_macros.xml	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-<macros>
-  <xml name="requirements">
-    <requirements>
-        <requirement type="package" version="3.0.1">R_3_0_1</requirement>
-        <requirement type="package" version="2.2.3.6">rpy2</requirement>
-        <requirement type="package" version="1.7.1">numpy</requirement>
-        <requirement type="package" version="5.9">ncurses</requirement>
-        <requirement type="package" version="6.2">readline</requirement>
-        <requirement type="package" version="1.0">R_statistic_tools</requirement>
-    </requirements>
-  </xml>
-</macros>
--- a/test-data/1.bed	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,65 +0,0 @@
-chr1	147962192	147962580	CCDS989.1_cds_0_0_chr1_147962193_r	0	-
-chr1	147984545	147984630	CCDS990.1_cds_0_0_chr1_147984546_f	0	+
-chr1	148078400	148078582	CCDS993.1_cds_0_0_chr1_148078401_r	0	-
-chr1	148185136	148185276	CCDS996.1_cds_0_0_chr1_148185137_f	0	+
-chr10	55251623	55253124	CCDS7248.1_cds_0_0_chr10_55251624_r	0	-
-chr11	116124407	116124501	CCDS8374.1_cds_0_0_chr11_116124408_r	0	-
-chr11	116206508	116206563	CCDS8377.1_cds_0_0_chr11_116206509_f	0	+
-chr11	116211733	116212337	CCDS8378.1_cds_0_0_chr11_116211734_r	0	-
-chr11	1812377	1812407	CCDS7726.1_cds_0_0_chr11_1812378_f	0	+
-chr12	38440094	38440321	CCDS8736.1_cds_0_0_chr12_38440095_r	0	-
-chr13	112381694	112381953	CCDS9526.1_cds_0_0_chr13_112381695_f	0	+
-chr14	98710240	98712285	CCDS9949.1_cds_0_0_chr14_98710241_r	0	-
-chr15	41486872	41487060	CCDS10096.1_cds_0_0_chr15_41486873_r	0	-
-chr15	41673708	41673857	CCDS10097.1_cds_0_0_chr15_41673709_f	0	+
-chr15	41679161	41679250	CCDS10098.1_cds_0_0_chr15_41679162_r	0	-
-chr15	41826029	41826196	CCDS10101.1_cds_0_0_chr15_41826030_f	0	+
-chr16	142908	143003	CCDS10397.1_cds_0_0_chr16_142909_f	0	+
-chr16	179963	180135	CCDS10401.1_cds_0_0_chr16_179964_r	0	-
-chr16	244413	244681	CCDS10402.1_cds_0_0_chr16_244414_f	0	+
-chr16	259268	259383	CCDS10403.1_cds_0_0_chr16_259269_r	0	-
-chr18	23786114	23786321	CCDS11891.1_cds_0_0_chr18_23786115_r	0	-
-chr18	59406881	59407046	CCDS11985.1_cds_0_0_chr18_59406882_f	0	+
-chr18	59455932	59456337	CCDS11986.1_cds_0_0_chr18_59455933_r	0	-
-chr18	59600586	59600754	CCDS11988.1_cds_0_0_chr18_59600587_f	0	+
-chr19	59068595	59069564	CCDS12866.1_cds_0_0_chr19_59068596_f	0	+
-chr19	59236026	59236146	CCDS12872.1_cds_0_0_chr19_59236027_r	0	-
-chr19	59297998	59298008	CCDS12877.1_cds_0_0_chr19_59297999_f	0	+
-chr19	59302168	59302288	CCDS12878.1_cds_0_0_chr19_59302169_r	0	-
-chr2	118288583	118288668	CCDS2120.1_cds_0_0_chr2_118288584_f	0	+
-chr2	118394148	118394202	CCDS2121.1_cds_0_0_chr2_118394149_r	0	-
-chr2	220190202	220190242	CCDS2441.1_cds_0_0_chr2_220190203_f	0	+
-chr2	220229609	220230869	CCDS2443.1_cds_0_0_chr2_220229610_r	0	-
-chr20	33330413	33330423	CCDS13249.1_cds_0_0_chr20_33330414_r	0	-
-chr20	33513606	33513792	CCDS13255.1_cds_0_0_chr20_33513607_f	0	+
-chr20	33579500	33579527	CCDS13256.1_cds_0_0_chr20_33579501_r	0	-
-chr20	33593260	33593348	CCDS13257.1_cds_0_0_chr20_33593261_f	0	+
-chr21	32707032	32707192	CCDS13614.1_cds_0_0_chr21_32707033_f	0	+
-chr21	32869641	32870022	CCDS13615.1_cds_0_0_chr21_32869642_r	0	-
-chr21	33321040	33322012	CCDS13620.1_cds_0_0_chr21_33321041_f	0	+
-chr21	33744994	33745040	CCDS13625.1_cds_0_0_chr21_33744995_r	0	-
-chr22	30120223	30120265	CCDS13897.1_cds_0_0_chr22_30120224_f	0	+
-chr22	30160419	30160661	CCDS13898.1_cds_0_0_chr22_30160420_r	0	-
-chr22	30665273	30665360	CCDS13901.1_cds_0_0_chr22_30665274_f	0	+
-chr22	30939054	30939266	CCDS13903.1_cds_0_0_chr22_30939055_r	0	-
-chr5	131424298	131424460	CCDS4149.1_cds_0_0_chr5_131424299_f	0	+
-chr5	131556601	131556672	CCDS4151.1_cds_0_0_chr5_131556602_r	0	-
-chr5	131621326	131621419	CCDS4152.1_cds_0_0_chr5_131621327_f	0	+
-chr5	131847541	131847666	CCDS4155.1_cds_0_0_chr5_131847542_r	0	-
-chr6	108299600	108299744	CCDS5061.1_cds_0_0_chr6_108299601_r	0	-
-chr6	108594662	108594687	CCDS5063.1_cds_0_0_chr6_108594663_f	0	+
-chr6	108640045	108640151	CCDS5064.1_cds_0_0_chr6_108640046_r	0	-
-chr6	108722976	108723115	CCDS5067.1_cds_0_0_chr6_108722977_f	0	+
-chr7	113660517	113660685	CCDS5760.1_cds_0_0_chr7_113660518_f	0	+
-chr7	116512159	116512389	CCDS5771.1_cds_0_0_chr7_116512160_r	0	-
-chr7	116714099	116714152	CCDS5773.1_cds_0_0_chr7_116714100_f	0	+
-chr7	116945541	116945787	CCDS5774.1_cds_0_0_chr7_116945542_r	0	-
-chr8	118881131	118881317	CCDS6324.1_cds_0_0_chr8_118881132_r	0	-
-chr9	128764156	128764189	CCDS6914.1_cds_0_0_chr9_128764157_f	0	+
-chr9	128787519	128789136	CCDS6915.1_cds_0_0_chr9_128787520_r	0	-
-chr9	128882427	128882523	CCDS6917.1_cds_0_0_chr9_128882428_f	0	+
-chr9	128937229	128937445	CCDS6919.1_cds_0_0_chr9_128937230_r	0	-
-chrX	122745047	122745924	CCDS14606.1_cds_0_0_chrX_122745048_f	0	+
-chrX	152648964	152649196	CCDS14733.1_cds_0_0_chrX_152648965_r	0	-
-chrX	152691446	152691471	CCDS14735.1_cds_0_0_chrX_152691447_f	0	+
-chrX	152694029	152694263	CCDS14736.1_cds_0_0_chrX_152694030_r	0	-
--- a/test-data/454.qual	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,52 +0,0 @@
->EYKX4VC01B65GS length=54 xy=0784_1754 region=1 run=R_2007_11_07_16_15_57_
-33 23 34 25 28 28 28 32 23 34 27 4 28 28 31 21 28 27 27 28 28 28 28 28 28 28 33 23 28 33 24 36 27 31 21 28 28 33 26 33 24 27 28 28 28 28 28 28 28 32 23 28 34 25
->EYKX4VC01BNCSP length=187 xy=0558_3831 region=1 run=R_2007_11_07_16_15_57_
-27 35 26 25 37 28 37 28 25 28 27 36 27 28 36 27 28 28 27 36 27 30 19 27 28 36 28 23 36 27 27 28 27 27 28 37 29 27 26 27 24 24 36 27 26 28 36 28 24 25 21 28 24 28 26 34 25 26 43 36
-22 9 23 19 28 28 28 28 26 28 39 32 12 27 36 28 28 26 37 28 28 26 28 28 28 27 28 26 36 27 27 27 36 28 27 27 28 28 36 27 36 28 39 32 12 35 28 26 37 29 28 28 28 28 37 29 28 36 28 35
-26 27 37 29 28 26 28 36 28 26 24 38 32 11 28 26 32 24 36 32 18 2 27 25 33 26 32 28 6 18 22 26 17 15 14 28 20 8 22 21 14 22 26 16 26 16 28 20 22 27 18 27 18 27 28 27 20 25 34 27
-27 33 25 34 28 8 26
->EYKX4VC01CD9FT length=115 xy=0865_1719 region=1 run=R_2007_11_07_16_15_57_
-35 24 16 9 2 27 39 33 13 36 27 36 27 28 28 28 27 28 28 33 23 37 28 28 28 36 27 28 28 28 28 36 27 28 28 28 27 28 28 28 28 28 28 28 37 28 28 28 28 37 28 26 28 36 27 28 28 28 28 28
-28 28 28 37 28 28 35 26 27 28 28 27 36 27 35 25 32 22 28 28 28 28 28 28 28 28 28 34 25 36 27 34 25 28 27 28 28 36 27 28 35 29 6 28 28 28 37 30 8 33 24 28 27 27 27
->EYKX4VC01B8FW0 length=95 xy=0799_0514 region=1 run=R_2007_11_07_16_15_57_
-28 40 34 15 35 28 6 27 33 23 34 24 32 22 28 28 28 41 34 17 28 28 28 37 30 9 28 28 28 28 28 27 28 37 30 8 28 28 27 28 35 26 27 35 26 28 37 30 9 28 27 28 28 28 34 25 28 28 32 22
-26 28 28 28 28 27 43 36 23 12 1 28 21 28 27 16 28 32 23 27 28 27 28 28 27 28 28 28 32 22 28 26 26 27 28
->EYKX4VC01BCGYW length=115 xy=0434_3926 region=1 run=R_2007_11_07_16_15_57_
-28 6 26 15 27 28 37 28 41 35 17 28 21 28 23 21 27 36 27 24 36 28 40 34 14 22 25 28 24 27 28 37 28 26 28 27 27 28 28 28 28 27 43 36 22 8 28 26 28 27 26 14 28 25 20 28 34 24 25 40
-33 18 1 19 27 16 36 28 36 28 21 27 25 41 34 16 22 28 37 29 26 26 35 27 28 26 41 34 16 28 28 27 28 37 29 25 43 36 23 12 1 11 39 32 12 28 17 20 28 28 17 36 29 7 24
->EYKX4VC01AZXC6 length=116 xy=0292_0280 region=1 run=R_2007_11_07_16_15_57_
-35 24 17 11 5 26 24 40 33 14 34 25 33 24 28 27 27 26 28 28 33 24 36 27 28 27 36 27 27 28 27 28 35 26 27 27 28 27 28 28 28 28 28 27 28 36 27 28 28 28 37 28 27 26 35 26 27 28 28 28
-27 28 28 28 37 29 28 35 26 28 27 28 28 35 26 35 26 31 21 28 28 28 28 28 28 28 28 28 34 25 35 26 35 26 28 28 28 28 37 28 27 37 30 9 28 28 28 37 30 9 33 23 28 28 28 27
->EYKX4VC01CATH5 length=82 xy=0826_0843 region=1 run=R_2007_11_07_16_15_57_
-28 28 41 35 17 33 24 28 27 28 28 28 36 27 28 28 28 36 27 34 25 27 28 28 28 28 28 28 28 37 28 27 35 25 28 26 28 27 28 28 24 36 27 26 26 37 30 9 28 28 28 28 28 25 25 35 26 26 27 35
-25 28 36 28 28 28 31 21 25 13 32 22 41 34 17 0 22 10 32 23 24 28
->EYKX4VC01BCEIV length=47 xy=0434_0757 region=1 run=R_2007_11_07_16_15_57_
-28 28 26 26 28 26 28 27 28 25 32 22 27 26 25 27 28 28 27 26 27 28 32 23 28 28 34 25 27 22 26 26 27 28 17 28 28 28 28 28 28 34 24 35 25 28 28
->EYKX4VC01BWERM length=83 xy=0662_0304 region=1 run=R_2007_11_07_16_15_57_
-28 36 27 28 28 35 26 34 24 28 28 28 32 22 28 28 32 23 28 28 32 23 27 28 27 34 27 3 27 43 36 22 9 35 26 37 29 26 27 32 23 28 28 27 28 36 27 28 36 27 28 28 28 28 28 35 26 34 25 28
-36 30 8 28 28 28 28 27 27 28 28 28 28 37 28 28 36 27 28 39 33 13 27
->EYKX4VC01BT2O7 length=69 xy=0635_1945 region=1 run=R_2007_11_07_16_15_57_
-28 28 28 28 41 34 17 27 28 31 21 28 27 32 23 36 27 28 28 33 24 28 27 28 28 27 32 22 28 34 27 3 27 43 36 22 8 27 28 34 27 3 28 28 28 28 28 28 28 33 23 28 28 28 28 34 24 28 34 24
-28 28 27 36 27 28 37 30 9
->EYKX4VC01BO0UO length=222 xy=0577_3838 region=1 run=R_2007_11_07_16_15_57_
-27 27 28 36 27 28 39 33 13 28 28 28 27 28 37 28 28 41 35 17 28 28 28 27 28 26 36 27 28 36 27 27 28 27 35 26 27 26 28 28 28 28 28 36 27 28 28 38 31 10 24 27 27 27 27 27 28 28 37 28
-27 28 35 26 28 28 36 27 28 28 27 28 28 28 28 28 28 27 36 28 27 36 27 37 28 27 28 27 28 28 28 27 28 28 27 36 27 26 27 28 28 28 28 28 37 28 37 29 25 28 36 27 28 27 28 34 27 26 24 34
-28 28 28 31 23 27 28 34 27 28 37 33 14 23 37 33 15 38 34 23 13 2 26 24 28 26 35 31 12 36 32 14 31 22 24 28 27 33 26 26 27 27 27 27 28 27 35 30 11 26 27 35 31 12 28 27 26 27 36 32
-14 27 34 27 37 33 15 27 27 34 28 27 23 27 35 31 11 27 28 28 26 34 26 27 28 34 28 28 28 39 35 22 9 27 27 23 27 35 28 34 27 27
->EYKX4VC01CBCPK length=83 xy=0832_1158 region=1 run=R_2007_11_07_16_15_57_
-28 35 26 28 28 35 26 35 26 28 28 28 34 24 28 28 35 25 28 28 34 25 28 28 27 35 28 5 28 43 36 22 9 35 26 37 28 28 27 32 23 27 28 28 28 36 27 28 36 27 28 28 28 28 28 36 27 35 25 28
-37 30 9 28 28 28 28 28 28 28 28 28 28 36 27 28 35 26 28 38 31 10 28
->EYKX4VC01B474S length=54 xy=0762_2010 region=1 run=R_2007_11_07_16_15_57_
-28 28 28 28 27 43 36 23 11 33 23 27 25 26 28 28 39 33 13 28 27 29 18 28 26 27 26 28 27 28 36 27 26 28 28 28 28 25 28 41 34 17 24 36 28 37 28 28 26 28 17 27 28 26
->EYKX4VC01BB4QL length=57 xy=0431_0363 region=1 run=R_2007_11_07_16_15_57_
-36 24 15 7 28 33 26 28 27 27 26 29 18 28 35 26 28 26 28 25 27 27 28 36 27 41 34 20 5 27 36 28 28 28 27 28 32 22 34 25 28 28 28 28 26 28 27 28 36 27 40 34 18 3 28 37 28
->EYKX4VC01BJ37M length=64 xy=0522_0192 region=1 run=R_2007_11_07_16_15_57_
-28 26 28 28 28 28 28 28 27 28 28 27 27 36 27 37 28 28 28 28 28 28 28 28 28 27 36 29 8 39 33 13 28 36 27 41 34 20 5 28 28 28 27 36 28 28 28 28 27 28 28 28 28 27 37 30 8 27 28 26
-33 26 35 26
->EYKX4VC01BV9R8 length=54 xy=0660_2038 region=1 run=R_2007_11_07_16_15_57_
-41 34 19 4 27 28 28 30 20 28 28 34 27 4 28 28 27 34 25 27 28 28 28 28 28 28 28 28 28 28 28 28 38 31 11 27 28 28 28 28 37 28 40 33 18 2 24 15 25 24 12 26 34 27
->EYKX4VC01CEPP8 length=60 xy=0870_2350 region=1 run=R_2007_11_07_16_15_57_
-26 21 40 34 17 26 36 29 8 26 28 22 26 28 28 20 24 28 34 26 23 11 28 28 26 27 26 40 33 14 27 35 26 26 23 10 28 31 21 28 23 27 23 28 36 27 26 36 28 27 36 28 28 27 25 27 27 27 26 28
->EYKX4VC01BTLME length=78 xy=0630_0292 region=1 run=R_2007_11_07_16_15_57_
-36 27 25 24 33 23 28 26 28 28 28 28 27 27 26 36 27 36 28 28 28 36 27 28 28 27 27 36 28 28 37 29 28 26 36 27 27 28 27 27 28 27 36 28 28 28 36 27 36 27 28 36 28 25 36 28 28 28 27 28
-39 33 13 28 28 37 28 28 41 34 16 28 28 28 26 36 28 24
Binary file test-data/454Score.png has changed
--- a/test-data/cca_out1.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,22 +0,0 @@
-#Component	1	2
-#Correlation	0.9409	0.1311
-#F-statistic	144.4	2.57
-#p-value	6.213e-68	0.1111
-#X-Coefficients	1	2
-c3	1.507	-3.378
-c4	-0.5372	3.659
-#Y-Coefficients	1	2
-c1	6.35	3.379
-c2	-2.66	6.67
-#X-Loadings	1	2
-c3	0.9894	0.1452
-c4	0.9133	0.4073
-#Y-Loadings	1	2
-c1	0.9289	0.3704
-c2	-0.4698	0.8828
-#X-CrossLoadings	1	2
-c3	0.9309	0.01904
-c4	0.8593	0.05339
-#Y-CrossLoadings	1	2
-c1	0.874	0.04855
-c2	-0.442	0.1157
Binary file test-data/cca_out2.pdf has changed
--- a/test-data/cor.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,21 +0,0 @@
-Person	Height	Self Esteem
-1	68	4.1
-2	71	4.6
-3	62	3.8
-4	75	4.4
-5	58	3.2
-6	60	3.1
-7	67	3.8
-8	68	4.1
-9	71	4.3
-1	69	3.7
-1	68	3.5
-1	67	3.2
-1	63	3.7
-1	62	3.3
-1	60	3.4
-1	63	4.0
-1	65	4.1
-1	67	3.8
-1	63	3.4
-2	61	3.6
\ No newline at end of file
--- a/test-data/cor_out.txt	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +0,0 @@
-1.0	0.730635686279
-0.730635686279	1.0
--- a/test-data/gsummary_out1.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +0,0 @@
-#sum	mean	stdev	0%	25%	50%	75%	100%
-5.55921e+09	8.55264e+07	5.37839e+07	142908	3.35933e+07	1.083e+08	1.28764e+08	2.2023e+08
--- a/test-data/histogram_in1.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-1	68	4.1
-2	71	4.6
-3	62	3.8
-4	75	4.4
-5	58	3.2
-6	60	3.1
-7	67	3.8
-8	68	4.1
-9	71	4.3
-10	69	3.7
Binary file test-data/histogram_out1.pdf has changed
Binary file test-data/histooutold.pdf has changed (R graphics output: histogram titled "Histogram", x axis "V1", y axis "Density")
--- a/test-data/iris.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,151 +0,0 @@
-5.1	3.5	1.4	0.2	Iris-setosa
-4.9	3.0	1.4	0.2	Iris-setosa
-4.7	3.2	1.3	0.2	Iris-setosa
-4.6	3.1	1.5	0.2	Iris-setosa
-5.0	3.6	1.4	0.2	Iris-setosa
-5.4	3.9	1.7	0.4	Iris-setosa
-4.6	3.4	1.4	0.3	Iris-setosa
-5.0	3.4	1.5	0.2	Iris-setosa
-4.4	2.9	1.4	0.2	Iris-setosa
-4.9	3.1	1.5	0.1	Iris-setosa
-5.4	3.7	1.5	0.2	Iris-setosa
-4.8	3.4	1.6	0.2	Iris-setosa
-4.8	3.0	1.4	0.1	Iris-setosa
-4.3	3.0	1.1	0.1	Iris-setosa
-5.8	4.0	1.2	0.2	Iris-setosa
-5.7	4.4	1.5	0.4	Iris-setosa
-5.4	3.9	1.3	0.4	Iris-setosa
-5.1	3.5	1.4	0.3	Iris-setosa
-5.7	3.8	1.7	0.3	Iris-setosa
-5.1	3.8	1.5	0.3	Iris-setosa
-5.4	3.4	1.7	0.2	Iris-setosa
-5.1	3.7	1.5	0.4	Iris-setosa
-4.6	3.6	1.0	0.2	Iris-setosa
-5.1	3.3	1.7	0.5	Iris-setosa
-4.8	3.4	1.9	0.2	Iris-setosa
-5.0	3.0	1.6	0.2	Iris-setosa
-5.0	3.4	1.6	0.4	Iris-setosa
-5.2	3.5	1.5	0.2	Iris-setosa
-5.2	3.4	1.4	0.2	Iris-setosa
-4.7	3.2	1.6	0.2	Iris-setosa
-4.8	3.1	1.6	0.2	Iris-setosa
-5.4	3.4	1.5	0.4	Iris-setosa
-5.2	4.1	1.5	0.1	Iris-setosa
-5.5	4.2	1.4	0.2	Iris-setosa
-4.9	3.1	1.5	0.1	Iris-setosa
-5.0	3.2	1.2	0.2	Iris-setosa
-5.5	3.5	1.3	0.2	Iris-setosa
-4.9	3.1	1.5	0.1	Iris-setosa
-4.4	3.0	1.3	0.2	Iris-setosa
-5.1	3.4	1.5	0.2	Iris-setosa
-5.0	3.5	1.3	0.3	Iris-setosa
-4.5	2.3	1.3	0.3	Iris-setosa
-4.4	3.2	1.3	0.2	Iris-setosa
-5.0	3.5	1.6	0.6	Iris-setosa
-5.1	3.8	1.9	0.4	Iris-setosa
-4.8	3.0	1.4	0.3	Iris-setosa
-5.1	3.8	1.6	0.2	Iris-setosa
-4.6	3.2	1.4	0.2	Iris-setosa
-5.3	3.7	1.5	0.2	Iris-setosa
-5.0	3.3	1.4	0.2	Iris-setosa
-7.0	3.2	4.7	1.4	Iris-versicolor
-6.4	3.2	4.5	1.5	Iris-versicolor
-6.9	3.1	4.9	1.5	Iris-versicolor
-5.5	2.3	4.0	1.3	Iris-versicolor
-6.5	2.8	4.6	1.5	Iris-versicolor
-5.7	2.8	4.5	1.3	Iris-versicolor
-6.3	3.3	4.7	1.6	Iris-versicolor
-4.9	2.4	3.3	1.0	Iris-versicolor
-6.6	2.9	4.6	1.3	Iris-versicolor
-5.2	2.7	3.9	1.4	Iris-versicolor
-5.0	2.0	3.5	1.0	Iris-versicolor
-5.9	3.0	4.2	1.5	Iris-versicolor
-6.0	2.2	4.0	1.0	Iris-versicolor
-6.1	2.9	4.7	1.4	Iris-versicolor
-5.6	2.9	3.6	1.3	Iris-versicolor
-6.7	3.1	4.4	1.4	Iris-versicolor
-5.6	3.0	4.5	1.5	Iris-versicolor
-5.8	2.7	4.1	1.0	Iris-versicolor
-6.2	2.2	4.5	1.5	Iris-versicolor
-5.6	2.5	3.9	1.1	Iris-versicolor
-5.9	3.2	4.8	1.8	Iris-versicolor
-6.1	2.8	4.0	1.3	Iris-versicolor
-6.3	2.5	4.9	1.5	Iris-versicolor
-6.1	2.8	4.7	1.2	Iris-versicolor
-6.4	2.9	4.3	1.3	Iris-versicolor
-6.6	3.0	4.4	1.4	Iris-versicolor
-6.8	2.8	4.8	1.4	Iris-versicolor
-6.7	3.0	5.0	1.7	Iris-versicolor
-6.0	2.9	4.5	1.5	Iris-versicolor
-5.7	2.6	3.5	1.0	Iris-versicolor
-5.5	2.4	3.8	1.1	Iris-versicolor
-5.5	2.4	3.7	1.0	Iris-versicolor
-5.8	2.7	3.9	1.2	Iris-versicolor
-6.0	2.7	5.1	1.6	Iris-versicolor
-5.4	3.0	4.5	1.5	Iris-versicolor
-6.0	3.4	4.5	1.6	Iris-versicolor
-6.7	3.1	4.7	1.5	Iris-versicolor
-6.3	2.3	4.4	1.3	Iris-versicolor
-5.6	3.0	4.1	1.3	Iris-versicolor
-5.5	2.5	4.0	1.3	Iris-versicolor
-5.5	2.6	4.4	1.2	Iris-versicolor
-6.1	3.0	4.6	1.4	Iris-versicolor
-5.8	2.6	4.0	1.2	Iris-versicolor
-5.0	2.3	3.3	1.0	Iris-versicolor
-5.6	2.7	4.2	1.3	Iris-versicolor
-5.7	3.0	4.2	1.2	Iris-versicolor
-5.7	2.9	4.2	1.3	Iris-versicolor
-6.2	2.9	4.3	1.3	Iris-versicolor
-5.1	2.5	3.0	1.1	Iris-versicolor
-5.7	2.8	4.1	1.3	Iris-versicolor
-6.3	3.3	6.0	2.5	Iris-virginica
-5.8	2.7	5.1	1.9	Iris-virginica
-7.1	3.0	5.9	2.1	Iris-virginica
-6.3	2.9	5.6	1.8	Iris-virginica
-6.5	3.0	5.8	2.2	Iris-virginica
-7.6	3.0	6.6	2.1	Iris-virginica
-4.9	2.5	4.5	1.7	Iris-virginica
-7.3	2.9	6.3	1.8	Iris-virginica
-6.7	2.5	5.8	1.8	Iris-virginica
-7.2	3.6	6.1	2.5	Iris-virginica
-6.5	3.2	5.1	2.0	Iris-virginica
-6.4	2.7	5.3	1.9	Iris-virginica
-6.8	3.0	5.5	2.1	Iris-virginica
-5.7	2.5	5.0	2.0	Iris-virginica
-5.8	2.8	5.1	2.4	Iris-virginica
-6.4	3.2	5.3	2.3	Iris-virginica
-6.5	3.0	5.5	1.8	Iris-virginica
-7.7	3.8	6.7	2.2	Iris-virginica
-7.7	2.6	6.9	2.3	Iris-virginica
-6.0	2.2	5.0	1.5	Iris-virginica
-6.9	3.2	5.7	2.3	Iris-virginica
-5.6	2.8	4.9	2.0	Iris-virginica
-7.7	2.8	6.7	2.0	Iris-virginica
-6.3	2.7	4.9	1.8	Iris-virginica
-6.7	3.3	5.7	2.1	Iris-virginica
-7.2	3.2	6.0	1.8	Iris-virginica
-6.2	2.8	4.8	1.8	Iris-virginica
-6.1	3.0	4.9	1.8	Iris-virginica
-6.4	2.8	5.6	2.1	Iris-virginica
-7.2	3.0	5.8	1.6	Iris-virginica
-7.4	2.8	6.1	1.9	Iris-virginica
-7.9	3.8	6.4	2.0	Iris-virginica
-6.4	2.8	5.6	2.2	Iris-virginica
-6.3	2.8	5.1	1.5	Iris-virginica
-6.1	2.6	5.6	1.4	Iris-virginica
-7.7	3.0	6.1	2.3	Iris-virginica
-6.3	3.4	5.6	2.4	Iris-virginica
-6.4	3.1	5.5	1.8	Iris-virginica
-6.0	3.0	4.8	1.8	Iris-virginica
-6.9	3.1	5.4	2.1	Iris-virginica
-6.7	3.1	5.6	2.4	Iris-virginica
-6.9	3.1	5.1	2.3	Iris-virginica
-5.8	2.7	5.1	1.9	Iris-virginica
-6.8	3.2	5.9	2.3	Iris-virginica
-6.7	3.3	5.7	2.5	Iris-virginica
-6.7	3.0	5.2	2.3	Iris-virginica
-6.3	2.5	5.0	1.9	Iris-virginica
-6.5	3.0	5.2	2.0	Iris-virginica
-6.2	3.4	5.4	2.3	Iris-virginica
-5.9	3.0	5.1	1.8	Iris-virginica
-
--- a/test-data/kcca_out1.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,304 +0,0 @@
-\#Component\	1\	2\	3\	4
-\#Correlation\	\-?0\.99\d*\	\-?0\.99\d*\	\-?0\.93\d*\	\-?0\.93\d*
-\#Estimated\ X\-?coefficients\	1\	2\	3\	4
-1\	\-?0\.0015\d*\	\-?0\.0015\d*\	\-?0\.034\d*\	\-?0\.034\d*
-2\	\-?0\.001\d*\	\-?0\.001\d*\	\-?0\.032\d*\	\-?0\.032\d*
-3\	\-?0\.0001\d*\	\-?0\.0001\d*\	\-?0\.018\d*\	\-?0\.018\d*
-4\	\-?0\.0010\d*\	\-?0\.0010\d*\	\-?0\.0048\d*\	\-?0\.0048\d*
-5\	\-?0\.000\d*\	\-?0\.000\d*\	\-?0\.052\d*\	\-?0\.052\d*
-6\	\-?0\.0023\d*\	\-?0\.0023\d*\	\-?0\.019\d*\	\-?0\.019\d*
-7\	\-?0\.0014\d*\	\-?0\.0014\d*\	\-?0\.0057\d*\	\-?0\.0057\d*
-8\	\-?0\.0015\d*\	\-?0\.0015\d*\	\-?0\.038\d*\	\-?0\.038\d*
-9\	\-?0\.0036\d*\	\-?0\.0036\d*\	\-?0\.07\d*\	\-?0\.07\d*
-\d*\	\-?0\.0013\d*\	\-?0\.0013\d*\	\-?0\.031\d*\	\-?0\.031\d*
-\d*\	\-?0\.0008\d*\	\-?0\.0008\d*\	\-?0\.0013\d*\	\-?0\.0013\d*
-\d*\	\-?0\.00\d*\	\-?0\.00\d*\	\-?0\.039\d*\	\-?0\.039\d*
-\d*\	\-?0\.00037\d*\	\-?0\.00037\d*\	\-?0\.029\d*\	\-?0\.029\d*
-\d*\	\-?0\.0042\d*\	\-?0\.0042\d*\	\-?0\.13\d*\	\-?0\.13\d*
-\d*\	\-?0\.0044\d*\	\-?0\.0044\d*\	\-?0\.021\d*\	\-?0\.021\d*
-\d*\	\-?0\.015\d*\	\-?0\.015\d*\	\-?0\.041\d*\	\-?0\.041\d*
-\d*\	\-?0\.0023\d*\	\-?0\.0023\d*\	\-?0\.019\d*\	\-?0\.019\d*
-\d*\	\-?0\.0015\d*\	\-?0\.0015\d*\	\-?0\.034\d*\	\-?0\.034\d*
-\d*\	\-?0\.00047\d*\	\-?0\.00047\d*\	\-?0\.033\d*\	\-?0\.033\d*
-\d*\	\-?0\.0017\d*\	\-?0\.0017\d*\	\-?0\.059\d*\	\-?0\.059\d*
-\d*\	\-?0\.0033\d*\	\-?0\.0033\d*\	\-?0\.021\d*\	\-?0\.021\d*
-\d*\	\-?0\.0003\d*\	\-?0\.0003\d*\	\-?0\.050\d*\	\-?0\.050\d*
-\d*\	\-?0\.0028\d*\	\-?0\.0028\d*\	\-?0\.019\d*\	\-?0\.019\d*
-\d*\	\-?0\.0025\d*\	\-?0\.0025\d*\	\-?0\.02\d*\	\-?0\.02\d*
-\d*\	\-?0\.00\d*\	\-?0\.00\d*\	\-?0\.039\d*\	\-?0\.039\d*
-\d*\	\-?0\.0017\d*\	\-?0\.0017\d*\	\-?0\.028\d*\	\-?0\.028\d*
-\d*\	\-?0\.0015\d*\	\-?0\.0015\d*\	\-?0\.038\d*\	\-?0\.038\d*
-\d*\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.020\d*\	\-?0\.020\d*
-\d*\	\-?0\.0026\d*\	\-?0\.0026\d*\	\-?0\.013\d*\	\-?0\.013\d*
-\d*\	\-?0\.00010\d*\	\-?0\.00010\d*\	\-?0\.018\d*\	\-?0\.018\d*
-\d*\	\-?0\.00062\d*\	\-?0\.00062\d*\	\-?0\.028\d*\	\-?0\.028\d*
-\d*\	\-?0\.0033\d*\	\-?0\.0033\d*\	\-?0\.021\d*\	\-?0\.021\d*
-\d*\	\-?0\.007\d*\	\-?0\.007\d*\	\-?0\.075\d*\	\-?0\.075\d*
-\d*\	\-?0\.0092\d*\	\-?0\.0092\d*\	\-?0\.036\d*\	\-?0\.036\d*
-\d*\	\-?0\.0013\d*\	\-?0\.0013\d*\	\-?0\.031\d*\	\-?0\.031\d*
-\d*\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.029\d*\	\-?0\.029\d*
-\d*\	\-?0\.0029\d*\	\-?0\.0029\d*\	\-?0\.031\d*\	\-?0\.031\d*
-\d*\	\-?0\.0013\d*\	\-?0\.0013\d*\	\-?0\.031\d*\	\-?0\.031\d*
-\d*\	\-?0\.0031\d*\	\-?0\.0031\d*\	\-?0\.078\d*\	\-?0\.078\d*
-\d*\	\-?0\.0021\d*\	\-?0\.0021\d*\	\-?0\.028\d*\	\-?0\.028\d*
-\d*\	\-?0\.0010\d*\	\-?0\.0010\d*\	\-?0\.044\d*\	\-?0\.044\d*
-\d*\	\-?0\.0088\d*\	\-?0\.0088\d*\	\-?0\.041\d*\	\-?0\.041\d*
-\d*\	\-?0\.0028\d*\	\-?0\.0028\d*\	\-?0\.077\d*\	\-?0\.077\d*
-\d*\	\-?0\.0010\d*\	\-?0\.0010\d*\	\-?0\.044\d*\	\-?0\.044\d*
-\d*\	\-?0\.0017\d*\	\-?0\.0017\d*\	\-?0\.059\d*\	\-?0\.059\d*
-\d*\	\-?0\.00037\d*\	\-?0\.00037\d*\	\-?0\.029\d*\	\-?0\.029\d*
-\d*\	\-?0\.0017\d*\	\-?0\.0017\d*\	\-?0\.059\d*\	\-?0\.059\d*
-\d*\	\-?0\.00096\d*\	\-?0\.00096\d*\	\-?0\.0032\d*\	\-?0\.0032\d*
-\d*\	\-?0\.00052\d*\	\-?0\.00052\d*\	\-?0\.019\d*\	\-?0\.019\d*
-\d*\	\-?0\.001\d*\	\-?0\.001\d*\	\-?0\.032\d*\	\-?0\.032\d*
-\d*\	\-?6\.5\d*e-\d*\	\-?6\.5\d*e-\d*\	\-?0\.096\d*\	\-?0\.096\d*
-\d*\	\-?0\.0011\d*\	\-?0\.0011\d*\	\-?0\.020\d*\	\-?0\.020\d*
-\d*\	\-?0\.00017\d*\	\-?0\.00017\d*\	\-?0\.08\d*\	\-?0\.08\d*
-\d*\	\-?0\.0029\d*\	\-?0\.0029\d*\	\-?0\.029\d*\	\-?0\.029\d*
-\d*\	\-?0\.00060\d*\	\-?0\.00060\d*\	\-?0\.011\d*\	\-?0\.011\d*
-\d*\	\-?0\.0026\d*\	\-?0\.0026\d*\	\-?0\.065\d*\	\-?0\.065\d*
-\d*\	\-?0\.0015\d*\	\-?0\.0015\d*\	\-?0\.037\d*\	\-?0\.037\d*
-\d*\	\-?0\.0040\d*\	\-?0\.0040\d*\	\-?0\.092\d*\	\-?0\.092\d*
-\d*\	\-?0\.00036\d*\	\-?0\.00036\d*\	\-?0\.02\d*\	\-?0\.02\d*
-\d*\	\-?0\.0010\d*\	\-?0\.0010\d*\	\-?0\.022\d*\	\-?0\.022\d*
-\d*\	\-?0\.010\d*\	\-?0\.010\d*\	\-?0\.15\d*\	\-?0\.15\d*
-\d*\	\-?0\.0031\d*\	\-?0\.0031\d*\	\-?0\.084\d*\	\-?0\.084\d*
-\d*\	\-?0\.0055\d*\	\-?0\.0055\d*\	\-?0\.012\d*\	\-?0\.012\d*
-\d*\	\-?0\.0018\d*\	\-?0\.0018\d*\	\-?0\.069\d*\	\-?0\.069\d*
-\d*\	\-?0\.0032\d*\	\-?0\.0032\d*\	\-?0\.059\d*\	\-?0\.059\d*
-\d*\	\-?4\.1\d*e-\d*\	\-?4\.1\d*e-\d*\	\-?0\.048\d*\	\-?0\.048\d*
-\d*\	\-?0\.0037\d*\	\-?0\.0037\d*\	\-?0\.06\d*\	\-?0\.06\d*
-\d*\	\-?0\.0016\d*\	\-?0\.0016\d*\	\-?0\.063\d*\	\-?0\.063\d*
-\d*\	\-?0\.0065\d*\	\-?0\.0065\d*\	\-?0\.034\d*\	\-?0\.034\d*
-\d*\	\-?0\.0001\d*\	\-?0\.0001\d*\	\-?0\.01\d*\	\-?0\.01\d*
-\d*\	\-?0\.0034\d*\	\-?0\.0034\d*\	\-?0\.083\d*\	\-?0\.083\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*\	\-?0\.063\d*\	\-?0\.063\d*
-\d*\	\-?0\.002\d*\	\-?0\.002\d*\	\-?0\.0022\d*\	\-?0\.0022\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*\	\-?0\.063\d*\	\-?0\.063\d*
-\d*\	\-?0\.00042\d*\	\-?0\.00042\d*\	\-?0\.017\d*\	\-?0\.017\d*
-\d*\	\-?6\.0\d*e-\d*\	\-?6\.0\d*e-\d*\	\-?0\.026\d*\	\-?0\.026\d*
-\d*\	\-?0\.0014\d*\	\-?0\.0014\d*\	\-?0\.079\d*\	\-?0\.079\d*
-\d*\	\-?0\.00020\d*\	\-?0\.00020\d*\	\-?0\.049\d*\	\-?0\.049\d*
-\d*\	\-?0\.002\d*\	\-?0\.002\d*\	\-?0\.078\d*\	\-?0\.078\d*
-\d*\	\-?0\.00090\d*\	\-?0\.00090\d*\	\-?0\.044\d*\	\-?0\.044\d*
-\d*\	\-?0\.0014\d*\	\-?0\.0014\d*\	\-?0\.012\d*\	\-?0\.012\d*
-\d*\	\-?0\.0014\d*\	\-?0\.0014\d*\	\-?0\.012\d*\	\-?0\.012\d*
-\d*\	\-?0\.0016\d*\	\-?0\.0016\d*\	\-?0\.063\d*\	\-?0\.063\d*
-\d*\	\-?0\.00096\d*\	\-?0\.00096\d*\	\-?0\.063\d*\	\-?0\.063\d*
-\d*\	\-?0\.003\d*\	\-?0\.003\d*\	\-?0\.030\d*\	\-?0\.030\d*
-\d*\	\-?0\.0025\d*\	\-?0\.0025\d*\	\-?0\.071\d*\	\-?0\.071\d*
-\d*\	\-?4\.1\d*e\-?\d*\	\-?4\.1\d*e\-?\d*\	\-?0\.048\d*\	\-?0\.048\d*
-\d*\	\-?0\.0053\d*\	\-?0\.0053\d*\	\-?0\.035\d*\	\-?0\.035\d*
-\d*\	\-?0\.0037\d*\	\-?0\.0037\d*\	\-?0\.06\d*\	\-?0\.06\d*
-\d*\	\-?0\.00017\d*\	\-?0\.00017\d*\	\-?0\.0037\d*\	\-?0\.0037\d*
-\d*\	\-?0\.00093\d*\	\-?0\.00093\d*\	\-?0\.017\d*\	\-?0\.017\d*
-\d*\	\-?0\.0023\d*\	\-?0\.0023\d*\	\-?0\.073\d*\	\-?0\.073\d*
-\d*\	\-?0\.00070\d*\	\-?0\.00070\d*\	\-?0\.052\d*\	\-?0\.052\d*
-\d*\	\-?0\.0048\d*\	\-?0\.0048\d*\	\-?0\.10\d*\	\-?0\.10\d*
-\d*\	\-?0\.0019\d*\	\-?0\.0019\d*\	\-?0\.044\d*\	\-?0\.044\d*
-\d*\	\-?0\.0036\d*\	\-?0\.0036\d*\	\-?0\.074\d*\	\-?0\.074\d*
-\d*\	\-?0\.0032\d*\	\-?0\.0032\d*\	\-?0\.071\d*\	\-?0\.071\d*
-\d*\	\-?0\.0014\d*\	\-?0\.0014\d*\	\-?0\.056\d*\	\-?0\.056\d*
-\d*\	\-?0\.0015\d*\	\-?0\.0015\d*\	\-?0\.062\d*\	\-?0\.062\d*
-1\d*\	\-?0\.0026\d*\	\-?0\.0026\d*\	\-?0\.065\d*\	\-?0\.065\d*
-1\d*\	\-?0\.0015\d*\	\-?0\.0015\d*\	\-?0\.037\d*\	\-?0\.037\d*
-1\d*\	\-?0\.0016\d*\	\-?0\.0016\d*\	\-?0\.063\d*\	\-?0\.063\d*
-1\d*\	\-?0\.00025\d*\	\-?0\.00025\d*\	\-?0\.097\d*\	\-?0\.097\d*
-1\d*\	\-?0\.00090\d*\	\-?0\.00090\d*\	\-?0\.038\d*\	\-?0\.038\d*
-1\d*\	\-?0\.00042\d*\	\-?0\.00042\d*\	\-?0\.0021\d*\	\-?0\.0021\d*
-1\d*\	\-?0\.00025\d*\	\-?0\.00025\d*\	\-?0\.048\d*\	\-?0\.048\d*
-1\d*\	\-?0\.0027\d*\	\-?0\.0027\d*\	\-?0\.076\d*\	\-?0\.076\d*
-1\d*\	\-?0\.00034\d*\	\-?0\.00034\d*\	\-?0\.076\d*\	\-?0\.076\d*
-1\d*\	\-?0\.0040\d*\	\-?0\.0040\d*\	\-?0\.093\d*\	\-?0\.093\d*
-1\d*\	\-?0\.001\d*\	\-?0\.001\d*\	\-?0\.11\d*\	\-?0\.11\d*
-1\d*\	\-?0\.00073\d*\	\-?0\.00073\d*\	\-?0\.0031\d*\	\-?0\.0031\d*
-1\d*\	\-?0\.00094\d*\	\-?0\.00094\d*\	\-?0\.0025\d*\	\-?0\.0025\d*
-1\d*\	\-?0\.00037\d*\	\-?0\.00037\d*\	\-?0\.069\d*\	\-?0\.069\d*
-1\d*\	\-?0\.00021\d*\	\-?0\.00021\d*\	\-?0\.030\d*\	\-?0\.030\d*
-1\d*\	\-?0\.0024\d*\	\-?0\.0024\d*\	\-?0\.072\d*\	\-?0\.072\d*
-1\d*\	\-?0\.0011\d*\	\-?0\.0011\d*\	\-?0\.020\d*\	\-?0\.020\d*
-1\d*\	\-?0\.00042\d*\	\-?0\.00042\d*\	\-?0\.0021\d*\	\-?0\.0021\d*
-1\d*\	\-?0\.0051\d*\	\-?0\.0051\d*\	\-?0\.071\d*\	\-?0\.071\d*
-1\d*\	\-?0\.0037\d*\	\-?0\.0037\d*\	\-?0\.0\d*\	\-?0\.0\d*
-1\d*\	\-?0\.0055\d*\	\-?0\.0055\d*\	\-?0\.012\d*\	\-?0\.012\d*
-1\d*\	\-?0\.00011\d*\	\-?0\.00011\d*\	\-?0\.086\d*\	\-?0\.086\d*
-1\d*\	\-?0\.0026\d*\	\-?0\.0026\d*\	\-?0\.053\d*\	\-?0\.053\d*
-1\d*\	\-?0\.002\d*\	\-?0\.002\d*\	\-?0\.10\d*\	\-?0\.10\d*
-1\d*\	\-?0\.0004\d*\	\-?0\.0004\d*\	\-?0\.023\d*\	\-?0\.023\d*
-1\d*\	\-?3\.2\d*e-\d*\	\-?3\.2\d*e-\d*\	\-?0\.053\d*\	\-?0\.053\d*
-1\d*\	\-?0\.0002\d*\	\-?0\.0002\d*\	\-?0\.09\d*\	\-?0\.09\d*
-1\d*\	\-?0\.00080\d*\	\-?0\.00080\d*\	\-?0\.050\d*\	\-?0\.050\d*
-1\d*\	\-?0\.0023\d*\	\-?0\.0023\d*\	\-?0\.073\d*\	\-?0\.073\d*
-1\d*\	\-?0\.00017\d*\	\-?0\.00017\d*\	\-?0\.011\d*\	\-?0\.011\d*
-1\d*\	\-?7\.7\d*e-\d*\	\-?7\.7\d*e-\d*\	\-?0\.090\d*\	\-?0\.090\d*
-1\d*\	\-?0\.00085\d*\	\-?0\.00085\d*\	\-?0\.054\d*\	\-?0\.054\d*
-1\d*\	\-?0\.0083\d*\	\-?0\.0083\d*\	\-?0\.23\d*\	\-?0\.23\d*
-1\d*\	\-?0\.00017\d*\	\-?0\.00017\d*\	\-?0\.011\d*\	\-?0\.011\d*
-1\d*\	\-?0\.00030\d*\	\-?0\.00030\d*\	\-?0\.032\d*\	\-?0\.032\d*
-1\d*\	\-?0\.00042\d*\	\-?0\.00042\d*\	\-?0\.043\d*\	\-?0\.043\d*
-1\d*\	\-?0\.00099\d*\	\-?0\.00099\d*\	\-?0\.11\d*\	\-?0\.11\d*
-1\d*\	\-?0\.0011\d*\	\-?0\.0011\d*\	\-?0\.032\d*\	\-?0\.032\d*
-1\d*\	\-?0\.0011\d*\	\-?0\.0011\d*\	\-?0\.021\d*\	\-?0\.021\d*
-1\d*\	\-?0\.0027\d*\	\-?0\.0027\d*\	\-?0\.081\d*\	\-?0\.081\d*
-1\d*\	\-?0\.00017\d*\	\-?0\.00017\d*\	\-?0\.08\d*\	\-?0\.08\d*
-1\d*\	\-?4\.\d*e-\d*\	\-?4\.\d*e-\d*\	\-?0\.048\d*\	\-?0\.048\d*
-1\d*\	\-?0\.00017\d*\	\-?0\.00017\d*\	\-?0\.08\d*\	\-?0\.08\d*
-1\d*\	\-?0\.0016\d*\	\-?0\.0016\d*\	\-?0\.063\d*\	\-?0\.063\d*
-1\d*\	\-?5\.9\d*e-\d*\	\-?5\.9\d*e-\d*\	\-?0\.070\d*\	\-?0\.070\d*
-1\d*\	\-?3\.2\d*e-\d*\	\-?3\.2\d*e-\d*\	\-?0\.053\d*\	\-?0\.053\d*
-1\d*\	\-?0\.00020\d*\	\-?0\.00020\d*\	\-?0\.049\d*\	\-?0\.049\d*
-1\d*\	\-?0\.002\d*\	\-?0\.002\d*\	\-?0\.0022\d*\	\-?0\.0022\d*
-1\d*\	\-?0\.00042\d*\	\-?0\.00042\d*\	\-?0\.0021\d*\	\-?0\.0021\d*
-1\d*\	\-?0\.0016\d*\	\-?0\.0016\d*\	\-?0\.050\d*\	\-?0\.050\d*
-1\d*\	\-?0\.0031\d*\	\-?0\.0031\d*\	\-?0\.084\d*\	\-?0\.084\d*
-\#Estimated\ Y\-?coefficients\	1\	2\	3\	4
-1\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.014\d*\	\-?0\.014\d*
-2\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.014\d*\	\-?0\.014\d*
-3\	\-?0\.0018\d*\	\-?0\.0018\d*\	\-?0\.004\d*\	\-?0\.004\d*
-4\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.0094\d*\	\-?0\.0094\d*
-5\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.014\d*\	\-?0\.014\d*
-6\	\-?0\.0046\d*\	\-?0\.0046\d*\	\-?0\.0047\d*\	\-?0\.0047\d*
-7\	\-?0\.0012\d*\	\-?0\.0012\d*\	\-?0\.030\d*\	\-?0\.030\d*
-8\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.0094\d*\	\-?0\.0094\d*
-9\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.014\d*\	\-?0\.014\d*
-\d*\	\-?0\.0059\d*\	\-?0\.0059\d*\	\-?0\.013\d*\	\-?0\.013\d*
-\d*\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.0094\d*\	\-?0\.0094\d*
-\d*\	\-?0\.0018\d*\	\-?0\.0018\d*\	\-?0\.0066\d*\	\-?0\.0066\d*
-\d*\	\-?0\.005\d*\	\-?0\.005\d*\	\-?0\.0080\d*\	\-?0\.0080\d*
-\d*\	\-?0\.0041\d*\	\-?0\.0041\d*\	\-?0\.10\d*\	\-?0\.10\d*
-\d*\	\-?0\.0013\d*\	\-?0\.0013\d*\	\-?0\.027\d*\	\-?0\.027\d*
-\d*\	\-?0\.0041\d*\	\-?0\.0041\d*\	\-?0\.033\d*\	\-?0\.033\d*
-\d*\	\-?0\.0043\d*\	\-?0\.0043\d*\	\-?0\.028\d*\	\-?0\.028\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*\	\-?0\.030\d*\	\-?0\.030\d*
-\d*\	\-?0\.0017\d*\	\-?0\.0017\d*\	\-?0\.013\d*\	\-?0\.013\d*
-\d*\	\-?0\.0013\d*\	\-?0\.0013\d*\	\-?0\.025\d*\	\-?0\.025\d*
-\d*\	\-?0\.001\d*\	\-?0\.001\d*\	\-?0\.029\d*\	\-?0\.029\d*
-\d*\	\-?0\.0041\d*\	\-?0\.0041\d*\	\-?0\.033\d*\	\-?0\.033\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*\	\-?0\.17\d*\	\-?0\.17\d*
-\d*\	\-?0\.0071\d*\	\-?0\.0071\d*\	\-?0\.0043\d*\	\-?0\.0043\d*
-\d*\	\-?0\.0010\d*\	\-?0\.0010\d*\	\-?0\.07\d*\	\-?0\.07\d*
-\d*\	\-?0\.0018\d*\	\-?0\.0018\d*\	\-?0\.0066\d*\	\-?0\.0066\d*
-\d*\	\-?0\.0043\d*\	\-?0\.0043\d*\	\-?0\.017\d*\	\-?0\.017\d*
-\d*\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.0094\d*\	\-?0\.0094\d*
-\d*\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.014\d*\	\-?0\.014\d*
-\d*\	\-?0\.0018\d*\	\-?0\.0018\d*\	\-?0\.0066\d*\	\-?0\.0066\d*
-\d*\	\-?0\.0018\d*\	\-?0\.0018\d*\	\-?0\.0066\d*\	\-?0\.0066\d*
-\d*\	\-?0\.0041\d*\	\-?0\.0041\d*\	\-?0\.033\d*\	\-?0\.033\d*
-\d*\	\-?0\.0059\d*\	\-?0\.0059\d*\	\-?0\.013\d*\	\-?0\.013\d*
-\d*\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.014\d*\	\-?0\.014\d*
-\d*\	\-?0\.0059\d*\	\-?0\.0059\d*\	\-?0\.013\d*\	\-?0\.013\d*
-\d*\	\-?0\.0013\d*\	\-?0\.0013\d*\	\-?0\.027\d*\	\-?0\.027\d*
-\d*\	\-?0\.0018\d*\	\-?0\.0018\d*\	\-?0\.004\d*\	\-?0\.004\d*
-\d*\	\-?0\.0059\d*\	\-?0\.0059\d*\	\-?0\.013\d*\	\-?0\.013\d*
-\d*\	\-?0\.0018\d*\	\-?0\.0018\d*\	\-?0\.004\d*\	\-?0\.004\d*
-\d*\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.0094\d*\	\-?0\.0094\d*
-\d*\	\-?0\.0014\d*\	\-?0\.0014\d*\	\-?0\.020\d*\	\-?0\.020\d*
-\d*\	\-?0\.0014\d*\	\-?0\.0014\d*\	\-?0\.020\d*\	\-?0\.020\d*
-\d*\	\-?0\.0018\d*\	\-?0\.0018\d*\	\-?0\.004\d*\	\-?0\.004\d*
-\d*\	\-?0\.0088\d*\	\-?0\.0088\d*\	\-?0\.010\d*\	\-?0\.010\d*
-\d*\	\-?0\.0051\d*\	\-?0\.0051\d*\	\-?0\.054\d*\	\-?0\.054\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*\	\-?0\.030\d*\	\-?0\.030\d*
-\d*\	\-?0\.0018\d*\	\-?0\.0018\d*\	\-?0\.0066\d*\	\-?0\.0066\d*
-\d*\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.014\d*\	\-?0\.014\d*
-\d*\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.0094\d*\	\-?0\.0094\d*
-\d*\	\-?0\.0020\d*\	\-?0\.0020\d*\	\-?0\.014\d*\	\-?0\.014\d*
-\d*\	\-?0\.00017\d*\	\-?0\.00017\d*\	\-?0\.018\d*\	\-?0\.018\d*
-\d*\	\-?0\.00033\d*\	\-?0\.00033\d*\	\-?0\.023\d*\	\-?0\.023\d*
-\d*\	\-?0\.00084\d*\	\-?0\.00084\d*\	\-?0\.013\d*\	\-?0\.013\d*
-\d*\	\-?0\.00050\d*\	\-?0\.00050\d*\	\-?0\.0067\d*\	\-?0\.0067\d*
-\d*\	\-?0\.00045\d*\	\-?0\.00045\d*\	\-?0\.014\d*\	\-?0\.014\d*
-\d*\	\-?0\.00061\d*\	\-?0\.00061\d*\	\-?0\.029\d*\	\-?0\.029\d*
-\d*\	\-?0\.000\d*\	\-?0\.000\d*\	\-?0\.025\d*\	\-?0\.025\d*
-\d*\	\-?0\.00\d*\	\-?0\.00\d*\	\-?0\.019\d*\	\-?0\.019\d*
-\d*\	\-?0\.00049\d*\	\-?0\.00049\d*\	\-?0\.038\d*\	\-?0\.038\d*
-\d*\	\-?0\.00034\d*\	\-?0\.00034\d*\	\-?0\.044\d*\	\-?0\.044\d*
-\d*\	\-?0\.00031\d*\	\-?0\.00031\d*\	\-?0\.023\d*\	\-?0\.023\d*
-\d*\	\-?0\.00016\d*\	\-?0\.00016\d*\	\-?0\.044\d*\	\-?0\.044\d*
-\d*\	\-?0\.0027\d*\	\-?0\.0027\d*\	\-?0\.089\d*\	\-?0\.089\d*
-\d*\	\-?0\.00017\d*\	\-?0\.00017\d*\	\-?0\.018\d*\	\-?0\.018\d*
-\d*\	\-?0\.0016\d*\	\-?0\.0016\d*\	\-?0\.054\d*\	\-?0\.054\d*
-\d*\	\-?0\.0001\d*\	\-?0\.0001\d*\	\-?0\.0062\d*\	\-?0\.0062\d*
-\d*\	\-?0\.00033\d*\	\-?0\.00033\d*\	\-?0\.023\d*\	\-?0\.023\d*
-\d*\	\-?0\.0029\d*\	\-?0\.0029\d*\	\-?0\.097\d*\	\-?0\.097\d*
-\d*\	\-?0\.00033\d*\	\-?0\.00033\d*\	\-?0\.023\d*\	\-?0\.023\d*
-\d*\	\-?0\.0015\d*\	\-?0\.0015\d*\	\-?0\.048\d*\	\-?0\.048\d*
-\d*\	\-?0\.0010\d*\	\-?0\.0010\d*\	\-?0\.038\d*\	\-?0\.038\d*
-\d*\	\-?0\.00050\d*\	\-?0\.00050\d*\	\-?0\.0067\d*\	\-?0\.0067\d*
-\d*\	\-?0\.00084\d*\	\-?0\.00084\d*\	\-?0\.013\d*\	\-?0\.013\d*
-\d*\	\-?0\.00099\d*\	\-?0\.00099\d*\	\-?0\.078\d*\	\-?0\.078\d*
-\d*\	\-?0\.00078\d*\	\-?0\.00078\d*\	\-?0\.015\d*\	\-?0\.015\d*
-\d*\	\-?0\.0001\d*\	\-?0\.0001\d*\	\-?0\.0062\d*\	\-?0\.0062\d*
-\d*\	\-?0\.00030\d*\	\-?0\.00030\d*\	\-?0\.027\d*\	\-?0\.027\d*
-\d*\	\-?0\.0013\d*\	\-?0\.0013\d*\	\-?0\.010\d*\	\-?0\.010\d*
-\d*\	\-?0\.00033\d*\	\-?0\.00033\d*\	\-?0\.023\d*\	\-?0\.023\d*
-\d*\	\-?0\.00031\d*\	\-?0\.00031\d*\	\-?0\.023\d*\	\-?0\.023\d*
-\d*\	\-?0\.0011\d*\	\-?0\.0011\d*\	\-?0\.037\d*\	\-?0\.037\d*
-\d*\	\-?0\.0013\d*\	\-?0\.0013\d*\	\-?0\.056\d*\	\-?0\.056\d*
-\d*\	\-?0\.00082\d*\	\-?0\.00082\d*\	\-?0\.015\d*\	\-?0\.015\d*
-\d*\	\-?0\.0013\d*\	\-?0\.0013\d*\	\-?0\.012\d*\	\-?0\.012\d*
-\d*\	\-?0\.00033\d*\	\-?0\.00033\d*\	\-?0\.023\d*\	\-?0\.023\d*
-\d*\	\-?0\.00062\d*\	\-?0\.00062\d*\	\-?0\.042\d*\	\-?0\.042\d*
-\d*\	\-?0\.00059\d*\	\-?0\.00059\d*\	\-?0\.0061\d*\	\-?0\.0061\d*
-\d*\	\-?0\.00071\d*\	\-?0\.00071\d*\	\-?0\.022\d*\	\-?0\.022\d*
-\d*\	\-?0\.00069\d*\	\-?0\.00069\d*\	\-?0\.0011\d*\	\-?0\.0011\d*
-\d*\	\-?0\.00050\d*\	\-?0\.00050\d*\	\-?0\.0067\d*\	\-?0\.0067\d*
-\d*\	\-?0\.0013\d*\	\-?0\.0013\d*\	\-?0\.053\d*\	\-?0\.053\d*
-\d*\	\-?4\.0\d*e-\d*\	\-?4\.0\d*e-\d*\	\-?0\.009\d*\	\-?0\.009\d*
-\d*\	\-?0\.0011\d*\	\-?0\.0011\d*\	\-?0\.024\d*\	\-?0\.024\d*
-\d*\	\-?0\.00\d*\	\-?0\.00\d*\	\-?0\.019\d*\	\-?0\.019\d*
-\d*\	\-?0\.00078\d*\	\-?0\.00078\d*\	\-?0\.0083\d*\	\-?0\.0083\d*
-\d*\	\-?0\.0014\d*\	\-?0\.0014\d*\	\-?0\.039\d*\	\-?0\.039\d*
-\d*\	\-?0\.00078\d*\	\-?0\.00078\d*\	\-?0\.0083\d*\	\-?0\.0083\d*
-\d*\	\-?0\.00078\d*\	\-?0\.00078\d*\	\-?0\.015\d*\	\-?0\.015\d*
-\d*\	\-?0\.0076\d*\	\-?0\.0076\d*\	\-?0\.12\d*\	\-?0\.12\d*
-1\d*\	\-?0\.00069\d*\	\-?0\.00069\d*\	\-?0\.0011\d*\	\-?0\.0011\d*
-1\d*\	\-?0\.0067\d*\	\-?0\.0067\d*\	\-?0\.046\d*\	\-?0\.046\d*
-1\d*\	\-?0\.001\d*\	\-?0\.001\d*\	\-?0\.013\d*\	\-?0\.013\d*
-1\d*\	\-?0\.0017\d*\	\-?0\.0017\d*\	\-?0\.011\d*\	\-?0\.011\d*
-1\d*\	\-?0\.0024\d*\	\-?0\.0024\d*\	\-?0\.008\d*\	\-?0\.008\d*
-1\d*\	\-?8\.[6-8]\d*e\-?\d*\	\-?8\.[6-8]\d*e\-?\d*\	\-?0\.00049\d*\	\-?0\.00049\d*
-1\d*\	\-?0\.0007\d*\	\-?0\.0007\d*\	\-?7\.8\d*e\-?\d*\	\-?7\.8\d*e\-?\d*
-1\d*\	\-?0\.00075\d*\	\-?0\.00075\d*\	\-?0\.056\d*\	\-?0\.056\d*
-1\d*\	\-?0\.004\d*\	\-?0\.004\d*\	\-?0\.04\d*\	\-?0\.04\d*
-1\d*\	\-?0\.0032\d*\	\-?0\.0032\d*\	\-?0\.0028\d*\	\-?0\.0028\d*
-1\d*\	\-?0\.0063\d*\	\-?0\.0063\d*\	\-?0\.056\d*\	\-?0\.056\d*
-1\d*\	\-?0\.00050\d*\	\-?0\.00050\d*\	\-?0\.012\d*\	\-?0\.012\d*
-1\d*\	\-?0\.0014\d*\	\-?0\.0014\d*\	\-?0\.00081\d*\	\-?0\.00081\d*
-1\d*\	\-?0\.00024\d*\	\-?0\.00024\d*\	\-?0\.0095\d*\	\-?0\.0095\d*
-1\d*\	\-?0\.00038\d*\	\-?0\.00038\d*\	\-?0\.02\d*\	\-?0\.02\d*
-1\d*\	\-?0\.0063\d*\	\-?0\.0063\d*\	\-?0\.016\d*\	\-?0\.016\d*
-1\d*\	\-?0\.0035\d*\	\-?0\.0035\d*\	\-?0\.0055\d*\	\-?0\.0055\d*
-1\d*\	\-?0\.0021\d*\	\-?0\.0021\d*\	\-?0\.0096\d*\	\-?0\.0096\d*
-1\d*\	\-?0\.0027\d*\	\-?0\.0027\d*\	\-?0\.044\d*\	\-?0\.044\d*
-1\d*\	\-?0\.011\d*\	\-?0\.011\d*\	\-?0\.19\d*\	\-?0\.19\d*
-1\d*\	\-?0\.00096\d*\	\-?0\.00096\d*\	\-?0\.022\d*\	\-?0\.022\d*
-1\d*\	\-?0\.002\d*\	\-?0\.002\d*\	\-?0\.0054\d*\	\-?0\.0054\d*
-1\d*\	\-?0\.0002\d*\	\-?0\.0002\d*\	\-?0\.031\d*\	\-?0\.031\d*
-1\d*\	\-?0\.00034\d*\	\-?0\.00034\d*\	\-?0\.039\d*\	\-?0\.039\d*
-1\d*\	\-?0\.0011\d*\	\-?0\.0011\d*\	\-?0\.028\d*\	\-?0\.028\d*
-1\d*\	\-?0\.00091\d*\	\-?0\.00091\d*\	\-?0\.003\d*\	\-?0\.003\d*
-1\d*\	\-?0\.0041\d*\	\-?0\.0041\d*\	\-?0\.021\d*\	\-?0\.021\d*
-1\d*\	\-?0\.0010\d*\	\-?0\.0010\d*\	\-?0\.038\d*\	\-?0\.038\d*
-1\d*\	\-?0\.0011\d*\	\-?0\.0011\d*\	\-?0\.028\d*\	\-?0\.028\d*
-1\d*\	\-?0\.00054\d*\	\-?0\.00054\d*\	\-?0\.0081\d*\	\-?0\.0081\d*
-1\d*\	\-?0\.0031\d*\	\-?0\.0031\d*\	\-?0\.01\d*\	\-?0\.01\d*
-1\d*\	\-?0\.0042\d*\	\-?0\.0042\d*\	\-?0\.034\d*\	\-?0\.034\d*
-1\d*\	\-?0\.0037\d*\	\-?0\.0037\d*\	\-?0\.041\d*\	\-?0\.041\d*
-1\d*\	\-?0\.00087\d*\	\-?0\.00087\d*\	\-?0\.010\d*\	\-?0\.010\d*
-1\d*\	\-?0\.0010\d*\	\-?0\.0010\d*\	\-?0\.031\d*\	\-?0\.031\d*
-1\d*\	\-?0\.001\d*\	\-?0\.001\d*\	\-?0\.074\d*\	\-?0\.074\d*
-1\d*\	\-?0\.00069\d*\	\-?0\.00069\d*\	\-?0\.029\d*\	\-?0\.029\d*
-1\d*\	\-?0\.0053\d*\	\-?0\.0053\d*\	\-?0\.0019\d*\	\-?0\.0019\d*
-1\d*\	\-?0\.0021\d*\	\-?0\.0021\d*\	\-?0\.0096\d*\	\-?0\.0096\d*
-1\d*\	\-?0\.0010\d*\	\-?0\.0010\d*\	\-?0\.038\d*\	\-?0\.038\d*
-1\d*\	\-?3\.[4-6]\d*e-\d*\	\-?3\.[4-6]\d*e-\d*\	\-?0\.0081\d*\	\-?0\.0081\d*
-1\d*\	\-?0\.0053\d*\	\-?0\.0053\d*\	\-?0\.0019\d*\	\-?0\.0019\d*
-1\d*\	\-?0\.0038\d*\	\-?0\.0038\d*\	\-?0\.0085\d*\	\-?0\.0085\d*
-1\d*\	\-?0\.001\d*\	\-?0\.001\d*\	\-?0\.013\d*\	\-?0\.013\d*
-1\d*\	\-?0\.0015\d*\	\-?0\.0015\d*\	\-?0\.010\d*\	\-?0\.010\d*
-1\d*\	\-?0\.0081\d*\	\-?0\.0081\d*\	\-?0\.020\d*\	\-?0\.020\d*
-1\d*\	\-?0\.0037\d*\	\-?0\.0037\d*\	\-?0\.00065\d*\	\-?0\.00065\d*
-1\d*\	\-?0\.0010\d*\	\-?0\.0010\d*\	\-?0\.022\d*\	\-?0\.022\d*
-1\d*\	\-?0\.00063\d*\	\-?0\.00063\d*\	\-?0\.0048\d*\	\-?0\.0048\d*
-1\d*\	\-?0\.0033\d*\	\-?0\.0033\d*\	\-?0\.0096\d*\	\-?0\.0096\d*
-1\d*\	\-?0\.0014\d*\	\-?0\.0014\d*\	\-?0\.010\d*\	\-?0\.010\d*
--- a/test-data/kcca_out2.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,304 +0,0 @@
-\#Component\	1\	2
-\#Correlation\	\-?0\.99\d*\	\-?0\.99\d*
-\#Estimated\ X\-?coefficients\	1\	2
-1\	\-?0\.0012\d*\	\-?0\.0012\d*
-2\	\-?0\.0012\d*\	\-?0\.0012\d*
-3\	\-?0\.0016\d*\	\-?0\.0016\d*
-4\	\-?0\.0017\d*\	\-?0\.0017\d*
-5\	\-?0\.0012\d*\	\-?0\.0012\d*
-6\	\-?0\.0029\d*\	\-?0\.0029\d*
-7\	\-?0\.0046\d*\	\-?0\.0046\d*
-8\	\-?0\.0017\d*\	\-?0\.0017\d*
-9\	\-?0\.0012\d*\	\-?0\.0012\d*
-\d*\	\-?0\.0068\d*\	\-?0\.0068\d*
-\d*\	\-?0\.0017\d*\	\-?0\.0017\d*
-\d*\	\-?8\.\d*e\-?\d*\	\-?8\.\d*e\-?\d*
-\d*\	\-?0\.0063\d*\	\-?0\.0063\d*
-\d*\	\-?0\.008\d*\	\-?0\.008\d*
-\d*\	\-?0\.0065\d*\	\-?0\.0065\d*
-\d*\	\-?0\.006\d*\	\-?0\.006\d*
-\d*\	\-?0\.0087\d*\	\-?0\.0087\d*
-\d*\	\-?0\.0046\d*\	\-?0\.0046\d*
-\d*\	\-?0\.0021\d*\	\-?0\.0021\d*
-\d*\	\-?0\.0069\d*\	\-?0\.0069\d*
-\d*\	\-?0\.0029\d*\	\-?0\.0029\d*
-\d*\	\-?0\.006\d*\	\-?0\.006\d*
-\d*\	\-?0\.017\d*\	\-?0\.017\d*
-\d*\	\-?0\.0024\d*\	\-?0\.0024\d*
-\d*\	\-?0\.0079\d*\	\-?0\.0079\d*
-\d*\	\-?8\.\d*e\-?\d*\	\-?8\.\d*e\-?\d*
-\d*\	\-?0\.0063\d*\	\-?0\.0063\d*
-\d*\	\-?0\.0017\d*\	\-?0\.0017\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*
-\d*\	\-?8\.\d*e\-?\d*\	\-?8\.\d*e\-?\d*
-\d*\	\-?8\.\d*e\-?\d*\	\-?8\.\d*e\-?\d*
-\d*\	\-?0\.006\d*\	\-?0\.006\d*
-\d*\	\-?0\.0068\d*\	\-?0\.0068\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*
-\d*\	\-?0\.0068\d*\	\-?0\.0068\d*
-\d*\	\-?0\.0065\d*\	\-?0\.0065\d*
-\d*\	\-?0\.0016\d*\	\-?0\.0016\d*
-\d*\	\-?0\.0068\d*\	\-?0\.0068\d*
-\d*\	\-?0\.0016\d*\	\-?0\.0016\d*
-\d*\	\-?0\.0017\d*\	\-?0\.0017\d*
-\d*\	\-?0\.00\d*\	\-?0\.00\d*
-\d*\	\-?0\.00\d*\	\-?0\.00\d*
-\d*\	\-?0\.0016\d*\	\-?0\.0016\d*
-\d*\	\-?0\.015\d*\	\-?0\.015\d*
-\d*\	\-?0\.0084\d*\	\-?0\.0084\d*
-\d*\	\-?0\.0046\d*\	\-?0\.0046\d*
-\d*\	\-?8\.\d*e\-?\d*\	\-?8\.\d*e\-?\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*
-\d*\	\-?0\.0017\d*\	\-?0\.0017\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*
-\d*\	\-?0\.0006\d*\	\-?0\.0006\d*
-\d*\	\-?0\.0021\d*\	\-?0\.0021\d*
-\d*\	\-?0\.0017\d*\	\-?0\.0017\d*
-\d*\	\-?1\.1\d*e\-?\d*\	\-?1\.1\d*e\-?\d*
-\d*\	\-?0\.0027\d*\	\-?0\.0027\d*
-\d*\	\-?0\.0021\d*\	\-?0\.0021\d*
-\d*\	\-?0\.0030\d*\	\-?0\.0030\d*
-\d*\	\-?0\.0018\d*\	\-?0\.0018\d*
-\d*\	\-?0\.0047\d*\	\-?0\.0047\d*
-\d*\	\-?0\.0038\d*\	\-?0\.0038\d*
-\d*\	\-?0\.00\d*\	\-?0\.00\d*
-\d*\	\-?0\.0025\d*\	\-?0\.0025\d*
-\d*\	\-?0\.0064\d*\	\-?0\.0064\d*
-\d*\	\-?0\.0006\d*\	\-?0\.0006\d*
-\d*\	\-?0\.0067\d*\	\-?0\.0067\d*
-\d*\	\-?0\.0023\d*\	\-?0\.0023\d*
-\d*\	\-?0\.0021\d*\	\-?0\.0021\d*
-\d*\	\-?0\.011\d*\	\-?0\.011\d*
-\d*\	\-?0\.0021\d*\	\-?0\.0021\d*
-\d*\	\-?0\.0006\d*\	\-?0\.0006\d*
-\d*\	\-?0\.0011\d*\	\-?0\.0011\d*
-\d*\	\-?1\.1\d*e\-?\d*\	\-?1\.1\d*e\-?\d*
-\d*\	\-?0\.0017\d*\	\-?0\.0017\d*
-\d*\	\-?0\.018\d*\	\-?0\.018\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*
-\d*\	\-?0\.0023\d*\	\-?0\.0023\d*
-\d*\	\-?0\.00\d*\	\-?0\.00\d*
-\d*\	\-?0\.005\d*\	\-?0\.005\d*
-\d*\	\-?0\.0021\d*\	\-?0\.0021\d*
-\d*\	\-?0\.00\d*\	\-?0\.00\d*
-\d*\	\-?0\.00032\d*\	\-?0\.00032\d*
-\d*\	\-?0\.002\d*\	\-?0\.002\d*
-\d*\	\-?0\.00058\d*\	\-?0\.00058\d*
-\d*\	\-?0\.0033\d*\	\-?0\.0033\d*
-\d*\	\-?0\.0021\d*\	\-?0\.0021\d*
-\d*\	\-?0\.0014\d*\	\-?0\.0014\d*
-\d*\	\-?0\.0027\d*\	\-?0\.0027\d*
-\d*\	\-?7\.3e\-?05\	\-?7\.3e\-?\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*
-\d*\	\-?1\.1\d*e\-?\d*\	\-?1\.1\d*e\-?\d*
-\d*\	\-?0\.0062\d*\	\-?0\.0062\d*
-\d*\	\-?0\.00075\d*\	\-?0\.00075\d*
-\d*\	\-?0\.00013\d*\	\-?0\.00013\d*
-\d*\	\-?0\.0018\d*\	\-?0\.0018\d*
-\d*\	\-?0\.0016\d*\	\-?0\.0016\d*
-\d*\	\-?0\.0010\d*\	\-?0\.0010\d*
-\d*\	\-?0\.0016\d*\	\-?0\.0016\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*
-\d*\	\-?0\.027\d*\	\-?0\.027\d*
-1\d*\	\-?0\.0012\d*\	\-?0\.0012\d*
-1\d*\	\-?0\.016\d*\	\-?0\.016\d*
-1\d*\	\-?0\.0058\d*\	\-?0\.0058\d*
-1\d*\	\-?0\.017\d*\	\-?0\.017\d*
-1\d*\	\-?0\.00081\d*\	\-?0\.00081\d*
-1\d*\	\-?0\.0063\d*\	\-?0\.0063\d*
-1\d*\	\-?0\.0027\d*\	\-?0\.0027\d*
-1\d*\	\-?0\.0095\d*\	\-?0\.0095\d*
-1\d*\	\-?0\.015\d*\	\-?0\.015\d*
-1\d*\	\-?0\.0030\d*\	\-?0\.0030\d*
-1\d*\	\-?0\.011\d*\	\-?0\.011\d*
-1\d*\	\-?0\.0039\d*\	\-?0\.0039\d*
-1\d*\	\-?0\.0044\d*\	\-?0\.0044\d*
-1\d*\	\-?0\.0017\d*\	\-?0\.0017\d*
-1\d*\	\-?0\.00087\d*\	\-?0\.00087\d*
-1\d*\	\-?0\.02\d*\	\-?0\.02\d*
-1\d*\	\-?0\.0013\d*\	\-?0\.0013\d*
-1\d*\	\-?0\.00093\d*\	\-?0\.00093\d*
-1\d*\	\-?0\.015\d*\	\-?0\.015\d*
-1\d*\	\-?0\.011\d*\	\-?0\.011\d*
-1\d*\	\-?0\.00080\d*\	\-?0\.00080\d*
-1\d*\	\-?0\.0028\d*\	\-?0\.0028\d*
-1\d*\	\-?0\.0096\d*\	\-?0\.0096\d*
-1\d*\	\-?0\.015\d*\	\-?0\.015\d*
-1\d*\	\-?0\.0032\d*\	\-?0\.0032\d*
-1\d*\	\-?0\.0044\d*\	\-?0\.0044\d*
-1\d*\	\-?0\.0036\d*\	\-?0\.0036\d*
-1\d*\	\-?0\.0011\d*\	\-?0\.0011\d*
-1\d*\	\-?0\.0032\d*\	\-?0\.0032\d*
-1\d*\	\-?0\.0018\d*\	\-?0\.0018\d*
-1\d*\	\-?0\.022\d*\	\-?0\.022\d*
-1\d*\	\-?0\.018\d*\	\-?0\.018\d*
-1\d*\	\-?0\.016\d*\	\-?0\.016\d*
-1\d*\	\-?9\.1\d*e\-?\d*\	\-?9\.1\d*e\-?\d*
-1\d*\	\-?0\.00039\d*\	\-?0\.00039\d*
-1\d*\	\-?0\.036\d*\	\-?0\.036\d*
-1\d*\	\-?0\.020\d*\	\-?0\.020\d*
-1\d*\	\-?0\.0071\d*\	\-?0\.0071\d*
-1\d*\	\-?0\.00093\d*\	\-?0\.00093\d*
-1\d*\	\-?0\.0011\d*\	\-?0\.0011\d*
-1\d*\	\-?0\.0031\d*\	\-?0\.0031\d*
-1\d*\	\-?0\.0071\d*\	\-?0\.0071\d*
-1\d*\	\-?0\.015\d*\	\-?0\.015\d*
-1\d*\	\-?0\.0058\d*\	\-?0\.0058\d*
-1\d*\	\-?0\.005\d*\	\-?0\.005\d*
-1\d*\	\-?0\.018\d*\	\-?0\.018\d*
-1\d*\	\-?0\.0035\d*\	\-?0\.0035\d*
-1\d*\	\-?0\.0036\d*\	\-?0\.0036\d*
-1\d*\	\-?0\.0055\d*\	\-?0\.0055\d*
-1\d*\	\-?0\.0014\d*\	\-?0\.0014\d*
-1\d*\	\-?0\.006\d*\	\-?0\.006\d*
-\#Estimated\ Y\-?coefficients\	1\	2
-1\	\-?0\.00095\d*\	\-?0\.00095\d*
-2\	\-?0\.0075\d*\	\-?0\.0075\d*
-3\	\-?0\.0051\d*\	\-?0\.0051\d*
-4\	\-?0\.0088\d*\	\-?0\.0088\d*
-5\	\-?0\.0014\d*\	\-?0\.0014\d*
-6\	\-?0\.0033\d*\	\-?0\.0033\d*
-7\	\-?0\.0081\d*\	\-?0\.0081\d*
-8\	\-?0\.0015\d*\	\-?0\.0015\d*
-9\	\-?0\.0077\d*\	\-?0\.0077\d*
-\d*\	\-?0\.0054\d*\	\-?0\.0054\d*
-\d*\	\-?0\.0032\d*\	\-?0\.0032\d*
-\d*\	\-?0\.0030\d*\	\-?0\.0030\d*
-\d*\	\-?0\.0044\d*\	\-?0\.0044\d*
-\d*\	\-?0\.0035\d*\	\-?0\.0035\d*
-\d*\	\-?0\.0085\d*\	\-?0\.0085\d*
-\d*\	\-?0\.03\d*\	\-?0\.03\d*
-\d*\	\-?0\.0033\d*\	\-?0\.0033\d*
-\d*\	\-?0\.00095\d*\	\-?0\.00095\d*
-\d*\	\-?0\.0081\d*\	\-?0\.0081\d*
-\d*\	\-?0\.0030\d*\	\-?0\.0030\d*
-\d*\	\-?0\.00097\d*\	\-?0\.00097\d*
-\d*\	\-?0\.0016\d*\	\-?0\.0016\d*
-\d*\	\-?0\.0060\d*\	\-?0\.0060\d*
-\d*\	\-?0\.0034\d*\	\-?0\.0034\d*
-\d*\	\-?0\.0030\d*\	\-?0\.0030\d*
-\d*\	\-?0\.0083\d*\	\-?0\.0083\d*
-\d*\	\-?0\.0015\d*\	\-?0\.0015\d*
-\d*\	\-?0\.0013\d*\	\-?0\.0013\d*
-\d*\	\-?0\.00\d*\	\-?0\.00\d*
-\d*\	\-?0\.0051\d*\	\-?0\.0051\d*
-\d*\	\-?0\.0020\d*\	\-?0\.0020\d*
-\d*\	\-?0\.00097\d*\	\-?0\.00097\d*
-\d*\	\-?0\.011\d*\	\-?0\.011\d*
-\d*\	\-?0\.0087\d*\	\-?0\.0087\d*
-\d*\	\-?0\.0054\d*\	\-?0\.0054\d*
-\d*\	\-?0\.0049\d*\	\-?0\.0049\d*
-\d*\	\-?0\.00033\d*\	\-?0\.00033\d*
-\d*\	\-?0\.0054\d*\	\-?0\.0054\d*
-\d*\	\-?0\.010\d*\	\-?0\.010\d*
-\d*\	\-?0\.0022\d*\	\-?0\.0022\d*
-\d*\	\-?5\.\d*e\-?\d*\	\-?5\.\d*e\-?\d*
-\d*\	\-?0\.023\d*\	\-?0\.023\d*
-\d*\	\-?0\.013\d*\	\-?0\.013\d*
-\d*\	\-?5\.\d*e\-?\d*\	\-?5\.\d*e\-?\d*
-\d*\	\-?0\.0030\d*\	\-?0\.0030\d*
-\d*\	\-?0\.0044\d*\	\-?0\.0044\d*
-\d*\	\-?0\.0030\d*\	\-?0\.0030\d*
-\d*\	\-?0\.010\d*\	\-?0\.010\d*
-\d*\	\-?0\.0016\d*\	\-?0\.0016\d*
-\d*\	\-?0\.0032\d*\	\-?0\.0032\d*
-\d*\	\-?0\.0020\d*\	\-?0\.0020\d*
-\d*\	\-?0\.00069\d*\	\-?0\.00069\d*
-\d*\	\-?0\.001\d*\	\-?0\.001\d*
-\d*\	\-?0\.0034\d*\	\-?0\.0034\d*
-\d*\	\-?0\.00099\d*\	\-?0\.00099\d*
-\d*\	\-?0\.0019\d*\	\-?0\.0019\d*
-\d*\	\-?0\.0010\d*\	\-?0\.0010\d*
-\d*\	\-?0\.006\d*\	\-?0\.006\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*
-\d*\	\-?0\.007\d*\	\-?0\.007\d*
-\d*\	\-?0\.031\d*\	\-?0\.031\d*
-\d*\	\-?0\.0\d*\	\-?0\.0\d*
-\d*\	\-?0\.011\d*\	\-?0\.011\d*
-\d*\	\-?0\.0017\d*\	\-?0\.0017\d*
-\d*\	\-?0\.0044\d*\	\-?0\.0044\d*
-\d*\	\-?0\.0019\d*\	\-?0\.0019\d*
-\d*\	\-?0\.0060\d*\	\-?0\.0060\d*
-\d*\	\-?0\.0015\d*\	\-?0\.0015\d*
-\d*\	\-?0\.015\d*\	\-?0\.015\d*
-\d*\	\-?0\.0011\d*\	\-?0\.0011\d*
-\d*\	\-?0\.0063\d*\	\-?0\.0063\d*
-\d*\	\-?0\.0041\d*\	\-?0\.0041\d*
-\d*\	\-?0\.0028\d*\	\-?0\.0028\d*
-\d*\	\-?0\.0041\d*\	\-?0\.0041\d*
-\d*\	\-?0\.0022\d*\	\-?0\.0022\d*
-\d*\	\-?0\.0011\d*\	\-?0\.0011\d*
-\d*\	\-?0\.008\d*\	\-?0\.008\d*
-\d*\	\-?0\.0023\d*\	\-?0\.0023\d*
-\d*\	\-?0\.00020\d*\	\-?0\.00020\d*
-\d*\	\-?0\.001\d*\	\-?0\.001\d*
-\d*\	\-?1\.3\d*e\-?\d*\	\-?1\.3\d*e\-?\d*
-\d*\	\-?1\.3\d*e\-?\d*\	\-?1\.3\d*e\-?\d*
-\d*\	\-?0\.0015\d*\	\-?0\.0015\d*
-\d*\	\-?0\.0048\d*\	\-?0\.0048\d*
-\d*\	\-?0\.0022\d*\	\-?0\.0022\d*
-\d*\	\-?0\.003\d*\	\-?0\.003\d*
-\d*\	\-?0\.0019\d*\	\-?0\.0019\d*
-\d*\	\-?0\.0091\d*\	\-?0\.0091\d*
-\d*\	\-?0\.0060\d*\	\-?0\.0060\d*
-\d*\	\-?0\.0014\d*\	\-?0\.0014\d*
-\d*\	\-?0\.0012\d*\	\-?0\.0012\d*
-\d*\	\-?0\.00057\d*\	\-?0\.00057\d*
-\d*\	\-?0\.002\d*\	\-?0\.002\d*
-\d*\	\-?0\.0022\d*\	\-?0\.0022\d*
-\d*\	\-?0\.00040\d*\	\-?0\.00040\d*
-\d*\	\-?0\.0062\d*\	\-?0\.0062\d*
-\d*\	\-?0\.0043\d*\	\-?0\.0043\d*
-\d*\	\-?0\.002\d*\	\-?0\.002\d*
-\d*\	\-?0\.0095\d*\	\-?0\.0095\d*
-1\d*\	\-?0\.0019\d*\	\-?0\.0019\d*
-1\d*\	\-?0\.0010\d*\	\-?0\.0010\d*
-1\d*\	\-?0\.0015\d*\	\-?0\.0015\d*
-1\d*\	\-?0\.0017\d*\	\-?0\.0017\d*
-1\d*\	\-?0\.0029\d*\	\-?0\.0029\d*
-1\d*\	\-?9\.6\d*e\-?\d*\	\-?9\.6\d*e\-?\d*
-1\d*\	\-?0\.0010\d*\	\-?0\.0010\d*
-1\d*\	\-?0\.010\d*\	\-?0\.010\d*
-1\d*\	\-?0\.003\d*\	\-?0\.003\d*
-1\d*\	\-?0\.019\d*\	\-?0\.019\d*
-1\d*\	\-?0\.0024\d*\	\-?0\.0024\d*
-1\d*\	\-?0\.00090\d*\	\-?0\.00090\d*
-1\d*\	\-?0\.0035\d*\	\-?0\.0035\d*
-1\d*\	\-?0\.0031\d*\	\-?0\.0031\d*
-1\d*\	\-?0\.0016\d*\	\-?0\.0016\d*
-1\d*\	\-?0\.00073\d*\	\-?0\.00073\d*
-1\d*\	\-?0\.00069\d*\	\-?0\.00069\d*
-1\d*\	\-?9\.6\d*e\-?\d*\	\-?9\.6\d*e\-?\d*
-1\d*\	\-?0\.017\d*\	\-?0\.017\d*
-1\d*\	\-?0\.0062\d*\	\-?0\.0062\d*
-1\d*\	\-?0\.011\d*\	\-?0\.011\d*
-1\d*\	\-?0\.00045\d*\	\-?0\.00045\d*
-1\d*\	\-?0\.0023\d*\	\-?0\.0023\d*
-1\d*\	\-?0\.0029\d*\	\-?0\.0029\d*
-1\d*\	\-?0\.0054\d*\	\-?0\.0054\d*
-1\d*\	\-?0\.0020\d*\	\-?0\.0020\d*
-1\d*\	\-?0\.0097\d*\	\-?0\.0097\d*
-1\d*\	\-?0\.0048\d*\	\-?0\.0048\d*
-1\d*\	\-?0\.00057\d*\	\-?0\.00057\d*
-1\d*\	\-?0\.0032\d*\	\-?0\.0032\d*
-1\d*\	\-?0\.0056\d*\	\-?0\.0056\d*
-1\d*\	\-?0\.0014\d*\	\-?0\.0014\d*
-1\d*\	\-?0\.047\d*\	\-?0\.047\d*
-1\d*\	\-?0\.0032\d*\	\-?0\.0032\d*
-1\d*\	\-?0\.0045\d*\	\-?0\.0045\d*
-1\d*\	\-?0\.0066\d*\	\-?0\.0066\d*
-1\d*\	\-?0\.022\d*\	\-?0\.022\d*
-1\d*\	\-?0\.00047\d*\	\-?0\.00047\d*
-1\d*\	\-?8\.\d*e\-?\d*\	\-?8\.\d*e\-?\d*
-1\d*\	\-?0\.0022\d*\	\-?0\.0022\d*
-1\d*\	\-?0\.001\d*\	\-?0\.001\d*
-1\d*\	\-?0\.0019\d*\	\-?0\.0019\d*
-1\d*\	\-?0\.001\d*\	\-?0\.001\d*
-1\d*\	\-?0\.0015\d*\	\-?0\.0015\d*
-1\d*\	\-?0\.0016\d*\	\-?0\.0016\d*
-1\d*\	\-?0\.0020\d*\	\-?0\.0020\d*
-1\d*\	\-?0\.0023\d*\	\-?0\.0023\d*
-1\d*\	\-?0\.0028\d*\	\-?0\.0028\d*
-1\d*\	\-?9\.6\d*e\-?\d*\	\-?9\.6\d*e\-?\d*
-1\d*\	\-?0\.0010\d*\	\-?0\.0010\d*
-1\d*\	\-?0\.0\d*\	\-?0\.0\d*
--- a/test-data/kpca_out1.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,304 +0,0 @@
-#Component	1	2
-#Eigenvalue	748.5	31.76
-#Principal component vectors	1	2
-1	-0.003553	0.0108
-2	-0.003722	-0.003192
-3	-0.003887	-0.003062
-4	-0.0038	-0.007509
-5	-0.0036	0.01068
-6	-0.002995	0.02441
-7	-0.003814	-0.001828
-8	-0.00354	0.006305
-9	-0.004002	-0.01452
-10	-0.003663	-0.001368
-11	-0.003267	0.02134
-12	-0.003575	0.001815
-13	-0.003807	-0.004912
-14	-0.004281	-0.01352
-15	-0.003251	0.03864
-16	-0.002922	0.04394
-17	-0.003349	0.02585
-18	-0.003521	0.01053
-19	-0.002847	0.02952
-20	-0.003403	0.01643
-21	-0.003128	0.01419
-22	-0.003382	0.01406
-23	-0.004124	0.003402
-24	-0.003227	0.004863
-25	-0.003329	0.0007417
-26	-0.003506	-0.001891
-27	-0.003393	0.005424
-28	-0.003411	0.01263
-29	-0.003506	0.01091
-30	-0.003655	-0.004034
-31	-0.00361	-0.003953
-32	-0.003234	0.01438
-33	-0.003377	0.02573
-34	-0.003236	0.0352
-35	-0.003663	-0.001368
-36	-0.003801	0.003321
-37	-0.003401	0.01999
-38	-0.003663	-0.001368
-39	-0.004064	-0.01246
-40	-0.003482	0.008422
-41	-0.00366	0.008718
-42	-0.004057	-0.02284
-43	-0.004044	-0.008892
-44	-0.003315	0.006892
-45	-0.003022	0.01469
-46	-0.003747	-0.005408
-47	-0.003352	0.01636
-48	-0.003865	-0.00534
-49	-0.003328	0.01907
-50	-0.003632	0.004649
-51	0.002131	0.02353
-52	0.001268	0.01077
-53	0.00232	0.0165
-54	-0.0005446	-0.02218
-55	0.001391	0.002863
-56	0.0003522	-0.01197
-57	0.001503	0.008832
-58	-0.001879	-0.02529
-59	0.001425	0.008304
-60	-0.000784	-0.0196
-61	-0.001688	-0.03188
-62	0.000332	-0.002273
-63	-0.0002401	-0.01364
-64	0.001079	-0.003714
-65	-0.0007557	-0.005543
-66	0.001375	0.01646
-67	0.0004002	-0.01059
-68	-0.0001866	-0.007917
-69	0.0007813	-0.01567
-70	-0.0006023	-0.01492
-71	0.001281	-0.003829
-72	0.0001411	-0.0001777
-73	0.00148	-0.01053
-74	0.0009533	-0.005025
-75	0.0008315	0.006252
-76	0.001242	0.01179
-77	0.001929	0.008282
-78	0.002309	0.007654
-79	0.000773	-0.00466
-80	-0.0009729	-0.007942
-81	-0.0008252	-0.01809
-82	-0.0009752	-0.01705
-83	-0.0003282	-0.007332
-84	0.001528	-0.01434
-85	0.0002046	-0.0145
-86	0.0009586	0.006116
-87	0.001825	0.01357
-88	0.0006918	-0.009912
-89	-0.0001648	-0.006837
-90	-0.0004951	-0.01827
-91	-5.753e-05	-0.01874
-92	0.00098	-0.0006915
-93	-0.0002379	-0.01011
-94	-0.001821	-0.02537
-95	-0.0001244	-0.01378
-96	1.283e-06	-0.005116
-97	2.13e-05	-0.007654
-98	0.0006301	0.001939
-99	-0.001958	-0.01857
-100	-0.000121	-0.00904
-101	0.003803	-0.00735
-102	0.001462	-0.01964
-103	0.004318	0.006231
-104	0.002705	-0.009198
-105	0.003481	-0.006612
-106	0.006089	0.01059
-107	-0.0003162	-0.03404
-108	0.004981	0.005895
-109	0.003329	-0.01195
-110	0.005217	0.01984
-111	0.002443	0.005619
-112	0.002393	-0.009436
-113	0.003352	0.003334
-114	0.001216	-0.02527
-115	0.001742	-0.01987
-116	0.002768	0.0001267
-117	0.002831	-0.001715
-118	0.006776	0.03336
-119	0.006694	-0.002136
-120	0.0012	-0.02356
-121	0.003951	0.00754
-122	0.001074	-0.02014
-123	0.006251	0.00718
-124	0.001691	-0.007518
-125	0.003625	0.006525
-126	0.004493	0.01431
-127	0.001482	-0.006514
-128	0.001565	-0.004955
-129	0.002951	-0.01093
-130	0.004001	0.01226
-131	0.004821	0.00717
-132	0.006416	0.04314
-133	0.003005	-0.01144
-134	0.001834	-0.005554
-135	0.002168	-0.01795
-136	0.005535	0.01723
-137	0.003198	-0.000593
-138	0.002747	-0.001526
-139	0.001328	-0.006202
-140	0.00336	0.008984
-141	0.003579	0.001004
-142	0.003037	0.01072
-143	0.001462	-0.01964
-144	0.004127	0.003295
-145	0.003853	0.004392
-146	0.002912	0.002869
-147	0.001813	-0.01327
-148	0.002518	-2.875e-06
-149	0.002741	-0.0004153
-150	0.001611	-0.01074
-#Rotated values	1	2
-1	-398.9	51.44
-2	-417.9	-15.21
-3	-436.4	-14.59
-4	-426.6	-35.77
-5	-404.2	50.89
-6	-336.3	116.3
-7	-428.2	-8.706
-8	-397.5	30.04
-9	-449.3	-69.17
-10	-411.3	-6.517
-11	-366.8	101.6
-12	-401.4	8.645
-13	-427.5	-23.4
-14	-480.6	-64.4
-15	-365	184
-16	-328.1	209.3
-17	-376	123.1
-18	-395.3	50.18
-19	-319.7	140.6
-20	-382.1	78.28
-21	-351.3	67.61
-22	-379.7	66.98
-23	-463.1	16.21
-24	-362.3	23.16
-25	-373.8	3.533
-26	-393.6	-9.008
-27	-381	25.84
-28	-383	60.16
-29	-393.6	51.96
-30	-410.4	-19.22
-31	-405.3	-18.83
-32	-363.1	68.48
-33	-379.2	122.6
-34	-363.4	167.7
-35	-411.3	-6.517
-36	-426.8	15.82
-37	-381.9	95.23
-38	-411.3	-6.517
-39	-456.3	-59.36
-40	-391	40.12
-41	-410.9	41.53
-42	-455.5	-108.8
-43	-454.1	-42.36
-44	-372.2	32.83
-45	-339.3	69.98
-46	-420.7	-25.76
-47	-376.4	77.94
-48	-433.9	-25.44
-49	-373.7	90.82
-50	-407.8	22.14
-51	239.3	112.1
-52	142.4	51.31
-53	260.5	78.62
-54	-61.14	-105.7
-55	156.2	13.64
-56	39.54	-57.03
-57	168.7	42.07
-58	-210.9	-120.5
-59	160	39.56
-60	-88.03	-93.36
-61	-189.5	-151.8
-62	37.28	-10.83
-63	-26.96	-64.96
-64	121.1	-17.69
-65	-84.85	-26.41
-66	154.4	78.39
-67	44.94	-50.46
-68	-20.95	-37.71
-69	87.72	-74.64
-70	-67.63	-71.05
-71	143.8	-18.24
-72	15.84	-0.8465
-73	166.2	-50.16
-74	107	-23.94
-75	93.36	29.78
-76	139.5	56.16
-77	216.6	39.45
-78	259.2	36.46
-79	86.79	-22.2
-80	-109.2	-37.83
-81	-92.66	-86.17
-82	-109.5	-81.2
-83	-36.85	-34.93
-84	171.6	-68.3
-85	22.97	-69.09
-86	107.6	29.13
-87	204.9	64.63
-88	77.68	-47.21
-89	-18.51	-32.57
-90	-55.59	-87.04
-91	-6.459	-89.26
-92	110	-3.294
-93	-26.71	-48.17
-94	-204.5	-120.9
-95	-13.97	-65.62
-96	0.1441	-24.37
-97	2.392	-36.46
-98	70.75	9.238
-99	-219.8	-88.47
-100	-13.59	-43.06
-101	427	-35.01
-102	164.1	-93.54
-103	484.8	29.68
-104	303.8	-43.81
-105	390.9	-31.49
-106	683.7	50.46
-107	-35.5	-162.1
-108	559.2	28.08
-109	373.7	-56.94
-110	585.8	94.5
-111	274.2	26.77
-112	268.7	-44.95
-113	376.3	15.88
-114	136.5	-120.4
-115	195.6	-94.67
-116	310.8	0.6036
-117	317.9	-8.169
-118	760.8	158.9
-119	751.6	-10.17
-120	134.7	-112.2
-121	443.7	35.91
-122	120.6	-95.95
-123	701.8	34.2
-124	189.9	-35.81
-125	407	31.08
-126	504.5	68.18
-127	166.4	-31.03
-128	175.7	-23.6
-129	331.3	-52.07
-130	449.2	58.4
-131	541.3	34.15
-132	720.4	205.5
-133	337.4	-54.51
-134	205.9	-26.46
-135	243.4	-85.49
-136	621.5	82.05
-137	359.1	-2.825
-138	308.4	-7.267
-139	149.1	-29.54
-140	377.2	42.8
-141	401.9	4.783
-142	341	51.07
-143	164.1	-93.54
-144	463.4	15.7
-145	432.6	20.92
-146	327	13.67
-147	203.6	-63.21
-148	282.7	-0.0137
-149	307.8	-1.978
-150	180.9	-51.18
Binary file test-data/kpca_out2.pdf has changed
--- a/test-data/logreg_inp.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,100 +0,0 @@
-2.04	2.01	1070	1
-2.56	3.4	1254	1
-3.75	3.68	1466	1
-1.1	1.54	706	1
-3	3.32	1160	1
-0.05	0.33	756	1
-1.38	0.36	1058	1
-1.5	1.97	1008	0
-1.38	2.03	1104	1
-4.01	2.05	1200	1
-1.5	2.13	896	1
-1.29	1.34	848	1
-1.9	1.51	958	1
-3.11	3.12	1246	0
-1.92	2.14	1106	1
-0.81	2.6	790	1
-1.01	1.9	954	1
-3.66	3.06	1500	0
-2	1.6	1046	0
-2.05	1.96	1054	1
-2.6	1.96	1198	0
-2.55	1.56	940	1
-0.38	1.6	456	0
-2.48	1.92	1150	1
-2.74	3.09	636	0
-1.77	0.78	744	1
-1.61	2.12	644	0
-0.99	1.85	842	1
-1.62	1.78	852	1
-2.03	1.03	1170	0
-3.5	3.44	1034	1
-3.18	2.42	1202	1
-2.39	1.74	1018	1
-1.48	1.89	1180	1
-1.54	1.43	952	0
-1.57	1.64	1038	1
-2.46	2.69	1090	0
-2.42	1.79	694	0
-2.11	2.72	1096	0
-2.04	2.15	1114	0
-1.68	2.22	1256	1
-1.64	1.55	1208	0
-2.41	2.34	820	0
-2.1	2.92	1222	0
-1.4	2.1	1120	0
-2.03	1.64	886	0
-1.99	2.83	1126	0
-2.24	1.76	1158	0
-0.45	1.81	676	0
-2.31	2.68	1214	0
-2.41	2.55	1136	1
-2.56	2.7	1264	0
-2.5	1.66	1116	1
-2.92	2.23	1292	1
-2.35	2.01	604	1
-2.82	1.24	854	1
-1.8	1.95	814	0
-1.29	1.73	778	1
-1.68	1.08	800	0
-3.44	3.46	1424	0
-1.9	3.01	950	0
-2.06	0.54	1056	1
-3.3	3.2	956	1
-1.8	1.5	1352	1
-2	1.71	852	1
-1.68	1.99	1168	0
-1.94	2.76	970	1
-0.97	1.56	776	1
-1.12	1.78	854	1
-1.31	1.32	1232	0
-1.68	0.87	1140	0
-3.09	1.75	1084	0
-1.87	1.41	954	0
-2	2.77	1000	0
-2.39	1.78	1084	0
-1.5	1.34	1058	0
-1.82	1.52	816	0
-1.8	2.97	1146	0
-2.01	1.75	1000	1
-1.88	1.64	856	1
-1.64	1.8	798	1
-2.42	3.37	1324	1
-0.22	1.15	704	1
-2.31	1.72	1222	1
-0.95	2.27	948	0
-1.99	2.85	1182	0
-1.86	2.21	1000	1
-1.79	1.94	910	0
-3.02	4.25	1374	1
-1.85	1.83	1014	0
-1.98	2.75	1420	0
-2.15	1.71	400	0
-1.46	2.2	998	1
-2.29	2.13	776	1
-2.39	2.38	1134	0
-1.8	1.64	772	0
-2.64	1.87	1304	0
-2.08	2.53	1212	0
-0.7	1.78	818	1
-0.89	1.2	864	1
\ No newline at end of file
--- a/test-data/logreg_out2.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,19 +0,0 @@
-response column	c4
-predictor column(s)	c1,c2,c3
-Y-intercept	0.9111624714
-p-value (Y-intercept)	0.3571052008
-Slope (c1)	0.057995684
-p-value (c1)	0.8677866885
-Slope (c2)	-0.2319990287
-p-value (c2)	0.4986584837
-Slope (c3)	-0.0004633556
-p-value (c3)	0.6785709433
-Null deviance	138.46939
-Residual deviance	137.44023
-pseudo R-squared	0.00743
-
-
-vif
-c1 1.65649272465
-c2 1.47696547452
-c3 1.4307725027
--- a/test-data/partialR_result.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,4 +0,0 @@
-#Model	R-sq	partial_R_Terms	partial_R_Value
-1 2 	0.9388	-	-
-2 	0.7280	1 	0.7750
-1 	0.9104	2 	0.3167
\ No newline at end of file
--- a/test-data/pca_out1.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,159 +0,0 @@
-#Component	1	2	3	4
-#Std. deviation	1.706	0.9598	0.3839	0.1436
-#Proportion of variance explained	0.7277	0.2303	0.03684	0.005152
-#Loadings	1	2	3	4
-c1	0.5224	-0.3723	0.721	0.262
-c2	-0.2634	-0.9256	-0.242	-0.1241
-c3	0.5813	-0.02109	-0.1409	-0.8012
-c4	0.5656	-0.06542	-0.6338	0.5235
-#Scores	1	2	3	4
-1	-2.265	-0.5057	0.1219	0.02307
-2	-2.086	0.6554	0.2273	0.1032
-3	-2.368	0.3185	-0.05148	0.02783
-4	-2.304	0.5754	-0.09886	-0.06631
-5	-2.389	-0.6748	-0.02143	-0.0374
-6	-2.071	-1.519	-0.03068	0.004399
-7	-2.446	-0.07456	-0.3422	-0.0381
-8	-2.234	-0.2476	0.08257	-0.02551
-9	-2.342	1.095	-0.1536	-0.02679
-10	-2.189	0.4486	0.2466	-0.03991
-11	-2.163	-1.071	0.264	0.0153
-12	-2.327	-0.1586	-0.1002	-0.1346
-13	-2.224	0.7091	0.2232	0.002631
-14	-2.64	0.9383	-0.1896	-0.01942
-15	-2.192	-1.89	0.4695	0.1928
-16	-2.251	-2.722	-0.0326	0.04713
-17	-2.203	-1.514	0.001363	0.1866
-18	-2.19	-0.5143	0.03862	0.09191
-19	-1.894	-1.431	0.3707	0.05953
-20	-2.34	-1.158	-0.1374	-0.03983
-21	-1.915	-0.4305	0.416	0.01036
-22	-2.205	-0.9525	-0.1647	0.05773
-23	-2.774	-0.4895	-0.3388	0.01785
-24	-1.82	-0.1068	-0.04006	0.1503
-25	-2.228	-0.1622	-0.1242	-0.2712
-26	-1.957	0.6079	0.2986	0.04384
-27	-2.052	-0.266	-0.09209	0.0666
-28	-2.168	-0.552	0.2013	0.009261
-29	-2.14	-0.3366	0.2653	0.08354
-30	-2.269	0.3149	-0.07552	-0.1088
-31	-2.145	0.4839	0.06786	-0.04838
-32	-1.832	-0.4453	0.2654	0.2391
-33	-2.608	-1.828	-0.05142	-0.2319
-34	-2.438	-2.185	0.07935	-0.05102
-35	-2.189	0.4486	0.2466	-0.03991
-36	-2.211	0.1843	0.2186	0.1686
-37	-2.044	-0.685	0.4794	0.1956
-38	-2.189	0.4486	0.2466	-0.03991
-39	-2.436	0.8822	-0.2016	-0.009961
-40	-2.171	-0.2927	0.1699	0.00624
-41	-2.287	-0.468	-0.04074	0.1057
-42	-1.872	2.328	0.1945	0.2917
-43	-2.558	0.4538	-0.3136	-0.06741
-44	-1.964	-0.4974	-0.3148	0.1755
-45	-2.133	-1.171	-0.2528	-0.1532
-46	-2.075	0.6919	0.05656	0.1403
-47	-2.381	-1.151	-0.0621	-0.1542
-48	-2.398	0.3624	-0.1469	-0.04948
-49	-2.227	-1.025	0.1766	-0.01644
-50	-2.206	-0.03224	0.1466	0.04878
-51	1.104	-0.8631	0.6856	0.03498
-52	0.7325	-0.5986	0.09407	0.004454
-53	1.242	-0.6148	0.5548	0.009673
-54	0.3973	1.758	0.01857	0.0674
-55	1.073	0.2118	0.3974	0.1055
-56	0.3845	0.5911	-0.1268	-0.2405
-57	0.7487	-0.7787	-0.1487	-0.0783
-58	-0.4979	1.849	-0.2556	-0.03939
-59	0.9262	-0.03033	0.5955	-0.0291
-60	0.004968	1.029	-0.5429	-0.02835
-61	-0.1247	2.658	0.03981	0.01614
-62	0.4387	0.05888	-0.2067	0.03985
-63	0.5516	1.773	0.7614	0.04835
-64	0.7172	0.1854	0.0673	-0.1646
-65	-0.03726	0.4328	-0.1981	0.109
-66	0.8759	-0.51	0.5035	0.1051
-67	0.348	0.1906	-0.4928	-0.1921
-68	0.1534	0.7907	0.2986	-0.2043
-69	1.215	1.633	0.4794	0.2282
-70	0.1569	1.303	0.1686	-0.05041
-71	0.7383	-0.4025	-0.6168	-0.08445
-72	0.4724	0.4166	0.2627	0.1142
-73	1.228	0.9409	0.3667	-0.008448
-74	0.6294	0.4168	0.29	-0.2735
-75	0.7005	0.06349	0.4448	0.04408
-76	0.8735	-0.2507	0.4721	0.1021
-77	1.254	0.08262	0.7268	0.04083
-78	1.358	-0.3288	0.2615	0.06701
-79	0.6621	0.2243	-0.08737	-0.03635
-80	-0.04728	1.057	0.3153	0.06601
-81	0.1215	1.564	0.1452	-0.007875
-82	0.01412	1.573	0.2366	-0.03115
-83	0.236	0.7759	0.148	0.02446
-84	1.057	0.6369	-0.1068	-0.1834
-85	0.2214	0.2808	-0.6676	-0.2556
-86	0.4318	-0.8551	-0.4507	-0.1111
-87	1.049	-0.5222	0.3961	0.0373
-88	1.036	1.392	0.6854	0.1391
-89	0.06707	0.2126	-0.2941	-0.1475
-90	0.2754	1.33	-0.09344	0.009948
-91	0.2723	1.119	-0.09817	-0.2698
-92	0.6232	-0.02754	0.0193	-0.1477
-93	0.33	0.9889	0.196	0.007627
-94	-0.3736	2.018	-0.1122	0.02108
-95	0.2829	0.854	-0.1341	-0.1069
-96	0.08905	0.1749	-0.1314	-0.2301
-97	0.2244	0.3805	-0.1588	-0.1326
-98	0.5739	0.1537	0.27	-0.01941
-99	-0.457	1.539	-0.1961	0.2009
-100	0.2522	0.5959	-0.09475	-0.0583
-101	1.848	-0.8717	-1.003	-0.05107
-102	1.153	0.7013	-0.5315	-0.04041
-103	2.206	-0.5545	0.2055	0.0593
-104	1.439	0.05001	-0.1634	-0.2358
-105	1.868	-0.2912	-0.394	-0.01678
-106	2.754	-0.7884	0.5862	-0.1009
-107	0.3584	1.56	-0.991	-0.133
-108	2.303	-0.4095	0.6542	-0.2372
-109	2.002	0.7239	0.3941	-0.08499
-110	2.268	-1.921	-0.3925	0.1029
-111	1.366	-0.6939	-0.2833	0.107
-112	1.599	0.4282	-0.0233	0.05894
-113	1.884	-0.4143	-0.02455	0.1463
-114	1.253	1.167	-0.5821	0.09968
-115	1.464	0.4441	-1.004	0.275
-116	1.592	-0.677	-0.6367	0.1906
-117	1.471	-0.2532	-0.03666	-0.1554
-118	2.437	-2.557	0.1342	-0.2757
-119	3.309	0.002361	0.7069	0.04676
-120	1.254	1.718	0.2646	-0.06307
-121	2.04	-0.9074	-0.2319	0.1671
-122	0.9739	0.5712	-0.8295	0.02732
-123	2.898	-0.3978	0.8609	-0.1261
-124	1.329	0.4868	0.004707	0.1406
-125	1.704	-1.014	-0.296	-0.06274
-126	1.958	-1.003	0.4228	-0.2185
-127	1.172	0.3189	-0.1307	0.1257
-128	1.02	-0.06554	-0.338	-0.009069
-129	1.786	0.1933	-0.27	0.03121
-130	1.865	-0.5554	0.7175	-0.2076
-131	2.435	-0.2467	0.7302	-0.01679
-132	2.316	-2.626	0.4996	-0.2132
-133	1.86	0.1847	-0.3533	0.1
-134	1.111	0.296	0.1827	-0.1857
-135	1.197	0.8172	0.1632	-0.4884
-136	2.801	-0.8447	0.547	0.2963
-137	1.58	-1.072	-0.9434	0.03361
-138	1.347	-0.4223	-0.18	-0.2159
-139	0.9234	-0.01923	-0.4174	0.004744
-140	1.854	-0.6724	0.01482	0.1949
-141	2.016	-0.6104	-0.4259	0.2468
-142	1.903	-0.686	-0.1278	0.4692
-143	1.153	0.7013	-0.5315	-0.04041
-144	2.043	-0.8647	-0.3353	0.04428
-145	2.002	-1.049	-0.6293	0.2126
-146	1.871	-0.3828	-0.2545	0.3889
-147	1.558	0.9053	0.02538	0.2213
-148	1.521	-0.2668	-0.1793	0.1189
-149	1.376	-1.016	-0.9314	0.02415
-150	0.9593	0.02228	-0.5288	-0.1637
Binary file test-data/pca_out2.pdf has changed
--- a/test-data/pca_out3.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,159 +0,0 @@
-#Component	1	2	3	4
-#Std. deviation	2.049	0.4905	0.2793	0.1534
-#Proportion of variance explained	0.9246	0.05302	0.01719	0.005183
-#Loadings	1	2	3	4
-c1	0.3616	-0.6565	0.581	0.3173
-c2	-0.08227	-0.7297	-0.5964	-0.3241
-c3	0.8566	0.1758	-0.07252	-0.4797
-c4	0.3588	0.07471	-0.5491	0.7511
-#Scores	1	2	3	4
-1	-2.684	-0.3266	0.02151	0.001006
-2	-2.715	0.1696	0.2035	0.0996
-3	-2.89	0.1373	-0.02471	0.0193
-4	-2.746	0.3111	-0.03767	-0.07596
-5	-2.729	-0.3339	-0.09623	-0.06313
-6	-2.28	-0.7478	-0.1743	-0.02715
-7	-2.821	0.0821	-0.2643	-0.0501
-8	-2.626	-0.1704	0.0158	-0.04628
-9	-2.888	0.5708	-0.02734	-0.02662
-10	-2.674	0.1067	0.1915	-0.05589
-11	-2.507	-0.6519	0.06927	-0.01661
-12	-2.613	-0.02152	-0.1077	-0.1577
-13	-2.787	0.2277	0.2003	-0.007235
-14	-3.225	0.5033	-0.06841	-0.02195
-15	-2.644	-1.186	0.1445	0.157
-16	-2.384	-1.345	-0.2837	0.001926
-17	-2.623	-0.8181	-0.1453	0.1647
-18	-2.648	-0.3191	-0.03339	0.07612
-19	-2.199	-0.8792	0.1145	0.02533
-20	-2.587	-0.5205	-0.2196	-0.06908
-21	-2.311	-0.3979	0.2337	-0.01532
-22	-2.543	-0.44	-0.2148	0.03844
-23	-3.216	-0.1416	-0.2996	0.001857
-24	-2.303	-0.1055	-0.04568	0.1472
-25	-2.356	0.03121	-0.1294	-0.3016
-26	-2.508	0.1391	0.2471	0.03538
-27	-2.469	-0.1379	-0.1013	0.05597
-28	-2.562	-0.3747	0.07236	-0.01524
-29	-2.64	-0.3193	0.1393	0.06514
-30	-2.633	0.1901	-0.04647	-0.1246
-31	-2.588	0.1974	0.07128	-0.06048
-32	-2.41	-0.4181	0.1384	0.2308
-33	-2.648	-0.82	-0.2306	-0.2848
-34	-2.597	-1.1	-0.1636	-0.09896
-35	-2.674	0.1067	0.1915	-0.05589
-36	-2.867	-0.07719	0.1568	0.1625
-37	-2.625	-0.6068	0.2612	0.1759
-38	-2.674	0.1067	0.1915	-0.05589
-39	-2.982	0.4803	-0.07972	-0.01105
-40	-2.59	-0.2361	0.0739	-0.01456
-41	-2.77	-0.2711	-0.08424	0.09236
-42	-2.852	0.9329	0.341	0.3227
-43	-2.998	0.3343	-0.199	-0.07587
-44	-2.406	-0.1959	-0.2707	0.1738
-45	-2.209	-0.4427	-0.3035	-0.1859
-46	-2.716	0.2427	0.09052	0.143
-47	-2.538	-0.5104	-0.1719	-0.1922
-48	-2.84	0.2206	-0.09006	-0.06039
-49	-2.543	-0.5863	0.01118	-0.04833
-50	-2.704	-0.115	0.0827	0.0341
-51	1.285	-0.6854	0.4061	0.01929
-52	0.9324	-0.3192	0.01713	-6.758e-06
-53	1.464	-0.5042	0.3383	-0.0008576
-54	0.181	0.8256	0.1771	0.09578
-55	1.087	-0.07539	0.3065	0.1134
-56	0.6404	0.4173	-0.04119	-0.2427
-57	1.095	-0.2839	-0.17	-0.08497
-58	-0.7515	1.001	-0.01567	-0.01651
-59	1.043	-0.229	0.4148	-0.03752
-60	-0.01019	0.7206	-0.2834	-0.005946
-61	-0.5111	1.262	0.2665	0.04891
-62	0.5111	0.1023	-0.1323	0.0501
-63	0.2623	0.5479	0.6919	0.06148
-64	0.984	0.1244	0.06216	-0.169
-65	-0.1749	0.2518	-0.09366	0.1249
-66	0.9276	-0.4682	0.3132	0.1004
-67	0.6596	0.352	-0.3284	-0.189
-68	0.2345	0.3319	0.2703	-0.212
-69	0.9424	0.5418	0.4973	0.2606
-70	0.04325	0.5815	0.233	-0.03956
-71	1.116	0.08421	-0.4598	-0.07721
-72	0.3568	0.06682	0.2275	0.1241
-73	1.296	0.3276	0.3475	0.003246
-74	0.9205	0.1824	0.2316	-0.2868
-75	0.714	-0.1504	0.3204	0.04294
-76	0.8996	-0.3296	0.3148	0.1011
-77	1.331	-0.2447	0.5212	0.03751
-78	1.557	-0.2674	0.1646	0.07035
-79	0.8125	0.1623	-0.03634	-0.02968
-80	-0.3073	0.3651	0.3153	0.07653
-81	-0.07034	0.7025	0.2418	0.009095
-82	-0.1919	0.6775	0.3039	-0.01805
-83	0.135	0.3117	0.175	0.03418
-84	1.379	0.4212	-0.01548	-0.1776
-85	0.5873	0.4833	-0.4446	-0.2524
-86	0.8072	-0.1951	-0.3895	-0.1166
-87	1.22	-0.408	0.2366	0.03164
-88	0.8129	0.3707	0.6129	0.1577
-89	0.2452	0.2667	-0.1896	-0.1473
-90	0.1645	0.6797	0.0578	0.03097
-91	0.463	0.6695	0.02405	-0.2684
-92	0.8902	0.03381	0.009768	-0.1534
-93	0.2289	0.4023	0.2274	0.01862
-94	-0.7071	1.008	0.1021	0.04762
-95	0.3555	0.5032	-0.01789	-0.09807
-96	0.3311	0.2112	-0.08381	-0.2387
-97	0.3752	0.2916	-0.07907	-0.1312
-98	0.6417	-0.01907	0.2042	-0.02051
-99	-0.9085	0.7516	0.007737	0.2336
-100	0.2978	0.347	-0.01218	-0.05078
-101	2.532	0.01184	-0.7585	-0.0326
-102	1.414	0.5749	-0.2964	-0.0157
-103	2.616	-0.3419	0.1121	0.06596
-104	1.971	0.1811	-0.1065	-0.2369
-105	2.35	0.04188	-0.2841	-0.001313
-106	3.397	-0.5472	0.3519	-0.1112
-107	0.5194	1.191	-0.5467	-0.0988
-108	2.932	-0.3524	0.4237	-0.2554
-109	2.32	0.2455	0.3499	-0.07626
-110	2.918	-0.7804	-0.4217	0.1077
-111	1.662	-0.242	-0.2428	0.1194
-112	1.802	0.2162	0.0377	0.07871
-113	2.165	-0.2153	-0.03315	0.1627
-114	1.345	0.7764	-0.2829	0.1405
-115	1.585	0.5393	-0.6306	0.3275
-116	1.905	-0.1188	-0.4801	0.2171
-117	1.949	-0.04073	-0.04273	-0.1578
-118	3.489	-1.172	-0.1293	-0.3116
-119	3.795	-0.2533	0.517	0.05645
-120	1.298	0.761	0.3449	-0.04267
-121	2.428	-0.3768	-0.2186	0.1839
-122	1.198	0.6056	-0.5126	0.0595
-123	3.499	-0.4568	0.5769	-0.1378
-124	1.388	0.204	0.06351	0.1638
-125	2.276	-0.3334	-0.2847	-0.06223
-126	2.614	-0.5584	0.2084	-0.2404
-127	1.258	0.1791	-0.04698	0.1476
-128	1.291	0.1164	-0.2316	0.003084
-129	2.123	0.2109	-0.1535	0.05261
-130	2.388	-0.4625	0.452	-0.2299
-131	2.841	-0.3727	0.501	-0.02022
-132	3.232	-1.371	0.1184	-0.2545
-133	2.159	0.2183	-0.2084	0.1277
-134	1.443	0.1438	0.1541	-0.1899
-135	1.78	0.5015	0.1758	-0.5035
-136	3.077	-0.6858	0.3364	0.3106
-137	2.145	-0.1389	-0.7342	0.05177
-138	1.905	-0.04805	-0.1605	-0.222
-139	1.169	0.1645	-0.2825	0.01933
-140	2.108	-0.3715	-0.02744	0.21
-141	2.314	-0.1826	-0.3229	0.2759
-142	1.922	-0.4093	-0.1155	0.5041
-143	1.414	0.5749	-0.2964	-0.0157
-144	2.563	-0.276	-0.2913	0.05618
-145	2.419	-0.3035	-0.5043	0.2382
-146	1.944	-0.1874	-0.1793	0.4251
-147	1.526	0.375	0.1206	0.2557
-148	1.764	-0.07852	-0.1308	0.1363
-149	1.902	-0.1159	-0.7229	0.04087
-150	1.39	0.2829	-0.3623	-0.1563
Binary file test-data/pca_out4.pdf has changed
--- a/test-data/pca_out5.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,159 +0,0 @@
-#Component	1	2	3	4
-#Std. deviation	1.706	0.9598	0.3839	0.1436
-#Proportion of variance explained	0.7277	0.2303	0.03684	0.005152
-#Loadings	1	2	3	4
-c1	0.5224	-0.3723	0.721	0.262
-c2	-0.2634	-0.9256	-0.242	-0.1241
-c3	0.5813	-0.02109	-0.1409	-0.8012
-c4	0.5656	-0.06542	-0.6338	0.5235
-#Scores	1	2	3	4
-1	-2.257	-0.504	0.1215	0.023
-2	-2.079	0.6532	0.2265	0.1029
-3	-2.36	0.3174	-0.05131	0.02773
-4	-2.297	0.5734	-0.09853	-0.06609
-5	-2.381	-0.6725	-0.02136	-0.03727
-6	-2.064	-1.513	-0.03058	0.004384
-7	-2.438	-0.07431	-0.3411	-0.03797
-8	-2.226	-0.2468	0.0823	-0.02542
-9	-2.334	1.091	-0.153	-0.0267
-10	-2.181	0.4471	0.2457	-0.03977
-11	-2.156	-1.067	0.2631	0.01525
-12	-2.32	-0.1581	-0.09983	-0.1341
-13	-2.217	0.7068	0.2225	0.002622
-14	-2.631	0.9351	-0.1889	-0.01936
-15	-2.185	-1.884	0.4679	0.1921
-16	-2.244	-2.713	-0.03249	0.04697
-17	-2.195	-1.509	0.001359	0.186
-18	-2.183	-0.5126	0.03849	0.0916
-19	-1.888	-1.426	0.3695	0.05933
-20	-2.332	-1.154	-0.137	-0.0397
-21	-1.908	-0.429	0.4146	0.01033
-22	-2.197	-0.9493	-0.1642	0.05754
-23	-2.765	-0.4879	-0.3377	0.01779
-24	-1.814	-0.1064	-0.03993	0.1498
-25	-2.221	-0.1616	-0.1238	-0.2703
-26	-1.95	0.6059	0.2976	0.04369
-27	-2.045	-0.2651	-0.09179	0.06638
-28	-2.161	-0.5502	0.2006	0.00923
-29	-2.133	-0.3355	0.2644	0.08326
-30	-2.261	0.3138	-0.07526	-0.1085
-31	-2.137	0.4823	0.06763	-0.04822
-32	-1.826	-0.4438	0.2645	0.2383
-33	-2.599	-1.822	-0.05125	-0.2311
-34	-2.43	-2.178	0.07908	-0.05085
-35	-2.181	0.4471	0.2457	-0.03977
-36	-2.204	0.1837	0.2179	0.1681
-37	-2.038	-0.6827	0.4778	0.195
-38	-2.181	0.4471	0.2457	-0.03977
-39	-2.428	0.8792	-0.2009	-0.009928
-40	-2.163	-0.2917	0.1694	0.006219
-41	-2.279	-0.4664	-0.0406	0.1054
-42	-1.865	2.32	0.1939	0.2907
-43	-2.549	0.4523	-0.3125	-0.06719
-44	-1.958	-0.4957	-0.3137	0.175
-45	-2.126	-1.168	-0.2519	-0.1527
-46	-2.068	0.6896	0.05637	0.1398
-47	-2.373	-1.147	-0.06189	-0.1537
-48	-2.39	0.3612	-0.1464	-0.04931
-49	-2.219	-1.022	0.1761	-0.01639
-50	-2.199	-0.03213	0.1461	0.04862
-51	1.1	-0.8602	0.6833	0.03486
-52	0.73	-0.5966	0.09375	0.004439
-53	1.238	-0.6128	0.553	0.009641
-54	0.396	1.752	0.01851	0.06717
-55	1.069	0.2111	0.3961	0.1052
-56	0.3832	0.5891	-0.1264	-0.2397
-57	0.7462	-0.7761	-0.1482	-0.07804
-58	-0.4962	1.843	-0.2547	-0.03926
-59	0.9231	-0.03023	0.5935	-0.02901
-60	0.004951	1.026	-0.5411	-0.02825
-61	-0.1243	2.649	0.03968	0.01609
-62	0.4373	0.05868	-0.206	0.03972
-63	0.5498	1.767	0.7588	0.04819
-64	0.7148	0.1848	0.06708	-0.164
-65	-0.03713	0.4314	-0.1974	0.1087
-66	0.873	-0.5083	0.5018	0.1048
-67	0.3468	0.19	-0.4912	-0.1914
-68	0.1529	0.7881	0.2976	-0.2036
-69	1.211	1.628	0.4778	0.2275
-70	0.1564	1.299	0.168	-0.05024
-71	0.7358	-0.4011	-0.6147	-0.08417
-72	0.4708	0.4152	0.2618	0.1139
-73	1.224	0.9378	0.3655	-0.00842
-74	0.6273	0.4154	0.289	-0.2726
-75	0.6981	0.06328	0.4433	0.04393
-76	0.8706	-0.2499	0.4706	0.1018
-77	1.25	0.08234	0.7244	0.04069
-78	1.354	-0.3277	0.2606	0.06679
-79	0.6599	0.2236	-0.08708	-0.03623
-80	-0.04712	1.054	0.3143	0.06579
-81	0.1211	1.558	0.1448	-0.007849
-82	0.01407	1.568	0.2358	-0.03105
-83	0.2352	0.7733	0.1475	0.02438
-84	1.053	0.6348	-0.1064	-0.1828
-85	0.2207	0.2799	-0.6653	-0.2547
-86	0.4303	-0.8523	-0.4492	-0.1108
-87	1.046	-0.5205	0.3948	0.03717
-88	1.032	1.388	0.6831	0.1387
-89	0.06684	0.2119	-0.2931	-0.147
-90	0.2745	1.325	-0.09313	0.009914
-91	0.2714	1.116	-0.09784	-0.2689
-92	0.6211	-0.02745	0.01924	-0.1472
-93	0.3289	0.9856	0.1953	0.007601
-94	-0.3724	2.011	-0.1118	0.02101
-95	0.282	0.8511	-0.1337	-0.1065
-96	0.08876	0.1743	-0.131	-0.2294
-97	0.2236	0.3792	-0.1582	-0.1321
-98	0.572	0.1532	0.2691	-0.01934
-99	-0.4555	1.534	-0.1955	0.2002
-100	0.2514	0.5939	-0.09443	-0.0581
-101	1.842	-0.8688	-0.9994	-0.0509
-102	1.149	0.699	-0.5297	-0.04028
-103	2.199	-0.5526	0.2048	0.0591
-104	1.434	0.04984	-0.1628	-0.235
-105	1.862	-0.2902	-0.3927	-0.01673
-106	2.745	-0.7858	0.5843	-0.1005
-107	0.3572	1.555	-0.9877	-0.1325
-108	2.295	-0.4081	0.652	-0.2364
-109	1.995	0.7214	0.3928	-0.08471
-110	2.26	-1.915	-0.3912	0.1026
-111	1.361	-0.6916	-0.2823	0.1067
-112	1.594	0.4268	-0.02323	0.05875
-113	1.878	-0.4129	-0.02447	0.1458
-114	1.249	1.163	-0.5802	0.09935
-115	1.459	0.4427	-1.001	0.2741
-116	1.586	-0.6748	-0.6345	0.19
-117	1.466	-0.2523	-0.03654	-0.1549
-118	2.429	-2.548	0.1338	-0.2747
-119	3.298	0.002353	0.7046	0.0466
-120	1.25	1.712	0.2637	-0.06286
-121	2.034	-0.9044	-0.2311	0.1666
-122	0.9707	0.5693	-0.8267	0.02723
-123	2.888	-0.3965	0.8581	-0.1257
-124	1.325	0.4851	0.004692	0.1401
-125	1.699	-1.011	-0.295	-0.06253
-126	1.951	-1	0.4214	-0.2177
-127	1.168	0.3178	-0.1302	0.1253
-128	1.016	-0.06532	-0.3369	-0.009038
-129	1.78	0.1926	-0.2691	0.0311
-130	1.859	-0.5535	0.7151	-0.2069
-131	2.427	-0.2458	0.7278	-0.01674
-132	2.308	-2.617	0.498	-0.2124
-133	1.854	0.1841	-0.3522	0.09971
-134	1.108	0.295	0.182	-0.1851
-135	1.193	0.8144	0.1627	-0.4868
-136	2.792	-0.8419	0.5452	0.2953
-137	1.575	-1.069	-0.9402	0.0335
-138	1.343	-0.4208	-0.1794	-0.2152
-139	0.9203	-0.01917	-0.416	0.004728
-140	1.847	-0.6702	0.01477	0.1942
-141	2.009	-0.6084	-0.4245	0.2459
-142	1.897	-0.6837	-0.1274	0.4676
-143	1.149	0.699	-0.5297	-0.04028
-144	2.036	-0.8618	-0.3341	0.04413
-145	1.995	-1.045	-0.6272	0.2119
-146	1.864	-0.3815	-0.2537	0.3876
-147	1.553	0.9023	0.0253	0.2206
-148	1.516	-0.2659	-0.1787	0.1185
-149	1.372	-1.013	-0.9283	0.02407
-150	0.9561	0.02221	-0.527	-0.1631
Binary file test-data/pca_out6.pdf has changed
--- a/test-data/regr_inp.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-7	33	42
-4	41	33
-16	7	75
-3	49	28
-21	5	91
-8	31	55
-7	35	52
-5	30	16
-15	10	69
-20	10	94
Binary file test-data/regr_out.pdf has changed
--- a/test-data/regr_out.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-p-value (Y-intercept)	0.294263204753
-Slope (c1)	5.74713412453
-p-value (c1)	[1] 0.001732877
-
-Slope (c2)	0.904274537427
-p-value (c2)	[1] 0.1146675
-
-R-squared	[1] 0.93881
-
-Adjusted R-squared	[1] 0.92132
-
-F-statistic	53.694626423
-Sigma	7.43450071131
--- a/test-data/scatterplot_in1.tabular	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-1	68	4.1
-2	71	4.6
-3	62	3.8
-4	75	4.4
-5	58	3.2
-6	60	3.1
-7	67	3.8
-8	68	4.1
-9	71	4.3
-10	69	3.7
Binary file test-data/scatterplot_out1.pdf has changed
--- a/test-data/solexa.qual	Sun Feb 16 06:16:54 2014 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
- -40  -40   40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40  -15   15	 -40   40  -40  -40
- -40  -40   40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	  -5    5  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	 -40   21  -40  -21	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  12  -40  -40  -12	 -36  -40   36  -40	 -40  -40   40  -40	  -4    4  -40  -40	 -40  -40  -40   40	 -40  -40   14  -14	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -25   25
- -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   34  -40  -34	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -25   25  -40	 -40  -40  -40   40	 -37  -40   37  -40	 -40    7  -40   -7	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  38  -40  -40  -38	  40  -40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	  11  -16  -13  -22	 -40  -40   40  -40	 -40  -40  -40   40
-  40  -40  -40  -40	 -40   40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -33   33  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40  -40  -25   25	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   27  -27	  -5    5  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -37   37  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -25   25	  40  -40  -40  -40	 -40  -40   34  -34	 -40   40  -40  -40
- -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   -2    2	 -40  -40   35  -35	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	 -40   36  -40  -36	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	   5   -5  -40  -28	 -40  -16  -40   16	 -40   40  -40  -40
- -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   28  -28	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40   40  -40
- -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   27  -27	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   14  -14	 -40   40  -40  -40
- -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -36  -40  -40   36	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40
- -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40   40  -40	 -40  -40   22  -22	 -40   40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40
- -40  -40   40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   -6    6	 -40   40  -40  -40	 -40  -40  -40   40	   3  -40  -40   -3	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	  -0  -40  -40    0	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -20   20  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	   8  -40  -40   -8	 -40   40  -40  -40	 -40  -40   40  -40	  -4    4  -40  -40
- -40  -40   40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	  -1    1  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   28  -28	 -40  -40  -40   40	 -10  -40   10  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40    2   -2  -40	  40  -40  -40  -40	  17  -40  -17  -40	 -40   40  -40  -40	   3   -3  -40  -40	 -40  -40  -40   40	 -28  -40   28  -40	 -40  -40   40  -40	  13  -26  -13  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   18  -18	  40  -40  -40  -40	 -10  -40   10  -40	 -40  -40   40  -40	  17  -40  -17  -40	 -28   28  -40  -40
- -40  -40   40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	   5  -40  -40   -5	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	  40  -40  -40  -40	 -10   10  -40  -40	 -40  -40   40  -40	 -40   28  -28  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	  32  -32  -40  -40	 -40    4  -40   -4	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40
- -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40   40  -40  -40	 -40   40  -40  -40	  27  -27  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40
- -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -15   15  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  32  -40  -40  -32	 -40  -40  -40   40
- -40  -40   40  -40	 -40  -40  -40   40	 -40   40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40   40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40
- -40  -40   40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  25  -40  -40  -25	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -20   20	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -31   31  -40  -40	 -40  -40  -40   40	 -40  -23   23  -40	 -40  -40  -40   40	 -40  -40   40  -40	  15  -15  -40  -40	 -40   40  -40  -40	  25  -40  -25  -40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	  39  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40
- -40  -40   40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40
- -40  -40   40  -40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40
- -40  -40   40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	 -40   40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40  -40   40
- -40  -40   40  -40	 -40  -40  -40   40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40
- -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -36  -40   36	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -24  -40  -40   24	 -40  -40  -40   40	 -14  -40  -40   14	 -40  -40  -40   40	 -40  -40  -40   40
- -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40    9   -9	 -40  -40  -40   40	  23  -40  -40  -23
- -40  -40   40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40   20  -20	  40  -40  -40  -40	 -40  -10   10  -40	  14  -40  -40  -14	 -40  -40   40  -40	 -40  -40  -40   40	 -35  -40   35  -40	 -40   38  -38  -40	  40  -40  -40  -40	 -40  -40   13  -13	 -40  -40   40  -40	 -40  -40  -40   40	  29  -29  -40  -40	 -40  -40  -40   40	 -15  -40  -40   15	  40  -40  -40  -40	  40  -40  -40  -40	 -23  -40  -40   23	  40  -40  -40  -40	  -1  -40  -40    1	 -40   38  -38  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40   -9    9	 -40  -40  -21   21	 -40  -40   21  -21	 -40  -40  -29   29	 -40  -13   13  -40	  40  -40  -40  -40	 -40  -40   40  -40
- -40  -40   40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   -3    3	  40  -40  -40  -40	 -40   40  -40  -40	 -40   40  -40  -40	  12  -40  -40  -12	 -10  -40   10  -40	 -40  -40   40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	 -21  -40  -40   21
- -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40
- -40  -40   40  -40	 -40  -40   40  -40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40   -1    1  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -31   31  -40	 -40   40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40
- -40  -40   40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40   40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	  40  -40  -40  -40	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40   40  -40
- -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40   40  -40  -40	 -29   29  -40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40   36  -36	  40  -40  -40  -40
- -40  -40   40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	  20  -20  -40  -40	 -40  -40  -40   40	   3  -40  -40   -3	  40  -40  -40  -40	  40  -40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40  -40   40  -40	 -40   40  -40  -40	 -40   40  -40  -40	 -40  -40   33  -33	  40  -40  -40  -40	 -13  -40  -40   13	   8   -8  -40  -40	 -40   28  -28  -40	 -29  -40  -40   29	 -40  -40  -40   40	   2  -40   -2  -26	 -40  -28   27  -33	  -1    0  -40  -35	  31  -31  -40  -40	 -40   40  -40  -40	 -11  -40  -40   11	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40
- -40  -40   40  -40	 -40   40  -40  -40	 -40  -40   40  -40	 -40  -40  -40   40	  40  -40  -40  -40	 -40   40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	  38  -38  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	 -40   40  -40  -40	 -40  -40   40  -40	 -40   40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40   40  -40  -40	 -40   40  -40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40   40  -40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	  40  -40  -40  -40	 -40  -40  -40   40	 -40  -40  -40   40	  29  -40  -40  -29	 -40   40  -40  -40	 -40   10  -40  -10	  40  -40  -40  -40	 -40  -40    2   -2	 -13  -40  -40   13	 -40  -40   40  -40	 -40  -40  -40   40	 -40  -40  -40   40
Binary file test-data/solexaScore.png has changed
--- a/tool_dependencies.xml	Sun Feb 16 06:16:54 2014 -0500
+++ b/tool_dependencies.xml	Sun Mar 02 11:49:08 2014 -0500
@@ -1,32 +1,31 @@
 <?xml version="1.0"?>
 <tool_dependency>
-    <package name="ncurses" version="5.9">
-        <repository changeset_revision="f62fa47b852d" name="package_ncurses_5_9" owner="iuc" toolshed="http://testtoolshed.g2.bx.psu.edu" />
-    </package>
-    <package name="readline" version="6.2">
-        <repository changeset_revision="246f31787ca1" name="package_readline_6_2" owner="iuc" toolshed="http://testtoolshed.g2.bx.psu.edu" />
+    <package name="R_3_0_1" version="3.0.1">
+        <repository changeset_revision="4666f68ad4d5" name="package_r_3_0_1" owner="iuc" prior_installation_required="True" toolshed="http://testtoolshed.g2.bx.psu.edu" />
     </package>
-    <package name="R_3_0_1" version="3.0.1">
-        <repository changeset_revision="4666f68ad4d5" name="package_r_3_0_1" owner="iuc" toolshed="http://testtoolshed.g2.bx.psu.edu" />
+    <package name="perl" version="5.18.1">
+        <repository changeset_revision="ab58b9f990c5" name="package_perl_5_18" owner="iuc" prior_installation_required="True" toolshed="http://testtoolshed.g2.bx.psu.edu" />
     </package>
-    <package name="rpy2" version="2.2.3.6">
-        <repository changeset_revision="191967e6496b" name="package_rpy2_2_3_6" owner="bgruening" toolshed="http://testtoolshed.g2.bx.psu.edu" />
-    </package>
-    <package name="numpy" version="1.7.1">
-        <repository changeset_revision="84125ffacb90" name="package_numpy_1_7" owner="iuc" toolshed="http://testtoolshed.g2.bx.psu.edu" />
-    </package>
-    <package name="R_statistic_tools" version="1.0">
+    <set_environment version="1.0">
+        <environment_variable action="set_to" name="MICLIP_SCRIPT_PATH">$REPOSITORY_INSTALL_DIR</environment_variable>
+    </set_environment>
+    <package name="miclip" version="1.2">
         <install version="1.0">
             <actions>
                 <action type="setup_r_environment">
                     <repository changeset_revision="4666f68ad4d5" name="package_r_3_0_1" owner="iuc" toolshed="http://testtoolshed.g2.bx.psu.edu">
                         <package name="R_3_0_1" version="3.0.1" />
                     </repository>
-                    <package>https://github.com/bgruening/download_store/raw/master/r_statistic_tools/kernlab_0.9-19.tar.gz</package>
-                    <package>https://github.com/bgruening/download_store/raw/master/r_statistic_tools/yacca_1.1.tar.gz</package>
-                    <package>https://github.com/bgruening/download_store/raw/master/r_statistic_tools/car_2.0-19.tar.gz</package>
+                    <package>https://github.com/bgruening/download_store/raw/master/miclip/moments_0.13.tar.gz</package>
+                    <package>https://github.com/bgruening/download_store/raw/master/miclip/VGAM_0.9-3.tar.gz</package>
+                    <package>https://github.com/bgruening/download_store/raw/master/miclip/MiClip_1.2.tar.gz</package>
                 </action>
             </actions>
         </install>
+        <readme>
+        MiClip: A Model-based Approach to Identify Binding Sites in CLIP-Seq Data
+        Cross-linking immunoprecipitation coupled with high-throughput sequencing (CLIP-Seq) has made it possible to identify the target sites of RNA-binding proteins in various cell culture systems and tissue types on a genome-wide scale. Here we present MiClip, a novel model-based approach to identify high-confidence protein-RNA binding sites in CLIP-Seq datasets. This approach assigns a confidence value to each binding site on a probabilistic basis. The MiClip package can be flexibly applied to analyze both HITS-CLIP and PAR-CLIP data.
+        http://cran.r-project.org/web/packages/MiClip/
+        </readme>
     </package>
 </tool_dependency>
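
For reference only (not part of the recorded changeset): a minimal R sketch of how the tool's wrapper script could verify at run time that the dependencies declared above resolved correctly. The helper name check_miclip_deps.R and the check itself are assumptions; only MICLIP_SCRIPT_PATH and the three R packages (moments, VGAM, MiClip) are taken from the diff.

    #!/usr/bin/env Rscript
    # check_miclip_deps.R (hypothetical helper, not part of this changeset):
    # sanity-check the dependencies declared in tool_dependencies.xml.

    # MICLIP_SCRIPT_PATH is exported by the <set_environment> block above.
    script_dir <- Sys.getenv("MICLIP_SCRIPT_PATH")
    if (!nzchar(script_dir)) {
        stop("MICLIP_SCRIPT_PATH is not set; was the set_environment action applied?")
    }

    # The three R packages installed by the setup_r_environment action.
    for (pkg in c("moments", "VGAM", "MiClip")) {
        if (!requireNamespace(pkg, quietly = TRUE)) {
            stop(sprintf("R package '%s' is missing from the R_3_0_1 environment", pkg))
        }
    }
    library(MiClip)  # the package whose binding-site model the Galaxy tool wraps

The tarballs in the setup_r_environment action are listed as moments, then VGAM, then MiClip, presumably so that MiClip's dependencies are already present when MiClip itself is installed.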