Mercurial > repos > scottx611x > cooler_convert
changeset 20:797818b4e2f6 draft
planemo upload commit 9e980862c8b763cf3dd0209be49f0d2d75a90614-dirty
author | scottx611x |
---|---|
date | Mon, 21 Nov 2016 13:00:20 -0500 |
parents | 70d6350f982d |
children | 202ef48959d3 |
files | recursive_agg_onefile.py recursive_agg_onefile.xml |
diffstat | 2 files changed, 28 insertions(+), 34 deletions(-) [+] |
line wrap: on
line diff
--- a/recursive_agg_onefile.py Mon Nov 21 10:45:17 2016 -0500 +++ b/recursive_agg_onefile.py Mon Nov 21 13:00:20 2016 -0500 @@ -33,11 +33,8 @@ # transfer base matrix with h5py.File(outfile, 'w') as dest, \ h5py.File(infile, 'r') as src: - try: - zoomLevel = str(n_zooms) - src.copy('/', dest, zoomLevel) - except RuntimeWarning as e: - print("Error: %s" % e) + zoomLevel = str(n_zooms) + src.copy('/', dest, zoomLevel) print(zoomLevel, file=sys.stderr) @@ -50,38 +47,35 @@ for i in range(n_zooms - 1, -1, -1): zoomLevel = str(i) - try: - # aggregate - new_binsize = binsize * FACTOR - new_bins = cooler.util.binnify(chromsizes, new_binsize) - - reader = CoolerAggregator(c, new_bins, chunksize) - - grp = f.create_group(zoomLevel) - f.attrs[zoomLevel] = new_binsize - cooler.io.create(grp, chroms, lengths, new_bins, reader) + # aggregate + new_binsize = binsize * FACTOR + new_bins = cooler.util.binnify(chromsizes, new_binsize) + + reader = CoolerAggregator(c, new_bins, chunksize) + + grp = f.create_group(zoomLevel) + f.attrs[zoomLevel] = new_binsize + cooler.io.create(grp, chroms, lengths, new_bins, reader) - # balance - #with Pool(N_CPU) as pool: - too_close = 20000 # for HindIII - ignore_diags = max(int(np.ceil(too_close / new_binsize)), 3) + # balance + #with Pool(N_CPU) as pool: + too_close = 20000 # for HindIII + ignore_diags = max(int(np.ceil(too_close / new_binsize)), 3) - bias = cooler.ice.iterative_correction( - f, zoomLevel, - chunksize=chunksize, - min_nnz=10, - mad_max=3, - ignore_diags=ignore_diags, - map=map) - h5opts = dict(compression='gzip', compression_opts=6) - grp['bins'].create_dataset('weight', data=bias, **h5opts) + bias = cooler.ice.iterative_correction( + f, zoomLevel, - chunksize=chunksize, + chunksize=chunksize, + min_nnz=10, + mad_max=3, + ignore_diags=ignore_diags, + map=map) + h5opts = dict(compression='gzip', compression_opts=6) + grp['bins'].create_dataset('weight', data=bias, **h5opts) - print(zoomLevel, file=sys.stderr) + print(zoomLevel, file=sys.stderr) - c = cooler.Cooler(grp) - binsize = new_binsize - except RuntimeWarning: - pass + c = cooler.Cooler(grp) + binsize = new_binsize if __name__ == '__main__': parser = argparse.ArgumentParser(
--- a/recursive_agg_onefile.xml Mon Nov 21 10:45:17 2016 -0500 +++ b/recursive_agg_onefile.xml Mon Nov 21 13:00:20 2016 -0500 @@ -1,7 +1,7 @@ <tool id="recursive_agg_cooler" name="Generate Multi-res Cooler File" version="1.0.0"> <description>Recursively aggregate a single resolution cooler file into a multi-resolution file.</description> - <command> /usr/bin/python $__tool_directory__/recursive_agg_onefile.py $input --out $output</command> + <command> /usr/bin/python $__tool_directory__/recursive_agg_onefile.py $input --out $output > /dev/null 2>&1</command> <inputs> <param format="h5" name="input" type="data" label="Source cooler file"/> </inputs>