Mercurial > repos > jeremyjliu > region_motif_data_manager
view data_manager/data_manager_fetch_motifs.py @ 5:6621a6ac8bb4 draft
Uploaded
author | jeremyjliu |
---|---|
date | Wed, 28 Jan 2015 22:49:33 -0500 |
parents | 75d825e1b00d |
children | aa0d1b185070 |
line wrap: on
line source
#!/usr/bin/env python
# Dan Blankenberg
"""Galaxy data manager that downloads tabix-indexed motif database files
(a bgzip-compressed BED plus its .tbi index) and registers them in the
'motif_databases' data table.

Invoked by Galaxy as:

    data_manager_fetch_motifs.py [-m MOTIF_DB] <json_params_file>

The JSON parameter file is read to find the output directory and is then
overwritten with the resulting data table entries.
"""
import json
import optparse
import os

try:
    from urllib.request import urlopen  # Python 3
except ImportError:
    from urllib2 import urlopen  # Python 2

CHUNK_SIZE = 2 ** 20  # stream downloads in 1 MB pieces


def download_motif_databases(data_manager_dict, params, target_directory, motif_db):
    """Fetch the motif database files and record a data table entry for each.

    Only the test-subset URLs are populated at the moment, so ``motif_db``
    is currently ignored; the remaining URL constants are placeholders for
    the full Pouya / Jolma / mm9 databases.
    """
    TEST_BGZ_URL = 'http://gehlenborg.com/wp-content/uploads/motif/pouya_test_motifs.bed.bgz'
    TEST_TBI_URL = 'http://gehlenborg.com/wp-content/uploads/motif/pouya_test_motifs.bed.bgz.tbi'
    # Placeholders for the full databases (URLs not yet available).
    POUYA_BGZ_URL = ''
    POUYA_TBI_URL = ''
    JOLMA_BGZ_URL = ''
    JOLMA_TBI_URL = ''
    MM9_BGZ_URL = ''
    MM9_TBI_URL = ''

    # Every database is a pair: the bgzip data file and its tabix index.
    downloads = [
        (TEST_BGZ_URL, "pouya_test_motifs.bed.bgz", "test_bgz", "Test Pouya Subset BGZ (hg19)"),
        (TEST_TBI_URL, "pouya_test_motifs.bed.bgz.tbi", "test_tbi", "Test Pouya Subset TBI (hg19)"),
    ]
    for url, base_filename, value, name in downloads:
        reader = urlopen(url)
        entry = _stream_fasta_to_file(reader, target_directory, params, base_filename, value, name)
        _add_data_table_entry(data_manager_dict, 'motif_databases', entry)


def _add_data_table_entry(data_manager_dict, data_table, data_table_entry):
    """Append ``data_table_entry`` under ``data_tables/<data_table>``,
    creating the intermediate containers on first use.

    Returns ``data_manager_dict`` (mutated in place).
    """
    tables = data_manager_dict.setdefault('data_tables', {})
    tables.setdefault(data_table, []).append(data_table_entry)
    return data_manager_dict


def _stream_fasta_to_file(fasta_stream, target_directory, params, fasta_base_filename, value, name, close_stream=True):
    """Stream ``fasta_stream`` to ``target_directory/fasta_base_filename``
    in CHUNK_SIZE pieces and return the data table entry for the file.

    Bug fix: the original closed the stream unconditionally; ``close_stream``
    is now honored so callers may keep the stream open.  The returned
    ``path`` is the base filename (relative to the data table's extra files
    path), matching the original behavior.
    """
    fasta_filename = os.path.join(target_directory, fasta_base_filename)
    try:
        with open(fasta_filename, 'wb') as fasta_writer:
            while True:
                chunk = fasta_stream.read(CHUNK_SIZE)
                if not chunk:
                    break
                fasta_writer.write(chunk)
    finally:
        if close_stream:
            fasta_stream.close()
    return dict(value=value, name=name, path=fasta_base_filename)


def main():
    """Parse the command line, fetch the databases, and write the resulting
    data table entries back into the Galaxy JSON parameter file."""
    # Parse command line.
    parser = optparse.OptionParser()
    parser.add_option('-m', '--motif_db', dest='motif_db', action='store', type="string", default=None, help='motif_db')
    (options, args) = parser.parse_args()

    filename = args[0]
    with open(filename) as params_file:  # fix: original leaked this handle
        params = json.loads(params_file.read())
    target_directory = params['output_data'][0]['extra_files_path']
    os.mkdir(target_directory)
    data_manager_dict = {}

    # Fetch the motif database(s).
    download_motif_databases(data_manager_dict, params, target_directory, options.motif_db)

    # Save info to the JSON file Galaxy gave us (text mode: json emits str).
    with open(filename, 'w') as out:
        out.write(json.dumps(data_manager_dict))


if __name__ == "__main__":
    main()