# HG changeset patch
# User devteam
# Date 1446837388 18000
# Node ID 078e803565cfb250f0c422a68b51ec6ba1a4f7c4
planemo upload for repository https://github.com/galaxyproject/tools-devteam/blob/master/data_managers/data_manager_fetch_ncbi_taxonomy/ commit 86cf90107482cab1cb47fc0d42d6705f8077daa7

diff -r 000000000000 -r 078e803565cf data_manager/data_manager.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager/data_manager.py Fri Nov 06 14:16:28 2015 -0500
@@ -0,0 +1,68 @@
+import argparse
+import datetime
+import json
+import os
+import shutil
+import sys
+import tarfile
+import urllib2
+import zipfile
+
+parser = argparse.ArgumentParser(description='Create data manager json.')
+parser.add_argument('--out', dest='output', action='store', help='JSON filename')
+parser.add_argument('--name', dest='name', action='store', default=str(datetime.date.today()), help='Data table entry unique ID')
+parser.add_argument('--url', dest='url', action='store', default='ftp://ftp.ncbi.nih.gov/pub/taxonomy/taxdump.tar.gz', help='Download URL')
+
+args = parser.parse_args()
+
+def url_download(url, workdir):
+    file_path = os.path.join(workdir, 'download.dat')
+    if not os.path.exists(workdir):
+        os.makedirs(workdir)
+    src = None
+    dst = None
+    try:
+        req = urllib2.Request(url)
+        src = urllib2.urlopen(req)
+        dst = open(file_path, 'wb')
+        while True:
+            chunk = src.read(2**10)
+            if chunk:
+                dst.write(chunk)
+            else:
+                break
+    except Exception, e:
+        print >>sys.stderr, str(e)
+    finally:
+        if src:
+            src.close()
+        if dst:
+            dst.close()
+    if tarfile.is_tarfile(file_path):
+        fh = tarfile.open(file_path, 'r:*')
+    elif zipfile.is_zipfile(file_path):
+        fh = zipfile.ZipFile(file_path, 'r')
+    else:
+        return
+    fh.extractall(workdir)
+    os.remove(file_path)
+
+
+def main(args):
+    workdir = os.path.join(os.getcwd(), 'taxonomy')
+    url_download(args.url, workdir)
+    data_manager_entry = {}
+    data_manager_entry['value'] = args.name.lower()
+    data_manager_entry['name'] = args.name
+    data_manager_entry['path'] = '.'
+    data_manager_json = dict(data_tables=dict(ncbi_taxonomy=data_manager_entry))
+    params = json.loads(open(args.output).read())
+    target_directory = params['output_data'][0]['extra_files_path']
+    os.mkdir(target_directory)
+    output_path = os.path.abspath(os.path.join(os.getcwd(), 'taxonomy'))
+    for filename in os.listdir(workdir):
+        shutil.move(os.path.join(output_path, filename), target_directory)
+    file(args.output, 'w').write(json.dumps(data_manager_json))
+
+if __name__ == '__main__':
+    main(args)
diff -r 000000000000 -r 078e803565cf data_manager/ncbi_taxonomy_fetcher.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager/ncbi_taxonomy_fetcher.xml Fri Nov 06 14:16:28 2015 -0500
@@ -0,0 +1,25 @@
+
+
+    taxonomy downloader
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff -r 000000000000 -r 078e803565cf data_manager_conf.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager_conf.xml Fri Nov 06 14:16:28 2015 -0500
@@ -0,0 +1,18 @@
+
+
+
+
+
+
+
+
+        ncbi_taxonomy/${value}
+
+            ${GALAXY_DATA_MANAGER_DATA_PATH}/ncbi_taxonomy/${value}
+            abspath
+
+
+
+
+
diff -r 000000000000 -r 078e803565cf tool-data/ncbi_taxonomy.loc.sample
diff -r 000000000000 -r 078e803565cf tool_data_table_conf.xml.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tool_data_table_conf.xml.sample Fri Nov 06 14:16:28 2015 -0500
@@ -0,0 +1,8 @@
+
+
+
+        value, name, path
+
+
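
For context, data_manager.py talks to Galaxy through a single JSON file: the tool wrapper passes its path via --out, the script reads output_data[0].extra_files_path from it to learn where the extracted taxonomy dump should land, moves the files there, and then overwrites the same file with a data_tables.ncbi_taxonomy entry (value, name, path). The sketch below exercises that handshake outside Galaxy; the file names, the python2 interpreter name, and the single-key params file are assumptions (the params file Galaxy actually writes carries more keys), and the run only succeeds with network access to the NCBI FTP site.

```python
# Minimal sketch of the JSON handshake expected by data_manager.py above,
# run locally rather than from Galaxy. Paths and names are hypothetical.
import json
import os
import subprocess
import tempfile

parent = tempfile.mkdtemp()                    # stands in for the dataset's storage area
target_dir = os.path.join(parent, 'entry')     # data_manager.py calls os.mkdir on it, so it must not exist yet

params_path = 'params.json'
with open(params_path, 'w') as fh:
    # Galaxy's real params file has more keys; this is the only one the script reads.
    json.dump({'output_data': [{'extra_files_path': target_dir}]}, fh)

# Equivalent to the wrapper's command line; the script itself requires Python 2.
subprocess.check_call(['python2', 'data_manager.py',
                       '--out', params_path, '--name', 'ncbi_2015-11-06'])

with open(params_path) as fh:
    # Prints the data_tables entry written back by data_manager.py, e.g.
    # {'data_tables': {'ncbi_taxonomy': {'value': 'ncbi_2015-11-06',
    #                                    'name': 'ncbi_2015-11-06', 'path': '.'}}}
    print(json.load(fh))
```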
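The committed script is Python 2 only (urllib2, `except Exception, e`, `print >>`, the file() builtin), so it will not run under Python 3 as-is. Purely as a sketch of how the same download-and-extract helper reads against the Python 3 standard library, and not part of this changeset: it also returns early when the download fails, whereas the original falls through to tarfile.is_tarfile() on a file that may not exist.

```python
# Python 3 rendering of url_download() above -- a sketch, not part of the changeset.
import os
import sys
import tarfile
import urllib.request
import zipfile


def url_download(url, workdir):
    """Fetch url into workdir/download.dat, then unpack a tar or zip archive in place."""
    file_path = os.path.join(workdir, 'download.dat')
    os.makedirs(workdir, exist_ok=True)
    try:
        with urllib.request.urlopen(url) as src, open(file_path, 'wb') as dst:
            while True:
                chunk = src.read(2 ** 10)
                if not chunk:
                    break
                dst.write(chunk)
    except Exception as e:
        print(e, file=sys.stderr)
        return
    if tarfile.is_tarfile(file_path):
        archive = tarfile.open(file_path, 'r:*')
    elif zipfile.is_zipfile(file_path):
        archive = zipfile.ZipFile(file_path, 'r')
    else:
        return
    archive.extractall(workdir)
    archive.close()
    os.remove(file_path)
```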