devteam / data_manager_fetch_ncbi_taxonomy
comparison of data_manager/data_manager.py @ 5:eaca3e270bf6 (draft, default, tip)
"planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/data_managers/data_manager_fetch_ncbi_taxonomy/ commit 3d78021971b83d585c432830cee0488ada7bd257"
| author | iuc |
|---|---|
| date | Fri, 19 Mar 2021 21:43:14 +0000 |
| parents | cef5c909ccb8 |
| children | |
| 4:cef5c909ccb8 | 5:eaca3e270bf6 |
|---|---|
| 3 import json | 3 import json |
| 4 import os | 4 import os |
| 5 import shutil | 5 import shutil |
| 6 import tarfile | 6 import tarfile |
| 7 import zipfile | 7 import zipfile |
| 8 try: | 8 from urllib.request import Request, urlopen |
| 9 # For Python 3.0 and later | |
| 10 from urllib.request import Request, urlopen | |
| 11 except ImportError: | |
| 12 # Fall back to Python 2 imports | |
| 13 from urllib2 import Request, urlopen | |
| 14 | 9 |
| 15 | 10 |
| 16 def url_download(url, workdir): | 11 def url_download(url, workdir): |
| 17 file_path = os.path.join(workdir, 'download.dat') | 12 file_path = os.path.join(workdir, 'download.dat') |
| 18 if not os.path.exists(workdir): | 13 if not os.path.exists(workdir): |
| 40 return | 35 return |
| 41 fh.extractall(workdir) | 36 fh.extractall(workdir) |
| 42 os.remove(file_path) | 37 os.remove(file_path) |
| 43 | 38 |
| 44 | 39 |
|  | 40 def download_name_maps(url, workdir, partial): |
|  | 41 |
|  | 42 if partial: |
|  | 43 map_files = [ |
|  | 44 'pdb.accession2taxid.gz', |
|  | 45 ] |
|  | 46 else: |
|  | 47 map_files = [ |
|  | 48 'dead_nucl.accession2taxid.gz', |
|  | 49 'dead_prot.accession2taxid.gz', |
|  | 50 'dead_wgs.accession2taxid.gz', |
|  | 51 'nucl_gb.accession2taxid.gz', |
|  | 52 'nucl_wgs.accession2taxid.gz', |
|  | 53 'pdb.accession2taxid.gz', |
|  | 54 'prot.accession2taxid.gz', |
|  | 55 'prot.accession2taxid.FULL.gz' |
|  | 56 ] |
|  | 57 |
|  | 58 if not os.path.exists(workdir): |
|  | 59 os.makedirs(workdir) |
|  | 60 |
|  | 61 for map in map_files: |
|  | 62 src = "{}{}".format(url, map) |
|  | 63 dest = os.path.join(workdir, map) |
|  | 64 |
|  | 65 print("Downloading taxonomy accession2taxid file from {} to {}".format(src, dest)) |
|  | 66 |
|  | 67 try: |
|  | 68 req = Request(src) |
|  | 69 src = urlopen(req) |
|  | 70 with open(dest, 'wb') as dst: |
|  | 71 while True: |
|  | 72 chunk = src.read(2**10) |
|  | 73 if chunk: |
|  | 74 dst.write(chunk) |
|  | 75 else: |
|  | 76 break |
|  | 77 finally: |
|  | 78 if src: |
|  | 79 src.close() |
|  | 80 |
|  | 81 |
|  | 82 def move_files_to_final_dir(workdir, target_directory, copy=False): |
|  | 83 for filename in os.listdir(workdir): |
|  | 84 if copy: |
|  | 85 shutil.copy(os.path.join(workdir, filename), target_directory) |
|  | 86 else: |
|  | 87 shutil.move(os.path.join(workdir, filename), target_directory) |
|  | 88 |
|  | 89 |
| 45 def main(args): | 90 def main(args): |
| 46 workdir = os.path.join(os.getcwd(), 'taxonomy') | 91 workdir = os.path.abspath(os.path.join(os.getcwd(), 'taxonomy')) |
| 47 url_download(args.url, workdir) | 92 url_download(args.url, workdir) |
|  | 93 |
| 48 data_manager_entry = {} | 94 data_manager_entry = {} |
| 49 data_manager_entry['value'] = args.name.lower() | 95 data_manager_entry['value'] = args.name.lower() |
| 50 data_manager_entry['name'] = args.name | 96 data_manager_entry['name'] = args.name |
| 51 data_manager_entry['path'] = '.' | 97 data_manager_entry['path'] = '.' |
| 52 data_manager_json = dict(data_tables=dict(ncbi_taxonomy=data_manager_entry)) | 98 data_manager_json = dict(data_tables=dict(ncbi_taxonomy=data_manager_entry)) |
| 53 params = json.loads(open(args.output).read()) | 99 |
| 54 target_directory = params['output_data'][0]['extra_files_path'] | 100 with open(args.output) as fh: |
| 55 os.mkdir(target_directory) | 101 params = json.load(fh) |
| 56 output_path = os.path.abspath(os.path.join(os.getcwd(), 'taxonomy')) | 102 |
| 57 for filename in os.listdir(workdir): | 103 if args.name_maps: |
| 58 shutil.move(os.path.join(output_path, filename), target_directory) | 104 workdir_a2t = os.path.join(os.getcwd(), 'accession2taxid') |
| 59 with open(args.output, 'w') as out: | 105 download_name_maps("ftp://ftp.ncbi.nlm.nih.gov/pub/taxonomy/accession2taxid/", workdir_a2t, args.partial) |
| 60 out.write(json.dumps(data_manager_json, sort_keys=True)) | 106 |
|  | 107 target_directory_a2t = os.path.join(params['output_data'][0]['extra_files_path'], "accession2taxid") |
|  | 108 os.makedirs(target_directory_a2t) |
|  | 109 move_files_to_final_dir(workdir_a2t, target_directory_a2t) |
|  | 110 |
|  | 111 # Also copy taxonomy data to accession2taxid dir |
|  | 112 move_files_to_final_dir(workdir, target_directory_a2t, copy=True) |
|  | 113 |
|  | 114 data_manager_json['data_tables']['ncbi_accession2taxid'] = data_manager_entry |
|  | 115 |
|  | 116 target_directory_tax = os.path.join(params['output_data'][0]['extra_files_path'], "taxonomy") |
|  | 117 os.makedirs(target_directory_tax) |
|  | 118 |
|  | 119 move_files_to_final_dir(workdir, target_directory_tax) |
|  | 120 |
|  | 121 with open(args.output, 'w') as fh: |
|  | 122 json.dump(data_manager_json, fh, sort_keys=True) |
| 61 | 123 |
| 62 | 124 |
| 63 if __name__ == '__main__': | 125 if __name__ == '__main__': |
| 64 parser = argparse.ArgumentParser(description='Create data manager json.') | 126 parser = argparse.ArgumentParser(description='Create data manager json.') |
| 65 parser.add_argument('--out', dest='output', action='store', help='JSON filename') | 127 parser.add_argument('--out', dest='output', action='store', help='JSON filename') |
| 66 parser.add_argument('--name', dest='name', action='store', default=str(datetime.date.today()), help='Data table entry unique ID') | 128 parser.add_argument('--name', dest='name', action='store', default=str(datetime.date.today()), help='Data table entry unique ID') |
| 67 parser.add_argument('--url', dest='url', action='store', default='ftp://ftp.ncbi.nih.gov/pub/taxonomy/taxdump.tar.gz', help='Download URL') | 129 parser.add_argument('--url', dest='url', action='store', default='ftp://ftp.ncbi.nih.gov/pub/taxonomy/taxdump.tar.gz', help='Download URL') |
|  | 130 parser.add_argument('--name-maps', dest='name_maps', action='store_true', help='') |
|  | 131 parser.add_argument('--partial', dest='partial', action='store_true', help='Only download a small subset of data (for testing)') |
| 68 args = parser.parse_args() | 132 args = parser.parse_args() |
| 69 | 133 |
| 70 main(args) | 134 main(args) |
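
For reference, the script is driven entirely by the Galaxy-supplied JSON file passed via `--out`: it is first read for `output_data[0].extra_files_path` and then overwritten with the data table entries. The sketch below is a minimal way to exercise the new revision locally; the stub `extra_files_path`, the file name `params.json`, the `--name` value, and running the script from its own directory are assumptions for illustration, not part of the committed tool, and the run does download the real taxdump archive from NCBI.

```python
import json
import os
import subprocess
import tempfile

# Minimal local dry run of data_manager.py (assumed paths; not part of the tool itself).
work = tempfile.mkdtemp()
extra_files = os.path.join(work, "extra_files")
params_json = os.path.join(work, "params.json")

# Galaxy normally writes this file; only the key the script reads is stubbed here.
with open(params_json, "w") as fh:
    json.dump({"output_data": [{"extra_files_path": extra_files}]}, fh)

# Downloads taxdump.tar.gz into ./taxonomy, then moves it under extra_files/taxonomy.
# Adding --name-maps would also fetch the accession2taxid files (use --partial for a small test subset).
subprocess.run(
    ["python", "data_manager.py", "--out", params_json, "--name", "ncbi_taxonomy_2021-03-19"],
    check=True,
)

# The script rewrites params.json as the data manager JSON, roughly:
# {"data_tables": {"ncbi_taxonomy": {"name": "...", "path": ".", "value": "..."}}}
with open(params_json) as fh:
    print(json.load(fh))
```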
