data_manager_fetch_ncbi_taxonomy (Mercurial repository, devteam): comparison view of data_manager/data_manager.py @ 6:8eb18af0ece1 (draft, default, tip)
planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/main/data_managers/data_manager_fetch_ncbi_taxonomy commit cf8607692417bdc4f663c726aea34c1056dd9c48
| author | iuc |
|---|---|
| date | Mon, 17 Nov 2025 21:46:53 +0000 |
| parents | eaca3e270bf6 |
| children | |
Revisions compared: 5:eaca3e270bf6 (parent) → 6:8eb18af0ece1. data_manager/data_manager.py as shown in the comparison:
```python
import argparse
import datetime
import json
import os
import shutil
import tarfile
import zipfile
from urllib.request import Request, urlopen


def url_download(url, workdir):
    """Download an archive to workdir and unpack it in place."""
    file_path = os.path.join(workdir, 'download.dat')
    if not os.path.exists(workdir):
        os.makedirs(workdir)
    src = None
    try:
        req = Request(url)
        src = urlopen(req)
        with open(file_path, 'wb') as dst:
            while True:
                chunk = src.read(2**10)
                if chunk:
                    dst.write(chunk)
                else:
                    break
    finally:
        if src:
            src.close()
    # Unpack tar or zip archives; leave any other file type untouched.
    if tarfile.is_tarfile(file_path):
        fh = tarfile.open(file_path, 'r:*')
    elif zipfile.is_zipfile(file_path):
        fh = zipfile.ZipFile(file_path, 'r')
    else:
        return
    fh.extractall(workdir)
    os.remove(file_path)


def download_name_maps(url, workdir, partial):
    """Download NCBI accession2taxid mapping files into workdir."""
    if partial:
        map_files = [
            'pdb.accession2taxid.gz',
        ]
    else:
        map_files = [
            'dead_nucl.accession2taxid.gz',
            'dead_prot.accession2taxid.gz',
            'dead_wgs.accession2taxid.gz',
            'nucl_gb.accession2taxid.gz',
            'nucl_wgs.accession2taxid.gz',
            'pdb.accession2taxid.gz',
            'prot.accession2taxid.gz',
            'prot.accession2taxid.FULL.gz'
        ]

    if not os.path.exists(workdir):
        os.makedirs(workdir)

    for map_file in map_files:
        map_url = "{}{}".format(url, map_file)
        dest = os.path.join(workdir, map_file)

        print("Downloading taxonomy accession2taxid file from {} to {}".format(map_url, dest))

        src = None
        try:
            req = Request(map_url)
            src = urlopen(req)
            with open(dest, 'wb') as dst:
                while True:
                    chunk = src.read(2**10)
                    if chunk:
                        dst.write(chunk)
                    else:
                        break
        finally:
            if src:
                src.close()


def move_files_to_final_dir(workdir, target_directory, copy=False):
    """Move (or copy) every file in workdir into target_directory."""
    for filename in os.listdir(workdir):
        if copy:
            shutil.copy(os.path.join(workdir, filename), target_directory)
        else:
            shutil.move(os.path.join(workdir, filename), target_directory)


def main(args):
    workdir = os.path.abspath(os.path.join(os.getcwd(), 'taxonomy'))
    url_download(args.url, workdir)

    data_manager_entry = {}
    data_manager_entry['value'] = args.name.lower()
    data_manager_entry['name'] = args.name
    data_manager_entry['path'] = '.'
    data_manager_json = dict(data_tables=dict(ncbi_taxonomy=data_manager_entry))

    # The --out file is pre-populated by Galaxy with job parameters; read it to
    # find the dataset's extra_files_path, then overwrite it at the end of the
    # run with the data table entries.
    with open(args.output) as fh:
        params = json.load(fh)

    if args.name_maps:
        workdir_a2t = os.path.join(os.getcwd(), 'accession2taxid')
        download_name_maps("ftp://ftp.ncbi.nlm.nih.gov/pub/taxonomy/accession2taxid/", workdir_a2t, args.partial)

        target_directory_a2t = os.path.join(params['output_data'][0]['extra_files_path'], "accession2taxid")
        os.makedirs(target_directory_a2t)
        move_files_to_final_dir(workdir_a2t, target_directory_a2t)

        # Also copy taxonomy data to accession2taxid dir
        move_files_to_final_dir(workdir, target_directory_a2t, copy=True)

        data_manager_json['data_tables']['ncbi_accession2taxid'] = data_manager_entry

    target_directory_tax = os.path.join(params['output_data'][0]['extra_files_path'], "taxonomy")
    os.makedirs(target_directory_tax)

    move_files_to_final_dir(workdir, target_directory_tax)

    with open(args.output, 'w') as fh:
        json.dump(data_manager_json, fh, sort_keys=True)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Create data manager json.')
    parser.add_argument('--out', dest='output', action='store', help='JSON filename')
    parser.add_argument('--name', dest='name', action='store', default=str(datetime.date.today()), help='Data table entry unique ID')
    parser.add_argument('--url', dest='url', action='store', default='ftp://ftp.ncbi.nih.gov/pub/taxonomy/taxdump.tar.gz', help='Download URL')
    parser.add_argument('--name-maps', dest='name_maps', action='store_true', help='Also download accession2taxid mapping files')
    parser.add_argument('--partial', dest='partial', action='store_true', help='Only download a small subset of data (for testing)')
    args = parser.parse_args()

    main(args)
```
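
When run outside Galaxy, the script only needs a params JSON providing `output_data[0].extra_files_path`, the one field it reads before rewriting that same file with its data table entries. The sketch below is illustrative, not part of the repository: the staging paths, the `--name` value, and the `subprocess` invocation are assumptions, and a real run downloads the full `taxdump.tar.gz` plus (with `--name-maps --partial`) `pdb.accession2taxid.gz` from NCBI.

```python
# Minimal local-test sketch (not the Galaxy wrapper): stage a params file with
# the one field the script reads, run the script, and inspect the data table
# JSON it writes back. Paths and the --name value are illustrative only.
import json
import os
import subprocess
import tempfile

script = os.path.abspath('data_manager/data_manager.py')  # path inside this repository checkout

staging = tempfile.mkdtemp(prefix='ncbi_taxonomy_test_')
extra_files_path = os.path.join(staging, 'extra_files')
os.makedirs(extra_files_path)

params_file = os.path.join(staging, 'params.json')
with open(params_file, 'w') as fh:
    # Mirrors the structure the script expects: params['output_data'][0]['extra_files_path']
    json.dump({'output_data': [{'extra_files_path': extra_files_path}]}, fh)

subprocess.check_call(
    [
        'python', script,
        '--out', params_file,
        '--name', 'ncbi_taxonomy_2025-11-17',  # becomes the entry's name/value
        '--name-maps',                          # also fetch accession2taxid mappings
        '--partial',                            # limit mappings to pdb.accession2taxid.gz
    ],
    cwd=staging,  # the script creates its 'taxonomy'/'accession2taxid' scratch dirs in cwd
)

# The script overwrites the params file with its data table entries.
with open(params_file) as fh:
    print(json.dumps(json.load(fh), indent=2))
```

After the run, `extra_files_path` should contain `taxonomy/` (the unpacked taxdump files) and, because `--name-maps` was passed, `accession2taxid/` holding the mapping files plus a copy of the taxonomy data, matching what `main()` stages above.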
