truststore_browse: comparison of TrustStoreGalaxyBrowse.py @ 9:3e8bd0d01725
Attempt to guess data type.
author   | Wise, Catherine (Digital, Acton) <Catherine.Wise@csiro.au>
---------|---------------------------------------------------------------
date     | Thu, 25 Jun 2015 08:26:34 +1000
parents  | 2ca750b9083c
children | 7301c2e96fce
--- TrustStoreGalaxyBrowse.py (changeset 8:278e80313c7f)
+++ TrustStoreGalaxyBrowse.py (changeset 9:3e8bd0d01725)
@@ -8,10 +8,12 @@
 import json
 import operator
 import urlparse
 from py_ts import TrustStoreClient, utils
 from galaxy.datatypes.checkers import util
+from galaxy.datatypes import sniff
+from galaxy.datatypes.registry import Registry

 # Tell urllib3 to use pyOpenSSL because we are on old Python stdlib.
 # import urllib3.contrib.pyopenssl
 # urllib3.contrib.pyopenssl.inject_into_urllib3()
 #
@@ -117,10 +119,16 @@
 output_data = all_params.get('output_data')
 extra_files_path, file_name, ext, out_data_name, hda_id, dataset_id = \
     operator.itemgetter('extra_files_path', 'file_name', 'ext', 'out_data_name', 'hda_id', 'dataset_id')(output_data[0])
 extra_files_path = json_params['__new_file_path__']

+datatypes_registry = Registry()
+datatypes_registry.load_datatypes(
+    root_dir=all_params['job_config']['GALAXY_ROOT_DIR'],
+    config=all_params['job_config']['GALAXY_DATATYPES_CONF_FILE']
+)
+
 url_params = urlparse.unquote(json_params['URL']).split(";")
 if len(url_params) < 3:
     print("The url we got back is malformed: "+ json_params['URL'])
     sys.exit(5)
 short_url = url_params[0]
@@ -182,12 +190,13 @@
     target_output_filename = file_name
     dataset = "new_primary_dataset"
     first = False
 else:
     target_output_filename = os.path.normpath(os.path.join(extra_files_path, name))
+    ext = sniff.handle_uploaded_dataset_file(filename, datatypes_registry, ext=ext)
 metadata_file.write(
-    metadata_to_json(dataset_id, target_output_filename, name, extension, data_type))
+    metadata_to_json(dataset_id, target_output_filename, name, ext, data_type))
 download = truststore.getFile(store, location)
 if download is None:
     print("File %s not found." % location.name)
     sys.exit(4)
 ungzip(download, target_output_filename)
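
The pieces this changeset adds (the two new imports, the Registry setup, and the sniff call) fit together roughly as in the sketch below. Only the Registry and sniff.handle_uploaded_dataset_file calls are taken from the changeset itself; the helper name guess_extension and the paths in the usage comment are hypothetical placeholders, not part of the tool.

    # Minimal sketch, assuming Galaxy's (Python 2 era) datatypes API as used in
    # the changeset above; paths and the helper name are placeholders.
    from galaxy.datatypes import sniff
    from galaxy.datatypes.registry import Registry

    def guess_extension(filename, galaxy_root, datatypes_conf, declared_ext='auto'):
        """Load Galaxy's datatype registry and let its sniffers guess the
        datatype extension of a downloaded file, starting from declared_ext."""
        registry = Registry()
        registry.load_datatypes(root_dir=galaxy_root, config=datatypes_conf)
        # As used in the changeset: run the registered sniffers over the file
        # and return the extension Galaxy should record for it.
        return sniff.handle_uploaded_dataset_file(filename, registry, ext=declared_ext)

    # Hypothetical usage, mirroring where the tool gets its paths from:
    # ext = guess_extension(filename,
    #                       all_params['job_config']['GALAXY_ROOT_DIR'],
    #                       all_params['job_config']['GALAXY_DATATYPES_CONF_FILE'])

The sniffed extension then replaces the previously hard-coded value when the metadata for each new primary dataset is written, which is what the commit message ("Attempt to guess data type.") refers to.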