Mercurial > repos > cathywise > truststore_browse
changeset 14:c7287129f37f
And again.
| field | value |
|---|---|
| author | Wise, Catherine (Digital, Acton) <Catherine.Wise@csiro.au> |
| date | Thu, 25 Jun 2015 09:16:21 +1000 |
| parents | da8958ad788e |
| children | 8b88de25dd2c |
| files | TrustStoreGalaxyBrowse.py |
| diffstat | 1 files changed, 12 insertions(+), 11 deletions(-) [+] |
line wrap: on
line diff
--- a/TrustStoreGalaxyBrowse.py	Thu Jun 25 09:05:29 2015 +1000
+++ b/TrustStoreGalaxyBrowse.py	Thu Jun 25 09:16:21 2015 +1000
@@ -9,10 +9,10 @@
 import operator
 import urlparse
 from py_ts import TrustStoreClient, utils
-import galaxy.model  # need to import model before sniff to resolve a circular import dependency
+# import galaxy.model  # need to import model before sniff to resolve a circular import dependency
 from galaxy.datatypes.checkers import util
-from galaxy.datatypes import sniff
-from galaxy.datatypes.registry import Registry
+# from galaxy.datatypes import sniff
+# from galaxy.datatypes.registry import Registry
 # Tell urllib3 to use pyOpenSSL because we are on old Python stdlib.
 # import urllib3.contrib.pyopenssl
@@ -122,11 +122,11 @@
     operator.itemgetter('extra_files_path', 'file_name', 'ext', 'out_data_name', 'hda_id', 'dataset_id')(output_data[0])
     extra_files_path = json_params['__new_file_path__']
-    datatypes_registry = Registry()
-    datatypes_registry.load_datatypes(
-        root_dir=all_params['job_config']['GALAXY_ROOT_DIR'],
-        config=all_params['job_config']['GALAXY_DATATYPES_CONF_FILE']
-    )
+    # datatypes_registry = Registry()
+    # datatypes_registry.load_datatypes(
+    #     root_dir=all_params['job_config']['GALAXY_ROOT_DIR'],
+    #     config=all_params['job_config']['GALAXY_DATATYPES_CONF_FILE']
+    # )
     url_params = urlparse.unquote(json_params['URL']).split(";")
     if len(url_params) < 3:
@@ -198,10 +198,11 @@
     print("File %s not found."
           % location.name)
     sys.exit(4)
 ungzip(download, target_output_filename)
-ext = sniff.handle_uploaded_dataset_file(filename, datatypes_registry, ext=ext)
-print("Guessed file type: " + ext)
+# ext = sniff.handle_uploaded_dataset_file(target_output_filename, datatypes_registry, ext=ext)
+# print("Guessed file type: " + ext)
+print("in file: " + properties_file)
 metadata_file.write(
-    metadata_to_json(dataset_id, target_output_filename, name, ext, data_type))
+    metadata_to_json(dataset_id, target_output_filename, name, extension, data_type))
 else:
     print("Store is damaged or we don't have sufficient access.")
     sys.exit(4)