changeset 43:174df9815ecd

Fix data types.
author Wise, Catherine (Digital, Acton) <Catherine.Wise@csiro.au>
date Thu, 25 Jun 2015 11:09:43 +1000
parents 1ca5aa0a4d6b
children 5589d367885e
files TrustStoreGalaxyBrowse.py
diffstat 1 files changed, 23 insertions(+), 8 deletions(-)
--- a/TrustStoreGalaxyBrowse.py	Fri May 29 12:18:14 2015 +1000
+++ b/TrustStoreGalaxyBrowse.py	Thu Jun 25 11:09:43 2015 +1000
@@ -9,7 +9,10 @@
 import operator
 import urlparse
 from py_ts import TrustStoreClient, utils
+# import galaxy.model # need to import model before sniff to resolve a circular import dependency
 from galaxy.datatypes.checkers import util
+# from galaxy.datatypes import sniff
+# from galaxy.datatypes.registry import Registry
 
 # Tell urllib3 to use pyOpenSSL because we are on old Python stdlib.
 # import urllib3.contrib.pyopenssl
@@ -119,6 +122,12 @@
       operator.itemgetter('extra_files_path', 'file_name', 'ext', 'out_data_name', 'hda_id', 'dataset_id')(output_data[0])
     extra_files_path = json_params['__new_file_path__']
 
+    # datatypes_registry = Registry()
+    # datatypes_registry.load_datatypes(
+    #     root_dir=all_params['job_config']['GALAXY_ROOT_DIR'],
+    #     config=all_params['job_config']['GALAXY_DATATYPES_CONF_FILE']
+    # )
+
     url_params = urlparse.unquote(json_params['URL']).split(";")
     if len(url_params) < 3:
         print("The url we got back is malformed: "+ json_params['URL'])
@@ -163,6 +172,9 @@
 
     print("Preparing the following for downloading: " + str(paths))
 
+    # Empty parameter file.
+    open(file_name, 'w').close()
+
     if root is not None:
         with open(metadata_path, 'wb') as metadata_file:
             for path in paths:
@@ -178,19 +190,22 @@
                     name = construct_multi_filename(hda_id, filename, extension)
                     target_output_filename = None
                     data_type = "new_primary_dataset"
-                    if first:
-                        target_output_filename = file_name
-                        dataset = "new_primary_dataset"
-                        first = False
-                    else:
-                        target_output_filename = os.path.normpath(os.path.join(extra_files_path, name))
-                    metadata_file.write(
-                        metadata_to_json(dataset_id, target_output_filename, name, extension, data_type))
+                    # if first:
+                    #     target_output_filename = file_name
+                    #     dataset = "new_primary_dataset"
+                    #     first = False
+                    # else:
+                    target_output_filename = os.path.normpath(os.path.join(extra_files_path, name))
                     download = truststore.getFile(store, location)
                     if download is None:
                         print("File %s not found." % location.name)
                         sys.exit(4)
                     ungzip(download, target_output_filename)
+                    # ext = sniff.handle_uploaded_dataset_file(target_output_filename, datatypes_registry, ext=ext)
+                    # print("Guessed file type: " + ext)
+                    print("in file: " + properties_file)
+                    metadata_file.write(
+                        metadata_to_json(dataset_id, target_output_filename, name, extension, data_type))
     else:
         print("Store is damaged or we don't have sufficient access.")
         sys.exit(4)
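
The commented-out Registry and sniff lines above mark where datatype guessing would plug in once the circular import (galaxy.model has to load before sniff) is resolved. A rough, untested sketch of that path when enabled, using only the Galaxy API names the diff itself references (Registry.load_datatypes and sniff.handle_uploaded_dataset_file); the job_config keys are copied from the commented code, and the guess_extension wrapper is an assumption for illustration, not part of the committed file:

    import galaxy.model  # imported first to break the circular import with sniff
    from galaxy.datatypes import sniff
    from galaxy.datatypes.registry import Registry

    def guess_extension(all_params, target_output_filename, ext='auto'):
        # Build a datatypes registry from the Galaxy instance's own config
        # (paths are supplied to the tool via the job_config parameters).
        datatypes_registry = Registry()
        datatypes_registry.load_datatypes(
            root_dir=all_params['job_config']['GALAXY_ROOT_DIR'],
            config=all_params['job_config']['GALAXY_DATATYPES_CONF_FILE'])
        # Sniff the downloaded file and return the detected extension;
        # Galaxy may also normalise line endings or decompress in place.
        return sniff.handle_uploaded_dataset_file(
            target_output_filename, datatypes_registry, ext=ext)

Building the registry once per job, as the commented-out hunk above does, avoids re-parsing the datatypes configuration for every downloaded file in the loop.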