# HG changeset patch
# User Wise, Catherine (Digital, Acton)
# Date 1435278280 -36000
# Node ID 31838e0ecf45c6470925e1e0d0c131075e0dda75
# Parent 5589d367885e5901789c31a239b396c279e66ce8
Fixes.

diff -r 5589d367885e -r 31838e0ecf45 TrustStoreGalaxyBrowse.py
--- a/TrustStoreGalaxyBrowse.py	Thu Jun 25 11:11:15 2015 +1000
+++ b/TrustStoreGalaxyBrowse.py	Fri Jun 26 10:24:40 2015 +1000
@@ -72,14 +72,20 @@
 
         os.close(file_handle)
         gzipped_file.close()
-        shutil.copy(uncompressed, outputFile)
+        try:
+            shutil.copy(uncompressed, outputFile)
+        except shutil.Error:
+            pass  # If the file is already in the right location, move along.
         try:
             os.remove(uncompressed)
             os.remove(download)
         except OSError:
             pass
     else:
-        shutil.copy(download, outputFile)
+        try:
+            shutil.copy(download, outputFile)
+        except shutil.Error:
+            pass  # If the file is already in the right location, move along.
 
 def construct_multi_filename(id, name, file_type):
     """ Implementation of *Number of Output datasets cannot be determined until
@@ -90,12 +96,15 @@
     filename = "%s_%s_%s_%s_%s" % ('primary', id, name, 'visible', file_type)
     return filename
 
-def metadata_to_json(dataset_id, filename, name, extesion, ds_type='dataset', primary=False):
+def metadata_to_json(dataset_id, filename, name, extension, ds_type='dataset', primary=False):
     """ Return line separated JSON
         From https://github.com/mdshw5/galaxy-json-data-source/blob/master/json_data_source.py
     """
+    ext = extension
+    if ext == 'fa':
+        ext = 'fasta'
     meta_dict = dict(type=ds_type,
-                     ext=extesion,
+                     ext=ext,
                      filename=filename,
                      name=name,
                      metadata={})
@@ -190,22 +199,21 @@
             name = construct_multi_filename(hda_id, filename, extension)
             target_output_filename = None
             data_type = "new_primary_dataset"
-            # if first:
-            #     target_output_filename = file_name
-            #     dataset = "new_primary_dataset"
-            #     first = False
-            # else:
             target_output_filename = os.path.normpath(os.path.join(extra_files_path, name))
             download = truststore.getFile(store, location)
+            primary = not first
             if download is None:
                 print("File %s not found." % location.name)
                 sys.exit(4)
+            if first:
+                first = False
+                target_output_filename = file_name
+                data_type = "dataset"
             ungzip(download, target_output_filename)
+            metadata_file.write(
+                metadata_to_json(dataset_id, target_output_filename, name, extension, data_type, primary=primary))
             # ext = sniff.handle_uploaded_dataset_file(target_output_filename, datatypes_registry, ext=ext)
             # print("Guessed file type: " + ext)
-            print("in file: " + properties_file)
-            metadata_file.write(
-                metadata_to_json(dataset_id, target_output_filename, name, extension, data_type))
     else:
         print("Store is damaged or we don't have sufficient access.")
         sys.exit(4)