json_data_source.py: comparison of changesets 4:96103d66b7af and 5:33fa019735a4
Save extra files path as extra_files, not extra_data. Changed type of dataset for new primary datasets. db_key becomes dbkey. Added example json files.
author    Matt Shirley <mdshw5@gmail.com>
date      Thu, 10 Jul 2014 11:41:54 -0400
parents   96103d66b7af
children  46b589e9747a
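The commit message describes a change to the shape of the line-delimited JSON metadata records the tool writes: the extra-files path is stored under extra_files rather than extra_data, the genome build key is spelled dbkey rather than db_key, and new primary datasets get their own dataset type. A minimal sketch of what one such record might look like after this changeset; the exact key set and every value below are invented for illustration, not taken from the repository:

```python
import json

# Hypothetical metadata record in the post-changeset format.  'extra_files'
# (formerly 'extra_data') and 'dbkey' (formerly 'db_key') follow the commit
# message; all values are invented for illustration.
record = {
    'type': 'new_primary_dataset',
    'ext': 'bed',
    'name': 'example dataset',
    'dbkey': 'hg19',
    'filename': '/tmp/example.bed',
    'extra_files': '/tmp/example.bed_files',
    'base_dataset_id': 42,
}
print(json.dumps(record))
```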
--- json_data_source.py (4:96103d66b7af)
+++ json_data_source.py (5:33fa019735a4)
@@ -71,11 +71,11 @@
                       ext = metadata.get( 'extension' ),
                       filename = filename,
                       name = metadata.get( 'name' ),
                       metadata = metadata.get( 'metadata' ) )
     if metadata.get( 'extra_data', None ):
-        meta_dict[ 'extra_data' ] = '_'.join( [ filename, 'files' ] )
+        meta_dict[ 'extra_files' ] = '_'.join( [ filename, 'files' ] )
     if primary:
         meta_dict[ 'base_dataset_id' ] = dataset_id
     else:
         meta_dict[ 'dataset_id' ] = dataset_id
     return "%s\n" % json.dumps( meta_dict )
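The first hunk shows only the tail of metadata_to_json. For context, a self-contained sketch of the helper, assuming the argument order and defaults (the parameter names themselves appear in the visible lines, the head of the dict() call does not); note that the incoming query item still uses the extra_data key while the emitted record now uses extra_files:

```python
import json


def metadata_to_json(dataset_id, metadata, filename, ds_type='dataset', primary=False):
    """Return one newline-terminated JSON record describing a dataset.

    Sketch reconstructed from the visible tail of the function; the signature
    defaults and the opening dict() construction are assumptions.
    """
    meta_dict = dict(type=ds_type,
                     ext=metadata.get('extension'),
                     filename=filename,
                     name=metadata.get('name'),
                     metadata=metadata.get('metadata'))
    if metadata.get('extra_data', None):
        # The input key stays 'extra_data'; the output key is now 'extra_files'.
        meta_dict['extra_files'] = '_'.join([filename, 'files'])
    if primary:
        meta_dict['base_dataset_id'] = dataset_id
    else:
        meta_dict['dataset_id'] = dataset_id
    return "%s\n" % json.dumps(meta_dict)


# Example call for a new primary dataset: the record carries
# type='new_primary_dataset' and base_dataset_id rather than dataset_id.
line = metadata_to_json(42, {'extension': 'bed', 'name': 'hits', 'extra_data': [{}]},
                        '/tmp/hits.bed', ds_type='new_primary_dataset', primary=True)
```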
@@ -94,18 +94,23 @@
     extra_data = query_item.get( 'extra_data', None )
     if primary:
         filename = ''.join( c in VALID_CHARS and c or '-' for c in filename )
         name = construct_multi_filename( hda_id, filename, extension )
         target_output_filename = os.path.normpath( '/'.join( [ output_base_path, name ] ) )
+        metadata_parameter_file.write( metadata_to_json( dataset_id, query_item,
+                                                         target_output_filename,
+                                                         ds_type='new_primary_dataset',
+                                                         primary=primary ) )
     else:
         target_output_filename = output_filename
+        metadata_parameter_file.write( metadata_to_json( dataset_id, query_item,
+                                                         target_output_filename,
+                                                         ds_type='dataset',
+                                                         primary=primary ) )
     download_from_query( query_item, target_output_filename )
     if extra_data:
         download_extra_data( extra_data, '_'.join( [ target_output_filename, 'files' ] ) )
-    metadata_parameter_file.write( metadata_to_json( dataset_id, query_item,
-                                                     target_output_filename,
-                                                     primary=primary ) )
     return True
 
 
 def set_up_config_values(json_params):
     """ Parse json_params file and return a tuple of necessary configuration