# HG changeset patch
# User rhpvorderman
# Date 1504860581 14400
# Node ID 288819f540fdf8609b7f467b18244ca0551c4c0c
planemo upload for repository https://github.com/LUMC/lumc-galaxy-tools/tree/master/data_manager_select_index_by_path commit 3ffb01380e2fe315bf3fe88f97781a83595b3fff

diff -r 000000000000 -r 288819f540fd data_manager/data_manager_select_index_by_path.xml
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager/data_manager_select_index_by_path.xml	Fri Sep 08 04:49:41 2017 -0400
@@ -0,0 +1,56 @@
+
+    path inputer
+
+        path_name_value_key_manager.py
+            --value "${value}"
+            --dbkey "${dbkey}"
+            --name "${name}"
+            --path "${path}"
+            --data_table_name "${data_table}"
+            --json_output_file "${json_output_file}"
+
+
+Adds a server path to the selected data table.
+
+The tool will check that the path exists, but it will NOT check that the path holds the expected data type.
+
+If name is not provided, the filename from the path, minus its extension, is used.
+
+If value is not provided, the name (or its default) is used.
+
+If dbkey is not provided, the value (or its default) is used.
+
+
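The help text above describes a defaulting cascade: name falls back to the file name without its extension, value falls back to the name, and dbkey falls back to the value. A minimal sketch of that behaviour, not part of the patch (the example path /data/genomes/mm10/mm10.fa and the helper name resolve_entry are made up; the real checks live in check_param in path_name_value_key_manager.py below):

import os.path

def resolve_entry(path, name=None, value=None, dbkey=None):
    # name defaults to the file name without its extension,
    # value defaults to name, and dbkey defaults to value.
    name = name or os.path.splitext(os.path.basename(path))[0]
    value = value or name
    dbkey = dbkey or value
    return dict(value=value, dbkey=dbkey, name=name, path=path)

print(resolve_entry("/data/genomes/mm10/mm10.fa"))
# value, dbkey and name all resolve to 'mm10'; path is kept as given.
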
diff -r 000000000000 -r 288819f540fd data_manager/path_name_value_key_manager.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager/path_name_value_key_manager.py	Fri Sep 08 04:49:41 2017 -0400
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+
+import json
+import optparse
+import os.path
+
+
+def _add_data_table_entry( data_manager_dict, data_table_name, data_table_entry ):
+    # Create the 'data_tables' section and the named table if they do not exist
+    # yet, then append the new entry to that table.
+    data_manager_dict['data_tables'] = data_manager_dict.get( 'data_tables', {} )
+    data_manager_dict['data_tables'][ data_table_name ] = data_manager_dict['data_tables'].get( data_table_name, [] )
+    data_manager_dict['data_tables'][ data_table_name ].append( data_table_entry )
+    return data_manager_dict
+
+
+def check_param(name, value, default=None, check_tab=True):
+    # Fall back to the default when no usable value was supplied. Tabs are
+    # rejected because they are the column separators in Galaxy's .loc files.
+    if value in [ None, '', '?' ]:
+        if default:
+            print "Using {0} for {1} as no value was provided".format( default, name )
+            value = default
+        else:
+            raise Exception( '{0} is not a valid {1}. You must specify a valid {1}.'.format( value, name ) )
+    if check_tab and "\t" in value:
+        raise Exception( '{0} is not a valid {1}. It may not contain a tab because tabs are used as separators by Galaxy.'.format( value, name ) )
+    return value
+
+
+def main():
+    # Parse the command line.
+    parser = optparse.OptionParser()
+    parser.add_option( '--value', action='store', type="string", default=None, help='value' )
+    parser.add_option( '--dbkey', action='store', type="string", default=None, help='dbkey' )
+    parser.add_option( '--name', action='store', type="string", default=None, help='name' )
+    parser.add_option( '--path', action='store', type="string", default=None, help='path' )
+    parser.add_option( '--data_table_name', action='store', type="string", default=None, help='data table name' )
+    parser.add_option( '--json_output_file', action='store', type="string", default=None, help='json output file' )
+    (options, args) = parser.parse_args()
+
+    path = check_param("path", options.path)
+    if not os.path.exists(path):
+        raise Exception( 'Unable to find path {0}.'.format( path ) )
+    # Defaulting cascade: name falls back to the file name without its
+    # extension, value falls back to name, and dbkey falls back to value.
+    basename = os.path.basename(path)
+    filename = os.path.splitext(basename)[0]
+    name = check_param("name", options.name, default=filename)
+    value = check_param("value", options.value, default=name)
+    dbkey = check_param("dbkey", options.dbkey, default=value)
+    data_table_name = check_param("data_table_name", options.data_table_name)
+    json_output_file = check_param("json_output_file", options.json_output_file, check_tab=False)
+
+    if os.path.exists(json_output_file):
+        params = json.loads( open( json_output_file ).read() )
+        print "params", params
+    else:
+        params = {}
+
+    data_manager_dict = {}
+    data_table_entry = dict( value=value, dbkey=dbkey, name=name, path=path )
+    _add_data_table_entry( data_manager_dict, data_table_name, data_table_entry )
+
+    # Save the new entry to the json file that Galaxy reads back in.
+    with open( json_output_file, 'wb' ) as output_file:
+        output_file.write( json.dumps( data_manager_dict ) )
+        output_file.write( "\n" )
+
+
+if __name__ == "__main__":
+    main()

diff -r 000000000000 -r 288819f540fd data_manager_conf.xml
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager_conf.xml	Fri Sep 08 04:49:41 2017 -0400
@@ -0,0 +1,143 @@
diff -r 000000000000 -r 288819f540fd test.json
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test.json	Fri Sep 08 04:49:41 2017 -0400
@@ -0,0 +1,1 @@
+{"data_tables": {"all_fasta": [{"path": "/home/christian/Dropbox/Gene_data/ercc_and_TPA_mouse_rRNA.fa", "dbkey": "ercc_and_TPA_mouse_rRNA", "name": "ercc_and_TPA_mouse_rRNA", "value": "ercc_and_TPA_mouse_rRNA"}]}}
diff -r 000000000000 -r 288819f540fd tool-data/all_fasta.loc.sample
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tool-data/all_fasta.loc.sample	Fri Sep 08 04:49:41 2017 -0400
@@ -0,0 +1,17 @@
+#This file lists the locations and dbkeys of all the fasta files
+
+#This file has the format (white space characters are TAB characters):
+#
+#<unique_build_id>	<dbkey>	<display_name>	<file_path>
+#
+#So, all_fasta.loc could look something like this:
+#
+#apiMel3	apiMel3	Honeybee (Apis mellifera): apiMel3	/path/to/genome/apiMel3/apiMel3.fa
+#hg19canon	hg19	Human (Homo sapiens): hg19 Canonical	/path/to/genome/hg19/hg19canon.fa
+#hg19full	hg19	Human (Homo sapiens): hg19 Full	/path/to/genome/hg19/hg19full.fa
+#
+#Your all_fasta.loc file should contain an entry for each individual
+#fasta file. So there will be multiple fasta files for each build,
+#such as with hg19 above.
+#
+
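The JSON entry written by the data manager (see test.json above) carries the same four fields, value, dbkey, name and path, that make up a tab-separated row in the .loc format documented in all_fasta.loc.sample. A small sketch, not part of the patch, of how such a JSON file translates into .loc rows (it reads the test.json shipped with this repository):

import json

with open("test.json") as handle:
    data_tables = json.load(handle)["data_tables"]

for table_name, entries in data_tables.items():
    for entry in entries:
        # One .loc row per entry: value <TAB> dbkey <TAB> name <TAB> path
        print("\t".join([entry["value"], entry["dbkey"], entry["name"], entry["path"]]))
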
diff -r 000000000000 -r 288819f540fd tool_data_table_conf.xml.sample
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tool_data_table_conf.xml.sample	Fri Sep 08 04:49:41 2017 -0400
@@ -0,0 +1,7 @@
+
+
+
+        value, dbkey, name, path
+
+
+
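
The columns declaration that survives in tool_data_table_conf.xml.sample, "value, dbkey, name, path", gives the column order used when entries are read back out of tool-data/all_fasta.loc. A reader sketch under that assumption (read_loc is an illustrative helper, not something Galaxy or this patch provides, and the .loc path is assumed):

COLUMNS = ["value", "dbkey", "name", "path"]

def read_loc(loc_path="tool-data/all_fasta.loc"):
    # Parse a Galaxy .loc file into one dict per data table entry.
    entries = []
    with open(loc_path) as handle:
        for line in handle:
            line = line.rstrip("\n")
            if not line or line.startswith("#"):
                continue  # skip comments and blank lines
            entries.append(dict(zip(COLUMNS, line.split("\t"))))
    return entries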