changeset 1:2ae1e96a8380 draft
planemo upload for repository https://github.com/Yating-L/jbrowse-archive-creator.git commit a8c47ae0025953ef398bdc689dc5df5516edf686-dirty
author      yating-l
date        Tue, 24 Oct 2017 18:24:40 -0400
parents     bc00f5c4c59e
children    8ff4b84d709f
files       apollo/ApolloInstance.py apollo/ApolloInstance.pyc apollo/ApolloUser.py jbrowsehubToApollo.py jbrowsehubToApollo.xml logging.json templates/custom_track_styles.css todo.md util/Logger.py util/Reader.py util/subtools.py
diffstat    11 files changed, 229 insertions(+), 335 deletions(-)
--- a/apollo/ApolloInstance.py	Tue Oct 17 17:28:05 2017 -0400
+++ b/apollo/ApolloInstance.py	Tue Oct 24 18:24:40 2017 -0400
@@ -4,27 +4,30 @@
 import shutil
 import tempfile
 import logging
+import random
+import string
 from util import subtools
 from mako.lookup import TemplateLookup
 
 class ApolloInstance(object):
-    def __init__(self, apollo_host, tool_directory, user_email):
+    def __init__(self, apollo_host, apollo_admin, tool_directory):
         self.apollo_host = apollo_host
         self.tool_directory = tool_directory
-        self.default_user = user_email
         self.logger = logging.getLogger(__name__)
+        self.apollo_admin = apollo_admin
         self.apolloTemplate = self._getApolloTemplate()
         self._arrow_init()
 
-    #TODO: Encode password
+
     def _arrow_init(self):
+        subtools.verify_user_login(self.apollo_admin.user_email, self.apollo_admin.password)
         arrow_config = tempfile.NamedTemporaryFile(bufsize=0)
         with open(arrow_config.name, 'w') as conf:
             htmlMakoRendered = self.apolloTemplate.render(
                 apollo_host = self.apollo_host,
-                admin_user = self.default_user,
-                admin_pw = '1234'
+                admin_user = self.apollo_admin.user_email,
+                admin_pw = self.apollo_admin.password
             )
             conf.write(htmlMakoRendered)
 
@@ -32,7 +35,14 @@
         arrow_config_dir = os.path.join(home_dir, '.apollo-arrow.yml')
         shutil.copyfile(arrow_config.name, arrow_config_dir)
         self.logger.debug("Initated arrow: apollo-arrow.yml= %s", arrow_config_dir)
-    
+
+    #TODO: Encode admin password
+    '''
+    def _generatePassword(self, length=8):
+        chars = string.digits + string.letters
+        pw = ''.join([random.choice(chars) for _ in range(length)])
+        return pw
+    '''
 
     def _getApolloTemplate(self):
         mylookup = TemplateLookup(directories=[os.path.join(self.tool_directory, 'templates')],
@@ -40,17 +50,15 @@
         apolloTemplate = mylookup.get_template("apollo-arrow.yml")
         return apolloTemplate
 
-    def getHost(self):
-        return self.apollo_host
-    
     def createApolloUser(self, apollo_user, admin=None):
         p = subtools.arrow_create_user(apollo_user.user_email, apollo_user.firstname, apollo_user.lastname, apollo_user.password, admin)
         user_info = json.loads(p)
         user_id = user_info.get('userId')
         if not user_id:
             self.logger.debug("Cannot create new user: %s; The user may already exist", apollo_user.user_email)
+            subtools.verify_user_login(apollo_user.user_email, apollo_user.password)
             user_id = subtools.arrow_get_users(apollo_user.user_email)
-        self.logger.debug("Got user_id for new or existing user: user_id = %s", str(user_id))
+        self.logger.debug("Got user_id for new or existing user: user_id = %s", str(user_id))
         return user_id
 
     def grantPermission(self, user_id, organism_id, **user_permissions):
@@ -59,13 +67,16 @@
 
     def addOrganism(self, organism_name, organism_dir):
         p = subtools.arrow_add_organism(organism_name, organism_dir)
+        if not p:
+            self.logger.error("The user is not authorized to add organism")
+            exit(-1)
         organism = json.loads(p)
         organism_id = organism['id']
         self.logger.debug("Added new organism to Apollo instance, %s", p)
         return organism_id
 
     def loadHubToApollo(self, apollo_user, organism_name, organism_dir, admin_user=False, **user_permissions):
-        user_id = self.createApolloUser(apollo_user, admin_user)
+        #user_id = self.createApolloUser(apollo_user, admin_user)
         organism_id = self.addOrganism(organism_name, organism_dir)
-        self.grantPermission(user_id, organism_id, **user_permissions)
+        #self.grantPermission(user_id, organism_id, **user_permissions)
         self.logger.debug("Successfully load the hub to Apollo")
\ No newline at end of file
--- a/apollo/ApolloUser.py	Tue Oct 17 17:28:05 2017 -0400
+++ b/apollo/ApolloUser.py	Tue Oct 24 18:24:40 2017 -0400
@@ -3,7 +3,7 @@
 import os
 
 class ApolloUser(object):
-    def __init__(self, user_email, firstname, lastname, password):
+    def __init__(self, user_email, password, firstname=None, lastname=None):
         self.user_email = user_email
         self.firstname = firstname
         self.lastname = lastname
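For orientation, here is a minimal sketch (not part of the changeset) of how the reworked classes fit together, mirroring the calls made in jbrowsehubToApollo.py below; the credentials and paths are placeholder values and a running Apollo instance is assumed:

from apollo.ApolloUser import ApolloUser
from apollo.ApolloInstance import ApolloInstance

# Placeholder admin credentials; the new ApolloUser signature is (user_email, password, ...)
admin = ApolloUser('admin@example.org', 's3cret')
# The constructor verifies the admin login and writes ~/.apollo-arrow.yml for arrow
apollo = ApolloInstance('http://localhost:8080/apollo', admin, '/path/to/tool_directory')
# With this changeset, loadHubToApollo only registers the organism
# (user creation and permission granting are commented out)
apollo.loadHubToApollo(admin, 'dmel', '/path/to/jbrowse/data', admin=True)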
--- a/jbrowsehubToApollo.py	Tue Oct 17 17:28:05 2017 -0400
+++ b/jbrowsehubToApollo.py	Tue Oct 24 18:24:40 2017 -0400
@@ -4,9 +4,10 @@
 import argparse
 import json
 import logging
-import collections
+import socket
 
 from apollo.ApolloInstance import ApolloInstance
 from util.Reader import Reader
+from util.Logger import Logger
 
 def main(argv):
@@ -29,30 +30,42 @@
 
     # Begin init variables
     extra_files_path = reader.getExtFilesPath()
-    user_email = reader.getUserEmail()
-    genome_name = reader.getGenomeName()
+    #user_email = reader.getUserEmail()
     species_name = reader.getSpeciesName()
-    apollo_host = reader.getApolloHost()
-    apollo_user = reader.getApolloUser()
+    #apollo_host = reader.getApolloHost()
+    apollo_port = reader.getPortNum()
+    apollo_host = "http://localhost:"+ apollo_port + "/apollo"
+    #apollo_host = "http://localhost:8080/apollo"
+    #apollo_user = reader.getApolloUser()
+    apollo_admin_user = reader.getAdminUser()
     toolDirectory = reader.getToolDir()
-    jbrowse_hub = reader.getJBrowseHubDir()
+    #jbrowse_hub = reader.getJBrowseHubDir()
+    debug_mode = reader.getDebugMode()
+
+    #### Logging management ####
+    # If we are in Debug mode, also print in stdout the debug dump
+    log = Logger(tool_directory=toolDirectory, debug=debug_mode, extra_files_path=extra_files_path)
+    log.setup_logging()
 
     logging.info("#### JBrowseArchiveCreator: Start to upload JBrowse Hub to Apollo instance: %s #### ", apollo_host)
+    logging.debug('JSON parameters: %s\n\n', json.dumps(reader.args))
 
     # Set up apollo
-    apollo = ApolloInstance(apollo_host, toolDirectory, user_email)
-    jbrowse_hub_dir = _getHubDir(jbrowse_hub, extra_files_path, genome_name)
-    apollo.loadHubToApollo(apollo_user, species_name, jbrowse_hub_dir, admin=True)
+    apollo = ApolloInstance(apollo_host, apollo_admin_user, toolDirectory)
+    jbrowse_hub_dir = _getHubDir(extra_files_path)
+    apollo.loadHubToApollo(apollo_admin_user, species_name, jbrowse_hub_dir, admin=True)
 
     outHtml(outputFile, apollo_host, species_name)
 
     logging.info('#### JBrowseArchiveCreator: Congratulation! JBrowse Hub is uploaded! ####\n')
 
-def _getHubDir(outputFile, extra_files_path, genome_name):
-    file_dir = os.path.abspath(outputFile)
-    source_dir = os.path.dirname(file_dir)
-    output_folder_name = os.path.basename(extra_files_path)
-    jbrowse_hub_dir = os.path.join(source_dir, output_folder_name, 'myHub', genome_name)
-    return jbrowse_hub_dir
+def _getHubDir(extra_files_path):
+    for root, dirs, files in os.walk(extra_files_path):
+        for name in files:
+            if name == "trackList.json":
+                logging.debug("JBrowse hub directory: %s", root)
+                return root
+    logging.error("Cannot find jbrowsehub")
+    exit(-1)
 
 def outHtml(outputFile, host_name, species_name):
     with open(outputFile, 'w') as htmlfile:
--- a/jbrowsehubToApollo.xml	Tue Oct 17 17:28:05 2017 -0400
+++ b/jbrowsehubToApollo.xml	Tue Oct 24 18:24:40 2017 -0400
@@ -11,21 +11,31 @@
     </stdio>
 
     <command detect_errors="exit_code"><![CDATA[
-        #import json
-        #set global data_parameter_dict = {}
-        #silent $data_parameter_dict.update({"jbrowse_hub": str($jbrowse_hub)})
-        #silent $data_parameter_dict.update({"genome_name": str($genome_name)})
-        #silent $data_parameter_dict.update({"apollo_host": str($apollo_host)})
-        #silent $data_parameter_dict.update({"user_email": str($__user_email__)})
-        #silent $data_parameter_dict.update({"tool_directory": str($__tool_directory__)})
-        #silent $data_parameter_dict.update({"extra_files_path": str($jbrowsehub.extra_files_path)})
-        #if $apollo_users_settings.apollo_users_selector == "yes":
-            #set apollo_user = {"firstname": str($apollo_users_settings.firstname), "lastname": str($apollo_users_settings.lastname), "password": str($apollo_users_settings.password), "user_email": str($apollo_users_settings.user_email)}
-            $data_parameter_dict.update({"apollo_user": $apollo_user})
-        #end if
-        #set input_data = json.dumps($data_parameter_dict)
-        python $__tool_directory__/jbrowsehubToApollo.py --data_json $input_data -o $output
+
+        ## Dump the tool parameters into a JSON file
+        python $json_file parameters.json;
+        python $__tool_directory__/jbrowsehubToApollo.py --data_json parameters.json -o $output
     ]]></command>
+    <configfiles>
+        <configfile name="json_file">
+import json
+import sys
+
+file_path = sys.argv[1]
+#set global data_parameter_dict = {}
+#silent $data_parameter_dict.update({"species_name": str($species_name)})
+#set apollo_admin = {"user_email": str($admin_username), "password": str($admin_password)}
+#silent $data_parameter_dict.update({"apollo_admin": $apollo_admin})
+##silent $data_parameter_dict.update({"user_email": str($__user_email__)})
+#silent $data_parameter_dict.update({"tool_directory": str($__tool_directory__)})
+#silent $data_parameter_dict.update({"extra_files_path": str($jbrowse_hub.extra_files_path)})
+#silent $data_parameter_dict.update({"port": str($advanced_options.port)})
+#silent $data_parameter_dict.update({"debug_mode": str($advanced_options.debug_mode)})
+with open(file_path, 'w') as f:
+    json.dump($data_parameter_dict, f)
+        </configfile>
+    </configfiles>
+
     <inputs>
         <param
@@ -35,47 +45,43 @@
            label="JBrowse Hub created by JBrowse Archive Creator"
        />
        <param
-           name="genome_name"
+           name="species_name"
            type="text"
            size="30"
            value="unknown"
            label="Species name"
        />
+        <param name="admin_username" type="text" label="Admin username" help="Login in with Apollo admin account">
+            <sanitizer>
+                <valid initial="string.letters,string.digits">
+                    <add value="@-=_.()/+*^,:?!"/>
+                </valid>
+            </sanitizer>
+        </param>
        <param
-           name="apollo_host"
-           type="text"
-           label="Apollo host"
+           name="admin_password"
+           type="text"
+           label="Admin password"
        />
-        <conditional name="apollo_users_settings">
-            <param name="apollo_users_selector" type="select" label="Create or specify your Apollo account">
-                <option value="no" selected="true">Use exist demo user account (will use your galaxy email address for apollo, password: gonramp) </option>
-                <option value="yes">Create or use your own Apollo account</option>
+        <conditional name="advanced_options">
+            <param name="advanced_options_selector" type="select" label="Advanced options">
+                <option value="off" selected="true">Hide advanced options</option>
+                <option value="on">Display advanced options</option>
            </param>
            <!-- TODO: Avoid redundancy here -->
-            <when value="yes">
-                <param
-                    name="firstname"
-                    type="text"
-                    label="First Name"
-                />
-                <param
-                    name="lastname"
-                    type="text"
-                    label="Last Name"
-                />
-                <param
-                    name="user_email"
-                    type="text"
-                    label="Email Address"
-                />
-                <param
-                    name="password"
-                    type="text"
-                    label="Password"
-                />
+            <when value="on">
+                <param name="port" type="integer" min="8000" max="8888" value="8080" label="Port number of Apollo" />
+                <param name="debug_mode" type="select" label="Activate debug mode">
+                    <option value="false" selected="true">No</option>
+                    <option value="true">Yes</option>
+                    <help>
+                        Use this option if you are a G-OnRamp developer
+                    </help>
+                </param>
            </when>
-            <when value="no">
-                <param name="default_user" type="hidden"
+            <when value="off">
+                <param name="port" type="hidden" value="8080" />
+                <param name="debug_mode" type="hidden" value="false">
                </param>
            </when>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/logging.json	Tue Oct 24 18:24:40 2017 -0400
@@ -0,0 +1,38 @@
+{
+    "version": 1,
+    "disable_existing_loggers": false,
+    "formatters": {
+        "simple": {
+            "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+        }
+    },
+
+    "handlers": {
+        "console": {
+            "class": "logging.StreamHandler",
+            "level": "INFO",
+            "formatter": "simple",
+            "stream": "ext://sys.stdout"
+        },
+
+        "console_stderr": {
+            "class": "logging.StreamHandler",
+            "level": "ERROR",
+            "formatter": "simple",
+            "stream": "ext://sys.stderr"
+        }
+    },
+
+    "loggers": {
+        "Reader": {
+            "level": "INFO",
+            "handlers": ["console"],
+            "propagate": "yes"
+        }
+    },
+
+    "root": {
+        "level": "DEBUG",
+        "handlers": ["console", "console_stderr"]
+    }
+}
\ No newline at end of file
--- a/templates/custom_track_styles.css	Tue Oct 17 17:28:05 2017 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-.${label},
-.plus-${label},
-.minus-${label}
-{
-    background-color: ${color};
-    height: 90%;
-    top: 5%;
-}
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/todo.md	Tue Oct 24 18:24:40 2017 -0400
@@ -0,0 +1,9 @@
+# JBrowseHubToApollo's TODO
+
+- [ ] Correct new Apollo user's email address. Galaxy santitizes '@' to '__at__'
+- [ ] Check password for admin login
+
+### DONE
+
+
+- [x] upload jbrowsehub to Apollo instance
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/util/Logger.py	Tue Oct 24 18:24:40 2017 -0400
@@ -0,0 +1,38 @@
+import os
+import sys
+import json
+import logging
+import logging.config
+
+#from util.Filters import TraceBackFormatter
+
+class Logger(object):
+    def __init__(self, tool_directory, debug="False", extra_files_path=None):
+        self.tool_directory = tool_directory
+        self.default_level = logging.INFO
+        self.debug = debug
+        self.extra_files_path = extra_files_path
+
+    def setup_logging(self):
+        """Setup logging configuration
+        reference: https://fangpenlin.com/posts/2012/08/26/good-logging-practice-in-python/
+        """
+        config_path = os.path.join(self.tool_directory, 'logging.json')
+        default_level=logging.INFO
+        if self.debug.lower() == "true":
+            default_level=logging.DEBUG
+        if os.path.exists(config_path):
+            with open(config_path, 'rt') as f:
+                config = json.load(f)
+            config["handlers"]["console"]["level"] = default_level
+            if self.extra_files_path:
+                for i in config["handlers"]:
+                    if "filename" in config["handlers"][i]:
+                        config["handlers"][i]["filename"] = os.path.join(self.extra_files_path, config["handlers"][i]["filename"])
+                logging.config.dictConfig(config)
+            else:
+                logging.warn("Extra files path is not set. The log files will exist at current working directory instead of final output folder")
+        else:
+            logging.basicConfig(level=default_level)
+            logging.warn("Cannot find logging configuration file!\n")
+
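A minimal usage sketch of the new Logger class (assuming the tool directory holds the logging.json added above; the paths here are placeholders, not real deployment paths):

import logging
from util.Logger import Logger

# debug is passed as a string ("true"/"false"), matching the Galaxy select parameter
log = Logger(tool_directory='/galaxy/tools/jbrowsehubToApollo',
             debug="true",
             extra_files_path='/galaxy/datasets/dataset_42_files')
log.setup_logging()   # loads logging.json and lowers the console handler to DEBUG
logging.getLogger(__name__).debug("logging configured")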
--- a/util/Reader.py	Tue Oct 17 17:28:05 2017 -0400
+++ b/util/Reader.py	Tue Oct 24 18:24:40 2017 -0400
@@ -2,6 +2,7 @@
 import re
 import logging
 import codecs
+import socket
 
 from apollo.ApolloUser import ApolloUser
 from util import santitizer
@@ -54,28 +55,33 @@
         except KeyError:
             print ("debug_mode is not defined in the input file!")
             exit(1)
+
+    def getPortNum(self):
+        try:
+            return self.args["port"]
+        except KeyError:
+            print ("port is not defined in the input file!")
+            exit(1)
 
     def getApolloHost(self):
-        apollo_host = self.args.get("apollo_host")
+        #apollo_host = self.args.get("apollo_host")
+        hostname = socket.gethostname()
+        ip = socket.gethostbyname(hostname)
+        protocol = socket.getprotobyname(hostname)
+        apollo_host = str(protocol) + str(ip)
         return apollo_host
 
     def getSpeciesName(self):
         species_name = santitizer.sanitize_name_input(self.args["species_name"])
         return species_name
-    
-    def getGenomeName(self):
-        jbrowse_hub = self.getJBrowseHubDir()
-        with open(jbrowse_hub, 'r') as f:
-            html = f.read()
-        m = re.search('The new Organism "(.+?)" is created on Apollo', html)
-        if m:
-            genome_name = m.group(1)
-        else:
-            print("Cannot find genome name in the jbrowse hub file!")
-            exit(1)
-        return genome_name
-    
+
+    def getAdminUser(self):
+        admin_info = self.args.get("apollo_admin")
+        user_email = admin_info['user_email']
+        password = admin_info['password']
+        apollo_admin = ApolloUser(user_email, password)
+        return apollo_admin
 
     def getApolloUser(self):
        user_info = self.args.get("apollo_user")
--- a/util/subtools.py	Tue Oct 17 17:28:05 2017 -0400
+++ b/util/subtools.py	Tue Oct 24 18:24:40 2017 -0400
@@ -63,13 +63,13 @@
     # If we detect an error from the subprocess, then we raise an exception
     # TODO: Manage if we raise an exception for everything, or use CRITICAL etc... but not stop process
     # TODO: The responsability of returning a sys.exit() should not be there, but up in the app.
+        #if p.returncode:
         if p.returncode:
             if stderr == subprocess.PIPE:
                 raise PopenError(cmd, error, p.returncode)
             else:
                 # TODO: To Handle properly with a design behind, if we received a option as a file for the error
-                raise Exception("Error when calling {0}. Error as been logged in your file {1}. Error code: {2}"
-                                .format(cmd, stderr.name, p.returncode))
+                raise Exception("Error when calling {0}. Error as been logged in your file {1}. Error code: {2}".format(cmd, stderr.name, p.returncode))
 
     except OSError as e:
         message = "The subprocess {0} has encountered an OSError: {1}".format(
@@ -94,248 +94,14 @@
         logging.exception(message)
         sys.exit(-1)
 
-    return p
-
-
-def write_features(field, attribute, gff3):
-    """
-    The function write the features to gff3 format (defined in https://github.com/The-Sequence-Ontology/Specifications/blob/master/gff3.md)
-    field, attribute are ordered dictionary
-    gff3 is the file handler
-    """
-    attr = []
-    for v in field.values():
-        gff3.write(str(v) + '\t')
-    for k, v in attribute.items():
-        s = str(k) + '=' + str(v)
-        attr.append(s)
-    gff3.write(';'.join(attr))
-    gff3.write('\n')
-
-def twoBitInfo(two_bit_file_name, two_bit_info_file):
-    """
-    Call twoBitInfo and write the result into twoBit_info_file
-    :param two_bit_file_name:
-    :param two_bit_info_file:
-    :return the subprocess.check_call return object:
-    """
-    array_call = ['twoBitInfo', two_bit_file_name, two_bit_info_file]
-    p = _handleExceptionAndCheckCall(array_call)
-    return p
-
-
-def faToTwoBit(fasta_file_name, twoBitFile):
-    """
-    This function call faToTwoBit UCSC tool, and return the twoBitFile
-    :param fasta_file_name:
-    :param mySpecieFolder:
-    :return:
-    """
-
-    array_call = ['faToTwoBit', fasta_file_name, twoBitFile]
-    _handleExceptionAndCheckCall(array_call)
-
-    return twoBitFile
-
-def sortChromSizes(two_bit_info_file_name, chrom_sizes_file_name):
-    """
-    Call sort with -k2rn on two_bit_info_file_name and write the result into chrom_sizes_file_name
-    :param two_bit_info_file_name:
-    :param chrom_sizes_file_name:
-    :return:
-    """
-    array_call = ['sort', '-k2rn', two_bit_info_file_name,
-                  '-o', chrom_sizes_file_name]
-    p = _handleExceptionAndCheckCall(array_call)
-    return p
-
-def getChromSizes(reference, tool_dir):
-    #TODO: find a better way instead of shipping the two exec files with the tool
-    faToTwoBit = os.path.join(tool_dir, 'faToTwoBit')
-    twoBitInfo = os.path.join(tool_dir, 'twoBitInfo')
-    try:
-        twoBitFile = tempfile.NamedTemporaryFile(bufsize=0)
-        chrom_sizes = tempfile.NamedTemporaryFile(bufsize=0, suffix='.chrom.sizes', delete=False)
-    except IOError as err:
-        print "Cannot create tempfile err({0}): {1}".format(err.errno, err.strerror)
-    try:
-        subprocess.call(['faToTwoBit', reference, twoBitFile.name])
-    except OSError as err:
-        print "Cannot generate twoBitFile from faToTwoBit err({0}): {1}".format(err.errno, err.strerror)
-    try:
-        subprocess.call(['twoBitInfo', twoBitFile.name, chrom_sizes.name])
-    except OSError as err:
-        print "Cannot generate chrom_sizes from twoBitInfo err({0}): {1}".format(err.errno, err.strerror)
-    return chrom_sizes
-
-def sequence_region(chrom_sizes):
-    """
-    This function read from a chromatin size file generated by twoBitInfo and write the information to dict
-    return a dict
-    """
-    f = open(chrom_sizes, 'r')
-    sizes = f.readlines()
-    sizes_dict = {}
-    for line in sizes:
-        chrom_info = line.rstrip().split('\t')
-        sizes_dict[chrom_info[0]] = chrom_info[1]
-    return sizes_dict
-
-def child_blocks(parent_field, parent_attr, gff3, child_type):
-    num = 0
-    blockcount = int(parent_attr['blockcount'])
-    chromstart = parent_attr['chromstarts'].split(',')
-    blocksize = parent_attr['blocksizes'].split(',')
-    parent_start = parent_field['start']
-    while num < blockcount:
-        child_attr = OrderedDict()
-        child_field = parent_field
-        child_field['type'] = child_type
-        child_field['start'] = int(chromstart[num]) + int(parent_start)
-        child_field['end'] = int(child_field['start']) + int(blocksize[num]) - 1
-        child_attr['ID'] = parent_attr['ID'] + '_part_' + str(num+1)
-        child_attr['Parent'] = parent_attr['ID']
-        write_features(child_field, child_attr, gff3)
-        num = num + 1
-
-def add_tracks_to_json(trackList_json, new_tracks, modify_type):
-    """
-    Add to track configuration (trackList.json)
-    # modify_type = 'add_tracks': add a new track like bam or bigwig, new_track = dict()
-    # modify_type = 'add_attr': add configuration to the existing track, new_track = dict(track_name: dict())
-    """
-    with open(trackList_json, 'r+') as f:
-        data = json.load(f)
-        if modify_type == 'add_tracks':
-            data['tracks'].append(new_tracks)
-        elif modify_type == 'add_attr':
-            for k in new_tracks:
-                for track in data['tracks']:
-                    if k.lower() in track['urlTemplate'].lower():
-                        attr = new_tracks[k]
-                        for k, v in attr.items():
-                            track[k] = v
-        f.seek(0, 0)
-        f.write(json.dumps(data, separators=(',' , ':'), indent=4))
-        f.truncate()
-        f.close()
-
-
-def createBamIndex(bamfile):
-    subprocess.call(['samtools', 'index', bamfile])
-    filename = bamfile + '.bai'
-    if os.path.exists(filename):
-        return filename
-    else:
-        raise ValueError('Did not find bai file')
-
-def flatfile_to_json(inputFile, dataType, trackType, trackLabel, outputFolder, options=None, compress=False):
-    if "bed" in dataType:
-        fileType = "--bed"
-    elif "gff" in dataType:
-        fileType = "--gff"
-    else:
-        raise ValueError("%s is not a valid filetype for flatfile_to_json" % dataType)
-
-
-    array_call = ['flatfile-to-json.pl',
-                  fileType, inputFile,
-                  '--trackType', trackType,
-                  '--trackLabel', trackLabel,
-                  '--out', outputFolder]
-    if compress:
-        array_call.append('--compress')
-    if options:
-        config = options.get("config")
-        clientConfig = options.get("clientConfig")
-        renderClassName = options.get('renderClassName')
-        subfeatureClasses = options.get('subfeatureClasses')
-        load_type = options.get("type")
-        if clientConfig:
-            array_call.append('--clientConfig')
-            array_call.append(clientConfig)
-        if config:
-            array_call.append('--config')
-            array_call.append(config)
-        if load_type:
-            array_call.append('--type')
-            array_call.append(load_type)
-        if renderClassName:
-            array_call.append('--renderClassName')
-            array_call.append(renderClassName)
-        if subfeatureClasses:
-            array_call.append('--subfeatureClasses')
-            array_call.append(json.dumps(subfeatureClasses))
-
-    p = _handleExceptionAndCheckCall(array_call)
-    return p
-
-def bam_to_json(inputFile, trackLabel, outputFolder, options=None, compress=False):
-
-    array_call = ['bam-to-json.pl',
-                  '--bam', inputFile,
-                  '--trackLabel', trackLabel,
-                  '--out', outputFolder]
-    if compress:
-        array_call.append('--compress')
-    if options:
-        config = options.get('config')
-        clientConfig = options.get('clientConfig')
-        if clientConfig:
-            array_call.append('--clientConfig')
-            array_call.append(clientConfig)
-        if config:
-            array_call.append('--config')
-            array_call.append(config)
-
-    p = _handleExceptionAndCheckCall(array_call)
-    return p
-
-def add_track_json(trackList, track_json):
-    track_json = json.dumps(track_json)
-    new_track = subprocess.Popen(['echo', track_json], stdout=subprocess.PIPE)
-    p = subprocess.call(['add-track-json.pl', trackList], stdin=new_track.stdout)
-    return p
-
-def prepare_refseqs(fasta_file_name, outputFolder):
-    array_call = ['prepare-refseqs.pl', '--fasta', fasta_file_name, '--out', outputFolder]
-    p = _handleExceptionAndCheckCall(array_call)
-    return p
-
-def generate_names(outputFolder):
-    array_call = ['generate-names.pl', '-v', '--out', outputFolder]
-    p = _handleExceptionAndCheckCall(array_call)
-    return p
-
-def validateFiles(input_file, chrom_sizes_file_name, file_type, options=None):
-    """
-    Call validateFiles on input_file, using chrom_sizes_file_name and file_type
-    :param input_file:
-    :param chrom_sizes_file_name:
-    :param file_type:
-    :return:
-    """
-
-    array_call = ['validateFiles', '-chromInfo=' + chrom_sizes_file_name, '-type='+ file_type, input_file]
-    if options:
-        tab = options.get("tab")
-        autoSql = options.get("autoSql")
-        logging.debug("tab: {0}".format(tab))
-        logging.debug("autoSql: {0}".format(autoSql))
-        if autoSql:
-            autoSql = ''.join(['-as=', autoSql])
-            array_call.append(autoSql)
-        if tab:
-            array_call.append('-tab')
-    p = _handleExceptionAndCheckCall(array_call)
-    return p
+    return output
 
 def arrow_add_organism(organism_name, organism_dir, public=False):
     array_call = ['arrow', 'organisms', 'add_organism', organism_name, organism_dir]
     if public:
         array_call.append('--public')
-    print array_call
-    p = subprocess.check_output(array_call)
+    p = _handleExceptionAndCheckCall(array_call)
+    #p = subprocess.check_output(array_call)
     return p
 
 def arrow_create_user(user_email, firstname, lastname, password, admin=False):
@@ -377,3 +143,19 @@
         return d['userId']
     logging.error("Cannot find user %s", user_email)
 
+def verify_user_login(username, password):
+    user_info = {'username': username, 'password': password}
+    array_call = ['curl',
+                  '-b', 'cookies.txt',
+                  '-c', 'cookies.txt',
+                  '-H', 'Content-Type:application/json',
+                  '-d', json.dumps(user_info),
+                  'http://localhost:8080/apollo/Login?operation=login'
+                  ]
+    p = _handleExceptionAndCheckCall(array_call)
+    msg = json.loads(p)
+    if 'error' in msg:
+        logging.error("The Authentication for user %s failed. Get error message %s", username, msg['error'])
+        exit(-1)
+
+
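The new verify_user_login shells out to curl against the hard-coded http://localhost:8080/apollo login endpoint. Purely as an illustration of the same HTTP call (the requests library is not used anywhere in this changeset), an equivalent check could look like:

import requests

def verify_user_login(username, password, host='http://localhost:8080/apollo'):
    # POST the credentials as JSON to Apollo's login endpoint, keeping the session cookie
    session = requests.Session()
    resp = session.post(host + '/Login?operation=login',
                        json={'username': username, 'password': password})
    msg = resp.json()
    if 'error' in msg:
        raise RuntimeError("Authentication failed for %s: %s" % (username, msg['error']))
    return session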