# HG changeset patch # User fubar # Date 1619766417 0 # Node ID 1c652687a08fa9d6bae25c6edbf81a5ab9d1a527 # Parent 5fc0c9a93072337d2739e28050e2129eb5492c92 Uploaded diff -r 5fc0c9a93072 -r 1c652687a08f toolfactory/ToolFactory.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/ToolFactory.py Fri Apr 30 07:06:57 2021 +0000 @@ -0,0 +1,975 @@ + +# see https://github.com/fubar2/toolfactory +# +# copyright ross lazarus (ross stop lazarus at gmail stop com) May 2012 +# +# all rights reserved +# Licensed under the LGPL +# suggestions for improvement and bug fixes welcome at +# https://github.com/fubar2/toolfactory +# +# April 2021: Refactored into two tools - generate and test/install +# as part of GTN tutorial development and biocontainer adoption +# The tester runs planemo on a non-tested archive, creates the test outputs +# and returns a new proper tool with test. +# The tester was generated from the ToolFactory_tester.py script + + +import argparse +import copy +import json +import logging +import os +import re +import shlex +import shutil +import subprocess +import sys +import tarfile +import tempfile +import time +import urllib + +from bioblend import ConnectionError +from bioblend import galaxy +from bioblend import toolshed + +import galaxyxml.tool as gxt +import galaxyxml.tool.parameters as gxtp + +import lxml.etree as ET + +import yaml + +myversion = "V2.3 April 2021" +verbose = True +debug = True +toolFactoryURL = "https://github.com/fubar2/toolfactory" +FAKEEXE = "~~~REMOVE~~~ME~~~" +# need this until a PR/version bump to fix galaxyxml prepending the exe even +# with override. + + +def timenow(): + """return current time as a string""" + return time.strftime("%d/%m/%Y %H:%M:%S", time.localtime(time.time())) + +cheetah_escape_table = {"$": "\\$", "#": "\\#"} + +def cheetah_escape(text): + """Produce entities within text.""" + return "".join([cheetah_escape_table.get(c, c) for c in text]) + +def parse_citations(citations_text): + """""" + citations = [c for c in citations_text.split("**ENTRY**") if c.strip()] + citation_tuples = [] + for citation in citations: + if citation.startswith("doi"): + citation_tuples.append(("doi", citation[len("doi") :].strip())) + else: + citation_tuples.append(("bibtex", citation[len("bibtex") :].strip())) + return citation_tuples + + +class Tool_Conf_Updater(): + # update config/tool_conf.xml with a new tool unpacked in /tools + # requires highly insecure docker settings - like write to tool_conf.xml and to tools ! + # if in a container possibly not so courageous. 
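    # Concretely: the uploaded tool archive is unpacked, the unpacked tool directory is
    # rsync'd into <galaxy_root>/<tool_dir>, and a <tool file="..."/> entry is appended to
    # the ToolFactory section of tool_conf.xml (see run_rsync and update_toolconf below).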
+ # Fine on your own laptop but security red flag for most production instances + + def __init__(self, args, tool_conf_path, new_tool_archive_path, new_tool_name, tool_dir): + self.args = args + self.tool_conf_path = os.path.join(args.galaxy_root,tool_conf_path) + self.tool_dir = os.path.join(args.galaxy_root, tool_dir) + self.our_name = 'ToolFactory' + tff = tarfile.open(new_tool_archive_path, "r:*") + flist = tff.getnames() + ourdir = os.path.commonpath(flist) # eg pyrevpos + self.tool_id = ourdir # they are the same for TF tools + ourxml = [x for x in flist if x.lower().endswith('.xml')] + res = tff.extractall() + tff.close() + self.run_rsync(ourdir, self.tool_dir) + self.update_toolconf(ourdir,ourxml) + + def run_rsync(self, srcf, dstf): + src = os.path.abspath(srcf) + dst = os.path.abspath(dstf) + if os.path.isdir(src): + cll = ['rsync', '-vr', src, dst] + else: + cll = ['rsync', '-v', src, dst] + p = subprocess.run( + cll, + capture_output=False, + encoding='utf8', + shell=False, + ) + + def install_deps(self): + gi = galaxy.GalaxyInstance(url=self.args.galaxy_url, key=self.args.galaxy_api_key) + x = gi.tools.install_dependencies(self.tool_id) + print(f"Called install_dependencies on {self.tool_id} - got {x}") + + def update_toolconf(self,ourdir,ourxml): # path is relative to tools + updated = False + localconf = './local_tool_conf.xml' + self.run_rsync(self.tool_conf_path,localconf) + tree = ET.parse(localconf) + root = tree.getroot() + hasTF = False + TFsection = None + for e in root.findall('section'): + if e.attrib['name'] == self.our_name: + hasTF = True + TFsection = e + if not hasTF: + TFsection = ET.Element('section') + root.insert(0,TFsection) # at the top! + our_tools = TFsection.findall('tool') + conf_tools = [x.attrib['file'] for x in our_tools] + for xml in ourxml: # may be > 1 + if not xml in conf_tools: # new + updated = True + ET.SubElement(TFsection, 'tool', {'file':xml}) + ET.indent(tree) + newconf = f"{self.tool_id}_conf" + tree.write(newconf, pretty_print=True) + self.run_rsync(newconf,self.tool_conf_path) + if False and self.args.packages and self.args.packages > '': + self.install_deps() + +class Tool_Factory: + """Wrapper for an arbitrary script + uses galaxyxml + + """ + + def __init__(self, args=None): # noqa + """ + prepare command line cl for running the tool here + and prepare elements needed for galaxyxml tool generation + """ + self.ourcwd = os.getcwd() + self.collections = [] + if len(args.collection) > 0: + try: + self.collections = [ + json.loads(x) for x in args.collection if len(x.strip()) > 1 + ] + except Exception: + print( + f"--collections parameter {str(args.collection)} is malformed - should be a dictionary" + ) + try: + self.infiles = [ + json.loads(x) for x in args.input_files if len(x.strip()) > 1 + ] + except Exception: + print( + f"--input_files parameter {str(args.input_files)} is malformed - should be a dictionary" + ) + try: + self.outfiles = [ + json.loads(x) for x in args.output_files if len(x.strip()) > 1 + ] + except Exception: + print( + f"--output_files parameter {args.output_files} is malformed - should be a dictionary" + ) + try: + self.addpar = [ + json.loads(x) for x in args.additional_parameters if len(x.strip()) > 1 + ] + except Exception: + print( + f"--additional_parameters {args.additional_parameters} is malformed - should be a dictionary" + ) + try: + self.selpar = [ + json.loads(x) for x in args.selecttext_parameters if len(x.strip()) > 1 + ] + except Exception: + print( + f"--selecttext_parameters 
{args.selecttext_parameters} is malformed - should be a dictionary" + ) + self.args = args + self.cleanuppar() + self.lastxclredirect = None + self.xmlcl = [] + self.is_positional = self.args.parampass == "positional" + if self.args.sysexe: + if ' ' in self.args.sysexe: + self.executeme = self.args.sysexe.split(' ') + else: + self.executeme = [self.args.sysexe, ] + else: + if self.args.packages: + self.executeme = [self.args.packages.split(",")[0].split(":")[0].strip(), ] + else: + self.executeme = None + aXCL = self.xmlcl.append + assert args.parampass in [ + "0", + "argparse", + "positional", + ], 'args.parampass must be "0","positional" or "argparse"' + self.tool_name = re.sub("[^a-zA-Z0-9_]+", "", args.tool_name) + self.tool_id = self.tool_name + self.newtool = gxt.Tool( + self.tool_name, + self.tool_id, + self.args.tool_version, + self.args.tool_desc, + FAKEEXE, + ) + self.newtarpath = "%s_toolshed.gz" % self.tool_name + self.tooloutdir = "./tfout" + self.repdir = "./TF_run_report" + self.testdir = os.path.join(self.tooloutdir, "test-data") + if not os.path.exists(self.tooloutdir): + os.mkdir(self.tooloutdir) + if not os.path.exists(self.testdir): + os.mkdir(self.testdir) + if not os.path.exists(self.repdir): + os.mkdir(self.repdir) + self.tinputs = gxtp.Inputs() + self.toutputs = gxtp.Outputs() + self.testparam = [] + if self.args.script_path: + self.prepScript() + if self.args.command_override: + scos = open(self.args.command_override, "r").readlines() + self.command_override = [x.rstrip() for x in scos] + else: + self.command_override = None + if self.args.test_override: + stos = open(self.args.test_override, "r").readlines() + self.test_override = [x.rstrip() for x in stos] + else: + self.test_override = None + if self.args.script_path: + for ex in self.executeme: + aXCL(ex) + aXCL("$runme") + else: + for ex in self.executeme: + aXCL(ex) + + if self.args.parampass == "0": + self.clsimple() + else: + if self.args.parampass == "positional": + self.prepclpos() + self.clpositional() + else: + self.prepargp() + self.clargparse() + + def clsimple(self): + """no parameters or repeats - uses < and > for i/o""" + aXCL = self.xmlcl.append + if len(self.infiles) > 0: + aXCL("<") + aXCL("$%s" % self.infiles[0]["infilename"]) + if len(self.outfiles) > 0: + aXCL(">") + aXCL("$%s" % self.outfiles[0]["name"]) + if self.args.cl_user_suffix: # DIY CL end + clp = shlex.split(self.args.cl_user_suffix) + for c in clp: + aXCL(c) + + def prepargp(self): + xclsuffix = [] + for i, p in enumerate(self.infiles): + nam = p["infilename"] + if p["origCL"].strip().upper() == "STDIN": + xappendme = [ + nam, + nam, + "< $%s" % nam, + ] + else: + rep = p["repeat"] == "1" + over = "" + if rep: + over = f'#for $rep in $R_{nam}:\n--{nam} "$rep.{nam}"\n#end for' + xappendme = [p["CL"], "$%s" % p["CL"], over] + xclsuffix.append(xappendme) + for i, p in enumerate(self.outfiles): + if p["origCL"].strip().upper() == "STDOUT": + self.lastxclredirect = [">", "$%s" % p["name"]] + else: + xclsuffix.append([p["name"], "$%s" % p["name"], ""]) + for p in self.addpar: + nam = p["name"] + rep = p["repeat"] == "1" + if rep: + over = f'#for $rep in $R_{nam}:\n--{nam} "$rep.{nam}"\n#end for' + else: + over = p["override"] + xclsuffix.append([p["CL"], '"$%s"' % nam, over]) + for p in self.selpar: + xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) + self.xclsuffix = xclsuffix + + def prepclpos(self): + xclsuffix = [] + for i, p in enumerate(self.infiles): + if p["origCL"].strip().upper() == "STDIN": + xappendme = [ + 
"999", + p["infilename"], + "< $%s" % p["infilename"], + ] + else: + xappendme = [p["CL"], "$%s" % p["infilename"], ""] + xclsuffix.append(xappendme) + for i, p in enumerate(self.outfiles): + if p["origCL"].strip().upper() == "STDOUT": + self.lastxclredirect = [">", "$%s" % p["name"]] + else: + xclsuffix.append([p["CL"], "$%s" % p["name"], ""]) + for p in self.addpar: + nam = p["name"] + rep = p["repeat"] == "1" # repeats make NO sense + if rep: + print(f'### warning. Repeats for {nam} ignored - not permitted in positional parameter command lines!') + over = p["override"] + xclsuffix.append([p["CL"], '"$%s"' % nam, over]) + for p in self.selpar: + xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) + xclsuffix.sort() + self.xclsuffix = xclsuffix + + def prepScript(self): + rx = open(self.args.script_path, "r").readlines() + rx = [x.rstrip() for x in rx] + rxcheck = [x.strip() for x in rx if x.strip() > ""] + assert len(rxcheck) > 0, "Supplied script is empty. Cannot run" + self.script = "\n".join(rx) + fhandle, self.sfile = tempfile.mkstemp( + prefix=self.tool_name, suffix="_%s" % (self.executeme[0]) + ) + tscript = open(self.sfile, "w") + tscript.write(self.script) + tscript.close() + self.spacedScript = [f" {x}" for x in rx if x.strip() > ""] + rx.insert(0,'#raw') + rx.append('#end raw') + self.escapedScript = rx + art = "%s.%s" % (self.tool_name, self.executeme[0]) + artifact = open(art, "wb") + artifact.write(bytes(self.script, "utf8")) + artifact.close() + + def cleanuppar(self): + """ positional parameters are complicated by their numeric ordinal""" + if self.args.parampass == "positional": + for i, p in enumerate(self.infiles): + assert ( + p["CL"].isdigit() or p["CL"].strip().upper() == "STDIN" + ), "Positional parameters must be ordinal integers - got %s for %s" % ( + p["CL"], + p["label"], + ) + for i, p in enumerate(self.outfiles): + assert ( + p["CL"].isdigit() or p["CL"].strip().upper() == "STDOUT" + ), "Positional parameters must be ordinal integers - got %s for %s" % ( + p["CL"], + p["name"], + ) + for i, p in enumerate(self.addpar): + assert p[ + "CL" + ].isdigit(), "Positional parameters must be ordinal integers - got %s for %s" % ( + p["CL"], + p["name"], + ) + for i, p in enumerate(self.infiles): + infp = copy.copy(p) + infp["origCL"] = infp["CL"] + if self.args.parampass in ["positional", "0"]: + infp["infilename"] = infp["label"].replace(" ", "_") + else: + infp["infilename"] = infp["CL"] + self.infiles[i] = infp + for i, p in enumerate(self.outfiles): + p["origCL"] = p["CL"] # keep copy + self.outfiles[i] = p + for i, p in enumerate(self.addpar): + p["origCL"] = p["CL"] + self.addpar[i] = p + + def clpositional(self): + # inputs in order then params + aXCL = self.xmlcl.append + for (k, v, koverride) in self.xclsuffix: + aXCL(v) + if self.lastxclredirect: + aXCL(self.lastxclredirect[0]) + aXCL(self.lastxclredirect[1]) + if self.args.cl_user_suffix: # DIY CL end + clp = shlex.split(self.args.cl_user_suffix) + for c in clp: + aXCL(c) + + + def clargparse(self): + """argparse style""" + aXCL = self.xmlcl.append + # inputs then params in argparse named form + + for (k, v, koverride) in self.xclsuffix: + if koverride > "": + k = koverride + aXCL(k) + else: + if len(k.strip()) == 1: + k = "-%s" % k + else: + k = "--%s" % k + aXCL(k) + aXCL(v) + if self.lastxclredirect: + aXCL(self.lastxclredirect[0]) + aXCL(self.lastxclredirect[1]) + if self.args.cl_user_suffix: # DIY CL end + clp = shlex.split(self.args.cl_user_suffix) + for c in clp: + aXCL(c) + + def 
getNdash(self, newname): + if self.is_positional: + ndash = 0 + else: + ndash = 2 + if len(newname) < 2: + ndash = 1 + return ndash + + def doXMLparam(self): # noqa + """Add all needed elements to tool""" + for p in self.outfiles: + newname = p["name"] + newfmt = p["format"] + newcl = p["CL"] + test = p["test"] + oldcl = p["origCL"] + test = test.strip() + ndash = self.getNdash(newcl) + aparm = gxtp.OutputData( + name=newname, format=newfmt, num_dashes=ndash, label=newname + ) + aparm.positional = self.is_positional + if self.is_positional: + if oldcl.upper() == "STDOUT": + aparm.positional = 9999999 + aparm.command_line_override = "> $%s" % newname + else: + aparm.positional = int(oldcl) + aparm.command_line_override = "$%s" % newname + self.toutputs.append(aparm) + ld = None + if test.strip() > "": + if test.startswith("diff"): + c = "diff" + ld = 0 + if test.split(":")[1].isdigit: + ld = int(test.split(":")[1]) + tp = gxtp.TestOutput( + name=newname, + value="%s_sample" % newname, + compare=c, + lines_diff=ld, + ) + elif test.startswith("sim_size"): + c = "sim_size" + tn = test.split(":")[1].strip() + if tn > "": + if "." in tn: + delta = None + delta_frac = min(1.0, float(tn)) + else: + delta = int(tn) + delta_frac = None + tp = gxtp.TestOutput( + name=newname, + value="%s_sample" % newname, + compare=c, + delta=delta, + delta_frac=delta_frac, + ) + else: + c = test + tp = gxtp.TestOutput( + name=newname, + value="%s_sample" % newname, + compare=c, + ) + self.testparam.append(tp) + for p in self.infiles: + newname = p["infilename"] + newfmt = p["format"] + ndash = self.getNdash(newname) + reps = p.get("repeat", "0") == "1" + if not len(p["label"]) > 0: + alab = p["CL"] + else: + alab = p["label"] + aninput = gxtp.DataParam( + newname, + optional=False, + label=alab, + help=p["help"], + format=newfmt, + multiple=False, + num_dashes=ndash, + ) + aninput.positional = self.is_positional + if self.is_positional: + if p["origCL"].upper() == "STDIN": + aninput.positional = 9999998 + aninput.command_line_override = "> $%s" % newname + else: + aninput.positional = int(p["origCL"]) + aninput.command_line_override = "$%s" % newname + if reps: + repe = gxtp.Repeat(name=f"R_{newname}", title=f"Add as many {alab} as needed") + repe.append(aninput) + self.tinputs.append(repe) + tparm = gxtp.TestRepeat(name=f"R_{newname}") + tparm2 = gxtp.TestParam(newname, value="%s_sample" % newname) + tparm.append(tparm2) + self.testparam.append(tparm) + else: + self.tinputs.append(aninput) + tparm = gxtp.TestParam(newname, value="%s_sample" % newname) + self.testparam.append(tparm) + for p in self.addpar: + newname = p["name"] + newval = p["value"] + newlabel = p["label"] + newhelp = p["help"] + newtype = p["type"] + newcl = p["CL"] + oldcl = p["origCL"] + reps = p["repeat"] == "1" + if not len(newlabel) > 0: + newlabel = newname + ndash = self.getNdash(newname) + if newtype == "text": + aparm = gxtp.TextParam( + newname, + label=newlabel, + help=newhelp, + value=newval, + num_dashes=ndash, + ) + elif newtype == "integer": + aparm = gxtp.IntegerParam( + newname, + label=newlabel, + help=newhelp, + value=newval, + num_dashes=ndash, + ) + elif newtype == "float": + aparm = gxtp.FloatParam( + newname, + label=newlabel, + help=newhelp, + value=newval, + num_dashes=ndash, + ) + elif newtype == "boolean": + aparm = gxtp.BooleanParam( + newname, + label=newlabel, + help=newhelp, + value=newval, + num_dashes=ndash, + ) + else: + raise ValueError( + 'Unrecognised parameter type "%s" for\ + additional parameter %s in 
makeXML' + % (newtype, newname) + ) + aparm.positional = self.is_positional + if self.is_positional: + aparm.positional = int(oldcl) + if reps: + repe = gxtp.Repeat(name=f"R_{newname}", title=f"Add as many {newlabel} as needed") + repe.append(aparm) + self.tinputs.append(repe) + tparm = gxtp.TestRepeat(name=f"R_{newname}") + tparm2 = gxtp.TestParam(newname, value=newval) + tparm.append(tparm2) + self.testparam.append(tparm) + else: + self.tinputs.append(aparm) + tparm = gxtp.TestParam(newname, value=newval) + self.testparam.append(tparm) + for p in self.selpar: + newname = p["name"] + newval = p["value"] + newlabel = p["label"] + newhelp = p["help"] + newtype = p["type"] + newcl = p["CL"] + if not len(newlabel) > 0: + newlabel = newname + ndash = self.getNdash(newname) + if newtype == "selecttext": + newtext = p["texts"] + aparm = gxtp.SelectParam( + newname, + label=newlabel, + help=newhelp, + num_dashes=ndash, + ) + for i in range(len(newval)): + anopt = gxtp.SelectOption( + value=newval[i], + text=newtext[i], + ) + aparm.append(anopt) + aparm.positional = self.is_positional + if self.is_positional: + aparm.positional = int(newcl) + self.tinputs.append(aparm) + tparm = gxtp.TestParam(newname, value=newval) + self.testparam.append(tparm) + else: + raise ValueError( + 'Unrecognised parameter type "%s" for\ + selecttext parameter %s in makeXML' + % (newtype, newname) + ) + for p in self.collections: + newkind = p["kind"] + newname = p["name"] + newlabel = p["label"] + newdisc = p["discover"] + collect = gxtp.OutputCollection(newname, label=newlabel, type=newkind) + disc = gxtp.DiscoverDatasets( + pattern=newdisc, directory=f"{newname}", visible="false" + ) + collect.append(disc) + self.toutputs.append(collect) + try: + tparm = gxtp.TestOutputCollection(newname) # broken until PR merged. + self.testparam.append(tparm) + except Exception: + print("#### WARNING: Galaxyxml version does not have the PR merged yet - tests for collections must be over-ridden until then!") + + def doNoXMLparam(self): + """filter style package - stdin to stdout""" + if len(self.infiles) > 0: + alab = self.infiles[0]["label"] + if len(alab) == 0: + alab = self.infiles[0]["infilename"] + max1s = ( + "Maximum one input if parampass is 0 but multiple input files supplied - %s" + % str(self.infiles) + ) + assert len(self.infiles) == 1, max1s + newname = self.infiles[0]["infilename"] + aninput = gxtp.DataParam( + newname, + optional=False, + label=alab, + help=self.infiles[0]["help"], + format=self.infiles[0]["format"], + multiple=False, + num_dashes=0, + ) + aninput.command_line_override = "< $%s" % newname + aninput.positional = True + self.tinputs.append(aninput) + tp = gxtp.TestParam(name=newname, value="%s_sample" % newname) + self.testparam.append(tp) + if len(self.outfiles) > 0: + newname = self.outfiles[0]["name"] + newfmt = self.outfiles[0]["format"] + anout = gxtp.OutputData(newname, format=newfmt, num_dashes=0) + anout.command_line_override = "> $%s" % newname + anout.positional = self.is_positional + self.toutputs.append(anout) + tp = gxtp.TestOutput(name=newname, value="%s_sample" % newname) + self.testparam.append(tp) + + def makeXML(self): # noqa + """ + Create a Galaxy xml tool wrapper for the new script + Uses galaxyhtml + Hmmm. How to get the command line into correct order... 
+ """ + if self.command_override: + self.newtool.command_override = self.command_override # config file + else: + self.newtool.command_override = self.xmlcl + cite = gxtp.Citations() + acite = gxtp.Citation(type="doi", value="10.1093/bioinformatics/bts573") + cite.append(acite) + self.newtool.citations = cite + safertext = "" + if self.args.help_text: + helptext = open(self.args.help_text, "r").readlines() + safertext = "\n".join([cheetah_escape(x) for x in helptext]) + if len(safertext.strip()) == 0: + safertext = ( + "Ask the tool author (%s) to rebuild with help text please\n" + % (self.args.user_email) + ) + if self.args.script_path: + if len(safertext) > 0: + safertext = safertext + "\n\n------\n" # transition allowed! + scr = [x for x in self.spacedScript if x.strip() > ""] + scr.insert(0, "\n\nScript::\n") + if len(scr) > 300: + scr = ( + scr[:100] + + [" >300 lines - stuff deleted", " ......"] + + scr[-100:] + ) + scr.append("\n") + safertext = safertext + "\n".join(scr) + self.newtool.help = safertext + self.newtool.version_command = f'echo "{self.args.tool_version}"' + std = gxtp.Stdios() + std1 = gxtp.Stdio() + std.append(std1) + self.newtool.stdios = std + requirements = gxtp.Requirements() + if self.args.packages: + try: + for d in self.args.packages.split(","): + ver = "" + d = d.replace("==", ":") + d = d.replace("=", ":") + if ":" in d: + packg, ver = d.split(":") + else: + packg = d + requirements.append( + gxtp.Requirement("package", packg.strip(), ver.strip()) + ) + except Exception: + print('### malformed packages string supplied - cannot parse =',self.args.packages) + sys.exit(2) + self.newtool.requirements = requirements + if self.args.parampass == "0": + self.doNoXMLparam() + else: + self.doXMLparam() + self.newtool.outputs = self.toutputs + self.newtool.inputs = self.tinputs + if self.args.script_path: + configfiles = gxtp.Configfiles() + configfiles.append( + gxtp.Configfile(name="runme", text="\n".join(self.escapedScript)) + ) + self.newtool.configfiles = configfiles + tests = gxtp.Tests() + test_a = gxtp.Test() + for tp in self.testparam: + test_a.append(tp) + tests.append(test_a) + self.newtool.tests = tests + self.newtool.add_comment( + "Created by %s at %s using the Galaxy Tool Factory." 
+ % (self.args.user_email, timenow()) + ) + self.newtool.add_comment("Source in git at: %s" % (toolFactoryURL)) + exml0 = self.newtool.export() + exml = exml0.replace(FAKEEXE, "") # temporary work around until PR accepted + if ( + self.test_override + ): # cannot do this inside galaxyxml as it expects lxml objects for tests + part1 = exml.split("")[0] + part2 = exml.split("")[1] + fixed = "%s\n%s\n%s" % (part1, "\n".join(self.test_override), part2) + exml = fixed + # exml = exml.replace('range="1:"', 'range="1000:"') + xf = open("%s.xml" % self.tool_name, "w") + xf.write(exml) + xf.write("\n") + xf.close() + # ready for the tarball + + def writeShedyml(self): + """for planemo""" + yuser = self.args.user_email.split("@")[0] + yfname = os.path.join(self.tooloutdir, ".shed.yml") + yamlf = open(yfname, "w") + odict = { + "name": self.tool_name, + "owner": yuser, + "type": "unrestricted", + "description": self.args.tool_desc, + "synopsis": self.args.tool_desc, + "category": "TF Generated Tools", + } + yaml.dump(odict, yamlf, allow_unicode=True) + yamlf.close() + + def makeTool(self): + """write xmls and input samples into place""" + if self.args.parampass == 0: + self.doNoXMLparam() + else: + self.makeXML() + if self.args.script_path: + stname = os.path.join(self.tooloutdir, self.sfile) + if not os.path.exists(stname): + shutil.copyfile(self.sfile, stname) + xreal = "%s.xml" % self.tool_name + xout = os.path.join(self.tooloutdir, xreal) + shutil.copyfile(xreal, xout) + for p in self.infiles: + pth = p["name"] + dest = os.path.join(self.testdir, "%s_sample" % p["infilename"]) + shutil.copyfile(pth, dest) + dest = os.path.join(self.repdir, "%s_sample.%s" % (p["infilename"],p["format"])) + shutil.copyfile(pth, dest) + + def makeToolTar(self, report_fail=False): + """move outputs into test-data and prepare the tarball""" + excludeme = "_planemo_test_report.html" + + def exclude_function(tarinfo): + filename = tarinfo.name + return None if filename.endswith(excludeme) else tarinfo + + for p in self.outfiles: + oname = p["name"] + tdest = os.path.join(self.testdir, "%s_sample" % oname) + src = os.path.join(self.testdir, oname) + if not os.path.isfile(tdest): + if os.path.isfile(src): + shutil.copyfile(src, tdest) + dest = os.path.join(self.repdir, "%s.sample" % (oname)) + shutil.copyfile(src, dest) + else: + if report_fail: + print( + "###Tool may have failed - output file %s not found in testdir after planemo run %s." + % (tdest, self.testdir) + ) + tf = tarfile.open(self.newtarpath, "w:gz") + tf.add( + name=self.tooloutdir, + arcname=self.tool_name, + filter=exclude_function, + ) + tf.close() + shutil.copyfile(self.newtarpath, self.args.new_tool) + + def moveRunOutputs(self): + """need to move planemo or run outputs into toolfactory collection""" + with os.scandir(self.tooloutdir) as outs: + for entry in outs: + if not entry.is_file(): + continue + if not entry.name.endswith('.html'): + _, ext = os.path.splitext(entry.name) + newname = f"{entry.name.replace('.','_')}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.tooloutdir, entry.name) + shutil.copyfile(src, dest) + if self.args.include_tests: + with os.scandir(self.testdir) as outs: + for entry in outs: + if (not entry.is_file()) or entry.name.endswith( + "_planemo_test_report.html" + ): + continue + if "." 
in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".tgz", ".json"]: + continue + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.testdir, entry.name) + shutil.copyfile(src, dest) + + +def main(): + """ + This is a Galaxy wrapper. + It expects to be called by a special purpose tool.xml + + """ + parser = argparse.ArgumentParser() + a = parser.add_argument + a("--script_path", default=None) + a("--history_test", default=None) + a("--cl_user_suffix", default=None) + a("--sysexe", default=None) + a("--packages", default=None) + a("--tool_name", default="newtool") + a("--tool_dir", default=None) + a("--input_files", default=[], action="append") + a("--output_files", default=[], action="append") + a("--user_email", default="Unknown") + a("--bad_user", default=None) + a("--help_text", default=None) + a("--tool_desc", default=None) + a("--tool_version", default=None) + a("--citations", default=None) + a("--command_override", default=None) + a("--test_override", default=None) + a("--additional_parameters", action="append", default=[]) + a("--selecttext_parameters", action="append", default=[]) + a("--edit_additional_parameters", action="store_true", default=False) + a("--parampass", default="positional") + a("--tfout", default="./tfout") + a("--new_tool", default="new_tool") + a("--galaxy_root", default="/galaxy-central") + a("--galaxy_venv", default="/galaxy_venv") + a("--collection", action="append", default=[]) + a("--include_tests", default=False, action="store_true") + a("--admin_only", default=False, action="store_true") + a("--install", default=False, action="store_true") + a("--run_test", default=False, action="store_true") + a("--local_tools", default='tools') # relative to $__root_dir__ + a("--tool_conf_path", default='config/tool_conf.xml') # relative to $__root_dir__ + a("--galaxy_url", default="http://localhost:8080") + a("--toolshed_url", default="http://localhost:9009") + # make sure this is identical to tool_sheds_conf.xml + # localhost != 127.0.0.1 so validation fails + a("--toolshed_api_key", default="fakekey") + a("--galaxy_api_key", default="8993d65865e6d6d1773c2c34a1cc207d") + args = parser.parse_args() + if args.admin_only: + assert not args.bad_user, ( + 'UNAUTHORISED: %s is NOT authorized to use this tool until Galaxy \ +admin adds %s to "admin_users" in the galaxy.yml Galaxy configuration file' + % (args.bad_user, args.bad_user) + ) + assert args.tool_name, "## Tool Factory expects a tool name - eg --tool_name=DESeq" + r = Tool_Factory(args) + r.writeShedyml() + r.makeTool() + r.makeToolTar() + if args.install: + #try: + tcu = Tool_Conf_Updater(args=args, tool_dir=args.local_tools, + new_tool_archive_path=r.newtarpath, tool_conf_path=args.tool_conf_path, + new_tool_name=r.tool_name) + #except Exception: + # print("### Unable to install the new tool. Are you sure you have all the required special settings?") + +if __name__ == "__main__": + main() + diff -r 5fc0c9a93072 -r 1c652687a08f toolfactory/ToolFactory.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/ToolFactory.xml Fri Apr 30 07:06:57 2021 +0000 @@ -0,0 +1,550 @@ + + Scripts into tools v2.0 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + galaxyxml + bioblend + rsync + + + 0: +--cl_user_suffix "$cl_suffix" + #end if + #if $cover.commover == "yes": + #if len(str($cover.command_override)) > 10: +--command_override "$commandoverride" + #end if + #if len(str($cover.test_override)) > 10: +--test_override "$testoverride" + #end if + #end if + #if $deps.packages > "": + --packages "$deps.packages" + #end if + #if $deps.usescript.choosescript == "yes": +--script_path "$runme" +--sysexe "$deps.usescript.scriptrunner" + #end if +--tool_name "$tool_name" --user_email "$__user_email__" --citations "$citeme" --parampass "$io_param.ppass.parampass" +--tool_desc "$tool_desc" +--tool_version "$tool_version" +--help_text "$helpme" +--new_tool "$new_tool" + #if $install: + --install + #end if + #if $run_test: + --run_test + #end if + #if $io_param.ppass.parampass != '0': + #if str($io_param.ppass.addparam.edit_params) == "yes": +--edit_additional_parameters + #end if + #for $apar in $io_param.ppass.addparam.additional_parameters: + #if $apar.ap_type.param_type=="selecttext": +--selecttext_parameters '{"name":"$apar.param_name", "label":"$apar.param_label", "help":"$apar.param_help", +"type":"$apar.ap_type.param_type","CL":"$apar.param_CL","override":"$apar.param_CLprefixed","value": [ + #for $i,$st in enumerate($apar.ap_type.selectTexts): + "$st.select_value" + #if ($i < (len($apar.ap_type.selectTexts)-1)): + , + #end if + #end for + ], "texts": [ + #for $i,$st in enumerate($apar.ap_type.selectTexts): + "$st.select_text" + #if ($i < (len($apar.ap_type.selectTexts)-1)): + , + #end if + + #end for + ] + }' + #else: +--additional_parameters '{"name": "$apar.param_name", "value": "$apar.ap_type.param_value", "label": "$apar.param_label", "help": "$apar.param_help", +"type": "$apar.ap_type.param_type","CL": "$apar.param_CL","override": "$apar.param_CLprefixed", "repeat": "$apar.param_repeat"}' + #end if + #end for + #end if + #for $intab in $io_param.ppass.io.history_inputs: +--input_files '{"name": "$intab.input_files", "CL": "$intab.input_CL", "format": "$intab.input_formats", "label": "$intab.input_label", "help": "$intab.input_help", "repeat": "$intab.input_repeat"}' + #end for + #for $otab in $io_param.ppass.io.history_outputs: +--output_files '{"name": "$otab.history_name", "format": "$otab.history_format", "CL": "$otab.history_CL", "test": "$otab.history_test"}' + #end for + #for $collect in $io_param.ppass.io.collection_outputs: +--collection '{"name": "$collect.name", "kind": "$collect.kind", "discover": "$collect.discover", "label": "$collect.label"}' + #end for +--galaxy_root "$__root_dir__" +--tool_dir "$__tool_directory__" + #end if +]]> + + +$deps.usescript.dynScript + + +#if $cover.commover == "yes" and len(str($cover.command_override).strip()) > 1: +$cover.command_override +#end if + + +#if $cover.commover == "yes" and len(str($cover.test_override).strip()) > 1: +$cover.test_override +#end if + + +${help_text} + + + +#for $citation in $citations: + #if $citation.citation_type.type == "bibtex": + **ENTRY**bibtex + ${citation.citation_type.bibtex} + #else + **ENTRY**doi + ${citation.citation_type.doi} + #end if +#end for + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +.. class:: warningmark + +**Details and attribution** +(see GTF_) + +**Local Admins ONLY** +Only users whose IDs found in the local admin_user configuration setting in universe_wsgi.ini can run this tool. + +**If you find a bug** +Please raise an issue, or even better, submit a pull request fixing it, on the github repository GTF_ + +**What it does** +This tool optionally generates normal workflow compatible first class Galaxy tools + +Generated tools can run existing binary packages that become requirements, existing scripts, or new scripts pasted into this tool form. +Pasted scripts are written so they are part of the new tool and cannot be adjusted by the downstream user. +Binary packages are managed by the dependency subsystem - conda usually, so anything in bioconda or conda_forge is available for example. + +Any number of parameters can be built into the new tool form for passing in to the script or executable at runtime. +These can be editable by the downstream user or baked in. + +A new tarball compatible with any Galaxy toolshed is created in your history, however, it does not have the test prepared. +There is a separate tool you can run to update the archive after testing with Planemo + +If the configuration in job_conf.xml allows tools to write to [galaxy_root]/tools, the new tool will be installed so you can view and test it locally. + +.. class:: warningmark + +**Note to system administrators** +This tool offers *NO* built in protection against malicious scripts. It should only be installed on private/personnal Galaxy instances. +Admin_users will have the power to do anything they want as the Galaxy user if you install this tool. + +.. class:: warningmark + +**Use on public servers** is STRONGLY discouraged for obvious reasons + +The tools generated by this tool will run just as securely as any other normal installed Galaxy tool but like any other new tools, should always be checked carefully before installation. +We recommend that you follow the good code hygiene practices associated with safe toolshed practices. + +Here's a sample python script that can be cut and pasted into the tool form, suitable for positional parameter passing: + +:: + + # reverse order of text by row + import sys + inp = sys.argv[1] + outp = sys.argv[2] + i = open(inp,'r').readlines() + o = open(outp,'w') + for row in i: + rs = row.rstrip() + rs = list(rs) + rs.reverse() + o.write(''.join(rs)) + o.write('\n') + o.close() + +With argparse style parameters: + +:: + + # reverse order of text by row + import argparse + parser = argparse.ArgumentParser() + a = parser.add_argument + a('--infile',default='') + a('--outfile',default=None) + args = parser.parse_args() + inp = args.infile + outp = args.outfile + i = open(inp,'r').readlines() + o = open(outp,'w') + for row in i: + rs = row.rstrip() + rs = list(rs) + rs.reverse() + o.write(''.join(rs)) + o.write('\n') + o.close() + +R script to draw some plots - use a collection. 
+ +:: + + + \# note this script takes NO input because it generates random data + dir.create('plots') + for (i in 1:10) { + foo = runif(100) + bar = rnorm(100) + bar = foo + 0.05*bar + pdf(paste('plots/yet',i,"anotherplot.pdf",sep='_')) + plot(foo,bar,main=paste("Foo by Bar plot \#",i),col="maroon", pch=3,cex=0.6) + dev.off() + foo = data.frame(a=runif(100),b=runif(100),c=runif(100),d=runif(100),e=runif(100),f=runif(100)) + bar = as.matrix(foo) + pdf(paste('plots/yet',i,"anotherheatmap.pdf",sep='_')) + heatmap(bar,main='Random Heatmap') + dev.off() + } + + + +Paper_ + +*Licensing* + +Copyright Ross Lazarus (ross period lazarus at gmail period com) May 2012 +All rights reserved. +Licensed under the LGPL_ + +.. _LGPL: http://www.gnu.org/copyleft/lesser.html +.. _GTF: https://github.com/fubar2/toolfactory +.. _Paper: https://academic.oup.com/bioinformatics/article/28/23/3139/192853 + + + + + 10.1093/bioinformatics/bts573 + +
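In addition to the positional and argparse samples above, the simplest "filter" style (parameter passing set to 0) takes at most one input on STDIN and writes one output to STDOUT, with no other parameters. A minimal illustrative sketch (not part of the original help text) that upper-cases each input row:

::

    # filter style: no parameters - the generated tool redirects STDIN and STDOUT
    import sys
    for row in sys.stdin:
        sys.stdout.write(row.upper())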
+ + diff -r 5fc0c9a93072 -r 1c652687a08f toolfactory/ToolFactory_tester.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/ToolFactory_tester.xml Fri Apr 30 07:06:57 2021 +0000 @@ -0,0 +1,454 @@ + + + + Test an untested tool and update it + + git + planemo + + + + + +"TF_run_report/${in_tool_archive.name}_test_log.txt" +]]> + + 0 + self.ourxmls = ourxmls # [os.path.join(tool_path,x) for x in ourxmls] + res = tff.extractall() + self.update_tests(ourdir) + tff.close() + self.tooloutdir = "./tfout" + self.repdir = "./TF_run_report" + self.testdir = os.path.join(self.tooloutdir, "test-data") + if not os.path.exists(self.tooloutdir): + os.mkdir(self.tooloutdir) + if not os.path.exists(self.testdir): + os.mkdir(self.testdir) + if not os.path.exists(self.repdir): + os.mkdir(self.repdir) + self.makeTool() + self.moveRunOutputs() + self.makeToolTar() + + def call_planemo(self,xmlpath,ourdir): + penv = os.environ + penv['HOME'] = os.path.join(self.args.galaxy_root,'planemo') + newpath = f"{penv['HOME']}:{penv['PATH']}" + penv['PATH'] = newpath + penv["GALAXY_VIRTUAL_ENV"] = os.path.join(self.args.galaxy_root,'.venv') + penv["PIP_CACHE_DIR"] = os.path.join(self.args.galaxy_root,'pipcache') + toolfile = os.path.split(xmlpath)[1] + tool_name = self.tool_name + tool_test_output = f"{tool_name}_planemo_test_report.html" + cll = [ + "planemo", + "test", + "--no_cleanup", + "--test_data", + os.path.abspath(ourdir), + "--test_output", + os.path.abspath(self.tool_test_output), + "--galaxy_root", + self.args.galaxy_root, + "--update_test_data", + os.path.abspath(xmlpath), + ] + print(cll) + p = subprocess.run( + cll, + #capture_output=True, + encoding='utf8', + cwd = os.path.abspath(self.tool_name), + env = penv, + shell=False, + ) + return p + + def makeTool(self): + """write xmls and input samples into place""" + for xreal in self.ourxmls: + x = os.path.split(xreal)[1] + xout = os.path.join(self.tooloutdir,x) + shutil.copyfile(xreal, xout) + + def makeToolTar(self): + """move outputs into test-data and prepare the tarball""" + excludeme = "_planemo_test_report.html" + + def exclude_function(tarinfo): + filename = tarinfo.name + return None if filename.endswith(excludeme) else tarinfo + + newtar = 'new_%s_toolshed.gz' % self.tool_name + ttf = tarfile.open(newtar, "w:gz") + ttf.add(name=self.tool_name, + arcname=self.tool_name, + filter=exclude_function) + ttf.close() + shutil.copyfile(newtar, self.new_tool_archive) + + def moveRunOutputs(self): + """need to move planemo or run outputs into toolfactory collection""" + with os.scandir(self.tooloutdir) as outs: + for entry in outs: + if not entry.is_file(): + continue + if "." in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".tgz", ".json"]: + continue + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.tooloutdir, entry.name) + shutil.copyfile(src, dest) + with os.scandir('.') as outs: + for entry in outs: + if not entry.is_file() or entry.name == "conda_activate.log": + continue + if "." 
in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src =entry.name + shutil.copyfile(src, dest) + if True or self.args.include_tests: + with os.scandir(self.testdir) as outs: + for entry in outs: + if (not entry.is_file()) or entry.name.endswith( + "_planemo_test_report.html" + ): + continue + if "." in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".tgz", ".json"]: + continue + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.testdir, entry.name) + shutil.copyfile(src, dest) + + + def update_tests(self,ourdir): + for xmlf in self.ourxmls: + capture = self.call_planemo(xmlf,ourdir) + +def main(): + """ + This is a Galaxy wrapper. + It expects to be called by a special purpose tool.xml + + """ + parser = argparse.ArgumentParser() + a = parser.add_argument + a("--in_tool_archive", default=None) + a("--new_tested_tool_archive", default=None) + a("--galaxy_root", default="/home/ross/gal21/") + args = parser.parse_args() + print('Hello from',os.getcwd()) + tt = ToolTester(args=args, in_tool_archive=args.in_tool_archive, new_tool_archive=args.new_tested_tool_archive) + +if __name__ == "__main__": + main() + + +#end raw]]> + + + + + + + + + + + + + + + 0 + self.ourxmls = ourxmls # [os.path.join(tool_path,x) for x in ourxmls] + res = tff.extractall() + self.update_tests(ourdir) + tff.close() + self.tooloutdir = "./tfout" + self.repdir = "./TF_run_report" + self.testdir = os.path.join(self.tooloutdir, "test-data") + if not os.path.exists(self.tooloutdir): + os.mkdir(self.tooloutdir) + if not os.path.exists(self.testdir): + os.mkdir(self.testdir) + if not os.path.exists(self.repdir): + os.mkdir(self.repdir) + self.makeTool() + self.moveRunOutputs() + self.makeToolTar() + + def call_planemo(self,xmlpath,ourdir): + penv = os.environ + penv["PIP_CACHE_DIR"] = os.path.join(self.args.galaxy_root,'pipcache') + toolfile = os.path.split(xmlpath)[1] + tool_name = self.tool_name + tool_test_output = f"{tool_name}_planemo_test_report.html" + cll = [ + "planemo", + "test", + "--biocontainers", + "--test_output", + os.path.abspath(tool_test_output), + "--galaxy_root", + self.args.galaxy_root, + "--update_test_data", + os.path.abspath(xmlpath), + ] + print(cll) + p = subprocess.run( + cll, + #capture_output=True, + encoding='utf8', + env = penv, + shell=False, + ) + return p + + def makeTool(self): + """write xmls and input samples into place""" + for xreal in self.ourxmls: + x = os.path.split(xreal)[1] + xout = os.path.join(self.tooloutdir,x) + shutil.copyfile(xreal, xout) + + def makeToolTar(self): + """move outputs into test-data and prepare the tarball""" + excludeme = "_planemo_test_report.html" + + def exclude_function(tarinfo): + filename = tarinfo.name + return None if filename.endswith(excludeme) else tarinfo + + newtar = 'new_%s_toolshed.gz' % self.tool_name + ttf = tarfile.open(newtar, "w:gz") + ttf.add(name=self.tool_name, + arcname=self.tool_name, + filter=exclude_function) + ttf.close() + shutil.copyfile(newtar, self.new_tool_archive) + + def moveRunOutputs(self): + """need to move planemo or run outputs into toolfactory collection""" + with os.scandir(self.tooloutdir) as outs: + for entry in outs: + 
if not entry.is_file(): + continue + if "." in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".tgz", ".json"]: + continue + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.tooloutdir, entry.name) + shutil.copyfile(src, dest) + with os.scandir('.') as outs: + for entry in outs: + if not entry.is_file(): + continue + if "." in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src =entry.name + shutil.copyfile(src, dest) + if True or self.args.include_tests: + with os.scandir(self.testdir) as outs: + for entry in outs: + if (not entry.is_file()) or entry.name.endswith( + "_planemo_test_report.html" + ): + continue + if "." in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".tgz", ".json"]: + continue + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.testdir, entry.name) + shutil.copyfile(src, dest) + + + def update_tests(self,ourdir): + for xmlf in self.ourxmls: + capture = self.call_planemo(xmlf,ourdir) + + def main(): + """ + This is a Galaxy wrapper. + It expects to be called by a special purpose tool.xml + + """ + parser = argparse.ArgumentParser() + a = parser.add_argument + a("--in_tool_archive", default=None) + a("--new_tested_tool_archive", default=None) + a("--galaxy_root", default="/home/ross/gal21/") + args = parser.parse_args() + print('Hello from',os.getcwd()) + tt = ToolTester(args=args, in_tool_archive=args.in_tool_archive, new_tool_archive=args.new_tested_tool_archive) + + if __name__ == "__main__": + main() + + +]]> + + 10.1093/bioinformatics/bts573 + + + diff -r 5fc0c9a93072 -r 1c652687a08f toolfactory/install-history.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/install-history.py Fri Apr 30 07:06:57 2021 +0000 @@ -0,0 +1,41 @@ +import argparse +import os + + +from bioblend import galaxy + + +def _parser(): + parser = argparse.ArgumentParser() + parser.add_argument("-g", "--galaxy", help='URL of target galaxy') + parser.add_argument("-p", "--password", help='Galaxy admin password') + parser.add_argument("-e", "--email", help='Galaxy admin email') + parser.add_argument("-a", "--key", help='Galaxy admin key', default=None) + parser.add_argument("-i", "--history_path", help='Path to history gz files to be loaded') + return parser + +def main(): + """ + load a folder of histories or a single gz + """ + args = _parser().parse_args() + if args.key: + gi = galaxy.GalaxyInstance(url=args.galaxy, key=args.key) + else: + gi = galaxy.GalaxyInstance(url=args.galaxy, email=args.email, password=args.password) + hdir = args.history_path + # h = gi.histories.get_most_recently_used_history() + if os.path.isdir(hdir): + for fp in os.listdir(hdir): + hp = os.path.join(hdir,fp) + if os.path.isfile(hp): + x = gi.histories.import_history(file_path=hp, url=None) + print('installed ',hp,'res=',x) + else: + x = gi.histories.import_history(file_path=hdir, url=None) + print('installed',hdir,'res=',x) + + +if __name__ == "__main__": + main() + diff -r 5fc0c9a93072 -r 1c652687a08f 
toolfactory/install_tf_demos.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/install_tf_demos.py Fri Apr 30 07:06:57 2021 +0000 @@ -0,0 +1,42 @@ +import argparse +import urllib.request + +from bioblend import galaxy + +WF = "https://drive.google.com/uc?export=download&id=13xE8o7tucHGNA0qYkEP98FfUGl2wdOU5" +HIST = ( + "https://zenodo.org/record/4686436/files/TFdemo_wf_april13_planemo.ga?download=1" +) +WF_FILE = "tf_workflow.ga" +HIST_FILE = "tf_history.tgz" + + +def _parser(): + parser = argparse.ArgumentParser() + parser.add_argument( + "-g", "--galaxy", help="URL of target galaxy", default="http://localhost:8080" + ) + parser.add_argument("-a", "--key", help="Galaxy admin key", default="8993d65865e6d6d1773c2c34a1cc207d") + return parser + + +def main(): + """ + load the planemo tool_factory demonstration history and tool generating workflow + fails in planemo served galaxies because there seems to be no user in trans? + """ + args = _parser().parse_args() + urllib.request.urlretrieve(WF, WF_FILE) + urllib.request.urlretrieve(HIST, HIST_FILE) + assert args.key, "Need an administrative key for the target Galaxy supplied please" + gi = galaxy.GalaxyInstance( + url=args.galaxy, key=args.key, email="planemo@galaxyproject.org" + ) + x = gi.workflows.import_workflow_from_local_path(WF_FILE, publish=True) + print(f"installed {WF_FILE} Returned = {x}\n") + x = gi.histories.import_history(file_path=HIST_FILE) + print(f"installed {HIST_FILE} Returned = {x}\n") + + +if __name__ == "__main__": + main() diff -r 5fc0c9a93072 -r 1c652687a08f toolfactory/rgToolFactory2.py --- a/toolfactory/rgToolFactory2.py Sun Apr 18 03:55:56 2021 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1150 +0,0 @@ -# replace with shebang for biocontainer -# see https://github.com/fubar2/toolfactory -# -# copyright ross lazarus (ross stop lazarus at gmail stop com) May 2012 -# -# all rights reserved -# Licensed under the LGPL -# suggestions for improvement and bug fixes welcome at -# https://github.com/fubar2/toolfactory -# -# July 2020: BCC was fun and I feel like rip van winkle after 5 years. -# Decided to -# 1. Fix the toolfactory so it works - done for simplest case -# 2. Fix planemo so the toolfactory function works -# 3. Rewrite bits using galaxyxml functions where that makes sense - done - -import argparse -import copy -import json -import logging -import os -import re -import shlex -import shutil -import subprocess -import sys -import tarfile -import tempfile -import time - -from bioblend import ConnectionError -from bioblend import toolshed - -import galaxyxml.tool as gxt -import galaxyxml.tool.parameters as gxtp - -import lxml - -import yaml - -myversion = "V2.2 February 2021" -verbose = True -debug = True -toolFactoryURL = "https://github.com/fubar2/toolfactory" -foo = len(lxml.__version__) -FAKEEXE = "~~~REMOVE~~~ME~~~" -# need this until a PR/version bump to fix galaxyxml prepending the exe even -# with override. 
- - -def timenow(): - """return current time as a string""" - return time.strftime("%d/%m/%Y %H:%M:%S", time.localtime(time.time())) - - -cheetah_escape_table = {"$": "\\$", "#": "\\#"} - - -def cheetah_escape(text): - """Produce entities within text.""" - return "".join([cheetah_escape_table.get(c, c) for c in text]) - - -def parse_citations(citations_text): - """""" - citations = [c for c in citations_text.split("**ENTRY**") if c.strip()] - citation_tuples = [] - for citation in citations: - if citation.startswith("doi"): - citation_tuples.append(("doi", citation[len("doi") :].strip())) - else: - citation_tuples.append(("bibtex", citation[len("bibtex") :].strip())) - return citation_tuples - - -class ScriptRunner: - """Wrapper for an arbitrary script - uses galaxyxml - - """ - - def __init__(self, args=None): # noqa - """ - prepare command line cl for running the tool here - and prepare elements needed for galaxyxml tool generation - """ - self.ourcwd = os.getcwd() - self.collections = [] - if len(args.collection) > 0: - try: - self.collections = [ - json.loads(x) for x in args.collection if len(x.strip()) > 1 - ] - except Exception: - print( - f"--collections parameter {str(args.collection)} is malformed - should be a dictionary" - ) - try: - self.infiles = [ - json.loads(x) for x in args.input_files if len(x.strip()) > 1 - ] - except Exception: - print( - f"--input_files parameter {str(args.input_files)} is malformed - should be a dictionary" - ) - try: - self.outfiles = [ - json.loads(x) for x in args.output_files if len(x.strip()) > 1 - ] - except Exception: - print( - f"--output_files parameter {args.output_files} is malformed - should be a dictionary" - ) - try: - self.addpar = [ - json.loads(x) for x in args.additional_parameters if len(x.strip()) > 1 - ] - except Exception: - print( - f"--additional_parameters {args.additional_parameters} is malformed - should be a dictionary" - ) - try: - self.selpar = [ - json.loads(x) for x in args.selecttext_parameters if len(x.strip()) > 1 - ] - except Exception: - print( - f"--selecttext_parameters {args.selecttext_parameters} is malformed - should be a dictionary" - ) - self.args = args - self.cleanuppar() - self.lastclredirect = None - self.lastxclredirect = None - self.cl = [] - self.xmlcl = [] - self.is_positional = self.args.parampass == "positional" - if self.args.sysexe: - if ' ' in self.args.sysexe: - self.executeme = self.args.sysexe.split(' ') - else: - self.executeme = [self.args.sysexe, ] - else: - if self.args.packages: - self.executeme = [self.args.packages.split(",")[0].split(":")[0].strip(), ] - else: - self.executeme = None - aCL = self.cl.append - aXCL = self.xmlcl.append - assert args.parampass in [ - "0", - "argparse", - "positional", - ], 'args.parampass must be "0","positional" or "argparse"' - self.tool_name = re.sub("[^a-zA-Z0-9_]+", "", args.tool_name) - self.tool_id = self.tool_name - self.newtool = gxt.Tool( - self.tool_name, - self.tool_id, - self.args.tool_version, - self.args.tool_desc, - FAKEEXE, - ) - self.newtarpath = "%s_toolshed.gz" % self.tool_name - self.tooloutdir = "./tfout" - self.repdir = "./TF_run_report_tempdir" - self.testdir = os.path.join(self.tooloutdir, "test-data") - if not os.path.exists(self.tooloutdir): - os.mkdir(self.tooloutdir) - if not os.path.exists(self.testdir): - os.mkdir(self.testdir) - if not os.path.exists(self.repdir): - os.mkdir(self.repdir) - self.tinputs = gxtp.Inputs() - self.toutputs = gxtp.Outputs() - self.testparam = [] - if self.args.script_path: - self.prepScript() - 
if self.args.command_override: - scos = open(self.args.command_override, "r").readlines() - self.command_override = [x.rstrip() for x in scos] - else: - self.command_override = None - if self.args.test_override: - stos = open(self.args.test_override, "r").readlines() - self.test_override = [x.rstrip() for x in stos] - else: - self.test_override = None - if self.args.script_path: - for ex in self.executeme: - aCL(ex) - aXCL(ex) - aCL(self.sfile) - aXCL("$runme") - else: - for ex in self.executeme: - aCL(ex) - aXCL(ex) - - self.elog = os.path.join(self.repdir, "%s_error_log.txt" % self.tool_name) - self.tlog = os.path.join(self.repdir, "%s_runner_log.txt" % self.tool_name) - if self.args.parampass == "0": - self.clsimple() - else: - if self.args.parampass == "positional": - self.prepclpos() - self.clpositional() - else: - self.prepargp() - self.clargparse() - if self.args.cl_suffix: # DIY CL end - clp = shlex.split(self.args.cl_suffix) - for c in clp: - aCL(c) - aXCL(c) - - def clsimple(self): - """no parameters or repeats - uses < and > for i/o""" - aCL = self.cl.append - aXCL = self.xmlcl.append - if len(self.infiles) > 0: - aCL("<") - aCL(self.infiles[0]["infilename"]) - aXCL("<") - aXCL("$%s" % self.infiles[0]["infilename"]) - if len(self.outfiles) > 0: - aCL(">") - aCL(self.outfiles[0]["name"]) - aXCL(">") - aXCL("$%s" % self.outfiles[0]["name"]) - - def prepargp(self): - clsuffix = [] - xclsuffix = [] - for i, p in enumerate(self.infiles): - nam = p["infilename"] - if p["origCL"].strip().upper() == "STDIN": - appendme = [ - nam, - nam, - "< %s" % nam, - ] - xappendme = [ - nam, - nam, - "< $%s" % nam, - ] - else: - rep = p["repeat"] == "1" - over = "" - if rep: - over = f'#for $rep in $R_{nam}:\n--{nam} "$rep.{nam}"\n#end for' - appendme = [p["CL"], p["CL"], ""] - xappendme = [p["CL"], "$%s" % p["CL"], over] - clsuffix.append(appendme) - xclsuffix.append(xappendme) - for i, p in enumerate(self.outfiles): - if p["origCL"].strip().upper() == "STDOUT": - self.lastclredirect = [">", p["name"]] - self.lastxclredirect = [">", "$%s" % p["name"]] - else: - clsuffix.append([p["name"], p["name"], ""]) - xclsuffix.append([p["name"], "$%s" % p["name"], ""]) - for p in self.addpar: - nam = p["name"] - rep = p["repeat"] == "1" - if rep: - over = f'#for $rep in $R_{nam}:\n--{nam} "$rep.{nam}"\n#end for' - else: - over = p["override"] - clsuffix.append([p["CL"], nam, over]) - xclsuffix.append([p["CL"], nam, over]) - for p in self.selpar: - clsuffix.append([p["CL"], p["name"], p["override"]]) - xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) - self.xclsuffix = xclsuffix - self.clsuffix = clsuffix - - def prepclpos(self): - clsuffix = [] - xclsuffix = [] - for i, p in enumerate(self.infiles): - if p["origCL"].strip().upper() == "STDIN": - appendme = [ - "999", - p["infilename"], - "< $%s" % p["infilename"], - ] - xappendme = [ - "999", - p["infilename"], - "< $%s" % p["infilename"], - ] - else: - appendme = [p["CL"], p["infilename"], ""] - xappendme = [p["CL"], "$%s" % p["infilename"], ""] - clsuffix.append(appendme) - xclsuffix.append(xappendme) - for i, p in enumerate(self.outfiles): - if p["origCL"].strip().upper() == "STDOUT": - self.lastclredirect = [">", p["name"]] - self.lastxclredirect = [">", "$%s" % p["name"]] - else: - clsuffix.append([p["CL"], p["name"], ""]) - xclsuffix.append([p["CL"], "$%s" % p["name"], ""]) - for p in self.addpar: - nam = p["name"] - rep = p["repeat"] == "1" # repeats make NO sense - if rep: - print(f'### warning. 
Repeats for {nam} ignored - not permitted in positional parameter command lines!') - over = p["override"] - clsuffix.append([p["CL"], nam, over]) - xclsuffix.append([p["CL"], '"$%s"' % nam, over]) - for p in self.selpar: - clsuffix.append([p["CL"], p["name"], p["override"]]) - xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) - clsuffix.sort() - xclsuffix.sort() - self.xclsuffix = xclsuffix - self.clsuffix = clsuffix - - def prepScript(self): - rx = open(self.args.script_path, "r").readlines() - rx = [x.rstrip() for x in rx] - rxcheck = [x.strip() for x in rx if x.strip() > ""] - assert len(rxcheck) > 0, "Supplied script is empty. Cannot run" - self.script = "\n".join(rx) - fhandle, self.sfile = tempfile.mkstemp( - prefix=self.tool_name, suffix="_%s" % (self.executeme[0]) - ) - tscript = open(self.sfile, "w") - tscript.write(self.script) - tscript.close() - self.escapedScript = [cheetah_escape(x) for x in rx] - self.spacedScript = [f" {x}" for x in rx if x.strip() > ""] - art = "%s.%s" % (self.tool_name, self.executeme[0]) - artifact = open(art, "wb") - artifact.write(bytes("\n".join(self.escapedScript), "utf8")) - artifact.close() - - def cleanuppar(self): - """ positional parameters are complicated by their numeric ordinal""" - if self.args.parampass == "positional": - for i, p in enumerate(self.infiles): - assert ( - p["CL"].isdigit() or p["CL"].strip().upper() == "STDIN" - ), "Positional parameters must be ordinal integers - got %s for %s" % ( - p["CL"], - p["label"], - ) - for i, p in enumerate(self.outfiles): - assert ( - p["CL"].isdigit() or p["CL"].strip().upper() == "STDOUT" - ), "Positional parameters must be ordinal integers - got %s for %s" % ( - p["CL"], - p["name"], - ) - for i, p in enumerate(self.addpar): - assert p[ - "CL" - ].isdigit(), "Positional parameters must be ordinal integers - got %s for %s" % ( - p["CL"], - p["name"], - ) - for i, p in enumerate(self.infiles): - infp = copy.copy(p) - infp["origCL"] = infp["CL"] - if self.args.parampass in ["positional", "0"]: - infp["infilename"] = infp["label"].replace(" ", "_") - else: - infp["infilename"] = infp["CL"] - self.infiles[i] = infp - for i, p in enumerate(self.outfiles): - p["origCL"] = p["CL"] # keep copy - self.outfiles[i] = p - for i, p in enumerate(self.addpar): - p["origCL"] = p["CL"] - self.addpar[i] = p - - def clpositional(self): - # inputs in order then params - aCL = self.cl.append - for (k, v, koverride) in self.clsuffix: - if " " in v: - aCL("%s" % v) - else: - aCL(v) - aXCL = self.xmlcl.append - for (k, v, koverride) in self.xclsuffix: - aXCL(v) - if self.lastxclredirect: - aXCL(self.lastxclredirect[0]) - aXCL(self.lastxclredirect[1]) - - def clargparse(self): - """argparse style""" - aCL = self.cl.append - aXCL = self.xmlcl.append - # inputs then params in argparse named form - - for (k, v, koverride) in self.xclsuffix: - if koverride > "": - k = koverride - aXCL(k) - else: - if len(k.strip()) == 1: - k = "-%s" % k - else: - k = "--%s" % k - aXCL(k) - aXCL(v) - for (k, v, koverride) in self.clsuffix: - if koverride > "": - k = koverride - elif len(k.strip()) == 1: - k = "-%s" % k - else: - k = "--%s" % k - aCL(k) - aCL(v) - if self.lastxclredirect: - aXCL(self.lastxclredirect[0]) - aXCL(self.lastxclredirect[1]) - - def getNdash(self, newname): - if self.is_positional: - ndash = 0 - else: - ndash = 2 - if len(newname) < 2: - ndash = 1 - return ndash - - def doXMLparam(self): - """Add all needed elements to tool""" # noqa - for p in self.outfiles: - newname = p["name"] - newfmt = 
p["format"] - newcl = p["CL"] - test = p["test"] - oldcl = p["origCL"] - test = test.strip() - ndash = self.getNdash(newcl) - aparm = gxtp.OutputData( - name=newname, format=newfmt, num_dashes=ndash, label=newname - ) - aparm.positional = self.is_positional - if self.is_positional: - if oldcl.upper() == "STDOUT": - aparm.positional = 9999999 - aparm.command_line_override = "> $%s" % newname - else: - aparm.positional = int(oldcl) - aparm.command_line_override = "$%s" % newname - self.toutputs.append(aparm) - ld = None - if test.strip() > "": - if test.startswith("diff"): - c = "diff" - ld = 0 - if test.split(":")[1].isdigit: - ld = int(test.split(":")[1]) - tp = gxtp.TestOutput( - name=newname, - value="%s_sample" % newname, - compare=c, - lines_diff=ld, - ) - elif test.startswith("sim_size"): - c = "sim_size" - tn = test.split(":")[1].strip() - if tn > "": - if "." in tn: - delta = None - delta_frac = min(1.0, float(tn)) - else: - delta = int(tn) - delta_frac = None - tp = gxtp.TestOutput( - name=newname, - value="%s_sample" % newname, - compare=c, - delta=delta, - delta_frac=delta_frac, - ) - else: - c = test - tp = gxtp.TestOutput( - name=newname, - value="%s_sample" % newname, - compare=c, - ) - self.testparam.append(tp) - for p in self.infiles: - newname = p["infilename"] - newfmt = p["format"] - ndash = self.getNdash(newname) - reps = p.get("repeat", "0") == "1" - if not len(p["label"]) > 0: - alab = p["CL"] - else: - alab = p["label"] - aninput = gxtp.DataParam( - newname, - optional=False, - label=alab, - help=p["help"], - format=newfmt, - multiple=False, - num_dashes=ndash, - ) - aninput.positional = self.is_positional - if self.is_positional: - if p["origCL"].upper() == "STDIN": - aninput.positional = 9999998 - aninput.command_line_override = "> $%s" % newname - else: - aninput.positional = int(p["origCL"]) - aninput.command_line_override = "$%s" % newname - if reps: - repe = gxtp.Repeat(name=f"R_{newname}", title=f"Add as many {alab} as needed") - repe.append(aninput) - self.tinputs.append(repe) - tparm = gxtp.TestRepeat(name=f"R_{newname}") - tparm2 = gxtp.TestParam(newname, value="%s_sample" % newname) - tparm.append(tparm2) - self.testparam.append(tparm) - else: - self.tinputs.append(aninput) - tparm = gxtp.TestParam(newname, value="%s_sample" % newname) - self.testparam.append(tparm) - for p in self.addpar: - newname = p["name"] - newval = p["value"] - newlabel = p["label"] - newhelp = p["help"] - newtype = p["type"] - newcl = p["CL"] - oldcl = p["origCL"] - reps = p["repeat"] == "1" - if not len(newlabel) > 0: - newlabel = newname - ndash = self.getNdash(newname) - if newtype == "text": - aparm = gxtp.TextParam( - newname, - label=newlabel, - help=newhelp, - value=newval, - num_dashes=ndash, - ) - elif newtype == "integer": - aparm = gxtp.IntegerParam( - newname, - label=newlabel, - help=newhelp, - value=newval, - num_dashes=ndash, - ) - elif newtype == "float": - aparm = gxtp.FloatParam( - newname, - label=newlabel, - help=newhelp, - value=newval, - num_dashes=ndash, - ) - elif newtype == "boolean": - aparm = gxtp.BooleanParam( - newname, - label=newlabel, - help=newhelp, - value=newval, - num_dashes=ndash, - ) - else: - raise ValueError( - 'Unrecognised parameter type "%s" for\ - additional parameter %s in makeXML' - % (newtype, newname) - ) - aparm.positional = self.is_positional - if self.is_positional: - aparm.positional = int(oldcl) - if reps: - repe = gxtp.Repeat(name=f"R_{newname}", title=f"Add as many {newlabel} as needed") - repe.append(aparm) - 
self.tinputs.append(repe) - tparm = gxtp.TestRepeat(name=f"R_{newname}") - tparm2 = gxtp.TestParam(newname, value=newval) - tparm.append(tparm2) - self.testparam.append(tparm) - else: - self.tinputs.append(aparm) - tparm = gxtp.TestParam(newname, value=newval) - self.testparam.append(tparm) - for p in self.selpar: - newname = p["name"] - newval = p["value"] - newlabel = p["label"] - newhelp = p["help"] - newtype = p["type"] - newcl = p["CL"] - if not len(newlabel) > 0: - newlabel = newname - ndash = self.getNdash(newname) - if newtype == "selecttext": - newtext = p["texts"] - aparm = gxtp.SelectParam( - newname, - label=newlabel, - help=newhelp, - num_dashes=ndash, - ) - for i in range(len(newval)): - anopt = gxtp.SelectOption( - value=newval[i], - text=newtext[i], - ) - aparm.append(anopt) - aparm.positional = self.is_positional - if self.is_positional: - aparm.positional = int(newcl) - self.tinputs.append(aparm) - tparm = gxtp.TestParam(newname, value=newval) - self.testparam.append(tparm) - else: - raise ValueError( - 'Unrecognised parameter type "%s" for\ - selecttext parameter %s in makeXML' - % (newtype, newname) - ) - for p in self.collections: - newkind = p["kind"] - newname = p["name"] - newlabel = p["label"] - newdisc = p["discover"] - collect = gxtp.OutputCollection(newname, label=newlabel, type=newkind) - disc = gxtp.DiscoverDatasets( - pattern=newdisc, directory=f"{newname}", visible="false" - ) - collect.append(disc) - self.toutputs.append(collect) - try: - tparm = gxtp.TestOutputCollection(newname) # broken until PR merged. - self.testparam.append(tparm) - except Exception: - print("#### WARNING: Galaxyxml version does not have the PR merged yet - tests for collections must be over-ridden until then!") - - def doNoXMLparam(self): - """filter style package - stdin to stdout""" - if len(self.infiles) > 0: - alab = self.infiles[0]["label"] - if len(alab) == 0: - alab = self.infiles[0]["infilename"] - max1s = ( - "Maximum one input if parampass is 0 but multiple input files supplied - %s" - % str(self.infiles) - ) - assert len(self.infiles) == 1, max1s - newname = self.infiles[0]["infilename"] - aninput = gxtp.DataParam( - newname, - optional=False, - label=alab, - help=self.infiles[0]["help"], - format=self.infiles[0]["format"], - multiple=False, - num_dashes=0, - ) - aninput.command_line_override = "< $%s" % newname - aninput.positional = True - self.tinputs.append(aninput) - tp = gxtp.TestParam(name=newname, value="%s_sample" % newname) - self.testparam.append(tp) - if len(self.outfiles) > 0: - newname = self.outfiles[0]["name"] - newfmt = self.outfiles[0]["format"] - anout = gxtp.OutputData(newname, format=newfmt, num_dashes=0) - anout.command_line_override = "> $%s" % newname - anout.positional = self.is_positional - self.toutputs.append(anout) - tp = gxtp.TestOutput(name=newname, value="%s_sample" % newname) - self.testparam.append(tp) - - def makeXML(self): # noqa - """ - Create a Galaxy xml tool wrapper for the new script - Uses galaxyhtml - Hmmm. How to get the command line into correct order... 
- """ - if self.command_override: - self.newtool.command_override = self.command_override # config file - else: - self.newtool.command_override = self.xmlcl - cite = gxtp.Citations() - acite = gxtp.Citation(type="doi", value="10.1093/bioinformatics/bts573") - cite.append(acite) - self.newtool.citations = cite - safertext = "" - if self.args.help_text: - helptext = open(self.args.help_text, "r").readlines() - safertext = "\n".join([cheetah_escape(x) for x in helptext]) - if len(safertext.strip()) == 0: - safertext = ( - "Ask the tool author (%s) to rebuild with help text please\n" - % (self.args.user_email) - ) - if self.args.script_path: - if len(safertext) > 0: - safertext = safertext + "\n\n------\n" # transition allowed! - scr = [x for x in self.spacedScript if x.strip() > ""] - scr.insert(0, "\n\nScript::\n") - if len(scr) > 300: - scr = ( - scr[:100] - + [" >300 lines - stuff deleted", " ......"] - + scr[-100:] - ) - scr.append("\n") - safertext = safertext + "\n".join(scr) - self.newtool.help = safertext - self.newtool.version_command = f'echo "{self.args.tool_version}"' - std = gxtp.Stdios() - std1 = gxtp.Stdio() - std.append(std1) - self.newtool.stdios = std - requirements = gxtp.Requirements() - if self.args.packages: - for d in self.args.packages.split(","): - ver = "" - d = d.replace("==", ":") - d = d.replace("=", ":") - if ":" in d: - packg, ver = d.split(":") - else: - packg = d - requirements.append( - gxtp.Requirement("package", packg.strip(), ver.strip()) - ) - self.newtool.requirements = requirements - if self.args.parampass == "0": - self.doNoXMLparam() - else: - self.doXMLparam() - self.newtool.outputs = self.toutputs - self.newtool.inputs = self.tinputs - if self.args.script_path: - configfiles = gxtp.Configfiles() - configfiles.append( - gxtp.Configfile(name="runme", text="\n".join(self.escapedScript)) - ) - self.newtool.configfiles = configfiles - tests = gxtp.Tests() - test_a = gxtp.Test() - for tp in self.testparam: - test_a.append(tp) - tests.append(test_a) - self.newtool.tests = tests - self.newtool.add_comment( - "Created by %s at %s using the Galaxy Tool Factory." 
- % (self.args.user_email, timenow()) - ) - self.newtool.add_comment("Source in git at: %s" % (toolFactoryURL)) - exml0 = self.newtool.export() - exml = exml0.replace(FAKEEXE, "") # temporary work around until PR accepted - if ( - self.test_override - ): # cannot do this inside galaxyxml as it expects lxml objects for tests - part1 = exml.split("")[0] - part2 = exml.split("")[1] - fixed = "%s\n%s\n%s" % (part1, "\n".join(self.test_override), part2) - exml = fixed - # exml = exml.replace('range="1:"', 'range="1000:"') - xf = open("%s.xml" % self.tool_name, "w") - xf.write(exml) - xf.write("\n") - xf.close() - # ready for the tarball - - def run(self): - """ - generate test outputs by running a command line - won't work if command or test override in play - planemo is the - easiest way to generate test outputs for that case so is - automagically selected - """ - scl = " ".join(self.cl) - err = None - if self.args.parampass != "0": - if os.path.exists(self.elog): - ste = open(self.elog, "a") - else: - ste = open(self.elog, "w") - if self.lastclredirect: - sto = open(self.lastclredirect[1], "wb") # is name of an output file - else: - if os.path.exists(self.tlog): - sto = open(self.tlog, "a") - else: - sto = open(self.tlog, "w") - sto.write( - "## Executing Toolfactory generated command line = %s\n" % scl - ) - sto.flush() - subp = subprocess.run( - self.cl, shell=False, stdout=sto, stderr=ste - ) - sto.close() - ste.close() - retval = subp.returncode - else: # work around special case - stdin and write to stdout - if len(self.infiles) > 0: - sti = open(self.infiles[0]["name"], "rb") - else: - sti = sys.stdin - if len(self.outfiles) > 0: - sto = open(self.outfiles[0]["name"], "wb") - else: - sto = sys.stdout - subp = subprocess.run( - self.cl, shell=False, stdout=sto, stdin=sti - ) - sto.write("## Executing Toolfactory generated command line = %s\n" % scl) - retval = subp.returncode - sto.close() - sti.close() - if os.path.isfile(self.tlog) and os.stat(self.tlog).st_size == 0: - os.unlink(self.tlog) - if os.path.isfile(self.elog) and os.stat(self.elog).st_size == 0: - os.unlink(self.elog) - if retval != 0 and err: # problem - sys.stderr.write(err) - logging.debug("run done") - return retval - - def shedLoad(self): - """ - use bioblend to create new repository - or update existing - - """ - if os.path.exists(self.tlog): - sto = open(self.tlog, "a") - else: - sto = open(self.tlog, "w") - - ts = toolshed.ToolShedInstance( - url=self.args.toolshed_url, - key=self.args.toolshed_api_key, - verify=False, - ) - repos = ts.repositories.get_repositories() - rnames = [x.get("name", "?") for x in repos] - rids = [x.get("id", "?") for x in repos] - tfcat = "ToolFactory generated tools" - if self.tool_name not in rnames: - tscat = ts.categories.get_categories() - cnames = [x.get("name", "?").strip() for x in tscat] - cids = [x.get("id", "?") for x in tscat] - catID = None - if tfcat.strip() in cnames: - ci = cnames.index(tfcat) - catID = cids[ci] - res = ts.repositories.create_repository( - name=self.args.tool_name, - synopsis="Synopsis:%s" % self.args.tool_desc, - description=self.args.tool_desc, - type="unrestricted", - remote_repository_url=self.args.toolshed_url, - homepage_url=None, - category_ids=catID, - ) - tid = res.get("id", None) - sto.write(f"#create_repository {self.args.tool_name} tid={tid} res={res}\n") - else: - i = rnames.index(self.tool_name) - tid = rids[i] - try: - res = ts.repositories.update_repository( - id=tid, tar_ball_path=self.newtarpath, commit_message=None - ) - 
sto.write(f"#update res id {id} ={res}\n") - except ConnectionError: - sto.write( - "####### Is the toolshed running and the API key correct? Bioblend shed upload failed\n" - ) - sto.close() - - def eph_galaxy_load(self): - """ - use ephemeris to load the new tool from the local toolshed after planemo uploads it - """ - if os.path.exists(self.tlog): - tout = open(self.tlog, "a") - else: - tout = open(self.tlog, "w") - cll = [ - "shed-tools", - "install", - "-g", - self.args.galaxy_url, - "--latest", - "-a", - self.args.galaxy_api_key, - "--name", - self.tool_name, - "--owner", - "fubar", - "--toolshed", - self.args.toolshed_url, - "--section_label", - "ToolFactory", - ] - tout.write("running\n%s\n" % " ".join(cll)) - subp = subprocess.run( - cll, - cwd=self.ourcwd, - shell=False, - stderr=tout, - stdout=tout, - ) - tout.write( - "installed %s - got retcode %d\n" % (self.tool_name, subp.returncode) - ) - tout.close() - return subp.returncode - - def writeShedyml(self): - """for planemo""" - yuser = self.args.user_email.split("@")[0] - yfname = os.path.join(self.tooloutdir, ".shed.yml") - yamlf = open(yfname, "w") - odict = { - "name": self.tool_name, - "owner": yuser, - "type": "unrestricted", - "description": self.args.tool_desc, - "synopsis": self.args.tool_desc, - "category": "TF Generated Tools", - } - yaml.dump(odict, yamlf, allow_unicode=True) - yamlf.close() - - def makeTool(self): - """write xmls and input samples into place""" - if self.args.parampass == 0: - self.doNoXMLparam() - else: - self.makeXML() - if self.args.script_path: - stname = os.path.join(self.tooloutdir, self.sfile) - if not os.path.exists(stname): - shutil.copyfile(self.sfile, stname) - xreal = "%s.xml" % self.tool_name - xout = os.path.join(self.tooloutdir, xreal) - shutil.copyfile(xreal, xout) - for p in self.infiles: - pth = p["name"] - dest = os.path.join(self.testdir, "%s_sample" % p["infilename"]) - shutil.copyfile(pth, dest) - dest = os.path.join(self.repdir, "%s_sample" % p["infilename"]) - shutil.copyfile(pth, dest) - - def makeToolTar(self, report_fail=False): - """move outputs into test-data and prepare the tarball""" - excludeme = "_planemo_test_report.html" - - def exclude_function(tarinfo): - filename = tarinfo.name - return None if filename.endswith(excludeme) else tarinfo - - if os.path.exists(self.tlog): - tout = open(self.tlog, "a") - else: - tout = open(self.tlog, "w") - for p in self.outfiles: - oname = p["name"] - tdest = os.path.join(self.testdir, "%s_sample" % oname) - src = os.path.join(self.testdir, oname) - if not os.path.isfile(tdest): - if os.path.isfile(src): - shutil.copyfile(src, tdest) - dest = os.path.join(self.repdir, "%s.sample" % (oname)) - shutil.copyfile(src, dest) - else: - if report_fail: - tout.write( - "###Tool may have failed - output file %s not found in testdir after planemo run %s." - % (tdest, self.testdir) - ) - tf = tarfile.open(self.newtarpath, "w:gz") - tf.add( - name=self.tooloutdir, - arcname=self.tool_name, - filter=exclude_function, - ) - tf.close() - shutil.copyfile(self.newtarpath, self.args.new_tool) - - def moveRunOutputs(self): - """need to move planemo or run outputs into toolfactory collection""" - with os.scandir(self.tooloutdir) as outs: - for entry in outs: - if not entry.is_file(): - continue - if "." 
in entry.name: - _, ext = os.path.splitext(entry.name) - if ext in [".tgz", ".json"]: - continue - if ext in [".yml", ".xml", ".yaml"]: - newname = f"{entry.name.replace('.','_')}.txt" - else: - newname = entry.name - else: - newname = f"{entry.name}.txt" - dest = os.path.join(self.repdir, newname) - src = os.path.join(self.tooloutdir, entry.name) - shutil.copyfile(src, dest) - if self.args.include_tests: - with os.scandir(self.testdir) as outs: - for entry in outs: - if (not entry.is_file()) or entry.name.endswith( - "_planemo_test_report.html" - ): - continue - if "." in entry.name: - _, ext = os.path.splitext(entry.name) - if ext in [".tgz", ".json"]: - continue - if ext in [".yml", ".xml", ".yaml"]: - newname = f"{entry.name.replace('.','_')}.txt" - else: - newname = entry.name - else: - newname = f"{entry.name}.txt" - dest = os.path.join(self.repdir, newname) - src = os.path.join(self.testdir, entry.name) - shutil.copyfile(src, dest) - - def planemo_test_once(self): - """planemo is a requirement so is available for testing but needs a - different call if in the biocontainer - see above - and for generating test outputs if command or test overrides are - supplied test outputs are sent to repdir for display - # "--galaxy_python_version", - # self.args.python_version, - - """ - xreal = "%s.xml" % self.tool_name - tool_test_path = os.path.join( - self.repdir, f"{self.tool_name}_planemo_test_report.html" - ) - if os.path.exists(self.tlog): - tout = open(self.tlog, "a") - else: - tout = open(self.tlog, "w") - cll = [ - "planemo", - "test", - "--test_data", - os.path.abspath(self.testdir), - "--test_output", - os.path.abspath(tool_test_path), - "--galaxy_root", - self.args.galaxy_root, - "--update_test_data", - os.path.abspath(xreal), - ] - p = subprocess.run( - cll, - shell=False, - cwd=self.tooloutdir, - stderr=tout, - stdout=tout, - ) - tout.close() - return p.returncode - -def main(): - """ - This is a Galaxy wrapper. 
- It expects to be called by a special purpose tool.xml - - """ - parser = argparse.ArgumentParser() - a = parser.add_argument - a("--script_path", default=None) - a("--history_test", default=None) - a("--cl_suffix", default=None) - a("--sysexe", default=None) - a("--packages", default=None) - a("--tool_name", default="newtool") - a("--tool_dir", default=None) - a("--input_files", default=[], action="append") - a("--output_files", default=[], action="append") - a("--user_email", default="Unknown") - a("--bad_user", default=None) - a("--make_Tool", default="runonly") - a("--help_text", default=None) - a("--tool_desc", default=None) - a("--tool_version", default=None) - a("--citations", default=None) - a("--command_override", default=None) - a("--test_override", default=None) - a("--additional_parameters", action="append", default=[]) - a("--selecttext_parameters", action="append", default=[]) - a("--edit_additional_parameters", action="store_true", default=False) - a("--parampass", default="positional") - a("--tfout", default="./tfout") - a("--new_tool", default="new_tool") - a("--galaxy_url", default="http://localhost:8080") - a("--toolshed_url", default="http://localhost:9009") - # make sure this is identical to tool_sheds_conf.xml - # localhost != 127.0.0.1 so validation fails - a("--toolshed_api_key", default="fakekey") - a("--galaxy_api_key", default="fakekey") - a("--galaxy_root", default="/galaxy-central") - a("--galaxy_venv", default="/galaxy_venv") - a("--collection", action="append", default=[]) - a("--include_tests", default=False, action="store_true") - a("--python_version", default="3.9") - args = parser.parse_args() - assert not args.bad_user, ( - 'UNAUTHORISED: %s is NOT authorized to use this tool until Galaxy \ -admin adds %s to "admin_users" in the galaxy.yml Galaxy configuration file' - % (args.bad_user, args.bad_user) - ) - assert args.tool_name, "## Tool Factory expects a tool name - eg --tool_name=DESeq" - assert ( - args.sysexe or args.packages - ), "## Tool Factory wrapper expects an interpreter \ -or an executable package in --sysexe or --packages" - r = ScriptRunner(args) - r.writeShedyml() - r.makeTool() - if args.make_Tool == "generate": - r.run() - r.moveRunOutputs() - r.makeToolTar() - else: - r.planemo_test_once() - r.moveRunOutputs() - r.makeToolTar(report_fail=True) - if args.make_Tool == "gentestinstall": - r.shedLoad() - r.eph_galaxy_load() - - -if __name__ == "__main__": - main() diff -r 5fc0c9a93072 -r 1c652687a08f toolfactory/rgToolFactory2.xml --- a/toolfactory/rgToolFactory2.xml Sun Apr 18 03:55:56 2021 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,612 +0,0 @@ - - Scripts into tools v2.0 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
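The Cheetah command section that follows serialises every form repeat into a one-line JSON dictionary, which the generator reads back with json.loads in its __init__. A minimal sketch of the decoded shapes, with purely illustrative field values, is::

    # illustrative only - the keys mirror the --input_files, --output_files and
    # --additional_parameters JSON literals built in the command section below
    infile = {"name": "input1", "CL": "1", "format": "tabular",
              "label": "Table to reverse", "help": "", "repeat": "0"}
    outfile = {"name": "outfile", "format": "tabular", "CL": "2", "test": "diff:0"}
    addpar = {"name": "prefix", "value": "row_", "label": "Row prefix", "help": "",
              "type": "text", "CL": "3", "override": "", "repeat": "0"}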
- - - quay.io/biocontainers/mulled-v2-0a86ccf22d71945a175383bcea5edd6b51c25ed0:55f1b8132a10ad91f0016ba9f4c692627ca2d25b-0 - galaxyxml - planemo - - - 0: ---cl_suffix "$cl_suffix" - #end if - #if $cover.commover == "yes": - #if len(str($cover.command_override)) > 10: ---command_override "$commandoverride" - #end if - #if len(str($cover.test_override)) > 10: ---test_override "$testoverride" - #end if - #end if ---packages "$deps.packages" - #if $deps.usescript.choosescript == "yes": ---script_path "$runme" ---sysexe "$deps.usescript.scriptrunner" - #end if ---tool_name "$tool_name" --user_email "$__user_email__" --citations "$citeme" --parampass "$io_param.ppass.parampass" - - #if str($make.makeMode.make_Tool)!="runonly": ---make_Tool "$make.makeMode.make_Tool" ---tool_desc "$make.makeMode.tool_desc" ---tool_version "$make.makeMode.tool_version" ---help_text "$helpme" ---new_tool "$new_tool" ---toolshed_api_key "$make.makeMode.toolshed_apikey" ---galaxy_api_key "$make.makeMode.galaxy_apikey" ---toolshed_url "$make.makeMode.toolshed_url" ---galaxy_url "$make.makeMode.galaxy_url" - #end if - #if $io_param.ppass.parampass != '0': - #if str($io_param.ppass.addparam.edit_params) == "yes": ---edit_additional_parameters - #end if - #for $apar in $io_param.ppass.addparam.additional_parameters: - #if $apar.ap_type.param_type=="selecttext": ---selecttext_parameters '{"name":"$apar.param_name", "label":"$apar.param_label", "help":"$apar.param_help", -"type":"$apar.ap_type.param_type","CL":"$apar.param_CL","override":"$apar.param_CLprefixed","value": [ - #for $i,$st in enumerate($apar.ap_type.selectTexts): - "$st.select_value" - #if ($i < (len($apar.ap_type.selectTexts)-1)): - , - #end if - #end for - ], "texts": [ - #for $i,$st in enumerate($apar.ap_type.selectTexts): - "$st.select_text" - #if ($i < (len($apar.ap_type.selectTexts)-1)): - , - #end if - - #end for - ] - }' - #else: ---additional_parameters '{"name": "$apar.param_name", "value": "$apar.ap_type.param_value", "label": "$apar.param_label", "help": "$apar.param_help", -"type": "$apar.ap_type.param_type","CL": "$apar.param_CL","override": "$apar.param_CLprefixed", "repeat": "$apar.param_repeat"}' - #end if - #end for - #end if - #for $intab in $io_param.ppass.io.history_inputs: ---input_files '{"name": "$intab.input_files", "CL": "$intab.input_CL", "format": "$intab.input_formats", "label": "$intab.input_label", "help": "$intab.input_help", "repeat": "$intab.input_repeat"}' - #end for - #for $otab in $io_param.ppass.io.history_outputs: ---output_files '{"name": "$otab.history_name", "format": "$otab.history_format", "CL": "$otab.history_CL", "test": "$otab.history_test"}' - #end for - #for $collect in $io_param.ppass.io.collection_outputs: ---collection '{"name": "$collect.name", "kind": "$collect.kind", "discover": "$collect.discover", "label": "$collect.label"}' - #end for ---galaxy_root "$__root_dir__" ---tool_dir "$__tool_directory__" - #end if -]]> - - -$deps.usescript.dynScript - - -#if $cover.commover == "yes" and len(str($cover.command_override).strip()) > 1: -$cover.command_override -#end if - - -#if $cover.commover == "yes" and len(str($cover.test_override).strip()) > 1: -$cover.test_override -#end if - - - #if $make.makeMode.make_Tool != "runonly": -${make.makeMode.help_text} - #else -$tool_name help goes here - #end if - - -#if $make.makeMode.make_Tool != "runonly": - #for $citation in $make.makeMode.citations: - #if $citation.citation_type.type == "bibtex": - **ENTRY**bibtex - ${citation.citation_type.bibtex} - #else - 
**ENTRY**doi - ${citation.citation_type.doi} - #end if - #end for -#end if -
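As a worked illustration of the citeme configfile above: parse_citations, defined in the Python file being replaced, splits the **ENTRY**-delimited text into (type, value) tuples. The bibtex entry below is invented for the example; the DOI is the ToolFactory's own::

    text = ("**ENTRY**doi\n10.1093/bioinformatics/bts573\n"
            "**ENTRY**bibtex\n@misc{tf, title={ToolFactory}}")
    parse_citations(text)
    # -> [('doi', '10.1093/bioinformatics/bts573'),
    #     ('bibtex', '@misc{tf, title={ToolFactory}}')]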
- - - - makeMode['make_Tool'] != "runonly" - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -.. class:: warningmark - -**Details and attribution** -(see GTF_) - -**Local Admins ONLY** -Only users whose IDs found in the local admin_user configuration setting in universe_wsgi.ini can run this tool. - -**If you find a bug** -Please raise an issue, or even better, submit a pull request fixing it, on the github repository GTF_ - -**What it does** -This tool optionally generates normal workflow compatible first class Galaxy tools - -Generated tools can run existing binary packages that become requirements, existing scripts, or new scripts pasted into this tool form. -Pasted scripts are written so they are part of the new tool and cannot be adjusted by the downstream user. -Binary packages are managed by the dependency subsystem - conda usually, so anything in bioconda or conda_forge is available for example. - -Any number of parameters can be built into the new tool form for passing in to the script or executable at runtime. -These can be editable by the downstream user or baked in. - -When you run this tool, your executable or script and supplied parameter values will be run to produce a canonical -set of outputs - these are used to construct a test for the new tool. - -If tool generation is required, a new tarball compatible with any Galaxy toolshed is created. -It can be unpacked in your galaxy/tools directory and manually added to tool_conf.xml, or -installed into any toolshed from where it can be installed into your Galaxy. - - -.. class:: warningmark - -**Note to system administrators** -This tool offers *NO* built in protection against malicious scripts. It should only be installed on private/personnal Galaxy instances. -Admin_users will have the power to do anything they want as the Galaxy user if you install this tool. - -.. class:: warningmark - -**Use on public servers** is STRONGLY discouraged for obvious reasons - -The tools generated by this tool will run just as securely as any other normal installed Galaxy tool but like any other new tools, should always be checked carefully before installation. -We recommend that you follow the good code hygiene practices associated with safe toolshed practices. - -Here's a sample python script that can be cut and pasted into the tool form, suitable for positional parameter passing: - -:: - - # reverse order of text by row - import sys - inp = sys.argv[1] - outp = sys.argv[2] - i = open(inp,'r').readlines() - o = open(outp,'w') - for row in i: - rs = row.rstrip() - rs = list(rs) - rs.reverse() - o.write(''.join(rs)) - o.write('\n') - o.close() - -With argparse style parameters: - -:: - - # reverse order of text by row - import argparse - parser = argparse.ArgumentParser() - a = parser.add_argument - a('--infile',default='') - a('--outfile',default=None) - args = parser.parse_args() - inp = args.infile - outp = args.outfile - i = open(inp,'r').readlines() - o = open(outp,'w') - for row in i: - rs = row.rstrip() - rs = list(rs) - rs.reverse() - o.write(''.join(rs)) - o.write('\n') - o.close() - -R script to draw some plots - use a collection. 
- -:: - - - \# note this script takes NO input because it generates random data - dir.create('plots') - for (i in 1:10) { - foo = runif(100) - bar = rnorm(100) - bar = foo + 0.05*bar - pdf(paste('plots/yet',i,"anotherplot.pdf",sep='_')) - plot(foo,bar,main=paste("Foo by Bar plot \#",i),col="maroon", pch=3,cex=0.6) - dev.off() - foo = data.frame(a=runif(100),b=runif(100),c=runif(100),d=runif(100),e=runif(100),f=runif(100)) - bar = as.matrix(foo) - pdf(paste('plots/yet',i,"anotherheatmap.pdf",sep='_')) - heatmap(bar,main='Random Heatmap') - dev.off() - } - - - -Paper_ - -*Licensing* - -Copyright Ross Lazarus (ross period lazarus at gmail period com) May 2012 -All rights reserved. -Licensed under the LGPL_ - -.. _LGPL: http://www.gnu.org/copyleft/lesser.html -.. _GTF: https://github.com/fubar2/toolfactory -.. _Paper: https://academic.oup.com/bioinformatics/article/28/23/3139/192853 - - - - - 10.1093/bioinformatics/bts573 - -
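The help text above gives positional and argparse examples. For the third mode, parampass "0", where clsimple and doNoXMLparam wire the single input to STDIN and the single output to STDOUT, a pasted script takes no arguments at all; a minimal sketch of the equivalent row-reversing filter might be::

    # filter style: no parameters, read STDIN, write STDOUT
    import sys
    for row in sys.stdin:
        rs = list(row.rstrip())
        rs.reverse()
        sys.stdout.write(''.join(rs) + '\n')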
- - diff -r 5fc0c9a93072 -r 1c652687a08f toolfactory/testclass.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/testclass.py Fri Apr 30 07:06:57 2021 +0000 @@ -0,0 +1,108 @@ +import argparse +import json +import os +import requests + + +from bioblend import galaxy + +class ToolTester(): + # test a newly installed tool using bioblend + """ + + https://github.com/nsoranzo/bioblend-tutorial/blob/master/historical_exercises/api-scripts.exercises/run_tool.py +import sys +import json +import requests +import output + +BASE_URL = 'http://localhost:8080' + +# ----------------------------------------------------------------------------- +def run_tool( tool_id, history_id, **kwargs ): + full_url = BASE_URL + '/api/tools' + + #EXERCISE: POST... + +# ----------------------------------------------------------------------------- +if __name__ == '__main__': + # e.g. ./run_tool.py Filter1 ebfb8f50c6abde6d '{ "input" : { "src": "hda", "id": "77f74776fd03cbc5" }, "cond" : "c6>=100.0" }' + # e.g. ./run_tool.py sort1 f597429621d6eb2b '{ "input": { "src": "hda", "id": "b472e2eb553fa0d1" }, "column": "c6", "style": "alpha", "column_set_0|other_column" : "c2", "column_set_0|other_style": "num" }' + tool_id, history_id = sys.argv[1:3] + params = json.loads( sys.argv[3] ) if len( sys.argv ) >= 4 else {} + response = run_tool( tool_id, history_id, **params ) + output.output_response( response ) + + + def get_testdata(self,urlin,fout): + ''' + grab a test file + GET /api/tools/{tool_id}/test_data_download?tool_version={tool_version}&filename={filename} + http://localhost:8080/api/tools/tacrev/test_data_download?tool_version=2.00&filename=in + ''' + """ + def __init__(self, args): + self.galaxy = args.galaxy + self.key = args.key + self.tool_id = args.tool_id + + def run_test(self): + """ + GET /api/tools/{tool_id}/test_data_download?tool_version={tool_version}&filename={filename} + http://localhost:8080/api/tools/tacrev/test_data_download?tool_version=2.00&filename=input1 + """ + inputs = {} + gi = galaxy.GalaxyInstance(url=self.galaxy, key=self.key, verify=False) + chistory = gi.histories.get_most_recently_used_history() + chistory_id = chistory['id'] + #contents = gi.histories.show_history(chistory_id, contents=True) + #print('####chistory',chistory,'\n#### contents=',contents) + #history = gi.histories.create_history(name=f"{self.tool_id}_test_history") + #new_hist_id = history['id'] + fapi = ''.join([self.galaxy, '/api/tools/', self.tool_id, '/build']) + build = gi.make_get_request(url=fapi,params={"history_id":chistory_id}).json() + fapi = ''.join([self.galaxy, '/api/tools/', self.tool_id, '/test_data']) + test_data = requests.get(fapi, params={'key':self.key, 'history_id':chistory_id})# gi.make_get_request(url=fapi,params={"history_id":chistory_id,'key':self.key}).json() + print(test_data) + testinputs = test_data.json()[0].get('inputs',None) + print('testinputs',testinputs) + stateinputs = build.get('state_inputs',None) # 'input1': {'values': [{'id': '7b326180327c3fcc', 'src': 'hda'}]}} + if testinputs: + for k in testinputs.keys(): + v = testinputs[k] + if '|' in k: + nk = k.split('|')[-1] + inputs[nk] = v + else: + inputs[k] = v + if stateinputs: + print('stateinputs',stateinputs) + for k in stateinputs.keys(): + inp = stateinputs[k] + if isinstance(inp,dict): + if inp.get('values',None): + for anin in inp['values']: + if anin.get('id', None) and anin.get('src', None): + gi.histories.copy_dataset(chistory_id, anin['id'], source=anin['src']) + print('******copied id', anin['id']) + up = 
{k:anin} + print(up) + inputs.update(up) # replace the input def + print('after state inputs', inputs) + fapi = ''.join([self.galaxy, '/api/tools']) + r = gi.tools.run_tool(chistory_id, self.tool_id, inputs, input_format='legacy') + print(f"Called test on {self.tool_id} - got {r}") + +def _parser(): + parser = argparse.ArgumentParser() + parser.add_argument("-g", "--galaxy", help='URL of target galaxy',default="http://localhost:8080") + parser.add_argument("-a", "--key", help='Galaxy admin key', default="13073fde17d06591ce36e596e3c29904") + parser.add_argument("-t", "--tool_id", help='Tool id to test', default="plotter") + return parser + + +if __name__ == "__main__": + args = _parser().parse_args() + tt = ToolTester(args) + tt.run_test() + diff -r 5fc0c9a93072 -r 1c652687a08f toolfactory/toolwatcher.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/toolwatcher.py Fri Apr 30 07:06:57 2021 +0000 @@ -0,0 +1,123 @@ +#!/usr/bin/python +from datetime import datetime, timedelta +from io import BytesIO as BIO +import logging +import os +import subprocess +import tarfile +import time +from watchdog.observers import Observer +from watchdog.events import FileSystemEventHandler +from watchdog.events import PatternMatchingEventHandler + +class ToolHandler(PatternMatchingEventHandler): + + def __init__(self, watchme): + PatternMatchingEventHandler.__init__(self, patterns=['*.xml'], + ignore_directories=False, case_sensitive=False) + self.last_modified = datetime.now() + self.tool_dir = watchme + self.work_dir = os.getcwd() + self.galaxy_root = os.path.split(watchme)[0] + logging.info(self.galaxy_root) + self.tar_dir = os.path.join(self.galaxy_root, 'tooltardir') + if not os.path.exists(self.tar_dir): + os.mkdir(self.tar_dir) + + def on_created(self, event): + self.on_modified(event) + + def on_modified(self, event): + if datetime.now() - self.last_modified < timedelta(seconds=1): + return + else: + if os.path.exists(event.src_path): + self.last_modified = datetime.now() + logging.info(f"{event.src_path} was {event.event_type}") + p = self.planemo_test(event.src_path) + if p: + if p.returncode == 0: + newtarpath = self.makeToolTar(event.src_path) + logging.info('### Tested toolshed tarball %s written' % newtarpath) + else: + logging.debug('### planemo stdout:') + logging.debug(p.stdout) + logging.debug('### planemo stderr:') + logging.debug(p.stderr) + logging.info('### Planemo call return code =' % p.returncode) + else: + logging.info('Directory %s deleted' % event.src_path) + + def planemo_test(self, xml_path): + toolpath, toolfile = os.path.split(xml_path) + dirlist = os.listdir(toolpath) + toolname = os.path.basename(toolpath) + logging.info('### test dirlist %s, path %s toolname %s' % (dirlist, xml_path, toolname)) + xmls = [x for x in dirlist if os.path.splitext(x)[1] == '.xml'] + if not len(xmls) > 0: + logging.warning('Found no xml files after change to %s' % xml_path) + return None + tool_test_output = os.path.join(toolpath, f"{toolname}_planemo_test_report.html") + cll = [ + "planemo", + "test", + "--test_output", + tool_test_output, + "--galaxy_root", + self.galaxy_root, + "--update_test_data", + xml_path, + ] + logging.info('### calling %s' % ' '.join(cll)) + p = subprocess.run( + cll, + cwd = toolpath, + shell=False, + capture_output=True, + encoding='utf8', + ) + return p + + def makeToolTar(self, xml_path): + """move outputs into test-data and prepare the tarball""" + excludeme = "_planemo_test_report.html" + + def exclude_function(tarinfo): + filename = tarinfo.name + 
return None if filename.endswith(excludeme) else tarinfo + + tooldir, xml_file = os.path.split(xml_path) + os.chdir(self.tool_dir) + toolname = os.path.splitext(xml_file)[0] + newtarpath = os.path.join(self.tar_dir, '%s_toolshed.gz' % toolname) + tf = tarfile.open(newtarpath, "w:gz") + tf.add( + name=toolname, + arcname=toolname, + filter=exclude_function, + ) + tf.close() + os.chdir(self.work_dir) + return newtarpath + + +if __name__ == "__main__": + watchme = '/home/ross/gal21/tools' + logging.basicConfig(level=logging.INFO, + #filename = os.path.join(watchme,"toolwatcher.log") + #filemode = "w", + format='%(asctime)s - %(message)s', + datefmt='%Y-%m-%d %H:%M:%S') + event_handler = ToolHandler(watchme=watchme) + observer = Observer() + observer.schedule(event_handler, path=watchme, recursive=True) + observer.start() + try: + while True: + time.sleep(1) + except KeyboardInterrupt: + observer.stop() + observer.join() + + +
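Finally, a hedged sketch of driving the new testclass.py helper programmatically rather than through its argparse front end; the Galaxy URL, API key and tool id below are placeholders, not values from this changeset::

    from argparse import Namespace
    from testclass import ToolTester

    args = Namespace(
        galaxy="http://localhost:8080",  # target Galaxy instance
        key="ADMINAPIKEY",               # admin API key - placeholder
        tool_id="plotter",               # id of the freshly installed tool to test
    )
    ToolTester(args).run_test()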