# HG changeset patch
# User tduigou
# Date 1752228616 0
# Node ID 1a766d8d388380fdd32ba1bcccbf0cec7935c466
planemo upload for repository https://github.com/brsynth commit fa4c85dd6ad48d404a28e21667f18b628bbdc702-dirty

diff -r 000000000000 -r 1a766d8d3883 json_db_config_generating_boolean.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/json_db_config_generating_boolean.xml Fri Jul 11 10:10:16 2025 +0000
@@ -0,0 +1,76 @@
+
+ Store parameters of tools in a workflow to be used again
+
+ 1
+ 0.1.0
+
+ @unpublished{json_db_config_generating
+ author = {Ramiz Khaled},
+ title = {{json_db_config_generating}},
+ url = {https://github.com/brsynth/},
+ }
+
\ No newline at end of file
diff -r 000000000000 -r 1a766d8d3883 maystro.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/maystro.py Fri Jul 11 10:10:16 2025 +0000
@@ -0,0 +1,98 @@
+import argparse
+import tempfile
+import os
+import json
+import shutil
+
+
+def parse_command_line_args():
+    parser = argparse.ArgumentParser(description="Maystro JSON Handler")
+
+    parser.add_argument("--distribute_json", required=True, help="true or false")
+    parser.add_argument("--json_from_workflow", required=False, nargs='+', help="JSON files from tools", default=[])
+    parser.add_argument("--json_from_user", required=False, help="User-provided JSON")
+    parser.add_argument("--json_name_mapping", required=True, help="map the real json name")
+    parser.add_argument("--output_workflow", required=True, help="JSON output for next workflow steps")
+    parser.add_argument("--output_user", required=True, help="Final JSON output to user")
+
+    return parser.parse_args()
+
+
+def parse_file_name_mapping(mapping_str):
+    mapping = {}
+    if mapping_str:
+        for pair in mapping_str.split(','):
+            stored, original = pair.strip().split(':', 1)
+            # Strip .json from original
+            real_name = os.path.splitext(original)[0]
+            mapping[os.path.basename(stored)] = real_name
+    return mapping
+
+
+def handle_distribute_json_false(args):
+    temp_dir = tempfile.mkdtemp(prefix="maystro_merge_")
+    print(f"[INFO] Watching temp dir for new JSONs: {temp_dir}")
+
+    try:
+        # Collect JSONs from json_from_workflow
+        initial_jsons = list(filter(os.path.isfile, args.json_from_workflow))
+        print(f"[INFO] Initial JSONs from workflow: {initial_jsons}")
+
+        # Parse filename mapping if provided
+        filename_mapping = parse_file_name_mapping(getattr(args, 'json_name_mapping', ''))
+
+        # Merge all together
+        merged = {}
+        for file_path in initial_jsons:
+            try:
+                with open(file_path, 'r') as f:
+                    data = json.load(f)
+                basename = os.path.basename(file_path)
+                real_name = filename_mapping.get(basename, basename)  # fallback if not in mapping
+                merged[real_name] = data
+                print(f"[INFO] Added data under key: {real_name}")
+            except json.JSONDecodeError as e:
+                print(f"[WARN] Skipping invalid JSON file {file_path}: {e}")
+
+        with open(args.output_user, "w") as f:
+            json.dump(merged, f, indent=2)
+        print(f"[INFO] Merged JSON written to: {args.output_user}")
+
+    finally:
+        print(f"[INFO] Cleaning up: {temp_dir}")
+        shutil.rmtree(temp_dir)
+
+
+def merge_json_files(paths):
+    merged = {}
+    for path in paths:
+        try:
+            with open(path, "r") as f:
+                data = json.load(f)
+            merged.update(data)
+        except Exception as e:
+            print(f"[WARN] Skipping {path}: {e}")
+    return merged
+
+
+def handle_distribute_json_true(args):
+    if not args.json_from_user:
+        raise ValueError("json_from_user is required when distribute_json is true")
+
+    with open(args.json_from_user, 'r') as in_f:
+        user_data = json.load(in_f)
+
+    with open(args.output_workflow, 'w') as out_f:
+        json.dump(user_data, out_f, indent=2)
+
+
+def main():
+    args = parse_command_line_args()
+
+    if args.distribute_json.lower() == 'false':
+        handle_distribute_json_false(args)
+    else:
+        handle_distribute_json_true(args)
+
+if __name__ == "__main__":
+    main()
diff -r 000000000000 -r 1a766d8d3883 output.html
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/output.html Fri Jul 11 10:10:16 2025 +0000
@@ -0,0 +1,291 @@
+
+ Test Results (powered by Planemo)
+
+ + + + + + + \ No newline at end of file diff -r 000000000000 -r 1a766d8d3883 output.json --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/output.json Fri Jul 11 10:10:16 2025 +0000 @@ -0,0 +1,197 @@ +{ + "summary": { + "num_errors": 0, + "num_failures": 0, + "num_skips": 0, + "num_tests": 2 + }, + "tests": [ + { + "data": { + "inputs": { + "distribute_json": false, + "json_from_workflow": [ + { + "id": "051ed7ff50845d8c", + "src": "hda" + }, + { + "id": "25656ddfec77a3b3", + "src": "hda" + } + ] + }, + "job": { + "command_line": "python '/home/rkhaled/galaxytools/tools/parameters_maystro/maystro.py' --distribute_json 'false' --json_from_workflow \"/tmp/tmpfpd6tei1/files/2/f/4/dataset_2f4ca6ae-77d6-4bfe-8e39-0051c294e4f6.dat\" \"/tmp/tmpfpd6tei1/files/9/6/0/dataset_960eac8e-fb66-4982-8009-4896c9e57b38.dat\" --json_from_user 'None' --json_name_mapping /tmp/tmpfpd6tei1/files/2/f/4/dataset_2f4ca6ae-77d6-4bfe-8e39-0051c294e4f6.dat:seq_from_db_param.json,/tmp/tmpfpd6tei1/files/9/6/0/dataset_960eac8e-fb66-4982-8009-4896c9e57b38.dat:seq_to_db_param.json --output_workflow '/tmp/tmpfpd6tei1/job_working_directory/000/3/outputs/dataset_d100f4ee-f845-4a68-be68-004c7af9679d.dat' --output_user '/tmp/tmpfpd6tei1/job_working_directory/000/3/outputs/dataset_1e2a9024-ad29-4d18-b95d-645817393383.dat' && echo 'DEBUB' && cat '/tmp/tmpfpd6tei1/job_working_directory/000/3/outputs/dataset_1e2a9024-ad29-4d18-b95d-645817393383.dat'", + "command_version": "", + "copied_from_job_id": null, + "create_time": "2025-07-11T09:42:39.854743", + "dependencies": [], + "exit_code": 0, + "external_id": "79277", + "galaxy_version": "25.0", + "handler": null, + "history_id": "051ed7ff50845d8c", + "id": "d66447d427a79ce4", + "inputs": { + "json_from_workflow": { + "id": "051ed7ff50845d8c", + "src": "hda", + "uuid": "2f4ca6ae-77d6-4bfe-8e39-0051c294e4f6" + }, + "json_from_workflow1": { + "id": "051ed7ff50845d8c", + "src": "hda", + "uuid": "2f4ca6ae-77d6-4bfe-8e39-0051c294e4f6" + }, + "json_from_workflow2": { + "id": "25656ddfec77a3b3", + "src": "hda", + "uuid": "960eac8e-fb66-4982-8009-4896c9e57b38" + } + }, + "job_messages": [], + "job_metrics": [], + "job_runner_name": null, + "job_stderr": "", + "job_stdout": "", + "model_class": "Job", + "output_collections": {}, + "outputs": { + "output_user": { + "id": "39496c7f8d685f43", + "src": "hda", + "uuid": "1e2a9024-ad29-4d18-b95d-645817393383" + }, + "output_workflow": { + "id": "d66447d427a79ce4", + "src": "hda", + "uuid": "d100f4ee-f845-4a68-be68-004c7af9679d" + } + }, + "params": { + "__input_ext": "\"input\"", + "chromInfo": "\"/tmp/tmpfpd6tei1/galaxy-dev/tool-data/shared/ucsc/chrom/?.len\"", + "dbkey": "\"?\"", + "distribute_json": "false", + "json_from_user": null + }, + "state": "ok", + "stderr": "", + "stdout": "[INFO] Watching temp dir for new JSONs: /tmp/tmpfpd6tei1/tmp/maystro_merge_xd06s1et\n[INFO] Initial JSONs from workflow: ['/tmp/tmpfpd6tei1/files/2/f/4/dataset_2f4ca6ae-77d6-4bfe-8e39-0051c294e4f6.dat', '/tmp/tmpfpd6tei1/files/9/6/0/dataset_960eac8e-fb66-4982-8009-4896c9e57b38.dat']\n[INFO] Added data under key: seq_from_db_param\n[INFO] Added data under key: seq_to_db_param\n[INFO] Merged JSON written to: /tmp/tmpfpd6tei1/job_working_directory/000/3/outputs/dataset_1e2a9024-ad29-4d18-b95d-645817393383.dat\n[INFO] Cleaning up: /tmp/tmpfpd6tei1/tmp/maystro_merge_xd06s1et\nDEBUB\n{\n \"seq_from_db_param\": {\n \"db_uri\": \"postgresql://postgres:RK17@localhost:5432/test_fragments_db\",\n \"table\": \"sample\",\n \"fragment_column\": \"fragment\",\n 
\"sequence_column\": \"sequence\",\n \"annotation_column\": \"annotation\"\n },\n \"seq_to_db_param\": {\n \"execution\": true,\n \"db_uri\": \"postgresql://postgres:RK17@localhost:5432/test_fragments_db\",\n \"table\": \"sample\",\n \"fragment_column\": \"fragment\",\n \"sequence_column\": \"sequence\",\n \"annotation_column\": \"annotation\"\n }\n}", + "tool_id": "parameters_maystro", + "tool_stderr": "", + "tool_stdout": "[INFO] Watching temp dir for new JSONs: /tmp/tmpfpd6tei1/tmp/maystro_merge_xd06s1et\n[INFO] Initial JSONs from workflow: ['/tmp/tmpfpd6tei1/files/2/f/4/dataset_2f4ca6ae-77d6-4bfe-8e39-0051c294e4f6.dat', '/tmp/tmpfpd6tei1/files/9/6/0/dataset_960eac8e-fb66-4982-8009-4896c9e57b38.dat']\n[INFO] Added data under key: seq_from_db_param\n[INFO] Added data under key: seq_to_db_param\n[INFO] Merged JSON written to: /tmp/tmpfpd6tei1/job_working_directory/000/3/outputs/dataset_1e2a9024-ad29-4d18-b95d-645817393383.dat\n[INFO] Cleaning up: /tmp/tmpfpd6tei1/tmp/maystro_merge_xd06s1et\nDEBUB\n{\n \"seq_from_db_param\": {\n \"db_uri\": \"postgresql://postgres:RK17@localhost:5432/test_fragments_db\",\n \"table\": \"sample\",\n \"fragment_column\": \"fragment\",\n \"sequence_column\": \"sequence\",\n \"annotation_column\": \"annotation\"\n },\n \"seq_to_db_param\": {\n \"execution\": true,\n \"db_uri\": \"postgresql://postgres:RK17@localhost:5432/test_fragments_db\",\n \"table\": \"sample\",\n \"fragment_column\": \"fragment\",\n \"sequence_column\": \"sequence\",\n \"annotation_column\": \"annotation\"\n }\n}", + "update_time": "2025-07-11T09:42:45.494563", + "user_email": "planemo@galaxyproject.org", + "user_id": "051ed7ff50845d8c" + }, + "status": "success", + "test_index": 0, + "time_seconds": 16.301273107528687, + "tool_id": "parameters_maystro", + "tool_version": "0.1.0+galaxy1" + }, + "has_data": true, + "id": "parameters_maystro-0" + }, + { + "data": { + "inputs": { + "distribute_json": true, + "json_from_user": { + "id": "10ea9be91661a5fd", + "src": "hda" + }, + "json_from_workflow": [ + { + "id": "b7e19a053756efb9", + "src": "hda" + }, + { + "id": "45e8a4dfb17d83f9", + "src": "hda" + } + ] + }, + "job": { + "command_line": "python '/home/rkhaled/galaxytools/tools/parameters_maystro/maystro.py' --distribute_json 'true' --json_from_workflow \"/tmp/tmpfpd6tei1/files/c/1/e/dataset_c1e98b32-965c-4a0d-9c7f-26973a7e0fe6.dat\" \"/tmp/tmpfpd6tei1/files/3/a/5/dataset_3a560c20-0d3f-4aae-abd4-4a460698f28f.dat\" --json_from_user '/tmp/tmpfpd6tei1/files/0/a/b/dataset_0abf65a6-bd5b-47fc-94ba-a6a97fa6d88b.dat' --json_name_mapping /tmp/tmpfpd6tei1/files/c/1/e/dataset_c1e98b32-965c-4a0d-9c7f-26973a7e0fe6.dat:seq_from_db_param.json,/tmp/tmpfpd6tei1/files/3/a/5/dataset_3a560c20-0d3f-4aae-abd4-4a460698f28f.dat:seq_to_db_param.json --output_workflow '/tmp/tmpfpd6tei1/job_working_directory/000/7/outputs/dataset_5254084b-4d71-4098-b933-f73a9e7d5a28.dat' --output_user '/tmp/tmpfpd6tei1/job_working_directory/000/7/outputs/dataset_04c60445-2ff4-4b59-bcbc-75f08b68625a.dat' && echo 'DEBUB' && cat '/tmp/tmpfpd6tei1/job_working_directory/000/7/outputs/dataset_04c60445-2ff4-4b59-bcbc-75f08b68625a.dat'", + "command_version": "", + "copied_from_job_id": null, + "create_time": "2025-07-11T09:42:56.883885", + "dependencies": [], + "exit_code": 0, + "external_id": "79598", + "galaxy_version": "25.0", + "handler": null, + "history_id": "25656ddfec77a3b3", + "id": "10ea9be91661a5fd", + "inputs": { + "json_from_user": { + "id": "10ea9be91661a5fd", + "src": "hda", + "uuid": "0abf65a6-bd5b-47fc-94ba-a6a97fa6d88b" + }, 
+ "json_from_workflow": { + "id": "b7e19a053756efb9", + "src": "hda", + "uuid": "c1e98b32-965c-4a0d-9c7f-26973a7e0fe6" + }, + "json_from_workflow1": { + "id": "b7e19a053756efb9", + "src": "hda", + "uuid": "c1e98b32-965c-4a0d-9c7f-26973a7e0fe6" + }, + "json_from_workflow2": { + "id": "45e8a4dfb17d83f9", + "src": "hda", + "uuid": "3a560c20-0d3f-4aae-abd4-4a460698f28f" + } + }, + "job_messages": [], + "job_metrics": [], + "job_runner_name": null, + "job_stderr": "", + "job_stdout": "", + "model_class": "Job", + "output_collections": {}, + "outputs": { + "output_user": { + "id": "295d652904818249", + "src": "hda", + "uuid": "04c60445-2ff4-4b59-bcbc-75f08b68625a" + }, + "output_workflow": { + "id": "8c61ed432685cfe0", + "src": "hda", + "uuid": "5254084b-4d71-4098-b933-f73a9e7d5a28" + } + }, + "params": { + "__input_ext": "\"input\"", + "chromInfo": "\"/tmp/tmpfpd6tei1/galaxy-dev/tool-data/shared/ucsc/chrom/?.len\"", + "dbkey": "\"?\"", + "distribute_json": "true" + }, + "state": "ok", + "stderr": "", + "stdout": "DEBUB\n", + "tool_id": "parameters_maystro", + "tool_stderr": "", + "tool_stdout": "DEBUB\n", + "update_time": "2025-07-11T09:43:02.149865", + "user_email": "planemo@galaxyproject.org", + "user_id": "051ed7ff50845d8c" + }, + "status": "success", + "test_index": 1, + "time_seconds": 16.54760479927063, + "tool_id": "parameters_maystro", + "tool_version": "0.1.0+galaxy1" + }, + "has_data": true, + "id": "parameters_maystro-1" + } + ], + "version": "0.1" +} \ No newline at end of file diff -r 000000000000 -r 1a766d8d3883 test-data/merged_test.json --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/merged_test.json Fri Jul 11 10:10:16 2025 +0000 @@ -0,0 +1,17 @@ +{ + "seq_from_db_param": { + "db_uri": "postgresql://postgres:RK17@localhost:5432/test_fragments_db", + "table": "sample", + "fragment_column": "fragment", + "sequence_column": "sequence", + "annotation_column": "annotation" + }, + "seq_to_db_param": { + "execution": true, + "db_uri": "postgresql://postgres:RK17@localhost:5432/test_fragments_db", + "table": "sample", + "fragment_column": "fragment", + "sequence_column": "sequence", + "annotation_column": "annotation" + } +} \ No newline at end of file diff -r 000000000000 -r 1a766d8d3883 test-data/seq_from_db_param.json --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/seq_from_db_param.json Fri Jul 11 10:10:16 2025 +0000 @@ -0,0 +1,7 @@ +{ + "db_uri": "postgresql://postgres:RK17@localhost:5432/test_fragments_db", + "table": "sample", + "fragment_column": "fragment", + "sequence_column": "sequence", + "annotation_column": "annotation" +} \ No newline at end of file diff -r 000000000000 -r 1a766d8d3883 test-data/seq_to_db_param.json --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/seq_to_db_param.json Fri Jul 11 10:10:16 2025 +0000 @@ -0,0 +1,8 @@ +{ + "execution": true, + "db_uri": "postgresql://postgres:RK17@localhost:5432/test_fragments_db", + "table": "sample", + "fragment_column": "fragment", + "sequence_column": "sequence", + "annotation_column": "annotation" +} \ No newline at end of file diff -r 000000000000 -r 1a766d8d3883 test-data/testoutput_workflow.json --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/testoutput_workflow.json Fri Jul 11 10:10:16 2025 +0000 @@ -0,0 +1,17 @@ +{ + "seq_from_db_param": { + "db_uri": "postgresql://postgres:RK17@localhost:5432/test_fragments_db", + "table": "sample", + "fragment_column": "fragment", + "sequence_column": "sequence", + "annotation_column": "annotation" + }, + "seq_to_db_param": { + 
"execution": true, + "db_uri": "postgresql://postgres:RK17@localhost:5432/test_fragments_db", + "table": "sample", + "fragment_column": "fragment", + "sequence_column": "sequence", + "annotation_column": "annotation" + } +} \ No newline at end of file