changeset 11:2bb0d8ca1710 draft

"planemo upload for repository https://github.com/brsynth/synbiocad-galaxy-wrappers commit 47caed1dd87e80ae226fabb584e9d63d7c86a436-dirty"
author ggricourt
date Thu, 24 Feb 2022 13:01:50 +0000
parents a9f72fd191b5
children e339b8d84de0
files data_manager/bigg_model_sbml_fetcher.py
diffstat 1 files changed, 19 insertions(+), 17 deletions(-)
line diff
--- a/data_manager/bigg_model_sbml_fetcher.py	Thu Feb 24 12:05:43 2022 +0000
+++ b/data_manager/bigg_model_sbml_fetcher.py	Thu Feb 24 13:01:50 2022 +0000
@@ -40,28 +40,20 @@
     return data
 
 
-def get_model_organism(model_id):
-    data = url_json(MODEL_DETAIL_URL + model_id)
-    org = data.get("organism", "")
-    if org is None:
-        org = ""
-    res = "%s - %s" % (model_id, org)
-    return res
-
-
-def download_entries(model_ids, workdir):
-    for model_id in model_ids:
+def download_entries(model_ids, id2org, workdir):
+    for ix, model_id in enumerate(model_ids):
         model_filename = model_id + ".xml"
         path = os.path.abspath(os.path.join(workdir, model_filename))
 
         url_download(MODEL_URL + model_filename, path)
         data_manager_entry = {}
         data_manager_entry["value"] = model_id
-        data_manager_entry["name"] = get_model_organism(model_id)
+        data_manager_entry["name"] = id2org[model_id]
         data_manager_entry["path"] = path
 
         # Make sure fewer than 10 requests per second are sent, as required by the host (http://bigg.ucsd.edu/data_access)
-        time.sleep(1)
+        if ix % 5 == 0:
+            time.sleep(1)
         yield data_manager_entry
 
 
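Below is a minimal standalone sketch of the throttled download loop introduced in the hunk above, not the wrapper's actual module: MODEL_URL is assumed to point at BiGG's static model directory, and urllib.request.urlretrieve stands in for the script's url_download() helper.

import os
import time
import urllib.request

# Assumed location of the static SBML files; the real constant lives in the script.
MODEL_URL = "http://bigg.ucsd.edu/static/models/"


def download_entries(model_ids, id2org, workdir):
    """Yield one data-manager entry per model while throttling requests."""
    for ix, model_id in enumerate(model_ids):
        model_filename = model_id + ".xml"
        path = os.path.abspath(os.path.join(workdir, model_filename))

        # Stand-in for the wrapper's url_download() helper.
        urllib.request.urlretrieve(MODEL_URL + model_filename, path)

        # Pause after every fifth request to stay well under the
        # 10 requests/second limit documented at http://bigg.ucsd.edu/data_access.
        if ix % 5 == 0:
            time.sleep(1)

        yield {"value": model_id, "name": id2org[model_id], "path": path}

Sleeping once per five downloads keeps the average rate at or below five requests per second while avoiding the fixed one-second cost the old code paid on every model.
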
@@ -81,15 +73,25 @@
     workdir = params["output_data"][0]["extra_files_path"]
     os.makedirs(workdir)
 
+    # Load models and models metadata.
+    models = url_json(MODEL_DETAIL_URL)
+    id2org = {}
+    for result in models.get("results", []):
+        ident = result["bigg_id"]
+        ident = ident.replace(" ", "")
+        id2org[ident] = result["organism"]
+
+    # Select model_ids.
     model_ids = []
     if args.model_id:
+        if args.model_id not in id2org:
+            sys.exit("Model id: %s is not available" % (args.model_id,))
         model_ids.append(args.model_id)
     else:
-        data = url_json(MODEL_DETAIL_URL)
-        for result in data.get("results", []):
-            model_ids.append(result.get("bigg_id"))
+        model_ids.extend(list(id2org.keys()))
 
-    entries = list(download_entries(model_ids, workdir))
+    # Download.
+    entries = list(download_entries(model_ids, id2org, workdir))
 
     # Write data.
     data_manager_json["data_tables"]["bigg_model_sbml"] = entries
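
For reference, the single-listing lookup that replaces the removed per-model get_model_organism() calls can be exercised on its own as sketched below; MODEL_DETAIL_URL is assumed to be the BiGG models API endpoint, url_json() is re-implemented here with urllib and json rather than imported from the wrapper, and the model id at the bottom is purely illustrative.

import json
import sys
import urllib.request

# Assumed endpoint for the BiGG models listing; the real constant lives in the script.
MODEL_DETAIL_URL = "http://bigg.ucsd.edu/api/v2/models/"


def url_json(url):
    """Fetch a URL and decode its JSON payload (stand-in for the wrapper's helper)."""
    with urllib.request.urlopen(url) as handle:
        return json.loads(handle.read().decode("utf-8"))


def build_id2org():
    """Map every BiGG model id to its organism with a single API call."""
    models = url_json(MODEL_DETAIL_URL)
    id2org = {}
    for result in models.get("results", []):
        ident = result["bigg_id"].replace(" ", "")
        id2org[ident] = result["organism"]
    return id2org


if __name__ == "__main__":
    id2org = build_id2org()
    requested = "e_coli_core"  # hypothetical model id, for illustration only
    if requested not in id2org:
        sys.exit("Model id: %s is not available" % (requested,))
    print("%s - %s" % (requested, id2org[requested]))

Fetching the listing once and validating the requested id against it trades the per-model detail request of get_model_organism() for a single call, and it makes the "not available" failure explicit before any download starts.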