diff data_manager/FROGS_data_manager.py @ 10:238a5328279d draft

planemo upload commit b0ebe74a020dcb21b79d8d39e7b6a2f6533b2fc4-dirty
author dchristiany
date Mon, 28 Oct 2019 06:46:53 -0400
parents 0d9cb5c5aa35
children 0cc5f020640e
line wrap: on
line diff
--- a/data_manager/FROGS_data_manager.py	Mon Oct 07 10:56:23 2019 -0400
+++ b/data_manager/FROGS_data_manager.py	Mon Oct 28 06:46:53 2019 -0400
@@ -5,19 +5,61 @@
 def get_args():
     parser = argparse.ArgumentParser()
     parser.add_argument("-d","--database")
-    parser.add_argument("--custom_db")
+    parser.add_argument("--all_dbs")
+    parser.add_argument("--date")
     parser.add_argument("--amplicons")
+    parser.add_argument("--bases")
+    parser.add_argument("--filters")
+    parser.add_argument("--only_last_versions")
+    parser.add_argument("--tool_data")
     parser.add_argument("-o","--output")
     args = parser.parse_args()
     return args
 
def build_last_version_dict(db_index):
    """Map each database id to its most recent release date.

    ``db_index`` rows are lists where column 0 is a date stamp (string of
    digits, e.g. "20191028") and column 5 is the database id.  Returns a
    dict ``{base_id: latest_date_as_int}``; on equal dates the first row
    seen wins (same as a strict ``>`` comparison).
    """
    latest = {}
    for row in db_index:
        stamp = int(row[0])
        key = row[5]
        if key not in latest or stamp > latest[key]:
            latest[key] = stamp
    return latest
+
 def _add_data_table_entry(data_manager_dict, data_table_entry,data_table):
     data_manager_dict['data_tables'] = data_manager_dict.get('data_tables', {})
     data_manager_dict['data_tables'][data_table] = data_manager_dict['data_tables'].get(data_table, [])
     data_manager_dict['data_tables'][data_table].append(data_table_entry)
     return data_manager_dict
 
-def frogs_sources(data_manager_dict,target_directory,amplicons_list):
def keep_only_last_version(db_index):
    """Collapse duplicated databases down to their most recent release.

    ``db_index`` rows are lists where column 4 is the version string and
    column 5 is the full id ``"<base>_<version>"``.  Rows whose base
    appears only once pass through unchanged (original order); for each
    base with several versions, only the row whose version sorts last is
    appended.

    NOTE(review): versions are compared lexicographically via
    ``sorted(...)[-1]`` — "9.0" would beat "10.0".  Kept as-is from the
    original implementation; confirm version numbering scheme before
    changing.

    Fixes vs. original: removed a stray debug ``print(line)`` and hoisted
    the repeated base-id computation into a helper.
    """
    def _base(row):
        # strip the trailing "_<version>" suffix from the full db id
        return "_".join(row[5].split("_")[:-1])

    bases = [_base(row) for row in db_index]
    duplicated = list(set(b for b in bases if bases.count(b) > 1))
    # single-version databases are kept untouched
    out = [row for row in db_index if _base(row) not in duplicated]
    for base in duplicated:
        versions = [row[4] for row in db_index if _base(row) == base]
        wanted = base + "_" + sorted(versions)[-1]
        for row in db_index:
            if row[5] == wanted:
                out.append(row)
                break
    return out
+
+def frogs_sources(data_manager_dict,target_directory):
+
+    #variables
+    amplicons_list=[]
+    bases_list=[]
+    filters_list=[]
+    if  args.all_dbs=="false": 
+        amplicons_list = [amplicon.lower().strip() for amplicon in args.amplicons.split(",") if amplicon != ""]
+        bases_list = [base.lower().strip() for base in args.bases.split(",") if base != ""]
+        filters_list = [filter.lower().strip() for filter in args.filters.split(",") if filter!=""]
+        bottom_date = int(args.date)
+    tool_data_path=args.tool_data
 
     #get frogs database index
     frogs_db_index_link="http://genoweb.toulouse.inra.fr/frogs_databanks/assignation/FROGS_databases.tsv"
@@ -26,11 +68,16 @@
         decoded_content = download.content.decode('utf-8')
         db_index = download.content.splitlines()    
         db_index = [line.split("\t") for line in db_index[1:]]
-        db_index = [line[:4]+[line[1]+"_"+line[2]+"_"+line[3]]+[line[4]] for line in db_index]  #add column name
+        db_index = [[line[0],line[1].lower(),line[2].lower(),line[3].lower()]+line[4:] for line in db_index]
 
-    #filter amplicons
-    if len(amplicons_list)!=0:
-        db_index = [line for line in db_index if line[4] in amplicons_list]
+    #filter databases
+    last_version_dict=build_last_version_dict(db_index)
+    if args.all_dbs=="false":
+        if len(amplicons_list)!=0: db_index = [line for line in db_index if any([amplicon in amplicons_list for amplicon in line[1].split(',')])]   #filter by amplicons
+        if len(bases_list)!=0: db_index = [line for line in db_index if line[2] in bases_list]                                                      #filter by base
+        if len(filters_list)!=0: db_index = [line for line in db_index if line[3] in filters_list]                                                  #filter by filters
+    if bottom_date!=0: db_index = [line for line in db_index if int(line[0])>=bottom_date]                                                          #filter by date      
+        db_index = keep_only_last_version(db_index)                                                          #keep only last version
 
     #get frogs dbs
     os.chdir(target_directory)
@@ -38,33 +85,36 @@
     os.mkdir(dir_name)
     dbs=set([])
     for line in db_index:
-        value=line[4]
+        value=line[5]
         name=value.replace("_"," ")
-        link=line[5]
+        link=line[6]
+        name_dir="".join([line[6].replace(".tar.gz","").split("/")[-1]])
+        file_path=tool_data_path+"/frogs_db/"+name_dir
+        if not os.path.exists(file_path):   #if the file is not already in frogs_db directory
+            
+            #download frogs db
+            dl_file = urllib.URLopener()
+            dl_file.retrieve(link, "tmp.tar.gz")
+            
+            #unzip frogs db
+            with tarfile.open("tmp.tar.gz") as tar:
+                tar.extractall(dir_name)
+                tar.close()
+                os.remove('tmp.tar.gz')
+            
+            #get fasta file path
+            tmp = set(os.listdir(dir_name))
+            new_db = dir_name+"/"+"".join(tmp.difference(dbs))
+            files = os.listdir(new_db)
+            fasta = "".join([file for file in files if file.endswith('.fasta')])
+            path = new_db+'/'+fasta
+            dbs = os.listdir(dir_name)
+            release = value+"_"+time.strftime("%Y-%m-%d")
+            date=time.strftime("%Y%m%d")
+            path = os.path.join(target_directory,path)
 
-        #download frogs db
-        dl_file = urllib.URLopener()
-        dl_file.retrieve(link, "tmp.tar.gz")
-        
-        #unzip frogs db
-        with tarfile.open("tmp.tar.gz") as tar:
-            tar.extractall(dir_name)
-            tar.close()
-            os.remove('tmp.tar.gz')
-        
-        #get fasta file path
-        tmp = set(os.listdir(dir_name))
-        new_db = dir_name+"/"+"".join(tmp.difference(dbs))
-        files = os.listdir(new_db)
-        fasta = "".join([file for file in files if file.endswith('.fasta')])
-        path = new_db+'/'+fasta
-        dbs = os.listdir(dir_name)
-        release = value+"_"+time.strftime("%Y-%m-%d")
-        date=time.strftime("%Y%m%d")
-        path = os.path.join(target_directory,path)
-
-        data_table_entry = dict(name = name, value = value, path=path)
-        _add_data_table_entry(data_manager_dict, data_table_entry, "frogs_db")
+            data_table_entry = dict(name = name, value = value, path=path)
+            _add_data_table_entry(data_manager_dict, data_table_entry, "frogs_db")
 
 def HVL_sources(data_manager_dict,target_directory):
 
@@ -89,11 +139,8 @@
 def main():
 
     #get args from command line
+    global args
     args = get_args()
-    if args.database=="frogs_db_data" and args.custom_db=="true":
-        amplicons_list = args.amplicons.split(",")
-    else :
-        amplicons_list = []
 
     # Extract json file params
     data_manager_dict = {}
@@ -103,7 +150,7 @@
     os.mkdir(target_directory)
 
     if args.database=="frogs_db_data":
-        frogs_sources(data_manager_dict,target_directory,amplicons_list)
+        frogs_sources(data_manager_dict,target_directory)
     elif args.database=="HVL_db_data":
         HVL_sources(data_manager_dict,target_directory)