demultiplex.py @ 0:da4101033e10 draft default tip

planemo upload
author: oinizan
date: Wed, 18 Oct 2017 05:30:40 -0400
#!/usr/bin/env python2.7
#
# Copyright (C) 2014 INRA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

__author__ = 'Plateforme bioinformatique Toulouse and SIGENAE'
__copyright__ = 'Copyright (C) 2015 INRA'
__license__ = 'GNU General Public License'
__version__ = '1.2.0'
__email__ = 'frogs@inra.fr'
__status__ = 'prod'

import os
import sys
import gzip
import time
import tarfile
import argparse

CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
# PATH
BIN_DIR = os.path.abspath(os.path.join(os.path.dirname(CURRENT_DIR), "libexec"))
os.environ['PATH'] = BIN_DIR + os.pathsep + os.environ['PATH']
# PYTHONPATH
LIB_DIR = os.path.abspath(os.path.join(os.path.dirname(CURRENT_DIR), "lib"))
sys.path.append(LIB_DIR)
if os.getenv('PYTHONPATH') is None: os.environ['PYTHONPATH'] = LIB_DIR
else: os.environ['PYTHONPATH'] = os.environ['PYTHONPATH'] + os.pathsep + LIB_DIR

from frogsUtils import *


##################################################################################################################################################
#
# COMMAND LINES
#
##################################################################################################################################################
class Demultiplex(Cmd):
    """
    @summary : Demultiplex samples.
    """
    def __init__(self, R1_input_file, R2_input_file, barcode_file, mismatches, end, global_tmp_files, R1_output_files, R2_output_files, demultiplex_err_files1, demultiplex_err_files2, demultiplex_log):
        """
        @param R1_input_file : [str] Path to the R1 fastq file.
        @param R2_input_file : [str] Path to the R2 fastq file.
        @param barcode_file : [str] Path to the barcode and sample description file (one line per sample). Line format : SAMPLE_NAME<TAB>BARCODE.
        @param mismatches : [int] Number of mismatches allowed in the barcode.
        @param end : [str] Barcode location: 'bol' for the forward strand or 'eol' for the reverse strand (default: bol).
        @param global_tmp_files : [TmpFiles] Temporary file manager used for the intermediate R1 and R2 files.
        @param R1_output_files : [list] Paths to the R1 fastq files (one per sample). The caller provides an empty list.
        @param R2_output_files : [list] Paths to the R2 fastq files (one per sample). The caller provides an empty list.
        @param demultiplex_err_files1 : [list] Paths to the R1 files with ambiguous and unmatched reads. The caller provides an empty list.
        @param demultiplex_err_files2 : [list] Paths to the R2 files with ambiguous and unmatched reads. The caller provides an empty list.
        @param demultiplex_log : [str] Path to the file where the demultiplexing command output is appended.
        """

        tmp_files = TmpFiles( global_tmp_files.tmp_dir )

        tmp_folder = os.path.join( global_tmp_files.tmp_dir, global_tmp_files.prefix + "_tmp", tmp_files.prefix )
        global_tmp_files.dirs.append(tmp_folder)
        if not os.path.exists(tmp_folder):
            os.mkdir(tmp_folder)
        self.samples_names = list()
        # Update output data
        FH_barcode = open( barcode_file )
        for line in FH_barcode:
            sample_name, barcode = line.strip().rsplit(None, 1)
            R1_output_files.append( os.path.join(tmp_folder, sample_name + '_R1.fastq') )
            global_tmp_files.files.append( os.path.join(tmp_folder, sample_name + '_R1.fastq') )
            if R2_input_file is not None:
                R2_output_files.append( os.path.join(tmp_folder, sample_name + '_R2.fastq') )
                global_tmp_files.files.append( os.path.join(tmp_folder, sample_name + '_R2.fastq') )
            self.samples_names.append( sample_name.replace(' ', '_') )
        FH_barcode.close()
        self.R1_input_file = R1_input_file
        self.ambiguous = os.path.join(tmp_folder, 'ambiguous_R1.fastq')
        self.unmatched = os.path.join(tmp_folder, 'unmatched_R1.fastq')
        demultiplex_err_files1.extend( [self.ambiguous, self.unmatched] )
        global_tmp_files.files.extend( [self.ambiguous, self.unmatched] )
        if R2_input_file is not None:
            demultiplex_err_files2.extend( [os.path.join(tmp_folder, 'ambiguous_R2.fastq'), os.path.join(tmp_folder, 'unmatched_R2.fastq')] )
            global_tmp_files.files.extend( [os.path.join(tmp_folder, 'ambiguous_R2.fastq'), os.path.join(tmp_folder, 'unmatched_R2.fastq')] )

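        # For illustration only: with paired reads, the command built below is roughly
        #   perl $TOOL_DIRECTORY/splitbc.pl <R1> <R2> --<bol|eol> --bcfile <barcodes> --mismatches <n> \
        #        --trim --no_adapt --prefix-r1 <tmp>/%_R1.fastq --prefix-r2 <tmp>/%_R2.fastq >> <log>
        # where '%' is expected to be replaced by each sample name, producing the per-sample
        # output files registered above.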
        # Set class
        if R2_input_file is not None:
            Cmd.__init__( self,
                          'perl ' + os.environ['TOOL_DIRECTORY'] + '/splitbc.pl',
                          'Demultiplex reads.',
                          R1_input_file + ' ' + R2_input_file + ' --' + end + ' --bcfile ' + barcode_file + ' --mismatches ' + str(mismatches) + ' --trim --no_adapt --prefix-r1 ' + os.path.join(tmp_folder, '%_R1.fastq') +
                          ' --prefix-r2 ' + os.path.join(tmp_folder, '%_R2.fastq') + ' >> ' + demultiplex_log,
                          None )
        else:
            Cmd.__init__( self,
                          'perl ' + os.environ['TOOL_DIRECTORY'] + '/splitbc.pl',
                          'Demultiplex reads.',
                          R1_input_file + ' --' + end + ' --bcfile ' + barcode_file + ' --mismatches ' + str(mismatches) + ' --trim --no_adapt --prefix-r1 ' + os.path.join(tmp_folder, '%_R1.fastq') +
                          ' >> ' + demultiplex_log,
                          None )

    def parser(self, log_file):
        """
        @summary : Parse the command results to add information in log_file.
        @param log_file : [str] Path to the sample process log file.
        """
        # Parse output
        nb_seq_before = get_fastq_nb_seq(self.R1_input_file)
        nb_seq_unmatched = get_fastq_nb_seq(self.unmatched)
        nb_seq_ambiguous = get_fastq_nb_seq(self.ambiguous)
        # Write result
        FH_log = Logger( log_file )
        FH_log.write( 'Results :\n' )
        FH_log.write( '\tnb seq before demultiplexing : ' + str(nb_seq_before) + '\n' )
        FH_log.write( '\tnb seq after process matched : ' + str(nb_seq_before - nb_seq_unmatched) + '\n' )
        FH_log.write( '\tnb seq after process non-ambiguous : ' + str(nb_seq_before - nb_seq_unmatched - nb_seq_ambiguous) + '\n' )
        FH_log.close()

    def get_version(self):
        """
        @summary : Returns the program version number.
        @return : [str] The version number if available, otherwise 'unknown'.
        """
        return Cmd.get_version(self, 'stdout')


class Archive(Cmd):
    """
    @summary : Creates an archive with files.
    """
    def __init__(self, archived_files, archive_path):
        """
        @param archived_files : [list] Files added in the final archive.
        @param archive_path : [str] Path to the new archive.
        """

        tmp_files = TmpFiles( os.path.dirname(archive_path) )
        tmp_folder = os.path.join( tmp_files.tmp_dir, tmp_files.prefix )
        tmp_files.dirs.append(tmp_folder)
        if not os.path.exists(tmp_folder):
            os.makedirs(tmp_folder)

        if len(archived_files) == 0:
            raise Exception( "At least one file must be added to the archive '" + archive_path + "'." )

        # Move the files into the temporary folder so the archive only contains their basenames
        archived_basenames = list()
        for current in archived_files:
            if not os.path.dirname(current) == tmp_folder:
                os.rename(current, os.path.join(tmp_folder, os.path.basename(current)))
                tmp_files.files.append(os.path.join(tmp_folder, os.path.basename(current)))
            archived_basenames.append(os.path.basename(current))

        Cmd.__init__( self,
                      'tar',
                      'Archives files.',
                      '-zcf ' + archive_path + ' -C ' + tmp_folder + " " + " ".join(archived_basenames),
                      None )

        self.Files = tmp_files

    def parser(self, log_file):
        # Remove the temporary copies once the archive has been written
        self.Files.deleteAll()


##################################################################################################################################################
#
# FUNCTIONS
#
##################################################################################################################################################
def is_gzip( file ):
    """
    @return: [bool] True if the file is gzipped.
    @param file : [str] Path to the processed file.
    """
    is_gzip = None
    FH_input = gzip.open( file )
    try:
        FH_input.readline()
        is_gzip = True
    except:
        is_gzip = False
    finally:
        FH_input.close()
    return is_gzip
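# Note: gzip detection works by attempting to read one line through gzip.open();
# a plain-text file raises an IOError, which is caught and reported as not gzipped.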

def split_barcode_file( barcode_file, barcodes_file_list, global_tmp_files ):
    """
    @summary: In case of dual multiplexing, splits the barcode file into one forward barcode file and one reverse barcode file per forward barcode.
    @param barcode_file: [str] Path to the input barcode file.
    @param barcodes_file_list: [list] List of paths to the output barcode files. The caller provides an empty list.
    @param global_tmp_files: [TmpFiles] Temporary file manager; its tmp_dir is used as output directory for the barcode files.
    """
    out_dir = global_tmp_files.tmp_dir
    barcode_input = open(barcode_file, "r")
    barcode_dict = {}
    for l in barcode_input.readlines():
        [s, f, r] = l.strip().split()
        if not "forward_bc" in barcode_dict:
            barcode_dict["forward_bc"] = [f + "\t" + f]
        elif not f + "\t" + f in barcode_dict["forward_bc"]:
            barcode_dict["forward_bc"].append(f + "\t" + f)
        if not f + "_reverse_bc" in barcode_dict:
            barcode_dict[f + "_reverse_bc"] = [s + "\t" + r]
        else:
            barcode_dict[f + "_reverse_bc"].append(s + "\t" + r)

    f = barcode_dict.pop("forward_bc")
    barcodes_file_list.append(os.path.join(out_dir, "forward_bc"))
    global_tmp_files.files.append(os.path.join(out_dir, "forward_bc"))
    FH_out = open(os.path.join(out_dir, "forward_bc"), "w")
    FH_out.write("\n".join(f) + "\n")
    FH_out.close()

    for bc_file in barcode_dict:
        barcodes_file_list.append(os.path.join(out_dir, bc_file))
        global_tmp_files.files.append(os.path.join(out_dir, bc_file))
        FH_out = open(os.path.join(out_dir, bc_file), "w")
        FH_out.write("\n".join(barcode_dict[bc_file]) + "\n")
        FH_out.close()
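
# For illustration only (hypothetical samples and barcodes): a dual-index barcode file such as
#   splA<TAB>ACGT<TAB>TTGA
#   splB<TAB>ACGT<TAB>CCAT
#   splC<TAB>GTCA<TAB>TTGA
# is split into 'forward_bc' (ACGT<TAB>ACGT, GTCA<TAB>GTCA) and one reverse barcode file per
# forward barcode ('ACGT_reverse_bc' listing splA and splB, 'GTCA_reverse_bc' listing splC),
# which drive the two demultiplexing passes in the main section.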

def get_fastq_nb_seq( fastq_file ):
    """
    @summary: Returns the number of sequences in fastq_file.
    @param fastq_file: [str] Path to the processed fastq file.
    @return: [int] The number of sequences.
    """
    FH_input = None
    if not is_gzip(fastq_file):
        FH_input = open( fastq_file )
    else:
        FH_input = gzip.open( fastq_file )
    nb_line = 0
    for line in FH_input:
        nb_line += 1
    FH_input.close()
    nb_seq = nb_line // 4  # a FASTQ record spans 4 lines
    return nb_seq

def concat_files(list_input, output_file):
    """
    @summary: Concatenates the input files into output_file, writing by chunks of 2000 lines to limit memory usage.
    @param list_input: [list] Paths to the files to concatenate.
    @param output_file: [str] Path to the output file.
    """
    FH_out = open(output_file, "w")
    for f in list_input:
        FH_in = open(f)
        string = ""
        i = 0
        for line in FH_in:
            string += line
            i += 1
            if i == 2000:
                FH_out.write(string)
                string = ""
                i = 0
        if i != 0:
            FH_out.write(string)
        FH_in.close()
    FH_out.close()

def summarise_results( summary_file, barcode_file, log_file ):
    """
    @summary: Writes a summary of the demultiplexing counts per sample.
    @param summary_file: [str] Path to the output summary file (TSV).
    @param barcode_file: [str] Path to the barcode file, used to list the expected sample names.
    @param log_file: [str] Path to the demultiplexing log file.
    """
    sample_dict = dict()
    FH_barcode = open(barcode_file)
    for line in FH_barcode:
        sample_dict[line.split()[0]] = 0

    FH_summary = open(summary_file, "w")
    FH_summary.write( "#sample\tcount\n" )
    FH_log = open(log_file, "r")
    sample_dict["unmatched"] = 0
    sample_dict["ambiguous"] = 0

    for line in FH_log.readlines():
        if line.startswith("Barcode") or line.startswith("total"):
            pass
        else:
            l = line.replace('(', '\t').split()
            if l[0] in sample_dict:
                sample_dict[l[0]] += int(l[1])

    for s in sample_dict:
        FH_summary.write(s + '\t' + str(sample_dict[s]) + '\n')
    FH_summary.close()
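
# For illustration, the summary written above is tab-separated and looks like (counts are hypothetical):
#   #sample     count
#   splA        12500
#   splB        11874
#   unmatched   310
#   ambiguous   12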


##################################################################################################################################################
#
# MAIN
#
##################################################################################################################################################
if __name__ == "__main__":
    # Manage parameters
    parser = argparse.ArgumentParser(
        description='Split reads by sample according to their inner barcode.'
    )
    parser.add_argument('-m', '--mismatches', type=int, default=0, help="Number of mismatches allowed in the barcode. [Default: %(default)s]")
    parser.add_argument('-e', '--end', type=str, default="bol", help="The barcode is at the beginning of the forward read (bol), of the reverse read (eol) or of both reads (both). [Default: %(default)s]")
    parser.add_argument('--debug', default=False, action='store_true', help="Keep temporary files to debug the program.")
    parser.add_argument('-v', '--version', action='version', version=__version__)
    # Inputs
    group_input = parser.add_argument_group('Inputs')
    group_input.add_argument('--input-R1', required=True, help='The R1 sequence file with all samples (format: fastq).')
    group_input.add_argument('--input-R2', default=None, help='The R2 sequence file with all samples (format: fastq).')
    group_input.add_argument('--input-barcode', help='This file describes barcodes and samples (one line per sample). Line format : SAMPLE_NAME<TAB>BARCODE or SAMPLE_NAME<TAB>BARCODE_FW<TAB>BARCODE_RV.')
    # Outputs
    group_output = parser.add_argument_group('Outputs')
    group_output.add_argument('--output-demultiplexed', default="demultiplexed_read.tar.gz", help='The tar file containing the R1 and R2 files of each sample (format: tar). [Default: %(default)s]')
    group_output.add_argument('--output-excluded', default="undemultiplexed_read.tar.gz", help='The tar file containing the R1 and R2 files that could not be demultiplexed (format: tar). [Default: %(default)s]')
    group_output.add_argument('-s', '--summary', default='summary.tsv', help='TSV file with a summary of the demultiplexing results (format: TSV). [Default: %(default)s]')
    group_output.add_argument('-l', '--log-file', default=sys.stdout, help='This output file will contain information about the executed commands.')
    args = parser.parse_args()
    prevent_shell_injections(args)

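    # Example invocation (paths and values are illustrative):
    #   demultiplex.py --input-R1 run_R1.fastq --input-R2 run_R2.fastq \
    #                  --input-barcode barcodes.tsv --mismatches 1 --end bol \
    #                  --output-demultiplexed demultiplexed_read.tar.gz --summary summary.tsv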
    Logger.static_write(args.log_file, "## Application\nSoftware :" + sys.argv[0] + " (version : " + str(__version__) + ")\nCommand : " + " ".join(sys.argv) + "\n\n")

    # Process
    R1_files = list()
    R2_files = list()
    tmp_barcode_files = list()
    tmp_R1_files = list()
    tmp_R2_files = list()
    demultiplex_err_files1 = list()
    demultiplex_err_files2 = list()
    excluded_R1_file = os.path.join(os.path.split(args.output_demultiplexed)[0], os.path.basename(args.input_R1) + "_excluded_demult")
    if args.input_R2 is not None:
        excluded_R2_file = os.path.join(os.path.split(args.output_demultiplexed)[0], os.path.basename(args.input_R2) + "_excluded_demult")
    uniq_id = str(time.time()) + "_" + str(os.getpid())

    tmp_files = TmpFiles( os.path.split(args.output_demultiplexed)[0] )
    demultiplex_log = tmp_files.add("Demult.log")
    tmp_folder = tmp_files.add_dir("tmp")
    os.mkdir(tmp_folder)

    sample_list = []
    try:
        # Process
        if args.end == "bol" or args.end == "eol":
            # Simple multiplexing: a single demultiplexing pass on the requested read end
            info = "\n#Demultiplexing " + os.path.basename(args.input_R1)
            if args.input_R2 is not None:
                info += " and " + os.path.basename(args.input_R2)
            info += " with " + os.path.basename(args.input_barcode) + " in " + args.end + " strand\n"
            Logger.static_write(args.log_file, info)
            Demultiplex(args.input_R1, args.input_R2, args.input_barcode, args.mismatches, args.end, tmp_files, R1_files, R2_files, demultiplex_err_files1, demultiplex_err_files2, demultiplex_log).submit( args.log_file )
        else:
            # Dual multiplexing: a first pass on the forward barcodes (bol), then one pass per
            # forward barcode on the corresponding reverse barcodes (eol)
            split_barcode_file(args.input_barcode, tmp_barcode_files, tmp_files)
            info = "\n#Demultiplexing " + os.path.basename(args.input_R1)
            if args.input_R2 is not None:
                info += " and " + os.path.basename(args.input_R2)
            info += " with " + os.path.basename(tmp_barcode_files[0]) + " in bol strand\n"
            Logger.static_write(args.log_file, info)
            Demultiplex(args.input_R1, args.input_R2, tmp_barcode_files[0], args.mismatches, "bol", tmp_files, tmp_R1_files, tmp_R2_files, demultiplex_err_files1, demultiplex_err_files2, demultiplex_log).submit( args.log_file )
            for idx, read1_file in enumerate(tmp_R1_files):
                bc = os.path.basename(read1_file).replace("_R1.fastq", "")
                if os.path.join(tmp_files.tmp_dir, bc + "_reverse_bc") in tmp_barcode_files:
                    if os.stat(tmp_R1_files[idx]).st_size != 0:
                        info = "\n#Demultiplexing " + os.path.basename(tmp_R1_files[idx])
                        if args.input_R2 is not None:
                            info += " and " + os.path.basename(tmp_R2_files[idx])
                        info += " with " + bc + "_reverse_bc" + " in eol strand\n"
                        Logger.static_write(args.log_file, info)
                        if args.input_R2 is not None:
                            Demultiplex(tmp_R1_files[idx], tmp_R2_files[idx], os.path.join(tmp_files.tmp_dir, bc + "_reverse_bc"), args.mismatches, "eol", tmp_files, R1_files, R2_files, demultiplex_err_files1, demultiplex_err_files2, demultiplex_log).submit( args.log_file )
                        else:
                            Demultiplex(tmp_R1_files[idx], None, os.path.join(tmp_files.tmp_dir, bc + "_reverse_bc"), args.mismatches, "eol", tmp_files, R1_files, R2_files, demultiplex_err_files1, demultiplex_err_files2, demultiplex_log).submit( args.log_file )

        Logger.static_write(args.log_file, "\n#Summarising result\n")
        summarise_results( args.summary, args.input_barcode, demultiplex_log )

        Logger.static_write(args.log_file, "\n#Concatenation of undemultiplexed files 1\n")
        concat_files(demultiplex_err_files1, excluded_R1_file)
        if len(R2_files) > 0:
            Logger.static_write(args.log_file, "\n#Concatenation of undemultiplexed files 2\n")
            concat_files(demultiplex_err_files2, excluded_R2_file)
            Logger.static_write(args.log_file, "\n#Archive demultiplexed R1 and R2 files\n")
            Archive(R1_files + R2_files, args.output_demultiplexed).submit( args.log_file )
            Logger.static_write(args.log_file, "\n#Archive undemultiplexed R1 and R2 files\n")
            Archive([excluded_R1_file, excluded_R2_file], args.output_excluded).submit( args.log_file )
        else:
            Logger.static_write(args.log_file, "\n#Archive demultiplexed files\n")
            Archive(R1_files, args.output_demultiplexed).submit( args.log_file )
            Logger.static_write(args.log_file, "\n#Archive undemultiplexed files\n")
            Archive([excluded_R1_file], args.output_excluded).submit( args.log_file )

    # Remove temporary files
    finally:
        if not args.debug:
            Logger.static_write(args.log_file, "\n#Removing temporary files\n")
            tmp_files.deleteAll()