#!/usr/bin/env python

tool_description = """
Remove spurious events originating from errors in random sequence tags.

This script compares all events sharing the same coordinates. Among each group
of events the maximum number of PCR duplicates is determined. All events that
are supported by less than 10 percent of this maximum count (the fraction is
adjustable via --threshold) are removed.

By default output is written to stdout.

Input:
* bed6 file containing crosslinking events with score field set to number of PCR
  duplicates

Output:
* bed6 file with spurious crosslinking events removed, sorted by fields chrom,
  start, stop, strand

Example usage:
- remove spurious events from spurious.bed and write results to file cleaned.bed
  rm_spurious_events.py spurious.bed --outfile cleaned.bed
"""

epilog = """
Author: Daniel Maticzka
Copyright: 2015
License: Apache
Email: maticzkd@informatik.uni-freiburg.de
Status: Testing
"""

import argparse
import logging
from sys import stdout
import pandas as pd


class DefaultsRawDescriptionHelpFormatter(argparse.ArgumentDefaultsHelpFormatter,
                                          argparse.RawDescriptionHelpFormatter):
    # combines the behaviour of RawDescriptionHelpFormatter (keep description
    # formatting) with that of ArgumentDefaultsHelpFormatter (show defaults)
    pass

# avoid ugly python IOError when stdout output is piped into another program
# and then truncated (such as piping to head)
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)

# parse command line arguments
parser = argparse.ArgumentParser(description=tool_description,
                                 epilog=epilog,
                                 formatter_class=DefaultsRawDescriptionHelpFormatter)
# positional arguments
parser.add_argument(
    "events",
    help="Path to bed6 file containing crosslinking events.")
# optional arguments
parser.add_argument(
    "-o", "--outfile",
    help="Write results to this file.")
parser.add_argument(
    "-t", "--threshold",
    type=float,
    default=0.1,
    help="Threshold for spurious event removal, given as a fraction of the "
         "maximum PCR duplicate count per coordinate group.")
# misc arguments
parser.add_argument(
    "-v", "--verbose",
    help="Be verbose.",
    action="store_true")
parser.add_argument(
    "-d", "--debug",
    help="Print lots of debugging information.",
    action="store_true")
parser.add_argument(
    '--version',
    action='version',
    version='0.1.0')

args = parser.parse_args()

if args.debug:
    logging.basicConfig(level=logging.DEBUG, format="%(asctime)s - %(filename)s - %(levelname)s - %(message)s")
elif args.verbose:
    logging.basicConfig(level=logging.INFO, format="%(filename)s - %(levelname)s - %(message)s")
else:
    logging.basicConfig(format="%(filename)s - %(levelname)s - %(message)s")
logging.info("Parsed arguments:")
logging.info("  events: '{}'".format(args.events))
logging.info("  threshold: '{}'".format(args.threshold))
if args.outfile:
    logging.info("  outfile: enabled writing to file")
    logging.info("  outfile: '{}'".format(args.outfile))
logging.info("")

# check threshold parameter value
if args.threshold < 0 or args.threshold > 1:
    raise ValueError("Threshold must be in [0,1].")

# load alignments
alns = pd.read_csv(
    args.events,
    sep="\t",
    names=["chrom", "start", "stop", "read_id", "score", "strand"])
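
# Each parsed row corresponds to one bed6 line, for example (illustrative
# values; the score column holds the number of PCR duplicates supporting the
# event):
#   chr1    1000    1001    some_read_id    12    +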

# remove all alignments that do not have enough PCR duplicates with respect to
# the group maximum
grouped = alns.groupby(['chrom', 'start', 'stop', 'strand'], group_keys=False)
alns_cleaned = grouped.apply(lambda g: g[g["score"] >= args.threshold * g["score"].max()])
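
# Note: an equivalent vectorized formulation (a sketch, not used here) computes
# the per-group maximum with groupby/transform and filters in a single pass; a
# final sort_values() would be needed to reproduce the coordinate-sorted output
# produced by the groupby/apply above:
#   group_max = alns.groupby(['chrom', 'start', 'stop', 'strand'])['score'].transform('max')
#   alns_cleaned = alns[alns['score'] >= args.threshold * group_max].sort_values(
#       ['chrom', 'start', 'stop', 'strand'])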

# write coordinates of crosslinking event alignments
alns_cleaned_out = (open(args.outfile, "w") if args.outfile is not None else stdout)
alns_cleaned.to_csv(
    alns_cleaned_out,
    columns=['chrom', 'start', 'stop', 'read_id', 'score', 'strand'],
    sep="\t", index=False, header=False)
# only close the handle when writing to a file; stdout is left open
if args.outfile is not None:
    alns_cleaned_out.close()