comparison galaxy-tools/biobank/tools/check_merge_individuals.py @ 0:ba6cf6ede027 draft default tip

Uploaded
author ric
date Wed, 28 Sep 2016 06:03:30 -0400
parents
children
comparison
equal deleted inserted replaced
-1:000000000000 0:ba6cf6ede027
1 import sys, csv, argparse, os
2 from collections import Counter
3
4 from bl.vl.kb import KnowledgeBase as KB
5 import bl.vl.utils.ome_utils as vlu
6 from bl.vl.utils import LOG_LEVELS, get_logger
7
8
def make_parser():
    """Build the command-line parser for this tool."""
    parser = argparse.ArgumentParser(description='check data that will be passed to the merge_individuals tool')
    # (flags, keyword arguments) pairs, one entry per command-line option
    option_specs = [
        (('--logfile',), dict(type=str, help='log file (default=stderr)')),
        (('--loglevel',), dict(type=str, choices=LOG_LEVELS,
                               help='logging level (default=INFO)', default='INFO')),
        (('-H', '--host'), dict(type=str, help='omero hostname')),
        (('-U', '--user'), dict(type=str, help='omero user')),
        (('-P', '--passwd'), dict(type=str, help='omero password')),
        (('--in_file',), dict(type=str, required=True,
                              help='input file')),
        (('--out_file',), dict(type=str, required=True,
                               help='output file')),
    ]
    for flags, kwargs in option_specs:
        parser.add_argument(*flags, **kwargs)
    return parser
22
23
def get_invalid_vids(records, logger):
    """Return the set of IDs that must not take part in a merge.

    An ID is invalid when it appears more than once in the 'source'
    column, or when it appears in both the 'source' and 'target'
    columns (it would be both consumed and kept by the merge).

    :param records: list of dicts, each with 'source' and 'target' keys
    :param logger: logger used to report every invalid ID
    :return: set of invalid ID strings
    """
    invalid_vids = set()
    # .get() keeps this working on an empty input file (no KeyError)
    sources = [rec.get('source') for rec in records]
    targets = [rec.get('target') for rec in records]
    # Check for duplicated sources (Counter counts in one pass;
    # .items() is Python 2/3 compatible, unlike the old iteritems())
    for vid, count in Counter(sources).items():
        if count > 1:
            logger.error('ID %s appears %d times as source, this ID has been marked as invalid' % (vid, count))
            invalid_vids.add(vid)
    # Check for VIDs that appear both in 'source' and 'target' fields
    for vid in set(sources).intersection(targets):
        logger.error('ID %s appears both in \'source\' and \'target\' columns, this ID has been marked as invalid' % vid)
        invalid_vids.add(vid)
    return invalid_vids
48
49
def check_row(row, individuals, logger):
    """Check that a row's 'source' and 'target' IDs map to known individuals.

    :param row: dict with 'source' and 'target' keys holding IDs
    :param individuals: mapping from ID to Individual object
    :param logger: logger used for diagnostics
    :return: True if both IDs are known, False otherwise
    """
    try:
        source = individuals[row['source']]
        logger.debug('%s is a valid Individual ID' % source.id)
        target = individuals[row['target']]
        logger.debug('%s is a valid Individual ID' % target.id)
        return True
    except KeyError as ke:
        # 'as' form replaces the Python-2-only "except KeyError, ke"
        logger.error('%s is not a valid Individual ID' % ke)
        return False
60
61
def main(argv):
    """Validate a merge-individuals input file.

    Loads every Individual from the knowledge base, then copies to the
    output file only the rows whose 'source' and 'target' IDs exist and
    were not flagged as invalid by get_invalid_vids().

    :param argv: command line arguments (without the program name)
    """
    parser = make_parser()
    args = parser.parse_args(argv)

    logger = get_logger('check_merge_individuals', level=args.loglevel,
                        filename=args.logfile)

    try:
        # Fall back to the omero environment configuration for any
        # credential not given on the command line
        host = args.host or vlu.ome_host()
        user = args.user or vlu.ome_user()
        passwd = args.passwd or vlu.ome_passwd()
    except ValueError as ve:
        # 'as' form replaces the Python-2-only "except ValueError, ve"
        logger.critical(ve)
        sys.exit(ve)

    kb = KB(driver='omero')(host, user, passwd)

    logger.info('Preloading all individuals')
    inds = kb.get_objects(kb.Individual)
    logger.info('Loaded %d individuals' % len(inds))
    # Index individuals by ID for O(1) lookup in check_row
    inds_map = dict((i.id, i) for i in inds)

    with open(args.in_file) as infile, open(args.out_file, 'w') as outfile:
        reader = csv.DictReader(infile, delimiter='\t')
        records = [row for row in reader]
        invalid_vids = get_invalid_vids(records, logger)

        writer = csv.DictWriter(outfile, reader.fieldnames, delimiter='\t')
        writer.writeheader()

        for record in records:
            if record['source'] in invalid_vids or record['target'] in invalid_vids:
                logger.error('Skipping record %r because at least one ID was marked as invalid' % record)
            elif check_row(record, inds_map, logger):
                writer.writerow(record)
                logger.debug('Record %r written in output file' % record)
101
102
if __name__ == '__main__':
    # Drop the program name; argparse sees only the tool's options
    main(sys.argv[1:])