comparison: query_tabular.py @ 4:afdbc7198353 (draft)
planemo upload for repository https://github.com/jj-umn/galaxytools/tree/master/query_tabular commit 9ae87502ea7c3da33ecc453872c4eb2f41ecea4a-dirty
author   | jjohnson
date     | Thu, 21 Jan 2016 08:09:36 -0500
parents  | ffa5e34a55c1
children | aa2409ae9dc0
comparison
2:ffa5e34a55c1 | 4:afdbc7198353 |
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 """ | 2 """ |
3 """ | 3 """ |
4 import sys | 4 import sys |
5 import re | |
5 import os.path | 6 import os.path |
7 import json | |
6 import sqlite3 as sqlite | 8 import sqlite3 as sqlite |
7 import optparse | 9 import optparse |
8 from optparse import OptionParser | 10 from optparse import OptionParser |
9 | 11 |
10 """ | 12 """ |
11 TODO: | 13 TODO: |
12 - could add some transformations on tabular columns, e.g. a regex to format date/time strings | 14 - could read column names from comment lines, but issues with legal names |
15 - could add some transformations on tabular columns, | |
16 e.g. a regex to format date/time strings | |
17 c2 : re.sub('pat', 'sub', c2) | |
18 c3 : | |
19 - column_defs dict of columns to create from tabular input | |
20 column_defs : { 'name1' : 'expr', 'name2' : 'expr'} | |
13 - allow multiple queries and outputs | 21 - allow multiple queries and outputs |
14 - add a --json input for table definitions (or yaml) | 22 - add a --json input for table definitions (or yaml) |
15 JSON config: | 23 JSON config: |
16 { tables : [ | 24 { tables : [ |
17 { file_path : '/home/galaxy/dataset_101.dat', | 25 { file_path : '/home/galaxy/dataset_101.dat', |
18 table_name : 't1', | 26 table_name : 't1', |
19 column_names : ['c1','c2','c3'] | 27 column_names : ['c1', 'c2', 'c3'], |
28 comment_lines : 1 | |
20 }, | 29 }, |
21 { file_path : '/home/galaxy/dataset_102.dat', | 30 { file_path : '/home/galaxy/dataset_102.dat', |
22 table_name : 't2', | 31 table_name : 't2', |
23 column_names : ['c1','c2','c3'] | 32 column_names : ['c1', 'c2', 'c3'] |
33 }, | |
34 { file_path : '/home/galaxy/dataset_103.dat', | |
35 table_name : 'test', | |
36 column_names : ['c1', 'c2', 'c3'] | |
24 } | 37 } |
25 ] | 38 ] |
26 } | 39 } |
27 """ | 40 """ |
28 | 41 |
42 tables_query = \ | |
43 "SELECT name, sql FROM sqlite_master WHERE type='table' ORDER BY name" | |
44 | |
45 | |
29 def getValueType(val): | 46 def getValueType(val): |
30 if val or 0. == val: | 47 if val or 0. == val: |
48 try: | |
49 int(val) | |
50 return 'INTEGER' | |
51 except: | |
52 try: | |
53 float(val) | |
54 return 'REAL' | |
55 except: | |
56 return 'TEXT' | |
57 return None | |
58 | |
59 | |
60 def get_column_def(file_path, table_name, skip=0, comment_char='#', | |
61 column_names=None, max_lines=100): | |
62 col_pref = ['TEXT', 'REAL', 'INTEGER', None] | |
63 col_types = [] | |
64 data_lines = 0 | |
31 try: | 65 try: |
32 int(val) | 66 with open(file_path, "r") as fh: |
33 return 'INTEGER' | 67 for linenum, line in enumerate(fh): |
34 except: | 68 if linenum < skip: |
35 try: | 69 continue |
36 float(val) | 70 if line.startswith(comment_char): |
37 return 'REAL' | 71 continue |
38 except: | 72 data_lines += 1 |
39 return 'TEXT' | 73 try: |
40 return None | 74 fields = line.split('\t') |
41 | 75 while len(col_types) < len(fields): |
42 | 76 col_types.append(None) |
43 def get_column_def(file_path,table_name,skip=0,comment_char='#',column_names=None,max_lines=100): | 77 for i, val in enumerate(fields): |
44 col_pref = ['TEXT','REAL','INTEGER',None] | 78 colType = getValueType(val) |
45 col_types = [] | 79 if col_pref.index(colType) < col_pref.index(col_types[i]): |
46 data_lines = 0 | 80 col_types[i] = colType |
47 try: | 81 except Exception, e: |
48 with open(file_path,"r") as fh: | 82 print >> sys.stderr, 'Failed at line: %d err: %s' % (linenum, e) |
49 for linenum,line in enumerate(fh): | 83 except Exception, e: |
50 if linenum < skip: | 84 print >> sys.stderr, 'Failed: %s' % (e) |
51 continue | 85 for i, col_type in enumerate(col_types): |
52 if line.startswith(comment_char): | 86 if not col_type: |
53 continue | 87 col_types[i] = 'TEXT' |
54 data_lines += 1 | 88 col_names = ['c%d' % i for i in range(1, len(col_types) + 1)] |
55 try: | 89 if column_names: |
56 fields = line.split('\t') | 90 for i, cname in enumerate([cn.strip() for cn in column_names.split(',')]): |
57 while len(col_types) < len(fields): | 91 if cname and i < len(col_names): |
58 col_types.append(None) | 92 col_names[i] = cname |
59 for i,val in enumerate(fields): | 93 col_def = [] |
60 colType = getValueType(val) | 94 for i, col_name in enumerate(col_names): |
61 if col_pref.index(colType) < col_pref.index(col_types[i]): | 95 col_def.append('%s %s' % (col_names[i], col_types[i])) |
62 col_types[i] = colType | 96 return col_names, col_types, col_def |
97 | |
98 | |
99 def create_table(conn, file_path, table_name, skip=0, comment_char='#', column_names=None): | |
100 col_names, col_types, col_def = get_column_def(file_path, table_name, skip=skip, comment_char=comment_char, column_names=column_names) | |
101 col_func = [float if t == 'REAL' else int if t == 'INTEGER' else str for t in col_types] | |
102 table_def = 'CREATE TABLE %s (\n %s\n);' % (table_name, ', \n '.join(col_def)) | |
103 # print >> sys.stdout, table_def | |
104 insert_stmt = 'INSERT INTO %s(%s) VALUES(%s)' % (table_name, ','.join(col_names), ','.join(["?" for x in col_names])) | |
105 # print >> sys.stdout, insert_stmt | |
106 data_lines = 0 | |
107 try: | |
108 c = conn.cursor() | |
109 c.execute(table_def) | |
110 with open(file_path, "r") as fh: | |
111 for linenum, line in enumerate(fh): | |
112 if linenum < skip or line.startswith(comment_char): | |
113 continue | |
114 data_lines += 1 | |
115 try: | |
116 fields = line.rstrip('\r\n').split('\t') | |
117 vals = [col_func[i](x) if x else None for i, x in enumerate(fields)] | |
118 c.execute(insert_stmt, vals) | |
119 except Exception, e: | |
120 print >> sys.stderr, 'Failed at line: %d err: %s' % (linenum, e) | |
121 conn.commit() | |
122 c.close() | |
123 except Exception, e: | |
124 print >> sys.stderr, 'Failed: %s' % (e) | |
125 exit(1) | |
126 | |
127 | |
128 def regex_match(expr, item): | |
129 return re.match(expr, item) is not None | |
130 | |
131 | |
132 def regex_search(expr, item): | |
133 return re.search(expr, item) is not None | |
134 | |
135 | |
136 def regex_sub(expr, replace, item): | |
137 return re.sub(expr, replace, item) | |
138 | |
139 | |
140 def get_connection(sqlitedb_path, addfunctions=False): | |
141 conn = sqlite.connect(sqlitedb_path) | |
142 if addfunctions: | |
143 conn.create_function("re_match", 2, regex_match) | |
144 conn.create_function("re_search", 2, regex_search) | |
145 conn.create_function("re_sub", 3, regex_sub) | |
146 return conn | |
147 | |
148 | |
149 def __main__(): | |
150 # Parse Command Line | |
151 parser = optparse.OptionParser() | |
152 parser.add_option('-s', '--sqlitedb', dest='sqlitedb', default=None, help='The SQLite Database') | |
153 parser.add_option('-t', '--table', dest='tables', action="append", default=[], help='Tabular file: file_path[=table_name[:column_name, ...]') | |
154 parser.add_option('-j', '--jsonfile', dest='jsonfile', default=None, help='Tabular file: file_path[=table_name[:column_name, ...]') | |
155 parser.add_option('-q', '--query', dest='query', default=None, help='SQL query') | |
156 parser.add_option('-Q', '--query_file', dest='query_file', default=None, help='SQL query file') | |
157 parser.add_option('-n', '--no_header', dest='no_header', action='store_true', default=False, help='Include a column headers line') | |
158 parser.add_option('-o', '--output', dest='output', default=None, help='Output file for query results') | |
159 (options, args) = parser.parse_args() | |
160 | |
161 # open sqlite connection | |
162 conn = get_connection(options.sqlitedb) | |
163 # determine output destination | |
164 if options.output is not None: | |
165 try: | |
166 outputPath = os.path.abspath(options.output) | |
167 outputFile = open(outputPath, 'w') | |
63 except Exception, e: | 168 except Exception, e: |
64 print >> sys.stderr, 'Failed at line: %d err: %s' % (linenum,e) | 169 print >> sys.stderr, "failed: %s" % e |
65 except Exception, e: | 170 exit(3) |
66 print >> sys.stderr, 'Failed: %s' % (e) | 171 else: |
67 for i,col_type in enumerate(col_types): | 172 outputFile = sys.stdout |
68 if not col_type: | 173 |
69 col_types[i] = 'TEXT' | 174 # get table defs |
70 col_names = ['c%d' % i for i in range(1,len(col_types) + 1)] | 175 if options.tables: |
71 if column_names: | 176 for ti, table in enumerate(options.tables): |
72 for i,cname in enumerate([cn.strip() for cn in column_names.split(',')]): | 177 table_name = 't%d' % (ti + 1) |
73 if cname and i < len(col_names): | 178 column_names = None |
74 col_names[i] = cname | 179 fields = table.split('=') |
75 col_def = [] | 180 path = fields[0] |
76 for i,col_name in enumerate(col_names): | 181 if len(fields) > 1: |
77 col_def.append('%s %s' % (col_names[i],col_types[i])) | 182 names = fields[1].split(':') |
78 return col_names,col_types,col_def | 183 table_name = names[0] if names[0] else table_name |
79 | 184 if len(names) > 1: |
80 def create_table(conn,file_path,table_name,skip=0,comment_char='#',column_names=None): | 185 column_names = names[1] |
81 col_names,col_types,col_def = get_column_def(file_path,table_name,skip=skip,comment_char=comment_char,column_names=column_names) | 186 # print >> sys.stdout, '%s %s' % (table_name, path) |
82 col_func = [float if t == 'REAL' else int if t == 'INTEGER' else str for t in col_types] | 187 create_table(conn, path, table_name, column_names=column_names) |
83 table_def = 'CREATE TABLE %s (\n %s\n);' % (table_name,',\n '.join(col_def)) | 188 if options.jsonfile: |
84 # print >> sys.stdout, table_def | 189 try: |
85 insert_stmt = 'INSERT INTO %s(%s) VALUES(%s)' % (table_name,','.join(col_names),','.join([ "?" for x in col_names])) | 190 fh = open(options.jsonfile) |
86 # print >> sys.stdout, insert_stmt | 191 tdef = json.load(fh) |
87 data_lines = 0 | 192 if 'tables' in tdef: |
88 try: | 193 for ti, table in enumerate(tdef['tables']): |
89 c = conn.cursor() | 194 path = table['file_path'] |
90 c.execute(table_def) | 195 table_name = table['table_name'] if 'table_name' in table else 't%d' % (ti + 1) |
91 with open(file_path,"r") as fh: | 196 column_names = table['column_names'] if 'column_names' in table else None |
92 for linenum,line in enumerate(fh): | 197 comment_lines = table['comment_lines'] if 'comment_lines' in table else 0 |
93 if linenum < skip or line.startswith(comment_char): | 198 create_table(conn, path, table_name, column_names=column_names, skip=comment_lines) |
94 continue | 199 except Exception, exc: |
95 data_lines += 1 | 200 print >> sys.stderr, "Error: %s" % exc |
96 try: | 201 conn.close() |
97 fields = line.split('\t') | 202 |
98 vals = [col_func[i](x) if x else None for i,x in enumerate(fields)] | 203 query = None |
99 c.execute(insert_stmt,vals) | 204 if (options.query_file is not None): |
100 except Exception, e: | 205 with open(options.query_file, 'r') as fh: |
101 print >> sys.stderr, 'Failed at line: %d err: %s' % (linenum,e) | 206 query = '' |
102 conn.commit() | 207 for line in fh: |
103 c.close() | 208 query += line |
104 except Exception, e: | 209 elif (options.query is not None): |
105 print >> sys.stderr, 'Failed: %s' % (e) | 210 query = options.query |
106 exit(1) | 211 |
107 | 212 if (query is None): |
108 def __main__(): | 213 try: |
109 #Parse Command Line | 214 conn = get_connection(options.sqlitedb) |
110 parser = optparse.OptionParser() | 215 c = conn.cursor() |
111 parser.add_option( '-s', '--sqlitedb', dest='sqlitedb', default=None, help='The SQLite Database' ) | 216 rslt = c.execute(tables_query).fetchall() |
112 parser.add_option( '-t', '--table', dest='tables', action="append", default=[], help='Tabular file: file_path[=table_name[:column_name,...]' ) | 217 for table, sql in rslt: |
113 parser.add_option( '-q', '--query', dest='query', default=None, help='SQL query' ) | 218 print >> sys.stderr, "Table %s:" % table |
114 parser.add_option( '-Q', '--query_file', dest='query_file', default=None, help='SQL query file' ) | 219 try: |
115 parser.add_option( '-n', '--no_header', dest='no_header', action='store_true', default=False, help='Include a column headers line' ) | 220 col_query = 'SELECT * FROM %s LIMIT 0' % table |
116 parser.add_option( '-o', '--output', dest='output', default=None, help='Output file for query results' ) | 221 cur = conn.cursor().execute(col_query) |
117 (options, args) = parser.parse_args() | 222 cols = [col[0] for col in cur.description] |
118 | 223 print >> sys.stderr, " Columns: %s" % cols |
119 # oprn sqlite connection | 224 except Exception, exc: |
120 conn = sqlite.connect(options.sqlitedb) | 225 print >> sys.stderr, "Error: %s" % exc |
121 # determine output destination | 226 except Exception, exc: |
122 if options.output != None: | 227 print >> sys.stderr, "Error: %s" % exc |
228 exit(0) | |
229 # if not sqlite.is_read_only_query(query): | |
230 # print >> sys.stderr, "Error: Must be a read only query" | |
231 # exit(2) | |
123 try: | 232 try: |
124 outputPath = os.path.abspath(options.output) | 233 conn = get_connection(options.sqlitedb, addfunctions=True) |
125 outputFile = open(outputPath, 'w') | 234 cur = conn.cursor() |
126 except Exception, e: | 235 results = cur.execute(query) |
127 print >> sys.stderr, "failed: %s" % e | 236 if not options.no_header: |
128 exit(3) | 237 outputFile.write("#%s\n" % '\t'.join([str(col[0]) for col in cur.description])) |
129 else: | 238 # yield [col[0] for col in cur.description] |
130 outputFile = sys.stdout | 239 for i, row in enumerate(results): |
131 | 240 # yield [val for val in row] |
132 # determine output destination | 241 outputFile.write("%s\n" % '\t'.join([str(val) for val in row])) |
133 for ti,table in enumerate(options.tables): | |
134 table_name = 't%d' % (ti + 1) | |
135 column_names = None | |
136 fields = table.split('=') | |
137 path = fields[0] | |
138 if len(fields) > 1: | |
139 names = fields[1].split(':') | |
140 table_name = names[0] if names[0] else table_name | |
141 if len(names) > 1: | |
142 column_names = names[1] | |
143 # print >> sys.stdout, '%s %s' % (table_name, path) | |
144 create_table(conn,path,table_name,column_names=column_names) | |
145 conn.close() | |
146 | |
147 query = None | |
148 if (options.query_file != None): | |
149 with open(options.query_file,'r') as fh: | |
150 query = '' | |
151 for line in fh: | |
152 query += line | |
153 elif (options.query != None): | |
154 query = options.query | |
155 | |
156 if (query is None): | |
157 try: | |
158 conn = sqlite.connect(options.sqlitedb) | |
159 c = conn.cursor() | |
160 tables_query = "SELECT name,sql FROM sqlite_master WHERE type='table' ORDER BY name" | |
161 rslt = c.execute(tables_query).fetchall() | |
162 for table,sql in rslt: | |
163 print >> sys.stderr, "Table %s:" % table | |
164 try: | |
165 col_query = 'SELECT * FROM %s LIMIT 0' % table | |
166 cur = conn.cursor().execute(col_query) | |
167 cols = [col[0] for col in cur.description] | |
168 print >> sys.stderr, " Columns: %s" % cols | |
169 except Exception, exc: | |
170 print >> sys.stderr, "Error: %s" % exc | |
171 except Exception, exc: | 242 except Exception, exc: |
172 print >> sys.stderr, "Error: %s" % exc | 243 print >> sys.stderr, "Error: %s" % exc |
173 exit(0) | 244 exit(1) |
174 #if not sqlite.is_read_only_query(query): | 245 |
175 # print >> sys.stderr, "Error: Must be a read only query" | 246 if __name__ == "__main__": |
176 # exit(2) | 247 __main__() |
177 try: | |
178 conn = sqlite.connect(options.sqlitedb) | |
179 cur = conn.cursor() | |
180 results = cur.execute(query) | |
181 if not options.no_header: | |
182 outputFile.write("#%s\n" % '\t'.join([str(col[0]) for col in cur.description])) | |
183 # yield [col[0] for col in cur.description] | |
184 for i,row in enumerate(results): | |
185 # yield [val for val in row] | |
186 outputFile.write("%s\n" % '\t'.join([str(val) for val in row])) | |
187 except Exception, exc: | |
188 print >> sys.stderr, "Error: %s" % exc | |
189 exit(1) | |
190 | |
191 if __name__ == "__main__": __main__() | |
192 | |
193 |
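
The JSON table-definition format sketched in the module docstring above is consumed through the new -j/--jsonfile option added in this revision. Below is a minimal usage sketch, not taken from the repository: the input file, table name, column names, database path, output path, and query are all illustrative, and a Python 2 interpreter is assumed for the script itself (this revision uses "print >>" and "except Exception, e" syntax). Note that column_names is supplied as one comma-separated string, because get_column_def() calls .split(',') on it at this revision even though the docstring sketches a list.

import json
import subprocess

# Write a tiny tab-separated input; the leading line is skipped via comment_lines.
with open("dataset_101.tab", "w") as fh:
    fh.write("#id\tname\tscore\n")
    fh.write("1\talpha\t2.5\n")
    fh.write("2\tbeta\t0.0\n")

# Table definitions using the keys read from the JSON file in this revision:
# file_path, table_name, column_names, comment_lines.
table_defs = {
    "tables": [
        {
            "file_path": "dataset_101.tab",
            "table_name": "t1",
            # one comma-separated string; get_column_def() splits it on ','
            "column_names": "id,name,score",
            "comment_lines": 1,
        }
    ]
}
with open("tables.json", "w") as fh:
    json.dump(table_defs, fh)

# Load the table, run a query, and write tab-separated results
# (a '#'-prefixed header line is emitted unless -n/--no_header is given).
subprocess.check_call([
    "python", "query_tabular.py",
    "-s", "test.sqlite",
    "-j", "tables.json",
    "-q", "SELECT name, score FROM t1 WHERE score > 0 ORDER BY score DESC",
    "-o", "results.tsv",
])

With these inputs, results.tsv should contain a "#name	score" header row followed by the single "alpha" row, since "beta" has score 0.0.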
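This revision also registers three regular-expression helpers on the query connection via get_connection(..., addfunctions=True): re_match(pattern, value), re_search(pattern, value) and re_sub(pattern, replacement, value). The short sketch below demonstrates the SQL-side signatures against an in-memory sqlite3 database; the table, rows, and patterns are made up for illustration, and the same expressions could be supplied to query_tabular.py through -q/--query.

import re
import sqlite3

conn = sqlite3.connect(":memory:")
# Same registrations as get_connection(..., addfunctions=True) in this revision.
conn.create_function("re_match", 2, lambda expr, item: re.match(expr, item) is not None)
conn.create_function("re_search", 2, lambda expr, item: re.search(expr, item) is not None)
conn.create_function("re_sub", 3, lambda expr, repl, item: re.sub(expr, repl, item))

conn.execute("CREATE TABLE t1 (c1 TEXT, c2 TEXT)")
conn.executemany("INSERT INTO t1 VALUES (?, ?)",
                 [("chr1:100-200", "geneA"), ("chr2:300-400", "geneB")])

# Keep rows whose c1 starts with 'chr1' and strip the 'chr' prefix in the output.
for row in conn.execute(
        "SELECT re_sub('^chr', '', c1), c2 FROM t1 WHERE re_search('^chr1:', c1)"):
    print(row)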