import sys

# Prefer the local checkout of requests (new urllib3 API branch) over any
# installed copy.
sys.path.insert(0, '/home/saket/requests-new-urllib3-api/requests/packages/')
sys.path.insert(0, '/home/saket/requests-new-urllib3-api')

import requests
import pycurl
import os
from os.path import getsize
import argparse
import cStringIO
from functools import wraps
import tempfile, shutil, time

__url__ = "http://bg.upf.edu/transfic/taskService"

def stop_err(msg):
    sys.stderr.write('%s\n' % msg)
    sys.exit()

def retry(ExceptionToCheck, tries=14, delay=3, backoff=2, logger=None):
    """Retry calling the decorated function using an exponential backoff.

    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry

    :param ExceptionToCheck: the exception to check. may be a tuple of
        exceptions to check
    :type ExceptionToCheck: Exception or tuple
    :param tries: number of times to try (not retry) before giving up
    :type tries: int
    :param delay: initial delay between retries in seconds
    :type delay: int
    :param backoff: backoff multiplier e.g. value of 2 will double the delay
        each retry
    :type backoff: int
    :param logger: logger to use. If None, the retry message is suppressed
    :type logger: logging.Logger instance
    """
    def deco_retry(f):

        @wraps(f)
        def f_retry(*args, **kwargs):
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except ExceptionToCheck as e:
                    #msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
                    msg = "Retrying in %d seconds..." % (mdelay)
                    if logger:
                        logger.warning(msg)
                    else:
                        #print msg
                        pass
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            # Last attempt: let any exception propagate.
            return f(*args, **kwargs)

        return f_retry  # true decorator

    return deco_retry

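
# Illustrative use of the retry decorator above. The function name and URL
# argument are hypothetical, not part of the original script, and nothing
# calls this helper; it only shows how @retry wraps a flaky network call.
@retry(requests.exceptions.ConnectionError, tries=4, delay=1, backoff=2)
def _example_fetch_text(url):
    # Retried up to 4 attempts in total, sleeping 1s, 2s, 4s between them.
    return requests.get(url).text
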
class TransficUploader:
    def __init__(self):
        # One curl handle, configured to PUT the input file to the TransFIC
        # task service and to capture the service's response body.
        self.c = pycurl.Curl()
        self.c.setopt(pycurl.URL, __url__)
        self.c.setopt(pycurl.UPLOAD, 1)
        #c.setopt(pycurl.USERPWD, 'saket.kumar:whatsinaname.')
        self.c.setopt(pycurl.PROXY, 'http://saket.kumar:whatsinaname.@netmon.iitb.ac.in:80/')
        #c.setopt(pycurl.PROXYPORT, 80)
        #c.setopt(pycurl.PROXYTYPE_HTTP,1)
        #self.c.setopt(pycurl.VERBOSE, 1)
        self.c.setopt(pycurl.HTTPHEADER, ['Expect:'])
        #c.setopt(pycurl.HTTPPROXYTUNNEL, 1)
        #c.perform()
        self.c.setopt(pycurl.NOPROGRESS, 1)
        #curl_easy_setopt(hnd, CURLOPT_UPLOAD, 1L);
        self.c.setopt(pycurl.USERAGENT, "curl/7.27.0")
        #curl_easy_setopt(hnd, CURLOPT_HTTPHEADER, slist1);
        #curl_easy_setopt(hnd, CURLOPT_MAXREDIRS, 50L);
        self.c.setopt(pycurl.SSL_VERIFYPEER, 1)
        self.c.setopt(pycurl.CUSTOMREQUEST, "PUT")
        #curl_easy_setopt(hnd, CURLOPT_VERBOSE, 1L);
        self.c.setopt(pycurl.TCP_NODELAY, 1)
        # The service answers with the URL of the result; collect it here.
        self.buf = cStringIO.StringIO()
        self.c.setopt(self.c.WRITEFUNCTION, self.buf.write)

    def upload_file(self, filepath):
        # Stream the file as the body of the PUT request.
        f = open(filepath, 'rb')
        self.c.setopt(pycurl.INFILE, f)
        self.c.setopt(pycurl.INFILESIZE, getsize(filepath))
        #print f.read()

    def run(self):
        self.c.perform()

    def get_url(self):
        return self.buf.getvalue().strip()

    @retry(requests.exceptions.HTTPError)
    def result_exists(self, url):
        # Poll the result URL (with retries) until the task has finished.
        #url="http://www.cravat.us/results/%s/%s.zip" %(job_id,job_id)
        download_request = requests.request("GET", url)
        if download_request.status_code == 404 or download_request.status_code == 500:
            raise requests.HTTPError()
        elif "Task status is : error" in download_request.text:
            stop_err("No SNVs found!")
        else:
            return url

    @retry(requests.exceptions.HTTPError)
    def download_result(self, url, outpath):
        # Download the finished result to a temporary file, then move it to
        # the requested output path.
        tmp_dir = tempfile.mkdtemp()
        r = requests.get(url, stream=True)
        if r.status_code == 500:
            raise requests.HTTPError()
        else:
            path = os.path.join(tmp_dir, "results.csv")
            with open(path, 'wb') as f:
                for chunk in r.iter_content(128):
                    f.write(chunk)
            shutil.move(path, outpath)
            shutil.rmtree(tmp_dir)

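
# For comparison, a minimal sketch of the same upload done with the requests
# library instead of pycurl. It assumes the task service accepts a plain PUT
# body at __url__ and answers with the result URL as text; the proxy and
# header tweaks configured above are not reproduced. Illustrative only, never
# called by this script.
def _upload_with_requests(filepath):
    with open(filepath, 'rb') as handle:
        response = requests.put(__url__, data=handle)
    return response.text.strip()
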
def main(params):
    parser = argparse.ArgumentParser()
    parser.add_argument("--input", type=str, required=True)
    parser.add_argument("--output", type=str, required=True)
    args = parser.parse_args(params)
    uploader = TransficUploader()
    uploader.upload_file(args.input)
    uploader.run()
    url = uploader.get_url()
    url = uploader.result_exists(url)
    uploader.download_result(url, args.output)


if __name__ == "__main__":
    main(sys.argv[1:])
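
# Example invocation (the script filename and file paths are hypothetical):
#   python transfic_uploader.py --input snvs.txt --output transfic_results.csv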