/
Httpy.py
executable file
·309 lines (262 loc) · 8.68 KB
/
Httpy.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
HTTP/Web class.
Holds commonly-used HTTP/web request/post methods.
Compatible with Python 2.6 and 2.7
"""
import time
import urllib2, cookielib, urllib, httplib
from sys import stderr
class Httpy:
"""
Class used for communicating with web servers.
"""
def __init__(self, user_agent=None, debugging=False):
"""
Sets this class's user agent.
"""
self.debugging = debugging
self.cj = cookielib.CookieJar()
self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
self.Request = urllib2.Request
self.urlopen = self.opener.open
if user_agent != None:
self.user_agent = user_agent
else:
self.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:19.0) Gecko/20100101 Firefox/19.0'
def raise_timeout(self, signum, frame):
    """Signal-handler-shaped callback: abort a hung call by raising."""
    raise Exception("Timeout")
def get_meta(self, url):
    """
    Read response headers (content-type, content-length, etc) for a URL
    without downloading the body.

    Unshortens the URL first; each request times out after 10 seconds
    (so up to ~20s total — the old docstring's "5 + 5" claim was wrong).
    Returns a fallback dict ('unknown' type, '0' length) on any failure.
    """
    url = self.unshorten(url)
    try:
        headers = {'User-agent' : self.user_agent}
        req = urllib2.Request(url, headers=headers)
        site = self.urlopen(req, timeout=10)
    except Exception:
        # Best-effort: callers expect a dict, never an exception.
        return {'content-type': 'unknown', 'content-length': '0'}
    return site.info()
def unshorten(self, url):
    """
    Follow redirects for a (possibly shortened) URL.

    Returns the final URL after all redirects, or the input URL
    unchanged on any error. Times out after 10 seconds (the old
    docstring's "5 seconds" did not match the code).
    """
    try:
        headers = {'User-agent' : self.user_agent}
        req = urllib2.Request(url, headers=headers)
        site = self.urlopen(req, timeout=10)
    except Exception:
        # HTTPError (a subclass) and every other failure: fall back
        # to the original URL. The old separate HTTPError clause was
        # redundant with this one.
        return url
    return site.url
def check(self, url):
    """
    Return True if a GET of 'url' succeeds within 10 seconds,
    False otherwise.
    """
    try:
        self.urlopen(url, timeout=10)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt and
        # SystemExit still propagate.
        return False
    return True
def get(self, url, headers=None):
    """
    Attempts GET request with web server.
    Returns html source of a webpage (string).

    headers: optional extra request headers; a User-agent is added
             when missing. The caller's dict is NOT modified.
    Raises whatever urllib2 raises on connection or read failure.
    """
    # Copy the headers: the old mutable-default 'headers={}' was
    # mutated, polluting both the shared default and caller dicts.
    headers = dict(headers) if headers else {}
    if 'User-agent' not in headers:
        headers['User-agent'] = self.user_agent
    try:
        req = urllib2.Request(url, headers=headers)
        handle = self.urlopen(req, timeout=10)
    except Exception as e:
        if self.debugging: stderr.write('Httpy: Exception while creating request: %s\n' % str(e))
        raise  # bare raise keeps the original traceback ('raise e' did not)
    try:
        result = handle.read()
    except Exception as e:
        if self.debugging: stderr.write('Httpy: Exception while reading response: %s\n' % str(e))
        raise
    return result
def getter(self, url, headers=None, retry=1):
    """
    Attempts GET request via httplib with extended options.
    Returns html source of a webpage (string), '' on failure.
    Follows 301/302 redirects and retries failed attempts up to
    'retry' times. Does *NOT* utilize the cookie jar!

    headers: optional extra request headers; a User-agent is added
             when missing. The caller's dict is NOT modified.
    """
    # Copy: the old mutable default 'headers={}' was mutated in place.
    headers = dict(headers) if headers else {}
    if 'User-agent' not in headers:
        headers['User-agent'] = self.user_agent
    (https, host, path) = self.get_https_host_path(url)
    if self.debugging: stderr.write('Httpy.py: GET http%s://%s%s\n' % ('s' if https else '', host, path))
    try:
        if https:
            req = httplib.HTTPSConnection(host)
        else:
            req = httplib.HTTPConnection(host)
        req.putrequest('GET', path)
        if self.debugging: stderr.write('Httpy.py: headers:\n')
        for hkey in headers.keys():
            if self.debugging: stderr.write(' %s:\t%s\n' % (hkey, headers[hkey]))
            req.putheader(hkey, headers[hkey])
        req.endheaders()
        resp = req.getresponse()
        if self.debugging: stderr.write('Httpy.py: response headers:\n')
        for h, v in resp.getheaders():
            if self.debugging: stderr.write(' %s: "%s"\n' % (h, v))
        if resp.status == 200:
            return resp.read()
        elif resp.status in [301, 302] and resp.getheader('Location') is not None:
            if self.debugging: stderr.write('Httpy.py: Got %d to %s\n' % (resp.status, resp.getheader('Location')))
            # Each redirect consumes one retry, bounding the recursion.
            return self.getter(resp.getheader('Location'), headers=headers, retry=retry-1)
        else:
            result = ''
            try:
                result = resp.read()
            except Exception:
                pass
            if self.debugging: stderr.write('Httpy.py: HTTP status %s: %s\n' % (resp.status, resp.reason))
            return result
    except Exception as e:
        if self.debugging: stderr.write('Httpy.py: Exception: %s: %s\n' % (url, str(e)))
        if retry > 0:
            return self.getter(url, headers=headers, retry=retry-1)
    return ''
def get_https_host_path(self, url):
    """
    Split a URL into (is_https, host, path).

    The path keeps its leading '/'. A URL with no path component now
    yields '/' — the old code returned '', which produced malformed
    request lines ('GET  HTTP/...') in getter(). The old code also
    re-scanned the URL with url.find(host), which could match the
    host string earlier in the URL; partition avoids that.
    """
    https = url.startswith('https')
    # Everything after the scheme's '//'.
    remainder = url[url.find('//') + 2:]
    host, slash, rest = remainder.partition('/')
    path = slash + rest if slash else '/'
    return (https, host, path)
def fix_string(self, s):
    """
    Return 's' with every character that cannot survive str()
    conversion dropped (strips non-ASCII unicode under Python 2).
    """
    kept = []
    for ch in s:
        try:
            kept.append(str(ch))
        except UnicodeEncodeError:
            # Unconvertible character: drop it, same as appending ''.
            pass
    return ''.join(kept)
def fix_dict(self, mapping):
    """
    Return a copy of 'mapping' with every value passed through
    fix_string (unicode-stripped). Keys are left untouched.

    The parameter was renamed from 'dict', which shadowed the builtin;
    the in-file caller (post) passes it positionally.
    """
    result = {}
    for key in mapping:
        result[key] = self.fix_string(mapping[key])
    return result
def oldpost(self, url, postdict=None, headers=None):
    """
    Submits a POST request to URL via the cookie-aware urllib2 opener.
    URL-encodes postdict when it is a dict; passes strings through
    unchanged. Returns the response body, or '' on any failure.

    headers: optional extra request headers; a User-agent is added
             when missing. The caller's dict is NOT modified.
    Raises TypeError when postdict is neither None, dict, nor str
    (previously this fell through with 'encoded_data' unbound and
    crashed with a NameError).
    """
    result = ''
    # Copy: the old mutable default 'headers={}' was mutated in place.
    headers = dict(headers) if headers else {}
    if 'User-agent' not in headers:
        headers['User-agent'] = self.user_agent
    if postdict is None:
        encoded_data = ''
    elif type(postdict) == dict:
        encoded_data = urllib.urlencode(postdict)
    elif type(postdict) == str:
        encoded_data = postdict
    else:
        raise TypeError('postdict must be None, a dict, or a str')
    try:
        req = self.Request(url, encoded_data, headers)
        handle = self.urlopen(req, timeout=10)
        result = handle.read()
    except Exception as e:
        if self.debugging: stderr.write('Httpy.py: Exception: %s: %s\n' % (url, str(e)))
    return result
def post(self, url, postdict=None, headers=None):
    """
    Attempts POST request with web server via httplib.
    'postdict' must be a dict of keys/values to post (URL-encoded,
    unicode-stripped) or a pre-encoded string.
    Returns the response body on HTTP 200, '' otherwise.

    headers: optional extra request headers; a User-agent and
             Content-Length are added. The caller's dict is NOT
             modified (the old mutable default 'headers={}' kept a
             stale Content-Length across calls).
    """
    headers = dict(headers) if headers else {}
    if 'User-agent' not in headers:
        headers['User-agent'] = self.user_agent
    data = ''
    if postdict is not None and type(postdict) == dict:
        fixed_dict = self.fix_dict(postdict)
        data = urllib.urlencode(fixed_dict)
    elif postdict is not None and type(postdict) == str:
        data = postdict
    headers['Content-Length'] = len(data)
    # Reuse the shared URL parser instead of duplicating it inline.
    # The old inline parse chopped the host's last character for URLs
    # with no path ('host[:host.find("/")]' with find() == -1).
    (https, host, path) = self.get_https_host_path(url)
    if self.debugging: stderr.write('Httpy.py: host: "%s"\n' % host)
    if self.debugging: stderr.write('Httpy.py: path: "%s"\n' % path)
    if self.debugging: stderr.write('Httpy.py: headers: %s\n' % str(headers))
    if self.debugging: stderr.write('Httpy.py: postdata: "%s"\n' % data)
    try:
        if https:
            req = httplib.HTTPSConnection(host)
        else:
            req = httplib.HTTPConnection(host)
        req.putrequest('POST', path)
        for hkey in headers.keys():
            req.putheader(hkey, headers[hkey])
        req.endheaders()
        req.send(data)
        resp = req.getresponse()
        if resp.status == 200:
            return resp.read()
        else:
            if self.debugging: stderr.write('Httpy.py: HTTP status %s: %s: %s\n' % (resp.status, resp.reason, resp.read()))
            if self.debugging: stderr.write('Httpy.py: Response headers:\n')
            for name, value in resp.getheaders():
                if self.debugging: stderr.write('Httpy.py: \t"%s"="%s"\n' % (name, value))
            return ''
    except Exception as e:
        if self.debugging: stderr.write('Httpy.py: Exception: %s: %s\n' % (url, str(e)))
        return ''
def download(self, url, save_as):
    """
    Downloads a file from 'url' and saves it locally as 'save_as',
    streaming in 64KB chunks.
    Throws exceptions if errors occur; the local file is always
    closed, and is no longer created/truncated when the request
    itself fails (the old code opened it before connecting).
    """
    headers = {'User-agent' : self.user_agent}
    req = urllib2.Request(url, headers=headers)
    file_on_web = self.urlopen(req, timeout=10)
    output = open(save_as, 'wb')
    try:
        while True:
            buf = file_on_web.read(65536)
            if len(buf) == 0:
                break
            output.write(buf)
    finally:
        output.close()
def clear_cookies(self):
"""
Clears cookies in cookie jar.
"""
self.cj.clear()
def set_user_agent(self, user_agent):
    """
    Changes the user-agent used when connecting.
    (The method was missing its 'self' parameter, so any call
    raised an error before reaching the assignment.)
    """
    self.user_agent = user_agent
def between(self, source, start, finish):
    """
    Helper for parsing web-server responses.
    Collect every substring of 'source' that lies between an
    occurrence of 'start' and the next occurrence of 'finish'
    (non-overlapping, left-to-right scan). Returns [] if no pair
    is found.
    Example:
        between('hello >30< test >20< asdf >>10<< sadf>', '>', '<')
        returns ['30', '20', '>10']
    """
    found = []
    begin = source.find(start)
    end = source.find(finish, begin + len(start))
    while begin >= 0 and end >= 0:
        begin += len(start)
        found.append(source[begin:end])
        # Resume scanning after the closing marker.
        begin = source.find(start, end + len(finish))
        end = source.find(finish, begin + len(start))
    return found