# NOTE(review): GitHub page chrome ("Skip to content", clone/download links,
# contributor banner, line-count badge) from the scraped copy removed; only
# the module source follows.
# coding: utf-8
# Prefer the Python 3 locations; fall back to the Python 2 modules when
# urllib.parse is unavailable.
try:
    from urllib.parse import (urlencode, urlparse, urlunparse,
                              parse_qs, ParseResult)
except ImportError:  # Python 2
    from urllib import urlencode
    from urlparse import urlparse, urlunparse, parse_qs, ParseResult
#: Parts for RFC 3986 URI syntax
#: <scheme>://<netloc>/<path>;<params>?<query>#<fragment>
URL_PARTS = ('scheme', 'netloc', 'path', 'params', 'query', 'fragment')


class Url(object):
    """Parse (absolute and relative) URLs for humans."""

    def __init__(self, url, **parts):
        """Constructor for Url object.

        :param url: the URL to parse
        :type url: string
        :param parts: scheme, netloc, path, params, query, fragment
            overrides applied on top of the parsed ``url``
        :type parts: dict
        """
        self._url = url
        # Name each positional component returned by urlparse().
        self.params = dict((URL_PARTS[k], v)
                           for k, v in enumerate(urlparse(self._url)))
        for option, value in parts.items():
            # Accept only known component names: an unknown key would make
            # ParseResult(**self.params) raise later in `url`.
            # (The original checked `option in parts`, which is always true
            # while iterating parts.items().)
            if option in URL_PARTS:
                self.params[option] = value

    @property
    def url(self):
        """Reassemble the (possibly modified) components into a URL string."""
        return urlunparse(ParseResult(**self.params))

    @property
    def scheme(self):
        return self.params.get('scheme')

    @scheme.setter
    def scheme(self, value):
        self.params['scheme'] = value

    @property
    def host(self):
        # Following the syntax specifications in RFC 1808, urlparse
        # recognizes a netloc only if it is properly introduced by '//'.
        # Otherwise the input is presumed to be a relative URL and thus
        # to start with a path component.  Promote a leading 'www.' path
        # to the netloc so e.g. Url('www.example.com').host works.
        # (Fixed: the netloc is stored WITHOUT a '//' prefix -- urlunparse
        # adds the '//' itself, so prefixing it here doubled the slashes.)
        path = self.params.get('path')
        if path and path.startswith('www.'):
            self.params['netloc'] = path
            self.params['path'] = ''
        return self.params.get('netloc')

    @property
    def netloc(self):
        # RFC 3986 name for the authority component; returns the stored
        # value without the www-promotion that `host` performs.
        return self.params.get('netloc')

    @host.setter
    def host(self, value):
        self.params['netloc'] = value

    @property
    def path(self):
        return self.params.get('path')

    @path.setter
    def path(self, value):
        self.params['path'] = value

    @property
    def querystring(self):
        """Return the query re-encoded as a string, or '' when absent."""
        if self.params.get('query'):
            return urlencode(dict((k, ''.join(map(str, v)))
                                  for k, v in parse_qs(self.params.get('query')).items()))
        return ''

    @property
    def fragment(self):
        return self.params.get('fragment')

    @fragment.setter
    def fragment(self, value):
        self.params['fragment'] = value

    def __str__(self):
        return self.url
# NOTE(review): trailing GitHub UI text ("Jump to Line" / error banner) from
# the scraped copy removed -- not part of the module.