Commit b966efa (0 parents)

Initial commit.

jkbrzt committed Feb 25, 2012

Showing 7 changed files with 236 additions and 0 deletions.
4 changes: 4 additions & 0 deletions .gitignore
@@ -0,0 +1,4 @@
dist
httpie.egg-info
build
*.pyc
15 changes: 15 additions & 0 deletions README.md
@@ -0,0 +1,15 @@
## HTTPie: cURL for humans

HTTPie is a CLI frontend for [python-requests](http://python-requests.org).


### Installation

pip install httpie


### Screenshot

![httpie](https://github.com/jkbr/httpie/raw/master/httpie.png)


Binary file added httpie.png
Empty file added httpie/__init__.py
Empty file.
157 changes: 157 additions & 0 deletions httpie/httpie.py
@@ -0,0 +1,157 @@
#!/usr/bin/env python
import os
import sys
import json
import argparse
from collections import namedtuple
import requests
from requests.structures import CaseInsensitiveDict
from .pretty import prettify


__author__ = 'Jakub Roztocil'
__version__ = '0.1'


DEFAULT_UA = 'HTTPie/%s' % __version__
SEP_COMMON = ':'
SEP_DATA = '='
TYPE_FORM = 'application/x-www-form-urlencoded; charset=utf-8'
TYPE_JSON = 'application/json; charset=utf-8'


KeyValue = namedtuple('KeyValue', ['key', 'value', 'sep'])


class KeyValueType(object):

def __init__(self, separators):
self.separators = separators

def __call__(self, string):

found = {string.find(sep): sep
for sep in self.separators
if string.find(sep) != -1}
if not found:
raise argparse.ArgumentTypeError(
'"%s" is not a valid value' % string)
sep = found[min(found.keys())]
key, value = string.split(sep, 1)
return KeyValue(key=key, value=value, sep=sep)


parser = argparse.ArgumentParser(
description='HTTPie - cURL for humans.')


# Content type.
group = parser.add_mutually_exclusive_group(required=False)
group.add_argument('--json', '-j', action='store_true',
help='Serialize data items as a JSON object and set'
' Content-Type to application/json, if not specified.')
group.add_argument('--form', '-f', action='store_true',
help='Serialize data items as form values and set'
' Content-Type to application/x-www-form-urlencoded,'
' if not specified.')

parser.add_argument('--ugly', '-u', help='Do not prettify the response.',
dest='prettify', action='store_false', default=True)

# ``requests.request`` keyword arguments.
parser.add_argument('--auth', help='username:password',
type=KeyValueType(SEP_COMMON))
parser.add_argument('--verify',
help='Set to "yes" to check the host\'s SSL certificate.'
' You can also pass the path to a CA_BUNDLE'
' file for private certs. You can also set '
'the REQUESTS_CA_BUNDLE environment variable.')
parser.add_argument('--proxy', default=[], action='append',
type=KeyValueType(SEP_COMMON),
help='String mapping protocol to the URL of the proxy'
' (e.g. http:foo.bar:3128).')
parser.add_argument('--allow-redirects', default=False, action='store_true',
help='Set this flag if full redirects are allowed'
' (e.g. re-POST-ing of data at new ``Location``)')
parser.add_argument('--file', metavar='PATH', type=argparse.FileType(),
default=[], action='append',
help='File to multipart upload')
parser.add_argument('--timeout', type=float,
help='Float describes the timeout of the request'
' (Use socket.setdefaulttimeout() as fallback).')

# Positional arguments.
parser.add_argument('method',
help='HTTP method to be used for the request'
' (GET, POST, PUT, DELETE, PATCH, ...).')
parser.add_argument('url', metavar='URL',
help='Protocol defaults to http:// if the'
' URL does not include it.')
parser.add_argument('items', metavar='item', nargs='*',
type=KeyValueType([SEP_COMMON, SEP_DATA]),
help='HTTP header (key:value) or data field (key=value)')




def main():
args = parser.parse_args()

# Parse request headers and data from the command line.
headers = CaseInsensitiveDict()
headers['User-Agent'] = DEFAULT_UA
data = {}
for item in args.items:
if item.sep == SEP_COMMON:
target = headers
else:
if not sys.stdin.isatty():
parser.error('Request body (stdin) and request '
'data (key=value) cannot be mixed.')
target = data
target[item.key] = item.value

if not sys.stdin.isatty():
data = sys.stdin.read()

# JSON/Form content type.
if args.json or (not args.form and data):
if sys.stdin.isatty():
data = json.dumps(data)
if 'Content-Type' not in headers and (data or args.json):
headers['Content-Type'] = TYPE_JSON
elif 'Content-Type' not in headers:
headers['Content-Type'] = TYPE_FORM

# Fire the request.
response = requests.request(
method=args.method.lower(),
url=args.url if '://' in args.url else 'http://%s' % args.url,
headers=headers,
data=data,
verify=True if args.verify == 'yes' else args.verify,
timeout=args.timeout,
auth=(args.auth.key, args.auth.value) if args.auth else None,
proxies={proxy.key: proxy.value for proxy in args.proxy},
files={os.path.basename(f.name): f for f in args.file}
)

# Display the response.
original = response.raw._original_response
response_bits = (
u'HTTP/{version} {status} {reason}'.format(
version='.'.join(str(original.version)),
status=original.status, reason=original.reason,
),
str(original.msg).decode('utf-8'),
response.content.decode('utf-8') if response.content else u''
)

if args.prettify and sys.stdout.isatty():
response_bits = prettify(response.headers['content-type'], *response_bits)

print u'\n'.join(response_bits)


if __name__ == '__main__':
main()
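
As a hedged illustration (not part of the commit itself): given the `httpie` console script defined in setup.py below, an invocation such as `httpie POST example.org X-API-Key:secret name=John` would send the `:`-separated item as a request header and the `=`-separated item as a JSON data field. A minimal sketch of the item parsing, assuming the package above is importable and `requests`/`Pygments` are installed:

```python
# Illustrative only: exercises KeyValueType from httpie/httpie.py above.
from httpie.httpie import KeyValueType, SEP_COMMON, SEP_DATA

parse_item = KeyValueType([SEP_COMMON, SEP_DATA])

header = parse_item('X-API-Key:secret')  # ':' found first -> header item
field = parse_item('name=John')          # '=' found first -> data field item

assert header.sep == SEP_COMMON and (header.key, header.value) == ('X-API-Key', 'secret')
assert field.sep == SEP_DATA and (field.key, field.value) == ('name', 'John')
```

In `main()`, items with the `:` separator are collected into the `headers` dict and items with `=` into the `data` dict, which is then serialized with `json.dumps()` unless `--form` is given.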
49 changes: 49 additions & 0 deletions httpie/pretty.py
@@ -0,0 +1,49 @@
import json
from functools import partial
import pygments
from pygments.lexers import get_lexer_for_mimetype
from pygments.formatters.terminal256 import Terminal256Formatter
from pygments.lexer import RegexLexer, bygroups
from pygments import token


TYPE_JS = 'application/javascript'


class HTTPLexer(RegexLexer):
name = 'HTTP'
aliases = ['http']
filenames = ['*.http']
tokens = {
'root': [
(r'\s+', token.Text),
(r'(HTTP/[\d.]+\s+)(\d+)(\s+.+)', bygroups(
token.Operator, token.Number, token.String)),
(r'(.*?:)(.+)', bygroups(token.Name, token.String))
]}


highlight = partial(pygments.highlight,
formatter=Terminal256Formatter(style='native'))
highlight_http = partial(highlight, lexer=HTTPLexer())


def prettify(content_type, status_line, headers, body):
content_type = content_type.split(';')[0]

if 'json' in content_type:
content_type = TYPE_JS
try:
# Indent JSON
body = json.dumps(json.loads(body), sort_keys=True, indent=4)
except Exception:
pass

try:
body = highlight(code=body, lexer=get_lexer_for_mimetype(content_type))
except Exception:
pass

return (highlight_http(code=status_line).strip(),
highlight_http(code=headers),
body)
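
For illustration only, a minimal sketch of how the prettifier above is used (the sample response values are hypothetical; assumes Pygments is installed):

```python
# Illustrative only: runs a small JSON response through prettify() above.
from httpie.pretty import prettify

status_line, headers, body = prettify(
    content_type='application/json; charset=utf-8',
    status_line='HTTP/1.1 200 OK',
    headers='Content-Type: application/json',
    body='{"b": 2, "a": 1}',
)
print(body)  # keys sorted and indented by json.dumps, then colorized for the terminal
```

`main()` calls `prettify()` with the response's `Content-Type`, the reconstructed status line, the raw header block, and the decoded body, and only when stdout is a terminal and `--ugly` was not passed.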
11 changes: 11 additions & 0 deletions setup.py
@@ -0,0 +1,11 @@
from setuptools import setup


setup(name='httpie', version='0.1',
description='cURL for humans',
url='https://github.com/jkbr/httpie',
author='Jakub Roztocil',
license='BSD',
packages=['httpie'],
entry_points={'console_scripts': ['httpie = httpie.httpie:main']},
install_requires=['requests>=0.10.4', 'Pygments>=1.4'])
