From 62657b203d6093c58997b9b1865b1fb94a154816 Mon Sep 17 00:00:00 2001
From: Flavio Percoco Premoli
Date: Sat, 30 Jun 2012 01:52:42 +0200
Subject: [PATCH] Added support for:
 * Robots.txt requests, which is useful for robots rules tests.
 * Robots denied page (/deny)
 * Simple HTML page for views / calls that expect HTML.

---
 httpbin/core.py                  | 31 +++++++++++++++++++++++++++++--
 httpbin/helpers.py               | 15 +++++++++++++++
 httpbin/templates/httpbin.1.html |  3 +++
 3 files changed, 47 insertions(+), 2 deletions(-)

diff --git a/httpbin/core.py b/httpbin/core.py
index 1fad64cf..9bd95214 100644
--- a/httpbin/core.py
+++ b/httpbin/core.py
@@ -14,12 +14,12 @@
 
 import newrelic.agent
 
-from flask import Flask, Response, request, render_template, redirect, jsonify
+from flask import Flask, Response, request, render_template, redirect, jsonify, make_response
 from raven.contrib.flask import Sentry
 from werkzeug.datastructures import WWWAuthenticate
 
 from . import filters
-from .helpers import get_headers, status_code, get_dict, check_basic_auth, check_digest_auth, H
+from .helpers import get_headers, status_code, get_dict, check_basic_auth, check_digest_auth, H, ROBOT_TXT, ANGRY_ASCII
 from .utils import weighted_choice
 from .structures import CaseInsensitiveDict
 
@@ -51,6 +51,33 @@ def view_landing_page():
     return render_template('index.html')
 
 
+@app.route('/html')
+def view_html_page():
+    """Simple HTML page."""
+
+    return render_template('moby.html')
+
+
+@app.route('/robots.txt')
+def view_robots_page():
+    """Robots.txt rules."""
+
+    response = make_response()
+    response.data = ROBOT_TXT
+    response.content_type = "text/plain"
+    return response
+
+
+@app.route('/deny')
+def view_deny_page():
+    """Page denied by robots.txt rules."""
+    response = make_response()
+    response.data = ANGRY_ASCII
+    response.content_type = "text/plain"
+    return response
+    # return "YOU SHOULDN'T BE HERE"
+
+
 @app.route('/ip')
 def view_origin():
     """Returns Origin IP."""
diff --git a/httpbin/helpers.py b/httpbin/helpers.py
index 83800032..6bb57402 100644
--- a/httpbin/helpers.py
+++ b/httpbin/helpers.py
@@ -45,7 +45,22 @@
     'X-Forwarded-Port'
 )
 
+ROBOT_TXT = """User-agent: *
+Disallow: /deny
+"""
+ANGRY_ASCII = """
+          .-''''''-.
+        .'  _    _  '.
+       /    O    O    \\
+      :                :
+      |                |
+      :       __       :
+       \  .-"`  `"-.  /
+        '.          .'
+          '-......-'
+     YOU SHOULDN'T BE HERE
+"""
 
 def get_files():
     """Returns files dict from request context."""
diff --git a/httpbin/templates/httpbin.1.html b/httpbin/templates/httpbin.1.html
index 857948f4..ac6e3ce4 100644
--- a/httpbin/templates/httpbin.1.html
+++ b/httpbin/templates/httpbin.1.html
@@ -26,6 +26,9 @@
 
 ENDPOINTS
 
   • /digest-auth/:qop/:user/:passwd Challenges HTTP Digest Auth.
   • /stream/:n Streams n–100 lines.
   • /delay/:n Delays responding for n–10 seconds.
+  • /html Renders an HTML Page
+  • /robots.txt Returns some robots.txt rules
+  • /deny Denied by robots.txt file
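
Reviewer's note: the sketch below is one way to smoke-test the three new routes with Flask's built-in test client. It is not part of the patch; the import path and assertions simply mirror what the diff above adds (ROBOT_TXT served from /robots.txt, ANGRY_ASCII from /deny, moby.html from /html).

    # Minimal smoke test for the routes added in this patch, using Flask's
    # test client. Assumes `app` is importable from httpbin.core, as in the
    # diff above; run it from the repository root.
    from httpbin.core import app


    def test_new_routes():
        client = app.test_client()

        # /robots.txt serves the ROBOT_TXT rules as text/plain.
        rv = client.get('/robots.txt')
        assert rv.status_code == 200
        assert rv.content_type.startswith('text/plain')
        assert b'Disallow: /deny' in rv.data

        # /deny serves the ANGRY_ASCII art, also as text/plain.
        rv = client.get('/deny')
        assert rv.status_code == 200
        assert rv.content_type.startswith('text/plain')
        assert b"YOU SHOULDN'T BE HERE" in rv.data

        # /html renders the moby.html template as text/html.
        rv = client.get('/html')
        assert rv.status_code == 200
        assert rv.content_type.startswith('text/html')


    if __name__ == '__main__':
        test_new_routes()
        print('all three new routes respond as expected')

With the development server running, the same checks can be done by hand with curl against /html, /robots.txt, and /deny.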