Comparing changes

  • 8 commits
  • 320 files changed
  • 0 commit comments
  • 2 contributors
Commits on Jan 12, 2013
  • whit537: Redirect to new location (a3ba8f8)
  • whit537: Format README as markdown (ac63f46)
  • whit537: Update README.md (11b59bb)
Commits on Mar 12, 2013
  • wilkie: Updates README.md to point to new location of repo (b9f822d)
    Since the repo was moved when the gittip organization was made, this README must be updated.
Commits on Mar 19, 2013
  • whit537: Update README.md (9896e5a)
Commits on Jun 18, 2013
  • whit537: Merge pull request #1 from wilkie/patch-1 (96e77ca)
    Updates README.md to point to new location of repo
  • whit537: Update README.md (7429f44)
Commits on Mar 16, 2015
  • whit537: gittip -> gratipay (b67d920)
Showing with 3 additions and 33,723 deletions.
  1. +0 −11 .gitignore
  2. +0 −249 .pylintrc
  3. +0 −22 .travis.yml
  4. +0 −22 COPYRIGHT
  5. +0 −4 MANIFEST.in
  6. +0 −126 Makefile
  7. +0 −1  Procfile
  8. +0 −5 README
  9. +3 −0  README.md
  10. +0 −30 aspen/__init__.py
  11. +0 −24 aspen/auth/__init__.py
  12. +0 −89 aspen/auth/cookie.py
  13. +0 −124 aspen/auth/httpbasic.py
  14. +0 −412 aspen/auth/httpdigest.py
  15. +0 −148 aspen/backcompat.py
  16. +0 −388 aspen/configuration/__init__.py
  17. +0 −14 aspen/configuration/exceptions.py
  18. +0 −11 aspen/configuration/mime.types
  19. +0 −143 aspen/configuration/options.py
  20. +0 −149 aspen/configuration/parse.py
  21. +0 −32 aspen/context.py
  22. +0 −352 aspen/dispatcher.py
  23. +0 −8 aspen/exceptions.py
  24. +0 −157 aspen/execution.py
  25. +0 −65 aspen/hooks/__init__.py
  26. +0 −32 aspen/hooks/filters.py
  27. +0 −42 aspen/http/__init__.py
  28. +0 −43 aspen/http/baseheaders.py
  29. +0 −120 aspen/http/mapping.py
  30. +0 −682 aspen/http/request.py
  31. +0 −130 aspen/http/response.py
  32. +0 −102 aspen/json_.py
  33. +0 −66 aspen/logging.py
  34. +0 −79 aspen/network_engines/__init__.py
  35. +0 −37 aspen/network_engines/cheroot_.py
  36. +0 −48 aspen/network_engines/cherrypy_.py
  37. +0 −31 aspen/network_engines/diesel_.py
  38. +0 −163 aspen/network_engines/eventlet_.py
  39. +0 −154 aspen/network_engines/gevent_.py
  40. +0 −23 aspen/network_engines/pants_.py
  41. +0 −35 aspen/network_engines/rocket_.py
  42. +0 −165 aspen/network_engines/tornado_.py
  43. +0 −76 aspen/network_engines/twisted_.py
  44. +0 −168 aspen/renderers/__init__.py
  45. +0 −51 aspen/renderers/jinja2.py
  46. +0 −13 aspen/renderers/pystache.py
  47. +0 −13 aspen/renderers/stdlib_format.py
  48. +0 −13 aspen/renderers/stdlib_percent.py
  49. +0 −13 aspen/renderers/stdlib_template.py
  50. +0 −26 aspen/renderers/tornado.py
  51. +0 −5 aspen/rendering.py
  52. +0 −211 aspen/resources/__init__.py
  53. +0 −199 aspen/resources/dynamic_resource.py
  54. +0 −33 aspen/resources/json_resource.py
  55. +0 −193 aspen/resources/negotiated_resource.py
  56. +0 −70 aspen/resources/rendered_resource.py
  57. +0 −10 aspen/resources/resource.py
  58. +0 −45 aspen/resources/socket_resource.py
  59. +0 −26 aspen/resources/static_resource.py
  60. +0 −137 aspen/server.py
  61. +0 −141 aspen/sockets/__init__.py
  62. +0 −108 aspen/sockets/buffer.py
  63. +0 −39 aspen/sockets/channel.py
  64. +0 −9 aspen/sockets/event.py
  65. +0 −51 aspen/sockets/loop.py
  66. +0 −92 aspen/sockets/message.py
  67. +0 −43 aspen/sockets/packet.py
  68. +0 −168 aspen/sockets/socket.py
  69. +0 −50 aspen/sockets/transport.py
  70. +0 −119 aspen/testing/__init__.py
  71. +0 −122 aspen/testing/fsfix.py
  72. +0 −30 aspen/testing/sockets.py
  73. +0 −338 aspen/utils.py
  74. +0 −269 aspen/website.py
  75. +0 −11 aspen/wsgi.py
  76. +0 −145 aspen/www/autoindex.html
  77. +0 −31 aspen/www/error.html
  78. BIN  aspen/www/favicon.ico
  79. +0 −9 build.bat
  80. +0 −485 distribute_setup.py
  81. +0 −55 doc/.aspen/aspen_io.py
  82. +0 −35 doc/.aspen/base.html
  83. +0 −14 doc/.aspen/configure-aspen.py
  84. +0 −22 doc/.aspen/doc.html
  85. +0 −17 doc/.aspen/ga.html
  86. BIN  doc/0-page.png
  87. BIN  doc/1-simplate.png
  88. BIN  doc/2-library.png
  89. +0 −34 doc/CONTRIBUTORS
  90. +0 −128 doc/HISTORY
  91. +0 −2  doc/README
  92. +0 −131 doc/_lib/aspen.0.css
  93. +0 −149 doc/_lib/aspen.1.css
  94. +0 −159 doc/_lib/aspen.2.css
  95. +0 −171 doc/_lib/aspen.3.css
  96. +0 −171 doc/_lib/aspen.4.css
  97. +0 −174 doc/_lib/aspen.5.css
  98. +0 −178 doc/_lib/aspen.6.css
  99. +0 −193 doc/_lib/aspen.7.css
  100. +0 −193 doc/_lib/aspen.8.css
  101. +0 −193 doc/_lib/aspen.9.css
  102. BIN  doc/_lib/exclamation.png
  103. BIN  doc/_lib/for-facebook.jpg
  104. BIN  doc/_lib/for-facebook.png
  105. BIN  doc/_lib/for-twitter.acorn
  106. BIN  doc/_lib/for-twitter.jpg
  107. BIN  doc/_lib/for-twitter.png
  108. BIN  doc/_lib/grove.jpg
  109. +0 −16 doc/_lib/jquery-1.5.min.js
  110. +0 −2  doc/_lib/reset.css
  111. +0 −117 doc/_lib/shTheme.css
  112. +0 −165 doc/_lib/syntaxhighlighter_3.0.83/LGPL-LICENSE
  113. +0 −20 doc/_lib/syntaxhighlighter_3.0.83/MIT-LICENSE
  114. +0 −120 doc/_lib/syntaxhighlighter_3.0.83/compass/_theme_template.scss
  115. +0 −14 doc/_lib/syntaxhighlighter_3.0.83/compass/config.rb
  116. +0 −216 doc/_lib/syntaxhighlighter_3.0.83/compass/shCore.scss
  117. +0 −2  doc/_lib/syntaxhighlighter_3.0.83/compass/shCoreDefault.scss
  118. +0 −2  doc/_lib/syntaxhighlighter_3.0.83/compass/shCoreDjango.scss
  119. +0 −2  doc/_lib/syntaxhighlighter_3.0.83/compass/shCoreEclipse.scss
  120. +0 −2  doc/_lib/syntaxhighlighter_3.0.83/compass/shCoreEmacs.scss
  121. +0 −2  doc/_lib/syntaxhighlighter_3.0.83/compass/shCoreFadeToGrey.scss
  122. +0 −2  doc/_lib/syntaxhighlighter_3.0.83/compass/shCoreMDUltra.scss
  123. +0 −2  doc/_lib/syntaxhighlighter_3.0.83/compass/shCoreMidnight.scss
  124. +0 −2  doc/_lib/syntaxhighlighter_3.0.83/compass/shCoreRDark.scss
  125. +0 −7 doc/_lib/syntaxhighlighter_3.0.83/compass/shThemeDefault.scss
  126. +0 −36 doc/_lib/syntaxhighlighter_3.0.83/compass/shThemeDjango.scss
  127. +0 −48 doc/_lib/syntaxhighlighter_3.0.83/compass/shThemeEclipse.scss
  128. +0 −32 doc/_lib/syntaxhighlighter_3.0.83/compass/shThemeEmacs.scss
  129. +0 −36 doc/_lib/syntaxhighlighter_3.0.83/compass/shThemeFadeToGrey.scss
  130. +0 −32 doc/_lib/syntaxhighlighter_3.0.83/compass/shThemeMDUltra.scss
  131. +0 −32 doc/_lib/syntaxhighlighter_3.0.83/compass/shThemeMidnight.scss
  132. +0 −32 doc/_lib/syntaxhighlighter_3.0.83/compass/shThemeRDark.scss
  133. +0 −22 doc/_lib/syntaxhighlighter_3.0.83/index.html
  134. +0 −17 doc/_lib/syntaxhighlighter_3.0.83/scripts/shAutoloader.js
  135. +0 −59 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushAS3.js
  136. +0 −75 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushAppleScript.js
  137. +0 −59 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushBash.js
  138. +0 −65 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushCSharp.js
  139. +0 −100 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushColdFusion.js
  140. +0 −97 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushCpp.js
  141. +0 −91 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushCss.js
  142. +0 −55 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushDelphi.js
  143. +0 −41 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushDiff.js
  144. +0 −52 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushErlang.js
  145. +0 −67 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushGroovy.js
  146. +0 −52 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushJScript.js
  147. +0 −57 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushJava.js
  148. +0 −58 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushJavaFX.js
  149. +0 −72 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushPerl.js
  150. +0 −88 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushPhp.js
  151. +0 −33 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushPlain.js
  152. +0 −74 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushPowerShell.js
  153. +0 −64 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushPython.js
  154. +0 −55 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushRuby.js
  155. +0 −94 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushSass.js
  156. +0 −51 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushScala.js
  157. +0 −66 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushSql.js
  158. +0 −56 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushVb.js
  159. +0 −69 doc/_lib/syntaxhighlighter_3.0.83/scripts/shBrushXml.js
  160. +0 −17 doc/_lib/syntaxhighlighter_3.0.83/scripts/shCore.js
  161. +0 −17 doc/_lib/syntaxhighlighter_3.0.83/scripts/shLegacy.js
  162. +0 −130 doc/_lib/syntaxhighlighter_3.0.83/src/shAutoloader.js
  163. +0 −1,721 doc/_lib/syntaxhighlighter_3.0.83/src/shCore.js
  164. +0 −157 doc/_lib/syntaxhighlighter_3.0.83/src/shLegacy.js
  165. +0 −226 doc/_lib/syntaxhighlighter_3.0.83/styles/shCore.css
  166. +0 −328 doc/_lib/syntaxhighlighter_3.0.83/styles/shCoreDefault.css
  167. +0 −331 doc/_lib/syntaxhighlighter_3.0.83/styles/shCoreDjango.css
  168. +0 −339 doc/_lib/syntaxhighlighter_3.0.83/styles/shCoreEclipse.css
  169. +0 −324 doc/_lib/syntaxhighlighter_3.0.83/styles/shCoreEmacs.css
  170. +0 −328 doc/_lib/syntaxhighlighter_3.0.83/styles/shCoreFadeToGrey.css
  171. +0 −324 doc/_lib/syntaxhighlighter_3.0.83/styles/shCoreMDUltra.css
  172. +0 −324 doc/_lib/syntaxhighlighter_3.0.83/styles/shCoreMidnight.css
  173. +0 −324 doc/_lib/syntaxhighlighter_3.0.83/styles/shCoreRDark.css
  174. +0 −117 doc/_lib/syntaxhighlighter_3.0.83/styles/shThemeDefault.css
  175. +0 −120 doc/_lib/syntaxhighlighter_3.0.83/styles/shThemeDjango.css
  176. +0 −128 doc/_lib/syntaxhighlighter_3.0.83/styles/shThemeEclipse.css
  177. +0 −113 doc/_lib/syntaxhighlighter_3.0.83/styles/shThemeEmacs.css
  178. +0 −117 doc/_lib/syntaxhighlighter_3.0.83/styles/shThemeFadeToGrey.css
  179. +0 −113 doc/_lib/syntaxhighlighter_3.0.83/styles/shThemeMDUltra.css
  180. +0 −113 doc/_lib/syntaxhighlighter_3.0.83/styles/shThemeMidnight.css
  181. +0 −113 doc/_lib/syntaxhighlighter_3.0.83/styles/shThemeRDark.css
  182. +0 −1  doc/_lib/syntaxhighlighter_3.0.83/tests/.rvmrc
  183. +0 −204 doc/_lib/syntaxhighlighter_3.0.83/tests/brushes/sass.html
  184. +0 −136 doc/_lib/syntaxhighlighter_3.0.83/tests/brushes_tests.html
  185. +0 −42 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/001_basic.html
  186. +0 −50 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/002_brushes.html
  187. +0 −42 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/003_script_tag.html
  188. +0 −43 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/004_url_parsing.html
  189. +0 −33 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/005_no_gutter.html
  190. +0 −39 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/006_pad_line_numbers.html
  191. +0 −60 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/007_collapse.html
  192. +0 −44 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/007_collapse_interaction.html
  193. +0 −29 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/008_first_line.html
  194. +0 −32 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/009_class_name.html
  195. +0 −70 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/010_highlight.html
  196. +0 −98 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/011_smart_tabs.html
  197. +0 −35 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/012_server_side.html
  198. +0 −34 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/013_html_script.html
  199. +0 −70 doc/_lib/syntaxhighlighter_3.0.83/tests/cases/014_legacy.html
  200. +0 −52 doc/_lib/syntaxhighlighter_3.0.83/tests/commonjs_tests.js
  201. +0 −6,240 doc/_lib/syntaxhighlighter_3.0.83/tests/js/jquery-1.4.2.js
  202. +0 −135 doc/_lib/syntaxhighlighter_3.0.83/tests/js/qunit.css
  203. +0 −1,069 doc/_lib/syntaxhighlighter_3.0.83/tests/js/qunit.js
  204. +0 −242 doc/_lib/syntaxhighlighter_3.0.83/tests/syntaxhighlighter_tests.html
  205. +0 −134 doc/_lib/syntaxhighlighter_3.0.83/tests/theme_tests.html
  206. +0 −11 doc/_lib/syntaxhighlighter_3.0.83/tests/webrick.rb
  207. +0 −2  doc/_lib/syntaxhighlighter_3.0.83/tests/webrick.sh
  208. +0 −185 doc/api/request/index.html
  209. +0 −168 doc/api/request/index.html.orig
  210. BIN  doc/api/response/fishing.200.png
  211. BIN  doc/api/response/fishing.200.small.png
  212. BIN  doc/api/response/fishing.404.png
  213. BIN  doc/api/response/fishing.404.small.png
  214. +0 −87 doc/api/response/index.html
  215. +0 −120 doc/api/website/index.html
  216. +0 −100 doc/aspen.conf/index.html
  217. +0 −84 doc/aspen/index.html
  218. +0 −37 doc/configuration/index.html
  219. +0 −20 doc/configure-aspen.py/index.html
  220. +0 −58 doc/deployment/index.html
  221. +0 −2  doc/dotaspen/index.html
  222. +0 −4 doc/form-feed/index.html
  223. +0 −47 doc/hooks/index.html
  224. +0 −593 doc/index.html
  225. +0 −77 doc/mime.types/index.html
  226. BIN  doc/mime.types/text.html.png
  227. BIN  doc/mime.types/text.html.small.png
  228. BIN  doc/mime.types/text.plain.png
  229. BIN  doc/mime.types/text.plain.small.png
  230. +0 −23 doc/nice-errors/index.html
  231. +0 −86 doc/page-break/index.html
  232. BIN  doc/power-of-python.png
  233. +0 −62 doc/project_root/index.html
  234. BIN  doc/quick-start/greetings-program.png
  235. BIN  doc/quick-start/greetings-program.small.png
  236. +0 −42 doc/quick-start/index.html
  237. +0 −2  doc/request/index.html
  238. +0 −2  doc/response/index.html
  239. BIN  doc/simplates/extra-excitement.png
  240. BIN  doc/simplates/extra-excitement.small.png
  241. +0 −129 doc/simplates/index.html
  242. +0 −139 doc/simplates/json/index.html
  243. BIN  doc/simplates/json/json.png
  244. BIN  doc/simplates/json/json.small.png
  245. +0 −59 doc/simplates/negotiated/index.html
  246. +0 −221 doc/simplates/rendered/index.html
  247. +0 −3  doc/simplates/rendered/pystache.html
  248. BIN  doc/simplates/sorry-program.png
  249. BIN  doc/simplates/sorry-program.small.png
  250. +0 −20 doc/simplates/static/index.html
  251. +0 −2  doc/simplates/template/index.html
  252. +0 −34 doc/success/approximatrix.html
  253. BIN  doc/success/approximatrix.png
  254. +0 −2  doc/templating/index.html
  255. +0 −191 doc/unicode/index.html
  256. BIN  doc/unicode/linear-b.eot
  257. +0 −401 doc/unicode/linear-b.svg
  258. BIN  doc/unicode/linear-b.ttf
  259. BIN  doc/unicode/linear-b.woff
  260. BIN  doc/virtual-paths/1999.png
  261. BIN  doc/virtual-paths/1999.small.png
  262. BIN  doc/virtual-paths/aspen.png
  263. BIN  doc/virtual-paths/aspen.small.png
  264. BIN  doc/virtual-paths/cheese.png
  265. BIN  doc/virtual-paths/cheese.small.png
  266. +0 −109 doc/virtual-paths/index.html
  267. BIN  doc/virtual-paths/no-cheese.png
  268. BIN  doc/virtual-paths/no-cheese.small.png
  269. BIN  doc/virtual-paths/oops.png
  270. BIN  doc/virtual-paths/oops.small.png
  271. BIN  doc/virtual-paths/python.png
  272. BIN  doc/virtual-paths/python.small.png
  273. +0 −9 fcgi_aspen.py
  274. +0 −44 release.sh
  275. +0 −49 setup.py
  276. +0 −55 swaddle.py
  277. +0 −9 tests/all-utf8.py
  278. +0 −17 tests/assert_test.py
  279. +0 −20 tests/except_test.py
  280. +0 −4 tests/go-tdd
  281. +0 −48 tests/test_charset_re.py
  282. +0 −235 tests/test_configuration.py
  283. +0 −513 tests/test_dispatcher.py
  284. +0 −20 tests/test_execution.py
  285. +0 −58 tests/test_hooks.py
  286. +0 −59 tests/test_httpbasic.py
  287. +0 −96 tests/test_httpdigest.py
  288. +0 −145 tests/test_json_resource.py
  289. +0 −51 tests/test_logging.py
  290. +0 −227 tests/test_mappings.py
  291. +0 −300 tests/test_negotiated_resource.py
  292. +0 −113 tests/test_rendering.py
  293. +0 −180 tests/test_request.py
  294. +0 −65 tests/test_request_body.py
  295. +0 −410 tests/test_request_line.py
  296. +0 −183 tests/test_resources.py
  297. +0 −46 tests/test_response.py
  298. +0 −47 tests/test_sockets_.py
  299. +0 −36 tests/test_sockets_buffer.py
  300. +0 −55 tests/test_sockets_channel.py
Sorry, we could not display the entire diff because too many files (320) changed.
11 .gitignore
@@ -1,11 +0,0 @@
-*.egg-info/
-aspen/tests/log
-env
-*.pyc
-distribute-*
-__pycache__
-*\$py.class
-.tox
-/build/
-/downloads/
-.coverage
249 .pylintrc
@@ -1,249 +0,0 @@
-[MASTER]
-
-# Specify a configuration file.
-#rcfile=
-
-# Python code to execute, usually for sys.path manipulation such as
-# pygtk.require().
-#init-hook=
-
-# Profiled execution.
-profile=no
-
-# Add files or directories to the blacklist. They should be base names, not
-# paths.
-ignore=CVS
-
-# Pickle collected data for later comparisons.
-persistent=no
-
-# List of plugins (as comma separated values of python modules names) to load,
-# usually to register additional checkers.
-load-plugins=
-
-
-[MESSAGES CONTROL]
-
-# Enable the message, report, category or checker with the given id(s). You can
-# either give multiple identifier separated by comma (,) or put this option
-# multiple time.
-#enable=
-
-# Disable the message, report, category or checker with the given id(s). You
-# can either give multiple identifier separated by comma (,) or put this option
-# multiple time (only on the command line, not in the configuration file where
-# it should appear only once).
-#disable=
-
-
-[REPORTS]
-
-# Set the output format. Available formats are text, parseable, colorized, msvs
-# (visual studio) and html
-output-format=parseable
-
-# Include message's id in output
-include-ids=no
-
-# Put messages in a separate file for each module / package specified on the
-# command line instead of printing them on stdout. Reports (if any) will be
-# written in a file name "pylint_global.[txt|html]".
-files-output=no
-
-# Tells whether to display a full report or only the messages
-reports=yes
-
-# Python expression which should return a note less than 10 (10 is the highest
-# note). You have access to the variables errors warning, statement which
-# respectively contain the number of errors / warnings messages and the total
-# number of statements analyzed. This is used by the global evaluation report
-# (RP0004).
-evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
-
-# Add a comment according to your evaluation note. This is used by the global
-# evaluation report (RP0004).
-comment=no
-
-
-[FORMAT]
-
-# Maximum number of characters on a single line.
-max-line-length=80
-
-# Maximum number of lines in a module
-max-module-lines=1000
-
-# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
-# tab).
-indent-string=' '
-
-
-[MISCELLANEOUS]
-
-# List of note tags to take in consideration, separated by a comma.
-notes=FIXME,XXX,TODO
-
-
-[SIMILARITIES]
-
-# Minimum lines number of a similarity.
-min-similarity-lines=4
-
-# Ignore comments when computing similarities.
-ignore-comments=yes
-
-# Ignore docstrings when computing similarities.
-ignore-docstrings=yes
-
-
-[BASIC]
-
-# Required attributes for module, separated by a comma
-required-attributes=
-
-# List of builtins function names that should not be used, separated by a comma
-bad-functions=map,filter,apply,input
-
-# Regular expression which should only match correct module names
-module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
-
-# Regular expression which should only match correct module level names
-const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
-
-# Regular expression which should only match correct class names
-class-rgx=[A-Z_][a-zA-Z0-9]+$
-
-# Regular expression which should only match correct function names
-function-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct method names
-method-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct instance attribute names
-attr-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct argument names
-argument-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct variable names
-variable-rgx=[a-z_][a-z0-9_]{1,30}$
-
-# Regular expression which should only match correct list comprehension /
-# generator expression variable names
-inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
-
-# Good variable names which should always be accepted, separated by a comma
-good-names=i,j,k,v,s,ex,Run,_,HA1,HA2,A2
-
-# Bad variable names which should always be refused, separated by a comma
-bad-names=foo,bar,baz,toto,tutu,tata
-
-# Regular expression which should only match functions or classes name which do
-# not require a docstring
-no-docstring-rgx=__.*__
-
-
-[TYPECHECK]
-
-# Tells whether missing members accessed in mixin class should be ignored. A
-# mixin class is detected if its name ends with "mixin" (case insensitive).
-ignore-mixin-members=yes
-
-# List of classes names for which member attributes should not be checked
-# (useful for classes with attributes dynamically set).
-ignored-classes=SQLObject
-
-# When zope mode is activated, add a predefined set of Zope acquired attributes
-# to generated-members.
-zope=no
-
-# List of members which are set dynamically and missed by pylint inference
-# system, and so shouldn't trigger E0201 when accessed. Python regular
-# expressions are accepted.
-generated-members=REQUEST,acl_users,aq_parent
-
-
-[VARIABLES]
-
-# Tells whether we should check for unused import in __init__ files.
-init-import=no
-
-# A regular expression matching the beginning of the name of dummy variables
-# (i.e. not used).
-dummy-variables-rgx=_|dummy
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid to define new builtins when possible.
-additional-builtins=
-
-
-[CLASSES]
-
-# List of interface methods to ignore, separated by a comma. This is used for
-# instance to not check methods defines in Zope's Interface base class.
-ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
-
-# List of method names used to declare (i.e. assign) instance attributes.
-defining-attr-methods=__init__,__new__,setUp
-
-# List of valid names for the first argument in a class method.
-valid-classmethod-first-arg=cls
-
-
-[DESIGN]
-
-# Maximum number of arguments for function / method
-max-args=8
-
-# Argument names that match this expression will be ignored. Default to name
-# with leading underscore
-ignored-argument-names=_.*
-
-# Maximum number of locals for function / method body
-max-locals=15
-
-# Maximum number of return / yield for function / method body
-max-returns=14
-
-# Maximum number of branch for function / method body
-max-branchs=12
-
-# Maximum number of statements in function / method body
-max-statements=50
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=14
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=1
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=20
-
-
-[IMPORTS]
-
-# Deprecated modules which should not be used, separated by a comma
-deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
-
-# Create a graph of every (i.e. internal and external) dependencies in the
-# given file (report RP0402 must not be disabled)
-import-graph=
-
-# Create a graph of external dependencies in the given file (report RP0402 must
-# not be disabled)
-ext-import-graph=
-
-# Create a graph of internal dependencies in the given file (report RP0402 must
-# not be disabled)
-int-import-graph=
-
-
-[EXCEPTIONS]
-
-# Exceptions that will emit a warning when being caught. Defaults to
-# "Exception"
-overgeneral-exceptions=Exception
22 .travis.yml
@@ -1,22 +0,0 @@
-language: python
-python:
- - 2.5
- - 2.6
- - 2.7
-install:
- - python setup.py -q install
- - pip install --use-mirrors simplejson coverage
-script: nosetests --with-coverage tests
-notifications:
- email: false
- irc:
- channels:
- - "irc.freenode.org#aspen"
- on_success: never
- on_failure: always
- template:
- - "Failed build: %{build_url}"
- skip_join: true
-branches:
- only:
- - master
22 COPYRIGHT
@@ -1,22 +0,0 @@
-All original work is copyright (c) 2006-2013 Chad Whitacre and contributors,
-all rights reserved, and is released under the MIT license:
-
- Permission is hereby granted, free of charge, to any person obtaining a
- copy of this software and associated documentation files (the "Software"),
- to deal in the Software without restriction, including without limitation
- the rights to use, copy, modify, merge, publish, distribute, sublicense,
- and/or sell copies of the Software, and to permit persons to whom the
- Software is furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in
- all copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
- IN THE SOFTWARE.
-
- http://opensource.org/licenses/mit-license.php
4 MANIFEST.in
@@ -1,4 +0,0 @@
-include distribute_setup.py
-include version.txt
-include aspen/configuration/mime.types
-graft aspen/www
126 Makefile
@@ -1,126 +0,0 @@
-PYTHON=python
-
-
-# Core Executables
-# ================
-# We satisfy dependencies using local tarballs, to ensure that we can build
-# without a network connection. They're kept in our repo in ./vendor.
-
-env/bin/aspen: env/bin/pip
- ./env/bin/pip install ./vendor/Cheroot-4.0.0beta.tar.gz
- ./env/bin/pip install ./vendor/mimeparse-0.1.3.tar.gz
- ./env/bin/pip install ./vendor/tornado-1.2.1.tar.gz
- ./env/bin/python setup.py develop
-
-env/bin/nosetests: env/bin/pip
- ./env/bin/pip install ./vendor/coverage-3.5.3.tar.gz
- ./env/bin/pip install ./vendor/nose-1.1.2.tar.gz
- ./env/bin/pip install ./vendor/nosexcover-1.0.7.tar.gz
- ./env/bin/pip install ./vendor/snot-0.6.tar.gz
-
-env/bin/pip:
- $(PYTHON) ./vendor/virtualenv-1.7.1.2.py \
- --distribute \
- --unzip-setuptools \
- --prompt="[aspen] " \
- --never-download \
- --extra-search-dir=./vendor/ \
- env/
-
-env: env/bin/pip
-
-env-clean:
- find . -name \*.pyc -delete
- rm -rf env smoke-test
-
-
-# Doc / Smoke
-# ===========
-
-docs: env/bin/aspen
- ./env/bin/aspen -a:5370 -wdoc/ -pdoc/.aspen --changes_reload=1
-
-doc: docs
-
-smoke: env/bin/aspen
- @mkdir smoke-test && echo "Greetings, program!" > smoke-test/index.html
- ./env/bin/aspen -w smoke-test
-
-
-# Testing
-# =======
-
-test: env/bin/aspen env/bin/nosetests
- ./env/bin/nosetests -sx tests/
-
--coverage-env: env/bin/pip
- ./env/bin/pip install coverage nosexcover
-
--pylint-env: env/bin/pip
- ./env/bin/pip install pylint
-
-nosetests.xml coverage.xml: env/bin/aspen env/bin/nosetests -coverage-env
- ./env/bin/nosetests \
- --with-xcoverage \
- --with-xunit tests \
- --cover-package aspen
-
-pylint.out: -pylint-env
- ./env/bin/pylint --rcfile=.pylintrc aspen | tee pylint.out
-
-analyse: pylint.out coverage.xml nosetests.xml
- @echo done!
-
-testing-clean:
- rm -rf .coverage coverage.xml nosetests.xml pylint.out
-
-
-# Build
-# =====
-
-build:
- python setup.py bdist_egg
-
-build-clean:
- python setup.py clean -a
- rm -rf dist
-
-
-# Jython
-# ======
-
-vendor/jython-installer.jar:
- #@wget "http://downloads.sourceforge.net/project/jython/jython/2.5.2/jython_installer-2.5.2.jar?r=http%3A%2F%2Fwiki.python.org%2Fjython%2FDownloadInstructions&ts=1336182239&use_mirror=superb-dca2" -O ./vendor/jython-installer.jar
- @wget "http://search.maven.org/remotecontent?filepath=org/python/jython-installer/2.5.3/jython-installer-2.5.3.jar" -O ./vendor/jython-installer.jar
-
-jython_home: vendor/jython-installer.jar
- @java -jar ./vendor/jython-installer.jar -s -d jython_home
-
-jenv: jython_home
- PATH=`pwd`/jython_home/bin:$$PATH jython ./vendor/virtualenv-1.7.1.2.py \
- --python=jython \
- --distribute \
- --unzip-setuptools \
- --prompt="[aspen] " \
- --never-download \
- --extra-search-dir=./vendor/ \
- jenv/
- ./jenv/bin/pip install -r requirements.txt
- ./jenv/bin/pip install -e ./
- # always required for jython since it's ~= python 2.5
- ./jenv/bin/pip install simplejson
-
-jython-nosetests.xml: jenv
- ./jenv/bin/jython ./jenv/bin/nosetests --with-xunit tests --xunit-file=jython-nosetests.xml --cover-package aspen
-
-jython-test: jython-nosetests.xml
-
-jython-clean:
- rm -rf jenv vendor/jython-installer.jar jython_home jython-nosetests.xml
- find . -name \*.class -delete
-
-
-# Clean
-# =====
-
-clean: env-clean testing-clean jython-clean build-clean
1  Procfile
@@ -1 +0,0 @@
-web: aspen --network_address=:$PORT --www_root=doc/ --project_root=doc/.aspen
5 README
@@ -1,5 +0,0 @@
-Aspen is a Python web framework. Simplates are the main attraction.
-
-See the website for more information:
-
- http://aspen.io/
3  README.md
@@ -0,0 +1,3 @@
+This is the old location of the repo for the Aspen web framework.
+
+### New location: https://github.com/gratipay/aspen-python
30 aspen/__init__.py
@@ -1,30 +0,0 @@
-import sys
-import pkg_resources
-
-try: # Python >= 2.6
- from collections import Callable
- def is_callable(obj):
- return isinstance(obj, Callable)
-except ImportError: # Python < 2.6
- from operator import isCallable as is_callable
-
-# imports of convenience
-from aspen.http.response import Response
-from aspen import json_ as json
-from aspen.logging import log, log_dammit
-
-# Shut up, PyFlakes. I know I'm addicted to you.
-Response, json, is_callable, log, log_dammit
-
-dist = pkg_resources.get_distribution('aspen')
-__version__ = dist.version
-WINDOWS = sys.platform[:3] == 'win'
-NETWORK_ENGINES = ['cheroot', 'cherrypy', 'diesel', 'eventlet', 'gevent',
- 'pants', 'rocket', 'tornado', 'twisted']
-RENDERERS = ['jinja2',
- 'pystache',
- 'tornado',
- 'stdlib_format',
- 'stdlib_percent',
- 'stdlib_template'
- ]
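
Note on the convenience imports above: the Response, json, log, and log_dammit names re-exported at the package root are what the rest of the codebase imports directly (for example, aspen/auth/httpbasic.py below does "from aspen import Response"). A minimal hedged sketch of that usage, assuming Response can be constructed from just a status code and a body:

    from aspen import Response, log

    def reject(request):
        log("rejecting request")          # aspen.logging.log, re-exported above
        raise Response(403, "Forbidden")  # aspen.http.response.Response, re-exported above
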
24 aspen/auth/__init__.py
@@ -1,24 +0,0 @@
-"""
-Aspen's Auth modules.
-
-Currently:
-
- * cookie - Cookie Auth
- * httpbasic - HTTP BASIC Auth
- * httpdigest - HTTP DIGEST Auth
-
-"""
-from aspen.utils import typecheck
-
-
-class BaseUser(object):
-
- def __init__(self, token):
- typecheck(token, (unicode, None))
- self.token = token
-
- @property
- def ANON(self):
- return self.token is None
-
-User = BaseUser
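
Note: the ANON property above is how the rest of the auth code distinguishes anonymous visitors (no token) from authenticated ones; cookie.py below instantiates auth.User(token) and checks user.ANON. A small hedged sketch (Python 2, since the typecheck accepts unicode or None):

    from aspen import auth

    anonymous = auth.User(None)       # no token
    member = auth.User(u'alice')      # token is a unicode string

    anonymous.ANON                    # True: no token means anonymous
    member.ANON                       # False: a token means authenticated
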
89 aspen/auth/cookie.py
@@ -1,89 +0,0 @@
-"""This is a cookie authentication implementation for Aspen.
-"""
-import datetime
-
-from aspen import auth
-from aspen.utils import to_rfc822, utcnow
-from aspen.website import THE_PAST
-
-
-MINUTE = datetime.timedelta(seconds=60)
-HOUR = 60 * MINUTE
-DAY = 24 * HOUR
-WEEK = 7 * DAY
-
-
-TIMEOUT = 2 * HOUR
-
-
-# Public config knobs
-# ===================
-# Feel free to set these in, e.g., configure-aspen.py
-
-NAME = "auth"
-DOMAIN = None
-PATH = "/"
-HTTPONLY = "Yes, please."
-
-
-# Hooks
-# =====
-
-def inbound_early(request):
- """Authenticate from a cookie.
- """
- if 'user' not in request.context:
- token = None
- if NAME in request.headers.cookie:
- token = request.headers.cookie[NAME].value
- token = token.decode('US-ASCII')
- request.context['user'] = auth.User(token)
-
-
-def outbound_late(response):
- """Set outbound auth cookie.
- """
- if 'user' not in response.request.context:
- # XXX When does this happen? When auth.inbound_early hasn't run, eh?
- raise
-
- user = response.request.context['user']
- if not isinstance(user, auth.User):
- raise Exception("If you define 'user' in a simplate it has to be an "
- "instance of an aspen.auth.User.")
-
- if NAME not in response.request.headers.cookie:
- # no cookie in the request, don't set one on response
- return
- elif user.ANON:
- # user is anonymous, instruct browser to delete any auth cookie
- cookie_value = ''
- cookie_expires = THE_PAST
- else:
- # user is authenticated, keep it rolling for them
- cookie_value = user.token
- cookie_expires = to_rfc822(utcnow() + TIMEOUT)
-
-
- # Configure outgoing cookie.
- # ==========================
-
- response.headers.cookie[NAME] = cookie_value # creates a cookie object?
- cookie = response.headers.cookie[NAME] # loads a cookie object?
-
- cookie['expires'] = cookie_expires
-
- if DOMAIN is not None:
- # Browser default is the domain of the resource requested.
- # Aspen default is the browser default.
- cookie['domain'] = DOMAIN
-
- if PATH is not None:
- # XXX What's the browser default? Probably /? Or current dir?
- # Aspen default is "/".
- cookie['path'] = PATH
-
- if HTTPONLY is not None:
- # Browser default is to allow access from JavaScript.
- # Aspen default is to prevent access from JavaScript.
- cookie['httponly'] = HTTPONLY
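
Note: the module above plugs into Aspen through its hook system: inbound_early populates request.context['user'] from the cookie, and outbound_late refreshes or clears the cookie on the way out. A hedged sketch of a configure-aspen.py wiring it up; the register() pattern follows the httpbasic docstring below, and the exact hook names are an assumption:

    # configure-aspen.py (sketch; "website" is in scope here)
    from aspen.auth import cookie

    # public config knobs, per the comment in the module above
    cookie.NAME = "auth"
    cookie.TIMEOUT = 24 * cookie.HOUR

    website.hooks.inbound_early.register(cookie.inbound_early)
    website.hooks.outbound_late.register(cookie.outbound_late)
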
124 aspen/auth/httpbasic.py
@@ -1,124 +0,0 @@
-"""
-HTTP BASIC Auth module for Aspen.
-
-To use:
-
- # import it
- from aspen.auth import httpbasic
-
- # configure it - see the docs on the BasicAuth object for args to inbound_responder()
- auth = httpbasic.inbound_responder(my_password_verifier)
-
- # install it
- website.hooks.inbound_early.register(auth)
-"""
-
-import base64
-
-from aspen import Response
-
-
-def inbound_responder(*args, **kwargs):
- """ see BasicAuth object for args; they're passed through """
- auth = BasicAuth(*args, **kwargs)
- def _(request):
- """generated request-handling method"""
- request.auth = BAWrapper(auth, request)
- authed, response = auth.authorized(request)
- if not authed:
- raise response
- return request
- return _
-
-
-class BAWrapper(object):
- """A convenience wrapper for BasicAuth handler to put on the request
- object so the user can do 'request.auth.username()'
- instead of 'request.auth.username(request)'
- """
-
- def __init__(self, basicauth, request):
- self.auth = basicauth
- self.request = request
-
- def authorized(self):
- return self.auth.authorized(self.request)
-
- def username(self):
- return self.auth.username(self.request)
-
- def logout(self):
- return self.auth.logout(self.request)
-
-
-class BasicAuth(object):
- """An HTTP BASIC AUTH handler for Aspen."""
-
- def __init__(self, verify_password, html=None, realm='protected'):
- """Constructor for an HTTP BASIC AUTH handler.
-
- :verify_password - a function that, when passed the args
- (user, password), will return True iff the password is
- correct for the specified user
- :html - The HTML page to return along with a 401 'Not
- Authorized' response. Has a reasonable default
- :realm - the name of the auth realm
- """
- failhtml = html or '''Not Authorized. <a href="#">Try again.</a>'''
- self.verify_password = verify_password
- fail_header = { 'WWW-Authenticate': 'Basic realm="%s"' % realm }
- self.fail_401 = Response(401, failhtml, fail_header)
- self.fail_400 = Response(400, failhtml, fail_header)
- self.logging_out = set([])
-
- def authorized(self, request):
- """Returns whether this request passes BASIC auth or not, and
- the Response to raise if not
- """
- header = request.headers.get('Authorization', '')
- if not header:
- #print("no auth header.")
- # no auth header at all
- return False, self.fail_401
- if not header.startswith('Basic'):
- #print("not a Basic auth header.")
- # not a basic auth header at all
- return False, self.fail_400
- try:
- userpass = base64.b64decode(header[len('Basic '):])
- except TypeError:
- # malformed user:pass
- return False, self.fail_400
- if not ':' in userpass:
- # malformed user:pass
- return False, self.fail_400
- user, passwd = userpass.split(':', 1)
- if user in self.logging_out:
- #print("logging out, so failing once.")
- self.logging_out.discard(user)
- return False, self.fail_401
- if not self.verify_password(user, passwd):
- #print("wrong password.")
- # wrong password
- # TODO: add a max attempts per timespan to slow down bot attacks
- return False, self.fail_401
- return True, None
-
- def username(self, request):
- """Returns the username in the current Auth header"""
- header = request.headers.get('Authorization', '')
- if not header.startswith('Basic'):
- return None
- userpass = base64.b64decode(header[len('Basic '):])
- if not ':' in userpass:
- return None
- user, _ = userpass.split(':', 1)
- return user
-
- def logout(self, request):
- """Will force the next auth request (ie. HTTP request) to fail,
- thereby prompting the user for their username/password again
- """
- self.logging_out.add(self.username(request))
- return request
-
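
Note: the docstring at the top of this module already shows the wiring; the one piece it leaves abstract is my_password_verifier, a callable taking (user, password) and returning True iff the password is correct for that user. A hedged sketch with a hard-coded user table (credentials illustrative only):

    # configure-aspen.py (sketch; "website" is in scope here)
    from aspen.auth import httpbasic

    USERS = {'guest': 'guest'}                    # illustrative credentials

    def verify_password(user, password):
        return USERS.get(user) == password

    auth = httpbasic.inbound_responder(verify_password, realm='protected')
    website.hooks.inbound_early.register(auth)
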
412 aspen/auth/httpdigest.py
@@ -1,412 +0,0 @@
-# Originally by Josh Goldoot
-# version 0.01
-# Public domain.
-# from http://www.autopond.com/digestauth.py
-# modified by Paul Jimenez
-
-import random, time, re
-
-try:
- from hashlib import md5
-except ImportError:
- from md5 import new as md5
-
-
-class MalformedAuthenticationHeader(Exception): pass
-
-## wrapper bits
-
-class AspenHTTPProvider:
- """An abstraction layer between the Auth object and
- http-framework specific code."""
-
- def __init__(self, request):
- self.request = request
-
- def set_request(self, request):
- self.request = request
-
- def auth_header(self, default):
- return self.request.headers.get('Authorization', default)
-
- def user_agent(self):
- return self.request.headers.get('User-Agent') or ''
-
- def request_method(self):
- return self.request.line.method
-
- def path_and_query(self):
- return self.request.line.uri.raw
-
- def send_400(self, html, extraheaders):
- from aspen import Response
- return Response(400, html, extraheaders)
-
- def send_401(self, html, extraheaders):
- from aspen import Response
- return Response(401, html, extraheaders)
-
- def send_403(self, html, extraheaders):
- from aspen import Response
- return Response(403, html, extraheaders)
-
-
-## make a generator of containers that aspen will like
-
-def inbound_responder(*args, **kw):
- """ This should be used in your configure-aspen.py like so:
-
- import aspen.auth.httpdigest as digestauth
-
- def get_digest(username, realm):
- users = { 'guest':'guest',
- }
- password = users[username]
- return digestauth.digest(':'.join([username, realm, password]))
-
- website.hooks.inbound_early.register(digestauth.inbound_responder(get_digest))
- """
- kwargs = kw.copy()
- kwargs['http_provider'] = AspenHTTPProvider
- auth = Auth(*args, **kwargs)
- def _(request):
- """generated hook function"""
- request.auth = AspenAuthWrapper(auth, request)
- authed, response = auth.authorized(request)
- if not authed:
- #print "Response: %s" % repr(response.headers)
- raise response
- return request
- return _
-
-
-class AspenAuthWrapper(object):
- """Convenience class to put on a request that
- has a reference to the request its on so accessing
- auth methods doesn't require repeating the request arg.
- """
-
- def __init__(self, auth, request):
- self.auth = auth
- self.request = request
-
- def authorized(self):
- """delegates to self.auth object"""
- return self.auth.authorized(self.request)[0]
-
- def username(self):
- """delegates to self.auth object"""
- return self.auth.username(self.request)
-
- def logout(self):
- """delegates to self.auth object"""
- return self.auth.logout(self.request)
-
-
-## Fundamental utilities
-
-class Storage(dict):
- """
- (from web.py)
- A Storage object is like a dictionary except `obj.foo` can be used
- in addition to `obj['foo']`.
-
- >>> o = storage(a=1)
- >>> o.a
- 1
- >>> o['a']
- 1
- >>> o.a = 2
- >>> o['a']
- 2
- >>> del o.a
- >>> o.a
- Traceback (most recent call last):
- ...
- AttributeError: 'a'
-
- """
- def __getattr__(self, key):
- try:
- return self[key]
- except KeyError, k:
- raise AttributeError, k
-
- def __setattr__(self, key, value):
- self[key] = value
-
- def __delattr__(self, key):
- try:
- del self[key]
- except KeyError, k:
- raise AttributeError, k
-
- def __repr__(self):
- return '<Storage ' + dict.__repr__(self) + '>'
-
-
-## Actual authentication obj
-
-class Auth(object):
- """A decorator class implementing digest authentication (RFC 2617)"""
- def __init__(self, get_digest, realm="Protected", tolerate_ie = True, redirect_url = '/newuser', unauth_html = None, nonce_skip = 0, lockout_time = 20, nonce_life = 180, tries=3, domain=[], http_provider=None):
- """Creates a decorator specific to a particular web application.
- get_digest: a function taking the arguments (username, realm), and returning digestauth.digest(username:realm:password), or
- throwing KeyError if no such user
- realm: the authentication "realm"
- tolerate_ie: don't deny requests from Internet Explorer, even though it is standards uncompliant and kind of insecure
- redirect_url: when user hits "cancel," they are redirected here
- unauth_html: the HTML that is sent to the user and displayed if they hit cancel (default is a redirect page to redirect_url)
- nonce_skip: tolerate skips in the nonce count, only up to this amount (useful if CSS or JavaScript is being loaded unbeknownst to your code)
- lockout_time: number of seconds a user is locked out if they send a wrong password (tries) times
- nonce_life: number of seconds a nonce remains valid
- tries: number of tries a user gets to enter a correct password before the account is locked for lockout_time seconds
- http_provider: interface to HTTP protocol workings (see above code)
- """
- self.http_provider = http_provider
- if self.http_provider is None:
- raise Exception("no http_provider provided")
- self.get_digest, self.realm, self.tolerate_ie = (get_digest, realm, tolerate_ie)
- self.lockout_time, self.tries, self.nonce_life, self.domain = (lockout_time, tries - 1, nonce_life, domain)
- self.unauth_html = unauth_html or self._default_401_html.replace("$redirecturl", redirect_url)
- self.outstanding_nonces = NonceMemory()
- self.outstanding_nonces.set_nonce_skip(nonce_skip)
- self.user_status = {}
- self.opaque = "%032x" % random.getrandbits(128)
-
- _default_401_html = """
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html xmlns="http://www.w3.org/1999/xhtml">
-<head>
- <meta http-equiv="REFRESH" content="1; URL=$redirecturl" />
- <title></title>
-</head>
-<body>
-</body>
-</html>
-"""
-
- def authorized(self, request):
- """ is this request authorized?
- returns a tuple where the first value is true if so and false if not, and the second value is the response to return
- """
- http = self.http_provider(request)
- request_header = http.auth_header(None)
- if not request_header:
- # client has failed to include an authentication header; send a 401 response
- return False, self._send_401_unauth_response(http, "No auth header")
- if request_header[0:7] != "Digest ":
- # client has attempted to use something other than Digest authenication; deny
- return False, self._deny_bad_request(http)
- req_header_dict = parse_auth_header(request_header)
- if not self._directive_proper(http.user_agent(), req_header_dict, http.path_and_query()):
- # Something is wrong with the authentication header
- if req_header_dict.get('opaque', self.opaque) != self.opaque:
- # Didn't send back the correct "opaque;" probably, our server restarted. Just send
- # them another authentication header with the correct opaque field.
- return False, self._send_401_unauth_response(http, "Incorrect opaque field.")
- else:
- # Their header had a more fundamental problem. Something is fishy. Deny access.
- return False, self._deny_bad_request(http, "Authorization Request Header does not conform to RFC 2617 section 3.2.2")
- # if user sent a "logout" nonce, make them type in the password again
- if len(req_header_dict['nonce']) != 34:
- return False, self._send_401_unauth_response(http, "Logged out.")
- nonce_response = self.outstanding_nonces.nonce_state(req_header_dict)
- if nonce_response == NonceMemory.NONCE_INVALID:
- # Client sent a nonce we've never heard of before
- return False, self._deny_bad_request(http)
- if nonce_response == NonceMemory.NONCE_OLD:
- # Client sent an old nonce. Give the client a new one, and ask to authenticate again before continuing.
- return False, self._send_401_unauth_response(http, "Stale nonce. Try again.", stale=True)
- username = req_header_dict['username']
- status = self.user_status.get(username, (self.tries, 0))
- if status[0] < 1 and time.time() < status[1]:
- # User got the password wrong within the last (self.lockout_time) seconds
- return False, self._deny_forbidden(http)
- if status[0] < 1:
- # User sent the wrong password, but more than (self.lockout_time) seconds have passed, so give
- # them another try. However, send a 401 header so user's browser prompts for a password
- # again.
- self.user_status[username] = (1, 0)
- return False, self._send_401_unauth_response(http, "Wrong password, try again.")
- if self._request_digest_valid(req_header_dict, http.request_method()):
- # User authenticated; forgive any past incorrect passwords and run the function we're decorating
- self.user_status[username] = (self.tries, 0)
- return True, None
- else:
- # User entered the wrong password. Deduct one try, and lock account if necessary
- self.user_status[username] = (status[0] - 1, time.time() + self.lockout_time)
- self._log_incorrect_password(username, req_header_dict)
- return False, self._send_401_unauth_response(http, "Wrong password. One try burned.")
-
- def _log_incorrect_password(self, username, req_header_dict):
- """Hook to log incorrrect password attempts"""
- pass # Do your own logging here
-
- def _directive_proper(self, user_agent, req_header_dict, req_path):
- """Verifies that the client's authentication header contained the required fields"""
- for variable in ['username', 'realm', 'nonce', 'uri', 'response', 'cnonce', 'nc']:
- if variable not in req_header_dict:
- return False
- # IE doesn't send "opaque" and does not include GET parameters in the Digest field
- standards_uncompliant = self.tolerate_ie and ("MSIE" in user_agent)
- return req_header_dict['realm'] == self.realm \
- and (standards_uncompliant or req_header_dict.get('opaque','') == self.opaque) \
- and len(req_header_dict['nc']) == 8 \
- and (req_header_dict['uri'] == req_path or (standards_uncompliant and "?" in req_path and req_path.startswith(req_header_dict['uri'])))
-
- def _request_digest_valid(self, req_header_dict, req_method):
- """Checks to see if the client's request properly authenticates"""
- # Ask the application for the hash of A1 corresponding to this username and realm
- try:
- HA1 = self.get_digest(req_header_dict['username'], req_header_dict['realm'])
- except KeyError:
- # No such user
- return False
- qop = req_header_dict.get('qop','auth')
- A2 = req_method + ':' + req_header_dict['uri']
- # auth-int stuff would go here, but few browsers support it
- nonce = req_header_dict['nonce']
- # Calculate the response we should have received from the client
- correct_answer = digest(":".join([HA1, nonce, req_header_dict['nc'], req_header_dict['cnonce'], qop, digest(A2) ]))
- # Compare the correct response to what the client sent
- return req_header_dict['response'] == correct_answer
-
- def _send_401_unauth_response(self, http, why_msg, stale=False):
- """send a 401, optionally with a stale flag"""
- nonce = self.outstanding_nonces.get_new_nonce(self.nonce_life)
- challenge_list = [ "realm=" + quote_it(self.realm),
- 'qop="auth"',
- 'nonce=' + quote_it(nonce),
- 'opaque=' + quote_it(self.opaque)
- ]
- if self.domain: challenge_list.append( 'domain=' + quote_it(" ".join(self.domain)) )
- if stale: challenge_list.append( 'stale="true"')
- extraheaders = [("WWW-Authenticate", "Digest " + ",".join(challenge_list)),
- ("Content-Type","text/html"),
- ("X-Why-Auth-Failed", why_msg)]
- return http.send_401(self.unauth_html, extraheaders)
-
- def _deny_bad_request(self, http, info=""):
- return http.send_400(info, [('Content-Type', 'text/html')])
-
- def _deny_forbidden(self, http):
- """Sent when user has entered an incorrect password too many times"""
- return http.send_403(self.unauth_html, [('Content-Type', 'text/html')])
-
- def _get_valid_auth_header(self, http):
- """returns valid dictionary of authorization header, or None"""
- request_header = http.auth_header(None)
- if not request_header:
- raise MalformedAuthenticationHeader()
- if request_header[0:7] != "Digest ":
- raise MalformedAuthenticationHeader()
- req_header_dict = parse_auth_header(request_header)
- if not self._directive_proper(http.user_agent(), req_header_dict, http.path_and_query()):
- raise MalformedAuthenticationHeader()
- return req_header_dict
-
- def logout(self, request):
- """Cause user's browser to stop sending correct authentication requests until user re-enters password"""
- http = self.http_provider(request)
- try:
- req_header_dict = self._get_valid_auth_header(http)
- except MalformedAuthenticationHeader:
- return
- if len(req_header_dict['nonce']) == 34:
- # First time: send a 401 giving the user the fake "logout" nonce
- nonce = "%032x" % random.getrandbits(136)
- challenge_list = [ "realm=" + quote_it(self.realm),
- 'qop="auth"',
- 'nonce=' + quote_it(nonce),
- 'opaque=' + quote_it(self.opaque),
- 'stale="true"']
- extraheaders = [("WWW-Authenticate", "Digest " + ",".join(challenge_list))]
- return http.send_401(None, extraheaders)
-
- def username(self, request):
- """Returns the HTTP username, or None if not logged in."""
- http = self.http_provider(request)
- try:
- req_header_dict = self._get_valid_auth_header(http)
- except MalformedAuthenticationHeader:
- return None
- if len(req_header_dict['nonce']) != 34:
- return None
- nonce_response = self.outstanding_nonces.nonce_state(req_header_dict)
- if nonce_response != NonceMemory.NONCE_VALID:
- # Client sent a nonce we've never heard of before
- # Client sent an old nonce. Give the client a new one, and ask to authenticate again before continuing.
- return None
- return req_header_dict.username
-
-
-
-def digest(data):
- """Return a hex digest MD5 hash of the argument"""
- return md5(data).hexdigest()
-
-def quote_it(s):
- """Return the argument quoted, suitable for a quoted-string"""
- return '"%s"' % (s.replace("\\","\\\\").replace('"','\\"'))
-
-## Code to parse the authentication header
-parse_auth_header_re = re.compile(r"""
- ( (?P<varq>[a-z]+)="(?P<valueq>.+?)"(,|$) ) # match variable="value", (terminated by a comma or end of line)
- |
- ( (?P<var>[a-z]+)=(?P<value>.+?)(,|$) ) # match variable=value, (same as above, but no quotes)
- """, re.VERBOSE | re.IGNORECASE )
-def parse_auth_header(header):
- """parse an authentication header into a dict"""
- result = Storage()
- for m in parse_auth_header_re.finditer(header):
- g = m.groupdict()
- if g['varq'] and g['valueq']:
- result[g['varq']] = g['valueq'].replace(r'\"', '"')
- elif g['var'] and g['value']:
- result[g['var']] = g['value']
- return result
-
-class NonceMemory(dict):
- """
- A dict of in-use nonces, with a couple methods to create new nonces and get the state of a nonce
- """
-
- NONCE_VALID = 1
- NONCE_INVALID = 2
- NONCE_OLD = 3
-
- def set_nonce_skip(self, nonce_skip):
- self.nonce_skip = nonce_skip
-
- def get_new_nonce(self, lifespan = 180):
- """Generate a new, unused nonce, with a nonce-count set to 1.
- :lifespan - how long (in seconds) the nonce is good for before it's considered 'old'
- """
- is_new = False
- while not is_new:
- nonce = "%034x" % random.getrandbits(136) # a random 136-bit zero-padded lowercase hex string
- is_new = not nonce in self
- self[nonce] = (time.time() + lifespan, 1)
- return nonce
-
- def nonce_state(self, req_header_dict):
- """ 1 = nonce valid, proceed; 2 = nonce totally invalid; 3 = nonce requires refreshing """
- nonce = req_header_dict.get('nonce', None)
- exp_time, nCount = self.get(nonce, (0, 0) )
- if exp_time == 0:
- # Client sent some totally unknown nonce -- reject
- return self.NONCE_INVALID
- try:
- incoming_nc = int((req_header_dict['nc']), 16)
- except ValueError:
- return self.NONCE_INVALID # the "nc" field was deformed (not hexadecimal); reject
- # default nonce_skip value
- nonce_skip = getattr(self, 'nonce_skip', 1)
- if exp_time == 1 or nCount > 1000 or exp_time < time.time() or incoming_nc - nCount > nonce_skip:
- # Client sent good nonce, but it is too old, or the count has gotten screwed up; give them a new one
- del self[nonce]
- return self.NONCE_OLD
- self[nonce] = (exp_time, incoming_nc + 1)
- return self.NONCE_VALID
-
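
Note: a practical consequence of the get_digest contract above is that the application never needs to store plaintext passwords; it can precompute HA1 = MD5("username:realm:password") with the module's own digest() helper and store only that hex string. A hedged sketch (values illustrative; KeyError signals "no such user", per the docstring):

    # configure-aspen.py (sketch; "website" is in scope here)
    import aspen.auth.httpdigest as digestauth

    # precompute once (e.g. at account creation) and store only the hex digest
    STORED = {('guest', 'protected'): digestauth.digest('guest:protected:guest')}

    def get_digest(username, realm):
        return STORED[(username, realm)]          # KeyError means no such user

    website.hooks.inbound_early.register(
        digestauth.inbound_responder(get_digest, realm='protected'))
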
148 aspen/backcompat.py
@@ -1,148 +0,0 @@
-
-
-from operator import itemgetter as _itemgetter
-from keyword import iskeyword as _iskeyword
-import sys as _sys
-
-
-
-def namedtuple(typename, field_names, verbose=False, rename=False):
- """Returns a new subclass of tuple with named fields.
-
- >>> Point = namedtuple('Point', 'x y')
- >>> Point.__doc__ # docstring for the new class
- 'Point(x, y)'
- >>> p = Point(11, y=22) # instantiate with positional args or keywords
- >>> p[0] + p[1] # indexable like a plain tuple
- 33
- >>> x, y = p # unpack like a regular tuple
- >>> x, y
- (11, 22)
- >>> p.x + p.y # fields also accessable by name
- 33
- >>> d = p._asdict() # convert to a dictionary
- >>> d['x']
- 11
- >>> Point(**d) # convert from a dictionary
- Point(x=11, y=22)
- >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields
- Point(x=100, y=22)
-
- """
-
- # Parse and validate the field names. Validation serves two purposes,
- # generating informative error messages and preventing template injection attacks.
- if isinstance(field_names, basestring):
- field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas
- field_names = tuple(map(str, field_names))
- if rename:
- names = list(field_names)
- seen = set()
- for i, name in enumerate(names):
- if (not min(c.isalnum() or c=='_' for c in name) or _iskeyword(name)
- or not name or name[0].isdigit() or name.startswith('_')
- or name in seen):
- names[i] = '_%d' % i
- seen.add(name)
- field_names = tuple(names)
- for name in (typename,) + field_names:
- if not min(c.isalnum() or c=='_' for c in name):
- raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name)
- if _iskeyword(name):
- raise ValueError('Type names and field names cannot be a keyword: %r' % name)
- if name[0].isdigit():
- raise ValueError('Type names and field names cannot start with a number: %r' % name)
- seen_names = set()
- for name in field_names:
- if name.startswith('_') and not rename:
- raise ValueError('Field names cannot start with an underscore: %r' % name)
- if name in seen_names:
- raise ValueError('Encountered duplicate field name: %r' % name)
- seen_names.add(name)
-
- # Create and fill-in the class template
- numfields = len(field_names)
- argtxt = repr(field_names).replace("'", "")[1:-1] # tuple repr without parens or quotes
- reprtxt = ', '.join('%s=%%r' % name for name in field_names)
- template = '''class %(typename)s(tuple):
- '%(typename)s(%(argtxt)s)' \n
- __slots__ = () \n
- _fields = %(field_names)r \n
- def __new__(_cls, %(argtxt)s):
- return _tuple.__new__(_cls, (%(argtxt)s)) \n
- @classmethod
- def _make(cls, iterable, new=tuple.__new__, len=len):
- 'Make a new %(typename)s object from a sequence or iterable'
- result = new(cls, iterable)
- if len(result) != %(numfields)d:
- raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
- return result \n
- def __repr__(self):
- return '%(typename)s(%(reprtxt)s)' %% self \n
- def _asdict(self):
- 'Return a new dict which maps field names to their values'
- return dict(zip(self._fields, self)) \n
- def _replace(_self, **kwds):
- 'Return a new %(typename)s object replacing specified fields with new values'
- result = _self._make(map(kwds.pop, %(field_names)r, _self))
- if kwds:
- raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
- return result \n
- def __getnewargs__(self):
- return tuple(self) \n\n''' % locals()
- for i, name in enumerate(field_names):
- template += ' %s = _property(_itemgetter(%d))\n' % (name, i)
- if verbose:
- print template
-
- # Execute the template string in a temporary namespace
- namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
- _property=property, _tuple=tuple)
- try:
- exec template in namespace
- except SyntaxError, e:
- raise SyntaxError(e.message + ':\n' + template)
- result = namespace[typename]
-
- # For pickling to work, the __module__ variable needs to be set to the frame
- # where the named tuple is created. Bypass this step in environments where
- # sys._getframe is not defined (Jython for example) or sys._getframe is not
- # defined for arguments greater than 0 (IronPython).
- try:
- result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
- except (AttributeError, ValueError):
- pass
-
- return result
-
-def namedtuple_test():
- # verify that instances can be pickled
- from cPickle import loads, dumps
- Point = namedtuple('Point', 'x, y', True)
- p = Point(x=10, y=20)
- assert p == loads(dumps(p, -1))
-
- # test and demonstrate ability to override methods
- class Point(namedtuple('Point', 'x y')):
- @property
- def hypot(self):
- return (self.x ** 2 + self.y ** 2) ** 0.5
- def __str__(self):
- return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot)
-
- for p in Point(3,4), Point(14,5), Point(9./7,6):
- print p
-
- class Point(namedtuple('Point', 'x y')):
- 'Point class with optimized _make() and _replace() without error-checking'
- _make = classmethod(tuple.__new__)
- def _replace(self, _map=map, **kwds):
- return self._make(_map(kwds.get, ('x', 'y'), self))
-
- print Point(11, 22)._replace(x=100)
-
- import doctest
- TestResults = namedtuple('TestResults', 'failed attempted')
- print TestResults(*doctest.testmod())
-
-
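Editor's note: the backported namedtuple above works by filling in a class-definition template string and exec'ing it in a throwaway namespace. Below is a minimal sketch of that same build-source-then-exec technique, with hypothetical names and nothing aspen-specific; it is an illustration, not part of the repository.

    # Minimal sketch of the template-plus-exec technique (hypothetical example).
    template = '''class %(typename)s(object):
        greeting = %(greeting)r
    ''' % {'typename': 'Greeter', 'greeting': 'hello'}

    namespace = {'__name__': 'sketch_Greeter'}   # temporary namespace to exec into
    exec template in namespace                   # Python 2 exec statement, as above
    Greeter = namespace['Greeter']
    print Greeter.greeting                       # -> hello

Generating source and exec'ing it is what lets the real namedtuple give each field a plain attribute and a readable __repr__ with no per-instance overhead.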
388 aspen/configuration/__init__.py
@@ -1,388 +0,0 @@
-"""Define configuration objects.
-"""
-import collections
-import errno
-import mimetypes
-import os
-import socket
-import sys
-import traceback
-
-import aspen
-import aspen.logging
-from aspen import execution
-from aspen.hooks import Hooks
-from aspen.configuration import parse
-from aspen.configuration.exceptions import ConfigurationError
-from aspen.configuration.options import OptionParser, DEFAULT
-from aspen.utils import ascii_dammit
-
-
-# Nicer defaultdict
-# =================
-
-NO_DEFAULT = object()
-
-
-class NicerDefaultDict(collections.defaultdict):
- """Subclass to support .default assignment.
- """
-
- __default = ''
- def _get_default(self):
- """property getter for default property"""
- return self.__default
- def _set_default(self, value):
- """property setter for default property"""
- self.default_factory = lambda: value
- self.__default = value
- default = property(_get_default, _set_default)
-
-
- def get(self, name, default=NO_DEFAULT):
- if default is NO_DEFAULT:
- default = self.default
- return collections.defaultdict.get(self, name, default)  # fall back to .default, not None
-
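Editor's note: as I read it, the point of NicerDefaultDict is to let a fallback value be assigned after construction via the .default property; configure() below does exactly that for default_renderers_by_media_type. A rough usage sketch, with made-up values:

    renderers = NicerDefaultDict()
    renderers.default = 'tornado'       # assigning .default installs a default_factory
    renderers['text/html'] = 'jinja2'   # explicit entries still take precedence
    print renderers['text/plain']       # -> 'tornado' (unknown media type falls back)
    print renderers['text/html']        # -> 'jinja2'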
-
-# Defaults
-# ========
-# The from_unicode callable converts from unicode to whatever format is
-# required by the variable, raising ValueError appropriately. Note that
-# name is supposed to match the options in our optparser. I like it WET (the repetition is deliberate).
-
-KNOBS = \
- { 'configuration_scripts': (lambda: [], parse.list_)
- , 'network_engine': (u'cheroot', parse.network_engine)
- , 'network_address': ( ((u'0.0.0.0', 8080), socket.AF_INET)
- , parse.network_address
- )
- , 'project_root': (None, parse.identity)
- , 'logging_threshold': (0, int)
- , 'www_root': (None, parse.identity)
-
-
- # Extended Options
- # 'name': (default, from_unicode)
- , 'changes_reload': (False, parse.yes_no)
- , 'charset_dynamic': (u'UTF-8', parse.charset)
- , 'charset_static': (None, parse.charset)
- , 'indices': ( lambda: [u'index.html', u'index.json', u'index']
- , parse.list_
- )
- , 'list_directories': (False, parse.yes_no)
- , 'media_type_default': ('text/plain', parse.media_type)
- , 'media_type_json': ('application/json', parse.media_type)
- , 'renderer_default': ('tornado', parse.renderer)
- , 'show_tracebacks': (False, parse.yes_no)
- }
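Editor's note: each knob pairs a default with a from_unicode callable from aspen.configuration.parse, which is not part of this diff. Purely to illustrate the contract the comment above describes (take a unicode string, return the hydrated value, raise ValueError on bad input), a yes/no parser might look roughly like this; it is a sketch, not the actual parse.yes_no:

    def yes_no(s):
        # from_unicode contract: unicode in, hydrated value out, ValueError on junk.
        s = s.strip().lower()
        if s in (u'yes', u'true', u'1'):
            return True
        if s in (u'no', u'false', u'0'):
            return False
        raise ValueError("must be yes/no, true/false, or 1/0")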
-
-
-# Configurable
-# ============
-# Designed as a singleton.
-
-class Configurable(object):
- """Mixin object for aggregating configuration from several sources.
- """
-
- protected = False # Set to True to require authentication for all
- # requests.
-
- @classmethod
- def from_argv(cls, argv):
- """return a Configurable based on the passed-in arguments list
- """
- configurable = cls()
- configurable.configure(argv)
- return configurable
-
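Editor's note: from_argv is the intended entry point. You instantiate a bare Configurable and hand it an argv-style list, minus the executable name (see configure's docstring below). A hedged sketch; the exact option spellings live in aspen.configuration.options, which is not shown in this diff:

    # Sketch only; flag names follow the "--" + knob-name convention used below.
    website = Configurable.from_argv(['--www_root', '/srv/www',
                                      '--project_root', '/srv/www/project'])
    print website.www_root    # the realpath'd web root, set by configure()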
-
- def _set(self, name, hydrated, flat, context, name_in_context):
- """Set value at self.name, calling value if it's callable.
- """
- if aspen.is_callable(hydrated):
- hydrated = hydrated() # Call it if we can.
- setattr(self, name, hydrated)
- if name_in_context:
- assert isinstance(flat, unicode) # sanity check
- name_in_context = " %s=%s" % (name_in_context, flat)
- out = " %-22s %-30s %-24s"
- return out % (name, hydrated, context + name_in_context)
-
- def set(self, name, raw, from_unicode, context, name_in_context):
- assert isinstance(raw, str), "%s isn't a bytestring" % name
-
- error = None
- try:
- value = raw.decode('US-ASCII')
- hydrated = from_unicode(value)
- except UnicodeDecodeError, error:
- value = ascii_dammit(raw)  # raw would not decode, so work from the bytestring
- error_detail = "Configuration values must be US-ASCII."
- except ValueError, error:
- error_detail = error.args[0]
-
- if error is not None:
- msg = "Got a bad value '%s' for %s %s:"
- msg %= (value, context, name_in_context)
- if error_detail:
- msg += " " + error_detail + "."
- raise ConfigurationError(msg)
-
- # special-case lists, so we can layer them
- if from_unicode is parse.list_:
- extend, new_value = hydrated
- if extend:
- old_value = getattr(self, name)
- hydrated = old_value + new_value
- else:
- hydrated = new_value
-
- args = (name, hydrated, value, context, name_in_context)
- return self._set(*args)
-
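Editor's note: every configuration source funnels through set(): the raw bytestring is decoded as US-ASCII, hydrated with the knob's from_unicode callable, and, for list-valued knobs, optionally layered on top of the previous value. A sketch of one call, mirroring how configure() invokes it below for an environment variable:

    # Sketch of a single call, shaped like the environment-variable case in configure().
    msg = website.set( 'charset_dynamic'         # knob name from KNOBS
                     , 'UTF-8'                   # raw bytestring from os.environ
                     , parse.charset             # that knob's from_unicode callable
                     , "environment variable"
                     , 'ASPEN_CHARSET_DYNAMIC'
                     )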
- def configure(self, argv):
- """Takes an argv list, and gives it straight to optparser.parse_args.
-
- The argv list should not include the executable name.
-
- """
-
- # Do some base-line configuration.
- # ================================
- # We want to do the following configuration of our Python environment
- # regardless of the user's configuration preferences
-
- # mimetypes
- aspens_mimetypes = os.path.join(os.path.dirname(__file__), 'mime.types')
- mimetypes.knownfiles += [aspens_mimetypes]
- # mimetypes.init is called below after the user has a turn.
-
- # XXX register codecs here
-
-
- # Parse argv.
- # ===========
-
- opts, args = OptionParser().parse_args(argv)
-
-
- # Configure from defaults, environment, and command line.
- # =======================================================
-
- msgs = ["Reading configuration from defaults, environment, and "
- "command line."] # can't actually log until configured
-
- for name, (default, func) in sorted(KNOBS.items()):
-
- # set the default value for this variable
- msgs.append(self._set(name, default, None, "default", ''))
-
- # set from the environment
- envvar = 'ASPEN_' + name.upper()
- value = os.environ.get(envvar, '').strip()
- if value:
- msgs.append(self.set( name
- , value
- , func
- , "environment variable"
- , envvar
- ))
-
- # set from the command line
- value = getattr(opts, name)
- if value is not DEFAULT:
- msgs.append(self.set( name
- , value
- , func
- , "command line option"
- , "--"+name
- ))
-
-
- # Set some attributes.
- # ====================
-
-
- def safe_getcwd(errorstr):
- try:
- # If the working directory no longer exists, then the following
- # will raise OSError: [Errno 2] No such file or directory. I
- # swear I've seen this under supervisor, though I don't have
- # steps to reproduce. :-( To get around this, specify a www_root
- # explicitly or use supervisor's cwd facility.
-
- return os.getcwd()
- except OSError, err:
- if err.errno != errno.ENOENT:
- raise
- raise ConfigurationError(errorstr)
-
-
-
- # LOGGING_THRESHOLD
- # -----------------
- # This is initially set to -1 rather than 0 so that we can tell whether the
- # user has already changed it programmatically before we got here. I do this
- # in the testing module; that's really what this is about.
- if aspen.logging.LOGGING_THRESHOLD == -1:
- aspen.logging.LOGGING_THRESHOLD = self.logging_threshold
- # Now that we know the user's desires, we can log appropriately.
- aspen.log_dammit(os.linesep.join(msgs))
-
- # project root
- if self.project_root is None:
- aspen.log_dammit("project_root not configured (no template bases, "
- "etc.).")
- configure_aspen_py = None
- else:
- # canonicalize it
- if not os.path.isabs(self.project_root):
- aspen.log_dammit("project_root is relative to CWD: '%s'."
- % self.project_root)
- cwd = safe_getcwd("Could not get a current working "
- "directory. You can specify "
- "ASPEN_PROJECT_ROOT in the environment, "
- "or --project_root on the command line.")
- self.project_root = os.path.join(cwd, self.project_root)
-
- self.project_root = os.path.realpath(self.project_root)
- aspen.log_dammit("project_root set to %s." % self.project_root)
-
- # mime.types
- users_mimetypes = os.path.join(self.project_root, 'mime.types')
- mimetypes.knownfiles += [users_mimetypes]
-
- # configure-aspen.py
- configure_aspen_py = os.path.join( self.project_root
- , 'configure-aspen.py'
- )
- self.configuration_scripts.append(configure_aspen_py) # last word
-
- # PYTHONPATH
- sys.path.insert(0, self.project_root)
-
- # www_root
- if self.www_root is None:
- self.www_root = safe_getcwd("Could not get a current working "
- "directory. You can specify "
- "ASPEN_WWW_ROOT in the environment, "
- "or --www_root on the command line.")
-
- self.www_root = os.path.realpath(self.www_root)
- os.chdir(self.www_root)
-
- # renderers
- self.renderer_factories = {}
- for name in aspen.RENDERERS:
- # Pre-populate renderers so we can report on ImportErrors early
- try:
- capture = {}
- python_syntax = 'from aspen.renderers.%s import Factory'
- exec python_syntax % name in capture
- make_renderer = capture['Factory'](self)
- except ImportError, err:
- make_renderer = err
- err.info = sys.exc_info()
- self.renderer_factories[name] = make_renderer
-
- default_renderer = self.renderer_factories[self.renderer_default]
- if isinstance(default_renderer, ImportError):
- msg = "\033[1;31mImportError loading the default renderer, %s:\033[0m"
- aspen.log_dammit(msg % self.renderer_default)
- sys.excepthook(*default_renderer.info)
- raise default_renderer
-
- aspen.log_dammit("Renderers (*ed are unavailable, CAPS is default):")
- width = max(map(len, self.renderer_factories))
- for name, factory in self.renderer_factories.items():
- star = " "
- if isinstance(factory, ImportError):
- star = "*"
- error = "ImportError: " + factory.args[0]
- else:
- error = ""
- if name == self.renderer_default:
- name = name.upper()
- name = name.ljust(width + 2)
- aspen.log_dammit(" %s%s%s" % (star, name, error))
-
- self.default_renderers_by_media_type = NicerDefaultDict()
- self.default_renderers_by_media_type.default = self.renderer_default
-
- # mime.types
- mimetypes.init()
-
- # network_engine
- try:
- capture = {}
- python_syntax = 'from aspen.network_engines.%s_ import Engine'
- exec python_syntax % self.network_engine in capture
- Engine = capture['Engine']
- except ImportError:
- # ANSI colors:
- # http://stackoverflow.com/questions/287871/
- # http://en.wikipedia.org/wiki/ANSI_escape_code#CSI_codes
- # XXX consider http://pypi.python.org/pypi/colorama
- msg = "\033[1;31mImportError loading the %s network engine:\033[0m"
- aspen.log_dammit(msg % self.network_engine)
- raise
- self.network_engine = Engine(self.network_engine, self)
-
- # network_address, network_sockfam, network_port
- self.network_address, self.network_sockfam = self.network_address
- if self.network_sockfam == socket.AF_INET:
- self.network_port = self.network_address[1]
- else:
- self.network_port = None
-
- # hooks
- self.hooks = Hooks([ 'startup'
- , 'inbound_early'
- , 'inbound_late'
- , 'outbound_early'
- , 'outbound_late'
- , 'shutdown'
- ])
-
-
- # Finally, exec any configuration scripts.
- # ========================================
- # The user gets self as 'website' inside their configuration scripts.
-
- for filepath in self.configuration_scripts: