
Commit

git-svn-id: http://micolog.googlecode.com/svn/trunk@75 fd139d67-4554-0410-8437-97b8145f5b4d
coolchyni committed Dec 30, 2009
1 parent d93bf45 commit e8e4857
Showing 31 changed files with 1,919 additions and 442 deletions.
596 changes: 267 additions & 329 deletions admin.py

Large diffs are not rendered by default.

122 changes: 120 additions & 2 deletions api_rpc.py
@@ -1,16 +1,19 @@
# -*- coding: utf-8 -*-
import wsgiref.handlers
import xmlrpclib
from xmlrpclib import Fault
import sys
import cgi
import base64
from datetime import datetime
from SimpleXMLRPCServer import SimpleXMLRPCDispatcher
from functools import wraps

from django.utils.html import strip_tags
sys.path.append('modules')
from base import *
from model import *
from micolog_plugin import *
from urlparse import urlparse

def checkauth(pos=1):
    def _decorate(method):
@@ -106,8 +109,14 @@ def metaWeblog_newPost(blogid, struct, publish):
    if struct.has_key('mt_excerpt'):
        entry.excerpt=struct['mt_excerpt']

    if struct.has_key('wp_password'):
        entry.password=struct['wp_password']

    if publish:
        entry.publish(True)
        if struct.has_key('mt_tb_ping_urls'):
            for url in struct['mt_tb_ping_urls']:
                util.do_trackback(url,entry.title,entry.get_content_excerpt(more='')[:60],entry.fullurl(),g_blog.title)
    else:
        entry.save()
    postid =entry.key().id()
@@ -145,6 +154,9 @@ def metaWeblog_editPost(postid, struct, publish):
    if struct.has_key('mt_excerpt'):
        entry.excerpt=struct['mt_excerpt']

    if struct.has_key('wp_password'):
        entry.password=struct['wp_password']

    entry.title = struct['title']
    entry.content = struct['description']
@@ -220,6 +232,8 @@ def wp_newPage(blogid,struct,publish):
        entry.slug=struct['wp_slug']
    if struct.has_key('wp_page_order'):
        entry.menu_order=int(struct['wp_page_order'])
    if struct.has_key('wp_password'):
        entry.password=struct['wp_password']
    entry.entrytype='page'
    if publish:
        entry.publish(True)
@@ -254,6 +268,8 @@ def wp_editPage(blogid,pageid,struct,publish):
    if struct.has_key('wp_page_order'):
        entry.menu_order=int(struct['wp_page_order'])

    if struct.has_key('wp_password'):
        entry.password=struct['wp_password']

    entry.title = struct['title']
    entry.content = struct['description']
@@ -298,8 +314,107 @@ def mt_getPostCategories(blogid):

def mt_setPostCategories(*arg):
    return True
#-------------------------------------------------------------------------------

#------------------------------------------------------------------------------
#pingback
#------------------------------------------------------------------------------
_title_re = re.compile(r'<title>(.*?)</title>(?i)')
_pingback_re = re.compile(r'<link rel="pingback" href="([^"]+)" ?/?>(?i)')
_chunk_re = re.compile(r'\n\n|<(?:p|div|h\d)[^>]*>')
def pingback_ping(source_uri, target_uri):
    # next we check if the source URL does indeed exist
    if not g_blog.allow_pingback:
        raise Fault(49,"Access denied.")
    try:
        g_blog.tigger_action("pre_ping",source_uri,target_uri)
        response = urlfetch.fetch(source_uri)
    except Exception, e:
        #logging.info(e.message)
        raise Fault(16, 'The source URL does not exist.%s'%source_uri)
    # we only accept pingbacks for links below our blog URL
    blog_url = g_blog.baseurl
    if not blog_url.endswith('/'):
        blog_url += '/'
    if not target_uri.startswith(blog_url):
        raise Fault(32, 'The specified target URL does not exist.')
    path_info = target_uri[len(blog_url):]

    pingback_post(response,source_uri,target_uri,path_info)
    try:
        return "Micolog pingback succeed!"
    except:
        raise Fault(49,"Access denied.")


def get_excerpt(response, url_hint, body_limit=1024 * 512):
    """Get an excerpt from the given `response`. `url_hint` is the URL
    which will be used as anchor for the excerpt. The return value is a
    tuple in the form ``(title, body)``. If one of the two items could
    not be calculated it will be `None`.
    """
    contents = response.content[:body_limit]

    title_match = _title_re.search(contents)
    title = title_match and strip_tags(title_match.group(1)) or None

    link_re = re.compile(r'<a[^>]+?"\s*%s\s*"[^>]*>(.*?)</a>(?is)' %
                         re.escape(url_hint))
    for chunk in _chunk_re.split(contents):
        match = link_re.search(chunk)
        if not match:
            continue
        before = chunk[:match.start()]
        after = chunk[match.end():]
        raw_body = '%s\0%s' % (strip_tags(before).replace('\0', ''),
                               strip_tags(after).replace('\0', ''))
        body_match = re.compile(r'(?:^|\b)(.{0,120})\0(.{0,120})(?:\b|$)') \
            .search(raw_body)
        if body_match:
            break
    else:
        return title, None

    before, after = body_match.groups()
    link_text = strip_tags(match.group(1))
    if len(link_text) > 60:
        link_text = link_text[:60] + u' …'

    bits = before.split()
    bits.append(link_text)
    bits.extend(after.split())
    return title, u'[…] %s […]' % u' '.join(bits)

def pingback_post(response,source_uri, target_uri, slug):
    """This is the pingback handler for posts."""
    entry = Entry.all().filter("published =", True).filter('link =', slug).get()
    #use allow_trackback as allow_pingback
    if entry is None or not entry.allow_trackback:
        raise Fault(33, 'no such post')
    title, excerpt = get_excerpt(response, target_uri)
    if not title:
        raise Fault(17, 'no title provided')
    elif not excerpt:
        raise Fault(17, 'no useable link to target')

    comment = Comment.all().filter("entry =", entry).filter("weburl =", source_uri).get()
    if comment:
        raise Fault(48, 'pingback has already been registered')
        return

    comment=Comment(author=urlparse(source_uri).hostname,
                    content="<strong>"+title[:250]+"...</strong><br/>" +
                            excerpt[:250] + '...',
                    weburl=source_uri,
                    entry=entry)
    comment.ctype=COMMENT_PINGBACK
    comment.save()
    g_blog.tigger_action("pingback_post",comment)
    memcache.delete("/"+entry.link)
    return True
##------------------------------------------------------------------------------
class PlogXMLRPCDispatcher(SimpleXMLRPCDispatcher):
    def __init__(self, funcs):
        SimpleXMLRPCDispatcher.__init__(self, True, 'utf-8')
@@ -329,6 +444,9 @@ def __init__(self, funcs):
    'mt.setPostCategories':mt_setPostCategories,
    'mt.getPostCategories':mt_getPostCategories,

    ##pingback
    'pingback.ping':pingback_ping,

    })
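The additions above extend Micolog's XML-RPC surface: posts and pages may carry a wp_password, trackback pings go out for mt_tb_ping_urls on publish, and a pingback.ping endpoint is registered. A minimal client sketch, assuming a Python 2 xmlrpclib client; the endpoint path /rpc, hostname, and credentials are placeholders for illustration, not taken from this commit:

# Client-side sketch only -- endpoint path and credentials are placeholders.
import xmlrpclib

server = xmlrpclib.ServerProxy('http://yourapp.appspot.com/rpc')  # assumed XML-RPC endpoint

struct = {
    'title': 'Hello from XML-RPC',
    'description': '<p>Post body</p>',
    'mt_excerpt': 'Short excerpt',
    'wp_password': 'secret',                                       # stored by the new wp_password branch
    'mt_tb_ping_urls': ['http://other.example.com/trackback/42'],  # pinged only when publish is true
}

# Standard MetaWeblog signature: blogid, username, password, struct, publish
postid = server.metaWeblog.newPost('1', 'admin@example.com', 'password', struct, True)

# pingback.ping takes two positional arguments: the linking (source) page, then the linked (target) post.
# pingback_ping only accepts targets that start with g_blog.baseurl.
server.pingback.ping('http://caller.example.com/a-post-that-links-here',
                     'http://yourapp.appspot.com/your-post-link')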
7 changes: 5 additions & 2 deletions app.yaml
@@ -1,4 +1,4 @@
application: mlog
application: micolog
version: 1
runtime: python
api_version: 1
@@ -9,7 +9,7 @@ handlers:
  static_files: static/images/favicon.ico
  upload: static/images/favicon.ico
  mime_type: image/x-icon

- url: /robots\.txt
  static_files: static/robots.txt
  upload: static/robots.txt
@@ -32,9 +32,12 @@ handlers:

- url: /admin/.*
  script: admin.py
  login: admin

- url: /admin
  script: admin.py
  login: admin

- url: /.*
  script: blog.py
88 changes: 88 additions & 0 deletions app/gmemsess.py
@@ -0,0 +1,88 @@
# gmemsess.py - memcache-backed session Class for Google Appengine
# Version 1.4
# Copyright 2008 Greg Fawcett <greg@vig.co.nz>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import random
from google.appengine.api import memcache

_sidChars='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
_defaultTimeout=30*60 # 30 min
_defaultCookieName='gsid'

#----------------------------------------------------------------------
class Session(dict):
    """A secure lightweight memcache-backed session Class for Google Appengine."""

    #----------------------------------------------------------
    def __init__(self,rh,name=_defaultCookieName,timeout=_defaultTimeout):
        """Create a session object.

        Keyword arguments:
        rh -- the parent's request handler (usually self)
        name -- the cookie name (defaults to "gsid")
        timeout -- the number of seconds the session will last between
                   requests (defaults to 1800 secs - 30 minutes)
        """
        self.rh=rh # request handler
        self._timeout=timeout
        self._name=name
        self._new=True
        self._invalid=False
        dict.__init__(self)

        if name in rh.request.str_cookies:
            self._sid=rh.request.str_cookies[name]
            data=memcache.get(self._sid)
            if data!=None:
                self.update(data)
                # memcache timeout is absolute, so we need to reset it on each access
                memcache.set(self._sid,data,self._timeout)
                self._new=False
                return

        # Create a new session ID
        # There are about 10^14 combinations, so guessing won't work
        self._sid=random.choice(_sidChars)+random.choice(_sidChars)+\
            random.choice(_sidChars)+random.choice(_sidChars)+\
            random.choice(_sidChars)+random.choice(_sidChars)+\
            random.choice(_sidChars)+random.choice(_sidChars)
        # Added path so session works with any path
        rh.response.headers.add_header('Set-Cookie','%s=%s; path=/;'%(name,self._sid))

    #----------------------------------------------------------
    def save(self):
        """Save session data."""
        if not self._invalid:
            memcache.set(self._sid,self.copy(),self._timeout)

    #----------------------------------------------------------
    def is_new(self):
        """Returns True if session was created during this request."""
        return self._new

    #----------------------------------------------------------
    def get_id(self):
        """Returns session id string."""
        return self._sid

    #----------------------------------------------------------
    def invalidate(self):
        """Delete session data and cookie."""
        self.rh.response.headers.add_header('Set-Cookie',
            '%s=; expires=Sat, 1-Jan-2000 00:00:00 GMT;'%(self._name))
        memcache.delete(self._sid)
        self.clear()
        self._invalid=True
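
The new gmemsess.Session is meant to be driven from a Google App Engine webapp request handler. A usage sketch under that assumption; the handler name and import path are illustrative, only Session, save, is_new and invalidate come from the file above:

# Usage sketch -- handler name and import path are illustrative.
from google.appengine.ext import webapp
from gmemsess import Session  # adjust the import to wherever app/gmemsess.py sits on sys.path

class CounterHandler(webapp.RequestHandler):
    def get(self):
        sess = Session(self)          # loads the session named by the 'gsid' cookie, or creates one
        if sess.is_new():
            sess['hits'] = 0
        sess['hits'] = sess['hits'] + 1
        sess.save()                   # nothing reaches memcache until save() is called
        self.response.out.write('hits this session: %d' % sess['hits'])

    def post(self):
        Session(self).invalidate()    # expires the cookie and deletes the memcache entry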
