core.py
import re, cgi, bz2, uuid
from google.appengine.api import urlfetch, users
import models, tools
import os
ADDRESS = 'http://www.peeep.us/'
ADDRESS2 = 'http://peeep.us/'
ANONYMOUS_DOMAIN = 'anonymous'

def getEffectiveAddress():
    address = ADDRESS
    if 'HTTP_HOST' in os.environ:
        address = 'http://%s/' % os.environ['HTTP_HOST']
    return address
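
# Illustrative only: getEffectiveAddress() prefers the host the current request
# actually came in on, falling back to the canonical ADDRESS constant. With a
# hypothetical host 'my-app.appspot.com':
#   os.environ['HTTP_HOST'] == 'my-app.appspot.com'
#   getEffectiveAddress()   == 'http://my-app.appspot.com/'
# Outside a request (no HTTP_HOST), it returns ADDRESS unchanged.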

def isAnonymous(user):
    return user.email().endswith('@' + ANONYMOUS_DOMAIN)

def generateAnonymous():
    return users.User('%s@%s' % (str(uuid.uuid1()), ANONYMOUS_DOMAIN))
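
# Illustrative only: generateAnonymous() mints a synthetic users.User whose
# e-mail address lives in the reserved ANONYMOUS_DOMAIN, and isAnonymous()
# recognizes such users later. Hypothetical values:
#   u = generateAnonymous()                          # users.User('2f1c...@anonymous')
#   isAnonymous(u)                                   # True
#   isAnonymous(users.User('someone@example.com'))   # False (example address)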

def getBookmarklet(html=False):
    code = (u"javascript: void(function(){var s=document.createElement('script'),sa='setAttribute';s[sa]('type','text/javascript');" +
            u"s[sa]('src','%sassets/send.js');document.body.appendChild(s); })();" % getEffectiveAddress())
    if html:
        code = u'''<a href="%s" onclick="window.alert('Drag this link to your bookmark bar'); return false">Get peeep link</a>''' % code
    return code
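
# Illustrative only: the bookmarklet is a javascript: URL that injects
# <effective address>assets/send.js into the page the user is viewing.
#   getBookmarklet()           # raw javascript: URL, e.g. for templates
#   getBookmarklet(html=True)  # ready-made <a> link to drag to the bookmark bar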

class NotFound(Exception):
    pass

class Forbidden(Exception):
    pass

class DownloadFail(Exception):
    pass

def decodeContent(content):
    if content[0:2] == 'BZ':
        try:
            return bz2.decompress(content)
        except IOError, e:
            return content
    return content
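
# Illustrative only: cached page bodies may be stored bz2-compressed (see the
# commented-out updateCache below), so decodeContent() transparently handles
# both forms. A hypothetical round trip:
#   decodeContent(bz2.compress('<html></html>'))  # -> '<html></html>'
#   decodeContent('<html></html>')                # -> '<html></html>' (passed through)
# A payload that merely starts with 'BZ' but is not valid bz2 data falls back
# to the raw bytes via the IOError branch.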

def fetch(url):
    try:
        req, a_url = tools.smartFetch(tools.asciify_url(url), allow_truncated=True, deadline=10)
    except urlfetch.Error, e:
        raise DownloadFail(url, e)
    if req.status_code != 200:
        raise DownloadFail(url, req.status_code)
    content = req.content
    contentType = req.headers['Content-type']
    return content, contentType, a_url
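
# Illustrative only: callers are expected to catch DownloadFail, which wraps
# both urlfetch errors and non-200 responses. A hypothetical caller:
#   try:
#       content, contentType, final_url = fetch('http://example.com/page')
#   except DownloadFail, e:
#       content = None  # e.args holds the url plus the error or status code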

def getCache(page):
    return models.Cache.all().filter('page =', page).filter('url =', tools.md5(unicode(page.url))).get()

def preprocessHtml(html, url):
    safe_url = cgi.escape(url, True).encode('utf-8')
    offs = 0
    m = re.match(r'(?iL)((?:\s+|<!DOCTYPE\b[^>]*>|<html\b[^>]*>|<head\b[^>]*>)*)', html)
    if m:  # skip any leading tags (DOCTYPE, <html>, <head>) so <base> lands inside the document
        offs = m.end(0)
    html = html[:offs] + r'<!--PEEEP--><base href="%s"/><!--/PEEEP-->' % safe_url + html[offs:]
    return html
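
# Illustrative only: preprocessHtml() injects a <base href> right after any
# DOCTYPE/<html>/<head> prologue so relative links in the archived copy resolve
# against the original URL. A hypothetical input/output pair:
#   preprocessHtml('<html><head><title>t</title></head>...', 'http://example.com/a?b=1')
#   -> '<html><head><!--PEEEP--><base href="http://example.com/a?b=1"/><!--/PEEEP--><title>t</title></head>...'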

#def invalidateCache(page):
#    caches = models.Cache.all().filter('page =', page).fetch(2000)
#    for cache in caches:
#        cache.delete()

#def updateCache(page, content=None, contentType=None, invalidate=False):
#    if invalidate:
#        invalidateCache(page)
#
#    if content is None:
#        content, contentType = fetch(page)
#
#    content = bz2.compress(content)
#    cache = models.Cache(page=page, url=tools.md5(unicode(page.url)), content=content, contentType=contentType)
#    cache.put()
#    return cache