#!/usr/bin/python
# http://del.icio.us/doc/api
# serve up a delicious proxy
#
# side-step anything with certain tags - and if "selection" is listed, grab the tag?
# and then augment the tags as needed...
# xx = xml.dom.minidom.parse("tagsample")
# base URL of the upstream del.icio.us API; every proxied call is joined onto this
parent_url = "http://del.icio.us/api/"
# check for 503 errors, and at least pass them back
# email joshua about it if released
# set user-agent (wrap the caller's?)
# all of these are base urls off of parent_url
# that take cgi arguments
# posts/dates
# tag= optional, filter
# returns a list of dates with the number of posts at each date.
# tags/get
# returns all tags, as XML
# <?xml version='1.0' standalone='yes'?>
# <tags>
# <tag count="2" tag="bluetooth" />
# <tag count="1" tag="gaim" />
# etc.
# posts/get
# tag= optional, filter
# dt= filter by date (default most recent)
# returns list of posts
# posts/recent
# tag= optional, filter
# count= optional, number to get, default=15, max=100
# list of posts
# posts/all
# all posts. don't use.
# posts/add
# url=
# description=
# extended=
# tags= (space delimited)
# dt=CCYY-MM-DDThh:mm:ssZ (i.e. "1984-09-01T14:21:31Z")
# posts/delete
# url=
# tags/rename
# old=
# new=
import BaseHTTPServer
import urllib
import urlparse
# identify this proxy to the upstream server (overrides urllib's default User-Agent)
urllib.URLopener.version = "thok.org-delicious-proxy/0.0"
def construct_parent_call(noun, verb, args):
    """Build the upstream del.icio.us URL for the call <noun>/<verb>.

    args is a dict of query parameters; the values come straight from the
    incoming request's query string, so they are still percent-encoded and
    are NOT re-quoted here (re-quoting would double-encode them).

    Returns the absolute upstream URL as a string.
    """
    # previously this was initialized to "?", which left a spurious
    # trailing "?" on the URL whenever args was empty
    argstring = ""
    if args:
        argstring = "?" + "&".join(["%s=%s" % (k, v) for k, v in args.items()])
    return urllib.basejoin(parent_url, "".join([noun, "/", verb, argstring]))
class URLrepeater(urllib.URLopener):
def http_error_default(self, url, fp, errcode, errmsg, headers):
"""Default error handler: close the connection and raise IOError."""
void = fp.read()
fp.close()
raise IOError, ('http error', errcode, errmsg, headers)
class DeliciousRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
# print "Got me!", self.__dict__.items()
print "Got headers!", self.headers
if "?" not in self.path:
self.send_error(404, "Arguments not found")
return
# <scheme>://<netloc>/<path>?<query>#<fragment>
type, host, upath, argset, fragment = urlparse.urlsplit("http://localhost" + self.path)
while upath.startswith("/"):
upath = upath.replace("/","",1)
noun, verb = upath.split("/")
## path, argset = self.path.split("?",1)
arglist = filter(None,argset.split("&"))
## this probably needs a level of argquoting too
argdict = dict([arg.split("=",1) for arg in arglist])
## print "PATH:", path
# may need to just strip all of the leading slashes...
## noun, verb = path.replace("/","",1).split("/") # strip leading slash
print "NOUN", noun, "VERB", verb
print "args", argdict
# uo = urllib.URLopener()
uo = URLrepeater()
for h in self.headers.headers:
k,v = h.strip().split(": ",1)
if k == "User-Agent": continue
if k == "Host": continue
if k == "Connection" and v == "keep-alive":
v = "close"
print "adding", k, "value", v
uo.addheader(k,v)
try:
newurl = construct_parent_call(noun, verb, argdict)
print "NEW:", newurl
u = uo.open(newurl)
except IOError, http_error_info:
# raise IOError, ('http error', errcode, errmsg, headers)
kind, errcode, errmsg, headers = http_error_info
print "Got error:", errcode, errmsg
self.send_response(errcode)
if 'www-authenticate' in headers:
print "AUTH HEADER:", headers['www-authenticate']
self.send_header('www-authenticate', headers['www-authenticate'])
self.send_header("Content-Type", "text/html")
self.send_header('Connection', 'close')
self.end_headers()
print "DONE rejecting", noun, verb, errcode
return
# print "UO:", uo.__dict__.items()
# print "U:", u.__dict__.items()
print "UH:", u.headers
content = u.read()
self.send_response(200)
# for now, let it be readonly
# let it modify headers and return content later
self.server.lookaside.handle(noun, verb, content, argdict, u.headers)
for h in u.headers.headers:
k,v = h.strip().split(": ",1)
print "passing back", k, "as", v
self.send_header(k, v)
#self.send_header("Content-type", ctype)
# self.send_header("Content-Length", len(content))
self.end_headers()
self.wfile.write(content)
print "DONE with", noun, verb
return
import gzip
import xml.dom.minidom
import StringIO
import os
import time
class LookAside:
def __init__(self):
self.delibase = os.path.expanduser("~/.delicious.stuff")
if not os.path.isdir(self.delibase):
os.mkdir(self.delibase)
def handle(self, noun, verb, content, args, headers):
name = 'handle_%s_%s' % (noun, verb)
if not hasattr(self, name):
print "UNKNOWN:", noun, verb
return
handler = getattr(self, name)
if "Content-Encoding" in headers and headers["Content-Encoding"] == "gzip":
# from feedparser.parse:
content = gzip.GzipFile(fileobj=StringIO.StringIO(content)).read()
fname = "_".join([noun, verb, "_".join(["%s=%s" % (k,v) for k,v in args.items()])])[:225]
fname = fname + "_" + str(time.time())
# should save the full tags and such too, though, later
print >> open(os.path.join(self.delibase, fname),"w"), content
handler(content, args)
# posts/dates
# tag= optional, filter
# returns a list of dates with the number of posts at each date.
def handle_posts_dates(self, content, args):
print "dates posts(tag):", args
print "got dates posts:", content
# tags/get
# returns all tags, as XML
# <?xml version='1.0' standalone='yes'?>
# <tags>
# <tag count="2" tag="bluetooth" />
# <tag count="1" tag="gaim" />
# etc.
def handle_tags_get(self, content, args):
print "Tags:"
xx = xml.dom.minidom.parseString(content)
count = 0
for tag in xx.getElementsByTagName("tag"):
print "TAG:", tag.attributes["tag"].value, "COUNT:", tag.attributes["count"].value
count += 1
print count, "tags total."
# posts/get
# tag= optional, filter
# dt= filter by date (default most recent)
# returns list of posts
def handle_posts_get(self, content, args):
print "get posts(tag,dt):", args
print "got posts:", content
# posts/recent
# tag= optional, filter
# count= optional, number to get, default=15, max=100
# list of posts
def handle_posts_recent(self, content, args):
print "recent posts(tag,count):", args
print "got recent posts:", content
# posts/all
# all posts. don't use. cocoalicious does, though.
def handle_posts_all(self, content, args):
print "all posts:", args
# print "got all posts:", content
print "Posts:"
xx = xml.dom.minidom.parseString(content)
# <posts update="2004-12-09T13:01:18Z" user="eichin">
count = 0
for post in xx.getElementsByTagName("post"):
# <post href="http://hem.fyristorg.com/matben/"
# description="The Coccinella"
# extended="jabber client that does sasl?"
# hash="ff54b80de7acd94f069bf0563d1a9c54"
# tag="jabber sasl xmpp zephyr-vs-jabber"
# time="2004-12-05T11:27:22Z" />
print "URL:", post.attributes["href"].value
print " desc:", post.attributes["description"].value
print " keys:", post.attributes["tag"].value
count += 1
print count, "posts total."
# posts/add
# url=
# description=
# extended=
# tags= (space delimited)
# dt=CCYY-MM-DDThh:mm:ssZ (ie. "1984-09-01T14:21:31Z")
def handle_posts_add(self, content, args):
print "add posts(url,desc,etc):", args
print "added posts:", content
# posts/delete
# url=
def handle_posts_delete(self, content, args):
print "delete posts(url):", args
print "deleted posts:", content
# tags/rename
# old=
# new=
def handle_tags_rename(self, content, args):
print "rename tags(old, new):", args
print "renamed tags:", content
# change this to bind to localhost, once testing is done
server_address = ('', 8989)  # NOTE(review): '' binds ALL interfaces — proxy is reachable from the network
httpd = BaseHTTPServer.HTTPServer(server_address, DeliciousRequestHandler)
httpd.lookaside = LookAside()  # shared archiver/dispatcher consulted by do_GET
httpd.serve_forever()