import sys
import os
import stat
+import httplib
import urllib
import feedparser
class HTML2Text(HTMLParser):
-
+
def __init__(self):
self.inheadingone = False
self.inheadingtwo = False
elif tag.lower() == "a":
self.inlink = True
elif tag.lower() == "br":
- if self.inparagraph:
- self.text = self.text \
- + u'\n'.join( \
- textwrap.wrap(self.currentparagraph, 70)) \
- + u'\n'
- self.currentparagraph = ""
- elif self.inblockquote:
- self.text = self.text \
- + u'\n> ' \
- + u'\n> '.join( \
- [a.strip() \
- for a in textwrap.wrap(self.blockquote, 68) \
- ]) \
- + u'\n'
- self.blockquote = u''
- else:
- self.text = self.text + u'\n'
+ self.handle_br()
elif tag.lower() == "blockquote":
self.inblockquote = True
self.text = self.text + u'\n'
def handle_startendtag(self, tag, attrs):
if tag.lower() == "br":
+ self.handle_br()
+
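+    # <br> may arrive as a start tag or as a self-closing tag, so both
+    # handlers now delegate to this shared method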
+ def handle_br(self):
+        if self.inparagraph:
+            self.text = self.text \
+                + u'\n'.join( \
return data
def parse_and_deliver(maildir, url, statedir):
- fp = feedparser.parse(url)
+ feedhandle = None
+ headers = None
+ # first check if we know about this feed already
+ feeddb = dbm.open(os.path.join(statedir, "feeds"), "c")
+ # we need all the parts of the url
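+    # (httplib wants the host and port separately from the request path)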
+ (type, rest) = urllib.splittype(url)
+ (host, path) = urllib.splithost(rest)
+ (host, port) = urllib.splitport(host)
+ if port == None:
+ port = 80
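+    # if we've fetched this feed before, we can avoid pulling it down again
+    # unless its headers say it has changed since last time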
+ if feeddb.has_key(url):
+ data = feeddb[url]
+ data = cgi.parse_qs(data)
+ # now do a head on the feed to see if it's been updated
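+        # (HEAD returns just the response headers, which is all we need here)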
+ conn = httplib.HTTPConnection("%s:%s" %(host, port))
+ conn.request("HEAD", path)
+ response = conn.getresponse()
+ headers = response.getheaders()
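+        # compare the validators we track (Content-Length, ETag, Last-Modified,
+        # Content-MD5) against the stored copies; any difference means a refetch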
+ ischanged = False
+ try:
+ for header in headers:
+ if header[0] == "content-length":
+ if header[1] != data["content-length"][0]:
+ ischanged = True
+ elif header[0] == "etag":
+ if header[1] != data["etag"][0]:
+ ischanged = True
+ elif header[0] == "last-modified":
+ if header[1] != data["last-modified"][0]:
+ ischanged = True
+ elif header[0] == "content-md5":
+ if header[1] != data["content-md5"][0]:
+ ischanged = True
+ except:
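+            # a stored value was missing or malformed; play it safe and refetch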
+ ischanged = True
+ if ischanged:
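+            # one of the validators differs, so fetch the full feed with a GET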
+ conn = httplib.HTTPConnection("%s:%s" %(host, port))
+ conn.request("GET", path)
+ response = conn.getresponse()
+ headers = response.getheaders()
+ feedhandle = response
+ else:
+            return # don't need to do anything, nothing's changed.
+ else:
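+        # we've not seen this feed before, so fetch it unconditionally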
+ conn = httplib.HTTPConnection("%s:%s" %(host, port))
+ conn.request("GET", path)
+ response = conn.getresponse()
+ headers = response.getheaders()
+ feedhandle = response
+
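+    # feedparser will parse a file-like object, so we can hand it the open
+    # HTTP response rather than the URL (which would mean a second fetch)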
+ fp = feedparser.parse(feedhandle)
db = dbm.open(os.path.join(statedir, "seen"), "c")
for item in fp["items"]:
# have we seen it before?
))
db[url + "|" + item["link"]] = data
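+    # store this fetch's validator headers (urlencoded so cgi.parse_qs can
+    # decode them on the next run) for the comparison above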
+ if headers:
+ data = []
+ for header in headers:
+ if header[0] in ["content-md5", "etag", "last-modified", "content-length"]:
+ data.append((header[0], header[1]))
+ if len(data) > 0:
+ data = urllib.urlencode(data)
+ feeddb[url] = data
+
db.close()
+ feeddb.close()
# first off, parse the command line arguments