* be slightly more forgiving on connection resets
index 6a319f47ad2ba14a44df777dfd83b21c7cf0d9da..d8cd3e4b109d4af1035f1ffd3b4d7cd935da721e 100755 (executable)
@@ -20,6 +20,7 @@
 import sys
 import os
 import stat
+import httplib
 import urllib
 
 import feedparser
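
The new httplib import is what lets the script issue its own HEAD and GET requests before involving feedparser. A minimal sketch of the idea, using an assumed example feed URL; feedparser.parse() accepts a file-like object such as an httplib response as well as a plain URL:

import httplib
import feedparser

conn = httplib.HTTPConnection("example.org:80")
conn.request("GET", "/feed.rss")
response = conn.getresponse()
feed = feedparser.parse(response)   # feedparser reads the response body itself
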
@@ -57,7 +58,7 @@ entities = {
     }
 
 class HTML2Text(HTMLParser):
-    
+
     def __init__(self):
         self.inheadingone = False
         self.inheadingtwo = False
@@ -88,23 +89,7 @@ class HTML2Text(HTMLParser):
         elif tag.lower() == "a":
             self.inlink = True
         elif tag.lower() == "br":
-            if self.inparagraph:
-                self.text = self.text \
-                    + u'\n'.join( \
-                        textwrap.wrap(self.currentparagraph, 70)) \
-                    + u'\n'
-                self.currentparagraph = ""
-            elif self.inblockquote:
-                self.text = self.text \
-                    + u'\n> ' \
-                    + u'\n> '.join( \
-                        [a.strip() \
-                            for a in textwrap.wrap(self.blockquote, 68) \
-                        ]) \
-                    + u'\n'
-                self.blockquote = u''
-            else:
-                self.text = self.text + u'\n'
+            self.handle_br()
         elif tag.lower() == "blockquote":
             self.inblockquote = True
             self.text = self.text + u'\n'
@@ -132,12 +117,16 @@ class HTML2Text(HTMLParser):
             else:
                 self.text = self.text \
                     + u' * ' \
-                    + u'\n   '.join([a.strip() for a in textwrap.wrap(self.item, 67)]) \
+                    + u'\n   '.join([a.strip() for a in \
+                        textwrap.wrap(self.item, 67)]) \
                     + u'\n'
                 self.item = u''
 
     def handle_startendtag(self, tag, attrs):
         if tag.lower() == "br":
+            self.handle_br()
+
+    def handle_br(self):
             if self.inparagraph:
                 self.text = self.text \
                 + u'\n'.join( \
@@ -199,8 +188,10 @@ class HTML2Text(HTMLParser):
             self.text = self.text \
                 + u'\n> ' \
                 + u'\n> '.join( \
-                    [a.strip() for a in textwrap.wrap(self.blockquote, 68)] \
-                    ).encode("utf-8") \
+                    [a.strip() \
+                        for a in textwrap.wrap( \
+                            self.blockquote, 68)] \
+                    ) \
                 + u'\n'
             self.inblockquote = False
             self.blockquote = u''
@@ -248,11 +239,12 @@ class HTML2Text(HTMLParser):
             entity = "&" + name + ";"
 
         if self.inparagraph:
-            self.currentparagraph = self.currentparagraph + entity
+            self.currentparagraph = self.currentparagraph \
+                + unicode(entity, "utf-8")
         elif self.inblockquote:
-            self.blockquote = self.blockquote + entity
+            self.blockquote = self.blockquote + unicode(entity, "utf-8")
         else:
-            self.text = self.text + entity
+            self.text = self.text + unicode(entity, "utf-8")
 
     def gettext(self):
         data = self.text
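
Decoding the entity replacement text before appending it keeps the accumulated buffers consistently unicode, which matters when the entity expansion is a UTF-8 byte string with non-ASCII bytes. A small sketch of the failure this avoids (Python 2, hypothetical values):

text = u"caf"
entity = "\xc3\xa9"                         # UTF-8 bytes, e.g. an expansion of &eacute;
try:
    text = text + entity                    # implicit ASCII decode of the bytes fails
except UnicodeDecodeError:
    text = text + unicode(entity, "utf-8")  # explicit decode works: u"caf\xe9"
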
@@ -261,7 +253,62 @@ class HTML2Text(HTMLParser):
         return data
 
 def parse_and_deliver(maildir, url, statedir):
-    fp = feedparser.parse(url)
+    feedhandle = None
+    headers = None
+    # first check if we know about this feed already
+    feeddb = dbm.open(os.path.join(statedir, "feeds"), "c")
+    # we need all the parts of the url 
+    (type, rest) = urllib.splittype(url)
+    (host, path) = urllib.splithost(rest)
+    (host, port) = urllib.splitport(host)
+    if port == None:
+        port = 80
+    if feeddb.has_key(url):
+        data = feeddb[url]
+        data = cgi.parse_qs(data)
+        # now do a HEAD request on the feed to see if it's been updated
+        conn = httplib.HTTPConnection("%s:%s" %(host, port))
+        conn.request("HEAD", path)
+        response = conn.getresponse()
+        headers = response.getheaders()
+        ischanged = False
+        try:
+            for header in headers:
+                if header[0] == "content-length":
+                    if header[1] != data["content-length"][0]:
+                        ischanged = True
+                elif header[0] == "etag":
+                    if header[1] != data["etag"][0]:
+                        ischanged = True
+                elif header[0] == "last-modified":
+                    if header[1] != data["last-modified"][0]:
+                        ischanged = True
+                elif header[0] == "content-md5":
+                    if header[1] != data["content-md5"][0]:
+                        ischanged = True
+        except:
+            ischanged = True
+        if ischanged:
+            conn = httplib.HTTPConnection("%s:%s" %(host, port))
+            conn.request("GET", path)
+            response = conn.getresponse()
+            headers = response.getheaders()
+            feedhandle = response
+        else:
+            return # don't need to do anything, nothing's changed.
+    else:
+        conn = httplib.HTTPConnection("%s:%s" %(host, port))
+        conn.request("GET", path)
+        response = None
+        try:
+            response = conn.getresponse()
+        except:
+            print "Failed to fetch feed: %s" %(url)
+            return
+        headers = response.getheaders()
+        feedhandle = response
+
+    fp = feedparser.parse(feedhandle)
     db = dbm.open(os.path.join(statedir, "seen"), "c")
     for item in fp["items"]:
         # have we seen it before?
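
The rewritten fetch path keeps a small change signature per feed in a dbm file: the content-length, etag, last-modified and content-md5 headers from the last successful GET, flattened into one string with urllib.urlencode(). On the next run a HEAD request is compared against that record, and the feed body is only downloaded again when one of those headers differs. A minimal sketch of the state round trip, with a hypothetical path and values:

import dbm
import cgi
import urllib

feeddb = dbm.open("/tmp/feeds-example", "c")          # statedir/feeds in the real script
signature = [("etag", '"abc123"'),
             ("last-modified", "Mon, 01 Jan 2007 00:00:00 GMT")]
feeddb["http://example.org/feed.rss"] = urllib.urlencode(signature)

stored = cgi.parse_qs(feeddb["http://example.org/feed.rss"])
print stored["etag"][0]                               # '"abc123"'
feeddb.close()
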
@@ -274,9 +321,13 @@ def parse_and_deliver(maildir, url, statedir):
 
         md5sum = md5.md5(content.encode("utf-8")).hexdigest()
 
+        prevmessageid = None
+
         if db.has_key(url + "|" + item["link"]):
             data = db[url + "|" + item["link"]]
             data = cgi.parse_qs(data)
+            if data.has_key("message-id"):
+                prevmessageid = data["message-id"][0]
             if data["contentmd5"][0] == md5sum:
                 continue
 
@@ -299,8 +350,15 @@ def parse_and_deliver(maildir, url, statedir):
         msg.set_unixfrom("\"%s\" <rss2maildir@localhost>" %(url))
         msg.add_header("From", "\"%s\" <rss2maildir@localhost>" %(author))
         msg.add_header("To", "\"%s\" <rss2maildir@localhost>" %(url))
-        createddate = datetime.datetime(*item["updated_parsed"][0:6]) \
+        if prevmessageid:
+            msg.add_header("References", prevmessageid)
+        createddate = datetime.datetime.now() \
             .strftime("%a, %e %b %Y %T -0000")
+        try:
+            createddate = datetime.datetime(*item["updated_parsed"][0:6]) \
+                .strftime("%a, %e %b %Y %T -0000")
+        except:
+            pass
         msg.add_header("Date", createddate)
         msg.add_header("Subject", item["title"])
         msg.set_default_type("text/plain")
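
Two delivery changes sit in this hunk: a References header is added when the item has been mailed before, so an updated entry threads under the original message in the mail client, and the Date header now falls back to the current time instead of raising when a feed omits updated_parsed. A short sketch of how the reference chain accumulates across re-deliveries (the message-ids are hypothetical):

prevmessageid = "<20080101.1234.rss2maildir@localhost>"    # recovered from the item record
messageid = "<20080102.5678.rss2maildir@localhost>"        # id of the new delivery
references = prevmessageid                                 # value of the References header
stored = prevmessageid + " " + messageid                   # written back for the next update
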
@@ -333,6 +391,8 @@ def parse_and_deliver(maildir, url, statedir):
         os.unlink(fn)
 
         # now add to the database about the item
+        if prevmessageid:
+            messageid = prevmessageid + " " + messageid
         data = urllib.urlencode((
             ("message-id", messageid), \
             ("created", createddate), \
@@ -340,7 +400,17 @@ def parse_and_deliver(maildir, url, statedir):
             ))
         db[url + "|" + item["link"]] = data
 
+    if headers:
+        data = []
+        for header in headers:
+            if header[0] in ["content-md5", "etag", "last-modified", "content-length"]:
+                data.append((header[0], header[1]))
+        if len(data) > 0:
+            data = urllib.urlencode(data)
+            feeddb[url] = data
+
     db.close()
+    feeddb.close()
 
 # first off, parse the command line arguments
 
@@ -367,7 +437,8 @@ if options.conf != None:
         configfile = options.conf
     except:
         # should exit here as the specified file doesn't exist
-        sys.stderr.write("Config file %s does not exist. Exiting.\n" %(options.conf,))
+        sys.stderr.write( \
+            "Config file %s does not exist. Exiting.\n" %(options.conf,))
         sys.exit(2)
 else:
     # check through the default locations
@@ -396,7 +467,8 @@ if options.statedir != None:
     try:
         mode = os.stat(state_dir)[stat.ST_MODE]
         if not stat.S_ISDIR(mode):
-            sys.stderr.write("State directory (%s) is not a directory\n" %(state_dir))
+            sys.stderr.write( \
+                "State directory (%s) is not a directory\n" %(state_dir))
             sys.exit(1)
     except:
         # try to make the directory
@@ -410,7 +482,8 @@ elif scp.has_option("general", "state_dir"):
     try:
         mode = os.stat(state_dir)[stat.ST_MODE]
         if not stat.S_ISDIR(mode):
-            sys.stderr.write("State directory (%s) is not a directory\n" %(state_dir))
+            sys.stderr.write( \
+                "State directory (%s) is not a directory\n" %(state_dir))
             sys.exit(1)
     except:
         # try to create it
@@ -418,19 +491,22 @@ elif scp.has_option("general", "state_dir"):
             os.mkdir(new_state_dir)
             state_dir = new_state_dir
         except:
-            sys.stderr.write("Couldn't create state directory %s\n" %(new_state_dir))
+            sys.stderr.write( \
+                "Couldn't create state directory %s\n" %(new_state_dir))
             sys.exit(1)
 else:
     try:
         mode = os.stat(state_dir)[stat.ST_MODE]
         if not stat.S_ISDIR(mode):
-            sys.stderr.write("State directory %s is not a directory\n" %(state_dir))
+            sys.stderr.write( \
+                "State directory %s is not a directory\n" %(state_dir))
             sys.exit(1)
     except:
         try:
             os.mkdir(state_dir)
         except:
-            sys.stderr.write("State directory %s could not be created\n" %(state_dir))
+            sys.stderr.write( \
+                "State directory %s could not be created\n" %(state_dir))
             sys.exit(1)
 
 if scp.has_option("general", "maildir_root"):
@@ -439,7 +515,9 @@ if scp.has_option("general", "maildir_root"):
 try:
     mode = os.stat(maildir_root)[stat.ST_MODE]
     if not stat.S_ISDIR(mode):
-        sys.stderr.write("Maildir Root %s is not a directory\n" %(maildir_root))
+        sys.stderr.write( \
+            "Maildir Root %s is not a directory\n" \
+            %(maildir_root))
         sys.exit(1)
 except:
     try:
@@ -500,7 +578,9 @@ for section in feeds:
             os.mkdir(os.path.join(maildir, "cur"))
             os.mkdir(os.path.join(maildir, "tmp"))
         except:
-            sys.stderr.write("Couldn't create required maildir directories for %s\n" %(section,))
+            sys.stderr.write( \
+                "Couldn't create required maildir directories for %s\n" \
+                %(section,))
             sys.exit(1)
 
     # right - we've got the directories, we've got the section, we know the