* Add redirect support
diff --git a/rss2maildir.py b/rss2maildir.py
index 25ddb3c676cb8de3eb07d2502ed41d27be1087e0..aa3b0ccac5badb4a6c21f46591470ab3e5fc0531 100755
--- a/rss2maildir.py
+++ b/rss2maildir.py
@@ -20,6 +20,7 @@
 import sys
 import os
 import stat
+import httplib
 import urllib
 
 import feedparser
@@ -52,12 +53,12 @@ entities = {
     "pound": "£",
     "copy": "©",
     "apos": "'",
-    "quote": "\"",
+    "quot": "\"",
     "nbsp": " ",
     }
 
 class HTML2Text(HTMLParser):
-    
+
     def __init__(self):
         self.inheadingone = False
         self.inheadingtwo = False
@@ -251,8 +252,79 @@ class HTML2Text(HTMLParser):
             data = data + "\n".join(textwrap.wrap(self.currentparagraph, 70))
         return data
 
+def open_url(method, url):
+    redirectcount = 0
+    while redirectcount < 3:
+        (type, rest) = urllib.splittype(url)
+        (host, path) = urllib.splithost(rest)
+        (host, port) = urllib.splitport(host)
+        if port == None:
+            port = 80
+        try:
+            conn = httplib.HTTPConnection("%s:%s" %(host, port))
+            conn.request(method, path)
+            response = conn.getresponse()
+            if response.status in [301, 302, 303, 307]:
+                headers = response.getheaders()
+                for header in headers:
+                    if header[0] == "location":
+                        url = header[1]
+            elif response.status == 200:
+                return response
+        except:
+            pass
+        redirectcount = redirectcount + 1
+    return None
+
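As a quick illustration, the new open_url helper can be driven on its own roughly like this (Python 2, same as the rest of the script; the URL is only a placeholder):

    # open_url() as added above: follows up to three 301/302/303/307
    # redirects by hand and returns the httplib response on a 200,
    # or None if it gives up.
    response = open_url("GET", "http://example.org/feed.rss")
    if response is None:
        print "fetch failed or too many redirects"
    else:
        print response.status            # 200
        print response.getheaders()      # includes etag/last-modified if the server sent them
        body = response.read()           # raw feed XML
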
 def parse_and_deliver(maildir, url, statedir):
-    fp = feedparser.parse(url)
+    feedhandle = None
+    headers = None
+    # first check if we know about this feed already
+    feeddb = dbm.open(os.path.join(statedir, "feeds"), "c")
+    if feeddb.has_key(url):
+        data = feeddb[url]
+        data = cgi.parse_qs(data)
+        response = open_url("HEAD", url)
+        headers = None
+        if response:
+            headers = response.getheaders()
+        ischanged = False
+        try:
+            for header in headers:
+                if header[0] == "content-length":
+                    if header[1] != data["content-length"][0]:
+                        ischanged = True
+                elif header[0] == "etag":
+                    if header[1] != data["etag"][0]:
+                        ischanged = True
+                elif header[0] == "last-modified":
+                    if header[1] != data["last-modified"][0]:
+                        ischanged = True
+                elif header[0] == "content-md5":
+                    if header[1] != data["content-md5"][0]:
+                        ischanged = True
+        except:
+            ischanged = True
+        if ischanged:
+            response = open_url("GET", url)
+            if response != None:
+                headers = response.getheaders()
+                feedhandle = response
+            else:
+                sys.stderr.write("Failed to fetch feed: %s\n" %(url))
+                return
+        else:
+            return # don't need to do anything, nothing's changed.
+    else:
+        response = open_url("GET", url)
+        if response != None:
+            headers = response.getheaders()
+            feedhandle = response
+        else:
+            sys.stderr.write("Failed to fetch feed: %s\n" %(url))
+            return
+
+    fp = feedparser.parse(feedhandle)
     db = dbm.open(os.path.join(statedir, "seen"), "c")
     for item in fp["items"]:
         # have we seen it before?
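The HEAD-based freshness check above boils down to comparing a few cache-validator headers against whatever was recorded on the previous run. A condensed, standalone sketch of that comparison (the helper name is made up for illustration; the committed code inlines this in parse_and_deliver):

    VALIDATORS = ("content-length", "etag", "last-modified", "content-md5")

    def feed_looks_changed(cached, headers):
        # cached:  dict from cgi.parse_qs, e.g. {"etag": ['"abc123"']}
        # headers: list of (name, value) tuples from response.getheaders()
        for name, value in headers:
            if name in VALIDATORS:
                try:
                    if value != cached[name][0]:
                        return True        # validator differs -> refetch
                except KeyError:
                    return True            # header not stored before -> refetch
        return False                       # nothing changed, skip the GET
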
@@ -296,8 +368,13 @@ def parse_and_deliver(maildir, url, statedir):
         msg.add_header("To", "\"%s\" <rss2maildir@localhost>" %(url))
         if prevmessageid:
             msg.add_header("References", prevmessageid)
-        createddate = datetime.datetime(*item["updated_parsed"][0:6]) \
+        createddate = datetime.datetime.now() \
             .strftime("%a, %e %b %Y %T -0000")
+        try:
+            createddate = datetime.datetime(*item["updated_parsed"][0:6]) \
+                .strftime("%a, %e %b %Y %T -0000")
+        except:
+            pass
         msg.add_header("Date", createddate)
         msg.add_header("Subject", item["title"])
         msg.set_default_type("text/plain")
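The Date header change guards against entries without a usable timestamp: feedparser only provides updated_parsed when the feed carries a date it can parse, so the message now falls back to the current time. Roughly, the logic is equivalent to this (helper name is illustrative):

    import datetime

    def item_date(item):
        # Prefer the entry's own timestamp; fall back to "now" when the
        # feed omits it or feedparser could not parse it.
        try:
            stamp = datetime.datetime(*item["updated_parsed"][0:6])
        except (KeyError, TypeError):
            stamp = datetime.datetime.now()
        return stamp.strftime("%a, %e %b %Y %T -0000")
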
@@ -339,7 +416,17 @@ def parse_and_deliver(maildir, url, statedir):
             ))
         db[url + "|" + item["link"]] = data
 
+    if headers:
+        data = []
+        for header in headers:
+            if header[0] in ["content-md5", "etag", "last-modified", "content-length"]:
+                data.append((header[0], header[1]))
+        if len(data) > 0:
+            data = urllib.urlencode(data)
+            feeddb[url] = data
+
     db.close()
+    feeddb.close()
 
 # first off, parse the command line arguments
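
Per-feed state lives in a second dbm file alongside the "seen" database, with the validator headers flattened into a query string via urllib.urlencode and read back with cgi.parse_qs. A small sketch of that round trip (Python 2; the path, URL and header values are placeholders):

    import cgi
    import dbm
    import urllib

    feeddb = dbm.open("/tmp/feeds", "c")            # placeholder path

    # store: list of (header, value) pairs -> query string
    feeddb["http://example.org/feed.rss"] = urllib.urlencode([
        ("etag", '"abc123"'),
        ("last-modified", "Mon, 07 Jan 2008 12:00:00 GMT"),
    ])

    # load: query string -> dict of lists, e.g. {"etag": ['"abc123"'], ...}
    cached = cgi.parse_qs(feeddb["http://example.org/feed.rss"])
    print cached["etag"][0]

    feeddb.close()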