Fix for h1, h2 in body
[planet-ucc.git] / XMLParse2.py
index 1cec668..d1ca814 100644
@@ -13,8 +13,11 @@ import CacheHandler
 sys.path.insert(0, 'extra')
 import feedparser
 
+feedparser.USER_AGENT = "PlanetUCC/1.0b +http://planet.ucc.asn.au/ %s" % feedparser.USER_AGENT
+
 class Blog:
        def __init__(self):
+               self.blogName   = None
                self.blogTitle  = None
                self.blogURL    = None
                self.feedURL    = None
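
The first hunk prepends an aggregator identifier to feedparser's module-level default User-Agent so that feed servers can see who is polling them. A minimal sketch of the same pattern, assuming a feedparser release that still exposes USER_AGENT and using a placeholder feed URL:

    import feedparser

    # Prepend an aggregator identifier; keep feedparser's own UA string
    # so servers can still tell which parser version made the request.
    feedparser.USER_AGENT = "PlanetUCC/1.0b +http://planet.ucc.asn.au/ %s" % feedparser.USER_AGENT

    d = feedparser.parse("http://example.com/feed.xml")  # placeholder URL
    print(d.get('status'))
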
@@ -38,21 +41,21 @@ class XMLParse:
        def parse(self):
                "Return a single Blog object"
                item            = Blog()
-               if self.blogObject:
-                       sys.stdout.write('Downloading feed %s...' % self.feedURL)
+               if self.blogObject and self.blogObject.cache:
+                       sys.stdout.write('Downloading feed %s... ' % self.feedURL)
                        try:
                                data    = feedparser.parse(self.feedURL, self.blogObject.cache.etag, self.blogObject.cache.date)
-                               sys.stdout.write('done.\n')
+                               # check to see what we got returned
+                               if data['items'] == [] and data['channel'] == {}:
+                                       sys.stdout.write('cached.\n')
+                                       return self.blogObject
+                               else:
+                                       sys.stdout.write('done.\n')
                        except:
                                sys.stdout.write('failed.\n')
-                               raise
                                return None
-                       # check to see what we got returned
-                       if data['items'] == [] and data['channel'] == {}:
-                               sys.stdout.write('Feed %s is upto date.\n' % self.feedURL)
-                               return self.blogObject
                else:
-                       sys.stdout.write('Downloading feed from %s (no cache)...' % self.feedURL)
+                       sys.stdout.write('Downloading feed (no cache) %s... ' % self.feedURL)
                        try:
                                data    = feedparser.parse(self.feedURL)
                                sys.stdout.write('done.\n')
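
The second hunk hands the cached ETag and Last-Modified values back to feedparser so the server can answer 304 Not Modified, and treats an empty items list plus an empty channel dict as "nothing new". A sketch of that conditional-GET round trip, with a hypothetical feed URL; feedparser also exposes the HTTP status code, which is a more direct check:

    import feedparser

    url = "http://example.com/feed.xml"            # hypothetical feed
    first = feedparser.parse(url)
    etag, modified = first.get('etag'), first.get('modified')

    # The second fetch sends If-None-Match / If-Modified-Since; on a 304
    # the result carries no new entries.
    second = feedparser.parse(url, etag, modified)
    if second.get('status') == 304 or not second.entries:
        print("feed unchanged, reuse the cached copy")
    else:
        print("feed updated: %d entries" % len(second.entries))
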
@@ -62,10 +65,17 @@ class XMLParse:
                # create caching data
                try:
                        cache           = CacheHandler.CacheObject()
-                       cache.etag      = data['etag']
-                       cache.date      = data['modified']
+                       try:
+                               cache.etag      = data['etag']
+                       except:
+                               cache.etag      = None
+                       try:
+                               cache.date      = data['modified']
+                       except:
+                               cache.date      = None
                        item.cache      = cache
                except:
+                       sys.stderr.write('DEBUG: XMLParse2: cache item generation failed\n')
                        item.cache      = None
                # parse the return of data into a blog
                if data['channel'].has_key('title'):
@@ -87,7 +97,7 @@ class XMLParse:
                        else:
                                blogItem.itemURL        = item.blogURL
                        if entry.has_key('date_parsed'):
-                               blogItem.itemDate       = time.mktime(entry['date_parsed'])
+                               blogItem.itemDate       = time.mktime(entry['date_parsed']) + 28800
                        else:
                                blogItem.itemDate       = 0
                        if entry.has_key('description'):

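
Two notes on the last hunk. The nested try/except blocks guard against feeds whose responses carry no ETag or Last-Modified header; dict.get() expresses the same fallback more compactly. The +28800 adds eight hours, which looks like a hard-coded correction for passing feedparser's UTC date_parsed tuple to the local-time time.mktime() on a UTC+8 (Perth) host; calendar.timegm() states that intent without the magic number. A sketch under those assumptions:

    import calendar

    def cache_headers(data):
        # Missing 'etag'/'modified' keys simply become None.
        return data.get('etag'), data.get('modified')

    def entry_timestamp(entry):
        # date_parsed is a UTC struct_time; calendar.timegm() reads it as
        # UTC, whereas time.mktime() would read it as local time.
        parsed = entry.get('date_parsed')
        if parsed is None:
            return 0
        return calendar.timegm(parsed)
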