X-Git-Url: https://git.ucc.asn.au/?p=planet-ucc.git;a=blobdiff_plain;f=XMLParse2.py;h=3a14e2bc808e2af18998c1d838a5d46d002ece00;hp=1cec66838912963af64dffb38e37e2f6b20c3069;hb=52296aaabfab3fd89036e1acca23cd26d2a00173;hpb=783caef083f5dcafaed71dfa3f3a8035109717e9

diff --git a/XMLParse2.py b/XMLParse2.py
index 1cec668..3a14e2b 100644
--- a/XMLParse2.py
+++ b/XMLParse2.py
@@ -13,6 +13,8 @@ import CacheHandler
 sys.path.insert(0, 'extra')
 import feedparser
 
+feedparser.USER_AGENT = "PlanetUCC/1.0b +http://planet.ucc.asn.au/ %s" % feedparser.USER_AGENT
+
 class Blog:
 	def __init__(self):
 		self.blogTitle = None
@@ -38,14 +40,13 @@ class XMLParse:
 	def parse(self):
 		"Return a single Blog object"
 		item = Blog()
-		if self.blogObject:
+		if self.blogObject and self.blogObject.cache:
 			sys.stdout.write('Downloading feed %s...' % self.feedURL)
 			try:
 				data = feedparser.parse(self.feedURL, self.blogObject.cache.etag, self.blogObject.cache.date)
 				sys.stdout.write('done.\n')
 			except:
 				sys.stdout.write('failed.\n')
-				raise
 				return None
 			# check to see what we got returned
 			if data['items'] == [] and data['channel'] == {}:
@@ -62,10 +63,17 @@ class XMLParse:
 			# create caching data
 			try:
 				cache = CacheHandler.CacheObject()
-				cache.etag = data['etag']
-				cache.date = data['modified']
+				try:
+					cache.etag = data['etag']
+				except:
+					cache.etag = None
+				try:
+					cache.date = data['modified']
+				except:
+					cache.date = None
 				item.cache = cache
 			except:
+				sys.stderr.write('DEBUG: XMLParse2: cache item generation failed\n')
 				item.cache = None
 			# parse the return of data into a blog
 			if data['channel'].has_key('title'):
@@ -87,7 +95,7 @@
 				else:
 					blogItem.itemURL = item.blogURL
 				if entry.has_key('date_parsed'):
-					blogItem.itemDate = time.mktime(entry['date_parsed'])
+					blogItem.itemDate = time.mktime(entry['date_parsed']) + 28800
 				else:
 					blogItem.itemDate = 0
 				if entry.has_key('description'):
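
Note on the second and third hunks: the extra positional arguments handed to feedparser.parse() are the ETag and Last-Modified validators, which turn the fetch into an HTTP conditional GET. Servers are not obliged to send either header back, which is why the commit wraps data['etag'] and data['modified'] in their own try/except blocks. A minimal sketch of that round trip follows; FEED_URL is a hypothetical placeholder, not a feed from this repository, and the sketch assumes the feedparser API of this era where etag and modified are the second and third arguments to parse().

    import sys
    import feedparser

    FEED_URL = 'http://example.com/feed.xml'  # hypothetical placeholder

    # First fetch: no validators available yet.
    data = feedparser.parse(FEED_URL)

    # Read the validators defensively, since not every server sends
    # ETag/Last-Modified (the commit above guards the same lookups).
    etag = data.get('etag')
    modified = data.get('modified')

    # Later fetch: hand the validators back. On a 304 Not Modified the
    # result carries status 304 and no items, so the cached copy should
    # be reused instead of re-parsing.
    data = feedparser.parse(FEED_URL, etag, modified)
    if data.get('status') == 304:
        sys.stdout.write('Feed unchanged; reusing cached copy.\n')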
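The "+ 28800" in the final hunk is 8 * 60 * 60 seconds, i.e. a fixed eight-hour shift, presumably moving the parsed feed dates onto UTC+8 (Perth time, with no daylight-saving handling). The sketch below only restates that arithmetic with a named constant to make the magic number legible; the constant and function names are illustrative, not part of the commit.

    import time

    # 28800 seconds = 8 hours: the fixed UTC+8 offset applied in the commit.
    AWST_OFFSET = 8 * 60 * 60

    def shift_to_awst(date_parsed):
        "Apply the same adjustment as the commit to a feedparser struct_time."
        return time.mktime(date_parsed) + AWST_OFFSET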