#!/usr/bin/python
#
# update-planet
#
# Downloads feeds from the URLs specified and generates the XHTML files.
#
# (c) 2004, Davyd Madeley
#

import sys, codecs
import XMLParse2 as XMLParse, XMLWriter, CacheHandler

# step 1: read in the config and check each object from cache
cache = CacheHandler.CacheHandler()
feeds = []
for line in open('feedlist').readlines():
    line = line.strip()
    # Skip blank lines and '#' comments.  (Indexing line[0] directly, as the
    # code used to, raised IndexError on a blank line and aborted the run.)
    if not line or line.startswith('#'):
        continue
    # Columns are tab-separated: first field is the display name,
    # last field is the feed URL (intermediate fields are ignored).
    storage = line.split('\t')
    name, feed = storage[0], storage[-1]
    try:
        feeds.append((name, feed, cache.getBlog(name, feed)))
    except Exception:
        # Best-effort: one broken feed must not abort the whole update.
        sys.stderr.write('DEBUG: update-planet: something went wrong retrieving feed\n')

# step 2: process each feed
blogs = []
for name, url, cached in feeds:
    # XMLParse2 takes two parameters, a URL and a CacheObject
    blog = XMLParse.XMLParse(url, cached).parse()
    if blog:
        blog.blogTitle = name
        blog.feedURL = url
        blogs.append(blog)
        # write the cache back down to disk
        cache.storeBlog(blog)

# step 3: write feed to disk
# Render first so the except below only fires for an actual write failure,
# matching what the error message claims.
try:
    output = XMLWriter.XMLWriter(XMLWriter.XHTMLWriter, blogs).write()
    out = codecs.open('planet.html', 'wb', 'utf-8')
    try:
        out.write(output)
    finally:
        # Explicitly close the handle the old one-liner leaked.
        out.close()
except Exception:
    sys.stderr.write('DEBUG: update-planet: could not write planet.html, aborting\n')