5 # Downloads feeds from the URLs specified and generates the XHTML files.
# NOTE(review): this excerpt is non-contiguous — several structural lines
# (the `feeds = []` init, try/except headers, the step-2 loop header, the
# `blogs` accumulator, and the `sys`/`codecs` imports) fall outside the
# visible lines; comments below hedge accordingly.
# Project-local modules: XMLParse2 (aliased XMLParse) parses a feed,
# CacheHandler persists previously-fetched blogs, XMLWriter + the two
# writer classes render the aggregated output.
11 import XMLParse2 as XMLParse, XMLWriter, CacheHandler
12 import XHTMLWriter, RSS2Writer
14 # step 1: read in the config and check each object from cache
15 cache = CacheHandler.CacheHandler()
# 'feedlist' appears to be a tab-separated config: lines whose first
# non-blank char is '#' are comments; only the first field (name) and the
# last field (URL) of each entry are used — TODO confirm format.
# NOTE(review): file handle is never closed (no `with`), and
# `feed.strip()[0]` raises IndexError on a blank line — worth fixing once
# the full file is in view.
18 for feed in open('feedlist').readlines():
19 if feed.strip()[0] != '#':
20 storage = feed.strip().split('\t')
21 name, feed = storage[0], storage[-1]
# (name, url, cached-blog-or-None) triples; `feeds` itself is
# initialized on a line not visible in this excerpt.
23 feeds.append((name, feed, cache.getBlog(name, feed)))
# NOTE(review): indentation suggests this is the handler of a try/except
# whose `try:` header is outside the visible lines — best-effort: a bad
# entry is logged to stderr and skipped. Confirm against the full file.
25 sys.stderr.write('DEBUG: update-planet: something went wrong retrieving feed\n')
27 # step 2: process each feed
# NOTE(review): lines below are inside a loop over `feeds` whose header is
# not visible here; `feed` is the (name, url, cache-object) triple built
# in step 1.
30 # XMLParse2 takes two paramaters, a URL and a CacheObject
31 blog = XMLParse.XMLParse(feed[1], feed[2]).parse()
# Stamp the parsed blog with the configured display name and source URL.
33 blog.blogTitle = feed[0]
34 blog.feedURL = feed[1]
36 # write the cache back down to disk
41 # step 3: sift the feeds
# `blogs` is accumulated on lines outside this excerpt — presumably the
# list of parsed blog objects from step 2; verify against the full file.
42 xmlwriter = XMLWriter.XMLWriter(blogs)
44 # step 4: write feed to disk
# NOTE(review): each write below looks like the body of a try whose
# `except` is the following stderr line; the codecs.open handles are never
# explicitly closed — relies on refcount finalization. TODO: use `with`.
46 codecs.open('planet.html', 'wb', 'utf-8').write(xmlwriter.write(XHTMLWriter.XHTMLWriter))
48 sys.stderr.write('DEBUG: update-planet: could not write planet.html, aborting\n')
52 codecs.open('rss2.xml', 'wb', 'utf-8').write(xmlwriter.write(RSS2Writer.RSS2Writer))
54 sys.stderr.write('DEBUG: update-planet: could not write rss2.xml, aborting\n')