From b9d9d352f6a748d1c35696e7eec7de8bbb724556 Mon Sep 17 00:00:00 2001 From: davyd Date: Sat, 7 Feb 2004 06:57:46 +0000 Subject: [PATCH] Initial Upload --- XMLWriter.py | 3 ++- feedlist | 9 +++++++++ update-planet | 40 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 51 insertions(+), 1 deletion(-) create mode 100644 feedlist create mode 100755 update-planet diff --git a/XMLWriter.py b/XMLWriter.py index 1311fb6..267789c 100644 --- a/XMLWriter.py +++ b/XMLWriter.py @@ -60,7 +60,8 @@ class Planet: if lastItem == None: break # this checks to see if it's a new day - if time.localtime(lastItem.itemDate) != lastDate: + if time.localtime(lastItem.itemDate)[6] != lastDate: + lastDate = time.localtime(lastItem.itemDate)[6] workingDate = PlanetDate(lastItem.itemDate) self.dates.append(workingDate) # append the item to the current date diff --git a/feedlist b/feedlist new file mode 100644 index 0000000..20658ca --- /dev/null +++ b/feedlist @@ -0,0 +1,9 @@ +# feedlist +# +# read in by update-planet, to generate planet.html +# name url +# +Davyd Madeley http://www.livejournal.com/users/davyd/data/rss +Ian McKellar http://ian.mckellar.org/wp-rss2.php +Grahame Bowland http://www.livejournal.com/users/grahame/data/rss +Adam Wright http://www.livejournal.com/users/hipikat/data/rss diff --git a/update-planet b/update-planet new file mode 100755 index 0000000..c6f04dd --- /dev/null +++ b/update-planet @@ -0,0 +1,40 @@ +#!/usr/bin/python +# +# update-planet +# +# Downloads feeds from the URLs specified and generates the XHTML files. +# +# (c) 2004, Davyd Madeley +# + +import sys, urllib2, codecs +import XMLParse, XMLWriter ++# step 1: read in the config and download the feeds +feeds = [] +for feed in open('feedlist').readlines(): + if feed.strip()[0] != '#': + storage = feed.strip().split('\t') + name, feed = storage[0], storage[-1] + sys.stdout.write('Downloading feed "%s" from %s... ' % (name, feed)) + try: + # XXX: might want to consider some good caching code in here + feeds.append((name, feed, urllib2.urlopen(feed).read())) + sys.stdout.write('done.\n') + except: + sys.stdout.write('failed.\n') + +# step 2: process each feed +blogs = [] +for feed in feeds: + xml = XMLParse.XMLParse(feed[2]).parse() + for blog in xml: + blog.blogTitle = feed[0] + blogs += xml + +# step 3: write feed to disk +try: + codecs.open('planet.html', 'wb', 'utf-8').write(XMLWriter.XMLWriter(XMLWriter.XHTMLWriter, blogs).write()) +except: + sys.stderr.write('DEBUG: update-planet: could not write planet.html, aborting\n') + raise -- 2.20.1