X-Git-Url: https://git.ucc.asn.au/?p=frenchie%2Ficalparse.git;a=blobdiff_plain;f=icalparse.py;h=1ed6076da0d36257c62abf5a12294b0cec95fbc6;hp=f41c4a1c6208ee38f3ab53f7c7f37695a9dbbff1;hb=48d87039714129b0798a4efba2938d15190af7d8;hpb=c5df217cb78f340a12a0ba9145af79faee186772

diff --git a/icalparse.py b/icalparse.py
index f41c4a1..1ed6076 100755
--- a/icalparse.py
+++ b/icalparse.py
@@ -24,17 +24,26 @@
 import sys
 import urlparse
 import os
 
+
 class InvalidICS(Exception): pass
 class notJoined(Exception): pass
+class IncompleteICS(InvalidICS): pass
+
 
 def lineJoiner(oldcal):
 	'''Takes a string containing a calendar and returns an array of its lines'''
 
+	if not oldcal[0:15] == 'BEGIN:VCALENDAR':
+		raise InvalidICS, "Does not appear to be a valid ICS file"
+
+	if not 'END:VCALENDAR' in oldcal[-15:-1]:
+		raise IncompleteICS, "File appears to be incomplete"
+
 	if list(oldcal) == oldcal: oldcal = '\r\n'.join(oldcal)
-	oldcal.replace('\r\n ', '')
-	return oldcal.split('\r\n')
+	oldcal = oldcal.replace('\r\n ', '').replace('\r\n\t','')
+	return oldcal.strip().split('\r\n')
 
 
 def lineFolder(oldcal, length=75):
@@ -60,8 +69,97 @@ def lineFolder(oldcal, length=75):
 	return cal
 
+
+def splitFields(cal):
+	'''Takes a list of lines in a calendar file and returns a list of key, value pairs'''
+
+	ical = [tuple(x.split(':',1)) for x in cal]
+
+	# Check that we got 2 items on every line
+	for line in ical:
+		if not len(line) == 2:
+			raise InvalidICS, "Didn't find a content key on: %s"%(line)
+
+	return ical
+
+
 def getContent(url='',stdin=False):
-	pass
+	'''Generic content retriever, DO NOT use this function in a CGI script as
+	it can read from the local disk (which you probably don't want it to).
+	'''
+
+	# Special case, if this is a HTTP url, return the data from it using
+	# the HTTP functions which attempt to play a bit nicer.
+	parsedURL = urlparse.urlparse(url)
+	if 'http' in parsedURL[0]: return getHTTPContent(url)
+
+	if stdin:
+		content = sys.stdin.read()
+		return content
+
+	if not parsedURL[0]:
+		try: content = open(os.path.abspath(url),'r').read()
+		except (IOError, OSError), e:
+			sys.stderr.write('%s\n'%e)
+			sys.exit(1)
+		return content
+
+	# If we've survived, use python's generic URL opening library to handle it
+	import urllib2
+	try:
+		res = urllib2.urlopen(url)
+		content = res.read()
+		res.close()
+	except (urllib2.URLError, OSError), e:
+		sys.stderr.write('%s\n'%e)
+		sys.exit(1)
+	return content
+
+
+def getHTTPContent(url='',cache='.httplib2-cache'):
+	'''This function attempts to play nice when retrieving content from HTTP
+	services. It's what you should use in a CGI script. It will (by default)
+	slurp the first 20 bytes of the file and check that we are indeed looking
+	at an ICS file before going for broke.'''
+
+	try:
+		import httplib2
+	except ImportError:
+		import urllib2
+
+	if not url: return ''
+
+	if 'httplib2' in sys.modules:
+		try: h = httplib2.Http('.httplib2-cache')
+		except OSError: h = httplib2.Http()
+	else: h = False
+
+	try:
+		if h: content = h.request(url)[1]
+		return content
+	except ValueError, e:
+		sys.stderr.write('%s\n'%e)
+		sys.exit(1)
+
+	try:
+		content = urllib2.urlopen(url).read()
+		return content
+	except (urllib2.URLError, OSError), e:
+		sys.stderr.write('%s\n'%e)
+		sys.exit(1)
+
+	return ''
+
+
+def generateRules():
+	'''Attempts to load a series of rules into a list'''
+	try:
+		import parserrules
+	except ImportError:
+		return []
+
+	rules = [getattr(parserrules, rule) for rule in dir(parserrules) if callable(getattr(parserrules, rule))]
+	return rules
 
 
 if __name__ == '__main__':
@@ -85,43 +183,8 @@
 	else: url = ''
 
-	# Work out what url parsers we're going to need based on what the user
-	# gave us on the command line - we do like files after all
-	parsedURL = urlparse.urlparse(url)
-	http = 'http' in parsedURL[0]
-
-	if not parsedURL[0]: u = False
-	else: u = True
-
-	if not options.stdin and http:
-		try:
-			import httplib2
-		except ImportError:
-			import urllib2
-
-	# Try and play nice with HTTP servers unless something goes wrong. We don't
-	# really care about this cache (A lot of ics files seem to be generated with
-	# php which hates caching with a passion).
-	h = False
-	if 'httplib2' in sys.modules:
-		try: h = httplib2.Http('.httplib2-cache')
-		except OSError: h = httplib2.Http()
-
-	# Load urllib2 if this is not a stdin
-	if not options.stdin and (not http or not 'httplib2' in sys.modules):
-		import urllib2
-
-	try:
-		content = u and (h and h.request(url)[1] or urllib2.urlopen(url).read())
-	except (ValueError, urllib2.URLError), e:
-		sys.stderr.write('%s\n'%e)
-		sys.exit(1)
-
-	if not u and not options.stdin:
-		try: content = open(os.path.abspath(url),'r').read()
-		except (IOError, OSError), e:
-			sys.stderr.write('%s\n'%e)
-			sys.exit(1)
-
-	if options.stdin:
-		content = sys.stdin.read()
+	content = getContent(url, options.stdin)
+	cal = lineJoiner(content)
+	ical = splitFields(cal)
+	rules = generateRules()
+	print rules
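
Usage sketch (not part of the commit): assuming the new helpers behave as their docstrings above describe, the refactored __main__ reduces ingestion to a short pipeline. The input path 'example.ics' is hypothetical, and the final print matches the diff, which collects rules from parserrules but does not yet apply them.

	# Sketch only -- mirrors the new __main__ flow in this commit.
	from icalparse import getContent, lineJoiner, splitFields, generateRules

	content = getContent('example.ics')   # hypothetical path; a URL or stdin=True also works
	cal = lineJoiner(content)             # unfolds continuation lines, raises on a bad/incomplete ICS
	ical = splitFields(cal)               # [(key, value), ...], one pair per content line
	rules = generateRules()               # callables found in parserrules, or [] if the module is absent
	print rules                           # as in the new __main__; applying the rules is not shown here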