import sys
import re
-from optparse import OptionParser
+import urlparse
+import os
class InvalidICS(Exception): pass
class notJoined(Exception): pass
-# RFC5545 and RFC5546 iCalendar registries contain upper case letters
-# and dashes only and are separated from the value by a colon (:)
icalEntry = re.compile('^[A-Z\-]+:.*')
def lineJoiner(oldcal):
- '''Unfolds a calendar so that items can be parsed'''
+ '''Takes a string containing a calendar and returns an array of its lines'''
- cal = []
+    if isinstance(oldcal, list):
+ oldcal = '\r\n'.join(oldcal)
- # Strip newlines (
- for line in oldcal:
- line = line.rstrip('\r\n')
-
- # Reassemble broken Lines
- if not line:
- if not cal: continue
- else: cal[-1] += '\\n'
- elif line[0] == ' ':
- if not cal: raise InvalidICS, 'First line of ICS must be element'
- line = line[1:len(line)]
- cal[-1] += line
- elif not icalEntry.match(line):
- if not cal: raise InvalidICS, 'First line of ICS must be element'
- cal[-1] += '\\n' + line
- else:
- if cal: cal[-1] += '\r\n'
- cal.append(line)
+    oldcal = oldcal.replace('\r\n ', '')
+ return oldcal.split('\r\n')
- cal[-1] += '\r\n'
- return cal
-
-def lineSplitter(oldcal, length=75):
+def lineFolder(oldcal, length=75):
'''Folds content lines to a specified length, returns a list'''
+ if length > 75:
+ sys.stderr.write('WARN: lines > 75 octets are not RFC compliant\n')
+
cal = []
sl = length - 1
for line in oldcal:
- # Line & line ending line ending fit inside length, do nothing
+ # Line fits inside length, do nothing
if len(line.rstrip()) <= length:
cal.append(line)
else:
return cal
+def getContent(url='',stdin=False):
+ pass
+
+
if __name__ == '__main__':
+ from optparse import OptionParser
# If the user passed us a 'stdin' argument, we'll go with that,
# otherwise we'll try for a url opener
- parser = OptionParser()
+ parser = OptionParser('usage: %prog [options] url')
parser.add_option('-s', '--stdin', action='store_true', dest='stdin',
default=False, help='Take a calendar from standard input')
    parser.add_option('-o', '--output', dest='outfile', default='',
        help='Write the output to the given file instead of standard output')
(options, args) = parser.parse_args()
- if not options.stdin:
+ if not args and not options.stdin:
+ parser.print_usage()
+ sys.exit(0)
+ elif not options.stdin:
+ url = args[0]
+ else:
+ url = ''
+
+ # Work out what url parsers we're going to need based on what the user
+ # gave us on the command line - we do like files after all
+ parsedURL = urlparse.urlparse(url)
+ http = 'http' in parsedURL[0]
+
+ if not parsedURL[0]: u = False
+ else: u = True
+
+ if not options.stdin and http:
try:
import httplib2
- urllib = False
except ImportError:
- try:
- import urllib
- urllib = True
- except ImportError:
- sys.stderr.write('Failed to find a suitable http downloader\n')
- raise
-
+ import urllib2
+
+ # Try and play nice with HTTP servers unless something goes wrong. We don't
+ # really care about this cache (A lot of ics files seem to be generated with
+ # php which hates caching with a passion).
+ h = False
+ if 'httplib2' in sys.modules:
+ try: h = httplib2.Http('.httplib2-cache')
+ except OSError: h = httplib2.Http()
+
+ # Load urllib2 if this is not a stdin
+    if not options.stdin:
+ import urllib2
+
+ try:
+        content = u and (h.request(url)[1] if h else urllib2.urlopen(url).read())
+ except (ValueError, urllib2.URLError), e:
+ sys.stderr.write('%s\n'%e)
+ sys.exit(1)
+
+ if not u and not options.stdin:
+ try: content = open(os.path.abspath(url),'r').read()
+ except (IOError, OSError), e:
+ sys.stderr.write('%s\n'%e)
+ sys.exit(1)
+
+ if options.stdin:
+ content = sys.stdin.read()