1.1 --- a/EventAggregatorSupport.py Sun Jul 24 00:58:52 2011 +0200
1.2 +++ b/EventAggregatorSupport.py Mon Jul 25 00:07:03 2011 +0200
1.3 @@ -9,6 +9,8 @@
1.4 """
1.5
1.6 from MoinMoin.Page import Page
1.7 +from MoinMoin.action import cache
1.8 +from MoinMoin import caching
1.9 from MoinMoin import search, version
1.10 from MoinMoin import wikiutil
1.11 import calendar
1.12 @@ -21,6 +23,11 @@
1.13 import urllib
1.14
1.15 try:
1.16 + from cStringIO import StringIO
1.17 +except ImportError:
1.18 + from StringIO import StringIO
1.19 +
1.20 +try:
1.21 set
1.22 except NameError:
1.23 from sets import Set as set
1.24 @@ -1207,22 +1214,52 @@
1.25 else:
1.26 continue
1.27
1.28 - # Access the remote data source.
1.29 -
1.30 - f = urllib.urlopen(url)
1.31 -
1.32 - if f.headers.has_key("content-type"):
1.33 - encoding = getContentEncoding(f.headers["content-type"])
1.34 - else:
1.35 - encoding = None
1.36 -
1.37 - uf = codecs.getreader(encoding or "utf-8")(f)
1.38 -
1.39 + # See if the URL is cached.
1.40 +
1.41 + cache_key = cache.key(request, content=url)
1.42 + cache_entry = caching.CacheEntry(request, "EventAggregator", cache_key, scope='wiki')
1.43 +
1.44 + # If no entry exists, create one with the response from the URL.
1.45 + # NOTE: The cache entry could be invalidated after a period of
1.46 + # NOTE: time, and/or the remote URL could be polled using the
1.47 + # NOTE: 'If-Modified-Since' header (see
1.48 + # NOTE: MoinMoin.action.pollsistersites) to detect updated content.
1.49 +
1.50 + if not cache_entry.exists():
1.51 +
1.52 + # Access the remote data source.
1.53 +
1.54 + cache_entry.open(mode="w")
1.55 + f = urllib.urlopen(url)
1.56 + try:
1.57 + cache_entry.write(url + "\n")
1.58 + cache_entry.write((f.headers.get("content-type") or "") + "\n")
1.59 + cache_entry.write(f.read())
1.60 + finally:
1.61 + cache_entry.close()
1.62 + f.close()
1.63 +
1.64 + # Open the cache entry and read it.
1.65 +
1.66 + cache_entry.open()
1.67 try:
1.68 - resources.append(resource_cls(url, parser(uf)))
1.69 + data = cache_entry.read()
1.70 + finally:
1.71 + cache_entry.close()
1.72 +
1.73 + # Process the entry, parsing the content.
1.74 +
1.75 + f = StringIO(data)
1.76 + try:
1.77 + url = f.readline()
1.78 + encoding = getContentEncoding(f.readline())
1.79 + uf = codecs.getreader(encoding or "utf-8")(f)
1.80 + try:
1.81 + resources.append(resource_cls(url, parser(uf)))
1.82 + finally:
1.83 + uf.close()
1.84 finally:
1.85 f.close()
1.86 - uf.close()
1.87
1.88 except (KeyError, ValueError):
1.89 pass
2.1 --- a/TO_DO.txt Sun Jul 24 00:58:52 2011 +0200
2.2 +++ b/TO_DO.txt Mon Jul 25 00:07:03 2011 +0200
2.3 @@ -11,7 +11,8 @@
2.4 GriCal and External Aggregation
2.5 -------------------------------
2.6
2.7 -Support caching of remote event data.
2.8 +Support temporary caching of remote event data, invalidating the cache
2.9 +periodically and/or actively checking remote sources for updated content.
2.10
2.11 Localised Keywords
2.12 ------------------