# HG changeset patch
# User Paul Boddie
# Date 1367949825 -7200
# Node ID 96b9ac545802cd20c32710e056289b3195dc9cef
# Parent 73efd8e98a62d46f1d0a79c23b3461ff7e7fb6cd
Reorganised the macro, separating update retrieval from formatting, introducing
retrieval of multiple feeds and the aggregation of entries into a single list
that can then be sorted and displayed.
Added a stylesheet for clearer display of updates.

diff -r 73efd8e98a62 -r 96b9ac545802 css/moinshare.css
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/css/moinshare.css	Tue May 07 20:03:45 2013 +0200
@@ -0,0 +1,17 @@
+/* moinshare.css - some additional styles for the SharedContent macro
+   which can be included in screen.css and print.css
+   using...
+
+   @import "moinshare.css";
+
+   ...before any rules.
+
+Copyright (c) 2013 by Paul Boddie
+Licensed under the GNU GPL (v2 or later), see COPYING.txt for details.
+*/
+
+div.moinshare-update {
+    margin: 1em;
+    border: 1px solid black;
+    padding: 0.5em;
+}
diff -r 73efd8e98a62 -r 96b9ac545802 macros/SharedContent.py
--- a/macros/SharedContent.py	Tue May 07 20:01:49 2013 +0200
+++ b/macros/SharedContent.py	Tue May 07 20:03:45 2013 +0200
@@ -6,9 +6,11 @@
     @license: GNU GPL (v2 or later), see COPYING.txt for details.
 """
 
+from DateSupport import getDateTime, DateTime
 from MoinMoin.Page import Page
 from MoinRemoteSupport import *
 from MoinSupport import parseMacroArguments
+from email.utils import parsedate
 import xml.dom.pulldom
 
 try:
@@ -21,6 +23,8 @@
 MAX_ENTRIES = 5
 ATOM_NS = "http://www.w3.org/2005/Atom"
 
+# Utility functions.
+
 def text(element):
     nodes = []
     for node in element.childNodes:
@@ -37,64 +41,78 @@
     else:
         return element.getAttribute("href")
 
-def execute(macro, args):
-    request = macro.request
-    fmt = macro.formatter
-    _ = request.getText
+# Error classes.
+
+class FeedError(Exception):
+    pass
+
+class FeedMissingError(FeedError):
+    pass
 
-    feed_url = None
-    show_content = None
-    max_entries = None
+class FeedContentTypeError(FeedError):
+    pass
+
+# Entry/update classes.
+
+class Update:
+
+    "A feed update entry."
 
-    for arg, value in parseMacroArguments(args):
-        if arg == "url":
-            feed_url = value
-        elif arg == "show":
-            show_content = value in ("true", "True", "yes")
-        elif arg == "limit":
-            try:
-                max_entries = int(value)
-            except ValueError:
-                return fmt.text(_("SharedContent: limit must be set to the maximum number of entries to be shown"))
+    def __init__(self):
+        self.title = None
+        self.link = None
+        self.content = None
+        self.content_type = None
+        self.updated = None
 
-    if not feed_url:
-        return fmt.text(_("SharedContent: a feed URL must be specified"))
+    def __cmp__(self, other):
+        if self.updated is None and other.updated is not None:
+            return 1
+        elif self.updated is not None and other.updated is None:
+            return -1
+        else:
+            return cmp(self.updated, other.updated)
+
+# Feed retrieval.
 
-    show_content = show_content or False
-    max_entries = max_entries or MAX_ENTRIES
+def getUpdates(request, feed_url, max_entries):
+
+    """
+    Using the given 'request', retrieve from 'feed_url' up to the given number
+    'max_entries' of update entries.
+
+    A tuple of the form ((feed_type, channel_title, channel_link), updates) is
+    returned.
+    """
+
+    feed_updates = []
 
     # Obtain the resource, using a cached version if appropriate.
     max_cache_age = int(getattr(request.cfg, "moin_share_max_cache_age", "300"))
     data = getCachedResource(request, feed_url, "MoinShare", "wiki", max_cache_age)
 
     if not data:
-        return fmt.text(_("SharedContent: updates could not be retrieved for %s") % feed_url)
+        raise FeedMissingError
+
+    # Interpret the cached feed.
 
     feed = StringIO(data)
-
     _url, content_type, _encoding, _metadata = getCachedResourceMetadata(feed)
 
     if content_type not in ("application/atom+xml", "application/rss+xml"):
-        return fmt.text(_("SharedContent: updates for %s were not provided in Atom or RSS format") % feed_url)
+        raise FeedContentTypeError
 
     try:
         # Parse each node from the feed.
 
-        title = link = content = content_type = None
         channel_title = channel_link = None
-        output = []
-        append = output.append
-
         feed_type = None
-        in_item = False
+        update = None
         nentries = 0
 
         events = xml.dom.pulldom.parse(feed)
 
-        if not show_content:
-            append(fmt.bullet_list(on=1))
-
         for event, value in events:
             if event == xml.dom.pulldom.START_ELEMENT:
@@ -113,27 +131,38 @@
                 elif feed_type == "rss" and tagname == "item" or \
                     feed_type == "atom" and tagname == "entry":
 
-                    in_item = True
+                    update = Update()
 
                 elif tagname == "title":
                     events.expandNode(value)
 
-                    if in_item:
-                        title = value
+                    if update:
+                        update.title = text(value)
                     else:
-                        channel_title = value
+                        channel_title = text(value)
 
                 elif tagname == "link":
                     events.expandNode(value)
 
-                    if in_item:
-                        link = value
+                    if update:
+                        update.link = linktext(value, feed_type)
                     else:
-                        channel_link = value
+                        channel_link = linktext(value, feed_type)
 
                 elif feed_type == "atom" and tagname == "content":
                     events.expandNode(value)
 
-                    if in_item:
-                        content = value
-                        content_type = value.getAttribute("type")
+                    if update:
+                        update.content = text(value)
+                        update.content_type = value.getAttribute("type")
+
+                elif feed_type == "atom" and tagname == "updated" or \
+                    feed_type == "rss" and tagname == "pubDate":
+                    events.expandNode(value)
+
+                    if update:
+                        if feed_type == "atom":
+                            value = getDateTime(text(value))
+                        else:
+                            value = DateTime(parsedate(text(value)))
+                        update.updated = value
 
             elif event == xml.dom.pulldom.END_ELEMENT:
                 tagname = value.localName
@@ -141,39 +170,104 @@
                 if feed_type == "rss" and tagname == "item" or \
                     feed_type == "atom" and tagname == "entry":
 
-                    in_item = False
-
-                    # Emit content where appropriate.
-                    # NOTE: HTML should be sanitised.
-
-                    if show_content:
-                        if content and content_type == "html":
-                            append(fmt.rawHTML(unescape(text(content))))
-
-                    # Or emit title and link information for items.
+                    if nentries < max_entries:
+                        feed_updates.append(update)
 
-                    elif title and link and nentries < max_entries:
-                        link_text = linktext(link, feed_type)
-
-                        append(fmt.listitem(on=1))
-                        append(fmt.url(on=1, href=link_text))
-                        append(fmt.icon('www'))
-                        append(fmt.text(" " + text(title)))
-                        append(fmt.url(on=0))
-                        append(fmt.listitem(on=0))
-
-                    title = link = content = content_type = None
+                    update = None
                     nentries += 1
 
-        if not show_content:
-            append(fmt.bullet_list(on=0))
+    finally:
+        feed.close()
+
+    return (feed_type, channel_title, channel_link), feed_updates
+
+# The macro itself.
+
+def execute(macro, args):
+    request = macro.request
+    fmt = macro.formatter
+    _ = request.getText
+
+    feed_urls = []
+    show_content = None
+    max_entries = None
+
+    for arg, value in parseMacroArguments(args):
+        if arg == "url":
+            feed_urls.append(value)
+        elif arg == "show":
+            show_content = value in ("true", "True", "yes")
+        elif arg == "limit":
+            try:
+                max_entries = int(value)
+            except ValueError:
+                return fmt.text(_("SharedContent: limit must be set to the maximum number of entries to be shown"))
+
+    if not feed_urls:
+        return fmt.text(_("SharedContent: a feed URL must be specified"))
+
+    show_content = show_content or False
+    max_entries = max_entries or MAX_ENTRIES
+
+    updates = []
+    feeds = []
+    missing = []
+    bad_content = []
+    for feed_url in feed_urls:
+        try:
+            feed_info, feed_updates = getUpdates(request, feed_url, max_entries)
+            updates += feed_updates
+            feeds.append(feed_info)
+        except FeedMissingError:
+            missing.append(feed_url)
+        except FeedContentTypeError:
+            bad_content.append(feed_url)
+
+    output = []
+    append = output.append
+
+    # Show the updates.
+
+    if not show_content:
+        append(fmt.bullet_list(on=1))
+
+    # NOTE: Permit configurable sorting.
+
+    updates.sort()
+    updates.reverse()
+
+    for update in updates:
+
+        # Emit content where appropriate.
+        # NOTE: HTML should be sanitised.
+
+        if show_content:
+            append(fmt.div(on=1, css_class="moinshare-update"))
+            if update.content and update.content_type == "html":
+                append(fmt.rawHTML(unescape(update.content)))
+            append(fmt.div(on=0))
+
+        # Or emit title and link information for items.
+
+        elif update.title and update.link:
+            append(fmt.listitem(on=1, css_class="moinshare-update"))
+            append(fmt.url(on=1, href=update.link))
+            append(fmt.icon('www'))
+            append(fmt.text(" " + update.title))
+            append(fmt.url(on=0))
+            append(fmt.listitem(on=0))
+
+    if not show_content:
+        append(fmt.bullet_list(on=0))
+
+    # Show the feeds.
+
+    for feed_type, channel_title, channel_link in feeds:
         if channel_title and channel_link:
-            channel_link_text = linktext(channel_link, feed_type)
-
-            append(fmt.paragraph(on=1))
-            append(fmt.url(on=1, href=channel_link_text))
-            append(fmt.text(text(channel_title)))
+            append(fmt.paragraph(on=1, css_class="moinshare-feed"))
+            append(fmt.url(on=1, href=channel_link))
+            append(fmt.text(channel_title))
             append(fmt.url(on=0))
             append(fmt.text(" "))
             append(fmt.url(on=1, href=feed_url))
@@ -181,8 +275,17 @@
             append(fmt.url(on=0))
             append(fmt.paragraph(on=0))
 
-    finally:
-        feed.close()
+    # Show errors.
+
+    for feed_url in missing:
+        append(fmt.paragraph(on=1, css_class="moinshare-missing-feed-error"))
+        append(fmt.text(_("SharedContent: updates could not be retrieved for %s") % feed_url))
+        append(fmt.paragraph(on=0))
+
+    for feed_url in bad_content:
+        append(fmt.paragraph(on=1, css_class="moinshare-content-type-feed-error"))
+        append(fmt.text(_("SharedContent: updates for %s were not provided in Atom or RSS format") % feed_url))
+        append(fmt.paragraph(on=0))
 
     return ''.join(output)
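
Example usage (an illustrative sketch, not part of the patch): with this change the macro
accepts several url arguments and aggregates their entries into one sorted list, so a wiki
page might embed something like

    <<SharedContent(url=http://example.com/one.atom,url=http://example.net/two.rss,limit=10)>>

assuming MoinMoin's <<...>> macro syntax and whatever argument form MoinSupport's
parseMacroArguments accepts; the URLs above are placeholders. Each url argument names one
feed, limit caps the entries taken from each feed (MAX_ENTRIES, i.e. 5, is the default), and
show=true switches from the bulleted title/link list to the raw HTML content of each entry,
wrapped in div.moinshare-update blocks styled by css/moinshare.css.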