#!/usr/bin/env python

"""Archive the current revision of every page of a MoinMoin Wiki.

Usage: archive_wiki.py WIKI_DIR ARCHIVE_DIR [-n]

Copies, for each page under WIKI_DIR/wiki/data/pages that has a valid
"current" revision pointer, that single revision (renumbered 00000001),
its attachments, and all user files into the same layout under
ARCHIVE_DIR.  With -n (dry run) every action is printed but nothing is
written.

Note: uses glob + shutil.copy instead of the original os.system("cp ...")
calls, so paths containing spaces or shell metacharacters are safe and no
shell is ever invoked.
"""

import glob
import os
import shutil
import sys


def _ensure_dir(path, dry_run):
    # Create path (and parents) unless it exists or this is a dry run.
    if not dry_run and not os.path.exists(path):
        os.makedirs(path)


def _copy_files(pattern, dest_dir, dry_run):
    # Copy every regular file matching the glob pattern into dest_dir,
    # reporting each copy.  Directories are skipped, matching the plain
    # (non-recursive) "cp src/* dest" behavior of the original script.
    for path in sorted(glob.glob(pattern)):
        print("copy %s -> %s" % (path, dest_dir))
        if not dry_run and os.path.isfile(path):
            shutil.copy(path, dest_dir)


def main(argv=None):
    """Run the archiver.  argv defaults to sys.argv[1:] (script usage)."""

    args = list(sys.argv[1:] if argv is None else argv)

    # "-n" may appear anywhere among the arguments, as before.
    dry_run = "-n" in args
    if dry_run:
        args = [arg for arg in args if arg != "-n"]

    # Friendly usage message instead of an unpacking ValueError.
    if len(args) < 2:
        print("Usage: archive_wiki.py WIKI_DIR ARCHIVE_DIR [-n]")
        sys.exit(1)

    wiki_dir, archive_dir = args[0], args[1]

    pages = os.path.join(wiki_dir, "wiki", "data", "pages")
    users = os.path.join(wiki_dir, "wiki", "data", "user")

    # Make the archive's page and user directories.

    archive_pages = os.path.join(archive_dir, "wiki", "data", "pages")
    archive_users = os.path.join(archive_dir, "wiki", "data", "user")
    _ensure_dir(archive_pages, dry_run)
    _ensure_dir(archive_users, dry_run)

    # Copy user details.  (Unlike the original, the planned copies are
    # also reported in dry-run mode, consistent with the page handling.)

    print("Copying users...")
    _copy_files(os.path.join(users, "*"), archive_users, dry_run)

    # Find active Wiki pages: a page is archived only when its "current"
    # pointer names a revision file that actually exists.

    for page in os.listdir(pages):
        page_dir = os.path.join(pages, page)

        if not os.path.isdir(page_dir):
            continue
        current_path = os.path.join(page_dir, "current")
        if not os.path.exists(current_path):
            continue

        f = open(current_path)
        try:
            current = f.read().strip()
        finally:
            f.close()

        page_revisions = os.path.join(page_dir, "revisions")
        revision_path = os.path.join(page_revisions, current)

        # Skip pages whose current revision is missing (e.g. deleted).
        if not os.path.exists(revision_path):
            continue

        archive_page_dir = os.path.join(archive_pages, page)
        archive_page_revisions = os.path.join(archive_page_dir, "revisions")

        print("Archiving %s" % page)
        print("Revision %s" % current)

        # The archive keeps a single revision, renumbered 00000001.
        archived_revision = os.path.join(archive_page_revisions, "00000001")
        print("copy %s -> %s" % (revision_path, archived_revision))

        if not dry_run:
            _ensure_dir(archive_page_revisions, dry_run)

            f = open(os.path.join(archive_page_dir, "current"), "w")
            try:
                f.write("00000001")
            finally:
                f.close()

            shutil.copy(revision_path, archived_revision)

        # Copy any attachments belonging to the page.

        attachments_dir = os.path.join(page_dir, "attachments")
        if os.path.exists(attachments_dir):
            print("Archiving attachments...")
            archive_attachments_dir = os.path.join(archive_page_dir,
                                                   "attachments")
            _ensure_dir(archive_attachments_dir, dry_run)
            _copy_files(os.path.join(attachments_dir, "*"),
                        archive_attachments_dir, dry_run)

        print("")

    print("Now run the following, if adding to a live Wiki:")
    print("chown -R www-data: %s/www %s/wiki" % (archive_dir, archive_dir))


if __name__ == "__main__":
    main()

# vim: tabstop=4 expandtab shiftwidth=4