launchpad-reviewers team mailing list archive

[Merge] lp:~stub/launchpad/update-storm into lp:~stub/launchpad/db-devel


Stuart Bishop has proposed merging lp:~stub/launchpad/update-storm into lp:~stub/launchpad/db-devel.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)
Related bugs:
  #352965 Update Storm
  https://bugs.launchpad.net/bugs/352965
  #388798 further precache work
  https://bugs.launchpad.net/bugs/388798
  #392016 Use GenerationalCache implementation from Storm 0.15
  https://bugs.launchpad.net/bugs/392016
  #393625 update-pkgcache using too much memory on staging
  https://bugs.launchpad.net/bugs/393625
  #670906 In() casts str strings differently to ==
  https://bugs.launchpad.net/bugs/670906


This branch contains code changes to migrate us to modern psycopg2, such as the version packaged in Lucid, plus some delinting.

Rather than change the tests, I elected to cast to Unicode in the main code; fixing the tests might not be enough, as other call sites might still be sending str on untested code paths.
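
As a sketch of the pattern (the Person class, ensure_unicode helper and find_people function below are hypothetical illustrations, not code from this branch): coerce str to unicode at the call site, so that Storm's In() hands psycopg2 the same type that == would (bug #670906).

    from storm.locals import Int, Unicode

    class Person(object):
        # Hypothetical minimal Storm class, for illustration only.
        __storm_table__ = 'person'
        id = Int(primary=True)
        name = Unicode()

    def ensure_unicode(value):
        # Hypothetical helper: pass unicode (and None) through unchanged;
        # assume str values are UTF-8.
        if value is None or isinstance(value, unicode):
            return value
        return value.decode('UTF-8')

    def find_people(store, names):
        # Cast in the main code rather than only fixing the tests: call
        # sites on untested code paths may still pass str, and In() casts
        # str differently to == when the query is built.
        return store.find(
            Person, Person.name.is_in(
                [ensure_unicode(name) for name in names]))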

-- 
The attached diff has been truncated due to its size.
https://code.launchpad.net/~stub/launchpad/update-storm/+merge/40263
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~stub/launchpad/update-storm into lp:~stub/launchpad/db-devel.
=== modified file '.bzrignore'
--- .bzrignore	2009-09-10 06:11:21 +0000
+++ .bzrignore	2010-11-07 00:31:57 +0000
@@ -30,11 +30,12 @@
 database/sampledata/newsampledata-dev.sql
 database/sampledata/lintdata.sql
 database/sampledata/lintdata-dev.sql
-lib/canonical/launchpad/apidoc/index.html
+lib/canonical/launchpad/apidoc/*.html
 xxx-report.*
-lib/canonical/launchpad/apidoc/wadl-development.xml
+lib/canonical/launchpad/apidoc/wadl-development-*.xml
 lib/canonical/launchpad/apidoc/wadl-test-playground.xml
 lib/canonical/launchpad/icing/build/*
+lib/canonical/launchpad/icing/combo.css
 trace.log
 test-appserver-layer-trace.log
 bin
@@ -51,3 +52,32 @@
 ./production-configs
 bzr.dev
 _trial_temp
+lazr-js
+.bazaar
+.cache
+.subversion
+.testrepository
+.memcache.pid
+./pipes
+lib/canonical/launchpad/apidoc/wadl-development-*.xml
+tags.new
+lp-clustered.dot
+lp-clustered.sfood
+lp-clustered.svg
+lp.sfood
+apidocs
+twistd.pid
+lib/canonical/launchpad/apidoc
+*.prof
+lib/canonical/launchpad-buildd_*.dsc
+lib/canonical/launchpad-buildd_*.tar.gz
+lib/canonical/launchpad-buildd_*_all.deb
+lib/canonical/launchpad-buildd_*.changes
+lib/canonical/launchpad-buildd_*_source.build
+lib/canonical/launchpad-buildd_*_source.changes
+lib/canonical/buildd/debian/*
+lib/canonical/buildd/launchpad-files/*
+*.pt.py
+.project
+.pydevproject
+librarian.log

=== added file '.ctags'
--- .ctags	1970-01-01 00:00:00 +0000
+++ .ctags	2010-11-07 00:31:57 +0000
@@ -0,0 +1,4 @@
+--python-kinds=-iv
+--exclude=*-min.js
+--exclude=*-debug.js
+--extra=+f

=== added file '.testr.conf'
--- .testr.conf	1970-01-01 00:00:00 +0000
+++ .testr.conf	2010-11-07 00:31:57 +0000
@@ -0,0 +1,3 @@
+[DEFAULT]
+test_command=xvfb-run ./bin/test --subunit $IDOPTION
+test_id_option=--load-list $IDFILE

=== modified file 'Makefile'
--- Makefile	2009-08-21 17:50:58 +0000
+++ Makefile	2010-11-07 00:31:57 +0000
@@ -1,8 +1,7 @@
 # This file modified from Zope3/Makefile
 # Licensed under the ZPL, (c) Zope Corporation and contributors.
 
-PYTHON_VERSION=2.4
-PYTHON=python${PYTHON_VERSION}
+PYTHON=python
 WD:=$(shell pwd)
 PY=$(WD)/bin/py
 PYTHONPATH:=$(WD)/lib:$(WD)/lib/mailman:${PYTHONPATH}
@@ -15,7 +14,12 @@
 SHHH=utilities/shhh.py
 HERE:=$(shell pwd)
 
-LPCONFIG=development
+LPCONFIG?=development
+
+JSFLAGS=
+ICING=lib/canonical/launchpad/icing
+LP_BUILT_JS_ROOT=${ICING}/build
+LAZR_BUILT_JS_ROOT=lazr-js/build
 
 MINS_TO_SHUTDOWN=15
 
@@ -23,12 +27,23 @@
 
 BZR_VERSION_INFO = bzr-version-info.py
 
-WADL_FILE = lib/canonical/launchpad/apidoc/wadl-$(LPCONFIG).xml
-API_INDEX = lib/canonical/launchpad/apidoc/index.html
+APIDOC_DIR = lib/canonical/launchpad/apidoc
+WADL_TEMPLATE = $(APIDOC_DIR).tmp/wadl-$(LPCONFIG)-%(version)s.xml
+API_INDEX = $(APIDOC_DIR)/index.html
 
-EXTRA_JS_FILES=lib/canonical/launchpad/icing/MochiKit.js \
-				$(shell $(HERE)/utilities/yui-deps.py) \
-				lib/canonical/launchpad/icing/lazr/build/lazr.js
+# Do not add bin/buildout to this list.
+# It is impossible to get buildout to tell us all the files it would
+# build, since each egg's setup.py doesn't tell us that information.
+BUILDOUT_BIN = \
+    $(PY) bin/apiindex bin/combine-css bin/fl-build-report \
+    bin/fl-credential-ctl bin/fl-install-demo bin/fl-monitor-ctl \
+    bin/fl-record bin/fl-run-bench bin/fl-run-test bin/googletestservice \
+    bin/i18ncompile bin/i18nextract bin/i18nmergeall bin/i18nstats \
+    bin/harness bin/iharness bin/ipy bin/jsbuild bin/jslint bin/jssize \
+    bin/jstest bin/killservice bin/kill-test-services bin/lint.sh \
+    bin/lp-windmill bin/retest bin/run bin/sprite-util \
+    bin/start_librarian bin/stxdocs bin/tags bin/test bin/tracereport \
+    bin/twistd bin/update-download-cache bin/windmill
 
 # DO NOT ALTER : this should just build by default
 default: inplace
@@ -43,22 +58,14 @@
 hosted_branches: $(PY)
 	$(PY) ./utilities/make-dummy-hosted-branches
 
-$(WADL_FILE): $(BZR_VERSION_INFO)
-	LPCONFIG=$(LPCONFIG) $(PY) ./utilities/create-lp-wadl.py > $@.tmp
-	mv $@.tmp $@
-
-$(API_INDEX): $(WADL_FILE)
-	bin/apiindex $(WADL_FILE) > $@.tmp
-	mv $@.tmp $@
+$(API_INDEX): $(BZR_VERSION_INFO)
+	mkdir -p $(APIDOC_DIR).tmp
+	LPCONFIG=$(LPCONFIG) $(PY) ./utilities/create-lp-wadl-and-apidoc.py --force "$(WADL_TEMPLATE)"
+	mv $(APIDOC_DIR).tmp $(APIDOC_DIR)
 
 apidoc: compile $(API_INDEX)
 
-check_loggerhead_on_merge:
-	# Loggerhead doesn't depend on anything else in rocketfuel and nothing
-	# depends on it (yet).
-	make -C sourcecode/loggerhead check PYTHON=${PYTHON} \
-		PYTHON_VERSION=${PYTHON_VERSION} PYTHONPATH=$(PYTHONPATH)
-
+# Run by PQM.
 check_merge: $(PY)
 	[ `PYTHONPATH= bzr status -S database/schema/ | \
 		grep -v "\(^P\|pending\|security.cfg\|Makefile\|unautovacuumable\|_pythonpath.py\)" | wc -l` -eq 0 ]
@@ -67,22 +74,18 @@
 check_db_merge: $(PY)
 	${PY} lib/canonical/tests/test_no_conflict_marker.py
 
-# This can be removed once we move to zc.buildout and we have versioned
-# dependencies, but for now we run both Launchpad and all other
-# dependencies tests for any merge to sourcecode.
-check_sourcecode_merge: check
-	$(MAKE) -C sourcecode check PYTHON=${PYTHON} \
-		PYTHON_VERSION=${PYTHON_VERSION} PYTHONPATH=$(PYTHONPATH)
-
 check_config: build
 	bin/test -m canonical.config.tests -vvt test_config
 
+check_schema: build
+	${PY} utilities/check-db-revision.py
+
 # Clean before running the test suite, since the build might fail depending
 # what source changes happened. (e.g. apidoc depends on interfaces)
 check: clean build
 	# Run all tests. test_on_merge.py takes care of setting up the
 	# database.
-	${PY} -t ./test_on_merge.py $(VERBOSITY)
+	${PY} -t ./test_on_merge.py $(VERBOSITY) $(TESTOPTS)
 
 jscheck: build
 	# Run all JavaScript integration tests.  The test runner takes care of
@@ -90,12 +93,21 @@
 	@echo
 	@echo "Running the JavaScript integration test suite"
 	@echo
+	bin/test $(VERBOSITY) $(TESTOPTS) --layer=WindmillLayer
+
+jscheck_functest: build
+    # Run the old functest Windmill integration tests.  The test runner
+    # takes care of setting up the test environment.
+	@echo
+	@echo "Running Windmill functest integration test suite"
+	@echo
 	bin/jstest
 
 check_mailman: build
 	# Run all tests, including the Mailman integration
 	# tests. test_on_merge.py takes care of setting up the database.
-	${PY} -t ./test_on_merge.py $(VERBOSITY) --layer=MailmanLayer
+	${PY} -t ./test_on_merge.py $(VERBOSITY) $(TESTOPTS) \
+		--layer=MailmanLayer
 
 lint: ${PY}
 	@bash ./bin/lint.sh
@@ -114,37 +126,90 @@
 
 inplace: build
 
-build: $(BZR_VERSION_INFO) compile apidoc
+build: compile apidoc jsbuild css_combine
+
+css_combine: sprite_css bin/combine-css
+	${SHHH} bin/combine-css
+
+sprite_css: ${LP_BUILT_JS_ROOT}/style-3-0.css
+
+${LP_BUILT_JS_ROOT}/style-3-0.css: bin/sprite-util ${ICING}/style-3-0.css.in ${ICING}/icon-sprites.positioning
+	${SHHH} bin/sprite-util create-css
+
+sprite_image:
+	${SHHH} bin/sprite-util create-image
+
+jsbuild_lazr: bin/jsbuild
+	# We absolutely do not want to include the lazr.testing module and its
+	# jsTestDriver test harness modifications in the lazr.js and launchpad.js
+	# roll-up files.  They fiddle with built-in functions!  See Bug 482340.
+	${SHHH} bin/jsbuild $(JSFLAGS) -b $(LAZR_BUILT_JS_ROOT) -x testing/ -c $(LAZR_BUILT_JS_ROOT)/yui
+
+jsbuild: jsbuild_lazr bin/jsbuild bin/jssize
+	${SHHH} bin/jsbuild \
+		$(JSFLAGS) \
+		-n launchpad \
+		-s lib/canonical/launchpad/javascript \
+		-b $(LP_BUILT_JS_ROOT) \
+		$(shell $(HERE)/utilities/yui-deps.py) \
+		$(shell $(PY) $(HERE)/utilities/lp-deps.py) \
+		lib/canonical/launchpad/icing/lazr/build/lazr.js
+	${SHHH} bin/jssize
 
 eggs:
 	# Usually this is linked via link-external-sourcecode, but in
 	# deployment we create this ourselves.
 	mkdir eggs
 
+# LP_SOURCEDEPS_PATH should point to the sourcecode directory, but we
+# want the parent directory where the download-cache and eggs directory
+# are. We re-use the variable that is used by the rocketfuel-get script.
 download-cache:
+ifdef LP_SOURCEDEPS_PATH
+	utilities/link-external-sourcecode $(LP_SOURCEDEPS_PATH)/..
+else
 	@echo "Missing ./download-cache."
 	@echo "Developers: please run utilities/link-external-sourcecode."
 	@exit 1
+endif
+
+buildonce_eggs: $(PY)
+	find eggs -name '*.pyc' -exec rm {} \;
 
 # The download-cache dependency comes *before* eggs so that developers get the
 # warning before the eggs directory is made.  The target for the eggs directory
 # is only there for deployment convenience.
+# Note that the buildout version must be maintained here and in versions.cfg
+# to make sure that the build does not go over the network.
 bin/buildout: download-cache eggs
 	$(SHHH) PYTHONPATH= $(PYTHON) bootstrap.py\
-                --ez_setup-source=ez_setup.py \
-		--download-base=download-cache/dist --eggs=eggs
-
-$(PY): bin/buildout versions.cfg $(BUILDOUT_CFG) setup.py
+		--setup-source=ez_setup.py \
+		--download-base=download-cache/dist --eggs=eggs \
+		--version=1.5.1
+
+# This target is used by LOSAs to prepare a build to be pushed out to
+# destination machines.  We only want eggs: they are the expensive bits,
+# and the other bits might run into problems like bug 575037.  This
+# target runs buildout, and then removes everything created except for
+# the eggs.
+build_eggs: $(BUILDOUT_BIN) clean_buildout
+
+# This builds bin/py and all the other bin files except bin/buildout.
+# Remove the target before calling buildout to ensure that buildout
+# updates the timestamp.
+$(BUILDOUT_BIN): bin/buildout versions.cfg $(BUILDOUT_CFG) setup.py
+	$(RM) $@
 	$(SHHH) PYTHONPATH= ./bin/buildout \
                 configuration:instance_name=${LPCONFIG} -c $(BUILDOUT_CFG)
 
-compile: $(PY)
+# bin/compile_templates is responsible for building all chameleon templates,
+# of which there is currently one, but of which many more are coming.
+compile: $(PY) $(BZR_VERSION_INFO)
+	mkdir -p /var/tmp/vostok-archive
 	${SHHH} $(MAKE) -C sourcecode build PYTHON=${PYTHON} \
-	    PYTHON_VERSION=${PYTHON_VERSION} LPCONFIG=${LPCONFIG}
-	${SHHH} LPCONFIG=${LPCONFIG} $(PY) -t buildmailman.py
-	${SHHH} $(PY) sourcecode/lazr-js/tools/build.py \
-		-n launchpad -s lib/canonical/launchpad/javascript \
-		-b lib/canonical/launchpad/icing/build $(EXTRA_JS_FILES)
+	    LPCONFIG=${LPCONFIG}
+	${SHHH} LPCONFIG=${LPCONFIG} ${PY} -t buildmailman.py
+	bin/compile_templates
 
 test_build: build
 	bin/test $(TESTFLAGS) $(TESTOPTS)
@@ -158,50 +223,57 @@
 ftest_inplace: inplace
 	bin/test -f $(TESTFLAGS) $(TESTOPTS)
 
-run: inplace stop
+merge-proposal-jobs:
+	# Handle merge proposal email jobs.
+	$(PY) cronscripts/merge-proposal-jobs.py -v
+
+run: check_schema inplace stop
 	$(RM) thread*.request
-	bin/run -r librarian,google-webservice -i $(LPCONFIG)
+	bin/run -r librarian,google-webservice,memcached -i $(LPCONFIG)
 
-start-gdb: inplace stop support_files
+start-gdb: check_schema inplace stop support_files
 	$(RM) thread*.request
 	nohup gdb -x run.gdb --args bin/run -i $(LPCONFIG) \
 		-r librarian,google-webservice
 		> ${LPCONFIG}-nohup.out 2>&1 &
 
-run_all: inplace stop hosted_branches
+run_all: check_schema inplace stop
 	$(RM) thread*.request
-	bin/run -r librarian,buildsequencer,sftp,mailman,codebrowse,google-webservice -i $(LPCONFIG)
+	bin/run -r librarian,sftp,forker,mailman,codebrowse,google-webservice,memcached \
+	    -i $(LPCONFIG)
 
 run_codebrowse: build
-	BZR_PLUGIN_PATH=bzrplugins $(PY) sourcecode/launchpad-loggerhead/start-loggerhead.py -f
+	BZR_PLUGIN_PATH=bzrplugins $(PY) scripts/start-loggerhead.py -f
 
 start_codebrowse: build
-	BZR_PLUGIN_PATH=$(shell pwd)/bzrplugins $(PY) sourcecode/launchpad-loggerhead/start-loggerhead.py
+	BZR_PLUGIN_PATH=$(shell pwd)/bzrplugins $(PY) scripts/start-loggerhead.py
 
 stop_codebrowse:
-	$(PY) sourcecode/launchpad-loggerhead/stop-loggerhead.py
-
-start_librarian: build
+	$(PY) scripts/stop-loggerhead.py
+
+run_codehosting: check_schema inplace stop
+	$(RM) thread*.request
+	bin/run -r librarian,sftp,forker,codebrowse -i $(LPCONFIG)
+
+start_librarian: compile
 	bin/start_librarian
 
 stop_librarian:
 	bin/killservice librarian
 
 pull_branches: support_files
-	# Mirror the hosted branches in the development upload area to the
-	# mirrored area.
-	$(PY) cronscripts/supermirror-pull.py upload
+	$(PY) cronscripts/supermirror-pull.py
 
 scan_branches:
 	# Scan branches from the filesystem into the database.
-	$(PY) cronscripts/branch-scanner.py
+	$(PY) cronscripts/scan_branches.py
 
-sync_branches: pull_branches scan_branches
+sync_branches: pull_branches scan_branches merge-proposal-jobs
 
 $(BZR_VERSION_INFO):
 	scripts/update-bzr-version-info.sh
 
-support_files: $(WADL_FILE) $(BZR_VERSION_INFO)
+support_files: $(API_INDEX) $(BZR_VERSION_INFO)
 
 # Intended for use on developer machines
 start: inplace stop support_files initscript-start
@@ -223,7 +295,7 @@
 # servers, where we know we don't need the extra steps in a full
 # "make stop" because of how the code is deployed/built.
 initscript-stop:
-	bin/killservice librarian buildsequencer launchpad mailman
+	bin/killservice librarian launchpad mailman
 
 shutdown: scheduleoutage stop
 	$(RM) +maintenancetime.txt
@@ -234,43 +306,56 @@
 	echo Sleeping ${MINS_TO_SHUTDOWN} mins
 	sleep ${MINS_TO_SHUTDOWN}m
 
-harness:
+harness: bin/harness
 	bin/harness
 
-iharness:
+iharness: bin/iharness
 	bin/iharness
 
 rebuildfti:
 	@echo Rebuilding FTI indexes on launchpad_dev database
 	$(PY) database/schema/fti.py -d launchpad_dev --force
 
-clean:
+clean_js:
+	$(RM) $(LP_BUILT_JS_ROOT)/launchpad.js
+	$(RM) -r $(LAZR_BUILT_JS_ROOT)
+
+clean_buildout:
+	$(RM) -r bin
+	$(RM) -r parts
+	$(RM) -r develop-eggs
+	$(RM) .installed.cfg
+	$(RM) -r build
+	$(RM) _pythonpath.py
+
+clean: clean_js clean_buildout
 	$(MAKE) -C sourcecode/pygettextpo clean
+	# XXX gary 2009-11-16 bug 483782
+	# The pygettextpo Makefile should have this next line in it for its make
+	# clean, and then we should remove this line.
+	$(RM) sourcecode/pygpgme/gpgme/*.so
 	if test -f sourcecode/mailman/Makefile; then \
 		$(MAKE) -C sourcecode/mailman clean; \
 	fi
 	find . -path ./eggs -prune -false -o \
 		-type f \( -name '*.o' -o -name '*.so' -o -name '*.la' -o \
-	    -name '*.lo' -o -name '*.py[co]' -o -name '*.dll' \) \
+	    -name '*.lo' -o -name '*.py[co]' -o -name '*.dll' -o \
+	    -name '*.pt.py' \) \
 	    -print0 | xargs -r0 $(RM)
-	$(RM) -r bin
-	$(RM) -r parts
-	$(RM) .installed.cfg
-	$(RM) -r build
 	$(RM) thread*.request
 	$(RM) -r lib/mailman
 	$(RM) -rf lib/canonical/launchpad/icing/build/*
 	$(RM) -r $(CODEHOSTING_ROOT)
-	$(RM) $(WADL_FILE) $(API_INDEX)
+	$(RM) -rf $(APIDOC_DIR)
+	$(RM) -rf $(APIDOC_DIR).tmp
 	$(RM) $(BZR_VERSION_INFO)
-	$(RM) _pythonpath.py
+	$(RM) +config-overrides.zcml
 	$(RM) -rf \
 			  /var/tmp/builddmaster \
 			  /var/tmp/bzrsync \
 			  /var/tmp/codehosting.test \
 			  /var/tmp/codeimport \
 			  /var/tmp/fatsam.appserver \
-			  /var/tmp/launchpad_mailqueue \
 			  /var/tmp/lperr \
 			  /var/tmp/lperr.test \
 			  /var/tmp/mailman \
@@ -278,6 +363,11 @@
 			  /var/tmp/ppa \
 			  /var/tmp/ppa.test \
 			  /var/tmp/zeca
+	# /var/tmp/launchpad_mailqueue is created read-only on ec2test
+	# instances.
+	if [ -w /var/tmp/launchpad_mailqueue ]; then $(RM) -rf /var/tmp/launchpad_mailqueue; fi
+	$(RM) -f lp.sfood lp-clustered.sfood lp-clustered.dot lp-clustered.svg
+
 
 realclean: clean
 	$(RM) TAGS tags
@@ -302,6 +392,8 @@
 launchpad.pot:
 	bin/i18nextract.py
 
+# Called by the rocketfuel-setup script. You probably don't want to run this
+# on its own.
 install: reload-apache
 
 copy-certificates:
@@ -313,17 +405,16 @@
 	# We insert the absolute path to the branch-rewrite script
 	# into the Apache config as we copy the file into position.
 	sed -e 's,%BRANCH_REWRITE%,$(shell pwd)/scripts/branch-rewrite.py,' configs/development/local-launchpad-apache > /etc/apache2/sites-available/local-launchpad
+	cp configs/development/local-vostok-apache /etc/apache2/sites-available/local-vostok
 	touch /var/tmp/bazaar.launchpad.dev/rewrite.log
 	chown $(SUDO_UID):$(SUDO_GID) /var/tmp/bazaar.launchpad.dev/rewrite.log
 
 enable-apache-launchpad: copy-apache-config copy-certificates
 	a2ensite local-launchpad
+	a2ensite local-vostok
 
 reload-apache: enable-apache-launchpad
-	/etc/init.d/apache2 reload
-
-static:
-	$(PY) scripts/make-static.py
+	/etc/init.d/apache2 restart
 
 TAGS: compile
 	# emacs tags
@@ -337,8 +428,45 @@
 	# idutils ID file
 	bin/tags -i
 
+lp.sfood:
+	# Generate import dependency graph
+	sfood -i -u -I lib/sqlobject -I lib/schoolbell -I lib/devscripts -I lib/contrib \
+	-I lib/canonical/not-used lib/canonical lib/lp 2>/dev/null | grep -v contrib/ \
+	| grep -v sqlobject | grep -v BeautifulSoup | grep -v psycopg \
+	| grep -v schoolbell > lp.sfood.tmp
+	mv lp.sfood.tmp lp.sfood
+
+
+lp-clustered.sfood: lp.sfood lp-sfood-packages
+	# Cluster the import dependency graph
+	sfood-cluster -f lp-sfood-packages < lp.sfood > lp-clustered.sfood.tmp
+	mv lp-clustered.sfood.tmp lp-clustered.sfood
+
+
+lp-clustered.dot: lp-clustered.sfood
+	# Build the visual graph
+	sfood-graph -p < lp-clustered.sfood > lp-clustered.dot.tmp
+	mv lp-clustered.dot.tmp lp-clustered.dot
+
+
+lp-clustered.svg: lp-clustered.dot
+	# Render to svg
+	dot -Tsvg < lp-clustered.dot > lp-clustered.svg.tmp
+	mv lp-clustered.svg.tmp lp-clustered.svg
+
+PYDOCTOR = pydoctor
+PYDOCTOR_OPTIONS =
+
+pydoctor:
+	$(PYDOCTOR) --make-html --html-output=apidocs --add-package=lib/lp \
+		--add-package=lib/canonical --project-name=Launchpad \
+		--docformat restructuredtext --verbose-about epytext-summary \
+		$(PYDOCTOR_OPTIONS)
+
 .PHONY: apidoc check tags TAGS zcmldocs realclean clean debug stop\
 	start run ftest_build ftest_inplace test_build test_inplace pagetests\
-	check check_loggerhead_on_merge  check_merge check_sourcecode_merge \
+	check check_merge \
 	schema default launchpad.pot check_merge_ui pull scan sync_branches\
-	reload-apache hosted_branches check_db_merge check_mailman check_config
+	reload-apache hosted_branches check_db_merge check_mailman check_config\
+	jsbuild jsbuild_lazr clean_js clean_buildout buildonce_eggs build_eggs\
+	sprite_css sprite_image css_combine compile check_schema pydoctor

=== modified file 'README'
--- README	2009-03-24 12:43:49 +0000
+++ README	2010-11-07 00:31:57 +0000
@@ -1,4 +1,106 @@
-This is the top level project, that supplies the infrastructure for testing,
-and running launchpad.
-
-Documentation is in the doc directory or on the wiki.
+====================
+README for Launchpad
+====================
+
+Launchpad is an open source suite of tools that help people and teams to work
+together on software projects.  Unlike many open source projects, Launchpad
+isn't something you install and run yourself (although you are welcome to do
+so); instead, contributors help make <https://launchpad.net> better.
+
+Launchpad is a project of Canonical <http://www.canonical.com> and has
+received many contributions from many wonderful people
+<https://dev.launchpad.net/Contributions>.
+
+If you want help using Launchpad, then please visit our help wiki at:
+
+    https://help.launchpad.net
+
+If you'd like to contribute to Launchpad, have a look at:
+
+    https://dev.launchpad.net
+
+Alternatively, have a poke around in the code, which you probably already know
+how to get if you are reading this file.
+
+
+Getting started
+===============
+
+There's a full guide for getting up-and-running with a development Launchpad
+environment at <https://dev.launchpad.net/Getting>.  When you are ready to
+submit a patch, please consult <https://dev.launchpad.net/PatchSubmission>.
+
+Our bug tracker is at <https://bugs.launchpad.net/launchpad/> and you can get
+the source code any time by doing:
+
+  $ bzr branch lp:launchpad
+
+
+Navigating the tree
+-------------------
+
+The Launchpad tree is big, messy and changing.  Sorry about that.  Don't panic
+though, it can sense fear.  Keep a firm grip on `grep` and pay attention to
+these important top-level folders:
+
+  bin/, utilities/
+    Where you will find scripts intended for developers and admins.  There's
+    no rhyme or reason to what goes in bin/ and what goes in utilities/, so
+    take a look in both. bin/ will be empty in a fresh checkout, the actual
+    content lives in 'buildout-templates'.
+
+  configs/
+    Configuration files for various kinds of Launchpad instances.
+    'development' and 'testrunner' are of particular interest to developers.
+
+  cronscripts/
+    Scripts that are run on actual production instances of Launchpad as
+    cronjobs.
+
+  daemons/
+    Entry points for various daemons that form part of Launchpad.
+
+  database/
+    Our database schema, our sample data, and some other stuff that causes
+    fear.
+
+  doc/
+    General system-wide documentation. You can also find documentation on
+    <https://dev.launchpad.net>, in docstrings and in doctests.
+
+  lib/
+    Where the vast majority of the code lives, along with our templates, tests
+    and the bits of our documentation that are written as doctests. 'lp' and
+    'canonical' are the two most interesting packages. Note that 'canonical'
+    is deprecated in favour of 'lp'.  To learn more about how the 'lp' package
+    is laid out, take a look at its docstring.
+
+  Makefile
+    Ahh, bliss.  The Makefile has all sorts of goodies.  If you spend any
+    length of time hacking on Launchpad, you'll use it often.  The most
+    important targets are 'make clean', 'make compile', 'make schema', 'make
+    run' and 'make run_all'.
+
+  scripts/
+    Scripts that are run on actual production instances of Launchpad,
+    generally triggered by some automatic process.
+
+
+You can spend years hacking on Launchpad full-time and not know what all of
+the files in the top-level directory are for.  However, here's a guide to some
+of the ones that come up from time to time.
+
+  buildout-templates/
+    Templates that are generated into actual files, normally bin/ scripts,
+    when buildout is run. If you want to change the behaviour of bin/test,
+    look here.
+
+  bzrplugins/, optionalbzrplugins/
+    Bazaar plugins used in running Launchpad.
+
+  sourcecode/
+    A directory into which we symlink branches of some of Launchpad's
+    dependencies.  Don't ask.
+
+You never have to care about 'benchmarks', 'override-includes' or
+'package-includes'.

=== modified file 'bootstrap.py'
--- bootstrap.py	2009-08-05 18:52:52 +0000
+++ bootstrap.py	2010-11-07 00:31:57 +0000
@@ -1,6 +1,6 @@
 ##############################################################################
 #
-# Copyright (c) 2006 Zope Corporation and Contributors.
+# Copyright (c) 2006 Zope Foundation and Contributors.
 # All Rights Reserved.
 #
 # This software is subject to the provisions of the Zope Public License,
@@ -16,106 +16,160 @@
 Simply run this script in a directory containing a buildout.cfg.
 The script accepts buildout command-line options, so you can
 use the -c option to specify an alternate configuration file.
-
-$Id: bootstrap.py 101930 2009-07-15 18:34:35Z gary $
 """
 
-import os, re, shutil, sys, tempfile, textwrap, urllib, urllib2
-
-# We have to manually parse our options rather than using one of the stdlib
-# tools because we want to pass the ones we don't recognize along to
-# zc.buildout.buildout.main.
-
-configuration = {
-    '--ez_setup-source': 'http://peak.telecommunity.com/dist/ez_setup.py',
-    '--version': '',
-    '--download-base': None,
-    '--eggs': None}
-
-helpstring = __doc__ + textwrap.dedent('''
-    This script recognizes the following options itself.  The first option it
-    encounters that is not one of these will cause the script to stop parsing
-    options and pass the rest on to buildout.  Therefore, if you want to use
-    any of the following options *and* buildout command-line options like
-    -c, first use the following options, and then use the buildout options.
-
-    Options: 
-      --version=ZC_BUILDOUT_VERSION
-                Specify a version number of the zc.buildout to use
-      --ez_setup-source=URL_OR_FILE
-                Specify a URL or file location for the ez_setup file.
-                Defaults to
-                %(--ez_setup-source)s
-      --download-base=URL_OR_DIRECTORY
-                Specify a URL or directory for downloading setuptools and
-                zc.buildout.  Defaults to PyPI.
-      --eggs=DIRECTORY
-                Specify a directory for storing eggs.  Defaults to a temporary
-                directory that is deleted when the bootstrap script completes.
-
-    By using --ez_setup-source and --download-base to point to local resources,
-    you can keep this script from going over the network.
-    ''' % configuration)
-match_equals = re.compile(r'(%s)=(.*)' % ('|'.join(configuration),)).match
-args = sys.argv[1:]
-if args == ['--help']:
-    print helpstring
-    sys.exit(0)
-
-# If we end up using a temporary directory for storing our eggs, this will
-# hold the path of that directory.  On the other hand, if an explicit directory
-# is specified in the argv, this will remain None.
-tmpeggs = None
-
-while args:
-    val = args[0]
-    if val in configuration:
-        del args[0]
-        if not args or args[0].startswith('-'):
-            print "ERROR: %s requires an argument."
-            print helpstring
-            sys.exit(1)
-        configuration[val] = args[0]
-    else:
-        match = match_equals(val)
-        if match and match.group(1) in configuration:
-            configuration[match.group(1)] = match.group(2)
+import os, shutil, sys, tempfile, textwrap, urllib, urllib2, subprocess
+from optparse import OptionParser
+
+if sys.platform == 'win32':
+    def quote(c):
+        if ' ' in c:
+            return '"%s"' % c # work around spawn lamosity on windows
         else:
-            break
-    del args[0]
-
-for name in ('--ez_setup-source', '--download-base'):
-    val = configuration[name]
-    if val is not None and '://' not in val: # We're being lazy.
-        configuration[name] = 'file://%s' % (
-            urllib.pathname2url(os.path.abspath(os.path.expanduser(val))),)
-
-if (configuration['--download-base'] and
-    not configuration['--download-base'].endswith('/')):
-    # Download base needs a trailing slash to make the world happy.
-    configuration['--download-base'] += '/'
-
-if not configuration['--eggs']:
-    configuration['--eggs'] = tmpeggs = tempfile.mkdtemp()
-else:
-    configuration['--eggs'] = os.path.abspath(
-        os.path.expanduser(configuration['--eggs']))
-
-# The requirement is what we will pass to setuptools to specify zc.buildout.
-requirement = 'zc.buildout'
-if configuration['--version']:
-    requirement += '==' + configuration['--version']
+            return c
+else:
+    quote = str
+
+# See zc.buildout.easy_install._has_broken_dash_S for motivation and comments.
+stdout, stderr = subprocess.Popen(
+    [sys.executable, '-Sc',
+     'try:\n'
+     '    import ConfigParser\n'
+     'except ImportError:\n'
+     '    print 1\n'
+     'else:\n'
+     '    print 0\n'],
+    stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
+has_broken_dash_S = bool(int(stdout.strip()))
+
+# In order to be more robust in the face of system Pythons, we want to
+# run without site-packages loaded.  This is somewhat tricky, in
+# particular because Python 2.6's distutils imports site, so starting
+# with the -S flag is not sufficient.  However, we'll start with that:
+if not has_broken_dash_S and 'site' in sys.modules:
+    # We will restart with python -S.
+    args = sys.argv[:]
+    args[0:0] = [sys.executable, '-S']
+    args = map(quote, args)
+    os.execv(sys.executable, args)
+# Now we are running with -S.  We'll get the clean sys.path, import site
+# because distutils will do it later, and then reset the path and clean
+# out any namespace packages from site-packages that might have been
+# loaded by .pth files.
+clean_path = sys.path[:]
+import site
+sys.path[:] = clean_path
+for k, v in sys.modules.items():
+    if (hasattr(v, '__path__') and
+        len(v.__path__)==1 and
+        not os.path.exists(os.path.join(v.__path__[0],'__init__.py'))):
+        # This is a namespace package.  Remove it.
+        sys.modules.pop(k)
+
+is_jython = sys.platform.startswith('java')
+
+setuptools_source = 'http://peak.telecommunity.com/dist/ez_setup.py'
+distribute_source = 'http://python-distribute.org/distribute_setup.py'
+
+# parsing arguments
+def normalize_to_url(option, opt_str, value, parser):
+    if value:
+        if '://' not in value: # It doesn't smell like a URL.
+            value = 'file://%s' % (
+                urllib.pathname2url(
+                    os.path.abspath(os.path.expanduser(value))),)
+        if opt_str == '--download-base' and not value.endswith('/'):
+            # Download base needs a trailing slash to make the world happy.
+            value += '/'
+    else:
+        value = None
+    name = opt_str[2:].replace('-', '_')
+    setattr(parser.values, name, value)
+
+usage = '''\
+[DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options]
+
+Bootstraps a buildout-based project.
+
+Simply run this script in a directory containing a buildout.cfg, using the
+Python that you want bin/buildout to use.
+
+Note that by using --setup-source and --download-base to point to
+local resources, you can keep this script from going over the network.
+'''
+
+parser = OptionParser(usage=usage)
+parser.add_option("-v", "--version", dest="version",
+                          help="use a specific zc.buildout version")
+parser.add_option("-d", "--distribute",
+                   action="store_true", dest="use_distribute", default=False,
+                   help="Use Distribute rather than Setuptools.")
+parser.add_option("--setup-source", action="callback", dest="setup_source",
+                  callback=normalize_to_url, nargs=1, type="string",
+                  help=("Specify a URL or file location for the setup file. "
+                        "If you use Setuptools, this will default to " +
+                        setuptools_source + "; if you use Distribute, this "
+                        "will default to " + distribute_source +"."))
+parser.add_option("--download-base", action="callback", dest="download_base",
+                  callback=normalize_to_url, nargs=1, type="string",
+                  help=("Specify a URL or directory for downloading "
+                        "zc.buildout and either Setuptools or Distribute. "
+                        "Defaults to PyPI."))
+parser.add_option("--eggs",
+                  help=("Specify a directory for storing eggs.  Defaults to "
+                        "a temporary directory that is deleted when the "
+                        "bootstrap script completes."))
+parser.add_option("-t", "--accept-buildout-test-releases",
+                  dest='accept_buildout_test_releases',
+                  action="store_true", default=False,
+                  help=("Normally, if you do not specify a --version, the "
+                        "bootstrap script and buildout gets the newest "
+                        "*final* versions of zc.buildout and its recipes and "
+                        "extensions for you.  If you use this flag, "
+                        "bootstrap and buildout will get the newest releases "
+                        "even if they are alphas or betas."))
+parser.add_option("-c", None, action="store", dest="config_file",
+                   help=("Specify the path to the buildout configuration "
+                         "file to be used."))
+
+options, args = parser.parse_args()
+
+# if -c was provided, we push it back into args for buildout's main function
+if options.config_file is not None:
+    args += ['-c', options.config_file]
+
+if options.eggs:
+    eggs_dir = os.path.abspath(os.path.expanduser(options.eggs))
+else:
+    eggs_dir = tempfile.mkdtemp()
+
+if options.setup_source is None:
+    if options.use_distribute:
+        options.setup_source = distribute_source
+    else:
+        options.setup_source = setuptools_source
+
+if options.accept_buildout_test_releases:
+    args.append('buildout:accept-buildout-test-releases=true')
+args.append('bootstrap')
 
 try:
+    import pkg_resources
     import setuptools # A flag.  Sometimes pkg_resources is installed alone.
-    import pkg_resources
+    if not hasattr(pkg_resources, '_distribute'):
+        raise ImportError
 except ImportError:
+    ez_code = urllib2.urlopen(
+        options.setup_source).read().replace('\r\n', '\n')
     ez = {}
-    exec urllib2.urlopen(configuration['--ez_setup-source']).read() in ez
-    setuptools_args = dict(to_dir=configuration['--eggs'], download_delay=0)
-    if configuration['--download-base']:
-        setuptools_args['download_base'] = configuration['--download-base']
-    ez['use_setuptools'](**setuptools_args)
+    exec ez_code in ez
+    setup_args = dict(to_dir=eggs_dir, download_delay=0)
+    if options.download_base:
+        setup_args['download_base'] = options.download_base
+    if options.use_distribute:
+        setup_args['no_fake'] = True
+    ez['use_setuptools'](**setup_args)
+    reload(sys.modules['pkg_resources'])
     import pkg_resources
     # This does not (always?) update the default working set.  We will
     # do it.
@@ -123,48 +177,82 @@
         if path not in pkg_resources.working_set.entries:
             pkg_resources.working_set.add_entry(path)
 
-if sys.platform == 'win32':
-    def quote(c):
-        if ' ' in c:
-            return '"%s"' % c # work around spawn lamosity on windows
-        else:
-            return c
-else:
-    def quote (c):
-        return c
 cmd = [quote(sys.executable),
        '-c',
        quote('from setuptools.command.easy_install import main; main()'),
        '-mqNxd',
-       quote(configuration['--eggs'])]
-
-if configuration['--download-base']:
-    cmd.extend(['-f', quote(configuration['--download-base'])])
-
-cmd.append(requirement)
-
+       quote(eggs_dir)]
+
+if not has_broken_dash_S:
+    cmd.insert(1, '-S')
+
+find_links = options.download_base
+if not find_links:
+    find_links = os.environ.get('bootstrap-testing-find-links')
+if find_links:
+    cmd.extend(['-f', quote(find_links)])
+
+if options.use_distribute:
+    setup_requirement = 'distribute'
+else:
+    setup_requirement = 'setuptools'
 ws = pkg_resources.working_set
+setup_requirement_path = ws.find(
+    pkg_resources.Requirement.parse(setup_requirement)).location
 env = dict(
     os.environ,
-    PYTHONPATH=ws.find(pkg_resources.Requirement.parse('setuptools')).location)
-
-is_jython = sys.platform.startswith('java')
+    PYTHONPATH=setup_requirement_path)
+
+requirement = 'zc.buildout'
+version = options.version
+if version is None and not options.accept_buildout_test_releases:
+    # Figure out the most recent final version of zc.buildout.
+    import setuptools.package_index
+    _final_parts = '*final-', '*final'
+    def _final_version(parsed_version):
+        for part in parsed_version:
+            if (part[:1] == '*') and (part not in _final_parts):
+                return False
+        return True
+    index = setuptools.package_index.PackageIndex(
+        search_path=[setup_requirement_path])
+    if find_links:
+        index.add_find_links((find_links,))
+    req = pkg_resources.Requirement.parse(requirement)
+    if index.obtain(req) is not None:
+        best = []
+        bestv = None
+        for dist in index[req.project_name]:
+            distv = dist.parsed_version
+            if _final_version(distv):
+                if bestv is None or distv > bestv:
+                    best = [dist]
+                    bestv = distv
+                elif distv == bestv:
+                    best.append(dist)
+        if best:
+            best.sort()
+            version = best[-1].version
+if version:
+    requirement = '=='.join((requirement, version))
+cmd.append(requirement)
+
 if is_jython:
     import subprocess
     exitcode = subprocess.Popen(cmd, env=env).wait()
-else: # Windows needs this, apparently; otherwise we would prefer subprocess
+else: # Windows prefers this, apparently; otherwise we would prefer subprocess
     exitcode = os.spawnle(*([os.P_WAIT, sys.executable] + cmd + [env]))
 if exitcode != 0:
     sys.stdout.flush()
-    print ("An error occured when trying to install zc.buildout. "
+    sys.stderr.flush()
+    print ("An error occurred when trying to install zc.buildout. "
            "Look above this message for any errors that "
            "were output by easy_install.")
     sys.exit(exitcode)
 
-ws.add_entry(configuration['--eggs'])
+ws.add_entry(eggs_dir)
 ws.require(requirement)
 import zc.buildout.buildout
-args.append('bootstrap')
 zc.buildout.buildout.main(args)
-if tmpeggs is not None:
-    shutil.rmtree(tmpeggs)
+if not options.eggs: # clean up temporary egg directory
+    shutil.rmtree(eggs_dir)

=== modified file 'buildmailman.py'
--- buildmailman.py	2009-09-11 02:17:29 +0000
+++ buildmailman.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#! /usr/bin/python2.4
+#! /usr/bin/python
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -13,9 +13,9 @@
 import subprocess
 
 from canonical.config import config
-from canonical.launchpad.mailman.config import (
+from lp.services.mailman.config import (
     configure_prefix, configure_siteowner)
-from canonical.launchpad.mailman.monkeypatches import monkey_patch
+from lp.services.mailman.monkeypatches import monkey_patch
 from lazr.config import as_username_groupname
 
 basepath = [part for part in sys.path if part]
@@ -42,9 +42,9 @@
         return 0
 
     # sys.path_importer_cache is a mapping of elements of sys.path to importer
-    # objects used to handle them. In Python2.5+ when an element of sys.path is
-    # found to not exist on disk, a NullImporter is created and cached - this
-    # causes Python to never bother re-inspecting the disk for that path
+    # objects used to handle them. In Python2.5+ when an element of sys.path
+    # is found to not exist on disk, a NullImporter is created and cached -
+    # this causes Python to never bother re-inspecting the disk for that path
     # element. We must clear that cache element so that our second attempt to
     # import MailMan after building it will actually check the disk.
     del sys.path_importer_cache[mailman_path]
@@ -84,25 +84,52 @@
 
     # Build and install the Mailman software.  Note that we don't care about
     # --with-cgi-gid because we're not going to use that Mailman subsystem.
+    executable = os.path.abspath('bin/py')
     configure_args = (
         './configure',
         '--prefix', mailman_path,
         '--with-var-prefix=' + var_dir,
-        '--with-python=' + sys.executable,
+        '--with-python=' + executable,
         '--with-username=' + user,
         '--with-groupname=' + group,
         '--with-mail-gid=' + group,
         '--with-mailhost=' + build_host_name,
         '--with-urlhost=' + build_host_name,
         )
+    # Configure.
     retcode = subprocess.call(configure_args, cwd=mailman_source)
     if retcode:
         print >> sys.stderr, 'Could not configure Mailman:'
         sys.exit(retcode)
-    retcode = subprocess.call(('make',), cwd=mailman_source)
+    # Make.
+    retcode = subprocess.call(('make', ), cwd=mailman_source)
     if retcode:
         print >> sys.stderr, 'Could not make Mailman.'
         sys.exit(retcode)
+    # We have a brief interlude before we install.  Hardy will not
+    # accept a script as the executable for the shebang line--it will
+    # treat the file as a shell script instead. The ``bin/py``
+    # executable that we specified in '--with-python' above is a script,
+    # so this behavior causes problems for us. Our workaround is to
+    # prefix the ``bin/py`` script with ``/usr/bin/env``, which makes
+    # Hardy happy.  We need to do this before we install because the
+    # installation will call Mailman's ``bin/update``, which is a script
+    # that needs this fix.
+    build_dir = os.path.join(mailman_source, 'build')
+    original = '#! %s\n' % (executable, )
+    modified = '#! /usr/bin/env %s\n' % (executable, )
+    for (dirpath, dirnames, filenames) in os.walk(build_dir):
+        for filename in filenames:
+            filename = os.path.join(dirpath, filename)
+            f = open(filename, 'r')
+            if f.readline() == original:
+                rest = f.read()
+                f.close()
+                f = open(filename, 'w')
+                f.write(modified)
+                f.write(rest)
+            f.close()
+    # Now we actually install.
     retcode = subprocess.call(('make', 'install'), cwd=mailman_source)
     if retcode:
         print >> sys.stderr, 'Could not install Mailman.'
@@ -164,7 +191,8 @@
 def configure_site_list(mailman_bin, site_list_name):
     """Configure the site list.
 
-    Currently, the only thing we want to set is to not advertise the site list.
+    Currently, the only thing we want to set is to not advertise the
+    site list.
     """
     fd, config_file_name = tempfile.mkstemp()
     try:
@@ -192,7 +220,6 @@
     return build_mailman()
 
 
-
 if __name__ == '__main__':
     return_code = main()
     sys.exit(return_code)

=== modified file 'buildout-templates/_pythonpath.py.in'
--- buildout-templates/_pythonpath.py.in	2009-08-21 19:13:05 +0000
+++ buildout-templates/_pythonpath.py.in	2010-11-07 00:31:57 +0000
@@ -4,17 +4,41 @@
 # NOTE: This is a generated file.  The original is in
 # buildout-templates/_pythonpath.py.in
 
-__metaclass__ = type
-
-import sys, os
-
-sys.path[0:0] = [${string-paths}]
-# Enable Storm's C extensions
-os.environ['STORM_CEXTENSIONS'] = '1'
-
-# We don't want to bother tests or logs with these.
+# This file works if the Python has been started with -S, or if bin/py
+# has been used.
+
+# Auto-generated code to handle relative paths
+${python-relative-path-setup}
+
+import os
+import sys
 import warnings
+
+# XXX: 2010-04-26, Salgado, bug=570246: Silence python2.6 deprecation
+# warnings.
 warnings.filterwarnings(
-    'ignore',
-    'Module .+ was already imported from .+, but .+ is being added.*',
-    UserWarning)
+    'ignore', '.*(md5|sha|sets)', DeprecationWarning,
+    )
+
+site_dir = ${scripts:parts-directory|path-repr}
+
+if ('site' in sys.modules and
+    not sys.modules['site'].__file__.startswith(
+        os.path.join(site_dir, 'site.py'))):
+    # We have the wrong site.py, so our paths are not set up correctly.
+    # We blow up, with a hopefully helpful error message.
+    raise RuntimeError(
+        'The wrong site.py is imported (%r imported, %r expected). '
+        'Scripts should usually be '
+        "started with Launchpad's bin/py, or with a Python invoked with "
+        'the -S flag.' % (
+        sys.modules['site'].__file__, os.path.join(site_dir, 'site.py')))
+
+if site_dir not in sys.path:
+    sys.path.insert(0, site_dir)
+elif 'site' not in sys.modules:
+    # XXX 2010-05-04 gary bug 575206
+    # This one line is to support Mailman 2, which does something unexpected
+    # to set up its paths.
+    sys.path[:] = [p for p in sys.path if 'site-packages' not in p]
+import site # sets up paths

=== added file 'buildout-templates/bin/combine-css.in'
--- buildout-templates/bin/combine-css.in	1970-01-01 00:00:00 +0000
+++ buildout-templates/bin/combine-css.in	2010-11-07 00:31:57 +0000
@@ -0,0 +1,51 @@
+#!${buildout:executable} -S
+
+# Initialize our paths.
+${python-relative-path-setup}
+import sys
+sys.path.insert(0, ${scripts:parts-directory|path-repr})
+import site
+
+import os
+
+from lazr.js.build import ComboFile
+from lazr.js.combo import combine_files
+
+
+root = ${buildout:directory|path-repr}
+icing = os.path.join(root, 'lib/canonical/launchpad/icing')
+target = os.path.join(icing, 'combo.css')
+# It'd probably be nice to have this script find all the CSS files we might
+# need and combine them together, but if we do that we'd certainly end up
+# including lots of styles that we don't need/want, so keeping this hard-coded
+# list seems like the best option for now.
+names = [
+    'style.css',
+    'lazr/build/yui/cssreset/reset.css',
+    'lazr/build/yui/cssfonts/fonts.css',
+    'lazr/build/yui/cssgrids/grids.css',
+    'lazr/build/lazr/assets/skins/sam/lazr.css',
+    'lazr/build/inlineedit/assets/skins/sam/editor.css',
+    'lazr/build/autocomplete/assets/skins/sam/autocomplete.css',
+    'lazr/build/overlay/assets/skins/sam/pretty-overlay.css',
+    'lazr/build/formoverlay/assets/formoverlay-core.css',
+    'lazr/build/picker/assets/skins/sam/picker.css',
+    'lazr/build/activator/assets/skins/sam/activator.css',
+    'lazr/build/choiceedit/assets/choiceedit-core.css',
+    # This one goes at the end because it's our main stylesheet and should
+    # take precedence over the others.
+    'build/style-3-0.css']
+
+absolute_names = []
+for name in names:
+    absolute_names.append(os.path.join(icing, name))
+
+combo = ComboFile(absolute_names, target)
+if combo.needs_update():
+    result = ''
+    for content in combine_files(names, icing):
+        result += content
+
+    f = open(target, 'w')
+    f.write(result)
+    f.close()

=== modified file 'buildout-templates/bin/jstest.in'
--- buildout-templates/bin/jstest.in	2009-07-31 20:35:24 +0000
+++ buildout-templates/bin/jstest.in	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!${buildout:executable}
+#!${buildout:executable} -S
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
@@ -14,10 +14,11 @@
 
 """
 
-# Add the buildout sys.path
+# Initialize our paths.
+${python-relative-path-setup}
 import sys
-sys.path[0:0] = [${string-paths}]
-
+sys.path.insert(0, ${scripts:parts-directory|path-repr})
+import site
 
 import subprocess
 import os
@@ -48,7 +49,7 @@
         'domain':    'code.launchpad.dev'
     },
     'soyuz': {
-        'suite_dir': 'lib/canonical/launchpad/windmill/tests/test_soyuz',
+        'suite_dir': 'lib/lp/soyuz/windmill',
         'domain':    'launchpad.dev'
     },
     'translations': {

=== added file 'buildout-templates/bin/kill-test-services.in'
--- buildout-templates/bin/kill-test-services.in	1970-01-01 00:00:00 +0000
+++ buildout-templates/bin/kill-test-services.in	2010-11-07 00:31:57 +0000
@@ -0,0 +1,40 @@
+#!${buildout:executable} -S
+#
+# Copyright 2009 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+"""Kill all the test services that may persist between test runs."""
+
+# Initialize our paths.
+${python-relative-path-setup}
+import sys
+sys.path.insert(0, ${scripts:parts-directory|path-repr})
+import site
+
+# Tell canonical.config to use the testrunner config instance, so that
+# we don't kill the real services.
+from canonical.config import config
+config.setInstance('testrunner')
+config.generate_overrides()
+
+import sys
+
+from canonical.testing.layers import MemcachedLayer
+from canonical.librarian.testing.server import LibrarianTestSetup
+from lp.services.osutils import kill_by_pidfile
+
+
+def main(args):
+    if '-h' in args or '--help' in args:
+        print __doc__
+        return 0
+    print "Killing Memcached....",
+    kill_by_pidfile(MemcachedLayer.getPidFile())
+    print "done."
+    print "Killing Librarian....",
+    LibrarianTestSetup().tearDownRoot()
+    print "done."
+    return 0
+
+
+if __name__ == '__main__':
+    sys.exit(main(sys.argv[1:]))

=== modified file 'buildout-templates/bin/lint.sh.in'
--- buildout-templates/bin/lint.sh.in	2009-07-17 00:26:05 +0000
+++ buildout-templates/bin/lint.sh.in	2010-11-07 00:31:57 +0000
@@ -1,87 +1,29 @@
 #!/bin/bash
 #
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
+# Copyright 2009-2010 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 #
-# Runs xmlint, pyflakes and pylint on files changed from parent branch.
-# Use '-v' to run pylint under stricter conditions with additional messages.
-
-utilitiesdir=`dirname $0`/../utilities
+# Runs pocketlint on files changed from parent branch.
+
+
+${shell-relative-path-setup}
+
+utilitiesdir=${buildout:directory/utilities|shell-path}
 [ -z "$utilitiesdir" ] && utilitiesdir=.
 
-# Fail if any of the required tools are not installed.
-if ! which pylint >/dev/null; then
-    echo "Error: pylint is not installed."
-    echo "    Install the pylint package."
-    exit 1
-elif ! which xmllint >/dev/null; then
-    echo "Error: xmlllint is not installed."
-    echo "    Install the libxml2-utils package."
-    exit 1
-elif ! which pyflakes >/dev/null; then
-    echo "Error: pyflakes is not installed."
-    echo "    Install the pyflakes package."
-    exit 1
-fi
-
-bzr() {
-    # For pylint to operate properly, PYTHONPATH must point to the ./lib
-    # directory in the launchpad tree. This directory includes a bzrlib. When
-    # this script calls bzr, we want it to use the system bzrlib, not the one
-    # in the launchpad tree.
-    PYTHONPATH='' `which bzr` "$@"
-}
-
-rules="Using normal rules."
-rcfile="--rcfile=utilities/lp.pylintrc"
-if [ "$1" == "-v" ]; then
-    shift
-    rules="Using verbose rules."
-    rcfile="--rcfile=utilities/lp-verbose.pylintrc"
-elif [ "$1" == "-vv" ]; then
-    shift
-    rules="Using very verbose rules."
-    rcfile="--rcfile=utilities/lp-very-verbose.pylintrc"
-fi
-
 
 if [ -z "$1" ]; then
-    # No command line argument provided, use the default logic.
-    bzr diff > /dev/null
-    diff_status=$?
-    if [ $diff_status -eq 0 ] ; then
-        # No uncommitted changes in the tree.
-        bzr status | grep "^Current thread:" > /dev/null
-        if [ $? -eq 0 ] ; then
-            # This is a loom, lint changes relative to the lower thread.
-            rev_option="-r thread:"
-        else
-            # Lint changes relative to the parent.
-            rev=`bzr info | sed '/parent branch:/!d; s/ *parent branch: /ancestor:/'`
-            rev_option="-r $rev"
-        fi
-    elif [ $diff_status -eq 1 ] ; then
-        # Uncommitted changes in the tree, lint those changes.
-        rev_option=""
-    else
-        # bzr diff failed
-        exit 1
-    fi
-    files=`bzr st --short $rev_option | sed '/^.[MN]/!d; s/.* //'`
+    # No command line argument provided, lint all changed files.
+    files=$($utilitiesdir/find-changed-files.sh)
 else
     # Add newlines so grep filters out pyfiles correctly later.
     files=`echo $* | tr " " "\n"`
 fi
 
-# Are there patches to the schema or changes to current.sql?
-database_changes=$(echo $files | sed '/database.*\(patch-\|current\)/!d')
 
 echo "= Launchpad lint ="
 echo ""
-echo "Checking for conflicts. and issues in doctests and templates."
-echo "Running jslint, xmllint, pyflakes, and pylint."
-
-echo "$rules"
+echo "Checking for conflicts and issues in changed files."
 
 if [ -z "$files" ]; then
     echo "No changed files detected."
@@ -95,28 +37,13 @@
 fi
 
 
-group_lines_by_file() {
-    # Format file:line:message output as lines grouped by file.
-    file_name=""
-    echo "$1" | sed 's,\(^[^ :<>=+]*:\),~~\1\n,' | while read line; do
-        current=`echo $line | sed '/^~~/!d; s/^~~\(.*\):$/\1/;'`
-        if [ -z "$current" ]; then
-            echo "    $line"
-        elif [ "$file_name" != "$current" ]; then
-            file_name="$current"
-            echo ""
-            echo "$file_name"
-        fi
-    done
-}
-
-
+# Are there patches to the schema or changes to current.sql?
 sample_dir="database/sampledata"
 current_sql="$sample_dir/current.sql"
 current_dev_sql="$sample_dir/current-dev.sql"
 lintdata_sql="$sample_dir/lintdata.sql"
 lintdata_dev_sql="$sample_dir/lintdata-dev.sql"
-
+database_changes=$(echo $files | sed '/database.*\(patch-\|current\)/!d')
 if [ -n "$database_changes" ]; then
     make -C database/schema lintdata > /dev/null
     sql_diff=$(diff -q "$current_sql" "$lintdata_sql")
@@ -147,7 +74,7 @@
     echo "        cp $4 $1"
     echo "    Run make schema again to update the test/dev database."
 }
-    
+
 if [ -n "$sql_diff" -o -n "$sql_dev_diff" -o -n "$karma_bombs" ]; then
     echo ""
     echo ""
@@ -155,7 +82,7 @@
     echo ""
 fi
 
-# 
+#
 if [ -n "$sql_diff" -o -n "$karma_bombs" ]; then
     echo "$current_sql"
 fi
@@ -178,129 +105,12 @@
        	"newsampledata-dev.sql"
 fi
 
-conflicts=""
-for file in $files; do
-    # NB. Odd syntax on following line to stop lint.sh detecting conflict
-    # markers in itself.
-    if [ ! -f "$file" ]; then
-        continue
-    fi
-    if grep -q -e '<<<''<<<<' -e '>>>''>>>>' $file; then
-        conflicts="$conflicts $file"
-    fi
-done
-
-if [ "$conflicts" ]; then
-    echo ""
-    echo ""
-    echo "== Conflicts =="
-    echo ""
-    for conflict in $conflicts; do
-        echo "$conflict"
-    done
-fi
-
-
-xmlfiles=`echo "$files" | grep -E '(xml|zcml|pt)$'`
-xmllint_notices=""
-if [ ! -z "$xmlfiles" ]; then
-    xmllint_notices=`xmllint --noout $xmlfiles 2>&1 |
-        sed -e '/Entity/,+2d; {/StartTag/N; /define-slot="doctype"/,+1d}'`
-fi
-if [ ! -z "$xmllint_notices" ]; then
-    echo ""
-    echo ""
-    echo "== XmlLint notices =="
-    group_lines_by_file "$xmllint_notices"
-fi
-
-
-templatefiles=`echo "$files" | grep -E '(pt)$'`
-template_notices=""
-if [ ! -z "$templatefiles" ]; then
-    obsolete='"(portlets_one|portlets_two|pageheading|help)"'
-    template_notices=`grep -HE "fill-slot=$obsolete" $templatefiles`
-fi
-if [ ! -z "$template_notices" ]; then
-    echo ""
-    echo ""
-    echo "== Template notices =="
-    echo ""
-    echo "There are obsolete slots in these templates."
-    group_lines_by_file "$template_notices"
-fi
-
-
-doctestfiles=`echo "$files" | grep -E '/(doc|pagetests|f?tests)/.*txt$'`
-if [ ! -z "$doctestfiles" ]; then
-    pyflakes_doctest_notices=`$utilitiesdir/pyflakes-doctest.py $doctestfiles`
-    if [ ! -z "$pyflakes_doctest_notices" ]; then
-        echo ""
-        echo ""
-        echo "== Pyflakes Doctest notices =="
-        group_lines_by_file "$pyflakes_doctest_notices"
-    fi
-fi
-
-
-jsfiles=`echo "$files" | grep -E 'js$'`
-if [ ! -z "$jsfiles" ]; then
-    jslint_notices=`$utilitiesdir/../sourcecode/lazr-js/tools/jslint.py 2>&1`
-    if [ ! -z "$jslint_notices" ]; then
-        echo ""
-        echo ""
-        echo "== JSLint notices =="
-        echo "$jslint_notices"
-    fi
-fi
-
-
-pyfiles=`echo "$files" | grep '.py$'`
-if [ -z "$pyfiles" ]; then
+
+# Sample data contains auto generated files with long lines.
+pocketlint_files=`echo "$files" | grep -v "$sample_dir"`
+if [ -z "$pocketlint_files" ]; then
     exit 0
 fi
 
-
-sed_deletes="/detect undefined names/d; /'_pythonpath' .* unused/d; "
-sed_deletes="$sed_deletes /BYUSER/d; "
-sed_deletes="$sed_deletes /ENABLED/d; "
-pyflakes_notices=`pyflakes $pyfiles 2>&1 | sed "$sed_deletes"`
-if [ ! -z "$pyflakes_notices" ]; then
-    echo ""
-    echo ""
-    echo "== Pyflakes notices =="
-    group_lines_by_file "$pyflakes_notices"
-fi
-
-extra_path="/usr/share/pyshared:/usr/share/pycentral/pylint/site-packages"
-export PYTHONPATH="${os-paths}:$extra_path:$PYTHONPATH"
-pylint="${buildout:executable} -Wi::DeprecationWarning `which pylint`"
-
-# XXX sinzui 2007-10-18 bug=154140:
-# Pylint should really do a better job of not reporting false positives.
-sed_deletes="/^*/d; /Unused import \(action\|_python\)/d; "
-sed_deletes="$sed_deletes /Unable to import .*sql\(object\|base\)/d; "
-sed_deletes="$sed_deletes /_action.* Undefined variable/d; "
-sed_deletes="$sed_deletes /_getByName.* Instance/d; "
-sed_deletes="$sed_deletes /Redefining built-in .id/d; "
-sed_deletes="$sed_deletes /Redefining built-in 'filter'/d; "
-sed_deletes="$sed_deletes /<lambda>] Using variable .* before assignment/d; "
-sed_deletes="$sed_deletes /Comma not followed by a space/{N;N};/,[])}]/d; "
-sed_deletes="$sed_deletes /Undefined variable.*valida/d; "
-sed_deletes="$sed_deletes s,^/.*lib/canonical/,lib/canonical,; "
-sed_deletes="$sed_deletes /ENABLED/d; "
-sed_deletes="$sed_deletes /BYUSER/d; "
-sed_deletes="$sed_deletes /zope.*No module/d;"
-
-# Note that you can disable specific tests by placing pylint
-# instruction in a comment:
-# # pylint: disable-msg=W0401,W0612,W0403
-pylint_notices=`$pylint $rcfile $pyfiles | sed "$sed_deletes"`
-
-if [ ! -z "$pylint_notices" ]; then
-    echo ""
-    echo ""
-    echo "== Pylint notices =="
-    group_lines_by_file "$pylint_notices"
-fi
-
+echo ""
+pocketlint $pocketlint_files 2>&1

=== removed file 'buildout-templates/bin/py.in'
--- buildout-templates/bin/py.in	2009-06-12 16:36:02 +0000
+++ buildout-templates/bin/py.in	1970-01-01 00:00:00 +0000
@@ -1,2 +0,0 @@
-#!/bin/sh
-PYTHONPATH=${os-paths} exec ${buildout:executable} "$@"

=== added file 'buildout-templates/bin/retest.in'
--- buildout-templates/bin/retest.in	1970-01-01 00:00:00 +0000
+++ buildout-templates/bin/retest.in	2010-11-07 00:31:57 +0000
@@ -0,0 +1,99 @@
+#!${buildout:executable}
+#
+# Copyright 2009 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""
+Given an error report, run all of the failed tests again.
+
+For instance, it can be used in the following scenario:
+
+  % bin/test -vvm lp.registry | tee test.out
+  % # Oh nos!  Failures!
+  % # Fix tests.
+  % bin/retest test.out
+
+Or, when run without arguments (or if any argument is "-"), a test
+report (or a part of) can be piped in, for example by pasting it:
+
+  % bin/retest
+  Tests with failures:
+     lib/lp/registry/browser/tests/sourcepackage-views.txt
+     lib/lp/registry/tests/../stories/product/xx-product-package-pages.txt
+  Total: ... tests, 2 failures, 0 errors in ...
+
+"""
+
+import fileinput
+import os
+import re
+import sys
+from itertools import takewhile
+from pprint import pprint
+
+${python-relative-path-setup}
+
+# The test script for this branch.
+TEST = "${buildout:directory/bin/test}"
+
+# Regular expression to match numbered stories.
+STORY_RE = re.compile(r"(.*)/\d{2}-.*")
+
+
+def get_test_name(test):
+    """Get the test name of a failed test.
+
+    If the test is part of a numbered story,
+    e.g. 'stories/gpg-coc/01-claimgpgp.txt', then return the directory name
+    since all of the stories must be run together.
+    """
+    match = STORY_RE.match(test)
+    if match:
+        return match.group(1)
+    # Otherwise split the test and return the first portion.  The split will
+    # chop off windmill descriptions.
+    return test.split()[0]
+
+
+def gen_test_lines(lines):
+    def p_start(line):
+        return line.startswith('Tests with failures:')
+    def p_take(line):
+        return not line.startswith('Total:')
+    lines = iter(lines)
+    for line in lines:
+        if p_start(line):
+            for line in takewhile(p_take, lines):
+                yield line
+
+
+def gen_tests(test_lines):
+    for test_line in test_lines:
+        yield get_test_name(test_line.strip())
+
+
+def extract_tests(lines):
+    return set(gen_tests(gen_test_lines(lines)))
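+# For example, fed the report shown in the module docstring, extract_tests()
+# would return:
+#   set(['lib/lp/registry/browser/tests/sourcepackage-views.txt',
+#        'lib/lp/registry/tests/../stories/product/xx-product-package-pages.txt'])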
+
+
+def run_tests(tests):
+    """Given a set of tests, run them as one group."""
+    print "Running tests:"
+    pprint(sorted(tests))
+    args = ['-vv']
+    for test in tests:
+        args.append('-t')
+        args.append(test)
+    os.execl(TEST, TEST, *args)
+
+
+if __name__ == '__main__':
+    tests = extract_tests(fileinput.input())
+    if len(tests) >= 1:
+        run_tests(tests)
+    else:
+        sys.stdout.write(
+            "Error: no tests found\n"
+            "Usage: %s [test_output_file|-] ...\n\n%s\n\n" % (
+                sys.argv[0], __doc__.strip()))
+        sys.exit(1)

=== added file 'buildout-templates/bin/sprite-util.in'
--- buildout-templates/bin/sprite-util.in	1970-01-01 00:00:00 +0000
+++ buildout-templates/bin/sprite-util.in	2010-11-07 00:31:57 +0000
@@ -0,0 +1,47 @@
+#!${buildout:executable} -S
+
+import os
+import sys
+
+# Initialize our paths.
+${python-relative-path-setup}
+sys.path.insert(0, ${scripts:parts-directory|path-repr})
+import site
+
+from lp.services.spriteutils import SpriteUtil
+
+command_options = ('create-image', 'create-css')
+
+def usage():
+    return " Usage: %s %s" % (sys.argv[0], '|'.join(command_options))
+
+if len(sys.argv) != 2:
+    print >> sys.stderr, "Expected a single argument."
+    print >> sys.stderr, usage()
+    sys.exit(1)
+else:
+    command = sys.argv[1]
+    if command not in command_options:
+        print >> sys.stderr, "Unknown argument: %s" % command
+        print >> sys.stderr, usage()
+        sys.exit(2)
+
+icing = ${buildout:directory/lib/canonical/launchpad/icing|path-repr}
+combined_image_file = os.path.join(icing, 'icon-sprites')
+positioning_file = os.path.join(icing, 'icon-sprites.positioning')
+css_template_file = os.path.join(icing, 'style-3-0.css.in')
+css_file = os.path.join(icing, 'build/style-3-0.css')
+
+sprite_util = SpriteUtil(
+    css_template_file, 'icon-sprites',
+    url_prefix_substitutions={'/@@/': '../images/'})
+
+if command == 'create-image':
+    sprite_util.combineImages(icing)
+    sprite_util.savePNG(combined_image_file)
+    sprite_util.savePositioning(positioning_file)
+elif command == 'create-css':
+    sprite_util.loadPositioning(positioning_file)
+    # The icing/icon-sprites file is relative to the css file
+    # in the icing/build/ directory.
+    sprite_util.saveConvertedCSS(css_file, '../icon-sprites')
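+
+# Typical usage (assuming buildout renders this template as bin/sprite-util):
+#   % bin/sprite-util create-image   # combine icons; save positioning info
+#   % bin/sprite-util create-css     # write build/style-3-0.css from template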

=== modified file 'buildout-templates/bin/test.in'
--- buildout-templates/bin/test.in	2009-09-10 00:17:39 +0000
+++ buildout-templates/bin/test.in	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!${buildout:executable}
+#!${buildout:executable} -S
 ##############################################################################
 #
 # Copyright (c) 2004 Zope Corporation and Contributors.
@@ -14,33 +14,49 @@
 ##############################################################################
 """Test script
 """
-import sys, os, time, logging, warnings, re
-
-BUILD_DIR = '${buildout:directory}'
-
-if os.getsid(0) == os.getsid(os.getppid()):
-    # We need to become the process group leader so test_on_merge.py
-    # can reap its children.
-    #
-    # Note that if setpgrp() is used to move a process from one
-    # process group to another (as is done by some shells when
-    # creating pipelines), then both process groups must be part of
-    # the same session.
-    os.setpgrp()
+
+# NOTE: This is a generated file.  The original is in
+# buildout-templates/bin/test.in
+
+import logging, os, re, sys, time, warnings
+
+# Initialize our paths.
+${python-relative-path-setup}
+import sys
+sys.path.insert(0, ${scripts:parts-directory|path-repr})
+import site
+
+
+# Fix doctest so that it can handle mixed unicode and encoded output.
+import doctest
+
+_RealSpoofOut = doctest._SpoofOut
+
+class _SpoofOut(doctest._SpoofOut):
+
+    def write(self, value):
+        if isinstance(value, unicode):
+            value = value.encode('utf8')
+        _RealSpoofOut.write(self, value)
+
+doctest._SpoofOut = _SpoofOut
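+
+# (With this patch, a doctest that mixes unicode and already-encoded str
+# output buffers a single UTF-8 byte string, rather than risking a
+# UnicodeDecodeError when the two are combined.)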
+
+
+BUILD_DIR = ${buildout:directory|path-repr}
+CUSTOM_SITE_DIR = ${scripts:parts-directory|path-repr}
 
 # Make tests run in a timezone no launchpad developers live in.
 # Our tests need to run in any timezone.
-# (No longer actually required, as PQM does this)
+# (This is no longer actually required, as PQM does this.)
 os.environ['TZ'] = 'Asia/Calcutta'
 time.tzset()
 
-# Enable Storm's C extensions
-os.environ['STORM_CEXTENSIONS'] = '1'
-
-sys.path[0:0] = [${string-paths}]
-
-# Set PYTHONPATH environment variable for spawned processes
-os.environ['PYTHONPATH'] = ':'.join(sys.path)
+# Storm's C extensions should already be enabled from lp_sitecustomize.py,
+# which our custom sitecustomize.py ran.
+assert os.environ['STORM_CEXTENSIONS'] == '1'
+
+# Make sure our site.py is the one that subprocesses use.
+os.environ['PYTHONPATH'] = CUSTOM_SITE_DIR
 
 # Set a flag if this is the main testrunner process
 if len(sys.argv) > 1 and sys.argv[1] == '--resume-layer':
@@ -62,7 +78,6 @@
     sys.exit(-1 * signal.SIGTERM)
 signal.signal(signal.SIGTERM, exit_with_atexit_handlers)
 
-
 # Tell canonical.config to use the testrunner config instance.
 from canonical.config import config
 config.setInstance('testrunner')
@@ -88,14 +103,28 @@
 # need to be silenced should have an accompanied Bug reference.
 #
 warnings.filterwarnings(
-    'ignore', 'PyCrypto', RuntimeWarning, 'twisted[.]conch[.]ssh'
+    'ignore', 'PyCrypto', RuntimeWarning, 'twisted[.]conch[.]ssh',
     )
 warnings.filterwarnings(
     'ignore', 'twisted.python.plugin', DeprecationWarning,
     )
 warnings.filterwarnings(
-    'ignore', 'bzrlib.*was deprecated', DeprecationWarning
-)
+    'ignore', 'zope.testing.doctest', DeprecationWarning,
+    )
+warnings.filterwarnings(
+    'ignore', 'bzrlib.*was deprecated', DeprecationWarning,
+    )
+# XXX: 2010-04-26, Salgado, bug=570246: Silence python2.6 deprecation
+# warnings.
+# We cannot narrow this warnings filter to just twisted because
+# warnings.warn_explicit() sees this import as coming from importfascist, not
+# from twisted.  It makes no sense to put module='importfascist' here though
+# because /everything/ gets imported through it.  So, sad as it is, until
+# twisted doesn't produce warnings under Python 2.6, just ignore all these
+# deprecations.
+warnings.filterwarnings(
+    'ignore', '.*(md5|sha|sets)', DeprecationWarning,
+    )
 # The next one is caused by a lamosity in python-openid.  The following change
 # to openid/server/server.py would make the warning filter unnecessary:
 # 978c974,974
@@ -120,10 +149,11 @@
     re.escape('clear_request_started() called outside of a request'),
     UserWarning
     )
-
-# Any warnings not explicitly silenced are errors
-warnings.filterwarnings('error', append=True)
-
+# Unicode warnings are always fatal
+warnings.filterwarnings('error', category=UnicodeWarning)
+
+# shortlist() raises an error when it is misused.
+warnings.filterwarnings('error', r'shortlist\(\)')
 
 from canonical.ftests import pgsql
 # If this is removed, make sure canonical.ftests.pgsql is updated
@@ -134,15 +164,13 @@
 from zope.testing import testrunner
 from zope.testing.testrunner import options
 
-defaults = [
+defaults = {
     # Find tests in the tests and ftests directories
-    '--tests-pattern=^f?tests$',
-    '--test-path=${buildout:directory}/lib',
-    '--package=canonical',
-    '--package=lp',
-    '--package=devscripts',
-    '--layer=!MailmanLayer',
-    ]
+    'tests_pattern': '^f?tests$',
+    'test_path': [${buildout:directory/lib|path-repr}],
+    'package': ['canonical', 'lp', 'devscripts', 'launchpad_loggerhead'],
+    'layer': ['!(MailmanLayer)'],
+    }
 
 # Monkey-patch os.listdir to randomise the results
 original_listdir = os.listdir
@@ -163,13 +191,7 @@
 os.listdir = listdir
 
 
-from canonical.testing.customresult import (
-    filter_tests,
-    list_tests,
-    patch_find_tests,
-    patch_zope_testresult,
-    )
-from subunit import TestProtocolClient
+from canonical.testing.customresult import filter_tests, patch_find_tests
 
 
 if __name__ == '__main__':
@@ -188,22 +210,37 @@
     else:
         args = sys.argv
 
+    # thunk across to parallel support if needed.
+    if '--parallel' in sys.argv and '--list-tests' not in sys.argv:
+        # thunk over to parallel testing.
+        from canonical.testing.parallel import main
+        sys.exit(main(sys.argv))
+
     def load_list(option, opt_str, list_name, parser):
         patch_find_tests(filter_tests(list_name))
     options.parser.add_option(
         '--load-list', type=str, action='callback', callback=load_list)
-
-    def list_test_option(option, opt, value, parser):
-        patch_find_tests(list_tests)
-    options.parser.add_option(
-        '--list', action='callback', callback=list_test_option)
-
-    def use_subunit(option, opt, value, parser):
-        patch_zope_testresult(TestProtocolClient(sys.stdout))
-    options.parser.add_option(
-        '--subunit', action='callback', callback=use_subunit)
-
-    local_options = options.get_options(args=args, defaults=defaults)
+    options.parser.add_option(
+        '--parallel', action='store_true',
+        help='Run tests in parallel processes. '
+            'Poorly isolated tests will break.')
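+
+    # For example (hypothetical list file): rerun a saved list of tests, or
+    # fan a module's tests out across processes:
+    #   % bin/test --load-list failing-tests.list
+    #   % bin/test --parallel -m lp.registry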
+
+    # tests_pattern is a regexp, so the parsed value is hard to compare
+    # with the default value in the loop below.
+    options.parser.defaults['tests_pattern'] = defaults['tests_pattern']
+    local_options = options.get_options(args=args)
+    # Set our default options, if the options aren't specified.
+    for name, value in defaults.items():
+        parsed_option = getattr(local_options, name)
+        if ((parsed_option == []) or
+            (parsed_option == options.parser.defaults.get(name))):
+            # The option probably wasn't specified on the command line,
+            # let's replace it with our default value. It could be that
+            # the real default (as specified in
+            # zope.testing.testrunner.options) was specified, and we
+            # shouldn't replace it with our default, but that's such an
+            # edge case that we don't have to care about it.
+            options.parser.defaults[name] = value
 
     # Turn on Layer profiling if requested.
     from canonical.testing import profiled
@@ -215,15 +252,14 @@
     # tree. This is very useful for IDE integration, so an IDE can
     # e.g. run the test that you are currently editing.
     try:
-        there = os.getcwd()
-        os.chdir('${buildout:directory}')
-        result = testrunner.run(defaults)
+        try:
+            there = os.getcwd()
+            os.chdir(BUILD_DIR)
+            testrunner.run([])
+        except SystemExit:
+            # Print Layer profiling report if requested.
+            if main_process and local_options.verbose >= 3:
+                profiled.report_profile_stats()
+            raise
     finally:
         os.chdir(there)
-    # Cribbed from sourcecode/zope/test.py - avoid spurious error during exit.
-    logging.disable(999999999)
-
-    # Print Layer profiling report if requested.
-    if main_process and local_options.verbose >= 3:
-        profiled.report_profile_stats()
-    sys.exit(result)

=== modified file 'buildout-templates/bin/update-download-cache.in'
--- buildout-templates/bin/update-download-cache.in	2009-05-13 00:29:48 +0000
+++ buildout-templates/bin/update-download-cache.in	2010-11-07 00:31:57 +0000
@@ -1,2 +1,4 @@
-bzr up ${buildout:directory}/buildout/download-cache
+${shell-relative-path-setup}
+
+bzr up ${buildout:directory/buildout/download-cache|shell-path}
 

=== modified file 'buildout.cfg'
--- buildout.cfg	2009-08-21 19:13:05 +0000
+++ buildout.cfg	2010-11-07 00:31:57 +0000
@@ -11,6 +11,7 @@
 unzip = true
 eggs-directory = eggs
 download-cache = download-cache
+relative-paths = true
 
 # Disable this option temporarily if you want buildout to find software
 # dependencies *other* than those in our download-cache.  Once you have the
@@ -25,9 +26,6 @@
 
 allow-picked-versions = false
 
-allowed-eggs-from-site-packages =
-include-site-packages-for-buildout = false
-
 prefer-final = true
 
 develop = .
@@ -37,45 +35,42 @@
 
 [filetemplates]
 recipe = z3c.recipe.filetemplate
-eggs = lp
-       windmill
-# XXX gary 2009-5-12 bug 375751:
-# Make mailman built and installed in a more normal way.
-extra-paths = ${buildout:directory}/lib/mailman
 source-directory = buildout-templates
 
 [scripts]
-recipe = zc.recipe.egg
+recipe = z3c.recipe.scripts
 eggs = lp
     windmill
     funkload
+    zc.zservertracelog
 # XXX gary 2009-5-12 bug 375751:
 # Make mailman built and installed in a more normal way.
 extra-paths = ${buildout:directory}/lib/mailman
-# note that any indentation is lost in initialization blocks
-initialization = import os
-                 os.environ['STORM_CEXTENSIONS'] = '1'
-                 os.environ.setdefault('LPCONFIG', '${configuration:instance_name}')
-                 # XXX 2009-08-21 gary bug 417077
-                 # This can hopefully be removed when Twisted is used as an egg.
-                 import warnings
-                 warnings.filterwarnings(
-                 'ignore',
-                 'Module .+ was already imported from .+, but .+ is being added.*',
-                 UserWarning)
+include-site-packages = true
+allowed-eggs-from-site-packages =
+interpreter = py
+# Note that any indentation is lost in initialization blocks.
+initialization =
+    # See buildout.cfg, [scripts] section, "initialization" key.
+    from lp_sitecustomize import main
+    main('${configuration:instance_name}') # Initializes LP environment.
 entry-points = stxdocs=zope.configuration.stxdocs:main
     googletestservice=canonical.launchpad.testing.googletestservice:main
     windmill=windmill.bin.windmill_bin:main
     lp-windmill=lp.scripts.utilities.lpwindmill:main
+    jsbuild=lazr.js.build:main
+    jslint=lazr.js.jslint:main
+    tracereport=zc.zservertracelog.tracereport:main
+    jssize=lp.scripts.utilities.jssize:main
 
 [iharness]
-recipe = zc.recipe.egg
-eggs = lp
-       ipython
-extra-paths = ${buildout:directory}/lib/mailman
-initialization = import os
-                 os.environ['STORM_CEXTENSIONS'] = '1'
-                 os.environ['LPCONFIG'] = '${configuration:instance_name}'
+recipe = z3c.recipe.scripts
+eggs = ${scripts:eggs}
+     ipython
+extra-paths = ${scripts:extra-paths}
+include-site-packages = true
+allowed-eggs-from-site-packages =
+initialization = ${scripts:initialization}
 entry-points = iharness=canonical.database.harness:ipython
 scripts = iharness ipython=ipy
 

=== added symlink 'bzrplugins/builder'
=== target is u'../sourcecode/bzr-builder/'
=== added directory 'bzrplugins/lpserve'
=== renamed file 'bzrplugins/lpserve.py' => 'bzrplugins/lpserve/__init__.py'
--- bzrplugins/lpserve.py	2009-07-17 00:26:05 +0000
+++ bzrplugins/lpserve/__init__.py	2010-11-07 00:31:57 +0000
@@ -8,14 +8,34 @@
 
 __metaclass__ = type
 
-__all__ = ['cmd_launchpad_server']
-
-
+__all__ = [
+    'cmd_launchpad_server',
+    'cmd_launchpad_forking_service',
+    ]
+
+
+import errno
+import logging
+import os
+import resource
+import shlex
+import shutil
+import signal
+import socket
 import sys
+import tempfile
+import threading
+import time
 
 from bzrlib.commands import Command, register_command
 from bzrlib.option import Option
-from bzrlib import lockdir, ui
+from bzrlib import (
+    commands,
+    lockdir,
+    osutils,
+    trace,
+    ui,
+    )
 
 from bzrlib.smart import medium, server
 from bzrlib.transport import get_transport
@@ -44,9 +64,9 @@
         Option('mirror-directory',
                help='serve branches from this directory. Defaults to '
                     'config.codehosting.mirrored_branches_root.'),
-        Option('branchfs-endpoint',
+        Option('codehosting-endpoint',
                help='the url of the internal XML-RPC server. Defaults to '
-                    'config.codehosting.branchfs_endpoint.',
+                    'config.codehosting.codehosting_endpoint.',
                type=unicode),
         ]
 
@@ -84,15 +104,18 @@
         finally:
             ui.ui_factory = old_factory
 
-    def run(self, user_id, port=None, upload_directory=None,
-            mirror_directory=None, branchfs_endpoint_url=None, inet=False):
+    def run(self, user_id, port=None, branch_directory=None,
+            codehosting_endpoint_url=None, inet=False):
         from lp.codehosting.bzrutils import install_oops_handler
-        from lp.codehosting.vfs import get_lp_server
+        from lp.codehosting.vfs import get_lp_server, hooks
         install_oops_handler(user_id)
+        four_gig = int(4e9)
+        resource.setrlimit(resource.RLIMIT_AS, (four_gig, four_gig))
+        seen_new_branch = hooks.SetProcTitleHook()
         lp_server = get_lp_server(
-            int(user_id), branchfs_endpoint_url,
-            upload_directory, mirror_directory)
-        lp_server.setUp()
+            int(user_id), codehosting_endpoint_url, branch_directory,
+            seen_new_branch.seen)
+        lp_server.start_server()
 
         old_lockdir_timeout = lockdir._DEFAULT_TIMEOUT_SECONDS
         try:
@@ -102,7 +125,721 @@
             self.run_server(smart_server)
         finally:
             lockdir._DEFAULT_TIMEOUT_SECONDS = old_lockdir_timeout
-            lp_server.tearDown()
+            lp_server.stop_server()
 
 
 register_command(cmd_launchpad_server)
+
+
+class LPForkingService(object):
+    """A service that can be asked to start a new bzr subprocess via fork.
+
+    The basic idea is that bootstrapping time is long. Most of this is time
+    spent during import of all needed libraries (lp.*).  For example, the
+    original 'lp-serve' command could take 2.5s just to start up, before any
+    actual actions could be performed.
+
+    This class provides a service sitting on a socket, which can then be
+    requested to fork and run a given bzr command.
+
+    Clients connect to the socket and make a single request, which then
+    receives a response. The possible requests are:
+
+        "hello\n":  Trigger a heartbeat to report that the program is still
+                    running, and write status information to the log file.
+        "quit\n":   Stop the service, but do so 'nicely', waiting for children
+                    to exit, etc. Once this is received the service will stop
+                    taking new requests on the port.
+        "fork-env <command>\n<env>\nend\n": Request a new subprocess to be
+            started.  <command> is the bzr command to be run, such as "rocks"
+            or "lp-serve --inet 12".
+            The immediate response will be the path-on-disk to a directory full
+            of named pipes (fifos) that will be the stdout/stderr/stdin (named
+            accordingly) of the new process.
+            If a client holds the socket open, when the child process exits,
+            the exit status (as given by 'wait()') will be written to the
+            socket.
+
+            Note that one of the key bits is that the client will not be
+            started with exec*, we just call 'commands.run_bzr*()' directly.
+            This way, any modules that are already loaded will not need to be
+            loaded again. However, care must be taken with any global-state
+            that should be reset.
+
+            fork-env allows you to supply environment variables such as
+            "BZR_EMAIL: joe@xxxxxxx" which will be set in os.environ before the
+            command is run.
+    """
+
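+    # Example session (values assumed, for illustration only):
+    #   client: "fork-env lp-serve --inet 12\n"
+    #   client: "BZR_EMAIL: joe@xxxxxxx\n"
+    #   client: "end\n"
+    #   server: "ok\n<pid>\n<fifo-directory>\n"
+    #   (the client then opens the stdin/stdout/stderr fifos in
+    #   <fifo-directory>)
+    #   server: "exited\n<status>\n"   (sent when the child exits)
+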
+    # Design decisions. These are bits where we could have chosen a different
+    # method/implementation and weren't sure what would be best. Documenting
+    # the current decision, and the alternatives.
+    #
+    # [Decision #1]
+    #   Serve on a named AF_UNIX socket.
+    #       1) It doesn't make sense to serve to arbitrary hosts, we only want
+    #          the local host to make requests. (Since the client needs to
+    #          access the named fifos on the current filesystem.)
+    #       2) You can set security parameters on a filesystem path (g+rw,
+    #          a-rw).
+    # [Decision #2]
+    #   SIGCHLD
+    #       We want to quickly detect that children have exited so that we can
+    #       inform the client process quickly. At the moment, we register a
+    #       SIGCHLD handler that doesn't do anything. However, it means that
+    #       when we get the signal, if we are currently blocked in something
+    #       like '.accept()', we will jump out temporarily. At that point the
+    #       main loop will check if any children have exited. We could have
+    #       done this work as part of the signal handler, but doing any
+    #       serious work in a signal handler felt 'racy'.
+    #       If we just used socket.timeout as the indicator to go poll for
+    #       children exiting, it slows the disconnect by as much as the full
+    #       timeout. (So a timeout of 1.0s will cause the process to hang by
+    #       that long until it determines that a child has exited, and can
+    #       close the connection.)
+    #       The current flow means that we'll notice exited children whenever
+    #       we finish the current work.
+    # [Decision #3]
+    #   Child vs Parent actions.
+    #       There are several actions that are done when we get a new request.
+    #       We have to create the fifos on disk, fork a new child, connect the
+    #       child to those handles, and inform the client of the new path (not
+    #       necessarily in that order.) It makes sense to wait to send the path
+    #       message until after the fifos have been created. That way the
+    #       client can just try to open them immediately, and the
+    #       client-and-child will be synchronized by the open() calls.
+    #       However, should the client be the one doing the mkfifo, or should
+    #       the server? Who should be sending the message? Should we fork
+    #       after the mkfifo or before?
+    #       The current thoughts:
+    #           1) Try to do work in the child when possible. This should allow
+    #              for 'scaling' because the server is single-threaded.
+    #           2) We create the directory itself in the server, because that
+    #              allows the server to monitor whether the client failed to
+    #              clean up after itself or not.
+    #           3) Otherwise we create the fifos in the client, and then send
+    #              the message back.
+    # [Decision #4]
+    #   Exit information
+    #       Inform the client that the child has exited on the socket they used
+    #       to request the fork.
+    #       1) Arguably they could see that stdout and stderr have been closed,
+    #          and thus stop reading. In testing, I wrote a client which uses
+    #          select.poll() over stdin/stdout/stderr and used that to ferry
+    #          the content to the appropriate local handle. However for the
+    #          FIFOs, when the remote end closed, I wouldn't see any
+    #          corresponding information on the local end. There obviously
+    #          wasn't any data to be read, so they wouldn't show up as
+    #          'readable' (for me to try to read, and get 0 bytes, indicating
+    #          it was closed). I also wasn't seeing POLLHUP, which seemed to be
+    #          the correct indicator.  As such, we decided to inform the client
+    #          on the socket that they originally made the fork request, rather
+    #          than just closing the socket immediately.
+    #       2) We could have had the forking server close the socket, and only
+    #          the child hold the socket open. When the child exits, then the
+    #          OS naturally closes the socket.
+    #          If we want the returncode, then we should put that as bytes on
+    #          the socket before we exit. Having the child do the work means
+    #          that in error conditions, it could easily die before being able
+    #          to write anything (think SEGFAULT, etc). The forking server is
+    #          already wait()ing on its children, so that we don't get
+    #          zombies, and with wait3() we can get the rusage (user time,
+    #          memory consumption, etc.).
+    #          As such, it seems reasonable that the server can then also
+    #          report back when a child is seen as exiting.
+    # [Decision #5]
+    #   cleanup once connected
+    #       The child process blocks during 'open()' waiting for the client to
+    #       connect to its fifos. Once the client has connected, the child then
+    #       deletes the temporary directory and the fifos from disk. This means
+    #       that there isn't much left for diagnosis, but it also means that
+    #       the client won't leave garbage around if it crashes, etc.
+    #       Note that the forking service itself still monitors the paths
+    #       created, and will delete garbage if it sees that a child failed to
+    #       do so.
+    # [Decision #6]
+    #   os._exit(retcode) in the child
+    #       Calling sys.exit(retcode) raises an exception, which then bubbles
+    #       up the stack and runs exit functions (and finally statements). When
+    #       I tried using it originally, I would see the current child bubble
+    #       all the way up the stack (through the server code that it fork()
+    #       through), and then get to main() returning code 0. The process
+    #       would still exit nonzero. My guess is that something in the atexit
+    #       functions was failing, but that it was happening after logging,
+    #       etc. had been shut down.
+    #       Any global state from the child process should be flushed before
+    #       run_bzr_* has exited (which we *do* wait for), and any other global
+    #       state is probably a remnant from the service process. Which will be
+    #       cleaned up by the service itself, rather than the child.
+    #       There is some concern that log files may not get flushed, so we
+    #       currently call sys.exitfunc() first. The main problem is that I
+    #       don't know any way to *remove* a function registered via
+    #       'atexit()', so if the forking service has some state, we may try
+    #       to clean it up incorrectly.
+    #       Note that the bzr script itself uses sys.exitfunc(); os._exit() in
+    #       the 'bzr' main script, as the teardown time of all the python state
+    #       was quite noticeable in real-world runtime. As such, bzrlib should
+    #       be pretty safe, or it would have been failing for people already.
+    # [Decision #7]
+    #   prefork vs max children vs ?
+    #       For simplicity it seemed easiest to just fork when requested. Over
+    #       time, I realized it would be easy to allow running an arbitrary
+    #       command (no harder than just running one command), so it seemed
+    #       reasonable to switch over. If we go the prefork route, then we'll
+    #       need a way to tell the pre-forked children what command to run.
+    #       This could be as easy as just adding one more fifo that they wait
+    #       on in the same directory.
+    #       For now, I've chosen not to limit the number of forked children. I
+    #       don't know what a reasonable value is, and probably there are
+    #       already limitations at play. (If Conch limits connections, then it
+    #       will already be doing all the work, etc.)
+    # [Decision #8]
+    #   nicer errors on the request socket
+    #       This service is meant to be run only on the local system. As such,
+    #       we don't try to be extra defensive about leaking information to
+    #       the one connecting to the socket. (We should still watch out what
+    #       we send across the per-child fifos, since those are connected to
+    #       remote clients.) Instead we try to be helpful, and tell them as
+    #       much as we know about what went wrong.
+
+    DEFAULT_PATH = '/var/run/launchpad_forking_service.sock'
+    DEFAULT_PERMISSIONS = 00660 # Permissions on the master socket (rw-rw----)
+    WAIT_FOR_CHILDREN_TIMEOUT = 5*60 # Wait no more than 5 min for children
+    SOCKET_TIMEOUT = 1.0
+    SLEEP_FOR_CHILDREN_TIMEOUT = 1.0
+    WAIT_FOR_REQUEST_TIMEOUT = 1.0 # No request should take longer than this to
+                                   # be read
+
+    _fork_function = os.fork
+
+    def __init__(self, path=DEFAULT_PATH, perms=DEFAULT_PERMISSIONS):
+        self.master_socket_path = path
+        self._perms = perms
+        self._start_time = None
+        self._should_terminate = threading.Event()
+        # We keep local references to these, because during shutdown the
+        # socket module may be gc'd before we are.
+        self._socket_timeout = socket.timeout
+        self._socket_error = socket.error
+        # Map from pid => (temp_path_for_handles, request_socket)
+        self._child_processes = {}
+        self._children_spawned = 0
+
+    def _create_master_socket(self):
+        self._server_socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+        self._server_socket.bind(self.master_socket_path)
+        if self._perms is not None:
+            os.chmod(self.master_socket_path, self._perms)
+        self._server_socket.listen(5)
+        self._server_socket.settimeout(self.SOCKET_TIMEOUT)
+        trace.mutter('set socket timeout to: %s' % (self.SOCKET_TIMEOUT,))
+
+    def _cleanup_master_socket(self):
+        self._server_socket.close()
+        try:
+            os.remove(self.master_socket_path)
+        except (OSError, IOError), e:
+            # If we don't delete it, then we get 'address already in
+            # use' failures
+            trace.mutter('failed to cleanup: %s'
+                         % (self.master_socket_path,))
+
+    def _handle_sigchld(self, signum, frm):
+        # We don't actually do anything here, we just want an interrupt (EINTR)
+        # on socket.accept() when SIGCHLD occurs.
+        pass
+
+    def _handle_sigterm(self, signum, frm):
+        # Restore the default handler, so that a second SIGTERM will exit us.
+        signal.signal(signal.SIGTERM, signal.SIG_DFL)
+        # SIGTERM should also generate EINTR on our wait loop, so this should
+        # be enough
+        self._should_terminate.set()
+
+    def _register_signals(self):
+        """Register a SIGCHILD and SIGTERM handler.
+
+        If we have a trigger for SIGCHILD then we can quickly respond to
+        clients when their process exits. The main risk is getting more EAGAIN
+        errors elsewhere.
+
+        SIGTERM allows us to cleanup nicely before we exit.
+        """
+        signal.signal(signal.SIGCHLD, self._handle_sigchld)
+        signal.signal(signal.SIGTERM, self._handle_sigterm)
+
+    def _unregister_signals(self):
+        signal.signal(signal.SIGCHLD, signal.SIG_DFL)
+        signal.signal(signal.SIGTERM, signal.SIG_DFL)
+
+    def _create_child_file_descriptors(self, base_path):
+        stdin_path = os.path.join(base_path, 'stdin')
+        stdout_path = os.path.join(base_path, 'stdout')
+        stderr_path = os.path.join(base_path, 'stderr')
+        os.mkfifo(stdin_path)
+        os.mkfifo(stdout_path)
+        os.mkfifo(stderr_path)
+
+    def _bind_child_file_descriptors(self, base_path):
+        stdin_path = os.path.join(base_path, 'stdin')
+        stdout_path = os.path.join(base_path, 'stdout')
+        stderr_path = os.path.join(base_path, 'stderr')
+        # These open calls will block until another process connects (which
+        # must connect in the same order)
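+        # (i.e. the client opens stdin for writing first, then stdout and
+        # stderr for reading.)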
+        stdin_fid = os.open(stdin_path, os.O_RDONLY)
+        stdout_fid = os.open(stdout_path, os.O_WRONLY)
+        stderr_fid = os.open(stderr_path, os.O_WRONLY)
+        # Note: by this point bzrlib has opened stderr for logging
+        #       (as part of starting the service process in the first place).
+        #       As such, it has a stream handler that writes to stderr. logging
+        #       tries to flush and close that, but the file is already closed.
+        #       This just suppresses that exception.
+        logging.raiseExceptions = False
+        sys.stdin.close()
+        sys.stdout.close()
+        sys.stderr.close()
+        os.dup2(stdin_fid, 0)
+        os.dup2(stdout_fid, 1)
+        os.dup2(stderr_fid, 2)
+        sys.stdin = os.fdopen(stdin_fid, 'rb')
+        sys.stdout = os.fdopen(stdout_fid, 'wb')
+        sys.stderr = os.fdopen(stderr_fid, 'wb')
+        ui.ui_factory.stdin = sys.stdin
+        ui.ui_factory.stdout = sys.stdout
+        ui.ui_factory.stderr = sys.stderr
+        # Now that we've opened the handles, delete everything so that we don't
+        # leave garbage around. Because the open() is done in blocking mode, we
+        # know that someone has already connected to them, and we don't want
+        # anyone else getting confused and connecting.
+        # See [Decision #5]
+        os.remove(stderr_path)
+        os.remove(stdout_path)
+        os.remove(stdin_path)
+        os.rmdir(base_path)
+
+    def _close_child_file_descriptors(self):
+        sys.stdin.close()
+        sys.stderr.close()
+        sys.stdout.close()
+
+    def become_child(self, command_argv, path):
+        """We are in the spawned child code, do our magic voodoo."""
+        # Stop tracking new signals
+        self._unregister_signals()
+        # Reset the start time
+        trace._bzr_log_start_time = time.time()
+        trace.mutter('%d starting %r'
+                     % (os.getpid(), command_argv))
+        self._bind_child_file_descriptors(path)
+        self._run_child_command(command_argv)
+
+    def _run_child_command(self, command_argv):
+        # This is the point where we would actually want to do something with
+        # our life
+        # TODO: We may want to consider special-casing the 'lp-serve' command.
+        #       As that is the primary use-case for this service, it might be
+        #       interesting to have an already-instantiated instance, where we
+        #       can just pop on an extra argument and be ready to go. However,
+        #       that would probably only really be measurable if we prefork. As
+        #       it looks like ~200ms is 'fork()' time, but only 50ms is
+        #       run-the-command time.
+        retcode = commands.run_bzr_catch_errors(command_argv)
+        self._close_child_file_descriptors()
+        trace.mutter('%d finished %r'
+                     % (os.getpid(), command_argv))
+        # We force os._exit() here, because we don't want to unwind the stack,
+        # which has complex results. (We can get it to unwind back to the
+        # cmd_launchpad_forking_service code, and even back to main() reporting
+        # the return code, but after that the return code suddenly changes
+        # from a '0' to a '1', with no logging of info.)
+        # TODO: Should we call sys.exitfunc() here? it allows atexit functions
+        #       to fire, however, some of those may be still around from the
+        #       parent process, which we don't really want.
+        sys.exitfunc()
+        # See [Decision #6]
+        os._exit(retcode)
+
+    @staticmethod
+    def command_to_argv(command_str):
+        """Convert a 'foo bar' style command to [u'foo', u'bar']"""
+        # command_str must be a utf-8 string
+        return [s.decode('utf-8') for s in shlex.split(command_str)]
+
+    @staticmethod
+    def parse_env(env_str):
+        """Convert the environment information into a dict.
+
+        :param env_str: A string full of environment variable declarations.
+            Each key is simple ascii "key: value\n"
+            The string must end with "end\n".
+        :return: A dict of environment variables
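+
+        For example (hypothetical input):
+            parse_env('BZR_EMAIL: joe@xxxxxxx\nend\n')
+            => {'BZR_EMAIL': 'joe@xxxxxxx'}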
+        """
+        env = {}
+        if not env_str.endswith('end\n'):
+            raise ValueError('Invalid env-str: %r' % (env_str,))
+        env_str = env_str[:-5]
+        if not env_str:
+            return env
+        env_entries = env_str.split('\n')
+        for entry in env_entries:
+            key, value = entry.split(': ', 1)
+            env[key] = value
+        return env
+
+    def fork_one_request(self, conn, client_addr, command_argv, env):
+        """Fork myself and serve a request."""
+        temp_name = tempfile.mkdtemp(prefix='lp-forking-service-child-')
+        # Now that we've set everything up, send the response to the client.
+        # We create the fifos first, so that the client can start trying to
+        # connect to them while we fork and have the child do the same.
+        self._children_spawned += 1
+        pid = self._fork_function()
+        if pid == 0:
+            pid = os.getpid()
+            trace.mutter('%d spawned' % (pid,))
+            self._server_socket.close()
+            for env_var, value in env.iteritems():
+                osutils.set_or_unset_env(env_var, value)
+            # See [Decision #3]
+            self._create_child_file_descriptors(temp_name)
+            conn.sendall('ok\n%d\n%s\n' % (pid, temp_name))
+            conn.close()
+            self.become_child(command_argv, temp_name)
+            trace.warning('become_child returned!!!')
+            sys.exit(1)
+        else:
+            self._child_processes[pid] = (temp_name, conn)
+            self.log(client_addr, 'Spawned process %s for %r: %s'
+                            % (pid, command_argv, temp_name))
+
+    def main_loop(self):
+        self._start_time = time.time()
+        self._should_terminate.clear()
+        self._register_signals()
+        self._create_master_socket()
+        trace.note('Listening on socket: %s' % (self.master_socket_path,))
+        try:
+            try:
+                self._do_loop()
+            finally:
+                # Stop talking to others, we are shutting down
+                self._cleanup_master_socket()
+        except KeyboardInterrupt:
+            # SIGINT received, try to shutdown cleanly
+            pass
+        trace.note('Shutting down. Waiting up to %.0fs for %d child processes'
+                   % (self.WAIT_FOR_CHILDREN_TIMEOUT,
+                      len(self._child_processes)))
+        self._shutdown_children()
+        trace.note('Exiting')
+
+    def _do_loop(self):
+        while not self._should_terminate.isSet():
+            try:
+                conn, client_addr = self._server_socket.accept()
+            except self._socket_timeout:
+                pass # run shutdown and children checks
+            except self._socket_error, e:
+                if e.args[0] == errno.EINTR:
+                    pass # run shutdown and children checks
+                elif e.args[0] == errno.EBADF:
+                    # We can get EBADF here while we are shutting down,
+                    # so we just ignore it for now.
+                    pass
+                else:
+                    # Log any other failure mode.
+                    trace.warning("listening socket error: %s", e)
+            else:
+                self.log(client_addr, 'connected')
+                # TODO: We should probably trap exceptions coming out of this
+                #       and log them, so that we don't kill the service because
+                #       of an unhandled error
+                # Note: settimeout is used so that a malformed request doesn't
+                #       cause us to hang forever. Note that the particular
+                #       implementation means that a malicious client could
+                #       probably send us one byte every Xms, and we would just
+                #       keep trying to read it. However, as a local service, we
+                #       aren't worrying about it.
+                conn.settimeout(self.WAIT_FOR_REQUEST_TIMEOUT)
+                try:
+                    self.serve_one_connection(conn, client_addr)
+                except self._socket_timeout, e:
+                    trace.log_exception_quietly()
+                    self.log(client_addr, 'request timeout failure: %s' % (e,))
+                    conn.sendall('FAILURE\nrequest timed out\n')
+                    conn.close()
+            self._poll_children()
+
+    def log(self, client_addr, message):
+        """Log a message to the trace log.
+
+        Include the information about what connection is being served.
+        """
+        if client_addr is not None:
+            # Note, we don't use conn.getpeername() because if a client
+            # disconnects before we get here, that raises an exception
+            conn_info = '[%s] ' % (client_addr,)
+        else:
+            conn_info = ''
+        trace.mutter('%s%s' % (conn_info, message))
+
+    def log_information(self):
+        """Log the status information.
+
+        This includes stuff like number of children, and ... ?
+        """
+        self._poll_children()
+        self.log(None, 'Running for %.3fs' % (time.time() - self._start_time))
+        self.log(None, '%d children currently running (spawned %d total)'
+                       % (len(self._child_processes), self._children_spawned))
+        # Read the current information about memory consumption, etc.
+        self.log(None, 'Self: %s'
+                       % (resource.getrusage(resource.RUSAGE_SELF),))
+        # This seems to be the sum of all rusage for all children that have
+        # been collected (not for currently running children, or ones we
+        # haven't "wait"ed on.) We may want to read /proc/PID/status, since
+        # 'live' information is probably more useful.
+        self.log(None, 'Finished children: %s'
+                       % (resource.getrusage(resource.RUSAGE_CHILDREN),))
+
+    def _poll_children(self):
+        """See if children are still running, etc.
+
+        One interesting hook here would be to track memory consumption, etc.
+        """
+        while self._child_processes:
+            try:
+                c_id, exit_code, rusage = os.wait3(os.WNOHANG)
+            except OSError, e:
+                if e.errno == errno.ECHILD:
+                    # TODO: We handle this right now because the test suite
+                    #       fakes a child, since we wanted to test some code
+                    #       without actually forking anything
+                    trace.mutter('_poll_children() called, and'
+                        ' self._child_processes indicates there are'
+                        ' children, but os.wait3() says there are not.'
+                        ' current_children: %s' % (self._child_processes,))
+                    return
+                # Any other OSError is unexpected; re-raise rather than fall
+                # through with c_id unset.
+                raise
+            if c_id == 0:
+                # No more children stopped right now
+                return
+            c_path, sock = self._child_processes.pop(c_id)
+            trace.mutter('%s exited %s and usage: %s'
+                         % (c_id, exit_code, rusage))
+            # See [Decision #4]
+            try:
+                sock.sendall('exited\n%s\n' % (exit_code,))
+            except (self._socket_timeout, self._socket_error), e:
+                # The client disconnected before we wanted them to,
+                # no big deal
+                trace.mutter('%s\'s socket already closed: %s' % (c_id, e))
+            else:
+                sock.close()
+            if os.path.exists(c_path):
+                # The child failed to cleanup after itself, do the work here
+                trace.warning('Had to clean up after child %d: %s\n'
+                              % (c_id, c_path))
+                shutil.rmtree(c_path, ignore_errors=True)
+
+    def _wait_for_children(self, secs):
+        start = time.time()
+        end = start + secs
+        while self._child_processes:
+            self._poll_children()
+            if secs > 0 and time.time() > end:
+                break
+            time.sleep(self.SLEEP_FOR_CHILDREN_TIMEOUT)
+
+    def _shutdown_children(self):
+        self._wait_for_children(self.WAIT_FOR_CHILDREN_TIMEOUT)
+        if self._child_processes:
+            trace.warning('Children still running: %s'
+                % ', '.join(map(str, self._child_processes)))
+            for c_id in self._child_processes:
+                trace.warning('sending SIGINT to %d' % (c_id,))
+                os.kill(c_id, signal.SIGINT)
+            # We sent the SIGINT signal, see if they exited
+            self._wait_for_children(self.SLEEP_FOR_CHILDREN_TIMEOUT)
+        if self._child_processes:
+            # No? Then maybe something more powerful
+            for c_id in self._child_processes:
+                trace.warning('sending SIGKILL to %d' % (c_id,))
+                os.kill(c_id, signal.SIGKILL)
+            # We sent the SIGKILL signal, see if they exited
+            self._wait_for_children(self.SLEEP_FOR_CHILDREN_TIMEOUT)
+        if self._child_processes:
+            for c_id, (c_path, sock) in self._child_processes.iteritems():
+                # TODO: We should probably put something into this message?
+                #       However, the likelihood is very small that this isn't
+                #       already closed because of SIGKILL + _wait_for_children
+                #       And I don't really know what to say...
+                sock.close()
+                if os.path.exists(c_path):
+                    trace.warning('Cleaning up after immortal child %d: %s\n'
+                                  % (c_id, c_path))
+                    shutil.rmtree(c_path)
+
+    def _parse_fork_request(self, conn, client_addr, request):
+        if request.startswith('fork-env '):
+            while not request.endswith('end\n'):
+                request += osutils.read_bytes_from_socket(conn)
+            command, env = request[9:].split('\n', 1)
+        else:
+            command = request[5:].strip()
+            env = 'end\n' # No env set
+        try:
+            command_argv = self.command_to_argv(command)
+            env = self.parse_env(env)
+        except Exception, e:
+            # TODO: Log the traceback?
+            self.log(client_addr, 'command or env parsing failed: %r'
+                                  % (str(e),))
+            conn.sendall('FAILURE\ncommand or env parsing failed: %r'
+                         % (str(e),))
+        else:
+            return command_argv, env
+        return None, None
+
+    def serve_one_connection(self, conn, client_addr):
+        request = ''
+        while '\n' not in request:
+            request += osutils.read_bytes_from_socket(conn)
+        # telnet likes to use '\r\n' rather than '\n', and it is nice to have
+        # an easy way to debug.
+        request = request.replace('\r\n', '\n')
+        self.log(client_addr, 'request: %r' % (request,))
+        if request == 'hello\n':
+            conn.sendall('ok\nyep, still alive\n')
+            self.log_information()
+            conn.close()
+        elif request == 'quit\n':
+            self._should_terminate.set()
+            conn.sendall('ok\nquit command requested... exiting\n')
+            conn.close()
+        elif request.startswith('fork ') or request.startswith('fork-env '):
+            command_argv, env = self._parse_fork_request(conn, client_addr,
+                                                         request)
+            if command_argv is not None:
+                # See [Decision #7]
+                # TODO: Do we want to limit the number of children? And/or
+                #       prefork additional instances? (the design will need to
+                #       change if we prefork and run arbitrary commands.)
+                self.fork_one_request(conn, client_addr, command_argv, env)
+                # We don't close the conn like other code paths, since we use
+                # it again later.
+            else:
+                conn.close()
+        else:
+            self.log(client_addr, 'FAILURE: unknown request: %r' % (request,))
+            # See [Decision #8]
+            conn.sendall('FAILURE\nunknown request: %r\n' % (request,))
+            conn.close()
+
+
+class cmd_launchpad_forking_service(Command):
+    """Launch a long-running process, where you can ask for new processes.
+
+    The process will block on a given AF_UNIX socket waiting for requests to be
+    made.  When a request is made, it will fork itself and redirect
+    stdout/in/err to fifos on the filesystem, and start running the requested
+    command. The caller will be informed where those file handles can be found.
+    Thus it only makes sense that the process connecting to the port must be on
+    the same system.
+    """
+
+    aliases = ['lp-service']
+
+    takes_options = [Option('path',
+                        help='Listen for connections at PATH',
+                        type=str),
+                     Option('perms', type=str,
+                        help='Set the mode bits for the socket, interpreted'
+                             ' as an octal integer (same as chmod)'),
+                     Option('preload',
+                        help="Do/don't preload libraries before startup."),
+                     Option('children-timeout', type=int, argname='SEC',
+                        help="Only wait SEC seconds for children to exit"),
+                    ]
+
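+    # Example invocation (paths assumed, for illustration):
+    #   % bzr lp-service --path=/tmp/lp-service.sock
+    # and a liveness check from a shell, using a netcat with AF_UNIX support:
+    #   % printf 'hello\n' | nc -U /tmp/lp-service.sock
+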
+    def _preload_libraries(self):
+        for pyname in libraries_to_preload:
+            try:
+                __import__(pyname)
+            except ImportError, e:
+                trace.mutter('failed to preload %s: %s' % (pyname, e))
+
+    def run(self, path=None, perms=None, preload=True,
+            children_timeout=LPForkingService.WAIT_FOR_CHILDREN_TIMEOUT):
+        if path is None:
+            path = LPForkingService.DEFAULT_PATH
+        if perms is None:
+            perms = LPForkingService.DEFAULT_PERMISSIONS
+        else:
+            # --perms arrives as a string; interpret it as an octal integer,
+            # as chmod does.
+            perms = int(perms, 8)
+        if preload:
+            # We 'note' this because it often takes a fair amount of time.
+            trace.note('Preloading %d modules' % (len(libraries_to_preload),))
+            self._preload_libraries()
+        service = LPForkingService(path, perms)
+        service.WAIT_FOR_CHILDREN_TIMEOUT = children_timeout
+        service.main_loop()
+
+register_command(cmd_launchpad_forking_service)
+
+
+class cmd_launchpad_replay(Command):
+    """Write input from stdin back to stdout or stderr.
+
+    This is a hidden command, primarily available for testing
+    cmd_launchpad_forking_service.
+    """
+
+    hidden = True
+
+    def run(self):
+        # Just read line-by-line from stdin, and write out to stdout or stderr
+        # depending on the prefix
+        for line in sys.stdin:
+            channel, contents = line.split(' ', 1)
+            channel = int(channel)
+            if channel == 1:
+                sys.stdout.write(contents)
+                sys.stdout.flush()
+            elif channel == 2:
+                sys.stderr.write(contents)
+                sys.stderr.flush()
+            else:
+                raise RuntimeError('Invalid channel request.')
+        return 0
+
+register_command(cmd_launchpad_replay)
+
+# This list was generated by running lsprof on a spawned child and looking
+# for <module ...> times, which indicate that an import occurred. Another
+# possibility is to run "bzr lp-serve --profile-imports" manually and observe
+# what was expensive to import. It doesn't seem very easy to get this right
+# automatically.
+libraries_to_preload = [
+    'bzrlib.errors',
+    'bzrlib.repofmt.groupcompress_repo',
+    'bzrlib.repository',
+    'bzrlib.smart',
+    'bzrlib.smart.protocol',
+    'bzrlib.smart.request',
+    'bzrlib.smart.server',
+    'bzrlib.smart.vfs',
+    'bzrlib.transport.local',
+    'bzrlib.transport.readonly',
+    'lp.codehosting.bzrutils',
+    'lp.codehosting.vfs',
+    'lp.codehosting.vfs.branchfs',
+    'lp.codehosting.vfs.branchfsclient',
+    'lp.codehosting.vfs.hooks',
+    'lp.codehosting.vfs.transport',
+    ]
+
+
+def load_tests(standard_tests, module, loader):
+    standard_tests.addTests(loader.loadTestsFromModuleNames(
+        [__name__ + '.' + x for x in [
+            'test_lpserve',
+        ]]))
+    return standard_tests

=== added file 'bzrplugins/lpserve/test_lpserve.py'
--- bzrplugins/lpserve/test_lpserve.py	1970-01-01 00:00:00 +0000
+++ bzrplugins/lpserve/test_lpserve.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,534 @@
+# Copyright 2010 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+import os
+import signal
+import socket
+import subprocess
+import tempfile
+import threading
+import time
+
+from testtools import content
+
+from bzrlib import (
+    osutils,
+    tests,
+    trace,
+    )
+from bzrlib.plugins import lpserve
+
+from canonical.config import config
+from lp.codehosting import get_bzr_path, get_BZR_PLUGIN_PATH_for_subprocess
+
+
+class TestingLPForkingServiceInAThread(lpserve.LPForkingService):
+    """A test-double to run a "forking service" in a thread.
+
+    Note that it does not actually fork, but it does allow us to interact
+    with the service for other operations.
+    """
+
+    # For testing, we set the timeouts much lower, because we want the tests to
+    # run quickly
+    WAIT_FOR_CHILDREN_TIMEOUT = 0.5
+    SOCKET_TIMEOUT = 0.01
+    SLEEP_FOR_CHILDREN_TIMEOUT = 0.01
+    WAIT_FOR_REQUEST_TIMEOUT = 0.1
+
+    # We're running in a thread as part of the test suite; blow up if we
+    # try to fork.
+    _fork_function = None
+
+    def __init__(self, path, perms=None):
+        self.service_started = threading.Event()
+        self.service_stopped = threading.Event()
+        self.this_thread = None
+        self.fork_log = []
+        super(TestingLPForkingServiceInAThread, self).__init__(
+            path=path, perms=None)
+
+    def _register_signals(self):
+        pass # Don't register it for the test suite
+
+    def _unregister_signals(self):
+        pass # We don't fork, and didn't register, so don't unregister
+
+    def _create_master_socket(self):
+        super(TestingLPForkingServiceInAThread, self)._create_master_socket()
+        self.service_started.set()
+
+    def main_loop(self):
+        self.service_stopped.clear()
+        super(TestingLPForkingServiceInAThread, self).main_loop()
+        self.service_stopped.set()
+
+    def fork_one_request(self, conn, client_addr, command, env):
+        # We intentionally don't allow the test suite to request a fork, as
+        # threads + forks and everything else don't exactly play well together
+        self.fork_log.append((command, env))
+        conn.sendall('ok\nfake forking\n')
+        conn.close()
+
+    @staticmethod
+    def start_service(test):
+        """Start a new LPForkingService in a thread at a random path.
+
+        This will block until the service has created its socket, and is ready
+        to communicate.
+
+        :return: A new TestingLPForkingServiceInAThread instance
+        """
+        fd, path = tempfile.mkstemp(prefix='tmp-lp-forking-service-',
+                                    suffix='.sock')
+        # We don't want a temp file; we want a temp socket.
+        os.close(fd)
+        os.remove(path)
+        new_service = TestingLPForkingServiceInAThread(path=path)
+        thread = threading.Thread(target=new_service.main_loop,
+                                  name='TestingLPForkingServiceInAThread')
+        new_service.this_thread = thread
+        # Should we be doing thread.setDaemon(True)?
+        thread.start()
+        new_service.service_started.wait(10.0)
+        if not new_service.service_started.isSet():
+            raise RuntimeError(
+                'Failed to start the TestingLPForkingServiceInAThread')
+        test.addCleanup(new_service.stop_service)
+        # What about returning new_service._sockname?
+        return new_service
+
+    def stop_service(self):
+        """Stop the test-server thread. This can be called multiple times."""
+        if self.this_thread is None:
+            # We already stopped the thread.
+            return
+        self._should_terminate.set()
+        self.service_stopped.wait(10.0)
+        if not self.service_stopped.isSet():
+            raise RuntimeError(
+                'Failed to stop the TestingLPForkingServiceInAThread')
+        self.this_thread.join()
+        # Break any refcycles
+        self.this_thread = None
+
+
+class TestTestingLPForkingServiceInAThread(tests.TestCaseWithTransport):
+
+    def test_start_and_stop_service(self):
+        service = TestingLPForkingServiceInAThread.start_service(self)
+        service.stop_service()
+
+    def test_multiple_stops(self):
+        service = TestingLPForkingServiceInAThread.start_service(self)
+        service.stop_service()
+        # calling stop_service repeatedly is a no-op (and not an error)
+        service.stop_service()
+
+    def test_autostop(self):
+        # We shouldn't leak a thread here, as it should be part of the test
+        # case teardown.
+        service = TestingLPForkingServiceInAThread.start_service(self)
+
+
+class TestCaseWithLPForkingService(tests.TestCaseWithTransport):
+
+    def setUp(self):
+        super(TestCaseWithLPForkingService, self).setUp()
+        self.service = TestingLPForkingServiceInAThread.start_service(self)
+
+    def send_message_to_service(self, message, one_byte_at_a_time=False):
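+        # one_byte_at_a_time lets tests exercise the service's incremental
+        # request parsing and its read timeouts (see the tests below).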
+        client_sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+        client_sock.connect(self.service.master_socket_path)
+        if one_byte_at_a_time:
+            for byte in message:
+                client_sock.send(byte)
+        else:
+            client_sock.sendall(message)
+        response = client_sock.recv(1024)
+        return response
+
+
+class TestLPForkingServiceCommandToArgv(tests.TestCase):
+
+    def assertAsArgv(self, argv, command_str):
+        self.assertEqual(argv,
+            lpserve.LPForkingService.command_to_argv(command_str))
+
+    def test_simple(self):
+        self.assertAsArgv([u'foo'], 'foo')
+        self.assertAsArgv([u'foo', u'bar'], 'foo bar')
+
+    def test_quoted(self):
+        self.assertAsArgv([u'foo'], 'foo')
+        self.assertAsArgv([u'foo bar'], '"foo bar"')
+
+    def test_unicode(self):
+        self.assertAsArgv([u'command', u'\xe5'], 'command \xc3\xa5')
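+        # ('\xc3\xa5' is the UTF-8 byte sequence for u'\xe5'.)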
+
+
+class TestLPForkingServiceParseEnv(tests.TestCase):
+
+    def assertEnv(self, env, env_str):
+        self.assertEqual(env, lpserve.LPForkingService.parse_env(env_str))
+
+    def assertInvalid(self, env_str):
+        self.assertRaises(ValueError, lpserve.LPForkingService.parse_env,
+                                      env_str)
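+
+    # The wire format (exercised below) is zero or more 'KEY: value\n' lines
+    # terminated by a single 'end\n' line.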
+
+    def test_no_entries(self):
+        self.assertEnv({}, 'end\n')
+
+    def test_one_entry(self):
+        self.assertEnv({'BZR_EMAIL': 'joe@xxxxxxx'},
+                       'BZR_EMAIL: joe@xxxxxxx\n'
+                       'end\n')
+
+    def test_two_entries(self):
+        self.assertEnv({'BZR_EMAIL': 'joe@xxxxxxx', 'BAR': 'foo'},
+                       'BZR_EMAIL: joe@xxxxxxx\n'
+                       'BAR: foo\n'
+                       'end\n')
+
+    def test_invalid_empty(self):
+        self.assertInvalid('')
+
+    def test_invalid_end(self):
+        self.assertInvalid("BZR_EMAIL: joe@xxxxxxx\n")
+
+    def test_invalid_entry(self):
+        self.assertInvalid("BZR_EMAIL joe@xxxxxxx\nend\n")
+
+
+class TestLPForkingService(TestCaseWithLPForkingService):
+
+    def test_send_quit_message(self):
+        response = self.send_message_to_service('quit\n')
+        self.assertEqual('ok\nquit command requested... exiting\n', response)
+        self.service.service_stopped.wait(10.0)
+        self.assertTrue(self.service.service_stopped.isSet())
+
+    def test_send_invalid_message_fails(self):
+        response = self.send_message_to_service('unknown\n')
+        self.assertStartsWith(response, 'FAILURE')
+
+    def test_send_hello_heartbeat(self):
+        response = self.send_message_to_service('hello\n')
+        self.assertEqual('ok\nyep, still alive\n', response)
+
+    def test_send_simple_fork(self):
+        response = self.send_message_to_service('fork rocks\n')
+        self.assertEqual('ok\nfake forking\n', response)
+        self.assertEqual([(['rocks'], {})], self.service.fork_log)
+
+    def test_send_fork_env_with_empty_env(self):
+        response = self.send_message_to_service(
+            'fork-env rocks\n'
+            'end\n')
+        self.assertEqual('ok\nfake forking\n', response)
+        self.assertEqual([(['rocks'], {})], self.service.fork_log)
+
+    def test_send_fork_env_with_env(self):
+        response = self.send_message_to_service(
+            'fork-env rocks\n'
+            'BZR_EMAIL: joe@xxxxxxxxxxx\n'
+            'end\n')
+        self.assertEqual('ok\nfake forking\n', response)
+        self.assertEqual([(['rocks'], {'BZR_EMAIL': 'joe@xxxxxxxxxxx'})],
+                         self.service.fork_log)
+
+    def test_send_fork_env_slowly(self):
+        response = self.send_message_to_service(
+            'fork-env rocks\n'
+            'BZR_EMAIL: joe@xxxxxxxxxxx\n'
+            'end\n', one_byte_at_a_time=True)
+        self.assertEqual('ok\nfake forking\n', response)
+        self.assertEqual([(['rocks'], {'BZR_EMAIL': 'joe@xxxxxxxxxxx'})],
+                         self.service.fork_log)
+
+    def test_send_incomplete_fork_env_timeout(self):
+        # We should get a failure message if we can't quickly read the whole
+        # content
+        response = self.send_message_to_service(
+            'fork-env rocks\n'
+            'BZR_EMAIL: joe@xxxxxxxxxxx\n',
+            one_byte_at_a_time=True)
+        # Note that we *don't* send a final 'end\n'
+        self.assertStartsWith(response, 'FAILURE\n')
+
+    def test_send_incomplete_request_timeout(self):
+        # Requests end with '\n', send one without it
+        response = self.send_message_to_service('hello',
+                                                one_byte_at_a_time=True)
+        self.assertStartsWith(response, 'FAILURE\n')
+
+
+class TestCaseWithSubprocess(tests.TestCaseWithTransport):
+    """Override the bzr start_bzr_subprocess command.
+
+    The Launchpad infrastructure requires a fair amount of configuration to
+    get paths, etc. correct. This provides a "start_bzr_subprocess" command
+    that has all of those paths appropriately set, but otherwise functions
+    the same as the bzrlib.tests.TestCase version.
+    """
+
+    def get_python_path(self):
+        """Return the path to the Python interpreter."""
+        return '%s/bin/py' % config.root
+
+    def start_bzr_subprocess(self, process_args, env_changes=None,
+                             working_dir=None):
+        """Start bzr in a subprocess for testing.
+
+        Copied and modified from `bzrlib.tests.TestCase.start_bzr_subprocess`.
+        This version removes some of the skipping stuff, some of the
+        irrelevant comments (e.g. about win32) and uses Launchpad's own
+        mechanisms for getting the path to 'bzr'.
+
+        Comments starting with 'LAUNCHPAD' are comments about our
+        modifications.
+        """
+        if env_changes is None:
+            env_changes = {}
+        env_changes['BZR_PLUGIN_PATH'] = get_BZR_PLUGIN_PATH_for_subprocess()
+        old_env = {}
+
+        def cleanup_environment():
+            for env_var, value in env_changes.iteritems():
+                old_env[env_var] = osutils.set_or_unset_env(env_var, value)
+
+        def restore_environment():
+            for env_var, value in old_env.iteritems():
+                osutils.set_or_unset_env(env_var, value)
+
+        cwd = None
+        if working_dir is not None:
+            cwd = osutils.getcwd()
+            os.chdir(working_dir)
+
+        # LAUNCHPAD: Because of buildout, we need to get a custom Python
+        # binary, not sys.executable.
+        python_path = self.get_python_path()
+        # LAUNCHPAD: We can't use self.get_bzr_path(), since it'll find
+        # lib/bzrlib, rather than the path to sourcecode/bzr/bzr.
+        bzr_path = get_bzr_path()
+        try:
+            cleanup_environment()
+            command = [python_path, bzr_path]
+            command.extend(process_args)
+            process = self._popen(
+                command, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE)
+        finally:
+            restore_environment()
+            if cwd is not None:
+                os.chdir(cwd)
+
+        return process
+
+
+class TestCaseWithLPForkingServiceSubprocess(TestCaseWithSubprocess):
+    """Tests will get a separate process to communicate to.
+
+    The number of these tests should be small, because it is expensive to start
+    and stop the daemon.
+
+    TODO: This should probably use testresources, or layers somehow...
+    """
+
+    def setUp(self):
+        super(TestCaseWithLPForkingServiceSubprocess, self).setUp()
+        (self.service_process,
+         self.service_path) = self.start_service_subprocess()
+        self.addCleanup(self.stop_service)
+
+    def start_conversation(self, message, one_byte_at_a_time=False):
+        """Start talking to the service, and get the initial response."""
+        client_sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+        trace.mutter('sending %r to socket %s' % (message, self.service_path))
+        client_sock.connect(self.service_path)
+        if one_byte_at_a_time:
+            for byte in message:
+                client_sock.send(byte)
+        else:
+            client_sock.sendall(message)
+        response = client_sock.recv(1024)
+        trace.mutter('response: %r' % (response,))
+        if response.startswith("FAILURE"):
+            raise RuntimeError('Failed to send message: %r' % (response,))
+        return response, client_sock
+
+    def send_message_to_service(self, message, one_byte_at_a_time=False):
+        response, client_sock = self.start_conversation(message,
+            one_byte_at_a_time=one_byte_at_a_time)
+        client_sock.close()
+        return response
+
+    def send_fork_request(self, command, env=None):
+        if env is not None:
+            request_lines = ['fork-env %s\n' % (command,)]
+            for key, value in env.iteritems():
+                request_lines.append('%s: %s\n' % (key, value))
+            request_lines.append('end\n')
+            request = ''.join(request_lines)
+        else:
+            request = 'fork %s\n' % (command,)
+        response, sock = self.start_conversation(request)
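+        # The expected response is 'ok\n<pid>\n<path>\n', where <path> names
+        # the directory holding the child's stdin/stdout/stderr fifos.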
+        ok, pid, path, tail = response.split('\n')
+        self.assertEqual('ok', ok)
+        self.assertEqual('', tail)
+        # Don't really care what it is, but should be an integer
+        pid = int(pid)
+        path = path.strip()
+        self.assertContainsRe(path, '/lp-forking-service-child-')
+        return path, pid, sock
+
+    def start_service_subprocess(self):
+        # Make sure this plugin is exposed to the subprocess
+        # Starting the subprocess is SLOOWWW (~2 seconds), which is why the
+        # forking service exists in the first place.
+        fd, tempname = tempfile.mkstemp(prefix='tmp-log-bzr-lp-forking-')
+        # I'm not 100% sure about when cleanup runs versus addDetail, but I
+        # think this will work.
+        self.addCleanup(os.remove, tempname)
+
+        def read_log():
+            f = os.fdopen(fd)
+            f.seek(0)
+            content = f.read()
+            f.close()
+            return [content]
+        self.addDetail('server-log', content.Content(
+            content.ContentType('text', 'plain', {"charset": "utf8"}),
+            read_log))
+        service_fd, path = tempfile.mkstemp(prefix='tmp-lp-service-',
+                                            suffix='.sock')
+        os.close(service_fd)
+        os.remove(path)  # The service wants to create it as a socket.
+        env_changes = {'BZR_PLUGIN_PATH': lpserve.__path__[0],
+                       'BZR_LOG': tempname}
+        proc = self.start_bzr_subprocess(
+            ['lp-service', '--path', path, '--no-preload',
+             '--children-timeout=1'],
+            env_changes=env_changes)
+        trace.mutter('started lp-service subprocess')
+        expected = 'Listening on socket: %s\n' % (path,)
+        path_line = proc.stderr.readline()
+        trace.mutter(path_line)
+        self.assertEqual(expected, path_line)
+        # The process won't delete it, so we do
+        return proc, path
+
+    def stop_service(self):
+        if self.service_process is None:
+            # Already stopped
+            return
+        # First, try to stop the service gracefully, by sending a 'quit'
+        # message
+        try:
+            response = self.send_message_to_service('quit\n')
+        except socket.error, e:
+            # Ignore a failure to connect; the service must already be
+            # stopping or stopped.
+            response = None
+        tend = time.time() + 10.0
+        while self.service_process.poll() is None:
+            if time.time() > tend:
+                self.finish_bzr_subprocess(process=self.service_process,
+                    send_signal=signal.SIGINT, retcode=3)
+                self.fail('Failed to quit gracefully after 10.0 seconds')
+            time.sleep(0.1)
+        if response is not None:
+            self.assertEqual('ok\nquit command requested... exiting\n',
+                             response)
+
+    def _get_fork_handles(self, path):
+        trace.mutter('getting handles for: %s' % (path,))
+        stdin_path = os.path.join(path, 'stdin')
+        stdout_path = os.path.join(path, 'stdout')
+        stderr_path = os.path.join(path, 'stderr')
+        # The ordering must match the ordering of the service or we get a
+        # deadlock.
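+        # (Opening a fifo blocks until the other end is opened too, so a
+        # mismatched order leaves both processes waiting on each other.)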
+        child_stdin = open(stdin_path, 'wb')
+        child_stdout = open(stdout_path, 'rb')
+        child_stderr = open(stderr_path, 'rb')
+        return child_stdin, child_stdout, child_stderr
+
+    def communicate_with_fork(self, path, stdin=None):
+        child_stdin, child_stdout, child_stderr = self._get_fork_handles(path)
+        if stdin is not None:
+            child_stdin.write(stdin)
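+        # Closing stdin signals EOF to the child; its stdout and stderr then
+        # close, so the reads below can complete.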
+        child_stdin.close()
+        stdout_content = child_stdout.read()
+        stderr_content = child_stderr.read()
+        return stdout_content, stderr_content
+
+    def assertReturnCode(self, expected_code, sock):
+        """Assert that we get the expected return code as a message."""
+        response = sock.recv(1024)
+        self.assertStartsWith(response, 'exited\n')
+        code = int(response.split('\n', 1)[1])
+        self.assertEqual(expected_code, code)
+
+    def test_fork_lp_serve_hello(self):
+        path, _, sock = self.send_fork_request('lp-serve --inet 2')
+        stdout_content, stderr_content = self.communicate_with_fork(path,
+            'hello\n')
+        self.assertEqual('ok\x012\n', stdout_content)
+        self.assertEqual('', stderr_content)
+        self.assertReturnCode(0, sock)
+
+    def test_fork_replay(self):
+        path, _, sock = self.send_fork_request('launchpad-replay')
+        stdout_content, stderr_content = self.communicate_with_fork(path,
+            '1 hello\n2 goodbye\n1 maybe\n')
+        self.assertEqualDiff('hello\nmaybe\n', stdout_content)
+        self.assertEqualDiff('goodbye\n', stderr_content)
+        self.assertReturnCode(0, sock)
+
+    def test_just_run_service(self):
+        # Start and stop are defined in setUp()
+        pass
+
+    def test_fork_multiple_children(self):
+        paths = []
+        for idx in range(4):
+            paths.append(self.send_fork_request('launchpad-replay'))
+        # Do them out of order, as order shouldn't matter.
+        for idx in [3, 2, 0, 1]:
+            p, pid, sock = paths[idx]
+            stdout_msg = 'hello %d\n' % (idx,)
+            stderr_msg = 'goodbye %d\n' % (idx+1,)
+            stdout, stderr = self.communicate_with_fork(p,
+                '1 %s2 %s' % (stdout_msg, stderr_msg))
+            self.assertEqualDiff(stdout_msg, stdout)
+            self.assertEqualDiff(stderr_msg, stderr)
+            self.assertReturnCode(0, sock)
+
+    def test_fork_respects_env_vars(self):
+        path, pid, sock = self.send_fork_request('whoami',
+            env={'BZR_EMAIL': 'this_test@xxxxxxxxxxx'})
+        stdout_content, stderr_content = self.communicate_with_fork(path)
+        self.assertEqual('', stderr_content)
+        self.assertEqual('this_test@xxxxxxxxxxx\n', stdout_content)
+
+    def _check_exits_nicely(self, sig_id):
+        path, _, sock = self.send_fork_request('rocks')
+        self.assertEqual(None, self.service_process.poll())
+        # Now when we send the signal, the service should wait for the child
+        # to exit before it tries to exit itself.
+        # In python2.6+ we could use self.service_process.terminate()
+        os.kill(self.service_process.pid, sig_id)
+        self.assertEqual(None, self.service_process.poll())
+        # Now talk to the child, so the service can close
+        stdout_content, stderr_content = self.communicate_with_fork(path)
+        self.assertEqual('It sure does!\n', stdout_content)
+        self.assertEqual('', stderr_content)
+        self.assertReturnCode(0, sock)
+        # And the process should exit cleanly
+        self.assertEqual(0, self.service_process.wait())
+
+    def test_sigterm_exits_nicely(self):
+        self._check_exits_nicely(signal.SIGTERM)
+
+    def test_sigint_exits_nicely(self):
+        self._check_exits_nicely(signal.SIGINT)

=== modified file 'configs/README.txt'
--- configs/README.txt	2009-04-29 19:10:17 +0000
+++ configs/README.txt	2010-11-07 00:31:57 +0000
@@ -281,9 +281,13 @@
         |         |
         |         + authserver-lazr.conf
         |         |
+        |         + testrunner_\d+/launchpad-lazr.conf
+        |         |
         |         + testrunner-appserver/launchpad-lazr.conf
         |             |
         |             + authserver-lazr.conf
+        |             |
+        |             + testrunner-appserver_\d+/launchpad-lazr.conf
         |
         + staging-lazr.conf
         |    |
@@ -295,10 +299,6 @@
         |    |
         |    + staging-mailman/launchpad-lazr.conf
         |
-        + edge-lazr.conf
-        |    |
-        |    + edge<1-4>/launchpad-lazr.conf
-        |
         + lpnet-lazr.conf
         |    |
         |    + lpnet<1-8>/launchpad-lazr.conf

=== renamed file 'configs/development/apidoc-configure-normal.zcml.OFF' => 'configs/development/apidoc-configure-normal.zcml'
--- configs/development/apidoc-configure-normal.zcml.OFF	2008-09-02 16:03:35 +0000
+++ configs/development/apidoc-configure-normal.zcml	2010-11-07 00:31:57 +0000
@@ -1,48 +1,130 @@
 <configure
     xmlns="http://namespaces.zope.org/zope";
-    xmlns:meta="http://namespaces.zope.org/meta";>
-    <!-- These packages are required by apidoc. If they are deemed
-    generally useful, move them to zopeapp.zcml
-    -->
-    <!--
-    How frustrating.  This is needed for just one page registration
-    in introspector.zcml.
-    -->
-    <include package="zope.app" file="menus.zcml" />
-    <include package="zope.app.tree.browser" />
-    <include package="zope.app.tree" />  
-    <include package="zope.app.renderer" file="meta.zcml" />
-    <include package="zope.app.renderer" />
-
-    <!-- XXX: StuartBishop 2005-03-13 bug=39834:
-    We also need the Z3 preference junk, whatever that is.
-    Unfortunately, this depends on annotatable principals so will not
-    currently work with Launchpad. We can still get some apidoc functionality,
-    such as the ZCML browser, but the bulk of it is not functional.
-    -->
+    xmlns:browser="http://namespaces.zope.org/browser";
+    xmlns:meta="http://namespaces.zope.org/meta";
+    xmlns:i18n="http://namespaces.zope.org/i18n";
+    xmlns:apidoc="http://namespaces.zope.org/apidoc";
+    i18n_domain="canonical">
+
+    <!-- These packages/declarations are required by apidoc. If they are
+      deemed generally useful, move them to zopeapp.zcml -->
+
+    <browser:menu
+        id="zmi_views"
+        title="Views"
+        description="Menu for displaying alternate representations of an object"
+        />
+    <browser:menu
+        id="zmi_actions"
+        title="Actions"
+        description="Menu for displaying actions to be performed"
+        />
+
+    <!-- Use the default IAbsoluteURL adapter for requests on the apidoc
+         vhost. -->
+    <adapter
+        for="zope.interface.Interface
+             canonical.launchpad.webapp.servers.APIDocBrowserRequest"
+        provides="zope.traversing.browser.interfaces.IAbsoluteURL"
+        factory="zope.traversing.browser.AbsoluteURL"
+        />
+
+    <view
+      for="zope.container.interfaces.IReadContainer"
+      type="zope.publisher.interfaces.http.IHTTPRequest"
+      provides="zope.publisher.interfaces.IPublishTraverse"
+      factory="zope.container.traversal.ContainerTraverser"
+      permission="zope.Public"
+      allowed_interface="zope.publisher.interfaces.IPublishTraverse"
+      />
+
+    <utility
+        component="canonical.launchpad.systemhomes.apidocroot"
+        provides="canonical.launchpad.webapp.interfaces.IAPIDocRoot" />
+
+    <adapter factory="canonical.launchpad.webapp.authentication.TemporaryPrincipalAnnotations" />
+    <adapter
+      factory="canonical.launchpad.webapp.authentication.TemporaryUnauthenticatedPrincipalAnnotations" />
+
+    <class class="canonical.launchpad.webapp.servers.LaunchpadBrowserRequest">
+      <implements interface="zope.app.apidoc.browser.skin.APIDOC" />
+    </class>
+
+    <!-- apidoc.lp.dev breaks if we make IAPIDocRoot subclass ISite, so we
+      need to register this view here. -->
+    <view
+        for="canonical.launchpad.webapp.interfaces.IAPIDocRoot"
+        type="zope.publisher.interfaces.browser.IDefaultBrowserLayer"
+        name=""
+        factory="zope.browserresource.resources.Resources"
+        permission="zope.Public"
+        allowed_interface="zope.publisher.interfaces.browser.IBrowserPublisher"
+        />
+
+    <browser:defaultView
+        for="canonical.launchpad.webapp.interfaces.IAPIDocRoot"
+        name="++apidoc++"
+        />
+
+    <!-- Turn on devmode for the following includes to work -->
+    <meta:provides feature="devmode" />
+
     <include package="zope.app.preference" file="meta.zcml" />
-    <!--
+    <include package="zope.app.apidoc.codemodule" file="meta.zcml" />
+    <include package="zope.app.apidoc.bookmodule" file="meta.zcml" />
+    <include package="zope.app.onlinehelp" file="meta.zcml" />
+
+    <include package="zope.app.apidoc" />
+    <include package="zope.app.applicationcontrol" />
+    <include package="zope.app.onlinehelp" />
     <include package="zope.app.preference" />
-    -->
-
-
-    <!-- Turn on devmode for the following includes principal=work -->
-    <meta:provides feature="devmode" />
-
-    <include package="zope.app.apidoc" file="meta.zcml" />
-    <include package="zope.app.apidoc" />
-
-    <meta:redefinePermission
-        from="zope.app.apidoc.UseAPIDoc"  to="zope.Public"
-        />
-
-    <!-- Override a strange permission in apidoc -->
-    <class class="zope.app.apidoc.apidoc.APIDocumentation">
-        <require
-            interface="zope.app.container.interfaces.IReadContainer"
-            permission="zope.Public"
-            />
-    </class>
+    <include package="zope.app.renderer" />
+    <include package="zope.app.tree" />
+    <include package="zope.location" />
+
+    <apidoc:rootModule module="canonical" />
+    <apidoc:rootModule module="lp" />
+    <apidoc:rootModule module="lazr" />
+    <apidoc:rootModule module="zc" />
+    <apidoc:rootModule module="wadllib" />
+    <apidoc:rootModule module="martian" />
+    <apidoc:rootModule module="manuel" />
+    <apidoc:rootModule module="chameleon" />
+    <apidoc:rootModule module="storm" />
+
+    <apidoc:bookchapter
+        id="lp"
+        title="Launchpad"
+        />
+    <apidoc:bookchapter
+        id="dbpolicy"
+        title="Storm Stores and Database Policies"
+        doc_path="../../lib/canonical/launchpad/doc/db-policy.txt"
+        parent="lp"
+        />
+    <apidoc:bookchapter
+        id="memcachetales"
+        title="Memcache Tales Expressions"
+        doc_path="../../lib/lp/services/memcache/doc/tales-cache.txt"
+        parent="lp"
+        />
+    <apidoc:bookchapter
+        id="sprites"
+        title="Image Sprites"
+        doc_path="../../lib/lp/services/doc/sprites.txt"
+        parent="lp"
+        />
+    <apidoc:bookchapter
+        id="buildout"
+        title="Buildout"
+        doc_path="../../doc/buildout.txt"
+        parent="lp"
+        />
+    <apidoc:bookchapter
+        id="profiling"
+        title="Profiling"
+        doc_path="../../lib/canonical/launchpad/doc/profiling.txt"
+        parent="lp"
+        />
 
 </configure>
-

=== modified file 'configs/development/launchpad-lazr.conf'
--- configs/development/launchpad-lazr.conf	2009-08-19 12:28:32 +0000
+++ configs/development/launchpad-lazr.conf	2010-11-07 00:31:57 +0000
@@ -7,6 +7,7 @@
 
 [archivepublisher]
 root: /var/tmp/archive
+base_url: http://archive.launchpad.dev/
 
 [branchscanner]
 oops_prefix: BS
@@ -14,7 +15,8 @@
 
 [builddmaster]
 root: /var/tmp/builddmaster/
-uploader: /bin/echo Uploader invocation of build BUILDID in:
+uploader: scripts/process-upload.py -Mvv
+bzr_builder_sources_list: None
 
 [buildsequencer]
 mailproblemsto: root
@@ -23,6 +25,9 @@
 [buildsequencer_job.slave_scanner]
 mindelay: 10
 
+[build_from_branch]
+enabled = True
+
 [bzr_lpserve]
 error_dir: /var/tmp/codehosting.test
 oops_prefix: BZR
@@ -43,16 +48,18 @@
 bugzilla-3.4.example.com.password: test
 
 [codebrowse]
-cachepath: /var/tmp/codebrowse.launchpad.dev/cache
-log_folder:
+cachepath: /var/tmp/bazaar.launchpad.dev/cache
+log_folder: /var/tmp/bazaar.launchpad.dev/logs
 launchpad_root: https://code.launchpad.dev/
 secret_path: configs/development/codebrowse-secret
+error_dir: /var/tmp/codebrowse.launchpad.dev/errors
+oops_prefix: CB
+copy_to_zlog: false
 
 [codehosting]
 launch: True
 authentication_endpoint: http://xmlrpc-private.launchpad.dev:8087/authserver
-branchfs_endpoint: http://xmlrpc-private.launchpad.dev:8087/branchfilesystem
-branch_puller_endpoint: http://xmlrpc-private.launchpad.dev:8087/branch_puller
+codehosting_endpoint: http://xmlrpc-private.launchpad.dev:8087/codehosting
 supermirror_root: http://bazaar.launchpad.dev/
 hosted_branches_root: /var/tmp/bazaar.launchpad.dev/push-branches/
 codebrowse_root: http://bazaar.launchpad.dev/
@@ -69,6 +76,7 @@
 lp_url_hosts: dev
 access_log: /var/tmp/bazaar.launchpad.dev/codehosting-access.log
 blacklisted_hostnames:
+use_forking_daemon: True
 
 [codeimport]
 bazaar_branch_store: file:///tmp/bazaar-branches
@@ -90,10 +98,17 @@
 
 
 [database]
-main_master: dbname=launchpad_dev
-main_slave:  dbname=launchpad_dev
-auth_master: dbname=launchpad_dev
-auth_slave:  dbname=launchpad_dev
+rw_main_master: dbname=launchpad_dev
+rw_main_slave:  dbname=launchpad_dev
+# Use our _template databases here just so that we have different values from
+# the rw_* configs.
+ro_main_master: dbname=launchpad_dev_template
+ro_main_slave:  dbname=launchpad_dev_template
+
+# XXX stub 20100407 bug=557271: These next two are ignored, and should
+# be removed after the May 2010 rollout.
+auth_master: bug 557271
+auth_slave:  bug 557271
 
 [distributionmirrorprober]
 use_proxy: False
@@ -118,19 +133,27 @@
 host: keyserver.launchpad.dev
 public_host: keyserver.launchpad.dev
 
+[initialisedistroseries]
+oops_prefix: IDSJ
+error_dir: /var/tmp/soyuz.test
+
 [launchpad]
+enable_test_openid_provider: True
+openid_provider_vhost: testopenid
 code_domain: code.launchpad.dev
 default_batch_size: 5
 max_attachment_size: 2097152
 branchlisting_batch_size: 6
+mugshot_batch_size: 8
+announcement_batch_size: 4
+download_batch_size: 4
+summary_list_size: 5
 openid_preauthorization_acl:
     localhost http://launchpad.dev/
 max_bug_feed_cache_minutes: 30
 bzr_imports_root_url: file:///tmp/bazaar-branches
 geoip_database: /usr/share/GeoIP/GeoLiteCity.dat
 geonames_identity: lpdev
-# Set to True to test read-only mode.
-read_only: False
 storm_cache: generational
 storm_cache_size: 100
 
@@ -145,10 +168,14 @@
 restricted_upload_port: 58095
 restricted_download_port: 58085
 restricted_download_url: http://launchpad.dev:58085/
+use_https = False
+oops_prefix: L
+error_dir: /var/tmp/codehosting.test
 
 [librarian_server]
 root: /var/tmp/fatsam
 launch: True
+logfile: librarian.log
 
 [malone]
 bugmail_error_from_address: noreply@xxxxxxxxxxxxxxxxxx
@@ -157,8 +184,11 @@
 bugnotification_interval: 1
 search_comments: True
 debbugs_db_location: lib/canonical/launchpad/scripts/tests
-comments_list_max_length: 7
-comments_list_truncate_to: 4
+comments_list_max_length: 12
+comments_list_truncate_oldest_to: 4
+comments_list_truncate_newest_to: 6
+
+ubuntu_disable_filebug: false
 
 [mailman]
 launch: True
@@ -178,18 +208,52 @@
 soft_max_size: 40000
 hard_max_size: 1000000
 
+[memcache]
+servers: (127.0.0.1:11217,1)
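+# Entries are presumably (host:port,weight) pairs; the weight only matters
+# when more than one server is listed.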
+
+[memcached]
+launch: True
+verbose: False
+address: 127.0.0.1
+port: 11217
+memory_size: 1
+
+[merge_proposal_jobs]
+error_dir: /var/tmp/codehosting.test
+oops_prefix: DMPJ
+
 [personalpackagearchive]
 root: /var/tmp/ppa/
 private_root: /var/tmp/ppa
 base_url: http://ppa.launchpad.dev
 private_base_url: http://private-ppa.launchpad.dev
 
+[poppy]
+authentication_endpoint: http://xmlrpc-private.launchpad.dev:8087/authserver
+host_key_private=lib/lp/poppy/tests/poppy-sftp
+host_key_public=lib/lp/poppy/tests/poppy-sftp.pub
+
 [reclaimbranchspace]
 error_dir: /var/tmp/codehosting.test
 oops_prefix: RBS
 
 [rosetta]
 global_suggestions_enabled: True
+generate_templates: True
+
+[rosettabranches]
+error_dir: /var/tmp/rosettabranches.test
+oops_prefix: RSBR
+
+[poimport]
+error_dir: /var/tmp/poimport
+oops_prefix: POI
+
+[process_apport_blobs]
+error_dir: /var/tmp/lperr
+
+[profiling]
+profiling_allowed: True
 
 [supermirror_puller]
 error_dir: /var/tmp/codehosting.test
@@ -207,6 +271,10 @@
 error_dir: /var/tmp/codehosting.test
 oops_prefix: USMP
 
+[upgrade_branches]
+oops_prefix: UBJD
+error_dir: /var/tmp/codehosting.test
+
 [uploader]
 default_recipient_name: Local Root
 default_sender_address: root@localhost
@@ -223,7 +291,10 @@
 
 [vhost.api]
 hostname: api.launchpad.dev
-rooturl: https://api.launchpad.dev/beta/
+rooturl: https://api.launchpad.dev/
+# Turn this on once we've solved cache invalidation problems and are
+# ready to test.
+# enable_server_side_representation_cache: True
 
 [vhost.blueprints]
 hostname: blueprints.launchpad.dev
@@ -243,6 +314,15 @@
 [vhost.openid]
 hostname: openid.launchpad.dev
 
+[vhost.apidoc]
+hostname: apidoc.launchpad.dev
+
+[vhost.testopenid]
+hostname: testopenid.dev
+
+[vhost.ubuntu_openid]
+hostname: ubuntu-openid.launchpad.dev
+
 [vhost.shipitubuntu]
 hostname: shipit.ubuntu.dev
 
@@ -262,6 +342,9 @@
 [vhost.feeds]
 hostname: feeds.launchpad.dev
 
+[vhost.vostok]
+hostname: vostok.dev
+
 [zopeless]
 # XXX sinzui 2008-03-26:
 # A development box should never send email to the outer world,

=== modified file 'configs/development/launchpad.conf'
--- configs/development/launchpad.conf	2009-06-12 16:36:02 +0000
+++ configs/development/launchpad.conf	2010-11-07 00:31:57 +0000
@@ -19,7 +19,7 @@
 # an exception, Zope will drop into pdb at the point of the exception.
 <server>
   type PostmortemDebuggingHTTP
-  address 8089
+  address 8088
 </server>
 
 <server>
@@ -66,7 +66,7 @@
 </eventlog>
 
 <logger>
-  name zc.zservertracelog
+  name zc.tracelog
   propagate false
 
   <logfile>

=== modified file 'configs/development/local-launchpad-apache'
--- configs/development/local-launchpad-apache	2009-07-24 01:57:06 +0000
+++ configs/development/local-launchpad-apache	2010-11-07 00:31:57 +0000
@@ -112,6 +112,19 @@
   </Directory>
 </VirtualHost>
 
+<VirtualHost 127.0.0.88:80>
+  ServerName archive.launchpad.dev
+  LogLevel debug
+
+  DocumentRoot /var/tmp/archive
+  <Directory /var/tmp/archive/>
+    Order Deny,Allow
+    Deny from all
+    Allow from 127.0.0.0/255.0.0.0
+    Options Indexes
+  </Directory>
+</VirtualHost>
+
 <VirtualHost 127.0.0.88:443>
   ServerName launchpad.dev
   ServerAlias *.launchpad.dev

=== added file 'configs/development/local-vostok-apache'
--- configs/development/local-vostok-apache	1970-01-01 00:00:00 +0000
+++ configs/development/local-vostok-apache	2010-11-07 00:31:57 +0000
@@ -0,0 +1,52 @@
+NameVirtualHost 127.0.0.77:80
+
+RewriteLock /var/tmp/vostok-rewrite-lock
+
+<VirtualHost 127.0.0.77:80>
+  ServerName archive.vostok.dev
+  LogLevel debug
+
+  DocumentRoot /var/tmp/vostok-archive
+  <Directory /var/tmp/vostok-archive/>
+    Order Deny,Allow
+    Deny from all
+    Allow from 127.0.0.0/255.0.0.0
+    Options Indexes
+  </Directory>
+</VirtualHost>
+
+<VirtualHost 127.0.0.77:443>
+  ServerName vostok.dev
+  ServerAlias *.vostok.dev
+  <Proxy *>
+    Order deny,allow
+    Allow from 127.0.0.0/255.0.0.0
+  </Proxy>
+  SSLEngine On
+  SSLCertificateFile /etc/apache2/ssl/launchpad.crt
+  SSLCertificateKeyFile /etc/apache2/ssl/launchpad.key
+
+  ProxyPreserveHost on
+  ProxyPass / http://localhost:8086/ retry=1
+
+  <Location />
+    # Insert filter
+    SetOutputFilter DEFLATE
+
+    # Don't compress images
+    SetEnvIfNoCase Request_URI \
+    \.(?:gif|jpe?g|png)$ no-gzip dont-vary
+
+    # Don't gzip anything that starts /@@/ and doesn't end .js (ie images)
+    SetEnvIfNoCase Request_URI ^/@@/ no-gzip dont-vary
+    SetEnvIfNoCase Request_URI ^/@@/.*\.js$ !no-gzip !dont-vary
+  </Location>
+
+</VirtualHost>
+
+<VirtualHost 127.0.0.77:80>
+  ServerName vostok.dev
+  ServerAlias *.vostok.dev
+  RewriteEngine On
+  RewriteRule (.*) https://%{HTTP_HOST}%{REQUEST_URI} [L,R=301]
+</VirtualHost>

=== modified file 'configs/development/salesforce-configure-normal.zcml'
--- configs/development/salesforce-configure-normal.zcml	2009-07-13 18:15:02 +0000
+++ configs/development/salesforce-configure-normal.zcml	2010-11-07 00:31:57 +0000
@@ -9,18 +9,18 @@
     xmlns:zope="http://namespaces.zope.org/zope";
     i18n_domain="launchpad">
 
-    <class class="lp.registry.utilities.salesforce.SalesforceVoucherProxy">
-        <allow interface="lp.registry.interfaces.salesforce.ISalesforceVoucherProxy" />
+    <class class="lp.services.salesforce.proxy.SalesforceVoucherProxy">
+        <allow interface="lp.services.salesforce.interfaces.ISalesforceVoucherProxy" />
     </class>
 
-    <class class="lp.registry.utilities.salesforce.Voucher">
-        <allow attributes="id project status term __str__" />
+    <class class="lp.services.salesforce.proxy.Voucher">
+        <allow attributes="voucher_id project status term_months __str__" />
     </class>
 
    <securedutility
-        class="lp.registry.tests.salesforce.TestSalesforceVoucherProxy"
-        provides="lp.registry.interfaces.salesforce.ISalesforceVoucherProxy">
-        <allow interface="lp.registry.interfaces.salesforce.ISalesforceVoucherProxy" />
+        class="lp.services.salesforce.tests.proxy.TestSalesforceVoucherProxy"
+        provides="lp.services.salesforce.interfaces.ISalesforceVoucherProxy">
+        <allow interface="lp.services.salesforce.interfaces.ISalesforceVoucherProxy" />
     </securedutility>
 
 </configure>

=== modified file 'configs/replicated-development/launchpad-lazr.conf'
--- configs/replicated-development/launchpad-lazr.conf	2008-10-14 11:10:35 +0000
+++ configs/replicated-development/launchpad-lazr.conf	2010-11-07 00:31:57 +0000
@@ -6,8 +6,7 @@
 extends: ../development/launchpad-lazr.conf
 
 [database]
-main_master: dbname=launchpad_dev
-main_slave: dbname=launchpad_dev_slave
-auth_master: dbname=launchpad_dev
-auth_slave: dbname=launchpad_dev_slave
-
+rw_main_master: dbname=launchpad_dev
+rw_main_slave: dbname=launchpad_dev_slave
+ro_main_master: dbname=launchpad_dev
+ro_main_slave: dbname=launchpad_dev_slave

=== modified file 'configs/test-playground/launchpad-lazr.conf'
--- configs/test-playground/launchpad-lazr.conf	2008-11-10 16:12:10 +0000
+++ configs/test-playground/launchpad-lazr.conf	2010-11-07 00:31:57 +0000
@@ -6,7 +6,7 @@
 extends: ../development/launchpad-lazr.conf
 
 [database]
-main_master: dbname=launchpad_ftest_playground
-main_slave:  dbname=launchpad_ftest_playground
-auth_master: dbname=launchpad_ftest_playground
-auth_slave:  dbname=launchpad_ftest_playground
+rw_main_master: dbname=launchpad_ftest_playground
+rw_main_slave:  dbname=launchpad_ftest_playground
+ro_main_master: dbname=launchpad_ftest_playground
+ro_main_slave:  dbname=launchpad_ftest_playground

=== modified file 'configs/test-playground/launchpad.conf'
--- configs/test-playground/launchpad.conf	2008-11-10 16:12:10 +0000
+++ configs/test-playground/launchpad.conf	2010-11-07 00:31:57 +0000
@@ -66,7 +66,7 @@
 </eventlog>
 
 <logger>
-  name zc.zservertracelog
+  name zc.tracelog
   propagate false
 
   <logfile>

=== modified file 'configs/testrunner-appserver/launchpad-lazr.conf'
--- configs/testrunner-appserver/launchpad-lazr.conf	2009-04-29 19:10:17 +0000
+++ configs/testrunner-appserver/launchpad-lazr.conf	2010-11-07 00:31:57 +0000
@@ -23,7 +23,7 @@
 rooturl: http://launchpad.dev:8085/
 
 [vhost.api]
-rooturl: http://api.launchpad.dev:8085/beta/
+rooturl: http://api.launchpad.dev:8085/
 
 [vhost.blueprints]
 rooturl: http://blueprints.launchpad.dev:8085/
@@ -43,6 +43,9 @@
 [vhost.openid]
 rooturl: http://openid.launchpad.dev:8085/
 
+[vhost.testopenid]
+rooturl: http://testopenid.dev:8085/
+
 [vhost.shipitubuntu]
 rooturl: http://shipit.ubuntu.dev:8085/
 

=== modified file 'configs/testrunner-appserver/launchpad.conf'
--- configs/testrunner-appserver/launchpad.conf	2009-05-12 21:22:02 +0000
+++ configs/testrunner-appserver/launchpad.conf	2010-11-07 00:31:57 +0000
@@ -39,7 +39,7 @@
 </eventlog>
 
 <logger>
-  name zc.zservertracelog
+  name zc.tracelog
   propagate false
 
   <logfile>

=== added file 'configs/testrunner-appserver/yui-unittest.zcml'
--- configs/testrunner-appserver/yui-unittest.zcml	1970-01-01 00:00:00 +0000
+++ configs/testrunner-appserver/yui-unittest.zcml	2010-11-07 00:31:57 +0000
@@ -0,0 +1,16 @@
+<!-- Copyright 2010 Canonical Ltd.  This software is licensed under the
+     GNU Affero General Public License version 3 (see the file LICENSE).
+-->
+
+<configure
+    xmlns="http://namespaces.zope.org/zope";
+    xmlns:browser="http://namespaces.zope.org/browser";>
+
+    <browser:page
+        name="+yui-unittest"
+        for="canonical.launchpad.webapp.interfaces.ILaunchpadRoot"
+        class="lp.testing.views.YUITestFileView"
+        attribute="__call__"
+        permission="zope.Public"/>
+
+</configure>

=== added symlink 'configs/testrunner/apidoc-configure-normal.zcml'
=== target is u'../development/apidoc-configure-normal.zcml'
=== modified file 'configs/testrunner/launchpad-lazr.conf'
--- configs/testrunner/launchpad-lazr.conf	2009-08-28 21:02:50 +0000
+++ configs/testrunner/launchpad-lazr.conf	2010-11-07 00:31:57 +0000
@@ -7,6 +7,10 @@
 
 [canonical]
 chunkydiff: False
+cron_control_url: file:lib/lp/services/scripts/tests/cronscripts.ini
+
+[archivepublisher]
+base_url: http://ftpmaster.internal/
 
 [branchscanner]
 oops_prefix: TSMS
@@ -14,7 +18,7 @@
 
 [builddmaster]
 socket_timeout: 10
-uploader: scripts/process-upload.py -Mvv --context buildd
+uploader: scripts/process-upload.py -Mvv
 
 [buildsequencer]
 mailproblemsto: -
@@ -22,6 +26,9 @@
 [buildsequencer_job.slave_scanner]
 mindelay: 5
 
+[build_from_branch]
+enabled = True
+
 [checkwatches]
 sync_debbugs_comments: True
 oops_prefix: TCW
@@ -31,21 +38,23 @@
 bzr_lp_prefix: lp://dev/
 hosted_branches_root: /tmp/sftp-test/branches
 host_key_pair_path: lib/lp/codehosting/sshserver/tests/keys
-port: tcp:22222:interface=127.0.0.1
+port: tcp:22222:interface=bazaar.launchpad.dev
 error_dir: /var/tmp/codehosting.test
 oops_prefix: SMPSSH
 access_log: /tmp/test-codehosting-access.log
-internal_branch_by_id_root: file:///var/tmp/bzrsync/
+internal_branch_by_id_root: file:///var/tmp/bazaar.launchpad.dev/mirrors
 
 [create_merge_proposals]
 oops_prefix: TMPCJ
 error_dir: /var/tmp/codehosting.test
 
 [database]
-main_master: dbname=launchpad_ftest
-main_slave:  dbname=launchpad_ftest
-auth_master: dbname=launchpad_ftest
-auth_slave:  dbname=launchpad_ftest
+rw_main_master: dbname=launchpad_ftest
+rw_main_slave:  dbname=launchpad_ftest
+# Use our _template databases here just so that we have different values from
+# the rw_* configs.
+ro_main_master: dbname=launchpad_ftest_template
+ro_main_slave:  dbname=launchpad_ftest_template
 randomise_select_results: true
 
 [error_reports]
@@ -122,8 +131,11 @@
 
 [launchpad]
 max_attachment_size: 1024
-bzr_imports_root_url: http://localhost:10899
 geoip_database: /usr/share/GeoIP/GeoLiteCity.dat
+logparser_max_parsed_lines: 100000
+# We use the stub Google Service here, which maps URL fragments
+# to static content.
+homepage_recent_posts_feed: http://launchpad.dev:8092/blog-feed
 
 [launchpad_session]
 cookie: launchpad_tests
@@ -154,18 +166,49 @@
 bugnotification_interval: 5
 debbugs_db_location: lib/canonical/launchpad/components/ftests/debbugs_db
 
-
-[mpcreationjobs]
-oops_prefix: TMPCJ
+[memcache]
+servers: (127.0.0.1:11242,1)
+
+[memcached]
+# The test suite takes care of launching this as necessary.
+launch: false
+verbose: false
+memory_size: 1
+address: 127.0.0.1
+# We want a different port to ensure we don't pick up stray memcached
+# processes spawned through some other mechanism.
+port: 11242
+
+[merge_proposal_jobs]
+oops_prefix: TMPJ
 error_dir: /var/tmp/codehosting.test
 
-[update_preview_diffs]
-oops_prefix: TUPD
+[upgrade_branches]
+oops_prefix: TUB
 error_dir: /var/tmp/codehosting.test
 
 [personalpackagearchive]
 root: /var/tmp/ppa.test/
 
+[ppa_apache_log_parser]
+logs_root: lib/lp/soyuz/scripts/tests/ppa-apache-log-files
+
+[poimport]
+error_dir: /var/tmp/poimport.test
+oops_prefix: TPOI
+
+[process_apport_blobs]
+dbuser: process-apport-blobs
+oops_prefix: TAPPORTBLOB
+error_dir: /var/tmp/lperr.test
+
+[request_daily_builds]
+oops_prefix: TRDB
+error_dir: /var/tmp/lperr.test
+
+[rosetta]
+generate_templates: True
+
 [rosettabranches]
 oops_prefix: TRSBR
 error_dir: /var/tmp/rosettabranches.test
@@ -194,7 +237,7 @@
 rooturl: http://launchpad.dev/
 
 [vhost.api]
-rooturl: http://api.launchpad.dev/beta/
+rooturl: http://api.launchpad.dev/
 
 [vhosts]
 use_https: False

=== modified file 'cronscripts/allocate-revision-karma.py'
--- cronscripts/allocate-revision-karma.py	2009-06-24 20:52:01 +0000
+++ cronscripts/allocate-revision-karma.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== removed file 'cronscripts/branch-scanner.py'
--- cronscripts/branch-scanner.py	2009-06-24 20:52:01 +0000
+++ cronscripts/branch-scanner.py	1970-01-01 00:00:00 +0000
@@ -1,36 +0,0 @@
-#!/usr/bin/python2.4
-#
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-# pylint: disable-msg=C0103,W0403
-# Author: Gustavo Niemeyer <gustavo@xxxxxxxxxxxx>
-#         David Allouche <david@xxxxxxxxxxxx>
-
-"""Update bzr branches information in the database"""
-
-
-import _pythonpath
-import logging
-
-from lp.codehosting.scanner.branch_scanner import BranchScanner
-from canonical.config import config
-from lp.services.scripts.base import LaunchpadCronScript
-from canonical.launchpad.webapp.errorlog import globalErrorUtility
-
-
-class UpdateBranches(LaunchpadCronScript):
-    def main(self):
-        # We don't want debug messages from bzr at that point.
-        bzr_logger = logging.getLogger("bzr")
-        bzr_logger.setLevel(logging.INFO)
-        globalErrorUtility.configure('branchscanner')
-
-        BranchScanner(self.txn, self.logger).scanAllBranches()
-
-
-if __name__ == '__main__':
-    script = UpdateBranches(
-        "updatebranches", dbuser=config.branchscanner.dbuser)
-    script.lock_and_run()
-

=== modified file 'cronscripts/buildd-queue-builder.py'
--- cronscripts/buildd-queue-builder.py	2009-06-24 20:52:01 +0000
+++ cronscripts/buildd-queue-builder.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'cronscripts/buildd-retry-depwait.py'
--- cronscripts/buildd-retry-depwait.py	2009-06-24 20:52:01 +0000
+++ cronscripts/buildd-retry-depwait.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== removed file 'cronscripts/buildd-slave-scanner.py'
--- cronscripts/buildd-slave-scanner.py	2009-06-24 20:52:01 +0000
+++ cronscripts/buildd-slave-scanner.py	1970-01-01 00:00:00 +0000
@@ -1,27 +0,0 @@
-#!/usr/bin/python2.4
-#
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-# pylint: disable-msg=C0103,W0403
-# Author: Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx>
-#         Celso Providelo <celso.providelo@xxxxxxxxxxxxx>
-#
-# Builder Slave Scanner and result collector
-
-__metaclass__ = type
-
-import _pythonpath
-
-from canonical.config import config
-from lp.soyuz.scripts.buildd import SlaveScanner
-
-
-if __name__ == '__main__':
-    script = SlaveScanner('slave-scanner', dbuser=config.builddmaster.dbuser)
-    script.lock_or_quit()
-    try:
-        script.run()
-    finally:
-        script.unlock()
-

=== modified file 'cronscripts/check-teamparticipation.py'
--- cronscripts/check-teamparticipation.py	2009-07-29 01:14:50 +0000
+++ cronscripts/check-teamparticipation.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -18,11 +18,11 @@
 situation, but that's not a simple thing and this should do for now.
 """
 
+import _pythonpath
+
 import optparse
 import sys
 
-import _pythonpath
-
 from canonical.database.sqlbase import cursor
 from canonical.launchpad.scripts import (
     execute_zcml_for_scripts, logger_options, logger)

=== modified file 'cronscripts/checkwatches.py'
--- cronscripts/checkwatches.py	2009-06-24 20:52:01 +0000
+++ cronscripts/checkwatches.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -9,47 +9,12 @@
 Cron job to run daily to check all of the BugWatches
 """
 
-import time
 import _pythonpath
 
 from canonical.config import config
-from lp.bugs.scripts.checkwatches import BugWatchUpdater
-from lp.services.scripts.base import LaunchpadCronScript
-
-
-class CheckWatches(LaunchpadCronScript):
-
-    def add_my_options(self):
-        """See `LaunchpadScript`."""
-        self.parser.add_option(
-            '-t', '--bug-tracker', action='append',
-            dest='bug_trackers', metavar="BUG_TRACKER",
-            help="Only check a given bug tracker. Specifying more than "
-                "one bugtracker using this option will check all the "
-                "bugtrackers specified.")
-        self.parser.add_option(
-            '-b', '--batch-size', action='store', dest='batch_size',
-            help="Set the number of watches to be checked per bug "
-                 "tracker in this run. If BATCH_SIZE is 0, all watches "
-                 "on the bug tracker that are eligible for checking will "
-                 "be checked.")
-
-    def main(self):
-        start_time = time.time()
-
-        updater = BugWatchUpdater(self.txn, self.logger)
-
-        # Make sure batch_size is an integer or None.
-        batch_size = self.options.batch_size
-        if batch_size is not None:
-            batch_size = int(batch_size)
-
-        updater.updateBugTrackers(self.options.bug_trackers, batch_size)
-
-        run_time = time.time() - start_time
-        self.logger.info("Time for this run: %.3f seconds." % run_time)
-
+from lp.bugs.scripts.checkwatches import CheckWatchesCronScript
 
 if __name__ == '__main__':
-    script = CheckWatches("checkwatches", dbuser=config.checkwatches.dbuser)
+    script = CheckWatchesCronScript(
+        "checkwatches", dbuser=config.checkwatches.dbuser)
     script.lock_and_run()

=== modified file 'cronscripts/code-import-dispatcher.py'
--- cronscripts/code-import-dispatcher.py	2009-06-24 20:52:01 +0000
+++ cronscripts/code-import-dispatcher.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -18,6 +18,12 @@
 
 class CodeImportDispatcherScript(LaunchpadScript):
 
+    def add_my_options(self):
+        self.parser.add_option(
+            "--max-jobs", dest="max_jobs", type=int,
+            default=config.codeimportdispatcher.max_jobs_per_machine,
+            help="The maximum number of jobs to run on this machine.")
+
     def run(self, use_web_security=False, implicit_begin=True,
             isolation=None):
         """See `LaunchpadScript.run`.
@@ -30,7 +36,8 @@
     def main(self):
         globalErrorUtility.configure('codeimportdispatcher')
 
-        CodeImportDispatcher(self.logger).findAndDispatchJob(
+        dispatcher = CodeImportDispatcher(self.logger, self.options.max_jobs)
+        dispatcher.findAndDispatchJobs(
             ServerProxy(config.codeimportdispatcher.codeimportscheduler_url))
 
 

=== modified file 'cronscripts/create-debwatches.py'
--- cronscripts/create-debwatches.py	2009-06-24 20:52:01 +0000
+++ cronscripts/create-debwatches.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -11,9 +11,9 @@
 
 __metaclass__ = type
 
+import _pythonpath
 import os
 import logging
-import _pythonpath
 
 # zope bits
 from zope.component import getUtility
@@ -77,9 +77,9 @@
 
         # first find all the published ubuntu packages
         ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
-        for p in ubuntu.currentrelease.publishedBinaryPackages(
-            component='main'):
-            target_package_set.add(p.binarypackagename.name)
+        for p in ubuntu.currentrelease.getAllPublishedBinaries():
+            target_package_set.add(
+                p.binarypackagerelease.binarypackagename.name)
         # then add packages passed on the command line
         for package in self.options.packages:
             target_package_set.add(package)

=== modified file 'cronscripts/create_merge_proposals.py'
--- cronscripts/create_merge_proposals.py	2009-09-03 19:46:42 +0000
+++ cronscripts/create_merge_proposals.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'cronscripts/distributionmirror-prober.py'
--- cronscripts/distributionmirror-prober.py	2009-06-24 20:52:01 +0000
+++ cronscripts/distributionmirror-prober.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== renamed file 'cronscripts/expire-ppa-binaries.py' => 'cronscripts/expire-archive-files.py'
--- cronscripts/expire-ppa-binaries.py	2009-06-24 20:52:01 +0000
+++ cronscripts/expire-archive-files.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -12,11 +12,11 @@
 import _pythonpath
 
 from canonical.config import config
-from lp.soyuz.scripts.expire_ppa_binaries import PPABinaryExpirer
+from lp.soyuz.scripts.expire_archive_files import ArchiveExpirer
 
 
 if __name__ == '__main__':
-    script = PPABinaryExpirer(
-        'expire-ppa-binaries', dbuser=config.binaryfile_expire.dbuser)
+    script = ArchiveExpirer(
+        'expire-archive-files', dbuser=config.binaryfile_expire.dbuser)
     script.lock_and_run()
 

=== modified file 'cronscripts/expire-bugtasks.py'
--- cronscripts/expire-bugtasks.py	2009-06-24 20:52:01 +0000
+++ cronscripts/expire-bugtasks.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -15,6 +15,8 @@
 
 import _pythonpath
 
+from zope.component import getUtility
+
 from canonical.config import config
 from lp.services.scripts.base import LaunchpadCronScript
 from lp.bugs.scripts.bugexpire import BugJanitor
@@ -30,9 +32,23 @@
     usage = "usage: %prog [options]"
     description =  '    %s' % __doc__
 
+    def add_my_options(self):
+        self.parser.add_option('-u', '--ubuntu', action='store_true',
+                               dest='ubuntu', default=False,
+                               help='Only expire Ubuntu bug tasks.')
+        self.parser.add_option('-l', '--limit', action='store', dest='limit',
+                               type='int', metavar='NUMBER', default=None,
+                               help='Limit expiry to NUMBER of bug tasks.')
+
     def main(self):
         """Run the BugJanitor."""
-        janitor = BugJanitor(log=self.logger)
+        target = None
+        if self.options.ubuntu:
+            # Avoid circular import.
+            from lp.registry.interfaces.distribution import IDistributionSet
+            target = getUtility(IDistributionSet).getByName('ubuntu')
+        janitor = BugJanitor(
+            log=self.logger, target=target, limit=self.options.limit)
         janitor.expireBugTasks(self.txn)
 
 

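The local import of IDistributionSet above is the usual way to break an import cycle: one side of the cycle is deferred until call time, so importing this module no longer drags in a module that imports it back. A minimal self-contained sketch of the pattern (both modules are purely illustrative):

    # module_a.py (hypothetical)
    import module_b          # safe: module_b defers its import of module_a

    def value():
        return 1

    # module_b.py (hypothetical)
    def use_a():
        # Deferred import: module_a is only loaded when use_a() runs, so
        # importing module_b never requires module_a to be initialised.
        import module_a
        return module_a.value()
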
=== modified file 'cronscripts/expire-questions.py'
--- cronscripts/expire-questions.py	2009-06-24 20:52:01 +0000
+++ cronscripts/expire-questions.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'cronscripts/flag-expired-memberships.py'
--- cronscripts/flag-expired-memberships.py	2009-06-24 20:52:01 +0000
+++ cronscripts/flag-expired-memberships.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -41,8 +41,10 @@
             days=DAYS_BEFORE_EXPIRATION_WARNING_IS_SENT)
         self.txn.begin()
         for membership in membershipset.getMembershipsToExpire(
-                min_date_for_warning):
+            min_date_for_warning, exclude_autorenewals=True):
             membership.sendExpirationWarningEmail()
+            self.logger.debug("Sent warning email to %s in %s team."
+                          % (membership.person.name, membership.team.name))
         self.txn.commit()
 
     def main(self):
@@ -59,4 +61,3 @@
     script = ExpireMemberships('flag-expired-memberships',
                                dbuser=config.expiredmembershipsflagger.dbuser)
     script.lock_and_run()
-

=== modified file 'cronscripts/foaf-update-karma-cache.py'
--- cronscripts/foaf-update-karma-cache.py	2009-06-24 20:52:01 +0000
+++ cronscripts/foaf-update-karma-cache.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -10,8 +10,10 @@
 from zope.component import getUtility
 
 from canonical.config import config
-from canonical.database.sqlbase import ISOLATION_LEVEL_AUTOCOMMIT
-from canonical.launchpad.interfaces import IKarmaCacheManager, NotFoundError
+from canonical.database.sqlbase import (
+    ISOLATION_LEVEL_AUTOCOMMIT, flush_database_updates)
+from canonical.launchpad.interfaces import IKarmaCacheManager
+from lp.app.errors import NotFoundError
 from lp.services.scripts.base import LaunchpadCronScript
 
 
@@ -83,6 +85,7 @@
         scaling = self.calculate_scaling(results)
         for entry in results:
             self.update_one_karma_cache_entry(entry, scaling)
+        flush_database_updates()
 
         # Delete the entries we're going to replace.
         self.cur.execute("DELETE FROM KarmaCache WHERE category IS NULL")
@@ -121,7 +124,7 @@
         # VACUUM KarmaTotalCache since we have just touched every row in it.
         self.cur.execute("""VACUUM KarmaTotalCache""")
 
-        # Insert new records into the KarmaTotalCache table. 
+        # Insert new records into the KarmaTotalCache table.
 
         # XXX: salgado 2007-02-06:
         # If deadlocks ever become a problem, first LOCK the
@@ -146,7 +149,7 @@
 
     def C_add_karmacache_sums(self):
         self.logger.info("Step C: Calculating KarmaCache sums")
-        # We must issue some SUM queries to insert the karma totals for: 
+        # We must issue some SUM queries to insert the karma totals for:
         # - All actions of a person on a given product.
         # - All actions of a person on a given distribution.
         # - All actions of a person on a given project.
@@ -156,7 +159,7 @@
 
         # - All actions with a specific category of a person.
         self.cur.execute("""
-            INSERT INTO KarmaCache 
+            INSERT INTO KarmaCache
                 (person, category, karmavalue, product, distribution,
                  sourcepackagename, project)
             SELECT person, category, SUM(karmavalue), NULL, NULL, NULL, NULL
@@ -167,7 +170,7 @@
 
         # - All actions of a person on a given product.
         self.cur.execute("""
-            INSERT INTO KarmaCache 
+            INSERT INTO KarmaCache
                 (person, category, karmavalue, product, distribution,
                  sourcepackagename, project)
             SELECT person, NULL, SUM(karmavalue), product, NULL, NULL, NULL
@@ -178,7 +181,7 @@
 
         # - All actions of a person on a given distribution.
         self.cur.execute("""
-            INSERT INTO KarmaCache 
+            INSERT INTO KarmaCache
                 (person, category, karmavalue, product, distribution,
                  sourcepackagename, project)
             SELECT person, NULL, SUM(karmavalue), NULL, distribution, NULL, NULL
@@ -189,7 +192,7 @@
 
         # - All actions of a person on a given project.
         self.cur.execute("""
-            INSERT INTO KarmaCache 
+            INSERT INTO KarmaCache
                 (person, category, karmavalue, product, distribution,
                  sourcepackagename, project)
             SELECT person, NULL, SUM(karmavalue), NULL, NULL, NULL,
@@ -206,7 +209,7 @@
         # inserted here will be included in the calculation of the overall
         # karma of a person on a given project.
         self.cur.execute("""
-            INSERT INTO KarmaCache 
+            INSERT INTO KarmaCache
                 (person, category, karmavalue, product, distribution,
                  sourcepackagename, project)
             SELECT person, category, SUM(karmavalue), NULL, NULL, NULL,
@@ -244,15 +247,18 @@
                 scaling[category] = 1
             else:
                 scaling[category] = float(largest_total) / float(points)
-            self.logger.debug('Scaling %s by a factor of %0.4f'
-                              % (categories[category], scaling[category]))
             max_scaling = config.karmacacheupdater.max_scaling
             if scaling[category] > max_scaling:
+                self.logger.info(
+                    'Scaling %s by a factor of %0.4f (capped to %0.4f)'
+                    % (categories[category], scaling[category], max_scaling))
                 scaling[category] = max_scaling
-                self.logger.debug('Reducing %s scaling to %d to avoid spikes' 
-                                  % (categories[category], max_scaling))
+            else:
+                self.logger.info(
+                    'Scaling %s by a factor of %0.4f'
+                    % (categories[category], scaling[category]))
         return scaling
-    
+
     def update_one_karma_cache_entry(self, entry, scaling):
         """Updates an individual (non-summed) KarmaCache entry.
 
@@ -262,7 +268,7 @@
         """
         (person_id, category_id, product_id, distribution_id, points) = entry
         points *= scaling[category_id] # Scaled. wow.
-        self.logger.debug("Setting person_id=%d, category_id=%d, points=%d" 
+        self.logger.debug("Setting person_id=%d, category_id=%d, points=%d"
                           % (person_id, category_id, points))
 
         points = int(points)
@@ -285,7 +291,7 @@
 
 
 if __name__ == '__main__':
-    script = KarmaCacheUpdater('karma-update', 
+    script = KarmaCacheUpdater('karma-update',
         dbuser=config.karmacacheupdater.dbuser)
     script.lock_and_run(implicit_begin=True)
 

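The logging rework above also shows the shape of the scaling step: each karma category is scaled so its point total matches the largest category, with the factor clamped to config.karmacacheupdater.max_scaling. A minimal sketch of the arithmetic, with illustrative numbers:

    largest_total = 10000.0
    max_scaling = 2  # illustrative; the real cap comes from the config
    for points in (10000.0, 8000.0, 100.0):
        factor = min(largest_total / points, max_scaling)
        print points, '->', factor  # 1.0, 1.25, then 100.0 capped to 2
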
=== modified file 'cronscripts/garbo-daily.py'
--- cronscripts/garbo-daily.py	2009-06-24 20:52:01 +0000
+++ cronscripts/garbo-daily.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -13,7 +13,7 @@
 __all__ = []
 
 import _pythonpath
-from canonical.launchpad.scripts.garbo import DailyDatabaseGarbageCollector
+from lp.scripts.garbo import DailyDatabaseGarbageCollector
 
 if __name__ == '__main__':
     script = DailyDatabaseGarbageCollector()

=== modified file 'cronscripts/garbo-hourly.py'
--- cronscripts/garbo-hourly.py	2009-06-24 20:52:01 +0000
+++ cronscripts/garbo-hourly.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -13,7 +13,7 @@
 __all__ = []
 
 import _pythonpath
-from canonical.launchpad.scripts.garbo import HourlyDatabaseGarbageCollector
+from lp.scripts.garbo import HourlyDatabaseGarbageCollector
 
 if __name__ == '__main__':
     script = HourlyDatabaseGarbageCollector()

=== modified file 'cronscripts/generate-ppa-htaccess.py'
--- cronscripts/generate-ppa-htaccess.py	2009-06-24 20:52:01 +0000
+++ cronscripts/generate-ppa-htaccess.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== added file 'cronscripts/initialise_distro_series.py'
--- cronscripts/initialise_distro_series.py	1970-01-01 00:00:00 +0000
+++ cronscripts/initialise_distro_series.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,27 @@
+#!/usr/bin/python -S
+#
+# Copyright 2010 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Initialise new distroseries."""
+
+__metaclass__ = type
+
+import _pythonpath
+
+from lp.services.job.runner import JobCronScript
+from lp.soyuz.interfaces.distributionjob import (
+    IInitialiseDistroSeriesJobSource,
+    )
+
+
+class RunInitialiseDistroSeriesJob(JobCronScript):
+    """Run InitialiseDistroSeriesJob jobs."""
+
+    config_name = 'initialisedistroseries'
+    source_interface = IInitialiseDistroSeriesJobSource
+
+
+if __name__ == '__main__':
+    script = RunInitialiseDistroSeriesJob()
+    script.lock_and_run()

=== modified file 'cronscripts/language-pack-exporter.py'
--- cronscripts/language-pack-exporter.py	2009-07-17 00:26:05 +0000
+++ cronscripts/language-pack-exporter.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'cronscripts/librarian-gc.py'
--- cronscripts/librarian-gc.py	2009-06-24 20:52:01 +0000
+++ cronscripts/librarian-gc.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -46,16 +46,17 @@
                 help="Skip removing expired TemporaryBlobStorage rows"
                 )
         self.parser.add_option(
-                '', "--skip-expired", action="store_true", default=False,
-                dest="skip_expired",
-                help="Skip flagging expired files for deletion."
-                )
-        self.parser.add_option(
                 '', "--skip-files", action="store_true", default=False,
                 dest="skip_files",
                 help="Skip removing files on disk with no database references"
                      " or flagged for deletion."
                 )
+        self.parser.add_option(
+                '', "--skip-expiry", action="store_true", default=False,
+                dest="skip_expiry",
+                help="Skip expiring aliases with an expiry date in the past."
+                )
+
 
     def main(self):
         librariangc.log = self.logger
@@ -71,6 +72,8 @@
 
         # Note that each of these next steps will issue commit commands
         # as appropriate to make this script transaction friendly
+        if not self.options.skip_expiry:
+            librariangc.expire_aliases(conn)
         if not self.options.skip_content:
             librariangc.delete_unreferenced_content(conn) # first sweep
         if not self.options.skip_blobs:
@@ -81,8 +84,6 @@
             librariangc.delete_unreferenced_aliases(conn)
         if not self.options.skip_content:
             librariangc.delete_unreferenced_content(conn) # second sweep
-        if not self.options.skip_expired:
-            librariangc.flag_expired_files(conn)
         if not self.options.skip_files:
             librariangc.delete_unwanted_files(conn)
 

=== renamed file 'cronscripts/mpcreationjobs.py' => 'cronscripts/merge-proposal-jobs.py'
--- cronscripts/mpcreationjobs.py	2009-09-03 19:04:28 +0000
+++ cronscripts/merge-proposal-jobs.py	2010-11-07 00:31:57 +0000
@@ -1,48 +1,40 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
+# Copyright 2009, 2010 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 # pylint: disable-msg=W0403
 
-"""Handle new BranchMergeProposals.
+"""Handle jobs for BranchMergeProposals.
 
-This script generates a diff for the merge proposal if needed, then notifies
-all interested parties about the merge proposal.
+This script handles all job types for branch merge proposals.
 """
 
 __metaclass__ = type
 
 import _pythonpath
-from zope.component import getUtility
 
-from canonical.config import config
-from lp.codehosting.vfs import get_scanner_server
-from lp.services.job.runner import JobRunner
+# The following line is a horrible hack, but unfortunately necessary right now
+# to stop import errors from circular imports.
+import canonical.launchpad.interfaces
 from lp.code.interfaces.branchmergeproposal import (
-    IMergeProposalCreatedJobSource,)
-from lp.services.scripts.base import LaunchpadCronScript
-from canonical.launchpad.webapp.errorlog import globalErrorUtility
-
-
-class RunMergeProposalCreatedJobs(LaunchpadCronScript):
-    """Run merge proposal creation jobs."""
-
-    def main(self):
-        globalErrorUtility.configure('mpcreationjobs')
-        job_source = getUtility(IMergeProposalCreatedJobSource)
-        runner = JobRunner.fromReady(job_source, self.logger)
-        server = get_scanner_server()
-        server.setUp()
-        try:
-            runner.runAll()
-        finally:
-            server.tearDown()
-        self.logger.info(
-            'Ran %d MergeProposalCreatedJobs.', len(runner.completed_jobs))
+    IBranchMergeProposalJobSource,
+    )
+from lp.services.job.runner import JobCronScript, TwistedJobRunner
+
+
+class RunMergeProposalJobs(JobCronScript):
+    """Run all merge proposal jobs."""
+
+    config_name = 'merge_proposal_jobs'
+    source_interface = IBranchMergeProposalJobSource
+
+    def __init__(self):
+        super(RunMergeProposalJobs, self).__init__(
+            runner_class=TwistedJobRunner,
+            name='merge-proposal-jobs')
 
 
 if __name__ == '__main__':
-    script = RunMergeProposalCreatedJobs(
-        'mpcreationjobs', config.mpcreationjobs.dbuser)
+    script = RunMergeProposalJobs()
     script.lock_and_run()

=== modified file 'cronscripts/mirror-prober.sh'
--- cronscripts/mirror-prober.sh	2009-06-24 20:52:01 +0000
+++ cronscripts/mirror-prober.sh	2010-11-07 00:31:57 +0000
@@ -39,10 +39,10 @@
 cd /srv/launchpad.net/production/launchpad/cronscripts
 
 echo '== Distribution mirror prober (archive)' `date` ==
-python2.4 distributionmirror-prober.py --content-type=archive --max-mirrors=20
+python -S distributionmirror-prober.py --content-type=archive --max-mirrors=20
 
 echo '== Distribution mirror prober (cdimage)' `date` ==
-python2.4 distributionmirror-prober.py --content-type=cdimage --max-mirrors=30
+python -S distributionmirror-prober.py --content-type=cdimage --max-mirrors=30
 
 rm -f $LOCK
 

=== modified file 'cronscripts/nightly.sh'
--- cronscripts/nightly.sh	2009-08-10 16:57:14 +0000
+++ cronscripts/nightly.sh	2010-11-07 00:31:57 +0000
@@ -3,11 +3,11 @@
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-# This script performs nightly chores. It should be run from 
+# This script performs nightly chores. It should be run from
 # cron as the launchpad user once a day. Typically the output
 # will be sent to an email address for inspection.
 
-# Note that http/ftp proxies are needed by the product 
+# Note that http/ftp proxies are needed by the product
 # release finder
 
 # Only run this script on loganberry
@@ -42,41 +42,41 @@
 cd /srv/launchpad.net/production/launchpad/cronscripts
 
 echo == Expiring memberships `date` ==
-python2.4 flag-expired-memberships.py -q
+python -S flag-expired-memberships.py -q
 
 echo == Allocating revision karma `date` ==
-python2.4 allocate-revision-karma.py -q
+python -S allocate-revision-karma.py -q
 
 echo == Recalculating karma `date` ==
-python2.4 foaf-update-karma-cache.py -q
+python -S foaf-update-karma-cache.py -q
 
 echo == Updating cached statistics `date` ==
-python2.4 update-stats.py -q
+python -S update-stats.py -q
 
 echo == Expiring questions `date` ==
-python2.4 expire-questions.py
+python -S expire-questions.py
 
 ### echo == Expiring bugs `date` ==
-### python2.4 expire-bugtasks.py
+### python -S expire-bugtasks.py
 
 # checkwatches.py is scheduled in the /code/pqm/launchpad_crontabs branch.
 ### echo == Updating bug watches `date` ==
-### python2.4 checkwatches.py
+### python -S checkwatches.py
 
 echo == Updating bugtask target name caches `date` ==
-python2.4 update-bugtask-targetnamecaches.py -q
+python -S update-bugtask-targetnamecaches.py -q
 
 echo == Updating personal standings `date` ==
-python2.4 update-standing.py -q
+python -S update-standing.py -q
 
 echo == Updating CVE database `date` ==
-python2.4 update-cve.py -q
+python -S update-cve.py -q
 
 echo == Updating package cache `date` ==
-python2.4 update-pkgcache.py -q
+python -S update-pkgcache.py -q
 
 echo == Product Release Finder `date` ==
-python2.4 product-release-finder.py -q
+python -S product-release-finder.py -q
 
 
 rm -f $LOCK

=== modified file 'cronscripts/oops-prune.py'
--- cronscripts/oops-prune.py	2009-06-24 20:52:01 +0000
+++ cronscripts/oops-prune.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'cronscripts/parse-librarian-apache-access-logs.py'
--- cronscripts/parse-librarian-apache-access-logs.py	2009-09-11 12:11:04 +0000
+++ cronscripts/parse-librarian-apache-access-logs.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== added file 'cronscripts/parse-ppa-apache-access-logs.py'
--- cronscripts/parse-ppa-apache-access-logs.py	1970-01-01 00:00:00 +0000
+++ cronscripts/parse-ppa-apache-access-logs.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,61 @@
+#!/usr/bin/python -S
+#
+# Copyright 2010 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Parse PPA apache logs to find out download counts for each file."""
+
+__metaclass__ = type
+
+import _pythonpath
+
+import functools
+
+from zope.component import getUtility
+
+from canonical.config import config
+# XXX: wgrant 2010-03-16 bug=539496: Importing directly from
+# lp.registry.interfaces.person results in a circular import.
+from canonical.launchpad.interfaces import IPersonSet
+from lp.soyuz.interfaces.archive import NoSuchPPA
+from lp.soyuz.scripts.ppa_apache_log_parser import DBUSER, get_ppa_file_key
+from lp.services.apachelogparser.script import ParseApacheLogs
+
+
+class ParsePPAApacheLogs(ParseApacheLogs):
+    """An Apache log parser for PPA downloads."""
+
+    def setUpUtilities(self):
+        """See `ParseApacheLogs`."""
+        self.person_set = getUtility(IPersonSet)
+
+    @property
+    def root(self):
+        """See `ParseApacheLogs`."""
+        return config.ppa_apache_log_parser.logs_root
+
+    def getDownloadKey(self, path):
+        """See `ParseApacheLogs`."""
+        return get_ppa_file_key(path)
+
+    def getDownloadCountUpdater(self, file_id):
+        """See `ParseApacheLogs`."""
+        person = self.person_set.getByName(file_id[0])
+        if person is None:
+            return
+        try:
+            archive = person.getPPAByName(file_id[1])
+        except NoSuchPPA:
+            return None
+        # file_id[2] (distro) isn't used yet, since getPPAByName
+        # hardcodes Ubuntu.
+        bpr = archive.getBinaryPackageReleaseByFileName(file_id[3])
+        if bpr is None:
+            return None
+
+        return functools.partial(archive.updatePackageDownloadCount, bpr)
+
+
+if __name__ == '__main__':
+    script = ParsePPAApacheLogs('parse-ppa-apache-logs', DBUSER)
+    script.lock_and_run()

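getDownloadCountUpdater() above returns the archive's update method with the binary package release already bound in via functools.partial, so the generic log-parsing machinery can invoke it without knowing about archives. A minimal sketch of that binding (the stand-in function and its signature are illustrative, not the real updatePackageDownloadCount API):

    import functools

    def update_download_count(package, day, count):
        # Illustrative stand-in for archive.updatePackageDownloadCount.
        print "%s: +%d downloads on %s" % (package, count, day)

    updater = functools.partial(update_download_count, 'foo-bin')
    updater('2010-11-07', 3)  # prints: foo-bin: +3 downloads on 2010-11-07
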
=== modified file 'cronscripts/ppa-generate-keys.py'
--- cronscripts/ppa-generate-keys.py	2009-06-24 20:52:01 +0000
+++ cronscripts/ppa-generate-keys.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -11,6 +11,10 @@
 
 import _pythonpath
 
+# Avoid crappy circular imports caused by
+# canonical.launchpad.interfaces.__init__
+import canonical.launchpad.interfaces
+
 from canonical.config import config
 from lp.soyuz.scripts.ppakeygenerator import PPAKeyGenerator
 

=== added file 'cronscripts/process-apport-blobs.py'
--- cronscripts/process-apport-blobs.py	1970-01-01 00:00:00 +0000
+++ cronscripts/process-apport-blobs.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,33 @@
+#!/usr/bin/python -S
+#
+# Copyright 2010 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# pylint: disable-msg=W0403
+
+"""Process uploaded Apport BLOBs."""
+
+__metaclass__ = type
+
+import _pythonpath
+
+from canonical.launchpad.webapp import errorlog
+
+from lp.services.job.runner import JobCronScript
+from lp.bugs.interfaces.apportjob import IProcessApportBlobJobSource
+
+
+class RunProcessApportBlobs(JobCronScript):
+    """Run ProcessApportBlobJobs."""
+
+    config_name = 'process_apport_blobs'
+    source_interface = IProcessApportBlobJobSource
+
+    def main(self):
+        errorlog.globalErrorUtility.configure(self.config_name)
+        return super(RunProcessApportBlobs, self).main()
+
+
+if __name__ == '__main__':
+    script = RunProcessApportBlobs()
+    script.lock_and_run()

=== modified file 'cronscripts/process-hwdb-submissions.py'
--- cronscripts/process-hwdb-submissions.py	2009-06-24 20:52:01 +0000
+++ cronscripts/process-hwdb-submissions.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -24,7 +24,7 @@
 import _pythonpath
 
 from lp.services.scripts.base import LaunchpadCronScript
-from canonical.launchpad.scripts.hwdbsubmissions import (
+from lp.hardwaredb.scripts.hwdbsubmissions import (
     process_pending_submissions)
 
 

=== added file 'cronscripts/process-job-source-groups.py'
--- cronscripts/process-job-source-groups.py	1970-01-01 00:00:00 +0000
+++ cronscripts/process-job-source-groups.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,114 @@
+#!/usr/bin/python -S
+#
+# Copyright 2009, 2010 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Handle jobs for multiple job source classes."""
+
+__metaclass__ = type
+
+from optparse import IndentedHelpFormatter
+import os
+import subprocess
+import sys
+import textwrap
+
+import _pythonpath
+
+from canonical.config import config
+from lp.services.propertycache import cachedproperty
+from lp.services.scripts.base import LaunchpadCronScript
+
+
+class LongEpilogHelpFormatter(IndentedHelpFormatter):
+    """Preserve newlines in epilog."""
+
+    def format_epilog(self, epilog):
+        if epilog:
+            return '\n%s\n' % epilog
+        else:
+            return ""
+
+
+class ProcessJobSourceGroups(LaunchpadCronScript):
+    """Handle each job source in a separate process with ProcessJobSource."""
+
+    def add_my_options(self):
+        self.parser.usage = "%prog [ -e JOB_SOURCE ] GROUP [GROUP]..."
+        self.parser.epilog = (
+            textwrap.fill(
+            "At least one group must be specified. Excluding job sources "
+            "is useful when you want to run all the other job sources in "
+            "a group.")
+            + "\n\n" + self.group_help)
+
+        self.parser.formatter = LongEpilogHelpFormatter()
+        self.parser.add_option(
+            '-e', '--exclude', dest='excluded_job_sources',
+            metavar="JOB_SOURCE", default=[], action='append',
+            help="Exclude specific job sources.")
+        self.parser.add_option(
+            '--wait', dest='do_wait', default=False, action='store_true',
+            help="Wait for the child processes to finish. This is useful "
+                 "for testing, but it shouldn't be used in a cronjob, since "
+                 "it would prevent the cronjob from processing new jobs "
+                 "if just one of the child processes is still processing, "
+                 "and each process only handles a single job source class.")
+
+    def main(self):
+        selected_groups = self.args
+        if len(selected_groups) == 0:
+            self.parser.print_help()
+            sys.exit(1)
+
+        selected_job_sources = set()
+        # Include job sources from selected groups.
+        for group in selected_groups:
+            selected_job_sources.update(self.grouped_sources[group])
+        # Then, exclude job sources.
+        for source in self.options.excluded_job_sources:
+            if source not in selected_job_sources:
+                self.logger.info('%r is not in job source groups %s'
+                                 % (source, selected_groups))
+            else:
+                selected_job_sources.remove(source)
+        # Process job sources.
+        command = os.path.join(
+            os.path.dirname(sys.argv[0]), 'process-job-source.py')
+        child_args = [command]
+        if self.options.verbose:
+            child_args.append('-v')
+        children = []
+        for job_source in selected_job_sources:
+            child = subprocess.Popen(child_args + [job_source])
+            children.append(child)
+        if self.options.do_wait:
+            for child in children:
+                child.wait()
+
+    @cachedproperty
+    def all_job_sources(self):
+        job_sources = config['process-job-source-groups'].job_sources
+        return [job_source.strip() for job_source in job_sources.split(',')]
+
+    @cachedproperty
+    def grouped_sources(self):
+        groups = {}
+        for source in self.all_job_sources:
+            if source not in config:
+                continue
+            section = config[source]
+            group = groups.setdefault(section.crontab_group, [])
+            group.append(source)
+        return groups
+
+    @cachedproperty
+    def group_help(self):
+        return '\n\n'.join(
+            'Group: %s\n    %s' % (group, '\n    '.join(sources))
+            for group, sources in sorted(self.grouped_sources.items()))
+
+
+if __name__ == '__main__':
+    script = ProcessJobSourceGroups()
+    script.lock_and_run()

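grouped_sources above buckets every configured job source by the crontab_group key of its config section. The same setdefault pattern as a stand-alone sketch (the mapping and group names are illustrative, not the real configuration):

    sections = {
        'IBranchMergeProposalJobSource': 'MAIN',
        'IProcessApportBlobJobSource': 'MAIN',
        'IInitialiseDistroSeriesJobSource': 'FREQUENT',
    }
    groups = {}
    for source, crontab_group in sections.items():
        groups.setdefault(crontab_group, []).append(source)
    # groups == {'MAIN': [...], 'FREQUENT': [...]}
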
=== added file 'cronscripts/process-job-source.py'
--- cronscripts/process-job-source.py	1970-01-01 00:00:00 +0000
+++ cronscripts/process-job-source.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,65 @@
+#!/usr/bin/python -S
+#
+# Copyright 2009, 2010 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Handle jobs for a specified job source class."""
+
+__metaclass__ = type
+
+import sys
+
+import _pythonpath
+from twisted.python import log
+
+from canonical.config import config
+from lp.services.job import runner
+from lp.services.job.runner import JobCronScript
+
+
+class ProcessJobSource(JobCronScript):
+    """Run jobs for a specified job source class."""
+    usage = (
+        "Usage: %prog [options] JOB_SOURCE\n\n"
+        "For more help, run:\n"
+        "    cronscripts/process-job-source-groups.py --help")
+
+    def __init__(self):
+        super(ProcessJobSource, self).__init__()
+        # The fromlist argument is necessary so that __import__()
+        # returns the bottom submodule instead of the top one.
+        module = __import__(self.config_section.module,
+                            fromlist=[self.job_source_name])
+        self.source_interface = getattr(module, self.job_source_name)
+
+    @property
+    def config_name(self):
+        return self.job_source_name
+
+    @property
+    def name(self):
+        return 'process-job-source-%s' % self.job_source_name
+
+    @property
+    def runner_class(self):
+        runner_class_name = getattr(
+            self.config_section, 'runner_class', 'JobRunner')
+        # Look the class up by name, overriding what __init__() would set.
+        return getattr(runner, runner_class_name)
+
+    def handle_options(self):
+        if len(self.args) != 1:
+            self.parser.print_help()
+            sys.exit(1)
+        self.job_source_name = self.args[0]
+        super(ProcessJobSource, self).handle_options()
+
+    def main(self):
+        if self.options.verbose:
+            log.startLogging(sys.stdout)
+        super(ProcessJobSource, self).main()
+
+
+if __name__ == '__main__':
+    script = ProcessJobSource()
+    script.lock_and_run()

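The fromlist comment in ProcessJobSource.__init__() is worth spelling out: without a fromlist, __import__() returns the top-level package rather than the submodule that defines the job source interface. A minimal sketch using a stdlib module:

    top = __import__('xml.sax.saxutils')
    print top.__name__     # 'xml' -- the top-level package

    bottom = __import__('xml.sax.saxutils', fromlist=['escape'])
    print bottom.__name__  # 'xml.sax.saxutils' -- the bottom submodule
    print getattr(bottom, 'escape')('<')  # '&lt;'
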
=== modified file 'cronscripts/process-mail.py'
--- cronscripts/process-mail.py	2009-06-24 20:52:01 +0000
+++ cronscripts/process-mail.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -13,7 +13,7 @@
 from canonical.config import config
 from lp.services.scripts.base import (
     LaunchpadCronScript, LaunchpadScriptFailure)
-from canonical.launchpad.mail.incoming import handleMail
+from lp.services.mail.incoming import handleMail
 from canonical.launchpad.interfaces import IMailBox
 
 
@@ -21,6 +21,7 @@
     usage = """%prog [options]
 
     """ + __doc__
+
     def main(self):
         try:
             handleMail(self.txn)

=== modified file 'cronscripts/process-pending-packagediffs.py'
--- cronscripts/process-pending-packagediffs.py	2009-06-24 20:52:01 +0000
+++ cronscripts/process-pending-packagediffs.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'cronscripts/product-release-finder.py'
--- cronscripts/product-release-finder.py	2009-06-24 20:52:01 +0000
+++ cronscripts/product-release-finder.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'cronscripts/publishing/commercial-compat.sh'
--- cronscripts/publishing/commercial-compat.sh	2009-06-24 20:52:01 +0000
+++ cronscripts/publishing/commercial-compat.sh	2010-11-07 00:31:57 +0000
@@ -25,7 +25,7 @@
 set -e
 
 # Config goes here.
-PRODUCTION_CONFIG=ftpmaster
+PRODUCTION_CONFIG=ftpmaster-publish
 if [ "$LPCONFIG" = "$PRODUCTION_CONFIG" ]; then
     archiveurl=/srv/launchpad.net/ubuntu-archive/ubuntu-partner
 else

=== modified file 'cronscripts/publishing/cron.base-ppa'
--- cronscripts/publishing/cron.base-ppa	2009-06-24 20:52:01 +0000
+++ cronscripts/publishing/cron.base-ppa	2010-11-07 00:31:57 +0000
@@ -5,9 +5,8 @@
 
 # Initial setup for PPA cronscripts.
 
-# Export LPCONFIG and define common variables.
-LPCONFIG=ppa
-export LPCONFIG
+# DO NOT set LPCONFIG here, it should come from the crontab or the shell.
+# Define common variables.
 LPCURRENT=/srv/launchpad.net/codelines/current
 PPAROOT=/srv/launchpad.net/ppa-archive
 P3AROOT=/srv/launchpad.net/private-ppa-archive

=== modified file 'cronscripts/publishing/cron.germinate'
--- cronscripts/publishing/cron.germinate	2009-08-21 09:10:55 +0000
+++ cronscripts/publishing/cron.germinate	2010-11-07 00:31:57 +0000
@@ -11,6 +11,9 @@
 LOCKROOT=$ARCHIVEROOT/..
 GERMINATEROOT=$ARCHIVEROOT/../ubuntu-germinate
 
+LAUNCHPADROOT=/srv/launchpad.net/codelines/current
+MAINTENANCE_CHECK=$LAUNCHPADROOT/cronscripts/publishing/maintenance-check.py
+
 ## Check to see if another germinate run is in progress
 
 LOCKFILE=$LOCKROOT/cron.germinate.lock
@@ -32,7 +35,7 @@
 cd $GERMINATEROOT
 
 # Clean up temporary files
-rm -f germinate.output ALL ALL.sources UBUNTU-* KUBUNTU-* EDUBUNTU-* XUBUNTU-* MOBILE-* MYTHBUNTU-* UNR-*
+rm -f germinate.output ALL ALL.sources UBUNTU-* KUBUNTU-* EDUBUNTU-* XUBUNTU-* MYTHBUNTU-* NETBOOK-*
 rm -f all_* all.sources_*
 
 # Grab a local copy of Sources files
@@ -41,11 +44,12 @@
 done
 
 > "$MISCROOT/more-extra.override.$suite.main.new"
-for distro in ubuntu kubuntu edubuntu xubuntu mobile mythbuntu unr; do
+
+germinate_components=main,universe,restricted,multiverse
+for distro in ubuntu kubuntu edubuntu xubuntu mythbuntu netbook; do
   DISTRO="$(echo $distro | tr a-z A-Z)"
   germinate_suite="$distro.$suite"
-  germinate_components=main,universe,restricted,multiverse
-  for arch in i386 amd64 lpia powerpc sparc ia64 armel; do
+  for arch in i386 amd64 powerpc armel; do
     # Grab local copy of Packages and InstallerPackages files
     for component in main universe restricted multiverse; do
       zcat $ARCHIVEROOT/dists/"$suite"/"$component"/binary-$arch/Packages.gz > archive.ubuntu.com_"$suite"_"$component"_Packages
@@ -122,4 +126,10 @@
 done
 echo " done."
 
+# now generate the Supported extra overrides
+$MAINTENANCE_CHECK $suite > "$MISCROOT/more-extra.override.$suite.main.supported" 2> _maintenance-check.stderr
+if [ $? -eq 0 ]; then
+    cat "$MISCROOT/more-extra.override.$suite.main.supported" >> "$MISCROOT/more-extra.override.$suite.main.new"
+fi
+
 mv -f "$MISCROOT/more-extra.override.$suite.main.new" "$MISCROOT/more-extra.override.$suite.main"

=== added file 'cronscripts/publishing/cron.publish-copy-archives'
--- cronscripts/publishing/cron.publish-copy-archives	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/cron.publish-copy-archives	2010-11-07 00:31:57 +0000
@@ -0,0 +1,83 @@
+#!/bin/sh
+#
+# Copyright 2009 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# LPCONFIG will come from the environment so this script can run unaltered
+# on dogfood.
+if [ -z "$LPCONFIG" ]; then
+    echo LPCONFIG must be set to run this script.
+    exit 1
+fi
+
+set -x
+set -e
+set -u
+
+#
+# This script publishes the COPY (rebuild) archives *only*.
+#
+
+
+# Informational -- this *MUST* match the database.
+ARCHIVEROOT=/srv/launchpad.net/ubuntu-archive/ubuntu
+DISTSROOT=$ARCHIVEROOT/dists
+OVERRIDEROOT=$ARCHIVEROOT/../ubuntu-overrides
+INDICES=$ARCHIVEROOT/indices
+PRODUCTION_CONFIG=ftpmaster-publish
+
+if [ "$LPCONFIG" = "$PRODUCTION_CONFIG" ]; then
+    GNUPGHOME=/srv/launchpad.net/ubuntu-archive/gnupg-home
+else
+    echo GPG keys will come from ~/.gnupg
+    # GNUPGHOME does not need to be set, keys can come from ~/.gnupg.
+fi
+
+# Configuration options.
+LAUNCHPADROOT=/srv/launchpad.net/codelines/current
+LOCKFILE=/srv/launchpad.net/ubuntu-archive/cron.daily.lock
+DISTRONAME=ubuntu
+TRACEFILE=$ARCHIVEROOT/project/trace/$(hostname --fqdn)
+
+# Manipulate the environment.
+export GNUPGHOME
+PATH=$PATH:$LAUNCHPADROOT/scripts:$LAUNCHPADROOT/cronscripts:$LAUNCHPADROOT/cronscripts/publishing:$LAUNCHPADROOT/scripts/ftpmaster-tools
+
+# Claim the lock.
+if ! lockfile -r1 $LOCKFILE; then
+  echo "Could not claim lock file."
+  exit 1
+fi
+
+# Lock claimed.
+
+cleanup () {
+  echo "Cleaning up lockfile."
+  rm -f $LOCKFILE
+}
+
+trap cleanup EXIT
+
+# Process the accepted queue into the publishing records.
+process-accepted.py --copy-archive -v -v -v $DISTRONAME
+
+# Publish the packages to disk.
+publish-distro.py -v -v --copy-archive -d $DISTRONAME
+
+set +x
+
+echo Removing uncompressed Packages and Sources files
+find ${DISTSROOT} \( -name "Packages" -o -name "Sources" \) -exec rm "{}" \;
+
+# Copy in the indices.
+if [ "$LPCONFIG" = "$PRODUCTION_CONFIG" ]; then
+    echo Copying the indices into place.
+    rm -f $INDICES/override.*
+    cp $OVERRIDEROOT/override.* $INDICES
+fi
+
+# Timestamp our trace file to track when the last archive publisher run took
+# place.
+if [ "$LPCONFIG" = "$PRODUCTION_CONFIG" ]; then
+    date -u > "$TRACEFILE"
+fi

=== modified file 'cronscripts/publishing/cron.publish-ftpmaster'
--- cronscripts/publishing/cron.publish-ftpmaster	2009-06-24 20:52:01 +0000
+++ cronscripts/publishing/cron.publish-ftpmaster	2010-11-07 00:31:57 +0000
@@ -28,7 +28,7 @@
 CACHEROOT=$ARCHIVEROOT/../ubuntu-cache
 DISTSCOPYROOT=$ARCHIVEROOT/../ubuntu-distscopy
 INDICES=$ARCHIVEROOT/indices
-PRODUCTION_CONFIG=ftpmaster
+PRODUCTION_CONFIG=ftpmaster-publish
 
 if [ "$LPCONFIG" = "$PRODUCTION_CONFIG" ]; then
     ARCHIVEROOT_PARTNER=/srv/launchpad.net/ubuntu-archive/ubuntu-partner
@@ -44,10 +44,13 @@
 LAUNCHPADROOT=/srv/launchpad.net/codelines/current
 LOCKFILE=/srv/launchpad.net/ubuntu-archive/cron.daily.lock
 DISTRONAME=ubuntu
-TOUCHLIST=$ARCHIVEROOT/project/trace/$(hostname --fqdn)
+TRACEFILE=$ARCHIVEROOT/project/trace/$(hostname --fqdn)
 DSYNCLIST=$CACHEROOT/dsync.list
 MD5LIST=$INDICES/md5sums.gz
 
+# Mirrors to push at the end of the publishing run.
+MASTERMIRRORS="syowa frei scandium"
+
 # Manipulate the environment.
 export GNUPGHOME
 PATH=$PATH:$LAUNCHPADROOT/scripts:$LAUNCHPADROOT/cronscripts:$LAUNCHPADROOT/cronscripts/publishing:$LAUNCHPADROOT/scripts/ftpmaster-tools
@@ -58,12 +61,14 @@
   exit 1
 fi
 
+echo "$(date -R): Initiating archive publishing operations..."
+
 DONEPUB=no
 
 cleanup () {
-  echo "Cleaning up lockfile."
+  echo "$(date -R): Cleaning up lockfile."
   rm -f $LOCKFILE
-  echo "Moving dists backup to safe keeping for next time."
+  echo "$(date -R): Moving dists backup to safe keeping for next time."
   if [ "x$DONEPUB" = "xyes" ]; then
     if [ -d ${DISTSROOT}.old ]; then
       mv ${DISTSROOT}.old ${DISTSCOPYROOT}/dists
@@ -89,11 +94,13 @@
 # Lock claimed.
 
 # Process the accepted queue into the publishing records.
+echo "$(date -R): Processing the accepted queue into the publishing records..."
 process-accepted.py -v -v -v $DISTRONAME
 
 # If doing a security run, find out which suites are pending publication.
 SUITEOPTS=""
 if [ "$SECURITY_PUBLISHER" = "yes" ]; then
+    echo "$(date -R): Querying which suites are pending publication..."
     SUITES=$(lp-query-distro.py pending_suites)
     SECURITY_SUITES="no"
     if [ -n "$SUITES" ]; then
@@ -109,7 +116,7 @@
         done
     fi
     if [ "$SECURITY_SUITES" != "yes" ]; then
-        echo "Nothing to do for security publisher; exiting."
+        echo "$(date -R): Nothing to do for security publisher; exiting."
         exit 0
     fi
 fi
@@ -122,21 +129,27 @@
 # This should achieve the same as copying, only faster.
 
 # Create backup dists folder, if this is the first time.
+echo "$(date -R): Creating backup dists directories..."
 mkdir -p ${DISTSCOPYROOT}/dists
 mkdir -p ${DISTSCOPYROOT_PARTNER}/dists
 
 # Move the backup dists folder into place.
+echo "$(date -R): Moving backup dists into place..."
 mv ${DISTSCOPYROOT}/dists ${ARCHIVEROOT}/dists.new
 mv ${DISTSCOPYROOT_PARTNER}/dists ${ARCHIVEROOT_PARTNER}/dists.new
 
 # Bring it up-to-date efficiently with rsync. --delete is required to
 # ensure we don't ressurect things previously deleted, bug 58835.
+echo "$(date -R): Updating dists directories..."
 rsync -aH --delete ${DISTSROOT}/ ${ARCHIVEROOT}/dists.new
 rsync -aH --delete ${DISTSROOT_PARTNER}/ ${ARCHIVEROOT_PARTNER}/dists.new
 
 # Publish the results for all archives (except PPA).
 # The -R only affects the primary and the partner archive.
+echo "$(date -R): Publishing the $DISTRONAME partner archive..."
 publish-distro.py -v -v --partner -d $DISTRONAME -R ${DISTSROOT_PARTNER}.new
+
+echo "$(date -R): Publishing the $DISTRONAME archive..."
 publish-distro.py -v -v -d $DISTRONAME $SUITEOPTS -R ${DISTSROOT}.new
 
 set +x
@@ -148,39 +161,40 @@
        $(find ${DISTSROOT}.new/*/*/dist-upgrader* -name "*.tar.gz"); do
   #  [ Release.gpg missing   ] or [ Release is newer than Release.gpg ]
   if [ ! -f $CANDIDATE.gpg ] || [ $CANDIDATE -nt $CANDIDATE.gpg ]; then
-    echo "(re-)signing $CANDIDATE"
+    echo "$(date -R): (re-)signing $CANDIDATE"
     gpg --yes --detach-sign --armor -o $CANDIDATE.gpg --sign $CANDIDATE
   else
-    echo "Not re-signing $CANDIDATE"
+    echo "$(date -R): Not re-signing $CANDIDATE"
   fi
 done
 SIGNLIST_PARTNER=$(find ${DISTSROOT_PARTNER}.new -maxdepth 2 -name Release)
 for CANDIDATE in $SIGNLIST_PARTNER; do
   #  [ Release.gpg missing   ] or [ Release is newer than Release.gpg ].
   if [ ! -f $CANDIDATE.gpg ] || [ $CANDIDATE -nt $CANDIDATE.gpg ]; then
-    echo "(re-)signing $CANDIDATE"
+    echo "$(date -R): (re-)signing $CANDIDATE"
     gpg --yes --detach-sign --armor -o $CANDIDATE.gpg --sign $CANDIDATE
   else
-    echo "Not re-signing $CANDIDATE"
+    echo "$(date -R): Not re-signing $CANDIDATE"
   fi
 done
 
 # The Packages and Sources files are very large and would cripple our
 # mirrors, so we remove them now that the uncompressed MD5SUMS are in the
 # Release files.
-echo Removing uncompressed Packages and Sources files
+echo "$(date -R): Removing uncompressed Packages and Sources files"
 find ${DISTSROOT}.new \( -name "Packages" -o -name "Sources" \) -exec rm "{}" \;
 find ${DISTSROOT_PARTNER} \( -name "Packages" -o -name "Sources" \) -exec rm "{}" \;
 
 # Copy in the indices.
 if [ "$LPCONFIG" = "$PRODUCTION_CONFIG" ]; then
-    echo Copying the indices into place.
+    echo "$(date -R): Copying the indices into place."
     rm -f $INDICES/override.*
     cp $OVERRIDEROOT/override.* $INDICES
 fi
 
 # As close to atomically as possible, put the new dists into place for the
 # primary and partner archives.
+echo "$(date -R): Placing the new dists into place..."
 mv $DISTSROOT ${DISTSROOT}.old
 mv ${DISTSROOT}.new $DISTSROOT
 mv $DISTSROOT_PARTNER ${DISTSROOT_PARTNER}.old
@@ -192,19 +206,21 @@
 # Generate the -commercial pocket for backwards compatibility with
 # dapper, edgy and feisty releases.  Don't fail the whole script if it
 # fails.
+echo "$(date -R): Generating -commerical pocket..."
 commercial-compat.sh || true
 
-# Touch everything you asked us to do.
+# Timestamp our trace file to track when the last archive publisher run took
+# place.
 if [ "$LPCONFIG" = "$PRODUCTION_CONFIG" ]; then
-    for FILE in $TOUCHLIST; do
-      touch "$FILE"
-    done
+    echo "$(date -R): Timestamping trace file..."
+    date -u > "$TRACEFILE"
 fi
 
 # Skip long-running processes when doing a quick security run.
 if [ "$SECURITY_PUBLISHER" != "yes" ]; then
 
     # Make the lslr because we all love those.
+    echo "$(date -R): Creating ls-lR.gz..."
     LSLR=ls-lR.gz
     ( cd $ARCHIVEROOT ; \
       rm -f .$LSLR.new ; \
@@ -216,25 +232,35 @@
       mv -f .$LSLR.new $LSLR )
 
     # Run dsync over primary archive only.
+    echo "$(date -R): Running dsync over primary archive..."
     ( cd $ARCHIVEROOT ; \
       dsync-flist -q generate $DSYNCLIST -e 'Packages*' -e 'Sources*' -e 'Release*' --md5 ; \
       (dsync-flist -q md5sums $DSYNCLIST; find dists '(' -name 'Packages*' -o -name 'Sources*' -o -name 'Release*' ')' -print | xargs -r md5sum) | gzip -9n > ${MD5LIST} ; \
       dsync-flist -q link-dups $DSYNCLIST || true )
 
     # Clear out empty and thus redundant dirs.
+    echo "$(date -R): Clearing out empty directories..."
     find $ARCHIVEROOT -type d -empty | xargs -r rmdir
     find $ARCHIVEROOT_PARTNER -type d -empty | xargs -r rmdir
 
     # Invoke cron.germinate to fill ubuntu tasks and ignore failures,
     # the mirrors should be always triggered.
-    cron.germinate || echo "cron.germinate failed with exit code $?" >&2
+    echo "$(date -R): Running cron.germinate..."
+    cron.germinate || echo "$(date -R): cron.germinate failed with exit code $?"
 
 # End of block skipped by security publishing.
 fi
 
 # Trigger master mirrors.
 if [ "$LPCONFIG" = "$PRODUCTION_CONFIG" ]; then
-    ssh archvsync@syowa
-    ssh archvsync@frei
-    ssh archvsync@rockhopper
+    echo "$(date -R): Triggering master mirrors..."
+
+    for HOST in $MASTERMIRRORS; do
+        echo "$(date -R): Triggering $HOST:"
+        ssh archvsync@$HOST
+    done
+
+    echo "$(date -R): Master mirror triggers completed."
 fi
+
+echo "$(date -R): Archive publishing operations completed."

=== added file 'cronscripts/publishing/maintenance-check.py'
--- cronscripts/publishing/maintenance-check.py	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/maintenance-check.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,382 @@
+#!/usr/bin/python
+#
+# Python port of the nice maintenance-check script by Nick Barcet
+#
+# taken from:
+#  https://code.edge.launchpad.net/~mvo/ubuntu-maintenance-check/python-port
+# (where it will vanish once it has landed here)
+
+# This warning filter is only needed on older versions of python-apt;
+# once the machine runs lucid it can be removed.
+import warnings
+warnings.filterwarnings("ignore", "apt API not stable yet")
+import apt
+warnings.resetwarnings()
+
+import apt_pkg
+import logging
+import os
+import sys
+import urllib2
+import urlparse
+
+from optparse import OptionParser
+
+# This is fun! We have a bunch of cases for 10.04 LTS
+#
+#  - distro "ubuntu" follows SUPPORT_TIMEFRAME_LTS but only for
+#    amd64/i386
+#  - distros "kubuntu", "edubuntu" and "netbook" need to be
+#    considered *but* only follow SUPPORT_TIMEFRAME
+#  - anything that is in armel follows SUPPORT_TIMEFRAME
+#  
+
+# codename of the lts releases
+LTS_RELEASES = [ "dapper", "hardy", "lucid" ]
+
+# architectures that are full supported (including LTS time)
+PRIMARY_ARCHES =  ["i386", "amd64"]
+
+# architectures we support (but not for LTS time)
+SUPPORTED_ARCHES = PRIMARY_ARCHES + ["armel"]
+
+# what defines the seeds is documented in wiki.ubuntu.com/SeedManagement
+SERVER_SEEDS = [ "supported-server", "server-ship"]
+DESKTOP_SEEDS = ["ship", "supported-desktop", "supported-desktop-extra"]
+SUPPORTED_SEEDS = [ "all" ]
+
+# normal support timeframe
+# (time, seeds)
+SUPPORT_TIMEFRAME = [
+    ("18m", SUPPORTED_SEEDS),
+]
+
+# lts support timeframe
+# (time, seeds)
+SUPPORT_TIMEFRAME_LTS = [
+    ("5y", SERVER_SEEDS),
+    ("3y", DESKTOP_SEEDS),
+    ("18m", SUPPORTED_SEEDS),
+]
+
+# distro names and if they get LTS support (order is important)
+DISTRO_NAMES_AND_LTS_SUPPORT = [ ("ubuntu",   True),
+                                 ("kubuntu",  True),
+                                 ("netbook",  False),
+                               ]
+
+# germinate output base directory
+BASE_URL = "http://people.canonical.com/~ubuntu-archive/germinate-output/";
+
+# hints dir url, hints file is "$distro.hints" by default
+# (e.g. lucid.hints)
+HINTS_DIR_URL = "http://people.canonical.com/~ubuntu-archive/seeds/platform.%s/SUPPORTED_HINTS";
+
+# we need the archive root to parse the Sources file to support
+# by-source hints
+ARCHIVE_ROOT = "http://archive.ubuntu.com/ubuntu";
+
+# support timeframe tag used in the Packages file
+SUPPORT_TAG = "Supported"
+
+def get_binaries_for_source_pkg(srcname):
+    """ Return all binary package names for the given source package name.
+
+    :param srcname: The source package name.
+    :return: A list of binary package names.
+    """
+    pkgnames = set()
+    recs = apt_pkg.GetPkgSrcRecords()
+    while recs.Lookup(srcname):
+        for binary in recs.Binaries:
+            pkgnames.add(binary)
+    return pkgnames
+
+def expand_src_pkgname(pkgname):
+    """ Expand a package name if it is prefixed with src.
+
+    If the package name is prefixed with src it will be expanded
+    to a list of binary package names. Otherwise the original
+    package name will be returned.
+    
+    :param pkgname: The package name (that may include src:prefix).
+    :return: A list of binary package names (the list may be one element long).
+    """
+    if not pkgname.startswith("src:"):
+        return [pkgname]
+    return get_binaries_for_source_pkg(pkgname.split("src:")[1])
+
+def create_and_update_deb_src_source_list(distroseries):
+    """ Create sources.list and update cache.
+
+    This creates a sources.list file with deb-src entries for a given
+    distroseries and calls apt.Cache.update() so the data is current.
+    :param distroseries: Code name of the distribution series (e.g. lucid).
+    :return: None
+    :raises: IOError: When cache update fails.
+    """
+    # apt root dir
+    rootdir="./aptroot.%s" % distroseries
+    sources_list_dir = os.path.join(rootdir, "etc","apt")
+    if not os.path.exists(sources_list_dir):
+        os.makedirs(sources_list_dir)
+    sources_list = open(os.path.join(sources_list_dir, "sources.list"),"w")
+    for pocket in [
+        "%s" % distroseries, 
+        "%s-updates" % distroseries, 
+        "%s-security" % distroseries]:
+        sources_list.write(
+            "deb-src %s %s main restricted\n" % (
+                ARCHIVE_ROOT, pocket))
+        sources_list.write(
+            "deb %s %s main restricted\n" % (
+                ARCHIVE_ROOT, pocket))
+    sources_list.close()
+    # create required dirs/files for apt.Cache(rootdir) to work on older
+    # versions of python-apt. once lucid is used it can be removed
+    for d in  ["var/lib/dpkg", 
+               "var/cache/apt/archives/partial",
+               "var/lib/apt/lists/partial"]:
+        if not os.path.exists(os.path.join(rootdir,d)):
+            os.makedirs(os.path.join(rootdir,d))
+    if not os.path.exists(os.path.join(rootdir,"var/lib/dpkg/status")):
+        open(os.path.join(rootdir,"var/lib/dpkg/status"),"w")
+    # open cache with our just prepared rootdir
+    cache = apt.Cache(rootdir=rootdir)
+    try:
+        cache.update(apt.progress.FetchProgress())
+    except SystemError:
+        logging.exception("cache.update() failed")
+
+def get_structure(distroname, version):
+    """ Get structure file conent for named distro and distro version.
+    
+    :param name: Name of the distribution (e.g. kubuntu, ubuntu, xubuntu).
+    :param version: Code name of the distribution version (e.g. lucid).
+    :return: List of strings with the structure file content
+    """
+    f = urllib2.urlopen("%s/%s.%s/structure" % (BASE_URL, distroname, version))
+    structure = f.readlines()
+    f.close()
+    return structure
+
+def expand_seeds(structure, seedname):
+    """ Expand seed by its dependencies using the strucure file.
+
+    :param structure: The content of the STRUCTURE file as string list.
+    :param seedname: The name of the seed as string that needs to be expanded.
+    :return: a set() for the seed dependencies (excluding the original seedname)
+    """
+    seeds = []
+    for line in structure:
+        if line.startswith("%s:" % seedname):
+            seeds += line.split(":")[1].split()
+            for seed in seeds:
+                seeds += expand_seeds(structure, seed)
+    return set(seeds)
+
+def get_packages_for_seeds(name, distro, seeds):
+    """
+    get packages for the given name (e.g. ubuntu) and distro release 
+    (e.g. lucid) that are in the given list of seeds
+    returns a set() of package names
+    """
+    pkgs_in_seeds = {}
+    for bseed in seeds:
+        for seed in [bseed]: #, bseed+".build-depends", bseed+".seed"]:
+            pkgs_in_seeds[seed] = set()
+            seedurl = "%s/%s.%s/%s" % (BASE_URL,name, distro, seed)
+            logging.debug("looking for '%s'" % seedurl)
+            try:
+                f = urllib2.urlopen(seedurl)
+                for line in f:
+                    # ignore lines that are not a package name (headers etc)
+                    if line[0] < 'a' or line[0] > 'z':
+                        continue
+                    # lines are (package,source,why,maintainer,size,inst-size)
+                    if options.source_packages:
+                        pkgname = line.split("|")[1]
+                    else:
+                        pkgname = line.split("|")[0]
+                    pkgs_in_seeds[seed].add(pkgname.strip())
+                f.close()
+            except Exception, e:
+                logging.error("seed %s failed (%s)" % (seedurl, e))
+    return pkgs_in_seeds
+
+def what_seeds(pkgname, seeds):
+    in_seeds = set()
+    for s in seeds:
+        if pkgname in seeds[s]:
+            in_seeds.add(s)
+    return in_seeds
+
+def compare_support_level(x, y):
+    """
+    compare two support level strings of the form 18m, 3y etc
+    :parm x: the first support level
+    :parm y: the second support level
+    :return: negative if x < y, zero if x==y, positive if x > y
+    """
+    def support_to_int(support_time):
+        """
+        helper that takes a support time string and converts it to 
+        a integer for cmp()
+        """
+        # allow strings like "5y (kubuntu-common)
+        x = support_time.split()[0]
+        if x.endswith("y"):
+            return 12 * int(x[0:-1])
+        elif x.endswith("m"):
+            return int(x[0:-1])
+        else:
+            raise ValueError("support time '%s' has to end with y or m" % x)
+    return cmp(support_to_int(x), support_to_int(y))
+
+def get_packages_support_time(structure, name, pkg_support_time, support_timeframe_list):
+    """
+    input a structure file and a list of pair<timeframe, seedlist>
+    return a dict of pkgnames -> support timeframe string
+    """
+    for (timeframe, seedlist) in support_timeframe_list:
+        expanded = set()
+        for s in seedlist:
+            expanded.add(s)
+            expanded |= expand_seeds(structure, s)
+        pkgs_in_seeds = get_packages_for_seeds(name, distro, expanded)
+        for seed in pkgs_in_seeds:
+            for pkg in pkgs_in_seeds[seed]:
+                if not pkg in pkg_support_time:
+                    pkg_support_time[pkg] = timeframe
+                else:
+                    old_timeframe = pkg_support_time[pkg]
+                    if compare_support_level(old_timeframe, timeframe) < 0:
+                        logging.debug("overwriting %s from %s to %s" % (
+                                pkg, old_timeframe, timeframe))
+                        pkg_support_time[pkg] = timeframe
+                if options.with_seeds:
+                    pkg_support_time[pkg] += " (%s)" % ", ".join(what_seeds(pkg, pkgs_in_seeds))
+
+
+    return pkg_support_time
+
+if __name__ == "__main__":
+    parser = OptionParser()
+    parser.add_option("--with-seeds", "", default=False,
+                      action="store_true", 
+                      help="add seed(s) of the package that are responsible for the maintaince time")
+    parser.add_option("--source-packages", "", default=False,
+                      action="store_true", 
+                      help="show as source pkgs")
+    parser.add_option("--hints-file", "", default=None,
+                      help="use diffenrt use hints file location")
+    (options, args) = parser.parse_args()
+
+    # init
+    if len(args) > 0:
+        distro = args[0]
+        if distro[0] < 'h':
+            print "ERROR: only hardy or later is supported"
+            sys.exit(1)
+    else:
+        distro = "lucid"
+
+    # make sure our deb-src information is up-to-date
+    create_and_update_deb_src_source_list(distro)
+
+    if options.hints_file:
+        hints_file = options.hints_file
+        (schema, netloc, path, query, fragment) = urlparse.urlsplit(hints_file)
+        if not schema:
+            hints_file = "file:%s" % path
+    else:
+        hints_file = HINTS_DIR_URL % distro
+        
+    # go over the distros we need to check
+    pkg_support_time = {}
+    for (name, lts_supported) in DISTRO_NAMES_AND_LTS_SUPPORT:
+
+        # get basic structure file
+        structure = get_structure(name, distro)
+    
+        # get dicts of pkgname -> support timeframe string
+        if lts_supported and distro in LTS_RELEASES:
+            support_timeframe = SUPPORT_TIMEFRAME_LTS
+        else:
+            support_timeframe = SUPPORT_TIMEFRAME
+        get_packages_support_time(structure, name, pkg_support_time, support_timeframe)
+
+    # now go over the bits in main that we have not seen (because
+    # they are not in any seed and got added manually into "main")
+    for arch in PRIMARY_ARCHES:
+        rootdir="./aptroot.%s" % distro
+        apt_pkg.Config.Set("APT::Architecture", arch)
+        cache = apt.Cache(rootdir=rootdir)
+        try:
+            cache.update(apt.progress.FetchProgress())
+        except SystemError:
+            logging.exception("cache.update() failed")
+        cache.open(apt.progress.OpProgress())
+        for pkg in cache:
+            if pkg.name not in pkg_support_time:
+                pkg_support_time[pkg.name] = support_timeframe[-1][0]
+                logging.warn("add package in main but not in seeds %s with %s" % 
+                             (pkg.name, pkg_support_time[pkg.name]))
+
+    # now check the hints file that is used to override
+    # the default seeds
+    try:
+        for line in urllib2.urlopen(hints_file):
+            line = line.strip()
+            if not line or line.startswith("#"):
+                continue
+            try:
+                (raw_pkgname, support_time) = line.split()
+                for pkgname in expand_src_pkgname(raw_pkgname):
+                    if support_time == 'unsupported':
+                        try:
+                            del pkg_support_time[pkgname]
+                            sys.stderr.write("hints-file: marking %s unsupported\n" % pkgname)
+                        except KeyError:
+                            pass
+                    else:
+                        if pkg_support_time.get(pkgname) != support_time:
+                            sys.stderr.write(
+                                "hints-file: changing %s from %s to %s\n" % (
+                                    pkgname,  pkg_support_time.get(pkgname), 
+                                    support_time))
+                            pkg_support_time[pkgname] = support_time
+            except Exception:
+                logging.exception("cannot parse line '%s'" % line)
+    except urllib2.HTTPError, e:
+        if e.code != 404:
+            raise
+        sys.stderr.write("hints-file: %s gave 404 error\n" % hints_file)
+    
+    # output suitable for the extra-override file
+    for pkgname in sorted(pkg_support_time.keys()):
+        # special case, the hints file may contain overrides that
+        # are arch-specific (like zsh-doc/armel)
+        if "/" in pkgname:
+            print "%s %s %s" % (
+                pkgname, SUPPORT_TAG, pkg_support_time[pkgname])
+        else:
+            # go over the supported arches; they are divided into
+            # first-class (PRIMARY) and second-class arches with
+            # different support levels
+            for arch in SUPPORTED_ARCHES:
+                # ensure we do not overwrite arch-specific overrides
+                pkgname_and_arch = "%s/%s" % (pkgname, arch)
+                if pkgname_and_arch in pkg_support_time:
+                    break
+                if arch in PRIMARY_ARCHES:
+                    # arch with full LTS support
+                    print "%s %s %s" % (
+                        pkgname_and_arch, SUPPORT_TAG, pkg_support_time[pkgname])
+                else:
+                    # not an LTS-supported architecture, gets only the
+                    # regular support timeframe
+                    print "%s %s %s" % (
+                        pkgname_and_arch, SUPPORT_TAG, SUPPORT_TIMEFRAME[0][0])
+                
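For illustration, not part of the attached diff: compare_support_level() above normalises "18m"/"3y" style strings to a month count before handing them to cmp(). A minimal standalone sketch of that normalisation, assuming the same input format:

    def support_to_months(support_time):
        # tolerate annotated values like "5y (kubuntu-common)"
        value = support_time.split()[0]
        if value.endswith("y"):
            return 12 * int(value[:-1])
        elif value.endswith("m"):
            return int(value[:-1])
        raise ValueError("support time '%s' has to end with y or m" % value)

    assert support_to_months("18m") < support_to_months("3y")  # 18 < 36 months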

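Also for illustration, not part of the diff: the hints file consumed above is line-oriented, with "#" comments and one "<pkgname> <support-time|unsupported>" pair per line, where the package name may carry an arch qualifier (like zsh-doc/armel). Hypothetical contents:

    # blank lines and comments are skipped
    zsh-doc/armel 18m
    somepackage unsupported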
=== modified file 'cronscripts/reclaimbranchspace.py'
--- cronscripts/reclaimbranchspace.py	2009-09-03 20:06:45 +0000
+++ cronscripts/reclaimbranchspace.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== added file 'cronscripts/request_daily_builds.py'
--- cronscripts/request_daily_builds.py	1970-01-01 00:00:00 +0000
+++ cronscripts/request_daily_builds.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,44 @@
+#!/usr/bin/python -S
+#
+# Copyright 2010 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# pylint: disable-msg=W0403
+
+"""Request builds for stale daily build recipes."""
+
+__metaclass__ = type
+
+import _pythonpath
+
+import transaction
+from zope.component import getUtility
+
+from canonical.config import config
+# fix circular import issue
+import canonical.launchpad.interfaces
+from lp.code.interfaces.sourcepackagerecipebuild import (
+    ISourcePackageRecipeBuildSource,)
+from lp.services.scripts.base import LaunchpadCronScript
+from canonical.launchpad.webapp.errorlog import globalErrorUtility
+
+
+class RequestDailyBuilds(LaunchpadCronScript):
+    """Run create merge proposal jobs."""
+
+    def __init__(self):
+        name = 'request_daily_builds'
+        dbuser = config.request_daily_builds.dbuser
+        LaunchpadCronScript.__init__(self, name, dbuser)
+
+    def main(self):
+        globalErrorUtility.configure(self.name)
+        source = getUtility(ISourcePackageRecipeBuildSource)
+        builds = source.makeDailyBuilds()
+        self.logger.info('Requested %d daily builds.' % len(builds))
+        transaction.commit()
+
+
+if __name__ == '__main__':
+    script = RequestDailyBuilds()
+    script.lock_and_run()

=== modified file 'cronscripts/rosetta-approve-imports.py'
--- cronscripts/rosetta-approve-imports.py	2009-08-04 09:24:21 +0000
+++ cronscripts/rosetta-approve-imports.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#! /usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -9,27 +9,11 @@
 
 import _pythonpath
 
-from canonical.config import config
-from canonical.database.sqlbase import ISOLATION_LEVEL_READ_COMMITTED
-from lp.translations.scripts.po_import import AutoApproveProcess
-from lp.services.scripts.base import LaunchpadCronScript
-
-
-class RosettaImportApprover(LaunchpadCronScript):
-    def main(self):
-        self.txn.set_isolation_level(ISOLATION_LEVEL_READ_COMMITTED)
-        process = AutoApproveProcess(self.txn, self.logger)
-        self.logger.debug('Starting auto-approval of translation imports')
-        process.run()
-        self.logger.debug('Completed auto-approval of translation imports')
+from lp.translations.scripts.import_queue_gardener import ImportQueueGardener
 
 
 if __name__ == '__main__':
-    script = RosettaImportApprover('rosetta-approve-imports',
-        dbuser='poimportapprover')
-    script.lock_or_quit()
-    try:
-        script.run()
-    finally:
-        script.unlock()
-
+    script = ImportQueueGardener(
+        'translations-import-queue-gardener',
+        dbuser='translations_import_queue_gardener')
+    script.lock_and_run()

=== modified file 'cronscripts/rosetta-branches.py'
--- cronscripts/rosetta-branches.py	2009-09-03 20:29:25 +0000
+++ cronscripts/rosetta-branches.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -17,7 +17,7 @@
 from zope.component import getUtility
 
 from canonical.config import config
-from lp.codehosting.vfs.branchfs import get_scanner_server
+from lp.codehosting.vfs.branchfs import get_ro_server
 from lp.services.job.runner import JobRunner
 from lp.code.interfaces.branchjob import IRosettaUploadJobSource
 from lp.services.scripts.base import LaunchpadCronScript
@@ -31,12 +31,12 @@
         globalErrorUtility.configure('rosettabranches')
         runner = JobRunner.fromReady(
             getUtility(IRosettaUploadJobSource), self.logger)
-        server = get_scanner_server()
-        server.setUp()
+        server = get_ro_server()
+        server.start_server()
         try:
             runner.runAll()
         finally:
-            server.tearDown()
+            server.stop_server()
         self.logger.info('Ran %d RosettaBranchJobs.',
                          len(runner.completed_jobs))
 

=== modified file 'cronscripts/rosetta-export-queue.py'
--- cronscripts/rosetta-export-queue.py	2009-07-17 00:26:05 +0000
+++ cronscripts/rosetta-export-queue.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'cronscripts/rosetta-pofile-stats-daily.py'
--- cronscripts/rosetta-pofile-stats-daily.py	2009-08-07 17:58:19 +0000
+++ cronscripts/rosetta-pofile-stats-daily.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'cronscripts/rosetta-pofile-stats.py'
--- cronscripts/rosetta-pofile-stats.py	2009-08-06 12:30:58 +0000
+++ cronscripts/rosetta-pofile-stats.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'cronscripts/rosetta-poimport.py'
--- cronscripts/rosetta-poimport.py	2009-07-17 00:26:05 +0000
+++ cronscripts/rosetta-poimport.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -8,31 +8,9 @@
 import _pythonpath
 
 from canonical.config import config
-from canonical.database.sqlbase import ISOLATION_LEVEL_READ_COMMITTED
-from lp.translations.scripts.po_import import ImportProcess
-from lp.services.scripts.base import LaunchpadCronScript
-
-# Time goal for this run.  It is not exact.  The script will run for longer
-# than this time, but will know to stop taking on new batches of imports.
-# Since script is run every 9 or 10 minutes, we set the "alarm" at 8 minutes.
-# That leaves a bit of time to complete the last ongoing batch of imports.
-SECONDS_TO_RUN = 8 * 60
-
-class RosettaPOImporter(LaunchpadCronScript):
-    def main(self):
-        self.txn.set_isolation_level(ISOLATION_LEVEL_READ_COMMITTED)
-        process = ImportProcess(self.txn, self.logger, SECONDS_TO_RUN)
-        self.logger.debug('Starting the import process')
-        process.run()
-        self.logger.debug('Finished the import process')
-
+from lp.translations.scripts.po_import import TranslationsImport
 
 if __name__ == '__main__':
-    script = RosettaPOImporter('rosetta-poimport',
-        dbuser=config.poimport.dbuser)
-    script.lock_or_quit()
-    try:
-        script.run()
-    finally:
-        script.unlock()
-
+    script = TranslationsImport(
+        'rosetta-poimport', dbuser=config.poimport.dbuser)
+    script.lock_and_run()

=== added file 'cronscripts/scan_branches.py'
--- cronscripts/scan_branches.py	1970-01-01 00:00:00 +0000
+++ cronscripts/scan_branches.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,25 @@
+#!/usr/bin/python -S
+#
+# Copyright 2010 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Scan branches for new revisions."""
+
+__metaclass__ = type
+
+import _pythonpath
+
+from lp.services.job.runner import JobCronScript
+from lp.code.interfaces.branchjob import IBranchScanJobSource
+
+
+class RunScanBranches(JobCronScript):
+    """Run BranchScanJob jobs."""
+
+    config_name = 'branchscanner'
+    source_interface = IBranchScanJobSource
+
+
+if __name__ == '__main__':
+    script = RunScanBranches()
+    script.lock_and_run()

=== modified file 'cronscripts/send-bug-notifications.py'
--- cronscripts/send-bug-notifications.py	2009-06-24 20:52:01 +0000
+++ cronscripts/send-bug-notifications.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'cronscripts/send-person-notifications.py'
--- cronscripts/send-person-notifications.py	2009-06-24 20:52:01 +0000
+++ cronscripts/send-person-notifications.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -14,15 +14,11 @@
 __metaclass__ = type
 
 import _pythonpath
-from datetime import timedelta, datetime
-import pytz
-
-from zope.component import getUtility
 
 from canonical.config import config
-from canonical.launchpad.interfaces.personnotification import (
-    IPersonNotificationSet)
+
 from lp.services.scripts.base import LaunchpadCronScript
+from lp.registry.scripts.personnotification import PersonNotificationManager
 
 
 class SendPersonNotifications(LaunchpadCronScript):
@@ -36,39 +32,9 @@
     """
 
     def main(self):
-        notifications_sent = False
-        notification_set = getUtility(IPersonNotificationSet)
-        pending_notifications = notification_set.getNotificationsToSend()
-        self.logger.info(
-            '%d notification(s) to send.' % pending_notifications.count())
-        for notification in pending_notifications:
-            person = notification.person
-            self.logger.info(
-                "Sending notification to %s <%s>."
-                % (person.name, person.preferredemail.email))
-            notification.send()
-            notifications_sent = True
-            # Commit after each email sent, so that we won't re-mail the
-            # notifications in case of something going wrong in the middle.
-            self.txn.commit()
-
-        if not notifications_sent:
-            self.logger.debug("No notifications were sent.")
-
-        # Delete PersonNotifications that are older than the retention
-        # limit set in the configuration.
-        retained_days = timedelta(
-            days=int(config.person_notification.retained_days))
-        time_limit = (datetime.now(pytz.timezone('UTC')) - retained_days)
-        to_delete = notification_set.getNotificationsOlderThan(time_limit)
-        if to_delete.count():
-            self.logger.info(
-                "Notification retention limit is %s." % retained_days)
-            self.logger.info(
-                "Deleting %d old notification(s)." % to_delete.count())
-            for notification in to_delete:
-                notification.destroySelf()
-            self.txn.commit()
+        manager = PersonNotificationManager(self.txn, self.logger)
+        unsent_notifications = manager.sendNotifications()
+        manager.purgeNotifications(unsent_notifications)
 
 
 if __name__ == '__main__':

=== modified file 'cronscripts/sendbranchmail.py'
--- cronscripts/sendbranchmail.py	2009-09-03 19:25:57 +0000
+++ cronscripts/sendbranchmail.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -16,7 +16,7 @@
 from zope.component import getUtility
 
 from canonical.config import config
-from lp.codehosting.vfs import get_scanner_server
+from lp.codehosting.vfs import get_ro_server
 from lp.services.job.runner import JobRunner
 from lp.code.interfaces.branchjob import (
     IRevisionMailJobSource, IRevisionsAddedJobSource)
@@ -32,12 +32,12 @@
         jobs = list(getUtility(IRevisionMailJobSource).iterReady())
         jobs.extend(getUtility(IRevisionsAddedJobSource).iterReady())
         runner = JobRunner(jobs, self.logger)
-        server = get_scanner_server()
-        server.setUp()
+        server = get_ro_server()
+        server.start_server()
         try:
             runner.runAll()
         finally:
-            server.tearDown()
+            server.stop_server()
         self.logger.info(
             'Ran %d RevisionMailJobs.' % len(runner.completed_jobs))
 

=== modified file 'cronscripts/supermirror-pull.py'
--- cronscripts/supermirror-pull.py	2009-07-23 02:07:29 +0000
+++ cronscripts/supermirror-pull.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -10,12 +10,12 @@
 
 from twisted.internet import defer, reactor
 from twisted.python import log as tplog
-from twisted.web.xmlrpc import Proxy
 
-from lp.codehosting.puller import mirror, scheduler
 from canonical.config import config
 from canonical.launchpad.scripts import logger_options
-from canonical.twistedsupport.loggingsupport import set_up_logging_for_script
+from lp.codehosting.puller import mirror, scheduler
+from lp.services.twistedsupport.loggingsupport import (
+    LoggingProxy, set_up_logging_for_script)
 
 def clean_shutdown(ignored):
     reactor.stop()
@@ -38,12 +38,14 @@
 if __name__ == '__main__':
     parser = OptionParser()
     logger_options(parser)
+    parser.add_option('--branch-type', action='append', default=[])
     (options, arguments) = parser.parse_args()
     if arguments:
         parser.error("Unhandled arguments %s" % repr(arguments))
     log = set_up_logging_for_script(options, 'supermirror_puller')
     manager = scheduler.JobScheduler(
-        Proxy(config.codehosting.branch_puller_endpoint), log)
+        LoggingProxy(config.codehosting.codehosting_endpoint, log), log,
+        options.branch_type)
 
     reactor.callWhenRunning(run_mirror, log, manager)
     reactor.run()

=== removed directory 'cronscripts/test'
=== modified file 'cronscripts/translations-export-to-branch.py'
--- cronscripts/translations-export-to-branch.py	2009-07-17 18:46:25 +0000
+++ cronscripts/translations-export-to-branch.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 # pylint: disable-msg=W0403
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the

=== modified file 'cronscripts/update-bugtask-targetnamecaches.py'
--- cronscripts/update-bugtask-targetnamecaches.py	2009-06-24 20:52:01 +0000
+++ cronscripts/update-bugtask-targetnamecaches.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== added file 'cronscripts/update-bugzilla-remote-components.py'
--- cronscripts/update-bugzilla-remote-components.py	1970-01-01 00:00:00 +0000
+++ cronscripts/update-bugzilla-remote-components.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,41 @@
+#!/usr/bin/python -S
+#
+# Copyright 2010 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# pylint: disable-msg=W0403
+import _pythonpath
+
+import time
+
+from canonical.config import config
+from lp.services.scripts.base import LaunchpadCronScript
+from canonical.launchpad.scripts.bzremotecomponentfinder import (
+    BugzillaRemoteComponentFinder,
+    )
+
+
+class UpdateRemoteComponentsFromBugzilla(LaunchpadCronScript):
+
+    def add_my_options(self):
+        self.parser.add_option(
+            "-b", "--bugtracker", dest="bugtracker",
+            help="Update only the bug tracker with this name in launchpad")
+
+    def main(self):
+        start_time = time.time()
+        finder = BugzillaRemoteComponentFinder(
+            self.logger)
+        finder.getRemoteProductsAndComponents(
+            bugtracker_name=self.options.bugtracker)
+
+        run_time = time.time() - start_time
+        print("Time for this run: %.3f seconds." % run_time)
+
+
+if __name__ == "__main__":
+
+    updater = UpdateRemoteComponentsFromBugzilla(
+        "updatebugzillaremotecomponents",
+        dbuser=config.updatebugzillaremotecomponents.dbuser)
+    updater.lock_and_run()

=== modified file 'cronscripts/update-cve.py'
--- cronscripts/update-cve.py	2009-06-24 20:52:01 +0000
+++ cronscripts/update-cve.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== added file 'cronscripts/update-database-stats.py'
--- cronscripts/update-database-stats.py	1970-01-01 00:00:00 +0000
+++ cronscripts/update-database-stats.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,43 @@
+#!/usr/bin/python -S
+# Copyright 2010 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Populate the DatabaseTableStats and DatabaseCpuStats tables."""
+
+__metaclass__ = type
+
+import _pythonpath
+
+from zope.component import getUtility
+
+from canonical.launchpad.scripts import db_options
+from canonical.launchpad.webapp.interfaces import (
+    IStoreSelector, MAIN_STORE, MASTER_FLAVOR)
+from lp.services.scripts.base import LaunchpadCronScript
+
+
+class UpdateDatabaseStats(LaunchpadCronScript):
+    """Populate the DatabaseTableStats and DatabaseCpuStats tables."""
+
+    def main(self):
+        "Run UpdateDatabaseTableStats."""
+        store = getUtility(IStoreSelector).get(MAIN_STORE, MASTER_FLAVOR)
+
+        # The logic is in a stored procedure because we want to run
+        # ps(1) on the database server rather than the host this script
+        # is running on.
+        self.logger.debug("Invoking update_database_stats()")
+        store.execute("SELECT update_database_stats()", noresult=True)
+
+        self.logger.debug("Committing")
+        store.commit()
+
+    def add_my_options(self):
+        """Add standard database command line options."""
+        db_options(self.parser)
+
+if __name__ == '__main__':
+    script = UpdateDatabaseStats(
+        'update-database-stats', dbuser='database_stats_update')
+    script.lock_and_run()
+

=== modified file 'cronscripts/update-debwatches.py'
--- cronscripts/update-debwatches.py	2009-06-24 20:52:01 +0000
+++ cronscripts/update-debwatches.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -6,11 +6,11 @@
 # This script runs through the set of Debbugs watches, and tries to
 # syncronise each of those to the malone bug which is watching it.
 
+import _pythonpath
 import os
 import sys
 import email
 import logging
-import _pythonpath
 
 # zope bits
 from zope.component import getUtility

=== modified file 'cronscripts/update-pkgcache.py'
--- cronscripts/update-pkgcache.py	2009-06-24 20:52:01 +0000
+++ cronscripts/update-pkgcache.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -47,7 +47,7 @@
         PPA archives caches are consolidated in a Archive row to optimize
         searches across PPAs.
         """
-        for distroseries in distribution.serieses:
+        for distroseries in distribution.series:
             self.updateDistroSeriesCache(distroseries, archive)
 
         distribution.removeOldCacheItems(archive, log=self.logger)
@@ -101,6 +101,6 @@
 
 if __name__ == '__main__':
     script = PackageCacheUpdater(
-        'update-cache', dbuser=config.statistician.dbuser)
+        'update-cache', dbuser="update-pkg-cache")
     script.lock_and_run()
 

=== modified file 'cronscripts/update-remote-product.py'
--- cronscripts/update-remote-product.py	2009-06-24 20:52:01 +0000
+++ cronscripts/update-remote-product.py	2010-11-07 00:31:57 +0000
@@ -1,14 +1,18 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
+"""Cron job to update Product.remote_product using bug watch information.
+
+This script sets the remote_product string value on Launchpad Products
+by looking it up from one of the product's bug watches.
+"""
+
 # pylint: disable-msg=W0403
-
-"""Cron job to update Product.remote_product using bug watch information.  """
+import _pythonpath
 
 import time
-import _pythonpath
 
 from canonical.config import config
 from lp.services.scripts.base import LaunchpadCronScript

=== modified file 'cronscripts/update-sourceforge-remote-products.py'
--- cronscripts/update-sourceforge-remote-products.py	2009-06-24 20:52:01 +0000
+++ cronscripts/update-sourceforge-remote-products.py	2010-11-07 00:31:57 +0000
@@ -1,14 +1,14 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
+"""Cron job to update remote_products using SourceForge project data."""
+
 # pylint: disable-msg=W0403
-
-"""Cron job to update remote_products using SourceForge project data."""
+import _pythonpath
 
 import time
-import _pythonpath
 
 from canonical.config import config
 from lp.services.scripts.base import LaunchpadCronScript

=== modified file 'cronscripts/update-standing.py'
--- cronscripts/update-standing.py	2009-06-24 20:52:01 +0000
+++ cronscripts/update-standing.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'cronscripts/update-stats.py'
--- cronscripts/update-stats.py	2009-06-24 20:52:01 +0000
+++ cronscripts/update-stats.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -28,7 +28,7 @@
         # objects are responsible for committing.
         distroset = getUtility(IDistributionSet)
         for distro in distroset:
-            for distroseries in distro.serieses:
+            for distroseries in distro.series:
                 distroseries.updateStatistics(self.txn)
 
         launchpad_stats = getUtility(ILaunchpadStatisticSet)

=== removed file 'cronscripts/update_preview_diffs.py'
--- cronscripts/update_preview_diffs.py	2009-09-01 19:00:46 +0000
+++ cronscripts/update_preview_diffs.py	1970-01-01 00:00:00 +0000
@@ -1,34 +0,0 @@
-#!/usr/bin/python2.4
-#
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-# pylint: disable-msg=W0403
-
-"""Update or create previews diffs for branch merge proposals."""
-
-__metaclass__ = type
-
-import _pythonpath
-
-from lp.codehosting.vfs import get_scanner_server
-from lp.services.job.runner import JobCronScript
-from lp.code.interfaces.branchmergeproposal import (
-    IUpdatePreviewDiffJobSource,)
-
-
-class RunUpdatePreviewDiffJobs(JobCronScript):
-    """Run UpdatePreviewDiff jobs."""
-
-    config_name = 'update_preview_diffs'
-    source_interface = IUpdatePreviewDiffJobSource
-
-    def setUp(self):
-        server = get_scanner_server()
-        server.setUp()
-        return [server.tearDown]
-
-
-if __name__ == '__main__':
-    script = RunUpdatePreviewDiffJobs()
-    script.lock_and_run()

=== added file 'cronscripts/upgrade_branches.py'
--- cronscripts/upgrade_branches.py	1970-01-01 00:00:00 +0000
+++ cronscripts/upgrade_branches.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,25 @@
+#!/usr/bin/python -S
+#
+# Copyright 2009 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Upgrade branches to the most recent format."""
+
+__metaclass__ = type
+
+import _pythonpath
+
+from lp.services.job.runner import JobCronScript
+from lp.code.interfaces.branchjob import IBranchUpgradeJobSource
+
+
+class RunUpgradeBranches(JobCronScript):
+    """Run UpgradeBranchJob jobs."""
+
+    config_name = 'upgrade_branches'
+    source_interface = IBranchUpgradeJobSource
+
+
+if __name__ == '__main__':
+    script = RunUpgradeBranches()
+    script.lock_and_run()

=== added symlink 'daemons/_pythonpath.py'
=== target is u'../_pythonpath.py'
=== modified file 'daemons/buildd-manager.tac'
--- daemons/buildd-manager.tac	2009-06-24 20:55:31 +0000
+++ daemons/buildd-manager.tac	2010-11-07 00:31:57 +0000
@@ -5,21 +5,28 @@
 # Use with "twistd2.4 -y <file.tac>", e.g. "twistd -noy server.tac"
 
 from twisted.application import service
+from twisted.scripts.twistd import ServerOptions
 from twisted.web import server
 
 from lp.buildmaster.manager import BuilddManager
+from lp.services.twistedsupport.loggingsupport import RotatableFileLogObserver
 from canonical.config import config
-from canonical.launchpad.daemons import tachandler
+from canonical.launchpad.daemons import readyservice
 from canonical.launchpad.scripts import execute_zcml_for_scripts
 from canonical.lp import initZopeless
 
 execute_zcml_for_scripts()
 initZopeless(dbuser=config.builddmaster.dbuser)
 
+options = ServerOptions()
+options.parseOptions()
+
 application = service.Application('BuilddManager')
+application.addComponent(
+    RotatableFileLogObserver(options.get('logfile')), ignoreClass=1)
 
 # Service that announces when the daemon is ready.
-tachandler.ReadyService().setServiceParent(application)
+readyservice.ReadyService().setServiceParent(application)
 
 # Service for scanning buildd slaves.
 service = BuilddManager()

=== removed file 'daemons/buildd-sequencer.tac'
--- daemons/buildd-sequencer.tac	2009-06-24 20:55:31 +0000
+++ daemons/buildd-sequencer.tac	1970-01-01 00:00:00 +0000
@@ -1,31 +0,0 @@
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-# Twisted Application Configuration file.
-# Use with "twistd -y <file.tac>", e.g. "twistd -noy server.tac"
-
-from twisted.application import service
-
-from canonical.buildd.sequencer import BuildSequencer
-
-# Construct the application
-application = service.Application("BuildSequencer")
-
-class BuildSequencerService(service.Service):
-    def __init__(self, buildSequencer):
-        self.buildSequencer = buildSequencer
- 
-    def startService(self):
-        # Kick everything off...
-        self.buildSequencer.scheduleCallback()
- 
-
-# Construct the sequencer. It will automatically schedule the first job.
-bseq = BuildSequencer()
-# Make a service out of the sequencer
-bserv = BuildSequencerService(bseq)
-
-# Activate the service
-BuildSequencerService(bseq).setServiceParent(application)
-
-# Falling off the end here passes into twisted's reactor.

=== modified file 'daemons/buildd-slave.tac'
--- daemons/buildd-slave.tac	2009-06-24 20:55:31 +0000
+++ daemons/buildd-slave.tac	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
+# Copyright 2009, 2010 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 # Author: Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx>
@@ -8,8 +8,12 @@
 # passed through to the twistd log too. this could get dangerous/big
 
 from twisted.application import service, strports
-from canonical.buildd import XMLRPCBuildDSlave, DebianBuildManager
-from canonical.launchpad.daemons import tachandler
+from canonical.buildd import XMLRPCBuildDSlave
+from canonical.buildd.binarypackage import BinaryPackageBuildManager
+from canonical.buildd.sourcepackagerecipe import (
+    SourcePackageRecipeBuildManager)
+from canonical.buildd.translationtemplates import TranslationTemplatesBuildManager
+from canonical.launchpad.daemons import readyservice
 
 from twisted.web import server, resource, static
 from ConfigParser import SafeConfigParser
@@ -22,13 +26,17 @@
 conf.read(conffile)
 slave = XMLRPCBuildDSlave(conf)
 
-slave.registerBuilder(DebianBuildManager,"debian")
+# 'debian' is the old name. It remains here for compatibility.
+slave.registerBuilder(BinaryPackageBuildManager, "debian")
+slave.registerBuilder(BinaryPackageBuildManager, "binarypackage")
+slave.registerBuilder(SourcePackageRecipeBuildManager, "sourcepackagerecipe")
+slave.registerBuilder(TranslationTemplatesBuildManager, 'translation-templates')
 
 application = service.Application('BuildDSlave')
 builddslaveService = service.IServiceCollection(application)
 
 # Service that announces when the daemon is ready
-tachandler.ReadyService().setServiceParent(builddslaveService)
+readyservice.ReadyService().setServiceParent(builddslaveService)
 
 root = resource.Resource()
 root.putChild('rpc', slave)
@@ -43,5 +51,5 @@
 #
 # python
 # import xmlrpclib
-# s = xmlrpclib.Server("http://localhost:8221/";)
+# s = xmlrpclib.ServerProxy("http://localhost:8221/rpc";)
 # s.echo("Hello World")

=== added file 'daemons/cache-database-replication-lag.py'
--- daemons/cache-database-replication-lag.py	1970-01-01 00:00:00 +0000
+++ daemons/cache-database-replication-lag.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,53 @@
+#!/usr/bin/python -S
+#
+# Copyright 2010 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Calculate database replication lag and cache it."""
+
+__metaclass__ = type
+__all__ = []
+
+import _pythonpath
+
+import sys
+import time
+
+import psycopg2
+
+from canonical.database.sqlbase import connect, ISOLATION_LEVEL_AUTOCOMMIT
+from canonical.launchpad.scripts import db_options, logger
+from lp.scripts.helpers import LPOptionParser
+
+
+def main(args=None):
+    parser = LPOptionParser()
+    db_options(parser)
+    parser.add_option(
+        "-s", "--sleep", dest="sleep", type="int", default=5,
+        metavar="SECS", help="Wait SECS seconds between refreshes.")
+
+    (options, args) = parser.parse_args(args)
+    if len(args) != 0:
+        parser.error("Too many arguments.")
+
+    log = logger(options)
+
+    while True:
+        try:
+            con = connect(user="lagmon", isolation=ISOLATION_LEVEL_AUTOCOMMIT)
+            cur = con.cursor()
+            while True:
+                cur.execute("SELECT update_replication_lag_cache()")
+                if cur.fetchone()[0]:
+                    log.info("Updated.")
+                else:
+                    log.error("update_replication_lag_cache() failed.")
+                time.sleep(options.sleep)
+        except psycopg2.Error, x:
+            log.error("%s. Retrying.", str(x).strip())
+            time.sleep(options.sleep)
+
+
+if __name__ == '__main__':
+    sys.exit(main())

=== modified file 'daemons/distributionmirror_http_server.tac'
--- daemons/distributionmirror_http_server.tac	2009-06-24 20:55:31 +0000
+++ daemons/distributionmirror_http_server.tac	2010-11-07 00:31:57 +0000
@@ -9,7 +9,7 @@
 from twisted.application import service, internet, strports
 from twisted.web import server
 
-from canonical.launchpad.daemons import tachandler
+from canonical.launchpad.daemons import readyservice
 from lp.registry.tests.distributionmirror_http_server import (
     DistributionMirrorTestHTTPServer)
 
@@ -18,7 +18,7 @@
 httpserverService = service.IServiceCollection(application)
 
 # Service that announces when the daemon is ready
-tachandler.ReadyService().setServiceParent(httpserverService)
+readyservice.ReadyService().setServiceParent(httpserverService)
 
 root = DistributionMirrorTestHTTPServer()
 site = server.Site(root)

=== modified file 'daemons/librarian.tac'
--- daemons/librarian.tac	2009-06-24 20:55:31 +0000
+++ daemons/librarian.tac	2010-11-07 00:31:57 +0000
@@ -4,16 +4,24 @@
 # Twisted Application Configuration file.
 # Use with "twistd2.4 -y <file.tac>", e.g. "twistd -noy server.tac"
 
+import signal
+
+from meliae import scanner
+
 from twisted.application import service, strports
+from twisted.internet import reactor
+from twisted.python import log
 from twisted.web import server
 
 from canonical.config import config, dbconfig
-from canonical.launchpad.daemons import tachandler
+from canonical.launchpad.daemons import readyservice
 from canonical.launchpad.scripts import execute_zcml_for_scripts
 
+from canonical.librarian.interfaces import DUMP_FILE, SIGDUMPMEM
 from canonical.librarian.libraryprotocol import FileUploadFactory
 from canonical.librarian import storage, db
 from canonical.librarian import web as fatweb
+from lp.services.twistedsupport.loggingsupport import set_up_oops_reporting
 
 # Connect to database
 dbconfig.setConfigSection('librarian')
@@ -23,16 +31,20 @@
 if config.librarian_server.upstream_host:
     upstreamHost = config.librarian_server.upstream_host
     upstreamPort = config.librarian_server.upstream_port
-    print 'Using upstream librarian http://%s:%d' % (
-        upstreamHost, upstreamPort)
+    reactor.addSystemEventTrigger(
+        'before', 'startup', log.msg,
+        'Using upstream librarian http://%s:%d' %
+        (upstreamHost, upstreamPort))
 else:
     upstreamHost = upstreamPort = None
+    reactor.addSystemEventTrigger(
+        'before', 'startup', log.msg, 'Not using upstream librarian')
 
 application = service.Application('Librarian')
 librarianService = service.IServiceCollection(application)
 
 # Service that announces when the daemon is ready
-tachandler.ReadyService().setServiceParent(librarianService)
+readyservice.ReadyService().setServiceParent(librarianService)
 
 def setUpListener(uploadPort, webPort, restricted):
     """Set up a librarian listener on the given ports.
@@ -64,3 +76,12 @@
 webPort = config.librarian.restricted_download_port
 uploadPort = config.librarian.restricted_upload_port
 setUpListener(uploadPort, webPort, restricted=True)
+
+# Log OOPS reports
+set_up_oops_reporting('librarian', 'librarian')
+
+# Setup a signal handler to dump the process' memory upon 'kill -44'.
+def sigdumpmem_handler(signum, frame):
+    scanner.dump_all_objects(DUMP_FILE)
+
+signal.signal(SIGDUMPMEM, sigdumpmem_handler)
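For reference, not part of the diff: with this handler installed, "kill -44 <librarian pid>" makes the process write its object graph to DUMP_FILE. A sketch of inspecting the dump offline, assuming meliae's loader API:

    from canonical.librarian.interfaces import DUMP_FILE
    from meliae import loader

    om = loader.load(DUMP_FILE)  # parse the dump written by the handler
    om.summarize()               # per-type object counts and memory totals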

=== added file 'daemons/poppy-sftp.tac'
--- daemons/poppy-sftp.tac	1970-01-01 00:00:00 +0000
+++ daemons/poppy-sftp.tac	2010-11-07 00:31:57 +0000
@@ -0,0 +1,103 @@
+# Copyright 2010 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# This is a Twisted application config file.  To run, use:
+#     twistd -noy sftp.tac
+# or similar.  Refer to the twistd(1) man page for details.
+
+import os
+
+from twisted.application import service
+from twisted.conch.interfaces import ISession
+from twisted.conch.ssh import filetransfer
+from twisted.cred.portal import IRealm, Portal
+from twisted.python import components
+from twisted.web.xmlrpc import Proxy
+
+from zope.interface import implements
+
+from canonical.config import config
+from canonical.launchpad.daemons import readyservice
+
+from lp.poppy.twistedsftp import SFTPServer
+from lp.services.sshserver.auth import (
+    LaunchpadAvatar, PublicKeyFromLaunchpadChecker)
+from lp.services.sshserver.service import SSHService
+from lp.services.sshserver.session import DoNothingSession
+
+# XXX: Rename this file to something that doesn't mention poppy. Talk to
+# bigjools.
+
+
+def make_portal():
+    """Create and return a `Portal` for the SSH service.
+
+    This portal accepts SSH credentials and returns our customized SSH
+    avatars (see `LaunchpadAvatar`).
+    """
+    authentication_proxy = Proxy(
+        config.poppy.authentication_endpoint)
+    portal = Portal(Realm(authentication_proxy))
+    portal.registerChecker(
+        PublicKeyFromLaunchpadChecker(authentication_proxy))
+    return portal
+
+
+class Realm:
+    implements(IRealm)
+
+    def __init__(self, authentication_proxy):
+        self.authentication_proxy = authentication_proxy
+
+    def requestAvatar(self, avatar_id, mind, *interfaces):
+        # Fetch the user's details from the authserver
+        deferred = mind.lookupUserDetails(
+            self.authentication_proxy, avatar_id)
+
+        # Once all those details are retrieved, we can construct the avatar.
+        def got_user_dict(user_dict):
+            avatar = LaunchpadAvatar(user_dict)
+            return interfaces[0], avatar, avatar.logout
+
+        return deferred.addCallback(got_user_dict)
+
+
+def get_poppy_root():
+    """Return the poppy root to use for this server.
+
+    If the POPPY_ROOT environment variable is set, use that. If not, use
+    config.poppy.fsroot.
+    """
+    poppy_root = os.environ.get('POPPY_ROOT', None)
+    if poppy_root:
+        return poppy_root
+    return config.poppy.fsroot
+
+
+def poppy_sftp_adapter(avatar):
+    return SFTPServer(avatar, get_poppy_root())
+
+
+components.registerAdapter(
+    poppy_sftp_adapter, LaunchpadAvatar, filetransfer.ISFTPServer)
+
+components.registerAdapter(DoNothingSession, LaunchpadAvatar, ISession)
+
+
+# Construct an Application that has the Poppy SSH server.
+application = service.Application('poppy-sftp')
+svc = SSHService(
+    portal=make_portal(),
+    private_key_path=config.poppy.host_key_private,
+    public_key_path=config.poppy.host_key_public,
+    oops_configuration='poppy',
+    main_log='poppy',
+    access_log='poppy.access',
+    access_log_path=config.poppy.access_log,
+    strport=config.poppy.port,
+    idle_timeout=config.poppy.idle_timeout,
+    banner=config.poppy.banner)
+svc.setServiceParent(application)
+
+# Service that announces when the daemon is ready
+readyservice.ReadyService().setServiceParent(application)
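For reference, not part of the diff: the registerAdapter() calls above mean the SFTP server and session are obtained by adapting the avatar rather than constructed directly; given a LaunchpadAvatar instance (called avatar here for illustration):

    sftp_server = filetransfer.ISFTPServer(avatar)  # -> poppy_sftp_adapter(avatar)
    session = ISession(avatar)                      # -> DoNothingSession(avatar)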

=== modified file 'daemons/poppy-upload.py'
--- daemons/poppy-upload.py	2009-06-30 21:26:26 +0000
+++ daemons/poppy-upload.py	2010-11-07 00:31:57 +0000
@@ -1,57 +1,10 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 import sys
-import logging
-import optparse
-
-from canonical.poppy.server import run_server
-from lp.archiveuploader.poppyinterface import PoppyInterface
-from canonical.launchpad.scripts import logger, logger_options
-
-
-def main():
-
-    parser = optparse.OptionParser()
-    logger_options(parser)
-
-    parser.add_option("--cmd", action="store", metavar="CMD",
-                      help="Run CMD after each upload completion")
-
-    parser.add_option("--allow-user", action="store", metavar="USER",
-                      default='ubuntu',
-                      help="Username allowed to log in.")
-
-    parser.add_option("--permissions", action="store", metavar="PERMS",
-                      default='g+rwxs',
-                      help="Permissions to chmod the targetfsroot with "
-                      "before letting go of the directory.")
-
-    options, args = parser.parse_args()
-
-    log = logger(options, "poppy-upload")
-
-    if len(args) != 2:
-        print "usage: poppy-upload.py rootuploaddirectory port"
-        return 1
-
-    root, port = args
-    # host = "127.0.0.1"
-    # host = "82.211.81.167" # Drescher's public IP
-    host = "0.0.0.0"
-    ident = "lucille upload server"
-    numthreads = 4
-
-    iface = PoppyInterface(root, log, allow_user=options.allow_user,
-                           cmd=options.cmd,
-                           perms=options.permissions)
-
-    run_server(host, int(port), ident, numthreads,
-               iface.new_client_hook, iface.client_done_hook,
-               iface.auth_verify_hook)
-    return 0
+from lp.poppy.daemon import main
 
 if __name__ == '__main__':
     sys.exit(main())

=== modified file 'daemons/sftp.tac'
--- daemons/sftp.tac	2009-06-24 20:55:31 +0000
+++ daemons/sftp.tac	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
+# Copyright 2009-2010 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 # This is a Twisted application config file.  To run, use:
@@ -7,14 +7,29 @@
 
 from twisted.application import service
 
-from canonical.launchpad.daemons import tachandler
-from lp.codehosting.sshserver.service import SSHService
-
-
-# Construct an Application that includes a supermirror SFTP service. 
+from canonical.config import config
+from canonical.launchpad.daemons import readyservice
+
+from lp.codehosting.sshserver.daemon import (
+    ACCESS_LOG_NAME, get_key_path, LOG_NAME, make_portal, OOPS_CONFIG_SECTION,
+    PRIVATE_KEY_FILE, PUBLIC_KEY_FILE)
+from lp.services.sshserver.service import SSHService
+
+
+# Construct an Application that has the codehosting SSH server.
 application = service.Application('sftponly')
-svc = SSHService()
+svc = SSHService(
+    portal=make_portal(),
+    private_key_path=get_key_path(PRIVATE_KEY_FILE),
+    public_key_path=get_key_path(PUBLIC_KEY_FILE),
+    oops_configuration=OOPS_CONFIG_SECTION,
+    main_log=LOG_NAME,
+    access_log=ACCESS_LOG_NAME,
+    access_log_path=config.codehosting.access_log,
+    strport=config.codehosting.port,
+    idle_timeout=config.codehosting.idle_timeout,
+    banner=config.codehosting.banner)
 svc.setServiceParent(application)
 
 # Service that announces when the daemon is ready
-tachandler.ReadyService().setServiceParent(application)
+readyservice.ReadyService().setServiceParent(application)

=== modified file 'daemons/zeca.tac'
--- daemons/zeca.tac	2009-06-24 20:55:31 +0000
+++ daemons/zeca.tac	2010-11-07 00:31:57 +0000
@@ -8,7 +8,7 @@
 from twisted.web import server
 
 from canonical.config import config
-from canonical.launchpad.daemons import tachandler
+from canonical.launchpad.daemons import readyservice
 from canonical.launchpad.scripts import execute_zcml_for_scripts
 from canonical.zeca import Zeca, KeyServer, LookUp, SubmitKey
 
@@ -21,7 +21,7 @@
 zecaService = service.IServiceCollection(application)
 
 # Service that announces when the daemon is ready
-tachandler.ReadyService().setServiceParent(zecaService)
+readyservice.ReadyService().setServiceParent(zecaService)
 
 zeca = Zeca()
 keyserver = KeyServer()

=== modified file 'database/replication/Makefile'
--- database/replication/Makefile	2009-06-24 21:17:33 +0000
+++ database/replication/Makefile	2010-11-07 00:31:57 +0000
@@ -14,10 +14,16 @@
 # To test the staging rebuild script:
 #
 #  $ cd database/replication
-#  $ pg_dump --format=c launchpad_dev > launchpad.dump
-#  $ make stagingsetup STAGING_CONFIG=dev-staging STAGING_DUMP=launchpad.dump
+#  $ pg_dump --format=c launchpad_dev | bzip2 -c > launchpad.dump.bz2
+#  $ make stagingsetup \
+#        STAGING_CONFIG=dev-staging STAGING_DUMP=launchpad.dump.bz2
 #  $ make stagingswitch STAGING_CONFIG=dev-staging
 #
+# To restore a dogfood database:
+#
+#  $ cd database/replication
+#  $ make dogfood DOGFOOD_DBNAME=launchpad_dogfood DOGFOOD_DUMP=launchpad.dump
+#
 
 # This used to be 10 seconds, so we always ran staging lagged to detect
 # replication glitches more easily. However, this does not play well
@@ -31,12 +37,29 @@
 STAGING_CONFIG=staging # For swapping fresh db into place.
 STAGING_DUMP=launchpad.dump # Dumpfile to build new staging from.
 STAGING_TABLESPACE=pg_default # 'pg_default' for default
+DOGFOOD_DBNAME=launchpad_dogfood
+DOGFOOD_DUMP=launchpad.dump
 
 _CONFIG=overridden-on-command-line
 _SLAVE_TABLESPACE=pg_default
 
 PGMASSACRE=../../utilities/pgmassacre.py
 
+CREATEDB_83=createdb --encoding=UTF8
+CREATEDB_84=createdb --encoding=UTF8 --locale=C --template=template0
+CREATEDB=${CREATEDB_84}
+
+# Set this to --exit-on-error once our dumps are coming from a PG 8.4
+# source. Currently, the PG 8.3 dumps generate some spurious errors
+# when being restored into a PG 8.4 database.
+EXIT_ON_ERROR=
+
+# Turn off output silencing so we can see details of staging deployments.
+# Without the timestamps, we are unable to estimate production deployment
+# times.
+#SHHH=../../utilities/shhh.py
+SHHH=
+
 default:
 	echo Usage: make [start|stop|restart]
 
@@ -61,7 +84,7 @@
 	
 	# Replicate it again, so we can test with multiple slaves.
 	-${PGMASSACRE} launchpad_dev_slave2
-	createdb --encoding=UTF8 launchpad_dev_slave2
+	${CREATEDB} launchpad_dev_slave2
 	LPCONFIG=${DEV_CONFIG} ./slon_ctl.py start \
 		 node3_node 'dbname=launchpad_dev_slave2 user=slony'
 	LPCONFIG=${DEV_CONFIG} ./new-slave.py 3 launchpad_dev_slave2
@@ -81,13 +104,13 @@
 	    _MASTER=lpmain_staging_new _SLAVE=lpmain_staging_slave_new \
 	    LAG="0 seconds"
 	# Create the DB with the desired default tablespace.
-	createdb --encoding UTF8 --tablespace ${STAGING_TABLESPACE} \
-	    lpmain_staging_new
-	# Restore the DB schema. Don't restore permissions - it will blow
-	# up when roles don't exist in this cluster, and we rebuild it later
-	# with security.py anyway.
-	pg_restore --dbname=lpmain_staging_new \
-	    --no-acl --exit-on-error ${STAGING_DUMP}
+	${CREATEDB} --tablespace ${STAGING_TABLESPACE} lpmain_staging_new
+	# Restore the database. We need to restore permissions, despite
+	# later running security.py, to pull in permissions granted on
+	# production to users not maintained by security.py.
+	# Stop ignoring error code after dumps come from an 8.4 system.
+	-bunzip2 --stdout ${STAGING_DUMP} | \
+	    pg_restore --dbname=lpmain_staging_new --no-owner ${EXIT_ON_ERROR}
 	# Uninstall Slony-I if it is installed - a pg_dump of a DB with
 	# Slony-I installed isn't usable without this step.
 	LPCONFIG=${NEW_STAGING_CONFIG} ./repair-restored-db.py
@@ -118,6 +141,16 @@
 	# Start the slon daemons, with requested lag.
 	LPCONFIG=${STAGING_CONFIG} ./slon_ctl.py --lag="${LAG}" start
 
+dogfood:
+	${CREATEDB} ${DOGFOOD_DBNAME}
+	# Stop ignoring error code after our dumps come from an 8.4 system.
+	-pg_restore --dbname=${DOGFOOD_DBNAME} --no-acl --no-owner \
+	    ${EXIT_ON_ERROR} ${DOGFOOD_DUMP}
+	./repair-restored-db.py -d ${DOGFOOD_DBNAME}
+	../schema/upgrade.py -d ${DOGFOOD_DBNAME}
+	../schema/fti.py -d ${DOGFOOD_DBNAME}
+	../schema/security.py -d ${DOGFOOD_DBNAME}
+
 _prelim:
 	@echo LPCONFIG currently ${LPCONFIG}
 	# Create the slony PostgreSQL superuser if necessary.
@@ -132,25 +165,26 @@
 _replicate:
 	@echo LPCONFIG currently ${LPCONFIG}
 	# Start the slon daemon for the master.
-	./slon_ctl.py start \
+	./slon_ctl.py --lag="0 seconds" start \
 		 node1_node "dbname=${_MASTER} user=slony"
 	# Initialize the cluster and create replication sets.
 	./initialize.py
 	# Create the soon-to-be-slave database, empty at this point.
-	createdb --encoding=UTF8 --tablespace=${_SLAVE_TABLESPACE} ${_SLAVE}
+	${CREATEDB} --tablespace=${_SLAVE_TABLESPACE} ${_SLAVE}
 	# Start the slon daemon for the slave
-	./slon_ctl.py start node2_node "dbname=${_SLAVE} user=slony"
+	./slon_ctl.py --lag="0 seconds" start \
+	    node2_node "dbname=${_SLAVE} user=slony"
 	# Setup the slave
 	./new-slave.py 2 "dbname=${_SLAVE}"
 	# Upgrade all databases in the cluster and reset security.
-	../schema/upgrade.py
-	../schema/fti.py
-	../schema/security.py --cluster -U slony
-	# Migrate tables to the authdb replication set, creating the set
-	# and subscribing nodes to it as necessary.
-	./populate_auth_replication_set.py -U slony
+	@echo Running upgrade.py `date`
+	${SHHH} ../schema/upgrade.py
+	@echo Running fti.py `date`
+	${SHHH} ../schema/fti.py
+	@echo Running security.py `date`
+	./slon_ctl.py stop # security.py can deadlock with slony
+	${SHHH} ../schema/security.py --cluster -U slony
 	# Restart slon daemons with default lag setting.
-	./slon_ctl.py stop
 	./slon_ctl.py --lag="${LAG}" start
 	# Generate a preamble for manual slonik(1) usage.
 	./preamble.py > preamble.sk

=== modified file 'database/replication/helpers.py'
--- database/replication/helpers.py	2009-06-24 21:17:33 +0000
+++ database/replication/helpers.py	2010-11-07 00:31:57 +0000
@@ -26,24 +26,24 @@
 # The namespace in the database used to contain all the Slony-I tables.
 CLUSTER_NAMESPACE = '_%s' % CLUSTERNAME
 
-# Seed tables for the authdb replication set to be passed to
+# Replication set id constants. Don't change these without DBA help.
+LPMAIN_SET_ID = 1
+HOLDING_SET_ID = 666
+LPMIRROR_SET_ID = 4
+
+# Seed tables for the lpmain replication set to be passed to
 # calculate_replication_set().
-AUTHDB_SEED = frozenset([
+LPMAIN_SEED = frozenset([
     ('public', 'account'),
+    ('public', 'openidnonce'),
     ('public', 'openidassociation'),
-    ('public', 'openidnonce'),
-    ])
-
-# Seed tables for the lpmain replication set to be passed to
-# calculate_replication_set().
-LPMAIN_SEED = frozenset([
     ('public', 'person'),
     ('public', 'launchpaddatabaserevision'),
+    ('public', 'databasereplicationlag'),
     ('public', 'fticache'),
     ('public', 'nameblacklist'),
     ('public', 'openidconsumerassociation'),
     ('public', 'openidconsumernonce'),
-    ('public', 'oauthnonce'),
     ('public', 'codeimportmachine'),
     ('public', 'scriptactivity'),
     ('public', 'standardshipitrequest'),
@@ -51,14 +51,60 @@
     ('public', 'launchpadstatistic'),
     ('public', 'parsedapachelog'),
     ('public', 'shipitsurvey'),
-    ('public', 'openidassociations'), # Remove this in April 2009 or later.
+    ('public', 'featureflag'),
+    # suggestivepotemplate can be removed when the
+    # suggestivepotemplate.potemplate foreign key constraint exists on
+    # production.
+    ('public', 'suggestivepotemplate'),
     ])
 
 # Explicitly list tables that should not be replicated. This includes the
 # session tables, as these might exist in developer databases but will not
 # exist in the production launchpad database.
 IGNORED_TABLES = set([
-    'public.secret', 'public.sessiondata', 'public.sessionpkgdata'])
+    # Session tables that in some situations will exist in the main lp
+    # database.
+    'public.secret', 'public.sessiondata', 'public.sessionpkgdata',
+    # Mirror tables, per Bug #489078. These tables have their own private
+    # replication set that is setup manually.
+    'public.lp_account',
+    'public.lp_openididentifier',
+    'public.lp_person',
+    'public.lp_personlocation',
+    'public.lp_teamparticipation',
+    # Database statistics
+    'public.databasetablestats',
+    'public.databasecpustats',
+    # Don't replicate OAuthNonce - too busy and no real gain.
+    'public.oauthnonce',
+    # Ubuntu SSO database. These tables were created manually by ISD
+    # and the Launchpad scripts should not mess with them. Eventually
+    # these tables will be in a totally separate database.
+    'public.auth_permission',
+    'public.auth_group',
+    'public.auth_user',
+    'public.auth_message',
+    'public.django_content_type',
+    'public.django_session',
+    'public.django_site',
+    'public.django_admin_log',
+    'public.ssoopenidrpconfig',
+    'public.auth_group_permissions',
+    'public.auth_user_groups',
+    'public.auth_user_user_permissions',
+    'public.oauth_nonce',
+    'public.oauth_consumer',
+    'public.oauth_token',
+    'public.api_user',
+    'public.oauth_consumer_id_seq',
+    'public.api_user_id_seq',
+    'public.oauth_nonce_id_seq',
+    ])
+
+# Calculate IGNORED_SEQUENCES
+IGNORED_SEQUENCES = set('%s_id_seq' % table for table in IGNORED_TABLES)
 
 
 def slony_installed(con):
@@ -101,7 +147,7 @@
 
 def sync(timeout):
     """Generate a sync event and wait for it to complete on all nodes.
-   
+
     This means that all pending events have propagated and are in sync
     to the point in time this method was called. This might take several
     hours if there is a large backlog of work to replicate.
@@ -135,12 +181,13 @@
         script = preamble() + script
 
     if sync is not None:
-        script = script + dedent("""\
+        sync_script = dedent("""\
             sync (id = @master_node);
             wait for event (
-                origin = ALL, confirmed = ALL,
+                origin = @master_node, confirmed = ALL,
                 wait on = @master_node, timeout = %d);
             """ % sync)
+        script = script + sync_script
 
     # Copy the script to a NamedTemporaryFile rather than just pumping it
     # to slonik via stdin. This way it can be examined if slonik appears
@@ -151,7 +198,7 @@
 
     # Run slonik
     log.debug("Executing slonik script %s" % script_on_disk.name)
-    log.log(DEBUG2, script)
+    log.log(DEBUG2, 'Running script:\n%s' % script)
     returncode = subprocess.call(['slonik', script_on_disk.name])
 
     if returncode != 0:
@@ -255,7 +302,7 @@
         assert len(node_ids) == 1, "Multiple nodes but no paths."
         master_node_id = node_ids[0]
         master_connection_string = ConnectionString(
-            config.database.main_master)
+            config.database.rw_main_master)
         master_connection_string.user = 'slony'
         return [Node(
             master_node_id, 'node%d_node' % master_node_id,
@@ -282,10 +329,10 @@
         cluster name = sl;
 
         # Symbolic ids for replication sets.
-        define lpmain_set  1;
-        define authdb_set  2;
-        define holding_set 666;
-        """)]
+        define lpmain_set   %d;
+        define holding_set  %d;
+        define lpmirror_set %d;
+        """ % (LPMAIN_SET_ID, HOLDING_SET_ID, LPMIRROR_SET_ID))]
 
     if master_node is not None:
         preamble.append(dedent("""\
@@ -303,9 +350,9 @@
                 node.nickname, node.node_id,
                 node.nickname, node.connection_string,
                 node.nickname, node.nickname)))
-    
+
     return '\n\n'.join(preamble)
-        
+
 
 def calculate_replication_set(cur, seeds):
     """Return the minimal set of tables and sequences needed in a
@@ -313,6 +360,9 @@
 
     A replication set must contain all tables linked by foreign key
     reference to the given table, and sequences used to generate keys.
+    Tables and sequences can be added to the IGNORED_TABLES and
+    IGNORED_SEQUENCES lists for cases where we know we can safely
+    ignore this restriction.
 
     :param seeds: [(namespace, tablename), ...]
 
@@ -380,7 +430,8 @@
             """ % sqlvalues(namespace, tablename))
         for namespace, tablename in cur.fetchall():
             key = (namespace, tablename)
-            if key not in tables and key not in pending_tables:
+            if (key not in tables and key not in pending_tables
+                and '%s.%s' % (namespace, tablename) not in IGNORED_TABLES):
                 pending_tables.add(key)
 
     # Generate the set of sequences that are linked to any of our set of
@@ -401,8 +452,9 @@
                 ) AS whatever
             WHERE seq IS NOT NULL;
             """ % sqlvalues(fqn(namespace, tablename), namespace, tablename))
-        for row in cur.fetchall():
-            sequences.add(row[0])
+        for sequence, in cur.fetchall():
+            if sequence not in IGNORED_SEQUENCES:
+                sequences.add(sequence)
 
     # We can't easily convert the sequence name to (namespace, name) tuples,
     # so we might as well convert the tables to dot notation for consistency.
@@ -434,7 +486,7 @@
 
     return (
         all_tables - replicated_tables - IGNORED_TABLES,
-        all_sequences - replicated_sequences)
+        all_sequences - replicated_sequences - IGNORED_SEQUENCES)
 
 
 class ReplicationConfigError(Exception):
@@ -462,19 +514,6 @@
         raise ReplicationConfigError(
             "Unreplicated sequences: %s" % repr(unrepl_sequences))
 
-    authdb_tables, authdb_sequences = calculate_replication_set(
-        cur, AUTHDB_SEED)
     lpmain_tables, lpmain_sequences = calculate_replication_set(
         cur, LPMAIN_SEED)
 
-    confused_tables = authdb_tables.intersection(lpmain_tables)
-    if confused_tables:
-        raise ReplicationConfigError(
-            "Tables exist in multiple replication sets: %s"
-            % repr(confused_tables))
-    confused_sequences = authdb_sequences.intersection(lpmain_sequences)
-    if confused_sequences:
-        raise ReplicationConfigError(
-            "Sequences exist in multiple replication sets: %s"
-            % repr(confused_sequences))
-

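calculate_replication_set() is essentially a transitive closure over
the foreign key graph, and this branch now applies the IGNORED_TABLES /
IGNORED_SEQUENCES filters while walking rather than only subtracting
them at the end. A minimal sketch of that closure, assuming
hypothetical fk_neighbours and key_sequences callables in place of the
catalog queries the real code issues:

    def replication_closure(seeds, fk_neighbours, key_sequences,
                            ignored_tables, ignored_sequences):
        """Return (tables, sequences) reachable from seeds via foreign
        keys, skipping anything explicitly ignored."""
        tables = set()
        pending = set(seeds)
        while pending:
            table = pending.pop()
            tables.add(table)
            for neighbour in fk_neighbours(table):
                if (neighbour not in tables
                    and neighbour not in ignored_tables):
                    pending.add(neighbour)
        sequences = set()
        for table in tables:
            sequences.update(
                seq for seq in key_sequences(table)
                if seq not in ignored_sequences)
        return tables, sequences
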
=== modified file 'database/replication/initialize.py'
--- database/replication/initialize.py	2009-07-19 04:41:14 +0000
+++ database/replication/initialize.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -41,9 +41,9 @@
     """Duplicate the master schema into the slaves."""
     log.info('Duplicating database schema')
 
-    master_cs = ConnectionString(config.database.main_master)
+    master_cs = ConnectionString(config.database.rw_main_master)
     master_cs.user = options.dbuser
-    slave1_cs = ConnectionString(config.database.main_slave)
+    slave1_cs = ConnectionString(config.database.rw_main_slave)
     slave1_cs.user = options.dbuser
 
     # We can't use pg_dump to replicate security as not all of the roles
@@ -70,7 +70,7 @@
     """Initialize the cluster."""
     log.info('Initializing Slony-I cluster')
     master_connection_string = ConnectionString(
-        config.database.main_master)
+        config.database.rw_main_master)
     master_connection_string.user = 'slony'
     helpers.execute_slonik("""
         node 1 admin conninfo = '%s';
@@ -88,54 +88,13 @@
     helpers.sync(120) # Will exit on failure.
 
 
-def create_replication_sets(
-    authdb_tables, authdb_sequences, lpmain_tables, lpmain_sequences):
+def create_replication_sets(lpmain_tables, lpmain_sequences):
     """Create the replication sets."""
     log.info('Creating Slony-I replication sets.')
 
-    # Instead of creating both the authdb and lpmain replication sets,
-    # we just create the lpmain replication set containing everything.
-    # This way, we can then test the populate_auth_replication_set.py
-    # migration script that moves the relevant tables from the lpmain
-    # replication set to the authdb replication set.
-    # We will turn this behavior off once we are running two
-    # replication sets in production and remove the migration script.
-    lpmain_tables = lpmain_tables.union(authdb_tables)
-    lpmain_sequences = lpmain_sequences.union(authdb_sequences)
-
     script = ["try {"]
-    # script,append("""
-    #     echo 'Creating AuthDB replication set (@authdb_set)';
-    #     create set (
-    #         id=@authdb_set, origin=@master_node,
-    #         comment='AuthDB tables and sequences');
-    #     """)
 
-    # entry_id = 1
-    # for table in sorted(authdb_tables):
-    #     script.append("""
-    #         echo 'Adding %(table)s to replication set @authdb_set';
-    #         set add table (
-    #             set id=@authdb_set,
-    #             origin=@master_node,
-    #             id=%(entry_id)d,
-    #             fully qualified name='%(table)s');
-    #         """ % vars())
-    #     entry_id += 1
-    # entry_id = 1
-    # for sequence in sorted(authdb_sequences):
-    #     script.append("""
-    #         echo 'Adding %(sequence)s to replication set @authdb_set';
-    #         set add sequence (
-    #             set id=@authdb_set,
-    #             origin=@master_node,
-    #             id=%(entry_id)d,
-    #             fully qualified name='%(sequence)s');
-    #         """ % vars())
-    #     entry_id += 1
-    #
-    # assert entry_id < 200, 'authdb replcation set has > 200 objects???'
-    entry_id = 200
+    entry_id = 1
 
     script.append("""
         echo 'Creating LPMain replication set (@lpmain_set)';
@@ -157,7 +116,7 @@
             """ % vars())
         entry_id += 1
 
-    entry_id = 200
+    entry_id = 1
     script.append(
         "echo 'Adding %d sequences to replication set @lpmain_set';"
         % len(lpmain_sequences))
@@ -199,9 +158,6 @@
     con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
     global cur
     cur = con.cursor()
-    log.debug("Calculating authdb replication set.")
-    authdb_tables, authdb_sequences = helpers.calculate_replication_set(
-        cur, helpers.AUTHDB_SEED)
     log.debug("Calculating lpmain replication set.")
     lpmain_tables, lpmain_sequences = helpers.calculate_replication_set(
         cur, helpers.LPMAIN_SEED)
@@ -212,8 +168,7 @@
     fails = 0
     for table in all_tables_in_schema(cur, 'public'):
         times_seen = 0
-        for table_set in [
-            authdb_tables, lpmain_tables, helpers.IGNORED_TABLES]:
+        for table_set in [lpmain_tables, helpers.IGNORED_TABLES]:
             if table in table_set:
                 times_seen += 1
         if times_seen == 0:
@@ -224,7 +179,7 @@
             fails += 1
     for sequence in all_sequences_in_schema(cur, 'public'):
         times_seen = 0
-        for sequence_set in [authdb_sequences, lpmain_sequences]:
+        for sequence_set in [lpmain_sequences, helpers.IGNORED_SEQUENCES]:
             if sequence in sequence_set:
                 times_seen += 1
         if times_seen == 0:
@@ -241,8 +196,7 @@
 
     ensure_live()
 
-    create_replication_sets(
-        authdb_tables, authdb_sequences, lpmain_tables, lpmain_sequences)
+    create_replication_sets(lpmain_tables, lpmain_sequences)
 
     helpers.sync(0)
 

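With the authdb set gone, the generated slonik script defines a single
replication set. Roughly what create_replication_sets() now emits,
rendered by a stand-alone snippet (the table and sequence names and the
set comment are illustrative, not taken from the branch):

    lpmain_tables = ['public.account', 'public.person']
    lpmain_sequences = ['public.person_id_seq']

    script = ["try {"]
    script.append(
        "create set (id=@lpmain_set, origin=@master_node, "
        "comment='lpmain tables and sequences');")
    entry_id = 1
    for table in sorted(lpmain_tables):
        script.append(
            "set add table (set id=@lpmain_set, origin=@master_node, "
            "id=%d, fully qualified name='%s');" % (entry_id, table))
        entry_id += 1
    entry_id = 1  # sequence ids are a separate namespace in Slony-I
    for sequence in sorted(lpmain_sequences):
        script.append(
            "set add sequence (set id=@lpmain_set, origin=@master_node, "
            "id=%d, fully qualified name='%s');" % (entry_id, sequence))
        entry_id += 1
    script.append("} on error { echo 'Failed.'; exit 1; }")
    print('\n'.join(script))
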
=== modified file 'database/replication/new-slave.py'
--- database/replication/new-slave.py	2009-06-24 21:17:33 +0000
+++ database/replication/new-slave.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -24,6 +24,7 @@
 from canonical.launchpad.scripts import db_options, logger_options, logger
 
 import replication.helpers
+from replication.helpers import LPMAIN_SET_ID
 
 def main():
     parser = OptionParser(
@@ -72,6 +73,10 @@
     if node_id in [node.node_id for node in existing_nodes]:
         parser.error("Node %d already exists in the cluster." % node_id)
 
+    # Get the connection string for the lpmain master.
+    lpmain_connection_string = get_master_connection_string(
+        source_connection, parser, LPMAIN_SET_ID) or source_connection_string
+
     # Sanity check the target connection string.
     target_connection_string = ConnectionString(raw_target_connection_string)
     if target_connection_string.user is None:
@@ -110,11 +115,11 @@
             "Database at %s is not empty." % target_connection_string)
     target_con.rollback()
 
-    # Duplicate the schema. We restore with no-privileges as required
+    # Duplicate the full schema. We restore with no-privileges as required
     # roles may not yet exist, so we have to run security.py on the
     # new slave once it is built.
-    log.info("Duplicating db schema from '%s' to '%s'" % (
-        source_connection_string, target_connection_string))
+    log.info("Duplicating full db schema from '%s' to '%s'" % (
+        lpmain_connection_string, target_connection_string))
     cmd = "pg_dump --schema-only --no-privileges %s | psql -1 -q %s" % (
         source_connection_string.asPGCommandLineArgs(),
         target_connection_string.asPGCommandLineArgs())
@@ -123,19 +128,39 @@
         return 1
 
     # Trash the broken Slony tables we just duplicated.
+    log.debug("Removing slony cruft.")
     cur = target_con.cursor()
     cur.execute("DROP SCHEMA _sl CASCADE")
     target_con.commit()
     del target_con
 
-    # Get a list of existing set ids.
+    # Get a list of existing set ids that can be subscribed to. This
+    # is all sets where the origin is the master_node. We don't allow
+    # other sets where the master is configured as a forwarding slave,
+    # as we would have to special-case rebuilding the database schema,
+    # and we want to avoid cascading slave configurations anyway since
+    # we are running an antique Slony-I at the moment - keep it simple!
+    # We order the sets smallest to largest by number of tables.
+    # This should let us subscribe the quickest sets first for more
+    # immediate feedback.
     source_connection.rollback()
     master_node = replication.helpers.get_master_node(source_connection)
     cur = source_connection.cursor()
-    cur.execute(
-        "SELECT set_id FROM _sl.sl_set WHERE set_origin=%d"
-        % master_node.node_id)
+    cur.execute("""
+        SELECT set_id
+        FROM _sl.sl_set, (
+            SELECT tab_set, count(*) AS tab_count
+            FROM _sl.sl_table GROUP BY tab_set
+            ) AS TableCounts
+        WHERE
+            set_origin=%d
+            AND tab_set = set_id
+        ORDER BY tab_count
+        """
+        % (master_node.node_id,))
     set_ids = [set_id for set_id, in cur.fetchall()]
+    log.debug("Discovered set ids %s" % repr(list(set_ids)))
 
     # Generate and run a slonik(1) script to initialize the new node
     # and subscribe it to our replication sets.
@@ -147,7 +172,7 @@
 
         echo 'Initializing new node.';
         try {
-            store node (id=@new_node, comment='%s');
+            store node (id=@new_node, comment='%s', event node=@master_node);
             echo 'Creating new node paths.';
         """ % (node_id, target_connection_string, comment))
 
@@ -163,21 +188,39 @@
 
     script += dedent("""\
         } on error { echo 'Failed.'; exit 1; }
+
+        echo 'You may need to restart the Slony daemons now. If the first';
+        echo 'of the following syncs passes then there is no need.';
         """)
 
+    full_sync = []
+    sync_nicknames = [node.nickname for node in existing_nodes]
+    sync_nicknames.append('new_node')
+    for nickname in sync_nicknames:
+        full_sync.append(dedent("""\
+            echo 'Waiting for %(nickname)s sync.';
+            sync (id=@%(nickname)s);
+            wait for event (
+                origin = @%(nickname)s, confirmed=ALL,
+                wait on = @%(nickname)s, timeout=0);
+            echo 'Ok. Replication syncing fine with new node.';
+            """ % {'nickname': nickname}))
+    full_sync = '\n'.join(full_sync)
+    script += full_sync
+
     for set_id in set_ids:
-
         script += dedent("""\
         echo 'Subscribing new node to set %d.';
         subscribe set (
             id=%d, provider=@master_node, receiver=@new_node, forward=yes);
-
-        echo 'Waiting for sync... this might take a while...';
+        echo 'Waiting for subscribe to start processing.';
+        echo 'This will block on long running transactions.';
         sync (id = @master_node);
         wait for event (
-            origin = ALL, confirmed = ALL,
+            origin = @master_node, confirmed = ALL,
             wait on = @master_node, timeout = 0);
         """ % (set_id, set_id))
+        script += full_sync
 
     replication.helpers.execute_slonik(script)
 
@@ -185,5 +228,33 @@
 
     return 0
 
+
+def get_master_connection_string(con, parser, set_id):
+    """Return the connection string to the origin for the replication set.
+    """
+    cur = con.cursor()
+    cur.execute("""
+        SELECT pa_conninfo FROM _sl.sl_set, _sl.sl_path
+        WHERE set_origin = pa_server AND set_id = %d
+        LIMIT 1
+        """ % set_id)
+    row = cur.fetchone()
+    if row is None:
+        # If we have no paths stored, there is only a single node in the
+        # cluster.
+        return None
+    else:
+        connection_string = ConnectionString(row[0])
+
+    # Confirm we can connect from here.
+    try:
+        test_con = psycopg2.connect(str(connection_string))
+    except psycopg2.Error, exception:
+        parser.error("Failed to connect to using '%s' (%s)" % (
+            connection_string, str(exception).strip()))
+
+    return connection_string
+
+
 if __name__ == '__main__':
     sys.exit(main())

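The repeated full-sync block is the notable addition here: after every
subscribe, the script now waits on each node in turn instead of a
single origin=ALL wait. A cut-down, runnable rendering of that
generation loop (node nicknames are hypothetical):

    from textwrap import dedent

    sync_nicknames = ['node1_node', 'node2_node', 'new_node']
    full_sync = []
    for nickname in sync_nicknames:
        full_sync.append(dedent("""\
            echo 'Waiting for %(nickname)s sync.';
            sync (id=@%(nickname)s);
            wait for event (
                origin = @%(nickname)s, confirmed=ALL,
                wait on = @%(nickname)s, timeout=0);
            """ % {'nickname': nickname}))
    print('\n'.join(full_sync))
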
=== removed file 'database/replication/populate_auth_replication_set.py'
--- database/replication/populate_auth_replication_set.py	2009-06-24 21:17:33 +0000
+++ database/replication/populate_auth_replication_set.py	1970-01-01 00:00:00 +0000
@@ -1,177 +0,0 @@
-#!/usr/bin/python2.4
-#
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-"""Populate the auth replication set.
-
-This script moves the the SSO tables from the main replication set to
-the auth replication set.
-
-Once it has been run on production, these tables can no longer be
-maintained using the Launchpad database maintenance scripts
-(upgrade.py, security.py etc.).
-
-We do this so Launchpad database upgrades do not lock the SSO tables,
-allowing the SSO service to continue to operate.
-
-This is a single shot script.
-"""
-
-__metaclass__ = type
-__all__ = []
-
-import _pythonpath
-
-import sys
-from textwrap import dedent
-from optparse import OptionParser
-
-from canonical.database.sqlbase import (
-    connect, ISOLATION_LEVEL_AUTOCOMMIT, sqlvalues)
-from canonical.launchpad.scripts import db_options, logger_options, logger
-
-import replication.helpers
-
-def create_auth_set(cur):
-    """Create the auth replication set if it doesn't already exist."""
-    cur.execute("SELECT TRUE FROM _sl.sl_set WHERE set_id=2")
-    if cur.fetchone() is not None:
-        log.info("Auth set already exists.")
-        return
-    slonik_script = dedent("""\
-        create set (
-            id=@authdb_set, origin=@master_node,
-            comment='SSO service tables');
-        """)
-    log.info("Creating authdb replication set.")
-    replication.helpers.execute_slonik(slonik_script, sync=0)
-
-
-def subscribe_auth_set(cur):
-    """The authdb set subscription much match the lpmain set subscription.
-
-    This is a requirement to move stuff between replication sets. It
-    is also what we want (all nodes replicating everything).
-    """
-    cur.execute("""
-        SELECT sub_receiver FROM _sl.sl_subscribe WHERE sub_set = 1
-        EXCEPT
-        SELECT sub_receiver FROM _sl.sl_subscribe WHERE sub_set = 2
-        """)
-    for node_id in (node_id for node_id, in cur.fetchall()):
-        log.info("Subscribing Node #%d to authdb replication set" % node_id)
-        success = replication.helpers.execute_slonik(dedent("""\
-            subscribe set (
-                id = @authdb_set, provider = @master_node,
-                receiver = %d, forward = yes);
-            """ % node_id), sync=0)
-        if not success:
-            log.error("Slonik failed. Exiting.")
-            sys.exit(1)
-
-
-def migrate_tables_and_sequences(cur):
-    auth_tables, auth_sequences = (
-        replication.helpers.calculate_replication_set(
-            cur, replication.helpers.AUTHDB_SEED))
-
-    slonik_script = ["try {"]
-    for table_fqn in auth_tables:
-        namespace, table_name = table_fqn.split('.')
-        cur.execute("""
-            SELECT tab_id, tab_set
-            FROM _sl.sl_table
-            WHERE tab_nspname = %s AND tab_relname = %s
-            """ % sqlvalues(namespace, table_name))
-        try:
-            table_id, set_id = cur.fetchone()
-        except IndexError:
-            log.error("Table %s not found in _sl.sl_tables" % table_fqn)
-            sys.exit(1)
-        if set_id == 1:
-            slonik_script.append("echo 'Moving table %s';" % table_fqn)
-            slonik_script.append(
-                "set move table "
-                "(origin=@master_node, id=%d, new set=@authdb_set);"
-                % table_id)
-        elif set_id == 2:
-            log.warn(
-                "Table %s already in authdb replication set"
-                % table_fqn)
-        else:
-            log.error("Unknown replication set %s" % set_id)
-            sys.exit(1)
-
-    for sequence_fqn in auth_sequences:
-        namespace, sequence_name = sequence_fqn.split('.')
-        cur.execute("""
-            SELECT seq_id, seq_set
-            FROM _sl.sl_sequence
-            WHERE seq_nspname = %s AND seq_relname = %s
-            """ % sqlvalues(namespace, sequence_name))
-        try:
-            sequence_id, set_id = cur.fetchone()
-        except IndexError:
-            log.error(
-                "Sequence %s not found in _sl.sl_sequences" % sequence_fqn)
-            sys.exit(1)
-        if set_id == 1:
-            slonik_script.append("echo 'Moving sequence %s';" % sequence_fqn)
-            slonik_script.append(
-                "set move sequence "
-                "(origin=@master_node, id=%d, new set=@authdb_set);"
-                % sequence_id)
-        elif set_id ==2:
-            log.warn(
-                "Sequence %s already in authdb replication set."
-                % sequence_fqn)
-        else:
-            log.error("Unknown replication set %s" % set_id)
-            sys.exit(1)
-
-    if len(slonik_script) == 1:
-        log.warn("No tables or sequences to migrate.")
-        return
-
-    slonik_script.append(dedent("""\
-        } on error {
-            echo 'Failed to move one or more tables or sequences.';
-            exit 1;
-        }
-        """))
-
-    slonik_script = "\n".join(slonik_script)
-
-    log.info("Running migration script...")
-    if not replication.helpers.execute_slonik(slonik_script, sync=0):
-        log.error("Slonik failed. Exiting.")
-        sys.exit(1)
-
-
-def main():
-    parser = OptionParser()
-    db_options(parser)
-    logger_options(parser)
-    options, args = parser.parse_args()
-
-    global log
-    log = logger(options)
-
-    con = connect('slony', isolation=ISOLATION_LEVEL_AUTOCOMMIT)
-    cur = con.cursor()
-
-    # Don't start until cluster is synced.
-    log.info("Waiting for sync.")
-    replication.helpers.sync(0)
-
-    create_auth_set(cur)
-    subscribe_auth_set(cur)
-    migrate_tables_and_sequences(cur)
-
-
-log = None # Global log
-
-
-if __name__ == '__main__':
-    main()

=== modified file 'database/replication/preamble.py'
--- database/replication/preamble.py	2009-06-24 21:17:33 +0000
+++ database/replication/preamble.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'database/replication/repair-restored-db.py'
--- database/replication/repair-restored-db.py	2009-06-24 21:17:33 +0000
+++ database/replication/repair-restored-db.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -27,7 +27,8 @@
 
 from canonical.config import config
 from canonical.database.postgresql import ConnectionString
-from canonical.database.sqlbase import connect, quote
+from canonical.database.sqlbase import (
+    connect, quote, ISOLATION_LEVEL_AUTOCOMMIT)
 from canonical.launchpad.scripts import db_options, logger_options, logger
 
 import replication.helpers
@@ -44,12 +45,23 @@
 
     log = logger(options)
 
-    con = connect(options.dbuser)
+    con = connect(options.dbuser, isolation=ISOLATION_LEVEL_AUTOCOMMIT)
 
     if not replication.helpers.slony_installed(con):
         log.info("Slony-I not installed. Nothing to do.")
         return 0
 
+    if not repair_with_slonik(log, options, con):
+        repair_with_drop_schema(log, con)
+
+    return 0
+
+
+def repair_with_slonik(log, options, con):
+    """Attempt to uninstall Slony-I via 'UNINSTALL NODE' per best practice.
+
+    Returns True on success, False if unable to do so for any reason.
+    """
     cur = con.cursor()
 
     # Determine the node id the database thinks it is.
@@ -60,29 +72,21 @@
         cur.execute(cmd)
         node_id = cur.fetchone()[0]
         log.debug("Node Id is %d" % node_id)
+
+        # Get a list of set ids in the database.
+        cur.execute(
+            "SELECT DISTINCT set_id FROM %s.sl_set"
+            % replication.helpers.CLUSTER_NAMESPACE)
+        set_ids = set(row[0] for row in cur.fetchall())
+        log.debug("Set Ids are %s" % repr(set_ids))
+
     except psycopg2.InternalError:
         # Not enough information to determine node id. Possibly
-        # this is an empty database. Just drop the _sl schema as
-        # it is 'good enough' with Slony-I 1.2 - this mechanism
-        # fails with Slony added primary keys, but we don't do that.
-        con.rollback()
-        cur = con.cursor()
-        cur.execute("DROP SCHEMA _sl CASCADE")
-        con.commit()
-        return 0
-
-    # Get a list of set ids in the database.
-    cur.execute(
-        "SELECT DISTINCT set_id FROM %s.sl_set"
-        % replication.helpers.CLUSTER_NAMESPACE)
-    set_ids = set(row[0] for row in cur.fetchall())
-    log.debug("Set Ids are %s" % repr(set_ids))
-
-    # Close so we don't block slonik(1)
-    del cur
-    con.close()
-
-    connection_string = ConnectionString(config.database.main_master)
+        # this is an empty database.
+        log.debug('Broken or no Slony-I install.')
+        return False
+
+    connection_string = ConnectionString(config.database.rw_main_master)
     if options.dbname:
         connection_string.dbname = options.dbname
     if options.dbuser:
@@ -103,7 +107,22 @@
         log.debug(line)
     script = '\n'.join(script)
 
-    replication.helpers.execute_slonik(script, auto_preamble=False)
+    return replication.helpers.execute_slonik(
+        script, auto_preamble=False, exit_on_fail=False)
+
+
+def repair_with_drop_schema(log, con):
+    """
+    Just drop the _sl schema as it is 'good enough' with Slony-I 1.2.
+
+    This mechanism fails with Slony added primary keys, but we don't
+    do that.
+    """
+    log.info('Fallback mode - dropping _sl schema.')
+    cur = con.cursor()
+    cur.execute("DROP SCHEMA _sl CASCADE")
+    return True
+
 
 if __name__ == '__main__':
     sys.exit(main())

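The refactored control flow, reduced to a stub sketch (the stubs stand
in for the real slonik and SQL paths): try the clean 'uninstall node'
route first, and only fall back to dropping the schema when the node
cannot even report its id.

    def repair_with_slonik():
        # Polite route: slonik 'uninstall node'. Returns False when
        # the node id can't be determined (broken or empty install).
        return False  # pretend the install is broken

    def repair_with_drop_schema():
        # Blunt fallback, 'good enough' with Slony-I 1.2.
        print("DROP SCHEMA _sl CASCADE")
        return True

    if not repair_with_slonik():
        repair_with_drop_schema()
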
=== modified file 'database/replication/report.py'
--- database/replication/report.py	2009-07-19 04:41:14 +0000
+++ database/replication/report.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

=== modified file 'database/replication/slon_ctl.py'
--- database/replication/slon_ctl.py	2009-06-24 21:17:33 +0000
+++ database/replication/slon_ctl.py	2010-11-07 00:31:57 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python -S
 #
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -88,9 +88,11 @@
 
 
 def get_logfile(nickname):
+    logdir = config.database.replication_logdir
+    if not os.path.isabs(logdir):
+        logdir = os.path.normpath(os.path.join(config.root, logdir))
     return os.path.join(
-        config.root, 'database', 'replication',
-        'lpslon_%s_%s.log' % (nickname, config.instance_name))
+        logdir, 'lpslon_%s_%s.log' % (nickname, config.instance_name))
 
 
 def start(log, nodes, lag=None):

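A quick illustration of the new log path behaviour: a relative
replication_logdir is resolved against config.root instead of the old
hard-coded database/replication location (the paths below are
hypothetical):

    import os.path

    def resolve_logdir(root, logdir):
        # Mirrors the new get_logfile() resolution logic.
        if not os.path.isabs(logdir):
            logdir = os.path.normpath(os.path.join(root, logdir))
        return logdir

    print(resolve_logdir('/srv/launchpad', 'database/replication'))
    # -> /srv/launchpad/database/replication
    print(resolve_logdir('/srv/launchpad', '/var/log/slony'))
    # -> /var/log/slony
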
=== added file 'database/replication/sync.py'
--- database/replication/sync.py	1970-01-01 00:00:00 +0000
+++ database/replication/sync.py	2010-11-07 00:31:57 +0000
@@ -0,0 +1,26 @@
+#!/usr/bin/python -S
+#
+# Copyright 2010 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Block until the replication cluster synchronizes."""
+
+__metaclass__ = type
+__all__ = []
+
+import _pythonpath
+
+from optparse import OptionParser
+
+from canonical.launchpad.scripts import logger_options, db_options
+from replication.helpers import sync
+
+if __name__ == '__main__':
+    parser = OptionParser()
+    parser.add_option(
+        "-t", "--timeout", dest="timeout", metavar="SECS", type="int",
+        help="Abort if no sync after SECS seconds.", default=0)
+    logger_options(parser)
+    db_options(parser)
+    options, args = parser.parse_args()
+    sync(options.timeout)

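Presumed usage, either from a shell or from other maintenance code; the
invocation path and reading timeout=0 as 'wait indefinitely' are
assumptions on my part:

    # Shell:  database/replication/sync.py --timeout=600
    # Python equivalent:
    from replication.helpers import sync

    sync(600)  # block until all nodes confirm; abort after 600 seconds
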
=== modified file 'database/sampledata/current-dev.sql'
--- database/sampledata/current-dev.sql	2009-09-07 01:46:23 +0000
+++ database/sampledata/current-dev.sql	2010-11-07 00:31:57 +0000
@@ -1,11 +1,13 @@
--- Copyright 2009 Canonical Ltd.  This software is licensed under the
+-- Copyright 2010 Canonical Ltd.  This software is licensed under the
 -- GNU Affero General Public License version 3 (see the file LICENSE).
+-- Created using pg_dump (PostgreSQL) 8.4.3
 
 SET check_function_bodies = false;
 SET client_encoding = 'UTF8';
 SET client_min_messages = warning;
 SET escape_string_warning = off;
 SET standard_conforming_strings = off;
+SET statement_timeout = 0;
 
 SET search_path = public, pg_catalog;
 
@@ -745,85 +747,141 @@
 
 
 
+
+SET SESSION AUTHORIZATION DEFAULT;
+
 ALTER TABLE account DISABLE TRIGGER ALL;
 
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (1, '2005-06-06 08:59:51.591618', 8, 20, '2005-06-06 08:59:51.591618', 'Mark Shuttleworth', 'mark_oid', NULL, '123/mark');
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (2, '2005-06-06 08:59:51.598107', 8, 20, '2005-06-06 08:59:51.598107', 'Robert Collins', 'lifeless_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (3, '2005-06-06 08:59:51.610048', 1, 20, '2008-09-05 20:55:47.76904', 'Dave Miller', 'justdave_oid', '', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (4, '2005-06-06 08:59:51.611185', 8, 20, '2005-06-06 08:59:51.611185', 'Colin Watson', 'kamion_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (5, '2005-06-06 08:59:51.608802', 1, 10, '2005-06-06 08:59:51.608802', 'Scott James Remnant', 'keybuk_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (6, '2005-06-06 08:59:51.600523', 8, 20, '2005-06-06 08:59:51.600523', 'Jeff Waugh', 'jdub_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (7, '2005-06-06 08:59:51.551196', 2, 20, '2008-09-05 20:55:47.76904', 'Andrew Bennetts', 'spiv_oid', '', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (8, '2005-06-06 08:59:51.601584', 8, 20, '2005-06-06 08:59:51.601584', 'James Blackwell', 'jblack_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (9, '2005-06-06 08:59:51.594941', 1, 10, '2005-06-06 08:59:51.594941', 'Christian Reis', 'kiko_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (10, '2005-06-06 08:59:51.619713', 2, 20, '2008-09-05 20:55:47.76904', 'Alexander Limi', 'limi_oid', '', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (11, '2005-06-06 08:59:51.599234', 8, 20, '2005-06-06 08:59:51.599234', 'Steve Alexander', 'stevea_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (12, '2005-06-06 08:59:51.612277', 8, 20, '2005-06-06 08:59:51.612277', 'Sample Person', 'name12_oid', NULL, '123/name12');
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (13, '2005-06-06 08:59:51.615543', 8, 20, '2005-06-06 08:59:51.615543', 'Carlos Perelló Marín', 'carlos_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (14, '2005-06-06 08:59:51.616666', 8, 20, '2005-06-06 08:59:51.616666', 'Dafydd Harries', 'daf_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (16, '2005-06-06 08:59:51.593849', 8, 20, '2005-06-06 08:59:51.593849', 'Foo Bar', 'name16_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (22, '2005-06-06 08:59:51.59276', 8, 20, '2005-06-06 08:59:51.59276', 'Stuart Bishop', 'stub_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (23, '2005-06-06 08:59:51.620823', 8, 20, '2005-06-06 08:59:51.620823', 'David Allouche', 'ddaa_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (26, '2005-06-06 08:59:51.618722', 2, 20, '2008-09-05 20:55:47.76904', 'Daniel Silverstone', 'kinnison_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (27, '2005-06-06 08:59:51.557224', 8, 20, '2005-06-06 08:59:51.557224', 'Daniel Henrique Debonzi', 'debonzi_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (28, '2005-06-06 08:59:51.59705', 8, 20, '2005-06-06 08:59:51.59705', 'Celso Providelo', 'cprov_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (29, '2005-06-06 08:59:51.596025', 8, 20, '2005-06-06 08:59:51.596025', 'Guilherme Salgado', 'salgado_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (33, '2005-06-06 08:59:51.621892', 8, 20, '2005-06-06 08:59:51.621892', 'Edgar Bursic', 'edgar_oid', '', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (34, '2005-06-06 08:59:51.622908', 4, 10, '2005-06-06 08:59:51.622908', 'Jordi Vilalta', 'jvprat_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (35, '2005-06-06 08:59:51.623962', 4, 10, '2005-06-06 08:59:51.623962', 'Sigurd Gartmann', 'sigurd-ubuntu_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (36, '2005-06-06 08:59:51.5244', 4, 10, '2005-06-06 08:59:51.5244', 'Vlastimil Skacel', 'skacel_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (37, '2005-06-06 08:59:51.549651', 8, 20, '2005-06-06 08:59:51.549651', 'Daniel Aguayo', 'danner_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (38, '2005-06-06 08:59:51.555051', 4, 10, '2005-06-06 08:59:51.555051', 'Martin Pitt', 'martin-pitt_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (39, '2005-06-06 08:59:51.556132', 4, 10, '2005-06-06 08:59:51.556132', 'Nicolas Velin', 'nsv_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (40, '2005-06-06 08:59:51.558429', 4, 10, '2005-06-06 08:59:51.558429', 'Francesco Accattapà', 'callipeo_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (41, '2005-06-06 08:59:51.559519', 4, 10, '2005-06-06 08:59:51.559519', 'Aloriel', 'jorge-gonzalez-gonzalez_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (42, '2005-06-06 08:59:51.560604', 4, 10, '2005-06-06 08:59:51.560604', 'Denis Barbier', 'barbier_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (43, '2005-06-06 08:59:51.561685', 4, 10, '2005-06-06 08:59:51.561685', 'André Luís Lopes', 'andrelop_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (44, '2005-06-06 08:59:51.562857', 4, 10, '2005-06-06 08:59:51.562857', 'Carlos Valdivia Yagüe', 'valyag_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (45, '2005-06-06 08:59:51.563952', 4, 10, '2005-06-06 08:59:51.563952', 'Luk Claes', 'luk-claes_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (46, '2005-06-06 08:59:51.565033', 8, 20, '2005-06-06 08:59:51.565033', 'Miroslav Kure', 'kurem_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (47, '2005-06-06 08:59:51.56614', 4, 10, '2005-06-06 08:59:51.56614', 'Morten Brix Pedersen', 'morten_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (48, '2005-06-06 08:59:51.567224', 4, 10, '2005-06-06 08:59:51.567224', 'Matti Pöllä', 'mpo_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (49, '2005-06-06 08:59:51.568323', 4, 10, '2005-06-06 08:59:51.568323', 'Kęstutis Biliūnas', 'kebil_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (50, '2005-06-06 08:59:51.569518', 8, 20, '2005-06-06 08:59:51.569518', 'Valentina Commissari', 'tsukimi_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (51, '2005-06-06 08:59:51.570701', 8, 20, '2005-06-06 08:59:51.570701', 'Helge Kreutzmann', 'kreutzm_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (52, '2005-06-06 08:59:51.593849', 8, 20, '2005-06-06 08:59:51.593849', 'No Privileges Person', 'no-priv_oid', NULL, '123/no-priv');
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (55, '2005-06-06 08:59:51.593849', 8, 20, '2005-06-06 08:59:51.593849', 'Marilize Coetzee', 'marilize_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (56, '2005-10-07 14:17:51.593849', 8, 20, '2005-10-07 14:17:51.593849', 'Jordi Mallach', 'jordi_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (58, '2005-12-06 09:48:58.287679', 8, 20, '2005-12-06 09:48:58.287679', 'Bug Importer', 'bug-importer_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (62, '2006-05-23 12:49:30.483464', 1, 10, '2006-05-23 12:49:30.483464', 'Bug Watch Updater', 'bug-watch-updater_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (63, '2006-05-23 12:49:30.483464', 8, 20, '2006-05-23 12:49:30.483464', 'Karl Tilbury', 'karl_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (65, '2006-10-04 16:20:51.19954', 1, 20, '2006-10-04 16:20:51.19954', 'Launchpad Janitor', 'launchpad-janitor_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (66, '2006-12-13 21:19:06.369142', 4, 10, '2006-12-13 21:19:06.369142', 'Diogo Matsubara', 'matsubara_oid', NULL, '456/matsubara');
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (67, '2006-10-04 16:20:51.19954', 1, 10, '2006-10-04 16:20:51.19954', 'Team Membership Janitor', 'team-membership-janitor_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (68, '2007-01-31 06:56:25.096519', 8, 20, '2007-01-31 06:56:25.096519', 'Launchpad Beta Testers Owner', 'launchpad-beta-owner_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (70, '2006-12-13 21:19:06.369142', 2, 30, '2008-02-01 13:01:01.000001', 'Former User', 'former-user_oid', 'an ex-user', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243602, '2007-02-19 11:17:57.755666', 8, 20, '2007-02-19 11:17:57.755666', 'No Team Memberships', 'no-team-memberships_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243603, '2007-02-21 10:53:59.700105', 8, 20, '2007-02-21 10:53:59.700105', 'One Membership', 'one-membership_oid', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243606, '2007-08-09 21:25:37.832976', 7, 10, '2007-08-09 21:25:37.832976', 'Julian Edwards', 'neMCQNd', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243608, '2007-11-12 15:23:19.847132', 3, 10, '2007-11-12 15:23:19.847132', 'Ubuntu Doc Team', 'WQPMHdf', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243609, '2007-12-07 13:43:20.393704', 8, 20, '2007-12-07 13:43:20.393704', 'Katie', '6w7kmzC', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243610, '2007-12-14 16:52:15.403833', 1, 20, '2007-12-14 16:52:15.403833', 'Gold Member', 'cCGE3LA', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243611, '2007-12-14 16:52:15.403833', 1, 20, '2007-12-14 16:52:15.403833', 'Owner', 'MGWJnTL', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243614, '2007-12-18 16:31:34.790641', 15, 10, '2007-12-18 16:31:34.790641', 'josh', '6KHNEe3', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243615, '2007-12-18 16:31:34.790641', 15, 10, '2007-12-18 16:31:34.790641', 'Sjoerd Simons', 'yEzBPbd', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243616, '2007-12-18 16:31:34.790641', 15, 10, '2007-12-18 16:31:34.790641', 'Martin Pitt', 'R8FpwXd', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243617, '2008-03-06 09:55:27.289842', 1, 20, '2008-06-16 07:02:18.857109', 'Tim Penhey', 'CALDpFr', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243622, '2008-05-12 17:38:38.798696', 8, 20, '2008-06-16 07:02:18.857109', 'Commercial Member', 'rPwGRk4', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243623, '2008-06-27 14:49:11.149508', 8, 20, '2008-06-27 14:49:11.149508', 'Brad Crittenden', 'mTmeENb', NULL, NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243624, '2008-11-04 12:59:26.965843', 8, 20, '2008-11-04 13:09:43.807125', 'PPA key guard', 'cF4PNk3', NULL, '771/ppa-key-guard');
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243625, '2009-03-17 07:28:15.948042', 1, 20, '2009-03-17 07:28:15.948042', 'Ubuntu-branches-owner', '3sbtGMy', 'Activated when the preferred email was set.', '538/ubuntu-branches-owner');
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243626, '2009-08-04 10:49:59.788665', 1, 20, '2009-08-04 10:49:59.788665', 'Techboard Owner', 'LQCGF4D', 'Activated when the preferred email was set.', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243627, '2009-08-24 13:10:40.725354', 1, 20, '2009-08-24 13:10:40.725354', 'Translator Deity', 'tMRLhWD', 'Activated when the preferred email was set.', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243628, '2009-08-24 14:09:47.90688', 1, 20, '2009-08-24 14:09:47.90688', 'Срба Србић', 'MY7Gzdp', 'Activated when the preferred email was set.', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243629, '2009-08-24 14:09:47.985931', 1, 20, '2009-08-24 14:09:47.985931', 'João da Silva', 'DX4rknT', 'Activated when the preferred email was set.', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243630, '2009-08-24 14:09:48.020245', 1, 20, '2009-08-24 14:09:48.020245', 'Jürgen Müller', 'f8mXQsW', 'Activated when the preferred email was set.', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243631, '2009-08-24 14:09:48.055449', 1, 20, '2009-08-24 14:09:48.055449', 'Dolores Dominguez', 'DeKE6Tx', 'Activated when the preferred email was set.', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243632, '2009-08-24 14:18:51.939952', 1, 20, '2009-08-24 14:18:51.939952', 'Иван Петровић', 'wwnwXxy', 'Activated when the preferred email was set.', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243633, '2009-08-24 14:19:05.515651', 1, 20, '2009-08-24 14:19:05.515651', 'Juanita Perez', 'YEBL6yn', 'Activated when the preferred email was set.', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243634, '2009-08-24 14:36:04.670944', 1, 20, '2009-08-24 14:36:04.670944', 'Epiphany Maintainer', 'HHN3kCp', 'Activated when the preferred email was set.', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243635, '2009-08-24 14:39:04.682972', 1, 20, '2009-08-24 14:39:04.682972', 'intltool maintainer', 'NbFGnBx', 'Activated when the preferred email was set.', NULL);
-INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, openid_identifier, status_comment, old_openid_identifier) VALUES (243636, '2009-08-24 14:41:04.403504', 1, 20, '2009-08-24 14:41:04.403504', 'Lies Maintainer', 'e3eENYJ', 'Activated when the preferred email was set.', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (11, '2005-06-06 08:59:51.591618', 8, 20, '2005-06-06 08:59:51.591618', 'Mark Shuttleworth', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (21, '2005-06-06 08:59:51.598107', 8, 20, '2005-06-06 08:59:51.598107', 'Robert Collins', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (31, '2005-06-06 08:59:51.610048', 1, 20, '2008-09-05 20:55:47.76904', 'Dave Miller', '');
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (41, '2005-06-06 08:59:51.611185', 8, 20, '2005-06-06 08:59:51.611185', 'Colin Watson', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (51, '2005-06-06 08:59:51.608802', 1, 10, '2005-06-06 08:59:51.608802', 'Scott James Remnant', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (61, '2005-06-06 08:59:51.600523', 8, 20, '2005-06-06 08:59:51.600523', 'Jeff Waugh', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (71, '2005-06-06 08:59:51.551196', 2, 20, '2008-09-05 20:55:47.76904', 'Andrew Bennetts', '');
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (81, '2005-06-06 08:59:51.601584', 8, 20, '2005-06-06 08:59:51.601584', 'James Blackwell', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (91, '2005-06-06 08:59:51.594941', 1, 10, '2005-06-06 08:59:51.594941', 'Christian Reis', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (101, '2005-06-06 08:59:51.619713', 2, 20, '2008-09-05 20:55:47.76904', 'Alexander Limi', '');
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (111, '2005-06-06 08:59:51.599234', 8, 20, '2005-06-06 08:59:51.599234', 'Steve Alexander', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (121, '2005-06-06 08:59:51.612277', 8, 20, '2005-06-06 08:59:51.612277', 'Sample Person', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (131, '2005-06-06 08:59:51.615543', 8, 20, '2005-06-06 08:59:51.615543', 'Carlos Perelló Marín', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (141, '2005-06-06 08:59:51.616666', 8, 20, '2005-06-06 08:59:51.616666', 'Dafydd Harries', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (161, '2005-06-06 08:59:51.593849', 8, 20, '2005-06-06 08:59:51.593849', 'Foo Bar', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (221, '2005-06-06 08:59:51.59276', 8, 20, '2005-06-06 08:59:51.59276', 'Stuart Bishop', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (231, '2005-06-06 08:59:51.620823', 8, 20, '2005-06-06 08:59:51.620823', 'David Allouche', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (261, '2005-06-06 08:59:51.618722', 2, 20, '2008-09-05 20:55:47.76904', 'Daniel Silverstone', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (271, '2005-06-06 08:59:51.557224', 8, 20, '2005-06-06 08:59:51.557224', 'Daniel Henrique Debonzi', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (281, '2005-06-06 08:59:51.59705', 8, 20, '2005-06-06 08:59:51.59705', 'Celso Providelo', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (291, '2005-06-06 08:59:51.596025', 8, 20, '2005-06-06 08:59:51.596025', 'Guilherme Salgado', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (331, '2005-06-06 08:59:51.621892', 8, 20, '2005-06-06 08:59:51.621892', 'Edgar Bursic', '');
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (341, '2005-06-06 08:59:51.622908', 4, 10, '2005-06-06 08:59:51.622908', 'Jordi Vilalta', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (351, '2005-06-06 08:59:51.623962', 4, 10, '2005-06-06 08:59:51.623962', 'Sigurd Gartmann', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (361, '2005-06-06 08:59:51.5244', 4, 10, '2005-06-06 08:59:51.5244', 'Vlastimil Skacel', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (371, '2005-06-06 08:59:51.549651', 8, 20, '2005-06-06 08:59:51.549651', 'Daniel Aguayo', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (381, '2005-06-06 08:59:51.555051', 4, 10, '2005-06-06 08:59:51.555051', 'Martin Pitt', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (391, '2005-06-06 08:59:51.556132', 4, 10, '2005-06-06 08:59:51.556132', 'Nicolas Velin', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (401, '2005-06-06 08:59:51.558429', 4, 10, '2005-06-06 08:59:51.558429', 'Francesco Accattapà', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (411, '2005-06-06 08:59:51.559519', 4, 10, '2005-06-06 08:59:51.559519', 'Aloriel', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (421, '2005-06-06 08:59:51.560604', 4, 10, '2005-06-06 08:59:51.560604', 'Denis Barbier', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (431, '2005-06-06 08:59:51.561685', 4, 10, '2005-06-06 08:59:51.561685', 'André Luís Lopes', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (441, '2005-06-06 08:59:51.562857', 4, 10, '2005-06-06 08:59:51.562857', 'Carlos Valdivia Yagüe', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (451, '2005-06-06 08:59:51.563952', 4, 10, '2005-06-06 08:59:51.563952', 'Luk Claes', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (461, '2005-06-06 08:59:51.565033', 8, 20, '2005-06-06 08:59:51.565033', 'Miroslav Kure', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (471, '2005-06-06 08:59:51.56614', 4, 10, '2005-06-06 08:59:51.56614', 'Morten Brix Pedersen', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (481, '2005-06-06 08:59:51.567224', 4, 10, '2005-06-06 08:59:51.567224', 'Matti Pöllä', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (491, '2005-06-06 08:59:51.568323', 4, 10, '2005-06-06 08:59:51.568323', 'Kęstutis Biliūnas', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (501, '2005-06-06 08:59:51.569518', 8, 20, '2005-06-06 08:59:51.569518', 'Valentina Commissari', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (511, '2005-06-06 08:59:51.570701', 8, 20, '2005-06-06 08:59:51.570701', 'Helge Kreutzmann', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (521, '2005-06-06 08:59:51.593849', 8, 20, '2005-06-06 08:59:51.593849', 'No Privileges Person', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (551, '2005-06-06 08:59:51.593849', 8, 20, '2005-06-06 08:59:51.593849', 'Marilize Coetzee', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (561, '2005-10-07 14:17:51.593849', 8, 20, '2005-10-07 14:17:51.593849', 'Jordi Mallach', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (581, '2005-12-06 09:48:58.287679', 8, 20, '2005-12-06 09:48:58.287679', 'Bug Importer', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (621, '2006-05-23 12:49:30.483464', 1, 10, '2006-05-23 12:49:30.483464', 'Bug Watch Updater', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (631, '2006-05-23 12:49:30.483464', 8, 20, '2006-05-23 12:49:30.483464', 'Karl Tilbury', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (651, '2006-10-04 16:20:51.19954', 1, 20, '2006-10-04 16:20:51.19954', 'Launchpad Janitor', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (661, '2006-12-13 21:19:06.369142', 4, 10, '2006-12-13 21:19:06.369142', 'Diogo Matsubara', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (671, '2006-10-04 16:20:51.19954', 1, 10, '2006-10-04 16:20:51.19954', 'Team Membership Janitor', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (681, '2007-01-31 06:56:25.096519', 8, 20, '2007-01-31 06:56:25.096519', 'Launchpad Beta Testers Owner', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (701, '2006-12-13 21:19:06.369142', 2, 30, '2008-02-01 13:01:01.000001', 'Former User', 'an ex-user');
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (243637, '2010-07-12 09:48:27.198885', 1, 20, '2010-07-12 09:48:27.198885', 'Software-center-agent', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436021, '2007-02-19 11:17:57.755666', 8, 20, '2007-02-19 11:17:57.755666', 'No Team Memberships', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436031, '2007-02-21 10:53:59.700105', 8, 20, '2007-02-21 10:53:59.700105', 'One Membership', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436061, '2007-08-09 21:25:37.832976', 7, 10, '2007-08-09 21:25:37.832976', 'Julian Edwards', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436081, '2007-11-12 15:23:19.847132', 3, 10, '2007-11-12 15:23:19.847132', 'Ubuntu Doc Team', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436091, '2007-12-07 13:43:20.393704', 8, 20, '2007-12-07 13:43:20.393704', 'Katie', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436101, '2007-12-14 16:52:15.403833', 1, 20, '2007-12-14 16:52:15.403833', 'Gold Member', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436111, '2007-12-14 16:52:15.403833', 1, 20, '2007-12-14 16:52:15.403833', 'Owner', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436141, '2007-12-18 16:31:34.790641', 15, 10, '2007-12-18 16:31:34.790641', 'josh', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436151, '2007-12-18 16:31:34.790641', 15, 10, '2007-12-18 16:31:34.790641', 'Sjoerd Simons', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436161, '2007-12-18 16:31:34.790641', 15, 10, '2007-12-18 16:31:34.790641', 'Martin Pitt', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436171, '2008-03-06 09:55:27.289842', 1, 20, '2008-06-16 07:02:18.857109', 'Tim Penhey', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436221, '2008-05-12 17:38:38.798696', 8, 20, '2008-06-16 07:02:18.857109', 'Commercial Member', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436231, '2008-06-27 14:49:11.149508', 8, 20, '2008-06-27 14:49:11.149508', 'Brad Crittenden', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436241, '2008-11-04 12:59:26.965843', 8, 20, '2008-11-04 13:09:43.807125', 'PPA key guard', NULL);
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436242, '2009-03-17 07:26:14.024613', 1, 20, '2009-03-17 07:26:14.024613', 'Ubuntu-branches-owner', 'Activated when the preferred email was set.');
+INSERT INTO account (id, date_created, creation_rationale, status, date_status_set, displayname, status_comment) VALUES (2436243, '2009-08-04 10:50:39.383407', 1, 20, '2009-08-04 10:50:39.383407', 'Techboard Owner', 'Activated when the preferred email was set.');
 
 
 ALTER TABLE account ENABLE TRIGGER ALL;
@@ -831,74 +889,1124 @@
 
 ALTER TABLE accountpassword DISABLE TRIGGER ALL;
 
-INSERT INTO accountpassword (id, account, password) VALUES (1, 1, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
-INSERT INTO accountpassword (id, account, password) VALUES (2, 2, 'ID1adsprLaTBox18F6dpSdtSdqCiOdpgUXBo4oG17qhg73jSDTVe3g==');
-INSERT INTO accountpassword (id, account, password) VALUES (4, 4, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
-INSERT INTO accountpassword (id, account, password) VALUES (6, 6, 'egSV4F7r1WCy/hf5jWu7AlOfsdt6E5/eGUDj2esLlEPV8VfJSdIJSQ==');
-INSERT INTO accountpassword (id, account, password) VALUES (8, 8, 'AqRrSgxlaD/jsmKcwKM6WRV6RjgdyuND0kHVDSFG+F1FGUCoCXncuQ==');
-INSERT INTO accountpassword (id, account, password) VALUES (11, 11, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
-INSERT INTO accountpassword (id, account, password) VALUES (12, 12, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
-INSERT INTO accountpassword (id, account, password) VALUES (13, 13, 'MdB+BoAdbza3BA6mIkMm6bFo1kv9hR2PKZ3U');
-INSERT INTO accountpassword (id, account, password) VALUES (14, 14, 'pGQrbOLX8qWHLVFxd/VPhZlqhPDXj/3/8p8CeEUYIFfYziLKdTbJNQ==');
-INSERT INTO accountpassword (id, account, password) VALUES (16, 16, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
-INSERT INTO accountpassword (id, account, password) VALUES (22, 22, 'I+lQozEFEr+uBuxQZuKGpL4jkiy6lE1dQsZx');
-INSERT INTO accountpassword (id, account, password) VALUES (23, 23, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
-INSERT INTO accountpassword (id, account, password) VALUES (27, 27, 'DAJs/l1RrrYFPPd2mBY4b/aFjnTfodXOyg+L+U6uPxUy8rCp/IFC/w==');
-INSERT INTO accountpassword (id, account, password) VALUES (28, 28, 'OkikNBxGC7hgRBJ109OZru86vpzhHw+cO+zW/0SlTN2phfv7lSwSHg==');
-INSERT INTO accountpassword (id, account, password) VALUES (29, 29, 'DAJs/l1RrrYFPPd2mBY4b/aFjnTfodXOyg+L+U6uPxUy8rCp/IFC/w==');
-INSERT INTO accountpassword (id, account, password) VALUES (33, 33, 'test');
-INSERT INTO accountpassword (id, account, password) VALUES (34, 34, 'gsTz0TyTUL7xrkoAH4Yz2WE6/w6WoYG5LjaO8p/xA1FDdSM6qkWiYA==');
-INSERT INTO accountpassword (id, account, password) VALUES (35, 35, 'FvPq9d4L5onnmcRA9wCzQ5lnPPYIzvW5rJA7GGnnsJuQqz8M8naZkQ==');
-INSERT INTO accountpassword (id, account, password) VALUES (36, 36, 'lyA9CgUH9lHmTiaiWGP2vzkmytufiHBAnc9c8WCX1g5pYyBd6QgL3A==');
-INSERT INTO accountpassword (id, account, password) VALUES (37, 37, 'bpLapC1tQHUedQBP447krtcmaRPd3hrncPusTlNUKXh5ymfO5yVhhQ==');
-INSERT INTO accountpassword (id, account, password) VALUES (38, 38, 'DAJs/l1RrrYFPPd2mBY4b/aFjnTfodXOyg+L+U6uPxUy8rCp/IFC/w==');
-INSERT INTO accountpassword (id, account, password) VALUES (39, 39, 'U2QzusrIFlQZKb3hWzcLpfhFcB3WZ0fa0E+OwcV8q/WOtsQCjarzzA==');
-INSERT INTO accountpassword (id, account, password) VALUES (40, 40, 'mSKDc1EKoi8a5L0zd+oueU33nuSEuFWy+JHIHxOukBVJt9LPW47RVg==');
-INSERT INTO accountpassword (id, account, password) VALUES (41, 41, '94y1dy33Evut2/bLsGG8Pzguyuip9wHeRtFWp0cSItzHdD1tK3gmcQ==');
-INSERT INTO accountpassword (id, account, password) VALUES (42, 42, 'vI/vIVB2qsx1NvuaMy+q4l8rWUNMFINWzCSLOK1D5qi97/VmXvIrEw==');
-INSERT INTO accountpassword (id, account, password) VALUES (43, 43, 'HG6qWB8PwzfIr3z+Tu+m3lQv7r1dsaWY6rxCxRuNypGomTPTzBh9iA==');
-INSERT INTO accountpassword (id, account, password) VALUES (44, 44, 'xrXafuC+VBaIz3m2+0UMjxms+2KhGhj6qnQdoo2V/f4iNFHJgSDzzw==');
-INSERT INTO accountpassword (id, account, password) VALUES (45, 45, 'w+f2krWWyQIIm76PIUEIsMCNQLhWLjObLcDONJNjjXcRaiKzKXeMAw==');
-INSERT INTO accountpassword (id, account, password) VALUES (46, 46, '1u05okOZJIa069F8COZ2vmxRq11c+4rolNUVRp539TI5ihnHwk9+Sw==');
-INSERT INTO accountpassword (id, account, password) VALUES (47, 47, 'n+KIa3PoihBN8ljj9Hjg9H3Im2LWnrn2yprgY4u/MnxOQx3dOh3bDw==');
-INSERT INTO accountpassword (id, account, password) VALUES (48, 48, 'U4KMnp73AYdriB7QH2NpEYhlH+fBWJKziDPcDAt25OxItZMYh0QV4Q==');
-INSERT INTO accountpassword (id, account, password) VALUES (49, 49, 'YbUJ4nzlxjYtaLLFMqUFL3LplUpS3FxcYwiCAS0WaAcnXS8Sst9BgA==');
-INSERT INTO accountpassword (id, account, password) VALUES (50, 50, 'MdB+BoAdbza3BA6mIkMm6bFo1kv9hR2PKZ3U');
-INSERT INTO accountpassword (id, account, password) VALUES (51, 51, 'sYVFKi2dWAfkFkWekcW296s2dZ0ihYcxAXtwumI1FQJes4PWD8xvqQ==');
-INSERT INTO accountpassword (id, account, password) VALUES (52, 52, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
-INSERT INTO accountpassword (id, account, password) VALUES (55, 55, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
-INSERT INTO accountpassword (id, account, password) VALUES (56, 56, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
-INSERT INTO accountpassword (id, account, password) VALUES (58, 58, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
-INSERT INTO accountpassword (id, account, password) VALUES (63, 63, 'UnjDN34pTZ0xE3vbCNZDedIVpLPrA9nty9S/mOzbeefQXAEN6CMNUQ==');
-INSERT INTO accountpassword (id, account, password) VALUES (68, 68, 'q/esKTLj2ba0Bzu2Xdi1JA4zgC683EE3I1Vjm+hp4mY+xgikQ7YD1g==');
-INSERT INTO accountpassword (id, account, password) VALUES (243602, 243602, 'PlPmrpS1styVUEK/lGn72zqxYYeZcLqKD3b5oD4/C6AyntMMFvSacw==');
-INSERT INTO accountpassword (id, account, password) VALUES (243603, 243603, '52kdKnxgzc0LWK2ltsED9SeqQcjZgDAj+wWlaRotx3BvsXJCH0AUdQ==');
-INSERT INTO accountpassword (id, account, password) VALUES (243610, 243610, '9I7bMpJUcBH+edfXjswjdo7nC6iuoTU6YAqolznT59Q1h9v+z9pdVQ==');
-INSERT INTO accountpassword (id, account, password) VALUES (243611, 243611, 'zpAkRDpNGKvwvoPeBHuuwK4RFNCrwUnEMglcuWfzV1FCZ0M9nskK8w==');
-INSERT INTO accountpassword (id, account, password) VALUES (243617, 243617, '8Z3kccpOP4HerugZDi/VS5VePtVDHk48XE11Mx0DmpnqiPsDLczI3g==');
-INSERT INTO accountpassword (id, account, password) VALUES (243622, 243622, 'x3AXXkF9hiPAvbgZSrM/6wczynYy0x+o7SNoT+Gy2Z1GZCMcDNb08A==');
-INSERT INTO accountpassword (id, account, password) VALUES (243623, 243623, '0HM7dR9mHB8uh4Pi88me/V7VrPBbsZZCIVWtkjyHV9WtA0QMcaVM5w==');
-INSERT INTO accountpassword (id, account, password) VALUES (243624, 26, 'test');
-INSERT INTO accountpassword (id, account, password) VALUES (243625, 7, 'test');
-INSERT INTO accountpassword (id, account, password) VALUES (243626, 3, 'test');
-INSERT INTO accountpassword (id, account, password) VALUES (243627, 10, 'test');
-INSERT INTO accountpassword (id, account, password) VALUES (243628, 243624, 'test');
-INSERT INTO accountpassword (id, account, password) VALUES (243629, 243625, 'Q7JyCMcPnbp7vrUbqwzllKxRgXj2UGSDDxs4G9zS1v8WVJHqkf/niw==');
-INSERT INTO accountpassword (id, account, password) VALUES (243630, 243627, 'gYi/tS3y+UK0KAL4UEsXGqtHK+ijQldSC57FF/ixw2RlfGEtsCiTuA==');
-INSERT INTO accountpassword (id, account, password) VALUES (243631, 243628, 'ImJ6/dkjvw68GoZtq5QmU500UtZ+3rOsEvFLLdnVreg3I1OA4s/CJg==');
-INSERT INTO accountpassword (id, account, password) VALUES (243632, 243629, 'xZUsNnWcjsmGdKFUPCf4HAX3f4NqRkh3Zm5o9vnBx7Oj2kISLcfp1g==');
-INSERT INTO accountpassword (id, account, password) VALUES (243633, 243630, 'oFiUnz5xQQkGJdyRu4oOB7AmVyZVW87YAvCI54QiB782KpKrHs6Cpw==');
-INSERT INTO accountpassword (id, account, password) VALUES (243634, 243631, 'zjEF5xn8fVJGFBlyUZU2ZGvgfNMbLXTF3hTca5fRaDbFcJWQujSoEg==');
-INSERT INTO accountpassword (id, account, password) VALUES (243635, 243632, 'GBt/GtCH6Lxq+CBODWC+H5tsGystXLXi+0gvW5KcLB478Vg3BPPR3A==');
-INSERT INTO accountpassword (id, account, password) VALUES (243636, 243633, 'BnGHiCNGHvHeUlH52LikKWMT5xqQ5e+5Wkk0dKgRrvCwFpxodDJI7g==');
-INSERT INTO accountpassword (id, account, password) VALUES (243637, 243634, 'j4OCnhO4rzPbvyJgeHYEGXe8FpfWYMsCsRvxb9wZVBUHsCfiUP/XvQ==');
-INSERT INTO accountpassword (id, account, password) VALUES (243638, 243635, 'MUDYvi+UgO8wUqxwhd9br4tXTgxPpcloU5EUW5BRTNUKUS2ac+hG7A==');
-INSERT INTO accountpassword (id, account, password) VALUES (243639, 243636, 'Q/4V+DAYdXXoCoQiSchF9DkfA4ntWhmkJhmzcmYmylpEnlbiZfvyIA==');
+INSERT INTO accountpassword (id, account, password) VALUES (1, 11, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
+INSERT INTO accountpassword (id, account, password) VALUES (2, 21, 'ID1adsprLaTBox18F6dpSdtSdqCiOdpgUXBo4oG17qhg73jSDTVe3g==');
+INSERT INTO accountpassword (id, account, password) VALUES (4, 41, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
+INSERT INTO accountpassword (id, account, password) VALUES (6, 61, 'egSV4F7r1WCy/hf5jWu7AlOfsdt6E5/eGUDj2esLlEPV8VfJSdIJSQ==');
+INSERT INTO accountpassword (id, account, password) VALUES (8, 81, 'AqRrSgxlaD/jsmKcwKM6WRV6RjgdyuND0kHVDSFG+F1FGUCoCXncuQ==');
+INSERT INTO accountpassword (id, account, password) VALUES (11, 111, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
+INSERT INTO accountpassword (id, account, password) VALUES (12, 121, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
+INSERT INTO accountpassword (id, account, password) VALUES (13, 131, 'MdB+BoAdbza3BA6mIkMm6bFo1kv9hR2PKZ3U');
+INSERT INTO accountpassword (id, account, password) VALUES (14, 141, 'pGQrbOLX8qWHLVFxd/VPhZlqhPDXj/3/8p8CeEUYIFfYziLKdTbJNQ==');
+INSERT INTO accountpassword (id, account, password) VALUES (16, 161, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
+INSERT INTO accountpassword (id, account, password) VALUES (22, 221, 'I+lQozEFEr+uBuxQZuKGpL4jkiy6lE1dQsZx');
+INSERT INTO accountpassword (id, account, password) VALUES (23, 231, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
+INSERT INTO accountpassword (id, account, password) VALUES (27, 271, 'DAJs/l1RrrYFPPd2mBY4b/aFjnTfodXOyg+L+U6uPxUy8rCp/IFC/w==');
+INSERT INTO accountpassword (id, account, password) VALUES (28, 281, 'OkikNBxGC7hgRBJ109OZru86vpzhHw+cO+zW/0SlTN2phfv7lSwSHg==');
+INSERT INTO accountpassword (id, account, password) VALUES (29, 291, 'DAJs/l1RrrYFPPd2mBY4b/aFjnTfodXOyg+L+U6uPxUy8rCp/IFC/w==');
+INSERT INTO accountpassword (id, account, password) VALUES (33, 331, 'test');
+INSERT INTO accountpassword (id, account, password) VALUES (34, 341, 'gsTz0TyTUL7xrkoAH4Yz2WE6/w6WoYG5LjaO8p/xA1FDdSM6qkWiYA==');
+INSERT INTO accountpassword (id, account, password) VALUES (35, 351, 'FvPq9d4L5onnmcRA9wCzQ5lnPPYIzvW5rJA7GGnnsJuQqz8M8naZkQ==');
+INSERT INTO accountpassword (id, account, password) VALUES (36, 361, 'lyA9CgUH9lHmTiaiWGP2vzkmytufiHBAnc9c8WCX1g5pYyBd6QgL3A==');
+INSERT INTO accountpassword (id, account, password) VALUES (37, 371, 'bpLapC1tQHUedQBP447krtcmaRPd3hrncPusTlNUKXh5ymfO5yVhhQ==');
+INSERT INTO accountpassword (id, account, password) VALUES (38, 381, 'DAJs/l1RrrYFPPd2mBY4b/aFjnTfodXOyg+L+U6uPxUy8rCp/IFC/w==');
+INSERT INTO accountpassword (id, account, password) VALUES (39, 391, 'U2QzusrIFlQZKb3hWzcLpfhFcB3WZ0fa0E+OwcV8q/WOtsQCjarzzA==');
+INSERT INTO accountpassword (id, account, password) VALUES (40, 401, 'mSKDc1EKoi8a5L0zd+oueU33nuSEuFWy+JHIHxOukBVJt9LPW47RVg==');
+INSERT INTO accountpassword (id, account, password) VALUES (41, 411, '94y1dy33Evut2/bLsGG8Pzguyuip9wHeRtFWp0cSItzHdD1tK3gmcQ==');
+INSERT INTO accountpassword (id, account, password) VALUES (42, 421, 'vI/vIVB2qsx1NvuaMy+q4l8rWUNMFINWzCSLOK1D5qi97/VmXvIrEw==');
+INSERT INTO accountpassword (id, account, password) VALUES (43, 431, 'HG6qWB8PwzfIr3z+Tu+m3lQv7r1dsaWY6rxCxRuNypGomTPTzBh9iA==');
+INSERT INTO accountpassword (id, account, password) VALUES (44, 441, 'xrXafuC+VBaIz3m2+0UMjxms+2KhGhj6qnQdoo2V/f4iNFHJgSDzzw==');
+INSERT INTO accountpassword (id, account, password) VALUES (45, 451, 'w+f2krWWyQIIm76PIUEIsMCNQLhWLjObLcDONJNjjXcRaiKzKXeMAw==');
+INSERT INTO accountpassword (id, account, password) VALUES (46, 461, '1u05okOZJIa069F8COZ2vmxRq11c+4rolNUVRp539TI5ihnHwk9+Sw==');
+INSERT INTO accountpassword (id, account, password) VALUES (47, 471, 'n+KIa3PoihBN8ljj9Hjg9H3Im2LWnrn2yprgY4u/MnxOQx3dOh3bDw==');
+INSERT INTO accountpassword (id, account, password) VALUES (48, 481, 'U4KMnp73AYdriB7QH2NpEYhlH+fBWJKziDPcDAt25OxItZMYh0QV4Q==');
+INSERT INTO accountpassword (id, account, password) VALUES (49, 491, 'YbUJ4nzlxjYtaLLFMqUFL3LplUpS3FxcYwiCAS0WaAcnXS8Sst9BgA==');
+INSERT INTO accountpassword (id, account, password) VALUES (50, 501, 'MdB+BoAdbza3BA6mIkMm6bFo1kv9hR2PKZ3U');
+INSERT INTO accountpassword (id, account, password) VALUES (51, 511, 'sYVFKi2dWAfkFkWekcW296s2dZ0ihYcxAXtwumI1FQJes4PWD8xvqQ==');
+INSERT INTO accountpassword (id, account, password) VALUES (52, 521, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
+INSERT INTO accountpassword (id, account, password) VALUES (55, 551, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
+INSERT INTO accountpassword (id, account, password) VALUES (56, 561, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
+INSERT INTO accountpassword (id, account, password) VALUES (58, 581, 'K7Qmeansl6RbuPfulfcmyDQOzp70OxVh5Fcf');
+INSERT INTO accountpassword (id, account, password) VALUES (63, 631, 'UnjDN34pTZ0xE3vbCNZDedIVpLPrA9nty9S/mOzbeefQXAEN6CMNUQ==');
+INSERT INTO accountpassword (id, account, password) VALUES (68, 681, 'q/esKTLj2ba0Bzu2Xdi1JA4zgC683EE3I1Vjm+hp4mY+xgikQ7YD1g==');
+INSERT INTO accountpassword (id, account, password) VALUES (243602, 2436021, 'PlPmrpS1styVUEK/lGn72zqxYYeZcLqKD3b5oD4/C6AyntMMFvSacw==');
+INSERT INTO accountpassword (id, account, password) VALUES (243603, 2436031, '52kdKnxgzc0LWK2ltsED9SeqQcjZgDAj+wWlaRotx3BvsXJCH0AUdQ==');
+INSERT INTO accountpassword (id, account, password) VALUES (243610, 2436101, '9I7bMpJUcBH+edfXjswjdo7nC6iuoTU6YAqolznT59Q1h9v+z9pdVQ==');
+INSERT INTO accountpassword (id, account, password) VALUES (243611, 2436111, 'zpAkRDpNGKvwvoPeBHuuwK4RFNCrwUnEMglcuWfzV1FCZ0M9nskK8w==');
+INSERT INTO accountpassword (id, account, password) VALUES (243617, 2436171, '8Z3kccpOP4HerugZDi/VS5VePtVDHk48XE11Mx0DmpnqiPsDLczI3g==');
+INSERT INTO accountpassword (id, account, password) VALUES (243622, 2436221, 'x3AXXkF9hiPAvbgZSrM/6wczynYy0x+o7SNoT+Gy2Z1GZCMcDNb08A==');
+INSERT INTO accountpassword (id, account, password) VALUES (243623, 2436231, '0HM7dR9mHB8uh4Pi88me/V7VrPBbsZZCIVWtkjyHV9WtA0QMcaVM5w==');
+INSERT INTO accountpassword (id, account, password) VALUES (243624, 261, 'test');
+INSERT INTO accountpassword (id, account, password) VALUES (243625, 71, 'test');
+INSERT INTO accountpassword (id, account, password) VALUES (243626, 31, 'test');
+INSERT INTO accountpassword (id, account, password) VALUES (243627, 101, 'test');
+INSERT INTO accountpassword (id, account, password) VALUES (243628, 2436241, 'test');
+INSERT INTO accountpassword (id, account, password) VALUES (243629, 2436242, 'AmIxkZe2yl53W8ai9xg8ok+JtsX1CTpR6Ma9bT5LJyMMz1HXnvfPoA==');
 
 
 ALTER TABLE accountpassword ENABLE TRIGGER ALL;
 
 
+ALTER TABLE language DISABLE TRIGGER ALL;
+
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (1, 'aa', 'Afar', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (2, 'ab', 'Abkhazian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (3, 'ace', 'Achinese', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (4, 'ach', 'Acoli', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (5, 'ada', 'Adangme', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (6, 'ady', 'Adyghe; Adygei', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (7, 'afa', 'Afro-Asiatic (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (8, 'afh', 'Afrihili', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (9, 'af', 'Afrikaans', NULL, 2, 'n != 1', true, 0, '{95a05dab-bf44-4804-bb97-be2a3ee83acd}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (10, 'ak', 'Akan', NULL, 2, 'n > 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (11, 'akk', 'Akkadian', NULL, 2, 'n > 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (12, 'sq', 'Albanian', NULL, 2, 'n != 1', true, 0, '{5ea95deb-8819-4960-837f-46de0f22bf81}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (13, 'ale', 'Aleut', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (14, 'alg', 'Algonquian languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (15, 'am', 'Amharic', NULL, 2, 'n > 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (16, 'ang', 'English, Old (ca.450-1100)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (17, 'apa', 'Apache languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (18, 'ar', 'Arabic', NULL, 6, 'n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 && n%100<=10 ? 3 : n%100>=11 && n%100<=99 ? 4 : 5', true, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (19, 'arc', 'Aramaic', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (20, 'an', 'Aragonese', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (21, 'hy', 'Armenian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (22, 'arn', 'Araucanian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (23, 'arp', 'Arapaho', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (24, 'art', 'Artificial (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (25, 'arw', 'Arawak', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (26, 'as', 'Assamese', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (27, 'ast', 'Asturian; Bable', NULL, 1, 'n != 1', true, 0, '{b5cfaf65-895d-4d69-ba92-99438d6003e9}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (28, 'ath', 'Athapascan language', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (29, 'aus', 'Australian languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (30, 'av', 'Avaric', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (31, 'ae', 'Avestan', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (32, 'awa', 'Awadhi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (33, 'ay', 'Aymara', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (34, 'az', 'Azerbaijani', NULL, 2, 'n != 1', true, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (35, 'bad', 'Banda', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (36, 'bai', 'Bamileke languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (37, 'ba', 'Bashkir', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (38, 'bal', 'Baluchi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (39, 'bm', 'Bambara', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (40, 'ban', 'Balinese', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (41, 'eu', 'Basque', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (42, 'bas', 'Basa', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (43, 'bat', 'Baltic (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (44, 'bej', 'Beja', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (45, 'be', 'Belarusian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (46, 'bem', 'Bemba', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (47, 'bn', 'Bengali', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (48, 'ber', 'Berber (Other)', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (49, 'bho', 'Bhojpuri', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (50, 'bh', 'Bihari', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (51, 'bik', 'Bikol', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (52, 'bin', 'Bini', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (53, 'bi', 'Bislama', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (54, 'bla', 'Siksika', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (55, 'bnt', 'Bantu (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (56, 'bs', 'Bosnian', NULL, 3, 'n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (57, 'bra', 'Braj', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (58, 'br', 'Breton', NULL, 2, 'n > 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (59, 'btk', 'Batak (Indonesia)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (60, 'bua', 'Buriat', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (61, 'bug', 'Buginese', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (62, 'bg', 'Bulgarian', NULL, 2, 'n != 1', true, 0, '{b5962da4-752e-416c-9934-f97724f07051}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (63, 'my', 'Burmese', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (64, 'byn', 'Blin; Bilin', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (65, 'cad', 'Caddo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (66, 'cai', 'Central American Indian (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (67, 'car', 'Carib', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (68, 'ca', 'Catalan', NULL, 2, 'n != 1', true, 0, '{f3b38190-f8e0-4e8b-bf29-451fb95c0cbd}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (69, 'cau', 'Caucasian (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (70, 'ceb', 'Cebuano', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (71, 'cel', 'Celtic (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (72, 'ch', 'Chamorro', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (73, 'chb', 'Chibcha', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (74, 'ce', 'Chechen', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (75, 'chg', 'Chagatai', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (76, 'zh', 'Chinese', NULL, 1, '0', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (77, 'chk', 'Chukese', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (78, 'chm', 'Mari', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (79, 'chn', 'Chinook jargon', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (80, 'cho', 'Choctaw', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (81, 'chp', 'Chipewyan', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (82, 'chr', 'Cherokee', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (83, 'chu', 'Church Slavic', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (84, 'cv', 'Chuvash', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (85, 'chy', 'Cheyenne', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (86, 'cmc', 'Chamic languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (87, 'cop', 'Coptic', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (88, 'kw', 'Cornish', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (89, 'co', 'Corsican', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (90, 'cpe', 'English-based (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (91, 'cpf', 'French-based (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (92, 'cpp', 'Portuguese-based (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (93, 'cr', 'Cree', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (94, 'crh', 'Crimean Turkish; Crimean Tatar', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (95, 'crp', 'Creoles and pidgins (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (96, 'csb', 'Kashubian', NULL, 3, 'n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (97, 'cus', 'Cushitic (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (98, 'cs', 'Czech', NULL, 3, 'n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2', true, 0, '{37b3f2ec-1229-4289-a6d9-e94d2948ae7e}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (99, 'dak', 'Dakota', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (100, 'da', 'Danish', NULL, 2, 'n != 1', true, 0, '{1f391bb4-a820-4e44-8b68-01b385d13f94}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (101, 'dar', 'Dargwa', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (102, 'del', 'Delaware', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (103, 'den', 'Slave (Athapascan)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (104, 'dgr', 'Dogrib', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (105, 'din', 'Dinka', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (106, 'dv', 'Divehi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (107, 'doi', 'Dogri', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (108, 'dra', 'Dravidian (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (109, 'dsb', 'Lower Sorbian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (110, 'dua', 'Duala', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (111, 'dum', 'Dutch, Middle (ca. 1050-1350)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (112, 'nl', 'Dutch', NULL, 2, 'n != 1', true, 0, '{83532d50-69a7-46d7-9873-ed232d5b246b}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (113, 'dyu', 'Dyula', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (114, 'dz', 'Dzongkha', NULL, 1, '0', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (115, 'efi', 'Efik', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (116, 'egy', 'Egyptian (Ancient)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (117, 'eka', 'Ekajuk', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (118, 'elx', 'Elamite', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (119, 'en', 'English', NULL, 2, 'n != 1', true, 0, '{6c3a4023-ca27-4847-a410-2fe8a2401654}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (120, 'enm', 'English, Middle (1100-1500)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (121, 'eo', 'Esperanto', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (122, 'et', 'Estonian', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (123, 'ee', 'Ewe', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (124, 'ewo', 'Ewondo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (125, 'fan', 'Fang', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (126, 'fo', 'Faroese', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (127, 'fat', 'Fanti', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (128, 'fj', 'Fijian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (129, 'fi', 'Finnish', NULL, 2, 'n != 1', true, 0, '{c5e1e759-ba2e-44b1-9915-51239d89c492}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (130, 'fiu', 'Finno-Ugrian (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (131, 'fon', 'Fon', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (132, 'fr', 'French', NULL, 2, 'n > 1', true, 0, '{5102ddd3-a33f-436f-b43c-f9468a8f8b32}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (133, 'frm', 'French, Middle (ca.1400-1600)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (134, 'fro', 'French, Old (842-ca.1400)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (135, 'fy', 'Frisian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (136, 'ff', 'Fulah', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (137, 'fur', 'Friulian', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (138, 'gaa', 'Ga', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (139, 'gay', 'Gayo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (140, 'gba', 'Gbaya', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (141, 'gem', 'Germanic (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (142, 'ka', 'Georgian', NULL, 1, '0', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (143, 'de', 'German', NULL, 2, 'n != 1', true, 0, '{69C47786-9BEF-49BD-B99B-148C9264AF72}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (144, 'gez', 'Geez', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (145, 'gil', 'Gilbertese', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (146, 'gd', 'Gaelic; Scottish', NULL, 3, 'n < 2 ? 0 : n == 2 ? 1 : 2', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (147, 'ga', 'Irish', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', true, 0, '{906b5382-9a34-4ab1-a627-39487b0678a9}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (148, 'gl', 'Galician', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (149, 'gv', 'Manx', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (150, 'gmh', 'German, Middle High (ca.1050-1500)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (151, 'goh', 'German, Old High (ca.750-1050)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (152, 'gon', 'Gondi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (153, 'gor', 'Gorontalo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (154, 'got', 'Gothic', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (155, 'grb', 'Grebo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (156, 'grc', 'Greek, Ancient (to 1453)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (157, 'el', 'Greek, Modern (1453-)', NULL, 2, 'n != 1', true, 0, '{eb0c5e26-c8a7-4873-b633-0f453cb1debc}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (158, 'gn', 'Guarani', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (159, 'gu', 'Gujarati', NULL, 2, 'n != 1', true, 0, '{16baab125756b023981bc4a14bd77b5c}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (160, 'gwi', 'Gwichin', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (161, 'hai', 'Haida', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (162, 'ht', 'Haitian; Haitian Creole', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (163, 'ha', 'Hausa', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (164, 'haw', 'Hawaiian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (165, 'he', 'Hebrew', NULL, 2, 'n != 1', true, 1, '{9818f84c-1be1-4eea-aded-55f406c70e37}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (166, 'hz', 'Herero', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (167, 'hil', 'Hiligaynon', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (168, 'him', 'Himachali', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (169, 'hi', 'Hindi', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (170, 'hit', 'Hittite', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (171, 'hmn', 'Hmong', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (172, 'ho', 'Hiri', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (173, 'hsb', 'Upper Sorbian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (174, 'hu', 'Hungarian', NULL, 1, '0', true, 0, '{cacb8e15-7f1b-4e71-a3c0-d63ce907366f}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (175, 'hup', 'Hupa', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (176, 'iba', 'Iban', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (177, 'ig', 'Igbo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (178, 'is', 'Icelandic', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (179, 'io', 'Ido', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (180, 'ii', 'Sichuan Yi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (181, 'ijo', 'Ijo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (182, 'iu', 'Inuktitut', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (183, 'ie', 'Interlingue', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (184, 'ilo', 'Iloko', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (185, 'ia', 'Interlingua', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (186, 'inc', 'Indic (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (187, 'id', 'Indonesian', NULL, 1, '0', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (188, 'ine', 'Indo-European (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (189, 'inh', 'Ingush', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (190, 'ik', 'Inupiaq', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (191, 'ira', 'Iranian (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (192, 'iro', 'Iroquoian languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (193, 'it', 'Italian', NULL, 2, 'n != 1', true, 0, '{9db167da-cba5-4d12-b045-5d2a5a36a88a}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (194, 'jv', 'Javanese', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (195, 'jbo', 'Lojban', NULL, 1, '0', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (196, 'ja', 'Japanese', NULL, 1, '0', true, 0, '{02d61967-84bb-455b-a14b-76abc5864739}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (197, 'jpr', 'Judeo-Persian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (198, 'jrb', 'Judeo-Arabic', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (199, 'kaa', 'Kara-Kalpak', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (200, 'kab', 'Kabyle', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (201, 'kac', 'Kachin', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (202, 'kl', 'Greenlandic (Kalaallisut)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (203, 'kam', 'Kamba', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (204, 'kn', 'Kannada', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (205, 'kar', 'Karen', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (206, 'ks', 'Kashmiri', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (207, 'kr', 'Kanuri', NULL, 1, '0', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (208, 'kaw', 'Kawi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (209, 'kk', 'Kazakh', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (210, 'kbd', 'Kabardian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (211, 'kha', 'Khasi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (212, 'khi', 'Khoisan (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (213, 'km', 'Khmer', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (214, 'kho', 'Khotanese', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (215, 'ki', 'Kikuyu', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (216, 'rw', 'Kinyarwanda', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (217, 'ky', 'Kirghiz', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (218, 'kmb', 'Kimbundu', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (219, 'kok', 'Konkani', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (220, 'kv', 'Komi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (221, 'kg', 'Kongo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (222, 'ko', 'Korean', NULL, 1, '0', true, 0, '{dcff4b08-a6cc-4588-a941-852609855803}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (223, 'kos', 'Kosraean', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (224, 'kpe', 'Kpelle', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (225, 'krc', 'Karachay-Balkar', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (226, 'kro', 'Kru', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (227, 'kru', 'Kurukh', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (228, 'kj', 'Kuanyama', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (229, 'kum', 'Kumyk', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (230, 'ku', 'Kurdish', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (231, 'kut', 'Kutenai', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (232, 'lad', 'Ladino', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (233, 'lah', 'Lahnda', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (234, 'lam', 'Lamba', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (235, 'lo', 'Lao', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (236, 'la', 'Latin', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (237, 'lv', 'Latvian', NULL, 3, 'n%10==1 && n%100!=11 ? 0 : n != 0 ? 1 : 2', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (238, 'lez', 'Lezghian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (239, 'li', 'Limburgian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (240, 'ln', 'Lingala', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (241, 'lt', 'Lithuanian', NULL, 3, 'n%10==1 && n%100!=11 ? 0 : n%10>=2 && (n%100<10 || n%100>=20) ? 1 : 2', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (242, 'lol', 'Mongo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (243, 'loz', 'Lozi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (244, 'lb', 'Luxembourgish', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (245, 'lua', 'Luba-Lulua', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (246, 'lu', 'Luba-Katanga', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (247, 'lg', 'Ganda', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (248, 'lui', 'Luiseno', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (249, 'lun', 'Lunda', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (250, 'luo', 'Luo (Kenya and Tanzania)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (251, 'lus', 'Lushai', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (252, 'mk', 'Macedonian', NULL, 2, '(n % 10 == 1 && n % 100 != 11) ? 0 : 1', true, 0, '{376b068c-4aff-4f66-bb4c-fde345b63073}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (253, 'mad', 'Madurese', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (254, 'mag', 'Magahi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (255, 'mh', 'Marshallese', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (256, 'mai', 'Maithili', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (257, 'mak', 'Makasar', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (258, 'ml', 'Malayalam', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (259, 'man', 'Mandingo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (260, 'mi', 'Maori', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (261, 'map', 'Austronesian (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (262, 'mr', 'Marathi', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (263, 'mas', 'Masai', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (264, 'ms', 'Malay', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (265, 'mdf', 'Moksha', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (266, 'mdr', 'Mandar', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (267, 'men', 'Mende', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (268, 'mga', 'Irish, Middle (900-1200)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (269, 'mic', 'Micmac', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (270, 'min', 'Minangkabau', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (271, 'mis', 'Miscellaneous languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (272, 'mkh', 'Mon-Khmer (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (273, 'mg', 'Malagasy', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (274, 'mt', 'Maltese', NULL, 4, 'n == 1 ? 0 : (n == 0 || ((n % 100) >= 2 && (n % 100) <= 10) ) ? 1 : ((n % 100) >= 11 && (n % 100) <= 19 ) ? 2 : 3', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (275, 'mnc', 'Manchu', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (276, 'mno', 'Manobo languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (277, 'moh', 'Mohawk', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (278, 'mo', 'Moldavian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (279, 'mn', 'Mongolian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (280, 'mos', 'Mossi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (281, 'mul', 'Multiple languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (282, 'mun', 'Munda languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (283, 'mus', 'Creek', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (284, 'mwr', 'Marwari', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (285, 'myn', 'Mayan languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (286, 'myv', 'Erzya', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (287, 'nah', 'Nahuatl', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (288, 'nai', 'North American Indian (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (289, 'nap', 'Neapolitan', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (290, 'na', 'Nauru', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (291, 'nv', 'Navaho', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (292, 'nr', 'Ndebele, South', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (293, 'nd', 'Ndebele, North', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (294, 'ng', 'Ndonga', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (295, 'nds', 'German, Low', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (296, 'ne', 'Nepali', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (297, 'new', 'Newari', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (298, 'nia', 'Nias', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (299, 'nic', 'Niger-Kordofanian (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (300, 'niu', 'Niuean', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (301, 'nn', 'Norwegian Nynorsk', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (302, 'nb', 'Norwegian Bokmål', NULL, 2, 'n != 1', true, 0, '{4CD2763D-5532-4ddc-84D9-2E094695A680}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (303, 'nog', 'Nogai', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (304, 'non', 'Norse, Old', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (305, 'no', 'Norwegian', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (306, 'nso', 'Sotho, Northern', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (307, 'nub', 'Nubian languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (308, 'nwc', 'Classical Newari; Old Newari', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (309, 'ny', 'Chewa; Chichewa; Nyanja', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (310, 'nym', 'Nyankole', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (310, 'nym', 'Nyamwezi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (312, 'nzi', 'Nzima', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (313, 'oc', 'Occitan (post 1500)', NULL, 2, 'n > 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (314, 'oj', 'Ojibwa', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (315, 'or', 'Oriya', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (316, 'om', 'Oromo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (317, 'osa', 'Osage', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (318, 'os', 'Ossetian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (319, 'ota', 'Turkish, Ottoman (1500-1928)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (320, 'oto', 'Otomian languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (321, 'paa', 'Papuan (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (322, 'pag', 'Pangasinan', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (323, 'pal', 'Pahlavi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (324, 'pam', 'Pampanga', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (325, 'pa', 'Punjabi', NULL, 2, 'n != 1', true, 0, '{96f366b1-5194-4e30-9415-1f6fcaaaa583}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (326, 'pap', 'Papiamento', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (327, 'pau', 'Palauan', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (328, 'peo', 'Persian, Old (ca.600-400 B.C.)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (329, 'fa', 'Persian', NULL, 1, '0', true, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (330, 'phi', 'Philippine (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (331, 'phn', 'Phoenician', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (332, 'pi', 'Pali', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (333, 'pl', 'Polish', NULL, 3, 'n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2', true, 0, '{cbfb6154-47f6-47ea-b888-6440a4ba44e8}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (334, 'pt', 'Portuguese', NULL, 2, 'n != 1', true, 0, '{6e528a74-5cca-40d1-a152-d1b2d415210b}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (335, 'pon', 'Pohnpeian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (336, 'pra', 'Prakrit languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (337, 'pro', 'Provençal, Old (to 1500)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (338, 'ps', 'Pushto', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (339, 'qu', 'Quechua', NULL, 2, '(n % 10 == 1 && n % 100 != 11) ? 0 : 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (340, 'raj', 'Rajasthani', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (341, 'rap', 'Rapanui', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (342, 'rar', 'Rarotongan', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (343, 'roa', 'Romance (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (344, 'rm', 'Raeto-Romance', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (345, 'rom', 'Romany', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (346, 'ro', 'Romanian', NULL, 3, '(n == 1 ? 0: (((n % 100 > 19) || ((n % 100 == 0) && (n != 0))) ? 2: 1))', true, 0, '{93ead120-1d61-4663-852d-ee631493168f}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (347, 'rn', 'Rundi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (348, 'ru', 'Russian', NULL, 3, 'n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2', true, 0, '{9E20245A-B2EE-4ee6-815B-99C30B35D0D2}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (349, 'sad', 'Sandawe', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (350, 'sg', 'Sango', NULL, 2, 'n > 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (351, 'sah', 'Yakut', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (352, 'sai', 'South American Indian (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (353, 'sal', 'Salishan languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (354, 'sam', 'Samaritan Aramaic', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (355, 'sa', 'Sanskrit', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (356, 'sas', 'Sasak', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (357, 'sat', 'Santali', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (358, 'sr', 'Serbian', NULL, 3, 'n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (359, 'sco', 'Scots', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (360, 'hr', 'Croatian', NULL, 3, 'n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (361, 'sel', 'Selkup', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (362, 'sem', 'Semitic (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (363, 'sga', 'Irish, Old (to 900)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (364, 'sgn', 'Sign languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (365, 'shn', 'Shan', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (366, 'sid', 'Sidamo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (367, 'si', 'Sinhalese', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (368, 'sio', 'Siouan languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (369, 'sit', 'Sino-Tibetan (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (370, 'sla', 'Slavic (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (371, 'sk', 'Slovak', NULL, 3, 'n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (372, 'sl', 'Slovenian', NULL, 4, 'n%100==1 ? 0 : n%100==2 ? 1 : n%100==3 || n%100==4 ? 2 : 3', true, 0, '{ac25d192-0004-4228-8dc3-13a5461ca1c6}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (373, 'sma', 'Southern Sami', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (374, 'se', 'Northern Sami', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (375, 'smi', 'Sami languages (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (376, 'smj', 'Lule Sami', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (377, 'smn', 'Inari Sami', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (378, 'sm', 'Samoan', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (379, 'sms', 'Skolt Sami', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (380, 'sn', 'Shona', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (381, 'sd', 'Sindhi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (382, 'snk', 'Soninke', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (383, 'sog', 'Sogdian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (384, 'so', 'Somali', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (385, 'son', 'Songhai', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (386, 'st', 'Sotho, Southern', NULL, 1, '0', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (387, 'es', 'Spanish', NULL, 2, 'n != 1', true, 0, '{e4d01067-3443-405d-939d-a200ed3db577}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (388, 'sc', 'Sardinian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (389, 'srr', 'Serer', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (390, 'ssa', 'Nilo-Saharan (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (391, 'ss', 'Swati', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (392, 'suk', 'Sukuma', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (393, 'su', 'Sundanese', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (394, 'sus', 'Susu', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (395, 'sux', 'Sumerian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (396, 'sw', 'Swahili', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (397, 'sv', 'Swedish', NULL, 2, 'n != 1', true, 0, '{A3E7CC55-B6E4-4a87-BD2E-657CA489F23A}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (398, 'syr', 'Syriac', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (399, 'ty', 'Tahitian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (400, 'tai', 'Tai (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (401, 'ta', 'Tamil', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (402, 'ts', 'Tsonga', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (403, 'tt', 'Tatar', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (404, 'te', 'Telugu', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (405, 'tem', 'Timne', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (406, 'ter', 'Tereno', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (407, 'tet', 'Tetum', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (408, 'tg', 'Tajik', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (409, 'tl', 'Tagalog', NULL, 2, 'n > 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (410, 'th', 'Thai', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (411, 'bo', 'Tibetan', NULL, 1, '0', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (412, 'tig', 'Tigre', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (413, 'ti', 'Tigrinya', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (414, 'tiv', 'Tiv', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (415, 'tlh', 'Klingon; tlhIngan-Hol', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (416, 'tkl', 'Tokelau', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (417, 'tli', 'Tlingit', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (418, 'tmh', 'Tamashek', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (419, 'tog', 'Tonga (Nyasa)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (420, 'to', 'Tonga (Tonga Islands)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (421, 'tpi', 'Tok Pisin', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (422, 'tsi', 'Tsimshian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (423, 'tn', 'Tswana', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (424, 'tk', 'Turkmen', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (425, 'tum', 'Tumbuka', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (426, 'tup', 'Tupi languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (427, 'tr', 'Turkish', NULL, 1, '0', true, 0, '{08c0f953-0736-4989-b921-3e7ddfaf556a}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (428, 'tut', 'Altaic (Other)', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (429, 'tvl', 'Tuvalu', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (430, 'tw', 'Twi', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (431, 'tyv', 'Tuvinian', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (432, 'udm', 'Udmurt', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (433, 'uga', 'Ugaritic', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (434, 'ug', 'Uighur', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (435, 'uk', 'Ukrainian', NULL, 3, 'n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2', true, 0, '{f68df430-4534-4473-8ca4-d5de32268a8d}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (436, 'umb', 'Umbundu', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (437, 'und', 'Undetermined', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (438, 'ur', 'Urdu', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (439, 'uz', 'Uzbek', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (440, 'vai', 'Vai', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (441, 've', 'Venda', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (442, 'vi', 'Vietnamese', NULL, 1, '0', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (443, 'vo', 'Volapük', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (444, 'vot', 'Votic', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (445, 'wak', 'Wakashan languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (446, 'wal', 'Walamo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (447, 'war', 'Waray', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (448, 'was', 'Washo', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (449, 'cy', 'Welsh', NULL, 4, 'n==1 ? 0 : n==2 ? 1 : (n != 8 && n != 11) ? 2 : 3', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (450, 'wen', 'Sorbian languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (451, 'wa', 'Walloon', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (452, 'wo', 'Wolof', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (453, 'xal', 'Kalmyk', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (454, 'xh', 'Xhosa', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (455, 'yao', 'Yao', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (456, 'yap', 'Yapese', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (457, 'yi', 'Yiddish', NULL, NULL, NULL, true, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (458, 'yo', 'Yoruba', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (459, 'ypk', 'Yupik languages', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (460, 'zap', 'Zapotec', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (461, 'zen', 'Zenaga', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (462, 'za', 'Chuang; Zhuang', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (463, 'znd', 'Zande', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (464, 'zu', 'Zulu', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (465, 'zun', 'Zuni', NULL, NULL, NULL, true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (466, 'ti_ER', 'Tigrinya (Eritrea)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (467, 'ti_ET', 'Tigrinya (Ethiopia)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (468, 'gez_ER', 'Geez (Eritrea)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (469, 'gez_ET', 'Geez (Ethiopia)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (470, 'de_AT', 'German (Austria)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (471, 'de_BE', 'German (Belgium)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (472, 'de_CH', 'German (Switzerland)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (473, 'de_DE', 'German (Germany)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (474, 'de_LU', 'German (Luxembourg)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (475, 'en_AU', 'English (Australia)', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (476, 'en_BW', 'English (Botswana)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (477, 'en_CA', 'English (Canada)', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (478, 'en_DK', 'English (Denmark)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (479, 'en_GB', 'English (United Kingdom)', NULL, 2, 'n != 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (480, 'en_HK', 'English (Hong Kong)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (481, 'en_IE', 'English (Ireland)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (482, 'en_IN', 'English (India)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (483, 'en_NZ', 'English (New Zealand)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (484, 'en_PH', 'English (Philippines)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (485, 'en_SG', 'English (Singapore)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (486, 'en_US', 'English (United States)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (487, 'en_ZA', 'English (South Africa)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (488, 'en_ZW', 'English (Zimbabwe)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (489, 'zh_CN', 'Chinese (China)', NULL, 1, '0', true, 0, '{74d253f5-1463-4de4-bc6e-a7127c066416}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (490, 'zh_HK', 'Chinese (Hong Kong)', NULL, 1, '0', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (491, 'zh_SG', 'Chinese (Singapore)', NULL, 1, '0', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (492, 'zh_TW', 'Chinese (Taiwan)', NULL, 1, '0', true, 0, '{0c7ce36c-a092-4a3d-9ac3-9891d2f2727e}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (493, 'eu_ES', 'Basque (Spain)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (494, 'eu_FR', 'Basque (France)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (495, 'es_AR', 'Spanish (Argentina)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (496, 'es_BO', 'Spanish (Bolivia)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (497, 'es_CL', 'Spanish (Chile)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (498, 'es_CO', 'Spanish (Colombia)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (499, 'es_CR', 'Spanish (Costa Rica)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (500, 'es_DO', 'Spanish (Dominican Republic)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (501, 'es_EC', 'Spanish (Ecuador)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (502, 'es_ES', 'Spanish (Spain)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (503, 'es_GT', 'Spanish (Guatemala)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (504, 'es_HN', 'Spanish (Honduras)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (505, 'es_MX', 'Spanish (Mexico)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (506, 'es_NI', 'Spanish (Nicaragua)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (507, 'es_PA', 'Spanish (Panama)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (508, 'es_PE', 'Spanish (Peru)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (509, 'es_PR', 'Spanish (Puerto Rico)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (510, 'es_PY', 'Spanish (Paraguay)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (511, 'es_SV', 'Spanish (El Salvador)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (512, 'es_US', 'Spanish (United States)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (513, 'es_UY', 'Spanish (Uruguay)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (514, 'es_VE', 'Spanish (Venezuela)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (515, 'ru_RU', 'Russian (Russian Federation)', NULL, 3, 'n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (516, 'ru_UA', 'Russian (Ukraine)', NULL, 3, 'n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (517, 'bn_BD', 'Bengali (Bangladesh)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (518, 'bn_IN', 'Bengali (India)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (519, 'om_ET', 'Oromo (Ethiopia)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (520, 'om_KE', 'Oromo (Kenya)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (521, 'pt_BR', 'Portuguese (Brazil)', NULL, 2, 'n > 1', true, 0, '{8cb7341c-bcb6-43ca-b214-c48967f2a77e}');
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (522, 'pt_PT', 'Portuguese (Portugal)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (523, 'aa_DJ', 'Afar (Djibouti)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (524, 'aa_ER', 'Afar (Eritrea)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (525, 'aa_ET', 'Afar (Ethiopia)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (526, 'it_CH', 'Italian (Switzerland)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (527, 'it_IT', 'Italian (Italy)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (528, 'ar_AE', 'Arabic (United Arab Emirates)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (529, 'ar_BH', 'Arabic (Bahrain)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (530, 'ar_DZ', 'Arabic (Algeria)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (531, 'ar_EG', 'Arabic (Egypt)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (532, 'ar_IN', 'Arabic (India)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (533, 'ar_IQ', 'Arabic (Iraq)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (534, 'ar_JO', 'Arabic (Jordan)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (535, 'ar_KW', 'Arabic (Kuwait)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (536, 'ar_LB', 'Arabic (Lebanon)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (537, 'ar_LY', 'Arabic (Libyan Arab Jamahiriya)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (538, 'ar_MA', 'Arabic (Morocco)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (539, 'ar_OM', 'Arabic (Oman)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (540, 'ar_QA', 'Arabic (Qatar)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (541, 'ar_SA', 'Arabic (Saudi Arabia)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (542, 'ar_SD', 'Arabic (Sudan)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (543, 'ar_SY', 'Arabic (Syrian Arab Republic)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (544, 'ar_TN', 'Arabic (Tunisia)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (545, 'ar_YE', 'Arabic (Yemen)', NULL, 3, 'n==1 ? 0 : n==2 ? 1 : 2', false, 1, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (546, 'nl_BE', 'Dutch (Belgium)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (547, 'nl_NL', 'Dutch (Netherlands)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (548, 'fr_BE', 'French (Belgium)', NULL, 2, 'n > 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (549, 'fr_CA', 'French (Canada)', NULL, 2, 'n > 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (550, 'fr_CH', 'French (Switzerland)', NULL, 2, 'n > 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (551, 'fr_FR', 'French (France)', NULL, 2, 'n > 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (552, 'fr_LU', 'French (Luxembourg)', NULL, 2, 'n > 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (553, 'sv_FI', 'Swedish (Finland)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (554, 'sv_SE', 'Swedish (Sweden)', NULL, 2, 'n != 1', false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (555, 'so_DJ', 'Somali (Djibouti)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (556, 'so_ET', 'Somali (Ethiopia)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (557, 'so_KE', 'Somali (Kenya)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (558, 'so_SO', 'Somali (Somalia)', NULL, NULL, NULL, false, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (559, 'fil', 'Filipino', NULL, 2, 'n > 1', true, 0, NULL);
+INSERT INTO language (id, code, englishname, nativename, pluralforms, pluralexpression, visible, direction, uuid) VALUES (560, 'es@test', 'Spanish test', NULL, 2, 'n != 1', false, 0, NULL);
+
+
+ALTER TABLE language ENABLE TRIGGER ALL;
+
+
+ALTER TABLE libraryfilecontent DISABLE TRIGGER ALL;
+
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (1, '2005-04-07 16:46:05.265391', 178859, '378b3498ead213d35a82033a6e9196014a5ef25c', '01234567890123456789012345678900', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (2, '2005-04-07 16:46:05.266763', 9922560, 'a57faa6287aee2c58e115673a119c6083d31d1b9', '01234567890123456789012345678902', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (3, '2005-04-07 16:46:05.26727', 309386, 'b218ca7b52fa813550e3f14cdcf3ba68606e4446', '01234567890123456789012345678903', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (4, '2005-04-07 16:46:05.267803', 162927750, 'cfbd3ee1f510c66d49be465b900a3334e8488184', '01234567890123456789012345678904', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (5, '2005-05-18 08:03:28.021862', 4381, '9b1f78faa39fb09a9fd955d744002c2d8f32d88d', '01234567890123456789012345678905', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (6, '2005-05-18 08:03:28.021862', 7910, 'afdf21d698587a6601e2ffed0f44292b7ad5dd07', '01234567890123456789012345678906', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (7, '2005-05-18 08:03:28.021862', 10826, '502828e7591277535abe9015ffbc6918dbba8ef4', '01234567890123456789012345678907', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (8, '2005-05-18 08:03:28.021862', 10826, '502828e7591277535abe9015ffbc6918dbba8ef4', '01234567890123456789012345678907', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (9, '2005-05-18 08:03:28.021862', 2655, 'ca3b107af84c05eaf98ba073376153986566ec28', '01234567890123456789012345678908', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (10, '2005-05-18 08:03:28.021862', 13110, 'bc7bebca1e3c5c166838b19f0eeb7f171e51805d', '01234567890123456789012345678909', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (11, '2005-05-18 08:03:28.021862', 13499, '78a26efee75a54f113063b78783b2d4612fee409', '0123456789012345678901234567890a', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (12, '2005-05-18 08:03:28.021862', 12695, '8812d04c170ca90bb1423e188ce9706869aa03d7', '0123456789012345678901234567890b', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (13, '2005-05-18 08:03:28.021862', 13133, 'db1b50cbde7142d344bd8ef9b2e1fe3b3116f77c', '0123456789012345678901234567890c', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (14, '2005-05-18 08:03:28.021862', 13641, 'e19cc1446e3004f10475c37b2cd363f75b8ae89a', '0123456789012345678901234567890d', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (15, '2005-05-18 08:03:28.021862', 13269, 'fc8cab1cb1e5fb1efa3c3c475b8f7c8dc5038d50', '0123456789012345678901234567890f', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (16, '2005-05-18 08:03:28.021862', 13983, 'e17ee3031bd29dcd1e5905c0fd17945600a91ccf', '01234567890123456789012345678910', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (17, '2005-05-18 08:03:28.021862', 12652, '07b01d1e6fe9a729f911e72dfe674a5e0abdc4ee', '01234567890123456789012345678911', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (18, '2005-05-18 08:03:28.021862', 13240, '801dc911c2bd67e17eff087516fdc63a2ac322ce', '01234567890123456789012345678912', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (19, '2005-05-18 08:03:28.021862', 4165, 'fca78a2292e4034b8dfbb2de6f69e17ebeecaaa1', '01234567890123456789012345678913', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (20, '2005-05-18 08:03:28.021862', 4093, 'fc67a1770f78c45c396b4724195aeb10683aa2fd', '01234567890123456789012345678914', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (21, '2005-05-18 08:03:28.021862', 3635, '4ab2ca308dafe152789640942488e23a33e4f46c', '01234567890123456789012345678915', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (22, '2005-05-18 08:03:28.021862', 3553, '20815563ee33368d51e3213354f97c05b4685968', '01234567890123456789012345678916', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (23, '2005-05-18 08:03:28.021862', 3778, '965968d3e6668f39ebc64bc11a3f1a5cd07c213b', '01234567890123456789012345678917', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (24, '2005-05-18 08:03:28.021862', 3666, 'cca8fb78e05a34481e07683cea8c3a47f01c609e', '01234567890123456789012345678918', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (25, '2005-05-18 08:03:28.021862', 3793, '28a7accfb491a2b4895b49b810ca7cda0badc787', '01234567890123456789012345678919', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (26, '2005-05-18 08:03:28.021862', 4773, '03efb176f04f3897de7d5e6484864b0559fd6cd6', '0123456789012345678901234567891a', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (27, '2005-05-18 08:03:28.021862', 2961, '4468039e1d2cbdfc78d2e53477e5fe0537bae302', '0123456789012345678901234567891b', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (28, '2005-05-18 08:03:28.021862', 3558, 'd6c2ddacdab7618ce2a555c20a4a730fcdb42600', '0123456789012345678901234567891c', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (29, '2005-05-18 08:03:28.021862', 3561, '9eb09455e6a568605c1bbab4cdf1936eee92222d', '0123456789012345678901234567891d', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (30, '2005-05-18 08:03:28.021862', 3305, 'b45b170da29f9b22650315657505124766c93720', '0123456789012345678901234567891e', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (31, '2005-05-18 08:03:28.021862', 3987, '9668ba9f0a59f9e6e6bc73fc5dc9f116b202bceb', '0123456789012345678901234567891f', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (32, '2005-05-18 08:03:28.021862', 4908, '874a6ef9cd1aaef17653c6c12f4b83ef9487c1c3', '01234567890123456789012345678920', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (33, '2005-05-18 08:03:28.021862', 4908, '874a6ef9cd1aaef17653c6c12f4b83ef9487c1c3', '01234567890123456789012345678920', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (34, '2005-08-10 09:31:29.606407', 2, '71853c6197a6a7f222db0f1978c7cb232b87c5ee', '01234567890123456789012345678921', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (35, '2005-08-01 09:31:29.606407', 2, '71853c6197a6a7f222db0f1978c7cb232b87c5ee', '01234567890123456789012345678921', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (36, '2005-10-30 18:00:27.899028', 3, '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33', '01234567890123456789012345678922', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (37, '2005-10-30 18:00:27.899028', 3, '1e04c7b5ea3f0fdbc95d0dd47f3c5bc275da8a33', '01234567890123456789012345678923', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (38, '2005-10-30 18:00:27.899028', 3, 'ae04c7b5ea3f0bdb095d0dd47f3c5bc275da8a33', '01234567890123456789012345678924', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (39, '2005-10-30 18:00:27.899028', 3, 'a10856bfea3f0bdb09550dd41f3c5bc275da8a33', '01234567890123456789012345678925', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (40, '2005-10-30 18:00:27.899028', 3, '5a04c7b5ea3f0fdbc95d0dd47f3c5bc275da8a33', '01234567890123456789012345678926', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (42, '2005-10-30 18:00:27.899028', 3, 'a45ed906e4f56fdbc95d0dd47f3c5bc275da8a33', '01234567890123456789012345678927', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (43, '2005-10-30 18:00:27.899028', 3, '4e3961baf4f56fdbc95d0dd47f3c5bc275da8a33', '01234567890123456789012345678928', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (44, '2005-10-30 18:00:27.899028', 3, 'b45ed906e4f5afdbc95d0dd47f3c5bc275da8a33', '01234567890123456789012345678929', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (45, '2006-08-01 09:31:29.606407', 2, '43853c6197a6a7f222db0f1978c7cb232b87c5ee', '0123456789012345678901234567892a', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (46, '2006-05-24 09:31:29.606407', 2, 'ab43246197a6a7f222db0f1978c7cb232b87c5ee', '0123456789012345678901234567892b', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (47, '2006-05-24 09:31:29.606407', 2, 'cabf42e197a6a7f222db0f1978c7cb232b87c5ee', '0123456789012345678901234567892c', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (51, '2006-07-14 16:41:34.028627', 716, '86d537a0d8b5b346d02752a853cc6ea648a0ebd7', 'eeb4c1e00a2e17a1eb51bd8b92fa5437', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (52, '2006-05-24 09:31:29.606407', 2, '1622db354faa9fa653804d018f3b9d5291e37d6b', 'e4a7193a8f72fa2755e2162512069093', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (53, '2006-12-01 16:41:34.028627', 716, '86d537a0d8b5b346d02752a853cc6ea648a0ebd7', 'eeb4c1e00a2e17a1eb51bd8b92fa5437', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (54, '2006-12-01 16:41:34.028627', 716, '86d537a0d8b5b346d02752a853cc6ea648a0ebd7', 'eeb4c1e00a2e17a1eb51bd8b92fa5438', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (55, '2006-12-01 16:41:34.028627', 716, '86d537a0d8b5b346d02752a853cc6ea648a0ebd7', 'eeb4c1e00a2e17a1eb51bd8b92fa5439', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (56, '2006-12-01 16:41:34.028627', 716, '86d537a0d8b5b346d02752a853cc6ea648a0ebd7', 'eeb4c1e00a2e17a1eb51bd8b92fa5440', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (57, '2006-12-13 21:17:56.241901', 1599, 'acdf6b9b99c39b1585f829ec7d68598a8e10816d', '5c6fa250b612e7e4d17261268a4d8400', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (58, '2006-12-13 21:18:28.796588', 1599, 'acdf6b9b99c39b1585f829ec7d68598a8e10816d', '5c6fa250b612e7e4d17261268a4d8401', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (59, '2006-05-24 09:31:29.606407', 2, 'fabb42e197a6a7f222db0f1978c7cb232b87c5ee', '0123456789012345678901234567892d', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (60, '2007-01-03 17:26:27.288968', 11793, 'df3a6670671781d5e08d7795ca1ada776815d87f', 'e8120781cd606202fd259a4f0d4585bb', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (61, '2007-02-15 14:26:27.288968', 100, 'df3a6670671781d5e08d7795ca1ada776815d87f', 'e8120781cd606202fd259a4f0d4585b1', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (62, '2007-05-14 23:21:11.121446', 123, 'd06b970f258e57547ef1104fba3499eb4ab43ff6', '767e1635f55ff5e833410523decec438', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (63, '2007-05-14 23:21:11.121446', 123, 'd06b970f258e57547ef1104fba3499eb4ab43ff6', '767e1635f55ff5e833410523decec438', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (65, '2007-08-09 21:25:37.832976', 958, 'df4adf483eb24fec455e8411ca3dceb0bee51b44', 'b85b447f88c326c4124f0554e175097f', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (66, '2007-08-09 21:25:37.832976', 179, '3e00fea68e91f6e03de6333b4c8e60c2ce441926', 'f27fb7494c5afbb0fb10b78d16f7da37', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (67, '2007-08-09 21:25:37.832976', 610, '01bedc249af59cb80abb40b4898eced10f5d5e20', '7d6fa416334c6da3b954bf9ebff6e9ae', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (68, '2007-08-09 21:25:37.832976', 567, 'b5aeb55faeb86a1d8c1a93dd58131b387a604c5a', '95f2b067e046d73f4e93eca44c034b97', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (69, '2007-08-09 21:54:18.456616', 749, '0526ef40da99af5c1c97c91f9aca77e7ae564838', '8afa3dbc21b2dcdad7f31ab28c041ad9', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (70, '2007-08-09 21:54:18.456616', 652, '0ce85c0cf331d05f76aa4977c43e87e9ffbd0480', '05a37db4ba50ba5f28650ee74b450c2c', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (71, '2007-09-10 19:14:26.037382', 22897, 'd7ebed9e00fc134ebc79eaf7deebb30a4f5f6859', '0113f983ff8beba46ad64c4ae8899091', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (72, '2007-09-10 19:15:01.67038', 221, '0ddef36534d59da64790eeffa55cc0221a01736d', 'df6ce34951747929ca9c6a4e2cb78f94', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (73, '2007-09-10 19:15:19.947543', 217, 'ea265925f6214628dc46e731ca1e131f1e632127', '9d19d15feffc620c0d16523e0ce00a54', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (74, '2007-09-10 19:16:01.017943', 22899, '234acbb3dc11b2af9bc46b6e33f0e453c3b1289d', '6d2d916ea729bc875ca0bd739f6be476', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (75, '2007-12-18 16:31:34.790641', 4296, 'e8b25389964bc3bd471c20a1508d00852ee7de40', 'e6b759498b7cde49e9d34e7b8c8b4542', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (76, '2007-12-18 16:31:34.790641', 2103, 'c154e1c1c2407d9b80c96b63c48d61a94248bf95', 'd86630cafdf624a3aa32aba0c41078c5', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (77, '2007-12-18 16:31:34.790641', 2787, '2d94c4a6a014895f93b581fb0134efe73ae4e2c8', '315ef57e5f6ad295e98db65607c5c18a', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (78, '2007-12-18 16:31:34.790641', 3534, '2bd22db53f2b9e47df50ed731128ff1fc54a5775', 'e0d510b49f803a0a4c2b2712d0a80522', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (79, '2007-12-18 16:31:34.790641', 3327, 'a57356e4cbf97bbb47df202449a95d909d50614a', '8c5ffb558a0d0f520887f3d4cbd619c5', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (80, '2007-12-18 16:31:34.790641', 189, '9800f54c65017e1bafe2d09a13af6176ba0ab244', 'a94677aabcd34b0901f1f75a35f7dff3', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (81, '2007-12-18 16:31:34.790641', 2926, 'b53bb11bc40c42b0f9ecd981561fe9d6f265b275', '496cf1cbb97c17a67998478402520ab5', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (82, '2008-03-17 15:36:19.035615', 18, '0c805a60b31058a1018680f99447033dcb9d4cf8', '8a8a67b8dbc5f203ae8712092c68c780', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (83, '2008-03-17 15:36:38.022812', 18, '0c805a60b31058a1018680f99447033dcb9d4cf8', '8a8a67b8dbc5f203ae8712092c68c780', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (84, '2008-03-17 15:36:48.877842', 3, '55ca6286e3e4f4fba5d0448333fa99fc5a404a73', '764efa883dda1e11db47671c4a3bbd9e', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (85, '2008-03-17 15:37:10.252357', 18, '0c805a60b31058a1018680f99447033dcb9d4cf8', '8a8a67b8dbc5f203ae8712092c68c780', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (86, '2008-03-17 15:37:22.489973', 18, '0c805a60b31058a1018680f99447033dcb9d4cf8', '8a8a67b8dbc5f203ae8712092c68c780', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (87, '2008-03-17 15:37:36.701686', 18, '0c805a60b31058a1018680f99447033dcb9d4cf8', '8a8a67b8dbc5f203ae8712092c68c780', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (88, '2008-03-17 15:37:48.465157', 3, '55ca6286e3e4f4fba5d0448333fa99fc5a404a73', '764efa883dda1e11db47671c4a3bbd9e', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (89, '2008-03-17 15:38:16.866444', 18, '0c805a60b31058a1018680f99447033dcb9d4cf8', '8a8a67b8dbc5f203ae8712092c68c780', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (90, '2008-05-08 10:10:16.866444', 18, '0c805a60b31058a1018680f99447033dcb9d4caa', '8a8a67b8dbc5f203ae8712092c68c7aa', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (91, '2008-07-07 22:30:01.123456', 10, '0c805a60b31058a1018680f99447033dcb9d4c01', '8a8a67b8dbc5f203ae8712092c68c701', NULL);
+INSERT INTO libraryfilecontent (id, datecreated, filesize, sha1, md5, sha256) VALUES (92, '2008-09-30 08:19:00.222131', 10, 'f10e2821bbbea527ea02200352313bc059445190', '7815696ecbf1c96e6894b779456d330e', NULL);
+
+
+ALTER TABLE libraryfilecontent ENABLE TRIGGER ALL;
+
+
+ALTER TABLE libraryfilealias DISABLE TRIGGER ALL;
+
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (1, 1, 'netapplet-1.0.0.tar.gz', 'application/x-gtar', NULL, '2005-11-17 16:15:32.440132', '2005-04-07 16:46:05.265391', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (2, 1, 'netapplet_1.0.0.orig.tar.gz', 'application/x-gtar', NULL, '2005-11-17 16:15:32.440132', '2005-04-07 16:46:05.265391', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (3, 2, 'firefox_0.9.2.orig.tar.gz', 'application/x-gtar', NULL, '2005-11-17 16:15:32.440132', '2005-04-07 16:46:05.266763', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (4, 3, 'evolution-1.0.tar.gz', 'application/x-gtar', NULL, '2005-11-17 16:15:32.440132', '2005-04-07 16:46:05.26727', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (5, 5, 'netapplet.pot', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (6, 6, 'pmount.pot', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (7, 7, 'evolution-2.2.pot', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (8, 8, 'evolution-2.2.pot', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (9, 9, 'pkgconf-mozilla.pot', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (10, 10, 'hr.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (11, 11, 'ca.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (12, 12, 'nb.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (13, 13, 'cs.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (14, 14, 'es_ES.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (15, 15, 'de.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (16, 16, 'fr.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (17, 17, 'it_IT.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (18, 18, 'es.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (19, 19, 'fr.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (20, 20, 'pt_BR.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (21, 21, 'ja.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (22, 22, 'es.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (23, 23, 'nl.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (24, 24, 'cs.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (25, 25, 'da.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (26, 26, 'fi.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (27, 27, 'gl.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (28, 28, 'lt.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (29, 29, 'it.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (30, 30, 'tr.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (31, 31, 'de.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (32, 32, 'es.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (33, 33, 'es.po', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-05-18 08:03:28.021862', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (34, 34, 'evolution-2.2-test.pot', 'application/x-po', NULL, '2005-11-17 16:15:32.440132', '2005-08-10 09:31:29.606407', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (35, 35, 'Ubuntu-High-Pri-2005-08-01.csv', 'text/plain', NULL, '2005-11-17 16:15:32.440132', '2005-08-01 09:31:29.606407', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (36, NULL, 'foo.txt', 'text/plain', NULL, '2005-11-17 16:15:32.440132', '2005-10-30 18:00:27.899028', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (37, 37, 'pmount_1.9-1_all.deb', 'application/x-debian-package', NULL, '2005-11-17 16:15:32.440132', '2005-10-30 18:00:27.899028', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (38, 38, 'alsa-utils_1.0.9a-4.dsc', 'application/dsc', NULL, '2005-11-17 16:15:32.440132', '2005-10-30 18:00:27.899028', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (39, 39, 'alsa-utils_1.0.8-1ubuntu1.dsc', 'application/dsc', NULL, '2005-11-17 16:15:32.440132', '2005-10-30 18:00:27.899028', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (40, 40, 'mozilla-firefox_0.9_i386.deb', 'application/x-debian-package', NULL, '2005-11-17 16:15:32.440132', '2005-10-30 18:00:27.899028', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (42, 42, 'linux-2.6.12_2.6.12.20_i386.deb', 'application/x-debian-package', NULL, '2005-11-17 16:15:32.440132', '2005-10-30 18:00:27.899028', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (43, 43, 'alsa-utils_1.0.9a-4ubuntu1.dsc', 'application/x-debian-package', NULL, '2005-11-17 16:15:32.440132', '2005-10-30 18:00:27.899028', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (44, 44, 'at-3.14156_all.udeb', 'application/x-debian-package', NULL, '2005-11-17 16:15:32.440132', '2005-10-30 18:00:27.899028', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (45, 45, 'Ubuntu-High-Pri-2006-08-01.csv', 'text/plain', NULL, '2005-11-17 16:15:32.440132', '2006-08-01 09:31:29.606407', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (46, 46, 'non-existent-mirrorprober-logfile.txt', 'text/plain', NULL, '2006-05-24 16:15:32.440132', '2006-05-24 09:31:29.606407', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (47, 47, 'non-existent-mirrorprober-logfile.txt', 'text/plain', NULL, '2006-05-24 16:15:32.440132', '2006-05-24 09:31:29.606407', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (51, 51, 'x4cWPgneBxsZOM21ZzpRPxsZXod.msg', 'message/rfc822', NULL, '2006-07-14 16:41:34.028627', '2006-07-14 16:41:34.028627', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (52, 52, 'mozilla-firefox_0.9_i386.changes', 'text/plain', NULL, '2006-07-31 15:41:34.028627', '2006-05-24 09:31:29.606407', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (53, 53, 'cdrkit-1.0.dsc', 'application/dsc', NULL, '2006-12-01 15:41:34.028627', '2006-12-01 16:41:34.028627', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (54, 54, 'foobar-1.0.dsc', 'application/dsc', NULL, '2006-12-01 15:41:34.028627', '2006-12-01 16:41:34.028627', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (55, 55, 'cdrkit_1.0_all.deb', 'application/deb', NULL, '2006-12-01 15:41:34.028627', '2006-12-01 16:41:34.028627', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (56, 56, 'foobar_1.0_all.deb', 'application/deb', NULL, '2006-12-01 15:41:34.028627', '2006-12-01 16:41:34.028627', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (57, 57, 'evolution-2.2-test.pot', 'application/x-po', NULL, '2006-12-13 21:17:56.241901', '2006-12-13 21:17:56.241901', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (58, 58, 'pt_BR.po', 'application/x-po', NULL, '2006-12-13 21:18:28.796588', '2006-12-13 21:18:28.796588', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (59, 59, 'salgado-mugshot.jpg', 'image/jpeg', NULL, '2006-07-31 15:41:34.028627', '2006-05-24 09:31:29.606407', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (60, 60, 'es.po', 'application/x-po', NULL, '2007-01-03 17:26:27.288968', '2007-01-03 17:26:27.288968', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (61, 61, 'language-pack-ar_1.0.dsc', 'application/dsc', NULL, '2007-02-15 14:26:27.288968', '2007-02-15 14:26:27.288968', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (62, 62, 'iceweasel-1.0.dsc', 'application/dsc', NULL, '2007-05-14 23:21:11.121446', '2007-05-14 23:21:11.121446', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (63, 63, 'hwsubmission1.xml', 'text/xml', NULL, '2007-05-14 23:21:11.121446', '2007-05-14 23:21:11.121446', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (65, 65, 'commercialpackage_1.0-1_source.changes', 'text/plain', NULL, '2007-08-09 21:25:37.832976', '2007-08-09 21:25:37.832976', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (66, 66, 'commercialpackage_1.0.orig.tar.gz', 'application/gzipped-tar', NULL, '2007-08-09 21:25:37.832976', '2007-08-09 21:25:37.832976', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (67, 67, 'commercialpackage_1.0-1.diff.gz', 'application/gzipped-patch', NULL, '2007-08-09 21:25:37.832976', '2007-08-09 21:25:37.832976', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (68, 68, 'commercialpackage_1.0-1.dsc', 'text/x-debian-source-package', NULL, '2007-08-09 21:25:37.832976', '2007-08-09 21:25:37.832976', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (69, 69, 'commercialpackage_1.0-1_i386.changes', 'text/plain', NULL, '2007-08-09 21:54:18.456616', '2007-08-09 21:54:18.456616', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (70, 70, 'commercialpackage_1.0-1_i386.deb', 'application/x-debian-package', NULL, '2007-08-09 21:54:18.456616', '2007-08-09 21:54:18.456616', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (71, 71, 'ubuntu-hoary-translations.tar.gz', 'application/x-gtar', NULL, '2007-09-10 19:14:26.037382', '2007-09-10 19:14:26.037382', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (72, 72, 'ubuntu-hoary-translations-update.tar.gz', 'application/x-gtar', NULL, '2007-09-10 19:15:01.67038', '2007-09-10 19:15:01.67038', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (73, 73, 'ubuntu-hoary-translations-update.tar.gz', 'application/x-gtar', NULL, '2007-09-10 19:15:19.947543', '2007-09-10 19:15:19.947543', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (74, 74, 'ubuntu-hoary-translations.tar.gz', 'application/x-gtar', NULL, '2007-09-10 19:16:01.017943', '2007-09-10 19:16:01.017943', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (75, 75, 'cX0Ey6rIIK5MqHphucFPna1fQMt.msg', 'message/rfc822', NULL, '2007-12-18 16:31:34.790641', '2007-12-18 16:31:34.790641', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (76, 76, 'nSpn1l12p7HBplm6e7kaaX6lf86.msg', 'message/rfc822', NULL, '2007-12-18 16:31:34.790641', '2007-12-18 16:31:34.790641', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (77, 77, 'evJ9qHEN3ufdtsDUnGPb8a5hs77.msg', 'message/rfc822', NULL, '2007-12-18 16:31:34.790641', '2007-12-18 16:31:34.790641', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (78, 78, 'jqUQpLymm7f9DjBAAsQoM10WndS.msg', 'message/rfc822', NULL, '2007-12-18 16:31:34.790641', '2007-12-18 16:31:34.790641', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (79, 79, '378cZyfOfUKx6BySEK0HYKz4Tpd.msg', 'message/rfc822', NULL, '2007-12-18 16:31:34.790641', '2007-12-18 16:31:34.790641', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (80, 80, 'unnamed', 'application/pgp-signature; name="signature.asc"', NULL, '2007-12-18 16:31:34.790641', '2007-12-18 16:31:34.790641', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (81, 81, 'jm81HhLQDRDAsqbl74W7GT3cpel.msg', 'message/rfc822', NULL, '2007-12-18 16:31:34.790641', '2007-12-18 16:31:34.790641', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (82, 82, 'alsa-1.0.9a.exe', 'application/x-msdos-program', NULL, '2008-03-17 15:36:19.035615', '2008-03-17 15:36:19.035615', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (83, 83, 'alsa-1.0.9a.dmg', 'application/x-apple-diskimage', NULL, '2008-03-17 15:36:38.022812', '2008-03-17 15:36:38.022812', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (84, 84, 'README.txt', 'text/plain', NULL, '2008-03-17 15:36:48.877842', '2008-03-17 15:36:48.877842', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (85, 85, 'alsa-1.0.8.exe', 'application/x-msdos-program', NULL, '2008-03-17 15:37:10.252357', '2008-03-17 15:37:10.252357', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (86, 86, 'alsa-1.0.8.dmg', 'application/x-apple-diskimage', NULL, '2008-03-17 15:37:22.489973', '2008-03-17 15:37:22.489973', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (87, 87, 'alsa-1.0.8.tgz', 'application/x-tar', NULL, '2008-03-17 15:37:36.701686', '2008-03-17 15:37:36.701686', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (88, 88, 'README.txt', 'text/plain', NULL, '2008-03-17 15:37:48.465157', '2008-03-17 15:37:48.465157', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (89, 89, 'alsa-1.0.9a.tgz', 'application/x-tar', NULL, '2008-03-17 15:38:16.866444', '2008-03-17 15:38:16.866444', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (90, 90, 'pmount_1.0-1_all.deb', 'application/x-debian-package', NULL, '2008-05-08 10:15:32.440132', '2008-05-08 10:10:27.899028', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (91, 91, 'upload_22_log.txt', 'application/text', NULL, '2008-07-07 22:30:01.123456', '2008-07-07 22:30:01.123456', false, 0);
+INSERT INTO libraryfilealias (id, content, filename, mimetype, expires, last_accessed, date_created, restricted, hits) VALUES (92, 92, 'sample-submission-2.xml', 'application/x-bzip2', NULL, '2008-09-30 08:19:00.222131', '2008-09-30 08:19:00.222131', false, 0);
+
+
+ALTER TABLE libraryfilealias ENABLE TRIGGER ALL;
+
+
+ALTER TABLE person DISABLE TRIGGER ALL;
+
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (1, 'Mark Shuttleworth', NULL, NULL, 'mark', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.591618', NULL, NULL, NULL, false, 8, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 11);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (2, 'Robert Collins', NULL, NULL, 'lifeless', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.598107', NULL, NULL, NULL, false, 8, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 21);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (3, 'Dave Miller', NULL, NULL, 'justdave', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.610048', NULL, NULL, NULL, false, 1, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 31);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (4, 'Colin Watson', NULL, NULL, 'kamion', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.611185', NULL, NULL, NULL, false, 8, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 41);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (5, 'Scott James Remnant', NULL, NULL, 'keybuk', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.608802', NULL, NULL, NULL, false, 1, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 51);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (6, 'Jeff Waugh', NULL, NULL, 'jdub', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.600523', NULL, NULL, NULL, false, 8, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 61);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (7, 'Andrew Bennetts', NULL, NULL, 'spiv', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.551196', NULL, NULL, NULL, false, 2, 'when importing bugs from http://bugzilla.ubuntu.com/', NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 71);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (8, 'James Blackwell', NULL, NULL, 'jblack', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.601584', NULL, NULL, NULL, false, 8, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 81);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (9, 'Christian Reis', NULL, NULL, 'kiko', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.594941', NULL, NULL, NULL, false, 1, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 91);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (10, 'Alexander Limi', NULL, NULL, 'limi', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.619713', NULL, NULL, NULL, false, 2, 'when importing bugs from http://bugzilla.ubuntu.com/', NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 101);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (11, 'Steve Alexander', NULL, NULL, 'stevea', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.599234', NULL, NULL, NULL, false, 8, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 111);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (12, 'Sample Person', NULL, NULL, 'name12', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.612277', NULL, NULL, NULL, true, 8, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 121);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (13, 'Carlos Perelló Marín', NULL, NULL, 'carlos', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.615543', NULL, NULL, NULL, false, 8, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 131);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (14, 'Dafydd Harries', NULL, NULL, 'daf', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.616666', NULL, NULL, NULL, false, 8, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 141);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (16, 'Foo Bar', NULL, NULL, 'name16', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.593849', NULL, NULL, NULL, false, 8, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, 161);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (17, 'Ubuntu Team', 1, 'This Team is responsible for the Ubuntu Distribution', 'ubuntu-team', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.60576', NULL, NULL, NULL, false, NULL, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, NULL);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (18, 'Ubuntu Gnome Team', 1, 'This Team is responsible for the GNOME releases Issues on whole Ubuntu Distribution', 'name18', NULL, NULL, NULL, NULL, 1, NULL, '2005-06-06 08:59:51.607744', NULL, NULL, NULL, false, NULL, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, false, NULL);
+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_stan
