launchpad-reviewers team mailing list archive
-
launchpad-reviewers team
-
Mailing list archive
-
Message #05578
[Merge] lp:~bac/launchpad/revert-14311 into lp:launchpad
Brad Crittenden has proposed merging lp:~bac/launchpad/revert-14311 into lp:launchpad.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
Related bugs:
Bug #800295 in Launchpad itself: "buildd is unnecessarily coupled to the launchpad tree through tac file and readyservice"
https://bugs.launchpad.net/launchpad/+bug/800295
For more details, see:
https://code.launchpad.net/~bac/launchpad/revert-14311/+merge/82592
Rollback r14311
--
https://code.launchpad.net/~bac/launchpad/revert-14311/+merge/82592
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~bac/launchpad/revert-14311 into lp:launchpad.
=== modified file '.bzrignore'
--- .bzrignore 2011-11-10 05:39:19 +0000
+++ .bzrignore 2011-11-17 19:45:28 +0000
@@ -63,6 +63,14 @@
twistd.pid
lib/canonical/launchpad/apidoc
*.prof
+lib/canonical/launchpad-buildd_*.dsc
+lib/canonical/launchpad-buildd_*.tar.gz
+lib/canonical/launchpad-buildd_*_all.deb
+lib/canonical/launchpad-buildd_*.changes
+lib/canonical/launchpad-buildd_*_source.build
+lib/canonical/launchpad-buildd_*_source.changes
+lib/canonical/buildd/debian/*
+lib/canonical/buildd/launchpad-files/*
.project
.pydevproject
librarian.log
=== modified file 'buildout.cfg'
--- buildout.cfg 2011-11-11 07:02:45 +0000
+++ buildout.cfg 2011-11-17 19:45:28 +0000
@@ -1,4 +1,4 @@
-# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
+# Copyright 2009 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
[buildout]
@@ -57,9 +57,7 @@
zc.zservertracelog
# XXX gary 2009-5-12 bug 375751:
# Make mailman built and installed in a more normal way.
-extra-paths =
- ${buildout:directory}/lib/mailman
- /usr/lib/launchpad-buildd
+extra-paths = ${buildout:directory}/lib/mailman
include-site-packages = true
allowed-eggs-from-site-packages =
interpreter = py
=== added directory 'lib/canonical/buildd'
=== added file 'lib/canonical/buildd/Makefile'
--- lib/canonical/buildd/Makefile 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/Makefile 2011-11-17 19:45:28 +0000
@@ -0,0 +1,19 @@
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+all: deb
+
+src: clean
+ dpkg-buildpackage -rfakeroot -uc -us -S
+
+deb: clean
+ dpkg-buildpackage -rfakeroot -uc -us
+
+clean:
+ fakeroot debian/rules clean
+ rm -f ../launchpad-buildd*tar.gz
+ rm -f ../launchpad-buildd*dsc
+ rm -f ../launchpad-buildd*deb
+ rm -f ../launchpad-buildd*changes
+
+.PHONY: all clean deb
=== added file 'lib/canonical/buildd/README'
--- lib/canonical/buildd/README 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/README 2011-11-17 19:45:28 +0000
@@ -0,0 +1,47 @@
+In order to build the package you need dpkg-dev and fakeroot
+
+To build the package do:
+
+cd ..../lib/canonical/buildd
+debian/rules package
+dpkg-buildpackage -rfakeroot -b
+
+It will "fail" because the package is built in the "wrong" place. Don't
+worry about that.
+
+To clean up, do:
+
+fakeroot debian/rules clean
+rm launchpad-buildd*deb
+rm ../launchpad-buildd*changes
+
+-----------------------------------------------------------------------
+
+How to use the chroot tool:
+
+buildd-slave-chroot-tool --intervene <chroot tarball>
+
+...will unpack the chroot tarball, mount up the proc etc filesystems
+in it and chroot into it.
+
+When you leave the chroot, it will ask you if you want to repack the
+tarball. If you answer yes then it will repack it, if you answer no,
+then it will merely clean up and exit.
+
+If you wish to build a new chroot then first copy the example.chroot
+file from /usr/share/doc/launchpad-buildd/ and edit it to your
+requirements.
+
+Next run buildd-slave-chroot-tool --generate config.filename
+
+You must have sudo capability and ideally you would run this as the
+buildd user on a machine with launchpad-buildd installed.
+
+Once you have completed the chroot building, you will be left with a
+file named chroot-<distro>-<distrorelease>-<arch>.tar.bz2 which should
+be uploaded to the librarian from a machine with librarian access and
+then marked as the chroot for the given pocket.
+
+You should retain the configuration file so that we know how the
+chroot was built in case we need to rebuild it at a later date.
+
=== added file 'lib/canonical/buildd/__init__.py'
--- lib/canonical/buildd/__init__.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/__init__.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,5 @@
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+from canonical.buildd.slave import XMLRPCBuildDSlave
+from canonical.buildd.debian import DebianBuildManager
=== added file 'lib/canonical/buildd/binarypackage.py'
--- lib/canonical/buildd/binarypackage.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/binarypackage.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,133 @@
+# Copyright 2009, 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+
+import re
+
+from canonical.buildd.debian import DebianBuildManager, DebianBuildState
+
+
+class SBuildExitCodes:
+ """SBUILD process result codes."""
+ OK = 0
+ DEPFAIL = 1
+ GIVENBACK = 2
+ PACKAGEFAIL = 3
+ BUILDERFAIL = 4
+
+
+class BuildLogRegexes:
+ """Build log regexes for performing actions based on regexes, and extracting dependencies for auto dep-waits"""
+ GIVENBACK = [
+ ("^E: There are problems and -y was used without --force-yes"),
+ ]
+ DEPFAIL = [
+ ("(?P<pk>[\-+.\w]+)\(inst [^ ]+ ! >> wanted (?P<v>[\-.+\w:~]+)\)","\g<pk> (>> \g<v>)"),
+ ("(?P<pk>[\-+.\w]+)\(inst [^ ]+ ! >?= wanted (?P<v>[\-.+\w:~]+)\)","\g<pk> (>= \g<v>)"),
+ ("(?s)^E: Couldn't find package (?P<pk>[\-+.\w]+)(?!.*^E: Couldn't find package)","\g<pk>"),
+ ("(?s)^E: Package '?(?P<pk>[\-+.\w]+)'? has no installation candidate(?!.*^E: Package)","\g<pk>"),
+ ("(?s)^E: Unable to locate package (?P<pk>[\-+.\w]+)(?!.*^E: Unable to locate package)", "\g<pk>"),
+ ]
+
+
+class BinaryPackageBuildState(DebianBuildState):
+ SBUILD = "SBUILD"
+
+
+class BinaryPackageBuildManager(DebianBuildManager):
+ """Handle buildd building for a debian style binary package build"""
+
+ initial_build_state = BinaryPackageBuildState.SBUILD
+
+ def __init__(self, slave, buildid):
+ DebianBuildManager.__init__(self, slave, buildid)
+ self._sbuildpath = slave._config.get("binarypackagemanager", "sbuildpath")
+ self._sbuildargs = slave._config.get("binarypackagemanager",
+ "sbuildargs").split(" ")
+
+ def initiate(self, files, chroot, extra_args):
+ """Initiate a build with a given set of files and chroot."""
+
+ self._dscfile = None
+ for f in files:
+ if f.endswith(".dsc"):
+ self._dscfile = f
+ if self._dscfile is None:
+ raise ValueError, files
+
+ self.archive_purpose = extra_args.get('archive_purpose')
+ self.suite = extra_args.get('suite')
+ self.component = extra_args['ogrecomponent']
+ self.arch_indep = extra_args.get('arch_indep', False)
+ self.build_debug_symbols = extra_args.get('build_debug_symbols', False)
+
+ super(BinaryPackageBuildManager, self).initiate(
+ files, chroot, extra_args)
+
+ def doRunBuild(self):
+ """Run the sbuild process to build the package."""
+ args = ["sbuild-package", self._buildid, self.arch_tag]
+ if self.suite:
+ args.extend([self.suite])
+ args.extend(self._sbuildargs)
+ args.extend(["--dist=" + self.suite])
+ else:
+ args.extend(['autobuild'])
+ args.extend(self._sbuildargs)
+ args.extend(["--dist=autobuild"])
+ if self.arch_indep:
+ args.extend(["-A"])
+ if self.archive_purpose:
+ args.extend(["--purpose=" + self.archive_purpose])
+ if self.build_debug_symbols:
+ args.extend(["--build-debug-symbols"])
+ args.extend(["--architecture=" + self.arch_tag])
+ args.extend(["--comp=" + self.component])
+ args.extend([self._dscfile])
+ self.runSubProcess( self._sbuildpath, args )
+
+ def iterate_SBUILD(self, success):
+ """Finished the sbuild run."""
+ tmpLog = self.getTmpLogContents()
+ if success != SBuildExitCodes.OK:
+ if (success == SBuildExitCodes.DEPFAIL or
+ success == SBuildExitCodes.PACKAGEFAIL):
+ for rx in BuildLogRegexes.GIVENBACK:
+ mo = re.search(rx, tmpLog, re.M)
+ if mo:
+ success = SBuildExitCodes.GIVENBACK
+
+ if success == SBuildExitCodes.DEPFAIL:
+ for rx, dep in BuildLogRegexes.DEPFAIL:
+ mo = re.search(rx, tmpLog, re.M)
+ if mo:
+ if not self.alreadyfailed:
+ print("Returning build status: DEPFAIL")
+ print("Dependencies: " + mo.expand(dep))
+ self._slave.depFail(mo.expand(dep))
+ success = SBuildExitCodes.DEPFAIL
+ break
+ else:
+ success = SBuildExitCodes.PACKAGEFAIL
+
+ if success == SBuildExitCodes.GIVENBACK:
+ if not self.alreadyfailed:
+ print("Returning build status: GIVENBACK")
+ self._slave.giveBack()
+ elif success == SBuildExitCodes.PACKAGEFAIL:
+ if not self.alreadyfailed:
+ print("Returning build status: PACKAGEFAIL")
+ self._slave.buildFail()
+ elif success >= SBuildExitCodes.BUILDERFAIL:
+ # anything else is assumed to be a buildd failure
+ if not self.alreadyfailed:
+ print("Returning build status: BUILDERFAIL")
+ self._slave.builderFail()
+ self.alreadyfailed = True
+ self._state = DebianBuildState.REAP
+ self.doReapProcesses()
+ else:
+ print("Returning build status: OK")
+ self.gatherResults()
+ self._state = DebianBuildState.REAP
+ self.doReapProcesses()
=== added file 'lib/canonical/buildd/buildd-config.py'
--- lib/canonical/buildd/buildd-config.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/buildd-config.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,54 @@
+#!/usr/bin/python
+#
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+import os
+
+archtag = os.popen("dpkg --print-architecture").read().strip()
+
+from optparse import OptionParser
+
+parser = OptionParser()
+parser.add_option("-n", "--name", dest="NAME",
+ help="the name for this buildd",
+ metavar="NAME",
+ default="default")
+
+parser.add_option("-H", "--host", dest="BINDHOST",
+ help="the IP/host this buildd binds to",
+ metavar="HOSTNAME",
+ default="localhost")
+
+parser.add_option("-p", "--port", dest="BINDPORT",
+ help="the port this buildd binds to",
+ metavar="PORT",
+ default="8221")
+
+parser.add_option("-a", "--arch", dest="ARCHTAG",
+ help="the arch tag this buildd claims",
+ metavar="ARCHTAG",
+ default=archtag)
+
+parser.add_option("-t", "--template", dest="TEMPLATE",
+ help="the template file to use",
+ metavar="FILE",
+ default="/usr/share/launchpad-buildd/template-buildd-slave.conf")
+
+(options, args) = parser.parse_args()
+
+template = open(options.TEMPLATE, "r").read()
+
+replacements = {
+ "@NAME@": options.NAME,
+ "@BINDHOST@": options.BINDHOST,
+ "@ARCHTAG@": options.ARCHTAG,
+ "@BINDPORT@": options.BINDPORT,
+ }
+
+for replacement_key in replacements:
+ template = template.replace(replacement_key,
+ replacements[replacement_key])
+
+print template
+
=== added file 'lib/canonical/buildd/buildd-slave-example.conf'
--- lib/canonical/buildd/buildd-slave-example.conf 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/buildd-slave-example.conf 2011-11-17 19:45:28 +0000
@@ -0,0 +1,25 @@
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# Example buildd slave configuration
+# This should be refactored into a nice central configuration format when
+# such a beast becomes available
+
+[slave]
+architecturetag = i386
+filecache = /home/buildd/filecache
+bindhost = localhost
+bindport = 8221
+
+[allmanagers]
+unpackpath = /home/buildd/slavebin/unpack-chroot
+cleanpath = /home/buildd/slavebin/remove-build
+mountpath = /home/buildd/slavebin/mount-chroot
+umountpath = /home/buildd/slavebin/umount-chroot
+
+[debianmanager]
+sbuildpath = /home/buildd/slavebin/sbuild-package
+sbuildargs = -dautobuild --nolog --batch -A
+updatepath = /home/buildd/slavebin/update-debian-chroot
+processscanpath = /home/buildd/slavebin/scan-for-processes
+ogrepath = /home/buildd/slavebin/apply-ogre-model
=== added file 'lib/canonical/buildd/buildd-slave.tac'
--- lib/canonical/buildd/buildd-slave.tac 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/buildd-slave.tac 2011-11-17 19:45:28 +0000
@@ -0,0 +1,55 @@
+# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# CAUTION: The only modules in the Launchpad tree that this is permitted to
+# depend on are canonical.buildd, since buildds are deployed by copying that
+# directory only. (See also bug=800295.)
+
+# Buildd Slave implementation
+# XXX: dsilvers: 2005/01/21: Currently everything logged in the slave gets
+# passed through to the twistd log too. this could get dangerous/big
+
+from twisted.application import service, strports
+from canonical.buildd import XMLRPCBuildDSlave
+from canonical.buildd.binarypackage import BinaryPackageBuildManager
+from canonical.buildd.sourcepackagerecipe import (
+ SourcePackageRecipeBuildManager)
+from canonical.buildd.translationtemplates import (
+ TranslationTemplatesBuildManager)
+
+from twisted.web import server, resource, static
+from ConfigParser import SafeConfigParser
+
+import os
+
+conffile = os.environ.get('BUILDD_SLAVE_CONFIG', 'buildd-slave-example.conf')
+
+conf = SafeConfigParser()
+conf.read(conffile)
+slave = XMLRPCBuildDSlave(conf)
+
+# 'debian' is the old name. It remains here for compatibility.
+slave.registerBuilder(BinaryPackageBuildManager, "debian")
+slave.registerBuilder(BinaryPackageBuildManager, "binarypackage")
+slave.registerBuilder(SourcePackageRecipeBuildManager, "sourcepackagerecipe")
+slave.registerBuilder(
+ TranslationTemplatesBuildManager, 'translation-templates')
+
+application = service.Application('BuildDSlave')
+builddslaveService = service.IServiceCollection(application)
+
+root = resource.Resource()
+root.putChild('rpc', slave)
+root.putChild('filecache', static.File(conf.get('slave', 'filecache')))
+slavesite = server.Site(root)
+
+strports.service(slave.slave._config.get("slave","bindport"),
+ slavesite).setServiceParent(builddslaveService)
+
+# You can interact with a running slave like this:
+# (assuming the slave is on localhost:8221)
+#
+# python
+# import xmlrpclib
+# s = xmlrpclib.ServerProxy("http://localhost:8221/rpc")
+# s.echo("Hello World")
=== added file 'lib/canonical/buildd/buildrecipe'
--- lib/canonical/buildd/buildrecipe 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/buildrecipe 2011-11-17 19:45:28 +0000
@@ -0,0 +1,220 @@
+#! /usr/bin/env python -u
+# Copyright 2010, 2011 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""A script that builds a package from a recipe and a chroot."""
+
+__metaclass__ = type
+
+
+import os
+import os.path
+import pwd
+from resource import RLIMIT_AS, setrlimit
+import socket
+from subprocess import (
+ Popen,
+ call,
+ )
+import sys
+
+
+RETCODE_SUCCESS = 0
+RETCODE_FAILURE_INSTALL = 200
+RETCODE_FAILURE_BUILD_TREE = 201
+RETCODE_FAILURE_INSTALL_BUILD_DEPS = 202
+RETCODE_FAILURE_BUILD_SOURCE_PACKAGE = 203
+
+
+class NotVirtualized(Exception):
+ """Exception raised when not running in a virtualized environment."""
+
+ def __init__(self):
+ Exception.__init__(self, 'Not running under Xen.')
+
+
+def call_report_rusage(args):
+ """Run a subprocess.
+
+ Report that it was run, and the resources used, and complain if it fails.
+
+ :return: The process wait status.
+ """
+ print 'RUN %r' % args
+ proc = Popen(args)
+ pid, status, rusage = os.wait4(proc.pid, 0)
+ print(rusage)
+ return status
+
+
+class RecipeBuilder:
+ """Builds a package from a recipe."""
+
+ def __init__(self, build_id, author_name, author_email,
+ suite, distroseries_name, component, archive_purpose):
+ """Constructor.
+
+ :param build_id: The id of the build (a str).
+ :param author_name: The name of the author (a str).
+ :param author_email: The email address of the author (a str).
+ :param suite: The suite the package should be built for (a str).
+ """
+ self.build_id = build_id
+ self.author_name = author_name.decode('utf-8')
+ self.author_email = author_email
+ self.archive_purpose = archive_purpose
+ self.component = component
+ self.distroseries_name = distroseries_name
+ self.suite = suite
+ self.base_branch = None
+ self.chroot_path = get_build_path(build_id, 'chroot-autobuild')
+ self.work_dir_relative = os.environ['HOME'] + '/work'
+ self.work_dir = os.path.join(self.chroot_path,
+ self.work_dir_relative[1:])
+ self.tree_path = os.path.join(self.work_dir, 'tree')
+ self.username = pwd.getpwuid(os.getuid())[0]
+
+ def install(self):
+ """Install all the requirements for building recipes.
+
+ :return: A retcode from apt.
+ """
+ # XXX: AaronBentley 2010-07-07 bug=602463: pbuilder uses aptitude but
+ # does not depend on it.
+ return self.chroot([
+ 'apt-get', 'install', '-y', 'pbuilder', 'aptitude'])
+
+ def buildTree(self):
+ """Build the recipe into a source tree.
+
+ As a side-effect, sets self.source_dir_relative.
+ :return: a retcode from `bzr dailydeb`.
+ """
+ try:
+ ensure_virtualized()
+ except NotVirtualized, e:
+ sys.stderr.write('Aborting on failed virtualization check:\n')
+ sys.stderr.write(str(e))
+ return 1
+ assert not os.path.exists(self.tree_path)
+ recipe_path = os.path.join(self.work_dir, 'recipe')
+ manifest_path = os.path.join(self.tree_path, 'manifest')
+ recipe_file = open(recipe_path, 'rb')
+ try:
+ recipe = recipe_file.read()
+ finally:
+ recipe_file.close()
+ # As of bzr 2.2, a defined identity is needed. In this case, we're
+ # using buildd@<hostname>.
+ hostname = socket.gethostname()
+ bzr_email = 'buildd@%s' % hostname
+
+ print 'Bazaar versions:'
+ check_call(['bzr', 'version'])
+ check_call(['bzr', 'plugins'])
+
+ print 'Building recipe:'
+ print recipe
+ sys.stdout.flush()
+ env = {
+ 'DEBEMAIL': self.author_email,
+ 'DEBFULLNAME': self.author_name.encode('utf-8'),
+ 'BZR_EMAIL': bzr_email}
+ retcode = call_report_rusage([
+ 'bzr', 'dailydeb', '--safe', '--no-build', recipe_path,
+ self.tree_path, '--manifest', manifest_path,
+ '--allow-fallback-to-native', '--append-version',
+ '~%s1' % self.distroseries_name], env=env)
+ if retcode != 0:
+ return retcode
+ (source,) = [name for name in os.listdir(self.tree_path)
+ if name != 'manifest']
+ self.source_dir_relative = os.path.join(
+ self.work_dir_relative, 'tree', source)
+ return retcode
+
+ def getPackageName(self):
+ source_dir = os.path.join(
+ self.chroot_path, self.source_dir_relative.lstrip('/'))
+ changelog = os.path.join(source_dir, 'debian/changelog')
+ return open(changelog, 'r').readline().split(' ')[0]
+
+ def installBuildDeps(self):
+ """Install the build-depends of the source tree."""
+ package = self.getPackageName()
+ currently_building_path = os.path.join(
+ self.chroot_path, 'CurrentlyBuilding')
+ currently_building_contents = (
+ 'Package: %s\n'
+ 'Suite: %s\n'
+ 'Component: %s\n'
+ 'Purpose: %s\n'
+ 'Build-Debug-Symbols: no\n' %
+ (package, self.suite, self.component, self.archive_purpose))
+ currently_building = open(currently_building_path, 'w')
+ currently_building.write(currently_building_contents)
+ currently_building.close()
+ return self.chroot(['sh', '-c', 'cd %s &&'
+ '/usr/lib/pbuilder/pbuilder-satisfydepends'
+ % self.source_dir_relative])
+
+ def chroot(self, args, echo=False):
+ """Run a command in the chroot.
+
+ :param args: the command and arguments to run.
+ :return: the status code.
+ """
+ if echo:
+ print "Running in chroot: %s" % ' '.join(
+ "'%s'" % arg for arg in args)
+ sys.stdout.flush()
+ return call([
+ '/usr/bin/sudo', '/usr/sbin/chroot', self.chroot_path] + args)
+
+ def buildSourcePackage(self):
+ """Build the source package.
+
+ :return: a retcode from dpkg-buildpackage.
+ """
+ retcode = self.chroot([
+ 'su', '-c', 'cd %s && /usr/bin/dpkg-buildpackage -i -I -us -uc -S'
+ % self.source_dir_relative, self.username])
+ for filename in os.listdir(self.tree_path):
+ path = os.path.join(self.tree_path, filename)
+ if os.path.isfile(path):
+ os.rename(path, get_build_path(self.build_id, filename))
+ return retcode
+
+
+def get_build_path(build_id, *extra):
+ """Generate a path within the build directory.
+
+ :param build_id: the build id to use.
+ :param extra: the extra path segments within the build directory.
+ :return: the generated path.
+ """
+ return os.path.join(
+ os.environ["HOME"], "build-" + build_id, *extra)
+
+
+def ensure_virtualized():
+ """Raise an exception if not running in a virtualized environment.
+
+ Raises if not running under Xen.
+ """
+ if not os.path.isdir('/proc/xen') or os.path.exists('/proc/xen/xsd_kva'):
+ raise NotVirtualized()
+
+
+if __name__ == '__main__':
+ setrlimit(RLIMIT_AS, (1000000000, -1))
+ builder = RecipeBuilder(*sys.argv[1:])
+ if builder.buildTree() != 0:
+ sys.exit(RETCODE_FAILURE_BUILD_TREE)
+ if builder.install() != 0:
+ sys.exit(RETCODE_FAILURE_INSTALL)
+ if builder.installBuildDeps() != 0:
+ sys.exit(RETCODE_FAILURE_INSTALL_BUILD_DEPS)
+ if builder.buildSourcePackage() != 0:
+ sys.exit(RETCODE_FAILURE_BUILD_SOURCE_PACKAGE)
+ sys.exit(RETCODE_SUCCESS)
=== added file 'lib/canonical/buildd/check-implicit-pointer-functions'
--- lib/canonical/buildd/check-implicit-pointer-functions 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/check-implicit-pointer-functions 2011-11-17 19:45:28 +0000
@@ -0,0 +1,118 @@
+#!/usr/bin/env python
+
+#
+# Copyright (c) 2004 Hewlett-Packard Development Company, L.P.
+# David Mosberger <davidm@xxxxxxxxxx>
+# Copyright 2010 Canonical Ltd.
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+
+# Scan standard input for GCC warning messages that are likely to
+# source of real 64-bit problems. In particular, see whether there
+# are any implicitly declared functions whose return values are later
+# interpreted as pointers. Those are almost guaranteed to cause
+# crashes.
+#
+import re
+import sys
+
+implicit_pattern = re.compile(
+ "([^:]*):(\d+):(\d+:)? warning: implicit declaration "
+ "of function [`']([^']*)'")
+pointer_pattern = re.compile(
+ "([^:]*):(\d+):(\d+:)? warning: "
+ + "("
+ + "(assignment"
+ + "|initialization"
+ + "|return"
+ + "|passing arg \d+ of `[^']*'"
+ + "|passing arg \d+ of pointer to function"
+ + ") makes pointer from integer without a cast"
+ + "|"
+ + "cast to pointer from integer of different size)")
+
+def main():
+ last_implicit_filename = ""
+ last_implicit_linenum = -1
+ last_implicit_func = ""
+
+ errlist = ""
+
+ in_line = False
+ warn_only = False
+
+ for arg in sys.argv[1:]:
+ if arg == '--inline':
+ in_line = True
+ elif arg == '--warnonly':
+ warn_only = True
+
+ rv = 0
+ while True:
+ line = sys.stdin.readline()
+ if in_line:
+ sys.stdout.write(line)
+ sys.stdout.flush()
+ if line == '':
+ break
+ m = implicit_pattern.match(line)
+ if m:
+ last_implicit_filename = m.group(1)
+ last_implicit_linenum = int(m.group(2))
+ last_implicit_func = m.group(4)
+ else:
+ m = pointer_pattern.match(line)
+ if m:
+ pointer_filename = m.group(1)
+ pointer_linenum = int(m.group(2))
+ if (last_implicit_filename == pointer_filename
+ and last_implicit_linenum == pointer_linenum):
+ err = "Function `%s' implicitly converted to pointer at " \
+ "%s:%d" % (last_implicit_func, last_implicit_filename,
+ last_implicit_linenum)
+ errlist += err+"\n"
+ print err
+ if not warn_only:
+ rv = 3
+
+ if len(errlist):
+ if in_line:
+ print errlist
+ print """
+
+Our automated build log filter detected the problem(s) above that will
+likely cause your package to segfault on architectures where the size of
+a pointer is greater than the size of an integer, such as ia64 and amd64.
+
+This is often due to a missing function prototype definition.
+
+Since use of implicitly converted pointers is always fatal to the application
+on ia64, they are errors. Please correct them for your next upload.
+
+More information can be found at:
+http://wiki.debian.org/ImplicitPointerConversions
+
+ """
+ sys.exit(rv)
+
+if __name__ == '__main__':
+ main()
=== added symlink 'lib/canonical/buildd/check_implicit_pointer_functions.py'
=== target is u'check-implicit-pointer-functions'
=== added directory 'lib/canonical/buildd/debian'
=== added file 'lib/canonical/buildd/debian.py'
--- lib/canonical/buildd/debian.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/debian.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,239 @@
+# Copyright 2009, 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# Authors: Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx>
+# and Adam Conrad <adam.conrad@xxxxxxxxxxxxx>
+
+# Buildd Slave sbuild manager implementation
+
+__metaclass__ = type
+
+import os
+
+from canonical.buildd.slave import (
+ BuildManager,
+ )
+
+
+class DebianBuildState:
+ """States for the DebianBuildManager."""
+ INIT = "INIT"
+ UNPACK = "UNPACK"
+ MOUNT = "MOUNT"
+ SOURCES = "SOURCES"
+ UPDATE = "UPDATE"
+ REAP = "REAP"
+ UMOUNT = "UMOUNT"
+ CLEANUP = "CLEANUP"
+
+
+class DebianBuildManager(BuildManager):
+ """Base behaviour for Debian chrooted builds."""
+
+ def __init__(self, slave, buildid):
+ BuildManager.__init__(self, slave, buildid)
+ self._updatepath = slave._config.get("debianmanager", "updatepath")
+ self._scanpath = slave._config.get("debianmanager", "processscanpath")
+ self._sourcespath = slave._config.get("debianmanager", "sourcespath")
+ self._cachepath = slave._config.get("slave","filecache")
+ self._state = DebianBuildState.INIT
+ slave.emptyLog()
+ self.alreadyfailed = False
+
+ @property
+ def initial_build_state(self):
+ raise NotImplementedError()
+
+ def initiate(self, files, chroot, extra_args):
+ """Initiate a build with a given set of files and chroot."""
+
+ self.arch_tag = extra_args.get('arch_tag', self._slave.getArch())
+ self.sources_list = extra_args.get('archives')
+
+ BuildManager.initiate(self, files, chroot, extra_args)
+
+ def doSourcesList(self):
+ """Override apt/sources.list.
+
+ Mainly used for PPA builds.
+ """
+ args = ["override-sources-list", self._buildid]
+ args.extend(self.sources_list)
+ self.runSubProcess(self._sourcespath, args)
+
+ def doUpdateChroot(self):
+ """Perform the chroot upgrade."""
+ self.runSubProcess(
+ self._updatepath,
+ ["update-debian-chroot", self._buildid, self.arch_tag])
+
+ def doRunBuild(self):
+ """Run the main build process.
+
+ Subclasses must override this.
+ """
+ raise NotImplementedError()
+
+ def doReapProcesses(self):
+ """Reap any processes left lying around in the chroot."""
+ self.runSubProcess( self._scanpath, [self._scanpath, self._buildid] )
+
+ @staticmethod
+ def _parseChangesFile(linesIter):
+ """A generator that iterates over files listed in a changes file.
+
+ :param linesIter: an iterable of lines in a changes file.
+ """
+ seenfiles = False
+ for line in linesIter:
+ if line.endswith("\n"):
+ line = line[:-1]
+ if not seenfiles and line.startswith("Files:"):
+ seenfiles = True
+ elif seenfiles:
+ if not line.startswith(' '):
+ break
+ filename = line.split(' ')[-1]
+ yield filename
+
+ def getChangesFilename(self):
+ changes = self._dscfile[:-4] + "_" + self.arch_tag + ".changes"
+ return get_build_path(self._buildid, changes)
+
+ def gatherResults(self):
+ """Gather the results of the build and add them to the file cache.
+
+ The primary file we care about is the .changes file. We key from there.
+ """
+ path = self.getChangesFilename()
+ name = os.path.basename(path)
+ chfile = open(path, "r")
+ self._slave.waitingfiles[name] = self._slave.storeFile(chfile.read())
+ chfile.seek(0)
+ seenfiles = False
+
+ for fn in self._parseChangesFile(chfile):
+ self._slave.addWaitingFile(get_build_path(self._buildid, fn))
+
+ chfile.close()
+
+ def iterate(self, success):
+ # When a Twisted ProcessControl class is killed by SIGTERM,
+ # which we call 'build process aborted', 'None' is returned as
+ # exit_code.
+ print ("Iterating with success flag %s against stage %s"
+ % (success, self._state))
+ func = getattr(self, "iterate_" + self._state, None)
+ if func is None:
+ raise ValueError, "Unknown internal state " + self._state
+ func(success)
+
+ def iterate_INIT(self, success):
+ """Just finished initializing the build."""
+ if success != 0:
+ if not self.alreadyfailed:
+ # The init failed, can't fathom why that would be...
+ self._slave.builderFail()
+ self.alreadyfailed = True
+ self._state = DebianBuildState.CLEANUP
+ self.doCleanup()
+ else:
+ self._state = DebianBuildState.UNPACK
+ self.doUnpack()
+
+ def iterate_UNPACK(self, success):
+ """Just finished unpacking the tarball."""
+ if success != 0:
+ if not self.alreadyfailed:
+ # The unpack failed for some reason...
+ self._slave.chrootFail()
+ self.alreadyfailed = True
+ self._state = DebianBuildState.CLEANUP
+ self.doCleanup()
+ else:
+ self._state = DebianBuildState.MOUNT
+ self.doMounting()
+
+ def iterate_MOUNT(self, success):
+ """Just finished doing the mounts."""
+ if success != 0:
+ if not self.alreadyfailed:
+ self._slave.chrootFail()
+ self.alreadyfailed = True
+ self._state = DebianBuildState.UMOUNT
+ self.doUnmounting()
+ else:
+ if self.sources_list is not None:
+ self._state = DebianBuildState.SOURCES
+ self.doSourcesList()
+ else:
+ self._state = DebianBuildState.UPDATE
+ self.doUpdateChroot()
+
+ def getTmpLogContents(self):
+ try:
+ tmpLogHandle = open(os.path.join(self._cachepath, "buildlog"))
+ return tmpLogHandle.read()
+ finally:
+ tmpLogHandle.close()
+
+ def iterate_SOURCES(self, success):
+ """Just finished overwriting sources.list."""
+ if success != 0:
+ if not self.alreadyfailed:
+ self._slave.chrootFail()
+ self.alreadyfailed = True
+ self._state = DebianBuildState.REAP
+ self.doReapProcesses()
+ else:
+ self._state = DebianBuildState.UPDATE
+ self.doUpdateChroot()
+
+ def iterate_UPDATE(self, success):
+ """Just finished updating the chroot."""
+ if success != 0:
+ if not self.alreadyfailed:
+ self._slave.chrootFail()
+ self.alreadyfailed = True
+ self._state = DebianBuildState.REAP
+ self.doReapProcesses()
+ else:
+ self._state = self.initial_build_state
+ self.doRunBuild()
+
+ def iterate_REAP(self, success):
+ """Finished reaping processes; ignore error returns."""
+ self._state = DebianBuildState.UMOUNT
+ self.doUnmounting()
+
+ def iterate_UMOUNT(self, success):
+ """Just finished doing the unmounting."""
+ if success != 0:
+ if not self.alreadyfailed:
+ self._slave.builderFail()
+ self.alreadyfailed = True
+ self._state = DebianBuildState.CLEANUP
+ self.doCleanup()
+
+ def iterate_CLEANUP(self, success):
+ """Just finished the cleanup."""
+ if success != 0:
+ if not self.alreadyfailed:
+ self._slave.builderFail()
+ self.alreadyfailed = True
+ else:
+ # Successful clean
+ if not self.alreadyfailed:
+ self._slave.buildOK()
+ self._slave.buildComplete()
+
+
+def get_build_path(build_id, *extra):
+ """Generate a path within the build directory.
+
+ :param build_id: the build id to use.
+ :param extra: the extra path segments within the build directory.
+ :return: the generated path.
+ """
+ return os.path.join(
+ os.environ["HOME"], "build-" + build_id, *extra)
=== added file 'lib/canonical/buildd/debian/changelog'
--- lib/canonical/buildd/debian/changelog 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/debian/changelog 2011-11-17 19:45:28 +0000
@@ -0,0 +1,698 @@
+launchpad-buildd (82) hardy-cat; urgency=low
+
+ * Cut out readyservice from the buildds. LP: #800295
+ * buildrecipe shows the bzr and bzr-builder versions. LP: #884092
+ * buildrecipe shows bzr rusage. LP: 884997
+
+ -- Martin Pool <mbp@xxxxxxxxxxxxx> Thu, 03 Nov 2011 17:11:25 +1100
+
+launchpad-buildd (81) hardy-cat; urgency=low
+
+ * generate-translation-templates: switch to Python 2.7.
+
+ -- Danilo Šegan <danilo@xxxxxxxxxxxxx> Mon, 17 Oct 2011 14:46:13 +0200
+
+launchpad-buildd (80) hardy-cat; urgency=low
+
+ * binfmt-support demonstrated umount ordering issues for us. LP: #851934
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Mon, 19 Sep 2011 04:56:58 -0600
+
+launchpad-buildd (79) hardy-cat; urgency=low
+
+ * Fix sudoers.d/buildd permissions
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Fri, 19 Aug 2011 07:31:54 -0600
+
+launchpad-buildd (78) hardy-cat; urgency=low
+
+ * Correctly update sudoers files when needed. LP: #742881
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Wed, 06 Apr 2011 22:20:17 -0600
+
+launchpad-buildd (77) hardy-cat; urgency=low
+
+ * Add back in ultimate-backstop umask() correction.
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Wed, 06 Apr 2011 13:34:05 -0600
+
+launchpad-buildd (76) hardy-cat; urgency=low
+
+ [ various ]
+ * ProjectGroup.products sort order and remove Author: comments.
+ * Fix some tests to not print stuff
+ * Make buildd pointer check regexes work on natty
+ * merge before rollout + text conflict patch by wgrant
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Tue, 15 Mar 2011 16:59:36 -0600
+
+launchpad-buildd (74) hardy-cat; urgency=low
+
+ [ Aaron Bentley]
+ * Memory-limit recipe builds. LP#676657
+
+ [ LaMont Jones]
+ * mount a tmpfs on /dev/shm in build chroots. LP#671441
+
+ [Michael Bienia]
+ * Update regexes used for DEPWAIT. LP#615286
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Tue, 23 Nov 2010 06:17:57 -0700
+
+launchpad-buildd (73) hardy-cat; urgency=low
+
+ * Revert to revision 70
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Thu, 28 Oct 2010 12:53:45 -0600
+
+launchpad-buildd (72) hardy-cat; urgency=low
+
+ * break out readyservice.py from tachandler.py. LP#663828
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Wed, 20 Oct 2010 13:03:23 -0600
+
+launchpad-buildd (71) hardy-cat; urgency=low
+
+ * Detect ppa hosts for build recipes. LP#662664
+ * Better recipe builds. LP#599100, 627119, 479705
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Tue, 19 Oct 2010 13:48:33 -0600
+
+launchpad-buildd (70) hardy-cat; urgency=low
+
+ [ LaMont Jones ]
+ * Restore the rest of version 68.
+
+ [ James Westby ]
+ * buildrecipe: Specify BZR_EMAIL via sudo so that the called command
+ sees the environment variable.
+ * buildrecipe: call sudo -i -u instead of sudo -iu so that it works with
+ older versions of sudo.
+ * buildrecipe: flush stdout before calling another command so that
+ the build log has the output correctly interleaved.
+
+ [ William Grant ]
+ * correct arch_tag arguments.
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Fri, 20 Aug 2010 13:27:55 -0600
+
+launchpad-buildd (69) hardy-cat; urgency=low
+
+ * REVERT all of version 68 except for BZR_EMAIL LP#617072
+ (Not reflected in bzr.)
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Tue, 17 Aug 2010 10:40:03 -0600
+
+launchpad-buildd (68) hardy-cat; urgency=low
+
+ [ William Grant ]
+ * Take an 'arch_tag' argument, so the master can override the slave
+ architecture.
+
+ [ Jelmer Vernooij ]
+
+ * Explicitly use source format 1.0.
+ * Add LSB information to init script.
+ * Use debhelper >= 5 (available in dapper, not yet deprecated in
+ maverick).
+ * Fix spelling in description.
+ * Install example buildd configuration.
+
+ [ Paul Hummer ]
+ * Provide BZR_EMAIL for bzr 2.2 in the buildds LP#617072
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Mon, 16 Aug 2010 13:25:09 -0600
+
+launchpad-buildd (67) hardy-cat; urgency=low
+
+ * Force aptitude installation for recipe builds on maverick
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Fri, 23 Jul 2010 14:22:23 -0600
+
+launchpad-buildd (66) hardy-cat; urgency=low
+
+ * handle [linux-any] build-dependencies. LP#604981
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Mon, 19 Jul 2010 12:13:31 -0600
+
+launchpad-buildd (65) hardy-cat; urgency=low
+
+ * Drop preinst check, since human time does not scale across a large
+ rollout. soyuz just needs to deal with upgrades mid-build better.
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Thu, 08 Jul 2010 05:04:02 -0600
+
+launchpad-buildd (64) hardy-cat; urgency=low
+
+ * Pottery now strips quotes from variables.
+
+ -- Jeroen Vermeulen <jtv@xxxxxxxxxxxxx> Wed, 30 Jun 2010 12:50:59 +0200
+
+launchpad-buildd (63) hardy-cat; urgency=low
+
+ * Drop apply-ogre-model, since override-sources-list replaced it three years
+ ago. Also clean up extra_args parsing a bit.
+
+ -- William Grant <wgrant@xxxxxxxxxx> Sat, 12 Jun 2010 11:33:11 +1000
+
+launchpad-buildd (62) hardy-cat; urgency=low
+
+ * Make the buildds cope with not having a sourcepackagename LP#587109
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Tue, 08 Jun 2010 13:02:31 -0600
+
+launchpad-buildd (61) hardy-cat; urgency=high
+
+ [ William Grant ]
+ * Fixed translation templates slave to return files properly. LP#549422
+
+ [ Danilo Segan ]
+ * Added more output to generate-translation-templates. LP#580345
+
+ [ Henning Eggers ]
+ * Improved output of build xmplrpc call, not returning None now. LP#581746
+ * Added apache2 dependency. LP#557634
+ * Added preinst script to prevent installation when a build is running.
+ LP#557347
+
+ [ LaMont Jones ]
+ * preinst needs to detect a stale buildlog as well.
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Fri, 21 May 2010 05:52:53 -0600
+
+launchpad-buildd (60) lucid-cat; urgency=low
+
+ * Depends: lsb-release, which is ubuntu-minimal, but not essential.
+
+ -- LaMont Jones <lamont@xxxxxxxxxx> Thu, 01 Apr 2010 08:54:48 -0600
+
+launchpad-buildd (59) lucid-cat; urgency=low
+
+ [ Henning Eggers ]
+ * Added translation template generation code (pottery).
+
+ [ LaMont Jones ]
+ * set umask for twisted where supported
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Wed, 31 Mar 2010 10:38:15 -0600
+
+launchpad-buildd (58~1) karmic; urgency=low
+
+ * Misc fixes to match APIs.
+
+ -- Aaron Bentley <aaron@xxxxxxxxxxxxxxxx> Fri, 15 Jan 2010 10:03:07 +1300
+
+launchpad-buildd (58~0) karmic; urgency=low
+
+ * Include buildrecipe.py.
+
+ -- Aaron Bentley <aaron@xxxxxxxxxxxxxxxx> Wed, 13 Jan 2010 17:06:59 +1300
+
+launchpad-buildd (57) hardy-cat; urgency=low
+
+ * Split the sbuild wrapper from DebianBuildManager into a new
+ BinaryPackageBuildManager, and point the 'debian' builder at that
+ instead.
+
+ -- William Grant <wgrant@xxxxxxxxxx> Tue, 12 Jan 2010 09:22:50 +1300
+
+launchpad-buildd (56) hardy-cat; urgency=low
+
+ * only error out on implicit-function-pointers check on lucid or later,
+ non-32-bit architectures. Warnings elsewhere. LP#504078
+ * drop use of ccache and /var/cache/apt/archives, since we don't use one,
+ and the other is just plain silly.
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Mon, 11 Jan 2010 13:12:49 -0700
+
+launchpad-buildd (54) hardy-cat; urgency=low
+
+ [ William Grant ]
+ * debian.py: Tell sbuild to build debug symbols if the
+ build_debug_symbols argument is True.
+ * sbuild: Set "Build-Debug-Symbols: yes" in CurrentlyBuilding if
+ we have been told to build debug symbols.
+
+ [ LaMont Jones ]
+ * do not ignore SIGHUP in builds - it breaks test suites. LP#453460
+ * create filecache-default/ccache directories in init.d as well as postinst
+ * sbuild: run dpkg-source inside the chroot. LP#476036
+ * sbuild: change the regexp for dpkg-source extraction to handle both karmic and pre-karmic dpkg. LP#476036
+ * use --print-architecture instead of --print-installation-architecture
+ * mount-chroot: copy hosts et al into chroot. LP#447919
+ * provide and call check-implicit-function-pointers.
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Mon, 14 Dec 2009 12:00:10 -0700
+
+launchpad-buildd (52) dapper-cat; urgency=low
+
+ * Depends: apt-transport-https
+
+ -- LaMont Jones <lamont@xxxxxxxxxxxxx> Fri, 09 Oct 2009 11:00:50 -0600
+
+launchpad-buildd (50) dapper-cat; urgency=low
+
+ * sbuild: Change all invocations of apt and dpkg to occur inside
+ the build chroot, rather than happening outside the chroot with
+ a bunch of flags to operate on data files in the chroot. This
+ should clear up issues we see with mismatched host toolchains.
+ * sbuild: Revert the above in the case of "apt-get source" which
+ doesn't require any fancy features in the chroot and, frankly,
+ is much easier to manage if it's executed externally.
+ * scan-for-processes: Bring in a change from production to make
+ sure that we follow symlinks in our search for process roots.
+ * sbuild-package: Output NR_PROCESSORS in the build logs, for
+    slightly easier debugging of possible parallel build bugs.
+ * update-debian-chroot: Stop using chapt-get, and instead chroot
+ into the build chroot and call the native apt-get there.
+ * update-debian-chroot: Cargo-cult the linux32 magic from the
+ sbuild wrapper to set our personality on chroot upgrades.
+ * mount-chroot: Mount sys in the chroot too. While it shouldn't
+ be, strictly-speaking, required for anything, it's nice to have.
+ * chapt-get, slave_chroot_tool.py: Delete both as obsolete cruft.
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Fri, 24 Jul 2009 07:21:30 -0600
+
+launchpad-buildd (49) dapper-cat; urgency=low
+
+ * sbuild.conf: bump default automake from automake1.8 to automake1.9
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Fri, 12 Sep 2008 08:54:24 -0600
+
+launchpad-buildd (48) dapper-cat; urgency=low
+
+ * sbuild-package: If we're an amd64 host system, but being used
+ to build i386 or lpia, use linux32 to pretend to be i686.
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Fri, 12 Sep 2008 08:12:34 -0600
+
+launchpad-buildd (47) dapper-cat; urgency=low
+
+ * slave.py: If the logfile doesn't currently exist on disk when
+ getLogTail() goes looking for it (which is a possible race with
+ the new sanitisation code), just return an empty string.
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Mon, 02 Jun 2008 13:09:55 -0600
+
+launchpad-buildd (46) dapper-cat; urgency=low
+
+ * slave.py: Accept a separate username and password to the
+ ensurePresent() call which, if present, are used to install
+ an auth handler to cope with basic http auth with the http
+ server when fetching files.
+ * slave.py: Ensure that build logs are sanitized so that any
+ user:password@ parts in URLs are removed.
+
+ -- Julian Edwards <julian.edwards@xxxxxxxxxxxxx> Tue, 29 Apr 2008 14:25:00 +0100
+
+launchpad-buildd (45) dapper-cat; urgency=low
+
+ * slave.py: Stop setting BuilderStatus.WAITING in each failure
+ method, as this gives us a race where the builddmaster might
+ dispatch another build to us before we're done cleaning up.
+ * slave.py: Don't set BuildStatus.OK in buildComplete(), this is
+    now a generic "the build has ended, successfully or not" method.
+ * slave.py: Define a new buildOK() method that sets BuildStatus.OK.
+ * debian.py: When done cleaning, if the build isn't already marked
+ as failed, call buildOK, then call buildComplete unconditionally.
+ * The above changes should resolve https://launchpad.net/bugs/179466
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Tue, 08 Apr 2008 14:12:07 -0600
+
+launchpad-buildd (44) dapper-cat; urgency=low
+
+ * slave.py: Redefine "private" _unpackChroot() as "public" doUnpack(),
+ so we can use it from the build iteration control process.
+ * slave.py: Make the initiate method set a _chroottarfile private
+ variable for use by doUnpack(), rather than calling _unpackChroot().
+ * slave.py: Trigger the forked buildd process with an echo statement.
+ * debian.py: Add the INIT state to the DebianBuildState class.
+ * debian.py: Start the build process at INIT state instead of UNPACK.
+ * debian.py: Add iterate_INIT(), which just checks success of the
+ initial variable sanitisation checks, then hands off to doUnpack().
+ * debian.py: Adjust the failure return calls of the UNPACK and MOUNT
+ methods to chrootFail() instead of builderFail(), for correctness.
+ * The above changes should resolve https://launchpad.net/bugs/211974
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Mon, 07 Apr 2008 13:53:20 -0600
+
+launchpad-buildd (43) dapper-cat; urgency=low
+
+ * unpack-chroot: Move the ntpdate calls below the bunzip/exec bit,
+ so we don't run ntpdate twice when unzipping tarballs, which
+ happens on every single build on Xen hosts (like the PPA hosts).
+ * debian/control: We use adduser in postinst, depending on it helps.
+ * debian/control: Set myself as the Maintainer, since I'm in here.
+ * debian/control: Change our section from "misc" to "admin".
+ * sbuild{,-package}: Pass DEB_BUILD_OPTIONS="parallel=N" to dpkg.
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Thu, 24 Jan 2008 15:39:20 -0700
+
+launchpad-buildd (42) dapper-cat; urgency=low
+
+ * sbuild: using "eq" to evaluate strings instead of "==" is ever
+ so slightly less retarded (fixed the launchpad bug #184565)
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Tue, 22 Jan 2008 16:21:54 -0700
+
+launchpad-buildd (41) dapper-cat; urgency=low
+
+ * sbuild: If we've already marked a package as "installed" with a
+ valid version, don't overwrite that version with PROVIDED.
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Thu, 17 Jan 2008 10:39:26 -0700
+
+launchpad-buildd (40) dapper-cat; urgency=low
+
+ * sbuild: Don't allow versioned build-deps to be satisfied by provided
+ packages, but force them to go through the "upgrade/downgrade" tests.
+ * sbuild: Do --info and --contents on _all.deb packages as well, if
+ we're building arch:all packages.
+ * sbuild: Don't process ENV_OVERRIDE anymore, we only had an override
+ for one thing anyway (LC_ALL), and this code caused bug #87077.
+  * sbuild-package: Call sbuild with LC_ALL=C explicitly, to compensate.
+ * Makefile: clean up the makefile a bit to DTRT (as I expect it).
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Tue, 15 Jan 2008 16:51:08 -0700
+
+launchpad-buildd (39) unstable; urgency=low
+
+ * If we're fed an archive_purpose argument from the builddmaster,
+ we pass --purpose=$archive_purpose to sbuild, and if we get suite
+ from the builddmaster, we pass --dist=$suite to sbuild.
+ * Mangle sbuild to write out Suite: and Purpose: stanzas to our
+ CurrentlyBuilding file, according to command-line input.
+ * Now that we're no longer always feeding -dautobuild to sbuild,
+ fix up sbuild to always look for the chroot at chroot-autobuild
+ instead of the Debian Way of using chroot-$suite.
+ * If the config file contains an ntphost stanza, use that with
+ ntpdate to sync the system's clock before we unpack the chroot.
+ * Mangle update-config to add an ntphost stanza to the default
+ config, and to 's/-dautobuild //' from the sbuild arguments.
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Thu, 20 Dec 2007 01:51:49 -0700
+
+launchpad-buildd (38) unstable; urgency=high
+
+ * unpack-chroot: set $PATH rather than hardcoding paths to binaries
+ since bzip2 moved from /usr/bin to /bin in edgy and didn't bother with
+    compatibility symlinks.
+
+ -- James Troup <james.troup@xxxxxxxxxxxxx> Wed, 21 Nov 2007 17:08:36 +0000
+
+launchpad-buildd (37) dapper; urgency=high
+
+ * update-debian-chroot: Adam's LPIA support (i.e. overriding
+ architecture for chapt-get).
+ * debian/launchpad-buildd.cron.daily: fix run-on-line.
+ * debian/postinst: only create ~buildd/.sbuildrc if it doesn't exist.
+ This avoids the problem of upgrades of the launchpad-buildd package
+ resetting the architecture to i386 on lpia builders.
+
+ -- James Troup <james.troup@xxxxxxxxxxxxx> Wed, 14 Nov 2007 18:34:46 +0000
+
+launchpad-buildd (36) dapper; urgency=low
+
+ * changing override-sources to replace current sources.list with
+ the content sent by buildmaster instead of prepend. It will allow
+ us to cope more easily with SoyuzArchive implementation (PARTNER,
+ EMBARGOED, PPA)
+
+ -- Celso Providelo <cprov@xxxxxxxxxxxxx> Thu, 7 Aug 2007 14:10:26 -0300
+
+launchpad-buildd (35) unstable; urgency=low
+
+ * including previous code changes (32 & 33).
+
+ -- Celso Providelo <cprov@xxxxxxxxxxxxx> Thu, 23 May 2007 17:40:26 -0300
+
+launchpad-buildd (34) unstable; urgency=low
+
+  * add support for overriding the chroot /etc/apt/sources.list with the
+ content of builddmaster build arguments 'archives'.
+
+ -- Celso Providelo <cprov@xxxxxxxxxxxxx> Thu, 17 May 2007 15:12:26 -0300
+
+launchpad-buildd (33) unstable; urgency=low
+
+ * Mangle sbuild further to allow us to publish Martin's debug debs (ddeb)
+ to public_html/ddebs/ until such a time as soyuz can do this natively.
+ * Fix the auto-dep-wait regexes to allow for versions with ~ in them.
+ * Make cron.daily clean out translations and ddebs more than 1 week old.
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Sat, 30 Sep 2006 17:25:25 +1000
+
+launchpad-buildd (32) unstable; urgency=low
+
+ * We need to create /var/run/launchpad-buildd in our init script in the
+ case (such as in current dapper) where /var/run is on a tmpfs.
+ * Our init script shouldn't exit non-zero on "stop" if already stopped.
+ * Remove exc_info argument from our call to self.log in slave.py, which
+ clearly doesn't support that argument, so stop producing tracebacks.
+ * Reset self.builddependencies in our clean routine, so the variable
+ doesn't get leaked to the next build, causing me SERIOUS confusion.
+ * Tidy up translation handling a bit more to deal with old chroots (where
+ pkgstriptranslations won't dpkg-distaddfile for us), and to chmod the
+ translation dirs after the build, so apache can actually get at them.
+ * Add --no_save to our command line to avoid useless -shutdown.tap files.
+ * Make sure umount-chroot doesn't fail, even if there's nothing to umount.
+ * Append to the cron.daily cleaning to also occasionally clean up the apt
+ cache and /home/buildd/filecache-default, so we don't run out of disk.
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Fri, 17 Mar 2006 19:39:05 +1100
+
+launchpad-buildd (31) unstable; urgency=low
+
+ * Cherry-pick patch from Ryan's sbuild that outputs dpkg --purge output
+ line-by-line, instead of as one big blob, to make output on the web
+ UI a little bit more friendly for people following along at home.
+ * Install a cron.daily script (eww) to purge old build logs for now until
+ I have the time to learn how twisted's native log rotation works.
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Wed, 15 Mar 2006 17:23:26 +1100
+
+launchpad-buildd (30) unstable; urgency=low
+
+ * Move our translation publishing mojo so it happens BEFORE we move
+ all the files from debian/files out of the chroot, instead of after.
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Wed, 8 Mar 2006 18:50:49 +1100
+
+launchpad-buildd (29) unstable; urgency=low
+
+ * Use dpkg --print-installation-architecture in our postinst instead
+    of --print-architecture to avoid spewing spurious error messages.
+ * Remove the check for log_dir, since we call sbuild with --nolog,
+ and stop creating $HOME/logs in the user setup part of postinst.
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Tue, 7 Mar 2006 19:13:56 +1100
+
+launchpad-buildd (28) unstable; urgency=low
+
+ * Modify the protocol method ensurepresent to return additional
+ information about the target files lookup procedure. It helps to
+ debug intermittent Librarian errors.
+
+ -- Celso Providelo <celso.providelo@xxxxxxxxxxxxx> Mon, 06 Mar 2006 16:42:00 -0300
+
+launchpad-buildd (27) unstable; urgency=low
+
+ * Update the slave chroot tool to use getent so it works on the production
+ buildds
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Mon, 20 Feb 2006 12:57:45 +0000
+
+launchpad-buildd (26) unstable; urgency=low
+
+ * Update buildd-slave code to allow for GIVENBACK status returns,
+ matching the states under which sbuild used to do --auto-give-back.
+ * Port over sanae's build log regex parsing to allow us to do:
+ - Automatic dep-wait handling, based on sbuild's logs of apt-get.
+ - Automatic give-backs for a few corner cases (like kernel bugs).
+ * Make sbuild stop dying if we have no sendmail installed, since we
+ don't really want it sending mail in the launchpad world anyway.
+ * Call sbuild and apt with "LANG=C", so we don't have to worry about
+ locales matching between the base system and the autobuild chroots.
+ * Clear up confusion in build states with 's/BUILDFAIL/PACKAGEFAIL/'
+
+ -- Adam Conrad <adconrad@xxxxxxxxxx> Mon, 27 Feb 2006 14:00:08 +1100
+
+launchpad-buildd (25) unstable; urgency=low
+
+ * Update sbuild.conf to current yumminess.
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Fri, 3 Feb 2006 19:22:01 +0000
+
+launchpad-buildd (24) unstable; urgency=low
+
+ * Add /var/cache/apt/archives to the buildd chroots when mounting
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Fri, 3 Feb 2006 00:30:07 +0000
+
+launchpad-buildd (23) unstable; urgency=low
+
+ * And make apply-ogre-model use $SUDO, yay
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Fri, 27 Jan 2006 13:59:10 +0000
+
+launchpad-buildd (22) unstable; urgency=low
+
+ * Fix typo in apply-ogre-model (missing space)
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Fri, 27 Jan 2006 13:55:12 +0000
+
+launchpad-buildd (21) unstable; urgency=low
+
+ * Fix the .extend call for the --comp argument to pass it as one argument
+ instead of as - - c o m p = m a i n (which kinda doesn't work)
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Fri, 27 Jan 2006 13:45:34 +0000
+
+launchpad-buildd (20) unstable; urgency=low
+
+ * Update sbuild to the latest sbuild from adam.
+ * Make sure we pass --archive=ubuntu
+ * Make sure we pass --comp=<the component we're building for>
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Thu, 26 Jan 2006 17:20:49 +0000
+
+launchpad-buildd (19) unstable; urgency=low
+
+ * Add ogre support to the slave chroot tool
+ * Make sure the chroot tool ensures localhost in /etc/hosts in the chroot
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Wed, 25 Jan 2006 12:29:04 +0000
+
+launchpad-buildd (18) unstable; urgency=low
+
+ * Remove sbuildrc.tmp dangleberry in postinst
+ * Add linux32 to set of depends so that hppa, sparc and powerpc can build
+ * Make hppa, sparc, powerpc use linux32 to invoke the sbuild binary
+ * Add --resolve-deps to debootstrap invocation
+ * Make chroot tool use /bin/su - rather than /bin/sh for chrooting. shiny
+ (apparently)
+ * Add a bunch of deps infinity spotted.
+ * Make sure we chown the chroot tarball to the calling user after packing
+ it up.
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Wed, 9 Nov 2005 17:37:37 -0500
+
+launchpad-buildd (17) unstable; urgency=low
+
+ * Changed default UID/GID to match the ldap buildd UID/GID
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Wed, 9 Nov 2005 17:13:22 -0500
+
+launchpad-buildd (16) unstable; urgency=low
+
+ * Change the XMLRPC method 'ensure' to be 'ensurepresent'
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Wed, 5 Oct 2005 15:50:58 +0100
+
+launchpad-buildd (15) unstable; urgency=low
+
+ * Fix it so getting a logtail when less than 2k is available will work.
+ * Actually install apply-ogre-model
+ * Also spot arch_indep properly
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Mon, 3 Oct 2005 14:34:55 +0100
+
+launchpad-buildd (14) unstable; urgency=low
+
+ * Slight bug in slave.py meant missing .emptyLog() attribute. Fixed.
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Mon, 3 Oct 2005 14:21:16 +0100
+
+launchpad-buildd (13) unstable; urgency=low
+
+ * Fix a syntax error in the postinst
+ * Oh, and actually include the buildd config upgrader
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Mon, 3 Oct 2005 12:17:50 +0100
+
+launchpad-buildd (12) unstable; urgency=low
+
+ * Implement V1.0new protocol.
+ * Add in OGRE support
+ * Add in archindep support
+ * If upgrading from < v12, will remove -A from sbuildargs and add in
+ a default ogrepath to any buildd configs found in /etc/launchpad-buildd
+ * Prevent launchpad-buildd init from starting ~ files
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Sun, 2 Oct 2005 23:20:08 +0100
+
+launchpad-buildd (11) unstable; urgency=low
+
+ * Quieten down the slave scripts and make them prettier for the logs.
+ * make unpack-chroot uncompress the chroot and keep it uncompressed if
+ possible. This fixes bug#2699
+ * Make the slave run the process reaper run even if the build failed.
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Fri, 30 Sep 2005 00:24:45 +0100
+
+launchpad-buildd (10) unstable; urgency=low
+
+ * Make sure /etc/source-dependencies is present in the postinst.
+ (just need to be touched)
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Wed, 28 Sep 2005 22:02:26 +0100
+
+launchpad-buildd (9) unstable; urgency=low
+
+ * Implement /filecache/XXX urls in the slave to permit larger file transfer
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Tue, 27 Sep 2005 13:16:52 +0100
+
+launchpad-buildd (8) unstable; urgency=low
+
+ * spiv's crappy spawnFDs implementation needs an int not a file handle
+ and can't cope with converting one to the other :-(
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Tue, 27 Sep 2005 02:18:05 +0100
+
+launchpad-buildd (7) unstable; urgency=low
+
+ * Made sure the slave puts /dev/null on the subprocess stdin.
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Tue, 27 Sep 2005 01:52:50 +0100
+
+launchpad-buildd (6) unstable; urgency=low
+
+ * Removed slavechroot.py from installed set.
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Thu, 15 Sep 2005 11:39:25 +0100
+
+launchpad-buildd (5) unstable; urgency=low
+
+ * Add slave tool and example chroot configuration
+ * Added debootstrap and dpkg-dev to the dependencies
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Fri, 9 Sep 2005 16:38:22 +0100
+
+launchpad-buildd (4) unstable; urgency=low
+
+ * Add sbuild.conf which was previously missing
+ * Fix up abort protocol and various other bits in the slave
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Fri, 9 Sep 2005 14:24:31 +0100
+
+launchpad-buildd (3) unstable; urgency=low
+
+ * Modified postinst to make sure ccache and log dirs are created
+ even if the user already exists.
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Wed, 7 Sep 2005 15:50:36 +0100
+
+launchpad-buildd (2) unstable; urgency=low
+
+ * Fixes to postinst to make sure ccache and log dirs are created if missing.
+ * Added README to explain how to build the package.
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Thu, 1 Sep 2005 10:46:08 +0100
+
+launchpad-buildd (1) unstable; urgency=low
+
+ * Initial version
+
+ -- Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx> Mon, 13 Jun 2005 11:08:38 +0100
+
=== added file 'lib/canonical/buildd/debian/compat'
--- lib/canonical/buildd/debian/compat 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/debian/compat 2011-11-17 19:45:28 +0000
@@ -0,0 +1,1 @@
+5
=== added file 'lib/canonical/buildd/debian/control'
--- lib/canonical/buildd/debian/control 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/debian/control 2011-11-17 19:45:28 +0000
@@ -0,0 +1,17 @@
+Source: launchpad-buildd
+Section: admin
+Priority: extra
+Maintainer: Adam Conrad <adconrad@xxxxxxxxxx>
+Standards-Version: 3.5.9
+Build-Depends-Indep: debhelper (>= 5)
+
+Package: launchpad-buildd
+Section: misc
+Architecture: all
+Depends: python-twisted-core, python-twisted-web, debootstrap, dpkg-dev, linux32, file, bzip2, sudo, ntpdate, adduser, apt-transport-https, lsb-release, apache2, bzr-builder (>=0.5), ${misc:Depends}
+Description: Launchpad buildd slave
+ This is the launchpad buildd slave package. It contains everything needed to
+ get a launchpad buildd going apart from the database manipulation required to
+ tell launchpad about the slave instance. If you are creating more than one
+ slave instance on the same computer, be sure to give them independent configs
+ and independent filecaches etc.
=== added file 'lib/canonical/buildd/debian/launchpad-buildd.cron.daily'
--- lib/canonical/buildd/debian/launchpad-buildd.cron.daily 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/debian/launchpad-buildd.cron.daily 2011-11-17 19:45:28 +0000
@@ -0,0 +1,10 @@
+#!/bin/sh
+#
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+find /var/log/launchpad-buildd/ -mindepth 1 -mtime +2 -print0 | xargs -r -0 rm
+find /home/buildd/public_html/ddebs/ -maxdepth 1 -mindepth 1 -mtime +7 -print0 | xargs -r -0 rm -r
+find /home/buildd/public_html/translations/ -maxdepth 1 -mindepth 1 -mtime +7 -print0 | xargs -r -0 rm -r
+find /home/buildd/filecache-default/ -mindepth 1 -mtime +2 -print0 | xargs -r -0 rm
+[ `date +%w` = "1" ] && apt-get clean || true
=== added file 'lib/canonical/buildd/debian/launchpad-buildd.examples'
--- lib/canonical/buildd/debian/launchpad-buildd.examples 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/debian/launchpad-buildd.examples 2011-11-17 19:45:28 +0000
@@ -0,0 +1,1 @@
+buildd-slave-example.conf
=== added file 'lib/canonical/buildd/debian/launchpad-buildd.init'
--- lib/canonical/buildd/debian/launchpad-buildd.init 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/debian/launchpad-buildd.init 2011-11-17 19:45:28 +0000
@@ -0,0 +1,98 @@
+#!/bin/sh
+#
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+#
+# launchpad-buildd
+# This file is used to start and stop launchpad buildds
+
+### BEGIN INIT INFO
+# Provides: launchpad_buildd
+# Required-Start: $local_fs $network $syslog $time
+# Required-Stop: $local_fs $network $syslog $time
+# Default-Start: 2 3 4 5
+# Default-Stop: 0 1 6
+# X-Interactive: false
+# Short-Description: Start/stop launchpad buildds
+### END INIT INFO
+
+set -e
+
+PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
+DESC="launchpad build slaves"
+
+TACFILE="/usr/share/launchpad-buildd/buildd-slave.tac"
+
+PIDROOT="/var/run/launchpad-buildd"
+LOGROOT="/var/log/launchpad-buildd"
+CONFROOT="/etc/launchpad-buildd"
+
+# Gracefully exit if the package has been removed.
+test -e $TACFILE || exit 0
+
+#
+# Function that starts a buildd slave
+#
+d_start() {
+ CONF=$1
+ PIDFILE="$PIDROOT"/"$CONF".pid
+ LOGFILE="$LOGROOT"/"$CONF".log
+ # prior to karmic, twisted didn't support --umask, and defaulted it well.
+ # we need it to be 022, not 077.
+ case $(lsb_release -sc) in
+ [a-j]*) UMASK="";;
+ [k-z]*) UMASK="--umask 022";;
+ esac
+ su - buildd -c "BUILDD_SLAVE_CONFIG=$CONFROOT/$CONF PYTHONPATH=/usr/share/launchpad-buildd twistd --no_save --pidfile $PIDFILE --python $TACFILE --logfile $LOGFILE $UMASK"
+}
+
+#
+# Function that stops a buildd slave
+#
+d_stop() {
+ CONF=$1
+ PIDFILE="$PIDROOT"/"$CONF".pid
+ test -r $PIDFILE && kill -TERM $(cat $PIDFILE) || true
+}
+
+CONFS=$(cd $CONFROOT; ls|grep -v "^-"|grep -v "~$")
+
+case "$1" in
+ start)
+ echo -n "Starting $DESC:"
+ install -m 755 -o buildd -g buildd -d $PIDROOT
+
+ # Create any missing directories and chown them appropriately
+ install -d -o buildd -g buildd /home/buildd/filecache-default
+
+ for conf in $CONFS; do
+ echo -n " $conf"
+ d_start $conf
+ done
+ echo "."
+ ;;
+ stop)
+ echo -n "Stopping $DESC:"
+ for conf in $CONFS; do
+ echo -n " $conf"
+ d_stop $conf
+ done
+ echo "."
+ ;;
+ restart|force-reload)
+ #
+ # If the "reload" option is implemented, move the "force-reload"
+ # option to the "reload" entry above. If not, "force-reload" is
+ # just the same as "restart".
+ #
+ $0 stop
+ sleep 1
+ $0 start
+ ;;
+ *)
+ echo "Usage: $SCRIPTNAME {start|stop|restart|force-reload}" >&2
+ exit 1
+ ;;
+esac
+
+exit 0
=== added file 'lib/canonical/buildd/debian/postinst'
--- lib/canonical/buildd/debian/postinst 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/debian/postinst 2011-11-17 19:45:28 +0000
@@ -0,0 +1,101 @@
+#!/bin/sh
+#
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# Post install script
+
set -e
set -u

# Account settings; overridable from the environment (e.g. for testing).
USER=${BUILDD_USER:-buildd}
BUILDDGID=${BUILDD_GID:-2501}
BUILDDUID=${BUILDD_UID:-2001}

# Write the default slave configuration (listening on port 8221) into
# /etc/launchpad-buildd/default.
make_buildd()
{
	buildd-genconfig --name=default --host=0.0.0.0 --port=8221 > \
		/etc/launchpad-buildd/default
	echo Default buildd created.
}
+
case "$1" in
  configure)
	# Create the buildd group/user on first configure; getent guards
	# make this idempotent on upgrades.
	getent group buildd >/dev/null 2>&1 ||
		addgroup --gid $BUILDDGID buildd

	getent passwd buildd >/dev/null 2>&1 ||
		adduser --ingroup buildd --disabled-login --gecos 'Buildd user' \
			--uid $BUILDDUID ${USER}

	# Old sudo (< 1.7) or no #includedir support: append directly to
	# /etc/sudoers; otherwise install a sudoers.d fragment.
	SUDO_VERSION=$(sudo -V | sed -n '/^Sudo version/s/.* //p')
	if dpkg --compare-versions $SUDO_VERSION lt 1.7 ||
	   ! grep -q '^#includedir /etc/sudoers.d' /etc/sudoers; then
		grep -q ^${USER} /etc/sudoers ||
			echo "${USER} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers
	else
		if ! sudo -l -U ${USER} | grep -q '(ALL) NOPASSWD: ALL' ||
		   ! sudo -l -U ${USER} | grep -q umask_override; then
			cat << EOF >> /etc/sudoers.d/buildd
Defaults:buildd umask_override
Defaults>buildd umask_override

${USER} ALL=(ALL) NOPASSWD: ALL
%buildd ALL=(buildd) NOPASSWD: ALL, (root) NOPASSWD: /bin/su - buildd
%lpbuildd ALL=(ALL) NOPASSWD: ALL
EOF
		fi
		# sudoers fragments must be root-owned mode 0440 or sudo
		# refuses to read them.
		chmod 440 /etc/sudoers.d/buildd
		chown root:root /etc/sudoers.d/buildd
	fi

	install -d -o${USER} -m0755 /home/${USER}

	# Create ~buildd/.sbuildrc if needed, filling in the architecture
	# and hostname placeholders from the shipped template.
	if [ ! -f /home/${USER}/.sbuildrc ]; then
		cp /usr/share/launchpad-buildd/sbuildrc \
			/home/${USER}/sbuildrc.tmp
		cd /home/${USER}
		sed -e's/@ARCHTAG@/'$(dpkg --print-architecture)'/g' \
		    -e's/@FQDN@/'$(hostname --fqdn)'/g' \
		    <sbuildrc.tmp > .sbuildrc
		chown $USER:buildd .sbuildrc
		chmod 0644 .sbuildrc
		rm sbuildrc.tmp
	fi

	# Prepare a default buildd...
	test -e /etc/launchpad-buildd/default || make_buildd

	# Create any missing directories and chown them appropriately
	test -d /home/${USER}/filecache-default || mkdir /home/${USER}/filecache-default
	chown $USER:buildd /home/${USER}/filecache-default

	chown $USER:buildd /var/log/launchpad-buildd /var/run/launchpad-buildd

	# Check for the presence of the /etc/source-dependencies file
	# which sbuild will rant about the absence of...
	test -e /etc/source-dependencies || touch /etc/source-dependencies

	# Now check if we're upgrading a previous version...
	# ($2 is the previously installed version on upgrade, empty on
	# first install; run every config through upgrade-config.)
	if [ "x$2" != "x" ]; then
		for CONFIG in $(ls /etc/launchpad-buildd/* \
				| grep -v "^-" | grep -v "~$"); do
			/usr/share/launchpad-buildd/upgrade-config $2 $CONFIG
		done
	fi

	;;
  abort-upgrade|abort-remove|abort-deconfigure)

	;;

  *)
	echo "postinst called with unknown argument \`$1'" >&2
	exit 1
	;;
esac

#DEBHELPER#

exit 0
=== added file 'lib/canonical/buildd/debian/rules'
--- lib/canonical/buildd/debian/rules 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/debian/rules 2011-11-17 19:45:28 +0000
@@ -0,0 +1,96 @@
+#!/usr/bin/make -f
+#
+# Copyright 2009, 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+export DH_OPTIONS
+
+# This is an incomplete debian rules file for making the launchpad-buildd deb
+# Only ever invoke this as debian/rules package, which will build the source
+# package in the parent directory, after copying in the files that live above
+# this directory, so that the source package is valid.
+# after that, build the source package found in the parent directory.
+
# Staging directory for the package payload, relative to debian/.
target = debian/launchpad-buildd
topdir = ../../..

buildd = $(topdir)/lib/canonical/buildd

targetshare = $(target)/usr/share/launchpad-buildd
pytarget = $(targetshare)/canonical/buildd

# Python slave modules and helper scripts shipped in the package.
pyfiles = debian.py slave.py binarypackage.py utils.py __init__.py \
	sourcepackagerecipe.py translationtemplates.py
slavebins = unpack-chroot mount-chroot update-debian-chroot sbuild-package \
	scan-for-processes umount-chroot remove-build override-sources-list \
	buildrecipe generate-translation-templates

BUILDDUID=65500
BUILDDGID=65500

install: DH_OPTIONS=-plaunchpad-buildd
install:
	dh_testdir
	dh_clean
	dh_testroot
	dh_installdirs usr/bin etc usr/share/launchpad-buildd/slavebin \
		usr/share/launchpad-buildd/canonical/buildd \
		var/run/launchpad-buildd var/log/launchpad-buildd \
		etc/launchpad-buildd \
		usr/share/launchpad-buildd/canonical/launchpad/daemons \
		usr/share/doc/launchpad-buildd
	dh_installexamples

	# Do installs here
	# Empty __init__.py files make the copied trees importable packages.
	touch $(pytarget)/../launchpad/__init__.py
	touch $(pytarget)/../launchpad/daemons/__init__.py
	install -m644 buildd-slave.tac $(targetshare)/buildd-slave.tac
	cp -r pottery $(pytarget)
	for pyfile in $(pyfiles); do \
		install -m644 ./$$pyfile $(pytarget)/$$pyfile; \
	done
	for slavebin in $(slavebins); do \
		install -m755 ./$$slavebin $(targetshare)/slavebin/$$slavebin; \
	done
	install -m755 ./sbuild $(target)/usr/bin/sbuild
	touch $(targetshare)/canonical/__init__.py
	install -m644 template-buildd-slave.conf $(targetshare)/template-buildd-slave.conf
	install -m755 buildd-config.py $(target)/usr/bin/buildd-genconfig
	install -m644 sbuildrc $(targetshare)/sbuildrc
	install -m644 sbuild.conf $(target)/etc/sbuild.conf
	install -m755 debian/upgrade-config $(targetshare)/upgrade-config
	install -m755 check-implicit-pointer-functions $(target)/usr/bin/check-implicit-pointer-functions
	# Okay, that's installed all the slave-related files


binary-arch:
	@echo No arch-specific binaries to make

binary-indep: DH_OPTIONS=-plaunchpad-buildd
binary-indep: install
	dh_installdocs
	dh_installchangelogs
	dh_installinit
	dh_installcron
	dh_strip
	dh_compress
	dh_fixperms
	dh_installdeb
	dh_gencontrol
	dh_md5sums
	dh_builddeb

binary: binary-indep

.PHONY: binary binary-indep binary-arch install clean build

clean:
	dh_clean

prepare:

# Build the source package in the parent directory (see header comment).
package: prepare
	debuild -uc -us -S

build:
	@echo Mmm builders
=== added directory 'lib/canonical/buildd/debian/source'
=== added file 'lib/canonical/buildd/debian/source/format'
--- lib/canonical/buildd/debian/source/format 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/debian/source/format 2011-11-17 19:45:28 +0000
@@ -0,0 +1,1 @@
+1.0
=== added file 'lib/canonical/buildd/debian/upgrade-config'
--- lib/canonical/buildd/debian/upgrade-config 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/debian/upgrade-config 2011-11-17 19:45:28 +0000
@@ -0,0 +1,122 @@
+#!/usr/bin/python
+#
+# Copyright 2009, 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Upgrade a launchpad-buildd configuration file."""
+
+import sys
+import os
+import subprocess
+
+(old_version, conf_file) = sys.argv[1:]
+
+def upgrade_to_12():
+ print "Upgrading %s to version 12" % conf_file
+ subprocess.call(["mv", conf_file, conf_file+"-prev12~"])
+ in_file = open(conf_file+"-prev12~", "r")
+ out_file = open(conf_file, "w")
+ for line in in_file:
+ if line.startswith("[debianmanager]"):
+ line += "ogrepath = /usr/share/launchpad-buildd/slavebin/apply-ogre-model\n"
+ if line.startswith("sbuildargs"):
+ line = line.replace("-A", "")
+ out_file.write(line)
+ in_file.close()
+ out_file.close()
+
+def upgrade_to_34():
+ print "Upgrading %s to version 34" % conf_file
+ subprocess.call(["mv", conf_file, conf_file+"-prev34~"])
+ in_file = open(conf_file+"-prev34~", "r")
+ out_file = open(conf_file, "w")
+ for line in in_file:
+ if line.startswith("[debianmanager]"):
+ line += "sourcespath = /usr/share/launchpad-buildd/slavebin/override-sources-list\n"
+ out_file.write(line)
+ in_file.close()
+ out_file.close()
+
+def upgrade_to_39():
+ print "Upgrading %s to version 39" % conf_file
+ subprocess.call(["mv", conf_file, conf_file+"-prev39~"])
+ in_file = open(conf_file+"-prev39~", "r")
+ out_file = open(conf_file, "w")
+ for line in in_file:
+ if line.startswith("sbuildargs"):
+ line = line.replace("-dautobuild ","")
+ if line.startswith("[slave]"):
+ line += "ntphost = ntp.buildd\n"
+ out_file.write(line)
+ in_file.close()
+ out_file.close()
+
+def upgrade_to_57():
+ print "Upgrading %s to version 57" % conf_file
+ subprocess.call(["mv", conf_file, conf_file+"-prev57~"])
+ in_file = open(conf_file+"-prev57~", "r")
+ out_file = open(conf_file, "w")
+ # We want to move all the sbuild lines to a new
+ # 'binarypackagemanager' section at the end.
+ binarypackage_lines = []
+ for line in in_file:
+ if line.startswith("sbuild"):
+ binarypackage_lines.append(line)
+ else:
+ out_file.write(line)
+ out_file.write('[binarypackagemanager]\n')
+ for line in binarypackage_lines:
+ out_file.write(line)
+ in_file.close()
+ out_file.close()
+
+def upgrade_to_58():
+ print "Upgrading %s to version 58" % conf_file
+ subprocess.call(["mv", conf_file, conf_file+"-prev58~"])
+ in_file = open(conf_file+"-prev58~", "r")
+ out_file = open(conf_file, "w")
+ out_file.write(in_file.read())
+ out_file.write(
+ '\n[sourcepackagerecipemanager]\n'
+ 'buildrecipepath = /usr/share/launchpad-buildd'
+ '/slavebin/buildrecipe\n')
+
+def upgrade_to_59():
+ print "Upgrading %s to version 59" % conf_file
+ subprocess.call(["mv", conf_file, conf_file+"-prev59~"])
+ in_file = open(conf_file+"-prev59~", "r")
+ out_file = open(conf_file, "w")
+ out_file.write(in_file.read())
+ out_file.write(
+ '\n[translationtemplatesmanager]\n'
+ 'generatepath = /usr/share/launchpad-buildd/slavebin/generate-translation-templates\n'
+ 'resultarchive = translation-templates.tar.gz\n')
+
+def upgrade_to_63():
+ print "Upgrading %s to version 63" % conf_file
+ subprocess.call(["mv", conf_file, conf_file+"-prev63~"])
+ in_file = open(conf_file+"-prev63~", "r")
+ out_file = open(conf_file, "w")
+ for line in in_file:
+ if not line.startswith('ogrepath'):
+ out_file.write(line)
+
+
if __name__ == "__main__":
    # Strip a packaging suffix such as "63~ppa1" down to "63"; a
    # leading "~" (find() == 0) is deliberately left alone.
    tilde = old_version.find("~")
    if tilde > 0:
        old_version = old_version[:tilde]
    version = int(old_version)
    # Run every upgrade step the installed version predates, in order.
    for threshold, upgrader in [
            (12, upgrade_to_12),
            (34, upgrade_to_34),
            (39, upgrade_to_39),
            (57, upgrade_to_57),
            (58, upgrade_to_58),
            (59, upgrade_to_59),
            (63, upgrade_to_63),
            ]:
        if version < threshold:
            upgrader()
+
=== added file 'lib/canonical/buildd/generate-translation-templates'
--- lib/canonical/buildd/generate-translation-templates 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/generate-translation-templates 2011-11-17 19:45:28 +0000
@@ -0,0 +1,66 @@
+#!/bin/sh
+#
+# Copyright 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# Buildd Slave tool to generate translation templates. Boilerplate code
+# copied from sbuild-package.
+
+# Expects build id as arg 1.
+# Expects branch url as arg 2.
+# Expects output tarball name as arg 3.
+
+# Must run as user with password-less sudo ability.
+
exec 2>&1

export LANG=C LC_ALL=C

# Absolute tool paths so we do not depend on the caller's PATH.
CHMOD=/bin/chmod
CHROOT=/usr/sbin/chroot
CP=/bin/cp
INSTALL=/usr/bin/install
MKDIR=/bin/mkdir
SU=/bin/su
SUDO=/usr/bin/sudo
TOUCH=/usr/bin/touch

# Positional arguments (see header comment above).
BUILDID=$1
BRANCH_URL=$2
RESULT_NAME=$3

BUILDD_HOME=/usr/share/launchpad-buildd
SLAVEBIN=$BUILDD_HOME/slavebin
BUILD_CHROOT="$HOME/build-$BUILDID/chroot-autobuild"
USER=$(whoami)

# Debug output.
echo "Running as $USER for build $BUILDID on $BRANCH_URL."
echo "Results expected in $RESULT_NAME."

BUILDD_PACKAGE=canonical/buildd
POTTERY=$BUILDD_PACKAGE/pottery
# The script should be smarter about detecting the python version.
PYMODULES=/usr/lib/pymodules/python2.7
echo -n "Default Python in the chroot is: "
$BUILD_CHROOT/usr/bin/python --version

GENERATE_SCRIPT=$PYMODULES/$POTTERY/generate_translation_templates.py

# Run a command, reporting but deliberately not aborting on failure.
debug_exec() {
	echo "Executing '$1'..."
	$1 || echo "Got error $? from '$1'."
}

# Copy pottery files to chroot.
debug_exec "$SUDO $MKDIR -vp $BUILD_CHROOT$PYMODULES/$BUILDD_PACKAGE"
debug_exec "$SUDO $TOUCH $BUILD_CHROOT$PYMODULES/canonical/__init__.py"
debug_exec "$SUDO $TOUCH $BUILD_CHROOT$PYMODULES/canonical/buildd/__init__.py"
debug_exec "$SUDO $CP -vr $BUILDD_HOME/$POTTERY $BUILD_CHROOT$PYMODULES/$BUILDD_PACKAGE"
debug_exec "$SUDO $CHMOD -v -R go+rX $BUILD_CHROOT$PYMODULES/canonical"
debug_exec "$SUDO $CHMOD -v 755 $BUILD_CHROOT$GENERATE_SCRIPT"

# Enter chroot, switch back to unprivileged user, execute the generate script.
$SUDO $CHROOT $BUILD_CHROOT \
	$SU - $USER \
	-c "PYTHONPATH=$PYMODULES $GENERATE_SCRIPT $BRANCH_URL $RESULT_NAME"
=== added directory 'lib/canonical/buildd/launchpad-files'
=== added file 'lib/canonical/buildd/mount-chroot'
--- lib/canonical/buildd/mount-chroot 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/mount-chroot 2011-11-17 19:45:28 +0000
@@ -0,0 +1,25 @@
+#!/bin/sh
+#
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# Buildd Slave tool to mount a chroot
+
+# Expects the build id as arg 1; the build lives in ~/build-<id>.
+
+# Needs SUDO to be set to a sudo instance for passwordless access
+
# Absolute sudo path; must grant passwordless access to the buildd user.
SUDO=/usr/bin/sudo
BUILDID="$1"

set -e

exec 2>&1

echo "Mounting chroot for build $BUILDID"

# Mount the kernel virtual filesystems the build needs inside the chroot.
$SUDO mount -t proc none "$HOME/build-$BUILDID/chroot-autobuild/proc"
$SUDO mount -t devpts none "$HOME/build-$BUILDID/chroot-autobuild/dev/pts"
$SUDO mount -t sysfs none "$HOME/build-$BUILDID/chroot-autobuild/sys"
$SUDO mount -t tmpfs none "$HOME/build-$BUILDID/chroot-autobuild/dev/shm"
# Give the chroot working name resolution and host identity.
# Fix: quote the destination path, consistent with the mount targets above.
$SUDO cp /etc/hosts /etc/hostname /etc/resolv.conf "$HOME/build-$BUILDID/chroot-autobuild/etc/"
=== added file 'lib/canonical/buildd/override-sources-list'
--- lib/canonical/buildd/override-sources-list 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/override-sources-list 2011-11-17 19:45:28 +0000
@@ -0,0 +1,31 @@
+#!/bin/sh
+#
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# Buildd Slave tool to override sources.list in the chroot with a list of
+# archives
+
+# Expects build id as arg 1
+# Expects sources.list lines as subsequent args
+
+# Needs SUDO to be set to a sudo instance for passwordless access
+
set -e
exec 2>&1

SUDO=/usr/bin/sudo

# First argument is the build id; the rest are sources.list lines.
BUILDID="$1"
shift

cd "$HOME/build-$BUILDID/chroot-autobuild/etc/apt"

echo "Overriding sources.list in build-$BUILDID"

# Write the new list via a temp file and rename, so a partially
# written sources.list is never visible.
$SUDO rm -f sources.list.new
for archive in "$@"; do
	echo "$archive"
done | $SUDO tee sources.list.new >/dev/null
$SUDO mv sources.list.new sources.list
=== added directory 'lib/canonical/buildd/pottery'
=== added file 'lib/canonical/buildd/pottery/__init__.py'
=== added file 'lib/canonical/buildd/pottery/generate_translation_templates.py'
--- lib/canonical/buildd/pottery/generate_translation_templates.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/pottery/generate_translation_templates.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,114 @@
+#! /usr/bin/python
+# Copyright 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+__metaclass__ = type
+
+import os.path
+import sys
+import tarfile
+
+import logging
+
+from bzrlib.branch import Branch
+from bzrlib.export import export
+
+from canonical.buildd.pottery import intltool
+
+
class GenerateTranslationTemplates:
    """Script to generate translation templates from a branch."""

    def __init__(self, branch_spec, result_name, work_dir, log_file=None):
        """Prepare to generate templates for a branch.

        :param branch_spec: a branch URL or the path of a local branch.
            URLs are recognized by the occurrence of ':'; a URL is
            checked out into the working directory.
        :param result_name: name of the result tarball; should end in
            .tar.gz.
        :param work_dir: an existing directory to work in.
        :param log_file: file-like object to log to; defaults to stderr.
        """
        self.work_dir = work_dir
        self.branch_spec = branch_spec
        self.result_name = result_name
        self.logger = self._setupLogger(log_file)

    def _setupLogger(self, log_file):
        """Sets up and returns a logger."""
        stream = sys.stderr if log_file is None else log_file
        logger = logging.getLogger("generate-templates")
        logger.setLevel(logging.DEBUG)
        handler = logging.StreamHandler(stream)
        handler.setLevel(logging.DEBUG)
        logger.addHandler(handler)
        return logger

    def _getBranch(self):
        """Set `self.branch_dir`, and check out branch if needed."""
        if ':' not in self.branch_spec:
            # A local filesystem path: use the branch in place.
            self.logger.info("Using local branch %s..." % self.branch_spec)
            self.branch_dir = self.branch_spec
            return
        # A branch URL: export a copy under the working directory.
        self.branch_dir = os.path.join(self.work_dir, 'source-tree')
        self.logger.info("Getting remote branch %s..." % self.branch_spec)
        self._checkout(self.branch_spec)

    def _checkout(self, branch_url):
        """Check out a source branch to generate from.

        The checkout goes to the location given by `self.branch_dir`.
        """
        self.logger.info("Opening branch %s..." % branch_url)
        source_branch = Branch.open(branch_url)
        self.logger.info("Getting branch revision tree...")
        revision_tree = source_branch.basis_tree()
        self.logger.info("Exporting branch to %s..." % self.branch_dir)
        export(revision_tree, self.branch_dir)
        self.logger.info("Exporting branch done.")

    def _makeTarball(self, files):
        """Put the given files into a tarball in the working directory."""
        tarname = os.path.join(self.work_dir, self.result_name)
        self.logger.info("Making tarball with templates in %s..." % tarname)
        tarball = tarfile.open(tarname, 'w|gz')
        for path in files:
            if path.endswith('/'):
                # Skip directory entries.
                continue
            full_path = os.path.join(self.branch_dir, path)
            self.logger.info("Adding template %s..." % full_path)
            tarball.add(full_path, path)
        tarball.close()
        self.logger.info("Tarball generated.")

    def generate(self):
        """Do It. Generate templates."""
        self.logger.info("Generating templates for %s." % self.branch_spec)
        self._getBranch()
        pots = intltool.generate_pots(self.branch_dir)
        self.logger.info("Generated %d templates." % len(pots))
        if pots:
            self._makeTarball(pots)
        return 0
+
+
+if __name__ == '__main__':
+ if len(sys.argv) < 3:
+ print "Usage: %s branch resultname [workdir]" % sys.argv[0]
+ print " 'branch' is a branch URL or directory."
+ print " 'resultname' is the name of the result tarball."
+ print " 'workdir' is a directory, defaults to HOME."
+ sys.exit(1)
+ if len(sys.argv) == 4:
+ workdir = sys.argv[3]
+ else:
+ workdir = os.environ['HOME']
+ script = GenerateTranslationTemplates(
+ sys.argv[1], sys.argv[2], workdir)
+ sys.exit(script.generate())
=== added file 'lib/canonical/buildd/pottery/intltool.py'
--- lib/canonical/buildd/pottery/intltool.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/pottery/intltool.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,342 @@
+# Copyright 2009-2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Functions to build PO templates on the build slave."""
+
+__metaclass__ = type
+__all__ = [
+ 'check_potfiles_in',
+ 'generate_pot',
+ 'generate_pots',
+ 'get_translation_domain',
+ 'find_intltool_dirs',
+ 'find_potfiles_in',
+ ]
+
+from contextlib import contextmanager
+import errno
+import os.path
+import re
+from subprocess import call
+
+
def find_potfiles_in():
    """Search the current directory and its subdirectories for POTFILES.in.

    :returns: A list of names of directories that contain a file
        POTFILES.in.
    """
    return [
        dirpath
        for dirpath, dirnames, filenames in os.walk(".")
        if "POTFILES.in" in filenames]
+
+
+def check_potfiles_in(path):
+ """Check if the files listed in the POTFILES.in file exist.
+
+ Running 'intltool-update -m' will perform this check and also take a
+ possible POTFILES.skip into account. It stores details about 'missing'
+ (files that should be in POTFILES.in) and 'notexist'ing files (files
+ that are listed in POTFILES.in but don't exist) in files which are
+ named accordingly. These files are removed before the run.
+
+ We don't care about files missing from POTFILES.in but want to know if
+ all listed files exist. The presence of the 'notexist' file tells us
+ that.
+
+ :param path: The directory where POTFILES.in resides.
+ :returns: False if the directory does not exist, if an error occurred
+ when executing intltool-update or if files are missing from
+ POTFILES.in. True if all went fine and all files in POTFILES.in
+ actually exist.
+ """
+ current_path = os.getcwd()
+
+ try:
+ os.chdir(path)
+ except OSError, e:
+ # Abort nicely if the directory does not exist.
+ if e.errno == errno.ENOENT:
+ return False
+ raise
+ try:
+ # Remove stale files from a previous run of intltool-update -m.
+ for unlink_name in ['missing', 'notexist']:
+ try:
+ os.unlink(unlink_name)
+ except OSError, e:
+ # It's ok if the files are missing.
+ if e.errno != errno.ENOENT:
+ raise
+ devnull = open("/dev/null", "w")
+ returncode = call(
+ ["/usr/bin/intltool-update", "-m"],
+ stdout=devnull, stderr=devnull)
+ devnull.close()
+ finally:
+ os.chdir(current_path)
+
+ if returncode != 0:
+ # An error occurred when executing intltool-update.
+ return False
+
+ notexist = os.path.join(path, "notexist")
+ return not os.access(notexist, os.R_OK)
+
+
def find_intltool_dirs():
    """Search for directories with intltool structure.

    The current directory and its subdirectories are searched.  An
    'intltool structure' is a directory containing a POTFILES.in file
    all of whose listed files actually exist — the latter condition
    guards against stale files.

    :returns: A sorted list of directory names.
    """
    return sorted(
        podir for podir in find_potfiles_in() if check_potfiles_in(podir))
+
+
+def _get_AC_PACKAGE_NAME(config_file):
+ """Get the value of AC_PACKAGE_NAME from function parameters.
+
+ The value of AC_PACKAGE_NAME is either the first or the fourth
+ parameter of the AC_INIT call if it is called with at least two
+ parameters.
+ """
+ params = config_file.getFunctionParams("AC_INIT")
+ if params is None or len(params) < 2:
+ return None
+ if len(params) < 4:
+ return params[0]
+ else:
+ return params[3]
+
+
+def _try_substitution(config_files, varname, substitution):
+ """Try to find a substitution in the config files.
+
+ :returns: The completed substitution or None if none was found.
+ """
+ subst_value = None
+ if varname == substitution.name:
+ # Do not look for the same name in the current file.
+ config_files = config_files[:-1]
+ for config_file in reversed(config_files):
+ subst_value = config_file.getVariable(substitution.name)
+ if subst_value is not None:
+ # Substitution found.
+ break
+ else:
+ # No substitution found.
+ return None
+ return substitution.replace(subst_value)
+
+
def get_translation_domain(dirname):
    """Get the translation domain for this PO directory.

    Imitates some of the behavior of intltool-update to find out which
    translation domain the build environment provides. The domain is usually
    defined in the GETTEXT_PACKAGE variable in one of the build files. Another
    variant is DOMAIN in the Makevars file. This function goes through the
    ordered list of these possible locations, top to bottom, and tries to
    find a valid value. Since the same variable name may be defined in
    multiple files (usually configure.ac and Makefile.in.in), it needs to
    keep trying with the next file, until it finds the most specific
    definition.

    If the found value contains a substitution, either autoconf style (@...@)
    or make style ($(...)), the search is continued in the same file and back
    up the list of files, now searching for the substitution. Multiple
    substitutions or multi-level substitutions are not supported.
    """
    # (filename relative to dirname, variable to look for, whether a
    # later file may override the value found here).
    locations = [
        ('../configure.ac', 'GETTEXT_PACKAGE', True),
        ('../configure.in', 'GETTEXT_PACKAGE', True),
        ('Makefile.in.in', 'GETTEXT_PACKAGE', False),
        ('Makevars', 'DOMAIN', False),
        ]
    value = None
    substitution = None
    config_files = []
    for filename, varname, keep_trying in locations:
        path = os.path.join(dirname, filename)
        if not os.access(path, os.R_OK):
            # Skip non-existent files.
            continue
        config_files.append(ConfigFile(path))
        new_value = config_files[-1].getVariable(varname)
        if new_value is not None:
            value = new_value
            if value == "AC_PACKAGE_NAME":
                # Special token: resolve via the AC_INIT parameters.
                value = _get_AC_PACKAGE_NAME(config_files[-1])
            else:
                # Check if the value needs a substitution.
                substitution = Substitution.get(value)
                if substitution is not None:
                    # Try to substitute with value.
                    value = _try_substitution(
                        config_files, varname, substitution)
                    if value is None:
                        # No substitution found; the setup is broken.
                        break
        if value is not None and not keep_trying:
            # A value has been found.
            break
    return value
+
+
@contextmanager
def chdir(directory):
    """Context manager: run the block with `directory` as cwd.

    Fix: restore the original working directory in a finally clause, so
    an exception raised inside the with-block no longer leaves the
    process stranded in `directory`.
    """
    cwd = os.getcwd()
    os.chdir(directory)
    try:
        yield
    finally:
        os.chdir(cwd)
+
+
def generate_pot(podir, domain):
    """Generate one PO template using intltool.

    'intltool-update -p' could detect the translation domain itself,
    but we trust our own detection more and pass the domain explicitly.
    The '-g' option also lifts intltool's restriction that the working
    directory be named 'po' or 'po-*', which suits us.

    :param podir: The PO directory in which to build template.
    :param domain: The translation domain to use as the name of the
        template.  If it is None or empty, 'messages.pot' will be used.
    :return: True if generation succeeded.
    """
    if domain is None or not domain.strip():
        domain = "messages"
    with chdir(podir):
        devnull = open("/dev/null", "w")
        try:
            returncode = call(
                ["/usr/bin/intltool-update", "-p", "-g", domain],
                stdout=devnull, stderr=devnull)
        finally:
            devnull.close()
    return returncode == 0
+
+
def generate_pots(package_dir='.'):
    """Top-level function to generate all PO templates in a package."""
    templates = []
    with chdir(package_dir):
        for po_dir in find_intltool_dirs():
            domain = get_translation_domain(po_dir)
            if not generate_pot(po_dir, domain):
                continue
            templates.append(os.path.join(po_dir, domain + ".pot"))
    return templates
+
+
class ConfigFile(object):
    """Represent a config file and return variables defined in it."""

    def __init__(self, file_or_name):
        """Read the whole file into memory.

        :param file_or_name: either a filename or an open file-like
            object to read the configuration from.
        """
        conf_file = (
            file(file_or_name) if isinstance(file_or_name, basestring)
            else file_or_name)
        self.content = conf_file.read()

    def _stripQuotes(self, identifier):
        """Strip surrounding quotes from `identifier`, if present.

        :param identifier: a string, possibly surrounded by matching
            'single,' "double," or [bracket] quotes.
        :return: `identifier` but with the outer pair of matching quotes
            removed, if they were there.
        """
        if len(identifier) >= 2:
            for left, right in [('"', '"'), ("'", "'"), ("[", "]")]:
                if identifier.startswith(left) and identifier.endswith(right):
                    return identifier[1:-1]
        return identifier

    def getVariable(self, name):
        """Search the file for a variable definition with this name."""
        match = re.search(
            r"^%s[ \t]*=[ \t]*([^\s]*)" % re.escape(name),
            self.content, re.M)
        if match is None:
            return None
        return self._stripQuotes(match.group(1))

    def getFunctionParams(self, name):
        """Search file for a function call with this name, return parameters.
        """
        match = re.search(
            r"^%s\(([^)]*)\)" % re.escape(name), self.content, re.M)
        if match is None:
            return None
        return [
            self._stripQuotes(param.strip())
            for param in match.group(1).split(',')]
+
+
class Substitution(object):
    """Find and replace substitutions.

    Variable texts may contain other variables whose values should be
    substituted in.  These are marked either by surrounding @ signs
    (autoconf style) or by a leading $ sign with optional parentheses
    (make style).

    This class identifies one such substitution in a variable text,
    extracts the name of the referenced variable, and performs the
    replacement, hiding the substitution style from the caller.
    """

    autoconf_pattern = re.compile("@([^@]+)@")
    makefile_pattern = re.compile("\$\(?([^\s\)]+)\)?")

    @staticmethod
    def get(variabletext):
        """Factory method.

        :param variabletext: A variable value with possible substitution.
        :returns: A Substitution object or None if no substitution was
            found.
        """
        candidate = Substitution(variabletext)
        return candidate if candidate.name is not None else None

    def _searchForPatterns(self):
        """Search for all the available patterns in variable text."""
        # Match objects are truthy, so `or` falls through to the
        # makefile pattern only when the autoconf one found nothing.
        return (self.autoconf_pattern.search(self.text)
                or self.makefile_pattern.search(self.text))

    def __init__(self, variabletext):
        """Extract substitution name from variable text."""
        self.text = variabletext
        self.replaced = False
        match = self._searchForPatterns()
        if match is None:
            self.name = None
            self._replacement = None
        else:
            self.name = match.group(1)
            self._replacement = match.group(0)

    def replace(self, value):
        """Return a copy of the variable text with the substitution resolved.
        """
        self.replaced = True
        return self.text.replace(self._replacement, value)
=== added file 'lib/canonical/buildd/remove-build'
--- lib/canonical/buildd/remove-build 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/remove-build 2011-11-17 19:45:28 +0000
@@ -0,0 +1,25 @@
+#!/bin/sh
+#
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# Buildd Slave tool to remove an unmounted chroot
+
+# Expects build id as arg 1, makes build-id to contain the build
+
+# Needs RM to be set to a gnu rm instance
+# Needs SUDO to be set to a sudo instance for passwordless access
+
# Absolute tool paths; SUDO must grant passwordless access.
RM=/bin/rm
SUDO=/usr/bin/sudo
BUILDID="$1"

set -e

exec 2>&1

echo "Removing build $BUILDID"

# The build tree lives in ~/build-<id>; delete it entirely.
cd "$HOME"
$SUDO $RM -rf "build-$BUILDID"
=== added file 'lib/canonical/buildd/sbuild'
--- lib/canonical/buildd/sbuild 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/sbuild 2011-11-17 19:45:28 +0000
@@ -0,0 +1,3658 @@
+#! /usr/bin/perl
+#
+# sbuild: build packages, obeying source dependencies
+# Copyright (C) 1998-2000 Roman Hodek <Roman.Hodek@xxxxxxxxxxxxxxxxxxxxxxxxxx>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; either version 2 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+#
+# $Id: sbuild,v 1.170 2002/10/10 18:50:44 rnhodek Exp $
+#
+# $Log: sbuild,v $
+# Revision 1.170 2002/10/10 18:50:44 rnhodek
+# Security/accepted autobuilding patch by Ryan.
+#
+# Revision 1.169 2002/03/11 09:49:55 rnhodek
+# chroot_apt_options: don't just set Dir::Etc::sourcelist, but whole
+# Dir::Etc, to avoid new preferences.
+#
+# Revision 1.168 2002/02/07 09:12:25 rnhodek
+# Fixed sudo call for running scripts.
+#
+# Revision 1.167 2002/01/22 08:53:10 rnhodek
+# Use capitalized "Noninteractive" as DEBIAN_FRONTEND.
+#
+# Revision 1.166 2001/11/08 09:32:10 rnhodek
+# fetch_source_file: in case of apt, need to pass up a $dscfile with
+# epoch stripped.
+#
+# Revision 1.165 2001/11/07 18:04:31 rnhodek
+# Argl.. I should learn perl first :)
+#
+# Revision 1.164 2001/11/07 09:25:59 rnhodek
+# fetch_source_files: after apt-get, fix $dscfile not to contain epoch.
+#
+# Revision 1.163 2001/11/06 15:11:24 rnhodek
+# fetch_source_files: don't touch symlinks generated by apt-get.
+#
+# Revision 1.162 2001/11/06 12:29:22 rnhodek
+# New config var $use_FetchFile (default 0).
+# Integrated patches for using apt-get to download sources; modified so
+# that old code (using FetchFile.pm) still present.
+# run_apt: Fix parsing error of apt-output (\s matched ^M printed by
+# pre-configuring).
+#
+# Revision 1.161 2001/10/29 12:20:41 rnhodek
+# Added $fakeroot var.
+#
+# Revision 1.160 2001/10/29 08:27:53 rnhodek
+# Fix typo in message.
+#
+# Revision 1.159 2001/06/18 09:57:55 rnhodek
+# Use --database option when wanna_build_dbbase set in config.
+#
+# Revision 1.158 2001/06/01 09:54:36 rnhodek
+# Fix two typos.
+#
+# Revision 1.157 2001/05/31 08:08:08 rnhodek
+# Fixed thinko in conflicts checking (tnx James)
+#
+# Revision 1.156 2001/05/30 08:20:05 rnhodek
+# Use /var/lib/apt as Dir::State if exists (newer apt versions use that)
+# (tnx to Ryan Murray).
+#
+# Revision 1.155 2001/05/30 08:17:01 rnhodek
+# Print also architecture at start of log.
+#
+# Revision 1.154 2001/05/29 07:33:54 rnhodek
+# Also handle versioned Build-Conflicts.
+# (Tnx to David Schleef <ds@xxxxxxxxxxx>)
+#
+# Revision 1.153 2001/03/13 08:45:17 rnhodek
+# Run chmod -R on source tree only after unpacking.
+#
+# Revision 1.152 2001/02/19 09:23:24 rnhodek
+# Fix typo.
+#
+# Revision 1.151 2001/02/19 08:43:22 rnhodek
+# Fix wrong arch logic.
+#
+# Revision 1.150 2001/01/22 12:56:16 rnhodek
+# Fix handling of arch-restricted build-deps (tnx to Ryan Murray).
+#
+# Revision 1.149 2001/01/15 11:17:36 rnhodek
+# Fixed typo.
+#
+# Revision 1.148 2001/01/15 10:59:18 rnhodek
+# Shortcut when looking for non-dsc files: first try to fetch from same
+# dir as .dsc and FF_search only if that fails.
+#
+# Revision 1.147 2001/01/10 16:16:48 rnhodek
+# Move all built files from chroot dir, not only *.deb.
+#
+# Revision 1.146 2000/10/23 10:39:24 rnhodek
+# Before doing chmod on /etc/ld.so.conf, test if it exists at all.
+#
+# Revision 1.145 2000/10/19 09:08:35 rnhodek
+# A couple of little fixes from Ben.
+# Checks and assures that /etc/ld.so.conf is readable.
+# Support for local overrides with $conf::srcdep_over.
+#
+# Revision 1.144 2000/06/27 12:34:00 rnhodek
+# Implemented new 'prepre' and 'unpack' sections for special targets;
+# the first is a script run before package installation, and the latter
+# can list source packages that should be unpacked in the build dir.
+#
+# Revision 1.143 2000/06/20 14:39:59 rnhodek
+# Call apt-get with some options for relocating various files instead of
+# chrooting it; this avoids that the archive must be available in chroot
+# environment; same is true for dpkg and apt-cache calls.
+# If chrooted, call dpkg with the chroot dir as cwd to avoid "cannot get
+# current directory" errors; same for apt option DPkg::Run-Directory.
+#
+# Revision 1.142 2000/06/19 14:09:00 rnhodek
+# Fix syntax error.
+#
+# Revision 1.141 2000/06/19 14:05:38 rnhodek
+# Call buildd-addpkg with --chroot=DIR options for each dist that is >=
+# the one building for (to run apt-get update in the chroot
+# environments).
+#
+# Revision 1.140 2000/06/19 09:10:24 rnhodek
+# Obey new config var @ignore_watches_no_build_deps, i.e. don't flag
+# watches that are listed there if the package doesn't have build deps.
+# In check_watches(), strip $chroot_dir instead of $chroot_build_dir
+# from pathname.
+#
+# Revision 1.139 2000/06/13 10:54:43 rnhodek
+# Also execute special dependency scripts in chroot environment.
+#
+# Revision 1.138 2000/06/09 12:47:52 rnhodek
+# File .dsc filename for rbuilder (with URL).
+#
+# Revision 1.137 2000/06/09 09:15:21 rnhodek
+# Always install built package (if already) when building chroot; i.e.
+# the $system_level test is not necessary when chrooted.
+#
+# Revision 1.136 2000/06/09 08:20:52 rnhodek
+# Fixed su usage in sub build.
+#
+# Revision 1.135 2000/06/08 14:02:11 rnhodek
+# After changing to chroot dir, change back to be the normal user again
+# and start dpkg-buildpackage with -rsudo again; some packages require
+# that the build target is executed as non-root.
+#
+# Revision 1.134 2000/06/08 13:01:54 rnhodek
+# apt-cache calls need sudo, too, when using chroot.
+#
+# Revision 1.133 2000/06/08 09:13:31 rnhodek
+# Implemented chroot builds; there are a few new global variables
+# $main::chroot_*; major changes are in build, where the source tree is
+# unpacked somewhere else, dpkg-buildpackage called under chroot and
+# built packages are moved back again; also all apt-get and dpkg calls
+# are chroot-ed and /var/lib/dpkg/status is accessed from the chroot
+# environment; also watches are checked under the new root dir.
+#
+# Revision 1.132 2000/06/06 14:37:05 rnhodek
+# New option --source (-s): Also build source package, i.e. don't pass
+# -b or -B to dpkg-buildpackage.
+#
+# Revision 1.131 2000/05/30 15:41:34 rnhodek
+# Call buildd-addpkg with --dist option.
+# Install freshly built packages only if $conf::system_level >= $dist.
+#
+# Revision 1.130 2000/05/16 12:34:20 rnhodek
+# Insert a chmod -R go+rX on the build tree to make files readable; it
+# happens sometimes that files in a .orig.tar.gz have restrictive
+# permissions and this can be inconvenient.
+#
+# Revision 1.129 2000/03/01 14:43:34 rnhodek
+# Also match error message "dpkg: status database area is locked" from
+# apt and retry call later.
+#
+# Revision 1.128 2000/02/16 15:21:33 rnhodek
+# Fix a print message in merge_pkg_build_deps.
+#
+# Revision 1.127 2000/02/16 15:20:38 rnhodek
+# Print version number of sbuild in package log.
+#
+# Revision 1.126 2000/02/16 15:15:15 rnhodek
+# Fix regexp for finding !needs-no-XXX packages.
+# Move !needs-no-XXX from central deps to $main::additional_deps so that
+# they can be found by prepare_watches later.
+#
+# Revision 1.125 2000/02/15 14:40:35 rnhodek
+# Remove forgotten debugging code.
+#
+# Revision 1.124 2000/02/15 11:12:43 rnhodek
+# Expand virtual packages in package build dependencies for comparing
+# with central deps.
+#
+# Revision 1.123 2000/02/11 11:17:07 rnhodek
+# Do not activate watches for packages XXX if a negative dependency
+# needs-no-XXX exists (used to be just a comment, now really processed
+# by sbuild.)
+# Also do not activate watches for dependencies of pkg build deps.
+#
+# Revision 1.122 2000/02/09 15:57:25 rnhodek
+# In merge_pkg_build_deps, do not show warnings about missing
+# !this-package-does-not-exist or !needs-no-xxx dependencies.
+#
+# Revision 1.121 2000/02/04 14:04:18 rnhodek
+# Use --no-down-propagation.
+#
+# Revision 1.120 2000/02/01 12:05:56 rnhodek
+# In binNMU mode, a '_' was missing in the job name.
+#
+# Revision 1.119 2000/01/28 14:54:43 rnhodek
+# Accept abbrevs for distribution options (-ds, -df, -du) here, too.
+# New option --make-binNMU=entry.
+# New binNMU hack to modify debian/changelog; it will add a new entry
+# for the NMU version.
+# New helper function binNMU_version to generate a new version number.
+#
+# Revision 1.118 2000/01/13 14:32:30 rnhodek
+# For compiling on slink systems, pass the --force-confold option to
+# dpkg only for versions < 1.4.1.18 (that don't understand it yet).
+#
+# Revision 1.117 1999/12/17 13:49:50 rnhodek
+# Improved output about missing central deps: build-essential (act.
+# policy) and dependencies of pkg build deps are filtered out and
+# printed separately.
+# New functions cmp_dep_lists, read_build_essential,
+# expand_dependencies, and get_dependencies for the above.
+#
+# Revision 1.116 1999/12/17 11:04:43 rnhodek
+# When pkg build-deps were read from debian/sbuild-build-deps, a wrong
+# package name was used.
+#
+# Revision 1.115 1999/12/09 09:54:42 rnhodek
+# Again fixed a fatal typo...
+#
+# Revision 1.114 1999/12/08 12:33:16 rnhodek
+# merge_pkg_build_deps: Fix printing of overrides.
+#
+# Revision 1.113 1999/12/08 12:25:34 rnhodek
+# Special dependencies are implicitly overrides, i.e. are added to the
+# package-provided build deps.
+#
+# Revision 1.112 1999/12/08 11:31:38 rnhodek
+# get_dpkg_status: don't reset $res{$pkg}->{Installed} to 0 if $pkg is
+# provided.
+#
+# Revision 1.111 1999/12/08 10:37:33 rnhodek
+# Change parsing of .dsc file so that multi-line build dependencies are
+# allowed.
+# Make warning about missing central deps a bit bigger.
+#
+# Revision 1.110 1999/12/06 15:00:33 rnhodek
+# Fix comparison with old deps (must copy them, not only the reference).
+#
+# Revision 1.109 1999/12/06 08:35:53 rnhodek
+# Fixed typo.
+#
+# Revision 1.108 1999/12/03 09:58:16 rnhodek
+# If a pkg has its own build deps, compare them with the central ones
+# and report missing ones.
+#
+# Revision 1.107 1999/11/30 13:54:38 rnhodek
+# Print a message if build deps from the .dsc are used (to avoid confusion).
+# If a pkg has build deps, store them in debian/.sbuild-build-deps to
+# have them available when rebuilding later (no .dsc anymore); also
+# check for this file and read deps from it if building without a .dsc
+# in unpacked source.
+#
+# Revision 1.106 1999/11/15 12:30:15 rnhodek
+# merge_pkg_build_deps: added missing if $main::debug.
+#
+# Revision 1.105 1999/11/03 14:56:32 rnhodek
+# When running apt, set env var DEBIAN_FRONTEND to noninteractive to
+# stop debconf from asking questions or complaining that /dev/tty can't
+# be opened.
+#
+# Revision 1.104 1999/11/02 16:43:51 rnhodek
+# check_inst_packages: also upgrade dependencies of src-deps (if they're
+# already installed); some -dev packages fail to correctly require an
+# identical versioned shlib pkg, so in some cases only the -dev pkg was
+# installed.
+#
+# Revision 1.103 1999/11/02 15:45:43 rnhodek
+# build: Use epoch-stripped version number for the .changes file.
+# check_inst_packages: forgot a if $main::debug.
+#
+# Revision 1.102 1999/10/29 13:07:49 rnhodek
+# New option --stats-dir=DIR; if used, a "1" is appended to
+# DIR/give-back each time a package is given back.
+#
+# Revision 1.101 1999/10/29 12:32:24 rnhodek
+# If using an already unpacked source tree, check (with
+# dpkg-parsechangelog) if it's really the requested version.
+# Make apt-get run dpkg with --force-confold, as the </dev/null trick
+# doesn't work anymore with dpkg >= 1.4.1.18.
+#
+# Revision 1.100 1999/10/25 12:12:21 rnhodek
+# check_inst_packages: Add packages to @deps_inst only if they're not
+# already to be installed.
+#
+# Revision 1.99 1999/10/22 09:01:36 rnhodek
+# Minor changes to output of check_inst_packages.
+#
+# Revision 1.98 1999/10/21 14:21:57 rnhodek
+# Oops... call check_inst_packages only if build was successful.
+#
+# Revision 1.97 1999/10/21 11:46:50 rnhodek
+# Deleted RCS logs for < 1.50.
+# New option --store-built-packages.
+# Fix package name parsing: \w also matches '_' which is unwanted;
+# replace by a-zA-Z.
+# Read reverse sourcedeps of $main::store_built_packages.
+# New sub check_inst_packages.
+#
+# Revision 1.96 1999/09/27 11:18:10 rnhodek
+# Added a missing PLOG.
+#
+# Revision 1.95 1999/09/15 09:10:25 rnhodek
+# Additionally print a warning if a special dep has a version relation.
+#
+# Revision 1.94 1999/09/15 09:08:12 rnhodek
+# Changed parsing of dependencies a bit so that special deps can have
+# arch restrictions, too.
+#
+# Revision 1.93 1999/08/30 09:44:35 rnhodek
+# get_dpkg_status: don't exit too early if a pkg isn't in the arg list,
+# as it might be provided only.
+#
+# Revision 1.92 1999/08/27 13:32:04 rnhodek
+# --auto-give-back has a new optional argument, the user and hostname
+# where to call wanna-build (like $conf::sshcmd); this is needed that
+# sbuild can do give-backs when there's no local wanna-build.
+#
+# Revision 1.91 1999/08/23 12:53:02 rnhodek
+# Support for alternatives.
+# Support for [ARCH1 !ARCH2] arch restriction on dependencies.
+# Parses only src-deps which are needed for packages to be built.
+# Reads Build-{Depends,Conflicts}{,-Indep}: fields from .dsc if present;
+# those override the central src-deps, except those marked as override
+# (& prefix).
+# Implemented abbrevs as kind of macros in src-deps.
+# New option --add-depends (-a).
+# New option --arch-all (-A).
+#
+# Revision 1.90 1999/08/11 15:28:11 rnhodek
+# Insert missing wait call in run_script to get correct return value.
+#
+# Revision 1.89 1999/08/10 14:01:49 rnhodek
+# Virtual packages as dependencies didn't work really yet -- the
+# consistency check didn't see them (dpkg --status doesn't know them)
+# and thus aborted the build; solution: get_dpkg_status now directly
+# reads the status file (which should be a bit faster, too) and extracts
+# Provides: fields of all installed packages and considers those virtual
+# packages installed, too.
+# Print "Source-dependencies not satisfied" message to package log, not
+# to sbuild log.
+# Same in run_apt for virtual package handling.
+# Fix stdout/stderr redirecting when running scripts.
+#
+# Revision 1.88 1999/07/13 07:23:55 rnhodek
+# Use GDBM for time/space databases, as perl-5.004 seems not to contain
+# DB_File anymore.
+#
+# Revision 1.87 1999/06/21 12:52:00 rnhodek
+# Seems apt has a new error message if a cached Packages file isn't
+# up-to-date anymore -- recognize this msg, too, and rerun apt-get update.
+#
+# Revision 1.86 1999/06/09 15:05:38 rnhodek
+# Fix loop in apply_patches.
+# Don't fail due to failed patch if a global patch.
+# Global patches are no syntax error when parsing src-deps...
+#
+# Revision 1.85 1999/06/04 09:47:02 rnhodek
+# Add support for global patches, which will be tried on any package;
+# their names in source-dependencies start with "**".
+#
+# Revision 1.84 1999/06/04 08:17:17 rnhodek
+# When calling wanna-build --give-back, don't forget the --dist argument!
+# Added support for virtual packages as source dependencies: apt-get tells us
+# which alternatives are possible, and one of these is selected either by
+# %conf::alternatives or by random.
+#
+# Revision 1.83 1999/06/02 09:07:47 rnhodek
+# With --batch, write each finished job to SBUILD-FINISHED; buildd can pick up
+# this file if sbuild crashes and needs not rebuild already done stuff. The file
+# is removed on normal exit and if sbuild dumps to REDO during a shutdown.
+#
+# Revision 1.82 1999/06/02 08:47:39 rnhodek
+# Remove as many die's as possible -- the bad exit status can cause
+# buildd to retry all packages of an sbuild run; better let this one
+# package fail.
+# Make sure that after build() we're in the correct directory: some
+# chdir()s were missing; also don't chdir("..") because this can be
+# wrong if we followed a symlink, use $main::cwd instead.
+# If the package directory already exists as a symlink, abort the build.
+#
+# Revision 1.81 1999/05/31 12:59:41 rnhodek
+# Run du after build under sudo, to avoid error messages about
+# unreadable dirs.
+#
+# Revision 1.80 1999/05/27 13:28:04 rnhodek
+# Oops, missed an epoch fix (when constructing the .changes file name).
+#
+# Revision 1.79 1999/05/26 11:34:11 rnhodek
+# Ignore epochs for fetching files.
+#
+# Revision 1.78 1999/05/26 09:48:23 rnhodek
+# If dpkg-source fails, remove .tmp-nest dir.
+#
+# Revision 1.77 1999/05/05 07:56:51 rnhodek
+# Need to empty %main::this_watches before filling it for a new package;
+# otherwise we have some spurious reports :-)
+#
+# Revision 1.76 1999/05/04 14:51:40 rnhodek
+# Some more minor stuff for avg-build-space: Reset global
+# $main::this_space to 0 before each build to avoid using the figure of
+# the previous package in case of errors; don't write a 0 value into the
+# database.
+#
+# Revision 1.75 1999/05/04 14:43:01 rnhodek
+# Fix parsing of a single dependency: package name never should contain
+# a '('.
+#
+# Revision 1.74 1999/05/04 14:29:51 rnhodek
+# Determine how much space is required for a build (final build dir +
+# generated .debs) after dpkg-buildpackage is finished; display figure
+# in package log and also store it in $conf::avg_space_db (analogous to
+# avg_time_db).
+#
+# Revision 1.73 1999/05/03 12:53:25 rnhodek
+# After unpacking src dir, run "chmod -R g-s ." on it; some
+# .orig.tar.gz's are packed with the setgid bit, which causes the debian
+# dir and all subdirs to be created setgid, too, and later dpkg-deb
+# --build complains about this.
+#
+# Revision 1.72 1999/04/22 14:16:25 rnhodek
+# Don't kill tee process if verbose but --nolog set -- $pkg_tee_pid
+# undefined then!
+#
+# Revision 1.71 1999/04/21 14:54:10 rnhodek
+# Implemented watches if certain binaries have been used during a build
+# without a source dependency.
+#
+# Revision 1.70 1999/03/12 10:29:32 rnhodek
+# New option --force-depends (-f) to override src-deps of a package.
+#
+
+BEGIN {
+ ($main::HOME = $ENV{'HOME'})
+ or die "HOME not defined in environment!\n";
+ push( @INC, "$main::HOME/lib" );
+}
+
+chomp( $main::HOSTNAME = `hostname` );
+
+package conf;
+$HOME = $main::HOME;
+# defaults:
+@dist_parts = qw(main contrib non-free);
+$source_dependencies = "/etc/source-dependencies";
+$mailprog = "/usr/sbin/sendmail";
+$dpkg = "/usr/bin/dpkg";
+$sudo = "/usr/bin/sudo";
+$su = "/bin/su";
+$fakeroot = "/usr/bin/fakeroot";
+$apt_get = "/usr/bin/apt-get";
+$apt_cache = "/usr/bin/apt-cache";
+$dpkg_source = "/usr/bin/dpkg-source";
+$build_env_cmnd = "";
+$pgp_options = "-us -uc";
+$log_dir = "$main::HOME/logs";
+$mailto = "";
+$purge_build_directory = "successful";
+@toolchain_regex = ( 'binutils$', 'gcc-[\d.]+$', 'g\+\+-[\d.]+$', 'libstdc\+\+', 'libc[\d.]+-dev$', 'linux-kernel-headers$', 'dpkg-dev$', 'make$' );
+$stalled_pkg_timeout = 90; # minutes
+$srcdep_lock_wait = 1; # minutes
+%individual_stalled_pkg_timeout = ();
+$path = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/X11R6/bin:/usr/games";
+# read conf files
+require "/etc/sbuild.conf" if -r "/etc/sbuild.conf";
+require "$HOME/.sbuildrc" if -r "$HOME/.sbuildrc";
+# some checks
+#die "mailprog binary $conf::mailprog does not exist or isn't executable\n"
+# if !-x $conf::mailprog;
+die "sudo binary $conf::sudo does not exist or isn't executable\n"
+ if !-x $conf::sudo;
+die "apt-get binary $conf::apt_get does not exist or isn't executable\n"
+ if !-x $conf::apt_get;
+die "apt-cache binary $conf::apt_cache does not exist or isn't executable\n"
+ if !-x $conf::apt_cache;
+die "dpkg-source binary $conf::dpkg_source does not exist or isn't executable\n"
+ if !-x $conf::dpkg_source;
+#die "$conf::log_dir is not a directory\n" if ! -d $conf::log_dir;
+die "$conf::srcdep_lock_dir is not a directory\n" if ! -d $conf::srcdep_lock_dir;
+die "conf::mailto not set\n" if !$conf::mailto;
+package main;
+
+use strict;
+use GDBM_File;
+use POSIX;
+use FileHandle;
+use Cwd;
+
+# avoid intermixing of stdout and stderr
+$| = 1;
+
+# We should not ignore HUP for our children - it breaks test suites.
+# in case the terminal disappears, the build should continue
+#$SIG{'HUP'} = 'IGNORE';
+
+$main::distribution = "";
+$main::distribution = $conf::default_distribution if $conf::default_distribution;
+
+chomp( $main::arch = `$conf::dpkg --print-architecture` );
+$main::username = (getpwuid($<))[0] || $ENV{'LOGNAME'} || $ENV{'USER'};
+$main::debug = 0;
+$main::verbose = 0;
+$main::batchmode = 0;
+$main::auto_giveback = 0;
+$main::nomail = 0;
+$main::build_arch_all = 0;
+$main::build_source = 0;
+$main::jobs_file = cwd() . "/build-progress";
+$main::max_lock_trys = 120;
+$main::lock_interval = 5;
+$main::cwd = cwd();
+$main::ilock_file = "$conf::srcdep_lock_dir/install";
+$main::srcdep_lock_cnt = 0;
+$main::chroot_dir = "";
+$main::chroot_build_dir = "";
+$main::chroot_apt_options = "";
+@main::toolchain_pkgs = ();
+$main::component="";
+$main::nr_processors = $ENV{'NR_PROCESSORS'} if $ENV{'NR_PROCESSORS'};
+
+umask(022);
+
+$main::new_dpkg = 0;
+check_dpkg_version();
+
+while( @ARGV && $ARGV[0] =~ /^-/ ) {
+ $_ = shift @ARGV;
+ if (/^-v$/ || /^--verbose$/) {
+ $main::verbose++;
+ }
+ elsif (/^-D$/ || /^--debug$/) {
+ $main::debug++;
+ }
+ elsif (/^-b$/ || /^--batch$/) {
+ $main::batchmode = 1;
+ }
+ elsif (/^-n$/ || /^--nolog$/) {
+ $main::nolog = 1;
+ }
+ elsif (/^-A$/ || /^--arch-all$/) {
+ $main::build_arch_all++;
+ }
+ elsif (/^-s$/ || /^--source$/) {
+ $main::build_source++;
+ $conf::purge_build_directory = "never";
+ }
+ elsif (/^--architecture=(.)/) {
+ $conf::arch=$1.$';
+ }
+ elsif (/^--archive=(.)/) {
+ $main::archive=$1.$';
+ }
+ elsif (/^--comp=(.)/) {
+ $main::component=$1.$';
+ }
+ elsif (/^--purpose=(.)/) {
+ $main::purpose=$1.$';
+ }
+ elsif (/^--build-debug-symbols$/) {
+ $main::build_debug_symbols = 1;
+ }
+ elsif (/^-d/ || /^--dist/) {
+ if (/^-d(.)/ || /^--dist=(.)/) {
+ $main::distribution = $1.$';
+ }
+ elsif (!@ARGV) {
+ die "$_ option missing argument\n";
+ }
+ else {
+ $main::distribution = shift @ARGV;
+ }
+ $main::distribution = "stable" if $main::distribution eq "s";
+ $main::distribution = "testing" if $main::distribution eq "t";
+ $main::distribution = "unstable" if $main::distribution eq "u";
+ }
+ elsif (/^-p/ || /^--purge/) {
+ if (/^-p(.)/ || /^--purge=(.)/) {
+ $conf::purge_build_directory = $1.$';
+ }
+ elsif (!@ARGV) {
+ die "$_ option missing argument\n";
+ }
+ else {
+ $conf::purge_build_directory = shift @ARGV;
+ }
+ die "Bad purge mode\n"
+ if !isin($conf::purge_build_directory, qw(always successful never));
+ }
+ elsif (/^-m/ || /^--maintainer/) {
+ if (/^-m(.)/ || /^--maintainer=(.)/) {
+ $conf::maintainer_name = $1.$';
+ }
+ elsif (!@ARGV) {
+ die "$_ option missing argument\n";
+ }
+ else {
+ $conf::maintainer_name = shift @ARGV;
+ }
+ }
+ elsif (/^-f/ || /^--force-depends/) {
+ if (/^-f(.)/ || /^--force-depends=(.)/) {
+ push( @main::manual_srcdeps, "f".$1.$' );
+ }
+ elsif (!@ARGV) {
+ die "$_ option missing argument\n";
+ }
+ else {
+ push( @main::manual_srcdeps, "f".(shift @ARGV) );
+ }
+ }
+ elsif (/^-a/ || /^--add-depends/) {
+ if (/^-a(.)/ || /^--add-depends=(.)/) {
+ push( @main::manual_srcdeps, "a".$1.$' );
+ }
+ elsif (!@ARGV) {
+ die "$_ option missing argument\n";
+ }
+ else {
+ push( @main::manual_srcdeps, "a".(shift @ARGV) );
+ }
+ }
+ elsif (/^--auto-give-back(=(.*))?$/) {
+ $main::auto_giveback = 1;
+ if ($2) {
+ my @parts = split( '@', $2 );
+ $main::auto_giveback_socket = "$main::HOME/build" if @parts > 3;
+ $main::auto_giveback_socket .= $parts[$#parts-3] if @parts > 3;
+ $main::auto_giveback_wb_user = $parts[$#parts-2] if @parts > 2;
+ $main::auto_giveback_user = $parts[$#parts-1] if @parts > 1;
+ $main::auto_giveback_host = $parts[$#parts];
+ }
+ }
+ elsif (/^--database=(.+)$/) {
+ $main::database = $1;
+ }
+ elsif (/^--stats-dir=(.+)$/) {
+ $main::stats_dir = $1;
+ }
+ elsif (/^--make-binNMU=(.+)$/) {
+ $main::binNMU = $1;
+ $main::binNMUver ||= 1;
+ }
+ elsif (/^--binNMU=(\d+)$/) {
+ $main::binNMUver = $1;
+ }
+ elsif (/^--use-snapshot$/) {
+ $main::useSNAP = 1;
+ $main::ld_library_path = "/usr/lib/gcc-snapshot/lib";
+ $conf::path = "/usr/lib/gcc-snapshot/bin:$conf::path";
+ }
+ else {
+ die "Unknown option: $_\n";
+ }
+}
+
+die "Need distribution\n" if $main::distribution eq "";
+
+$conf::mailto = $conf::mailto{$main::distribution}
+ if $conf::mailto{$main::distribution};
+
+$main::arch = $conf::arch if $conf::arch;
+
+# variables for scripts:
+open_log();
+$SIG{'INT'} = \&shutdown;
+$SIG{'TERM'} = \&shutdown;
+$SIG{'ALRM'} = \&shutdown;
+$SIG{'PIPE'} = \&shutdown;
+read_deps( map { m,(?:.*/)?([^_/]+)[^/]*, } @ARGV );
+if (-d "chroot-autobuild") {
+ $main::chroot_dir = "chroot-autobuild";
+ $main::chroot_build_dir = "$main::chroot_dir/build/$main::username/";
+ $conf::srcdep_lock_dir = "$main::chroot_dir/var/debbuild/srcdep-lock";
+ $main::ilock_file = "$conf::srcdep_lock_dir/install";
+ my $absroot = "$main::cwd/$main::chroot_dir";
+ $main::chroot_apt_options =
+ "-o APT::Architecture=$main::arch ".
+ "-o Dir::State=$absroot/var/".
+ (-d "$absroot/var/lib/apt" ? "lib":"state")."/apt ".
+ "-o Dir::State::status=$absroot/var/lib/dpkg/status ".
+ "-o Dir::Cache=$absroot/var/cache/apt ".
+ "-o Dir::Etc=$absroot/etc/apt ".
+ "-o DPkg::Options::=--root=$absroot ".
+ "-o DPkg::Options::=--force-architecture ".
+ "-o DPkg::Run-Directory=$absroot";
+ $main::chroot_apt_op = '$CHROOT_OPTIONS';
+}
+write_jobs_file();
+
+my( $pkgv, $pkg );
+foreach $pkgv (@ARGV) {
+ my $urlbase;
+
+
+ ($urlbase, $pkgv) = ($1, $3) if $pkgv =~ m,^(\w+://(\S+/)?)([^/]+)$,;
+ $pkgv =~ s/\.dsc$//;
+ next if !open_pkg_log( $pkgv );
+ (my $pkg = $pkgv) =~ s/_.*$//;
+ $main::pkg_start_time = time;
+ $main::this_space = 0;
+ $main::pkg_status = "failed"; # assume for now
+ $main::current_job = $main::binNMU_name || $pkgv;
+ $main::additional_deps = [];
+ write_jobs_file( "currently building" );
+ if (should_skip( $pkgv )) {
+ $main::pkg_status = "skipped";
+ goto cleanup_close;
+ }
+ my $dscfile = $pkgv.".dsc";
+ $main::pkg_fail_stage = "fetch-src";
+ my @files_to_rm = fetch_source_files( \$dscfile );
+ if (@files_to_rm && $files_to_rm[0] eq "ERROR") {
+ shift @files_to_rm;
+ goto cleanup_symlinks;
+ }
+
+ $main::pkg_fail_stage = "install-deps";
+ if (!install_deps( $pkg )) {
+ print PLOG "Source-dependencies not satisfied; skipping $pkg\n";
+ goto cleanup_packages;
+ }
+
+ my $dscbase = basename( $dscfile );
+ $main::pkg_status = "successful" if build( $dscbase, $pkgv );
+ chdir( $main::cwd );
+ write_jobs_file( $main::pkg_status );
+ append_to_FINISHED( $main::current_job );
+
+ cleanup_packages:
+ undo_specials();
+ uninstall_deps();
+ remove_srcdep_lock_file();
+ cleanup_symlinks:
+ remove_files( @files_to_rm );
+ cleanup_close:
+ analyze_fail_stage( $pkgv );
+ if( $main::pkg_status eq 'failed' ) {
+ $main::pkg_status = 'failed ' . $main::pkg_fail_stage;
+ }
+ write_jobs_file( $main::pkg_status );
+ close_pkg_log( $pkgv );
+ $main::current_job = "";
+ if ( $main::batchmode and (-f "$main::HOME/EXIT-DAEMON-PLEASE") ) {
+ main::shutdown("NONE (flag file exit)");
+ }
+}
+write_jobs_file();
+
+close_log();
+#unlink( $main::jobs_file ) if $main::batchmode;
+unlink( "SBUILD-FINISHED" ) if $main::batchmode;
+exit 0;
+
+sub fetch_source_files {
+ my $dscfile_ref = shift;
+ my $dscfile = $$dscfile_ref;
+ my ($dir, $dscbase, $files, @other_files, $dscarchs, @made);
+ my ($build_depends, $build_depends_indep, $build_conflicts,
+ $build_conflicts_indep);
+ local( *F );
+
+ $dscfile =~ m,^(.*)/([^/]+)$,;
+ ($dir, $dscbase) = ($1, $2);
+ my $urlbase;
+ $urlbase = $1 if $dscfile =~ m,^(\w+://(\S+/)?)([^/]+)$,;
+ (my $pkgv = $dscfile) =~ s,^(.*/)?([^/]+)\.dsc$,$2,;
+ my ($pkg, $version) = split /_/, $pkgv;
+ @main::have_dsc_build_deps = ();
+
+ if (-d $dscfile) {
+ if (-f "$dscfile/debian/.sbuild-build-deps") {
+ open( F, "<$dscfile/debian/.sbuild-build-deps" );
+ my $pkg;
+ while( <F> ) {
+ /^Package:\s*(.*)\s*$/i and $pkg = $1;
+ /^Build-Depends:\s*(.*)\s*$/i and $build_depends = $1;
+ /^Build-Depends-Indep:\s*(.*)\s*$/i and $build_depends_indep = $1;
+ /^Build-Conflicts:\s*(.*)\s*$/i and $build_conflicts = $1;
+ /^Build-Conflicts-Indep:\s*(.*)\s*$/i and $build_conflicts_indep = $1;
+ }
+ close( F );
+ if ($build_depends || $build_depends_indep || $build_conflicts ||
+ $build_conflicts_indep) {
+ merge_pkg_build_deps( $pkg, $build_depends,
+ $build_depends_indep, $build_conflicts,
+ $build_conflicts_indep );
+ }
+ }
+ return;
+ }
+
+ if ($dir ne ".") {
+ {
+ if (-f "${pkgv}.dsc") {
+ print PLOG "${pkgv}.dsc exists in cwd\n";
+ }
+ else {
+ my %entries;
+ my $retried = 0;
+
+ retry:
+ print PLOG "Checking available source versions...\n";
+ if (!open( PIPE, "$conf::sudo /usr/sbin/chroot ".
+ "$main::chroot_dir $conf::apt_cache ".
+ "-q showsrc $pkg 2>&1 </dev/null |" )) {
+ print PLOG "Can't open pipe to apt-cache: $!\n";
+ return ("ERROR");
+ }
+ { local($/) = "";
+ while( <PIPE> ) {
+ my $ver = $1 if /^Version:\s+(\S+)\s*$/mi;
+ my $tfile = $1 if /^Files:\s*\n((\s+.*\s*\n)+)/mi;
+ @{$entries{$ver}} = map { (split( /\s+/, $_ ))[3] }
+ split( "\n", $tfile );
+ }
+ }
+ close( PIPE );
+ if ($?) {
+ print PLOG "$conf::apt_cache failed\n";
+ return ("ERROR");
+ }
+
+ if (!defined($entries{$version})) {
+ if (!$retried) {
+ # try to update apt's cache if nothing found
+ system "$conf::sudo /usr/sbin/chroot ".
+ "$main::chroot_dir $conf::apt_get ".
+ "update >/dev/null";
+ $retried = 1;
+ goto retry;
+ }
+ print PLOG "Can't find source for $pkgv\n";
+ print PLOG "(only different version(s) ",
+ join( ", ", sort keys %entries), " found)\n"
+ if %entries;
+ return( "ERROR" );
+ }
+
+ print PLOG "Fetching source files...\n";
+ @made = @{$entries{$version}};
+ if (!open( PIPE, "$conf::apt_get $main::chroot_apt_options ".
+ "--only-source -q -d source $pkg=$version 2>&1 </dev/null |" )) {
+ print PLOG "Can't open pipe to $conf::apt_get: $!\n";
+ return ("ERROR", @made);
+ }
+ while( <PIPE> ) {
+ print PLOG $_;
+ }
+ close( PIPE );
+ if ($?) {
+ print PLOG "$conf::apt_get for sources failed\n";
+ return( "ERROR", @made );
+ }
+ # touch the downloaded files, otherwise buildd-watcher
+ # will complain that they're old :)
+ $$dscfile_ref = $dscfile = (grep { /\.dsc$/ } @made)[0];
+ }
+ }
+ }
+
+ if (!open( F, "<$dscfile" )) {
+ print PLOG "Can't open $dscfile: $!\n";
+ return( "ERROR", @made );
+ }
+ my $dsctext;
+ { local($/); $dsctext = <F>; }
+ close( F );
+
+ $dsctext =~ /^Build-Depends:\s*((.|\n\s+)*)\s*$/mi
+ and $build_depends = $1;
+ $dsctext =~ /^Build-Depends-Indep:\s*((.|\n\s+)*)\s*$/mi
+ and $build_depends_indep = $1;
+ $dsctext =~ /^Build-Conflicts:\s*((.|\n\s+)*)\s*$/mi
+ and $build_conflicts = $1;
+ $dsctext =~ /^Build-Conflicts-Indep:\s*((.|\n\s+)*)\s*$/mi
+ and $build_conflicts_indep = $1;
+ $build_depends =~ s/\n\s+/ /g if defined $build_depends;
+ $build_depends_indep =~ s/\n\s+/ /g if defined $build_depends_indep;
+ $build_conflicts =~ s/\n\s+/ /g if defined $build_conflicts;
+ $build_conflicts_indep =~ s/\n\s+/ /g if defined $build_conflicts_indep;
+
+ $dsctext =~ /^Architecture:\s*(.*)$/mi and $dscarchs = $1;
+
+ $dsctext =~ /^Files:\s*\n((\s+.*\s*\n)+)/mi and $files = $1;
+ @other_files = map { (split( /\s+/, $_ ))[3] } split( "\n", $files );
+
+ if (!$dscarchs) {
+ print PLOG "$dscbase has no Architecture: field -- skipping arch check!\n";
+ }
+ else {
+ my $valid_arch;
+ for my $a (split(/\s+/, $dscarchs)) {
+ if (system('dpkg-architecture', '-a' . $main::arch, '-i' . $a) eq 0) {
+ $valid_arch = 1;
+ last;
+ }
+ }
+ if ($dscarchs ne "any" && !($valid_arch) &&
+ !($dscarchs eq "all" && $main::build_arch_all) ) {
+ my $msg = "$dscbase: $main::arch not in arch list or does not match any arch ";
+ $msg .= "wildcards: $dscarchs -- skipping\n";
+ print PLOG $msg;
+ $main::pkg_fail_stage = "arch-check";
+ return( "ERROR", @made );
+ }
+ }
+ print "Arch check ok ($main::arch included in $dscarchs)\n"
+ if $main::debug;
+
+ if ($build_depends || $build_depends_indep || $build_conflicts ||
+ $build_conflicts_indep) {
+ @main::have_dsc_build_deps = ($build_depends, $build_depends_indep,
+ $build_conflicts,$build_conflicts_indep);
+ merge_pkg_build_deps( $pkg, $build_depends, $build_depends_indep,
+ $build_conflicts, $build_conflicts_indep );
+ }
+
+ return @made;
+}
+
+# build($dsc, $pkgv): unpack the source package described by the .dsc file
+# $dsc (unless an unpacked tree already exists), optionally apply
+# special-dependency patches, hack the changelog for binary NMUs, then run
+# dpkg-buildpackage inside the chroot and post-process the results
+# (translations tarballs, ddebs, .changes rewriting).
+# Returns 1 on success, 0 on failure; logs everything to the PLOG handle.
+# NOTE(review): shells out via interpolated sudo/chroot command strings --
+# assumes package names, versions and paths are trusted input.
+sub build {
+ my $dsc = shift;
+ my $pkgv = shift;
+ my( $dir, $rv, $changes );
+ my $do_apply_patches = 1;
+ local( *PIPE, *F, *F2 );
+
+ fixup_pkgv( \$pkgv );
+ print PLOG "-"x78, "\n";
+ # count build time from now, ignoring the installation of source deps
+ $main::pkg_start_time = time;
+ $main::this_space = 0;
+ # Split "name_version" into package name and version; $sversion is the
+ # version with any epoch ("N:") stripped, as used in file names.
+ $pkgv =~ /^([a-zA-Z\d.+-]+)_([a-zA-Z\d:.+~-]+)/;
+ my ($pkg, $version) = ($1,$2);
+ (my $sversion = $version) =~ s/^\d+://;
+ my $tmpunpackdir = $dsc;
+ $tmpunpackdir =~ s/-.*$/.orig.tmp-nest/;
+ $tmpunpackdir =~ s/_/-/;
+ $tmpunpackdir = "$main::chroot_build_dir$tmpunpackdir";
+
+ if (-d "$main::chroot_build_dir$dsc" && -l "$main::chroot_build_dir$dsc") {
+ # if the package dir already exists but is a symlink, complain
+ print PLOG "Cannot unpack source: a symlink to a directory with the\n",
+ "same name already exists.\n";
+ return 0;
+ }
+ if (! -d "$main::chroot_build_dir$dsc") {
+ $main::pkg_fail_stage = "unpack";
+ # dpkg-source refuses to remove the remanants of an
+ # aborted dpkg-source extraction, so we will if necessary.
+ if (-d $tmpunpackdir) {
+ system ("rm -fr $tmpunpackdir");
+ }
+ # Fork a child whose stdout we read through PIPE; the child copies the
+ # source files into the build dir and execs dpkg-source in the chroot.
+ $main::sub_pid = open( PIPE, "-|" );
+ if (!defined $main::sub_pid) {
+ print PLOG "Can't spawn dpkg-source: $!\n";
+ return 0;
+ }
+ if ($main::sub_pid == 0) {
+ setpgrp( 0, $$ );
+ if ($main::chroot_build_dir && !chdir( $main::chroot_build_dir )) {
+ print PLOG "Couldn't cd to $main::chroot_build_dir: $!\n";
+ system ("rm -fr $tmpunpackdir") if -d $tmpunpackdir;
+ exit 1;
+ }
+
+ # Collect the .dsc plus every file named in its Files: section.
+ my @files;
+ push( @files, $dsc );
+ if (!open( F, "<$main::cwd/$dsc" )) {
+ print PLOG "Can't open $main::cwd/$dsc: $!\n";
+ return 0;
+ }
+ my $dsctext;
+ { local($/); $dsctext = <F>; }
+ close( F );
+ my $files;
+ $dsctext =~ /^Files:\s*\n((\s+.*\s*\n)+)/mi and $files = $1;
+ push(@files, map { (split( /\s+/, $_ ))[3] } split( "\n", $files ));
+
+ my $file;
+ foreach $file (@files) {
+ system ("cp", "$main::cwd/$file", "$file");
+ }
+ # exec replaces this child; the unlink below is unreachable unless
+ # exec itself fails.
+ exec "$conf::sudo", "/usr/sbin/chroot", "$main::cwd/$main::chroot_dir",
+ "$conf::su", $main::username, "-s", "/bin/sh", "-c",
+ "cd /build/$main::username && $conf::dpkg_source -sn -x $dsc 2>&1";
+ unlink @files;
+ }
+ $main::sub_task = "dpkg-source";
+
+ # Scrape the extraction directory name out of dpkg-source's output.
+ while( <PIPE> ) {
+ print PLOG $_;
+ $dir = $1 if /^dpkg-source: (?:info: )?extracting \S+ in (\S+)/;
+ $main::pkg_fail_stage = "unpack-check"
+ if /^dpkg-source: error: file.*instead of expected/;
+ }
+ close( PIPE );
+ undef $main::sub_pid;
+ if ($?) {
+ print PLOG "FAILED [dpkg-source died]\n";
+
+ system ("rm -fr $tmpunpackdir") if -d $tmpunpackdir;
+ return 0;
+ }
+ if (!$dir) {
+ print PLOG "Couldn't find directory of $dsc in dpkg-source output\n";
+ system ("rm -fr $tmpunpackdir") if -d $tmpunpackdir;
+ return 0;
+ }
+ $dir = "$main::chroot_build_dir$dir";
+
+ if (system( "chmod -R g-s,go+rX $dir" ) != 0) {
+ print PLOG "chmod -R g-s,go+rX $dir failed.\n";
+ return 0;
+ }
+
+ # Record the .dsc build dependencies inside the tree so later stages
+ # (and reruns) can see what was used.
+ if (@main::have_dsc_build_deps && !defined $main::build_source) {
+ my ($d, $di, $c, $ci) = @main::have_dsc_build_deps;
+ open( F, ">$dir/debian/.sbuild-build-deps" );
+ print F "Package: $pkg\n";
+ print F "Build-Depends: $d\n" if $d;
+ print F "Build-Depends-Indep: $di\n" if $di;
+ print F "Build-Conflicts: $c\n" if $c;
+ print F "Build-Conflicts-Indep: $ci\n" if $ci;
+ close( F );
+ }
+ }
+ else {
+ # A tree is already unpacked: don't re-patch it, but make sure it is
+ # really the version we were asked to build.
+ $dir = "$main::chroot_build_dir$dsc";
+ $do_apply_patches = 0;
+
+ $main::pkg_fail_stage = "check-unpacked-version";
+ # check if the unpacked tree is really the version we need
+ $main::sub_pid = open( PIPE, "-|" );
+ if (!defined $main::sub_pid) {
+ print PLOG "Can't spawn dpkg-parsechangelog: $!\n";
+ return 0;
+ }
+ if ($main::sub_pid == 0) {
+ setpgrp( 0, $$ );
+ chdir( $dir );
+ exec "dpkg-parsechangelog 2>&1";
+ }
+ $main::sub_task = "dpkg-parsechangelog";
+
+ my $clog = "";
+ while( <PIPE> ) {
+ $clog .= $_;
+ }
+ close( PIPE );
+ undef $main::sub_pid;
+ if ($?) {
+ print PLOG "FAILED [dpkg-parsechangelog died]\n";
+ return 0;
+ }
+ if ($clog !~ /^Version:\s*(.+)\s*$/mi) {
+ print PLOG "dpkg-parsechangelog didn't print Version:\n";
+ return 0;
+ }
+ my $tree_version = $1;
+ # If the binNMU changelog hack already ran on this tree, compare
+ # against the binNMU-mangled version instead.
+ my $cmp_version = ($main::binNMU && -f "$dir/debian/.sbuild-binNMU-done") ?
+ binNMU_version($version) : $version;
+ if ($tree_version ne $cmp_version) {
+ print PLOG "The unpacked source tree $dir is version ".
+ "$tree_version, not wanted $cmp_version!\n";
+ return 0;
+ }
+ }
+
+ if (!chdir( $dir )) {
+ print PLOG "Couldn't cd to $dir: $!\n";
+ system ("rm -fr $tmpunpackdir") if -d $tmpunpackdir;
+ return 0;
+ }
+
+ # Rough disk-space check: require free space of at least twice the
+ # unpacked source size before attempting the build.
+ $main::pkg_fail_stage = "check-space";
+ my $current_usage = `/usr/bin/du -s .`;
+ $current_usage =~ /^(\d+)/;
+ $current_usage = $1;
+ if ($current_usage) {
+ my $free = df( "." );
+ if ($free < 2*$current_usage) {
+ print PLOG "Disk space is propably not enough for building.\n".
+ "(Source needs $current_usage KB, free are $free KB.)\n";
+ print PLOG "Purging $dir\n";
+ chdir( $main::cwd );
+ system "$conf::sudo rm -rf $dir";
+ return 0;
+ }
+ }
+
+ # For binary NMUs, prepend a synthetic changelog entry with the binNMU
+ # version; the .sbuild-binNMU-done marker keeps this idempotent.
+ $main::pkg_fail_stage = "hack-binNMU";
+ if ($main::binNMU && ! -f "debian/.sbuild-binNMU-done") {
+ if (open( F, "<debian/changelog" )) {
+ my($firstline, $text);
+ $firstline = <F> while $firstline =~ /^$/;
+ { local($/); undef $/; $text = <F>; }
+ close( F );
+ $firstline =~ /^(\S+)\s+\((\S+)\)\s+([^;]+)\s*;\s*urgency=(\S+)\s*$/;
+ my ($name, $version, $dists, $urgent) = ($1, $2, $3, $4);
+ my $NMUversion = binNMU_version($version);
+ chomp( my $date = `822-date` );
+ if (!open( F, ">debian/changelog" )) {
+ print PLOG "Can't open debian/changelog for binNMU hack: $!\n";
+ chdir( $main::cwd );
+ return 0;
+ }
+ $dists = $main::distribution;
+ print F "$name ($NMUversion) $dists; urgency=low\n\n";
+ print F " * Binary-only non-maintainer upload for $main::arch; ",
+ "no source changes.\n";
+ print F " * ", join( " ", split( "\n", $main::binNMU )), "\n\n";
+ print F " -- $conf::maintainer_name $date\n\n";
+
+ print F $firstline, $text;
+ close( F );
+ system "touch debian/.sbuild-binNMU-done";
+ print PLOG "*** Created changelog entry for bin-NMU version $NMUversion\n";
+ }
+ else {
+ print PLOG "Can't open debian/changelog -- no binNMU hack!\n";
+ }
+ }
+
+ if ($do_apply_patches) {
+ if (!apply_patches( $pkg )) {
+ chdir( $main::cwd );
+ return 0;
+ }
+ }
+
+ # A leftover debian/files would confuse dpkg-genchanges; warn loudly
+ # (boxed in the log) and remove it.
+ if (-f "debian/files") {
+ local( *FILES );
+ my @lines;
+ open( FILES, "<debian/files" );
+ chomp( @lines = <FILES> );
+ close( FILES );
+ @lines = map { my $ind = 68-length($_);
+ $ind = 0 if $ind < 0;
+ "| $_".(" " x $ind)." |\n"; } @lines;
+
+ print PLOG <<"EOF";
+
++----------------------------------------------------------------------+
+| sbuild Warning: |
+| --------------- |
+| After unpacking, there exists a file debian/files with the contents: |
+| |
+EOF
+ print PLOG @lines;
+ print PLOG <<"EOF";
+| |
+| This should be reported as a bug. |
+| The file has been removed to avoid dpkg-genchanges errors. |
++----------------------------------------------------------------------+
+
+EOF
+ unlink "debian/files";
+ }
+
+ # Drop build metadata into the chroot for tools that inspect what is
+ # currently being built.
+ open CURRENT, ">$main::cwd/$main::chroot_dir/CurrentlyBuilding" or die "$main::cwd/$main::chroot_dir/CurrentlyBuilding open failed";
+ # Package: must be first
+ print CURRENT "Package: $pkg\nComponent: $main::component\n";
+ print CURRENT "Suite: $main::distribution\n" if $main::distribution;
+ print CURRENT "Purpose: $main::purpose\n" if $main::purpose;
+ print CURRENT "Build-Debug-Symbols: yes\n" if $main::build_debug_symbols;
+ close CURRENT;
+
+ # Fork the actual dpkg-buildpackage run (in the chroot when configured).
+ $main::build_start_time = time;
+ $main::pkg_fail_stage = "build";
+ $main::sub_pid = open( PIPE, "-|" );
+ if (!defined $main::sub_pid) {
+ print PLOG "Can't spawn dpkg-buildpackage: $!\n";
+ chdir( $main::cwd );
+ return 0;
+ }
+ if ($main::sub_pid == 0) {
+ setpgrp( 0, $$ );
+ my $binopt = $main::build_source ? "" :
+ $main::build_arch_all ? "-b" : "-B";
+ my $env_cmnd = $conf::build_env_cmnd;
+ $env_cmnd = $conf::build_env_cmnd{$pkg} if $conf::build_env_cmnd{$pkg};
+ if ($main::chroot_dir) {
+ my $bdir = $dir;
+ $bdir =~ s/^\Q$main::chroot_dir\E//;
+ if (-f "$main::chroot_dir/etc/ld.so.conf" &&
+ ! -r "$main::chroot_dir/etc/ld.so.conf") {
+ system "$conf::sudo chmod a+r $main::chroot_dir/etc/ld.so.conf";
+ print PLOG "ld.so.conf was not readable! Fixed.\n";
+ }
+ exec "$conf::sudo", "/usr/sbin/chroot", "$main::cwd/$main::chroot_dir",
+ "$conf::su", $main::username, "-s", "/bin/sh", "-c",
+ "cd $bdir && PATH=$conf::path ".
+ (defined($main::nr_processors) ?
+ "DEB_BUILD_OPTIONS=\"parallel=".$main::nr_processors."\" " : "").
+ (defined($main::ld_library_path) ?
+ "LD_LIBRARY_PATH=".$main::ld_library_path." " : "").
+ "exec $env_cmnd dpkg-buildpackage $conf::pgp_options ".
+ "$binopt -m'$conf::maintainer_name' -r$conf::fakeroot 2>&1";
+ }
+ else {
+ if (-f "/etc/ld.so.conf" && ! -r "/etc/ld.so.conf") {
+ system "$conf::sudo chmod a+r /etc/ld.so.conf";
+ print PLOG "ld.so.conf was not readable! Fixed.\n";
+ }
+ exec "$env_cmnd dpkg-buildpackage $conf::pgp_options $binopt ".
+ "-m'$conf::maintainer_name' -r$conf::fakeroot 2>&1";
+ }
+ }
+ $main::sub_task = "dpkg-buildpackage";
+
+ # We must send the signal as root, because some subprocesses of
+ # dpkg-buildpackage could run as root. So we have to use a shell
+ # command to send the signal... but /bin/kill can't send to
+ # process groups :-( So start another Perl :-)
+ my $timeout = $conf::individual_stalled_pkg_timeout{$pkg} ||
+ $conf::stalled_pkg_timeout;
+ $timeout *= 60;
+ my $timed_out = 0;
+ my(@timeout_times, @timeout_sigs, $last_time);
+ # Stall watchdog: first alarm sends SIGTERM to the build's process
+ # group, subsequent ones SIGKILL.
+ $SIG{'ALRM'} = sub {
+ my $signal = ($timed_out > 0) ? 9 : 15;
+ system "$conf::sudo perl -e 'kill( -$signal, $main::sub_pid )'";
+ $timeout_times[$timed_out] = time - $last_time;
+ $timeout_sigs[$timed_out] = $signal;
+ $timed_out++;
+ $timeout = 5*60; # only wait 5 minutes until next signal
+ };
+
+ # Re-arm the alarm on every line of output; silence => stalled build.
+ alarm( $timeout );
+ while( <PIPE> ) {
+ alarm( $timeout );
+ $last_time = time;
+ print PLOG $_;
+ }
+ close( PIPE );
+ undef $main::sub_pid;
+ alarm( 0 );
+ $rv = $?;
+
+ my $i;
+ for( $i = 0; $i < $timed_out; ++$i ) {
+ print PLOG "Build killed with signal ", $timeout_sigs[$i],
+ " after ", int($timeout_times[$i]/60),
+ " minutes of inactivity\n";
+ }
+ $main::pkg_end_time = time;
+ my $date = `date +%Y%m%d-%H%M`;
+ print PLOG "*"x78, "\n";
+ print PLOG "Build finished at $date";
+ chdir( $main::cwd );
+
+ my @space_files = ("$dir");
+ # Optional exit hook gets the build log on stdin; its output is
+ # appended to the log and its exit status folded into $rv.
+ if (!$main::nolog and defined $conf::exit_hook and open TMP, '-|', "$conf::exit_hook <$main::pkg_logfile") {
+ local $/ = undef;
+ my $log = <TMP>;
+ close TMP;
+ $rv |= $?;
+ print PLOG $log;
+ }
+ if ($rv) {
+ print PLOG "FAILED [dpkg-buildpackage died]\n";
+ }
+ else {
+ # Success path: publish translations tarball (new arch-qualified name
+ # preferred over the old one) for Rosetta import.
+ my $trans_oldfmt="$main::chroot_build_dir${pkg}_${version}_translations.tar.gz";
+ my $trans_newfmt="$main::chroot_build_dir${pkg}_${version}_${main::arch}_translations.tar.gz";
+ my $translations="";
+ if (-r $trans_newfmt) {
+ $translations = $trans_newfmt;
+ } elsif (-r $trans_oldfmt) {
+ $translations = $trans_oldfmt;
+ }
+ if ($translations) {
+ print PLOG "Publishing $translations for rosetta.\n";
+ my $date = strftime '%Y%m%d',gmtime;
+ my $target = "$main::HOME/public_html/translations/$date/";
+ system "mkdir -p $target";
+ if (system("cp",$translations,$target) != 0) {
+ print PLOG "ERROR: Could not move $translations to $target\n";
+ } else {
+ open TRANS, ">>$target/translations.txt";
+ print TRANS "File: " . basename(${translations}) . "\n".
+ "Distribution: ${main::archive}\n".
+ "Release: ${main::distribution}\n".
+ "Component: ${main::component}\n".
+ "Source: ${pkg}\n".
+ "Version: ${version}\n\n";
+ close TRANS;
+ system("chmod -R go+rX $main::HOME/public_html/translations");
+ }
+ }
+
+ # Bundle any debug debs (*.ddeb) into a tarball and publish them
+ # alongside an index entry.
+ my $ddebtar = "";
+ my $ddebstring = "";
+ # NOTE(review): glob() in scalar context here is an iterator, not a
+ # list -- works as an existence probe but is fragile; verify.
+ if (-r glob("$main::chroot_build_dir/*.ddeb")) {
+ my @ddeblist = glob("$main::chroot_build_dir/*.ddeb");
+ $ddebtar="${pkg}_${version}_${main::arch}_ddebs.tar";
+ while (@ddeblist) {
+ $ddebstring .= basename(@ddeblist[0]) . " ";
+ shift @ddeblist;
+ }
+ }
+ if ($ddebstring) {
+ print PLOG "Publishing debug debs.\n";
+ my $date = strftime '%Y%m%d',gmtime;
+ my $target = "$main::HOME/public_html/ddebs/$date/";
+ system "mkdir -p $target";
+ if (system("tar -C $main::chroot_build_dir -chf $target/$ddebtar $ddebstring") != 0) {
+ print PLOG "ERROR: Could not create $ddebtar in $target\n";
+ } else {
+ open TRANS, ">>$target/ddebs.txt";
+ print TRANS "File: " . basename(${ddebtar}) . "\n".
+ "Distribution: ${main::archive}\n".
+ "Release: ${main::distribution}\n".
+ "Component: ${main::component}\n".
+ "Source: ${pkg}\n".
+ "Version: ${version}\n\n";
+ close TRANS;
+ system("chmod -R go+rX $main::HOME/public_html/ddebs");
+ }
+ }
+
+ # Dump info/contents of every deb produced for this arch (plus arch
+ # "all" when building arch-all) into the log, then move all build
+ # products out of the chroot build dir.
+ if (-r "$dir/debian/files") {
+ my @debs;
+ my @files;
+ open( F, "<$dir/debian/files" );
+ while( <F> ) {
+ my $f = (split( /\s+/, $_ ))[0];
+ push( @files, "$main::chroot_build_dir$f" );
+ if ($main::build_arch_all) {
+ next if ($f !~ /$main::arch\.[\w\d.-]*$/ && $f !~ /all\.[\w\d.-]*$/);
+ } else {
+ next if ($f !~ /$main::arch\.[\w\d.-]*$/);
+ }
+ push( @debs, "$main::chroot_build_dir$f" );
+ push( @space_files, $f );
+ }
+ close( F );
+ my @debs2 = @debs;
+ foreach (@debs) {
+ print PLOG "\n$_:\n";
+ if (!open( PIPE, "dpkg --info $_ 2>&1 |" )) {
+ print PLOG "Can't spawn dpkg: $! -- can't dump infos\n";
+ }
+ else {
+ print PLOG $_ while( <PIPE> );
+ close( PIPE );
+ }
+ }
+ foreach (@debs2) {
+ print PLOG "\n$_:\n";
+ if (!open( PIPE, "dpkg --contents $_ 2>&1 |" )) {
+ print PLOG "Can't spawn dpkg: $! -- can't dump infos\n";
+ }
+ else {
+ print PLOG $_ while( <PIPE> );
+ close( PIPE );
+ }
+ }
+ if ($main::chroot_build_dir) {
+ foreach (@files) {
+ system "mv", $_, "."
+ and print PLOG "ERROR: Could not move $_ to .\n";
+ }
+ }
+ }
+
+ if (-r $translations) {
+ system("rm",$translations);
+ }
+
+ # Rewrite the .changes: force our Distribution: and copy it out of the
+ # chroot, wrapping over-long lines for the log only.
+ $changes = "${pkg}_".
+ ($main::binNMU ? binNMU_version($sversion) : $sversion).
+ "_$main::arch.changes";
+ if (-r "$main::chroot_build_dir$changes") {
+ my(@do_dists, @saved_dists);
+ print PLOG "\n$changes:\n";
+ open( F, "<$main::chroot_build_dir$changes" );
+ if (open( F2, ">$changes.new" )) {
+ while( <F> ) {
+ if (/^Distribution:\s*(.*)\s*$/) {
+ print PLOG "Distribution: $main::distribution\n";
+ print F2 "Distribution: $main::distribution\n";
+ }
+ else {
+ print F2 $_;
+ # Fold lines longer than 989 chars at a space so the log
+ # stays readable; the real file keeps the original line.
+ while (length $_ > 989)
+ {
+ my $index = rindex($_,' ',989);
+ print PLOG substr ($_,0,$index) . "\n";
+ $_ = ' ' . substr ($_,$index+1);
+ }
+ print PLOG $_;
+ }
+ }
+ close( F2 );
+ rename( "$changes.new", "$changes" )
+ or print PLOG "$changes.new could not be renamed ".
+ "to $changes: $!\n";
+ unlink( "$main::chroot_build_dir$changes" )
+ if $main::chroot_build_dir;
+ }
+ else {
+ print PLOG "Cannot create $changes.new: $!\n";
+ print PLOG "Distribution field may be wrong!!!\n";
+ if ($main::chroot_build_dir) {
+ system "mv", "$main::chroot_build_dir$changes", "."
+ and print PLOG "ERROR: Could not move $_ to .\n";
+ }
+ }
+ close( F );
+ print PLOG "\n";
+ }
+ else {
+ print PLOG "Can't find $changes -- can't dump infos\n";
+ }
+
+ print PLOG "*"x78, "\n";
+ print PLOG "Built successfully\n";
+ }
+
+ check_watches();
+ check_space( @space_files );
+
+ # Purge the build tree per configuration ("always", or "successful"
+ # only when the build passed).
+ if ($conf::purge_build_directory eq "always" ||
+ ($conf::purge_build_directory eq "successful" && $rv == 0)) {
+ print PLOG "Purging $dir\n";
+ system "$conf::sudo rm -rf $dir";
+ }
+
+ print PLOG "-"x78, "\n";
+ return $rv == 0 ? 1 : 0;
+}
+
+# apply_patches($pkg): apply the inline patches attached to any special
+# ("*"-prefixed) dependencies of $pkg, plus the global patches, by piping
+# the patch text into "patch -p1" in the current directory.
+# Returns 1 on success, 0 on failure.  Patches whose name starts with "**"
+# are best-effort: a failing patch (or false condition) is not fatal and
+# is not even reported.
+sub apply_patches {
+ my $pkg = shift;
+ my $name;
+
+ $main::pkg_fail_stage = "apply-patch";
+ foreach $name ((map { $_->{'Package'} } @{$main::deps{$pkg}}),
+ @main::global_patches) {
+ if ($name =~ /^\*/ && exists $main::specials{$name}->{'patch'}) {
+ # An optional 'patchcond' script gates whether the patch applies.
+ if (exists $main::specials{$name}->{'patchcond'}) {
+ print "Testing condition for $name patch:\n"
+ if $main::debug;
+ if (run_script("+e",$main::specials{$name}->{'patchcond'})!=0){
+ print PLOG "Condition for $name patch not true -- ",
+ "not applying\n" if $name !~ /^\*\*/;
+ next;
+ }
+ print PLOG "Condition for $name patch ok\n";
+ }
+ print PLOG "Applying $name patch\n";
+ # Fork a child we write to ("|-"); the child execs patch with its
+ # stdout/stderr redirected into the build log.
+ $main::sub_pid = open( PIPE, "|-" );
+ if (!defined $main::sub_pid) {
+ print PLOG "Can't spawn patch: $! -- can't patch\n";
+ return 0;
+ }
+ if ($main::sub_pid == 0) {
+ setpgrp( 0, $$ );
+ open( STDOUT, ">&PLOG" );
+ open( STDERR, ">&PLOG" );
+ exec "patch --batch --quiet -p1 -E -N --no-backup-if-mismatch";
+ }
+ $main::sub_task = "patch";
+
+ # Feed the stored patch text to the child's stdin.
+ print PIPE $main::specials{$name}->{'patch'};
+ close( PIPE );
+ undef $main::sub_pid;
+ if ($name !~ /^\*\*/ && $?) {
+ print PLOG "FAILED [patch died]\n";
+ return 0;
+ }
+ }
+ }
+ return 1;
+}
+
+# analyze_fail_stage($pkgv): after a failed build, decide whether the
+# failure happened in a stage that indicates an environment problem rather
++# than a package problem (find-dsc, fetch-src, unpack-check, check-space,
+# install-deps-env).  If so, and auto-giveback is enabled, mark the package
+# "given-back" and tell wanna-build (possibly over ssh) to give it back.
+sub analyze_fail_stage {
+ my $pkgv = shift;
+
+ return if $main::pkg_status ne "failed";
+ return if !$main::auto_giveback;
+ if (isin( $main::pkg_fail_stage,
+ qw(find-dsc fetch-src unpack-check check-space install-deps-env))) {
+ $main::pkg_status = "given-back";
+ print PLOG "Giving back package $pkgv after failure in ".
+ "$main::pkg_fail_stage stage.\n";
+ chdir( $main::cwd );
+ # Build the wanna-build command line, optionally wrapped in ssh with
+ # a shared control socket.
+ my $cmd = "";
+ $cmd = "ssh -l$main::auto_giveback_user $main::auto_giveback_host "
+ if $main::auto_giveback_host;
+ $cmd .= "-S $main::auto_giveback_socket "
+ if ($main::auto_giveback_socket and -S "$main::auto_giveback_socket");
+ $cmd .= "wanna-build --give-back --no-down-propagation ".
+ "--dist=$main::distribution";
+ $cmd .= " --database=$main::database" if $main::database;
+ $cmd .= " --user=$main::auto_giveback_wb_user "
+ if $main::auto_giveback_wb_user;
+ $cmd .= " $pkgv";
+ system $cmd;
+ if ($?) {
+ print PLOG "wanna-build failed with status $?\n";
+ }
+ else {
+ # Record the giveback and bump the stats counter under a lock.
+ add_givenback( $pkgv, time );
+ if ($main::stats_dir) {
+ local( *F );
+ lock_file( "$main::stats_dir" );
+ open( F, ">>$main::stats_dir/give-back" );
+ print F "1\n";
+ close( F );
+ unlock_file( "$main::stats_dir" );
+ }
+ }
+ }
+}
+
+# Delete every file named in the argument list; in debug mode report each
+# removal on stdout.
+sub remove_files {
+ for my $file (@_) {
+ unlink $file;
+ print "Removed $file\n" if $main::debug;
+ }
+}
+
+
+# install_deps($pkg): satisfy $pkg's source dependencies in the chroot.
+# Filters deps into positive/negative/special sets, dry-runs apt to learn
+# what would change, waits out conflicting srcdep locks from concurrent
+# builds, then installs the positive deps, removes the negative ones,
+# re-checks, and runs any special-dependency unpack/pre scripts.
+# Finally marks everything just installed for purge-on-removal.
+# Returns 1 on success, 0 on failure; always releases $main::ilock_file.
+sub install_deps {
+ my $pkg = shift;
+ my( @positive, @negative, @special, @instd, @rmvd );
+
+ if (!exists $main::deps{$pkg}) {
+ prepare_watches( [] );
+ return 1;
+ }
+
+ my $dep = $main::deps{$pkg};
+ if ($main::debug) {
+ print "Source dependencies of $pkg: ", format_deps(@$dep), "\n";
+ }
+
+ # Jumped back to after waiting for a conflicting srcdep lock to clear.
+ repeat:
+ lock_file( "$main::ilock_file", 1 );
+
+ print "Filtering dependencies\n" if $main::debug;
+ if (!filter_dependencies( $dep, \@positive, \@negative, \@special )) {
+ print PLOG "Package installation not possible\n";
+ unlock_file( "$main::ilock_file" );
+ return 0;
+ }
+
+ # Simulate ("-s") first to learn what apt would install/remove.
+ print PLOG "Checking for source dependency conflicts...\n";
+ if (!run_apt( "-s", \@instd, \@rmvd, @positive )) {
+ print PLOG "Test what should be installed failed.\n";
+ unlock_file( "$main::ilock_file" );
+ return 0;
+ }
+ # add negative deps as to be removed for checking srcdep conflicts
+ push( @rmvd, @negative );
+ my @confl;
+ if (@confl = check_srcdep_conflicts( \@instd, \@rmvd, \@special )) {
+ print PLOG "Waiting for job(s) @confl to finish\n";
+
+ unlock_file( "$main::ilock_file" );
+ wait_for_srcdep_conflicts( @confl );
+ goto repeat;
+ }
+
+ write_srcdep_lock_file( $dep, \@special );
+
+ # 'prepre' scripts of special deps run before any installation.
+ foreach my $sp (@special) {
+ next if $sp !~ /^\*/ || !exists $main::specials{$sp}->{'prepre'};
+ print PLOG "Running prepre script for $sp\n";
+ if (run_script( "-e", $main::specials{$sp}->{'prepre'} ) != 0) {
+ print PLOG "prepre script of special dependency $sp failed\n";
+ unlock_file( "$main::ilock_file" );
+ return 0;
+ }
+ }
+
+ print "Installing positive dependencies: @positive\n" if $main::debug;
+ if (!run_apt( "-y", \@instd, \@rmvd, @positive )) {
+ print PLOG "Package installation failed\n";
+ # try to reinstall removed packages
+ print PLOG "Trying to reinstall removed packages:\n";
+ print "Reinstalling removed packages: @rmvd\n" if $main::debug;
+ my (@instd2, @rmvd2);
+ print PLOG "Failed to reinstall removed packages!\n"
+ if !run_apt( "-y", \@instd2, \@rmvd2, @rmvd );
+ print "Installed were: @instd2\n" if $main::debug;
+ print "Removed were: @rmvd2\n" if $main::debug;
+ # remove additional packages
+ print PLOG "Trying to uninstall newly installed packages:\n";
+ uninstall_debs( $main::chroot_dir ? "purge" : "remove", @instd );
+ unlock_file( "$main::ilock_file" );
+ return 0;
+ }
+ set_installed( @instd );
+ set_removed( @rmvd );
+
+ print "Removing negative dependencies: @negative\n" if $main::debug;
+ if (!uninstall_debs( $main::chroot_dir ? "purge" : "remove", @negative )) {
+ print PLOG "Removal of packages failed\n";
+ unlock_file( "$main::ilock_file" );
+ return 0;
+ }
+ set_removed( @negative );
+
+ # Verify that, after the install/remove round, every source dependency
+ # really is satisfied.
+ my $fail = check_dependencies( $dep );
+ if ($fail) {
+ print PLOG "After installing, the following source dependencies are ".
+ "still unsatisfied:\n$fail\n";
+ unlock_file( "$main::ilock_file" );
+ return 0;
+ }
+
+ # Special deps may carry 'unpack' source lists and 'pre' scripts; run
+ # them now and remember what was done so it can be undone later.
+ foreach my $sp (@special) {
+ next if $sp !~ /^\*/ ||
+ (!exists $main::specials{$sp}->{'pre'} &&
+ !exists $main::specials{$sp}->{'post'} &&
+ !exists $main::specials{$sp}->{'unpack'});
+ if (exists $main::specials{$sp}->{'unpack'}) {
+ my $s = $main::specials{$sp}->{'unpack'};
+ $s =~ s/^\s+//mg;
+ $s =~ s/\s+$//mg;
+ my @s = split( /\s+/, $s );
+ my @rem;
+ print PLOG "Unpacking special sources $sp: @s\n";
+ if (!(@rem = unpack_special_source( @s ))) {
+ print PLOG "unpacking of special dependency sources for $sp failed\n";
+ unlock_file( "$main::ilock_file" );
+ return 0;
+ }
+ $main::changes->{'unpacked'}->{$sp} = \@rem;
+ }
+ if (exists $main::specials{$sp}->{'pre'}) {
+ print PLOG "Running pre script for $sp\n";
+ $main::changes->{'specials'}->{$sp} = 1;
+ if (run_script( "-e", $main::specials{$sp}->{'pre'} ) != 0) {
+ print PLOG "pre script of special dependency $sp failed\n";
+ unlock_file( "$main::ilock_file" );
+ return 0;
+ }
+ }
+ }
+
+ # Mark every freshly installed package for purge via dpkg selections,
+ # so a later cleanup removes its config files too.
+ local (*F);
+ if (open( F, "| $conf::sudo /usr/sbin/chroot $main::chroot_dir $conf::dpkg --set-selections")) {
+ foreach my $tpkg (@instd) {
+ print F $tpkg . " purge\n";
+ }
+ close( F );
+ if ($?) {
+ print PLOG "$conf::dpkg --set-selections failed";
+ }
+ }
+
+ unlock_file( "$main::ilock_file" );
+
+ prepare_watches( $dep, @instd );
+ return 1;
+}
+
+# unpack_special_source(@pkgs): for each named source package, download it
+# with "apt-get source -d", then extract it with dpkg-source into the
+# chroot build dir.  Downloaded files are deleted after extraction.
+# Returns the list of extracted directories, or the empty list on failure
+# (after removing any files/dirs created so far).
+sub unpack_special_source {
+ my @s = @_;
+ my (@files, @dirs);
+ local (*PIPE);
+
+ foreach my $s (@s) {
+ my $dsc;
+
+ {
+ # Download the source; scrape the .dsc name (epoch stripped) from
+ # apt-get's "<pkg> <version> (dsc)" output line.
+ if (!open( PIPE, "$conf::apt_get $main::chroot_apt_options ".
+ "--only-source -q -d source $s 2>&1 </dev/null |" )) {
+ print PLOG "Can't open pipe to apt-get: $!\n";
+ goto failed;
+ }
+ while( <PIPE> ) {
+ $dsc = "$1_$2.dsc" if /(\S+) (?:[^:]+:)?(\S+) \(dsc\)/;
+ print PLOG $_;
+ }
+ close( PIPE );
+ if ($?) {
+ print PLOG "Apt-get of special unpack sources failed\n";
+ goto failed;
+ }
+ push( @files, $dsc );
+ # Remember every file from the .dsc Files: list for later cleanup.
+ if (!open( F, "<$dsc" )) {
+ print PLOG "Can't open $dsc: $!\n";
+ goto failed;
+ }
+ my $dsctext;
+ { local($/); $dsctext = <F>; }
+ close( F );
+ my $files;
+ $dsctext =~ /^Files:\s*\n((\s+.*\s*\n)+)/mi and $files = $1;
+ push(@files, map { (split( /\s+/, $_ ))[3] } split( "\n", $files ));
+ }
+
+ # Extract in a child process (inside the chroot build dir) and scrape
+ # the target directory from dpkg-source's output.
+ my $pid = open( PIPE, "-|" );
+ if (!defined $pid) {
+ print PLOG "Can't spawn dpkg-source: $! -- special unpack failed\n";
+ goto failed;
+ }
+ if ($pid == 0) {
+ setpgrp( 0, $$ );
+ if ($main::chroot_build_dir && !chdir( $main::chroot_build_dir )) {
+ print PLOG "Couldn't cd to $main::chroot_build_dir: $! -- special unpack failed\n";
+ exit 1;
+ }
+ exec "$conf::dpkg_source -sn -x $main::cwd/$dsc 2>&1";
+ }
+ my $dir;
+ while( <PIPE> ) {
+ print PLOG $_;
+ $dir = $1 if /^dpkg-source: (?:info: )?extracting \S+ in (\S+)/;
+ }
+ close( PIPE );
+ if ($?) {
+ print PLOG "dpkg-source failure -- special unpack failed\n";
+ goto failed;
+ }
+ push( @dirs, "$main::chroot_build_dir$dir" );
+ unlink( @files );
+ }
+
+ return @dirs;
+
+ failed:
+ # Shared error exit: remove any downloaded files and extracted trees.
+ unlink( @files );
+ system( "rm", "-rf", @dirs );
+ return ();
+}
+
+# wait_for_srcdep_conflicts(@locks): block until every conflicting srcdep
+# lock (named "PID-SEQ") is gone.  Polls every srcdep_lock_wait minutes;
+# a lock whose owning process no longer exists (kill 0 fails with ESRCH)
+# is treated as stale and removed.
+sub wait_for_srcdep_conflicts {
+ my @confl = @_;
+
+ for(;;) {
+ sleep( $conf::srcdep_lock_wait*60 );
+ my $allgone = 1;
+ for (@confl) {
+ /^(\d+)-(\d+)$/;
+ my $pid = $1;
+ if (-f "$conf::srcdep_lock_dir/$_") {
+ # kill(0) probes existence without signalling; ESRCH => stale.
+ if (kill( 0, $pid ) == 0 && $! == ESRCH) {
+ print PLOG "Ignoring stale src-dep lock $_\n";
+ unlink( "$conf::srcdep_lock_dir/$_" ) or
+ print PLOG "Cannot remove $conf::srcdep_lock_dir/$_: $!\n";
+ }
+ else {
+ $allgone = 0;
+ last;
+ }
+ }
+ }
+ last if $allgone;
+ }
+}
+
+# uninstall_deps(): undo the dependency changes recorded in $main::changes:
+# reinstall everything this build removed, then purge everything it
+# installed.  Best-effort -- failures are logged, not fatal.  Runs under
+# the install lock.
+sub uninstall_deps {
+ my( @pkgs, @instd, @rmvd );
+
+ lock_file( "$main::ilock_file", 1 );
+
+ @pkgs = keys %{$main::changes->{'removed'}};
+ print "Reinstalling removed packages: @pkgs\n" if $main::debug;
+ print PLOG "Failed to reinstall removed packages!\n"
+ if !run_apt( "-y", \@instd, \@rmvd, @pkgs );
+ print "Installed were: @instd\n" if $main::debug;
+ print "Removed were: @rmvd\n" if $main::debug;
+ unset_removed( @instd );
+ unset_installed( @rmvd );
+
+ @pkgs = keys %{$main::changes->{'installed'}};
+ print "Removing installed packages: @pkgs\n" if $main::debug;
+ print PLOG "Failed to remove installed packages!\n"
+ if !uninstall_debs( "purge", @pkgs );
+ unset_installed( @pkgs );
+
+ unlock_file( "$main::ilock_file" );
+}
+
+# uninstall_debs($mode, @pkgs): remove @pkgs in the chroot with
+# "dpkg --$mode" ($mode is "purge" or "remove").  If dpkg reports its
+# status database as locked, sleep two minutes and retry indefinitely.
+# Returns true when dpkg exited successfully (or nothing was to remove).
+sub uninstall_debs {
+ my $mode = shift;
+ local (*PIPE);
+
+ return 1 if !@_;
+ print "Uninstalling packages: @_\n" if $main::debug;
+ print PLOG " $conf::sudo dpkg --$mode @_\n";
+ repeat:
+ my $output;
+ if (!open( PIPE, "$conf::sudo /usr/sbin/chroot $main::chroot_dir $conf::dpkg --$mode @_ 2>&1 </dev/null |")) {
+ print PLOG "Can't open pipe to dpkg: $!\n";
+ return 0;
+ }
+ # Capture the output both for the log and for the lock check below.
+ while ( <PIPE> ) {
+ $output .= $_;
+ print PLOG $_;
+ }
+ close( PIPE );
+
+ if ($output =~ /status database area is locked/mi) {
+ print PLOG "Another dpkg is running -- retrying later\n";
+ $output = "";
+ sleep( 2*60 );
+ goto repeat;
+ }
+ print PLOG "dpkg run to remove packages (@_) failed!\n" if $?;
+ return $? == 0;
+}
+
+# undo_specials(): revert special-dependency side effects recorded in
+# $main::changes -- run each special's 'post' script and delete any source
+# trees its 'unpack' step extracted.  Entries are removed from the change
+# record as they are undone; script failures are logged but not fatal.
+sub undo_specials {
+ my $sp;
+
+ print "Running post scripts of special dependencies:\n" if $main::debug;
+ foreach $sp (keys %{$main::changes->{'specials'}}) {
+ print PLOG "Running post script for $sp\n";
+ if (run_script( "-e", $main::specials{$sp}->{'post'} ) != 0) {
+ print PLOG "post script of special dependency $sp failed\n";
+ }
+ delete $main::changes->{'specials'}->{$sp};
+ }
+ foreach $sp (keys %{$main::changes->{'unpacked'}}) {
+ my @dirs = @{$main::changes->{'unpacked'}->{$sp}};
+ print PLOG "Removing special unpacked sources for $sp: @dirs\n";
+ system "rm", "-rf", @dirs;
+ delete $main::changes->{'unpacked'}->{$sp};
+ }
+}
+
+
+# run_apt($mode, \@inst_ret, \@rem_ret, @to_install): run
+# "apt-get --purge $mode install @to_install" in the chroot.  $mode is
+# "-s" (simulate) or "-y" (really install).  On return, @$inst_ret and
+# @$rem_ret hold the package names apt reported as to-be-installed and
+# to-be-removed.  Handles several recoverable apt failures by retrying
+# (stale Packages files, virtual packages, concurrent apt/dpkg locks) and
+# flags broken-environment errors via $main::pkg_fail_stage.
+# Returns true on success ("-s" always counts as success).
+sub run_apt {
+ my $mode = shift;
+ my $inst_ret = shift;
+ my $rem_ret = shift;
+ my @to_install = @_;
+ my( $msgs, $status, $pkgs, $rpkgs );
+ local (*PIPE);
+ local (%ENV) = %ENV; # make local environment
+ # hardwire frontend for debconf to non-interactive
+ $ENV{'DEBIAN_FRONTEND'} = "noninteractive";
+
+ @$inst_ret = ();
+ @$rem_ret = ();
+ return 1 if !@to_install;
+ repeat:
+ print PLOG " $conf::sudo $conf::apt_get --purge $main::chroot_apt_op -q $mode install @to_install\n"
+ if $mode ne "-s";
+ $msgs = "";
+ # redirection of stdin from /dev/null so that conffile question are
+ # treated as if RETURN was pressed.
+ # dpkg since 1.4.1.18 issues an error on the conffile question if it reads
+ # EOF -- hardwire the new --force-confold option to avoid the questions.
+ if (!open( PIPE, "$conf::sudo /usr/sbin/chroot ".
+ "$main::chroot_dir $conf::apt_get --purge ".
+ ($main::new_dpkg ? "-o DPkg::Options::=--force-confold " : "").
+ "-q $mode install @to_install 2>&1 </dev/null |" )) {
+ print PLOG "Can't open pipe to apt-get: $!\n";
+ return 0;
+ }
+ while( <PIPE> ) {
+ $msgs .= $_;
+ print PLOG $_ if $mode ne "-s" || $main::debug;
+ }
+ close( PIPE );
+ $status = $?;
+
+ # Stale package lists: run "apt-get update" and retry the install.
+ if ($status != 0 && $msgs =~ /^E: Packages file \S+ (has changed|is out of sync)/mi) {
+ print PLOG "$conf::sudo $conf::apt_get $main::chroot_apt_op -q update\n";
+ if (!open( PIPE, "$conf::sudo /usr/sbin/chroot $main::chroot_dir $conf::apt_get -q update 2>&1 |" )) {
+ print PLOG "Can't open pipe to apt-get: $!\n";
+ return 0;
+ }
+ $msgs = "";
+ while( <PIPE> ) {
+ $msgs .= $_;
+ print PLOG $_;
+ }
+ close( PIPE );
+ print PLOG "apt-get update failed\n" if $?;
+ $msgs = "";
+ goto repeat;
+ }
+
+ # Virtual package: pick a concrete provider (single provider, the one
+ # configured in sbuildrc, or failing that the first listed) and retry.
+ if ($status != 0 && $msgs =~ /^Package (\S+) is a virtual package provided by:\n((^\s.*\n)*)/mi) {
+ my $to_replace = $1;
+ my @providers;
+ foreach (split( "\n", $2 )) {
+ s/^\s*//;
+ push( @providers, (split( /\s+/, $_ ))[0] );
+ }
+ print PLOG "$to_replace is a virtual package provided by: @providers\n";
+ my $selected;
+ if (@providers == 1) {
+ $selected = $providers[0];
+ print PLOG "Using $selected (only possibility)\n";
+ }
+ elsif (exists $conf::alternatives{$to_replace}) {
+ $selected = $conf::alternatives{$to_replace};
+ print PLOG "Using $selected (selected in sbuildrc)\n";
+ }
+ else {
+ $selected = $providers[0];
+ print PLOG "Using $selected (no default, using first one)\n";
+ }
+
+ @to_install = grep { $_ ne $to_replace } @to_install;
+ push( @to_install, $selected );
+
+ goto repeat;
+ }
+
+ # Another apt/dpkg holds the lock: wait and retry.
+ if ($status != 0 && ($msgs =~ /^E: Could( not get lock|n.t lock)/mi ||
+ $msgs =~ /^dpkg: status database area is locked/mi)) {
+ print PLOG "Another apt-get or dpkg is running -- retrying later\n";
+ sleep( 2*60 );
+ goto repeat;
+ }
+
+ # check for errors that are probably caused by something broken in
+ # the build environment, and give back the packages.
+ if ($status != 0 && $mode ne "-s" &&
+ (($msgs =~ /^E: dpkg was interrupted, you must manually run 'dpkg --configure -a' to correct the problem./mi) ||
+ ($msgs =~ /^dpkg: parse error, in file `\/.+\/var\/lib\/dpkg\/(?:available|status)' near line/mi) ||
+ ($msgs =~ /^E: Unmet dependencies. Try 'apt-get -f install' with no packages \(or specify a solution\)\./mi))) {
+ print PLOG "Build environment unusable, giving back\n";
+ $main::pkg_fail_stage = "install-deps-env";
+ }
+
+ if ($status != 0 && $mode ne "-s" &&
+ (($msgs =~ /^E: Unable to fetch some archives, maybe run apt-get update or try with/mi))) {
+ print PLOG "Unable to fetch build-depends\n";
+ $main::pkg_fail_stage = "install-deps-env";
+ }
+
+ # Parse the indented package lists out of apt's summary sections.
+ $pkgs = $rpkgs = "";
+ if ($msgs =~ /NEW packages will be installed:\n((^[ ].*\n)*)/mi) {
+ ($pkgs = $1) =~ s/^[ ]*((.|\n)*)\s*$/$1/m;
+ $pkgs =~ s/\*//g;
+ }
+ if ($msgs =~ /packages will be REMOVED:\n((^[ ].*\n)*)/mi) {
+ ($rpkgs = $1) =~ s/^[ ]*((.|\n)*)\s*$/$1/m;
+ $rpkgs =~ s/\*//g;
+ }
+ @$inst_ret = split( /\s+/, $pkgs );
+ @$rem_ret = split( /\s+/, $rpkgs );
+
+ print PLOG "apt-get failed.\n" if $status && $mode ne "-s";
+ return $mode eq "-s" || $status == 0;
+}
+
+# Classify the parsed source dependencies against the chroot's dpkg state.
+# Fills three caller-supplied array refs:
+#   @$pos_list     - packages to install or upgrade,
+#   @$neg_list     - negative ("!") deps currently installed, to be removed,
+#   @$special_list - special '*' deps whose optional condition script passed.
+# Returns 1 on success, 0 if a dep could only be met by a downgrade
+# (downgrades are explicitly not implemented).
+sub filter_dependencies {
+ my $dependencies = shift;
+ my $pos_list = shift;
+ my $neg_list = shift;
+ my $special_list = shift;
+ my($dep, $d, $name, %names);
+
+ print PLOG "Checking for already installed source dependencies...\n";
+
+ @$pos_list = @$neg_list = @$special_list = ();
+ # Collect all non-special package names (including alternatives) so a
+ # single dpkg-status pass can cover every dep we will examine below.
+ foreach $d (@$dependencies) {
+ my $name = $d->{'Package'};
+ $names{$name} = 1 if $name !~ /^\*/;
+ foreach (@{$d->{'Alternatives'}}) {
+ my $name = $_->{'Package'};
+ $names{$name} = 1 if $name !~ /^\*/;
+ }
+ }
+ my $status = get_dpkg_status( keys %names );
+
+ foreach $dep (@$dependencies) {
+ $name = $dep->{'Package'};
+ next if !$name;
+ # Special '*' dependency: run its 'condition' script (if any) and
+ # queue it only when the condition holds.
+ if ($name =~ /^\*/) {
+ my $doit = 1;
+ if (exists $main::specials{$name}->{'condition'}) {
+ print "Testing condition for special dependency $name:\n"
+ if $main::debug;
+ if (run_script("+e",$main::specials{$name}->{'condition'})!=0){
+ print "Condition false -> not running scripts\n"
+ if $main::debug;
+ $doit = 0;
+ }
+ }
+ push( @$special_list, $name ) if $doit;
+ next;
+ }
+ my $stat = $status->{$name};
+ # Negative dep: schedule removal when installed and either unversioned
+ # or the installed version matches the conflicting relation.
+ if ($dep->{'Neg'}) {
+ if ($stat->{'Installed'}) {
+ my ($rel, $vers) = ($dep->{'Rel'}, $dep->{'Version'});
+ my $ivers = $stat->{'Version'};
+ if (!$rel || version_cmp( $ivers, $rel, $vers )){
+ print "$name: neg dep, installed, not versioned or ",
+ "version relation satisfied --> remove\n" if $main::debug;
+ print PLOG "$name: installed (negative dependency)";
+ print PLOG " (bad version $ivers $rel $vers)"
+ if $rel;
+ print PLOG "\n";
+ push( @$neg_list, $name );
+ }
+ else {
+ print PLOG "$name: installed (negative dependency)",
+ "(but version ok $ivers $rel $vers)\n";
+ }
+ }
+ else {
+ print "$name: neg dep, not installed\n" if $main::debug;
+ print PLOG "$name: already deinstalled\n";
+ }
+ next;
+ }
+
+ # Positive dep: scan the dep and its alternatives, remembering the
+ # first candidate in each category (installable / upgradeable /
+ # downgradeable) and stopping early once one alternative is satisfied.
+ my $is_satisfied = 0;
+ my $installable = "";
+ my $upgradeable = "";
+ my $downgradeable = "";
+ foreach $d ($dep, @{$dep->{'Alternatives'}}) {
+ my ($name, $rel, $vers) =
+ ($d->{'Package'}, $d->{'Rel'}, $d->{'Version'});
+ my $stat = $status->{$name};
+ if (!$stat->{'Installed'}) {
+ print "$name: pos dep, not installed\n" if $main::debug;
+ print PLOG "$name: missing\n";
+ my $exists = package_exists($name);
+ print PLOG "$name: does not exist\n" if not $exists;
+ $installable = $name if !$installable and $exists;
+ next;
+ }
+ my $ivers = $stat->{'Version'};
+ if (!$rel || version_cmp( $ivers, $rel, $vers )) {
+ print "$name: pos dep, installed, no versioned dep or ",
+ "version ok\n" if $main::debug;
+ print PLOG "$name: already installed ($ivers";
+ print PLOG " $rel $vers is satisfied"
+ if $rel;
+ print PLOG ")\n";
+ $is_satisfied = 1;
+ last;
+ }
+ print "$name: vers dep, installed $ivers ! $rel $vers\n"
+ if $main::debug;
+ print PLOG "$name: non-matching version installed ",
+ "($ivers ! $rel $vers)\n";
+ # A '<' relation, or '=' with a newer version installed, would
+ # require a downgrade.
+ if ($rel =~ /^</ ||
+ ($rel eq '=' && version_cmp($ivers, '>>', $vers))) {
+ print "$name: would be a downgrade!\n" if $main::debug;
+ print PLOG "$name: would have to downgrade!\n";
+ $downgradeable = $name if !$downgradeable;
+ }
+ else {
+ $upgradeable = $name if !$upgradeable;
+ }
+ }
+ if (!$is_satisfied) {
+ if ($upgradeable) {
+ print "using $upgradeable for upgrade\n" if $main::debug;
+ push( @$pos_list, $upgradeable );
+ }
+ elsif ($installable) {
+ print "using $installable for install\n" if $main::debug;
+ push( @$pos_list, $installable );
+ }
+ elsif ($downgradeable) {
+ print PLOG "To satisfy this dependency the package(s) would ",
+ "have\n",
+ "to be downgraded; this is not implemented.\n";
+ return 0;
+ }
+ else {
+ # None of the build-deps exist. Return the
+ # first one so that we get a useful dep-wait.
+ $installable = $dep->{'Package'};
+ print "using $installable for install (does not exist)\n" if $main::debug;
+ push( @$pos_list, $installable );
+ }
+ }
+ }
+
+ return 1;
+}
+
+# Post-install verification: confirm every source dependency is actually
+# satisfied in the chroot. Returns a space-separated description of all
+# unsatisfied deps, or the empty string if everything checks out. When all
+# is well, also logs the installed versions of the toolchain packages.
+sub check_dependencies {
+ my $dependencies = shift;
+ my $fail = "";
+ my($dep, $d, $name, %names);
+
+ print PLOG "Checking correctness of source dependencies...\n";
+
+ # Gather every non-special package name (deps, alternatives, toolchain)
+ # for one combined dpkg-status query.
+ foreach $d (@$dependencies) {
+ my $name = $d->{'Package'};
+ $names{$name} = 1 if $name !~ /^\*/;
+ foreach (@{$d->{'Alternatives'}}) {
+ my $name = $_->{'Package'};
+ $names{$name} = 1 if $name !~ /^\*/;
+ }
+ }
+ foreach $name (@main::toolchain_pkgs) {
+ $names{$name} = 1;
+ }
+ my $status = get_dpkg_status( keys %names );
+
+ foreach $dep (@$dependencies) {
+ $name = $dep->{'Package'};
+ next if $name =~ /^\*/;
+ my $stat = $status->{$name};
+ # Negative dep fails if the package is still installed (and, when
+ # versioned, the conflicting version relation holds).
+ if ($dep->{'Neg'}) {
+ if ($stat->{'Installed'}) {
+ if (!$dep->{'Rel'}) {
+ $fail .= "$name(still installed) ";
+ }
+ elsif (version_cmp($stat->{'Version'}, $dep->{'Rel'},
+ $dep->{'Version'})) {
+ $fail .= "$name(inst $stat->{'Version'} $dep->{'Rel'} ".
+ "conflicted $dep->{'Version'})\n";
+ }
+ }
+ }
+ else {
+ # Positive dep: satisfied if any alternative is installed with an
+ # acceptable version; otherwise report every failing alternative,
+ # joined with '|'.
+ my $is_satisfied = 0;
+ my $f = "";
+ foreach $d ($dep, @{$dep->{'Alternatives'}}) {
+ my $name = $d->{'Package'};
+ my $stat = $status->{$name};
+ if (!$stat->{'Installed'}) {
+ $f =~ s/ $/\|/ if $f;
+ $f .= "$name(missing) ";
+ }
+ elsif ($d->{'Rel'} &&
+ !version_cmp( $stat->{'Version'}, $d->{'Rel'},
+ $d->{'Version'} )) {
+ $f =~ s/ $/\|/ if $f;
+ $f .= "$name(inst $stat->{'Version'} ! $d->{'Rel'} ".
+ "wanted $d->{'Version'}) ";
+ }
+ else {
+ $is_satisfied = 1;
+ }
+ }
+ if (!$is_satisfied) {
+ $fail .= $f;
+ }
+ }
+ }
+ $fail =~ s/\s+$//;
+ if (!$fail && @main::toolchain_pkgs) {
+ print PLOG "Toolchain package versions:";
+ foreach $name (@main::toolchain_pkgs) {
+ print PLOG ' ' . $name . '_' . $status->{$name}->{'Version'};
+ }
+ print PLOG "\n";
+ }
+
+ return $fail;
+}
+
+# Read $main::chroot_dir/var/lib/dpkg/status and return a hash ref mapping
+# each package name in @interest to { Installed => 0|1, Version => ... }.
+# Virtual packages satisfied via Provides: get the sentinel version
+# '=*=PROVIDED=*='. Returns an empty list on missing input or open failure.
+sub get_dpkg_status {
+ my @interest = @_;
+ my %result;
+ local( *STATUS );
+
+ return () if !@_;
+ print "Requesting dpkg status for packages: @interest\n"
+ if $main::debug;
+ if (!open( STATUS, "<$main::chroot_dir/var/lib/dpkg/status" )) {
+ print PLOG "Can't open $main::chroot_dir/var/lib/dpkg/status: $!\n";
+ return ();
+ }
+ # Paragraph mode: each read returns one package stanza.
+ local( $/ ) = "";
+ while( <STATUS> ) {
+ my( $pkg, $status, $version, $provides );
+ /^Package:\s*(.*)\s*$/mi and $pkg = $1;
+ /^Status:\s*(.*)\s*$/mi and $status = $1;
+ /^Version:\s*(.*)\s*$/mi and $version = $1;
+ /^Provides:\s*(.*)\s*$/mi and $provides = $1;
+ if (!$pkg) {
+ print PLOG "sbuild: parse error in $main::chroot_dir/var/lib/dpkg/status: ",
+ "no Package: field\n";
+ next;
+ }
+ print "$pkg ($version) status: $status\n" if $main::debug >= 2;
+ if (!$status) {
+ print PLOG "sbuild: parse error in $main::chroot_dir/var/lib/dpkg/status: ",
+ "no Status: field for package $pkg\n";
+ next;
+ }
+ # Not installed: record that, unless an earlier stanza already marked
+ # the name as provided by some other package.
+ if ($status !~ /\sinstalled$/) {
+ $result{$pkg}->{'Installed'} = 0
+ if !(exists($result{$pkg}) &&
+ $result{$pkg}->{'Version'} eq '=*=PROVIDED=*=');
+ next;
+ }
+ if (!$version) {
+ print PLOG "sbuild: parse error in $main::chroot_dir/var/lib/dpkg/status: ",
+ "no Version: field for package $pkg\n";
+ next;
+ }
+ $result{$pkg} = { Installed => 1, Version => $version }
+ if isin( $pkg, @interest );
+ # Record provided names too, but never overwrite a real installation.
+ if ($provides) {
+ foreach (split( /\s*,\s*/, $provides )) {
+ $result{$_} = { Installed => 1, Version => '=*=PROVIDED=*=' }
+ if (isin( $_, @interest ) &&
+ $result{$_}->{'Installed'} != 1);
+ }
+ }
+ }
+ close( STATUS );
+ return \%result;
+}
+
+# True iff "$v1 $rel $v2" holds, as judged by dpkg --compare-versions.
+# The '=*=PROVIDED=*=' sentinel (a virtual package) never satisfies a
+# version relation.
+sub version_cmp {
+ my $v1 = shift;
+ my $rel = shift;
+ my $v2 = shift;
+ if ($v1 eq "=*=PROVIDED=*=") {
+ return 0;
+ }
+
+ # dpkg exits 0 when the relation holds.
+ system "$conf::dpkg", "--compare-versions", $v1, $rel, $v2;
+ return $? == 0;
+}
+
+# Run a shell script fragment with "set $e_mode" prepended, forwarding its
+# stdout/stderr to PLOG. When $main::chroot_dir is set, the script runs
+# inside the chroot via sudo/chroot/su as $main::username from
+# /build/$main::username; otherwise it runs via plain /bin/sh.
+# Returns the child's wait status ($?); returns 1 if fork fails.
+sub run_script {
+ my $e_mode = shift;
+ my $x_mode = "";
+ my $script = shift;
+
+ # At debug level >= 2 trace the script with set -x -v instead of
+ # echoing it here.
+ if ($main::debug >= 2) {
+ $x_mode = "set -x -v\n";
+ }
+ elsif ($main::debug) {
+ print "Running script:\n ",
+ join( "\n ", split( "\n", "set $e_mode\n$script" )), "\n";
+ }
+ my $pid = fork();
+ if (!defined $pid) {
+ print PLOG "Can't fork: $! -- can't execute script\n";
+ return 1;
+ }
+ if ($pid == 0) {
+ # Child: own process group, output redirected into the package log.
+ setpgrp( 0, $$ );
+ open( STDOUT, ">&PLOG" );
+ open( STDERR, ">&PLOG" );
+ if ($main::chroot_dir) {
+ exec "$conf::sudo", "/usr/sbin/chroot", "$main::cwd/$main::chroot_dir",
+ "$conf::su", $main::username, "-s", "/bin/sh", "-c",
+ "cd /build/$main::username\nset $e_mode\n$x_mode$script";
+ }
+ else {
+ exec "/bin/sh", "-c", "set $e_mode\n$x_mode$script";
+ }
+ die "Can't exec /bin/sh: $!\n";
+ }
+ wait;
+ print "Script return value: $?\n" if $main::debug;
+ return $?
+}
+
+
+# Load source dependencies for the packages in @for_pkgs into %main::deps.
+# Reads the distribution-specific file first, falling back to the generic
+# $conf::source_dependencies. Handles continuation lines (trailing '\'),
+# special '*' definitions (via get_special), 'abbrev' definitions, and
+# manual deps from @main::manual_srcdeps ('f' = replace, 'a' = append).
+# Finally expands abbrevs in place and warns about undefined specials.
+sub read_deps {
+ my @for_pkgs = @_;
+ my $fname;
+ local( *F );
+
+ open( F, $fname = "<$conf::source_dependencies-$main::distribution" )
+ or open( F, $fname = "<$conf::source_dependencies" )
+ or die "Cannot open $conf::source_dependencies: $!\n";
+ # Strip the leading '<' open-mode character to get the plain filename.
+ $fname = substr( $fname, 1 );
+ print "Reading source dependencies from $fname\n"
+ if $main::debug;
+ while( <F> ) {
+ chomp;
+ next if /^\s*$/ || /^\s*#/;
+ # Join backslash-continued lines into one logical line.
+ while( /\\$/ ) {
+ chop;
+ $_ .= <F>;
+ chomp;
+ }
+ if (/^(\*\*?[\w\d.+-]+):\s*$/) {
+ # is a special definition
+ my $sp = $1;
+ get_special( $fname, $sp, \*F );
+ next;
+ }
+ if (/^abbrev\s+([\w\d.+-]+)\s*=\s*(.*)\s*$/) {
+ my ($abbrev, $def) = ($1, $2);
+ parse_one_srcdep( $abbrev, $def, \%main::abbrevs );
+ next;
+ }
+ if (!/^([a-zA-Z\d.+-]+):\s*(.*)\s*$/) {
+ warn "Syntax error in line $. in $fname\n";
+ next;
+ }
+ my( $pkg, $deps ) = ($1, $2);
+ if (exists $main::deps{$pkg}) {
+ warn "Ignoring double entry for package $pkg at line $. ".
+ "in $fname\n";
+ next;
+ }
+ next if !isin( $pkg, @for_pkgs );
+ parse_one_srcdep( $pkg, $deps, \%main::deps );
+ }
+ close( F );
+
+ # Manual source deps from the command line: leading 'f' forces a
+ # replacement of the file's entry, 'a' appends to it.
+ foreach (@main::manual_srcdeps) {
+ if (!/^([fa])([a-zA-Z\d.+-]+):\s*(.*)\s*$/) {
+ warn "Syntax error in manual source dependency: ",
+ substr( $_, 1 ), "\n";
+ next;
+ }
+ my ($mode, $pkg, $deps) = ($1, $2, $3);
+ next if !isin( $pkg, @for_pkgs );
+ @{$main::deps{$pkg}} = () if $mode eq 'f';
+ parse_one_srcdep( $pkg, $deps, \%main::deps );
+ }
+
+ # substitute abbrevs and warn about undefined special deps
+ my( $pkg, $i, %warned );
+ foreach $pkg (keys %main::deps) {
+ repeat:
+ my $dl = $main::deps{$pkg};
+ for( $i = 0; $i < @$dl; ++$i ) {
+ my $dep = $dl->[$i];
+ my $name = $dep->{'Package'};
+ if ($name =~ /^\*/) {
+ if (!$warned{$name} && !exists $main::specials{$name}) {
+ warn "Warning: $pkg: No definition for special ",
+ "dependency $name!\n";
+ $warned{$name}++;
+ }
+ }
+ elsif (defined $main::abbrevs{$name}) {
+ # Splice the abbrev's expansion into the dep list, propagating
+ # an Override flag onto copies; abbrevs may not appear in
+ # alternatives, negations, or versioned relations.
+ my @l = @{$main::abbrevs{$name}};
+ if (defined $dep->{'Alternatives'}) {
+ warn "Warning: $pkg: abbrev $name not allowed ",
+ "in alternative\n";
+ @l = ();
+ }
+ if ($dep->{'Neg'}) {
+ warn "Warning: $pkg: Negation of abbrev $name ",
+ "not allowed\n";
+ @l = ();
+ }
+ if ($dep->{'Rel'}) {
+ warn "Warning: $pkg: No relation with abbrev $name ",
+ "allowed\n";
+ @l = ();
+ }
+ if (my $ov = $dep->{'Override'}) {
+ @l = map { my $x = copy($_);
+ $x->{'Override'} = $ov; $x } @l;
+ }
+ splice @$dl, $i, 1, @l;
+ # Restart the scan: the splice may have introduced new
+ # abbrevs or shifted indices.
+ goto repeat;
+ }
+ elsif (defined $dep->{'Alternatives'}) {
+ my $alt;
+ foreach $alt (@{$dep->{'Alternatives'}}) {
+ if (defined $main::abbrevs{$alt->{'Package'}}) {
+ warn "Warning: $pkg: abbrev $alt->{'Package'} not ",
+ "allowed in alternative\n";
+ splice @$dl, $i, 1;
+ }
+ }
+ }
+ }
+ }
+}
+
+# Deep-copy a data structure of nested hash refs, array refs and scalars.
+# Dies on any other reference type (code, glob, blessed XS, ...).
+sub copy {
+ my $r = shift;
+ my $new;
+
+ if (ref($r) eq "HASH") {
+ $new = { };
+ foreach (keys %$r) {
+ $new->{$_} = copy($r->{$_});
+ }
+ }
+ elsif (ref($r) eq "ARRAY") {
+ my $i;
+ $new = [ ];
+ for( $i = 0; $i < @$r; ++$i ) {
+ $new->[$i] = copy($r->[$i]);
+ }
+ }
+ elsif (!ref($r)) {
+ # Plain scalar: copied by value.
+ $new = $r;
+ }
+ else {
+ die "unknown ref type in copy\n";
+ }
+
+ return $new;
+}
+
+# Replace the central source-dep list for $pkg with the build dependencies
+# declared by the package itself (Build-Depends[-Indep], Build-Conflicts
+# [-Indep]), keeping only central deps explicitly marked as overrides.
+# Also populates @main::toolchain_pkgs from the expanded build-essential
+# set and $main::additional_deps from the expanded package deps, then logs
+# central deps that appear to be missing from the package's declaration.
+sub merge_pkg_build_deps {
+ my $pkg = shift;
+ my $depends = shift;
+ my $dependsi = shift;
+ my $conflicts = shift;
+ my $conflictsi = shift;
+ my (@l, $dep);
+
+ print PLOG "** Using build dependencies supplied by package:\n";
+ print PLOG "Build-Depends: $depends\n" if $depends;
+ print PLOG "Build-Depends-Indep: $dependsi\n" if $dependsi;
+ print PLOG "Build-Conflicts: $conflicts\n" if $conflicts;
+ print PLOG "Build-Conflicts-Indep: $conflictsi\n" if $conflictsi;
+
+ my $old_deps = copy($main::deps{$pkg});
+ # keep deps from the central file marked as overrides (& prefix)
+ if ( $main::useSNAP ) {
+ # Force gcc-snapshot in as an override when a snapshot build was
+ # requested.
+ $dep->{'Package'} = "gcc-snapshot";
+ $dep->{'Override'} = 1;
+ push( @{$main::deps{$pkg}}, $dep );
+ }
+ foreach $dep (@{$main::deps{$pkg}}) {
+ if ($dep->{'Override'}) {
+ print PLOG "Added override: ",
+ (map { ($_->{'Neg'} ? "!" : "") .
+ $_->{'Package'} .
+ ($_->{'Rel'} ? " ($_->{'Rel'} $_->{'Version'})":"") }
+ scalar($dep), @{$dep->{'Alternatives'}}), "\n";
+ push( @l, $dep );
+ }
+ }
+
+ # Conflicts become negative ("!") deps in the internal syntax.
+ $conflicts = join( ", ", map { "!$_" } split( /\s*,\s*/, $conflicts ));
+ $conflictsi = join( ", ", map { "!$_" } split( /\s*,\s*/, $conflictsi ));
+
+ my $deps = $depends . ", " . $conflicts;
+ $deps .= ", " . $dependsi . ", " . $conflictsi if $main::build_arch_all;
+ # For the moment, we treat multiarch-annotated build-dependencies as
+ # the same as any others because we're not implementing a
+ # cross-buildd.
+ $deps =~ s/:any//g;
+ $deps =~ s/:native//g;
+
+ @{$main::deps{$pkg}} = @l;
+ print "Merging pkg deps: $deps\n" if $main::debug;
+ parse_one_srcdep( $pkg, $deps, \%main::deps );
+
+ # Central deps not covered by the package's own declaration.
+ my $missing = (cmp_dep_lists( $old_deps, $main::deps{$pkg} ))[1];
+
+ # read list of build-essential packages (if not yet done) and expand their
+ # dependencies (those are implicitly essential)
+ if (!defined($main::deps{'ESSENTIAL'})) {
+ my $ess = read_build_essential();
+ parse_one_srcdep( 'ESSENTIAL', $ess, \%main::deps );
+ }
+ my ($exp_essential, $exp_pkgdeps, $filt_essential, $filt_pkgdeps);
+ $exp_essential = expand_dependencies( $main::deps{'ESSENTIAL'} );
+ print "Dependency-expanded build essential packages:\n",
+ format_deps(@$exp_essential), "\n" if $main::debug;
+
+ # populate toolchain_pkgs from toolchain_regexes and essential packages.
+ @main::toolchain_pkgs = ();
+ foreach my $tpkg (@$exp_essential) {
+ foreach my $regex (@conf::toolchain_regex) {
+ push @main::toolchain_pkgs,$tpkg->{'Package'}
+ if $tpkg->{'Package'} =~ m,^$regex,;
+ }
+ }
+ return if !@$missing;
+
+ # remove missing central deps that are essential
+ ($filt_essential, $missing) = cmp_dep_lists( $missing, $exp_essential );
+ print PLOG "** Filtered missing central deps that are build-essential:\n",
+ format_deps(@$filt_essential), "\n"
+ if @$filt_essential;
+
+ # if some build deps are virtual packages, replace them by an alternative
+ # over all providing packages
+ $exp_pkgdeps = expand_virtuals( $main::deps{$pkg} );
+ print "Provided-expanded build deps:\n",
+ format_deps(@$exp_pkgdeps), "\n" if $main::debug;
+
+ # now expand dependencies of package build deps
+ $exp_pkgdeps = expand_dependencies( $exp_pkgdeps );
+ print "Dependency-expanded build deps:\n",
+ format_deps(@$exp_pkgdeps), "\n" if $main::debug;
+ $main::additional_deps = $exp_pkgdeps;
+
+ # remove missing central deps that are dependencies of build deps
+ ($filt_pkgdeps, $missing) = cmp_dep_lists( $missing, $exp_pkgdeps );
+ print PLOG "** Filtered missing central deps that are dependencies of ",
+ "or provide build-deps:\n",
+ format_deps(@$filt_pkgdeps), "\n"
+ if @$filt_pkgdeps;
+
+ # remove comment package names
+ push( @$main::additional_deps,
+ grep { $_->{'Neg'} && $_->{'Package'} =~ /^needs-no-/ } @$missing );
+ $missing = [ grep { !($_->{'Neg'} &&
+ ($_->{'Package'} =~ /^this-package-does-not-exist/ ||
+ $_->{'Package'} =~ /^needs-no-/)) } @$missing ];
+
+ print PLOG "**** Warning:\n",
+ "**** The following central src deps are ",
+ "(probably) missing:\n ", format_deps(@$missing), "\n"
+ if @$missing;
+}
+
+# Compare two dependency lists. Returns (\@common, \@missing): entries of
+# $list1 that are covered by $list2 and those that are not. A negative dep
+# matches any negative dep on the same package; a positive dep is covered
+# when some $list2 entry's alternative set is a superset of its own.
+sub cmp_dep_lists {
+ my $list1 = shift;
+ my $list2 = shift;
+ my ($dep, @common, @missing);
+
+ foreach $dep (@$list1) {
+ my $found = 0;
+
+ if ($dep->{'Neg'}) {
+ foreach (@$list2) {
+ if ($dep->{'Package'} eq $_->{'Package'} && $_->{'Neg'}) {
+ $found = 1;
+ last;
+ }
+ }
+ }
+ else {
+ my $al = get_altlist($dep);
+ foreach (@$list2) {
+ if (is_superset( get_altlist($_), $al )) {
+ $found = 1;
+ last;
+ }
+ }
+ }
+
+ if ($found) {
+ push( @common, $dep );
+ }
+ else {
+ push( @missing, $dep );
+ }
+ }
+ return (\@common, \@missing);
+}
+
+# Return a hash ref of the non-negative package names in a dep and its
+# alternatives (used as a set for superset comparison).
+sub get_altlist {
+ my $dep = shift;
+ my %l;
+
+ foreach (scalar($dep), @{$dep->{'Alternatives'}}) {
+ $l{$_->{'Package'}} = 1 if !$_->{'Neg'};
+ }
+ return \%l;
+}
+
+# True iff set $l1 (hash ref keyed by element) contains every key of $l2.
+sub is_superset {
+ my $l1 = shift;
+ my $l2 = shift;
+
+ foreach (keys %$l2) {
+ return 0 if !exists $l1->{$_};
+ }
+ return 1;
+}
+
+# Collect the build-essential package names from the two list files shipped
+# by the build-essential package inside the chroot, and return them as one
+# comma-separated dependency string. Each missing file only warns, so a
+# partial result is possible.
+sub read_build_essential {
+ my @essential;
+ local (*F);
+
+ if (open( F, "$main::chroot_dir/usr/share/doc/build-essential/essential-packages-list" )) {
+ # Skip the header paragraph (everything up to the first blank line).
+ while( <F> ) {
+ last if $_ eq "\n";
+ }
+ while( <F> ) {
+ chomp;
+ push( @essential, $_ ) if $_ !~ /^\s*$/;
+ }
+ close( F );
+ }
+ else {
+ warn "Cannot open $main::chroot_dir/usr/share/doc/build-essential/essential-packages-list: $!\n";
+ }
+
+ if (open( F, "$main::chroot_dir/usr/share/doc/build-essential/list" )) {
+ # Only the lines between the BEGIN/END markers are package names;
+ # indented or blank lines within are commentary.
+ while( <F> ) {
+ last if $_ eq "BEGIN LIST OF PACKAGES\n";
+ }
+ while( <F> ) {
+ chomp;
+ last if $_ eq "END LIST OF PACKAGES";
+ next if /^\s/ || /^$/;
+ push( @essential, $_ );
+ }
+ close( F );
+ }
+ else {
+ warn "Cannot open $main::chroot_dir/usr/share/doc/build-essential/list: $!\n";
+ }
+
+ return join( ", ", @essential );
+}
+
+# Transitively expand a dependency list: starting from the positive,
+# non-special deps in $dlist, repeatedly query apt-cache for Depends/
+# Pre-Depends and add every newly seen package as a simple positive dep.
+# Returns an array ref of dep hashes; %seen guards against cycles.
+sub expand_dependencies {
+ my $dlist = shift;
+ my (@to_check, @result, %seen, $check, $dep);
+
+ foreach $dep (@$dlist) {
+ next if $dep->{'Neg'} || $dep->{'Package'} =~ /^\*/;
+ foreach (scalar($dep), @{$dep->{'Alternatives'}}) {
+ my $name = $_->{'Package'};
+ push( @to_check, $name );
+ $seen{$name} = 1;
+ }
+ push( @result, copy($dep) );
+ }
+
+ # Breadth-first expansion: each round resolves the current frontier's
+ # dependencies and queues the unseen ones.
+ while( @to_check ) {
+ my $deps = get_dependencies( @to_check );
+ my @check = @to_check;
+ @to_check = ();
+ foreach $check (@check) {
+ foreach (split( /\s*,\s*/, $deps->{$check} )) {
+ foreach (split( /\s*\|\s*/, $_ )) {
+ # Strip version restrictions/architecture qualifiers to get
+ # the bare package name.
+ my $pkg = (/^([^\s([]+)/)[0];
+ if (!$seen{$pkg}) {
+ push( @to_check, $pkg );
+ push( @result, { Package => $pkg, Neg => 0 } );
+ $seen{$pkg} = 1;
+ }
+ }
+ }
+ }
+ }
+
+ return \@result;
+}
+
+# For each dep, widen its alternative set with every package that Provides
+# one of its names (per apt-cache showpkg reverse provides). Returns a new
+# list of dep hashes; the original relation/negation details are dropped,
+# leaving plain positive alternatives.
+sub expand_virtuals {
+ my $dlist = shift;
+ my ($dep, %names, @new_dlist);
+
+ foreach $dep (@$dlist) {
+ foreach (scalar($dep), @{$dep->{'Alternatives'}}) {
+ $names{$_->{'Package'}} = 1;
+ }
+ }
+ my $provided_by = get_virtuals( keys %names );
+
+ foreach $dep (@$dlist) {
+ my %seen;
+ foreach (scalar($dep), @{$dep->{'Alternatives'}}) {
+ my $name = $_->{'Package'};
+ $seen{$name} = 1;
+ if (exists $provided_by->{$name}) {
+ foreach( keys %{$provided_by->{$name}} ) {
+ $seen{$_} = 1;
+ }
+ }
+ }
+ # Rebuild as one dep whose Alternatives are all remaining names.
+ my @l = map { { Package => $_, Neg => 0 } } keys %seen;
+ my $l = shift @l;
+ foreach (@l) {
+ push( @{$l->{'Alternatives'}}, $_ );
+ }
+ push( @new_dlist, $l );
+ }
+
+ return \@new_dlist;
+}
+
+# Query 'apt-cache show' inside the chroot for the packages in @_ and
+# return a hash ref mapping each package name to its combined
+# "Depends, Pre-Depends" string (either part may be empty).
+sub get_dependencies {
+ local(*PIPE);
+ my %deps;
+
+ open( PIPE, "$conf::sudo /usr/sbin/chroot $main::chroot_dir $conf::apt_cache show @_ 2>&1 |" )
+ or die "Cannot start $conf::apt_cache $main::chroot_apt_op: $!\n";
+ # Paragraph mode: one record per package stanza.
+ local($/) = "";
+ while( <PIPE> ) {
+ my ($name, $dep, $predep);
+ /^Package:\s*(.*)\s*$/mi and $name = $1;
+ # Keep only the first stanza seen for each package name.
+ next if !$name || $deps{$name};
+ /^Depends:\s*(.*)\s*$/mi and $dep = $1;
+ /^Pre-Depends:\s*(.*)\s*$/mi and $predep = $1;
+ $dep .= ", " if $dep && $predep;
+ $dep .= $predep;
+ $deps{$name} = $dep;
+ }
+ close( PIPE );
+ die "$conf::apt_cache exit status $?\n" if $?;
+
+ return \%deps;
+}
+
+# Query 'apt-cache showpkg' inside the chroot for the packages in @_ and
+# return a hash ref mapping each name to the set of packages listed in its
+# "Reverse Provides:" section (i.e. the real packages providing it).
+sub get_virtuals {
+ local(*PIPE);
+
+ open( PIPE, "$conf::sudo /usr/sbin/chroot $main::chroot_dir $conf::apt_cache showpkg @_ 2>&1 |" )
+ or die "Cannot start $conf::apt_cache $main::chroot_apt_op: $!\n";
+ my $name;
+ my $in_rprov = 0;
+ my %provided_by;
+ # Simple state machine over showpkg output: track the current Package:
+ # and whether we are inside its Reverse Provides: section.
+ while( <PIPE> ) {
+ if (/^Package:\s*(\S+)\s*$/) {
+ $name = $1;
+ }
+ elsif (/^Reverse Provides: $/) {
+ $in_rprov = 1;
+ }
+ elsif ($in_rprov && /^(\w+):\s/) {
+ # A new "Key:" header ends the Reverse Provides section.
+ $in_rprov = 0;
+ }
+ elsif ($in_rprov && /^(\S+)\s*\S+\s*$/) {
+ $provided_by{$name}->{$1} = 1;
+ }
+ }
+ close( PIPE );
+ die "$conf::apt_cache exit status $?\n" if $?;
+
+ return \%provided_by;
+}
+
+# Try to figure out if a package exists. We need to take account of virtual
+# packages, so showpkg is the best tool I can think of; but that shows
+# packages which only exist as (reverse) dependencies. As such, we make
+# sure that either Versions: or Reverse Provides: has some content.
+# Returns 1 when apt-cache showpkg in the chroot reports a Package: stanza
+# with a non-empty Versions: or Reverse Provides: section, else 0.
+sub package_exists {
+ local(*PIPE);
+
+ open( PIPE, "$conf::sudo /usr/sbin/chroot $main::chroot_dir ".
+ "$conf::apt_cache showpkg @_ 2>&1 |" )
+ or die "Cannot start $conf::apt_cache $main::chroot_apt_op: $!\n";
+ my $name;
+ my $in_versions = 0;
+ my $in_rprov = 0;
+ my $real = 0;
+ # State machine over showpkg output: $real becomes true when any
+ # non-indented line appears inside Versions: or Reverse Provides:.
+ while( <PIPE> ) {
+ if (/^Package:\s*(\S+)\s*$/) {
+ $name = $1;
+ }
+ elsif (/^Versions: $/) {
+ $in_versions = 1;
+ $in_rprov = 0;
+ }
+ elsif (/^Reverse Provides: $/) {
+ $in_rprov = 1;
+ $in_versions = 0;
+ }
+ elsif (($in_versions || $in_rprov) && /^(\w.*):\s/) {
+ # Another "Header:" line ends the current section.
+ $in_versions = 0;
+ $in_rprov = 0;
+ }
+ elsif (($in_versions || $in_rprov) && /^\S/) {
+ $real = 1;
+ }
+ }
+ close( PIPE );
+ if (defined $name and $real) {
+ return 1;
+ } else {
+ return 0;
+ }
+}
+
+# Parse one comma-separated source-dependency string for $pkg and append
+# the resulting dep hashes to @{$hash->{$pkg}}. Understands:
+#   '&' prefix       - override marker for the whole group,
+#   'a | b | c'      - alternatives (folded into the first dep's
+#                      'Alternatives' list),
+#   '(op version)'   - version relation (Rel/Version keys),
+#   '[arch ...]'     - architecture restriction, evaluated for $main::arch
+#                      via dpkg-architecture (wildcards supported),
+#   '*name'          - special dependency (always Override),
+#   '!name'          - negative dependency,
+#   %conf::srcdep_over - per-package replacement table.
+# Alternatives combined with specials or negations are rejected.
+sub parse_one_srcdep {
+ my $pkg = shift;
+ my $deps = shift;
+ my $hash = shift;
+
+ $deps =~ s/^\s*(.*)\s*$/$1/;
+ foreach (split( /\s*,\s*/, $deps )) {
+ my @l;
+ my $override;
+ if (/^\&/) {
+ $override = 1;
+ s/^\&\s+//;
+ }
+ my @alts = split( /\s*\|\s*/, $_ );
+ my $special_seen = 0;
+ my $neg_seen = 0;
+ foreach (@alts) {
+ if (!/^([^\s([]+)\s*(\(\s*([<=>]+)\s*(\S+)\s*\))?(\s*\[([^]]+)\])?/) {
+ warn "Warning: syntax error in dependency '$_' of $pkg\n";
+ next;
+ }
+ my( $dep, $rel, $relv, $archlist ) = ($1, $3, $4, $6);
+ if ($archlist) {
+ $archlist =~ s/^\s*(.*)\s*$/$1/;
+ my @archs = split( /\s+/, $archlist );
+ my ($use_it, $ignore_it, $include) = (0, 0, 0);
+ foreach (@archs) {
+ # Use 'dpkg-architecture' to support architecture
+ # wildcards.
+ if (/^!/) {
+ $ignore_it = 1 if system('dpkg-architecture', '-a' . $main::arch, '-i' . substr($_, 1)) eq 0;
+ }
+ else {
+ $use_it = 1 if system('dpkg-architecture', '-a' . $main::arch, '-i' . $_) eq 0;
+ $include = 1;
+ }
+ }
+ warn "Warning: inconsistent arch restriction on ",
+ "$pkg: $dep depedency\n"
+ if $ignore_it && $use_it;
+ next if $ignore_it || ($include && !$use_it);
+ }
+ if ($dep =~ /^\*/) {
+ warn "Warning: $pkg: ignoring version relation on ".
+ "special dependency $dep\n"
+ if $rel || $relv;
+ push( @l, { Package => $dep, Override => 1 } );
+ $special_seen = 1;
+ next;
+ }
+ my $neg = 0;
+ if ($dep =~ /^!/) {
+ $dep =~ s/^!\s*//;
+ $neg = 1;
+ $neg_seen = 1;
+ }
+ # Apply any configured replacement for this dependency.
+ if ($conf::srcdep_over{$dep}) {
+ if ($main::verbose) {
+ print PLOG "Replacing source dep $dep";
+ print PLOG " ($rel $relv)" if $relv;
+ print PLOG " with $conf::srcdep_over{$dep}[0]";
+ print PLOG " ($conf::srcdep_over{$dep}[1] $conf::srcdep_over{$dep}[2])"
+ if $conf::srcdep_over{$dep}[1];
+ print PLOG ".\n";
+ }
+ # NOTE(review): $dep is reassigned first, so the [1]/[2] lookups
+ # below index the table by the *new* name — verify intended.
+ $dep = $conf::srcdep_over{$dep}[0];
+ $rel = $conf::srcdep_over{$dep}[1];
+ $relv = $conf::srcdep_over{$dep}[2];
+ }
+ my $h = { Package => $dep, Neg => $neg };
+ if ($rel && $relv) {
+ $h->{'Rel'} = $rel;
+ $h->{'Version'} = $relv;
+ }
+ $h->{'Override'} = $override if $override;
+ push( @l, $h );
+ }
+ if (@alts > 1 && $special_seen) {
+ warn "Warning: $pkg: alternatives with special dependencies ",
+ "forbidden -- skipped\n";
+ }
+ elsif (@alts > 1 && $neg_seen) {
+ warn "Warning: $pkg: alternatives with negative dependencies ",
+ "forbidden -- skipped\n";
+ }
+ elsif (@l) {
+ # Fold the alternatives into the first dep's Alternatives list.
+ my $l = shift @l;
+ foreach (@l) {
+ push( @{$l->{'Alternatives'}}, $_ );
+ }
+ push( @{$hash->{$pkg}}, $l );
+ }
+ }
+}
+
+# Read the body of a special dependency definition ($sp) from the already
+# open filehandle F (passed as $sub but read via the global F), up to the
+# next blank line. Each "name { ... }" subsection is accumulated verbatim
+# into $main::specials{$sp}->{name}; '**' specials are also registered as
+# global patches.
+sub get_special {
+ my $fname = shift;
+ my $sp = shift;
+ my $sub = "";
+
+ while( <F> ) {
+ last if /^$/;
+ if (/^\s*(\w+)\s*\{\s*$/) {
+ if ($sub) {
+ warn "Syntax error in line $. in $fname:\n";
+ warn " Start of special subsection inside ".
+ "another one.\n";
+ }
+ else {
+ $sub = $1;
+ $main::specials{$sp}->{$sub} = "";
+ }
+ }
+ elsif (/^\s*\}\s*$/) {
+ if (!$sub) {
+ warn "Syntax error in line $. in $fname:\n";
+ warn " } outside of special subsection\n";
+ }
+ else {
+ $sub = "";
+ }
+ }
+ elsif ($sub) {
+ # Inside a subsection: accumulate the raw line.
+ $main::specials{$sp}->{$sub} .= $_;
+ }
+ else {
+ warn "Syntax error in line $. in $fname:\n";
+ warn " Subsection start expected\n";
+ }
+ }
+ if ($sub) {
+ warn "Syntax error in line $. in $fname:\n";
+ warn " Subsection not finished with }\n";
+ }
+
+ push( @main::global_patches, $sp ) if $sp =~ /^\*\*/;
+}
+
+
+# Open the main build log. With $main::nolog everything goes to stdout;
+# in verbose mode output is tee'd to both the terminal and the logfile
+# (build-YYYYMMDD-HHMM.log); otherwise it is written to the file only.
+# STDOUT, STDERR and PLOG are all redirected into LOG, which is unbuffered.
+sub open_log {
+ my $date = `date +%Y%m%d-%H%M`;
+ chomp( $date );
+
+ if ($main::nolog) {
+ open( LOG, ">&STDOUT" );
+ open( PLOG, ">&LOG" ) or warn "Can't redirect PLOG\n";
+ select( LOG );
+ return;
+ }
+
+ $main::main_logfile = "build-$date.log";
+
+ if ($main::verbose) {
+ # Pipe through tee so output is visible and logged; remember the
+ # child pid so close_log can kill it.
+ my $pid;
+ ($pid = open( LOG, "|-")) || exec "tee $main::main_logfile";
+ if (!defined $pid) {
+ warn "Cannot open pipe to 'tee $main::main_logfile': $!\n";
+ }
+ else {
+ $main::tee_pid = $pid;
+ }
+ }
+ else {
+ open( LOG, ">$main::main_logfile" )
+ or warn "Cannot open log file $main::main_logfile: $!\n";
+ }
+ # Make LOG autoflush without changing the selected filehandle.
+ select( (select( LOG ), $| = 1)[0] );
+ open( STDOUT, ">&LOG" ) or warn "Can't redirect stdout\n";
+ open( STDERR, ">&LOG" ) or warn "Can't redirect stderr\n";
+ open( PLOG, ">&LOG" ) or warn "Can't redirect PLOG\n";
+}
+
+# Close the main build log: stop the tee child (verbose mode), mail a
+# non-empty logfile to $conf::mailto, or delete it if empty.
+sub close_log {
+ my $date = `date +%Y%m%d-%H%M`;
+ chomp( $date );
+
+ kill( 15, $main::tee_pid ) if $main::verbose;
+ close( LOG );
+ if (!$main::nolog && !$main::verbose &&
+ -s $main::main_logfile && $conf::mailto) {
+ send_mail( $conf::mailto, "Log from sbuild $date",
+ $main::main_logfile ) if $conf::mailto;
+ }
+ elsif (!$main::nolog && !$main::verbose && ! -s $main::main_logfile) {
+ # Nothing was logged; don't keep an empty file around.
+ unlink( $main::main_logfile );
+ }
+}
+
+# Open the per-package build log (PLOG) for $pkg and print its header.
+# With $main::nolog, PLOG is stdout. For binNMUs the version embedded in
+# the name is rewritten and remembered in $main::binNMU_name. In verbose
+# mode output is tee'd to the logfile. Returns 1 on success, 0 if the
+# logfile cannot be opened.
+# Note: $date is deliberately NOT chomped — its trailing newline terminates
+# the "Build started at" line below.
+sub open_pkg_log {
+ my $date = `date +%Y%m%d-%H%M`;
+ my $pkg = shift;
+
+ if ($main::nolog) {
+ open( PLOG, ">&STDOUT" );
+ }
+ else {
+ $pkg = basename( $pkg );
+ if ($main::binNMU) {
+ # Rewrite name_version(rest) with the binNMU version.
+ $pkg =~ /^([^_]+)_([^_]+)(.*)$/;
+ $pkg = $1."_".binNMU_version($2);
+ $main::binNMU_name = $pkg;
+ $pkg .= $3;
+ }
+ $main::pkg_logfile = "$conf::log_dir/${pkg}_$date";
+ if ($main::verbose) {
+ my $pid;
+ ($pid = open( PLOG, "|-")) || exec "tee $main::pkg_logfile";
+ if (!defined $pid) {
+ warn "Cannot open pipe to 'tee $main::pkg_logfile': $!\n";
+ }
+ else {
+ $main::pkg_tee_pid = $pid;
+ }
+ }
+ else {
+ if (!open( PLOG, ">$main::pkg_logfile" )) {
+ warn "Can't open logfile $main::pkg_logfile: $!\n";
+ return 0;
+ }
+ }
+ }
+ # Autoflush PLOG without changing the selected filehandle.
+ select( (select( PLOG ), $| = 1)[0] );
+
+ # Extract the numeric part of the CVS-style revision keyword.
+ my $revision = '$Revision: 1.170.5 $';
+ $revision =~ /([\d.]+)/;
+ $revision = $1;
+
+ print PLOG "Automatic build of $pkg on $main::HOSTNAME by ".
+ "sbuild/$main::arch $revision\n";
+ print PLOG "Build started at $date";
+ print PLOG "*"x78, "\n";
+ return 1;
+}
+
+# Finish the per-package log: record build time and disk usage for a
+# successful build, print the footer, stop the tee child, repoint PLOG at
+# the main LOG, and mail the package log if configured.
+# Note: $date keeps its trailing newline, which splits the "Finished at"
+# line from the "Build needed ..." line in the footer printf.
+sub close_pkg_log {
+ my $date = `date +%Y%m%d-%H%M`;
+ my $pkg = shift;
+ my $t = $main::pkg_end_time - $main::pkg_start_time;
+
+ $pkg = basename( $pkg );
+ $t = 0 if $t < 0;
+ if ($main::pkg_status eq "successful") {
+ add_time_entry( $pkg, $t );
+ add_space_entry( $pkg, $main::this_space );
+ }
+ print PLOG "*"x78, "\n";
+ printf PLOG "Finished at ${date}Build needed %02d:%02d:%02d, %dk disk space\n",
+ int($t/3600), int(($t%3600)/60), int($t%60), $main::this_space;
+ kill( 15, $main::pkg_tee_pid ) if $main::verbose && !$main::nolog;
+ close( PLOG );
+ open( PLOG, ">&LOG" ) or warn "Can't redirect PLOG\n";
+ send_mail( $conf::mailto,
+ "Log for $main::pkg_status build of ".
+ ($main::binNMU_name || $pkg)." (dist=$main::distribution)",
+ $main::pkg_logfile ) if !$main::nolog && $conf::mailto;
+}
+
+# Record build time $t for source package $pkg (version suffix stripped)
+# in the GDBM average-time database. Each entry is "avg t1 t2 ...": the
+# first field is the running average of the remaining raw samples.
+sub add_time_entry {
+ my $pkg = shift;
+ my $t = shift;
+
+ return if !$conf::avg_time_db;
+ my %db;
+ if (!tie %db, 'GDBM_File',$conf::avg_time_db,GDBM_WRCREAT,0664) {
+ print "Can't open average time db $conf::avg_time_db\n";
+ return;
+ }
+ # Strip "_version..." to key by source package name only.
+ $pkg =~ s/_.*//;
+
+ if (exists $db{$pkg}) {
+ my @times = split( /\s+/, $db{$pkg} );
+ push( @times, $t );
+ my $sum = 0;
+ # Average over the samples, excluding the stored average at [0].
+ foreach (@times[1..$#times]) { $sum += $_; }
+ $times[0] = $sum / (@times-1);
+ $db{$pkg} = join( ' ', @times );
+ }
+ else {
+ $db{$pkg} = "$t $t";
+ }
+ untie %db;
+}
+
+# Sum the disk usage (in 1K blocks, per du -s) of the given files/dirs and
+# store the total in $main::this_space. Logs and returns silently if du
+# cannot be started.
+sub check_space {
+ my @files = @_;
+ local( *PIPE );
+
+ if (!open( PIPE, "sudo /usr/bin/du -s @files 2>/dev/null |" )) {
+ print PLOG "Cannot determine space needed (du failed): $!\n";
+ return;
+ }
+ my $sum = 0;
+ while( <PIPE> ) {
+ # One "size<TAB>path" line per argument; take the leading number.
+ next if !/^(\d+)/;
+ $sum += $1;
+ }
+ close( PIPE );
+ $main::this_space = $sum;
+}
+
+# Record disk-space usage $t for source package $pkg (version suffix
+# stripped) in the GDBM average-space database, keeping at most $keepvals
+# raw samples plus a leading average, newest sample first.
+sub add_space_entry {
+ my $pkg = shift;
+ my $t = shift;
+
+ my $keepvals = 4;
+
+ return if !$conf::avg_space_db || $main::this_space == 0;
+ my %db;
+ if (!tie %db, 'GDBM_File',$conf::avg_space_db,GDBM_WRCREAT,0664) {
+ print "Can't open average space db $conf::avg_space_db\n";
+ return;
+ }
+ # Strip "_version..." to key by source package name only.
+ $pkg =~ s/_.*//;
+
+ if (exists $db{$pkg}) {
+ my @values = split( /\s+/, $db{$pkg} );
+ shift @values;
+ unshift( @values, $t );
+ pop @values if @values > $keepvals;
+ # NOTE(review): four variables but only three initializers — $i is
+ # undef and $weight stays scalar(@values) throughout the loop, so
+ # this "weighted" average degenerates to a plain mean; presumably
+ # $weight was meant to decrease per iteration — verify upstream.
+ my ($sum, $n, $weight, $i) = (0, 0, scalar(@values));
+ for( $i = 0; $i < @values; ++$i) {
+ $sum += $values[$i] * $weight;
+ $n += $weight;
+ }
+ unshift( @values, $sum/$n );
+ $db{$pkg} = join( ' ', @values );
+ }
+ else {
+ $db{$pkg} = "$t $t";
+ }
+ untie %db;
+}
+
+# Return the first filename in @_ that starts with "$name_" (name matched
+# literally), or undef if none does.
+sub file_for_name {
+ my $name = shift;
+ my @x = grep { /^\Q$name\E_/ } @_;
+ return $x[0];
+}
+
+# Update the current job's state to $news and rewrite the batch-mode jobs
+# file: one line per job in @ARGV, the current job unindented, others
+# indented, each with its recorded state if any. No-op outside batch mode.
+sub write_jobs_file {
+ my $news = shift;
+ my $job;
+ local( *F );
+
+ $main::job_state{$main::current_job} = $news
+ if $news && $main::current_job;
+
+ return if !$main::batchmode;
+
+ return if !open( F, ">$main::jobs_file" );
+ foreach $job (@ARGV) {
+ print F ($job eq $main::current_job) ? "" : " ",
+ $job,
+ ($main::job_state{$job} ? ": $main::job_state{$job}" : ""),
+ "\n";
+ }
+ close( F );
+}
+
+# In batch mode, append $pkg to the SBUILD-FINISHED file (one name per
+# line). No-op otherwise.
+sub append_to_FINISHED {
+ my $pkg = shift;
+ local( *F );
+
+ return if !$main::batchmode;
+
+ open( F, ">>SBUILD-FINISHED" );
+ print F "$pkg\n";
+ close( F );
+}
+
+# Create a srcdep lock file named "<pid>-<counter>" describing this build:
+# a header line "job pid user" followed by one line per relevant dep
+# ("!name" for negative deps). Special '*' deps are listed only when they
+# are active (in @$specials) and marked 'global'.
+sub write_srcdep_lock_file {
+ my $deps = shift;
+ my $specials = shift;
+ local( *F );
+
+ ++$main::srcdep_lock_cnt;
+ my $f = "$conf::srcdep_lock_dir/$$-$main::srcdep_lock_cnt";
+ if (!open( F, ">$f" )) {
+ print "Warning: cannot create srcdep lock file $f: $!";
+ return;
+ }
+ print "Writing srcdep lock file $f:\n" if $main::debug;
+
+ chomp( my $user = `/usr/bin/whoami` );
+ print F "$main::current_job $$ $user\n";
+ print "Job $main::current_job pid $$ user $user\n" if $main::debug;
+ foreach (@$deps) {
+ my $name = $_->{'Package'};
+ # add special deps only if they affect global state ("global" sub)
+ next if $name =~ /^\*/ &&
+ (!isin( $name, @$specials ) ||
+ $main::specials{$name}->{'global'} !~ /yes/m);
+ print F ($_->{'Neg'} ? "!" : ""), "$name\n";
+ print " ", ($_->{'Neg'} ? "!" : ""), "$name\n" if $main::debug;
+ }
+ close( F );
+}
+
+# Scan other builds' srcdep lock files for conflicts with this build's
+# planned installs (@$to_inst), removals (@$to_remove) and global special
+# deps (@$special). Stale lock files (dead pids) are removed. Returns the
+# list of conflicting lock-file names (empty list = no conflicts).
+sub check_srcdep_conflicts {
+ my $to_inst = shift;
+ my $to_remove = shift;
+ my $special = shift;
+ local( *F, *DIR );
+ my $mypid = $$;
+ my %conflict_builds;
+
+ if (!opendir( DIR, $conf::srcdep_lock_dir )) {
+ print PLOG "Cannot opendir $conf::srcdep_lock_dir: $!\n";
+ return 1;
+ }
+ # Ignore '.', '..', the install lock, and our own lock files.
+ my @files = grep { !/^\.\.?$/ && !/^install\.lock/ && !/^$mypid-\d+$/ }
+ readdir(DIR);
+ closedir(DIR);
+
+ my $file;
+ foreach $file (@files) {
+ if (!open( F, "<$conf::srcdep_lock_dir/$file" )) {
+ print PLOG "Cannot open $conf::srcdep_lock_dir/$file: $!\n";
+ next;
+ }
+ # First line: "job pid user" (see write_srcdep_lock_file).
+ <F> =~ /^(\S+)\s+(\S+)\s+(\S+)/;
+ my ($job, $pid, $user) = ($1, $2, $3);
+
+ # ignore (and remove) a lock file if associated process doesn't exist
+ # anymore
+ if (kill( 0, $pid ) == 0 && $! == ESRCH) {
+ close( F );
+ print PLOG "Found stale srcdep lock file $file -- removing it\n";
+ print PLOG "Cannot remove: $!\n"
+ if !unlink( "$conf::srcdep_lock_dir/$file" );
+ next;
+ }
+
+ print "Reading srclock file $file by job $job user $user\n"
+ if $main::debug;
+
+ while( <F> ) {
+ my ($neg, $pkg) = /^(!?)(\S+)/;
+ print "Found ", ($neg ? "neg " : ""), "entry $pkg\n"
+ if $main::debug;
+
+ # A global special dep held by another build conflicts
+ # unconditionally; otherwise conflict only when we plan to touch
+ # the same package.
+ if ($pkg =~ /^\*/) {
+ print PLOG "Build of $job by $user (pid $pid) has ",
+ "installed the global special dependency $pkg.\n";
+ $conflict_builds{$file} = 1;
+ }
+ else {
+ if (isin( $pkg, @$to_inst, @$to_remove )) {
+ print PLOG "Source dependency conflict with build of ",
+ "$job by $user (pid $pid):\n";
+ print PLOG " $job ", ($neg ? "conflicts with" : "needs"),
+ " $pkg\n";
+ print PLOG " $main::current_job wants to ",
+ (isin( $pkg, @$to_inst ) ? "update" : "remove"),
+ " $pkg\n";
+ $conflict_builds{$file} = 1;
+ }
+ }
+ }
+ close( F );
+ }
+
+ # If we want to apply a global special dep ourselves, we must wait for
+ # every other running build.
+ foreach (@$special) {
+ if ($main::specials{$_}->{'global'} =~ /yes/m) {
+ print PLOG "$main::current_job wants to apply global ",
+ "special dependency $_\n",
+ "Must wait for other builds to finish\n";
+ foreach (@files) {
+ $conflict_builds{$_} = 1;
+ }
+ }
+ }
+
+ my @conflict_builds = keys %conflict_builds;
+ if (@conflict_builds) {
+ print "Srcdep conflicts with: @conflict_builds\n" if $main::debug;
+ }
+ else {
+ print "No srcdep conflicts\n" if $main::debug;
+ }
+ return @conflict_builds;
+}
+
+# Remove this process's current srcdep lock file; a lock file that is
+# already gone (ENOENT) is not worth a warning.
+sub remove_srcdep_lock_file {
+ my $f = "$conf::srcdep_lock_dir/$$-$main::srcdep_lock_cnt";
+
+ print "Removing srcdep lock file $f\n" if $main::debug;
+ if (!unlink( $f )) {
+ print "Warning: cannot remove srcdep lock file $f: $!\n"
+ if $! != ENOENT;
+ }
+}
+
+# Set up %main::this_watches: for each watched package in %conf::watches
+# that is NOT among the installed packages or declared dependencies
+# (including needs-no-* exceptions), record the absolute chroot paths of
+# its watched programs so check_watches can detect undeclared use.
+sub prepare_watches {
+ my $dependencies = shift;
+ my @instd = @_;
+ my(@dep_on, $dep, $pkg, $prg);
+
+ # Build the set of packages this build legitimately depends on.
+ @dep_on = @instd;
+ foreach $dep (@$dependencies, @$main::additional_deps) {
+ if ($dep->{'Neg'} && $dep->{'Package'} =~ /^needs-no-(\S+)/) {
+ push( @dep_on, $1 );
+ }
+ elsif ($dep->{'Package'} !~ /^\*/ && !$dep->{'Neg'}) {
+ foreach (scalar($dep), @{$dep->{'Alternatives'}}) {
+ push( @dep_on, $_->{'Package'} );
+ }
+ }
+ }
+ # init %this_watches to names of packages which have not been installed as
+ # source dependencies
+ undef %main::this_watches;
+ foreach $pkg (keys %conf::watches) {
+ if (isin( $pkg, @dep_on )) {
+ print "Excluding from watch: $pkg\n" if $main::debug;
+ next;
+ }
+ foreach $prg (@{$conf::watches{$pkg}}) {
+ # Relative program names default to /usr/bin.
+ $prg = "/usr/bin/$prg" if $prg !~ m,^/,;
+ $main::this_watches{"$main::chroot_dir$prg"} = $pkg;
+ print "Will watch for $prg ($pkg)\n" if $main::debug;
+ }
+ }
+}
+
+# Compare the access time (stat field 8, atime) of every watched binary
+# against the build start time and report, via PLOG, binaries that were
+# used without a corresponding source dependency.
+sub check_watches {
+    my($prg, @st, %used);
+
+    foreach $prg (keys %main::this_watches) {
+	if (!(@st = stat( $prg ))) {
+	    print "Watch: $prg: stat failed\n" if $main::debug;
+	    next;
+	}
+	# $st[8] is atime: touched since the build started?
+	if ($st[8] > $main::build_start_time) {
+	    my $pkg = $main::this_watches{$prg};
+	    my $prg2 = $prg;
+	    $prg2 =~ s/^\Q$main::chroot_dir\E// if $main::chroot_dir;
+	    push( @{$used{$pkg}}, $prg2 )
+		if @main::have_dsc_build_deps ||
+		   !isin( $pkg, @conf::ignore_watches_no_build_deps );
+	}
+	else {
+	    print "Watch: $prg: untouched\n" if $main::debug;
+	}
+    }
+    return if !%used;
+
+    print PLOG <<EOF;
+
+NOTE: The package could have used binaries from the following packages
+(access time changed) without a source dependency:
+EOF
+    foreach (keys %used) {
+	print PLOG "  $_: @{$used{$_}}\n";
+    }
+    print PLOG "\n";
+}
+
+# Check whether the given package-version appears in the SKIP file.
+# Matching entries are logged and removed from SKIP (the file is
+# rewritten without them).  Returns the number of matches found
+# (truthy means: skip this build).
+sub should_skip {
+    my $pkgv = shift;
+
+    fixup_pkgv( \$pkgv );
+    lock_file( "SKIP" );
+    goto unlock if !open( F, "SKIP" );
+    my @pkgs = <F>;
+    close( F );
+
+    if (!open( F, ">SKIP" )) {
+	print "Can't open SKIP for writing: $!\n",
+	      "Would write: @pkgs\nminus $pkgv\n";
+	goto unlock;
+    }
+    my $found = 0;
+    foreach (@pkgs) {
+	if (/^\Q$pkgv\E$/) {
+	    ++$found;
+	    print PLOG "$pkgv found in SKIP file -- skipping building it\n";
+	}
+	else {
+	    # keep non-matching entries
+	    print F $_;
+	}
+    }
+    close( F );
+  unlock:
+    unlock_file( "SKIP" );
+    return $found;
+}
+
+# Append "<pkgv> <time>" to the SBUILD-GIVEN-BACK file (under the file
+# lock) to record a build that was given back to the queue.
+# Note: the "unlock:" label is never jumped to here; it only mirrors
+# the goto pattern used by should_skip().
+sub add_givenback {
+    my $pkgv = shift;
+    my $time = shift;
+    local( *F );
+
+    lock_file( "SBUILD-GIVEN-BACK" );
+
+    if (open( F, ">>SBUILD-GIVEN-BACK" )) {
+	print F "$pkgv $time\n";
+	close( F );
+    }
+    else {
+	print PLOG "Can't open SBUILD-GIVEN-BACK: $!\n";
+    }
+
+  unlock:
+    unlock_file( "SBUILD-GIVEN-BACK" );
+}
+
+# Mail the contents of $file to $to with the given subject by piping
+# through $conf::mailprog.  Returns 1 on success, 0 on any failure
+# (unreadable file, pipe open failure, non-zero mailer exit).
+sub send_mail {
+    my $to = shift;
+    my $subject = shift;
+    my $file = shift;
+    local( *MAIL, *F );
+
+    if (!open( F, "<$file" )) {
+	warn "Cannot open $file for mailing: $!\n";
+	return 0;
+    }
+    # don't die if the mailer exits early and closes the pipe
+    local $SIG{'PIPE'} = 'IGNORE';
+
+    if (!open( MAIL, "|$conf::mailprog -oem $to" )) {
+	warn "Could not open pipe to $conf::mailprog: $!\n";
+	close( F );
+	return 0;
+    }
+
+    print MAIL "Subject: $subject\n\n";
+    while( <F> ) {
+	# dot-stuff a lone "." so sendmail doesn't treat it as end-of-message
+	print MAIL "." if $_ eq ".\n";
+	print MAIL $_;
+    }
+
+    close( F );
+    if (!close( MAIL )) {
+	warn "$conf::mailprog failed (exit status $?)\n";
+	return 0;
+    }
+    return 1;
+}
+
+
+# Record the given package names as installed by this build
+# (in $main::changes->{'installed'}).
+sub set_installed {
+    foreach (@_) {
+	$main::changes->{'installed'}->{$_} = 1;
+    }
+    print "Added to installed list: @_\n" if $main::debug;
+}
+
+# Record the given package names as removed.  A package we installed
+# ourselves earlier is moved from 'installed' to 'auto-removed' so the
+# bookkeeping stays consistent.
+sub set_removed {
+    foreach (@_) {
+	$main::changes->{'removed'}->{$_} = 1;
+	if (exists $main::changes->{'installed'}->{$_}) {
+	    delete $main::changes->{'installed'}->{$_};
+	    $main::changes->{'auto-removed'}->{$_} = 1;
+	    print "Note: $_ was installed\n" if $main::debug;
+	}
+    }
+    print "Added to removed list: @_\n" if $main::debug;
+}
+
+# Drop the given package names from the 'installed' record.
+sub unset_installed {
+    foreach (@_) {
+	delete $main::changes->{'installed'}->{$_};
+    }
+    print "Removed from installed list: @_\n" if $main::debug;
+}
+
+# Drop the given package names from the 'removed' record; packages
+# that were auto-removed are revived back into 'installed' (inverse of
+# set_removed).
+sub unset_removed {
+    foreach (@_) {
+	delete $main::changes->{'removed'}->{$_};
+	if (exists $main::changes->{'auto-removed'}->{$_}) {
+	    delete $main::changes->{'auto-removed'}->{$_};
+	    $main::changes->{'installed'}->{$_} = 1;
+	    print "Note: revived $_ to installed list\n" if $main::debug;
+	}
+    }
+    print "Removed from removed list: @_\n" if $main::debug;
+}
+
+# Return the final path component of $_[0] (like basename(1)).
+sub basename {
+    my $b = $_[0];
+    $b =~ s,^.*/,,;
+    return $b;
+}
+
+# Return the free-space column (4th field of the last line) of
+# "/bin/df $dir".  Units are whatever df prints (1K blocks by default
+# -- TODO confirm callers' expectation).
+sub df {
+    my $dir = shift;
+
+    my $free = `/bin/df $dir | tail -1`;
+    my @free = split( /\s+/, $free );
+    return $free[3];
+}
+
+# True if the first argument is string-equal to any of the remaining
+# arguments (list membership test).
+sub isin {
+    my $val = shift;
+    return grep( $_ eq $val, @_ );
+}
+
+# Normalize a package-version string in place (argument is a scalar
+# ref): strip any leading path and any .dsc/.diff.gz/.tar.gz/.deb or
+# _<arch>.changes/.deb suffix.
+sub fixup_pkgv {
+    my $pkgv = shift;
+
+    $$pkgv =~ s,^.*/,,; # strip path
+    $$pkgv =~ s/\.(dsc|diff\.gz|tar\.gz|deb)$//; # strip extension
+    $$pkgv =~ s/_[a-zA-Z\d+~-]+\.(changes|deb)$//; # strip extension
+}
+
+# Render a list of dependency structs back into Debian control syntax:
+# alternatives joined with "|", deps with ", ", "!" marking negated
+# entries and "(rel version)" appended where a relation is present.
+sub format_deps {
+    return join( ", ",
+		 map { join( "|",
+			     map { ($_->{'Neg'} ? "!" : "") .
+				   $_->{'Package'} .
+				   ($_->{'Rel'} ? " ($_->{'Rel'} $_->{'Version'})":"")}
+			     scalar($_), @{$_->{'Alternatives'}}) } @_ );
+}
+
+# Acquire "<file>.lock" by exclusive creation.  If the lock exists,
+# read its "<pid> <user>" contents; stale locks (dead pid) are removed
+# and retried.  Otherwise sleep $main::lock_interval and retry --
+# forever when $for_srcdep is set, else up to $main::max_lock_trys
+# times before giving up (returning without the lock!).
+# On success the lock file is written with our own pid and user.
+sub lock_file {
+    my $file = shift;
+    my $for_srcdep = shift;
+    my $lockfile = "$file.lock";
+    my $try = 0;
+
+  repeat:
+    if (!sysopen( F, $lockfile, O_WRONLY|O_CREAT|O_TRUNC|O_EXCL, 0644 )){
+	if ($! == EEXIST) {
+	    # lock file exists, wait
+	    goto repeat if !open( F, "<$lockfile" );
+	    my $line = <F>;
+	    my ($pid, $user);
+	    close( F );
+	    if ($line !~ /^(\d+)\s+([\w\d.-]+)$/) {
+		warn "Bad lock file contents ($lockfile) -- still trying\n";
+	    }
+	    else {
+		($pid, $user) = ($1, $2);
+		# kill(0, ...) probes process existence without signalling
+		if (kill( 0, $pid ) == 0 && $! == ESRCH) {
+		    # process doesn't exist anymore, remove stale lock
+		    warn "Removing stale lock file $lockfile ".
+			 " (pid $pid, user $user)\n";
+		    unlink( $lockfile );
+		    goto repeat;
+		}
+	    }
+	    ++$try;
+	    if (!$for_srcdep && $try > $main::max_lock_trys) {
+		warn "Lockfile $lockfile still present after ".
+		     $main::max_lock_trys*$main::lock_interval.
+		     " seconds -- giving up\n";
+		return;
+	    }
+	    print PLOG "Another sbuild process ($pid by $user) is currently ",
+		       "installing or\n",
+		       "removing packages -- waiting...\n"
+		if $for_srcdep && $try == 1;
+	    sleep $main::lock_interval;
+	    goto repeat;
+	}
+	warn "Can't create lock file $lockfile: $!\n";
+    }
+    F->print("$$ $ENV{'LOGNAME'}\n");
+    F->close();
+}
+
+# Release a lock taken by lock_file() by deleting "<file>.lock".
+# No ownership check is performed -- callers must only unlock locks
+# they hold.
+sub unlock_file {
+    my $file = shift;
+    my $lockfile = "$file.lock";
+
+    unlink( $lockfile );
+}
+
+# Set $main::new_dpkg if the installed dpkg reports a version >=
+# 1.4.1.18 (checked via dpkg's own --compare-versions).
+sub check_dpkg_version {
+    my $t = `$conf::dpkg --version`;
+    my $version = ($t =~ /version\s+(\S+)/)[0];
+
+    $main::new_dpkg = 1
+	if 0 == system "$conf::dpkg --compare-versions '$version' ge 1.4.1.18";
+}
+
+# Derive a binary-NMU version from $v using $main::binNMUver:
+#   native version (no "-")        -> "v.0.N"
+#   debian revision "x"            -> "upstream-x.0.N"
+#   debian revision "x.y"          -> "upstream-x.y.N"   (N replaces nothing,
+#                                     it is appended as written below)
+#   debian revision with >2 parts  -> last part incremented by N
+sub binNMU_version {
+    my $v = shift;
+
+    if ($v =~ /^(.*)-([^-]+)$/) {
+	my ($upstream, $debian) = ($1, $2);
+	my @parts = split( /\./, $debian );
+	if (@parts == 1) {
+	    return "$upstream-$debian.0.$main::binNMUver";
+	}
+	elsif (@parts == 2) {
+	    return "$upstream-$debian.$main::binNMUver";
+	}
+	else {
+	    # three or more components: bump the last one
+	    $parts[$#parts]+=$main::binNMUver;
+	    return "$upstream-".join( ".", @parts );
+	}
+    }
+    else {
+	return "$v.0.$main::binNMUver";
+    }
+}
+
+# Signal handler for INT/QUIT/TERM/ALRM: record unfinished jobs (batch
+# mode), kill the running sub-process, release the srcdep lock, close
+# logs and exit 0.
+sub shutdown {
+    my $signame = shift;
+    my($job,@npkgs,@pkgs);
+    local( *F );
+
+    # block further signals while shutting down
+    $SIG{'INT'} = 'IGNORE';
+    $SIG{'QUIT'} = 'IGNORE';
+    $SIG{'TERM'} = 'IGNORE';
+    $SIG{'ALRM'} = 'IGNORE';
+    $SIG{'PIPE'} = 'IGNORE';
+    print PLOG "sbuild received SIG$signame -- shutting down\n";
+    chdir( $main::cwd );
+
+    goto not_ni_shutdown if !$main::batchmode;
+
+    # most important: dump out names of unfinished jobs to REDO
+    foreach $job (@ARGV) {
+	my $job2 = $job;
+	fixup_pkgv( \$job2 );
+	push( @npkgs, $job2 )
+	    if !$main::job_state{$job} || $job eq $main::current_job;
+    }
+    print LOG "The following jobs were not finished: @npkgs\n";
+
+    my $f = "$main::HOME/build/REDO";
+    if (-f "$main::HOME/build/REDO.lock") {
+	# if lock file exists, write to a different file -- timing may
+	# be critical
+	$f = "$main::HOME/build/REDO2";
+    }
+    if (open( F, "<$f" )) {
+	@pkgs = <F>;
+	close( F );
+    }
+    if (open( F, ">>$f" )) {
+	foreach $job (@npkgs) {
+	    # don't duplicate entries already queued for redo
+	    next if grep( /^\Q$job\E\s/, @pkgs );
+	    print F "$job $main::distribution $main::component\n";
+	}
+	close( F );
+    }
+    else {
+	print "Cannot open $f: $!\n";
+    }
+    # marker file: the REDO dump completed
+    open( F, ">SBUILD-REDO-DUMPED" );
+    close( F );
+    print LOG "SBUILD-REDO-DUMPED created\n";
+    unlink( "SBUILD-FINISHED" );
+
+    # next: say which packages should be uninstalled
+    @pkgs = keys %{$main::changes->{'installed'}};
+    if (@pkgs) {
+	if (open( F, ">>NEED-TO-UNINSTALL" )) {
+	    print F "@pkgs\n";
+	    close( F );
+	}
+	print "The following packages still need to be uninstalled ",
+	      "(--purge):\n@pkgs\n";
+    }
+
+  not_ni_shutdown:
+    # next: kill currently running command (if one)
+    if ($main::sub_pid) {
+	print "Killing $main::sub_task subprocess $main::sub_pid\n";
+	# kill the whole process group (-15) via sudo
+	system "$conf::sudo perl -e 'kill( -15, $main::sub_pid )'";
+    }
+    remove_srcdep_lock_file();
+
+    # close logs and send mails
+    if ( $main::current_job ) {
+	fixup_pkgv( \$main::current_job );
+	close_pkg_log( $main::current_job );
+    }
+    close_log();
+    unlink( $main::jobs_file ) if $main::batchmode;
+    $? = 0; $! = 0;
+    exit 0;
+}
=== added file 'lib/canonical/buildd/sbuild-package'
--- lib/canonical/buildd/sbuild-package 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/sbuild-package 2011-11-17 19:45:28 +0000
@@ -0,0 +1,92 @@
+#!/bin/sh
+#
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+#
+# Authors: Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx>
+#          and Adam Conrad <adam.conrad@xxxxxxxxxxxxx>
+
+# Buildd Slave tool to update a debian chroot
+
+# Expects build id as arg 1, makes build-id to contain the build
+# Expects rest of arguments to be to pass to sbuild
+
+# Needs SBUILD to be set to a sbuild instance with passwordless sudo ability
+
+exec 2>&1
+
+export LANG=C LC_ALL=C
+
+SBUILD=/usr/bin/sbuild
+BUILDID=$1
+ARCHITECTURETAG=$2
+SUITE=$3
+
+# remaining args are passed straight through to sbuild
+shift 3
+
+export NR_PROCESSORS=$(grep ^processor /proc/cpuinfo | wc -l)
+
+echo "Initiating build $BUILDID with $NR_PROCESSORS processor cores."
+
+# single-core machines: unset so parallel-build logic stays off
+if [ $NR_PROCESSORS -lt 2 ]; then
+    unset NR_PROCESSORS
+fi
+
+cd "$HOME/build-$BUILDID"
+
+hostarch=$(dpkg --print-architecture)
+
+# Select personality wrapper and whether implicit-pointer warnings are
+# fatal: 32-bit builds on 64-bit kernels run under linux32, and
+# 32-bit-pointer arches only warn (--warnonly).
+WARN=""
+case $hostarch in
+  hppa|powerpc|sparc)
+    SBUILD="linux32 $SBUILD"
+    WARN="--warnonly"
+    ;;
+  i386|lpia|armel)
+    # SBUILD is already correct
+    WARN="--warnonly"
+    ;;
+  amd64)
+    if [ "$hostarch" != "$ARCHITECTURETAG" ]; then
+      SBUILD="linux32 $SBUILD"
+      WARN="--warnonly"
+    fi
+    ;;
+  ia64)
+    ;;
+esac
+
+# older suites predate the strict pointer check; warn only
+case $SUITE in
+  warty*|hoary*|breezy*|dapper*|edgy*|feisty*|gutsy*|hardy*|karmic*)
+    WARN="--warnonly"
+    ;;
+esac
+
+$SBUILD "$@" | /usr/bin/check-implicit-pointer-functions --inline $WARN
+# NOTE(review): without pipefail, $? is the exit status of the filter,
+# not of sbuild itself; overall build state is re-derived from the
+# build-progress file below -- confirm this is intended.
+RET=$?
+
+# really successful returns
+[ $RET = 0 ] || exit $RET
+
+STATE=$(cat build-progress | awk '{print $2}')
+STEP=$(cat build-progress | awk '{print $3}')
+
+# If the build was successful; return 0
+[ "$STATE" = "successful" ] && exit 0
+
+# If we think it's a dep-wait problem, exit 1
+[ "$STEP" = "install-deps" ] && exit 1
+
+# Is this a state where auto-give-back should kick in; exit 2
+[ "$STEP" = "find-dsc" ] && exit 2
+[ "$STEP" = "fetch-src" ] && exit 2
+[ "$STEP" = "unpack-check" ] && exit 2
+[ "$STEP" = "check-space" ] && exit 2
+[ "$STEP" = "install-deps-env" ] && exit 2
+
+# Any other reason (which we'll treat as a build failure), exit 3
+exit 3
+
+# There is an exit code of "4" that means the builder itself
+# is having problems. Currently, we don't use it at all.
+# exit 4
=== added file 'lib/canonical/buildd/sbuild.conf'
--- lib/canonical/buildd/sbuild.conf 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/sbuild.conf 2011-11-17 19:45:28 +0000
@@ -0,0 +1,155 @@
+#
+# /etc/sbuild.conf: global sbuild configurations that are
+# supposed to be changed only by the sbuild author, not by the user.
+# Of course you're free to override values here in /etc/sbuild.conf.local
+#
+# commented out stuff are defaults
+#
+# $Id: sbuild.conf,v 1.54 2002/10/10 18:50:45 rnhodek Exp $
+#
+
+# File with the source dependencies
+#$source_dependencies = "/etc/source-dependencies";
+
+# Default distribution to build:
+#$default_distribution="unstable"
+#$default_distribution=""
+
+# Environment variables to set/override:
+# (must be a list assignment in parens -- assigning an anonymous hash
+# ref "{...}" to %ENV_OVERRIDES would store the ref's stringification
+# as a key with an undef value instead of LC_ALL => 'C')
+%ENV_OVERRIDES = (
+    'LC_ALL' => 'C',
+);
+
+# Directory for writing build logs to
+#$log_dir = "$HOME/logs";
+
+# Name of a database for logging package build times (optional, no database
+# is written if empty)
+$avg_time_db = "/var/debbuild/avg-build-times";
+
+# Name of a database for logging package space requirement (optional)
+$avg_space_db = "/var/debbuild/avg-build-space";
+
+# Name for dir for source dependency lock files
+$srcdep_lock_dir = "/var/debbuild/srcdep-lock";
+
+# When to purge the build directory afterwards; possible values are "never",
+# "successful", and "always"
+#$purge_build_directory="successful";
+
+# PGP-related option to pass to dpkg-buildpackage. Usually neither .dsc
+# nor .changes files shall be signed automatically.
+#$pgp_options = "-us -uc";
+
+# After that time (in minutes) of inactivity a build is terminated. Activity
+# is measured by output to the log file.
+$stalled_pkg_timeout = 150;
+
+# Some packages may exceed the general timeout (e.g. redirecting output to
+# a file) and need a different timeout. Below are some examples.
+#%individual_stalled_pkg_timeout = (
+#	smalleiffel => 300,
+#	jade => 300,
+#	atlas => 300,
+#	glibc => 1000,
+#	xfree86 => 1000,
+#	'gcc-3.3' => 300,
+#	kwave => 600
+#);
+
+# Binaries for which the access time is controlled if they're not listed as
+# source dependencies (note: /usr/bin is added if executable name doesn't
+# start with '/')
+%watches = (
+    # general utilities
+    sharutils => [qw(uuencode uudecode)],
+    emacs20 => ["emacs"],
+    groff => ["groff"],
+    bzip2 => [qw(bzip2 bunzip2 bzcat bz2cat)],
+# unfortunately, too much stuff uses the following, so there are much
+# false alarms :-(
+#	perl => ["perl"],
+#	diff => [qw(diff diff3 sdiff cmp)],
+#	gzip => [qw(gzip gunzip zcat)],
+    # development tools
+    debhelper => [qw(dh_clean dh_testdir dh_testversion dh_buildpackage)],
+    debmake => ["debstd"],
+    bison => ["bison"],
+    flex => ["flex"],
+    gettext => [qw(gettext msgfmt msgmerge xgettext gettextize)],
+    m4 => ["m4"],
+    f2c => [qw(f2c fc)],
+    g77 => ["g77"],
+    libtool => [qw(/usr/share/aclocal/libtool.m4
+		   /usr/share/libtool/ltmain.sh)],
+# unfortunately, too much stuff uses the following in predefined autoconf
+# macros, but doesn't really need it.
+#	automake => [qw(automake aclocal)],
+#	autoconf => [qw(autoconf autoheader)],
+    "python-dev" => [qw(python /usr/include/python/Python.h)],
+    gperf => ["gperf"],
+    rcs => [qw(ci co ident rcs rcsdiff rlog)],
+    "netkit-rpc" => ["rpcgen"],
+    # doc formatting stuff
+    "sgml-tools" => ["sgmltools.v1"],
+    "debiandoc-sgml" => [qw(debiandoc2html debiandoc2latex2e debiandoc2texinfo
+			    debiandoc2text debiandoc2dvi debiandoc2dvips
+			    debiandoc2info debiandoc2ps)],
+    "jade" => ["jade"],
+    "liburi-perl" => ["/usr/lib/perl5/URI/URL.pm"],
+    "tetex-bin" => [qw(tex latex dvips)],
+    texi2html => ["texi2html"],
+    latex2html => ["latex2html"],
+    # special stuff
+    "apache-dev" => ["apxs"],
+
+    # test for X libs
+    "xlibs-dev" => [qw(/usr/X11R6/lib/libX11.a /usr/X11R6/lib/libX11.so /usr/X11R6/lib/libICE.a /usr/X11R6/lib/libICE.so /usr/X11R6/lib/libXp.a /usr/X11R6/lib/libXp.so)]
+);
+
+# Ignore watches on the following packages if the package doesn't have its own
+# build dependencies in the .dsc
+@ignore_watches_no_build_deps = qw();
+
+# If a source dependency is an alternative that is provided by more than one
+# package, this list can select which one will be installed (if nothing is
+# defined, a random provider will be used)
+%alternatives = (
+    "automaken" => "automake1.9",
+    "info-browser" => "info",
+    "httpd" => "apache2-mpm-worker",
+    "postscript-viewer" => "gs-esp",
+    "postscript-preview" => "psutils",
+    "www-browser" => "lynx",
+    "awk" => "gawk",
+    "c-shell" => "tcsh",
+    "wordlist" => "wbritish",
+    "tclsh" => "tcl8.4",
+    "wish" => "tk8.4",
+    "c-compiler" => "gcc",
+    "fortran77-compiler" => "g77",
+    "java-compiler" => "gcj",
+    "libc-dev" => "libc6-dev",
+    "libgl-dev" => "libgl1-mesa-dev",
+    "libglu-dev" => "libglu1-mesa-dev",
+    "libncurses-dev" => "libncurses5-dev",
+    "libreadline-dev" => "libreadline5-dev",
+    "libz-dev" => "zlib1g-dev",
+    "emacsen" => "emacs21",
+    "mail-transport-agent" => "postfix",
+    "mail-reader" => "mailx",
+    "news-transport-system" => "inn",
+    "news-reader" => "nn", # universe
+    "pgp" => "pgp-i",
+    "xserver" => "xserver-xorg",
+    "libpng-dev" => "libpng12-dev",
+    "mysql-dev" => "libmysqlclient-dev",
+    "giflib-dev" => "libungif4-dev",
+    "freetype2-dev" => "libttf-dev"
+);
+
+# read local config file
+# require '/etc/sbuild.conf.local';
+
+# don't remove this, Perl needs it:
+1;
=== added file 'lib/canonical/buildd/sbuildrc'
--- lib/canonical/buildd/sbuildrc 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/sbuildrc 2011-11-17 19:45:28 +0000
@@ -0,0 +1,32 @@
+# Per-user sbuild configuration (~/.sbuildrc) for Launchpad buildds.
+# @ARCHTAG@ and @FQDN@ are placeholders substituted at install time.
+
+# Mail address where logs are sent to (mandatory, no default!)
+$mailto = "buildd-maint@xxxxxxxxxxxxx";
+
+$srcdep_lock_dir = "/home/buildd/";
+
+# no per-distribution mail overrides
+%mailto = (
+);
+
+$arch="@ARCHTAG@";
+
+# Maintainer name to use in .changes files (mandatory, no default!)
+$maintainer_name="Ubuntu Build Daemon <buildd\@@FQDN@>";
+
+# When to purge the build directory afterwards; possible values are
+# "never", "successful", and "always"
+ $purge_build_directory="always";
+#$purge_build_directory="successful";
+
+# Hack a "frozen" into the Distribution: line if compiling for frozen
+#$hack_changes_for_frozen = 1;
+
+# After that time (in minutes) of inactivity a build is terminated.
+# Activity
+# is measured by output to the log file.
+$stalled_pkg_timeout = 150;
+
+# Some packages may exceed the general timeout (e.g. redirecting output to
+# a file) and need a different timeout. Below are some examples.
+#%individual_stalled_pkg_timeout = (
+#	ivtools => 600,
+#	smalleiffel => 600
+#);
=== added file 'lib/canonical/buildd/scan-for-processes'
--- lib/canonical/buildd/scan-for-processes 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/scan-for-processes 2011-11-17 19:45:28 +0000
@@ -0,0 +1,43 @@
+#!/bin/bash
+#
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# Buildd Slave tool to scan a chroot in case any processes are underneath it
+
+## This script uses bashisms, must be run under bash
+
+# Expects build id as arg 1, makes build-id to contain the build
+
+# Needs SUDO to be set to a sudo instance for passwordless access
+
+SUDO=/usr/bin/sudo
+BUILDID="$1"
+REALHOME=$(cd $HOME && pwd -P)
+
+set -e
+
+exec 2>&1
+
+# When not yet root, re-exec ourselves under sudo with the *chroot
+# path* as $1.  On the root pass BUILDID/PREFIX therefore hold the
+# chroot path, which is what the /proc/*/root prefix match below needs
+# (and what the "build $BUILDID" message then prints).
+[ $(id -u) = "0" ] || exec $SUDO $0 "$REALHOME/build-$BUILDID/chroot-autobuild"
+
+echo "Scanning for processes to kill in build $BUILDID..."
+
+PREFIX="$BUILDID"
+FOUND=0
+
+for ROOT in /proc/*/root; do
+    LINK=$(readlink $ROOT)
+    if [ "x$LINK" != "x" ]; then
+        if [ "x${LINK:0:${#PREFIX}}" = "x$PREFIX" ]; then
+            # this process is in the chroot...
+            PID=$(basename $(dirname "$ROOT"))
+            kill -9 "$PID"
+            FOUND=1
+        fi
+    fi
+done
+
+# rescan until nothing is found, to catch processes forked meanwhile
+if [ "x$FOUND" = "x1" ]; then
+    exec $0 $1
+fi
=== added file 'lib/canonical/buildd/slave.py'
--- lib/canonical/buildd/slave.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/slave.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,686 @@
+# Copyright 2009, 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# Authors: Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx>
+# and Adam Conrad <adam.conrad@xxxxxxxxxxxxx>
+
+# Buildd Slave implementation
+
+__metaclass__ = type
+
+import hashlib
+import os
+import re
+import urllib2
+import xmlrpclib
+
+from twisted.internet import protocol
+from twisted.internet import reactor
+from twisted.internet import process
+from twisted.web import xmlrpc
+
+# cprov 20080611: in python2.4 posixfile.SEEK_END is deprecated and our
+# importfascist-check denies its import. When we migrate to python2.5,
+# we can use os.SEEK_END. See bug #239213.
+SEEK_END = 2
+
+# Shared read-only stdin for subprocesses spawned by BuildManager.
+devnull = open("/dev/null", "r")
+
+
+def _sanitizeURLs(text_seq):
+    """A generator that deletes URL passwords from a string sequence.
+
+    This generator removes user/password data from URLs if embedded
+    in the latter as follows: scheme://user:passwd@netloc/path.
+
+    :param text_seq: A sequence of strings (that may contain URLs).
+    :yield: Each line with any "user:password@" URL credentials
+        stripped out.
+    """
+    # This regular expression will be used to remove authentication
+    # credentials from URLs.
+    password_re = re.compile('://([^:]+:[^@]+@)(\S+)')
+
+    for line in text_seq:
+        sanitized_line = password_re.sub(r'://\2', line)
+        yield sanitized_line
+
+
+# XXX cprov 2005-06-28:
+# RunCapture can be replaced with a call to
+#
+# twisted.internet.utils.getProcessOutputAndValue
+#
+# when we start using Twisted 2.0.
+class RunCapture(protocol.ProcessProtocol):
+    """Run a command and capture its output to a slave's log"""
+
+    def __init__(self, slave, callback):
+        # slave: BuildDSlave providing .log() and .builderstatus
+        # callback: invoked with the child's exit code on termination
+        # killCall: DelayedCall armed by BuildManager.abort(), cancelled
+        #           here if the process dies on its own
+        self.slave = slave
+        self.notify = callback
+        self.killCall = None
+
+    def outReceived(self, data):
+        """Pass on stdout data to the log."""
+        self.slave.log(data)
+
+    def errReceived(self, data):
+        """Pass on stderr data to the log.
+
+        With a bit of luck we won't interleave horribly."""
+        self.slave.log(data)
+
+    def processEnded(self, statusobject):
+        """This method is called when a child process got terminated.
+
+        Three actions are required at this point: identify if we are within an
+        "aborting" process, eliminate pending calls to "kill" and invoke the
+        programmed notification callback. We only really care about invoking
+        the notification callback last thing in this method. The order
+        of the rest of the method is not critical.
+        """
+        # finishing the ABORTING workflow
+        if self.slave.builderstatus == BuilderStatus.ABORTING:
+            self.slave.builderstatus = BuilderStatus.ABORTED
+
+        # check if there is a pending request to kill the process,
+        # in the affirmative case simply cancel this request since it
+        # already died.
+        if self.killCall and self.killCall.active():
+            self.killCall.cancel()
+
+        # notify the slave, it'll perform the required actions
+        self.notify(statusobject.value.exitCode)
+
+
+class BuildManager(object):
+    """Build Daemon slave build manager abstract parent"""
+
+    def __init__(self, slave, buildid):
+        """Create a BuildManager.
+
+        :param slave: A `BuildDSlave`.
+        :param buildid: Identifying string for this build.
+        """
+        object.__init__(self)
+        self._buildid = buildid
+        self._slave = slave
+        # helper script paths read from the [allmanagers] config section
+        self._unpackpath = slave._config.get("allmanagers", "unpackpath")
+        self._cleanpath = slave._config.get("allmanagers", "cleanpath")
+        self._mountpath = slave._config.get("allmanagers", "mountpath")
+        self._umountpath = slave._config.get("allmanagers", "umountpath")
+        self.is_archive_private = False
+        self.home = os.environ['HOME']
+
+    def runSubProcess(self, command, args):
+        """Run a sub process capturing the results in the log.
+
+        Asynchronous: returns immediately; self.iterate() is called with
+        the exit code when the child terminates.
+        """
+        self._subprocess = RunCapture(self._slave, self.iterate)
+        self._slave.log("RUN: %s %r\n" % (command, args))
+        # child stdin comes from /dev/null; stdout/stderr are captured
+        childfds = {0: devnull.fileno(), 1: "r", 2: "r"}
+        reactor.spawnProcess(
+            self._subprocess, command, args, env=os.environ,
+            path=self.home, childFDs=childfds)
+
+    def doUnpack(self):
+        """Unpack the build chroot."""
+        self.runSubProcess(
+            self._unpackpath,
+            ["unpack-chroot", self._buildid, self._chroottarfile])
+
+    def doCleanup(self):
+        """Remove the build tree etc."""
+        self.runSubProcess(self._cleanpath, ["remove-build", self._buildid])
+
+        # Sanitize the URLs in the buildlog file if this is a build
+        # in a private archive.
+        if self.is_archive_private:
+            self._slave.sanitizeBuildlog(self._slave.cachePath("buildlog"))
+
+    def doMounting(self):
+        """Mount things in the chroot, e.g. proc."""
+        self.runSubProcess( self._mountpath,
+                            ["mount-chroot", self._buildid])
+
+    def doUnmounting(self):
+        """Unmount the chroot."""
+        self.runSubProcess( self._umountpath,
+                            ["umount-chroot", self._buildid])
+
+    def initiate(self, files, chroot, extra_args):
+        """Initiate a build given the input files.
+
+        Symlinks the cached input files into the build directory and
+        kicks off the subprocess chain via a no-op echo.
+
+        Please note: the 'extra_args' dictionary may contain a boolean
+        value keyed under the 'archive_private' string. If that value
+        evaluates to True the build at hand is for a private archive.
+        """
+        os.mkdir("%s/build-%s" % (self.home, self._buildid))
+        for f in files:
+            os.symlink( self._slave.cachePath(files[f]),
+                        "%s/build-%s/%s" % (self.home,
+                                            self._buildid, f))
+        self._chroottarfile = self._slave.cachePath(chroot)
+
+        # Check whether this is a build in a private archive and
+        # whether the URLs in the buildlog file should be sanitized
+        # so that they do not contain any embedded authentication
+        # credentials.
+        if extra_args.get('archive_private'):
+            self.is_archive_private = True
+
+        self.runSubProcess(
+            "/bin/echo", ["echo", "Forking build subprocess..."])
+
+    def iterate(self, success):
+        """Perform an iteration of the slave.
+
+        The BuildManager tends to work by invoking several
+        subprocesses in order. the iterate method is called by the
+        object created by runSubProcess to gather the results of the
+        sub process.
+        """
+        raise NotImplementedError("BuildManager should be subclassed to be "
+                                  "used")
+
+    def abort(self):
+        """Abort the build by killing the subprocess."""
+        # NOTE(review): `alreadyfailed` is never initialised in this
+        # class -- presumably set by subclasses before abort() can be
+        # reached; confirm.
+        if not self.alreadyfailed:
+            self.alreadyfailed = True
+            # Either SIGKILL and SIGTERM presents the same behavior,
+            # the process is just killed some time after the signal was sent
+            # 10 s ~ 40 s, and returns None as exit_code, instead of the normal
+            # integer. See further info on DebianBuildermanager.iterate in
+            # debian.py
+            # XXX cprov 2005-09-02:
+            # we may want to follow the canonical.tachandler kill process style,
+            # which sends SIGTERM to the process wait a given timeout and if was
+            # not killed sends a SIGKILL. IMO it only would be worth if we found
+            # different behaviour than the previous described.
+            self._subprocess.transport.signalProcess('TERM')
+            # alternatively to simply sending SIGTERM, we can pend a request to
+            # send SIGKILL to the process if nothing happened in 10 seconds
+            # see base class process
+            self._subprocess.killCall = reactor.callLater(10, self.kill)
+
+    def kill(self):
+        """Send SIGKILL to child process
+
+        Mask exception generated when the child process has already exited.
+        """
+        try:
+            self._subprocess.transport.signalProcess('KILL')
+        except process.ProcessExitedAlready:
+            self._slave.log("ABORTING: Process Exited Already\n")
+
+class BuilderStatus:
+    """Status values for the builder.
+
+    Plain string constants (not an enum) exchanged over XML-RPC.
+    """
+
+    IDLE = "BuilderStatus.IDLE"
+    BUILDING = "BuilderStatus.BUILDING"
+    WAITING = "BuilderStatus.WAITING"
+    ABORTING = "BuilderStatus.ABORTING"
+    ABORTED = "BuilderStatus.ABORTED"
+
+    # error responses, not states of the builder lifecycle
+    UNKNOWNSUM = "BuilderStatus.UNKNOWNSUM"
+    UNKNOWNBUILDER = "BuilderStatus.UNKNOWNBUILDER"
+
+
+class BuildStatus:
+    """Status values for builds themselves.
+
+    Plain string constants (not an enum) exchanged over XML-RPC.
+    """
+
+    OK = "BuildStatus.OK"
+    DEPFAIL = "BuildStatus.DEPFAIL"
+    GIVENBACK = "BuildStatus.GIVENBACK"
+    PACKAGEFAIL = "BuildStatus.PACKAGEFAIL"
+    CHROOTFAIL = "BuildStatus.CHROOTFAIL"
+    BUILDERFAIL = "BuildStatus.BUILDERFAIL"
+
+
+class BuildDSlave(object):
+ """Build Daemon slave. Implementation of most needed functions
+ for a Build-Slave device.
+ """
+
+    def __init__(self, config):
+        """Initialise an IDLE slave from the given config.
+
+        :param config: ConfigParser-style object with a "slave"
+            section providing at least "filecache".
+        :raises ValueError: if the configured filecache path is not an
+            existing directory.
+        """
+        object.__init__(self)
+        self._config = config
+        self.builderstatus = BuilderStatus.IDLE
+        self._cachepath = self._config.get("slave","filecache")
+        self.buildstatus = BuildStatus.OK
+        self.waitingfiles = {}
+        self.builddependencies = ""
+        self._log = None
+
+        if not os.path.isdir(self._cachepath):
+            raise ValueError("FileCache path is not a dir")
+
+    def getArch(self):
+        """Return the Architecture tag for the slave.
+
+        Read from the "architecturetag" key of the "slave" config section.
+        """
+        return self._config.get("slave","architecturetag")
+
+    # NOTE(review): parameter name shadows the `file` builtin; kept for
+    # interface compatibility.
+    def cachePath(self, file):
+        """Return the path in the cache of the file specified."""
+        return os.path.join(self._cachepath, file)
+
+    def setupAuthHandler(self, url, username, password):
+        """Set up a BasicAuthHandler to open the url.
+
+        :param url: The URL that needs authenticating.
+        :param username: The username for authentication.
+        :param password: The password for authentication.
+        :return: The OpenerDirector instance.
+
+        This helper installs a urllib2.HTTPBasicAuthHandler that will deal
+        with any HTTP basic authentication required when opening the
+        URL.
+        """
+        password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
+        # None realm: credentials apply to any realm at this URL
+        password_mgr.add_password(None, url, username, password)
+        handler = urllib2.HTTPBasicAuthHandler(password_mgr)
+        opener = urllib2.build_opener(handler)
+        return opener
+
+    def ensurePresent(self, sha1sum, url=None, username=None, password=None):
+        """Ensure we have the file with the checksum specified.
+
+        Optionally you can provide the librarian URL and
+        the build slave will fetch the file if it doesn't have it.
+        Downloads are verified against the expected SHA-1; a mismatching
+        file is deleted again.
+        Return a tuple containing: (<present>, <info>)
+        """
+        extra_info = 'No URL'
+        if url is not None:
+            extra_info = 'Cache'
+            if not os.path.exists(self.cachePath(sha1sum)):
+                self.log('Fetching %s by url %s' % (sha1sum, url))
+                if username:
+                    opener = self.setupAuthHandler(
+                        url, username, password).open
+                else:
+                    opener = urllib2.urlopen
+                try:
+                    f = opener(url)
+                # Don't change this to URLError without thoroughly
+                # testing for regressions. For now, just suppress
+                # the PyLint warnings.
+                # pylint: disable-msg=W0703
+                except Exception, info:
+                    extra_info = 'Error accessing Librarian: %s' % info
+                    self.log(extra_info)
+                else:
+                    of = open(self.cachePath(sha1sum), "w")
+                    # Upped for great justice to 256k
+                    check_sum = hashlib.sha1()
+                    # stream in 256 KiB chunks, hashing as we go
+                    for chunk in iter(lambda: f.read(256*1024), ''):
+                        of.write(chunk)
+                        check_sum.update(chunk)
+                    of.close()
+                    f.close()
+                    extra_info = 'Download'
+                    if check_sum.hexdigest() != sha1sum:
+                        os.remove(self.cachePath(sha1sum))
+                        extra_info = "Digests did not match, removing again!"
+                    self.log(extra_info)
+        return (os.path.exists(self.cachePath(sha1sum)), extra_info)
+
+    def storeFile(self, content):
+        """Take the provided content and store it in the file cache.
+
+        The cache key (and returned value) is the SHA-1 hex digest of
+        the content; storing is skipped when it is already cached.
+        """
+        sha1sum = hashlib.sha1(content).hexdigest()
+        present, info = self.ensurePresent(sha1sum)
+        if present:
+            return sha1sum
+        f = open(self.cachePath(sha1sum), "w")
+        f.write(content)
+        f.close()
+        return sha1sum
+
+    def addWaitingFile(self, path):
+        """Add a file to the cache and store its details for reporting.
+
+        Records basename -> sha1 in `waitingfiles` so the file can be
+        collected when the build completes.
+        """
+        fn = os.path.basename(path)
+        f = open(path)
+        try:
+            self.waitingfiles[fn] = self.storeFile(f.read())
+        finally:
+            f.close()
+
+    def fetchFile(self, sha1sum):
+        """Fetch the file of the given sha1sum.
+
+        :return: The file contents as a string.
+        :raises ValueError: if no cached file exists for `sha1sum`.
+        """
+        present, info = self.ensurePresent(sha1sum)
+        if not present:
+            raise ValueError("Unknown SHA1sum %s" % sha1sum)
+        f = open(self.cachePath(sha1sum), "r")
+        c = f.read()
+        f.close()
+        return c
+
+    def abort(self):
+        """Abort the current build.
+
+        Delegates to the manager and moves the builder to ABORTING;
+        RunCapture.processEnded later flips it to ABORTED.
+
+        :raises ValueError: if the builder is not currently BUILDING.
+        """
+        # XXX: dsilvers: 2005-01-21: Current abort mechanism doesn't wait
+        # for abort to complete. This is potentially an issue in a heavy
+        # load situation.
+        if self.builderstatus != BuilderStatus.BUILDING:
+            # XXX: Should raise a known Fault so that the client can make
+            # useful decisions about the error!
+            raise ValueError("Slave is not BUILDING when asked to abort")
+        self.manager.abort()
+        self.builderstatus = BuilderStatus.ABORTING
+
+ def clean(self):
+ """Clean up pending files and reset the internal build state."""
+ if self.builderstatus not in [BuilderStatus.WAITING,
+ BuilderStatus.ABORTED]:
+ raise ValueError('Slave is not WAITING|ABORTED when asked'
+ 'to clean')
+ for f in self.waitingfiles:
+ os.remove(self.cachePath(self.waitingfiles[f]))
+ self.builderstatus = BuilderStatus.IDLE
+ if self._log is not None:
+ self._log.close()
+ os.remove(self.cachePath("buildlog"))
+ self._log = None
+ self.waitingfiles = {}
+ self.builddependencies = ""
+ self.manager = None
+ self.buildstatus = BuildStatus.OK
+
+    def log(self, data):
+        """Write the provided data to the log.
+
+        Appends to the buildlog file (when open) and echoes a
+        newline-stripped copy to stdout.
+        """
+        if self._log is not None:
+            self._log.write(data)
+            self._log.flush()
+        if data.endswith("\n"):
+            data = data[:-1]
+        print "Build log: " + data
+
+    def getLogTail(self):
+        """Return the tail of the log.
+
+        If the buildlog is not yet opened for writing (self._log is None),
+        return a empty string.
+
+        It safely tries to open the 'buildlog', if it doesn't exist, due to
+        job cleanup or buildlog sanitization race-conditions, it also returns
+        an empty string.
+
+        When the 'buildlog' is present it return up to 2 KiB character of
+        the end of the file.
+
+        The returned content will be 'sanitized', see `_sanitizeURLs` for
+        further information.
+        """
+        if self._log is None:
+            return ""
+
+        rlog = None
+        try:
+            try:
+                rlog = open(self.cachePath("buildlog"), "r")
+            except IOError:
+                ret = ""
+            else:
+                # We rely on good OS practices that keep the file handler
+                # usable once it's opened. So, if open() is ok, a subsequent
+                # seek/tell/read will be safe.
+                rlog.seek(0, SEEK_END)
+                count = rlog.tell()
+                # cap the tail at 2 KiB
+                if count > 2048:
+                    count = 2048
+                rlog.seek(-count, SEEK_END)
+                ret = rlog.read(count)
+        finally:
+            if rlog is not None:
+                rlog.close()
+
+        if self.manager.is_archive_private:
+            # This is a build in a private archive. We need to scrub
+            # the URLs contained in the buildlog excerpt in order to
+            # avoid leaking passwords.
+            log_lines = ret.splitlines()
+
+            # Please note: we are throwing away the first line (of the
+            # excerpt to be scrubbed) because it may be cut off thus
+            # thwarting the detection of embedded passwords.
+            clean_content_iter = _sanitizeURLs(log_lines[1:])
+            ret = '\n'.join(clean_content_iter)
+
+        return ret
+
+ def startBuild(self, manager):
+ """Start a build with the provided BuildManager instance."""
+ if self.builderstatus != BuilderStatus.IDLE:
+ raise ValueError("Slave is not IDLE when asked to start building")
+ self.manager = manager
+ self.builderstatus = BuilderStatus.BUILDING
+ self.emptyLog()
+
+ def emptyLog(self):
+ """Empty the log and start again."""
+ if self._log is not None:
+ self._log.close()
+ self._log = open(self.cachePath("buildlog"), "w")
+
+ def builderFail(self):
+ """Cease building because the builder has a problem."""
+ if self.builderstatus != BuilderStatus.BUILDING:
+ raise ValueError("Slave is not BUILDING when set to BUILDERFAIL")
+ self.buildstatus = BuildStatus.BUILDERFAIL
+
+ def chrootFail(self):
+ """Cease building because the chroot could not be created or contained
+ a set of package control files which couldn't upgrade themselves, or
+ yet a lot of causes that imply the CHROOT is corrupted not the
+ package.
+ """
+ if self.builderstatus != BuilderStatus.BUILDING:
+ raise ValueError("Slave is not BUILDING when set to CHROOTFAIL")
+ self.buildstatus = BuildStatus.CHROOTFAIL
+
+ def buildFail(self):
+ """Cease building because the package failed to build."""
+ if self.builderstatus != BuilderStatus.BUILDING:
+ raise ValueError("Slave is not BUILDING when set to PACKAGEFAIL")
+ self.buildstatus = BuildStatus.PACKAGEFAIL
+
+ def buildOK(self):
+ """Having passed all possible failure states, mark a build as OK."""
+ if self.builderstatus != BuilderStatus.BUILDING:
+ raise ValueError("Slave is not BUILDING when set to OK")
+ self.buildstatus = BuildStatus.OK
+
+ def depFail(self, dependencies):
+ """Cease building due to a dependency issue."""
+ if self.builderstatus != BuilderStatus.BUILDING:
+ raise ValueError("Slave is not BUILDING when set to DEPFAIL")
+ self.buildstatus = BuildStatus.DEPFAIL
+ self.builddependencies = dependencies
+
+ def giveBack(self):
+ """Give-back package due to a transient buildd/archive issue."""
+ if self.builderstatus != BuilderStatus.BUILDING:
+ raise ValueError("Slave is not BUILDING when set to GIVENBACK")
+ self.buildstatus = BuildStatus.GIVENBACK
+
+ def buildComplete(self):
+ """Mark the build as complete and waiting interaction from the build
+ daemon master.
+ """
+ if self.builderstatus != BuilderStatus.BUILDING:
+ raise ValueError("Slave is not BUILDING when told build is "
+ "complete")
+ self.builderstatus = BuilderStatus.WAITING
+
+ def sanitizeBuildlog(self, log_path):
+ """Removes passwords from buildlog URLs.
+
+ Because none of the URLs to be processed are expected to span
+ multiple lines and because build log files are potentially huge
+ they will be processed line by line.
+
+ :param log_path: The path to the buildlog file that is to be
+ sanitized.
+ :type log_path: ``str``
+ """
+ # First move the buildlog file that is to be sanitized out of
+ # the way.
+ unsanitized_path = self.cachePath(
+ os.path.basename(log_path) + '.unsanitized')
+ os.rename(log_path, unsanitized_path)
+
+ # Open the unsanitized buildlog file for reading.
+ unsanitized_file = open(unsanitized_path)
+
+ # Open the file that will hold the resulting, sanitized buildlog
+ # content for writing.
+ sanitized_file = None
+
+ try:
+ sanitized_file = open(log_path, 'w')
+
+ # Scrub the buildlog file line by line
+ clean_content_iter = _sanitizeURLs(unsanitized_file)
+ for line in clean_content_iter:
+ sanitized_file.write(line)
+ finally:
+ # We're done with scrubbing, close the file handles.
+ unsanitized_file.close()
+ if sanitized_file is not None:
+ sanitized_file.close()
+
+
+class XMLRPCBuildDSlave(xmlrpc.XMLRPC):
+ """XMLRPC build daemon slave management interface"""
+
+ def __init__(self, config):
+ xmlrpc.XMLRPC.__init__(self, allowNone=True)
+ # The V1.0 new-style protocol introduces string-style protocol
+ # versions of the form 'MAJOR.MINOR', the protocol is '1.0' for now
+ # implying the presence of /filecache/ /filecache/buildlog and
+ # the reduced and optimised XMLRPC interface.
+ self.protocolversion = '1.0'
+ self.slave = BuildDSlave(config)
+ self._builders = {}
+ print "Initialized"
+
+ def registerBuilder(self, builderclass, buildertag):
+ self._builders[buildertag] = builderclass
+
+ def xmlrpc_echo(self, *args):
+ """Echo the argument back."""
+ return args
+
+ def xmlrpc_info(self):
+ """Return the protocol version and the builder methods supported."""
+ return (self.protocolversion, self.slave.getArch(),
+ self._builders.keys())
+
+ def xmlrpc_status(self):
+ """Return the status of the build daemon.
+
+ Depending on the builder status we return differing amounts of
+ data. We do however always return the builder status as the first
+ value.
+ """
+ status = self.slave.builderstatus
+ statusname = status.split('.')[-1]
+ func = getattr(self, "status_" + statusname, None)
+ if func is None:
+ raise ValueError("Unknown status '%s'" % status)
+ return (status, ) + func()
+
+ def status_IDLE(self):
+ """Handler for xmlrpc_status IDLE.
+
+ Returns a tuple containing a empty string since there's nothing
+ to report.
+ """
+ # keep the result code sane
+ return ('', )
+
+ def status_BUILDING(self):
+ """Handler for xmlrpc_status BUILDING.
+
+ Returns the build id and up to one kilobyte of log tail
+ """
+ tail = self.slave.getLogTail()
+ return (self.buildid, xmlrpclib.Binary(tail))
+
+ def status_WAITING(self):
+ """Handler for xmlrpc_status WAITING.
+
+ Returns the build id and the set of files waiting to be returned
+ unless the builder failed in which case we return the buildstatus
+ and the build id but no file set.
+ """
+ if self.slave.buildstatus in (BuildStatus.OK, BuildStatus.PACKAGEFAIL,
+ BuildStatus.DEPFAIL):
+ return (self.slave.buildstatus, self.buildid,
+ self.slave.waitingfiles, self.slave.builddependencies)
+ return (self.slave.buildstatus, self.buildid)
+
+ def status_ABORTED(self):
+ """Handler for xmlrpc_status ABORTED.
+
+ The only action the master can take is clean, other than ask status,
+ of course, it returns the build id only.
+ """
+ return (self.buildid, )
+
+ def status_ABORTING(self):
+ """Handler for xmlrpc_status ABORTING.
+
+ This state means the builder performing the ABORT command and is
+ not able to do anything else than answer its status, returns the
+ build id only.
+ """
+ return (self.buildid, )
+
+ def xmlrpc_ensurepresent(self, sha1sum, url, username, password):
+ """Attempt to ensure the given file is present."""
+ return self.slave.ensurePresent(sha1sum, url, username, password)
+
+ def xmlrpc_abort(self):
+ """Abort the current build."""
+ self.slave.abort()
+ return BuilderStatus.ABORTING
+
+ def xmlrpc_clean(self):
+ """Clean up the waiting files and reset the slave's internal state."""
+ self.slave.clean()
+ return BuilderStatus.IDLE
+
+ def xmlrpc_build(self, buildid, builder, chrootsum, filemap, args):
+ """Check if requested arguments are sane and initiate build procedure
+
+ return a tuple containing: (<builder_status>, <info>)
+
+ """
+ # check requested builder
+ if not builder in self._builders:
+ extra_info = "%s not in %r" % (builder, self._builders.keys())
+ return (BuilderStatus.UNKNOWNBUILDER, extra_info)
+ # check requested chroot availability
+ chroot_present, info = self.slave.ensurePresent(chrootsum)
+ if not chroot_present:
+ extra_info = """CHROOTSUM -> %s
+ ***** INFO *****
+ %s
+ ****************
+ """ % (chrootsum, info)
+ return (BuilderStatus.UNKNOWNSUM, extra_info)
+ # check requested files availability
+ for filesum in filemap.itervalues():
+ file_present, info = self.slave.ensurePresent(filesum)
+ if not file_present:
+ extra_info = """FILESUM -> %s
+ ***** INFO *****
+ %s
+ ****************
+ """ % (filesum, info)
+ return (BuilderStatus.UNKNOWNSUM, extra_info)
+ # check buildid sanity
+ if buildid is None or buildid == "" or buildid == 0:
+ raise ValueError(buildid)
+
+ # builder is available, buildd is non empty,
+ # filelist is consistent, chrootsum is available, let's initiate...
+ self.buildid = buildid
+ self.slave.startBuild(self._builders[builder](self.slave, buildid))
+ self.slave.manager.initiate(filemap, chrootsum, args)
+ return (BuilderStatus.BUILDING, buildid)
=== added file 'lib/canonical/buildd/sourcepackagerecipe.py'
--- lib/canonical/buildd/sourcepackagerecipe.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/sourcepackagerecipe.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,144 @@
+# Copyright 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+# pylint: disable-msg=E1002
+
+"""The manager class for building packages from recipes."""
+
+import os
+import re
+
+from canonical.buildd.debian import (
+ DebianBuildManager,
+ DebianBuildState,
+ get_build_path,
+)
+RETCODE_SUCCESS = 0
+RETCODE_FAILURE_INSTALL = 200
+RETCODE_FAILURE_BUILD_TREE = 201
+RETCODE_FAILURE_INSTALL_BUILD_DEPS = 202
+RETCODE_FAILURE_BUILD_SOURCE_PACKAGE = 203
+
+
def splat_file(path, contents):
    """Write a string to the specified path.

    :param path: The path to store the string in.
    :param contents: The string to write to the file.
    """
    with open(path, 'w') as file_obj:
        file_obj.write(contents)
+
+
def get_chroot_path(build_id, *extra):
    """Return a path within the chroot.

    :param build_id: The build_id of the build.
    :param extra: Additional path elements.
    """
    # os.environ['HOME'] is an absolute path; strip the leading '/' so
    # it can be joined as a component below the chroot root.
    home_inside_chroot = os.environ['HOME'][1:]
    return get_build_path(
        build_id, 'chroot-autobuild', home_inside_chroot, *extra)
+
+
class SourcePackageRecipeBuildState(DebianBuildState):
    """The set of states that a recipe build can be in."""
    # The single state added on top of the base Debian build states: the
    # actual recipe-build step.
    BUILD_RECIPE = "BUILD_RECIPE"
+
+
class SourcePackageRecipeBuildManager(DebianBuildManager):
    """Build a source package from a bzr-builder recipe."""

    # Recipe builds start directly in the recipe-building state.
    initial_build_state = SourcePackageRecipeBuildState.BUILD_RECIPE

    def __init__(self, slave, buildid):
        """Constructor.

        :param slave: A build slave device.
        :param buildid: The id of the build (a str).
        """
        DebianBuildManager.__init__(self, slave, buildid)
        # Path of the 'buildrecipe' helper, read from the slave's
        # configuration file.
        self.build_recipe_path = slave._config.get(
            "sourcepackagerecipemanager", "buildrecipepath")

    def initiate(self, files, chroot, extra_args):
        """Initiate a build with a given set of files and chroot.

        :param files: The files sent by the manager with the request.
        :param chroot: The sha1sum of the chroot to use.
        :param extra_args: A dict of extra arguments.
        :raises KeyError: if any required extra_args entry is missing.
        """
        # Stash the recipe text and build metadata for doRunBuild.
        self.recipe_text = extra_args['recipe_text']
        self.suite = extra_args['suite']
        self.component = extra_args['ogrecomponent']
        self.author_name = extra_args['author_name']
        self.author_email = extra_args['author_email']
        self.archive_purpose = extra_args['archive_purpose']
        self.distroseries_name = extra_args['distroseries_name']

        super(SourcePackageRecipeBuildManager, self).initiate(
            files, chroot, extra_args)

    def doRunBuild(self):
        """Run the build process to build the source package."""
        # Write the recipe into the chroot's work directory so the
        # buildrecipe helper can find it.
        os.makedirs(get_chroot_path(self._buildid, 'work'))
        recipe_path = get_chroot_path(self._buildid, 'work/recipe')
        splat_file(recipe_path, self.recipe_text)
        # author_name may contain non-ASCII and is encoded explicitly;
        # NOTE(review): the other fields are passed through unencoded —
        # presumably they are plain ASCII, confirm against callers.
        args = [
            "buildrecipe", self._buildid, self.author_name.encode('utf-8'),
            self.author_email, self.suite, self.distroseries_name,
            self.component, self.archive_purpose]
        self.runSubProcess(self.build_recipe_path, args)

    def iterate_BUILD_RECIPE(self, retcode):
        """Move from BUILD_RECIPE to the next logical state."""
        if retcode == RETCODE_SUCCESS:
            self.gatherResults()
            print("Returning build status: OK")
        elif retcode == RETCODE_FAILURE_INSTALL_BUILD_DEPS:
            if not self.alreadyfailed:
                # Scan the log tail for apt's unmet-dependency report so
                # the first missing package can be recorded as a DEPFAIL.
                tmpLog = self.getTmpLogContents()
                rx = (
                    'The following packages have unmet dependencies:\n'
                    '.*: Depends: ([^ ]*( \([^)]*\))?)')
                mo = re.search(rx, tmpLog, re.M)
                if mo:
                    self._slave.depFail(mo.group(1))
                    print("Returning build status: DEPFAIL")
                    print("Dependencies: " + mo.group(1))
                else:
                    print("Returning build status: Build failed")
                    self._slave.buildFail()
                self.alreadyfailed = True
        elif (
            retcode >= RETCODE_FAILURE_INSTALL and
            retcode <= RETCODE_FAILURE_BUILD_SOURCE_PACKAGE):
            # XXX AaronBentley 2009-01-13: We should handle depwait separately
            if not self.alreadyfailed:
                self._slave.buildFail()
                print("Returning build status: Build failed.")
            self.alreadyfailed = True
        else:
            # Any other return code signals a problem with the builder
            # itself rather than with the package.
            if not self.alreadyfailed:
                self._slave.builderFail()
                print("Returning build status: Builder failed.")
            self.alreadyfailed = True
        # Whatever happened, move on to reaping leftover processes.
        self._state = DebianBuildState.REAP
        self.doReapProcesses()

    def getChangesFilename(self):
        """Return the path to the changes file.

        Implicitly returns None when no '_source.changes' file exists in
        the build directory; NOTE(review): confirm callers handle that.
        """
        work_path = get_build_path(self._buildid)
        for name in os.listdir(work_path):
            if name.endswith('_source.changes'):
                return os.path.join(work_path, name)

    def gatherResults(self):
        """Gather the results of the build and add them to the file cache.

        The primary file we care about is the .changes file.
        The manifest is also a useful record.
        """
        DebianBuildManager.gatherResults(self)
        self._slave.addWaitingFile(get_build_path(self._buildid, 'manifest'))
=== added file 'lib/canonical/buildd/template-buildd-slave.conf'
--- lib/canonical/buildd/template-buildd-slave.conf 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/template-buildd-slave.conf 2011-11-17 19:45:28 +0000
@@ -0,0 +1,32 @@
+# Launchpad Buildd slave configuration.
+#
+# Configuration for buildd: @NAME@
+
+[slave]
+architecturetag = @ARCHTAG@
+filecache = /home/buildd/filecache-@NAME@
+bindhost = @BINDHOST@
+bindport = @BINDPORT@
+ntphost = ntp.buildd
+
+[allmanagers]
+unpackpath = /usr/share/launchpad-buildd/slavebin/unpack-chroot
+cleanpath = /usr/share/launchpad-buildd/slavebin/remove-build
+mountpath = /usr/share/launchpad-buildd/slavebin/mount-chroot
+umountpath = /usr/share/launchpad-buildd/slavebin/umount-chroot
+
+[debianmanager]
+updatepath = /usr/share/launchpad-buildd/slavebin/update-debian-chroot
+processscanpath = /usr/share/launchpad-buildd/slavebin/scan-for-processes
+sourcespath = /usr/share/launchpad-buildd/slavebin/override-sources-list
+
+[binarypackagemanager]
+sbuildpath = /usr/share/launchpad-buildd/slavebin/sbuild-package
+sbuildargs = --nolog --batch --archive=ubuntu
+
+[sourcepackagerecipemanager]
+buildrecipepath = /usr/share/launchpad-buildd/slavebin/buildrecipe
+
+[translationtemplatesmanager]
+generatepath = /usr/share/launchpad-buildd/slavebin/generate-translation-templates
+resultarchive = translation-templates.tar.gz
=== added file 'lib/canonical/buildd/test_buildd_generatetranslationtemplates'
--- lib/canonical/buildd/test_buildd_generatetranslationtemplates 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/test_buildd_generatetranslationtemplates 2011-11-17 19:45:28 +0000
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+# Copyright 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+#
+# Test script for manual use only. Exercises the
+# TranslationTemplatesBuildManager through XMLRPC.
+
+import sys
+
+from xmlrpclib import ServerProxy
+
+if len(sys.argv) != 2:
+ print "Usage: %s <chroot_sha1>" % sys.argv[0]
+ print "Where <chroot_sha1> is the SHA1 of the chroot tarball to use."
+ print "The chroot tarball must be in the local Librarian."
+ print "See https://dev.launchpad.net/Soyuz/HowToUseSoyuzLocally"
+ sys.exit(1)
+
+chroot_sha1 = sys.argv[1]
+
+proxy = ServerProxy('http://localhost:8221/rpc')
+print proxy.info()
+print proxy.status()
+buildid = '1-2'
+build_type = 'translation-templates'
+filemap = {}
+args = {'branch_url': 'no-branch-here-sorry'}
+print proxy.build(buildid, build_type, chroot_sha1, filemap, args)
+#status = proxy.status()
+#for filename, sha1 in status[3].iteritems():
+# print filename
+#proxy.clean()
+
=== added file 'lib/canonical/buildd/test_buildd_recipe'
--- lib/canonical/buildd/test_buildd_recipe 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/test_buildd_recipe 2011-11-17 19:45:28 +0000
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+# Copyright 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+#
+# This is a script to do end-to-end testing of the buildd with a bzr-builder
+# recipe, without involving the BuilderBehaviour.
+
+country_code = 'us'
+apt_cacher_ng_host = 'stumpy'
+distroseries_name = 'maverick'
+recipe_text = """# bzr-builder format 0.2 deb-version {debupstream}-0~{revno}
+http://bazaar.launchpad.dev/~ppa-user/+junk/wakeonlan"""
+
+def deb_line(host, suites):
+ prefix = 'deb http://'
+ if apt_cacher_ng_host != None:
+ prefix += '%s:3142/' % apt_cacher_ng_host
+ return '%s%s %s %s' % (prefix, host, distroseries_name, suites)
+
+import sys
+from xmlrpclib import ServerProxy
+
+proxy = ServerProxy('http://localhost:8221/rpc')
+print proxy.echo('Hello World')
+print proxy.info()
+status = proxy.status()
+print status
+if status[0] != 'BuilderStatus.IDLE':
+ print "Aborting due to non-IDLE builder."
+ sys.exit(1)
+print proxy.build(
+ '1-2', 'sourcepackagerecipe', '1ef177161c3cb073e66bf1550931c6fbaa0a94b0',
+ {}, {'author_name': u'Steve\u1234',
+ 'author_email': 'stevea@xxxxxxxxxxx',
+ 'suite': distroseries_name,
+ 'distroseries_name': distroseries_name,
+ 'ogrecomponent': 'universe',
+ 'archive_purpose': 'puppies',
+ 'recipe_text': recipe_text,
+ 'archives': [
+ deb_line('%s.archive.ubuntu.com/ubuntu' % country_code,
+ 'main universe'),
+ deb_line('ppa.launchpad.net/launchpad/bzr-builder-dev/ubuntu',
+ 'main'),]})
+#status = proxy.status()
+#for filename, sha1 in status[3].iteritems():
+# print filename
+#proxy.clean()
=== added directory 'lib/canonical/buildd/tests'
=== added file 'lib/canonical/buildd/tests/__init__.py'
--- lib/canonical/buildd/tests/__init__.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/tests/__init__.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,4 @@
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+from harness import BuilddSlaveTestSetup
=== added file 'lib/canonical/buildd/tests/buildd-slave-test.conf'
--- lib/canonical/buildd/tests/buildd-slave-test.conf 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/tests/buildd-slave-test.conf 2011-11-17 19:45:28 +0000
@@ -0,0 +1,27 @@
+# Test buildd slave configuration
+
+[slave]
+architecturetag = i386
+filecache = /var/tmp/buildd/filecache
+bindhost = localhost
+bindport = 8221
+
+[allmanagers]
+unpackpath = /var/tmp/buildd/slavebin/unpack-chroot
+cleanpath = /var/tmp/buildd/slavebin/remove-build
+mountpath = /var/tmp/buildd/slavebin/mount-chroot
+umountpath = /var/tmp/buildd/slavebin/umount-chroot
+
+[debianmanager]
+sbuildpath = /var/tmp/buildd/slavebin/sbuild-package
+sbuildargs = -dautobuild --nolog --batch
+updatepath = /var/tmp/buildd/slavebin/update-debian-chroot
+processscanpath = /var/tmp/buildd/slavebin/scan-for-processes
+sourcespath = /usr/share/launchpad-buildd/slavebin/override-sources-list
+
+[binarypackagemanager]
+sbuildpath = /var/tmp/buildd/slavebin/sbuild-package
+sbuildargs = -dautobuild --nolog --batch
+updatepath = /var/tmp/buildd/slavebin/update-debian-chroot
+processscanpath = /var/tmp/buildd/slavebin/scan-for-processes
+sourcespath = /usr/share/launchpad-buildd/slavebin/override-sources-list
=== added file 'lib/canonical/buildd/tests/buildlog'
--- lib/canonical/buildd/tests/buildlog 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/tests/buildlog 2011-11-17 19:45:28 +0000
@@ -0,0 +1,23 @@
+RUN: /usr/share/launchpad-buildd/slavebin/unpack-chroot ['unpack-chroot', '370614-896976', '/home/buildd/filecache-default/a40e3c410938399b35051833fe5244f9ac6f3774']
+Unpacking chroot for build 370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/mount-chroot ['mount-chroot', '370614-896976']
+Mounting chroot for build 370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/apply-ogre-model ['apply-ogre-model', '370614-896976', 'universe']
+Attempting OGRE for universe in build-370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/update-debian-chroot ['update-debian-chroot', '370614-896976']
+Updating debian chroot for build 370614-896976
+Get:1 http://buildd:secret@ftpmaster.internal gutsy Release.gpg [191B]
+Get:2 http://ftpmaster.internal gutsy Release [65.9kB]
+Get:3 http://user:blah@ftpmaster.internal gutsy/main Packages [1085kB]
+Get:4 http://ftpmaster.internal gutsy/universe Packages [3991kB]
+Fetched 5142kB in 5s (1012kB/s)
+Reading package lists...
+Reading package lists...
+Building dependency tree...
+The following packages will be upgraded:
+ apt bsdutils cpp g++ gcc initscripts libdb4.4 mount pkgbinarymangler sysv-rc
+ sysvutils util-linux
+12 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
+Need to get 0B/2832kB of archives.
+After unpacking 94.2kB of additional disk space will be used.
+(Reading database ... 8942 files and directories currently installed.)
=== added file 'lib/canonical/buildd/tests/buildlog.long'
--- lib/canonical/buildd/tests/buildlog.long 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/tests/buildlog.long 2011-11-17 19:45:28 +0000
@@ -0,0 +1,82 @@
+RUN: /usr/share/launchpad-buildd/slavebin/unpack-chroot ['unpack-chroot', '370614-896976', '/home/buildd/filecache-default/a40e3c410938399b35051833fe5244f9ac6f3774']
+Unpacking chroot for build 370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/mount-chroot ['mount-chroot', '370614-896976']
+Mounting chroot for build 370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/apply-ogre-model ['apply-ogre-model', '370614-896976', 'universe']
+Attempting OGRE for universe in build-370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/update-debian-chroot ['update-debian-chroot', '370614-896976']
+Updating debian chroot for build 370614-896976
+Get:1 http://buildd:secret@ftpmaster.internal gutsy Release.gpg [191B]
+Get:2 http://ftpmaster.internal gutsy Release [65.9kB]
+Get:3 http://user:blah@ftpmaster.internal gutsy/main Packages [1085kB]
+Get:4 http://ftpmaster.internal gutsy/universe Packages [3991kB]
+Fetched 5142kB in 5s (1012kB/s)
+Reading package lists...
+Reading package lists...
+Building dependency tree...
+The following packages will be upgraded:
+ apt bsdutils cpp g++ gcc initscripts libdb4.4 mount pkgbinarymangler sysv-rc
+ sysvutils util-linux
+12 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
+Need to get 0B/2832kB of archives.
+After unpacking 94.2kB of additional disk space will be used.
+(Reading database ... 8942 files and directories currently installed.)
+RUN: /usr/share/launchpad-buildd/slavebin/unpack-chroot ['unpack-chroot', '370614-896976', '/home/buildd/filecache-default/a40e3c410938399b35051833fe5244f9ac6f3774']
+Unpacking chroot for build 370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/mount-chroot ['mount-chroot', '370614-896976']
+Mounting chroot for build 370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/apply-ogre-model ['apply-ogre-model', '370614-896976', 'universe']
+Attempting OGRE for universe in build-370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/update-debian-chroot ['update-debian-chroot', '370614-896976']
+Updating debian chroot for build 370614-896976
+Get:1 http://buildd:secret@ftpmaster.internal gutsy Release.gpg [191B]
+Get:2 http://ftpmaster.internal gutsy Release [65.9kB]
+Get:3 http://user:blah@ftpmaster.internal gutsy/main Packages [1085kB]
+Get:4 http://ftpmaster.internal gutsy/universe Packages [3991kB]
+Fetched 5142kB in 5s (1012kB/s)
+Reading package lists...
+Reading package lists...
+Building dependency tree...
+The following packages will be upgraded:
+ apt bsdutils cpp g++ gcc initscripts libdb4.4 mount pkgbinarymangler sysv-rc
+ sysvutils util-linux
+12 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
+Need to get 0B/2832kB of archives.
+After unpacking 94.2kB of additional disk space will be used.
+(Reading database ... 8942 files and directories currently installed.)
+RUN: /usr/share/launchpad-buildd/slavebin/unpack-chroot ['unpack-chroot', '370614-896976', '/home/buildd/filecache-default/a40e3c410938399b35051833fe5244f9ac6f3774']
+Unpacking chroot for build 370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/mount-chroot ['mount-chroot', '370614-896976']
+Mounting chroot for build 370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/apply-ogre-model ['apply-ogre-model', '370614-896976', 'universe']
+Attempting OGRE for universe in build-370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/update-debian-chroot ['update-debian-chroot', '370614-896976']
+Updating debian chroot for build 370614-896976
+Get:1 http://buildd:secret@ftpmaster.internal gutsy Release.gpg [191B]
+Get:2 http://ftpmaster.internal gutsy Release [65.9kB]
+Get:3 http://user:blah@ftpmaster.internal gutsy/main Packages [1085kB]
+Get:4 http://ftpmaster.internal gutsy/universe Packages [3991kB]
+Fetched 5142kB in 5s (1012kB/s)
+Reading package lists...
+Reading package lists...
+Building dependency tree...
+The following packages will be upgraded:
+ apt bsdutils cpp g++ gcc initscripts libdb4.4 mount pkgbinarymangler sysv-rc
+ sysvutils util-linux
+12 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
+Need to get 0B/2832kB of archives.
+After unpacking 94.2kB of additional disk space will be used.
+(Reading database ... 8942 files and directories currently installed.)
+RUN: /usr/share/launchpad-buildd/slavebin/unpack-chroot ['unpack-chroot', '370614-896976', '/home/buildd/filecache-default/a40e3c410938399b35051833fe5244f9ac6f3774']
+Unpacking chroot for build 370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/mount-chroot ['mount-chroot', '370614-896976']
+Mounting chroot for build 370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/apply-ogre-model ['apply-ogre-model', '370614-896976', 'universe']
+Attempting OGRE for universe in build-370614-896976
+RUN: /usr/share/launchpad-buildd/slavebin/update-debian-chroot ['update-debian-chroot', '370614-896976']
+Updating debian chroot for build 370614-896976
+Get:1 http://buildd:secret@ftpmaster.internal gutsy Release.gpg [191B]
+Get:2 http://must:go@ftpmaster.internal gutsy Release [65.9kB]
+Get:3 http://scrub:this@ftpmaster.internal gutsy/main Packages [1085kB]
+Get:4 http://ftpmaster.internal gutsy/universe Packages [3991kB]
+Fetched 5142kB in 5s (1012kB/s)
=== added file 'lib/canonical/buildd/tests/dummy_templates.tar.gz'
Binary files lib/canonical/buildd/tests/dummy_templates.tar.gz 1970-01-01 00:00:00 +0000 and lib/canonical/buildd/tests/dummy_templates.tar.gz 2011-11-17 19:45:28 +0000 differ
=== added file 'lib/canonical/buildd/tests/harness.py'
--- lib/canonical/buildd/tests/harness.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/tests/harness.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,139 @@
+# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+__metaclass__ = type
+__all__ = [
+ 'BuilddTestCase',
+ ]
+
+import os
+import tempfile
+import unittest
+from ConfigParser import SafeConfigParser
+
+from txfixtures.tachandler import TacTestFixture
+
+from canonical.buildd.slave import BuildDSlave
+
+from lp.services.osutils import remove_tree
+
+
# Absolute path of the slave configuration used by these tests; the file
# lives alongside this module.
test_conffile = os.path.join(
    os.path.dirname(__file__), 'buildd-slave-test.conf')
+
+
class MockBuildManager(object):
    """Stand-in for a real BuildManager.

    The only part of the interface the tests exercise is the
    `is_archive_private` flag, which is always False here.
    """
    is_archive_private = False
+
+
class BuilddTestCase(unittest.TestCase):
    """Unit tests for logtail mechanisms."""

    def setUp(self):
        """Create a BuildDSlave backed by the test config.

        The filecache is pointed at a fresh temporary directory so every
        test starts with an empty cache.
        """
        config = SafeConfigParser()
        config.read(test_conffile)
        config.set("slave", "filecache", tempfile.mkdtemp())

        self.slave = BuildDSlave(config)
        self.slave._log = True
        self.slave.manager = MockBuildManager()

        self.here = os.path.abspath(os.path.dirname(__file__))

    def tearDown(self):
        """Remove the 'filecache' directory used for the tests."""
        remove_tree(self.slave._cachepath)

    def makeLog(self, size):
        """Fill the default buildlog file with `size` bytes of data."""
        log_file = open(self.slave.cachePath('buildlog'), 'w')
        try:
            log_file.write("x" * size)
        finally:
            log_file.close()
+ f.close()
+
+
class BuilddSlaveTestSetup(TacTestFixture):
    r"""Setup BuildSlave for use by functional tests

    >>> fixture = BuilddSlaveTestSetup()
    >>> fixture.setUp()

    Make sure the server is running

    >>> import xmlrpclib
    >>> s = xmlrpclib.Server('http://localhost:8221/rpc/')
    >>> s.echo('Hello World')
    ['Hello World']
    >>> fixture.tearDown()

    Again for luck !

    >>> fixture.setUp()
    >>> s = xmlrpclib.Server('http://localhost:8221/rpc/')

    >>> s.echo('Hello World')
    ['Hello World']

    >>> info = s.info()
    >>> len(info)
    3
    >>> print info[:2]
    ['1.0', 'i386']

    >>> for buildtype in sorted(info[2]):
    ...     print buildtype
    binarypackage
    debian
    sourcepackagerecipe
    translation-templates

    >>> s.status()
    ['BuilderStatus.IDLE', '']

    >>> fixture.tearDown()
    """
    def setUpRoot(self):
        """Recreate empty root directory to avoid problems."""
        remove_tree(self.root)
        os.mkdir(self.root)
        filecache = os.path.join(self.root, 'filecache')
        os.mkdir(filecache)
        # The slave's tac file picks up HOME and its config location from
        # the environment.
        os.environ['HOME'] = self.root
        os.environ['BUILDD_SLAVE_CONFIG'] = test_conffile
        # XXX cprov 2005-05-30:
        # When we are about running it seriously we need :
        # * install sbuild package
        # * to copy the scripts for sbuild
        self.addCleanup(remove_tree, self.root)

    @property
    def root(self):
        # Fixed location; must agree with buildd-slave-test.conf paths.
        return '/var/tmp/buildd'

    @property
    def tacfile(self):
        # The tac file lives one directory above this tests package.
        return os.path.abspath(os.path.join(
            os.path.dirname(__file__),
            os.path.pardir,
            'buildd-slave.tac'
            ))

    @property
    def pidfile(self):
        # Kept inside the fixture root so teardown removes it too.
        return os.path.join(self.root, 'build-slave.pid')

    @property
    def logfile(self):
        return '/var/tmp/build-slave.log'

    def _hasDaemonStarted(self):
        """Called by the superclass to check if the daemon is listening.

        The slave is ready when it's accepting connections.
        """
        # This must match buildd-slave-test.conf.
        return self._isPortListening('localhost', 8221)
=== added file 'lib/canonical/buildd/tests/test_1.diff'
--- lib/canonical/buildd/tests/test_1.diff 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/tests/test_1.diff 2011-11-17 19:45:28 +0000
@@ -0,0 +1,17 @@
+---
+
++++
+
+@@ -6,9 +6,9 @@
+
+ Attempting OGRE for universe in build-370614-896976
+ RUN: /usr/share/launchpad-buildd/slavebin/update-debian-chroot ['update-debian-chroot', '370614-896976']
+ Updating debian chroot for build 370614-896976
+-Get:1 http://buildd:secret@ftpmaster.internal gutsy Release.gpg [191B]
++Get:1 http://ftpmaster.internal gutsy Release.gpg [191B]
+ Get:2 http://ftpmaster.internal gutsy Release [65.9kB]
+-Get:3 http://user:blah@ftpmaster.internal gutsy/main Packages [1085kB]
++Get:3 http://ftpmaster.internal gutsy/main Packages [1085kB]
+ Get:4 http://ftpmaster.internal gutsy/universe Packages [3991kB]
+ Fetched 5142kB in 5s (1012kB/s)
+ Reading package lists...
\ No newline at end of file
=== added file 'lib/canonical/buildd/tests/test_2.diff'
--- lib/canonical/buildd/tests/test_2.diff 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/tests/test_2.diff 2011-11-17 19:45:28 +0000
@@ -0,0 +1,32 @@
+---
+
++++
+
+@@ -1,11 +1,10 @@
+
+-ting chroot for build 370614-896976
+ RUN: /usr/share/launchpad-buildd/slavebin/apply-ogre-model ['apply-ogre-model', '370614-896976', 'universe']
+ Attempting OGRE for universe in build-370614-896976
+ RUN: /usr/share/launchpad-buildd/slavebin/update-debian-chroot ['update-debian-chroot', '370614-896976']
+ Updating debian chroot for build 370614-896976
+-Get:1 http://buildd:secret@ftpmaster.internal gutsy Release.gpg [191B]
++Get:1 http://ftpmaster.internal gutsy Release.gpg [191B]
+ Get:2 http://ftpmaster.internal gutsy Release [65.9kB]
+-Get:3 http://user:blah@ftpmaster.internal gutsy/main Packages [1085kB]
++Get:3 http://ftpmaster.internal gutsy/main Packages [1085kB]
+ Get:4 http://ftpmaster.internal gutsy/universe Packages [3991kB]
+ Fetched 5142kB in 5s (1012kB/s)
+ Reading package lists...
+@@ -26,8 +25,8 @@
+
+ Attempting OGRE for universe in build-370614-896976
+ RUN: /usr/share/launchpad-buildd/slavebin/update-debian-chroot ['update-debian-chroot', '370614-896976']
+ Updating debian chroot for build 370614-896976
+-Get:1 http://buildd:secret@ftpmaster.internal gutsy Release.gpg [191B]
+-Get:2 http://must:go@ftpmaster.internal gutsy Release [65.9kB]
+-Get:3 http://scrub:this@ftpmaster.internal gutsy/main Packages [1085kB]
++Get:1 http://ftpmaster.internal gutsy Release.gpg [191B]
++Get:2 http://ftpmaster.internal gutsy Release [65.9kB]
++Get:3 http://ftpmaster.internal gutsy/main Packages [1085kB]
+ Get:4 http://ftpmaster.internal gutsy/universe Packages [3991kB]
+ Fetched 5142kB in 5s (1012kB/s)
\ No newline at end of file
=== added file 'lib/canonical/buildd/tests/test_buildd_slave.py'
--- lib/canonical/buildd/tests/test_buildd_slave.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/tests/test_buildd_slave.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,198 @@
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Buildd Slave tests.
+
+This file contains the following tests:
+
+ * Basic authentication handling (used to download private sources);
+ * Build log sanitization (removal of passwords from private buildlog);
+ * Build log(tail) mechanisms (limited output from the end of the buildlog).
+
+"""
+
+__metaclass__ = type
+
+__all__ = ['LaunchpadBuilddSlaveTests']
+
+import difflib
+import os
+import shutil
+import urllib2
+import unittest
+import xmlrpclib
+
+from canonical.buildd.tests.harness import (
+ BuilddSlaveTestSetup, BuilddTestCase)
+
+
+def read_file(path):
+ """Helper for reading the contents of a file."""
+ file_object = open(path)
+ try:
+ return file_object.read()
+ finally:
+ file_object.close()
+
+
+class LaunchpadBuilddSlaveTests(BuilddTestCase):
+ """Unit tests for scrubbing (removal of passwords) of buildlog files."""
+
+ def testBasicAuth(self):
+ """Test that the auth handler is installed with the right details."""
+ url = "http://fakeurl/"
+ user = "myuser"
+ password = "fakepassword"
+
+ opener = self.slave.setupAuthHandler(url, user, password)
+
+ # Inspect the openers and ensure the wanted handler is installed.
+ basic_auth_handler = None
+ for handler in opener.handlers:
+ if isinstance(handler, urllib2.HTTPBasicAuthHandler):
+ basic_auth_handler = handler
+ break
+ self.assertTrue(
+ basic_auth_handler is not None,
+ "No basic auth handler installed.")
+
+ password_mgr = basic_auth_handler.passwd
+ stored_user, stored_pass = password_mgr.find_user_password(None, url)
+ self.assertEqual(user, stored_user)
+ self.assertEqual(password, stored_pass)
+
+ def testBuildlogScrubbing(self):
+ """Tests the buildlog scrubbing (removal of passwords from URLs)."""
+ # This is where the buildlog file lives.
+ log_path = self.slave.cachePath('buildlog')
+
+ # This is where the slave leaves the original/unsanitized
+ # buildlog file after scrubbing.
+ unsanitized_path = self.slave.cachePath('buildlog.unsanitized')
+
+ # Copy the fake buildlog file to the cache path.
+ shutil.copy(os.path.join(self.here, 'buildlog'), log_path)
+
+ # Invoke the slave's buildlog scrubbing method.
+ self.slave.sanitizeBuildlog(log_path)
+
+ # Read the unsanitized original content.
+ unsanitized = read_file(unsanitized_path).splitlines()
+ # Read the new, sanitized content.
+ clean = read_file(log_path).splitlines()
+
+ # Compare the scrubbed content with the unsanitized one.
+ differences = '\n'.join(difflib.unified_diff(unsanitized, clean))
+
+ # Read the expected differences from the prepared disk file.
+ expected = read_file(os.path.join(self.here, 'test_1.diff'))
+
+ # Make sure they match.
+ self.assertEqual(differences, expected)
+
+ def testLogtailScrubbing(self):
+ """Test the scrubbing of the slave's getLogTail() output."""
+
+ # This is where the buildlog file lives.
+ log_path = self.slave.cachePath('buildlog')
+
+ # Copy the prepared, longer buildlog file so we can test lines
+ # that are chopped off in the middle.
+ shutil.copy(os.path.join(self.here, 'buildlog.long'), log_path)
+
+ # First get the unfiltered log tail output (which is the default
+ # behaviour because the BuildManager's 'is_archive_private'
+ # property is initialized to False).
+ self.slave.manager.is_archive_private = False
+ unsanitized = self.slave.getLogTail().splitlines()
+
+ # Make the slave believe we are building in a private archive to
+ # obtain the scrubbed log tail output.
+ self.slave.manager.is_archive_private = True
+ clean = self.slave.getLogTail().splitlines()
+
+ # Get the differences ..
+ differences = '\n'.join(difflib.unified_diff(unsanitized, clean))
+
+ # .. and the expected differences.
+ expected = read_file(os.path.join(self.here, 'test_2.diff'))
+
+ # Finally make sure what we got is what we expected.
+ self.assertEqual(differences, expected)
+
+ def testLogtail(self):
+ """Tests the logtail mechanisms.
+
+ 'getLogTail' returns up to 2 KiB of text from the current 'buildlog' file.
+ """
+ self.makeLog(0)
+ log_tail = self.slave.getLogTail()
+ self.assertEqual(len(log_tail), 0)
+
+ self.makeLog(1)
+ log_tail = self.slave.getLogTail()
+ self.assertEqual(len(log_tail), 1)
+
+ self.makeLog(2048)
+ log_tail = self.slave.getLogTail()
+ self.assertEqual(len(log_tail), 2048)
+
+ self.makeLog(2049)
+ log_tail = self.slave.getLogTail()
+ self.assertEqual(len(log_tail), 2048)
+
+ self.makeLog(4096)
+ log_tail = self.slave.getLogTail()
+ self.assertEqual(len(log_tail), 2048)
+
+ def testLogtailWhenLogFileVanishes(self):
+ """Slave.getLogTail doesn't get hurt if the logfile has vanished.
+
+ This is a common race-condition in our slaves, since they get
+ polled all the time when they are building.
+
+ Sometimes the getLogTail call coincides with the job
+ cleanup/sanitization, so there is no buildlog to inspect and thus
+ we expect an empty string to be returned instead of an explosion.
+ """
+ # Create some log content and read it.
+ self.makeLog(2048)
+ log_tail = self.slave.getLogTail()
+ self.assertEqual(len(log_tail), 2048)
+
+ # Read it again for luck.
+ log_tail = self.slave.getLogTail()
+ self.assertEqual(len(log_tail), 2048)
+
+ # Remove the buildlog file
+ os.remove(self.slave.cachePath('buildlog'))
+
+ # Instead of shocking the getLogTail call, return an empty string.
+ log_tail = self.slave.getLogTail()
+ self.assertEqual(len(log_tail), 0)
+
+
+class XMLRPCBuildDSlaveTests(unittest.TestCase):
+
+ def setUp(self):
+ super(XMLRPCBuildDSlaveTests, self).setUp()
+ self.slave = BuilddSlaveTestSetup()
+ self.slave.setUp()
+ self.server = xmlrpclib.Server('http://localhost:8221/rpc/')
+
+ def tearDown(self):
+ self.slave.tearDown()
+ super(XMLRPCBuildDSlaveTests, self).tearDown()
+
+ def test_build_unknown_builder(self):
+ # If a bogus builder name is passed into build, it returns an
+ # appropriate error message and not just 'None'.
+ buildername = 'nonexistentbuilder'
+ status, info = self.server.build('foo', buildername, 'sha1', {}, {})
+
+ self.assertEqual('BuilderStatus.UNKNOWNBUILDER', status)
+ self.assertTrue(
+ info is not None, "UNKNOWNBUILDER returns 'None' info.")
+ self.assertTrue(
+ info.startswith("%s not in [" % buildername),
+ 'UNKNOWNBUILDER info is "%s"' % info)
=== added file 'lib/canonical/buildd/tests/test_check_implicit_pointer_functions.py'
--- lib/canonical/buildd/tests/test_check_implicit_pointer_functions.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/tests/test_check_implicit_pointer_functions.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,43 @@
+# Copyright 2011 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+from lp.testing import TestCase
+
+from canonical.buildd.check_implicit_pointer_functions import implicit_pattern
+from canonical.buildd.check_implicit_pointer_functions import pointer_pattern
+
+
+class TestPointerCheckRegexes(TestCase):
+
+ def test_catches_pointer_from_integer_without_column_number(self):
+ # Regex should match compiler errors that don't include the
+ # column number.
+ line = (
+ "/build/buildd/gtk+3.0-3.0.0/./gtk/ubuntumenuproxymodule.c:94: "
+ "warning: assignment makes pointer from integer without a cast")
+ self.assertIsNot(None, pointer_pattern.match(line))
+
+ def test_catches_pointer_from_integer_with_column_number(self):
+ # Regex should match compiler errors that do include the
+ # column number.
+ line = (
+ "/build/buildd/gtk+3.0-3.0.0/./gtk/ubuntumenuproxymodule.c:94:7: "
+ "warning: assignment makes pointer from integer without a cast")
+ self.assertIsNot(None, pointer_pattern.match(line))
+
+ def test_catches_implicit_function_without_column_number(self):
+ # Regex should match compiler errors that do include the
+ # column number.
+ line = (
+ "/build/buildd/gtk+3.0-3.0.0/./gtk/ubuntumenuproxymodule.c:94: "
+ "warning: implicit declaration of function 'foo'")
+ self.assertIsNot(None, implicit_pattern.match(line))
+
+ def test_catches_implicit_function_with_column_number(self):
+ # Regex should match compiler errors that do include the
+ # column number.
+ line = (
+ "/build/buildd/gtk+3.0-3.0.0/./gtk/ubuntumenuproxymodule.c:94:7: "
+ "warning: implicit declaration of function 'foo'")
+ self.assertIsNot(None, implicit_pattern.match(line))
+
=== added file 'lib/canonical/buildd/tests/test_harness.py'
--- lib/canonical/buildd/tests/test_harness.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/tests/test_harness.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,10 @@
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+__metaclass__ = type
+
+import doctest
+
+def test_suite():
+ return doctest.DocTestSuite('canonical.buildd.tests.harness')
+
=== added file 'lib/canonical/buildd/tests/test_translationtemplatesbuildmanager.py'
--- lib/canonical/buildd/tests/test_translationtemplatesbuildmanager.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/tests/test_translationtemplatesbuildmanager.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,173 @@
+# Copyright 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+__metaclass__ = type
+
+import os
+
+from lp.testing import TestCase
+from lp.testing.fakemethod import FakeMethod
+
+from canonical.buildd.translationtemplates import (
+ TranslationTemplatesBuildManager, TranslationTemplatesBuildState)
+
+
+class FakeConfig:
+ def get(self, section, key):
+ return key
+
+
+class FakeSlave:
+ def __init__(self, tempdir):
+ self._cachepath = tempdir
+ self._config = FakeConfig()
+ self._was_called = set()
+
+ def cachePath(self, file):
+ return os.path.join(self._cachepath, file)
+
+ def anyMethod(self, *args, **kwargs):
+ pass
+
+ fake_methods = ['emptyLog', 'chrootFail', 'buildFail', 'builderFail',]
+ def __getattr__(self, name):
+ """Remember which fake methods were called."""
+ if name not in self.fake_methods:
+ raise AttributeError(
+ "'%s' object has no attribute '%s'" % (self.__class__, name))
+ self._was_called.add(name)
+ return self.anyMethod
+
+ def wasCalled(self, name):
+ return name in self._was_called
+
+ def getArch(self):
+ return 'i386'
+
+ addWaitingFile = FakeMethod()
+
+
+class MockBuildManager(TranslationTemplatesBuildManager):
+ def __init__(self, *args, **kwargs):
+ super(MockBuildManager, self).__init__(*args, **kwargs)
+ self.commands = []
+
+ def runSubProcess(self, path, command):
+ self.commands.append([path]+command)
+ return 0
+
+
+class TestTranslationTemplatesBuildManagerIteration(TestCase):
+ """Run TranslationTemplatesBuildManager through its iteration steps."""
+ def setUp(self):
+ super(TestTranslationTemplatesBuildManagerIteration, self).setUp()
+ self.working_dir = self.makeTemporaryDirectory()
+ slave_dir = os.path.join(self.working_dir, 'slave')
+ home_dir = os.path.join(self.working_dir, 'home')
+ for dir in (slave_dir, home_dir):
+ os.mkdir(dir)
+ self.slave = FakeSlave(slave_dir)
+ self.buildid = '123'
+ self.buildmanager = MockBuildManager(self.slave, self.buildid)
+ self.buildmanager.home = home_dir
+ self.chrootdir = os.path.join(
+ home_dir, 'build-%s' % self.buildid, 'chroot-autobuild')
+
+ def getState(self):
+ """Retrieve build manager's state."""
+ return self.buildmanager._state
+
+ def test_iterate(self):
+ # Two iteration steps are specific to this build manager.
+ url = 'lp:~my/branch'
+ # The build manager's iterate() kicks off the consecutive states
+ # after INIT.
+ self.buildmanager.initiate({}, 'chroot.tar.gz', {'branch_url': url})
+
+ # Skip states that are done in DebianBuldManager to the state
+ # directly before INSTALL.
+ self.buildmanager._state = TranslationTemplatesBuildState.UPDATE
+
+ # INSTALL: Install additional packages needed for this job into
+ # the chroot.
+ self.buildmanager.iterate(0)
+ self.assertEqual(
+ TranslationTemplatesBuildState.INSTALL, self.getState())
+ expected_command = [
+ '/usr/bin/sudo',
+ 'sudo', 'chroot', self.chrootdir,
+ 'apt-get',
+ ]
+ self.assertEqual(expected_command, self.buildmanager.commands[-1][:5])
+
+ # GENERATE: Run the slave's payload, the script that generates
+ # templates.
+ self.buildmanager.iterate(0)
+ self.assertEqual(
+ TranslationTemplatesBuildState.GENERATE, self.getState())
+ expected_command = [
+ 'generatepath', 'generatepath', self.buildid, url, 'resultarchive'
+ ]
+ self.assertEqual(expected_command, self.buildmanager.commands[-1])
+ self.assertFalse(self.slave.wasCalled('chrootFail'))
+
+ outfile_path = os.path.join(
+ self.chrootdir, self.buildmanager.home[1:],
+ self.buildmanager._resultname)
+ os.makedirs(os.path.dirname(outfile_path))
+
+ outfile = open(outfile_path, 'w')
+ outfile.write("I am a template tarball. Seriously.")
+ outfile.close()
+
+ # The control returns to the DebianBuildManager in the REAP state.
+ self.buildmanager.iterate(0)
+ expected_command = [
+ 'processscanpath', 'processscanpath', self.buildid
+ ]
+ self.assertEqual(
+ TranslationTemplatesBuildState.REAP, self.getState())
+ self.assertEqual(expected_command, self.buildmanager.commands[-1])
+ self.assertFalse(self.slave.wasCalled('buildFail'))
+ self.assertEqual(
+ [((outfile_path,), {})], self.slave.addWaitingFile.calls)
+
+ def test_iterate_fail_INSTALL(self):
+ # See that a failing INSTALL is handled properly.
+ url = 'lp:~my/branch'
+ # The build manager's iterate() kicks off the consecutive states
+ # after INIT.
+ self.buildmanager.initiate({}, 'chroot.tar.gz', {'branch_url': url})
+
+ # Skip states to the INSTALL state.
+ self.buildmanager._state = TranslationTemplatesBuildState.INSTALL
+
+ # The buildmanager fails and iterates to the UMOUNT state.
+ self.buildmanager.iterate(-1)
+ self.assertEqual(
+ TranslationTemplatesBuildState.UMOUNT, self.getState())
+ expected_command = [
+ 'umountpath', 'umount-chroot', self.buildid
+ ]
+ self.assertEqual(expected_command, self.buildmanager.commands[-1])
+ self.assertTrue(self.slave.wasCalled('chrootFail'))
+
+ def test_iterate_fail_GENERATE(self):
+ # See that a failing GENERATE is handled properly.
+ url = 'lp:~my/branch'
+ # The build manager's iterate() kicks off the consecutive states
+ # after INIT.
+ self.buildmanager.initiate({}, 'chroot.tar.gz', {'branch_url': url})
+
+ # Skip states to the INSTALL state.
+ self.buildmanager._state = TranslationTemplatesBuildState.GENERATE
+
+ # The buildmanager fails and iterates to the REAP state.
+ self.buildmanager.iterate(-1)
+ expected_command = [
+ 'processscanpath', 'processscanpath', self.buildid
+ ]
+ self.assertEqual(
+ TranslationTemplatesBuildState.REAP, self.getState())
+ self.assertEqual(expected_command, self.buildmanager.commands[-1])
+ self.assertTrue(self.slave.wasCalled('buildFail'))
=== added file 'lib/canonical/buildd/translationtemplates.py'
--- lib/canonical/buildd/translationtemplates.py 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/translationtemplates.py 2011-11-17 19:45:28 +0000
@@ -0,0 +1,99 @@
+# Copyright 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+__metaclass__ = type
+
+import os
+
+from canonical.buildd.debian import DebianBuildManager, DebianBuildState
+
+
+class TranslationTemplatesBuildState(DebianBuildState):
+ INSTALL = "INSTALL"
+ GENERATE = "GENERATE"
+
+
+class TranslationTemplatesBuildManager(DebianBuildManager):
+ """Generate translation templates from branch.
+
+ This is the implementation of `TranslationTemplatesBuildJob`. The
+ latter runs on the master server; TranslationTemplatesBuildManager
+ runs on the build slave.
+ """
+
+ initial_build_state = TranslationTemplatesBuildState.INSTALL
+
+ def __init__(self, slave, buildid):
+ super(TranslationTemplatesBuildManager, self).__init__(slave, buildid)
+ self._generatepath = slave._config.get(
+ "translationtemplatesmanager", "generatepath")
+ self._resultname = slave._config.get(
+ "translationtemplatesmanager", "resultarchive")
+
+ def initiate(self, files, chroot, extra_args):
+ """See `BuildManager`."""
+ self._branch_url = extra_args['branch_url']
+ self._chroot_path = os.path.join(
+ self.home, 'build-' + self._buildid, 'chroot-autobuild')
+
+ super(TranslationTemplatesBuildManager, self).initiate(
+ files, chroot, extra_args)
+
+ def doInstall(self):
+ """Install packages required."""
+ required_packages = [
+ 'bzr',
+ 'intltool',
+ ]
+ command = ['apt-get', 'install', '-y'] + required_packages
+ chroot = ['sudo', 'chroot', self._chroot_path]
+ self.runSubProcess('/usr/bin/sudo', chroot + command)
+
+ # To satisfy DebianPackageManagers needs without having a misleading
+ # method name here.
+ doRunBuild = doInstall
+
+ def doGenerate(self):
+ """Generate templates."""
+ command = [
+ self._generatepath,
+ self._buildid, self._branch_url, self._resultname]
+ self.runSubProcess(self._generatepath, command)
+
+ def gatherResults(self):
+ """Gather the results of the build and add them to the file cache."""
+ # The file is inside the chroot, in the home directory of the buildd
+ # user. Should be safe to assume the home dirs are named identically.
+ assert self.home.startswith('/'), "home directory must be absolute."
+
+ path = os.path.join(
+ self._chroot_path, self.home[1:], self._resultname)
+ if os.access(path, os.F_OK):
+ self._slave.addWaitingFile(path)
+
+ def iterate_INSTALL(self, success):
+ """Installation was done."""
+ if success == 0:
+ self._state = TranslationTemplatesBuildState.GENERATE
+ self.doGenerate()
+ else:
+ if not self.alreadyfailed:
+ self._slave.chrootFail()
+ self.alreadyfailed = True
+ self._state = TranslationTemplatesBuildState.UMOUNT
+ self.doUnmounting()
+
+ def iterate_GENERATE(self, success):
+ """Template generation finished."""
+ if success == 0:
+ # It worked! Now let's bring in the harvest.
+ self.gatherResults()
+ self._state = TranslationTemplatesBuildState.REAP
+ self.doReapProcesses()
+ else:
+ if not self.alreadyfailed:
+ self._slave.buildFail()
+ self.alreadyfailed = True
+ self._state = TranslationTemplatesBuildState.REAP
+ self.doReapProcesses()
+
=== added file 'lib/canonical/buildd/umount-chroot'
--- lib/canonical/buildd/umount-chroot 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/umount-chroot 2011-11-17 19:45:28 +0000
@@ -0,0 +1,40 @@
+#!/bin/sh
+#
+# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# Buildd Slave tool to mount a chroot
+
+# Expects build id as arg 1, makes build-id to contain the build
+
+# Needs SUDO to be set to a sudo instance for passwordless access
+
+SUDO=/usr/bin/sudo
+BUILDID="$1"
+GREP=/bin/grep
+CUT=/usr/bin/cut
+XARGS=/usr/bin/xargs
+SORT=/usr/bin/sort
+
+set -e
+
+exec 2>&1
+
+echo "Unmounting chroot for build $BUILDID..."
+
+# binfmt-support adds a mount under /proc, which means that our first
+# pass at umounting fails unless we reverse the list. Leave the while
+# loop in just to handle pathological cases, too.
+COUNT=0
+while $GREP "$HOME/build-$BUILDID/chroot-autobuild" /proc/mounts; do
+ COUNT=$(($COUNT+1))
+ if [ $COUNT -ge 20 ]; then
+ echo "failed to umount $HOME/build-$BUILDID/chroot-autobuild"
+ if [ -x /usr/bin/lsof ]; then
+ /usr/bin/lsof "$HOME/build-$BUILDID/chroot-autobuild"
+ fi
+ exit 1
+ fi
+ $GREP "$HOME/build-$BUILDID/chroot-autobuild" /proc/mounts | \
+ $CUT -d\ -f2 | LANG=C $SORT -r | $XARGS -r -n 1 $SUDO umount || sleep 1
+done
=== added file 'lib/canonical/buildd/unpack-chroot'
--- lib/canonical/buildd/unpack-chroot 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/unpack-chroot 2011-11-17 19:45:28 +0000
@@ -0,0 +1,52 @@
+#!/bin/sh
+#
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# Buildd Slave tool to unpack a chroot tarball
+
+# Expects build id as arg 1, makes build-id to contain the build
+# Expects bzip2 compressed tarball as arg 2
+
+# Needs TAR to be set to a gnu tar instance, that needs bzip2
+# Needs SUDO to be set to a sudo instance for passwordless access
+# BUNZIP2 must un-bzip2
+# FILE must implement the -b and -i arguments (so a Debianish file)
+
+export PATH=/usr/bin:/bin:/usr/sbin:/sbin:${PATH}
+
+NTPDATE=ntpdate
+TAR=tar
+SUDO=sudo
+BUNZIP2=bunzip2
+FILE=file
+
+BUILDID="$1"
+TARBALL="$2"
+
+set -e
+
+exec 2>&1
+
+MIMETYPE=$($FILE -bi "$TARBALL")
+
+if [ x"$MIMETYPE" = "xapplication/x-bzip2" ]; then
+ echo "Uncompressing the tarball..."
+ $BUNZIP2 -c < "$TARBALL" > "$TARBALL".tmp
+ mv "$TARBALL".tmp "$TARBALL"
+ exec $0 "$@"
+fi
+
+if [ -f /etc/launchpad-buildd/default ]; then
+ eval `grep ntphost /etc/launchpad-buildd/default | sed 's/ //g'`
+fi
+if [ -n "$ntphost" ]; then
+ echo "Synching the system clock with the buildd NTP service..."
+ $SUDO $NTPDATE -u $ntphost
+fi
+
+cd $HOME
+cd "build-$BUILDID"
+
+echo "Unpacking chroot for build $BUILDID"
+$SUDO $TAR -xf "$TARBALL"
=== added file 'lib/canonical/buildd/update-debian-chroot'
--- lib/canonical/buildd/update-debian-chroot 1970-01-01 00:00:00 +0000
+++ lib/canonical/buildd/update-debian-chroot 2011-11-17 19:45:28 +0000
@@ -0,0 +1,45 @@
+#!/bin/sh
+#
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+# Buildd Slave tool to update a debian chroot
+
+# Expects build id as arg 1, makes build-id to contain the build
+
+# Needs SUDO to be set to a sudo instance for passwordless access
+
+SUDO=/usr/bin/sudo
+CHROOT=/usr/sbin/chroot
+APTGET=/usr/bin/apt-get
+BUILDID="$1"
+ARCHITECTURETAG="$2"
+ROOT=$HOME/build-$BUILDID/chroot-autobuild
+OPTIONS="-o DPkg::Options::=--force-confold"
+
+set -e
+
+exec 2>&1
+
+echo "Updating debian chroot for build $BUILDID"
+
+hostarch=$(dpkg --print-architecture)
+
+case $hostarch in
+ hppa|powerpc|sparc)
+ CHROOT="linux32 $CHROOT"
+ ;;
+ amd64)
+ if [ "$hostarch" != "$ARCHITECTURETAG" ]; then
+ CHROOT="linux32 $CHROOT"
+ fi
+ ;;
+esac
+
+export LANG=C
+export DEBIAN_FRONTEND=noninteractive
+export TTY=unknown
+
+$SUDO $CHROOT $ROOT $APTGET -uy update < /dev/null
+$SUDO $CHROOT $ROOT $APTGET $OPTIONS -uy --purge dist-upgrade < /dev/null
+
=== modified file 'lib/canonical/launchpad/daemons/readyservice.py'
--- lib/canonical/launchpad/daemons/readyservice.py 2011-11-10 07:00:25 +0000
+++ lib/canonical/launchpad/daemons/readyservice.py 2011-11-17 19:45:28 +0000
@@ -1,6 +1,12 @@
# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
+# XXX: JonathanLange 2011-06-21 bug=800295: The only modules in the Launchpad
+# tree that this is permitted to depend on are canonical.buildd, since
+# canonical.buildd is deployed by copying lib/canonical/buildd,
+# lib/canonical/launchpad/daemons/readyservice.py and daemons/buildd-slave.tac
+# only.
+
"""Add logging for when twistd services start up.
Used externally to launchpad (by launchpad-buildd) - must not import
=== modified file 'lib/lp/buildmaster/model/builder.py'
--- lib/lp/buildmaster/model/builder.py 2011-11-10 07:00:25 +0000
+++ lib/lp/buildmaster/model/builder.py 2011-11-17 19:45:28 +0000
@@ -43,8 +43,7 @@
from zope.component import getUtility
from zope.interface import implements
-from lpbuildd.slave import BuilderStatus
-
+from canonical.buildd.slave import BuilderStatus
from canonical.config import config
from canonical.database.sqlbase import (
SQLBase,
@@ -314,7 +313,7 @@
"""See `IBuilder`."""
# 'ident_position' dict relates the position of the job identifier
# token in the sentence received from status(), according the
- # two status we care about. See lp:launchpad-buildd
+ # two status we care about. See lib/canonical/buildd/slave.py
# for further information about sentence format.
ident_position = {
'BuilderStatus.BUILDING': 1,
@@ -329,6 +328,7 @@
Always return status_sentence.
"""
# Isolate the BuilderStatus string, always the first token in
+ # see lib/canonical/buildd/slave.py and
# IBuilder.slaveStatusSentence().
status = status_sentence[0]
=== modified file 'lib/lp/buildmaster/tests/mock_slaves.py'
--- lib/lp/buildmaster/tests/mock_slaves.py 2011-11-10 07:00:25 +0000
+++ lib/lp/buildmaster/tests/mock_slaves.py 2011-11-17 19:45:28 +0000
@@ -33,7 +33,7 @@
from twisted.internet import defer
from twisted.web import xmlrpc
-from lpbuildd.tests.harness import BuilddSlaveTestSetup
+from canonical.buildd.tests.harness import BuilddSlaveTestSetup
from lp.buildmaster.interfaces.builder import (
CannotFetchFile,
=== modified file 'lib/lp/buildmaster/tests/test_builder.py'
--- lib/lp/buildmaster/tests/test_builder.py 2011-11-10 07:00:25 +0000
+++ lib/lp/buildmaster/tests/test_builder.py 2011-11-17 19:45:28 +0000
@@ -29,8 +29,7 @@
removeSecurityProxy,
)
-from lpbuildd.slave import BuilderStatus
-
+from canonical.buildd.slave import BuilderStatus
from canonical.config import config
from canonical.database.sqlbase import flush_database_updates
from canonical.launchpad.webapp.interfaces import (
=== modified file 'lib/lp/buildmaster/tests/test_manager.py'
--- lib/lp/buildmaster/tests/test_manager.py 2011-11-10 07:00:25 +0000
+++ lib/lp/buildmaster/tests/test_manager.py 2011-11-17 19:45:28 +0000
@@ -27,8 +27,7 @@
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy
-from lpbuildd.tests import BuilddSlaveTestSetup
-
+from canonical.buildd.tests import BuilddSlaveTestSetup
from canonical.config import config
from canonical.launchpad.ftests import (
ANONYMOUS,
=== modified file 'lib/lp/testing/factory.py'
--- lib/lp/testing/factory.py 2011-11-17 09:11:42 +0000
+++ lib/lp/testing/factory.py 2011-11-17 19:45:28 +0000
@@ -2700,6 +2700,7 @@
Note: the builder returned will not be able to actually build -
we currently have a build slave setup for 'bob' only in the
test environment.
+ See lib/canonical/buildd/tests/buildd-slave-test.conf
"""
if processor is None:
processor_fam = ProcessorFamilySet().getByName('x86')
=== modified file 'lib/lp/translations/tests/test_generate_translation_templates.py'
--- lib/lp/translations/tests/test_generate_translation_templates.py 2011-11-10 07:00:25 +0000
+++ lib/lp/translations/tests/test_generate_translation_templates.py 2011-11-17 19:45:28 +0000
@@ -7,8 +7,7 @@
from lp.testing.fakemethod import FakeMethod
-from lpbuildd import pottery
-from lpbuildd.pottery.generate_translation_templates import (
+from canonical.buildd.pottery.generate_translation_templates import (
GenerateTranslationTemplates)
from canonical.launchpad.ftests.script import run_script
@@ -111,8 +110,6 @@
tempdir = self.makeTemporaryDirectory()
workdir = self.makeTemporaryDirectory()
(retval, out, err) = run_script(
- os.path.join(
- os.path.dirname(pottery.__file__),
- 'generate_translation_templates.py'),
+ 'lib/canonical/buildd/pottery/generate_translation_templates.py',
args=[tempdir, self.result_name, workdir])
self.assertEqual(0, retval)
=== modified file 'lib/lp/translations/tests/test_pottery_detect_intltool.py'
--- lib/lp/translations/tests/test_pottery_detect_intltool.py 2011-11-10 07:00:25 +0000
+++ lib/lp/translations/tests/test_pottery_detect_intltool.py 2011-11-17 19:45:28 +0000
@@ -8,7 +8,7 @@
from bzrlib.bzrdir import BzrDir
-from lpbuildd.pottery.intltool import (
+from canonical.buildd.pottery.intltool import (
check_potfiles_in,
ConfigFile,
find_intltool_dirs,
=== modified file 'scripts/rosetta/pottery-generate-intltool.py'
--- scripts/rosetta/pottery-generate-intltool.py 2011-11-10 07:00:25 +0000
+++ scripts/rosetta/pottery-generate-intltool.py 2011-11-17 19:45:28 +0000
@@ -8,7 +8,7 @@
import _pythonpath
-from lpbuildd.pottery.intltool import generate_pots
+from canonical.buildd.pottery.intltool import generate_pots
if __name__ == "__main__":
=== modified file 'utilities/snakefood/lp-sfood-packages'
--- utilities/snakefood/lp-sfood-packages 2011-11-10 07:00:25 +0000
+++ utilities/snakefood/lp-sfood-packages 2011-11-17 19:45:28 +0000
@@ -27,3 +27,4 @@
canonical/database/testing
canonical/database
canonical/config
+canonical/buildd
Follow ups