launchpad-reviewers team mailing list archive
Message #19871
[Merge] lp:~cjwatson/launchpad/generalise-publisher-staging into lp:launchpad
Colin Watson has proposed merging lp:~cjwatson/launchpad/generalise-publisher-staging into lp:launchpad.
Commit message:
Generalise installation of staged Contents files to handle other files as well, e.g. DEP-11 metadata.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/generalise-publisher-staging/+merge/282473
Generalise installation of staged Contents files to handle other files as well, e.g. DEP-11 metadata. This will let us list those other files in Release for the sake of having a trust path.
--
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~cjwatson/launchpad/generalise-publisher-staging into lp:launchpad.
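The core of the change is the generic staged-file install loop added to publish_ftpmaster.py (updateStagedFilesForSuite in the diff below). As a rough standalone sketch of that mechanism only, with simplified stand-ins for Launchpad's own helpers (the install_staged_files name, the paths, and the local newer_mtime here are illustrative and not part of this branch):

import math
import os
import shutil


def newer_mtime(new_path, old_path):
    """Return True if new_path exists and is newer than old_path."""
    if not os.path.exists(new_path):
        return False
    if not os.path.exists(old_path):
        return True
    return os.stat(new_path).st_mtime > os.stat(old_path).st_mtime


def install_staged_files(staging_top, backup_top):
    """Copy any newer staged files into the backup dists tree.

    Walk the staging tree for one suite, compare mtimes against the
    corresponding files under the backup dists tree, and copy changed
    files across, creating directories as needed.
    """
    updated = False
    for staging_dir, _, filenames in os.walk(staging_top):
        rel_dir = os.path.relpath(staging_dir, staging_top)
        backup_dir = os.path.join(backup_top, rel_dir)
        for filename in filenames:
            new_path = os.path.join(staging_dir, filename)
            current_path = os.path.join(backup_dir, filename)
            if newer_mtime(new_path, current_path):
                if not os.path.isdir(backup_dir):
                    os.makedirs(backup_dir)
                shutil.copy2(new_path, current_path)
                # shutil.copy2 rounds timestamps down
                # (http://bugs.python.org/issue12904); round up instead so
                # the installed copy never looks older than its staged
                # source on the next run.
                st = os.stat(new_path)
                os.utime(
                    current_path,
                    (math.ceil(st.st_atime), math.ceil(st.st_mtime)))
                updated = True
    return updated

Because the comparison is purely mtime-based, external generators (Contents, DEP-11, and so on) only need to drop files under the suite's staging directory; the publisher picks them up atomically on its next run.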
=== modified file 'lib/lp/archivepublisher/config.py'
--- lib/lp/archivepublisher/config.py 2014-07-28 01:49:27 +0000
+++ lib/lp/archivepublisher/config.py 2016-01-13 17:27:31 +0000
@@ -1,4 +1,4 @@
-# Copyright 2009-2012 Canonical Ltd. This software is licensed under the
+# Copyright 2009-2016 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
#
# This is the python package that defines the
@@ -105,6 +105,15 @@
else:
pubconf.metaroot = None
+ # Files under this directory are moved into distsroot by the publisher
+ # the next time it runs. This can be used by code that runs externally
+ # to the publisher (e.g. Contents generation) to publish files in a
+ # race-free way.
+ if archive.is_main:
+ pubconf.stagingroot = pubconf.archiveroot + '-staging'
+ else:
+ pubconf.stagingroot = None
+
return pubconf
@@ -125,6 +134,7 @@
self.overrideroot,
self.miscroot,
self.temproot,
+ self.stagingroot,
]
for directory in required_directories:
=== modified file 'lib/lp/archivepublisher/scripts/generate_contents_files.py'
--- lib/lp/archivepublisher/scripts/generate_contents_files.py 2015-09-23 13:19:25 +0000
+++ lib/lp/archivepublisher/scripts/generate_contents_files.py 2016-01-13 17:27:31 +0000
@@ -1,4 +1,4 @@
-# Copyright 2011-2015 Canonical Ltd. This software is licensed under the
+# Copyright 2011-2016 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Archive Contents files generator."""
@@ -27,6 +27,7 @@
DatabaseBlockedPolicy,
SlaveOnlyDatabasePolicy,
)
+from lp.services.osutils import ensure_directory_exists
from lp.services.scripts.base import (
LaunchpadCronScript,
LaunchpadScriptFailure,
@@ -255,6 +256,7 @@
"""Update Contents file, if it has changed."""
contents_dir = os.path.join(
self.content_archive, self.distribution.name, 'dists', suite)
+ staging_dir = os.path.join(self.config.stagingroot, suite)
contents_filename = "Contents-%s" % arch
last_contents = os.path.join(contents_dir, ".%s" % contents_filename)
current_contents = os.path.join(contents_dir, contents_filename)
@@ -268,8 +270,9 @@
new_contents = os.path.join(
contents_dir, "%s.gz" % contents_filename)
contents_dest = os.path.join(
- contents_dir, "%s-staged.gz" % contents_filename)
+ staging_dir, "%s.gz" % contents_filename)
+ ensure_directory_exists(os.path.dirname(contents_dest))
os.rename(current_contents, last_contents)
os.rename(new_contents, contents_dest)
os.chmod(contents_dest, 0664)
=== modified file 'lib/lp/archivepublisher/scripts/publish_ftpmaster.py'
--- lib/lp/archivepublisher/scripts/publish_ftpmaster.py 2015-09-23 13:19:25 +0000
+++ lib/lp/archivepublisher/scripts/publish_ftpmaster.py 2016-01-13 17:27:31 +0000
@@ -1,4 +1,4 @@
-# Copyright 2011-2015 Canonical Ltd. This software is licensed under the
+# Copyright 2011-2016 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Master distro publishing script."""
@@ -32,6 +32,7 @@
from lp.registry.interfaces.series import SeriesStatus
from lp.services.config import config
from lp.services.database.bulk import load_related
+from lp.services.osutils import ensure_directory_exists
from lp.services.scripts.base import (
LaunchpadCronScript,
LaunchpadScriptFailure,
@@ -563,55 +564,51 @@
self.publishDistroArchive(
distribution, archive, updated_suites=updated_suites)
- def updateContentsFile(self, archive, distribution, suite, arch):
- """Update a single Contents file if necessary.
+ def updateStagedFilesForSuite(self, archive_config, suite):
+ """Install all staged files for a single archive and suite.
- :return: True if a file was updated, otherwise False.
+ :return: True if any files were installed, otherwise False.
"""
- config = self.configs[distribution][archive.purpose]
- backup_dists = get_backup_dists(config)
- content_dists = os.path.join(
- config.distroroot, "contents-generation", distribution.name,
- "dists")
- contents_filename = "Contents-%s" % arch.architecturetag
- current_contents = os.path.join(
- backup_dists, suite, "%s.gz" % contents_filename)
- new_contents = os.path.join(
- content_dists, suite, "%s-staged.gz" % contents_filename)
- if newer_mtime(new_contents, current_contents):
- self.logger.debug(
- "Installing new Contents file for %s/%s.", suite,
- arch.architecturetag)
- shutil.copy2(new_contents, current_contents)
- # Due to http://bugs.python.org/issue12904, shutil.copy2 doesn't
- # copy timestamps precisely, and unfortunately it rounds down.
- # If we must lose accuracy, we need to round up instead. This
- # can be removed once Launchpad runs on Python >= 3.3.
- st = os.stat(new_contents)
- os.utime(
- current_contents,
- (math.ceil(st.st_atime), math.ceil(st.st_mtime)))
- return True
- return False
+ backup_top = os.path.join(get_backup_dists(archive_config), suite)
+ staging_top = os.path.join(archive_config.stagingroot, suite)
+ updated = False
+ for staging_dir, _, filenames in os.walk(staging_top):
+ rel_dir = os.path.relpath(staging_dir, staging_top)
+ backup_dir = os.path.join(backup_top, rel_dir)
+ for filename in filenames:
+ new_path = os.path.join(staging_dir, filename)
+ current_path = os.path.join(backup_dir, filename)
+ if newer_mtime(new_path, current_path):
+ self.logger.debug(
+ "Updating %s from %s." % (current_path, new_path))
+ ensure_directory_exists(os.path.dirname(current_path))
+ shutil.copy2(new_path, current_path)
+ # Due to http://bugs.python.org/issue12904, shutil.copy2
+ # doesn't copy timestamps precisely, and unfortunately
+ # it rounds down. If we must lose accuracy, we need to
+ # round up instead. This can be removed once Launchpad
+ # runs on Python >= 3.3.
+ st = os.stat(new_path)
+ os.utime(
+ current_path,
+ (math.ceil(st.st_atime), math.ceil(st.st_mtime)))
+ updated = True
+ return updated
- def updateContentsFiles(self, distribution):
- """Pick up updated Contents files if necessary."""
+ def updateStagedFiles(self, distribution):
+ """Install all staged files for a distribution's archives."""
updated_suites = []
- # XXX cjwatson 2015-03-20: GenerateContentsFiles currently only
- # supports the primary archive.
- archive = distribution.main_archive
- for series in distribution.getNonObsoleteSeries():
- for pocket in PackagePublishingPocket.items:
- suite = series.getSuite(pocket)
- if cannot_modify_suite(archive, series, pocket):
- continue
- updated = False
- for arch in series.enabled_architectures:
- if self.updateContentsFile(
- archive, distribution, suite, arch):
- updated = True
- if updated:
- updated_suites.append((archive, suite))
+ for archive in get_publishable_archives(distribution):
+ if archive.purpose not in self.configs[distribution]:
+ continue
+ archive_config = self.configs[distribution][archive.purpose]
+ for series in distribution.getNonObsoleteSeries():
+ for pocket in PackagePublishingPocket.items:
+ suite = series.getSuite(pocket)
+ if cannot_modify_suite(archive, series, pocket):
+ continue
+ if self.updateStagedFilesForSuite(archive_config, suite):
+ updated_suites.append((archive, suite))
return updated_suites
def publish(self, distribution, security_only=False):
@@ -629,7 +626,7 @@
if security_only:
has_published = self.publishSecurityUploads(distribution)
else:
- updated_suites = self.updateContentsFiles(distribution)
+ updated_suites = self.updateStagedFiles(distribution)
self.publishDistroUploads(
distribution, updated_suites=updated_suites)
# Let's assume the main archive is always modified
=== modified file 'lib/lp/archivepublisher/tests/test_config.py'
--- lib/lp/archivepublisher/tests/test_config.py 2014-07-07 07:15:39 +0000
+++ lib/lp/archivepublisher/tests/test_config.py 2016-01-13 17:27:31 +0000
@@ -1,4 +1,4 @@
-# Copyright 2011-2012 Canonical Ltd. This software is licensed under the
+# Copyright 2011-2016 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Test publisher configs handling.
@@ -48,6 +48,7 @@
self.root + "/ubuntutest-temp", primary_config.temproot)
self.assertEqual(archiveroot + "-uefi", primary_config.uefiroot)
self.assertIs(None, primary_config.metaroot)
+ self.assertEqual(archiveroot + "-staging", primary_config.stagingroot)
def test_partner_config(self):
# Partner archive configuration is correct.
@@ -70,6 +71,7 @@
self.root + "/ubuntutest-temp", partner_config.temproot)
self.assertEqual(archiveroot + "-uefi", partner_config.uefiroot)
self.assertIs(None, partner_config.metaroot)
+ self.assertEqual(archiveroot + "-staging", partner_config.stagingroot)
def test_copy_config(self):
# In the case of copy archives (used for rebuild testing) the
@@ -91,6 +93,7 @@
self.assertEqual(archiveroot + "-temp", copy_config.temproot)
self.assertIsNone(copy_config.uefiroot)
self.assertIs(None, copy_config.metaroot)
+ self.assertIs(None, copy_config.stagingroot)
class TestGetPubConfigPPA(TestCaseWithFactory):
@@ -129,6 +132,7 @@
self.ppa.owner.name, self.ppa.name)
self.assertEqual(uefiroot, self.ppa_config.uefiroot)
self.assertIs(None, self.ppa_config.metaroot)
+ self.assertIs(None, self.ppa_config.stagingroot)
def test_private_ppa_separate_root(self):
# Private PPAs are published to a different location.
@@ -162,6 +166,7 @@
p3a.owner.name, p3a.name)
self.assertEqual(uefiroot, p3a_config.uefiroot)
self.assertIs(None, p3a_config.metaroot)
+ self.assertIs(None, p3a_config.stagingroot)
def test_metaroot(self):
# The metadata directory structure doesn't include a distro
=== modified file 'lib/lp/archivepublisher/tests/test_generate_contents_files.py'
--- lib/lp/archivepublisher/tests/test_generate_contents_files.py 2015-09-23 13:19:25 +0000
+++ lib/lp/archivepublisher/tests/test_generate_contents_files.py 2016-01-13 17:27:31 +0000
@@ -1,4 +1,4 @@
-# Copyright 2011-2015 Canonical Ltd. This software is licensed under the
+# Copyright 2011-2016 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Test for the `generate-contents-files` script."""
@@ -299,8 +299,8 @@
fake_overrides(script, distroseries)
script.process()
self.assertTrue(file_exists(os.path.join(
- script.content_archive, distro.name, "dists", suite,
- "Contents-%s-staged.gz" % das.architecturetag)))
+ script.config.stagingroot, suite,
+ "Contents-%s.gz" % das.architecturetag)))
publisher_script = PublishFTPMaster(test_args=["-d", distro.name])
publisher_script.txn = self.layer.txn
publisher_script.logger = DevNullLogger()
=== modified file 'lib/lp/archivepublisher/tests/test_publish_ftpmaster.py'
--- lib/lp/archivepublisher/tests/test_publish_ftpmaster.py 2015-09-23 13:19:25 +0000
+++ lib/lp/archivepublisher/tests/test_publish_ftpmaster.py 2016-01-13 17:27:31 +0000
@@ -1,4 +1,4 @@
-# Copyright 2011-2015 Canonical Ltd. This software is licensed under the
+# Copyright 2011-2016 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Test publish-ftpmaster cron script."""
@@ -97,10 +97,9 @@
:param path: A list of path components.
:param contents: Text to write into the file.
"""
- marker = file(os.path.join(*path), "w")
- marker.write(contents)
- marker.flush()
- marker.close()
+ with open(os.path.join(*path), "w") as marker:
+ marker.write(contents)
+ marker.flush()
def read_marker_file(path):
@@ -108,7 +107,8 @@
:param return: Contents of the marker file.
"""
- return file(os.path.join(*path)).read()
+ with open(os.path.join(*path)) as marker:
+ return marker.read()
def get_a_suite(distroseries):
@@ -839,7 +839,7 @@
"Did not find expected marker for %s."
% archive.purpose.title)
- def test_updateContentsFile_installs_changed(self):
+ def test_updateStagedFilesForSuite_installs_changed(self):
distro = self.makeDistroWithPublishDirectory()
distroseries = self.factory.makeDistroSeries(distribution=distro)
das = self.factory.makeDistroArchSeries(distroseries=distroseries)
@@ -856,109 +856,131 @@
[backup_suite, "%s.gz" % contents_filename], "Old Contents")
os.utime(
os.path.join(backup_suite, "%s.gz" % contents_filename), (0, 0))
- content_suite = os.path.join(
- archive_config.distroroot, "contents-generation", distro.name,
- "dists", distroseries.name)
- os.makedirs(content_suite)
+ staging_suite = os.path.join(
+ archive_config.stagingroot, distroseries.name)
+ os.makedirs(staging_suite)
write_marker_file(
- [content_suite, "%s-staged.gz" % contents_filename], "Contents")
- self.assertTrue(script.updateContentsFile(
- distro.main_archive, distro, distroseries.name, das))
+ [staging_suite, "%s.gz" % contents_filename], "Contents")
+ self.assertTrue(script.updateStagedFilesForSuite(
+ archive_config, distroseries.name))
self.assertEqual(
"Contents",
read_marker_file([backup_suite, "%s.gz" % contents_filename]))
- def test_updateContentsFile_twice(self):
- # If updateContentsFile is run twice in a row, it does not update
- # the file the second time.
- distro = self.makeDistroWithPublishDirectory()
- distroseries = self.factory.makeDistroSeries(distribution=distro)
- das = self.factory.makeDistroArchSeries(distroseries=distroseries)
- script = self.makeScript(distro)
- script.setUp()
- script.setUpDirs()
- archive_config = getPubConfig(distro.main_archive)
- contents_filename = "Contents-%s" % das.architecturetag
- backup_suite = os.path.join(
- archive_config.archiveroot + "-distscopy", "dists",
- distroseries.name)
- os.makedirs(backup_suite)
- content_suite = os.path.join(
- archive_config.distroroot, "contents-generation", distro.name,
- "dists", distroseries.name)
- os.makedirs(content_suite)
- write_marker_file(
- [content_suite, "%s-staged.gz" % contents_filename], "Contents")
- self.assertTrue(script.updateContentsFile(
- distro.main_archive, distro, distroseries.name, das))
- self.assertFalse(script.updateContentsFile(
- distro.main_archive, distro, distroseries.name, das))
-
- def test_updateContentsFiles_updated_suites(self):
- # updateContentsFiles returns a list of suites for which it updated
- # Contents files.
- distro = self.makeDistroWithPublishDirectory()
- distroseries = self.factory.makeDistroSeries(distribution=distro)
- das = self.factory.makeDistroArchSeries(distroseries=distroseries)
- script = self.makeScript(distro)
- script.setUp()
- script.setUpDirs()
- archive_config = getPubConfig(distro.main_archive)
- contents_filename = "Contents-%s" % das.architecturetag
- backup_suite = os.path.join(
- archive_config.archiveroot + "-distscopy", "dists",
- distroseries.name)
- os.makedirs(backup_suite)
- content_suite = os.path.join(
- archive_config.distroroot, "contents-generation", distro.name,
- "dists", distroseries.name)
- os.makedirs(content_suite)
- write_marker_file(
- [content_suite, "%s-staged.gz" % contents_filename], "Contents")
+ def test_updateStagedFilesForSuite_installs_changed_dep11(self):
+ # updateStagedFilesForSuite installs changed files other than
+ # Contents files, such as DEP-11 metadata.
+ distro = self.makeDistroWithPublishDirectory()
+ distroseries = self.factory.makeDistroSeries(distribution=distro)
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ archive_config = getPubConfig(distro.main_archive)
+ backup_dep11 = os.path.join(
+ archive_config.archiveroot + "-distscopy", "dists",
+ distroseries.name, "main", "dep11")
+ os.makedirs(backup_dep11)
+ write_marker_file([backup_dep11, "a"], "Old A")
+ os.utime(os.path.join(backup_dep11, "a"), (0, 0))
+ staging_dep11 = os.path.join(
+ archive_config.stagingroot, distroseries.name, "main", "dep11")
+ os.makedirs(os.path.join(staging_dep11, "subdir"))
+ write_marker_file([staging_dep11, "a"], "A")
+ write_marker_file([staging_dep11, "subdir", "b"], "B")
+ self.assertTrue(script.updateStagedFilesForSuite(
+ archive_config, distroseries.name))
+ self.assertEqual("A", read_marker_file([backup_dep11, "a"]))
+ self.assertEqual("B", read_marker_file([backup_dep11, "subdir", "b"]))
+
+ def test_updateStagedFilesForSuite_twice(self):
+ # If updateStagedFilesForSuite is run twice in a row, it does not
+ # update the files the second time.
+ distro = self.makeDistroWithPublishDirectory()
+ distroseries = self.factory.makeDistroSeries(distribution=distro)
+ das = self.factory.makeDistroArchSeries(distroseries=distroseries)
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ archive_config = getPubConfig(distro.main_archive)
+ contents_filename = "Contents-%s" % das.architecturetag
+ backup_suite = os.path.join(
+ archive_config.archiveroot + "-distscopy", "dists",
+ distroseries.name)
+ os.makedirs(backup_suite)
+ staging_suite = os.path.join(
+ archive_config.stagingroot, distroseries.name)
+ os.makedirs(staging_suite)
+ write_marker_file(
+ [staging_suite, "%s.gz" % contents_filename], "Contents")
+ self.assertTrue(script.updateStagedFilesForSuite(
+ archive_config, distroseries.name))
+ self.assertFalse(script.updateStagedFilesForSuite(
+ archive_config, distroseries.name))
+
+ def test_updateStagedFiles_updated_suites(self):
+ # updateStagedFiles returns a list of suites for which it updated
+ # staged files.
+ distro = self.makeDistroWithPublishDirectory()
+ distroseries = self.factory.makeDistroSeries(distribution=distro)
+ das = self.factory.makeDistroArchSeries(distroseries=distroseries)
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ archive_config = getPubConfig(distro.main_archive)
+ contents_filename = "Contents-%s" % das.architecturetag
+ backup_suite = os.path.join(
+ archive_config.archiveroot + "-distscopy", "dists",
+ distroseries.name)
+ os.makedirs(backup_suite)
+ staging_suite = os.path.join(
+ archive_config.stagingroot, distroseries.name)
+ os.makedirs(staging_suite)
+ write_marker_file(
+ [staging_suite, "%s.gz" % contents_filename], "Contents")
self.assertEqual(
[(distro.main_archive, distroseries.name)],
- script.updateContentsFiles(distro))
+ script.updateStagedFiles(distro))
- def test_updateContentsFiles_only_primary_archive(self):
- # updateContentsFiles only considers the primary archive. (This
- # will need to change if GenerateContentsFiles ever gains support
- # for other archive purposes.)
+ def test_updateStagedFiles_considers_partner_archive(self):
+ # updateStagedFiles considers the partner archive as well as the
+ # primary archive.
distro = self.makeDistroWithPublishDirectory()
self.factory.makeArchive(
distribution=distro, owner=distro.owner,
purpose=ArchivePurpose.PARTNER)
distroseries = self.factory.makeDistroSeries(
distribution=distro, status=SeriesStatus.DEVELOPMENT)
- das = self.factory.makeDistroArchSeries(distroseries=distroseries)
script = self.makeScript(distro)
script.setUp()
script.setUpDirs()
- script.updateContentsFile = FakeMethod()
- script.updateContentsFiles(distro)
- expected_args = [
- (distro.main_archive, distro, distroseries.getSuite(pocket), das)
- for pocket in PackagePublishingPocket.items]
+ script.updateStagedFilesForSuite = FakeMethod()
+ script.updateStagedFiles(distro)
+ expected_args = []
+ for purpose in ArchivePurpose.PRIMARY, ArchivePurpose.PARTNER:
+ expected_args.extend([
+ (script.configs[distro][purpose],
+ distroseries.getSuite(pocket))
+ for pocket in PackagePublishingPocket.items])
self.assertEqual(
- expected_args, script.updateContentsFile.extract_args())
+ expected_args, script.updateStagedFilesForSuite.extract_args())
- def test_updateContentsFiles_skips_immutable_suites(self):
- # updateContentsFiles does not generate Contents files for immutable
- # suites.
+ def test_updateStagedFiles_skips_immutable_suites(self):
+ # updateStagedFiles does not update files for immutable suites.
distro = self.makeDistroWithPublishDirectory()
distroseries = self.factory.makeDistroSeries(
distribution=distro, status=SeriesStatus.CURRENT)
- das = self.factory.makeDistroArchSeries(distroseries=distroseries)
script = self.makeScript(distro)
script.setUp()
script.setUpDirs()
- script.updateContentsFile = FakeMethod()
- script.updateContentsFiles(distro)
+ script.updateStagedFilesForSuite = FakeMethod()
+ script.updateStagedFiles(distro)
expected_args = [
- (distro.main_archive, distro, distroseries.getSuite(pocket), das)
+ (script.configs[distro][ArchivePurpose.PRIMARY],
+ distroseries.getSuite(pocket))
for pocket in PackagePublishingPocket.items
if pocket != PackagePublishingPocket.RELEASE]
self.assertEqual(
- expected_args, script.updateContentsFile.extract_args())
+ expected_args, script.updateStagedFilesForSuite.extract_args())
def test_publish_always_returns_true_for_primary(self):
script = self.makeScript()