
launchpad-reviewers team mailing list archive

[Merge] lp:~cjwatson/launchpad/soyuz-doctests-future-imports into lp:launchpad

 

Colin Watson has proposed merging lp:~cjwatson/launchpad/soyuz-doctests-future-imports into lp:launchpad.

Commit message:
Convert doctests under lp.soyuz to Launchpad's preferred __future__ imports.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/soyuz-doctests-future-imports/+merge/346922

Huge, but mostly boring.  The only bit that wasn't essentially mechanical is that in a few places I switched to using print() for strings rather than having doctests that assert on the __repr__ of text strings.
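
For example (an illustrative fragment rather than an excerpt from a particular test), asserting on the repr of a text string:

    >>> archive.name
    u'ppa'

expects different output on Python 2 and Python 3 once unicode_literals is in effect, so such checks now print the value and match the same plain string on both:

    >>> print(archive.name)
    ppa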
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~cjwatson/launchpad/soyuz-doctests-future-imports into lp:launchpad.
=== modified file 'lib/lp/soyuz/doc/archive-deletion.txt'
--- lib/lp/soyuz/doc/archive-deletion.txt	2013-02-20 04:01:45 +0000
+++ lib/lp/soyuz/doc/archive-deletion.txt	2018-05-27 20:15:09 +0000
@@ -17,10 +17,10 @@
 
 The archive is currently active:
 
-    >>> print archive.enabled
+    >>> print(archive.enabled)
     True
 
-    >>> print archive.status.name
+    >>> print(archive.status.name)
     ACTIVE
 
 We can create some packages in it using the test publisher:
@@ -54,10 +54,10 @@
 Now the archive is disabled and the status is DELETING to tell the
 publisher to remove the publications and the repository:
 
-    >>> print archive.enabled
+    >>> print(archive.enabled)
     False
 
-    >>> print archive.status.name
+    >>> print(archive.status.name)
     DELETING
 
 Once deleted the archive can't be reenabled.
@@ -67,5 +67,5 @@
     ...
     AssertionError: Deleted archives can't be enabled.
 
-    >>> print archive.enabled
+    >>> print(archive.enabled)
     False

=== modified file 'lib/lp/soyuz/doc/archive.txt'
--- lib/lp/soyuz/doc/archive.txt	2017-06-03 16:40:44 +0000
+++ lib/lp/soyuz/doc/archive.txt	2018-05-27 20:15:09 +0000
@@ -17,23 +17,23 @@
     >>> cprov = getUtility(IPersonSet).getByName('cprov')
     >>> cprov_archive = cprov.archive
 
-    >>> print cprov_archive.owner.name
+    >>> print(cprov_archive.owner.name)
     cprov
-    >>> print cprov_archive.distribution.name
+    >>> print(cprov_archive.distribution.name)
     ubuntu
-    >>> print cprov_archive.name
+    >>> print(cprov_archive.name)
     ppa
-    >>> print cprov_archive.purpose.name
+    >>> print(cprov_archive.purpose.name)
     PPA
-    >>> print cprov_archive.displayname
+    >>> print(cprov_archive.displayname)
     PPA for Celso Providelo
     >>> cprov_archive.enabled
     True
     >>> cprov_archive.authorized_size
     1024
-    >>> print cprov_archive.signing_key
+    >>> print(cprov_archive.signing_key)
     None
-    >>> print cprov_archive.signing_key_fingerprint
+    >>> print(cprov_archive.signing_key_fingerprint)
     None
     >>> cprov_archive.private
     False
@@ -84,7 +84,7 @@
 build.  This allows an admin to set external repositories as a source for
 build dependencies on the context PPA.  Its default value is None:
 
-    >>> print cprov_archive.external_dependencies
+    >>> print(cprov_archive.external_dependencies)
     None
 
 Amending it as an unprivileged user results in failure:
@@ -101,7 +101,7 @@
 
 Useful properties:
 
-    >>> print cprov_archive.archive_url
+    >>> print(cprov_archive.archive_url)
     http://ppa.launchpad.dev/cprov/ppa/ubuntu
 
 Inquire what Distribution Series this archive has published sources to:
@@ -125,11 +125,11 @@
 
 It is only editable by someone with launchpad.Edit permissions:
 
-    >>> print cprov_archive.status.name
+    >>> print(cprov_archive.status.name)
     ACTIVE
 
     >>> cprov_archive.status = ArchiveStatus.DELETING
-    >>> print cprov_archive.status.name
+    >>> print(cprov_archive.status.name)
     DELETING
 
     >>> login(ANONYMOUS)
@@ -202,7 +202,7 @@
 Valid names work as expected.
 
     >>> rebuild_archive.name = 'there-we-go'
-    >>> print rebuild_archive.name
+    >>> print(rebuild_archive.name)
     there-we-go
 
 Please note that copy archive displayname doesn't follow the name change.
@@ -221,7 +221,7 @@
 
 Uploads to copy archives are not allowed.
 
-    >>> print rebuild_archive.checkArchivePermission(cprov)
+    >>> print(rebuild_archive.checkArchivePermission(cprov))
     False
 
 
@@ -268,7 +268,7 @@
     ...         title = pub.binarypackagerelease.title
     ...         arch_spec = pub.binarypackagerelease.architecturespecific
     ...         pub_arch = pub.distroarchseries.architecturetag
-    ...         print "%s (%s) -> %s" % (title, arch_spec, pub_arch)
+    ...         print("%s (%s) -> %s" % (title, arch_spec, pub_arch))
 
 The PPA for cprov contains only 4 binary publications, however 'pmount' is
 'architecture independent', which means that the same binary (DB) is
@@ -527,11 +527,11 @@
     >>> def print_published_files(archive):
     ...     for pub_source in archive.getPublishedSources():
     ...         for src_file in pub_source.sourcepackagerelease.files:
-    ...             print '%s: %s (%s, %d bytes)' % (
+    ...             print('%s: %s (%s, %d bytes)' % (
     ...                 src_file.sourcepackagerelease.title,
     ...                 src_file.libraryfile.filename,
     ...                 src_file.filetype.name,
-    ...                 src_file.libraryfile.content.filesize)
+    ...                 src_file.libraryfile.content.filesize))
 
 First, let's print the currently published files in cprov's PPA:
 
@@ -749,10 +749,10 @@
     >>> def print_dependencies(archive):
     ...     dependencies = archive.dependencies
     ...     if not dependencies:
-    ...         print "No dependencies recorded."
+    ...         print("No dependencies recorded.")
     ...         return
     ...     for dep in dependencies:
-    ...         print dep.dependency.displayname
+    ...         print(dep.dependency.displayname)
 
 Celso's PPA has no dependencies stored in the sampledata.
 
@@ -774,16 +774,16 @@
 
 The `IArchiveDependency` object simply maps the desired relationship.
 
-    >>> print archive_dependency.archive.displayname
+    >>> print(archive_dependency.archive.displayname)
     PPA for Celso Providelo
 
-    >>> print archive_dependency.dependency.displayname
+    >>> print(archive_dependency.dependency.displayname)
     PPA for Mark Shuttleworth
 
 The `IArchiveDependency` object itself implement a 'title'
 property. For PPA dependencies the title defaults to the PPA displayname.
 
-    >>> print archive_dependency.title
+    >>> print(archive_dependency.title)
     PPA for Mark Shuttleworth
 
 The archive dependency is immediately recorded on Celso's PPA.
@@ -794,12 +794,12 @@
 'getArchiveDependency' returns the corresponding `IArchiveDependency`
 for a given 'dependency', otherwise it returns None.
 
-    >>> print cprov.archive.getArchiveDependency(
-    ...     mark.archive).dependency.displayname
+    >>> print(cprov.archive.getArchiveDependency(
+    ...     mark.archive).dependency.displayname)
     PPA for Mark Shuttleworth
 
     >>> no_priv = getUtility(IPersonSet).getByName('no-priv')
-    >>> print cprov.archive.getArchiveDependency(no_priv.archive)
+    >>> print(cprov.archive.getArchiveDependency(no_priv.archive))
     None
 
 As mentioned above, the archive dependency engine doesn't follow
@@ -848,12 +848,12 @@
 the target 'pocket' and a human-readable reference to the components
 involved.
 
-    >>> print primary_dependency.title
+    >>> print(primary_dependency.title)
     Primary Archive for Ubuntu Linux - UPDATES (main, universe)
 
 They also expose the name of the component directly, for use in the API.
 
-    >>> print primary_dependency.component_name
+    >>> print(primary_dependency.component_name)
     universe
 
 See further implications of archive dependencies in
@@ -879,12 +879,12 @@
     >>> primary_component_dep = no_priv.archive.addArchiveDependency(
     ...     ubuntu.main_archive, PackagePublishingPocket.SECURITY)
 
-    >>> print primary_component_dep.title
+    >>> print(primary_component_dep.title)
     Primary Archive for Ubuntu Linux - SECURITY
 
 In this case the component name is None.
 
-    >>> print primary_component_dep.component_name
+    >>> print(primary_component_dep.component_name)
     None
 
 However only PRIMARY archive dependencies support pockets other than
@@ -942,7 +942,7 @@
     ...     copy_target)
     >>> pcr = ubuntu.main_archive.requestPackageCopy(
     ...     naked_copy_target, requestor)
-    >>> print pcr
+    >>> print(pcr)
     Package copy request
     source = primary/hoary/-/RELEASE
     target = my-copy-archive/hoary/-/RELEASE
@@ -955,7 +955,7 @@
 
     >>> package_copy_request = ubuntu.main_archive.requestPackageCopy(
     ...     naked_copy_target, requestor, suite="hoary-updates");
-    >>> print package_copy_request
+    >>> print(package_copy_request)
     Package copy request
     source = primary/hoary/-/UPDATES
     target = my-copy-archive/hoary/-/RELEASE
@@ -986,7 +986,7 @@
 
 PPAs are created with the name attribute set to 'ppa' by default.
 
-    >>> print sandbox_archive.name
+    >>> print(sandbox_archive.name)
     ppa
 
 We can take the opportunity to check if the default 'authorized_size'
@@ -1012,13 +1012,13 @@
     >>> ubuntutest = getUtility(IDistributionSet)['ubuntutest']
     >>> partner_archive = getUtility(IArchiveSet).getByDistroPurpose(
     ...     ubuntutest, ArchivePurpose.PARTNER)
-    >>> print partner_archive.name
+    >>> print(partner_archive.name)
     partner
-    >>> print partner_archive.is_partner
+    >>> print(partner_archive.is_partner)
     True
-    >>> print partner_archive.is_primary
+    >>> print(partner_archive.is_primary)
     False
-    >>> print partner_archive.is_main
+    >>> print(partner_archive.is_main)
     True
 
 It explicitly fails when purpose is PPA, since such lookup should be
@@ -1043,14 +1043,14 @@
 
     >>> partner_archive = getUtility(IArchiveSet).getByDistroAndName(
     ...     ubuntutest, 'partner')
-    >>> print partner_archive.displayname
+    >>> print(partner_archive.displayname)
     Partner Archive for Ubuntu Test
 
 Passing an invalid name will cause an empty result set.
 
     >>> bogus = getUtility(IArchiveSet).getByDistroAndName(
     ...     ubuntutest, 'bogus')
-    >>> print bogus
+    >>> print(bogus)
     None
 
 IArchive.archive_url will return a URL for the archive that the builder can
@@ -1058,31 +1058,31 @@
 PunlisherConfig require us to log in as an admin:
 
     >>> login('admin@xxxxxxxxxxxxx')
-    >>> print partner_archive.archive_url
+    >>> print(partner_archive.archive_url)
     http://archive.launchpad.dev/ubuntutest-partner
 
-    >>> print sandbox_archive.archive_url
+    >>> print(sandbox_archive.archive_url)
     http://ppa.launchpad.dev/name16/ppa/ubuntu
 
-    >>> print getUtility(IArchiveSet).getByDistroPurpose(
-    ...     ubuntutest, ArchivePurpose.PRIMARY).archive_url
+    >>> print(getUtility(IArchiveSet).getByDistroPurpose(
+    ...     ubuntutest, ArchivePurpose.PRIMARY).archive_url)
     http://archive.launchpad.dev/ubuntutest
 
 COPY archives use a URL format of <distro-name>-<archive-name>:
 
-    >>> print naked_copy_target.archive.is_copy
+    >>> print(naked_copy_target.archive.is_copy)
     True
-    >>> print naked_copy_target.archive.archive_url
+    >>> print(naked_copy_target.archive.archive_url)
     http://rebuild-test.internal/ubuntu-my-copy-archive/ubuntu
 
 If the archive is private, the url may be different as private PPAs
 are published to a secure location.
 
     >>> login("celso.providelo@xxxxxxxxxxxxx")
-    >>> print cprov_archive.archive_url
+    >>> print(cprov_archive.archive_url)
     http://ppa.launchpad.dev/cprov/ppa/ubuntu
 
-    >>> print cprov_private_ppa.archive_url
+    >>> print(cprov_private_ppa.archive_url)
     http://private-ppa.launchpad.dev/cprov/myprivateppa/ubuntu
 
 IArchive.allowUpdatesToReleasePocket returns whether the archive is allowed
@@ -1122,7 +1122,7 @@
     >>> len(archive_purposes)
     17
 
-    >>> print sorted(set(archive_purposes))
+    >>> print(sorted(set(archive_purposes)))
     ['COPY', 'PARTNER', 'PPA', 'PRIMARY']
 
 'getPPAsForUser' returns all the PPAs a given user participates in. It
@@ -1132,7 +1132,7 @@
 Celso only participates in his own PPAs.
 
     >>> for ppa in archive_set.getPPAsForUser(cprov):
-    ...     print ppa.displayname
+    ...     print(ppa.displayname)
     PPA for Celso Providelo
     PPA named myprivateppa for Celso Providelo
 
@@ -1156,7 +1156,7 @@
     ...     distribution=ubuntu, description='Yo !')
 
     >>> for ppa in archive_set.getPPAsForUser(cprov):
-    ...     print ppa.displayname
+    ...     print(ppa.displayname)
     PPA for Celso Providelo
     PPA for Launchpad Buildd Admins
     PPA named myprivateppa for Celso Providelo
@@ -1166,14 +1166,14 @@
 it gets listed by `getPPAsForUser`.
 
     >>> for ppa in archive_set.getPPAsForUser(no_priv):
-    ...     print ppa.displayname
+    ...     print(ppa.displayname)
     PPA for No Privileges Person
 
     >>> cprov_archive.newComponentUploader(no_priv, "main")
     <ArchivePermission ...>
 
     >>> for ppa in archive_set.getPPAsForUser(no_priv):
-    ...     print ppa.displayname
+    ...     print(ppa.displayname)
     PPA for Celso Providelo
     PPA for No Privileges Person
 
@@ -1188,14 +1188,14 @@
 'indirect_uploader' currently can't upload to cprov's PPA:
 
     >>> for ppa in archive_set.getPPAsForUser(indirect_uploader):
-    ...     print ppa.displayname
+    ...     print(ppa.displayname)
 
 But if we make them part of the uploader_team they'll gain access:
 
     >>> ignored = uploader_team.addMember(
     ...     indirect_uploader, indirect_uploader)
     >>> for ppa in archive_set.getPPAsForUser(indirect_uploader):
-    ...     print ppa.displayname
+    ...     print(ppa.displayname)
     PPA for Celso Providelo
 
 When there is no active PPA for the team a user participates the
@@ -1213,7 +1213,7 @@
 
     >>> p3as = archive_set.getPrivatePPAs()
     >>> for p3a in p3as:
-    ...     print p3a.displayname
+    ...     print(p3a.displayname)
     PPA named myprivateppa for Celso Providelo
 
 'getLatestPPASourcePublicationsForDistribution' returns up to 5
@@ -1233,7 +1233,8 @@
     ...         archive_set.getLatestPPASourcePublicationsForDistribution(
     ...         ubuntu))
     ...     for pub in latest_uploads:
-    ...         print pub.displayname, pub.status.name, pub.archive.owner.name
+    ...         print(
+    ...             pub.displayname, pub.status.name, pub.archive.owner.name)
 
     >>> print_latest_uploads()
     cdrkit 1.0 in breezy-autotest SUPERSEDED cprov
@@ -1320,8 +1321,8 @@
     ...     most_active_ppas = (
     ...         archive_set.getMostActivePPAsForDistribution(ubuntu))
     ...     for most_active in most_active_ppas:
-    ...         print most_active[
-    ...             'archive'].displayname, most_active['uploads']
+    ...         print(most_active[
+    ...             'archive'].displayname, most_active['uploads'])
 
     >>> print_most_active_ppas()
     PPA for Mark Shuttleworth 2
@@ -1382,9 +1383,9 @@
 its relevant attributes.
 
     >>> def print_archive_names(archives):
-    ...     print 'Name Owner Private Enabled'
+    ...     print('Name Owner Private Enabled')
     ...     for a in archives:
-    ...         print a.name, a.owner.name, a.private, a.enabled
+    ...         print(a.name, a.owner.name, a.private, a.enabled)
 
 Anonymous lookups return only public and enabled archives for the
 given purpose:
@@ -1784,7 +1785,7 @@
 The name is used as provided, so callsites should validate it when
 necessary.
 
-    >>> print rebuild_archive.name
+    >>> print(rebuild_archive.name)
     test-rebuild-one
 
 Another difference is the lookup, we can use getByDistroPurpose(),
@@ -1802,14 +1803,14 @@
 
     >>> candidate = getUtility(IArchiveSet).getByDistroPurpose(
     ...     ubuntutest, ArchivePurpose.COPY, name="does-not-exist")
-    >>> print candidate
+    >>> print(candidate)
     None
 
 If there is a matching archive it is returned.
 
     >>> candidate = getUtility(IArchiveSet).getByDistroPurpose(
     ...     ubuntutest, ArchivePurpose.COPY, name="test-rebuild-one")
-    >>> print candidate.name
+    >>> print(candidate.name)
     test-rebuild-one
 
 
@@ -1860,10 +1861,10 @@
     ...     sources, cprov.archive, "release", person=mark)
 
     >>> mark_one = mark.archive.getPublishedSources(name=u"package1").one()
-    >>> print mark_one.sourcepackagerelease.version
+    >>> print(mark_one.sourcepackagerelease.version)
     1.1
     >>> mark_two = mark.archive.getPublishedSources(name=u"package2").one()
-    >>> print mark_two.sourcepackagerelease.version
+    >>> print(mark_two.sourcepackagerelease.version)
     1.0
 
 Notice that the latest version of package_one was copied, ignoring the
@@ -1938,7 +1939,7 @@
     ...     from_series="hoary", person=mark)
     >>> mark_multiseries = mark.archive.getPublishedSources(
     ...     name=u"package-multiseries").one()
-    >>> print mark_multiseries.sourcepackagerelease.version
+    >>> print(mark_multiseries.sourcepackagerelease.version)
     1.0
 
 We can also specify a single source to be copied with the `syncSource`
@@ -1978,7 +1979,7 @@
     ...     person=mark)
     >>> pack = mark.archive.getPublishedSources(
     ...     name="pack", exact_match=True).one()
-    >>> print pack.sourcepackagerelease.version
+    >>> print(pack.sourcepackagerelease.version)
     1.0
 
 If the supplied package exists but not in the source archive, we get an error:
@@ -1993,7 +1994,7 @@
     >>> mark.archive.syncSource("package3", "1.0", cprov.archive,
     ...     "release", person=mark)
     >>> mark_three = mark.archive.getPublishedSources(name=u"package3").one()
-    >>> print mark_three.sourcepackagerelease.version
+    >>> print(mark_three.sourcepackagerelease.version)
     1.0
 
 It's also possible to copy the source and its binaries at the same time,
@@ -2053,7 +2054,7 @@
     >>> source_archive.getPublishedSources(name=u"overridden").count()
     2
 
-    >>> print copy_candidate.section.name
+    >>> print(copy_candidate.section.name)
     python
 
 When syncing 'overridden_1.0' to Mark's PPA, the latest publication,
@@ -2064,5 +2065,5 @@
     ...     from_archive=source_archive, to_pocket='release', person=mark)
 
     >>> copy = mark.archive.getPublishedSources(name=u"overridden").one()
-    >>> print copy.section.name
+    >>> print(copy.section.name)
     python

=== modified file 'lib/lp/soyuz/doc/archiveauthtoken.txt'
--- lib/lp/soyuz/doc/archiveauthtoken.txt	2012-04-10 14:01:17 +0000
+++ lib/lp/soyuz/doc/archiveauthtoken.txt	2018-05-27 20:15:09 +0000
@@ -51,7 +51,7 @@
 
 By default the tokens are 20 characters long.
 
-    >>> print len(token_with_random_string.token)
+    >>> print(len(token_with_random_string.token))
     20
 
 It is not possible to create a second token when one already exists:
@@ -74,16 +74,16 @@
 
 The new token is returned and reflects the data:
 
-    >>> print new_token.archive.displayname
+    >>> print(new_token.archive.displayname)
     PPA for Joe Smith
 
-    >>> print new_token.person.name
+    >>> print(new_token.person.name)
     bradsmith
 
-    >>> print new_token.token
+    >>> print(new_token.token)
     testtoken
 
-    >>> print new_token.archive_url
+    >>> print(new_token.archive_url)
     http://bradsmith:testtoken@xxxxxxxxxxxxxxxxxxxxxxxxx/joe/ppa/...
 
 Commit the new token to the database.
@@ -96,7 +96,7 @@
     >>> new_token.date_created is not None
     True
 
-    >>> print new_token.date_deactivated
+    >>> print(new_token.date_deactivated)
     None
 
 
@@ -123,26 +123,26 @@
 
 And retrieve the token by id and by token data:
 
-    >>> print token_set.get(new_token.id).token
+    >>> print(token_set.get(new_token.id).token)
     testtoken
 
-    >>> print token_set.getByToken(u"testtoken").person.name
+    >>> print(token_set.getByToken(u"testtoken").person.name)
     bradsmith
 
 It's also possible to retrieve a set of all the tokens for an archive.
 
     >>> tokens = token_set.getByArchive(joe_private_ppa)
-    >>> print tokens.count()
+    >>> print(tokens.count())
     1
 
     >>> for token in tokens:
-    ...     print token.person.name
+    ...     print(token.person.name)
     bradsmith
 
 Tokens can also be retreived by archive and person:
 
-    >>> print token_set.getActiveTokenForArchiveAndPerson(
-    ...     new_token.archive, new_token.person).token
+    >>> print(token_set.getActiveTokenForArchiveAndPerson(
+    ...     new_token.archive, new_token.person).token)
     testtoken
 
 
@@ -181,7 +181,7 @@
 The IArchiveAuthTokenSet.getActiveTokenForArchiveAndPerson() method will
 also not return tokens that have been deactivated:
 
-    >>> print token_set.getActiveTokenForArchiveAndPerson(
-    ...     new_token.archive, new_token.person)
+    >>> print(token_set.getActiveTokenForArchiveAndPerson(
+    ...     new_token.archive, new_token.person))
     None
 

=== modified file 'lib/lp/soyuz/doc/archivepermission.txt'
--- lib/lp/soyuz/doc/archivepermission.txt	2012-12-26 01:32:19 +0000
+++ lib/lp/soyuz/doc/archivepermission.txt	2018-05-27 20:15:09 +0000
@@ -48,22 +48,22 @@
 Ubuntu Team is indeed permissioned to upload to the main archive.  It
 has a number of useful properties that can be checked:
 
-    >>> print main_permission.date_created
+    >>> print(main_permission.date_created)
     2006-10-16...
 
-    >>> print main_permission.archive.displayname
+    >>> print(main_permission.archive.displayname)
     Primary Archive for Ubuntu Linux
 
     >>> main_permission.permission
     <DBItem ArchivePermissionType.UPLOAD, (1) Archive Upload Rights>
 
-    >>> print main_permission.person.name
+    >>> print(main_permission.person.name)
     ubuntu-team
 
-    >>> print main_permission.component_name
+    >>> print(main_permission.component_name)
     main
 
-    >>> print main_permission.source_package_name
+    >>> print(main_permission.source_package_name)
     None
 
 The checkAuthenticated() call is also able to check someone's
@@ -131,7 +131,7 @@
     >>> uploaders = permission_set.uploadersForComponent(
     ...     ubuntu.main_archive, main_component)
     >>> for uploader in sorted(uploaders, key=operator.attrgetter("id")):
-    ...     print uploader.person.name
+    ...     print(uploader.person.name)
     ubuntu-team
 
 The component argument can also be a string type and it's converted
@@ -153,7 +153,7 @@
 
     >>> uploaders = permission_set.uploadersForComponent(ubuntu.main_archive)
     >>> for uploader in sorted(uploaders, key=operator.attrgetter("id")):
-    ...     print uploader.person.name, uploader.component.name
+    ...     print(uploader.person.name, uploader.component.name)
     ubuntu-team universe
     ubuntu-team restricted
     ubuntu-team main
@@ -166,7 +166,7 @@
     ...         archive, person)
     ...     for permission in sorted(
     ...         permissions, key=operator.attrgetter("id")):
-    ...         print permission.component.name
+    ...         print(permission.component.name)
 
     >>> showComponentUploaders(ubuntu.main_archive, mark)
     universe
@@ -202,7 +202,7 @@
     ...     packages = permission_set.packagesForUploader(
     ...         archive, person)
     ...     for permission in sorted(packages, key=operator.attrgetter("id")):
-    ...         print permission.sourcepackagename.name
+    ...         print(permission.sourcepackagename.name)
 
     >>> carlos = getUtility(IPersonSet).getByName('carlos')
     >>> showPersonsPackages(ubuntu.main_archive, carlos)
@@ -228,7 +228,7 @@
     ...         archive, component)
     ...     for archive_admin in sorted(
     ...         archive_admins, key=operator.attrgetter("id")):
-    ...         print archive_admin.person.name
+    ...         print(archive_admin.person.name)
 
     >>> showQueueAdmins(ubuntu.main_archive, main_component)
     ubuntu-team
@@ -243,7 +243,7 @@
     >>> permissions = permission_set.componentsForQueueAdmin(
     ...     ubuntu.main_archive, name12)
     >>> for permission in sorted(permissions, key=operator.attrgetter("id")):
-    ...     print permission.component.name
+    ...     print(permission.component.name)
     main
     restricted
     universe
@@ -253,7 +253,7 @@
     >>> permissions = permission_set.componentsForQueueAdmin(
     ...     ubuntu.all_distro_archives, no_team)
     >>> for permission in sorted(permissions, key=operator.attrgetter("id")):
-    ...     print permission.component.name
+    ...     print(permission.component.name)
     universe
     multiverse
 
@@ -355,7 +355,7 @@
     ...         archive, pocket, distroseries=distroseries)
     ...     for archive_admin in sorted(
     ...         archive_admins, key=operator.attrgetter("id")):
-    ...         print archive_admin.person.name
+    ...         print(archive_admin.person.name)
 
     >>> new_permission = permission_set.newPocketQueueAdmin(
     ...     ubuntu.main_archive, carlos, PackagePublishingPocket.SECURITY)

=== modified file 'lib/lp/soyuz/doc/archivesubscriber.txt'
--- lib/lp/soyuz/doc/archivesubscriber.txt	2015-07-21 09:04:01 +0000
+++ lib/lp/soyuz/doc/archivesubscriber.txt	2018-05-27 20:15:09 +0000
@@ -81,16 +81,16 @@
 
 The new subscription is returned and reflects the data:
 
-    >>> print new_sub.displayname
+    >>> print(new_sub.displayname)
     Joe Smith's access to PPA named p3a for Celso Providelo
 
-    >>> print new_sub.registrant.name
+    >>> print(new_sub.registrant.name)
     cprov
 
-    >>> print new_sub.description
+    >>> print(new_sub.description)
     subscription for joesmith
 
-    >>> print new_sub.status.name
+    >>> print(new_sub.status.name)
     CURRENT
 
 Subscriptions also contain some date information:
@@ -98,7 +98,7 @@
     >>> new_sub.date_created is not None
     True
 
-    >>> print new_sub.date_expires
+    >>> print(new_sub.date_expires)
     None
 
 An email is sent to the subscribed person when the ArchiveSubscriber
@@ -253,18 +253,18 @@
 
 And retrieve the subscription by subscriber and archive:
 
-    >>> print sub_set.getBySubscriber(
-    ...     new_sub.subscriber)[0].archive.displayname
+    >>> print(sub_set.getBySubscriber(
+    ...     new_sub.subscriber)[0].archive.displayname)
     PPA named p3a for Celso Providelo
 
-    >>> print sub_set.getByArchive(new_sub.archive)[1].subscriber.name
+    >>> print(sub_set.getByArchive(new_sub.archive)[1].subscriber.name)
     joesmith
 
 The getBySubscriber() method takes an optional archive parameter for
 finding a subscription for a particular user in a particular archive:
 
-    >>> print sub_set.getBySubscriber(
-    ...     new_sub.subscriber, new_sub.archive)[0].archive.displayname
+    >>> print(sub_set.getBySubscriber(
+    ...     new_sub.subscriber, new_sub.archive)[0].archive.displayname)
     PPA named p3a for Celso Providelo
 
 By default the getBySubscriber() and getByArchive() methods return
@@ -272,17 +272,17 @@
 
     >>> login('mark@xxxxxxxxxxx')
     >>> for subscription in sub_set.getBySubscriber(new_sub.subscriber):
-    ...     print subscription.archive.displayname
-    ...     print subscription.date_created.date()
+    ...     print(subscription.archive.displayname)
+    ...     print(subscription.date_created.date())
     PPA named p3a for Celso Providelo      2009-02-26
     PPA named p3a for Mark Shuttleworth    2009-02-22
 
 getByArchive() sorts by subscriber name.
 
     >>> for subscription in sub_set.getByArchive(mark_private_ppa):
-    ...     print subscription.subscriber.name
-    ...     print subscription.subscriber.displayname
-    ...     print subscription.date_created.date()
+    ...     print(subscription.subscriber.name)
+    ...     print(subscription.subscriber.displayname)
+    ...     print(subscription.date_created.date())
     joesmith            Joe Smith       2009-02-22
     team-name-...       Team Cprov      2009-02-24
 
@@ -313,8 +313,8 @@
 
     >>> joesmith.join(team_cprov)
     >>> for subscription in sub_set.getBySubscriber(joesmith):
-    ...     print subscription.archive.displayname
-    ...     print subscription.description
+    ...     print(subscription.archive.displayname)
+    ...     print(subscription.description)
     PPA named p3a for Celso Providelo        subscription for joesmith
     PPA named p3a for Mark Shuttleworth      Access for cprov team
 
@@ -336,8 +336,8 @@
     ...             token_text = token.token
     ...         else:
     ...             token_text = "None"
-    ...         print subscription.archive.displayname
-    ...         print token_text
+    ...         print(subscription.archive.displayname)
+    ...         print(token_text)
     >>> print_subscriptions_with_tokens(
     ...     sub_set.getBySubscriberWithActiveToken(joesmith))
     PPA named p3a for Celso Providelo            test_token
@@ -347,7 +347,7 @@
 for the activated tokens.
 
     >>> for url in joesmith.getArchiveSubscriptionURLs(joesmith):
-    ...     print url
+    ...     print(url)
     http://joesmith:test_token@xxxxxxxxxxxxxxxxxxxxxxxxx/cprov/p3a/ubuntu
 
 This method can only be used by someone with launchpad.Edit on the context
@@ -435,10 +435,10 @@
     >>> new_sub.date_cancelled is not None
     True
 
-    >>> print new_sub.cancelled_by.name
+    >>> print(new_sub.cancelled_by.name)
     cprov
 
-    >>> print new_sub.status.name
+    >>> print(new_sub.status.name)
     CANCELLED
 
 We can do this as an admin too:
@@ -458,8 +458,8 @@
 via the cprov_team:
 
     >>> for subscription in sub_set.getBySubscriber(joesmith):
-    ...     print subscription.archive.displayname
-    ...     print subscription.description
+    ...     print(subscription.archive.displayname)
+    ...     print(subscription.description)
     PPA named p3a for Mark Shuttleworth      Access for cprov team
 
     >>> subscription = sub_set.getBySubscriber(joesmith).first()
@@ -468,7 +468,7 @@
 currently include Joe:
 
     >>> for person, email in subscription.getNonActiveSubscribers():
-    ...     print person.displayname, email.email
+    ...     print(person.displayname, email.email)
     Celso Providelo   celso.providelo@xxxxxxxxxxxxx
     Joe Smith         joe@xxxxxxxxxxx
     John Smith        john@xxxxxxxxxxx
@@ -479,7 +479,7 @@
 
     >>> joesmith_token = mark_private_ppa.newAuthToken(joesmith)
     >>> for person, email in subscription.getNonActiveSubscribers():
-    ...     print person.displayname
+    ...     print(person.displayname)
     Celso Providelo
     John Smith
 
@@ -493,7 +493,7 @@
     >>> subscription = mark_private_ppa.newSubscription(
     ...     harrysmith, mark, description=u"subscription for joesmith")
     >>> for person, email in subscription.getNonActiveSubscribers():
-    ...     print person.displayname
+    ...     print(person.displayname)
     Harry Smith
 
 If Harry activates a token for his new subscription then
@@ -501,7 +501,7 @@
 "active".
 
     >>> harry_token = mark_private_ppa.newAuthToken(harrysmith)
-    >>> print subscription.getNonActiveSubscribers().count()
+    >>> print(subscription.getNonActiveSubscribers().count())
     0
 
 If the subscription is for a group which itself contains a group, all
@@ -513,7 +513,7 @@
     >>> subscription = mark_private_ppa.newSubscription(
     ...     launchpad_devs, mark, description=u"LP team too")
     >>> for person, email in subscription.getNonActiveSubscribers():
-    ...     print person.displayname
+    ...     print(person.displayname)
     Celso Providelo
     John Smith
     Foo Bar

=== modified file 'lib/lp/soyuz/doc/binarypackagerelease.txt'
--- lib/lp/soyuz/doc/binarypackagerelease.txt	2014-04-24 06:45:51 +0000
+++ lib/lp/soyuz/doc/binarypackagerelease.txt	2018-05-27 20:15:09 +0000
@@ -50,7 +50,7 @@
 
     >>> import operator
     >>> for name in sorted(names, key=operator.attrgetter('name')):
-    ...     print name.name
+    ...     print(name.name)
     mozilla-firefox
     pmount
 
@@ -135,7 +135,7 @@
 
     >>> def print_files(binary):
     ...     for bin_file in binary.files:
-    ...         print bin_file.libraryfile.filename, bin_file.filetype.name
+    ...         print(bin_file.libraryfile.filename, bin_file.filetype.name)
 
     >>> print_files(a_binary)
     foo-bin_666_all.deb DEB

=== modified file 'lib/lp/soyuz/doc/build-failedtoupload-workflow.txt'
--- lib/lp/soyuz/doc/build-failedtoupload-workflow.txt	2015-07-14 10:57:46 +0000
+++ lib/lp/soyuz/doc/build-failedtoupload-workflow.txt	2018-05-27 20:15:09 +0000
@@ -25,13 +25,13 @@
 
   >>> failedtoupload_candidate = buildset.getByID(22)
 
-  >>> print failedtoupload_candidate.title
+  >>> print(failedtoupload_candidate.title)
   i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE
 
-  >>> print failedtoupload_candidate.status.name
+  >>> print(failedtoupload_candidate.status.name)
   FAILEDTOUPLOAD
 
-  >>> print failedtoupload_candidate.upload_log.filename
+  >>> print(failedtoupload_candidate.upload_log.filename)
   upload_22_log.txt
 
 FAILEDTOUPLOAD notification requires 'extra_info' argument to be filled:
@@ -79,7 +79,7 @@
   'mark@xxxxxxxxxxx'
 
   >>> notification_body = build_notification.get_payload()
-  >>> print notification_body #doctest: -NORMALIZE_WHITESPACE
+  >>> print(notification_body) #doctest: -NORMALIZE_WHITESPACE
   <BLANKLINE>
    * Source Package: cdrkit
    * Version: 1.0
@@ -109,12 +109,12 @@
 
 The other notifications are similar except for the footer.
 
-  >>> print notifications[1].get_payload()
+  >>> print(notifications[1].get_payload())
   <BLANKLINE>
   ...
   You are receiving this email because you are a buildd administrator.
   <BLANKLINE>
-  >>> print notifications[2].get_payload()
+  >>> print(notifications[2].get_payload())
   <BLANKLINE>
   ...
   You are receiving this email because you created this version of this

=== modified file 'lib/lp/soyuz/doc/build-files.txt'
--- lib/lp/soyuz/doc/build-files.txt	2013-01-22 02:06:59 +0000
+++ lib/lp/soyuz/doc/build-files.txt	2018-05-27 20:15:09 +0000
@@ -29,7 +29,7 @@
  * Build upload logs: '_log.txt';
  * Built files: '*deb';
 
-    >>> print build.title
+    >>> print(build.title)
     i386 build of test-pkg 1.0 in ubuntutest breezy-autotest RELEASE
 
 Unsupported filename lookups also result in a `NotFoundError`.
@@ -37,19 +37,19 @@
     >>> build.getFileByName('biscuit.cookie')
     Traceback (most recent call last):
     ...
-    NotFoundError: 'biscuit.cookie'
+    NotFoundError: u'biscuit.cookie'
 
 And unreachable files in `NotFoundError`.
 
     >>> build.getFileByName('boing.changes')
     Traceback (most recent call last):
     ...
-    NotFoundError: 'boing.changes'
+    NotFoundError: u'boing.changes'
 
 Retrieving a binary changesfile.  "test_1.0_i386.changes" is created when
 SoyuzTestPublisher creates the "test" binary publication.
 
-    >>> print build.upload_changesfile.filename
+    >>> print(build.upload_changesfile.filename)
     test_1.0_i386.changes
 
     >>> build.upload_changesfile == build.getFileByName(

=== modified file 'lib/lp/soyuz/doc/buildd-mass-retry.txt'
--- lib/lp/soyuz/doc/buildd-mass-retry.txt	2011-12-29 05:29:36 +0000
+++ lib/lp/soyuz/doc/buildd-mass-retry.txt	2018-05-27 20:15:09 +0000
@@ -36,7 +36,7 @@
     >>> stdout, stderr = process.communicate()
     >>> process.returncode
     0
-    >>> print stderr
+    >>> print(stderr)
     INFO    Creating lockfile: ...
     INFO    Initializing Build Mass-Retry for
     'The Hoary Hedgehog Release/RELEASE'
@@ -62,7 +62,7 @@
 Let's mark the build from the previous run superseded.
 
     >>> pub.status = PackagePublishingStatus.SUPERSEDED
-    >>> print build.current_source_publication
+    >>> print(build.current_source_publication)
     None
     >>> transaction.commit()
 
@@ -74,7 +74,7 @@
     >>> stdout, stderr = process.communicate()
     >>> process.returncode
     0
-    >>> print stderr
+    >>> print(stderr)
     INFO    Creating lockfile: ...
     INFO    Initializing Build Mass-Retry for
     'The Hoary Hedgehog Release/RELEASE'
@@ -103,7 +103,7 @@
     >>> stdout, stderr = process.communicate()
     >>> process.returncode
     0
-    >>> print stderr
+    >>> print(stderr)
     INFO    Creating lockfile: ...
     INFO    Initializing Build Mass-Retry for
     'The Hoary Hedgehog Release for hppa (hppa)/RELEASE'
@@ -124,7 +124,7 @@
     >>> stdout, stderr = process.communicate()
     >>> process.returncode
     0
-    >>> print stderr
+    >>> print(stderr)
     INFO    Creating lockfile: ...
     INFO    Initializing Build Mass-Retry for
     'The Hoary Hedgehog Release/RELEASE'

=== modified file 'lib/lp/soyuz/doc/closing-bugs-from-changelogs.txt'
--- lib/lp/soyuz/doc/closing-bugs-from-changelogs.txt	2015-07-21 09:04:01 +0000
+++ lib/lp/soyuz/doc/closing-bugs-from-changelogs.txt	2018-05-27 20:15:09 +0000
@@ -115,12 +115,12 @@
     >>> switch_dbuser('launchpad')
     >>> pmount_bug = getUtility(IBugSet).get(pmount_bug_id)
     >>> last_comment = pmount_bug.messages[-1]
-    >>> print pmount_release.creator.displayname
+    >>> print(pmount_release.creator.displayname)
     Mark Shuttleworth
-    >>> print last_comment.owner.displayname
+    >>> print(last_comment.owner.displayname)
     Launchpad Janitor
 
-    >>> print pmount_release.changelog_entry
+    >>> print(pmount_release.changelog_entry)
     pmount (0.1-1) hoary; urgency=low
     <BLANKLINE>
      * Fix description (Malone #1)
@@ -129,7 +129,7 @@
     <BLANKLINE>
      -- Sample Person <test@xxxxxxxxxxxxx> Tue, 7 Feb 2006 12:10:08 +0300
 
-    >>> print last_comment.text_contents
+    >>> print(last_comment.text_contents)
     This bug was fixed in the package pmount - 0.1-1
     <BLANKLINE>
     ---------------
@@ -148,9 +148,9 @@
     >>> from lp.bugs.model.bugnotification import BugNotification
     >>> notifications = BugNotification.select(orderBy='id')
     >>> for notification in notifications[-2:]:
-    ...     print "From %s:\n%s\n" % (
+    ...     print("From %s:\n%s\n" % (
     ...         notification.message.owner.displayname,
-    ...         notification.message.text_contents)
+    ...         notification.message.text_contents))
     From Launchpad Janitor:
     ** Changed in: pmount (Ubuntu)
            Status: New => Fix Released
@@ -190,15 +190,15 @@
 
     >>> def close_bugs_and_check_status(bug_id_list, queue_item):
     ...     """Close bugs, reporting status before and after."""
-    ...     print "Before:"
+    ...     print("Before:")
     ...     for bug_id in bug_id_list:
-    ...         print print_single_task_status(bug_id)
+    ...         print(print_single_task_status(bug_id))
     ...     switch_dbuser(test_dbuser)
     ...     close_bugs_for_queue_item(queue_item)
     ...     switch_dbuser('launchpad')
-    ...     print "After:"
+    ...     print("After:")
     ...     for bug_id in bug_id_list:
-    ...         print print_single_task_status(bug_id)
+    ...         print(print_single_task_status(bug_id))
 
 
 Uploads to pocket PROPOSED should not close bugs, see bug #125279 for

=== modified file 'lib/lp/soyuz/doc/components-and-sections.txt'
--- lib/lp/soyuz/doc/components-and-sections.txt	2015-10-05 06:34:17 +0000
+++ lib/lp/soyuz/doc/components-and-sections.txt	2018-05-27 20:15:09 +0000
@@ -20,7 +20,7 @@
 
 Test some attributes:
 
- >>> print main.id, main.name
+ >>> print(main.id, main.name)
  1 main
 
 Check if the instance corresponds to the declared interface:
@@ -41,7 +41,7 @@
 Test iteration over the sampledata default components:
 
  >>> for c in component_set:
- ...    print c.name
+ ...    print(c.name)
  main
  restricted
  universe
@@ -52,18 +52,18 @@
 
 Test __getitem__ method, retrieving a component by name:
 
- >>> print component_set['universe'].name
+ >>> print(component_set['universe'].name)
  universe
 
 Test get method, retrieving a component by its id:
 
- >>> print component_set.get(2).name
+ >>> print(component_set.get(2).name)
  restricted
 
 New component creation for a given name:
 
  >>> new_comp = component_set.new('test')
- >>> print new_comp.name
+ >>> print(new_comp.name)
  test
 
 Ensuring a component (if not found, create it):
@@ -86,7 +86,7 @@
 
 Test some attributes:
 
- >>> print base.id, base.name
+ >>> print(base.id, base.name)
  1 base
 
 Check if the instance corresponds to the declared interface:
@@ -107,7 +107,7 @@
 Test iteration over the sampledata default sections:
 
  >>> for s in section_set:
- ...    print s.name
+ ...    print(s.name)
  base
  web
  editors
@@ -148,18 +148,18 @@
 
 Test __getitem__ method, retrieving a section by name:
 
- >>> print section_set['science'].name
+ >>> print(section_set['science'].name)
  science
 
 Test get method, retrieving a section by its id:
 
- >>> print section_set.get(2).name
+ >>> print(section_set.get(2).name)
  web
 
 New section creation for a given name:
 
  >>> new_sec = section_set.new('test')
- >>> print new_sec.name
+ >>> print(new_sec.name)
  test
 
 Ensuring a section (if not found, create it):

=== modified file 'lib/lp/soyuz/doc/distribution.txt'
--- lib/lp/soyuz/doc/distribution.txt	2015-01-07 00:35:41 +0000
+++ lib/lp/soyuz/doc/distribution.txt	2018-05-27 20:15:09 +0000
@@ -72,7 +72,7 @@
 and its  'contents description' (see package-cache.txt).
 
     >>> for owner in [cprov, mark, no_priv]:
-    ...     print "%s: %s" % (owner.name, owner.archive.description)
+    ...     print("%s: %s" % (owner.name, owner.archive.description))
     cprov: packages to help my friends.
     mark: packages to help the humanity (you know, ubuntu)
     no-priv: I am not allowed to say, I have no privs.
@@ -198,7 +198,7 @@
     >>> warty = ubuntu['warty']
     >>> pocket_release = PackagePublishingPocket.RELEASE
     >>> src_pub = cprov_src.copyTo(warty, pocket_release, cprov.archive)
-    >>> print src_pub.status.name
+    >>> print(src_pub.status.name)
     PENDING
 
     >>> [pending_ppa] = ubuntu.getPendingPublicationPPAs()
@@ -255,7 +255,7 @@
 listed in the PPA pending-publication results.
 
     >>> for pub in pending_binaries:
-    ...     print pub.status.name
+    ...     print(pub.status.name)
     PENDING
     PENDING
 
@@ -302,7 +302,7 @@
 
     >>> ubuntutest = getUtility(IDistributionSet)['ubuntutest']
     >>> for archive in ubuntutest.all_distro_archives:
-    ...     print archive.purpose.title
+    ...     print(archive.purpose.title)
     Primary Archive
     Partner Archive
 
@@ -310,11 +310,11 @@
 component name.  If the component is unknown, None is returned.
 
     >>> partner_archive = ubuntutest.getArchiveByComponent('partner')
-    >>> print partner_archive.displayname
+    >>> print(partner_archive.displayname)
     Partner Archive for Ubuntu Test
 
     >>> other_archive = ubuntutest.getArchiveByComponent('dodgycomponent')
-    >>> print other_archive
+    >>> print(other_archive)
     None
 
 Multiple components, specially the debian-compatibility ones points to
@@ -322,13 +322,13 @@
 their packages in the correct archive.
 
     >>> main_archive = ubuntutest.getArchiveByComponent('main')
-    >>> print main_archive.displayname
+    >>> print(main_archive.displayname)
     Primary Archive for Ubuntu Test
 
     >>> non_free_archive = ubuntutest.getArchiveByComponent('non-free')
-    >>> print non_free_archive.displayname
+    >>> print(non_free_archive.displayname)
     Primary Archive for Ubuntu Test
 
     >>> contrib_archive = ubuntutest.getArchiveByComponent('contrib')
-    >>> print contrib_archive.displayname
+    >>> print(contrib_archive.displayname)
     Primary Archive for Ubuntu Test

=== modified file 'lib/lp/soyuz/doc/distroarchseries.txt'
--- lib/lp/soyuz/doc/distroarchseries.txt	2015-04-20 15:59:52 +0000
+++ lib/lp/soyuz/doc/distroarchseries.txt	2018-05-27 20:15:09 +0000
@@ -34,7 +34,7 @@
 Enabled is a boolean flag that says whether the arch will receive new builds
 and publish them.
 
-    >>> print hoary_i386.enabled
+    >>> print(hoary_i386.enabled)
     True
 
 `DistroSeries.enabled_architectures` is a `ResultSet` containing the
@@ -70,7 +70,7 @@
     1
     >>> results = warty_i386.searchBinaryPackages(text=u'a')
     >>> for dasbp in results:
-    ...     print "%s: %s" % (dasbp.__class__.__name__, dasbp.name)
+    ...     print("%s: %s" % (dasbp.__class__.__name__, dasbp.name))
     DistroArchSeriesBinaryPackageRelease: at
     DistroArchSeriesBinaryPackageRelease: mozilla-firefox
     DistroArchSeriesBinaryPackageRelease: mozilla-firefox
@@ -142,7 +142,7 @@
     >>> pmount_hoary_i386.publishing_history.count()
     3
 
-    >>> print pub.status.name, pub.datesuperseded is not None
+    >>> print(pub.status.name, pub.datesuperseded is not None)
     SUPERSEDED True
 
 
@@ -169,7 +169,7 @@
     ...             result += 'ppa'
     ...         if arch.official or arch.supports_virtualized:
     ...             result += ')'
-    ...         print result
+    ...         print(result)
 
     >>> print_architectures(warty.architectures)
     The Warty Warthog Release for hppa (hppa)
@@ -228,7 +228,7 @@
 The architecture also has a 'chroot_url' attribute directly referencing
 the file.
 
-    >>> print hoary.getDistroArchSeries('hppa').chroot_url
+    >>> print(hoary.getDistroArchSeries('hppa').chroot_url)
     http://.../filename...
     >>> hoary.getDistroArchSeries('hppa').chroot_url == \
     ...     chroot.http_url
@@ -236,7 +236,7 @@
 
 If there is no chroot, chroot_url will be None.
 
-    >>> print hoary.getDistroArchSeries('i386').chroot_url
+    >>> print(hoary.getDistroArchSeries('i386').chroot_url)
     None
 
 `DistroSeries.buildable_architectures` results are ordered

=== modified file 'lib/lp/soyuz/doc/distroarchseriesbinarypackage.txt'
--- lib/lp/soyuz/doc/distroarchseriesbinarypackage.txt	2016-05-19 05:02:59 +0000
+++ lib/lp/soyuz/doc/distroarchseriesbinarypackage.txt	2018-05-27 20:15:09 +0000
@@ -16,7 +16,7 @@
 
 `DistroArchSeriesBinaryPackage`s have a title property:
 
-    >>> print pmount_hoary_i386.title
+    >>> print(pmount_hoary_i386.title)
     pmount binary package in Ubuntu Hoary i386
 
 First, we create a new version of pmount, and a version of mozilla-
@@ -160,7 +160,7 @@
     ...        getUtility(IDistributionSet)['debian']['woody']['i386'])
     >>> pmount_woody_i386 = DistroArchSeriesBinaryPackage(
     ...        deb_wdy_i386, pmount_name)
-    >>> print pmount_woody_i386.distro_source_package
+    >>> print(pmount_woody_i386.distro_source_package)
     None
 
 Check the publishing record of packages returned by 'currentrelease' and

=== modified file 'lib/lp/soyuz/doc/distroarchseriesbinarypackagerelease.txt'
--- lib/lp/soyuz/doc/distroarchseriesbinarypackagerelease.txt	2014-11-27 22:13:36 +0000
+++ lib/lp/soyuz/doc/distroarchseriesbinarypackagerelease.txt	2018-05-27 20:15:09 +0000
@@ -11,18 +11,18 @@
 Grab the relevant DARs and BPRs:
 
   >>> warty = DistroArchSeries.get(1)
-  >>> print warty.distroseries.name
+  >>> print(warty.distroseries.name)
   warty
   >>> hoary = DistroArchSeries.get(6)
-  >>> print hoary.distroseries.name
+  >>> print(hoary.distroseries.name)
   hoary
 
   >>> mf = BinaryPackageRelease.get(12)
-  >>> print mf.binarypackagename.name
+  >>> print(mf.binarypackagename.name)
   mozilla-firefox
 
   >>> pm = BinaryPackageRelease.get(15)
-  >>> print pm.binarypackagename.name
+  >>> print(pm.binarypackagename.name)
   pmount
 
 Assemble our DARBPRs for fun and profit:
@@ -33,13 +33,16 @@
   >>> pm_hoary = DARBPR(hoary, pm)
 
   >>> for darbpr in [mf_warty, mf_hoary, pm_warty, pm_hoary]:
-  ...   print darbpr.name, darbpr.version, darbpr._latest_publishing_record()
+  ...   print(
+  ...       darbpr.name, darbpr.version, darbpr._latest_publishing_record())
   mozilla-firefox 0.9 <BinaryPackagePublishingHistory at 0x...>
   mozilla-firefox 0.9 None
   pmount 0.1-1 <BinaryPackagePublishingHistory at 0x...>
   pmount 0.1-1 <BinaryPackagePublishingHistory at 0x...>
 
-  >>> print mf_warty.status.title, pm_warty.status.title, pm_hoary.status.title
+  >>> print(
+  ...     mf_warty.status.title, pm_warty.status.title,
+  ...     pm_hoary.status.title)
   Published Superseded Published
 
 
@@ -51,15 +54,15 @@
     >>> warty_i386 = getUtility(IDistributionSet)['ubuntu']['warty']['i386']
 
     >>> warty_i386_pmount = warty_i386.getBinaryPackage('pmount')
-    >>> print warty_i386_pmount.title
+    >>> print(warty_i386_pmount.title)
     pmount binary package in Ubuntu Warty i386
 
     >>> pmount_release_in_warty = warty_i386_pmount['0.1-1']
-    >>> print pmount_release_in_warty.title
+    >>> print(pmount_release_in_warty.title)
     pmount 0.1-1 (i386 binary) in ubuntu warty
 
     >>> parent = (
     ...     pmount_release_in_warty.distroarchseriesbinarypackage)
-    >>> print parent.title
+    >>> print(parent.title)
     pmount binary package in Ubuntu Warty i386
 

=== modified file 'lib/lp/soyuz/doc/distroseriesbinarypackage.txt'
--- lib/lp/soyuz/doc/distroseriesbinarypackage.txt	2014-11-27 22:13:36 +0000
+++ lib/lp/soyuz/doc/distroseriesbinarypackage.txt	2018-05-27 20:15:09 +0000
@@ -21,29 +21,29 @@
 
 It has a name, summary, description and title:
 
-    >>> print firefox_dsbp.name
+    >>> print(firefox_dsbp.name)
     mozilla-firefox
 
-    >>> print firefox_dsbp.summary
+    >>> print(firefox_dsbp.summary)
     Mozilla Firefox Web Browser
 
-    >>> print firefox_dsbp.description
+    >>> print(firefox_dsbp.description)
     Mozilla Firefox Web Browser is .....
 
-    >>> print firefox_dsbp.title
+    >>> print(firefox_dsbp.title)
     Binary package "mozilla-firefox" in ubuntu warty
 
-    >>> print firefox_dsbp.distribution.name
+    >>> print(firefox_dsbp.distribution.name)
     ubuntu
 
 It provides the current publishings for the binary package in the
 distro series (ordered by architecture then datecreated):
 
     >>> for published in firefox_dsbp.current_publishings:
-    ...     print "%s %s in %s" % (
+    ...     print("%s %s in %s" % (
     ...         published.distroarchseriesbinarypackagerelease.name,
     ...         published.distroarchseriesbinarypackagerelease.version,
-    ...         published.distroarchseries.architecturetag)
+    ...         published.distroarchseries.architecturetag))
     mozilla-firefox 0.9 in hppa
     mozilla-firefox 0.9 in i386
     mozilla-firefox 1.0 in i386
@@ -56,7 +56,7 @@
 
 It also provides access to the last DistributionSourcePackageRelease:
 
-    >>> print firefox_dsbp.last_sourcepackagerelease.title
+    >>> print(firefox_dsbp.last_sourcepackagerelease.title)
     iceweasel 1.0 source package in Ubuntu
 
 If a DistroSeriesBinaryPackage doesn't have a DistroSeriesPackageCache,
@@ -75,11 +75,11 @@
     >>> firefox_hoary_dsbp.current_publishings
     []
     
-    >>> print firefox_hoary_dsbp.last_published
+    >>> print(firefox_hoary_dsbp.last_published)
     None
 
 In this case, the last DistributionSourcePackageRelease will also be None:
 
-    >>> print firefox_hoary_dsbp.last_sourcepackagerelease
+    >>> print(firefox_hoary_dsbp.last_sourcepackagerelease)
     None
 

=== modified file 'lib/lp/soyuz/doc/distroseriesqueue-notify.txt'
--- lib/lp/soyuz/doc/distroseriesqueue-notify.txt	2016-03-03 16:16:16 +0000
+++ lib/lp/soyuz/doc/distroseriesqueue-notify.txt	2018-05-27 20:15:09 +0000
@@ -10,7 +10,7 @@
 
     >>> from lp.soyuz.interfaces.queue import IPackageUploadSet
     >>> netapplet_upload = getUtility(IPackageUploadSet)[3]
-    >>> print netapplet_upload.displayname
+    >>> print(netapplet_upload.displayname)
     netapplet
 
 Set up some library files for the netapplet source package.  These are
@@ -84,7 +84,7 @@
 
 The mail body contains a list of files that were accepted:
 
-    >>> print notification.get_payload(0) # doctest: -NORMALIZE_WHITESPACE
+    >>> print(notification.get_payload(0)) # doctest: -NORMALIZE_WHITESPACE
     From nobody ...
     ...
     NEW: netapplet_1.0-1.dsc
@@ -156,7 +156,7 @@
     ...         key=to_lower)
 
     >>> for msg in msgs:
-    ...     print msg['To']
+    ...     print(msg['To'])
     Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx>
     Foo Bar <foo.bar@xxxxxxxxxxxxx>
     autotest_changes@xxxxxxxxxx
@@ -165,26 +165,26 @@
 uploader and the Debian derivatives address for the package uploaded.
 
     >>> for msg in msgs:
-    ...     print extract_addresses(msg['Bcc'])
-    ['Root <root@localhost>']
-    ['Root <root@localhost>']
-    ['netapplet_derivatives@xxxxxxxxxxxxxxxxxxxxxx', 'Root <root@localhost>']
+    ...     print(extract_addresses(msg['Bcc']))
+    [u'Root <root@localhost>']
+    [u'Root <root@localhost>']
+    [u'netapplet_derivatives@xxxxxxxxxxxxxxxxxxxxxx', u'Root <root@localhost>']
 
 The mail 'From:' addresses are the uploader (for acknowledgements sent to
 the uploader and the changer) and the changer.
 
     >>> for msg in msgs:
-    ...     print msg['From']
+    ...     print(msg['From'])
     Root <root@localhost>
     Root <root@localhost>
     Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx>
 
-    >>> print msgs[0]['Subject']
+    >>> print(msgs[0]['Subject'])
     [ubuntu/breezy-autotest] netapplet 0.99.6-1 (Accepted)
 
 The mail body contains the same list of files again:
 
-    >>> print msgs[0].get_payload(0) # doctest: -NORMALIZE_WHITESPACE
+    >>> print(msgs[0].get_payload(0)) # doctest: -NORMALIZE_WHITESPACE
     From nobody ...
     ...
      OK: netapplet_1.0-1.dsc
@@ -237,24 +237,24 @@
 
 The mail headers are the same as before:
 
-    >>> print changer_notification['To']
+    >>> print(changer_notification['To'])
     Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx>
-    >>> print signer_notification['To']
+    >>> print(signer_notification['To'])
     Foo Bar <foo.bar@xxxxxxxxxxxxx>
 
-    >>> print changer_notification['Bcc']
+    >>> print(changer_notification['Bcc'])
     Root <root@localhost>
-    >>> print signer_notification['Bcc']
+    >>> print(signer_notification['Bcc'])
     Root <root@localhost>
 
-    >>> print changer_notification['Subject']
+    >>> print(changer_notification['Subject'])
     [ubuntu/breezy-autotest] netapplet 0.99.6-1 (New)
-    >>> print signer_notification['Subject']
+    >>> print(signer_notification['Subject'])
     [ubuntu/breezy-autotest] netapplet 0.99.6-1 (New)
 
 The mail body contains the same list of files again:
 
-    >>> print changer_notification.get_payload(0)
+    >>> print(changer_notification.get_payload(0))
     ... # doctest: -NORMALIZE_WHITESPACE
     From nobody ...
     ...
@@ -271,7 +271,7 @@
     You are receiving this email because you are the most recent person
     listed in this package's changelog.
     <BLANKLINE>
-    >>> print signer_notification.get_payload(0)
+    >>> print(signer_notification.get_payload(0))
     ... # doctest: -NORMALIZE_WHITESPACE
     From nobody ...
     ...

=== modified file 'lib/lp/soyuz/doc/distroseriesqueue-translations.txt'
--- lib/lp/soyuz/doc/distroseriesqueue-translations.txt	2016-11-17 16:46:04 +0000
+++ lib/lp/soyuz/doc/distroseriesqueue-translations.txt	2018-05-27 20:15:09 +0000
@@ -108,7 +108,7 @@
 
     >>> from lp.registry.model.sourcepackage import SourcePackage
     >>> dapper_pmount = SourcePackage(pmount_sourcepackagename, dapper)
-    >>> print len(dapper_pmount.getLatestTranslationsUploads())
+    >>> print(len(dapper_pmount.getLatestTranslationsUploads()))
     0
 
     >>> success = pmount_upload.do_accept(build=build)
@@ -124,7 +124,7 @@
 # no longer exists and this content is impossible to check at the moment
 # since no email is generated because the recipients are not LP Persons.
 # (So why is it being checked in the first place?)
-#>>> print pmount_upload.getNotificationSummary()
+#>>> print(pmount_upload.getNotificationSummary())
 #NEW: pmount_0.9.7-2ubuntu2_amd64.deb
 #OK: pmount_0.9.7-2ubuntu2_amd64_translations.tar.gz
 
@@ -133,7 +133,7 @@
 
     >>> latest_translations_uploads = list(
     ...     dapper_pmount.getLatestTranslationsUploads())
-    >>> print len(latest_translations_uploads)
+    >>> print(len(latest_translations_uploads))
     1
 
 We'll get back to that uploaded file later.
@@ -184,9 +184,9 @@
 the spph creator is the requester.
 
     >>> for entry in translation_import_queue.getAllEntries(target=ubuntu):
-    ...     print '%s/%s by %s: %s' % (
+    ...     print('%s/%s by %s: %s' % (
     ...         entry.distroseries.name, entry.sourcepackagename.name,
-    ...         entry.importer.name, entry.path)
+    ...         entry.importer.name, entry.path))
     dapper/pmount by john-doe: po/es_ES.po
     dapper/pmount by john-doe: po/ca.po
     dapper/pmount by john-doe: po/de.po
@@ -243,13 +243,13 @@
 As we can see from the translation import queue content, as the publication
 has no creator specified, it falls back to rosetta-admins as the requester.
 
-    >>> print spph.creator
+    >>> print(spph.creator)
     None
 
     >>> for entry in translation_import_queue.getAllEntries(target=ubuntu):
-    ...     print '%s/%s by %s: %s' % (
+    ...     print('%s/%s by %s: %s' % (
     ...         entry.distroseries.name, entry.sourcepackagename.name,
-    ...         entry.importer.name, entry.path)
+    ...         entry.importer.name, entry.path))
     dapper/pmount by rosetta-admins: po/es_ES.po
     dapper/pmount by rosetta-admins: po/ca.po
     dapper/pmount by rosetta-admins: po/de.po
@@ -281,9 +281,9 @@
 As we can see from the translation import queue content.
 
     >>> for entry in translation_import_queue.getAllEntries(target=ubuntu):
-    ...     print '%s/%s by %s: %s' % (
+    ...     print('%s/%s by %s: %s' % (
     ...         entry.distroseries.name, entry.sourcepackagename.name,
-    ...         entry.importer.name, entry.path)
+    ...         entry.importer.name, entry.path))
     dapper/pmount by rosetta-admins: po/es_ES.po
     dapper/pmount by rosetta-admins: po/ca.po
     dapper/pmount by rosetta-admins: po/de.po
@@ -394,7 +394,7 @@
     ...     if entry.name.endswith('.po') or entry.name.endswith('.pot')
     ...     ])
     >>> for filename in translation_files:
-    ...     print filename
+    ...     print(filename)
     ./source/po/ca.po
     ./source/po/cs.po
     ./source/po/de.po

=== modified file 'lib/lp/soyuz/doc/distroseriesqueue.txt'
--- lib/lp/soyuz/doc/distroseriesqueue.txt	2015-09-04 12:19:07 +0000
+++ lib/lp/soyuz/doc/distroseriesqueue.txt	2018-05-27 20:15:09 +0000
@@ -90,14 +90,14 @@
     ...         item.setAccepted()
     ...         item.syncUpdate()
     ...     except QueueInconsistentStateError as info:
-    ...         print info
+    ...         print(info)
 
     >>> accepted_queue = hoary.getPackageUploads(PackageUploadStatus.ACCEPTED)
 
     >>> from lp.services.log.logger import FakeLogger
     >>> for item in accepted_queue:
     ...     for source in item.sources:
-    ...         print source.sourcepackagerelease.name
+    ...         print(source.sourcepackagerelease.name)
     ...     pub_records = item.realiseUpload(FakeLogger())
     ed
     DEBUG Publishing source ed/0.2-20 to ubuntu/hoary in ubuntu
@@ -110,7 +110,7 @@
     >>> for release in SourcePackagePublishingHistory.selectBy(
     ...     distroseries=hoary, status=PackagePublishingStatus.PENDING):
     ...     if release.sourcepackagerelease.sourcepackagename.name == "ed":
-    ...         print release.sourcepackagerelease.version
+    ...         print(release.sourcepackagerelease.version)
     0.2-20
 
 
@@ -177,19 +177,19 @@
     >>> qitem.date_created
     datetime.datetime(...)
 
-    >>> print qitem.changesfile.filename
+    >>> print(qitem.changesfile.filename)
     mozilla-firefox_0.9_i386.changes
 
-    >>> print qitem.sourcepackagerelease.name
-    mozilla-firefox
-
-    >>> print qitem.displayname
-    mozilla-firefox
-
-    >>> print qitem.displayversion
+    >>> print(qitem.sourcepackagerelease.name)
+    mozilla-firefox
+
+    >>> print(qitem.displayname)
+    mozilla-firefox
+
+    >>> print(qitem.displayversion)
     0.9
 
-    >>> print qitem.displayarchs
+    >>> print(qitem.displayarchs)
     i386
 
     >>> qitem.sourcepackagerelease
@@ -203,19 +203,19 @@
     >>> custom_item.date_created
     datetime.datetime(...)
 
-    >>> print custom_item.changesfile.filename
-    netapplet-1.0.0.tar.gz
-
-    >>> print custom_item.displayname
-    netapplet-1.0.0.tar.gz
-
-    >>> print custom_item.displayversion
+    >>> print(custom_item.changesfile.filename)
+    netapplet-1.0.0.tar.gz
+
+    >>> print(custom_item.displayname)
+    netapplet-1.0.0.tar.gz
+
+    >>> print(custom_item.displayversion)
     -
 
-    >>> print custom_item.displayarchs
+    >>> print(custom_item.displayarchs)
     raw-translations
 
-    >>> print custom_item.sourcepackagerelease
+    >>> print(custom_item.sourcepackagerelease)
     None
 
 The method getBuildByBuildIDs() will return all the PackageUploadBuild
@@ -223,7 +223,7 @@
 
     >>> ids = (18,19)
     >>> for package_upload_build in qset.getBuildByBuildIDs(ids):
-    ...     print package_upload_build.packageupload.displayname
+    ...     print(package_upload_build.packageupload.displayname)
     mozilla-firefox
     pmount
 
@@ -299,7 +299,7 @@
     >>> items = breezy_autotest.getPackageUploads(PackageUploadStatus.NEW)
     >>> for item in items:
     ...     item.setAccepted()
-    ...     print item.displayname, item.status.name
+    ...     print(item.displayname, item.status.name)
     netapplet-1.0.0.tar.gz ACCEPTED
     netapplet-1.0.0.tar.gz ACCEPTED
     alsa-utils ACCEPTED
@@ -314,7 +314,7 @@
     ...     PackageUploadStatus.ACCEPTED)
     >>> for item in items:
     ...     item.status = PassthroughStatusValue(PackageUploadStatus.NEW)
-    ...     print item.displayname, item.status.name
+    ...     print(item.displayname, item.status.name)
     netapplet-1.0.0.tar.gz NEW
     netapplet-1.0.0.tar.gz NEW
     alsa-utils NEW
@@ -353,9 +353,9 @@
     >>> try:
     ...     item.setAccepted()
     ... except QueueInconsistentStateError as e:
-    ...     print item.displayname, e
+    ...     print(item.displayname, e)
     ... else:
-    ...     print item.displayname, 'ACCEPTED'
+    ...     print(item.displayname, 'ACCEPTED')
     mozilla-firefox Component "hell" is not allowed in breezy-autotest
 
 Check how we treat source upload duplications in UNAPPROVED queue (NEW
@@ -447,7 +447,7 @@
 normally accepted.
 
     >>> item.setAccepted()
-    >>> print item.status.name
+    >>> print(item.status.name)
     ACCEPTED
 
 Roll back modified data:
@@ -467,9 +467,9 @@
     >>> from operator import attrgetter
     >>> def print_queue_items(queue_items):
     ...     for queue_item in queue_items:
-    ...         print "%s  %s  %s" % (
+    ...         print("%s  %s  %s" % (
     ...             queue_item.displayname, queue_item.displayversion,
-    ...             queue_item.displayarchs)
+    ...             queue_item.displayarchs))
 
     >>> queue_items = breezy_autotest.getPackageUploads(
     ...     PackageUploadStatus.NEW, name=u'pmount', version=u'0.1-1',
@@ -577,9 +577,9 @@
     >>> [item] = breezy_autotest.getPackageUploads(
     ...     PackageUploadStatus.NEW, name=u'alsa-utils')
     >>> [source] = item.sources
-    >>> print "%s/%s" % (
+    >>> print("%s/%s" % (
     ...     source.sourcepackagerelease.component.name,
-    ...     source.sourcepackagerelease.section.name)
+    ...     source.sourcepackagerelease.section.name))
     main/base
 
 Overriding to a component not in the allowed_components list results in
@@ -589,9 +589,9 @@
     >>> universe = getUtility(IComponentSet)['universe']
     >>> main = getUtility(IComponentSet)['main']
     >>> web = getUtility(ISectionSet)['web']
-    >>> print item.overrideSource(
+    >>> print(item.overrideSource(
     ...     new_component=restricted, new_section=web,
-    ...     allowed_components=(universe,))
+    ...     allowed_components=(universe,)))
     Traceback (most recent call last):
     ...
     QueueAdminUnauthorizedError: No rights to override to restricted
@@ -599,9 +599,9 @@
 Allowing "restricted" still won't work because the original component
 is "main":
 
-    >>> print item.overrideSource(
+    >>> print(item.overrideSource(
     ...     new_component=restricted, new_section=web,
-    ...     allowed_components=(restricted,))
+    ...     allowed_components=(restricted,)))
     Traceback (most recent call last):
     ...
     QueueAdminUnauthorizedError: No rights to override from main
@@ -609,13 +609,13 @@
 Specifying both main and restricted allows the override to restricted/web.
 overrideSource() returns True if it completed the task.
 
-    >>> print item.overrideSource(
+    >>> print(item.overrideSource(
     ...     new_component=restricted, new_section=web,
-    ...     allowed_components=(main,restricted))
+    ...     allowed_components=(main,restricted)))
     True
-    >>> print "%s/%s" % (
+    >>> print("%s/%s" % (
     ...     source.sourcepackagerelease.component.name,
-    ...     source.sourcepackagerelease.section.name)
+    ...     source.sourcepackagerelease.section.name))
     restricted/web
 
 Similarly for binaries:
@@ -624,10 +624,10 @@
     ...     PackageUploadStatus.NEW, name=u'pmount')
     >>> [build] = item.builds
     >>> [binary_package] = build.build.binarypackages
-    >>> print "%s/%s/%s" % (
+    >>> print("%s/%s/%s" % (
     ...     binary_package.component.name,
     ...     binary_package.section.name,
-    ...     binary_package.priority.title)
+    ...     binary_package.priority.title))
     main/base/Important
 
     >>> from lp.soyuz.enums import PackagePublishingPriority
@@ -636,25 +636,25 @@
     ...     "section": web,
     ...     "priority": PackagePublishingPriority.EXTRA,
     ...     }]
-    >>> print item.overrideBinaries(
-    ...     binary_changes, allowed_components=(universe,))
+    >>> print(item.overrideBinaries(
+    ...     binary_changes, allowed_components=(universe,)))
     Traceback (most recent call last):
     ...
     QueueAdminUnauthorizedError: No rights to override to restricted
 
-    >>> print item.overrideBinaries(
-    ...     binary_changes, allowed_components=(restricted,))
+    >>> print(item.overrideBinaries(
+    ...     binary_changes, allowed_components=(restricted,)))
     Traceback (most recent call last):
     ...
     QueueAdminUnauthorizedError: No rights to override from main
 
-    >>> print item.overrideBinaries(
-    ...     binary_changes, allowed_components=(main,restricted))
+    >>> print(item.overrideBinaries(
+    ...     binary_changes, allowed_components=(main, restricted)))
     True
-    >>> print "%s/%s/%s" % (
+    >>> print("%s/%s/%s" % (
     ...     binary_package.component.name,
     ...     binary_package.section.name,
-    ...     binary_package.priority.title)
+    ...     binary_package.priority.title))
     restricted/web/Extra
 
 
@@ -703,8 +703,8 @@
 
     >>> add_static_xlat_upload()
 
-    >>> print warty.getPackageUploads(
-    ...     custom_type=static_xlat).count()
+    >>> print(warty.getPackageUploads(
+    ...     custom_type=static_xlat).count())
     1
 
 There is also a created_since_date filter that will only return packages
@@ -720,7 +720,7 @@
 
     >>> add_static_xlat_upload()
     >>> uploads = warty.getPackageUploads(custom_type=static_xlat)
-    >>> print uploads.count()
+    >>> print(uploads.count())
     2
 
 Commit a transaction to ensure new DB objects get a later timestamp.
@@ -783,13 +783,13 @@
     >>> queue_source = items[1].sources[0]
     >>> [build] = queue_source.sourcepackagerelease.builds
 
-    >>> print build.title
+    >>> print(build.title)
     i386 build of alsa-utils 1.0.9a-4ubuntu1 in ubuntu hoary RELEASE
 
-    >>> print build.status.name
+    >>> print(build.status.name)
     NEEDSBUILD
 
-    >>> print build.buildqueue_record.lastscore
+    >>> print(build.buildqueue_record.lastscore)
     10
 
 Let's reject something in the queue:

=== modified file 'lib/lp/soyuz/doc/fakepackager.txt'
--- lib/lp/soyuz/doc/fakepackager.txt	2018-02-09 17:35:14 +0000
+++ lib/lp/soyuz/doc/fakepackager.txt	2018-05-27 20:15:09 +0000
@@ -14,7 +14,7 @@
 
     >>> packager = FakePackager('biscuit', '1.0')
 
-    >>> print packager.sandbox_path
+    >>> print(packager.sandbox_path)
     /tmp/fakepackager-...
 
     >>> os.path.exists(packager.sandbox_path)
@@ -23,18 +23,18 @@
 Source 'name' and 'version' and 'gpg_key_fingerprint' are set according to
 the arguments passed in the initialization.
 
-    >>> print packager.name
+    >>> print(packager.name)
     biscuit
 
-    >>> print packager.version
+    >>> print(packager.version)
     1.0
 
-    >>> print packager.gpg_key_fingerprint
+    >>> print(packager.gpg_key_fingerprint)
     None
 
 The upstream directory is known but not yet created.
 
-    >>> print packager.upstream_directory
+    >>> print(packager.upstream_directory)
     /tmp/fakepackager-.../biscuit-1.0
 
     >>> os.path.exists(packager.upstream_directory)
@@ -87,12 +87,12 @@
     >>> packager.buildSource(signed=False)
 
     >>> for changesfile in packager.listAvailableUploads():
-    ...     print changesfile
+    ...     print(changesfile)
     /tmp/fakepackager-.../biscuit_1.0-1_source.changes
 
     >>> changesfile_path = packager.listAvailableUploads()[0]
     >>> changesfile = open(changesfile_path)
-    >>> print changesfile.read()
+    >>> print(changesfile.read())
     Format: ...
     Date: ...
     Source: biscuit
@@ -135,7 +135,7 @@
 
 The error was raised because no signing key was set.
 
-    >>> print packager.gpg_key_fingerprint
+    >>> print(packager.gpg_key_fingerprint)
     None
 
 A GPG key can only be set on initialization so we will have to create a
@@ -148,7 +148,7 @@
 
 GPG key set, now we are able to build a signed version.
 
-    >>> print packager.gpg_key_fingerprint
+    >>> print(packager.gpg_key_fingerprint)
     0xFD311613D941C6DE55737D310E3498675D147547
 
 FakePackager also allows us to include as many versions as it needs
@@ -164,7 +164,7 @@
 basically checking we pass the right arguments to it.
 
     >>> changesfile_path = packager.listAvailableUploads()[1]
-    >>> print os.path.basename(changesfile_path)
+    >>> print(os.path.basename(changesfile_path))
     biscuit_1.0-3_source.changes
 
     >>> content = open(changesfile_path).read()
@@ -202,14 +202,14 @@
 corresponding sandbox directory.
 
     >>> for changesfile in packager.listAvailableUploads():
-    ...     print changesfile
+    ...     print(changesfile)
     /tmp/fakepackager-.../biscuit_1.0-1_source.changes
     /tmp/fakepackager-.../biscuit_1.0-3_source.changes
     /tmp/fakepackager-.../biscuit_1.0-4_source.changes
     /tmp/fakepackager-.../biscuit_1.0-5_source.changes
 
     >>> for changesfile in zeca_packager.listAvailableUploads():
-    ...     print changesfile
+    ...     print(changesfile)
     /tmp/fakepackager-.../zeca_1.0-1_source.changes
     /tmp/fakepackager-.../zeca_1.0-2_source.changes
 
@@ -272,18 +272,18 @@
 automatically accepted, builds are created, the upload is published and
 the source publishing record created is returned.
 
-    >>> print ubuntu.getSourcePackage('zeca')
+    >>> print(ubuntu.getSourcePackage('zeca'))
     None
 
     >>> zeca_pub = zeca_packager.uploadSourceVersion('1.0-1')
 
-    >>> print zeca_pub.displayname, zeca_pub.status.name
+    >>> print(zeca_pub.displayname, zeca_pub.status.name)
     zeca 1.0-1 in hoary PENDING
 
     >>> len(zeca_pub.getBuilds())
     2
 
-    >>> print ubuntu.getSourcePackage('zeca').currentrelease.version
+    >>> print(ubuntu.getSourcePackage('zeca').currentrelease.version)
     1.0-1
 
 Newly uploaded versions will immediately show up as the current
@@ -294,7 +294,7 @@
     >>> len(zeca_pub.getBuilds())
     2
 
-    >>> print ubuntu.getSourcePackage('zeca').currentrelease.version
+    >>> print(ubuntu.getSourcePackage('zeca').currentrelease.version)
     1.0-2
 
 We can change the upload policy for a specific upload, for instance to
@@ -305,7 +305,7 @@
     >>> len(biscuit_pub.getBuilds())
     2
 
-    >>> print ubuntu.getSourcePackage('biscuit').currentrelease.version
+    >>> print(ubuntu.getSourcePackage('biscuit').currentrelease.version)
     1.0-1
 
 Since we are using Foo Bar's GPG key to sign packages, in order to test
@@ -313,7 +313,7 @@
 
     >>> from lp.registry.interfaces.person import IPersonSet
     >>> foobar = getUtility(IPersonSet).getByName('name16')
-    >>> print foobar.archive
+    >>> print(foobar.archive)
     None
 
     >>> from lp.soyuz.enums import ArchivePurpose
@@ -326,10 +326,10 @@
     >>> ppa_pub = packager.uploadSourceVersion(
     ...     '1.0-5', archive=foobar.archive)
 
-    >>> print ppa_pub.archive.displayname
+    >>> print(ppa_pub.archive.displayname)
     PPA for Foo Bar
 
-    >>> print ppa_pub.displayname, ppa_pub.status.name
+    >>> print(ppa_pub.displayname, ppa_pub.status.name)
     biscuit 1.0-5 in hoary PENDING
 
     >>> len(ppa_pub.getBuilds())

=== modified file 'lib/lp/soyuz/doc/gina-multiple-arch.txt'
--- lib/lp/soyuz/doc/gina-multiple-arch.txt	2016-01-26 15:47:37 +0000
+++ lib/lp/soyuz/doc/gina-multiple-arch.txt	2018-05-27 20:15:09 +0000
@@ -80,7 +80,7 @@
     >>> gina_proc = [sys.executable, 'scripts/gina.py', '-q',
     ...              'dapper', 'dapper-updates']
     >>> proc = subprocess.Popen(gina_proc, stderr=subprocess.PIPE)
-    >>> print proc.stderr.read()
+    >>> print(proc.stderr.read())
     WARNING ...
     WARNING No source package bdftopcf (0.99.0-1) listed for bdftopcf (0.99.0-1), scrubbing archive...
     WARNING The archive for dapper-updates/universe doesn't contain a directory for powerpc, skipping
@@ -104,17 +104,17 @@
 
     >>> SourcePackageRelease.select().count() - orig_spr_count
     2
-    >>> print SSPPH.select().count() - orig_sspph_count
+    >>> print(SSPPH.select().count() - orig_sspph_count)
     2
 
 Each source package has its own maintainer (in this case, fabbione and
 porridge):
 
-    >>> print Person.select().count() - orig_person_count
-    2
-    >>> print TeamParticipation.select().count() - orig_tp_count
-    2
-    >>> print EmailAddress.select().count() - orig_email_count
+    >>> print(Person.select().count() - orig_person_count)
+    2
+    >>> print(TeamParticipation.select().count() - orig_tp_count)
+    2
+    >>> print(EmailAddress.select().count() - orig_email_count)
     2
 
 There are 4 binary packages generated by the two builds of the two
@@ -135,9 +135,9 @@
     >>> n = SourcePackageName.selectOneBy(name="ekg")
     >>> ekg = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id,
     ...                                        version="1:1.5-4ubuntu1.2")
-    >>> print ekg.section.name
+    >>> print(ekg.section.name)
     net
-    >>> print ekg.component.name
+    >>> print(ekg.component.name)
     main
 
 And that one of the packages in main is here too:
@@ -145,13 +145,13 @@
     >>> n = BinaryPackageName.selectOneBy(name="libgadu-dev")
     >>> ekg = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id,
     ...                                        version="1:1.5-4ubuntu1.2")
-    >>> print ekg.section.name
+    >>> print(ekg.section.name)
     libdevel
-    >>> print ekg.component.name
+    >>> print(ekg.component.name)
     main
-    >>> print ekg.architecturespecific
+    >>> print(ekg.architecturespecific)
     True
-    >>> print ekg.build.processor.name
+    >>> print(ekg.build.processor.name)
     386
 
 Check that the package it generates in universe was successfully
@@ -162,11 +162,11 @@
     >>> n = BinaryPackageName.selectOneBy(name="ekg")
     >>> ekg = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id,
     ...                                        version="1:1.5-4ubuntu1.2")
-    >>> print ekg.section.name
+    >>> print(ekg.section.name)
     net
-    >>> print ekg.component.name
+    >>> print(ekg.component.name)
     universe
-    >>> print ekg.priority == PackagePublishingPriority.OPTIONAL
+    >>> print(ekg.priority == PackagePublishingPriority.OPTIONAL)
     True
 
 The bdftopcf package is in a bit of a fix. Its binary package is present
@@ -178,15 +178,15 @@
     >>> n = BinaryPackageName.selectOneBy(name="bdftopcf")
     >>> ekg = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id,
     ...                                        version="0.99.0-1")
-    >>> print ekg.section.name
+    >>> print(ekg.section.name)
     x11
-    >>> print ekg.component.name
+    >>> print(ekg.component.name)
     universe
-    >>> print ekg.build.source_package_release.sourcepackagename.name
+    >>> print(ekg.build.source_package_release.sourcepackagename.name)
     bdftopcf
-    >>> print ekg.build.source_package_release.component.name
+    >>> print(ekg.build.source_package_release.component.name)
     main
-    >>> print ekg.build.source_package_release.version
+    >>> print(ekg.build.source_package_release.version)
     0.99.0-1
 
 Check that we are publishing bdftopcf into the correct distroarchseries:
@@ -195,12 +195,12 @@
     >>> dar = DistroArchSeries.selectOneBy(distroseriesID=dapper.id,
     ...          processor_id=processor.id, architecturetag="i386",
     ...          official=True, ownerID=celebs.launchpad_developers.id)
-    >>> print dar.architecturetag
+    >>> print(dar.architecturetag)
     i386
     >>> for entry in SBPPH.selectBy(distroarchseriesID=dar.id,
     ...                             orderBy="binarypackagerelease"):
     ...     package = entry.binarypackagerelease
-    ...     print package.binarypackagename.name, package.version
+    ...     print(package.binarypackagename.name, package.version)
     bdftopcf 0.99.0-1
     ekg 1:1.5-4ubuntu1.2
     libgadu-dev 1:1.5-4ubuntu1.2

=== modified file 'lib/lp/soyuz/doc/gina.txt'
--- lib/lp/soyuz/doc/gina.txt	2017-01-13 12:24:45 +0000
+++ lib/lp/soyuz/doc/gina.txt	2018-05-27 20:15:09 +0000
@@ -129,7 +129,7 @@
 
 Check STDERR for the errors we expected:
 
-    >>> print proc.stderr.read()
+    >>> print(proc.stderr.read())
     ERROR   Error processing package files for clearlooks
     ...
     ExecutionError: Error 2 unpacking source
@@ -210,12 +210,12 @@
     >>> x11p = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id,
     ...                                         version="6.8.99.7-2")
 
-    >>> print x11p.builddependsindep
+    >>> print(x11p.builddependsindep)
     debhelper (>= 4.0.0)
 
 Check if the changelog message was stored correctly:
 
-    >>> print x11p.changelog_entry #doctest: -NORMALIZE_WHITESPACE
+    >>> print(x11p.changelog_entry) #doctest: -NORMALIZE_WHITESPACE
     x11proto-damage (6.8.99.7-2) breezy; urgency=low
     <BLANKLINE>
       * Add dependency on x11proto-fixes-dev.
@@ -228,7 +228,7 @@
 
 Check that the changelog was uploaded to the librarian correctly:
 
-    >>> print x11p.changelog.read()
+    >>> print(x11p.changelog.read())
     x11proto-damage (6.8.99.7-2) breezy; urgency=low
     <BLANKLINE>
       * Add dependency on x11proto-fixes-dev.
@@ -243,7 +243,7 @@
 
 Same for the copyright:
 
-    >>> print x11p.copyright
+    >>> print(x11p.copyright)
     $Id: COPYING,v 1.2 2003/11/05 05:39:58 keithp Exp $
     <BLANKLINE>
     Copyright ... 2003 Keith Packard
@@ -255,7 +255,7 @@
 
     >>> n = SourcePackageName.selectOneBy(name="libcap")
     >>> cap = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id)
-    >>> print cap.dsc
+    >>> print(cap.dsc)
     -----BEGIN PGP SIGNED MESSAGE-----
     Hash: SHA1
     <BLANKLINE>
@@ -278,9 +278,9 @@
     FVJMGmGr+2YLZfF+oRUKcug=
     =bw+A
     -----END PGP SIGNATURE-----
-    >>> print cap.maintainer.displayname
+    >>> print(cap.maintainer.displayname)
     Michael Vogt
-    >>> print cap.dsc_binaries
+    >>> print(cap.dsc_binaries)
     libcap-dev, libcap-bin, libcap1
 
 Test ubuntu-meta in breezy, which was forcefully imported.
@@ -288,8 +288,8 @@
     >>> n = SourcePackageName.selectOneBy(name="ubuntu-meta")
     >>> um = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id,
     ...         version="0.80")
-    >>> print um.section.name, um.architecturehintlist, \
-    ...         um.upload_distroseries.name
+    >>> print(um.section.name, um.architecturehintlist,
+    ...       um.upload_distroseries.name)
     base any breezy
 
 And check that its files actually ended up in the librarian (these sha1sums
@@ -299,7 +299,7 @@
     >>> files = SourcePackageReleaseFile.selectBy(
     ...     sourcepackagereleaseID=cap.id, orderBy="libraryfile")
     >>> for f in files:
-    ...     print f.libraryfile.content.sha1
+    ...     print(f.libraryfile.content.sha1)
     107d5478e72385f714523bad5359efedb5dcc8b2
     0083da007d44c02fd861c1d21579f716490cab02
     e6661aec051ccb201061839d275f2282968d8b93
@@ -309,20 +309,20 @@
 
     >>> n = SourcePackageName.selectOneBy(name="python-pam")
     >>> pp = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id)
-    >>> print pp.component.name
+    >>> print(pp.component.name)
     main
 
 In the hoary Sources, its section is listed as underworld/python. Ensure
 this is cut up correctly:
 
-    >>> print pp.section.name
+    >>> print(pp.section.name)
     python
 
 Make sure that we only imported one db1-compat source package.
 
     >>> n = SourcePackageName.selectOneBy(name="db1-compat")
     >>> db1 = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id)
-    >>> print db1.section.name
+    >>> print(db1.section.name)
     libs
 
 
@@ -333,9 +333,9 @@
 
     >>> for pub in SSPPH.selectBy(
     ...     sourcepackagereleaseID=db1.id, orderBy='distroseries'):
-    ...     print "%s %s %s" % (
+    ...     print("%s %s %s" % (
     ...         pub.distroseries.name, pub.section.name,
-    ...         pub.archive.purpose.name)
+    ...         pub.archive.purpose.name))
     hoary libs PRIMARY
     breezy oldlibs PRIMARY
 
@@ -348,13 +348,13 @@
     - We had 2 errors (out of 10 Sources stanzas) in breezy: python-sqllite
       and util-linux (again, poor thing).
 
-    >>> print SSPPH.select().count() - orig_sspph_count
+    >>> print(SSPPH.select().count() - orig_sspph_count)
     21
 
     >>> new_count = SSPPH.selectBy(
     ...     componentID=1,
     ...     pocket=PackagePublishingPocket.RELEASE).count()
-    >>> print new_count - orig_sspph_main_count
+    >>> print(new_count - orig_sspph_main_count)
     21
 
 
@@ -382,11 +382,11 @@
     >>> from lp.soyuz.model.binarypackagename import BinaryPackageName
     >>> n = BinaryPackageName.selectOneBy(name="rioutil")
     >>> rio = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id)
-    >>> print rio.shlibdeps
+    >>> print(rio.shlibdeps)
     librioutil 1 rioutil
-    >>> print rio.version
+    >>> print(rio.version)
     1.4.4-1.0.1
-    >>> print rio.build.source_package_release.version
+    >>> print(rio.build.source_package_release.version)
     1.4.4-1
 
 Test all the data got to the ed BPR intact, and that the missing
@@ -394,28 +394,28 @@
 
     >>> n = BinaryPackageName.selectOneBy(name="ed")
     >>> ed = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id)
-    >>> print ed.version
+    >>> print(ed.version)
     0.2-20
-    >>> print ed.build.processor.name
+    >>> print(ed.build.processor.name)
     386
-    >>> print ed.build.status
+    >>> print(ed.build.status)
     Successfully built
-    >>> print ed.build.distro_arch_series.processor.name
+    >>> print(ed.build.distro_arch_series.processor.name)
     386
-    >>> print ed.build.distro_arch_series.architecturetag
+    >>> print(ed.build.distro_arch_series.architecturetag)
     i386
-    >>> print ed.priority
+    >>> print(ed.priority)
     Extra
-    >>> print ed.section.name
+    >>> print(ed.section.name)
     editors
-    >>> print ed.summary
+    >>> print(ed.summary)
     The classic unix line editor.
 
 We now check if the Breezy publication record has the correct priority:
 
     >>> ed_pub = SBPPH.selectOneBy(binarypackagereleaseID=ed.id,
     ...                            distroarchseriesID=breezy_i386.id)
-    >>> print ed_pub.priority
+    >>> print(ed_pub.priority)
     Standard
 
 Check binary package libgcj-dev in Breezy. Its version number must differ from
@@ -424,11 +424,11 @@
     >>> n = BinaryPackageName.selectOneBy(name="libgcj-dev")
     >>> lib = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id,
     ...         version="4:4.0.1-3")
-    >>> print lib.version
+    >>> print(lib.version)
     4:4.0.1-3
-    >>> print lib.build.source_package_release.version
+    >>> print(lib.build.source_package_release.version)
     1.28
-    >>> print lib.build.source_package_release.maintainer.displayname
+    >>> print(lib.build.source_package_release.maintainer.displayname)
     Debian GCC maintainers
 
 Check if the udeb was properly parsed and identified:
@@ -436,17 +436,17 @@
     >>> n = BinaryPackageName.selectOneBy(name="archive-copier")
     >>> ac = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id,
     ...         version="0.1.5")
-    >>> print ac.version
+    >>> print(ac.version)
     0.1.5
-    >>> print ac.priority
+    >>> print(ac.priority)
     Standard
-    >>> print ac.section.name
+    >>> print(ac.section.name)
     debian-installer
-    >>> print ac.build.source_package_release.version
+    >>> print(ac.build.source_package_release.version)
     0.1.5
-    >>> print ac.build.source_package_release.maintainer.name
+    >>> print(ac.build.source_package_release.maintainer.name)
     cjwatson
-    >>> print ac.build.processor.name
+    >>> print(ac.build.processor.name)
     386
 
 We check that the binary package publishing override facility works:
@@ -456,9 +456,9 @@
     ...         version="2.1.3-7")
     >>> for pub in BinaryPackagePublishingHistory.selectBy(
     ...     binarypackagereleaseID=db1.id, orderBy='distroarchseries'):
-    ...     print "%s %s %s" % (
+    ...     print("%s %s %s" % (
     ...         pub.distroarchseries.distroseries.name, pub.priority,
-    ...         pub.archive.purpose.name)
+    ...         pub.archive.purpose.name))
     hoary Required PRIMARY
     breezy Optional PRIMARY
 
@@ -475,13 +475,13 @@
 
     >>> from sqlobject import LIKE
     >>> p = Person.selectOne(LIKE(Person.q.name, u"cjwatson%"))
-    >>> print p.name
+    >>> print(p.name)
     cjwatson
-    >>> print Person.select().count() - orig_person_count
-    13
-    >>> print TeamParticipation.select().count() - orig_tp_count
-    13
-    >>> print EmailAddress.select().count() - orig_email_count
+    >>> print(Person.select().count() - orig_person_count)
+    13
+    >>> print(TeamParticipation.select().count() - orig_tp_count)
+    13
+    >>> print(EmailAddress.select().count() - orig_email_count)
     13
 
 
@@ -514,7 +514,7 @@
     >>> gina_proc = [sys.executable, 'scripts/gina.py', '-q',
     ...              'hoary', 'breezy']
     >>> proc = subprocess.Popen(gina_proc, stderr=subprocess.PIPE)
-    >>> print proc.stderr.read()
+    >>> print(proc.stderr.read())
     ERROR   Error processing package files for clearlooks
     ...
     ExecutionError: Error 2 unpacking source
@@ -566,11 +566,11 @@
 
     >>> SourcePackageRelease.select().count() - orig_spr_count
     17
-    >>> print Person.select().count() - orig_person_count
-    13
-    >>> print TeamParticipation.select().count() - orig_tp_count
-    13
-    >>> print EmailAddress.select().count() - orig_email_count
+    >>> print(Person.select().count() - orig_person_count)
+    13
+    >>> print(TeamParticipation.select().count() - orig_tp_count)
+    13
+    >>> print(EmailAddress.select().count() - orig_email_count)
     13
     >>> BinaryPackageRelease.select().count() - orig_bpr_count
     40
@@ -581,7 +581,7 @@
 
     >>> SBPPH.select().count() - orig_sbpph_count
     47
-    >>> print SSPPH.select().count() - orig_sspph_count
+    >>> print(SSPPH.select().count() - orig_sspph_count)
     23
 
 Check that the overrides we did were correctly issued. We can't use
@@ -596,7 +596,7 @@
     ...     """ % sqlvalues(
     ...         x11p, breezy, active_publishing_status),
     ...     orderBy=["-datecreated"])[0]
-    >>> print x11_pub.section.name
+    >>> print(x11_pub.section.name)
     net
     >>> ed_pub = SBPPH.select("""
     ...     binarypackagerelease = %s AND
@@ -605,7 +605,7 @@
     ...     """ % sqlvalues(
     ...         ed, breezy_i386, active_publishing_status),
     ...     orderBy=["-datecreated"])[0]
-    >>> print ed_pub.priority
+    >>> print(ed_pub.priority)
     Extra
     >>> n = SourcePackageName.selectOneBy(name="archive-copier")
     >>> ac = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id,
@@ -617,7 +617,7 @@
     ...     """ % sqlvalues(
     ...         ac, breezy, active_publishing_status),
     ...     orderBy=["-datecreated"])[0]
-    >>> print ac_pub.component.name
+    >>> print(ac_pub.component.name)
     universe
 
 
@@ -666,16 +666,16 @@
 All the publishings will also have the 'partner' component and the
 partner archive:
 
-    >>> print set(sspph.component.name for sspph in source_difference)
-    set([u'partner'])
-
-    >>> print set(sbpph.component.name for sbpph in binary_difference)
-    set([u'partner'])
-
-    >>> print set(sspph.archive.purpose.name for sspph in source_difference)
+    >>> print(set(sspph.component.name for sspph in source_difference))
+    set([u'partner'])
+
+    >>> print(set(sbpph.component.name for sbpph in binary_difference))
+    set([u'partner'])
+
+    >>> print(set(sspph.archive.purpose.name for sspph in source_difference))
     set(['PARTNER'])
 
-    >>> print set(sbpph.archive.purpose.name for sbpph in binary_difference)
+    >>> print(set(sbpph.archive.purpose.name for sbpph in binary_difference))
     set(['PARTNER'])
 
 
@@ -752,7 +752,7 @@
     >>> lenny_sources.count()
     12
 
-    >>> print set([pub.status.name for pub in lenny_sources])
+    >>> print(set([pub.status.name for pub in lenny_sources]))
     set(['PUBLISHED'])
 
 As mentioned before, lenny/i386 is empty, no binaries were imported.
@@ -778,7 +778,7 @@
     ...     sys.executable, 'scripts/gina.py', 'lenny', 'partner']
     >>> proc = subprocess.Popen(gina_proc, stderr=subprocess.PIPE)
 
-    >>> print proc.stderr.read()
+    >>> print(proc.stderr.read())
     INFO    Creating lockfile: /var/lock/launchpad-gina.lock
     ...
     INFO    === Processing debian/lenny/release ===
@@ -797,7 +797,7 @@
 
     >>> gina_proc = [sys.executable, 'scripts/gina.py', '-q', 'bogus']
     >>> proc = subprocess.Popen(gina_proc, stderr=subprocess.PIPE)
-    >>> print proc.stderr.read()
+    >>> print(proc.stderr.read())
     ERROR   Failed to analyze archive for bogoland
     ...
     MangledArchiveError: No archive directory for bogoland/main

=== modified file 'lib/lp/soyuz/doc/hasbuildrecords.txt'
--- lib/lp/soyuz/doc/hasbuildrecords.txt	2012-09-27 02:53:00 +0000
+++ lib/lp/soyuz/doc/hasbuildrecords.txt	2018-05-27 20:15:09 +0000
@@ -43,7 +43,7 @@
     >>> builds = ubuntu['warty'].getBuildRecords(name=u'firefox',
     ...                                          arch_tag='i386')
     >>> for build in builds:
-    ...     print build.title
+    ...     print(build.title)
     i386 build of mozilla-firefox 0.9 in ubuntu warty RELEASE
 
 

=== modified file 'lib/lp/soyuz/doc/package-cache-script.txt'
--- lib/lp/soyuz/doc/package-cache-script.txt	2016-06-02 11:37:23 +0000
+++ lib/lp/soyuz/doc/package-cache-script.txt	2018-05-27 20:15:09 +0000
@@ -43,9 +43,9 @@
   >>> process.returncode
   0
 
-  >>> print stdout
+  >>> print(stdout)
 
-  >>> print stderr
+  >>> print(stderr)
   INFO    Creating lockfile: /var/lock/launchpad-update-cache.lock
   INFO    Updating ubuntu package counters
   INFO    Updating ubuntu main archives

=== modified file 'lib/lp/soyuz/doc/package-cache.txt'
--- lib/lp/soyuz/doc/package-cache.txt	2016-05-26 16:25:52 +0000
+++ lib/lp/soyuz/doc/package-cache.txt	2018-05-27 20:15:09 +0000
@@ -33,7 +33,7 @@
     10
 
     >>> for name in sorted([cache.name for cache in ubuntu_caches]):
-    ...              print name
+    ...     print(name)
     alsa-utils
     cnews
     commercialpackage
@@ -67,7 +67,7 @@
     5
 
     >>> for name in sorted([cache.name for cache in warty_caches]):
-    ...              print name
+    ...     print(name)
     at
     foobar
     linux-2.6.12
@@ -168,7 +168,7 @@
     DEBUG Considering sources linux-source-2.6.15, mozilla-firefox, netapplet
     ...
 
-    >>> print updates
+    >>> print(updates)
     10
 
 Now we see that the 'cdrkit' source is part of the caches and can be
@@ -250,7 +250,7 @@
     DEBUG Considering binaries mozilla-firefox, mozilla-firefox-data, pmount
     DEBUG Committing
 
-    >>> print updates
+    >>> print(updates)
     6
 
 Transaction behaves exactly the same as for Source Caches, except that
@@ -285,15 +285,15 @@
 With empty cache contents in the Archive table we can't even find a PPA by
 owner name.
 
-    >>> print ubuntu.searchPPAs(text=u'cprov').count()
+    >>> print(ubuntu.searchPPAs(text=u'cprov').count())
     0
 
 Sampledata contains stub counters.
 
-    >>> print cprov.archive.sources_cached
+    >>> print(cprov.archive.sources_cached)
     3
 
-    >>> print cprov.archive.binaries_cached
+    >>> print(cprov.archive.binaries_cached)
     3
 
 We have to issue 'updateArchiveCache' to include the owner 'name' and
@@ -304,13 +304,13 @@
 Now Celso's PPA can be found via searches and the package counters got
 reset, reflecting that nothing is cached in the database yet.
 
-    >>> print ubuntu.searchPPAs(text=u'cprov')[0].displayname
+    >>> print(ubuntu.searchPPAs(text=u'cprov')[0].displayname)
     PPA for Celso Providelo
 
-    >>> print cprov.archive.sources_cached
+    >>> print(cprov.archive.sources_cached)
     0
 
-    >>> print cprov.archive.binaries_cached
+    >>> print(cprov.archive.binaries_cached)
     0
 
 The sampledata contains no package caches, so attempts to find 'pmount'
@@ -348,28 +348,28 @@
     >>> cprov.archive.sources_cached == source_updates
     True
 
-    >>> print cprov.archive.sources_cached
+    >>> print(cprov.archive.sources_cached)
     3
 
     >>> cprov.archive.binaries_cached == binary_updates
     True
 
-    >>> print cprov.archive.binaries_cached
+    >>> print(cprov.archive.binaries_cached)
     2
 
-    >>> print ubuntu.searchPPAs(text=u'cprov')[0].displayname
-    PPA for Celso Providelo
-
-    >>> print ubuntu.searchPPAs(text=u'pmount')[0].displayname
-    PPA for Celso Providelo
-
-    >>> print ubuntu.searchPPAs(text=u'firefox')[0].displayname
-    PPA for Celso Providelo
-
-    >>> print ubuntu.searchPPAs(text=u'warty')[0].displayname
-    PPA for Celso Providelo
-
-    >>> print ubuntu.searchPPAs(text=u'shortdesc')[0].displayname
+    >>> print(ubuntu.searchPPAs(text=u'cprov')[0].displayname)
+    PPA for Celso Providelo
+
+    >>> print(ubuntu.searchPPAs(text=u'pmount')[0].displayname)
+    PPA for Celso Providelo
+
+    >>> print(ubuntu.searchPPAs(text=u'firefox')[0].displayname)
+    PPA for Celso Providelo
+
+    >>> print(ubuntu.searchPPAs(text=u'warty')[0].displayname)
+    PPA for Celso Providelo
+
+    >>> print(ubuntu.searchPPAs(text=u'shortdesc')[0].displayname)
     PPA for Celso Providelo
 
 The method which populates the archive caches also cleans the texts up
@@ -401,13 +401,13 @@
 
     >>> cprov.archive.updateArchiveCache()
 
-    >>> print cprov.archive.sources_cached
-    0
-
-    >>> print cprov.archive.binaries_cached
-    0
-
-    >>> print cprov.archive.package_description_cache
+    >>> print(cprov.archive.sources_cached)
+    0
+
+    >>> print(cprov.archive.binaries_cached)
+    0
+
+    >>> print(cprov.archive.package_description_cache)
     celso providelo cprov
 
 We insert a new source cache with texts containing punctuation and
@@ -435,13 +435,13 @@
 Only one source is cached and the 'package_description_cache' only contains
 unique and lowercase words free of any punctuation.
 
-    >>> print cprov.archive.sources_cached
+    >>> print(cprov.archive.sources_cached)
     1
 
-    >>> print cprov.archive.binaries_cached
+    >>> print(cprov.archive.binaries_cached)
     0
 
-    >>> print cprov.archive.package_description_cache
+    >>> print(cprov.archive.package_description_cache)
     ding providelo celso cdrkit cdrkit-bin dong ubuntu cdrkit-extra cprov
 
 Let's remove the unclean cache and update Celso's PPA cache, so
@@ -484,7 +484,7 @@
     ...     commit_chunk=3)
     DEBUG Considering sources unique-from-factory-...
     ...
-    >>> print updates
+    >>> print(updates)
     1
     >>> ubuntu.searchSourcePackages(branch.sourcepackagename.name).count()
     1
@@ -552,8 +552,8 @@
     ...      binaryname="pending-binary", pub_source=pending_source,
     ...      status=PackagePublishingStatus.PENDING)
 
-    >>> print len(
-    ...      set(pub.binarypackagerelease.name for pub in pending_binaries))
+    >>> print(len(
+    ...      set(pub.binarypackagerelease.name for pub in pending_binaries)))
     1
 
 And one source with a single binary in PUBLISHED status.
@@ -566,8 +566,8 @@
     ...      binaryname="published-binary", pub_source=published_source,
     ...      status=PackagePublishingStatus.PUBLISHED)
 
-    >>> print len(
-    ...      set(pub.binarypackagerelease.name for pub in published_binaries))
+    >>> print(len(set(
+    ...     pub.binarypackagerelease.name for pub in published_binaries)))
     1
 
     >>> switch_dbuser(test_dbuser)
@@ -593,8 +593,8 @@
     ...      binaryname="pending-binary", pub_source=deleted_source,
     ...      status=PackagePublishingStatus.DELETED)
 
-    >>> print len(
-    ...      set(pub.binarypackagerelease.name for pub in deleted_binaries))
+    >>> print(len(
+    ...      set(pub.binarypackagerelease.name for pub in deleted_binaries)))
     1
 
     >>> switch_dbuser(test_dbuser)
@@ -649,10 +649,10 @@
     >>> foobar_binary.cache == primary_cache
     True
 
-    >>> print foobar_binary.summary
+    >>> print(foobar_binary.summary)
     main foobar
 
-    >>> print foobar_binary.description
+    >>> print(foobar_binary.description)
     main foobar description
 
 The DistroArchSeriesBinaryPackage.
@@ -663,10 +663,10 @@
     >>> foobar_arch_binary.cache == primary_cache
     True
 
-    >>> print foobar_arch_binary.summary
+    >>> print(foobar_arch_binary.summary)
     main foobar
 
-    >>> print foobar_arch_binary.description
+    >>> print(foobar_arch_binary.description)
     main foobar description
 
 This lookup mechanism will continue to work even after we have added a
@@ -737,14 +737,14 @@
     ...          archive=archive)
     ...      binary_caches = DistroSeriesPackageCache.selectBy(
     ...          archive=archive)
-    ...      print '%d sources cached [%d]' % (
-    ...          archive.sources_cached, source_caches.count())
-    ...      print '%d binaries cached [%d]' % (
-    ...          archive.binaries_cached, binary_caches.count())
+    ...      print('%d sources cached [%d]' % (
+    ...          archive.sources_cached, source_caches.count()))
+    ...      print('%d binaries cached [%d]' % (
+    ...          archive.binaries_cached, binary_caches.count()))
     >>> def print_search_results(text, user=None):
     ...      with lp_dbuser():
     ...          for ppa in ubuntu.searchPPAs(text, user=user):
-    ...              print ppa.displayname
+    ...              print(ppa.displayname)
 
     >>> rebuild_caches(cprov.archive)
 

=== modified file 'lib/lp/soyuz/doc/package-diff.txt'
--- lib/lp/soyuz/doc/package-diff.txt	2017-01-13 13:33:17 +0000
+++ lib/lp/soyuz/doc/package-diff.txt	2018-05-27 20:15:09 +0000
@@ -50,7 +50,7 @@
     >>> verifyObject(IPerson, package_diff.requester)
     True
 
-    >>> print package_diff.requester.displayname
+    >>> print(package_diff.requester.displayname)
     Celso Providelo
 
  * 'from_source', which maps to an `ISourcePackageRelease`, the base
@@ -61,7 +61,7 @@
     >>> verifyObject(ISourcePackageRelease, package_diff.from_source)
     True
 
-    >>> print package_diff.from_source.title
+    >>> print(package_diff.from_source.title)
     pmount - 0.1-1
 
  * 'to_source', which maps to an `ISourcePackageRelease`, the result
@@ -70,31 +70,31 @@
     >>> verifyObject(ISourcePackageRelease, package_diff.to_source)
     True
 
-    >>> print package_diff.to_source.title
+    >>> print(package_diff.to_source.title)
     pmount - 0.1-2
 
 The PackageDiff record is not yet 'performed', so 'status' is PENDING
 and both 'date_fulfilled' and 'diff_content' fields are empty.
 
-    >>> print package_diff.date_fulfilled
-    None
-
-    >>> print package_diff.diff_content
-    None
-
-    >>> print package_diff.status.name
+    >>> print(package_diff.date_fulfilled)
+    None
+
+    >>> print(package_diff.diff_content)
+    None
+
+    >>> print(package_diff.status.name)
     PENDING
 
 IPackageDiff offers a property that returns the 'title' of the diff
 request.
 
-    >>> print package_diff.title
+    >>> print(package_diff.title)
     diff from 0.1-1 to 0.1-2
 
 IPackageDiff has a property which indicates whether a diff content
 should be private or not. See section 'PackageDiff privacy' below.
 
-    >>> print package_diff.private
+    >>> print(package_diff.private)
     False
 
 An attempt to record an already recorded DiffRequest will result in an
@@ -167,7 +167,7 @@
     ...     '1.0-8', policy='sync')
 
     >>> [diff] = biscuit_eight_pub.sourcepackagerelease.package_diffs
-    >>> print diff.title
+    >>> print(diff.title)
     diff from 1.0-1 to 1.0-8
 
 We will release ubuntu/hoary so we can upload to post-RELEASE pockets
@@ -185,7 +185,7 @@
     ...     '1.0-9', policy='sync', suite='hoary-updates')
 
     >>> [diff] = biscuit_nine_pub.sourcepackagerelease.package_diffs
-    >>> print diff.title
+    >>> print(diff.title)
     diff from 1.0-8 to 1.0-9
 
 Now version 1.0-12 gets uploaded to the just opened distroseries. It
@@ -199,7 +199,7 @@
     ...     '1.0-12', policy='sync', suite='breezy-autotest')
 
     >>> [diff] = biscuit_twelve_pub.sourcepackagerelease.package_diffs
-    >>> print diff.title
+    >>> print(diff.title)
     diff from 1.0-8 to 1.0-12
 
 The subsequent version uploaded to hoary-updates will get a diff
@@ -211,7 +211,7 @@
     ...     '1.0-10', policy='sync', suite='hoary-updates')
 
     >>> [diff] = biscuit_ten_pub.sourcepackagerelease.package_diffs
-    >>> print diff.title
+    >>> print(diff.title)
     diff from 1.0-9 to 1.0-10
 
 An upload to another pocket, in this case hoary-proposed, will get a diff
@@ -223,7 +223,7 @@
     ...     '1.0-11', policy='sync', suite='hoary-proposed')
 
     >>> [diff] = biscuit_eleven_pub.sourcepackagerelease.package_diffs
-    >>> print diff.title
+    >>> print(diff.title)
     diff from 1.0-8 to 1.0-11
 
 For testing diffs in the PPA context we need to activate the PPA for
@@ -245,7 +245,7 @@
     ...     '1.0-2', archive=foobar.archive)
 
     >>> [diff] = biscuit_two_pub.sourcepackagerelease.package_diffs
-    >>> print diff.title
+    >>> print(diff.title)
     diff from 1.0-8 (in Ubuntu) to 1.0-2
 
 A subsequent upload in the PPA context will get a diff against 1.0-2,
@@ -257,7 +257,7 @@
     ...     '1.0-3', archive=foobar.archive)
 
     >>> [diff] = biscuit_three_pub.sourcepackagerelease.package_diffs
-    >>> print diff.title
+    >>> print(diff.title)
     diff from 1.0-2 to 1.0-3
 
 
@@ -318,13 +318,13 @@
 'date_fulfilled' and 'diff_content' fields, are empty and 'status' is
 PENDING.
 
-    >>> print diff.status.name
+    >>> print(diff.status.name)
     PENDING
 
-    >>> print diff.date_fulfilled
+    >>> print(diff.date_fulfilled)
     None
 
-    >>> print diff.diff_content
+    >>> print(diff.diff_content)
     None
 
 Performing the diff.
@@ -338,16 +338,16 @@
     >>> diff.date_fulfilled is not None
     True
 
-    >>> print diff.status.name
+    >>> print(diff.status.name)
     COMPLETED
 
-    >>> print diff.diff_content.filename
+    >>> print(diff.diff_content.filename)
     biscuit_1.0-1_1.0-8.diff.gz
 
-    >>> print diff.diff_content.mimetype
+    >>> print(diff.diff_content.mimetype)
     application/gzipped-patch
 
-    >>> print diff.diff_content.restricted
+    >>> print(diff.diff_content.restricted)
     False
 
 Since it stores the diff results in the librarian we need to commit the
@@ -358,7 +358,7 @@
 Now we can compare the package diff outcome to the debdiff output
 (obtained manually on the shell) for the packages in question.
 
-    >>> print get_diff_results(diff)
+    >>> print(get_diff_results(diff))
     --- biscuit-1.0/contents
     +++ biscuit-1.0/contents
     @@ -2,0 +3 @@
@@ -392,10 +392,10 @@
     >>> resp = con.getresponse()
     >>> headers = dict(resp.getheaders())
 
-    >>> print headers['content-encoding']
+    >>> print(headers['content-encoding'])
     gzip
 
-    >>> print headers['content-type']
+    >>> print(headers['content-type'])
     text/plain
 
 
@@ -424,8 +424,8 @@
     ...     diff_first_id = diffs[0].id
     ...     for diff in diff_set:
     ...         id_diff = diff.id - diff_first_id
-    ...         print diff.from_source.name, diff.title, \
-    ...               diff.date_fulfilled is not None, id_diff
+    ...         print(diff.from_source.name, diff.title,
+    ...               diff.date_fulfilled is not None, id_diff)
 
     >>> print_diffs(packagediff_set)
     biscuit diff from 1.0-2 to 1.0-3               False   0
@@ -492,7 +492,7 @@
     >>> staging_ppa_pub = packager.uploadSourceVersion(
     ...     '1.0-1', archive=foobar.archive)
     >>> [diff] = staging_ppa_pub.sourcepackagerelease.package_diffs
-    >>> print diff.title
+    >>> print(diff.title)
     diff from 1.0-1 (in Ubuntu) to 1.0-1
 
 Commit the transaction to make the uploaded files available in
@@ -509,17 +509,17 @@
 The PackageDiff request was correctly performed and the result is an
 empty library file, which is what the user expects.
 
-    >>> print diff.status.name
+    >>> print(diff.status.name)
     COMPLETED
 
     >>> diff.date_fulfilled is not None
     True
 
-    >>> print diff.diff_content.filename
+    >>> print(diff.diff_content.filename)
     staging_1.0-1_1.0-1.diff.gz
 
     >>> get_diff_results(diff)
-    ''
+    u''
 
 Now we will simulate a version collision when generating the diff.
 
@@ -545,7 +545,7 @@
     >>> collision_ppa_pub = packager.uploadSourceVersion(
     ...     '1.0-1', archive=foobar.archive)
     >>> [diff] = collision_ppa_pub.sourcepackagerelease.package_diffs
-    >>> print diff.title
+    >>> print(diff.title)
     diff from 1.0-1 (in Ubuntu) to 1.0-1
 
 Note that, despite having the same name and version, the diff.gz
@@ -563,7 +563,7 @@
 
     >>> distinct_files = [filename for filename, md5 in file_set]
     >>> for filename in sorted(distinct_files):
-    ...     print filename
+    ...     print(filename)
     collision_1.0-1.diff.gz
     collision_1.0-1.diff.gz
     collision_1.0-1.dsc
@@ -586,16 +586,16 @@
 The package-diff subsystem has dealt with the filename conflicts and
 the diff was properly generated.
 
-    >>> print diff.status.name
+    >>> print(diff.status.name)
     COMPLETED
 
     >>> diff.date_fulfilled is not None
     True
 
-    >>> print diff.diff_content.filename
+    >>> print(diff.diff_content.filename)
     collision_1.0-1_1.0-1.diff.gz
 
-    >>> print get_diff_results(diff)
+    >>> print(get_diff_results(diff))
     --- collision-1.0/contents
     +++ collision-1.0/contents
     @@ -2,0 +3 @@
@@ -633,7 +633,7 @@
     >>> fillLibrarianFile(dsc.libraryfile.id)
 
     >>> [broken_diff] = pub.sourcepackagerelease.package_diffs
-    >>> print broken_diff.title
+    >>> print(broken_diff.title)
     diff from 1.0-1 (in Ubuntu) to 1.0-2
 
 With a tainted DSC 'debdiff' cannot do much and fails, resulting in a
@@ -642,13 +642,13 @@
     >>> broken_diff.performDiff()
     >>> transaction.commit()
 
-    >>> print broken_diff.status.name
+    >>> print(broken_diff.status.name)
     FAILED
 
     >>> broken_diff.date_fulfilled is None
     True
 
-    >>> print broken_diff.diff_content
+    >>> print(broken_diff.diff_content)
     None
 
 
@@ -664,27 +664,27 @@
 explain how this mechanism works.
 
     >>> [diff] = biscuit_two_pub.sourcepackagerelease.package_diffs
-    >>> print diff.title
+    >>> print(diff.title)
     diff from 1.0-8 (in Ubuntu) to 1.0-2
 
 The chosen diff is for a source uploaded to a public PPA.
 
-    >>> print diff.to_source.upload_archive.displayname
+    >>> print(diff.to_source.upload_archive.displayname)
     PPA for Foo Bar
 
-    >>> print diff.to_source.upload_archive.private
+    >>> print(diff.to_source.upload_archive.private)
     False
 
 Thus it's also considered public and the generated 'diff_content' is
 stored in the public librarian.
 
-    >>> print diff.private
+    >>> print(diff.private)
     False
 
     >>> diff.performDiff()
     >>> transaction.commit()
 
-    >>> print diff.diff_content.restricted
+    >>> print(diff.diff_content.restricted)
     False
 
 If the diff is attached to a private PPA, the diff becomes 'private' and
@@ -695,11 +695,11 @@
     >>> removeSecurityProxy(diff.to_source).upload_archive = private_ppa
     >>> removeSecurityProxy(biscuit_two_pub).archive = private_ppa
 
-    >>> print diff.private
+    >>> print(diff.private)
     True
 
     >>> diff.performDiff()
     >>> transaction.commit()
 
-    >>> print diff.diff_content.restricted
+    >>> print(diff.diff_content.restricted)
     True

=== modified file 'lib/lp/soyuz/doc/package-meta-classes.txt'
--- lib/lp/soyuz/doc/package-meta-classes.txt	2014-11-10 00:53:02 +0000
+++ lib/lp/soyuz/doc/package-meta-classes.txt	2018-05-27 20:15:09 +0000
@@ -87,8 +87,8 @@
 
     >>> def print_builds(builds):
     ...     for build in builds:
-    ...         print "%s in %s" % (build.source_package_release.name,
-    ...                             build.archive.displayname)
+    ...         print("%s in %s" % (build.source_package_release.name,
+    ...                             build.archive.displayname))
 
 Now we can query the builds:
 

=== modified file 'lib/lp/soyuz/doc/package-relationship-pages.txt'
--- lib/lp/soyuz/doc/package-relationship-pages.txt	2012-01-06 11:08:30 +0000
+++ lib/lp/soyuz/doc/package-relationship-pages.txt	2018-05-27 20:15:09 +0000
@@ -28,8 +28,10 @@
 Note that iterations over PackageRelationshipSet are sorted
 alphabetically according to the relationship 'name':
 
-  >>> [relationship.name for relationship in relationship_set]
-  ['foobar', 'test']
+  >>> for relationship in relationship_set:
+  ...     print(relationship.name)
+  foobar
+  test
 
 It will cause all the relationship contents to be rendered in this order.
 

=== modified file 'lib/lp/soyuz/doc/packagecopyrequest.txt'
--- lib/lp/soyuz/doc/packagecopyrequest.txt	2012-12-26 01:32:19 +0000
+++ lib/lp/soyuz/doc/packagecopyrequest.txt	2018-05-27 20:15:09 +0000
@@ -57,7 +57,7 @@
 components are not set by default. Also, the date started and completed are
 not set either since this is a new package copy request.
 
-    >>> print new_pcr
+    >>> print(new_pcr)
     Package copy request
     source = primary/breezy-autotest/-/RELEASE
     target = our-sample-copy-archive/breezy-autotest/-/RELEASE

=== modified file 'lib/lp/soyuz/doc/packageupload-lookups.txt'
--- lib/lp/soyuz/doc/packageupload-lookups.txt	2014-07-14 16:47:15 +0000
+++ lib/lp/soyuz/doc/packageupload-lookups.txt	2018-05-27 20:15:09 +0000
@@ -47,10 +47,10 @@
     ...         changesfile = build.upload_changesfile
     ...         if package_upload is None or changesfile is None:
     ...            builds_missing_upload.append(builds)
-    ...     print '* %s' % archive.displayname
-    ...     print '%d of %d sources and %d of %d builds missing uploads' % (
+    ...     print('* %s' % archive.displayname)
+    ...     print('%d of %d sources and %d of %d builds missing uploads' % (
     ...        len(sources_missing_upload), len(sources),
-    ...        len(builds_missing_upload), len(builds))
+    ...        len(builds_missing_upload), len(builds)))
 
 As we can see from the results below, most of our sampledata are
 sources and builds directly imported into the system, not
@@ -110,27 +110,27 @@
 The `SourcePackageRelease` 'package_upload' and 'upload_changesfile'
 
     >>> original_source_upload = source.sourcepackagerelease.package_upload
-    >>> print original_source_upload
+    >>> print(original_source_upload)
     <PackageUpload ...>
 
     >>> source_changesfile = source.sourcepackagerelease.upload_changesfile
     >>> original_source_upload.changesfile == source_changesfile
     True
 
-    >>> print source_changesfile.filename
+    >>> print(source_changesfile.filename)
     testing_1.0_source.changes
 
 The `Build` 'package_upload' and 'upload_changesfile'
 
     >>> original_build_upload = build.package_upload
-    >>> print original_build_upload
+    >>> print(original_build_upload)
     <...PackageUpload ...>
 
     >>> build_changesfile = build.upload_changesfile
     >>> original_build_upload.changesfile == build_changesfile
     True
 
-    >>> print build_changesfile.filename
+    >>> print(build_changesfile.filename)
     testing-bin_1.0_i386.changes
 
 The `PackageUpload` lookups are not restricted to the status of the

=== modified file 'lib/lp/soyuz/doc/pocketchroot.txt'
--- lib/lp/soyuz/doc/pocketchroot.txt	2015-10-06 06:48:01 +0000
+++ lib/lp/soyuz/doc/pocketchroot.txt	2018-05-27 20:15:09 +0000
@@ -25,30 +25,30 @@
 Check if getPocketChroot returns None for unknown chroots:
 
   >>> p_chroot = hoary_i386.getPocketChroot()
-  >>> print p_chroot
+  >>> print(p_chroot)
   None
 
 Check if getChroot returns the 'default' argument for unknown chroots:
 
-  >>> hoary_i386.getChroot(default='duuuuh')
-  'duuuuh'
+  >>> print(hoary_i386.getChroot(default='duuuuh'))
+  duuuuh
 
 Invoke addOrUpdateChroot for a missing chroot, so it will insert a new
 record in PocketChroot:
 
   >>> p_chroot1 = hoary_i386.addOrUpdateChroot(chroot=chroot1)
-  >>> print p_chroot1.distroarchseries.architecturetag
+  >>> print(p_chroot1.distroarchseries.architecturetag)
   i386
-  >>> print p_chroot1.chroot.id,
+  >>> print(p_chroot1.chroot.id)
   1
 
 Invoke addOrUpdateChroot on an existing PocketChroot; it will update
 the chroot:
 
   >>> p_chroot2 = hoary_i386.addOrUpdateChroot(chroot=chroot2)
-  >>> print p_chroot2.distroarchseries.architecturetag
+  >>> print(p_chroot2.distroarchseries.architecturetag)
   i386
-  >>> print p_chroot2.chroot.id,
+  >>> print(p_chroot2.chroot.id)
   2
   >>> p_chroot2 == p_chroot1
   True

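The pocketchroot hunk above also shows why some expected outputs are rewritten
against print() instead of against a value's repr: the repr of a text string
differs between Python 2 (u'duuuuh') and Python 3 ('duuuuh'), whereas the
printed form is identical on both.  A minimal standalone sketch, using a
throwaway value rather than getChroot's real return value:

    from __future__ import print_function

    value = u'duuuuh'
    # repr() is version dependent: u'duuuuh' on Python 2, 'duuuuh' on Python 3.
    print(repr(value))
    # print() emits the bare text on both, so the doctest can simply expect:
    # duuuuh
    print(value)
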
=== modified file 'lib/lp/soyuz/doc/publishing-security.txt'
--- lib/lp/soyuz/doc/publishing-security.txt	2012-04-10 14:01:17 +0000
+++ lib/lp/soyuz/doc/publishing-security.txt	2018-05-27 20:15:09 +0000
@@ -25,21 +25,21 @@
 the public PPA:
 
     >>> login(ANONYMOUS)
-    >>> print public_ppa.getPublishedSources().first().displayname
+    >>> print(public_ppa.getPublishedSources().first().displayname)
     foo 666 in breezy-autotest
 
     >>> binary_pub = public_ppa.getAllPublishedBinaries()[0]
-    >>> print binary_pub.displayname
+    >>> print(binary_pub.displayname)
     foo-bin 666 in breezy-autotest i386
 
 A regular user can see them too:
 
     >>> login('no-priv@xxxxxxxxxxxxx')
-    >>> print public_ppa.getPublishedSources().first().displayname
+    >>> print(public_ppa.getPublishedSources().first().displayname)
     foo 666 in breezy-autotest
 
     >>> binary_pub = public_ppa.getAllPublishedBinaries()[0]
-    >>> print binary_pub.displayname
+    >>> print(binary_pub.displayname)
     foo-bin 666 in breezy-autotest i386
 
 But when querying the private PPA, anonymous access will be refused:
@@ -71,20 +71,20 @@
 But the owner can see them.
 
     >>> ignored = login_person(private_ppa.owner)
-    >>> print public_ppa.getPublishedSources().first().displayname
+    >>> print(public_ppa.getPublishedSources().first().displayname)
     foo 666 in breezy-autotest
 
     >>> binary_pub = private_ppa.getAllPublishedBinaries()[0]
-    >>> print binary_pub.displayname
+    >>> print(binary_pub.displayname)
     foo-bin 666 in breezy-autotest i386
 
 As can an administrator.
 
     >>> login('admin@xxxxxxxxxxxxx')
-    >>> print public_ppa.getPublishedSources().first().displayname
+    >>> print(public_ppa.getPublishedSources().first().displayname)
     foo 666 in breezy-autotest
 
     >>> binary_pub = private_ppa.getAllPublishedBinaries()[0]
-    >>> print binary_pub.displayname
+    >>> print(binary_pub.displayname)
     foo-bin 666 in breezy-autotest i386
 

=== modified file 'lib/lp/soyuz/doc/publishing.txt'
--- lib/lp/soyuz/doc/publishing.txt	2018-04-19 00:02:19 +0000
+++ lib/lp/soyuz/doc/publishing.txt	2018-05-27 20:15:09 +0000
@@ -63,27 +63,27 @@
 This is mostly as a convenience to API users so that we don't need to export
 tiny 2-column content classes and force the users to retrieve those.
 
-    >>> print spph.source_package_name
+    >>> print(spph.source_package_name)
     pmount
 
-    >>> print spph.source_package_version
+    >>> print(spph.source_package_version)
     0.1-1
 
-    >>> print spph.component_name
+    >>> print(spph.component_name)
     main
 
-    >>> print spph.section_name
+    >>> print(spph.section_name)
     base
 
 Other properties are shortcuts to the source package's properties:
 
-    >>> print spph.package_creator
-    <Person at ... mark (Mark Shuttleworth)>
-
-    >>> print spph.package_maintainer
-    <Person at ... mark (Mark Shuttleworth)>
-
-    >>> print spph.package_signer
+    >>> print(spph.package_creator)
+    <Person at ... mark (Mark Shuttleworth)>
+
+    >>> print(spph.package_maintainer)
+    <Person at ... mark (Mark Shuttleworth)>
+
+    >>> print(spph.package_signer)
     <Person at ... name16 (Foo Bar)>
 
 The signer can also be None for packages that were synced (e.g. from Debian):
@@ -91,7 +91,7 @@
     >>> from lp.services.propertycache import get_property_cache
     >>> spph.sourcepackagerelease.signing_key_owner = None
     >>> spph.sourcepackagerelease.signing_key_fingerprint = None
-    >>> print spph.package_signer
+    >>> print(spph.package_signer)
     None
 
 There is also a method that returns the .changes file URL. This is proxied
@@ -104,13 +104,13 @@
 
 The pmount source has no packageupload in the sampledata:
 
-    >>> print spph.changesFileUrl()
+    >>> print(spph.changesFileUrl())
     None
 
 The iceweasel source has good data:
 
     >>> pub = spph.archive.getPublishedSources(name=u"iceweasel").first()
-    >>> print pub.changesFileUrl()
+    >>> print(pub.changesFileUrl())
     http://.../ubuntu/+archive/primary/+files/mozilla-firefox_0.9_i386.changes
 
 There is also a helper property to determine whether the current release for
@@ -118,7 +118,7 @@
 returned if there is no package in the distroseries primary archive with a
 later version.
 
-    >>> print pub.newer_distroseries_version
+    >>> print(pub.newer_distroseries_version)
     None
 
 If we publish iceweasel 1.1 in the same distroseries, then the distroseries
@@ -133,14 +133,14 @@
     ...     sourcename='iceweasel')
 
     >>> del get_property_cache(pub).newer_distroseries_version
-    >>> print pub.newer_distroseries_version.title
+    >>> print(pub.newer_distroseries_version.title)
     iceweasel 1.1 source package in Ubuntu
 
 We can calculate the newer_distroseries_version for many spph objects at once.
 
     >>> del get_property_cache(pub).newer_distroseries_version
     >>> pub.distroseries.setNewerDistroSeriesVersions([pub])
-    >>> print get_property_cache(pub).newer_distroseries_version.title
+    >>> print(get_property_cache(pub).newer_distroseries_version.title)
     iceweasel 1.1 source package in Ubuntu
 
 A helper is also included to create a summary of the build statuses for
@@ -159,10 +159,10 @@
 
     >>> import operator
     >>> def print_build_status_summary(summary):
-    ...     print summary['status'].title
+    ...     print(summary['status'].title)
     ...     for build in sorted(
     ...         summary['builds'], key=operator.attrgetter('title')):
-    ...         print build.title
+    ...         print(build.title)
     >>> build_status_summary = spph.getStatusSummaryForBuilds()
     >>> print_build_status_summary(build_status_summary)
     FULLYBUILT_PENDING
@@ -176,7 +176,7 @@
     >>> ps = getUtility(IPublishingSet)
     >>> unpublished_builds = ps.getUnpublishedBuildsForSources([spph])
     >>> for _, b, _ in sorted(unpublished_builds, key=lambda b:b[1].title):
-    ...     print b.title
+    ...     print(b.title)
     hppa build of abc 666 in ubuntutest breezy-autotest RELEASE
     i386 build of abc 666 in ubuntutest breezy-autotest RELEASE
 
@@ -209,7 +209,7 @@
 Nor will it be included in the unpublished builds:
 
     >>> for _, build, _ in ps.getUnpublishedBuildsForSources([spph]):
-    ...     print build.title
+    ...     print(build.title)
     i386 build of abc 666 in ubuntutest breezy-autotest RELEASE
 
 By default, only FULLYBUILT builds are included in the returned
@@ -218,14 +218,14 @@
     >>> builds[1].updateStatus(
     ...     BuildStatus.SUPERSEDED, force_invalid_transition=True)
     >>> for _, build, _ in ps.getUnpublishedBuildsForSources([spph]):
-    ...     print build.title
+    ...     print(build.title)
 
 But the returned build-states can be set explicitly:
 
     >>> for _, build, _ in ps.getUnpublishedBuildsForSources(
     ...     [spph],
     ...     build_states=[BuildStatus.FULLYBUILT, BuildStatus.SUPERSEDED]):
-    ...     print build.title
+    ...     print(build.title)
     i386 build of abc 666 in ubuntutest breezy-autotest RELEASE
 
 Just switch it back to FULLYBUILT before continuing:
@@ -248,7 +248,7 @@
 There are no longer any unpublished builds for the source package:
 
     >>> for _, build, _ in ps.getUnpublishedBuildsForSources([spph]):
-    ...     print build.title
+    ...     print(build.title)
 
 If a build is deleted, it does not cause the build status summary to change:
 
@@ -293,16 +293,16 @@
     >>> verifyObject(IBinaryPackagePublishingHistory, bpph)
     True
 
-    >>> print bpph.binary_package_name
+    >>> print(bpph.binary_package_name)
     def-bin
 
-    >>> print bpph.binary_package_version
+    >>> print(bpph.binary_package_version)
     666
 
-    >>> print bpph.component_name
+    >>> print(bpph.component_name)
     main
 
-    >>> print bpph.section_name
+    >>> print(bpph.section_name)
     base
 
 
@@ -314,7 +314,7 @@
     ...     IBinaryPackagePublishingHistory)
     >>> spph = SourcePackagePublishingHistory.get(10)
 
-    >>> print spph.displayname
+    >>> print(spph.displayname)
     alsa-utils 1.0.8-1ubuntu1 in warty
 
 
@@ -358,10 +358,10 @@
     >>> modified_spph.datesuperseded == transaction_timestamp
     True
 
-    >>> print modified_spph.removed_by.name
+    >>> print(modified_spph.removed_by.name)
     mark
 
-    >>> print modified_spph.removal_comment
+    >>> print(modified_spph.removal_comment)
     testing deletion
 
 requestObsolescence takes no additional arguments:
@@ -405,11 +405,11 @@
     ...     status=PackagePublishingStatus.PUBLISHED,
     ...     pocket=PackagePublishingPocket.PROPOSED)
 
-    >>> print source.displayname
+    >>> print(source.displayname)
     ghi 666 in breezy-autotest
 
     >>> for bin in binaries:
-    ...     print bin.displayname
+    ...     print(bin.displayname)
     ghi-bin 666 in breezy-autotest i386
     ghi-bin 666 in breezy-autotest hppa
 
@@ -425,7 +425,7 @@
 without retrieving its binaries.
 
     >>> for build in source.getBuilds():
-    ...     print build.title
+    ...     print(build.title)
     hppa build of ghi 666 in ubuntutest breezy-autotest PROPOSED
     i386 build of ghi 666 in ubuntutest breezy-autotest PROPOSED
 
@@ -472,11 +472,11 @@
 a chance to verify its contents and include it in the destination
 archive index.
 
-    >>> print copied_source.status.name
+    >>> print(copied_source.status.name)
     PENDING
 
     >>> for bin in copied_binaries:
-    ...     print bin.status.name
+    ...     print(bin.status.name)
     PENDING
     PENDING
 
@@ -499,12 +499,12 @@
 previous broken implementation in this area.
 
     >>> for bin in source.getPublishedBinaries():
-    ...     print bin.displayname, bin.pocket.name, bin.status.name
+    ...     print(bin.displayname, bin.pocket.name, bin.status.name)
     ghi-bin 666 in breezy-autotest hppa PROPOSED PUBLISHED
     ghi-bin 666 in breezy-autotest i386 PROPOSED PUBLISHED
 
     >>> for bin in copied_source.getPublishedBinaries():
-    ...     print bin.displayname, bin.pocket.name, bin.status.name
+    ...     print(bin.displayname, bin.pocket.name, bin.status.name)
     ghi-bin 666 in breezy-autotest hppa UPDATES PENDING
     ghi-bin 666 in breezy-autotest i386 UPDATES PENDING
 
@@ -588,7 +588,7 @@
 but also the override just done.
 
     >>> for pub in copied_source.getPublishedBinaries():
-    ...     print pub.displayname, pub.component.name
+    ...     print(pub.displayname, pub.component.name)
     ghi-bin 666 in breezy-autotest hppa universe
     ghi-bin 666 in breezy-autotest hppa main
     ghi-bin 666 in breezy-autotest i386 main
@@ -601,7 +601,7 @@
 publications and the hppa one is the overridden one.
 
     >>> for pub in copied_source.getBuiltBinaries():
-    ...     print pub.displayname, pub.component.name
+    ...     print(pub.displayname, pub.component.name)
     ghi-bin 666 in breezy-autotest hppa universe
     ghi-bin 666 in breezy-autotest i386 main
 
@@ -630,11 +630,11 @@
     ...     status=PackagePublishingStatus.PUBLISHED,
     ...     pocket=PackagePublishingPocket.PROPOSED)
 
-    >>> print source_all.displayname
+    >>> print(source_all.displayname)
     pirulito 666 in breezy-autotest
 
     >>> for bin in binaries_all:
-    ...     print bin.displayname
+    ...     print(bin.displayname)
     pirulito 666 in breezy-autotest i386
     pirulito 666 in breezy-autotest hppa
 
@@ -643,7 +643,7 @@
 
     >>> copied_source_all = source_all.copyTo(distroseries, pocket, archive)
 
-    >>> print copied_source_all.displayname
+    >>> print(copied_source_all.displayname)
     pirulito 666 in breezy-autotest
 
 Architecture independent binaries, however, when copied result in
@@ -664,7 +664,7 @@
 The same binary is published in both supported architectures.
 
     >>> for bin in binary_copies:
-    ...     print bin.displayname
+    ...     print(bin.displayname)
     pirulito 666 in breezy-autotest hppa
     pirulito 666 in breezy-autotest i386
 
@@ -675,7 +675,7 @@
     >>> copied_binaries_all = copied_source_all.getPublishedBinaries()
 
     >>> for bin in copied_binaries_all:
-    ...     print bin.displayname
+    ...     print(bin.displayname)
     pirulito 666 in breezy-autotest hppa
     pirulito 666 in breezy-autotest i386
 
@@ -690,7 +690,7 @@
 
     >>> [built_binary] = copied_source_all.getBuiltBinaries()
 
-    >>> print built_binary.displayname
+    >>> print(built_binary.displayname)
     pirulito 666 in breezy-autotest i386
 
 
@@ -711,11 +711,11 @@
     ...     pub_source=ppa_source,
     ...     status=PackagePublishingStatus.PUBLISHED)
 
-    >>> print ppa_source.displayname, ppa_source.archive.displayname
+    >>> print(ppa_source.displayname, ppa_source.archive.displayname)
     jkl 666 in breezy-autotest PPA for Celso Providelo
 
     >>> for bin in ppa_binaries:
-    ...     print bin.displayname, bin.archive.displayname
+    ...     print(bin.displayname, bin.archive.displayname)
     jkl-bin 666 in breezy-autotest i386 PPA for Celso Providelo
     jkl-bin 666 in breezy-autotest hppa PPA for Celso Providelo
 
@@ -803,7 +803,7 @@
     >>> copied_source = ppa_source.copyTo(series, pocket, archive)
 
     >>> ppa_binary_i386 = ppa_binaries[0]
-    >>> print ppa_binary_i386.displayname
+    >>> print(ppa_binary_i386.displayname)
     mno-bin 999 in breezy-autotest i386
 
     >>> copied_binary = ppa_binary_i386.copyTo(series, pocket, archive)
@@ -811,11 +811,11 @@
 The source and binary are present in hoary-test:
 
     >>> copied_source = SourcePackagePublishingHistory.get(copied_source.id)
-    >>> print copied_source.displayname
+    >>> print(copied_source.displayname)
     mno 999 in hoary-test
 
     >>> for bin in copied_source.getPublishedBinaries():
-    ...     print bin.displayname
+    ...     print(bin.displayname)
     mno-bin 999 in hoary-test amd64
     mno-bin 999 in hoary-test i386
 
@@ -834,7 +834,7 @@
 Using the same Ubuntu source publishing example as above:
 
     >>> for file in source.getSourceAndBinaryLibraryFiles():
-    ...     print file.filename
+    ...     print(file.filename)
     ghi-bin_666_hppa.deb
     ghi-bin_666_i386.deb
     ghi_666.dsc
@@ -851,7 +851,7 @@
     ...     status=PackagePublishingStatus.PUBLISHED)
 
     >>> for file in ppa_source.getSourceAndBinaryLibraryFiles():
-    ...     print file.filename
+    ...     print(file.filename)
     pqr-bin_666_all.deb
     pqr_666.dsc
 
@@ -880,7 +880,7 @@
     ...     IBinaryPackageFile)
 
     >>> bpph = BinaryPackagePublishingHistory.get(15)
-    >>> print bpph.displayname
+    >>> print(bpph.displayname)
     mozilla-firefox 0.9 in woody i386
 
     >>> IBinaryPackagePublishingHistory.providedBy(bpph)
@@ -896,7 +896,7 @@
 Binary publishing records also have a download count, which contains
 the number of downloads of this binary package release in this archive.
 
-    >>> print bpph.getDownloadCount()
+    >>> print(bpph.getDownloadCount())
     0
 
     >>> from datetime import date
@@ -911,7 +911,7 @@
     >>> bpph.archive.updatePackageDownloadCount(
     ...     bpph.binarypackagerelease, date(2010, 2, 21), uk, 4)
 
-    >>> print bpph.getDownloadCount()
+    >>> print(bpph.getDownloadCount())
     16
 
 We can also use getDownloadCounts to find the raw download counts per
@@ -1005,7 +1005,7 @@
     ...     component=test_source_pub.component,
     ...     section=test_source_pub.section,
     ...     pocket=test_source_pub.pocket)
-    >>> print ppa_pub.component.name
+    >>> print(ppa_pub.component.name)
     main
 
 IPublishingSet is an essential component for
@@ -1019,7 +1019,7 @@
     >>> len(cprov_sources)
     8
     >>> for spph in cprov_sources:
-    ...     print spph.displayname
+    ...     print(spph.displayname)
     cdrkit 1.0 in breezy-autotest
     iceweasel 1.0 in warty
     jkl 666 in hoary-test
@@ -1071,7 +1071,7 @@
 
     >>> build = binaries[0].binarypackagerelease.build
     >>> source_pub = build.source_package_release.publishings[0]
-    >>> print build.archive.name
+    >>> print(build.archive.name)
     otherppa
 
     # Copy the source into Celso's PPA, ensuring that the binaries
@@ -1148,19 +1148,19 @@
     >>> (source_pub, binary_pub, binary, binary_name,
     ...  arch) = cprov_binaries.last()
 
-    >>> print source_pub.displayname
+    >>> print(source_pub.displayname)
     pqr 666 in breezy-autotest
 
-    >>> print binary_pub.displayname
+    >>> print(binary_pub.displayname)
     pqr-bin 666 in breezy-autotest i386
 
-    >>> print binary.title
+    >>> print(binary.title)
     pqr-bin-666
 
-    >>> print binary_name.name
+    >>> print(binary_name.name)
     pqr-bin
 
-    >>> print arch.displayname
+    >>> print(arch.displayname)
     ubuntutest Breezy Badger Autotest i386
 
 We can retrieve all files related with Celso's PPA publications.
@@ -1181,7 +1181,7 @@
     >>> ordered_filenames = sorted(
     ...    file.filename for source, file, content in cprov_files)
 
-    >>> print ordered_filenames[0]
+    >>> print(ordered_filenames[0])
     firefox_0.9.2.orig.tar.gz
 
 We can also retrieve just the binary files related with Celso's PPA
@@ -1201,7 +1201,7 @@
     True
 
     >>> for source, file, content in binary_files:
-    ...     print file.filename
+    ...     print(file.filename)
     mozilla-firefox_0.9_i386.deb
     jkl-bin_666_all.deb
     jkl-bin_666_all.deb
@@ -1231,19 +1231,19 @@
 
     >>> source_pub, upload, source, file, content = a_change
 
-    >>> print source_pub.displayname
+    >>> print(source_pub.displayname)
     iceweasel 1.0 in warty
 
-    >>> print upload.displayname
+    >>> print(upload.displayname)
     iceweasel
 
-    >>> print source.title
+    >>> print(source.title)
     iceweasel - 1.0
 
-    >>> print file.filename
+    >>> print(file.filename)
     mozilla-firefox_0.9_i386.changes
 
-    >>> print content.md5
+    >>> print(content.md5)
     e4a7193a8f72fa2755e2162512069093
 
 Last but not least the publishing set class allows for the bulk deletion
@@ -1253,7 +1253,7 @@
     ...     cprov.archive.getPublishedSources(
     ...     status=PackagePublishingStatus.PUBLISHED),
     ...     key=operator.attrgetter('id'))
-    >>> print len(cprov_sources)
+    >>> print(len(cprov_sources))
     6
 
 We will delete the first two source publishing history records and
@@ -1283,7 +1283,7 @@
     >>> cprov_sources = list(
     ...     cprov.archive.getPublishedSources(
     ...     status=PackagePublishingStatus.PUBLISHED))
-    >>> print len(cprov_sources)
+    >>> print(len(cprov_sources))
     4
 
 Analogously, the number of associated published binaries will be less
@@ -1315,7 +1315,7 @@
     >>> cprov_published_sources = cprov.archive.getPublishedSources(
     ...     status=PackagePublishingStatus.PUBLISHED)
     >>> for spph in cprov_published_sources:
-    ...     print spph.displayname
+    ...     print(spph.displayname)
     jkl 666 in breezy-autotest
     mno 999 in breezy-autotest
     pmount 0.1-1 in warty
@@ -1363,9 +1363,9 @@
     ...     given_ids = [obj.id for obj in given]
     ...     returned_ids = [obj.id for obj in returned]
     ...     if given_ids == returned_ids:
-    ...        print 'Matches'
+    ...        print('Matches')
     ...     else:
-    ...        print 'Mismatch:', given_ids, returned_ids
+    ...        print('Mismatch:', given_ids, returned_ids)
 
     >>> compare_ids(cprov_published_sources, decorated_set)
     Matches
@@ -1413,7 +1413,7 @@
 
     >>> all_cprov_sources = cprov.archive.getPublishedSources()
     >>> for spph in all_cprov_sources:
-    ...     print spph.displayname
+    ...     print(spph.displayname)
     cdrkit 1.0 in breezy-autotest
     foo 666 in breezy-autotest
     iceweasel 1.0 in warty
@@ -1430,7 +1430,7 @@
     >>> pub_with_changes = all_cprov_sources[2]
     >>> the_source = pub_with_changes.sourcepackagerelease
     >>> the_change = the_source.upload_changesfile
-    >>> print the_change.filename
+    >>> print(the_change.filename)
     mozilla-firefox_0.9_i386.changes
 
 The same control-publication is reachable in the dictionary returned
@@ -1439,7 +1439,7 @@
     >>> decorated_changes = ArchiveSourcePublications(all_cprov_sources)
     >>> changes_by_source = decorated_changes.getChangesFileBySource()
     >>> decorated_change = changes_by_source.get(pub_with_changes)
-    >>> print decorated_change.filename
+    >>> print(decorated_change.filename)
     mozilla-firefox_0.9_i386.changes
 
 Enough internals! What really matters for callsites is that, when
@@ -1451,7 +1451,7 @@
 
     >>> decorated_pub = list(decorated_set)[1]
 
-    >>> print decorated_pub
+    >>> print(decorated_pub)
     <...ArchiveSourcePublication ...>
 
     >>> verifyObject(ISourcePackagePublishingHistory, decorated_pub)
@@ -1471,13 +1471,13 @@
     >>> pub_with_changes = cprov_published_sources[1]
     >>> the_source = pub_with_changes.sourcepackagerelease
     >>> changesfile = the_source.upload_changesfile
-    >>> print '%s (%s)' % (changesfile.filename, changesfile.content.md5)
+    >>> print('%s (%s)' % (changesfile.filename, changesfile.content.md5))
     mno_999_source.changes (6168e17ba012fc3db6dc77e255243bd1)
 
     >>> decorated_pub_with_changes = list(decorated_set)[1]
     >>> decorated_source = decorated_pub_with_changes.sourcepackagerelease
     >>> changesfile = decorated_source.upload_changesfile
-    >>> print '%s (%s)' % (changesfile.filename, changesfile.content.md5)
+    >>> print('%s (%s)' % (changesfile.filename, changesfile.content.md5))
     mno_999_source.changes (6168e17ba012fc3db6dc77e255243bd1)
 
 `ArchiveSourcePublication` also has a decorated version of the
@@ -1517,18 +1517,18 @@
 Create a small function for displaying the results:
 
     >>> def print_build_summary(summary):
-    ...     print "%s\n%s\nRelevant builds:\n%s" % (
+    ...     print("%s\n%s\nRelevant builds:\n%s" % (
     ...         summary['status'].title,
     ...         summary['status'].description,
     ...         "\n".join(
     ...             " - %s" % build.title for build in summary['builds'])
-    ...     )
+    ...     ))
 
     >>> def print_build_summaries(summaries):
     ...     count = 0
     ...     for source_id, summary in sorted(summaries.items()):
     ...         count += 1
-    ...         print "Source number: %s" % count
+    ...         print("Source number: %s" % count)
     ...         print_build_summary(summary)
 
 And then grab the build summaries for firefox and foo:

=== modified file 'lib/lp/soyuz/doc/sampledata-setup.txt'
--- lib/lp/soyuz/doc/sampledata-setup.txt	2011-12-28 17:03:06 +0000
+++ lib/lp/soyuz/doc/sampledata-setup.txt	2018-05-27 20:15:09 +0000
@@ -14,10 +14,10 @@
     >>> return_code, output, error = run_script(
     ...     'utilities/soyuz-sampledata-setup.py')
 
-    >>> print return_code
+    >>> print(return_code)
     0
 
-    >>> print error
+    >>> print(error)
     INFO ...
     INFO Done.
 

=== modified file 'lib/lp/soyuz/doc/sourcepackagerelease.txt'
--- lib/lp/soyuz/doc/sourcepackagerelease.txt	2016-02-29 18:48:23 +0000
+++ lib/lp/soyuz/doc/sourcepackagerelease.txt	2018-05-27 20:15:09 +0000
@@ -27,7 +27,7 @@
 SourcePackageRelease is published in.
 
     >>> for archive in spr.published_archives:
-    ...     print archive.displayname
+    ...     print(archive.displayname)
     Primary Archive for Ubuntu Linux
     PPA for Celso Providelo
 
@@ -241,12 +241,12 @@
     ...     archive=cprov_private_ppa)
 
     >>> test_sourcepackagerelease = private_publication.sourcepackagerelease
-    >>> print test_sourcepackagerelease.title
+    >>> print(test_sourcepackagerelease.title)
     foo - 666
 
     >>> published_archives = test_sourcepackagerelease.published_archives
     >>> for archive in published_archives:
-    ...     print archive.displayname
+    ...     print(archive.displayname)
     PPA named pppa for Celso Providelo
 
 'foo - 666' sourcepackagerelease is only published in Celso's Private
@@ -282,7 +282,7 @@
 
     >>> published_archives = test_sourcepackagerelease.published_archives
     >>> for archive in published_archives:
-    ...     print archive.displayname
+    ...     print(archive.displayname)
     Primary Archive for Ubuntu Linux
     PPA named pppa for Celso Providelo
 
@@ -328,6 +328,6 @@
 
     >>> published_archives = test_sourcepackagerelease.published_archives
     >>> for archive in published_archives:
-    ...     print archive.displayname
+    ...     print(archive.displayname)
     Primary Archive for Ubuntu Linux
     PPA named pppa for Celso Providelo

=== modified file 'lib/lp/soyuz/doc/soyuz-set-of-uploads.txt'
--- lib/lp/soyuz/doc/soyuz-set-of-uploads.txt	2018-05-06 08:52:34 +0000
+++ lib/lp/soyuz/doc/soyuz-set-of-uploads.txt	2018-05-27 20:15:09 +0000
@@ -249,7 +249,7 @@
     ...     if len(rejected_contents) > 0:
     ...         # Clean up rejected entry
     ...         shutil.rmtree(os.path.join(rejected_dir, leafname))
-    ...         print "Rejected uploads: %s" % rejected_contents
+    ...         print("Rejected uploads: %s" % ", ".join(rejected_contents))
     ...         return
     ...
     ...     assert len(os.listdir(failed_dir)) == 0, (
@@ -260,7 +260,7 @@
     ...     assert simple_publish(distro=distro), (
     ...             "Should publish at least one item")
     ...     if loglevel is None or loglevel <= logging.INFO:
-    ...         print "Upload complete."
+    ...         print("Upload complete.")
 
     >>> from lp.testing.mail_helpers import (
     ...     pop_notifications,
@@ -274,10 +274,10 @@
     ...     line.
     ...     """
     ...     for message in pop_notifications(commit=False):
-    ...         print "To:", sort_addresses(message['to'])
-    ...         print "Subject:", message['subject']
-    ...         print message.get_payload()[0].as_string()
-    ...         print ''
+    ...         print("To:", sort_addresses(message['to']))
+    ...         print("Subject:", message['subject'])
+    ...         print(message.get_payload()[0].as_string())
+    ...         print()
 
 The 'bar' package is an arch-all package. We have four stages to the
 bar test. Each stage should be simple enough. First we have a new
@@ -301,7 +301,7 @@
 
     >>> stub.test_emails = []
     >>> simulate_upload('bar_1.0-3', loglevel=logging.ERROR)
-    Rejected uploads: ['bar_1.0-3']
+    Rejected uploads: bar_1.0-3
 
     >>> read_email()
     To: Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx>
@@ -579,7 +579,7 @@
     INFO Upload was rejected:
     INFO foo_1.0-3.dsc: Version older than that in the archive. 1.0-3 <= 2.9-2
     ...
-    Rejected uploads: ['foo_1.0-3']
+    Rejected uploads: foo_1.0-3
 
 Note that the ancestry pointed to in the rejection message (2.9-2) is what
 we expect.
@@ -625,9 +625,9 @@
     ...         args.append("-P")
     ...     script = os.path.join(config.root, "scripts", "publish-distro.py")
     ...     result, stdout, stderr = run_script(script, args)
-    ...     print stderr
+    ...     print(stderr)
     ...     if result != 0:
-    ...         print "Script returned", result
+    ...         print("Script returned", result)
 
     >>> def release_file_has_uncompressed_packages(path):
     ...     """Does the release file include uncompressed Packages?"""

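One change to simulate_upload above goes beyond adding parentheses: the
rejected directory names are joined into plain text before interpolation,
instead of relying on the list's repr (which embeds each entry's own repr and
so varies with the string type).  A standalone sketch using a made-up list
rather than the real rejected_contents:

    from __future__ import print_function

    rejected_contents = [u'bar_1.0-3']
    # Interpolating the list uses its repr, which differs by string type:
    # "[u'bar_1.0-3']" on Python 2 versus "['bar_1.0-3']" on Python 3.
    print("Rejected uploads: %s" % rejected_contents)
    # Joining first keeps the doctest expectation stable:
    # Rejected uploads: bar_1.0-3
    print("Rejected uploads: %s" % ", ".join(rejected_contents))
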
=== modified file 'lib/lp/soyuz/doc/soyuz-upload.txt'
--- lib/lp/soyuz/doc/soyuz-upload.txt	2018-05-06 08:52:34 +0000
+++ lib/lp/soyuz/doc/soyuz-upload.txt	2018-05-27 20:15:09 +0000
@@ -170,7 +170,7 @@
     >>> key_data = open(key_path).read()
     >>> key = gpg_handler.importPublicKey(key_data)
     >>> assert key is not None
-    >>> print key.fingerprint
+    >>> print(key.fingerprint)
     33C0A61893A5DC5EB325B29E415A12CAC2F30234
 
 
@@ -254,11 +254,11 @@
 the other three still in incoming.
 
     >>> for i in range(4):
-    ...     find_upload_dir_result(i + 1)
-    'rejected'
-    'incoming'
-    'incoming'
-    'incoming'
+    ...     print(find_upload_dir_result(i + 1))
+    rejected
+    incoming
+    incoming
+    incoming
 
 
 Now continue with the real upload.
@@ -272,8 +272,8 @@
 
     >>> stdout, stderr = process.communicate()
     >>> if process.returncode != 0:
-    ...     print stdout
-    ...     print stderr
+    ...     print(stdout)
+    ...     print(stderr)
 
 
 Let's check if packages were uploaded correctly.
@@ -342,9 +342,11 @@
 Check the four uploads all ended up where we expected.
 
     >>> for i in range(0, 4):
-    ...     find_upload_dir_result(i + 1)
-    'rejected'
-    'failed'
+    ...     print(find_upload_dir_result(i + 1))
+    rejected
+    None
+    None
+    failed
 
 Also check the upload folders contain all the files we uploaded.
 
@@ -369,7 +371,7 @@
 
     >>> from lp.soyuz.model.queue import PackageUploadSource
     >>> for name in package_names:
-    ...     print name
+    ...     print(name)
     ...     spn = SourcePackageName.selectOneBy(name=name)
     ...     spr = SourcePackageRelease.selectOneBy(sourcepackagenameID=spn.id)
     ...     us = PackageUploadSource.selectOneBy(
@@ -415,7 +417,7 @@
     ...          L.append("%s %s" % (queue_item.sourcepackagerelease.name,
     ...                              'ACCEPTED'))
     >>> L.sort()
-    >>> print "\n".join(L)
+    >>> print("\n".join(L))
     drdsl ACCEPTED
     etherwake ACCEPTED
 
@@ -438,9 +440,9 @@
     ...     spr = SourcePackageRelease.selectOneBy(sourcepackagenameID=spn.id)
     ...     sspph = SSPPH.selectOneBy(sourcepackagereleaseID=spr.id)
     ...     if sspph:
-    ...         print name, sspph.status.title
+    ...         print(name, sspph.status.title)
     ...     else:
-    ...         print name, 'not Published'
+    ...         print(name, 'not Published')
     drdsl Pending
     etherwake Pending
 
@@ -453,7 +455,7 @@
     ...                            stdout=subprocess.PIPE,
     ...                            stderr=subprocess.PIPE)
     >>> stdout, stderr = process.communicate()
-    >>> print stdout
+    >>> print(stdout)
     <BLANKLINE>
 
     >>> transaction.commit()
@@ -473,10 +475,10 @@
     ...     for key in sorted(deb822):
     ...         value = deb822.get_as_string(key)
     ...         if not value or value[0] == '\n':
-    ...             print '%s:%s' % (key, value)
+    ...             print('%s:%s' % (key, value))
     ...         else:
-    ...             print '%s: %s' % (key, value)
-    ...     print
+    ...             print('%s: %s' % (key, value))
+    ...     print()
 
 Check the generation of a correct Sources tag file for the main
 component of ubuntutest/breezy-autotest, containing only the
@@ -490,7 +492,7 @@
     ...        "source/Sources.gz") as sources_file:
     ...     for source in Sources.iter_paragraphs(sources_file):
     ...         pprint_deb822(source)
-    ...     print 'END'
+    ...     print('END')
     Architecture: any
     Binary: etherwake
     Build-Depends: debhelper (>> 2.0)
@@ -535,7 +537,7 @@
     >>> for pub in SSPPH.selectBy(
     ...    sourcepackagereleaseID=etherwake_drspr.sourcepackagerelease.id,
     ...    orderBy=['id']):
-    ...    print pub.status.name, pub.component.name, pub.pocket.name
+    ...    print(pub.status.name, pub.component.name, pub.pocket.name)
     PUBLISHED universe RELEASE
     PENDING multiverse RELEASE
 
@@ -564,7 +566,7 @@
     >>> for pub in SSPPH.selectBy(
     ...    sourcepackagereleaseID=etherwake_drspr.sourcepackagerelease.id,
     ...    orderBy=['id']):
-    ...    print pub.status.name, pub.component.name, pub.pocket.name
+    ...    print(pub.status.name, pub.component.name, pub.pocket.name)
     SUPERSEDED universe RELEASE
     PUBLISHED multiverse RELEASE
 
@@ -573,14 +575,14 @@
     >>> main_sources = gzip.open(
     ...     "/var/tmp/archive/ubuntutest/dists/breezy-autotest"
     ...     "/main/source/Sources.gz").read()
-    >>> print main_sources + '\nEND'
+    >>> print(main_sources + '\nEND')
     <BLANKLINE>
     END
 
     >>> multiverse_sources = gzip.open(
     ...     "/var/tmp/archive/ubuntutest/dists/breezy-autotest"
     ...     "/multiverse/source/Sources.gz").read()
-    >>> print multiverse_sources + '\nEND'
+    >>> print(multiverse_sources + '\nEND')
     Package: drdsl
     ...
     Package: etherwake
@@ -600,7 +602,7 @@
 
     >>> releasefile_contents = open("/var/tmp/archive/ubuntutest/dists/"
     ...                             "breezy-autotest/Release").read()
-    >>> print releasefile_contents + '\nEND' #doctest: -NORMALIZE_WHITESPACE
+    >>> print(releasefile_contents + '\nEND') #doctest: -NORMALIZE_WHITESPACE
     Origin: ubuntutest
     Label: ubuntutest
     Suite: breezy-autotest

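Note why the second find_upload_dir_result loop above now expects four lines
instead of two: at the doctest prompt a bare expression evaluating to None
displays nothing, while print() shows the None explicitly, so iterations where
the helper returns None become visible lines in the expected output.  A toy
illustration (find_nothing is invented, not the test's helper):

    >>> def find_nothing():
    ...     return None
    >>> find_nothing()
    >>> print(find_nothing())
    None
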
=== modified file 'lib/lp/soyuz/doc/vocabularies.txt'
--- lib/lp/soyuz/doc/vocabularies.txt	2018-03-01 23:00:35 +0000
+++ lib/lp/soyuz/doc/vocabularies.txt	2018-05-27 20:15:09 +0000
@@ -159,7 +159,7 @@
     >>> verifyObject(IHugeVocabulary, vocabulary)
     True
 
-    >>> print vocabulary.displayname
+    >>> print(vocabulary.displayname)
     Select a PPA
 
 Iterations over the PPA vocabulary will return only PPA archives.
@@ -175,13 +175,13 @@
 
     >>> cprov_term = vocabulary.getTermByToken('~cprov/ubuntu/ppa')
 
-    >>> print cprov_term.token
+    >>> print(cprov_term.token)
     ~cprov/ubuntu/ppa
 
-    >>> print cprov_term.value
+    >>> print(cprov_term.value)
     <... lp.soyuz.model.archive.Archive instance ...>
 
-    >>> print cprov_term.title
+    >>> print(cprov_term.title)
     packages to help my friends.
 
 Not found terms result in LookupError.
@@ -196,7 +196,7 @@
     >>> def print_search_results(results):
     ...     for archive in results:
     ...         term = vocabulary.toTerm(archive)
-    ...         print '%s: %s' % (term.token, term.title)
+    ...         print('%s: %s' % (term.token, term.title))
 
     >>> cprov_search = vocabulary.search(u'cprov')
     >>> print_search_results(cprov_search)
@@ -256,14 +256,14 @@
     >>> flush_database_updates()
 
     >>> cprov_term = vocabulary.getTermByToken('~cprov/ubuntu/ppa')
-    >>> print cprov_term.title
+    >>> print(cprov_term.title)
     Single line.
 
     >>> cprov.archive.description = "First line\nSecond line."
     >>> flush_database_updates()
 
     >>> cprov_term = vocabulary.getTermByToken('~cprov/ubuntu/ppa')
-    >>> print cprov_term.title
+    >>> print(cprov_term.title)
     First line
 
 PPAs with empty description are identified and have a title saying so.
@@ -272,7 +272,7 @@
     >>> flush_database_updates()
 
     >>> cprov_term = vocabulary.getTermByToken('~cprov/ubuntu/ppa')
-    >>> print cprov_term.title
+    >>> print(cprov_term.title)
     No description available
 
 Queries on empty strings also result in a valid SelectResults.

=== modified file 'lib/lp/soyuz/tests/test_doc.py'
--- lib/lp/soyuz/tests/test_doc.py	2018-05-06 08:52:34 +0000
+++ lib/lp/soyuz/tests/test_doc.py	2018-05-27 20:15:09 +0000
@@ -56,7 +56,7 @@
 
 def uploaderSetUp(test):
     """setup the package uploader script tests."""
-    setUp(test)
+    setUp(test, future=True)
     switch_dbuser('uploader')
 
 
@@ -64,7 +64,7 @@
     test_dbuser = config.statistician.dbuser
     test.globs['test_dbuser'] = test_dbuser
     switch_dbuser(test_dbuser)
-    setUp(test)
+    setUp(test, future=True)
 
 
 def statisticianTearDown(test):
@@ -75,7 +75,7 @@
     lobotomize_stevea()
     test_dbuser = config.uploadqueue.dbuser
     switch_dbuser(test_dbuser)
-    setUp(test)
+    setUp(test, future=True)
     test.globs['test_dbuser'] = test_dbuser
 
 
@@ -89,7 +89,7 @@
     lobotomize_stevea()
     test_dbuser = config.uploader.dbuser
     switch_dbuser(test_dbuser)
-    setUp(test)
+    setUp(test, future=True)
     test.globs['test_dbuser'] = test_dbuser
 
 
@@ -109,7 +109,7 @@
         ),
     'distroarchseriesbinarypackage.txt': LayeredDocFileSuite(
         '../doc/distroarchseriesbinarypackage.txt',
-        setUp=setUp, tearDown=tearDown,
+        setUp=lambda test: setUp(test, future=True), tearDown=tearDown,
         layer=LaunchpadZopelessLayer
         ),
     'closing-bugs-from-changelogs.txt': LayeredDocFileSuite(
@@ -127,6 +127,7 @@
         ),
     'soyuz-set-of-uploads.txt': LayeredDocFileSuite(
         '../doc/soyuz-set-of-uploads.txt',
+        setUp=lambda test: setUp(test, future=True),
         layer=LaunchpadZopelessLayer,
         ),
     'package-relationship.txt': LayeredDocFileSuite(
@@ -134,27 +135,27 @@
         stdout_logging=False, layer=None),
     'publishing.txt': LayeredDocFileSuite(
         '../doc/publishing.txt',
-        setUp=setUp,
+        setUp=lambda test: setUp(test, future=True),
         layer=LaunchpadZopelessLayer,
         ),
     'build-failedtoupload-workflow.txt': LayeredDocFileSuite(
         '../doc/build-failedtoupload-workflow.txt',
-        setUp=setUp, tearDown=tearDown,
+        setUp=lambda test: setUp(test, future=True), tearDown=tearDown,
         layer=LaunchpadZopelessLayer,
         ),
     'distroseriesqueue.txt': LayeredDocFileSuite(
         '../doc/distroseriesqueue.txt',
-        setUp=setUp, tearDown=tearDown,
+        setUp=lambda test: setUp(test, future=True), tearDown=tearDown,
         layer=LaunchpadZopelessLayer,
         ),
     'distroseriesqueue-notify.txt': LayeredDocFileSuite(
         '../doc/distroseriesqueue-notify.txt',
-        setUp=setUp, tearDown=tearDown,
+        setUp=lambda test: setUp(test, future=True), tearDown=tearDown,
         layer=LaunchpadZopelessLayer,
         ),
     'distroseriesqueue-translations.txt': LayeredDocFileSuite(
         '../doc/distroseriesqueue-translations.txt',
-        setUp=setUp, tearDown=tearDown,
+        setUp=lambda test: setUp(test, future=True), tearDown=tearDown,
         layer=LaunchpadZopelessLayer,
         ),
     }
@@ -190,7 +191,8 @@
     for filename in filenames:
         path = os.path.join('../doc', filename)
         one_test = LayeredDocFileSuite(
-            path, setUp=setUp, tearDown=tearDown,
+            path,
+            setUp=lambda test: setUp(test, future=True), tearDown=tearDown,
             layer=LaunchpadFunctionalLayer,
             stdout_logging_level=logging.WARNING)
         suite.addTest(one_test)

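Finally, test_doc.py opts the converted doctests in one at a time by wrapping
the standard setUp in a lambda that passes future=True, so suites that still
use the plain setUp are unaffected.  The details of that flag live in
Launchpad's test helpers; as a rough standard-library sketch of the same idea,
doctest can be asked to compile its examples with the print_function compiler
flag, so that print(x, y) behaves as a function call even on Python 2 (the
sample doctest below is invented for illustration):

    from __future__ import print_function

    import doctest
    import __future__ as future_features

    SAMPLE = """
    >>> print('a', 'b')
    a b
    """

    def run_sample():
        # Compile the examples as if the doctest itself started with
        # "from __future__ import print_function".
        flags = future_features.print_function.compiler_flag
        parser = doctest.DocTestParser()
        test = parser.get_doctest(SAMPLE, {}, 'sample', '<sample>', 0)
        runner = doctest.DocTestRunner(verbose=False)
        runner.run(test, compileflags=flags)
        return runner.failures

    print(run_sample())  # expected: 0
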
