launchpad-reviewers team mailing list archive
Message #28846
[Merge] ~cjwatson/launchpad:black-codehosting into launchpad:master
Colin Watson has proposed merging ~cjwatson/launchpad:black-codehosting into launchpad:master.
Commit message:
lp.codehosting: Apply black
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/427119
--
The attached diff has been truncated due to its size.
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:black-codehosting into launchpad:master.
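The reformatting below is mechanical. As a rough illustration only (a
hypothetical snippet, not part of this branch, assuming the black package is
importable), black's format_str API reproduces the kind of rewrite seen
throughout the diff, e.g. the __all__ block in lib/lp/codehosting/__init__.py:

    # Sketch: black normalises quotes and, because of the magic trailing
    # comma, keeps the list exploded with the closing bracket dedented.
    import black

    before = "__all__ = [\n    'get_brz_path',\n    ]\n"
    print(black.format_str(before, mode=black.Mode()), end="")
    # __all__ = [
    #     "get_brz_path",
    # ]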
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 1ccbf84..b19c911 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -72,3 +72,5 @@ c606443bdb2f342593c9a7c9437cb70c01f85f29
a6bed71f3d2fdbceae20c2d435c993e8bededdce
# apply black to lp.code
94d8e9842b7c92f3f9b7f514fb49ebdc9af7e413
+# apply black to lp.codehosting
+ed7d7b97b8fb4ebe92799f922b0fa9c4bd1714e8
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c17b9a5..34046c5 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -50,6 +50,7 @@ repos:
|buildmaster
|charms
|code
+ |codehosting
)/
- repo: https://github.com/PyCQA/isort
rev: 5.9.2
@@ -76,6 +77,7 @@ repos:
|buildmaster
|charms
|code
+ |codehosting
)/
- id: isort
alias: isort-black
@@ -92,6 +94,7 @@ repos:
|buildmaster
|charms
|code
+ |codehosting
)/
- repo: https://github.com/PyCQA/flake8
rev: 3.9.2
diff --git a/lib/lp/codehosting/__init__.py b/lib/lp/codehosting/__init__.py
index 96c4788..7ce709c 100644
--- a/lib/lp/codehosting/__init__.py
+++ b/lib/lp/codehosting/__init__.py
@@ -8,19 +8,20 @@ all plugins in the brzplugins/ directory underneath the rocketfuel checkout.
"""
__all__ = [
- 'get_brz_path',
- ]
+ "get_brz_path",
+]
import os
import breezy
+
+# This import is needed so that brz's logger gets registered.
+import breezy.trace
from breezy import ui as brz_ui
from breezy.branch import Branch
from breezy.library_state import BzrLibraryState as BrzLibraryState
from breezy.plugin import load_plugins as brz_load_plugins
-# This import is needed so that brz's logger gets registered.
-import breezy.trace
from zope.security import checker
from lp.services.config import config
@@ -28,12 +29,12 @@ from lp.services.config import config
def get_brz_path():
"""Find the path to the copy of Breezy for this rocketfuel instance"""
- return os.path.join(config.root, 'bin', 'brz')
+ return os.path.join(config.root, "bin", "brz")
def _get_brz_plugins_path():
"""Find the path to the Breezy plugins for this rocketfuel instance."""
- return os.path.join(config.root, 'brzplugins')
+ return os.path.join(config.root, "brzplugins")
def get_BRZ_PLUGIN_PATH_for_subprocess():
@@ -52,21 +53,24 @@ def get_BRZ_PLUGIN_PATH_for_subprocess():
# itself with a terminal-oriented UI.
if breezy._global_state is None:
brz_state = BrzLibraryState(
- ui=brz_ui.SilentUIFactory(), trace=breezy.trace.Config())
+ ui=brz_ui.SilentUIFactory(), trace=breezy.trace.Config()
+ )
brz_state._start()
-os.environ['BRZ_PLUGIN_PATH'] = get_BRZ_PLUGIN_PATH_for_subprocess()
+os.environ["BRZ_PLUGIN_PATH"] = get_BRZ_PLUGIN_PATH_for_subprocess()
# Disable some Breezy plugins that are likely to cause trouble if used on
# the server. (Unfortunately there doesn't seem to be a good way to load
# only explicitly-specified plugins at the moment.)
-os.environ['BRZ_DISABLE_PLUGINS'] = ':'.join([
- 'cvs',
- 'darcs',
- 'email',
- 'mtn',
- ])
+os.environ["BRZ_DISABLE_PLUGINS"] = ":".join(
+ [
+ "cvs",
+ "darcs",
+ "email",
+ "mtn",
+ ]
+)
# We want to have full access to Launchpad's Breezy plugins throughout the
# codehosting package.
@@ -83,6 +87,5 @@ def dont_wrap_class_and_subclasses(cls):
# the various LoomBranch classes are present first.
import breezy.plugins.loom.branch # noqa: E402
-
breezy.plugins.loom.branch
dont_wrap_class_and_subclasses(Branch)
diff --git a/lib/lp/codehosting/bzrutils.py b/lib/lp/codehosting/bzrutils.py
index 8df9813..ad0fe37 100644
--- a/lib/lp/codehosting/bzrutils.py
+++ b/lib/lp/codehosting/bzrutils.py
@@ -8,35 +8,29 @@ integrates between Breezy's infrastructure and Launchpad's infrastructure.
"""
__all__ = [
- 'add_exception_logging_hook',
- 'DenyingServer',
- 'get_branch_info',
- 'get_branch_stacked_on_url',
- 'get_stacked_on_url',
- 'get_vfs_format_classes',
- 'HttpAsLocalTransport',
- 'identical_formats',
- 'install_oops_handler',
- 'is_branch_stackable',
- 'server',
- 'read_locked',
- 'remove_exception_logging_hook',
- ]
+ "add_exception_logging_hook",
+ "DenyingServer",
+ "get_branch_info",
+ "get_branch_stacked_on_url",
+ "get_stacked_on_url",
+ "get_vfs_format_classes",
+ "HttpAsLocalTransport",
+ "identical_formats",
+ "install_oops_handler",
+ "is_branch_stackable",
+ "server",
+ "read_locked",
+ "remove_exception_logging_hook",
+]
-from contextlib import contextmanager
import os
import sys
+from contextlib import contextmanager
-from breezy import (
- config,
- trace,
- )
+import six
+from breezy import config, trace
from breezy.branch import UnstackableBranchFormat
-from breezy.bzr.remote import (
- RemoteBranch,
- RemoteBzrDir,
- RemoteRepository,
- )
+from breezy.bzr.remote import RemoteBranch, RemoteBzrDir, RemoteRepository
from breezy.errors import (
AppendRevisionsOnlyViolation,
ConnectionTimeout,
@@ -44,31 +38,27 @@ from breezy.errors import (
NotStacked,
UnstackableRepositoryFormat,
UnsupportedProtocol,
- )
+)
from breezy.transport import (
get_transport,
register_transport,
unregister_transport,
- )
+)
from breezy.transport.local import LocalTransport
from lazr.uri import URI
-import six
-
-from lp.services.webapp.errorlog import (
- ErrorReportingUtility,
- ScriptRequest,
- )
+from lp.services.webapp.errorlog import ErrorReportingUtility, ScriptRequest
# Exception classes which are not converted into OOPSes
NOT_OOPS_EXCEPTIONS = (
- AppendRevisionsOnlyViolation, ConnectionTimeout,
- GhostRevisionsHaveNoRevno)
+ AppendRevisionsOnlyViolation,
+ ConnectionTimeout,
+ GhostRevisionsHaveNoRevno,
+)
def should_log_oops(exc):
- """Return true if exc should trigger an OOPS.
- """
+ """Return true if exc should trigger an OOPS."""
return not issubclass(exc, NOT_OOPS_EXCEPTIONS)
@@ -121,7 +111,8 @@ def get_branch_stacked_on_url(a_bzrdir):
format = a_bzrdir.find_branch_format(None)
except NotImplementedError:
raise UnstackableBranchFormat(
- a_bzrdir._format, a_bzrdir.root_transport.base)
+ a_bzrdir._format, a_bzrdir.root_transport.base
+ )
if not format.supports_stacking():
raise UnstackableBranchFormat(format, a_bzrdir.root_transport.base)
branch_transport = a_bzrdir.get_branch_transport(None)
@@ -131,9 +122,8 @@ def get_branch_stacked_on_url(a_bzrdir):
# we read the 'branch.conf' and don't bother with the locations.conf or
# bazaar.conf. This is OK for Launchpad since we don't ever want to have
# local client configuration. It's not OK for Bazaar in general.
- branch_config = config.TransportConfig(
- branch_transport, 'branch.conf')
- stacked_on_url = branch_config.get_option('stacked_on_location')
+ branch_config = config.TransportConfig(branch_transport, "branch.conf")
+ stacked_on_url = branch_config.get_option("stacked_on_location")
if not stacked_on_url:
raise NotStacked(a_bzrdir.root_transport.base)
return stacked_on_url
@@ -180,6 +170,7 @@ def make_oops_logging_exception_hook(error_utility, request):
def log_oops():
if should_log_oops(sys.exc_info()[0]):
error_utility.raising(sys.exc_info(), request)
+
return log_oops
@@ -191,7 +182,7 @@ class BazaarOopsRequest(ScriptRequest):
:param user_id: The database ID of the user doing this.
"""
- data = [('user_id', user_id)]
+ data = [("user_id", user_id)]
super().__init__(data, URL=None)
@@ -200,7 +191,7 @@ def make_error_utility(pid=None):
if pid is None:
pid = os.getpid()
error_utility = ErrorReportingUtility()
- error_utility.configure('bzr_lpserve')
+ error_utility.configure("bzr_lpserve")
return error_utility
@@ -227,19 +218,18 @@ class HttpAsLocalTransport(LocalTransport):
"""
def __init__(self, http_url):
- file_url = URI(
- scheme='file', host='', path=URI(http_url).path)
+ file_url = URI(scheme="file", host="", path=URI(http_url).path)
return super().__init__(str(file_url))
@classmethod
def register(cls):
"""Register this transport."""
- register_transport('http://', cls)
+ register_transport("http://", cls)
@classmethod
def unregister(cls):
"""Unregister this transport."""
- unregister_transport('http://', cls)
+ unregister_transport("http://", cls)
class DenyingServer:
@@ -271,7 +261,8 @@ class DenyingServer:
def _deny(self, url):
"""Prevent creation of transport for 'url'."""
raise AssertionError(
- "Creation of transport for %r is currently forbidden" % url)
+ "Creation of transport for %r is currently forbidden" % url
+ )
def get_vfs_format_classes(branch):
@@ -295,14 +286,14 @@ def get_vfs_format_classes(branch):
branch._format.__class__,
repository._format.__class__,
bzrdir._format.__class__,
- )
+ )
def identical_formats(branch_one, branch_two):
- """Check if two branches have the same bzrdir, repo, and branch formats.
- """
- return (get_vfs_format_classes(branch_one) ==
- get_vfs_format_classes(branch_two))
+ """Check if two branches have the same bzrdir, repo, and branch formats."""
+ return get_vfs_format_classes(branch_one) == get_vfs_format_classes(
+ branch_two
+ )
def get_stacked_on_url(branch):
@@ -320,16 +311,18 @@ def get_branch_info(branch):
'control_string', 'branch_string', 'repository_string'.
"""
info = {}
- info['stacked_on_url'] = get_stacked_on_url(branch)
- info['last_revision_id'] = six.ensure_str(branch.last_revision())
+ info["stacked_on_url"] = get_stacked_on_url(branch)
+ info["last_revision_id"] = six.ensure_str(branch.last_revision())
# XXX: Aaron Bentley 2008-06-13
# Bazaar does not provide a public API for learning about
# format markers. Fix this in Bazaar, then here.
- info['control_string'] = six.ensure_str(
- branch.controldir._format.get_format_string())
- info['branch_string'] = six.ensure_str(branch._format.get_format_string())
- info['repository_string'] = six.ensure_str(
- branch.repository._format.get_format_string())
+ info["control_string"] = six.ensure_str(
+ branch.controldir._format.get_format_string()
+ )
+ info["branch_string"] = six.ensure_str(branch._format.get_format_string())
+ info["repository_string"] = six.ensure_str(
+ branch.repository._format.get_format_string()
+ )
return info
diff --git a/lib/lp/codehosting/inmemory.py b/lib/lp/codehosting/inmemory.py
index becf674..159ff22 100644
--- a/lib/lp/codehosting/inmemory.py
+++ b/lib/lp/codehosting/inmemory.py
@@ -4,37 +4,27 @@
"""In-memory doubles of core codehosting objects."""
__all__ = [
- 'InMemoryFrontend',
- 'XMLRPCWrapper',
- ]
+ "InMemoryFrontend",
+ "XMLRPCWrapper",
+]
import operator
from xmlrpc.client import Fault
-from breezy.urlutils import (
- escape,
- unescape,
- )
import six
+from breezy.urlutils import escape, unescape
from twisted.internet import defer
-from zope.component import (
- adapter,
- getSiteManager,
- )
+from zope.component import adapter, getSiteManager
from zope.interface import implementer
from lp.app.enums import (
- InformationType,
PRIVATE_INFORMATION_TYPES,
PUBLIC_INFORMATION_TYPES,
- )
+ InformationType,
+)
from lp.app.validators import LaunchpadValidationError
from lp.app.validators.name import valid_name
-from lp.code.bzr import (
- BranchFormat,
- ControlFormat,
- RepositoryFormat,
- )
+from lp.code.bzr import BranchFormat, ControlFormat, RepositoryFormat
from lp.code.enums import BranchType
from lp.code.errors import UnknownBranchTypeError
from lp.code.interfaces.branch import IBranch
@@ -42,18 +32,15 @@ from lp.code.interfaces.branchlookup import get_first_path_result
from lp.code.interfaces.branchtarget import IBranchTarget
from lp.code.interfaces.codehosting import (
BRANCH_ALIAS_PREFIX,
- branch_id_alias,
BRANCH_TRANSPORT,
CONTROL_TRANSPORT,
LAUNCHPAD_ANONYMOUS,
LAUNCHPAD_SERVICES,
- )
+ branch_id_alias,
+)
from lp.code.interfaces.linkedbranch import ICanHasLinkedBranch
from lp.code.model.branchnamespace import BranchNamespaceSet
-from lp.code.model.branchtarget import (
- PackageBranchTarget,
- ProductBranchTarget,
- )
+from lp.code.model.branchtarget import PackageBranchTarget, ProductBranchTarget
from lp.code.xmlrpc.codehosting import datetime_from_tuple
from lp.registry.errors import InvalidName
from lp.registry.interfaces.pocket import PackagePublishingPocket
@@ -82,9 +69,10 @@ class FakeStore:
implies that the given attribute has the expected value. Returning
None implies the opposite.
"""
- branch_id = kwargs.pop('id')
+ branch_id = kwargs.pop("id")
assert len(kwargs) == 1, (
- 'Expected only id and one other. Got %r' % kwargs)
+ "Expected only id and one other. Got %r" % kwargs
+ )
attribute = list(kwargs)[0]
expected_value = kwargs[attribute]
branch = self._object_set.get(branch_id)
@@ -109,7 +97,7 @@ class FakeDatabaseObject:
"""Base class for fake database objects."""
def _set_object_set(self, object_set):
- self.__storm_object_info__ = {'store': FakeStore(object_set)}
+ self.__storm_object_info__ = {"store": FakeStore(object_set)}
class ObjectSet:
@@ -163,8 +151,10 @@ class FakeSourcePackage:
return hash((self.sourcepackagename.id, self.distroseries.id))
def __eq__(self, other):
- return (self.sourcepackagename.id == other.sourcepackagename.id
- and self.distroseries.id == other.distroseries.id)
+ return (
+ self.sourcepackagename.id == other.sourcepackagename.id
+ and self.distroseries.id == other.distroseries.id
+ )
def __ne__(self, other):
return not (self == other)
@@ -178,7 +168,7 @@ class FakeSourcePackage:
@property
def development_version(self):
- name = '%s-devel' % self.distribution.name
+ name = "%s-devel" % self.distribution.name
dev_series = self._distroseries_set.getByName(name)
if dev_series is None:
dev_series = FakeDistroSeries(name, self.distribution)
@@ -187,21 +177,20 @@ class FakeSourcePackage:
@property
def path(self):
- return '%s/%s/%s' % (
+ return "%s/%s/%s" % (
self.distribution.name,
self.distroseries.name,
- self.sourcepackagename.name)
+ self.sourcepackagename.name,
+ )
def getBranch(self, pocket):
- return self.distroseries._linked_branches.get(
- (self, pocket), None)
+ return self.distroseries._linked_branches.get((self, pocket), None)
def setBranch(self, pocket, branch, registrant):
self.distroseries._linked_branches[self, pocket] = branch
class SourcePackageNameSet(ObjectSet):
-
def new(self, name_string):
if not valid_name(name_string):
raise InvalidName(name_string)
@@ -217,9 +206,19 @@ def fake_source_package_to_branch_target(fake_package):
class FakeBranch(FakeDatabaseObject):
"""Fake branch object."""
- def __init__(self, branch_type, name, owner, url=None, product=None,
- stacked_on=None, information_type=InformationType.PUBLIC,
- registrant=None, distroseries=None, sourcepackagename=None):
+ def __init__(
+ self,
+ branch_type,
+ name,
+ owner,
+ url=None,
+ product=None,
+ stacked_on=None,
+ information_type=InformationType.PUBLIC,
+ registrant=None,
+ distroseries=None,
+ sourcepackagename=None,
+ ):
self.branch_type = branch_type
self.last_mirror_attempt = None
self.last_mirrored = None
@@ -247,18 +246,19 @@ class FakeBranch(FakeDatabaseObject):
def unique_name(self):
if self.product is None:
if self.distroseries is None:
- product = '+junk'
+ product = "+junk"
else:
- product = '%s/%s/%s' % (
+ product = "%s/%s/%s" % (
self.distroseries.distribution.name,
self.distroseries.name,
- self.sourcepackagename.name)
+ self.sourcepackagename.name,
+ )
else:
product = self.product.name
- return '~%s/%s/%s' % (self.owner.name, product, self.name)
+ return "~%s/%s/%s" % (self.owner.name, product, self.name)
def getPullURL(self):
- return 'lp-fake:///' + self.unique_name
+ return "lp-fake:///" + self.unique_name
@property
def target(self):
@@ -277,6 +277,7 @@ class FakeBranch(FakeDatabaseObject):
class FakePerson(FakeDatabaseObject):
"""Fake person object."""
+
is_team = False
def __init__(self, name):
@@ -296,6 +297,7 @@ class FakePerson(FakeDatabaseObject):
class FakeTeam(FakePerson):
"""Fake team."""
+
is_team = True
def __init__(self, name, members=None):
@@ -314,10 +316,10 @@ class FakeProduct(FakeDatabaseObject):
self.owner = owner
self.information_type = information_type
self.bzr_path = name
- self.development_focus = FakeProductSeries(self, 'trunk')
+ self.development_focus = FakeProductSeries(self, "trunk")
self.series = {
- 'trunk': self.development_focus,
- }
+ "trunk": self.development_focus,
+ }
def getSeries(self, name):
return self.series.get(name, None)
@@ -372,7 +374,6 @@ class FakeScriptActivity(FakeDatabaseObject):
class FakeDistribution(FakeDatabaseObject):
-
def __init__(self, name):
self.name = name
@@ -397,9 +398,15 @@ DEFAULT_PRODUCT = object()
class FakeObjectFactory(ObjectFactory):
-
- def __init__(self, branch_set, person_set, product_set, distribution_set,
- distroseries_set, sourcepackagename_set):
+ def __init__(
+ self,
+ branch_set,
+ person_set,
+ product_set,
+ distribution_set,
+ distroseries_set,
+ sourcepackagename_set,
+ ):
super().__init__()
self._branch_set = branch_set
self._person_set = person_set
@@ -408,10 +415,17 @@ class FakeObjectFactory(ObjectFactory):
self._distroseries_set = distroseries_set
self._sourcepackagename_set = sourcepackagename_set
- def makeBranch(self, branch_type=None, stacked_on=None,
- information_type=InformationType.PUBLIC,
- product=DEFAULT_PRODUCT, owner=None, name=None,
- registrant=None, sourcepackage=None):
+ def makeBranch(
+ self,
+ branch_type=None,
+ stacked_on=None,
+ information_type=InformationType.PUBLIC,
+ product=DEFAULT_PRODUCT,
+ owner=None,
+ name=None,
+ registrant=None,
+ sourcepackage=None,
+ ):
if branch_type is None:
branch_type = BranchType.HOSTED
if branch_type == BranchType.MIRRORED:
@@ -432,12 +446,19 @@ class FakeObjectFactory(ObjectFactory):
else:
sourcepackagename = sourcepackage.sourcepackagename
distroseries = sourcepackage.distroseries
- IBranch['name'].validate(six.ensure_text(name))
+ IBranch["name"].validate(six.ensure_text(name))
branch = FakeBranch(
- branch_type, name=name, owner=owner, url=url,
- stacked_on=stacked_on, product=product,
- information_type=information_type, registrant=registrant,
- distroseries=distroseries, sourcepackagename=sourcepackagename)
+ branch_type,
+ name=name,
+ owner=owner,
+ url=url,
+ stacked_on=stacked_on,
+ product=product,
+ information_type=information_type,
+ registrant=registrant,
+ distroseries=distroseries,
+ sourcepackagename=sourcepackagename,
+ )
self._branch_set._add(branch)
return branch
@@ -448,13 +469,15 @@ class FakeObjectFactory(ObjectFactory):
if sourcepackage is None:
sourcepackage = self.makeSourcePackage()
return self.makeBranch(
- product=None, sourcepackage=sourcepackage, **kwargs)
+ product=None, sourcepackage=sourcepackage, **kwargs
+ )
def makePersonalBranch(self, owner=None, **kwargs):
if owner is None:
owner = self.makePerson()
return self.makeBranch(
- owner=owner, product=None, sourcepackage=None, **kwargs)
+ owner=owner, product=None, sourcepackage=None, **kwargs
+ )
def makeProductBranch(self, product=None, **kwargs):
if product is None:
@@ -497,8 +520,9 @@ class FakeObjectFactory(ObjectFactory):
self._person_set._add(person)
return person
- def makeProduct(self, name=None, owner=None,
- information_type=InformationType.PUBLIC):
+ def makeProduct(
+ self, name=None, owner=None, information_type=InformationType.PUBLIC
+ ):
if name is None:
name = self.getUniqueString()
if owner is None:
@@ -524,7 +548,7 @@ class FakeObjectFactory(ObjectFactory):
if branch is None:
branch = self.makeBranch(product=product)
product.development_focus.branch = branch
- branch.last_mirrored_id = 'rev1'
+ branch.last_mirrored_id = "rev1"
return branch
def enableDefaultStackingForPackage(self, package, branch):
@@ -535,16 +559,24 @@ class FakeObjectFactory(ObjectFactory):
branch.
"""
package.development_version.setBranch(
- PackagePublishingPocket.RELEASE, branch, branch.owner)
- branch.last_mirrored_id = 'rev1'
+ PackagePublishingPocket.RELEASE, branch, branch.owner
+ )
+ branch.last_mirrored_id = "rev1"
return branch
class FakeCodehosting:
-
- def __init__(self, branch_set, person_set, product_set, distribution_set,
- distroseries_set, sourcepackagename_set, factory,
- script_activity_set):
+ def __init__(
+ self,
+ branch_set,
+ person_set,
+ product_set,
+ distribution_set,
+ distroseries_set,
+ sourcepackagename_set,
+ factory,
+ script_activity_set,
+ ):
self._branch_set = branch_set
self._person_set = person_set
self._product_set = product_set
@@ -556,19 +588,24 @@ class FakeCodehosting:
def acquireBranchToPull(self, branch_type_names):
if not branch_type_names:
- branch_type_names = 'HOSTED', 'MIRRORED', 'IMPORTED'
+ branch_type_names = "HOSTED", "MIRRORED", "IMPORTED"
branch_types = []
for branch_type_name in branch_type_names:
try:
branch_types.append(BranchType.items[branch_type_name])
except KeyError:
raise UnknownBranchTypeError(
- 'Unknown branch type: %r' % (branch_type_name,))
+ "Unknown branch type: %r" % (branch_type_name,)
+ )
branches = sorted(
- (branch for branch in self._branch_set
- if branch.next_mirror_time is not None
- and branch.branch_type in branch_types),
- key=operator.attrgetter('next_mirror_time'))
+ (
+ branch
+ for branch in self._branch_set
+ if branch.next_mirror_time is not None
+ and branch.branch_type in branch_types
+ ),
+ key=operator.attrgetter("next_mirror_time"),
+ )
if branches:
branch = branches[-1]
# Mark it as started mirroring.
@@ -576,15 +613,22 @@ class FakeCodehosting:
branch.next_mirror_time = None
default_branch = branch.target.default_stacked_on_branch
if default_branch is None:
- default_branch_name = ''
- elif (branch.branch_type == BranchType.MIRRORED
- and default_branch.information_type in
- PRIVATE_INFORMATION_TYPES):
- default_branch_name = ''
+ default_branch_name = ""
+ elif (
+ branch.branch_type == BranchType.MIRRORED
+ and default_branch.information_type
+ in PRIVATE_INFORMATION_TYPES
+ ):
+ default_branch_name = ""
else:
- default_branch_name = '/' + default_branch.unique_name
- return (branch.id, branch.getPullURL(), branch.unique_name,
- default_branch_name, branch.branch_type.name)
+ default_branch_name = "/" + default_branch.unique_name
+ return (
+ branch.id,
+ branch.getPullURL(),
+ branch.unique_name,
+ default_branch_name,
+ branch.branch_type.name,
+ )
else:
return ()
@@ -598,16 +642,18 @@ class FakeCodehosting:
def recordSuccess(self, name, hostname, date_started, date_completed):
self._script_activity_set._add(
- FakeScriptActivity(name, hostname, date_started, date_completed))
+ FakeScriptActivity(name, hostname, date_started, date_completed)
+ )
return True
def _parseUniqueName(self, branch_path):
"""Return a dict of the parsed information and the branch name."""
try:
- namespace_path, branch_name = branch_path.rsplit('/', 1)
+ namespace_path, branch_name = branch_path.rsplit("/", 1)
except ValueError:
raise faults.PermissionDenied(
- "Cannot create branch at '/%s'" % branch_path)
+ "Cannot create branch at '/%s'" % branch_path
+ )
data = BranchNamespaceSet().parse(namespace_path)
return data, branch_name
@@ -617,25 +663,26 @@ class FakeCodehosting:
Raises exceptions on error conditions.
"""
to_link = None
- if branch_path.startswith(BRANCH_ALIAS_PREFIX + '/'):
- branch_path = branch_path[len(BRANCH_ALIAS_PREFIX) + 1:]
- if branch_path.startswith('~'):
+ if branch_path.startswith(BRANCH_ALIAS_PREFIX + "/"):
+ branch_path = branch_path[len(BRANCH_ALIAS_PREFIX) + 1 :]
+ if branch_path.startswith("~"):
data, branch_name = self._parseUniqueName(branch_path)
else:
- tokens = branch_path.split('/')
+ tokens = branch_path.split("/")
data = {
- 'person': registrant.name,
- 'product': tokens[0],
- }
- branch_name = 'trunk'
+ "person": registrant.name,
+ "product": tokens[0],
+ }
+ branch_name = "trunk"
# check the series
- product = self._product_set.getByName(data['product'])
+ product = self._product_set.getByName(data["product"])
if product is not None:
if len(tokens) > 1:
series = product.getSeries(tokens[1])
if series is None:
raise faults.NotFound(
- "No such product series: '%s'." % tokens[1])
+ "No such product series: '%s'." % tokens[1]
+ )
else:
to_link = ICanHasLinkedBranch(series)
else:
@@ -644,10 +691,11 @@ class FakeCodehosting:
else:
data, branch_name = self._parseUniqueName(branch_path)
- owner = self._person_set.getByName(data['person'])
+ owner = self._person_set.getByName(data["person"])
if owner is None:
raise faults.NotFound(
- "User/team '%s' does not exist." % (data['person'],))
+ "User/team '%s' does not exist." % (data["person"],)
+ )
# The real code consults the branch creation policy of the product. We
# don't need to do so here, since the tests above this layer never
# encounter that behaviour. If they *do* change to rely on the branch
@@ -655,60 +703,75 @@ class FakeCodehosting:
# exceptions.
if not registrant.inTeam(owner):
raise faults.PermissionDenied(
- '%s cannot create branches owned by %s'
- % (registrant.displayname, owner.displayname))
+ "%s cannot create branches owned by %s"
+ % (registrant.displayname, owner.displayname)
+ )
product = sourcepackage = None
- if data['product'] == '+junk':
+ if data["product"] == "+junk":
product = None
- elif data['product'] is not None:
- if not valid_name(data['product']):
- raise faults.InvalidProductName(escape(data['product']))
- product = self._product_set.getByName(data['product'])
+ elif data["product"] is not None:
+ if not valid_name(data["product"]):
+ raise faults.InvalidProductName(escape(data["product"]))
+ product = self._product_set.getByName(data["product"])
if product is None:
raise faults.NotFound(
- "Project '%s' does not exist." % (data['product'],))
- elif data['distribution'] is not None:
- distro = self._distribution_set.getByName(data['distribution'])
+ "Project '%s' does not exist." % (data["product"],)
+ )
+ elif data["distribution"] is not None:
+ distro = self._distribution_set.getByName(data["distribution"])
if distro is None:
raise faults.NotFound(
- "No such distribution: '%s'." % (data['distribution'],))
+ "No such distribution: '%s'." % (data["distribution"],)
+ )
distroseries = self._distroseries_set.getByName(
- data['distroseries'])
+ data["distroseries"]
+ )
if distroseries is None:
raise faults.NotFound(
"No such distribution series: '%s'."
- % (data['distroseries'],))
+ % (data["distroseries"],)
+ )
sourcepackagename = self._sourcepackagename_set.getByName(
- data['sourcepackagename'])
+ data["sourcepackagename"]
+ )
if sourcepackagename is None:
try:
sourcepackagename = self._sourcepackagename_set.new(
- data['sourcepackagename'])
+ data["sourcepackagename"]
+ )
except InvalidName:
raise faults.InvalidSourcePackageName(
- data['sourcepackagename'])
+ data["sourcepackagename"]
+ )
sourcepackage = self._factory.makeSourcePackage(
- distroseries, sourcepackagename)
+ distroseries, sourcepackagename
+ )
else:
raise faults.PermissionDenied(
- "Cannot create branch at '%s'" % branch_path)
+ "Cannot create branch at '%s'" % branch_path
+ )
branch = self._factory.makeBranch(
- owner=owner, name=branch_name, product=product,
- sourcepackage=sourcepackage, registrant=registrant,
- branch_type=BranchType.HOSTED)
+ owner=owner,
+ name=branch_name,
+ product=product,
+ sourcepackage=sourcepackage,
+ registrant=registrant,
+ branch_type=BranchType.HOSTED,
+ )
if to_link is not None:
if registrant.inTeam(to_link.product.owner):
to_link.branch = branch
else:
self._branch_set._delete(branch)
raise faults.PermissionDenied(
- "Cannot create linked branch at '%s'." % branch_path)
+ "Cannot create linked branch at '%s'." % branch_path
+ )
return branch.id
def createBranch(self, requester_id, branch_path):
- if not branch_path.startswith('/'):
+ if not branch_path.startswith("/"):
return faults.InvalidPath(branch_path)
- escaped_path = unescape(branch_path.strip('/'))
+ escaped_path = unescape(branch_path.strip("/"))
registrant = self._person_set.get(requester_id)
try:
return self._createBranch(registrant, escaped_path)
@@ -720,24 +783,33 @@ class FakeCodehosting:
def requestMirror(self, requester_id, branch_id):
self._branch_set.get(branch_id).requestMirror()
- def branchChanged(self, login_id, branch_id, stacked_on_location,
- last_revision_id, control_string, branch_string,
- repository_string):
+ def branchChanged(
+ self,
+ login_id,
+ branch_id,
+ stacked_on_location,
+ last_revision_id,
+ control_string,
+ branch_string,
+ repository_string,
+ ):
branch = self._branch_set._find(id=branch_id)
if branch is None:
return faults.NoBranchWithID(branch_id)
branch.mirror_status_message = None
- if stacked_on_location == '':
+ if stacked_on_location == "":
stacked_on_branch = None
else:
# We could log or something if the branch is not found here, but
# we just wait until the scanner fails and sets up an appropriate
# message.
stacked_on_branch = self._branch_set._find(
- unique_name=stacked_on_location.strip('/'))
+ unique_name=stacked_on_location.strip("/")
+ )
if stacked_on_branch is None:
branch.mirror_status_message = (
- 'Invalid stacked on location: ' + stacked_on_location)
+ "Invalid stacked on location: " + stacked_on_location
+ )
branch.stacked_on = stacked_on_branch
branch.last_mirrored = UTC_NOW
if branch.last_mirrored_id != last_revision_id:
@@ -751,12 +823,14 @@ class FakeCodehosting:
return default
branch.control_format = match_title(
- ControlFormat, control_string, ControlFormat.UNRECOGNIZED)
+ ControlFormat, control_string, ControlFormat.UNRECOGNIZED
+ )
branch.branch_format = match_title(
- BranchFormat, branch_string, BranchFormat.UNRECOGNIZED)
+ BranchFormat, branch_string, BranchFormat.UNRECOGNIZED
+ )
branch.repository_format = match_title(
- RepositoryFormat, repository_string,
- RepositoryFormat.UNRECOGNIZED)
+ RepositoryFormat, repository_string, RepositoryFormat.UNRECOGNIZED
+ )
return True
@@ -786,7 +860,7 @@ class FakeCodehosting:
def _get_product_target(self, path):
try:
- owner_name, product_name = path.split('/')
+ owner_name, product_name = path.split("/")
except ValueError:
# Wrong number of segments -- can't be a product.
return
@@ -795,27 +869,26 @@ class FakeCodehosting:
def _get_package_target(self, path):
try:
- owner_name, distro_name, series_name, package_name = (
- path.split('/'))
+ owner_name, distro_name, series_name, package_name = path.split(
+ "/"
+ )
except ValueError:
# Wrong number of segments -- can't be a package.
return
distro = self._distribution_set.getByName(distro_name)
distroseries = self._distroseries_set.getByName(series_name)
- sourcepackagename = self._sourcepackagename_set.getByName(
- package_name)
+ sourcepackagename = self._sourcepackagename_set.getByName(package_name)
if None in (distro, distroseries, sourcepackagename):
return
- return self._factory.makeSourcePackage(
- distroseries, sourcepackagename)
+ return self._factory.makeSourcePackage(distroseries, sourcepackagename)
def _serializeControlDirectory(self, requester, lookup):
- trailing_path = lookup['trailing'].lstrip('/')
- if not ('.bzr' == trailing_path or trailing_path.startswith('.bzr/')):
+ trailing_path = lookup["trailing"].lstrip("/")
+ if not (".bzr" == trailing_path or trailing_path.startswith(".bzr/")):
return
- target = self._get_product_target(lookup['control_name'])
+ target = self._get_product_target(lookup["control_name"])
if target is None:
- target = self._get_package_target(lookup['control_name'])
+ target = self._get_package_target(lookup["control_name"])
if target is None:
return
default_branch = IBranchTarget(target).default_stacked_on_branch
@@ -826,11 +899,13 @@ class FakeCodehosting:
path = branch_id_alias(default_branch)
return (
CONTROL_TRANSPORT,
- {'default_stack_on': escape(path)},
- trailing_path)
+ {"default_stack_on": escape(path)},
+ trailing_path,
+ )
- def _serializeBranch(self, requester_id, branch, trailing_path,
- force_readonly=False):
+ def _serializeBranch(
+ self, requester_id, branch, trailing_path, force_readonly=False
+ ):
if not self._canRead(requester_id, branch):
return faults.PermissionDenied()
elif branch.branch_type == BranchType.REMOTE:
@@ -841,50 +916,60 @@ class FakeCodehosting:
writable = self._canWrite(requester_id, branch)
return (
BRANCH_TRANSPORT,
- {'id': branch.id, 'writable': writable, 'private': branch.private},
- trailing_path)
+ {"id": branch.id, "writable": writable, "private": branch.private},
+ trailing_path,
+ )
def performLookup(self, requester_id, lookup, branch_name_only=False):
branch = None
- if branch_name_only and lookup['type'] != 'branch_name':
+ if branch_name_only and lookup["type"] != "branch_name":
return
- if lookup['type'] == 'control_name':
- return self._serializeControlDirectory(requester_id,
- lookup)
- elif lookup['type'] == 'id':
- branch = self._branch_set.get(lookup['branch_id'])
+ if lookup["type"] == "control_name":
+ return self._serializeControlDirectory(requester_id, lookup)
+ elif lookup["type"] == "id":
+ branch = self._branch_set.get(lookup["branch_id"])
if branch is None:
return None
- trailing = lookup['trailing']
- elif lookup['type'] == 'alias':
- result = get_first_path_result(lookup['lp_path'],
- lambda l: self.performLookup(requester_id, l,
- branch_name_only=True), None)
+ trailing = lookup["trailing"]
+ elif lookup["type"] == "alias":
+ result = get_first_path_result(
+ lookup["lp_path"],
+ lambda l: self.performLookup(
+ requester_id, l, branch_name_only=True
+ ),
+ None,
+ )
if result is not None:
return result
- product_name = lookup['lp_path'].split('/', 2)[0]
+ product_name = lookup["lp_path"].split("/", 2)[0]
product = self._product_set.getByName(product_name)
if product is None:
return None
branch = product.development_focus.branch
- trailing = lookup['lp_path'][len(product_name):]
- elif lookup['type'] == 'branch_name':
- branch = self._branch_set._find(
- unique_name=lookup['unique_name'])
- trailing = escape(lookup['trailing'])
+ trailing = lookup["lp_path"][len(product_name) :]
+ elif lookup["type"] == "branch_name":
+ branch = self._branch_set._find(unique_name=lookup["unique_name"])
+ trailing = escape(lookup["trailing"])
else:
return None
if branch is not None:
- serialized = self._serializeBranch(requester_id, branch,
- trailing.lstrip('/'), lookup['type'] == 'id')
+ serialized = self._serializeBranch(
+ requester_id,
+ branch,
+ trailing.lstrip("/"),
+ lookup["type"] == "id",
+ )
if serialized is not None:
return serialized
def translatePath(self, requester_id, path):
- if not path.startswith('/'):
+ if not path.startswith("/"):
return faults.InvalidPath(path)
- result = get_first_path_result(unescape(path.strip('/')),
- lambda l: self.performLookup(requester_id, l), None)
+ result = get_first_path_result(
+ unescape(path.strip("/")),
+ lambda l: self.performLookup(requester_id, l),
+ None,
+ )
if result is not None:
return result
else:
@@ -906,14 +991,23 @@ class InMemoryFrontend:
self._distroseries_set = ObjectSet()
self._sourcepackagename_set = SourcePackageNameSet()
self._factory = FakeObjectFactory(
- self._branch_set, self._person_set, self._product_set,
- self._distribution_set, self._distroseries_set,
- self._sourcepackagename_set)
+ self._branch_set,
+ self._person_set,
+ self._product_set,
+ self._distribution_set,
+ self._distroseries_set,
+ self._sourcepackagename_set,
+ )
self._codehosting = FakeCodehosting(
- self._branch_set, self._person_set, self._product_set,
- self._distribution_set, self._distroseries_set,
- self._sourcepackagename_set, self._factory,
- self._script_activity_set)
+ self._branch_set,
+ self._person_set,
+ self._product_set,
+ self._distribution_set,
+ self._distroseries_set,
+ self._sourcepackagename_set,
+ self._factory,
+ self._script_activity_set,
+ )
sm = getSiteManager()
sm.registerAdapter(fake_product_to_can_has_linked_branch)
sm.registerAdapter(fake_product_to_branch_target)
diff --git a/lib/lp/codehosting/puller/__init__.py b/lib/lp/codehosting/puller/__init__.py
index 0cf3449..646917e 100644
--- a/lib/lp/codehosting/puller/__init__.py
+++ b/lib/lp/codehosting/puller/__init__.py
@@ -1,7 +1,7 @@
# Copyright 2009 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
-__all__ = ['get_lock_id_for_branch_id', 'mirror']
+__all__ = ["get_lock_id_for_branch_id", "mirror"]
import datetime
@@ -11,14 +11,13 @@ from twisted.internet import defer
def get_lock_id_for_branch_id(branch_id):
- """Return the lock id that should be used for a branch with the passed id.
- """
- return 'worker-for-branch-%s@supermirror' % (branch_id,)
+ """Return the lock id that should be used for a branch with this id."""
+ return "worker-for-branch-%s@supermirror" % (branch_id,)
-from lp.codehosting.puller.scheduler import LockError # noqa: E402
+from lp.codehosting.puller.scheduler import LockError # noqa: E402
-UTC = pytz.timezone('UTC')
+UTC = pytz.timezone("UTC")
def mirror(logger, manager):
@@ -26,7 +25,7 @@ def mirror(logger, manager):
try:
manager.lock()
except LockError as exception:
- logger.info('Could not acquire lock: %s', exception)
+ logger.info("Could not acquire lock: %s", exception)
return defer.succeed(0)
date_started = datetime.datetime.now(UTC)
diff --git a/lib/lp/codehosting/puller/scheduler.py b/lib/lp/codehosting/puller/scheduler.py
index 1680e65..38bb4f1 100644
--- a/lib/lp/codehosting/puller/scheduler.py
+++ b/lib/lp/codehosting/puller/scheduler.py
@@ -2,34 +2,21 @@
# GNU Affero General Public License version 3 (see the file LICENSE).
__all__ = [
- 'BadMessage',
- 'JobScheduler',
- 'LockError',
- 'PullerMaster',
- 'PullerMonitorProtocol',
- ]
+ "BadMessage",
+ "JobScheduler",
+ "LockError",
+ "PullerMaster",
+ "PullerMonitorProtocol",
+]
import io
import os
import socket
-from contrib.glock import (
- GlobalLock,
- LockAlreadyAcquired,
- )
-from twisted.internet import (
- defer,
- error,
- reactor,
- )
-from twisted.protocols.basic import (
- NetstringParseError,
- NetstringReceiver,
- )
-from twisted.python import (
- failure,
- log,
- )
+from contrib.glock import GlobalLock, LockAlreadyAcquired
+from twisted.internet import defer, error, reactor
+from twisted.protocols.basic import NetstringParseError, NetstringReceiver
+from twisted.python import failure, log
from lp.code.interfaces.codehosting import LAUNCHPAD_SERVICES
from lp.codehosting.puller import get_lock_id_for_branch_id
@@ -37,11 +24,11 @@ from lp.codehosting.puller.worker import get_canonical_url_for_branch_name
from lp.services.config import config
from lp.services.twistedsupport.processmonitor import (
ProcessMonitorProtocolWithTimeout,
- )
+)
from lp.services.twistedsupport.task import (
ParallelLimitedTaskConsumer,
PollingTaskSource,
- )
+)
from lp.services.webapp import errorlog
@@ -50,7 +37,8 @@ class BadMessage(Exception):
def __init__(self, bad_netstring):
Exception.__init__(
- self, 'Received unrecognized message: %r' % bad_netstring)
+ self, "Received unrecognized message: %r" % bad_netstring
+ )
class UnexpectedStderr(Exception):
@@ -62,7 +50,8 @@ class UnexpectedStderr(Exception):
else:
last_line = stderr
Exception.__init__(
- self, "Unexpected standard error from subprocess: %s" % last_line)
+ self, "Unexpected standard error from subprocess: %s" % last_line
+ )
self.error = stderr
@@ -116,17 +105,21 @@ class PullerWireProtocol(NetstringReceiver):
# is check the value of brokenPeer.
if self.brokenPeer:
self.puller_protocol.unexpectedError(
- failure.Failure(NetstringParseError(data)))
+ failure.Failure(NetstringParseError(data))
+ )
def stringReceived(self, line):
"""See `NetstringReceiver.stringReceived`."""
try:
- line = line.decode('UTF-8')
+ line = line.decode("UTF-8")
except UnicodeDecodeError:
self.puller_protocol.unexpectedError(
- failure.Failure(BadMessage(line)))
- if (self._current_command is not None
- and self._expected_args is not None):
+ failure.Failure(BadMessage(line))
+ )
+ if (
+ self._current_command is not None
+ and self._expected_args is not None
+ ):
# state [2]
self._current_args.append(line)
elif self._current_command is not None:
@@ -137,16 +130,18 @@ class PullerWireProtocol(NetstringReceiver):
self.puller_protocol.unexpectedError(failure.Failure())
else:
# state [0]
- if getattr(self.puller_protocol, 'do_%s' % line, None) is None:
+ if getattr(self.puller_protocol, "do_%s" % line, None) is None:
self.puller_protocol.unexpectedError(
- failure.Failure(BadMessage(line)))
+ failure.Failure(BadMessage(line))
+ )
else:
self._current_command = line
if len(self._current_args) == self._expected_args:
# Execute the command.
method = getattr(
- self.puller_protocol, 'do_%s' % self._current_command)
+ self.puller_protocol, "do_%s" % self._current_command
+ )
try:
try:
method(*self._current_args)
@@ -162,8 +157,9 @@ class PullerWireProtocol(NetstringReceiver):
self._current_args = []
-class PullerMonitorProtocol(ProcessMonitorProtocolWithTimeout,
- NetstringReceiver):
+class PullerMonitorProtocol(
+ ProcessMonitorProtocolWithTimeout, NetstringReceiver
+):
"""The protocol for receiving events from the puller worker."""
def __init__(self, deferred, listener, clock=None):
@@ -178,14 +174,16 @@ class PullerMonitorProtocol(ProcessMonitorProtocolWithTimeout,
If a clock is not passed in explicitly the reactor is used.
"""
ProcessMonitorProtocolWithTimeout.__init__(
- self, deferred, config.supermirror.worker_timeout, clock)
+ self, deferred, config.supermirror.worker_timeout, clock
+ )
self.reported_mirror_finished = False
self.listener = listener
self.wire_protocol = PullerWireProtocol(self)
self._stderr = io.BytesIO()
self._deferred.addCallbacks(
self.checkReportingFinishedAndNoStderr,
- self.ensureReportingFinished)
+ self.ensureReportingFinished,
+ )
def reportMirrorFinished(self, ignored):
self.reported_mirror_finished = True
@@ -196,14 +194,16 @@ class PullerMonitorProtocol(ProcessMonitorProtocolWithTimeout,
When the process exits cleanly, we expect it to have not printed
anything to stderr and to have reported success or failure. If it has
failed to do either of these things, we should fail noisily."""
- stderr = self._stderr.getvalue().decode('UTF-8', 'replace')
+ stderr = self._stderr.getvalue().decode("UTF-8", "replace")
if stderr:
fail = failure.Failure(UnexpectedStderr(stderr))
fail.error = stderr
return fail
if not self.reported_mirror_finished:
- raise AssertionError('Process exited successfully without '
- 'reporting success or failure?')
+ raise AssertionError(
+ "Process exited successfully without "
+ "reporting success or failure?"
+ )
return result
def ensureReportingFinished(self, reason):
@@ -215,7 +215,7 @@ class PullerMonitorProtocol(ProcessMonitorProtocolWithTimeout,
as a failure reason.
"""
if not self.reported_mirror_finished:
- stderr = self._stderr.getvalue().decode('UTF-8', 'replace')
+ stderr = self._stderr.getvalue().decode("UTF-8", "replace")
reason.error = stderr
if stderr:
errorline = stderr.splitlines()[-1]
@@ -227,10 +227,10 @@ class PullerMonitorProtocol(ProcessMonitorProtocolWithTimeout,
# failure that comes from mirrorFailed failing. In any case, we
# just pass along the failure.
report_failed_deferred = defer.maybeDeferred(
- self.listener.mirrorFailed, errorline, None)
+ self.listener.mirrorFailed, errorline, None
+ )
report_failed_deferred.addErrback(log.err)
- return report_failed_deferred.addCallback(
- lambda result: reason)
+ return report_failed_deferred.addCallback(lambda result: reason)
else:
return reason
@@ -250,22 +250,36 @@ class PullerMonitorProtocol(ProcessMonitorProtocolWithTimeout,
self.resetTimeout()
self.runNotification(self.listener.startMirroring)
- def do_branchChanged(self, stacked_on_url, revid_before, revid_after,
- control_string, branch_string, repository_string):
+ def do_branchChanged(
+ self,
+ stacked_on_url,
+ revid_before,
+ revid_after,
+ control_string,
+ branch_string,
+ repository_string,
+ ):
def branchChanged():
d = defer.maybeDeferred(
- self.listener.branchChanged, stacked_on_url, revid_before,
- revid_after, control_string, branch_string, repository_string)
+ self.listener.branchChanged,
+ stacked_on_url,
+ revid_before,
+ revid_after,
+ control_string,
+ branch_string,
+ repository_string,
+ )
d.addCallback(self.reportMirrorFinished)
return d
+
self.runNotification(branchChanged)
def do_mirrorFailed(self, reason, oops):
def mirrorFailed():
- d = defer.maybeDeferred(
- self.listener.mirrorFailed, reason, oops)
+ d = defer.maybeDeferred(self.listener.mirrorFailed, reason, oops)
d.addCallback(self.reportMirrorFinished)
return d
+
self.runNotification(mirrorFailed)
def do_progressMade(self):
@@ -283,11 +297,19 @@ class PullerMaster:
generated by that process.
"""
- path_to_script = os.path.join(config.root, 'scripts/mirror-branch.py')
+ path_to_script = os.path.join(config.root, "scripts/mirror-branch.py")
protocol_class = PullerMonitorProtocol
- def __init__(self, branch_id, source_url, unique_name, branch_type_name,
- default_stacked_on_url, logger, client):
+ def __init__(
+ self,
+ branch_id,
+ source_url,
+ unique_name,
+ branch_type_name,
+ default_stacked_on_url,
+ logger,
+ client,
+ ):
"""Construct a PullerMaster object.
:param branch_id: The database ID of the branch to be mirrored.
@@ -305,7 +327,7 @@ class PullerMaster:
"""
self.branch_id = branch_id
self.source_url = source_url.strip()
- self.destination_url = 'lp-internal:///%s' % (unique_name,)
+ self.destination_url = "lp-internal:///%s" % (unique_name,)
self.unique_name = unique_name
self.branch_type_name = branch_type_name
self.default_stacked_on_url = default_stacked_on_url
@@ -316,15 +338,20 @@ class PullerMaster:
"""Spawn a worker process to mirror a branch."""
deferred = defer.Deferred()
protocol = self.protocol_class(deferred, self)
- interpreter = '%s/bin/py' % config.root
+ interpreter = "%s/bin/py" % config.root
command = [
- interpreter, self.path_to_script, self.source_url,
- self.destination_url, str(self.branch_id), str(self.unique_name),
+ interpreter,
+ self.path_to_script,
+ self.source_url,
+ self.destination_url,
+ str(self.branch_id),
+ str(self.unique_name),
self.branch_type_name,
- self.default_stacked_on_url]
+ self.default_stacked_on_url,
+ ]
self.logger.debug("executing %s", command)
env = os.environ.copy()
- env['BRZ_EMAIL'] = get_lock_id_for_branch_id(self.branch_id)
+ env["BRZ_EMAIL"] = get_lock_id_for_branch_id(self.branch_id)
reactor.spawnProcess(protocol, interpreter, command, env=env)
return deferred
@@ -343,51 +370,79 @@ class PullerMaster:
def startMirroring(self):
self.logger.info(
- 'Worker started on branch %d: %s to %s', self.branch_id,
- self.source_url, self.destination_url)
+ "Worker started on branch %d: %s to %s",
+ self.branch_id,
+ self.source_url,
+ self.destination_url,
+ )
def mirrorFailed(self, reason, oops):
- self.logger.info('Recorded %s', oops)
- self.logger.info('Recorded failure: %s', str(reason))
+ self.logger.info("Recorded %s", oops)
+ self.logger.info("Recorded failure: %s", str(reason))
return self.codehosting_endpoint.callRemote(
- 'mirrorFailed', self.branch_id, reason)
-
- def branchChanged(self, stacked_on_url, revid_before, revid_after,
- control_string, branch_string, repository_string):
+ "mirrorFailed", self.branch_id, reason
+ )
+
+ def branchChanged(
+ self,
+ stacked_on_url,
+ revid_before,
+ revid_after,
+ control_string,
+ branch_string,
+ repository_string,
+ ):
if revid_before == revid_after:
- was_noop = 'noop'
+ was_noop = "noop"
else:
- was_noop = 'non-trivial'
+ was_noop = "non-trivial"
self.logger.info(
- 'Successfully mirrored %s branch %d %s to %s to from rev %s to %s'
- ' (%s)', self.branch_type_name, self.branch_id, self.source_url,
- self.destination_url, revid_before, revid_after, was_noop)
+ "Successfully mirrored %s branch %d %s to %s to from rev %s to %s"
+ " (%s)",
+ self.branch_type_name,
+ self.branch_id,
+ self.source_url,
+ self.destination_url,
+ revid_before,
+ revid_after,
+ was_noop,
+ )
return self.codehosting_endpoint.callRemote(
- 'branchChanged', LAUNCHPAD_SERVICES, self.branch_id,
- stacked_on_url, revid_after, control_string, branch_string,
- repository_string)
+ "branchChanged",
+ LAUNCHPAD_SERVICES,
+ self.branch_id,
+ stacked_on_url,
+ revid_after,
+ control_string,
+ branch_string,
+ repository_string,
+ )
def log(self, message):
- self.logger.info('From worker: %s', message)
+ self.logger.info("From worker: %s", message)
def unexpectedError(self, failure):
- request = errorlog.ScriptRequest([
- ('branch_id', self.branch_id),
- ('source', self.source_url),
- ('dest', self.destination_url),
- ('error-explanation', failure.getErrorMessage())])
+ request = errorlog.ScriptRequest(
+ [
+ ("branch_id", self.branch_id),
+ ("source", self.source_url),
+ ("dest", self.destination_url),
+ ("error-explanation", failure.getErrorMessage()),
+ ]
+ )
request.URL = get_canonical_url_for_branch_name(self.unique_name)
# If the sub-process exited abnormally, the stderr it produced is
# probably a much more interesting traceback than the one attached to
# the Failure we've been passed.
tb = None
if failure.check(error.ProcessTerminated, UnexpectedStderr):
- tb = getattr(failure, 'error', None)
+ tb = getattr(failure, "error", None)
if tb is None:
tb = failure.getTraceback()
errorlog.globalErrorUtility.raising(
- (failure.type, failure.value, tb), request)
- self.logger.info('Recorded %s', request.oopsid)
+ (failure.type, failure.value, tb), request
+ )
+ self.logger.info("Recorded %s", request.oopsid)
class JobScheduler:
@@ -402,8 +457,8 @@ class JobScheduler:
self.logger = logger
self.branch_type_names = branch_type_names
self.actualLock = None
- self.name = 'branch-puller'
- self.lockfilename = '/var/lock/launchpad-%s.lock' % self.name
+ self.name = "branch-puller"
+ self.lockfilename = "/var/lock/launchpad-%s.lock" % self.name
def _turnJobTupleIntoTask(self, job_tuple):
"""Turn the return value of `acquireBranchToPull` into a job.
@@ -415,33 +470,45 @@ class JobScheduler:
"""
if len(job_tuple) == 0:
return None
- (branch_id, pull_url, unique_name,
- default_stacked_on_url, branch_type_name) = job_tuple
+ (
+ branch_id,
+ pull_url,
+ unique_name,
+ default_stacked_on_url,
+ branch_type_name,
+ ) = job_tuple
master = PullerMaster(
- branch_id, pull_url, unique_name, branch_type_name,
- default_stacked_on_url, self.logger,
- self.codehosting_endpoint)
+ branch_id,
+ pull_url,
+ unique_name,
+ branch_type_name,
+ default_stacked_on_url,
+ self.logger,
+ self.codehosting_endpoint,
+ )
return master.run
def _poll(self):
deferred = self.codehosting_endpoint.callRemote(
- 'acquireBranchToPull', self.branch_type_names)
+ "acquireBranchToPull", self.branch_type_names
+ )
deferred.addCallback(self._turnJobTupleIntoTask)
return deferred
def run(self):
consumer = ParallelLimitedTaskConsumer(
- config.supermirror.maximum_workers, logger=self.logger)
+ config.supermirror.maximum_workers, logger=self.logger
+ )
self.consumer = consumer
source = PollingTaskSource(
- config.supermirror.polling_interval, self._poll,
- logger=self.logger)
+ config.supermirror.polling_interval, self._poll, logger=self.logger
+ )
deferred = consumer.consume(source)
deferred.addCallback(self._finishedRunning)
return deferred
def _finishedRunning(self, ignored):
- self.logger.info('Mirroring complete')
+ self.logger.info("Mirroring complete")
return ignored
def lock(self):
@@ -459,15 +526,18 @@ class JobScheduler:
started_tuple = tuple(date_started.utctimetuple())
completed_tuple = tuple(date_completed.utctimetuple())
return self.codehosting_endpoint.callRemote(
- 'recordSuccess', self.name, socket.gethostname(), started_tuple,
- completed_tuple)
+ "recordSuccess",
+ self.name,
+ socket.gethostname(),
+ started_tuple,
+ completed_tuple,
+ )
class LockError(Exception):
-
def __init__(self, lockfilename):
super().__init__()
self.lockfilename = lockfilename
def __str__(self):
- return 'Jobmanager unable to get master lock: %s' % self.lockfilename
+ return "Jobmanager unable to get master lock: %s" % self.lockfilename
diff --git a/lib/lp/codehosting/puller/tests/__init__.py b/lib/lp/codehosting/puller/tests/__init__.py
index 340a11c..011d8a6 100644
--- a/lib/lp/codehosting/puller/tests/__init__.py
+++ b/lib/lp/codehosting/puller/tests/__init__.py
@@ -12,7 +12,7 @@ from breezy.tests.http_server import (
HttpServer,
TestingHTTPServer,
TestingThreadingHTTPServer,
- )
+)
from breezy.url_policy_open import AcceptAnythingPolicy
from lp.codehosting.puller.worker import (
@@ -20,13 +20,14 @@ from lp.codehosting.puller.worker import (
BranchMirrorerPolicy,
PullerWorker,
PullerWorkerProtocol,
- )
+)
from lp.codehosting.tests.helpers import LoomTestMixin
from lp.testing import TestCaseWithFactory
-class AcceptAnythingBranchMirrorerPolicy(AcceptAnythingPolicy,
- BranchMirrorerPolicy):
+class AcceptAnythingBranchMirrorerPolicy(
+ AcceptAnythingPolicy, BranchMirrorerPolicy
+):
"""A branch mirror policy that supports mirrorring from anywhere."""
@@ -38,9 +39,15 @@ class PullerWorkerMixin:
method.
"""
- def makePullerWorker(self, src_dir=None, dest_dir=None, branch_type=None,
- default_stacked_on_url=None, protocol=None,
- policy=None):
+ def makePullerWorker(
+ self,
+ src_dir=None,
+ dest_dir=None,
+ branch_type=None,
+ default_stacked_on_url=None,
+ protocol=None,
+ policy=None,
+ ):
"""Anonymous creation method for PullerWorker."""
if protocol is None:
protocol = PullerWorkerProtocol(io.BytesIO())
@@ -51,10 +58,15 @@ class PullerWorkerMixin:
else:
opener = None
return PullerWorker(
- src_dir, dest_dir, branch_id=1, unique_name='foo/bar/baz',
+ src_dir,
+ dest_dir,
+ branch_id=1,
+ unique_name="foo/bar/baz",
branch_type=branch_type,
- default_stacked_on_url=default_stacked_on_url, protocol=protocol,
- branch_mirrorer=opener)
+ default_stacked_on_url=default_stacked_on_url,
+ protocol=protocol,
+ branch_mirrorer=opener,
+ )
# XXX MichaelHudson, bug=564375: With changes to the SocketServer module in
@@ -63,7 +75,7 @@ class PullerWorkerMixin:
# call in SocketServer.BaseServer.handle_request(). So what follows is
# slightly horrible code to use the version of handle_request from Python 2.5.
def fixed_handle_request(self):
- """Handle one request, possibly blocking. """
+ """Handle one request, possibly blocking."""
try:
request, client_address = self.get_request()
except OSError:
@@ -85,11 +97,12 @@ class FixedTTHS(TestingThreadingHTTPServer):
class FixedHttpServer(HttpServer):
- http_server_class = {'HTTP/1.0': FixedTHS, 'HTTP/1.1': FixedTTHS}
+ http_server_class = {"HTTP/1.0": FixedTHS, "HTTP/1.1": FixedTTHS}
-class PullerBranchTestCase(TestCaseWithTransport, TestCaseWithFactory,
- LoomTestMixin):
+class PullerBranchTestCase(
+ TestCaseWithTransport, TestCaseWithFactory, LoomTestMixin
+):
"""Some useful code for the more-integration-y puller tests."""
def setUp(self):
@@ -116,4 +129,4 @@ class PullerBranchTestCase(TestCaseWithTransport, TestCaseWithFactory,
# issues with the test runner.
self.addCleanup(http_server._server_thread.join)
self.addCleanup(http_server.stop_server)
- return http_server.get_url().rstrip('/')
+ return http_server.get_url().rstrip("/")
diff --git a/lib/lp/codehosting/puller/tests/test_acceptance.py b/lib/lp/codehosting/puller/tests/test_acceptance.py
index 66a3491..beae357 100644
--- a/lib/lp/codehosting/puller/tests/test_acceptance.py
+++ b/lib/lp/codehosting/puller/tests/test_acceptance.py
@@ -7,23 +7,18 @@ __all__ = []
import os
-from subprocess import (
- PIPE,
- Popen,
- )
+from subprocess import PIPE, Popen
+import six
+import transaction
from breezy import errors
from breezy.branch import Branch
from breezy.bzr.bzrdir import BzrDir
from breezy.upgrade import upgrade
-from breezy.urlutils import (
- join as urljoin,
- local_path_from_url,
- )
+from breezy.urlutils import join as urljoin
+from breezy.urlutils import local_path_from_url
from breezy.workingtree import WorkingTree
from fixtures import TempDir
-import six
-import transaction
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy
@@ -51,10 +46,12 @@ class TestBranchPuller(PullerBranchTestCase, LoomTestMixin):
def setUp(self):
PullerBranchTestCase.setUp(self)
self._puller_script = os.path.join(
- config.root, 'cronscripts', 'supermirror-pull.py')
+ config.root, "cronscripts", "supermirror-pull.py"
+ )
self.makeCleanDirectory(config.codehosting.mirrored_branches_root)
self.makeCleanDirectory(
- local_path_from_url(config.launchpad.bzr_imports_root_url))
+ local_path_from_url(config.launchpad.bzr_imports_root_url)
+ )
def assertMirrored(self, db_branch, source_branch):
"""Assert that 'db_branch' was mirrored successfully.
@@ -76,20 +73,24 @@ class TestBranchPuller(PullerBranchTestCase, LoomTestMixin):
transaction.commit()
self.assertEqual(None, db_branch.mirror_status_message)
self.assertEqual(
- db_branch.last_mirror_attempt, db_branch.last_mirrored)
+ db_branch.last_mirror_attempt, db_branch.last_mirrored
+ )
self.assertEqual(0, db_branch.mirror_failures)
mirrored_branch = self.openBranchAsUser(db_branch, accessing_user)
self.assertEqual(
six.ensure_text(source_branch.last_revision()),
- db_branch.last_mirrored_id)
+ db_branch.last_mirrored_id,
+ )
self.assertEqual(
- source_branch.last_revision(), mirrored_branch.last_revision())
+ source_branch.last_revision(), mirrored_branch.last_revision()
+ )
self.assertEqual(
- source_branch._format.__class__,
- mirrored_branch._format.__class__)
+ source_branch._format.__class__, mirrored_branch._format.__class__
+ )
self.assertEqual(
source_branch.repository._format.__class__,
- mirrored_branch.repository._format.__class__)
+ mirrored_branch.repository._format.__class__,
+ )
return mirrored_branch
def assertRanSuccessfully(self, command, retcode, stdout, stderr):
@@ -98,17 +99,20 @@ class TestBranchPuller(PullerBranchTestCase, LoomTestMixin):
'Successfully' means that its return code was 0 and it printed
nothing to stdout or stderr.
"""
- message = '\n'.join(
- ['Command: %r' % (command,),
- 'Return code: %s' % retcode,
- 'Output:',
- stdout,
- '',
- 'Error:',
- stderr])
+ message = "\n".join(
+ [
+ "Command: %r" % (command,),
+ "Return code: %s" % retcode,
+ "Output:",
+ stdout,
+ "",
+ "Error:",
+ stderr,
+ ]
+ )
self.assertEqual(0, retcode, message)
- self.assertEqualDiff('', stdout)
- self.assertEqualDiff('', stderr)
+ self.assertEqualDiff("", stdout)
+ self.assertEqualDiff("", stderr)
def runSubprocess(self, command):
"""Run the given command in a subprocess.
@@ -117,7 +121,8 @@ class TestBranchPuller(PullerBranchTestCase, LoomTestMixin):
:return: retcode, stdout, stderr
"""
process = Popen(
- command, stdout=PIPE, stderr=PIPE, universal_newlines=True)
+ command, stdout=PIPE, stderr=PIPE, universal_newlines=True
+ )
output, error = process.communicate()
return process.returncode, output, error
@@ -132,8 +137,12 @@ class TestBranchPuller(PullerBranchTestCase, LoomTestMixin):
"""
logfile = self.useFixture(TempDir()).join("supermirror_test.log")
command = [
- '%s/bin/py' % config.root, self._puller_script, '--log-file',
- logfile, '-q'] + list(args)
+ "%s/bin/py" % config.root,
+ self._puller_script,
+ "--log-file",
+ logfile,
+ "-q",
+ ] + list(args)
retcode, output, error = self.runSubprocess(command)
return command, retcode, output, error
@@ -157,24 +166,21 @@ class TestBranchPuller(PullerBranchTestCase, LoomTestMixin):
return lp_server
def openBranchAsUser(self, db_branch, user):
- """Open the branch as 'user' would see it as a client of codehosting.
- """
+ """Open the branch as 'user' would see it as a codehosting client."""
lp_server = self.getLPServerForUser(user)
return Branch.open(lp_server.get_url() + db_branch.unique_name)
def setUpMirroredBranch(self, db_branch, format=None):
- """Make a tree in the cwd and serve it over HTTP, returning the URL.
- """
- tree = self.make_branch_and_tree('.', format=format)
- tree.commit('rev1')
+ """Make a tree in the cwd and serve it over HTTP, returning the URL."""
+ tree = self.make_branch_and_tree(".", format=format)
+ tree.commit("rev1")
db_branch.url = self.serveOverHTTP()
db_branch.requestMirror()
return tree
def test_mirror_mirrored_branch(self):
# Run the puller with a mirrored branch ready to be pulled.
- db_branch = self.factory.makeAnyBranch(
- branch_type=BranchType.MIRRORED)
+ db_branch = self.factory.makeAnyBranch(branch_type=BranchType.MIRRORED)
tree = self.setUpMirroredBranch(db_branch)
transaction.commit()
command, retcode, output, error = self.runPuller()
@@ -183,8 +189,7 @@ class TestBranchPuller(PullerBranchTestCase, LoomTestMixin):
def test_mirror_mirrored_loom_branch(self):
# Run the puller with a mirrored loom branch ready to be pulled.
- db_branch = self.factory.makeAnyBranch(
- branch_type=BranchType.MIRRORED)
+ db_branch = self.factory.makeAnyBranch(branch_type=BranchType.MIRRORED)
tree = self.setUpMirroredBranch(db_branch)
self.loomify(tree.branch)
transaction.commit()
@@ -195,9 +200,8 @@ class TestBranchPuller(PullerBranchTestCase, LoomTestMixin):
def test_format_change(self):
# When the format of a mirrored branch changes, the puller remirrors
# the branch into the new format.
- db_branch = self.factory.makeAnyBranch(
- branch_type=BranchType.MIRRORED)
- tree = self.setUpMirroredBranch(db_branch, format='pack-0.92')
+ db_branch = self.factory.makeAnyBranch(branch_type=BranchType.MIRRORED)
+ tree = self.setUpMirroredBranch(db_branch, format="pack-0.92")
transaction.commit()
command, retcode, output, error = self.runPuller()
self.assertRanSuccessfully(command, retcode, output, error)
@@ -228,18 +232,21 @@ class TestBranchPuller(PullerBranchTestCase, LoomTestMixin):
else:
information_type = InformationType.PUBLIC
default_branch = self.factory.makeProductBranch(
- product=product, information_type=information_type)
+ product=product, information_type=information_type
+ )
transaction.commit()
# Create the underlying bzr branch.
lp_server = self.getLPServerForUser(default_branch.owner)
BzrDir.create_branch_convenience(
- lp_server.get_url() + default_branch.unique_name)
+ lp_server.get_url() + default_branch.unique_name
+ )
transaction.commit()
# Make it the default stacked-on branch for the product.
series = removeSecurityProxy(product.development_focus)
series.branch = default_branch
self.assertEqual(
- default_branch, IBranchTarget(product).default_stacked_on_branch)
+ default_branch, IBranchTarget(product).default_stacked_on_branch
+ )
return default_branch
def test_stack_mirrored_branch(self):
@@ -247,47 +254,53 @@ class TestBranchPuller(PullerBranchTestCase, LoomTestMixin):
# branch of the product if such a thing exists.
default_branch = self._makeDefaultStackedOnBranch()
db_branch = self.factory.makeProductBranch(
- branch_type=BranchType.MIRRORED, product=default_branch.product)
+ branch_type=BranchType.MIRRORED, product=default_branch.product
+ )
tree = self.setUpMirroredBranch(db_branch)
transaction.commit()
command, retcode, output, error = self.runPuller()
self.assertRanSuccessfully(command, retcode, output, error)
mirrored_branch = self.assertMirrored(
- db_branch, source_branch=tree.branch)
+ db_branch, source_branch=tree.branch
+ )
self.assertEqual(
- '/' + default_branch.unique_name,
- mirrored_branch.get_stacked_on_url())
+ "/" + default_branch.unique_name,
+ mirrored_branch.get_stacked_on_url(),
+ )
def test_stack_mirrored_branch_onto_private(self):
# If the default stacked-on branch is private then mirrored branches
# aren't stacked when they are mirrored.
default_branch = self._makeDefaultStackedOnBranch(private=True)
db_branch = self.factory.makeProductBranch(
- branch_type=BranchType.MIRRORED, product=default_branch.product)
+ branch_type=BranchType.MIRRORED, product=default_branch.product
+ )
tree = self.setUpMirroredBranch(db_branch)
transaction.commit()
command, retcode, output, error = self.runPuller()
self.assertRanSuccessfully(command, retcode, output, error)
mirrored_branch = self.assertMirrored(
- db_branch, source_branch=tree.branch)
+ db_branch, source_branch=tree.branch
+ )
self.assertRaises(
- errors.NotStacked, mirrored_branch.get_stacked_on_url)
+ errors.NotStacked, mirrored_branch.get_stacked_on_url
+ )
def test_mirror_imported_branch(self):
# Run the puller on a populated imported branch pull queue.
# Create the branch in the database.
- db_branch = self.factory.makeAnyBranch(
- branch_type=BranchType.IMPORTED)
+ db_branch = self.factory.makeAnyBranch(branch_type=BranchType.IMPORTED)
db_branch.requestMirror()
transaction.commit()
# Create the Bazaar branch in the expected location.
branch_url = urljoin(
- config.launchpad.bzr_imports_root_url, '%08x' % db_branch.id)
+ config.launchpad.bzr_imports_root_url, "%08x" % db_branch.id
+ )
branch = BzrDir.create_branch_convenience(branch_url)
tree = branch.controldir.open_workingtree()
- tree.commit('rev1')
+ tree.commit("rev1")
transaction.commit()
@@ -306,33 +319,35 @@ class TestBranchPuller(PullerBranchTestCase, LoomTestMixin):
# When run with --branch-type arguments, the puller only mirrors those
# branches of the specified types.
imported_branch = self.factory.makeAnyBranch(
- branch_type=BranchType.IMPORTED)
+ branch_type=BranchType.IMPORTED
+ )
imported_branch.requestMirror()
mirrored_branch = self.factory.makeAnyBranch(
- branch_type=BranchType.MIRRORED)
+ branch_type=BranchType.MIRRORED
+ )
mirrored_branch.requestMirror()
tree = self.setUpMirroredBranch(mirrored_branch)
transaction.commit()
command, retcode, output, error = self.runPuller(
- '--branch-type', 'MIRRORED')
+ "--branch-type", "MIRRORED"
+ )
self.assertRanSuccessfully(command, retcode, output, error)
self.assertMirrored(mirrored_branch, source_branch=tree.branch)
- self.assertIsNot(
- None, imported_branch.next_mirror_time)
+ self.assertIsNot(None, imported_branch.next_mirror_time)
def test_records_script_activity(self):
# A record gets created in the ScriptActivity table.
script_activity_set = getUtility(IScriptActivitySet)
self.assertIs(
- script_activity_set.getLastActivity("branch-puller"),
- None)
+ script_activity_set.getLastActivity("branch-puller"), None
+ )
self.runPuller()
transaction.abort()
self.assertIsNot(
- script_activity_set.getLastActivity("branch-puller"),
- None)
+ script_activity_set.getLastActivity("branch-puller"), None
+ )
# Possible tests to add:
# - branch already exists in new location
diff --git a/lib/lp/codehosting/puller/tests/test_errors.py b/lib/lp/codehosting/puller/tests/test_errors.py
index 4ec9a75..6ae649c 100644
--- a/lib/lp/codehosting/puller/tests/test_errors.py
+++ b/lib/lp/codehosting/puller/tests/test_errors.py
@@ -15,11 +15,8 @@ from breezy.errors import (
ParamikoNotPresent,
UnknownFormatError,
UnsupportedFormatError,
- )
-from breezy.url_policy_open import (
- BranchLoopError,
- BranchReferenceForbidden,
- )
+)
+from breezy.url_policy_open import BranchLoopError, BranchReferenceForbidden
from lazr.uri import InvalidURIError
from lp.code.enums import BranchType
@@ -30,7 +27,7 @@ from lp.codehosting.puller.worker import (
BranchMirrorer,
PullerWorker,
PullerWorkerProtocol,
- )
+)
from lp.testing import TestCase
@@ -64,31 +61,38 @@ class TestErrorCatching(TestCase):
def makeRaisingWorker(self, exception, branch_type=None):
opener = self.CustomErrorOpener(exception)
worker = PullerWorker(
- src='foo', dest='bar', branch_id=1,
- unique_name='owner/product/foo', branch_type=branch_type,
+ src="foo",
+ dest="bar",
+ branch_id=1,
+ unique_name="owner/product/foo",
+ branch_type=branch_type,
default_stacked_on_url=None,
- protocol=StubbedPullerWorkerProtocol(), branch_mirrorer=opener)
+ protocol=StubbedPullerWorkerProtocol(),
+ branch_mirrorer=opener,
+ )
return worker
- def getMirrorFailureForException(self, exc=None, worker=None,
- branch_type=None):
+ def getMirrorFailureForException(
+ self, exc=None, worker=None, branch_type=None
+ ):
"""Mirror the branch and return the error message.
Runs mirror, checks that we receive exactly one error, and returns the
str() of the error.
"""
if worker is None:
- worker = self.makeRaisingWorker(
- exc, branch_type=branch_type)
+ worker = self.makeRaisingWorker(exc, branch_type=branch_type)
worker.mirror()
self.assertEqual(
- 2, len(worker.protocol.calls),
+ 2,
+ len(worker.protocol.calls),
"Expected startMirroring and mirrorFailed, got: %r"
- % (worker.protocol.calls,))
+ % (worker.protocol.calls,),
+ )
startMirroring, mirrorFailed = worker.protocol.calls
- self.assertEqual(('startMirroring',), startMirroring)
- self.assertEqual('mirrorFailed', mirrorFailed[0])
- self.assertStartsWith(mirrorFailed[2], 'OOPS-')
+ self.assertEqual(("startMirroring",), startMirroring)
+ self.assertEqual("mirrorFailed", mirrorFailed[0])
+ self.assertStartsWith(mirrorFailed[2], "OOPS-")
worker.protocol.calls = []
return str(mirrorFailed[1])
@@ -98,7 +102,8 @@ class TestErrorCatching(TestCase):
# the user.
expected_msg = "Launchpad cannot mirror branches from SFTP "
msg = self.getMirrorFailureForException(
- BadUrlSsh('sftp://example.com/foo'))
+ BadUrlSsh("sftp://example.com/foo")
+ )
self.assertTrue(msg.startswith(expected_msg))
def testBadUrlLaunchpadCaught(self):
@@ -107,32 +112,34 @@ class TestErrorCatching(TestCase):
# is displayed to the user.
expected_msg = "Launchpad does not mirror branches from Launchpad."
msg = self.getMirrorFailureForException(
- BadUrlLaunchpad('http://launchpad.test/foo'))
+ BadUrlLaunchpad("http://launchpad.test/foo")
+ )
self.assertTrue(msg.startswith(expected_msg))
def testHostedBranchReference(self):
# A branch reference for a hosted branch must cause an error.
expected_msg = (
- "Branch references are not allowed for branches of type Hosted.")
+ "Branch references are not allowed for branches of type Hosted."
+ )
msg = self.getMirrorFailureForException(
- BranchReferenceForbidden(),
- branch_type=BranchType.HOSTED)
+ BranchReferenceForbidden(), branch_type=BranchType.HOSTED
+ )
self.assertEqual(expected_msg, msg)
def testLocalURL(self):
# A file:// branch reference for a mirror branch must cause an error.
- expected_msg = (
- "Launchpad does not mirror file:// URLs.")
+ expected_msg = "Launchpad does not mirror file:// URLs."
msg = self.getMirrorFailureForException(
- BadUrlScheme('file', 'file:///sauces/sikrit'))
+ BadUrlScheme("file", "file:///sauces/sikrit")
+ )
self.assertEqual(expected_msg, msg)
def testUnknownSchemeURL(self):
# A branch reference to a URL with unknown scheme must cause an error.
- expected_msg = (
- "Launchpad does not mirror random:// URLs.")
+ expected_msg = "Launchpad does not mirror random:// URLs."
msg = self.getMirrorFailureForException(
- BadUrlScheme('random', 'random:///sauces/sikrit'))
+ BadUrlScheme("random", "random:///sauces/sikrit")
+ )
self.assertEqual(expected_msg, msg)
def testHTTPError(self):
@@ -140,32 +147,38 @@ class TestErrorCatching(TestCase):
# error message.
msg = self.getMirrorFailureForException(
HTTPError(
- 'http://something', http.client.UNAUTHORIZED,
- 'Authorization Required', 'some headers',
- os.fdopen(tempfile.mkstemp()[0])))
+ "http://something",
+ http.client.UNAUTHORIZED,
+ "Authorization Required",
+ "some headers",
+ os.fdopen(tempfile.mkstemp()[0]),
+ )
+ )
self.assertEqual("Authentication required.", msg)
def testSocketErrorHandling(self):
# If a socket error occurs accessing the source branch, say so in the
# error message.
- msg = self.getMirrorFailureForException(socket.error('foo'))
- expected_msg = 'A socket error occurred:'
+ msg = self.getMirrorFailureForException(socket.error("foo"))
+ expected_msg = "A socket error occurred:"
self.assertTrue(msg.startswith(expected_msg))
def testUnsupportedFormatErrorHandling(self):
# If we don't support the format that the source branch is in, say so
# in the error message.
msg = self.getMirrorFailureForException(
- UnsupportedFormatError('Bazaar-NG branch, format 0.0.4'))
- expected_msg = 'Launchpad does not support branches '
+ UnsupportedFormatError("Bazaar-NG branch, format 0.0.4")
+ )
+ expected_msg = "Launchpad does not support branches "
self.assertTrue(msg.startswith(expected_msg))
def testUnknownFormatError(self):
# If the format is completely unknown to us, say so in the error
# message.
msg = self.getMirrorFailureForException(
- UnknownFormatError(format='Bad format'))
- expected_msg = 'Unknown branch format: '
+ UnknownFormatError(format="Bad format")
+ )
+ expected_msg = "Unknown branch format: "
self.assertTrue(msg.startswith(expected_msg))
def testParamikoNotPresent(self):
@@ -174,18 +187,22 @@ class TestErrorCatching(TestCase):
# XXX: JonathanLange 2008-06-25: It's bogus to assume that this is
# the error we'll get if we try to mirror over SSH.
msg = self.getMirrorFailureForException(
- ParamikoNotPresent('No module named paramiko'))
- expected_msg = ('Launchpad cannot mirror branches from SFTP and SSH '
- 'URLs. Please register a HTTP location for this '
- 'branch.')
+ ParamikoNotPresent("No module named paramiko")
+ )
+ expected_msg = (
+ "Launchpad cannot mirror branches from SFTP and SSH "
+ "URLs. Please register a HTTP location for this "
+ "branch."
+ )
self.assertEqual(expected_msg, msg)
def testNotBranchErrorMirrored(self):
# Log a user-friendly message when we are asked to mirror a
# non-branch.
msg = self.getMirrorFailureForException(
- NotBranchError('http://example.com/not-branch'),
- branch_type=BranchType.MIRRORED)
+ NotBranchError("http://example.com/not-branch"),
+ branch_type=BranchType.MIRRORED,
+ )
expected_msg = 'Not a branch: "http://example.com/not-branch".'
self.assertEqual(expected_msg, msg)
@@ -194,8 +211,9 @@ class TestErrorCatching(TestCase):
# lp-hosted:/// URL. Instead, the path is translated to a
# user-visible location.
worker = self.makeRaisingWorker(
- NotBranchError('lp-hosted:///~user/project/branch'),
- branch_type=BranchType.HOSTED)
+ NotBranchError("lp-hosted:///~user/project/branch"),
+ branch_type=BranchType.HOSTED,
+ )
msg = self.getMirrorFailureForException(worker=worker)
expected_msg = 'Not a branch: "lp:%s".' % (worker.unique_name,)
self.assertEqual(expected_msg, msg)
@@ -205,26 +223,28 @@ class TestErrorCatching(TestCase):
# the internal URL. Since there is no user-visible URL to blame, we do
# not display any URL at all.
msg = self.getMirrorFailureForException(
- NotBranchError('http://canonical.example.com/internal/url'),
- branch_type=BranchType.IMPORTED)
- expected_msg = 'Not a branch.'
+ NotBranchError("http://canonical.example.com/internal/url"),
+ branch_type=BranchType.IMPORTED,
+ )
+ expected_msg = "Not a branch."
self.assertEqual(expected_msg, msg)
def testBranchLoopError(self):
# BranchLoopError exceptions are caught.
- msg = self.getMirrorFailureForException(
- BranchLoopError())
+ msg = self.getMirrorFailureForException(BranchLoopError())
self.assertEqual("Circular branch reference.", msg)
def testInvalidURIError(self):
# When a branch reference contains an invalid URL, an InvalidURIError
# is raised. The worker catches this and reports it to the scheduler.
msg = self.getMirrorFailureForException(
- InvalidURIError("This is not a URL"))
+ InvalidURIError("This is not a URL")
+ )
self.assertEqual(msg, "This is not a URL")
def testBzrErrorHandling(self):
msg = self.getMirrorFailureForException(
- BzrError('A generic bzr error'))
- expected_msg = 'A generic bzr error'
+ BzrError("A generic bzr error")
+ )
+ expected_msg = "A generic bzr error"
self.assertEqual(msg, expected_msg)
diff --git a/lib/lp/codehosting/puller/tests/test_scheduler.py b/lib/lp/codehosting/puller/tests/test_scheduler.py
index 7be95b6..a221a90 100644
--- a/lib/lp/codehosting/puller/tests/test_scheduler.py
+++ b/lib/lp/codehosting/puller/tests/test_scheduler.py
@@ -5,54 +5,43 @@ import logging
import os
import textwrap
+import six
from breezy.branch import Branch
from breezy.bzr.bzrdir import BzrDir
from breezy.controldir import format_registry
from breezy.urlutils import join as urljoin
-import six
from testtools.twistedsupport import (
- assert_fails_with,
AsynchronousDeferredRunTest,
+ assert_fails_with,
flush_logged_errors,
- )
-from twisted.internet import (
- defer,
- error,
- reactor,
- )
+)
+from twisted.internet import defer, error, reactor
from twisted.protocols.basic import NetstringParseError
from zope.component import getUtility
from lp.code.enums import BranchType
from lp.code.interfaces.branchlookup import IBranchLookup
from lp.code.interfaces.codehosting import LAUNCHPAD_SERVICES
-from lp.codehosting.puller import (
- get_lock_id_for_branch_id,
- scheduler,
- )
+from lp.codehosting.puller import get_lock_id_for_branch_id, scheduler
from lp.codehosting.puller.tests import PullerBranchTestCase
from lp.codehosting.puller.worker import get_canonical_url_for_branch_name
from lp.services.config import config
from lp.services.twistedsupport.tests.test_processmonitor import (
- makeFailure,
ProcessTestsMixin,
+ makeFailure,
suppress_stderr,
- )
+)
from lp.services.webapp import errorlog
-from lp.testing import (
- reset_logging,
- TestCase,
- )
+from lp.testing import TestCase, reset_logging
from lp.testing.layers import ZopelessAppServerLayer
class FakeCodehostingEndpointProxy:
-
def __init__(self):
self.calls = []
def callRemote(self, method_name, *args):
- method = getattr(self, '_remote_%s' % method_name, self._default)
+ method = getattr(self, "_remote_%s" % method_name, self._default)
deferred = method(*args)
def append_to_log(pass_through):
@@ -70,10 +59,9 @@ class FakeCodehostingEndpointProxy:
class TestJobScheduler(TestCase):
-
def setUp(self):
super().setUp()
- self.masterlock = 'master.lock'
+ self.masterlock = "master.lock"
def tearDown(self):
reset_logging()
@@ -83,8 +71,10 @@ class TestJobScheduler(TestCase):
def makeJobScheduler(self, branch_type_names=()):
return scheduler.JobScheduler(
- FakeCodehostingEndpointProxy(), logging.getLogger(),
- branch_type_names)
+ FakeCodehostingEndpointProxy(),
+ logging.getLogger(),
+ branch_type_names,
+ )
def testManagerCreatesLocks(self):
manager = self.makeJobScheduler()
@@ -104,11 +94,12 @@ class TestJobScheduler(TestCase):
manager.unlock()
def test_run_calls_acquireBranchToPull(self):
- manager = self.makeJobScheduler(('MIRRORED',))
+ manager = self.makeJobScheduler(("MIRRORED",))
manager.run()
self.assertEqual(
- [('acquireBranchToPull', ('MIRRORED',))],
- manager.codehosting_endpoint.calls)
+ [("acquireBranchToPull", ("MIRRORED",))],
+ manager.codehosting_endpoint.calls,
+ )
class TestPullerWireProtocol(TestCase):
@@ -125,13 +116,12 @@ class TestPullerWireProtocol(TestCase):
pass
class StubPullerProtocol:
-
def __init__(self):
self.calls = []
self.failure = None
def do_method(self, *args):
- self.calls.append(('method',) + args)
+ self.calls.append(("method",) + args)
def do_raise(self):
return 1 / 0
@@ -147,13 +137,13 @@ class TestPullerWireProtocol(TestCase):
def convertToNetstring(self, string):
"""Encode `string` as a netstring."""
- return b'%d:%s,' % (len(string), string)
+ return b"%d:%s," % (len(string), string)
def sendToProtocol(self, *arguments):
"""Send each element of `arguments` to the protocol as a netstring."""
for argument in arguments:
if not isinstance(argument, bytes):
- argument = str(argument).encode('UTF-8')
+ argument = str(argument).encode("UTF-8")
self.protocol.dataReceived(self.convertToNetstring(argument))
def assertUnexpectedErrorCalled(self, exception_type):
@@ -171,56 +161,56 @@ class TestPullerWireProtocol(TestCase):
def test_methodDispatch(self):
# The wire protocol object calls the named method on the
# puller_protocol.
- self.sendToProtocol('method')
+ self.sendToProtocol("method")
# The protocol is now in state [1]
self.assertEqual(self.puller_protocol.calls, [])
self.sendToProtocol(0)
# As we say we are not passing any arguments, the protocol executes
# the command straight away.
- self.assertEqual(self.puller_protocol.calls, [('method',)])
+ self.assertEqual(self.puller_protocol.calls, [("method",)])
self.assertProtocolInState0()
def test_methodDispatchWithArguments(self):
# The wire protocol waits for the given number of arguments before
# calling the method.
- self.sendToProtocol('method', 1)
+ self.sendToProtocol("method", 1)
# The protocol is now in state [2]
self.assertEqual(self.puller_protocol.calls, [])
- self.sendToProtocol('arg')
+ self.sendToProtocol("arg")
# We've now passed in the declared number of arguments so the protocol
# executes the command.
- self.assertEqual(self.puller_protocol.calls, [('method', 'arg')])
+ self.assertEqual(self.puller_protocol.calls, [("method", "arg")])
self.assertProtocolInState0()
def test_commandRaisesException(self):
# If a command raises an exception, the puller_protocol's
# unexpectedError method is called with the corresponding failure.
- self.sendToProtocol('raise', 0)
+ self.sendToProtocol("raise", 0)
self.assertUnexpectedErrorCalled(ZeroDivisionError)
self.assertProtocolInState0()
def test_nonIntegerArgcount(self):
# Passing a non-integer where there should be an argument count is an
# error.
- self.sendToProtocol('method', 'not-an-int')
+ self.sendToProtocol("method", "not-an-int")
self.assertUnexpectedErrorCalled(ValueError)
def test_unrecognizedMessage(self):
# The protocol notifies the listener as soon as it receives an
# unrecognized command name.
- self.sendToProtocol('foo')
+ self.sendToProtocol("foo")
self.assertUnexpectedErrorCalled(scheduler.BadMessage)
def test_nonUTF8Message(self):
# The protocol notifies the listener if it receives a line not
# encoded in UTF-8.
- self.sendToProtocol(b'\x80')
+ self.sendToProtocol(b"\x80")
self.assertUnexpectedErrorCalled(scheduler.BadMessage)
def test_invalidNetstring(self):
# The protocol terminates the session if it receives an unparsable
# netstring.
- self.protocol.dataReceived(b'foo')
+ self.protocol.dataReceived(b"foo")
self.assertUnexpectedErrorCalled(NetstringParseError)
@@ -236,23 +226,39 @@ class TestPullerMonitorProtocol(ProcessTestsMixin, TestCase):
self.calls = []
def startMirroring(self):
- self.calls.append('startMirroring')
-
- def branchChanged(self, stacked_on_url, revid_before, revid_after,
- control_string, branch_string, repository_string):
+ self.calls.append("startMirroring")
+
+ def branchChanged(
+ self,
+ stacked_on_url,
+ revid_before,
+ revid_after,
+ control_string,
+ branch_string,
+ repository_string,
+ ):
self.calls.append(
- ('branchChanged', stacked_on_url, revid_before, revid_after,
- control_string, branch_string, repository_string))
+ (
+ "branchChanged",
+ stacked_on_url,
+ revid_before,
+ revid_after,
+ control_string,
+ branch_string,
+ repository_string,
+ )
+ )
def mirrorFailed(self, message, oops):
- self.calls.append(('mirrorFailed', message, oops))
+ self.calls.append(("mirrorFailed", message, oops))
def log(self, message):
- self.calls.append(('log', message))
+ self.calls.append(("log", message))
def makeProtocol(self):
return scheduler.PullerMonitorProtocol(
- self.termination_deferred, self.listener, self.clock)
+ self.termination_deferred, self.listener, self.clock
+ )
def setUp(self):
self.listener = self.StubPullerListener()
@@ -265,32 +271,33 @@ class TestPullerMonitorProtocol(ProcessTestsMixin, TestCase):
def test_startMirroring(self):
"""Receiving a startMirroring message notifies the listener."""
self.protocol.do_startMirroring()
- self.assertEqual(['startMirroring'], self.listener.calls)
+ self.assertEqual(["startMirroring"], self.listener.calls)
self.assertProtocolSuccess()
def test_branchChanged(self):
"""Receiving a branchChanged message notifies the listener."""
self.protocol.do_startMirroring()
self.listener.calls = []
- self.protocol.do_branchChanged('', 'rev1', 'rev2', '', '', '')
+ self.protocol.do_branchChanged("", "rev1", "rev2", "", "", "")
self.assertEqual(
- [('branchChanged', '', 'rev1', 'rev2', '', '', '')],
- self.listener.calls)
+ [("branchChanged", "", "rev1", "rev2", "", "", "")],
+ self.listener.calls,
+ )
self.assertProtocolSuccess()
def test_mirrorFailed(self):
"""Receiving a mirrorFailed message notifies the listener."""
self.protocol.do_startMirroring()
self.listener.calls = []
- self.protocol.do_mirrorFailed('Error Message', 'OOPS')
+ self.protocol.do_mirrorFailed("Error Message", "OOPS")
self.assertEqual(
- [('mirrorFailed', 'Error Message', 'OOPS')], self.listener.calls)
+ [("mirrorFailed", "Error Message", "OOPS")], self.listener.calls
+ )
self.assertProtocolSuccess()
def test_log(self):
- self.protocol.do_log('message')
- self.assertEqual(
- [('log', 'message')], self.listener.calls)
+ self.protocol.do_log("message")
+ self.assertEqual([("log", "message")], self.listener.calls)
def assertMessageResetsTimeout(self, callable, *args):
"""Assert that sending the message resets the protocol timeout."""
@@ -322,10 +329,9 @@ class TestPullerMonitorProtocol(ProcessTestsMixin, TestCase):
"""
self.protocol.do_startMirroring()
self.clock.advance(config.supermirror.worker_timeout - 1)
- self.protocol.do_branchChanged('', '', '', '', '', '')
+ self.protocol.do_branchChanged("", "", "", "", "", "")
self.clock.advance(2)
- return assert_fails_with(
- self.termination_deferred, error.TimeoutError)
+ return assert_fails_with(self.termination_deferred, error.TimeoutError)
def test_mirrorFailedDoesNotResetTimeout(self):
"""Receiving 'mirrorFailed' doesn't reset the timeout.
@@ -335,31 +341,33 @@ class TestPullerMonitorProtocol(ProcessTestsMixin, TestCase):
"""
self.protocol.do_startMirroring()
self.clock.advance(config.supermirror.worker_timeout - 1)
- self.protocol.do_mirrorFailed('error message', 'OOPS')
+ self.protocol.do_mirrorFailed("error message", "OOPS")
self.clock.advance(2)
- return assert_fails_with(
- self.termination_deferred, error.TimeoutError)
+ return assert_fails_with(self.termination_deferred, error.TimeoutError)
def test_terminatesWithError(self):
"""When the child process terminates with an unexpected error, raise
an error that includes the contents of stderr and the exit condition.
"""
+
def check_failure(failure):
- self.assertEqual('error message', failure.error)
+ self.assertEqual("error message", failure.error)
return failure
self.termination_deferred.addErrback(check_failure)
- self.protocol.errReceived(b'error message')
+ self.protocol.errReceived(b"error message")
self.simulateProcessExit(clean=False)
return assert_fails_with(
- self.termination_deferred, error.ProcessTerminated)
+ self.termination_deferred, error.ProcessTerminated
+ )
def test_stderrFailsProcess(self):
"""If the process prints to stderr, then the Deferred fires an
errback, even if it terminated successfully.
"""
+
def fail_if_succeeded(ignored):
self.fail("stderr did not cause failure")
@@ -367,11 +375,11 @@ class TestPullerMonitorProtocol(ProcessTestsMixin, TestCase):
def check_failure(failure):
failure.trap(Exception)
- self.assertEqual('error message', failure.error)
+ self.assertEqual("error message", failure.error)
self.termination_deferred.addErrback(check_failure)
- self.protocol.errReceived(b'error message')
+ self.protocol.errReceived(b"error message")
self.simulateProcessExit()
return self.termination_deferred
@@ -383,19 +391,22 @@ class TestPullerMonitorProtocol(ProcessTestsMixin, TestCase):
self.protocol.do_startMirroring()
self.simulateProcessExit(clean=False)
return assert_fails_with(
- self.termination_deferred, error.ProcessTerminated)
+ self.termination_deferred, error.ProcessTerminated
+ )
def test_errorBeforeStatusReport(self):
# If the subprocess exits before reporting success or failure, the
# puller master should record failure.
self.protocol.do_startMirroring()
- self.protocol.errReceived(b'traceback')
+ self.protocol.errReceived(b"traceback")
self.simulateProcessExit(clean=False)
self.assertEqual(
self.listener.calls,
- ['startMirroring', ('mirrorFailed', 'traceback', None)])
+ ["startMirroring", ("mirrorFailed", "traceback", None)],
+ )
return assert_fails_with(
- self.termination_deferred, error.ProcessTerminated)
+ self.termination_deferred, error.ProcessTerminated
+ )
@suppress_stderr
def test_errorBeforeStatusReportAndFailingMirrorFailed(self):
@@ -414,12 +425,14 @@ class TestPullerMonitorProtocol(ProcessTestsMixin, TestCase):
self.protocol.listener = FailingMirrorFailedStubPullerListener()
self.listener = self.protocol.listener
- self.protocol.errReceived(b'traceback')
+ self.protocol.errReceived(b"traceback")
self.simulateProcessExit(clean=False)
self.assertEqual(
- flush_logged_errors(RuntimeError), [runtime_error_failure])
+ flush_logged_errors(RuntimeError), [runtime_error_failure]
+ )
return assert_fails_with(
- self.termination_deferred, error.ProcessTerminated)
+ self.termination_deferred, error.ProcessTerminated
+ )
class TestPullerMaster(TestCase):
@@ -431,22 +444,33 @@ class TestPullerMaster(TestCase):
self.status_client = FakeCodehostingEndpointProxy()
self.arbitrary_branch_id = 1
self.eventHandler = scheduler.PullerMaster(
- self.arbitrary_branch_id, 'arbitrary-source', 'arbitrary-dest',
- BranchType.HOSTED, None, logging.getLogger(), self.status_client)
+ self.arbitrary_branch_id,
+ "arbitrary-source",
+ "arbitrary-dest",
+ BranchType.HOSTED,
+ None,
+ logging.getLogger(),
+ self.status_client,
+ )
def test_unexpectedError(self):
"""The puller master logs an OOPS when it receives an unexpected
error.
"""
- fail = makeFailure(RuntimeError, 'error message')
+ fail = makeFailure(RuntimeError, "error message")
self.eventHandler.unexpectedError(fail)
oops = self.oopses[-1]
- self.assertEqual(fail.getTraceback(), oops['tb_text'])
- self.assertEqual('error message', oops['value'])
- self.assertEqual('RuntimeError', oops['type'])
+ self.assertEqual(fail.getTraceback(), oops["tb_text"])
+ self.assertEqual("error message", oops["value"])
+ self.assertEqual("RuntimeError", oops["type"])
self.assertEqual(
- six.ensure_text(get_canonical_url_for_branch_name(
- self.eventHandler.unique_name)), oops['url'])
+ six.ensure_text(
+ get_canonical_url_for_branch_name(
+ self.eventHandler.unique_name
+ )
+ ),
+ oops["url"],
+ )
def test_startMirroring(self):
# startMirroring does not send a message to the endpoint.
@@ -458,42 +482,73 @@ class TestPullerMaster(TestCase):
return deferred.addCallback(checkMirrorStarted)
def test_branchChanged(self):
- (stacked_on_url, revid_before, revid_after, control_string,
- branch_string, repository_string
- ) = list(self.factory.getUniqueString() for i in range(6))
+ (
+ stacked_on_url,
+ revid_before,
+ revid_after,
+ control_string,
+ branch_string,
+ repository_string,
+ ) = list(self.factory.getUniqueString() for i in range(6))
deferred = defer.maybeDeferred(self.eventHandler.startMirroring)
def branchChanged(*ignored):
self.status_client.calls = []
return self.eventHandler.branchChanged(
- stacked_on_url, revid_before, revid_after, control_string,
- branch_string, repository_string)
+ stacked_on_url,
+ revid_before,
+ revid_after,
+ control_string,
+ branch_string,
+ repository_string,
+ )
+
deferred.addCallback(branchChanged)
def checkMirrorCompleted(ignored):
self.assertEqual(
- [('branchChanged', LAUNCHPAD_SERVICES,
- self.arbitrary_branch_id, stacked_on_url, revid_after,
- control_string, branch_string, repository_string)],
- self.status_client.calls)
+ [
+ (
+ "branchChanged",
+ LAUNCHPAD_SERVICES,
+ self.arbitrary_branch_id,
+ stacked_on_url,
+ revid_after,
+ control_string,
+ branch_string,
+ repository_string,
+ )
+ ],
+ self.status_client.calls,
+ )
+
return deferred.addCallback(checkMirrorCompleted)
def test_mirrorFailed(self):
- arbitrary_error_message = 'failed'
+ arbitrary_error_message = "failed"
deferred = defer.maybeDeferred(self.eventHandler.startMirroring)
def mirrorFailed(ignored):
self.status_client.calls = []
return self.eventHandler.mirrorFailed(
- arbitrary_error_message, 'oops')
+ arbitrary_error_message, "oops"
+ )
+
deferred.addCallback(mirrorFailed)
def checkMirrorFailed(ignored):
self.assertEqual(
- [('mirrorFailed', self.arbitrary_branch_id,
- arbitrary_error_message)],
- self.status_client.calls)
+ [
+ (
+ "mirrorFailed",
+ self.arbitrary_branch_id,
+ arbitrary_error_message,
+ )
+ ],
+ self.status_client.calls,
+ )
+
return deferred.addCallback(checkMirrorFailed)
@@ -503,8 +558,8 @@ class TestPullerMasterSpawning(TestCase):
def setUp(self):
super().setUp()
- self.eventHandler = self.makePullerMaster('HOSTED')
- self.patch(reactor, 'spawnProcess', self.spawnProcess)
+ self.eventHandler = self.makePullerMaster("HOSTED")
+ self.patch(reactor, "spawnProcess", self.spawnProcess)
self.commands_spawned = []
def makePullerMaster(self, branch_type_name, default_stacked_on_url=None):
@@ -517,7 +572,8 @@ class TestPullerMasterSpawning(TestCase):
branch_type_name=branch_type_name,
default_stacked_on_url=default_stacked_on_url,
logger=logging.getLogger(),
- client=FakeCodehostingEndpointProxy())
+ client=FakeCodehostingEndpointProxy(),
+ )
def spawnProcess(self, protocol, executable, arguments, env):
self.commands_spawned.append(arguments)
@@ -526,18 +582,20 @@ class TestPullerMasterSpawning(TestCase):
# If a default_stacked_on_url is passed into the master then that
# URL is sent to the command line.
url = self.factory.getUniqueURL()
- master = self.makePullerMaster('MIRRORED', default_stacked_on_url=url)
+ master = self.makePullerMaster("MIRRORED", default_stacked_on_url=url)
master.run()
self.assertEqual(
- [url], [arguments[-1] for arguments in self.commands_spawned])
+ [url], [arguments[-1] for arguments in self.commands_spawned]
+ )
def test_default_stacked_on_url_not_set(self):
# If a default_stacked_on_url is passed into the master as '' then
# the empty string is passed as an argument to the script.
- master = self.makePullerMaster('MIRRORED', default_stacked_on_url='')
+ master = self.makePullerMaster("MIRRORED", default_stacked_on_url="")
master.run()
self.assertEqual(
- [''], [arguments[-1] for arguments in self.commands_spawned])
+ [""], [arguments[-1] for arguments in self.commands_spawned]
+ )
# The common parts of all the worker scripts. See
@@ -566,11 +624,12 @@ class TestPullerMasterIntegration(PullerBranchTestCase):
def setUp(self):
super().setUp()
self.makeCleanDirectory(config.codehosting.mirrored_branches_root)
- self.bzr_tree = self.make_branch_and_tree('src-branch')
- url = urljoin(self.serveOverHTTP(), 'src-branch')
- self.bzr_tree.commit('rev1')
+ self.bzr_tree = self.make_branch_and_tree("src-branch")
+ url = urljoin(self.serveOverHTTP(), "src-branch")
+ self.bzr_tree.commit("rev1")
branch_id = self.factory.makeAnyBranch(
- branch_type=BranchType.MIRRORED, url=url).id
+ branch_type=BranchType.MIRRORED, url=url
+ ).id
self.layer.txn.commit()
self.db_branch = getUtility(IBranchLookup).get(branch_id)
self.client = FakeCodehostingEndpointProxy()
@@ -579,12 +638,13 @@ class TestPullerMasterIntegration(PullerBranchTestCase):
# XXX: JonathanLange 2007-10-17: It would be nice if we didn't have to
# do this manually, and instead the test automatically gave us the
# full error.
- error = getattr(failure, 'error', 'No stderr stored.')
+ error = getattr(failure, "error", "No stderr stored.")
print(error)
return failure
- def makePullerMaster(self, cls=scheduler.PullerMaster, script_text=None,
- use_header=True):
+ def makePullerMaster(
+ self, cls=scheduler.PullerMaster, script_text=None, use_header=True
+ ):
"""Construct a PullerMaster suited to the test environment.
:param cls: The class of the PullerMaster to construct, defaulting to
@@ -597,47 +657,66 @@ class TestPullerMasterIntegration(PullerBranchTestCase):
instance of PullerWorkerProtocol.
"""
puller_master = cls(
- self.db_branch.id, str(self.db_branch.url),
- self.db_branch.unique_name[1:], self.db_branch.branch_type.name,
- '', logging.getLogger(), self.client)
- puller_master.destination_url = os.path.abspath('dest-branch')
+ self.db_branch.id,
+ str(self.db_branch.url),
+ self.db_branch.unique_name[1:],
+ self.db_branch.branch_type.name,
+ "",
+ logging.getLogger(),
+ self.client,
+ )
+ puller_master.destination_url = os.path.abspath("dest-branch")
if script_text is not None:
- script = open('script.py', 'w')
+ script = open("script.py", "w")
if use_header:
script.write(script_header)
script.write(textwrap.dedent(script_text))
script.close()
- puller_master.path_to_script = os.path.abspath('script.py')
+ puller_master.path_to_script = os.path.abspath("script.py")
return puller_master
def doDefaultMirroring(self):
- """Run the subprocess to do the mirroring and check that it succeeded.
- """
+ """Run the subprocess to do the mirroring and check that it worked."""
revision_id = self.bzr_tree.branch.last_revision()
puller_master = self.makePullerMaster()
deferred = puller_master.mirror()
def check_authserver_called(ignored):
- default_format = format_registry.get('default')()
+ default_format = format_registry.get("default")()
control_string = six.ensure_str(default_format.get_format_string())
branch_string = six.ensure_str(
- default_format.get_branch_format().get_format_string())
+ default_format.get_branch_format().get_format_string()
+ )
repository_string = six.ensure_str(
- default_format.repository_format.get_format_string())
+ default_format.repository_format.get_format_string()
+ )
self.assertEqual(
- [('branchChanged', LAUNCHPAD_SERVICES, self.db_branch.id, '',
- six.ensure_str(revision_id), control_string, branch_string,
- repository_string)],
- self.client.calls)
+ [
+ (
+ "branchChanged",
+ LAUNCHPAD_SERVICES,
+ self.db_branch.id,
+ "",
+ six.ensure_str(revision_id),
+ control_string,
+ branch_string,
+ repository_string,
+ )
+ ],
+ self.client.calls,
+ )
return ignored
+
deferred.addCallback(check_authserver_called)
def check_branch_mirrored(ignored):
self.assertEqual(
revision_id,
- Branch.open(puller_master.destination_url).last_revision())
+ Branch.open(puller_master.destination_url).last_revision(),
+ )
return ignored
+
deferred.addCallback(check_branch_mirrored)
return deferred
@@ -666,13 +745,16 @@ class TestPullerMasterIntegration(PullerBranchTestCase):
self.addCleanup(restore_oops)
- expected_output = 'foo\nbar'
+ expected_output = "foo\nbar"
stderr_script = """
import sys
sys.stderr.write(%r)
- """ % (expected_output,)
+ """ % (
+ expected_output,
+ )
master = self.makePullerMaster(
- script_text=stderr_script, use_header=False)
+ script_text=stderr_script, use_header=False
+ )
deferred = master.run()
def check_oops_report(ignored):
@@ -681,16 +763,16 @@ class TestPullerMasterIntegration(PullerBranchTestCase):
self.assertEqual(scheduler.UnexpectedStderr, oops[0])
last_line = expected_output.splitlines()[-1]
self.assertEqual(
- 'Unexpected standard error from subprocess: %s' % last_line,
- str(oops[1]))
+ "Unexpected standard error from subprocess: %s" % last_line,
+ str(oops[1]),
+ )
self.assertEqual(expected_output, oops[2])
return deferred.addCallback(check_oops_report)
def test_lock_with_magic_id(self):
# When the subprocess locks a branch, it is locked with the right ID.
- class PullerMonitorProtocolWithLockID(
- scheduler.PullerMonitorProtocol):
+ class PullerMonitorProtocolWithLockID(scheduler.PullerMonitorProtocol):
"""Subclass of PullerMonitorProtocol with a lock_id method.
This protocol defines a method that records on the listener the
@@ -702,8 +784,7 @@ class TestPullerMasterIntegration(PullerBranchTestCase):
self.listener.lock_ids.append(id)
class PullerMasterWithLockID(scheduler.PullerMaster):
- """A subclass of PullerMaster that allows recording of lock ids.
- """
+ """A subclass of PullerMaster that allows recording of lock ids."""
protocol_class = PullerMonitorProtocolWithLockID
@@ -717,7 +798,8 @@ class TestPullerMasterIntegration(PullerBranchTestCase):
"""
puller_master = self.makePullerMaster(
- PullerMasterWithLockID, check_lock_id_script)
+ PullerMasterWithLockID, check_lock_id_script
+ )
puller_master.lock_ids = []
# We need to create a branch at the destination_url, so that the
@@ -729,7 +811,8 @@ class TestPullerMasterIntegration(PullerBranchTestCase):
def checkID(ignored):
self.assertEqual(
puller_master.lock_ids,
- [get_lock_id_for_branch_id(puller_master.branch_id)])
+ [get_lock_id_for_branch_id(puller_master.branch_id)],
+ )
return deferred.addCallback(checkID)
@@ -808,6 +891,7 @@ class TestPullerMasterIntegration(PullerBranchTestCase):
# branch_locked_deferred.
def wrapper(ignore):
return func()
+
branch_locked_deferred.addCallback(wrapper)
# When it is done, successfully or not, we store the result on the
@@ -816,23 +900,24 @@ class TestPullerMasterIntegration(PullerBranchTestCase):
locking_puller_master.seen_final_result = True
locking_puller_master.final_result = result
try:
- locking_puller_master.protocol.transport.signalProcess('INT')
+ locking_puller_master.protocol.transport.signalProcess("INT")
except error.ProcessExitedAlready:
# We can only get here if the locking subprocess somehow
# manages to crash between locking the branch and being killed
# by us. In that case, locking_process_errback below will
# cause the test to fail, so just do nothing here.
pass
+
branch_locked_deferred.addBoth(cleanup)
locking_puller_master = self.makePullerMaster(
- LockingPullerMaster, lock_and_wait_script)
+ LockingPullerMaster, lock_and_wait_script
+ )
locking_puller_master.branch_id += lock_id_delta
# We need to create a branch at the destination_url, so that the
# subprocess can actually create a lock.
- BzrDir.create_branch_convenience(
- locking_puller_master.destination_url)
+ BzrDir.create_branch_convenience(locking_puller_master.destination_url)
# Because when the deferred returned by 'func' is done we kill the
# locking subprocess, we know that when the subprocess is done, the
@@ -856,7 +941,8 @@ class TestPullerMasterIntegration(PullerBranchTestCase):
return locking_puller_master.final_result
return locking_process_deferred.addCallbacks(
- locking_process_callback, locking_process_errback)
+ locking_process_callback, locking_process_errback
+ )
def test_mirror_with_destination_self_locked(self):
# If the destination branch was locked by another worker, the worker
@@ -885,23 +971,24 @@ class TestPullerMasterIntegration(PullerBranchTestCase):
def mirror_fails_to_unlock():
puller_master = self.makePullerMaster(
- script_text=lower_timeout_script)
+ script_text=lower_timeout_script
+ )
deferred = puller_master.mirror()
def check_mirror_failed(ignored):
self.assertEqual(len(self.client.calls), 1)
mirror_failed_call = self.client.calls[0]
self.assertEqual(
- mirror_failed_call[:2],
- ('mirrorFailed', self.db_branch.id))
+ mirror_failed_call[:2], ("mirrorFailed", self.db_branch.id)
+ )
self.assertTrue(
- "Could not acquire lock" in mirror_failed_call[2])
+ "Could not acquire lock" in mirror_failed_call[2]
+ )
return ignored
deferred.addCallback(check_mirror_failed)
return deferred
- deferred = self._run_with_destination_locked(
- mirror_fails_to_unlock, 1)
+ deferred = self._run_with_destination_locked(mirror_fails_to_unlock, 1)
return deferred.addErrback(self._dumpError)
diff --git a/lib/lp/codehosting/puller/tests/test_worker.py b/lib/lp/codehosting/puller/tests/test_worker.py
index 7ea9daf..4cc2dd0 100644
--- a/lib/lp/codehosting/puller/tests/test_worker.py
+++ b/lib/lp/codehosting/puller/tests/test_worker.py
@@ -7,48 +7,35 @@ import gc
import io
import breezy.branch
+import six
from breezy.bzr.branch import BranchReferenceFormat
from breezy.bzr.bzrdir import BzrDir
-from breezy.errors import (
- IncompatibleRepositories,
- NotBranchError,
- NotStacked,
- )
+from breezy.errors import IncompatibleRepositories, NotBranchError, NotStacked
from breezy.revision import NULL_REVISION
-from breezy.tests import (
- TestCaseInTempDir,
- TestCaseWithTransport,
- )
+from breezy.tests import TestCaseInTempDir, TestCaseWithTransport
from breezy.transport import get_transport
from breezy.url_policy_open import (
AcceptAnythingPolicy,
BadUrl,
BranchOpener,
BranchOpenPolicy,
- )
-import six
+)
from lp.code.enums import BranchType
-from lp.codehosting.puller.tests import (
- FixedHttpServer,
- PullerWorkerMixin,
- )
+from lp.codehosting.puller.tests import FixedHttpServer, PullerWorkerMixin
from lp.codehosting.puller.worker import (
+ WORKER_ACTIVITY_NETWORK,
BadUrlLaunchpad,
BadUrlScheme,
BadUrlSsh,
BranchMirrorerPolicy,
ImportedBranchPolicy,
- install_worker_ui_factory,
MirroredBranchPolicy,
PullerWorkerProtocol,
- WORKER_ACTIVITY_NETWORK,
- )
+ install_worker_ui_factory,
+)
from lp.testing import TestCase
-from lp.testing.factory import (
- LaunchpadObjectFactory,
- ObjectFactory,
- )
+from lp.testing.factory import LaunchpadObjectFactory, ObjectFactory
def get_netstrings(line):
@@ -58,25 +45,27 @@ def get_netstrings(line):
"""
strings = []
while len(line) > 0:
- colon_index = line.find(b':')
+ colon_index = line.find(b":")
length = int(line[:colon_index])
- strings.append(line[(colon_index + 1):(colon_index + 1 + length)])
- line = line[colon_index + 1 + length:]
- assert b',' == line[:1], 'Expected %r == %r' % (b',', line[:1])
+ strings.append(line[(colon_index + 1) : (colon_index + 1 + length)])
+ line = line[colon_index + 1 + length :]
+ assert b"," == line[:1], "Expected %r == %r" % (b",", line[:1])
line = line[1:]
return strings
-class PrearrangedStackedBranchPolicy(BranchMirrorerPolicy,
- AcceptAnythingPolicy):
+class PrearrangedStackedBranchPolicy(
+ BranchMirrorerPolicy, AcceptAnythingPolicy
+):
"""A branch policy that returns a pre-configurable stack-on URL."""
def __init__(self, stack_on_url):
AcceptAnythingPolicy.__init__(self)
self.stack_on_url = stack_on_url
- def getStackedOnURLForDestinationBranch(self, source_branch,
- destination_url):
+ def getStackedOnURLForDestinationBranch(
+ self, source_branch, destination_url
+ ):
return self.stack_on_url
@@ -91,9 +80,10 @@ class TestPullerWorker(TestCaseWithTransport, PullerWorkerMixin):
# A PullerWorker for a mirrored branch gets a MirroredBranchPolicy as
# the policy of its branch_mirrorer. The default stacked-on URL is
# passed through.
- url = '/~foo/bar/baz'
+ url = "/~foo/bar/baz"
worker = self.makePullerWorker(
- branch_type=BranchType.MIRRORED, default_stacked_on_url=url)
+ branch_type=BranchType.MIRRORED, default_stacked_on_url=url
+ )
policy = worker.branch_mirrorer.policy
self.assertIsInstance(policy, MirroredBranchPolicy)
self.assertEqual(url, policy.stacked_on_url)
@@ -104,7 +94,8 @@ class TestPullerWorker(TestCaseWithTransport, PullerWorkerMixin):
# specified (indicated by an empty string), then the stacked_on_url is
# None.
worker = self.makePullerWorker(
- branch_type=BranchType.MIRRORED, default_stacked_on_url='')
+ branch_type=BranchType.MIRRORED, default_stacked_on_url=""
+ )
policy = worker.branch_mirrorer.policy
self.assertIsInstance(policy, MirroredBranchPolicy)
self.assertIs(None, policy.stacked_on_url)
@@ -114,24 +105,28 @@ class TestPullerWorker(TestCaseWithTransport, PullerWorkerMixin):
# the policy of its branch_mirrorer.
worker = self.makePullerWorker(branch_type=BranchType.IMPORTED)
self.assertIsInstance(
- worker.branch_mirrorer.policy, ImportedBranchPolicy)
+ worker.branch_mirrorer.policy, ImportedBranchPolicy
+ )
def testMirrorActuallyMirrors(self):
# Check that mirror() will mirror the Bazaar branch.
- source_tree = self.make_branch_and_tree('source-branch')
+ source_tree = self.make_branch_and_tree("source-branch")
to_mirror = self.makePullerWorker(
- source_tree.branch.base, self.get_url('dest'))
- source_tree.commit('commit message')
+ source_tree.branch.base, self.get_url("dest")
+ )
+ source_tree.commit("commit message")
to_mirror.mirrorWithoutChecks()
mirrored_branch = breezy.branch.Branch.open(to_mirror.dest)
self.assertEqual(
- source_tree.last_revision(), mirrored_branch.last_revision())
+ source_tree.last_revision(), mirrored_branch.last_revision()
+ )
def testMirrorEmptyBranch(self):
# We can mirror an empty branch.
- source_branch = self.make_branch('source-branch')
+ source_branch = self.make_branch("source-branch")
to_mirror = self.makePullerWorker(
- source_branch.base, self.get_url('dest'))
+ source_branch.base, self.get_url("dest")
+ )
to_mirror.mirrorWithoutChecks()
mirrored_branch = breezy.branch.Branch.open(to_mirror.dest)
self.assertEqual(NULL_REVISION, mirrored_branch.last_revision())
@@ -139,136 +134,154 @@ class TestPullerWorker(TestCaseWithTransport, PullerWorkerMixin):
def testCanMirrorWhenDestDirExists(self):
# We can mirror a branch even if the destination exists, and contains
# data but is not a branch.
- source_tree = self.make_branch_and_tree('source-branch')
+ source_tree = self.make_branch_and_tree("source-branch")
to_mirror = self.makePullerWorker(
- source_tree.branch.base, self.get_url('destdir'))
- source_tree.commit('commit message')
+ source_tree.branch.base, self.get_url("destdir")
+ )
+ source_tree.commit("commit message")
# Make the directory.
dest = get_transport(to_mirror.dest)
dest.create_prefix()
- dest.mkdir('.bzr')
+ dest.mkdir(".bzr")
# 'dest' is not a branch.
self.assertRaises(
- NotBranchError, breezy.branch.Branch.open, to_mirror.dest)
+ NotBranchError, breezy.branch.Branch.open, to_mirror.dest
+ )
to_mirror.mirrorWithoutChecks()
mirrored_branch = breezy.branch.Branch.open(to_mirror.dest)
self.assertEqual(
- source_tree.last_revision(), mirrored_branch.last_revision())
+ source_tree.last_revision(), mirrored_branch.last_revision()
+ )
def testHttpTransportStillThere(self):
# We tweak the http:// transport in the worker. Make sure that it's
# still available after mirroring.
- http = get_transport('http://example.com')
- source_branch = self.make_branch('source-branch')
+ http = get_transport("http://example.com")
+ source_branch = self.make_branch("source-branch")
to_mirror = self.makePullerWorker(
- source_branch.base, self.get_url('destdir'))
+ source_branch.base, self.get_url("destdir")
+ )
to_mirror.mirrorWithoutChecks()
- new_http = get_transport('http://example.com')
- self.assertEqual(get_transport('http://example.com').base, http.base)
+ new_http = get_transport("http://example.com")
+ self.assertEqual(get_transport("http://example.com").base, http.base)
self.assertEqual(new_http.__class__, http.__class__)
def test_defaultStackedOnBranchDoesNotForceStacking(self):
# If the policy supplies a stacked on URL but the source branch does
# not support stacking, the destination branch does not support
# stacking.
- stack_on = self.make_branch('default-stack-on')
- source_branch = self.make_branch('source-branch', format='pack-0.92')
+ stack_on = self.make_branch("default-stack-on")
+ source_branch = self.make_branch("source-branch", format="pack-0.92")
self.assertFalse(source_branch._format.supports_stacking())
to_mirror = self.makePullerWorker(
- source_branch.base, self.get_url('destdir'),
- policy=PrearrangedStackedBranchPolicy(stack_on.base))
+ source_branch.base,
+ self.get_url("destdir"),
+ policy=PrearrangedStackedBranchPolicy(stack_on.base),
+ )
to_mirror.mirrorWithoutChecks()
- dest = breezy.branch.Branch.open(self.get_url('destdir'))
+ dest = breezy.branch.Branch.open(self.get_url("destdir"))
self.assertFalse(dest._format.supports_stacking())
def test_defaultStackedOnBranchIncompatibleMirrorsOK(self):
# If the policy supplies a stacked on URL for a branch which is
# incompatible with the branch we're mirroring, the mirroring
# completes successfully and the destination branch is not stacked.
- stack_on = self.make_branch('default-stack-on', format='2a')
- source_branch = self.make_branch('source-branch', format='1.9')
+ stack_on = self.make_branch("default-stack-on", format="2a")
+ source_branch = self.make_branch("source-branch", format="1.9")
to_mirror = self.makePullerWorker(
- source_branch.base, self.get_url('destdir'),
- policy=PrearrangedStackedBranchPolicy(stack_on.base))
+ source_branch.base,
+ self.get_url("destdir"),
+ policy=PrearrangedStackedBranchPolicy(stack_on.base),
+ )
to_mirror.mirrorWithoutChecks()
- dest = breezy.branch.Branch.open(self.get_url('destdir'))
+ dest = breezy.branch.Branch.open(self.get_url("destdir"))
self.assertRaises(NotStacked, dest.get_stacked_on_url)
def testCanMirrorWithIncompatibleRepos(self):
# If the destination branch cannot be opened because its repository is
# not compatible with that of the branch it is stacked on, we delete
# the destination branch and start again.
- self.get_transport('dest').ensure_base()
+ self.get_transport("dest").ensure_base()
# Make a branch to stack on in 1.6 format
- self.make_branch('dest/stacked-on', format='1.6')
+ self.make_branch("dest/stacked-on", format="1.6")
# Make a branch stacked on this.
- stacked_branch = self.make_branch('dest/stacked', format='1.6')
- stacked_branch.set_stacked_on_url(self.get_url('dest/stacked-on'))
+ stacked_branch = self.make_branch("dest/stacked", format="1.6")
+ stacked_branch.set_stacked_on_url(self.get_url("dest/stacked-on"))
# Delete the stacked-on branch and replace it with a 2a format branch.
- self.get_transport('dest').delete_tree('stacked-on')
- self.make_branch('dest/stacked-on', format='2a')
+ self.get_transport("dest").delete_tree("stacked-on")
+ self.make_branch("dest/stacked-on", format="2a")
# Check our setup: trying to open the stacked branch raises
# IncompatibleRepositories.
self.assertRaises(
- IncompatibleRepositories,
- breezy.branch.Branch.open, 'dest/stacked')
- source_branch = self.make_branch(
- 'source-branch', format='2a')
+ IncompatibleRepositories, breezy.branch.Branch.open, "dest/stacked"
+ )
+ source_branch = self.make_branch("source-branch", format="2a")
to_mirror = self.makePullerWorker(
- source_branch.base, self.get_url('dest/stacked'))
+ source_branch.base, self.get_url("dest/stacked")
+ )
# The branch can be mirrored without errors and the destination
# location is upgraded to match the source format.
to_mirror.mirrorWithoutChecks()
mirrored_branch = breezy.branch.Branch.open(to_mirror.dest)
self.assertEqual(
source_branch.repository._format,
- mirrored_branch.repository._format)
+ mirrored_branch.repository._format,
+ )
def getStackedOnUrlFromNetStringOutput(self, netstring_output):
netstrings = get_netstrings(netstring_output)
- branchChanged_index = netstrings.index(b'branchChanged')
+ branchChanged_index = netstrings.index(b"branchChanged")
return six.ensure_text(netstrings[branchChanged_index + 2])
def testSendsStackedInfo(self):
# When the puller worker stacks a branch, it reports the stacked on
# URL to the master.
- base_branch = self.make_branch('base_branch', format='1.9')
- stacked_branch = self.make_branch('stacked-branch', format='1.9')
+ base_branch = self.make_branch("base_branch", format="1.9")
+ stacked_branch = self.make_branch("stacked-branch", format="1.9")
protocol_output = io.BytesIO()
to_mirror = self.makePullerWorker(
- stacked_branch.base, self.get_url('destdir'),
+ stacked_branch.base,
+ self.get_url("destdir"),
protocol=PullerWorkerProtocol(protocol_output),
- policy=PrearrangedStackedBranchPolicy(base_branch.base))
+ policy=PrearrangedStackedBranchPolicy(base_branch.base),
+ )
to_mirror.mirror()
stacked_on_url = self.getStackedOnUrlFromNetStringOutput(
- protocol_output.getvalue())
+ protocol_output.getvalue()
+ )
self.assertEqual(base_branch.base, stacked_on_url)
def testDoesntSendStackedInfoUnstackableFormat(self):
# Mirroring an unstackable branch sends '' as the stacked-on location
# to the master.
- source_branch = self.make_branch('source-branch', format='pack-0.92')
+ source_branch = self.make_branch("source-branch", format="pack-0.92")
protocol_output = io.BytesIO()
to_mirror = self.makePullerWorker(
- source_branch.base, self.get_url('destdir'),
- protocol=PullerWorkerProtocol(protocol_output))
+ source_branch.base,
+ self.get_url("destdir"),
+ protocol=PullerWorkerProtocol(protocol_output),
+ )
to_mirror.mirror()
stacked_on_url = self.getStackedOnUrlFromNetStringOutput(
- protocol_output.getvalue())
- self.assertEqual('', stacked_on_url)
+ protocol_output.getvalue()
+ )
+ self.assertEqual("", stacked_on_url)
def testDoesntSendStackedInfoNotStacked(self):
# Mirroring a non-stacked branch sends '' as the stacked-on location
# to the master.
- source_branch = self.make_branch('source-branch', format='1.9')
+ source_branch = self.make_branch("source-branch", format="1.9")
protocol_output = io.BytesIO()
to_mirror = self.makePullerWorker(
- source_branch.base, self.get_url('destdir'),
- protocol=PullerWorkerProtocol(protocol_output))
+ source_branch.base,
+ self.get_url("destdir"),
+ protocol=PullerWorkerProtocol(protocol_output),
+ )
to_mirror.mirror()
stacked_on_url = self.getStackedOnUrlFromNetStringOutput(
- protocol_output.getvalue())
- self.assertEqual('', stacked_on_url)
+ protocol_output.getvalue()
+ )
+ self.assertEqual("", stacked_on_url)
class TestReferenceOpener(TestCaseWithTransport):
@@ -287,15 +300,17 @@ class TestReferenceOpener(TestCaseWithTransport):
# XXX DavidAllouche 2007-09-12 bug=139109:
# We do this manually because the breezy API does not support creating
# a branch reference without opening it.
- t = get_transport(self.get_url('.'))
- t.mkdir('reference')
- a_bzrdir = BzrDir.create(self.get_url('reference'))
+ t = get_transport(self.get_url("."))
+ t.mkdir("reference")
+ a_bzrdir = BzrDir.create(self.get_url("reference"))
branch_reference_format = BranchReferenceFormat()
branch_transport = a_bzrdir.get_branch_transport(
- branch_reference_format)
- branch_transport.put_bytes('location', six.ensure_binary(url))
+ branch_reference_format
+ )
+ branch_transport.put_bytes("location", six.ensure_binary(url))
branch_transport.put_bytes(
- 'format', branch_reference_format.get_format_string())
+ "format", branch_reference_format.get_format_string()
+ )
return a_bzrdir.root_transport.base
def testCreateBranchReference(self):
@@ -303,7 +318,7 @@ class TestReferenceOpener(TestCaseWithTransport):
# that points to that branch reference.
# First create a branch and a reference to that branch.
- target_branch = self.make_branch('repo')
+ target_branch = self.make_branch("repo")
reference_url = self.createBranchReference(target_branch.base)
# References are transparent, so we can't test much about them. The
@@ -320,15 +335,16 @@ class TestReferenceOpener(TestCaseWithTransport):
# BranchOpener.follow_reference gives the reference value for
# a branch reference.
opener = BranchOpener(BranchOpenPolicy())
- reference_value = 'http://example.com/branch'
+ reference_value = "http://example.com/branch"
reference_url = self.createBranchReference(reference_value)
self.assertEqual(
- reference_value, opener.follow_reference(reference_url))
+ reference_value, opener.follow_reference(reference_url)
+ )
def testFollowReferenceNone(self):
# BranchOpener.follow_reference gives None for a normal branch.
- self.make_branch('repo')
- branch_url = self.get_url('repo')
+ self.make_branch("repo")
+ branch_url = self.get_url("repo")
opener = BranchOpener(BranchOpenPolicy())
self.assertIs(None, opener.follow_reference(branch_url))
@@ -343,53 +359,69 @@ class TestMirroredBranchPolicy(TestCase):
def testNoFileURL(self):
policy = MirroredBranchPolicy()
self.assertRaises(
- BadUrlScheme, policy.check_one_url,
- self.factory.getUniqueURL(scheme='file'))
+ BadUrlScheme,
+ policy.check_one_url,
+ self.factory.getUniqueURL(scheme="file"),
+ )
def testNoUnknownSchemeURLs(self):
policy = MirroredBranchPolicy()
self.assertRaises(
- BadUrlScheme, policy.check_one_url,
- self.factory.getUniqueURL(scheme='decorator+scheme'))
+ BadUrlScheme,
+ policy.check_one_url,
+ self.factory.getUniqueURL(scheme="decorator+scheme"),
+ )
def testNoSSHURL(self):
policy = MirroredBranchPolicy()
self.assertRaises(
- BadUrlSsh, policy.check_one_url,
- self.factory.getUniqueURL(scheme='bzr+ssh'))
+ BadUrlSsh,
+ policy.check_one_url,
+ self.factory.getUniqueURL(scheme="bzr+ssh"),
+ )
def testNoSftpURL(self):
policy = MirroredBranchPolicy()
self.assertRaises(
- BadUrlSsh, policy.check_one_url,
- self.factory.getUniqueURL(scheme='sftp'))
+ BadUrlSsh,
+ policy.check_one_url,
+ self.factory.getUniqueURL(scheme="sftp"),
+ )
def testNoLaunchpadURL(self):
policy = MirroredBranchPolicy()
self.assertRaises(
- BadUrlLaunchpad, policy.check_one_url,
- self.factory.getUniqueURL(host='bazaar.launchpad.test'))
+ BadUrlLaunchpad,
+ policy.check_one_url,
+ self.factory.getUniqueURL(host="bazaar.launchpad.test"),
+ )
def testNoHTTPSLaunchpadURL(self):
policy = MirroredBranchPolicy()
self.assertRaises(
- BadUrlLaunchpad, policy.check_one_url,
+ BadUrlLaunchpad,
+ policy.check_one_url,
self.factory.getUniqueURL(
- host='bazaar.launchpad.test', scheme='https'))
+ host="bazaar.launchpad.test", scheme="https"
+ ),
+ )
def testNoOtherHostLaunchpadURL(self):
policy = MirroredBranchPolicy()
self.assertRaises(
- BadUrlLaunchpad, policy.check_one_url,
- self.factory.getUniqueURL(host='code.launchpad.test'))
+ BadUrlLaunchpad,
+ policy.check_one_url,
+ self.factory.getUniqueURL(host="code.launchpad.test"),
+ )
def testLocalhost(self):
self.pushConfig(
- 'codehosting', blacklisted_hostnames='localhost,127.0.0.1')
+ "codehosting", blacklisted_hostnames="localhost,127.0.0.1"
+ )
policy = MirroredBranchPolicy()
- localhost_url = self.factory.getUniqueURL(host='localhost')
+ localhost_url = self.factory.getUniqueURL(host="localhost")
self.assertRaises(BadUrl, policy.check_one_url, localhost_url)
- localhost_url = self.factory.getUniqueURL(host='127.0.0.1')
+ localhost_url = self.factory.getUniqueURL(host="127.0.0.1")
self.assertRaises(BadUrl, policy.check_one_url, localhost_url)
def test_no_stacked_on_url(self):
@@ -398,27 +430,30 @@ class TestMirroredBranchPolicy(TestCase):
# This implementation of the method doesn't actually care about the
# arguments.
self.assertIs(
- None, policy.getStackedOnURLForDestinationBranch(None, None))
+ None, policy.getStackedOnURLForDestinationBranch(None, None)
+ )
def test_specified_stacked_on_url(self):
# If a default stacked-on URL is specified, then the
# MirroredBranchPolicy will tell branches to be stacked on that.
- stacked_on_url = '/foo'
+ stacked_on_url = "/foo"
policy = MirroredBranchPolicy(stacked_on_url)
- destination_url = 'http://example.com/bar'
+ destination_url = "http://example.com/bar"
self.assertEqual(
- '/foo',
- policy.getStackedOnURLForDestinationBranch(None, destination_url))
+ "/foo",
+ policy.getStackedOnURLForDestinationBranch(None, destination_url),
+ )
def test_stacked_on_url_for_mirrored_branch(self):
# If the default stacked-on URL is also the URL for the branch being
# mirrored, then the stacked-on URL for destination branch is None.
- stacked_on_url = '/foo'
+ stacked_on_url = "/foo"
policy = MirroredBranchPolicy(stacked_on_url)
- destination_url = 'http://example.com/foo'
+ destination_url = "http://example.com/foo"
self.assertIs(
None,
- policy.getStackedOnURLForDestinationBranch(None, destination_url))
+ policy.getStackedOnURLForDestinationBranch(None, destination_url),
+ )
class TestWorkerProtocol(TestCaseInTempDir, PullerWorkerMixin):
@@ -441,7 +476,7 @@ class TestWorkerProtocol(TestCaseInTempDir, PullerWorkerMixin):
# Empty the test output and error buffers.
self.output.truncate(0)
self.output.seek(0)
- self.assertEqual(b'', self.output.getvalue())
+ self.assertEqual(b"", self.output.getvalue())
def test_nothingSentOnConstruction(self):
# The protocol sends nothing until it receives an event.
@@ -451,7 +486,7 @@ class TestWorkerProtocol(TestCaseInTempDir, PullerWorkerMixin):
def test_startMirror(self):
# Calling startMirroring sends 'startMirroring' as a netstring.
self.protocol.startMirroring()
- self.assertSentNetstrings([b'startMirroring', b'0'])
+ self.assertSentNetstrings([b"startMirroring", b"0"])
def test_branchChanged(self):
# Calling 'branchChanged' sends the arguments.
@@ -459,27 +494,28 @@ class TestWorkerProtocol(TestCaseInTempDir, PullerWorkerMixin):
self.protocol.startMirroring()
self.resetBuffers()
self.protocol.branchChanged(*arbitrary_args)
- self.assertSentNetstrings([b'branchChanged', b'6'] + arbitrary_args)
+ self.assertSentNetstrings([b"branchChanged", b"6"] + arbitrary_args)
def test_mirrorFailed(self):
# Calling 'mirrorFailed' sends the error message.
self.protocol.startMirroring()
self.resetBuffers()
- self.protocol.mirrorFailed('Error Message', 'OOPS')
+ self.protocol.mirrorFailed("Error Message", "OOPS")
self.assertSentNetstrings(
- [b'mirrorFailed', b'2', b'Error Message', b'OOPS'])
+ [b"mirrorFailed", b"2", b"Error Message", b"OOPS"]
+ )
def test_progressMade(self):
# Calling 'progressMade' sends an arbitrary string indicating
# progress.
- self.protocol.progressMade('test')
- self.assertSentNetstrings([b'progressMade', b'0'])
+ self.protocol.progressMade("test")
+ self.assertSentNetstrings([b"progressMade", b"0"])
def test_log(self):
# Calling 'log' sends 'log' as a netstring and its arguments, after
# formatting as a string.
- self.protocol.log('logged %s', 'message')
- self.assertSentNetstrings([b'log', b'1', b'logged message'])
+ self.protocol.log("logged %s", "message")
+ self.assertSentNetstrings([b"log", b"1", b"logged message"])
class TestWorkerProgressReporting(TestCaseWithTransport):
@@ -487,6 +523,7 @@ class TestWorkerProgressReporting(TestCaseWithTransport):
class StubProtocol:
"""A stub for PullerWorkerProtocol that just defines progressMade."""
+
def __init__(self):
self.calls = []
@@ -498,7 +535,7 @@ class TestWorkerProgressReporting(TestCaseWithTransport):
BranchOpener.install_hook()
self.saved_factory = breezy.ui.ui_factory
self.disable_directory_isolation()
- self.addCleanup(setattr, breezy.ui, 'ui_factory', self.saved_factory)
+ self.addCleanup(setattr, breezy.ui, "ui_factory", self.saved_factory)
def getHttpServerForCwd(self):
"""Get an `HttpServer` instance that serves from '.'."""
@@ -511,9 +548,9 @@ class TestWorkerProgressReporting(TestCaseWithTransport):
def test_simple(self):
# Even the simplest of pulls should call progressMade at least once.
- b1 = self.make_branch('some-branch')
- b2_tree = self.make_branch_and_tree('some-other-branch')
- b2_tree.commit('rev1', allow_pointless=True)
+ b1 = self.make_branch("some-branch")
+ b2_tree = self.make_branch_and_tree("some-other-branch")
+ b2_tree.commit("rev1", allow_pointless=True)
p = self.StubProtocol()
install_worker_ui_factory(p)
@@ -523,14 +560,15 @@ class TestWorkerProgressReporting(TestCaseWithTransport):
def test_network(self):
# Even the simplest of pulls over a transport that reports activity
# (here, HTTP) should call progressMade with a type of 'activity'.
- b1 = self.make_branch('some-branch')
- b2_tree = self.make_branch_and_tree('some-other-branch')
- b2_tree.commit('rev1', allow_pointless=True)
+ b1 = self.make_branch("some-branch")
+ b2_tree = self.make_branch_and_tree("some-other-branch")
+ b2_tree.commit("rev1", allow_pointless=True)
http_server = self.getHttpServerForCwd()
p = self.StubProtocol()
install_worker_ui_factory(p)
b2_http = breezy.branch.Branch.open(
- http_server.get_url() + 'some-other-branch')
+ http_server.get_url() + "some-other-branch"
+ )
b1.pull(b2_http)
self.assertSubset([WORKER_ACTIVITY_NETWORK], p.calls)
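
(Aside on the protocol exercised by the tests above: the worker reports events to its master as netstrings, length-prefixed byte strings of the form b"%d:%s,". An event is the command name, then the argument count, then the arguments, which is why the tests expect sequences such as [b"startMirroring", b"0"]. The sketch below is illustrative only; the helper names encode_netstring and parse_netstrings are invented here and are not part of the Launchpad tree.)

def encode_netstring(payload):
    # Same wire format as PullerWorkerProtocol.sendNetstring: b"<len>:<payload>,".
    return b"%d:%s," % (len(payload), payload)

def parse_netstrings(data):
    # Minimal decoder for a buffer of concatenated netstrings.
    strings = []
    while data:
        length, _, rest = data.partition(b":")
        size = int(length)
        strings.append(rest[:size])
        assert rest[size:size + 1] == b","  # every netstring ends with a comma
        data = rest[size + 1:]
    return strings

# startMirroring takes no arguments, so it is sent as the command name
# followed by an argument count of zero.
wire = encode_netstring(b"startMirroring") + encode_netstring(b"0")
assert parse_netstrings(wire) == [b"startMirroring", b"0"]
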
diff --git a/lib/lp/codehosting/puller/tests/test_worker_formats.py b/lib/lp/codehosting/puller/tests/test_worker_formats.py
index bbcf331..7c75231 100644
--- a/lib/lp/codehosting/puller/tests/test_worker_formats.py
+++ b/lib/lp/codehosting/puller/tests/test_worker_formats.py
@@ -11,7 +11,7 @@ from breezy.plugins.weave_fmt.bzrdir import BzrDirFormat6
from breezy.plugins.weave_fmt.repository import (
RepositoryFormat6,
RepositoryFormat7,
- )
+)
from breezy.tests.per_repository import TestCaseWithRepository
from breezy.url_policy_open import BranchOpener
@@ -20,45 +20,50 @@ from lp.codehosting.puller.tests import PullerWorkerMixin
from lp.codehosting.tests.helpers import LoomTestMixin
-class TestPullerWorkerFormats(TestCaseWithRepository, PullerWorkerMixin,
- LoomTestMixin):
-
+class TestPullerWorkerFormats(
+ TestCaseWithRepository, PullerWorkerMixin, LoomTestMixin
+):
def setUp(self):
TestCaseWithRepository.setUp(self)
# make_controldir relies on this being a relative filesystem path.
- self._source_branch_path = 'source-branch'
+ self._source_branch_path = "source-branch"
BranchOpener.install_hook()
self.worker = self.makePullerWorker(
- self.get_url(self._source_branch_path),
- self.get_url('dest-path'))
+ self.get_url(self._source_branch_path), self.get_url("dest-path")
+ )
- def _createSourceBranch(self, repository_format, bzrdir_format,
- branch_format=None):
+ def _createSourceBranch(
+ self, repository_format, bzrdir_format, branch_format=None
+ ):
"""Make a source branch with the given formats."""
if branch_format is not None:
bzrdir_format.set_branch_format(branch_format)
bd = self.make_controldir(
- self._source_branch_path, format=bzrdir_format)
+ self._source_branch_path, format=bzrdir_format
+ )
repository_format.initialize(bd)
branch = bd.create_branch()
- tree = branch.create_checkout('source-checkout')
- tree.commit('Commit message')
- self.get_transport().delete_tree('source-checkout')
+ tree = branch.create_checkout("source-checkout")
+ tree.commit("Commit message")
+ self.get_transport().delete_tree("source-checkout")
return branch
def assertMirrored(self, source_branch, dest_branch):
"""Assert that `dest_branch` is a mirror of `src_branch`."""
self.assertEqual(
- source_branch.last_revision(), dest_branch.last_revision())
+ source_branch.last_revision(), dest_branch.last_revision()
+ )
# Assert that the mirrored branch is in source's format
# XXX AndrewBennetts 2006-05-18 bug=45277: comparing format objects
# is ugly.
self.assertEqual(
source_branch.repository._format.get_format_description(),
- dest_branch.repository._format.get_format_description())
+ dest_branch.repository._format.get_format_description(),
+ )
self.assertEqual(
source_branch.controldir._format.get_format_description(),
- dest_branch.controldir._format.get_format_description())
+ dest_branch.controldir._format.get_format_description(),
+ )
def _testMirrorWithFormats(self, repository_format, bzrdir_format):
"""Make a branch with certain formats, mirror it and check the mirror.
@@ -66,8 +71,7 @@ class TestPullerWorkerFormats(TestCaseWithRepository, PullerWorkerMixin,
:param repository_format: The repository format.
:param bzrdir_format: The bzrdir format.
"""
- src_branch = self._createSourceBranch(
- repository_format, bzrdir_format)
+ src_branch = self._createSourceBranch(repository_format, bzrdir_format)
self.worker.mirror()
dest_branch = Branch.open(self.worker.dest)
self.assertMirrored(src_branch, dest_branch)
@@ -76,8 +80,8 @@ class TestPullerWorkerFormats(TestCaseWithRepository, PullerWorkerMixin,
# When we mirror a loom branch for the first time, the mirrored loom
# branch matches the original.
branch = self._createSourceBranch(
- RepositoryFormatKnitPack5(),
- BzrDirMetaFormat1())
+ RepositoryFormatKnitPack5(), BzrDirMetaFormat1()
+ )
self.loomify(branch)
self.worker.mirror()
mirrored_branch = Branch.open(self.worker.dest)
@@ -90,7 +94,8 @@ class TestPullerWorkerFormats(TestCaseWithRepository, PullerWorkerMixin,
# Create a source branch in knit format, and check that the mirror is
# in knit format.
self._testMirrorWithFormats(
- RepositoryFormatKnit1(), BzrDirMetaFormat1())
+ RepositoryFormatKnit1(), BzrDirMetaFormat1()
+ )
def testMirrorMetaweaveAsMetaweave(self):
# Create a source branch in metaweave format, and check that the
@@ -118,4 +123,5 @@ class TestPullerWorkerFormats(TestCaseWithRepository, PullerWorkerMixin,
# The mirrored branch should now be in knit format.
self.assertMirrored(
- Branch.open(self.worker.source), Branch.open(self.worker.dest))
+ Branch.open(self.worker.source), Branch.open(self.worker.dest)
+ )
diff --git a/lib/lp/codehosting/puller/worker.py b/lib/lp/codehosting/puller/worker.py
index be4cb8c..662d6dd 100644
--- a/lib/lp/codehosting/puller/worker.py
+++ b/lib/lp/codehosting/puller/worker.py
@@ -4,30 +4,24 @@
import http.client
import sys
-
# FIRST Ensure correct plugins are loaded. Do not delete this comment or the
# line below this comment.
import lp.codehosting # noqa: F401 # isort: split
from urllib.error import HTTPError
-from breezy import (
- errors,
- urlutils,
- )
-from breezy.branch import (
- Branch,
- UnstackableBranchFormat,
- )
+import breezy.ui
+import six
+from breezy import errors, urlutils
+from breezy.branch import Branch, UnstackableBranchFormat
from breezy.plugins.loom.branch import LoomSupport
from breezy.plugins.weave_fmt.branch import BzrBranchFormat4
from breezy.plugins.weave_fmt.repository import (
RepositoryFormat4,
RepositoryFormat5,
RepositoryFormat6,
- )
+)
from breezy.transport import get_transport
-import breezy.ui
from breezy.ui import SilentUIFactory
from breezy.url_policy_open import (
BadUrl,
@@ -35,35 +29,27 @@ from breezy.url_policy_open import (
BranchOpener,
BranchOpenPolicy,
BranchReferenceForbidden,
- )
-from lazr.uri import (
- InvalidURIError,
- URI,
- )
-import six
+)
+from lazr.uri import URI, InvalidURIError
-from lp.code.bzr import (
- BranchFormat,
- RepositoryFormat,
- )
+from lp.code.bzr import BranchFormat, RepositoryFormat
from lp.code.enums import BranchType
from lp.codehosting.bzrutils import identical_formats
from lp.codehosting.puller import get_lock_id_for_branch_id
from lp.services.config import config
from lp.services.webapp import errorlog
-
__all__ = [
- 'BadUrlLaunchpad',
- 'BadUrlScheme',
- 'BadUrlSsh',
- 'BranchMirrorer',
- 'BranchMirrorerPolicy',
- 'get_canonical_url_for_branch_name',
- 'install_worker_ui_factory',
- 'PullerWorker',
- 'PullerWorkerProtocol',
- ]
+ "BadUrlLaunchpad",
+ "BadUrlScheme",
+ "BadUrlSsh",
+ "BranchMirrorer",
+ "BranchMirrorerPolicy",
+ "get_canonical_url_for_branch_name",
+ "install_worker_ui_factory",
+ "PullerWorker",
+ "PullerWorkerProtocol",
+]
class BadUrlSsh(BadUrl):
@@ -89,11 +75,11 @@ def get_canonical_url_for_branch_name(unique_name):
access to real content objects.
"""
if config.vhosts.use_https:
- scheme = 'https'
+ scheme = "https"
else:
- scheme = 'http'
+ scheme = "http"
hostname = config.vhost.code.hostname
- return scheme + '://' + hostname + '/' + unique_name
+ return scheme + "://" + hostname + "/" + unique_name
class PullerWorkerProtocol:
@@ -108,35 +94,49 @@ class PullerWorkerProtocol:
def sendNetstring(self, string):
self.out_stream.write(
- b'%d:%s,' % (len(string), six.ensure_binary(string)))
+ b"%d:%s," % (len(string), six.ensure_binary(string))
+ )
def sendEvent(self, command, *args):
self.sendNetstring(command)
self.sendNetstring(str(len(args)))
for argument in args:
if not isinstance(argument, bytes):
- argument = str(argument).encode('UTF-8')
+ argument = str(argument).encode("UTF-8")
self.sendNetstring(argument)
def startMirroring(self):
- self.sendEvent('startMirroring')
-
- def branchChanged(self, stacked_on_url, revid_before, revid_after,
- control_string, branch_string, repository_string):
+ self.sendEvent("startMirroring")
+
+ def branchChanged(
+ self,
+ stacked_on_url,
+ revid_before,
+ revid_after,
+ control_string,
+ branch_string,
+ repository_string,
+ ):
self.sendEvent(
- 'branchChanged', stacked_on_url, revid_before, revid_after,
- control_string, branch_string, repository_string)
+ "branchChanged",
+ stacked_on_url,
+ revid_before,
+ revid_after,
+ control_string,
+ branch_string,
+ repository_string,
+ )
def mirrorFailed(self, message, oops_id):
- self.sendEvent('mirrorFailed', message, oops_id)
+ self.sendEvent("mirrorFailed", message, oops_id)
def progressMade(self, type):
# 'type' is ignored; we only care about the type of progress in the
# tests of the progress reporting.
- self.sendEvent('progressMade')
+ self.sendEvent("progressMade")
def log(self, fmt, *args):
- self.sendEvent('log', fmt % args)
+ self.sendEvent("log", fmt % args)
class BranchMirrorerPolicy(BranchOpenPolicy):
@@ -157,19 +157,21 @@ class BranchMirrorerPolicy(BranchOpenPolicy):
:return: The destination branch.
"""
dest_transport = get_transport(destination_url)
- if dest_transport.has('.'):
- dest_transport.delete_tree('.')
+ if dest_transport.has("."):
+ dest_transport.delete_tree(".")
if isinstance(source_branch, LoomSupport):
# Looms suck.
revision_id = None
else:
- revision_id = b'null:'
+ revision_id = b"null:"
source_branch.controldir.clone_on_transport(
- dest_transport, revision_id=revision_id)
+ dest_transport, revision_id=revision_id
+ )
return Branch.open(destination_url)
- def getStackedOnURLForDestinationBranch(self, source_branch,
- destination_url):
+ def getStackedOnURLForDestinationBranch(
+ self, source_branch, destination_url
+ ):
"""Get the stacked on URL for `source_branch`.
In particular, the URL it should be stacked on when it is mirrored to
@@ -220,8 +222,8 @@ class BranchMirrorer:
:return: The destination branch.
"""
return self.opener.run_with_transform_fallback_location_hook_installed(
- self.policy.createDestinationBranch, source_branch,
- destination_url)
+ self.policy.createDestinationBranch, source_branch, destination_url
+ )
def openDestinationBranch(self, source_branch, destination_url):
"""Open or create the destination branch at 'destination_url'.
@@ -235,12 +237,11 @@ class BranchMirrorer:
branch = Branch.open(destination_url)
except (errors.NotBranchError, errors.IncompatibleRepositories):
# Make a new branch in the same format as the source branch.
- return self.createDestinationBranch(
- source_branch, destination_url)
+ return self.createDestinationBranch(source_branch, destination_url)
# Check that destination branch is in the same format as the source.
if identical_formats(source_branch, branch):
return branch
- self.log('Formats differ.')
+ self.log("Formats differ.")
return self.createDestinationBranch(source_branch, destination_url)
def updateBranch(self, source_branch, dest_branch):
@@ -253,17 +254,20 @@ class BranchMirrorer:
the same format.
"""
stacked_on_url = self.policy.getStackedOnURLForDestinationBranch(
- source_branch, dest_branch.base)
+ source_branch, dest_branch.base
+ )
try:
dest_branch.set_stacked_on_url(stacked_on_url)
- except (errors.UnstackableRepositoryFormat,
- UnstackableBranchFormat,
- errors.IncompatibleRepositories):
+ except (
+ errors.UnstackableRepositoryFormat,
+ UnstackableBranchFormat,
+ errors.IncompatibleRepositories,
+ ):
stacked_on_url = None
if stacked_on_url is None:
# We use stacked_on_url == '' to mean "no stacked on location"
# because XML-RPC doesn't support None.
- stacked_on_url = ''
+ stacked_on_url = ""
dest_branch.pull(source_branch, overwrite=True)
return stacked_on_url
@@ -300,11 +304,22 @@ class PullerWorker:
:return: A `BranchMirrorer`.
"""
return make_branch_mirrorer(
- branch_type, protocol=self.protocol,
- mirror_stacked_on_url=self.default_stacked_on_url)
-
- def __init__(self, src, dest, branch_id, unique_name, branch_type,
- default_stacked_on_url, protocol, branch_mirrorer=None):
+ branch_type,
+ protocol=self.protocol,
+ mirror_stacked_on_url=self.default_stacked_on_url,
+ )
+
+ def __init__(
+ self,
+ src,
+ dest,
+ branch_id,
+ unique_name,
+ branch_type,
+ default_stacked_on_url,
+ protocol,
+ branch_mirrorer=None,
+ ):
"""Construct a `PullerWorker`.
:param src: The URL to pull from.
@@ -326,7 +341,7 @@ class PullerWorker:
self.branch_id = branch_id
self.unique_name = unique_name
self.branch_type = branch_type
- if default_stacked_on_url == '':
+ if default_stacked_on_url == "":
default_stacked_on_url = None
self.default_stacked_on_url = default_stacked_on_url
self.protocol = protocol
@@ -345,9 +360,14 @@ class PullerWorker:
str(exception) to fill in this parameter, it should only be set
when a human readable error has been explicitly generated.
"""
- request = errorlog.ScriptRequest([
- ('branch_id', self.branch_id), ('source', self.source),
- ('dest', self.dest), ('error-explanation', str(message))])
+ request = errorlog.ScriptRequest(
+ [
+ ("branch_id", self.branch_id),
+ ("source", self.source),
+ ("dest", self.dest),
+ ("error-explanation", str(message)),
+ ]
+ )
request.URL = get_canonical_url_for_branch_name(self.unique_name)
errorlog.globalErrorUtility.raising(sys.exc_info(), request)
return request.oopsid
@@ -385,8 +405,11 @@ class PullerWorker:
"""
self.protocol.startMirroring()
try:
- dest_branch, revid_before, stacked_on_url = \
- self.mirrorWithoutChecks()
+ (
+ dest_branch,
+ revid_before,
+ stacked_on_url,
+ ) = self.mirrorWithoutChecks()
# add further encountered errors from the production runs here
# ------ HERE ---------
#
@@ -400,20 +423,24 @@ class PullerWorker:
self._mirrorFailed(msg)
except OSError as e:
- msg = 'A socket error occurred: %s' % str(e)
+ msg = "A socket error occurred: %s" % str(e)
self._mirrorFailed(msg)
except errors.UnsupportedFormatError:
- msg = ("Launchpad does not support branches from before "
- "bzr 0.7. Please upgrade the branch using bzr upgrade.")
+ msg = (
+ "Launchpad does not support branches from before "
+ "bzr 0.7. Please upgrade the branch using bzr upgrade."
+ )
self._mirrorFailed(msg)
except errors.UnknownFormatError as e:
self._mirrorFailed(e)
except (errors.ParamikoNotPresent, BadUrlSsh):
- msg = ("Launchpad cannot mirror branches from SFTP and SSH URLs."
- " Please register a HTTP location for this branch.")
+ msg = (
+ "Launchpad cannot mirror branches from SFTP and SSH URLs."
+ " Please register a HTTP location for this branch."
+ )
self._mirrorFailed(msg)
except BadUrlLaunchpad:
@@ -426,17 +453,20 @@ class PullerWorker:
except errors.NotBranchError as e:
hosted_branch_error = errors.NotBranchError(
- "lp:%s" % self.unique_name)
+ "lp:%s" % self.unique_name
+ )
message_by_type = {
BranchType.HOSTED: str(hosted_branch_error),
BranchType.IMPORTED: "Not a branch.",
- }
+ }
msg = message_by_type.get(self.branch_type, str(e))
self._mirrorFailed(msg)
except BranchReferenceForbidden:
- msg = ("Branch references are not allowed for branches of type "
- "%s." % (self.branch_type.title,))
+ msg = (
+ "Branch references are not allowed for branches of type "
+ "%s." % (self.branch_type.title,)
+ )
self._mirrorFailed(msg)
except BranchLoopError:
@@ -469,20 +499,27 @@ class PullerWorker:
else:
repository_string = repository_format.get_format_string()
self.protocol.branchChanged(
- stacked_on_url, revid_before, revid_after,
- six.ensure_str(control_string), six.ensure_str(branch_string),
- six.ensure_str(repository_string))
+ stacked_on_url,
+ revid_before,
+ revid_after,
+ six.ensure_str(control_string),
+ six.ensure_str(branch_string),
+ six.ensure_str(repository_string),
+ )
def __eq__(self, other):
return self.source == other.source and self.dest == other.dest
def __repr__(self):
- return ("<PullerWorker source=%s dest=%s at %x>" %
- (self.source, self.dest, id(self)))
+ return "<PullerWorker source=%s dest=%s at %x>" % (
+ self.source,
+ self.dest,
+ id(self),
+ )
-WORKER_ACTIVITY_PROGRESS_BAR = 'progress bar'
-WORKER_ACTIVITY_NETWORK = 'network'
+WORKER_ACTIVITY_PROGRESS_BAR = "progress bar"
+WORKER_ACTIVITY_NETWORK = "network"
class PullerWorkerUIFactory(SilentUIFactory):
@@ -493,11 +530,11 @@ class PullerWorkerUIFactory(SilentUIFactory):
self.puller_worker_protocol = puller_worker_protocol
def confirm_action(self, prompt, confirmation_id, args):
- """If we're asked to break a lock like a stale lock of ours, say yes.
- """
- if confirmation_id != 'breezy.lockdir.break':
+ """If we're asked to break one of our stale locks, say yes."""
+ if confirmation_id != "breezy.lockdir.break":
raise AssertionError(
- "Didn't expect confirmation id %r" % (confirmation_id,))
+ "Didn't expect confirmation id %r" % (confirmation_id,)
+ )
branch_id = self.puller_worker_protocol.branch_id
prompt = prompt % args
if get_lock_id_for_branch_id(branch_id) in prompt:
@@ -513,7 +550,7 @@ class PullerWorkerUIFactory(SilentUIFactory):
# the 'action' or whatever it's called is 'read'/'write'
# <poolie> if we add a soft timeout like 'no io for two seconds' then
# we'd make a new action
- if direction in ['read', 'write']:
+ if direction in ["read", "write"]:
self.puller_worker_protocol.progressMade(WORKER_ACTIVITY_NETWORK)
@@ -541,8 +578,9 @@ class MirroredBranchPolicy(BranchMirrorerPolicy):
def __init__(self, stacked_on_url=None):
self.stacked_on_url = stacked_on_url
- def getStackedOnURLForDestinationBranch(self, source_branch,
- destination_url):
+ def getStackedOnURLForDestinationBranch(
+ self, source_branch, destination_url
+ ):
"""Return the stacked on URL for the destination branch.
Mirrored branches are stacked on the default stacked-on branch of
@@ -580,6 +618,7 @@ class MirroredBranchPolicy(BranchMirrorerPolicy):
"""
# Avoid circular import
from lp.code.interfaces.branch import get_blacklisted_hostnames
+
uri = URI(url)
launchpad_domain = config.vhost.mainsite.hostname
if uri.underDomain(launchpad_domain):
@@ -587,9 +626,9 @@ class MirroredBranchPolicy(BranchMirrorerPolicy):
for hostname in get_blacklisted_hostnames():
if uri.underDomain(hostname):
raise BadUrl(url)
- if uri.scheme in ['sftp', 'bzr+ssh']:
+ if uri.scheme in ["sftp", "bzr+ssh"]:
raise BadUrlSsh(url)
- elif uri.scheme not in ['http', 'https']:
+ elif uri.scheme not in ["http", "https"]:
raise BadUrlScheme(uri.scheme, url)
@@ -614,8 +653,8 @@ class ImportedBranchPolicy(BranchMirrorerPolicy):
# We loop until the remote file list before and after the copy is
# the same to catch the case where the remote side is being
# mutated as we copy it.
- if dest_transport.has('.'):
- dest_transport.delete_tree('.')
+ if dest_transport.has("."):
+ dest_transport.delete_tree(".")
files_before = set(source_transport.iter_files_recursive())
source_transport.copy_tree_to_transport(dest_transport)
files_after = set(source_transport.iter_files_recursive())
@@ -646,12 +685,12 @@ class ImportedBranchPolicy(BranchMirrorerPolicy):
we raise AssertionError if that's happened.
"""
if not url.startswith(config.launchpad.bzr_imports_root_url):
- raise AssertionError(
- "Bogus URL for imported branch: %r" % url)
+ raise AssertionError("Bogus URL for imported branch: %r" % url)
-def make_branch_mirrorer(branch_type, protocol=None,
- mirror_stacked_on_url=None):
+def make_branch_mirrorer(
+ branch_type, protocol=None, mirror_stacked_on_url=None
+):
"""Create a `BranchMirrorer` with the appropriate `BranchOpenerPolicy`.
:param branch_type: A `BranchType` to select a policy by.
@@ -666,8 +705,7 @@ def make_branch_mirrorer(branch_type, protocol=None,
elif branch_type == BranchType.IMPORTED:
policy = ImportedBranchPolicy()
else:
- raise AssertionError(
- "Unexpected branch type: %r" % branch_type)
+ raise AssertionError("Unexpected branch type: %r" % branch_type)
if protocol is not None:
log_function = protocol.log
diff --git a/lib/lp/codehosting/rewrite.py b/lib/lp/codehosting/rewrite.py
index f59520b..d22dabd 100644
--- a/lib/lp/codehosting/rewrite.py
+++ b/lib/lp/codehosting/rewrite.py
@@ -18,14 +18,12 @@ from lp.services.utils import iter_split
from lp.services.webapp.adapter import (
clear_request_started,
set_request_started,
- )
+)
-
-__all__ = ['BranchRewriter']
+__all__ = ["BranchRewriter"]
class BranchRewriter:
-
def __init__(self, logger, _now=None):
"""
@@ -41,9 +39,7 @@ class BranchRewriter:
self._cache = {}
def _codebrowse_url(self, path):
- return urlutils.join(
- config.codehosting.internal_codebrowse_root,
- path)
+ return urlutils.join(config.codehosting.internal_codebrowse_root, path)
def _getBranchIdAndTrailingPath(self, location):
"""Return the branch id and trailing path for 'location'.
@@ -51,21 +47,24 @@ class BranchRewriter:
In addition this method returns whether the answer came from the cache
or from the database.
"""
- for first, second in iter_split(location[1:], '/'):
+ for first, second in iter_split(location[1:], "/"):
if first in self._cache:
branch_id, inserted_time = self._cache[first]
- if (self._now() < inserted_time +
- config.codehosting.branch_rewrite_cache_lifetime):
+ if (
+ self._now()
+ < inserted_time
+ + config.codehosting.branch_rewrite_cache_lifetime
+ ):
return branch_id, second, "HIT"
lookup = getUtility(IBranchLookup)
- branch, trailing = lookup.getByHostingPath(location.lstrip('/'))
+ branch, trailing = lookup.getByHostingPath(location.lstrip("/"))
if branch is not None:
try:
branch_id = branch.id
except Unauthorized:
pass
else:
- unique_name = location[1:-len(trailing)]
+ unique_name = location[1 : -len(trailing)]
self._cache[unique_name] = (branch_id, self._now())
return branch_id, trailing, "MISS"
return None, None, "MISS"
@@ -106,28 +105,36 @@ class BranchRewriter:
set_request_started()
try:
cached = None
- if resource_location.startswith('/static/'):
+ if resource_location.startswith("/static/"):
r = self._codebrowse_url(resource_location)
- cached = 'N/A'
+ cached = "N/A"
else:
branch_id, trailing, cached = self._getBranchIdAndTrailingPath(
- resource_location)
+ resource_location
+ )
if branch_id is None:
if resource_location.startswith(
- '/' + BRANCH_ID_ALIAS_PREFIX):
- r = 'NULL'
+ "/" + BRANCH_ID_ALIAS_PREFIX
+ ):
+ r = "NULL"
else:
r = self._codebrowse_url(resource_location)
else:
- if trailing.startswith('/.bzr'):
+ if trailing.startswith("/.bzr"):
r = urlutils.join(
config.codehosting.internal_branch_by_id_root,
- branch_id_to_path(branch_id), trailing[1:])
+ branch_id_to_path(branch_id),
+ trailing[1:],
+ )
else:
r = self._codebrowse_url(resource_location)
finally:
clear_request_started()
self.logger.info(
"%r -> %r (%fs, cache: %s)",
- resource_location, r, time.time() - T, cached)
+ resource_location,
+ r,
+ time.time() - T,
+ cached,
+ )
return r
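
(Aside on the rewrite-map code above: _getBranchIdAndTrailingPath walks prefixes of the requested path, answering from an in-memory cache when the cached entry is younger than branch_rewrite_cache_lifetime and otherwise falling back to a database lookup whose result is then cached under the branch's unique name. The sketch below only illustrates that prefix-plus-TTL idea; prefix_splits, lookup, the 300-second lifetime and the sample branch path are invented for the example and do not reproduce lp.services.utils.iter_split or the real configuration.)

import time

CACHE_LIFETIME = 300.0  # stand-in for config.codehosting.branch_rewrite_cache_lifetime
_cache = {}  # unique_name -> (branch_id, time the entry was inserted)

def prefix_splits(path):
    # Invented stand-in for the prefix iteration used above: yield
    # (prefix, trailing) pairs from the longest prefix down to the shortest.
    parts = path.split("/")
    for i in range(len(parts), 0, -1):
        yield "/".join(parts[:i]), "/".join([""] + parts[i:])

def lookup(location, now=time.time):
    # Mirrors the HIT/MISS shape of _getBranchIdAndTrailingPath: a cached
    # prefix only counts when it was inserted within the cache lifetime.
    for prefix, trailing in prefix_splits(location.lstrip("/")):
        entry = _cache.get(prefix)
        if entry is not None:
            branch_id, inserted = entry
            if now() < inserted + CACHE_LIFETIME:
                return branch_id, trailing, "HIT"
    return None, None, "MISS"  # the caller would fall back to the database

_cache["~owner/project/branch"] = (42, time.time())
assert lookup("/~owner/project/branch/.bzr/README") == (42, "/.bzr/README", "HIT")
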
diff --git a/lib/lp/codehosting/scanner/buglinks.py b/lib/lp/codehosting/scanner/buglinks.py
index f7fcfd5..e016403 100644
--- a/lib/lp/codehosting/scanner/buglinks.py
+++ b/lib/lp/codehosting/scanner/buglinks.py
@@ -4,8 +4,8 @@
"""Bugs support for the scanner."""
__all__ = [
- 'BugBranchLinker',
- ]
+ "BugBranchLinker",
+]
from urllib.parse import urlsplit
@@ -27,18 +27,17 @@ class BugBranchLinker:
protocol, host, path, ignored, ignored = urlsplit(url)
# Skip URLs that don't point to Launchpad.
- if host != 'launchpad.net':
+ if host != "launchpad.net":
return None
# Remove empty path segments.
- segments = [
- segment for segment in path.split('/') if len(segment) > 0]
+ segments = [segment for segment in path.split("/") if len(segment) > 0]
# Don't allow Launchpad URLs that aren't /bugs/<integer>.
try:
bug_segment, bug_id = segments
except ValueError:
return None
- if bug_segment != 'bugs':
+ if bug_segment != "bugs":
return None
try:
return int(bug_id)
@@ -47,7 +46,7 @@ class BugBranchLinker:
def _getBugStatus(self, bzr_status):
# Make sure the status is acceptable.
- valid_statuses = {'fixed': 'fixed'}
+ valid_statuses = {"fixed": "fixed"}
return valid_statuses.get(bzr_status.lower(), None)
def extractBugInfo(self, bzr_revision):
@@ -85,7 +84,8 @@ class BugBranchLinker:
else:
bug.linkBranch(
branch=self.db_branch,
- registrant=getUtility(ILaunchpadCelebrities).janitor)
+ registrant=getUtility(ILaunchpadCelebrities).janitor,
+ )
def got_new_mainline_revisions(new_mainline_revisions):
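
(Aside on the bug-link parsing above: a revision's 'bugs' property is a newline-separated list of '<url> <status>' pairs; only launchpad.net/bugs/<integer> URLs are accepted, and only the 'fixed' status is recognised, as the test cases later in this diff confirm. The standalone sketch below shows that parsing shape; the function names bug_id_from_url and bugs_from_property are invented for the example, and the real scanner also records an OOPS for a malformed bug id rather than skipping it silently.)

from urllib.parse import urlsplit

def bug_id_from_url(url):
    # Only https://launchpad.net/bugs/<integer> URLs yield a bug id.
    _, host, path, _, _ = urlsplit(url)
    if host != "launchpad.net":
        return None
    segments = [segment for segment in path.split("/") if segment]
    if len(segments) != 2 or segments[0] != "bugs":
        return None
    try:
        return int(segments[1])
    except ValueError:
        return None

def bugs_from_property(bugs_property):
    # Each line of the property is "<url> <status>"; only "fixed" is kept.
    bugs = {}
    for line in bugs_property.splitlines():
        try:
            url, status = line.split(None, 1)
        except ValueError:
            continue
        bug_id = bug_id_from_url(url)
        if bug_id is not None and status.strip().lower() == "fixed":
            bugs[bug_id] = "fixed"
    return bugs

assert bugs_from_property("https://launchpad.net/bugs/9999 fixed") == {9999: "fixed"}
assert bugs_from_property("http://bugs.debian.org/1234 fixed") == {}
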
diff --git a/lib/lp/codehosting/scanner/bzrsync.py b/lib/lp/codehosting/scanner/bzrsync.py
index 4847e37..92e7c45 100755
--- a/lib/lp/codehosting/scanner/bzrsync.py
+++ b/lib/lp/codehosting/scanner/bzrsync.py
@@ -7,26 +7,23 @@
__all__ = [
"BzrSync",
- 'schedule_diff_updates',
- 'schedule_translation_templates_build',
- 'schedule_translation_upload',
- ]
+ "schedule_diff_updates",
+ "schedule_translation_templates_build",
+ "schedule_translation_upload",
+]
import logging
-from breezy.graph import DictParentsProvider
-from breezy.revision import NULL_REVISION
import pytz
import six
-from storm.locals import Store
import transaction
+from breezy.graph import DictParentsProvider
+from breezy.revision import NULL_REVISION
+from storm.locals import Store
from zope.component import getUtility
from zope.event import notify
-from lp.code.bzr import (
- branch_revision_history,
- get_ancestry,
- )
+from lp.code.bzr import branch_revision_history, get_ancestry
from lp.code.interfaces.branchjob import IRosettaUploadJobSource
from lp.code.interfaces.revision import IRevisionSet
from lp.code.model.branchrevision import BranchRevision
@@ -38,15 +35,13 @@ from lp.services.utils import iter_chunks
from lp.services.webhooks.interfaces import IWebhookSet
from lp.translations.interfaces.translationtemplatesbuild import (
ITranslationTemplatesBuildSource,
- )
+)
-
-UTC = pytz.timezone('UTC')
+UTC = pytz.timezone("UTC")
class BzrSync:
- """Import version control metadata from a Bazaar branch into the database.
- """
+ """Import version control metadata from a Bazaar branch into the DB."""
def __init__(self, branch, logger=None):
self.db_branch = branch
@@ -56,8 +51,7 @@ class BzrSync:
self.revision_set = getUtility(IRevisionSet)
def syncBranchAndClose(self, bzr_branch=None):
- """Synchronize the database with a Bazaar branch, handling locking.
- """
+ """Synchronize the database with a Bazaar branch, handling locking."""
if bzr_branch is None:
bzr_branch = self.db_branch.getBzrBranch()
bzr_branch.lock_read()
@@ -92,18 +86,24 @@ class BzrSync:
self.logger.info("Retrieving history from breezy.")
bzr_history = [
six.ensure_text(revid)
- for revid in branch_revision_history(bzr_branch)]
+ for revid in branch_revision_history(bzr_branch)
+ ]
# The BranchRevision, Revision and RevisionParent tables are only
# written to by the branch-scanner, so they are not subject to
# write-lock contention. Update them all in a single transaction to
# improve the performance and allow garbage collection in the future.
db_ancestry, db_history = self.retrieveDatabaseAncestry()
- (new_ancestry, branchrevisions_to_delete,
- revids_to_insert) = self.planDatabaseChanges(
- bzr_branch, bzr_history, db_ancestry, db_history)
- new_db_revs = (
- new_ancestry - getUtility(IRevisionSet).onlyPresent(new_ancestry))
+ (
+ new_ancestry,
+ branchrevisions_to_delete,
+ revids_to_insert,
+ ) = self.planDatabaseChanges(
+ bzr_branch, bzr_history, db_ancestry, db_history
+ )
+ new_db_revs = new_ancestry - getUtility(IRevisionSet).onlyPresent(
+ new_ancestry
+ )
self.logger.info("Adding %s new revisions.", len(new_db_revs))
for revids in iter_chunks(new_db_revs, 10000):
revisions = self.getBazaarRevisions(bzr_branch, revids)
@@ -119,7 +119,7 @@ class BzrSync:
# Notify any listeners that the tip of the branch has changed, but
# before we've actually updated the database branch.
self.logger.info("Firing tip change event.")
- initial_scan = (len(db_history) == 0)
+ initial_scan = len(db_history) == 0
notify(events.TipChanged(self.db_branch, bzr_branch, initial_scan))
# The Branch table is modified by other systems, including the web UI,
@@ -134,7 +134,9 @@ class BzrSync:
self.logger.info("Firing scan completion event.")
notify(
events.ScanCompleted(
- self.db_branch, bzr_branch, self.logger, new_ancestry))
+ self.db_branch, bzr_branch, self.logger, new_ancestry
+ )
+ )
transaction.commit()
def retrieveDatabaseAncestry(self):
@@ -146,17 +148,20 @@ class BzrSync:
def _getRevisionGraph(self, bzr_branch, db_last):
if bzr_branch.repository.has_revision(db_last):
return bzr_branch.repository.get_graph()
- revisions = Store.of(self.db_branch).find(Revision,
- BranchRevision.branch_id == self.db_branch.id,
- Revision.id == BranchRevision.revision_id)
+ revisions = Store.of(self.db_branch).find(
+ Revision,
+ BranchRevision.branch_id == self.db_branch.id,
+ Revision.id == BranchRevision.revision_id,
+ )
parent_map = {
- six.ensure_binary(r.revision_id):
- [six.ensure_binary(revid) for revid in r.parent_ids]
- for r in revisions}
+ six.ensure_binary(r.revision_id): [
+ six.ensure_binary(revid) for revid in r.parent_ids
+ ]
+ for r in revisions
+ }
parents_provider = DictParentsProvider(parent_map)
class PPSource:
-
@staticmethod
def _make_parents_provider():
return parents_provider
@@ -173,12 +178,14 @@ class BzrSync:
else:
db_last = six.ensure_binary(db_last)
graph = self._getRevisionGraph(bzr_branch, db_last)
- added_ancestry, removed_ancestry = (
- graph.find_difference(bzr_last, db_last))
+ added_ancestry, removed_ancestry = graph.find_difference(
+ bzr_last, db_last
+ )
added_ancestry.discard(NULL_REVISION)
added_ancestry = {six.ensure_text(revid) for revid in added_ancestry}
removed_ancestry = {
- six.ensure_text(revid) for revid in removed_ancestry}
+ six.ensure_text(revid) for revid in removed_ancestry
+ }
return added_ancestry, removed_ancestry
def getHistoryDelta(self, bzr_history, db_history):
@@ -201,8 +208,9 @@ class BzrSync:
added_history = bzr_history[common_len:]
return added_history, removed_history
- def planDatabaseChanges(self, bzr_branch, bzr_history, db_ancestry,
- db_history):
+ def planDatabaseChanges(
+ self, bzr_branch, bzr_history, db_ancestry, db_history
+ ):
"""Plan database changes to synchronize with breezy data.
Use the data retrieved by `retrieveDatabaseAncestry` and
@@ -211,37 +219,46 @@ class BzrSync:
self.logger.info("Planning changes.")
# Find the length of the common history.
added_history, removed_history = self.getHistoryDelta(
- bzr_history, db_history)
+ bzr_history, db_history
+ )
added_ancestry, removed_ancestry = self.getAncestryDelta(bzr_branch)
notify(
events.RevisionsRemoved(
- self.db_branch, bzr_branch, removed_history))
+ self.db_branch, bzr_branch, removed_history
+ )
+ )
# We must delete BranchRevision rows for all revisions which were
# removed from the ancestry or whose sequence value has changed.
branchrevisions_to_delete = set(removed_history)
branchrevisions_to_delete.update(removed_ancestry)
branchrevisions_to_delete.update(
- set(added_history).difference(added_ancestry))
+ set(added_history).difference(added_ancestry)
+ )
# We must insert BranchRevision rows for all revisions which were
# added to the ancestry or whose sequence value has changed.
last_revno = len(bzr_history)
revids_to_insert = dict(
- self.revisionsToInsert(
- added_history, last_revno, added_ancestry))
+ self.revisionsToInsert(added_history, last_revno, added_ancestry)
+ )
# We must remove any stray BranchRevisions that happen to already be
# present.
self.logger.info("Finding stray BranchRevisions.")
existing_branchrevisions = Store.of(self.db_branch).find(
- Revision.revision_id, BranchRevision.branch == self.db_branch,
+ Revision.revision_id,
+ BranchRevision.branch == self.db_branch,
BranchRevision.revision_id == Revision.id,
- Revision.revision_id.is_in(revids_to_insert))
+ Revision.revision_id.is_in(revids_to_insert),
+ )
branchrevisions_to_delete.update(existing_branchrevisions)
- return (added_ancestry, list(branchrevisions_to_delete),
- revids_to_insert)
+ return (
+ added_ancestry,
+ list(branchrevisions_to_delete),
+ revids_to_insert,
+ )
def getBazaarRevisions(self, bzr_branch, revisions):
"""Like ``get_revisions(revisions)`` but filter out ghosts first.
@@ -250,7 +267,8 @@ class BzrSync:
Revision objects for.
"""
revisions = bzr_branch.repository.get_parent_map(
- [six.ensure_binary(revid) for revid in revisions])
+ [six.ensure_binary(revid) for revid in revisions]
+ )
return bzr_branch.repository.get_revisions(revisions.keys())
def syncRevisions(self, bzr_branch, bzr_revisions, revids_to_insert):
@@ -269,8 +287,11 @@ class BzrSync:
if revids_to_insert[revision_id] is None:
continue
mainline_revisions.append(bzr_revision)
- notify(events.NewMainlineRevisions(
- self.db_branch, bzr_branch, mainline_revisions))
+ notify(
+ events.NewMainlineRevisions(
+ self.db_branch, bzr_branch, mainline_revisions
+ )
+ )
@staticmethod
def revisionsToInsert(added_history, last_revno, added_ancestry):
@@ -290,8 +311,9 @@ class BzrSync:
def deleteBranchRevisions(self, revision_ids_to_delete):
"""Delete a batch of BranchRevision rows."""
- self.logger.info("Deleting %d branchrevision records.",
- len(revision_ids_to_delete))
+ self.logger.info(
+ "Deleting %d branchrevision records.", len(revision_ids_to_delete)
+ )
# Use a config value to work out how many to delete at a time.
# Deleting more than one at a time is significantly more efficient
# than doing one at a time, but the actual optimal count is a bit up
@@ -304,8 +326,9 @@ class BzrSync:
def insertBranchRevisions(self, bzr_branch, revids_to_insert):
"""Insert a batch of BranchRevision rows."""
- self.logger.info("Inserting %d branchrevision records.",
- len(revids_to_insert))
+ self.logger.info(
+ "Inserting %d branchrevision records.", len(revids_to_insert)
+ )
revid_seq_pairs = revids_to_insert.items()
for revid_seq_pair_chunk in iter_chunks(revid_seq_pairs, 10000):
self.db_branch.createBranchRevisionFromIDs(revid_seq_pair_chunk)
@@ -320,13 +343,15 @@ class BzrSync:
else:
revision = None
self.logger.info(
- "Updating branch scanner status: %s revs", revision_count)
+ "Updating branch scanner status: %s revs", revision_count
+ )
self.db_branch.updateScannedDetails(revision, revision_count)
def schedule_translation_upload(tip_changed):
getUtility(IRosettaUploadJobSource).create(
- tip_changed.db_branch, tip_changed.old_tip_revision_id)
+ tip_changed.db_branch, tip_changed.old_tip_revision_id
+ )
def schedule_translation_templates_build(tip_changed):
@@ -351,6 +376,8 @@ def trigger_webhooks(tip_changed):
new_revid = tip_changed.new_tip_revision_id
if getFeatureFlag("code.bzr.webhooks.enabled") and old_revid != new_revid:
payload = tip_changed.composeWebhookPayload(
- tip_changed.db_branch, old_revid, new_revid)
+ tip_changed.db_branch, old_revid, new_revid
+ )
getUtility(IWebhookSet).trigger(
- tip_changed.db_branch, "bzr:push:0.1", payload)
+ tip_changed.db_branch, "bzr:push:0.1", payload
+ )
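
(Aside on planDatabaseChanges above: the scanner compares the database's recorded mainline history with the branch's current mainline, treats everything past the common prefix as removed or added, and then schedules the corresponding BranchRevision deletions and insertions. The sketch below illustrates only the common-prefix delta; history_delta and the sample revision ids are invented for the example and do not reproduce the real getHistoryDelta or getAncestryDelta implementations.)

def history_delta(bzr_history, db_history):
    # Walk the two mainline revision-id lists in parallel until they diverge;
    # everything past the common prefix was either added or removed.
    common_len = 0
    for bzr_revid, db_revid in zip(bzr_history, db_history):
        if bzr_revid != db_revid:
            break
        common_len += 1
    added_history = bzr_history[common_len:]
    removed_history = db_history[common_len:]
    return added_history, removed_history

# A branch whose old tip was uncommitted and replaced by two new revisions:
added, removed = history_delta(["r1", "r2", "r3a", "r4a"], ["r1", "r2", "r3"])
assert added == ["r3a", "r4a"] and removed == ["r3"]
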
diff --git a/lib/lp/codehosting/scanner/email.py b/lib/lp/codehosting/scanner/email.py
index 01b1ea6..68a3fba 100644
--- a/lib/lp/codehosting/scanner/email.py
+++ b/lib/lp/codehosting/scanner/email.py
@@ -4,9 +4,9 @@
"""Email code for the branch scanner."""
__all__ = [
- 'send_removed_revision_emails',
- 'queue_tip_changed_email_jobs',
- ]
+ "send_removed_revision_emails",
+ "queue_tip_changed_email_jobs",
+]
import six
from zope.component import getUtility
@@ -15,14 +15,15 @@ from lp.code.enums import BranchSubscriptionNotificationLevel
from lp.code.interfaces.branchjob import (
IRevisionMailJobSource,
IRevisionsAddedJobSource,
- )
+)
from lp.services.config import config
def subscribers_want_notification(db_branch):
diff_levels = (
BranchSubscriptionNotificationLevel.DIFFSONLY,
- BranchSubscriptionNotificationLevel.FULL)
+ BranchSubscriptionNotificationLevel.FULL,
+ )
subscriptions = db_branch.getSubscriptionsByLevel(diff_levels)
return not subscriptions.is_empty()
@@ -38,18 +39,23 @@ def send_removed_revision_emails(revisions_removed):
if number_removed == 0:
return
if number_removed == 1:
- count = '1 revision'
- contents = '%s was removed from the branch.' % count
+ count = "1 revision"
+ contents = "%s was removed from the branch." % count
else:
- count = '%d revisions' % number_removed
- contents = '%s were removed from the branch.' % count
+ count = "%d revisions" % number_removed
+ contents = "%s were removed from the branch." % count
# No diff is associated with the removed email.
subject = "[Branch %s] %s removed" % (
- revisions_removed.db_branch.unique_name, count)
+ revisions_removed.db_branch.unique_name,
+ count,
+ )
job = getUtility(IRevisionMailJobSource).create(
- revisions_removed.db_branch, revno='removed',
+ revisions_removed.db_branch,
+ revno="removed",
from_address=config.canonical.noreply_from_address,
- body=contents, subject=subject)
+ body=contents,
+ subject=subject,
+ )
job.celeryRunOnCommit()
@@ -59,20 +65,29 @@ def queue_tip_changed_email_jobs(tip_changed):
if tip_changed.initial_scan:
revision_count = tip_changed.bzr_branch.revno()
if revision_count == 1:
- revisions = '1 revision'
+ revisions = "1 revision"
else:
- revisions = '%d revisions' % revision_count
- message = ('First scan of the branch detected %s'
- ' in the revision history of the branch.' %
- revisions)
+ revisions = "%d revisions" % revision_count
+ message = (
+ "First scan of the branch detected %s"
+ " in the revision history of the branch." % revisions
+ )
subject = "[Branch %s] %s" % (
- tip_changed.db_branch.unique_name, revisions)
+ tip_changed.db_branch.unique_name,
+ revisions,
+ )
job = getUtility(IRevisionMailJobSource).create(
- tip_changed.db_branch, 'initial',
- config.canonical.noreply_from_address, message, subject)
+ tip_changed.db_branch,
+ "initial",
+ config.canonical.noreply_from_address,
+ message,
+ subject,
+ )
else:
job = getUtility(IRevisionsAddedJobSource).create(
- tip_changed.db_branch, tip_changed.db_branch.last_scanned_id,
+ tip_changed.db_branch,
+ tip_changed.db_branch.last_scanned_id,
six.ensure_text(tip_changed.bzr_branch.last_revision()),
- config.canonical.noreply_from_address)
+ config.canonical.noreply_from_address,
+ )
job.celeryRunOnCommit()
diff --git a/lib/lp/codehosting/scanner/events.py b/lib/lp/codehosting/scanner/events.py
index ca42cc6..442da4c 100644
--- a/lib/lp/codehosting/scanner/events.py
+++ b/lib/lp/codehosting/scanner/events.py
@@ -4,17 +4,14 @@
"""Events generated by the scanner."""
__all__ = [
- 'NewMainlineRevisions',
- 'RevisionsRemoved',
- 'TipChanged',
- ]
+ "NewMainlineRevisions",
+ "RevisionsRemoved",
+ "TipChanged",
+]
import six
from zope.interface import implementer
-from zope.interface.interfaces import (
- IObjectEvent,
- ObjectEvent,
- )
+from zope.interface.interfaces import IObjectEvent, ObjectEvent
from lp.services.webapp.publisher import canonical_url
@@ -23,7 +20,7 @@ class ScannerEvent(ObjectEvent):
"""Base scanner event."""
def __init__(self, db_branch, bzr_branch):
- """"Construct a scanner event.
+ """ "Construct a scanner event.
:param db_branch: The database IBranch.
:param bzr_branch: The Bazaar branch being scanned.
@@ -87,7 +84,7 @@ class TipChanged(ScannerEvent):
"bzr_branch_path": branch.shortened_path,
"old": {"revision_id": old_revid},
"new": {"revision_id": new_revid},
- }
+ }
class IRevisionsRemoved(IObjectEvent):
diff --git a/lib/lp/codehosting/scanner/mergedetection.py b/lib/lp/codehosting/scanner/mergedetection.py
index 121d2d9..a28813b 100644
--- a/lib/lp/codehosting/scanner/mergedetection.py
+++ b/lib/lp/codehosting/scanner/mergedetection.py
@@ -4,12 +4,12 @@
"""The way the branch scanner handles merges."""
__all__ = [
- 'auto_merge_branches',
- 'auto_merge_proposals',
- ]
+ "auto_merge_branches",
+ "auto_merge_proposals",
+]
-from breezy.revision import NULL_REVISION
import six
+from breezy.revision import NULL_REVISION
from zope.component import getUtility
from lp.code.adapters.branch import BranchMergeProposalNoPreviewDiffDelta
@@ -17,7 +17,7 @@ from lp.code.enums import BranchLifecycleStatus
from lp.code.interfaces.branchcollection import IAllBranches
from lp.code.interfaces.branchmergeproposal import (
BRANCH_MERGE_PROPOSAL_FINAL_STATES,
- )
+)
from lp.services.utils import CachingIterator
@@ -54,8 +54,8 @@ def merge_detected(logger, source, target, proposal=None, merge_revno=None):
# If the target branch is not the development focus, then don't update
# the status of the source branch.
logger.info(
- 'Merge detected: %s => %s',
- source.bzr_identity, target.bzr_identity)
+ "Merge detected: %s => %s", source.bzr_identity, target.bzr_identity
+ )
if proposal is None:
# If there's no explicit merge proposal, only change the branch's
# status when it has been merged into the development focus.
@@ -99,7 +99,8 @@ def auto_merge_branches(scan_completed):
BranchLifecycleStatus.DEVELOPMENT,
BranchLifecycleStatus.EXPERIMENTAL,
BranchLifecycleStatus.MATURE,
- BranchLifecycleStatus.ABANDONED).getBranches(eager_load=False)
+ BranchLifecycleStatus.ABANDONED,
+ ).getBranches(eager_load=False)
for branch in branches:
last_scanned = branch.last_scanned_id
# If the branch doesn't have any revisions, not any point setting
@@ -162,16 +163,22 @@ def auto_merge_proposals(scan_completed):
# initialising it until we need it, and we cache the iterator's
# results.
merge_sorted = CachingIterator(
- scan_completed.bzr_branch.iter_merge_sorted_revisions)
+ scan_completed.bzr_branch.iter_merge_sorted_revisions
+ )
for proposal in db_branch.landing_candidates:
tip_rev_id = proposal.source_branch.last_scanned_id
if tip_rev_id in new_ancestry:
merged_revno = find_merged_revno(
- merge_sorted, six.ensure_binary(tip_rev_id))
+ merge_sorted, six.ensure_binary(tip_rev_id)
+ )
# Remember so we can find the merged revision number.
merge_detected(
- logger, proposal.source_branch, db_branch, proposal,
- merged_revno)
+ logger,
+ proposal.source_branch,
+ db_branch,
+ proposal,
+ merged_revno,
+ )
# Now check the landing targets. We should probably get rid of this,
# especially if we are trying to get rid of the branch revision table.
@@ -182,7 +189,9 @@ def auto_merge_proposals(scan_completed):
# If there is a branch revision record for target branch with
# the tip_rev_id of the source branch, then it is merged.
branch_revision = proposal.target_branch.getBranchRevision(
- revision_id=tip_rev_id)
+ revision_id=tip_rev_id
+ )
if branch_revision is not None:
merge_detected(
- logger, db_branch, proposal.target_branch, proposal)
+ logger, db_branch, proposal.target_branch, proposal
+ )
diff --git a/lib/lp/codehosting/scanner/tests/test_buglinks.py b/lib/lp/codehosting/scanner/tests/test_buglinks.py
index 790a9af..982e3a7 100644
--- a/lib/lp/codehosting/scanner/tests/test_buglinks.py
+++ b/lib/lp/codehosting/scanner/tests/test_buglinks.py
@@ -15,14 +15,8 @@ from lp.codehosting.scanner.buglinks import BugBranchLinker
from lp.codehosting.scanner.tests.test_bzrsync import BzrSyncTestCase
from lp.registry.interfaces.pocket import PackagePublishingPocket
from lp.services.osutils import override_environ
-from lp.testing import (
- TestCase,
- TestCaseWithFactory,
- )
-from lp.testing.dbuser import (
- lp_dbuser,
- switch_dbuser,
- )
+from lp.testing import TestCase, TestCaseWithFactory
+from lp.testing.dbuser import lp_dbuser, switch_dbuser
from lp.testing.layers import LaunchpadZopelessLayer
@@ -50,8 +44,8 @@ class RevisionPropertyParsing(TestCase):
def extractBugInfo(self, bug_property):
revision = Revision(
- self.factory.getUniqueString(),
- properties=dict(bugs=bug_property))
+ self.factory.getUniqueString(), properties=dict(bugs=bug_property)
+ )
bug_linker = BugBranchLinker(None)
return bug_linker.extractBugInfo(revision)
@@ -59,46 +53,46 @@ class RevisionPropertyParsing(TestCase):
# Parsing a single line should give a dict with a single entry,
# mapping the bug_id to the status.
bugs = self.extractBugInfo("https://launchpad.net/bugs/9999 fixed")
- self.assertEqual(bugs, {9999: 'fixed'})
+ self.assertEqual(bugs, {9999: "fixed"})
def test_multiple(self):
# Information about more than one bug can be specified. Make sure that
# all the information is processed.
bugs = self.extractBugInfo(
"https://launchpad.net/bugs/9999 fixed\n"
- "https://launchpad.net/bugs/8888 fixed")
- self.assertEqual(bugs, {9999: 'fixed',
- 8888: 'fixed'})
+ "https://launchpad.net/bugs/8888 fixed"
+ )
+ self.assertEqual(bugs, {9999: "fixed", 8888: "fixed"})
def test_empty(self):
# If the property is empty, then return an empty dict.
- bugs = self.extractBugInfo('')
+ bugs = self.extractBugInfo("")
self.assertEqual(bugs, {})
def test_bad_bug(self):
# If the given bug is not a valid integer, then skip it, generate an
# OOPS and continue processing.
- bugs = self.extractBugInfo('https://launchpad.net/~jml fixed')
+ bugs = self.extractBugInfo("https://launchpad.net/~jml fixed")
self.assertEqual(bugs, {})
def test_non_launchpad_bug(self):
# References to bugs on sites other than launchpad are ignored.
- bugs = self.extractBugInfo('http://bugs.debian.org/1234 fixed')
+ bugs = self.extractBugInfo("http://bugs.debian.org/1234 fixed")
self.assertEqual(bugs, {})
def test_duplicated_line(self):
# If a particular line is duplicated, silently ignore the duplicates.
bugs = self.extractBugInfo(
- 'https://launchpad.net/bugs/9999 fixed\n'
- 'https://launchpad.net/bugs/9999 fixed')
- self.assertEqual(bugs, {9999: 'fixed'})
+ "https://launchpad.net/bugs/9999 fixed\n"
+ "https://launchpad.net/bugs/9999 fixed"
+ )
+ self.assertEqual(bugs, {9999: "fixed"})
def test_strict_url_checking(self):
# Ignore URLs that look like a Launchpad bug URL but aren't.
- bugs = self.extractBugInfo('https://launchpad.net/people/1234 fixed')
+ bugs = self.extractBugInfo("https://launchpad.net/people/1234 fixed")
self.assertEqual(bugs, {})
- bugs = self.extractBugInfo(
- 'https://launchpad.net/bugs/foo/1234 fixed')
+ bugs = self.extractBugInfo("https://launchpad.net/bugs/foo/1234 fixed")
self.assertEqual(bugs, {})
@@ -131,7 +125,7 @@ class TestBugLinking(BzrSyncTestCase):
We don't use canonical_url because we don't want to have to make
Bazaar know about launchpad.test.
"""
- return 'https://launchpad.net/bugs/%s' % bug.id
+ return "https://launchpad.net/bugs/%s" % bug.id
def assertBugBranchLinked(self, bug, branch):
"""Assert that the BugBranch for `bug` and `branch` exists.
@@ -143,16 +137,18 @@ class TestBugLinking(BzrSyncTestCase):
def test_newMainlineRevisionAddsBugBranch(self):
"""New mainline revisions with bugs properties create BugBranches."""
self.commitRevision(
- rev_id=b'rev1',
- revprops={'bugs': '%s fixed' % self.getBugURL(self.bug1)})
+ rev_id=b"rev1",
+ revprops={"bugs": "%s fixed" % self.getBugURL(self.bug1)},
+ )
self.syncBazaarBranchToDatabase(self.bzr_branch, self.db_branch)
self.assertBugBranchLinked(self.bug1, self.db_branch)
def test_scanningTwiceDoesntMatter(self):
"""Scanning a branch twice is the same as scanning it once."""
self.commitRevision(
- rev_id=b'rev1',
- revprops={'bugs': '%s fixed' % self.getBugURL(self.bug1)})
+ rev_id=b"rev1",
+ revprops={"bugs": "%s fixed" % self.getBugURL(self.bug1)},
+ )
self.syncBazaarBranchToDatabase(self.bzr_branch, self.db_branch)
self.syncBazaarBranchToDatabase(self.bzr_branch, self.db_branch)
self.assertBugBranchLinked(self.bug1, self.db_branch)
@@ -161,15 +157,17 @@ class TestBugLinking(BzrSyncTestCase):
with lp_dbuser():
branch = self.factory.makePackageBranch()
branch.sourcepackage.setBranch(
- PackagePublishingPocket.RELEASE, branch, branch.owner)
+ PackagePublishingPocket.RELEASE, branch, branch.owner
+ )
return branch
def test_linking_bug_to_official_package_branch(self):
# We can link a bug to an official package branch. Test added to catch
# bug 391303.
self.commitRevision(
- rev_id=b'rev1',
- revprops={'bugs': '%s fixed' % self.getBugURL(self.bug1)})
+ rev_id=b"rev1",
+ revprops={"bugs": "%s fixed" % self.getBugURL(self.bug1)},
+ )
branch = self.makePackageBranch()
self.syncBazaarBranchToDatabase(self.bzr_branch, branch)
self.assertBugBranchLinked(self.bug1, branch)
@@ -177,8 +175,9 @@ class TestBugLinking(BzrSyncTestCase):
def test_knownMainlineRevisionsDoesntMakeLink(self):
"""Don't add BugBranches for known mainline revision."""
self.commitRevision(
- rev_id=b'rev1',
- revprops={'bugs': '%s fixed' % self.getBugURL(self.bug1)})
+ rev_id=b"rev1",
+ revprops={"bugs": "%s fixed" % self.getBugURL(self.bug1)},
+ )
self.syncBazaarBranchToDatabase(self.bzr_branch, self.db_branch)
# Create a new DB branch to sync with.
self.syncBazaarBranchToDatabase(self.bzr_branch, self.new_db_branch)
@@ -190,29 +189,38 @@ class TestBugLinking(BzrSyncTestCase):
author = self.factory.getUniqueString()
# XXX: AaronBentley 2010-08-06 bug=614404: a bzr username is
# required to generate the revision-id.
- with override_environ(BRZ_EMAIL='me@xxxxxxxxxxx'):
+ with override_environ(BRZ_EMAIL="me@xxxxxxxxxxx"):
self.bzr_tree.commit(
- 'common parent', committer=author, rev_id=b'r1',
- allow_pointless=True)
+ "common parent",
+ committer=author,
+ rev_id=b"r1",
+ allow_pointless=True,
+ )
# Branch from the base revision.
- new_tree = self.make_branch_and_tree('bzr_branch_merged')
+ new_tree = self.make_branch_and_tree("bzr_branch_merged")
new_tree.pull(self.bzr_branch)
# Commit to both branches
self.bzr_tree.commit(
- 'commit one', committer=author, rev_id=b'r2',
- allow_pointless=True)
+ "commit one",
+ committer=author,
+ rev_id=b"r2",
+ allow_pointless=True,
+ )
new_tree.commit(
- 'commit two', committer=author, rev_id=b'r1.1.1',
+ "commit two",
+ committer=author,
+ rev_id=b"r1.1.1",
allow_pointless=True,
- revprops={'bugs': '%s fixed' % self.getBugURL(self.bug1)})
+ revprops={"bugs": "%s fixed" % self.getBugURL(self.bug1)},
+ )
# Merge and commit.
self.bzr_tree.merge_from_branch(new_tree.branch)
self.bzr_tree.commit(
- 'merge', committer=author, rev_id=b'r3',
- allow_pointless=True)
+ "merge", committer=author, rev_id=b"r3", allow_pointless=True
+ )
self.syncBazaarBranchToDatabase(self.bzr_branch, self.db_branch)
self.assertNotIn(self.db_branch, self.bug1.linked_branches)
@@ -222,17 +230,21 @@ class TestBugLinking(BzrSyncTestCase):
self.assertRaises(NotFoundError, getUtility(IBugSet).get, 99999)
self.assertEqual([], list(self.db_branch.linked_bugs))
self.commitRevision(
- rev_id=b'rev1',
- revprops={'bugs': 'https://launchpad.net/bugs/99999 fixed'})
+ rev_id=b"rev1",
+ revprops={"bugs": "https://launchpad.net/bugs/99999 fixed"},
+ )
self.syncBazaarBranchToDatabase(self.bzr_branch, self.db_branch)
self.assertEqual([], list(self.db_branch.linked_bugs))
def test_multipleBugsInProperty(self):
"""Create BugBranch links for *all* bugs in the property."""
self.commitRevision(
- rev_id=b'rev1',
- revprops={'bugs': '%s fixed\n%s fixed' % (
- self.getBugURL(self.bug1), self.getBugURL(self.bug2))})
+ rev_id=b"rev1",
+ revprops={
+ "bugs": "%s fixed\n%s fixed"
+ % (self.getBugURL(self.bug1), self.getBugURL(self.bug2))
+ },
+ )
self.syncBazaarBranchToDatabase(self.bzr_branch, self.db_branch)
self.assertBugBranchLinked(self.bug1, self.db_branch)
@@ -251,13 +263,17 @@ class TestSubscription(TestCaseWithFactory):
switch_dbuser("branchscanner")
# XXX: AaronBentley 2010-08-06 bug=614404: a bzr username is
# required to generate the revision-id.
- with override_environ(BRZ_EMAIL='me@xxxxxxxxxxx'):
- revision_id = tree.commit('fix revision',
+ with override_environ(BRZ_EMAIL="me@xxxxxxxxxxx"):
+ revision_id = tree.commit(
+ "fix revision",
revprops={
- 'bugs': 'https://launchpad.net/bugs/%d fixed' % bug.id})
+ "bugs": "https://launchpad.net/bugs/%d fixed" % bug.id
+ },
+ )
bzr_revision = tree.branch.repository.get_revision(revision_id)
revision_set = getUtility(IRevisionSet)
revision_set.newFromBazaarRevisions([bzr_revision])
- notify(events.NewMainlineRevisions(
- db_branch, tree.branch, [bzr_revision]))
+ notify(
+ events.NewMainlineRevisions(db_branch, tree.branch, [bzr_revision])
+ )
self.assertIn(db_branch, bug.linked_branches)
diff --git a/lib/lp/codehosting/scanner/tests/test_bzrsync.py b/lib/lp/codehosting/scanner/tests/test_bzrsync.py
index 216b400..bae6fa3 100644
--- a/lib/lp/codehosting/scanner/tests/test_bzrsync.py
+++ b/lib/lp/codehosting/scanner/tests/test_bzrsync.py
@@ -8,25 +8,16 @@ import os
import random
import time
-from breezy.revision import (
- NULL_REVISION,
- Revision as BzrRevision,
- )
+import pytz
+import six
+from breezy.revision import NULL_REVISION
+from breezy.revision import Revision as BzrRevision
from breezy.tests import TestCaseWithTransport
from breezy.uncommit import uncommit
from breezy.url_policy_open import BranchOpener
-from fixtures import (
- FakeLogger,
- TempDir,
- )
-import pytz
-import six
+from fixtures import FakeLogger, TempDir
from storm.locals import Store
-from testtools.matchers import (
- Equals,
- MatchesDict,
- MatchesStructure,
- )
+from testtools.matchers import Equals, MatchesDict, MatchesStructure
from twisted.python.util import mergeFunctionMetadata
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy
@@ -38,18 +29,11 @@ from lp.code.interfaces.revision import IRevisionSet
from lp.code.model.branchmergeproposaljob import (
BranchMergeProposalJobSource,
BranchMergeProposalJobType,
- )
+)
from lp.code.model.branchrevision import BranchRevision
-from lp.code.model.revision import (
- Revision,
- RevisionAuthor,
- RevisionParent,
- )
+from lp.code.model.revision import Revision, RevisionAuthor, RevisionParent
from lp.code.model.tests.test_diff import commit_file
-from lp.codehosting.bzrutils import (
- read_locked,
- write_locked,
- )
+from lp.codehosting.bzrutils import read_locked, write_locked
from lp.codehosting.scanner.bzrsync import BzrSync
from lp.services.config import config
from lp.services.database.interfaces import IStore
@@ -58,26 +42,21 @@ from lp.services.osutils import override_environ
from lp.services.webhooks.testing import LogsScheduledWebhooks
from lp.snappy.interfaces.snap import SNAP_TESTING_FLAGS
from lp.testing import TestCaseWithFactory
-from lp.testing.dbuser import (
- dbuser,
- lp_dbuser,
- switch_dbuser,
- )
+from lp.testing.dbuser import dbuser, lp_dbuser, switch_dbuser
from lp.testing.layers import LaunchpadZopelessLayer
from lp.translations.interfaces.translations import (
TranslationsBranchImportMode,
- )
+)
def run_as_db_user(username):
- """Create a decorator that will run a function as the given database user.
- """
+ """Create a decorator that will run a function as the given DB user."""
def _run_with_different_user(f):
-
def decorated(*args, **kwargs):
with dbuser(username):
return f(*args, **kwargs)
+
return mergeFunctionMetadata(f, decorated)
return _run_with_different_user
@@ -106,7 +85,8 @@ class BzrSyncTestCase(TestCaseWithTransport, TestCaseWithFactory):
def makeFixtures(self):
"""Makes test fixtures before we switch to the scanner db user."""
self.db_branch, self.bzr_tree = self.create_branch_and_tree(
- db_branch=self.makeDatabaseBranch())
+ db_branch=self.makeDatabaseBranch()
+ )
self.bzr_branch = self.bzr_tree.branch
def syncBazaarBranchToDatabase(self, bzr_branch, db_branch):
@@ -134,34 +114,49 @@ class BzrSyncTestCase(TestCaseWithTransport, TestCaseWithFactory):
store.find(Revision).count(),
store.find(BranchRevision).count(),
store.find(RevisionParent).count(),
- store.find(RevisionAuthor).count())
-
- def assertCounts(self, counts, new_revisions=0, new_numbers=0,
- new_parents=0, new_authors=0):
- (old_revision_count,
- old_revisionnumber_count,
- old_revisionparent_count,
- old_revisionauthor_count) = counts
- (new_revision_count,
- new_revisionnumber_count,
- new_revisionparent_count,
- new_revisionauthor_count) = self.getCounts()
+ store.find(RevisionAuthor).count(),
+ )
+
+ def assertCounts(
+ self,
+ counts,
+ new_revisions=0,
+ new_numbers=0,
+ new_parents=0,
+ new_authors=0,
+ ):
+ (
+ old_revision_count,
+ old_revisionnumber_count,
+ old_revisionparent_count,
+ old_revisionauthor_count,
+ ) = counts
+ (
+ new_revision_count,
+ new_revisionnumber_count,
+ new_revisionparent_count,
+ new_revisionauthor_count,
+ ) = self.getCounts()
self.assertEqual(
new_revisions,
new_revision_count - old_revision_count,
- "Wrong number of new database Revisions.")
+ "Wrong number of new database Revisions.",
+ )
self.assertEqual(
new_numbers,
new_revisionnumber_count - old_revisionnumber_count,
- "Wrong number of new BranchRevisions.")
+ "Wrong number of new BranchRevisions.",
+ )
self.assertEqual(
new_parents,
new_revisionparent_count - old_revisionparent_count,
- "Wrong number of new RevisionParents.")
+ "Wrong number of new RevisionParents.",
+ )
self.assertEqual(
new_authors,
new_revisionauthor_count - old_revisionauthor_count,
- "Wrong number of new RevisionAuthors.")
+ "Wrong number of new RevisionAuthors.",
+ )
def makeBzrSync(self, db_branch):
"""Create a BzrSync instance for the test branch.
@@ -171,20 +166,37 @@ class BzrSyncTestCase(TestCaseWithTransport, TestCaseWithFactory):
"""
return BzrSync(db_branch)
- def syncAndCount(self, db_branch=None, new_revisions=0, new_numbers=0,
- new_parents=0, new_authors=0):
+ def syncAndCount(
+ self,
+ db_branch=None,
+ new_revisions=0,
+ new_numbers=0,
+ new_parents=0,
+ new_authors=0,
+ ):
"""Run BzrSync and assert the number of rows added to each table."""
if db_branch is None:
db_branch = self.db_branch
counts = self.getCounts()
self.makeBzrSync(db_branch).syncBranchAndClose()
self.assertCounts(
- counts, new_revisions=new_revisions, new_numbers=new_numbers,
- new_parents=new_parents, new_authors=new_authors)
-
- def commitRevision(self, message=None, committer=None,
- extra_parents=None, rev_id=None,
- timestamp=None, timezone=None, revprops=None):
+ counts,
+ new_revisions=new_revisions,
+ new_numbers=new_numbers,
+ new_parents=new_parents,
+ new_authors=new_authors,
+ )
+
+ def commitRevision(
+ self,
+ message=None,
+ committer=None,
+ extra_parents=None,
+ rev_id=None,
+ timestamp=None,
+ timezone=None,
+ revprops=None,
+ ):
if message is None:
message = self.LOG
if committer is None:
@@ -193,18 +205,24 @@ class BzrSyncTestCase(TestCaseWithTransport, TestCaseWithFactory):
self.bzr_tree.add_pending_merge(*extra_parents)
# XXX: AaronBentley 2010-08-06 bug=614404: a bzr username is
# required to generate the revision-id.
- with override_environ(BRZ_EMAIL='me@xxxxxxxxxxx'):
+ with override_environ(BRZ_EMAIL="me@xxxxxxxxxxx"):
return self.bzr_tree.commit(
- message, committer=committer, rev_id=rev_id,
- timestamp=timestamp, timezone=timezone, allow_pointless=True,
- revprops=revprops)
+ message,
+ committer=committer,
+ rev_id=rev_id,
+ timestamp=timestamp,
+ timezone=timezone,
+ allow_pointless=True,
+ revprops=revprops,
+ )
def uncommitRevision(self):
branch = self.bzr_tree.branch
uncommit(branch, tree=self.bzr_tree)
- def makeBranchWithMerge(self, base_rev_id, trunk_rev_id, branch_rev_id,
- merge_rev_id):
+ def makeBranchWithMerge(
+ self, base_rev_id, trunk_rev_id, branch_rev_id, merge_rev_id
+ ):
"""Make a branch that has had another branch merged into it.
Creates two Bazaar branches and two database branches associated with
@@ -239,26 +257,29 @@ class BzrSyncTestCase(TestCaseWithTransport, TestCaseWithFactory):
# Make the base revision.
db_branch = self.makeDatabaseBranch()
db_branch, trunk_tree = self.create_branch_and_tree(
- db_branch=db_branch)
+ db_branch=db_branch
+ )
# XXX: AaronBentley 2010-08-06 bug=614404: a bzr username is
# required to generate the revision-id.
- with override_environ(BRZ_EMAIL='me@xxxxxxxxxxx'):
- trunk_tree.commit('base revision', rev_id=base_rev_id)
+ with override_environ(BRZ_EMAIL="me@xxxxxxxxxxx"):
+ trunk_tree.commit("base revision", rev_id=base_rev_id)
# Branch from the base revision.
new_db_branch = self.makeDatabaseBranch(
- product=db_branch.product)
+ product=db_branch.product
+ )
new_db_branch, branch_tree = self.create_branch_and_tree(
- db_branch=new_db_branch)
+ db_branch=new_db_branch
+ )
branch_tree.pull(trunk_tree.branch)
# Commit to both branches.
- trunk_tree.commit('trunk revision', rev_id=trunk_rev_id)
- branch_tree.commit('branch revision', rev_id=branch_rev_id)
+ trunk_tree.commit("trunk revision", rev_id=trunk_rev_id)
+ branch_tree.commit("branch revision", rev_id=branch_rev_id)
# Merge branch into trunk.
trunk_tree.merge_from_branch(branch_tree.branch)
- trunk_tree.commit('merge revision', rev_id=merge_rev_id)
+ trunk_tree.commit("merge revision", rev_id=merge_rev_id)
return (db_branch, trunk_tree), (new_db_branch, branch_tree)
@@ -268,10 +289,13 @@ class BzrSyncTestCase(TestCaseWithTransport, TestCaseWithFactory):
:return: A set of tuples (sequence, revision-id) for all the
BranchRevisions rows belonging to self.db_branch.
"""
- return set(IStore(BranchRevision).find(
- (BranchRevision.sequence, Revision.revision_id),
- Revision.id == BranchRevision.revision_id,
- BranchRevision.branch == db_branch))
+ return set(
+ IStore(BranchRevision).find(
+ (BranchRevision.sequence, Revision.revision_id),
+ Revision.id == BranchRevision.revision_id,
+ BranchRevision.branch == db_branch,
+ )
+ )
def writeToFile(self, filename="file", contents=None):
"""Set the contents of the specified file.
@@ -295,7 +319,6 @@ class BzrSyncTestCase(TestCaseWithTransport, TestCaseWithFactory):
class TestBzrSync(BzrSyncTestCase):
-
def isMainline(self, db_branch, revision_id):
"""Is `revision_id` in the mainline history of `db_branch`?"""
for branch_revision in db_branch.revision_history:
@@ -307,13 +330,15 @@ class TestBzrSync(BzrSyncTestCase):
"""Assert that `revision_id` is in the mainline of `db_branch`."""
self.assertTrue(
self.isMainline(db_branch, revision_id),
- "%r not in mainline of %r" % (revision_id, db_branch))
+ "%r not in mainline of %r" % (revision_id, db_branch),
+ )
def assertNotInMainline(self, revision_id, db_branch):
"""Assert that `revision_id` is not in the mainline of `db_branch`."""
self.assertFalse(
self.isMainline(db_branch, revision_id),
- "%r in mainline of %r" % (revision_id, db_branch))
+ "%r in mainline of %r" % (revision_id, db_branch),
+ )
def test_empty_branch(self):
# Importing an empty branch does nothing.
@@ -340,15 +365,15 @@ class TestBzrSync(BzrSyncTestCase):
# there should be an email generated saying that
# 1 (in this case) revision has been removed,
# and another email with the diff and log message.
- self.commitRevision('first')
+ self.commitRevision("first")
self.syncAndCount(new_revisions=1, new_numbers=1, new_authors=1)
self.assertEqual(self.db_branch.revision_count, 1)
self.uncommitRevision()
- self.commitRevision('second')
+ self.commitRevision("second")
self.syncAndCount(new_revisions=1, new_authors=1)
self.assertEqual(self.db_branch.revision_count, 1)
[revno] = self.db_branch.revision_history
- self.assertEqual(revno.revision.log_body, 'second')
+ self.assertEqual(revno.revision.log_body, "second")
def test_import_revision_with_url(self):
# Importing a revision passing the url parameter works.
@@ -357,15 +382,17 @@ class TestBzrSync(BzrSyncTestCase):
bzrsync = BzrSync(self.db_branch)
bzrsync.syncBranchAndClose()
self.assertCounts(
- counts, new_revisions=1, new_numbers=1, new_authors=1)
+ counts, new_revisions=1, new_numbers=1, new_authors=1
+ )
def test_new_author(self):
# Importing a different committer adds it as an author.
author = "Another Author <another@xxxxxxxxxxx>"
self.commitRevision(committer=author)
self.syncAndCount(new_revisions=1, new_numbers=1, new_authors=1)
- db_author = IStore(RevisionAuthor).find(
- RevisionAuthor, name=author).one()
+ db_author = (
+ IStore(RevisionAuthor).find(RevisionAuthor, name=author).one()
+ )
self.assertEqual(db_author.name, author)
def test_new_parent(self):
@@ -373,57 +400,72 @@ class TestBzrSync(BzrSyncTestCase):
self.commitRevision()
self.commitRevision()
self.syncAndCount(
- new_revisions=2, new_numbers=2, new_parents=1, new_authors=2)
+ new_revisions=2, new_numbers=2, new_parents=1, new_authors=2
+ )
def test_sync_updates_branch(self):
# test that the last scanned revision ID is recorded
self.syncAndCount()
self.assertEqual(
- six.ensure_text(NULL_REVISION), self.db_branch.last_scanned_id)
+ six.ensure_text(NULL_REVISION), self.db_branch.last_scanned_id
+ )
last_modified = self.db_branch.date_last_modified
last_scanned = self.db_branch.last_scanned
self.commitRevision()
self.syncAndCount(new_revisions=1, new_numbers=1, new_authors=1)
- self.assertEqual(six.ensure_text(self.bzr_branch.last_revision()),
- self.db_branch.last_scanned_id)
- self.assertTrue(self.db_branch.last_scanned > last_scanned,
- "last_scanned was not updated")
- self.assertTrue(self.db_branch.date_last_modified > last_modified,
- "date_last_modifed was not updated")
+ self.assertEqual(
+ six.ensure_text(self.bzr_branch.last_revision()),
+ self.db_branch.last_scanned_id,
+ )
+ self.assertTrue(
+ self.db_branch.last_scanned > last_scanned,
+ "last_scanned was not updated",
+ )
+ self.assertTrue(
+ self.db_branch.date_last_modified > last_modified,
+ "date_last_modifed was not updated",
+ )
def test_timestamp_parsing(self):
# Test that the timezone selected does not affect the
# timestamp recorded in the database.
- self.commitRevision(rev_id=b'rev-1',
- timestamp=1000000000.0, timezone=0)
- self.commitRevision(rev_id=b'rev-2',
- timestamp=1000000000.0, timezone=28800)
+ self.commitRevision(
+ rev_id=b"rev-1", timestamp=1000000000.0, timezone=0
+ )
+ self.commitRevision(
+ rev_id=b"rev-2", timestamp=1000000000.0, timezone=28800
+ )
self.syncAndCount(
- new_revisions=2, new_numbers=2, new_parents=1, new_authors=2)
+ new_revisions=2, new_numbers=2, new_parents=1, new_authors=2
+ )
rev_1 = IStore(Revision).find(Revision, revision_id="rev-1").one()
rev_2 = IStore(Revision).find(Revision, revision_id="rev-2").one()
- UTC = pytz.timezone('UTC')
+ UTC = pytz.timezone("UTC")
dt = datetime.datetime.fromtimestamp(1000000000.0, UTC)
self.assertEqual(rev_1.revision_date, dt)
self.assertEqual(rev_2.revision_date, dt)
def getAncestryDelta_test(self, clean_repository=False):
- """"Test various ancestry delta calculations.
+ """ "Test various ancestry delta calculations.
:param clean_repository: If True, perform calculations with a branch
whose repository contains only revisions in the ancestry of the
tip.
"""
(db_branch, bzr_tree), ignored = self.makeBranchWithMerge(
- b'base', b'trunk', b'branch', b'merge')
+ b"base", b"trunk", b"branch", b"merge"
+ )
bzr_branch = bzr_tree.branch
- self.factory.makeBranchRevision(db_branch, 'base', 0)
+ self.factory.makeBranchRevision(db_branch, "base", 0)
self.factory.makeBranchRevision(
- db_branch, 'trunk', 1, parent_ids=['base'])
+ db_branch, "trunk", 1, parent_ids=["base"]
+ )
self.factory.makeBranchRevision(
- db_branch, 'branch', None, parent_ids=['base'])
+ db_branch, "branch", None, parent_ids=["base"]
+ )
self.factory.makeBranchRevision(
- db_branch, 'merge', 2, parent_ids=['trunk', 'branch'])
+ db_branch, "merge", 2, parent_ids=["trunk", "branch"]
+ )
sync = self.makeBzrSync(db_branch)
self.useContext(write_locked(bzr_branch))
@@ -441,78 +483,82 @@ class TestBzrSync(BzrSyncTestCase):
delta_branch = bzr_branch
return sync.getAncestryDelta(delta_branch)
- added_ancestry, removed_ancestry = get_delta(b'merge', None)
+ added_ancestry, removed_ancestry = get_delta(b"merge", None)
# All revisions are new for an unscanned branch
- self.assertEqual(
- {'base', 'trunk', 'branch', 'merge'}, added_ancestry)
+ self.assertEqual({"base", "trunk", "branch", "merge"}, added_ancestry)
self.assertEqual(set(), removed_ancestry)
- added_ancestry, removed_ancestry = get_delta(b'merge', 'base')
- self.assertEqual(
- {'trunk', 'branch', 'merge'}, added_ancestry)
+ added_ancestry, removed_ancestry = get_delta(b"merge", "base")
+ self.assertEqual({"trunk", "branch", "merge"}, added_ancestry)
self.assertEqual(set(), removed_ancestry)
- added_ancestry, removed_ancestry = get_delta(NULL_REVISION, 'merge')
- self.assertEqual(
- set(), added_ancestry)
+ added_ancestry, removed_ancestry = get_delta(NULL_REVISION, "merge")
+ self.assertEqual(set(), added_ancestry)
self.assertEqual(
- {'base', 'trunk', 'branch', 'merge'}, removed_ancestry)
- added_ancestry, removed_ancestry = get_delta(b'base', 'merge')
- self.assertEqual(
- set(), added_ancestry)
- self.assertEqual(
- {'trunk', 'branch', 'merge'}, removed_ancestry)
- added_ancestry, removed_ancestry = get_delta(b'trunk', 'branch')
- self.assertEqual({'trunk'}, added_ancestry)
- self.assertEqual({'branch'}, removed_ancestry)
+ {"base", "trunk", "branch", "merge"}, removed_ancestry
+ )
+ added_ancestry, removed_ancestry = get_delta(b"base", "merge")
+ self.assertEqual(set(), added_ancestry)
+ self.assertEqual({"trunk", "branch", "merge"}, removed_ancestry)
+ added_ancestry, removed_ancestry = get_delta(b"trunk", "branch")
+ self.assertEqual({"trunk"}, added_ancestry)
+ self.assertEqual({"branch"}, removed_ancestry)
def test_getAncestryDelta(self):
- """"Test ancestry delta calculations with a dirty repository."""
+ """ "Test ancestry delta calculations with a dirty repository."""
return self.getAncestryDelta_test()
def test_getAncestryDelta_clean_repository(self):
- """"Test ancestry delta calculations with a clean repository."""
+ """ "Test ancestry delta calculations with a clean repository."""
return self.getAncestryDelta_test(clean_repository=True)
def test_revisionsToInsert_empty(self):
# An empty branch should have no revisions.
- self.assertEqual(
- [], list(BzrSync.revisionsToInsert([], 0, set())))
+ self.assertEqual([], list(BzrSync.revisionsToInsert([], 0, set())))
def test_revisionsToInsert_linear(self):
# If the branch has a linear ancestry, revisionsToInsert() should
# yield each revision along with a sequence number, starting at 1.
- self.commitRevision(rev_id=b'rev-1')
+ self.commitRevision(rev_id=b"rev-1")
bzrsync = self.makeBzrSync(self.db_branch)
bzr_history = [
six.ensure_text(revid)
- for revid in branch_revision_history(self.bzr_branch)]
+ for revid in branch_revision_history(self.bzr_branch)
+ ]
added_ancestry = bzrsync.getAncestryDelta(self.bzr_branch)[0]
result = bzrsync.revisionsToInsert(
- bzr_history, self.bzr_branch.revno(), added_ancestry)
- self.assertEqual({'rev-1': 1}, dict(result))
+ bzr_history, self.bzr_branch.revno(), added_ancestry
+ )
+ self.assertEqual({"rev-1": 1}, dict(result))
def test_revisionsToInsert_branched(self):
# Confirm that these revisions are generated by getRevisions with None
# as the sequence 'number'.
(db_branch, bzr_tree), ignored = self.makeBranchWithMerge(
- b'base', b'trunk', b'branch', b'merge')
+ b"base", b"trunk", b"branch", b"merge"
+ )
bzrsync = self.makeBzrSync(db_branch)
bzr_history = [
six.ensure_text(revid)
- for revid in branch_revision_history(bzr_tree.branch)]
+ for revid in branch_revision_history(bzr_tree.branch)
+ ]
added_ancestry = bzrsync.getAncestryDelta(bzr_tree.branch)[0]
- expected = {'base': 1, 'trunk': 2, 'merge': 3, 'branch': None}
+ expected = {"base": 1, "trunk": 2, "merge": 3, "branch": None}
self.assertEqual(
- expected, dict(bzrsync.revisionsToInsert(bzr_history,
- bzr_tree.branch.revno(), added_ancestry)))
+ expected,
+ dict(
+ bzrsync.revisionsToInsert(
+ bzr_history, bzr_tree.branch.revno(), added_ancestry
+ )
+ ),
+ )
def test_sync_with_merged_branches(self):
# Confirm that when we syncHistory, all of the revisions are included
# correctly in the BranchRevision table.
(db_branch, branch_tree), ignored = self.makeBranchWithMerge(
- b'r1', b'r2', b'r1.1.1', b'r3')
+ b"r1", b"r2", b"r1.1.1", b"r3"
+ )
self.makeBzrSync(db_branch).syncBranchAndClose()
- expected = {
- (1, 'r1'), (2, 'r2'), (3, 'r3'), (None, 'r1.1.1')}
+ expected = {(1, "r1"), (2, "r2"), (3, "r3"), (None, "r1.1.1")}
self.assertEqual(self.getBranchRevisions(db_branch), expected)
def test_sync_merged_to_merging(self):
@@ -520,26 +566,30 @@ class TestBzrSync(BzrSyncTestCase):
# not NULL to NULL if that revision changes from mainline to not
# mainline when synced.
- (db_trunk, trunk_tree), (db_branch, branch_tree) = (
- self.makeBranchWithMerge(b'base', b'trunk', b'branch', b'merge'))
+ (db_trunk, trunk_tree), (
+ db_branch,
+ branch_tree,
+ ) = self.makeBranchWithMerge(b"base", b"trunk", b"branch", b"merge")
self.syncBazaarBranchToDatabase(trunk_tree.branch, db_branch)
- self.assertInMainline('trunk', db_branch)
+ self.assertInMainline("trunk", db_branch)
self.syncBazaarBranchToDatabase(branch_tree.branch, db_branch)
- self.assertNotInMainline('trunk', db_branch)
- self.assertInMainline('branch', db_branch)
+ self.assertNotInMainline("trunk", db_branch)
+ self.assertInMainline("branch", db_branch)
def test_sync_merging_to_merged(self):
# When replacing a branch by one of the branches it merged, the
# database must be updated appropriately.
- (db_trunk, trunk_tree), (db_branch, branch_tree) = (
- self.makeBranchWithMerge(b'base', b'trunk', b'branch', b'merge'))
+ (db_trunk, trunk_tree), (
+ db_branch,
+ branch_tree,
+ ) = self.makeBranchWithMerge(b"base", b"trunk", b"branch", b"merge")
# First, sync with the merging branch.
self.syncBazaarBranchToDatabase(trunk_tree.branch, db_trunk)
# Then sync with the merged branch.
self.syncBazaarBranchToDatabase(branch_tree.branch, db_trunk)
- expected = {(1, 'base'), (2, 'branch')}
+ expected = {(1, "base"), (2, "branch")}
self.assertEqual(self.getBranchRevisions(db_trunk), expected)
def test_retrieveDatabaseAncestry(self):
@@ -553,40 +603,48 @@ class TestBzrSync(BzrSyncTestCase):
# dependency, as the test setup would depend on
# retrieveDatabaseAncestry.
branch = getUtility(IBranchLookup).getByUniqueName(
- '~name12/+junk/junk.contrib')
+ "~name12/+junk/junk.contrib"
+ )
branch_revisions = IStore(BranchRevision).find(
- BranchRevision, BranchRevision.branch == branch)
+ BranchRevision, BranchRevision.branch == branch
+ )
sampledata = list(branch_revisions.order_by(BranchRevision.sequence))
- expected_ancestry = {branch_revision.revision.revision_id
- for branch_revision in sampledata}
- expected_history = [branch_revision.revision.revision_id
+ expected_ancestry = {
+ branch_revision.revision.revision_id
for branch_revision in sampledata
- if branch_revision.sequence is not None]
+ }
+ expected_history = [
+ branch_revision.revision.revision_id
+ for branch_revision in sampledata
+ if branch_revision.sequence is not None
+ ]
self.create_branch_and_tree(db_branch=branch)
bzrsync = self.makeBzrSync(branch)
- db_ancestry, db_history = (
- bzrsync.retrieveDatabaseAncestry())
+ db_ancestry, db_history = bzrsync.retrieveDatabaseAncestry()
self.assertEqual(expected_ancestry, set(db_ancestry))
self.assertEqual(expected_history, list(db_history))
class TestPlanDatabaseChanges(BzrSyncTestCase):
-
def test_ancestry_already_present(self):
# If a BranchRevision is being added, and it's already in the DB, but
# not found through the graph operations, we should schedule it for
# deletion anyway.
- rev1_id = six.ensure_text(self.bzr_tree.commit(
- 'initial commit', committer='me@xxxxxxxxxxx'))
+ rev1_id = six.ensure_text(
+ self.bzr_tree.commit("initial commit", committer="me@xxxxxxxxxxx")
+ )
merge_tree = self.bzr_tree.controldir.sprout(
- 'merge').open_workingtree()
- merge_id = six.ensure_text(merge_tree.commit(
- 'mergeable commit', committer='me@xxxxxxxxxxx'))
+ "merge"
+ ).open_workingtree()
+ merge_id = six.ensure_text(
+ merge_tree.commit("mergeable commit", committer="me@xxxxxxxxxxx")
+ )
self.bzr_tree.merge_from_branch(merge_tree.branch)
- rev2_id = six.ensure_text(self.bzr_tree.commit(
- 'merge', committer='me@xxxxxxxxxxx'))
+ rev2_id = six.ensure_text(
+ self.bzr_tree.commit("merge", committer="me@xxxxxxxxxxx")
+ )
self.useContext(read_locked(self.bzr_tree))
syncer = BzrSync(self.db_branch)
syncer.syncBranchAndClose(self.bzr_tree.branch)
@@ -594,7 +652,8 @@ class TestPlanDatabaseChanges(BzrSyncTestCase):
self.db_branch.last_scanned_id = rev1_id
db_ancestry, db_history = self.db_branch.getScannerData()
branchrevisions_to_delete = syncer.planDatabaseChanges(
- self.bzr_branch, [rev1_id, rev2_id], db_ancestry, db_history)[1]
+ self.bzr_branch, [rev1_id, rev2_id], db_ancestry, db_history
+ )[1]
self.assertIn(merge_id, branchrevisions_to_delete)
@@ -610,24 +669,30 @@ class TestBzrSyncRevisions(BzrSyncTestCase):
# timestamps.
# Make a negative, fractional timestamp and equivalent datetime
- UTC = pytz.timezone('UTC')
+ UTC = pytz.timezone("UTC")
old_timestamp = -0.5
old_date = datetime.datetime(1969, 12, 31, 23, 59, 59, 500000, UTC)
# Fake revision with negative timestamp.
fake_rev = BzrRevision(
- revision_id='rev42', parent_ids=['rev1', 'rev2'],
- committer=self.factory.getUniqueString(), message=self.LOG,
- timestamp=old_timestamp, timezone=0, properties={})
+ revision_id="rev42",
+ parent_ids=["rev1", "rev2"],
+ committer=self.factory.getUniqueString(),
+ message=self.LOG,
+ timestamp=old_timestamp,
+ timezone=0,
+ properties={},
+ )
# Sync the revision. The second parameter is a dict of revision ids
# to revnos, and will error if the revision id is not in the dict.
- self.bzrsync.syncRevisions(None, [fake_rev], {'rev42': None})
+ self.bzrsync.syncRevisions(None, [fake_rev], {"rev42": None})
# Find the revision we just synced and check that it has the correct
# date.
revision = getUtility(IRevisionSet).getByRevisionId(
- fake_rev.revision_id)
+ fake_rev.revision_id
+ )
self.assertEqual(old_date, revision.revision_date)
@@ -664,8 +729,7 @@ class TestBzrTranslationsUploadJob(BzrSyncTestCase):
def test_upload_on_new_revision(self):
# Syncing a branch with a changed tip creates a new RosettaUploadJob.
- self._makeProductSeries(
- TranslationsBranchImportMode.IMPORT_TEMPLATES)
+ self._makeProductSeries(TranslationsBranchImportMode.IMPORT_TEMPLATES)
revision_id = self.commitRevision()
self.makeBzrSync(self.db_branch).syncBranchAndClose()
self.db_branch.last_mirrored_id = six.ensure_text(revision_id)
@@ -684,8 +748,9 @@ class TestUpdatePreviewDiffJob(BzrSyncTestCase):
def test_create_on_new_revision(self):
"""When branch tip changes, a job is created."""
bmp = self.factory.makeBranchMergeProposal(
- source_branch=self.db_branch)
- removeSecurityProxy(bmp).target_branch.last_scanned_id = 'rev'
+ source_branch=self.db_branch
+ )
+ removeSecurityProxy(bmp).target_branch.last_scanned_id = "rev"
# The creation of a merge proposal has created an update preview diff
# job, so we'll mark that one as done.
bmp.next_preview_diff_job.start()
@@ -703,24 +768,28 @@ class TestGenerateIncrementalDiffJob(BzrSyncTestCase):
return list(
BranchMergeProposalJobSource.iterReady(
BranchMergeProposalJobType.GENERATE_INCREMENTAL_DIFF
- )
)
+ )
@run_as_db_user(config.launchpad.dbuser)
def test_create_on_new_revision(self):
"""When branch tip changes, a job is created."""
- parent_id = commit_file(self.db_branch, 'foo', b'bar')
- self.factory.makeBranchRevision(self.db_branch, parent_id,
- revision_date=self.factory.getUniqueDate())
+ parent_id = commit_file(self.db_branch, "foo", b"bar")
+ self.factory.makeBranchRevision(
+ self.db_branch,
+ parent_id,
+ revision_date=self.factory.getUniqueDate(),
+ )
self.db_branch.last_scanned_id = six.ensure_text(parent_id)
# Make sure that the merge proposal is created in the past.
- date_created = (
- datetime.datetime.now(pytz.UTC) - datetime.timedelta(days=7))
+ date_created = datetime.datetime.now(pytz.UTC) - datetime.timedelta(
+ days=7
+ )
bmp = self.factory.makeBranchMergeProposal(
- source_branch=self.db_branch,
- date_created=date_created)
- revision_id = commit_file(self.db_branch, 'foo', b'baz')
- removeSecurityProxy(bmp).target_branch.last_scanned_id = 'rev'
+ source_branch=self.db_branch, date_created=date_created
+ )
+ revision_id = commit_file(self.db_branch, "foo", b"baz")
+ removeSecurityProxy(bmp).target_branch.last_scanned_id = "rev"
self.assertEqual([], self.getPending())
switch_dbuser("branchscanner")
self.makeBzrSync(self.db_branch).syncBranchAndClose()
@@ -736,7 +805,8 @@ class TestSetRecipeStale(BzrSyncTestCase):
def test_base_branch_recipe(self):
"""On tip change, recipes where this branch is base become stale."""
recipe = self.factory.makeSourcePackageRecipe(
- branches=[self.db_branch])
+ branches=[self.db_branch]
+ )
removeSecurityProxy(recipe).is_stale = False
switch_dbuser("branchscanner")
self.makeBzrSync(self.db_branch).syncBranchAndClose()
@@ -746,7 +816,8 @@ class TestSetRecipeStale(BzrSyncTestCase):
def test_instruction_branch_recipe(self):
"""On tip change, recipes including this branch become stale."""
recipe = self.factory.makeSourcePackageRecipe(
- branches=[self.factory.makeBranch(), self.db_branch])
+ branches=[self.factory.makeBranch(), self.db_branch]
+ )
removeSecurityProxy(recipe).is_stale = False
switch_dbuser("branchscanner")
self.makeBzrSync(self.db_branch).syncBranchAndClose()
@@ -787,8 +858,7 @@ class TestMarkSnapsStale(BzrSyncTestCase):
def test_mark_private_snap_stale(self):
# Private snaps should be correctly marked as stale.
self.useFixture(FeatureFixture(SNAP_TESTING_FLAGS))
- snap = self.factory.makeSnap(
- branch=self.db_branch, private=True)
+ snap = self.factory.makeSnap(branch=self.db_branch, private=True)
removeSecurityProxy(snap).is_stale = False
switch_dbuser("branchscanner")
self.makeBzrSync(self.db_branch).syncBranchAndClose()
@@ -806,30 +876,38 @@ class TestTriggerWebhooks(BzrSyncTestCase):
old_revid = self.db_branch.last_scanned_id
with dbuser(config.launchpad.dbuser):
hook = self.factory.makeWebhook(
- target=self.db_branch, event_types=["bzr:push:0.1"])
+ target=self.db_branch, event_types=["bzr:push:0.1"]
+ )
self.commitRevision()
new_revid = six.ensure_text(self.bzr_branch.last_revision())
self.makeBzrSync(self.db_branch).syncBranchAndClose()
delivery = hook.deliveries.one()
- payload_matcher = MatchesDict({
- "bzr_branch": Equals("/" + self.db_branch.unique_name),
- "bzr_branch_path": Equals(self.db_branch.shortened_path),
- "old": Equals({"revision_id": old_revid}),
- "new": Equals({"revision_id": new_revid}),
- })
+ payload_matcher = MatchesDict(
+ {
+ "bzr_branch": Equals("/" + self.db_branch.unique_name),
+ "bzr_branch_path": Equals(self.db_branch.shortened_path),
+ "old": Equals({"revision_id": old_revid}),
+ "new": Equals({"revision_id": new_revid}),
+ }
+ )
self.assertThat(
delivery,
MatchesStructure(
- event_type=Equals("bzr:push:0.1"),
- payload=payload_matcher))
+ event_type=Equals("bzr:push:0.1"), payload=payload_matcher
+ ),
+ )
with dbuser(config.IWebhookDeliveryJobSource.dbuser):
self.assertEqual(
- "<WebhookDeliveryJob for webhook %d on %r>" % (
- hook.id, hook.target),
- repr(delivery))
+ "<WebhookDeliveryJob for webhook %d on %r>"
+ % (hook.id, hook.target),
+ repr(delivery),
+ )
self.assertThat(
- logger.output, LogsScheduledWebhooks([
- (hook, "bzr:push:0.1", payload_matcher)]))
+ logger.output,
+ LogsScheduledWebhooks(
+ [(hook, "bzr:push:0.1", payload_matcher)]
+ ),
+ )
class TestRevisionProperty(BzrSyncTestCase):
@@ -838,12 +916,12 @@ class TestRevisionProperty(BzrSyncTestCase):
def test_revision_properties(self):
# Revisions with properties should have records stored in the
# RevisionProperty table, accessible through Revision.getProperties().
- properties = {'name': 'value'}
- self.commitRevision(rev_id=b'rev1', revprops=properties)
+ properties = {"name": "value"}
+ self.commitRevision(rev_id=b"rev1", revprops=properties)
self.makeBzrSync(self.db_branch).syncBranchAndClose()
# Check that properties were saved to the revision.
- bzr_revision = self.bzr_branch.repository.get_revision(b'rev1')
+ bzr_revision = self.bzr_branch.repository.get_revision(b"rev1")
self.assertEqual(properties, bzr_revision.properties)
# Check that properties are stored in the database.
- db_revision = getUtility(IRevisionSet).getByRevisionId('rev1')
+ db_revision = getUtility(IRevisionSet).getByRevisionId("rev1")
self.assertEqual(properties, db_revision.getProperties())
diff --git a/lib/lp/codehosting/scanner/tests/test_email.py b/lib/lp/codehosting/scanner/tests/test_email.py
index 01a512e..f5d2021 100644
--- a/lib/lp/codehosting/scanner/tests/test_email.py
+++ b/lib/lp/codehosting/scanner/tests/test_email.py
@@ -14,11 +14,11 @@ from lp.code.enums import (
BranchSubscriptionDiffSize,
BranchSubscriptionNotificationLevel,
CodeReviewNotificationLevel,
- )
+)
from lp.code.interfaces.branchjob import (
IRevisionMailJobSource,
IRevisionsAddedJobSource,
- )
+)
from lp.code.model.branchjob import RevisionMailJob
from lp.codehosting.scanner import events
from lp.codehosting.scanner.bzrsync import BzrSync
@@ -27,27 +27,22 @@ from lp.registry.interfaces.person import IPersonSet
from lp.services.config import config
from lp.services.features.testing import FeatureFixture
from lp.services.job.runner import JobRunner
-from lp.services.job.tests import (
- block_on_job,
- pop_remote_notifications,
- )
+from lp.services.job.tests import block_on_job, pop_remote_notifications
from lp.services.mail import stub
from lp.testing import TestCaseWithFactory
from lp.testing.dbuser import switch_dbuser
-from lp.testing.layers import (
- CeleryJobLayer,
- LaunchpadZopelessLayer,
- )
+from lp.testing.layers import CeleryJobLayer, LaunchpadZopelessLayer
def add_subscriber(branch):
- test_user = getUtility(IPersonSet).getByEmail('test@xxxxxxxxxxxxx')
+ test_user = getUtility(IPersonSet).getByEmail("test@xxxxxxxxxxxxx")
branch.subscribe(
test_user,
BranchSubscriptionNotificationLevel.FULL,
BranchSubscriptionDiffSize.FIVEKLINES,
CodeReviewNotificationLevel.NOEMAIL,
- test_user)
+ test_user,
+ )
class TestBzrSyncEmail(BzrSyncTestCase):
@@ -69,13 +64,14 @@ class TestBzrSyncEmail(BzrSyncTestCase):
JobRunner.fromReady(getUtility(IRevisionMailJobSource)).runAll()
self.assertEqual(len(stub.test_emails), 1)
[initial_email] = stub.test_emails
- expected = 'First scan of the branch detected 0 revisions'
+ expected = "First scan of the branch detected 0 revisions"
message = email.message_from_bytes(initial_email[2])
email_body = message.get_payload()
self.assertIn(expected, email_body)
self.assertEmailHeadersEqual(
- '[Branch %s] 0 revisions' % self.db_branch.unique_name,
- message['Subject'])
+ "[Branch %s] 0 revisions" % self.db_branch.unique_name,
+ message["Subject"],
+ )
def test_import_revision(self):
self.commitRevision()
@@ -83,14 +79,17 @@ class TestBzrSyncEmail(BzrSyncTestCase):
JobRunner.fromReady(getUtility(IRevisionMailJobSource)).runAll()
self.assertEqual(len(stub.test_emails), 1)
[initial_email] = stub.test_emails
- expected = ('First scan of the branch detected 1 revision'
- ' in the revision history of the=\n branch.')
+ expected = (
+ "First scan of the branch detected 1 revision"
+ " in the revision history of the=\n branch."
+ )
message = email.message_from_bytes(initial_email[2])
email_body = message.get_payload()
self.assertIn(expected, email_body)
self.assertEmailHeadersEqual(
- '[Branch %s] 1 revision' % self.db_branch.unique_name,
- message['Subject'])
+ "[Branch %s] 1 revision" % self.db_branch.unique_name,
+ message["Subject"],
+ )
def test_import_uncommit(self):
self.commitRevision()
@@ -102,51 +101,53 @@ class TestBzrSyncEmail(BzrSyncTestCase):
JobRunner.fromReady(getUtility(IRevisionMailJobSource)).runAll()
self.assertEqual(len(stub.test_emails), 1)
[uncommit_email] = stub.test_emails
- expected = '1 revision was removed from the branch.'
+ expected = "1 revision was removed from the branch."
message = email.message_from_bytes(uncommit_email[2])
email_body = message.get_payload()
self.assertIn(expected, email_body)
self.assertEmailHeadersEqual(
- '[Branch %s] 1 revision removed' % self.db_branch.unique_name,
- message['Subject'])
+ "[Branch %s] 1 revision removed" % self.db_branch.unique_name,
+ message["Subject"],
+ )
def test_import_recommit(self):
# When scanning the uncommit and new commit there should be an email
# generated saying that 1 (in this case) revision has been removed,
# and another email with the diff and log message.
- self.commitRevision('first')
+ self.commitRevision("first")
self.makeBzrSync(self.db_branch).syncBranchAndClose()
JobRunner.fromReady(getUtility(IRevisionMailJobSource)).runAll()
stub.test_emails = []
self.uncommitRevision()
- self.writeToFile(filename="hello.txt",
- contents="Hello World\n")
+ self.writeToFile(filename="hello.txt", contents="Hello World\n")
author = self.factory.getUniqueString()
- self.commitRevision('second', committer=author)
+ self.commitRevision("second", committer=author)
self.makeBzrSync(self.db_branch).syncBranchAndClose()
JobRunner.fromReady(getUtility(IRevisionsAddedJobSource)).runAll()
JobRunner.fromReady(getUtility(IRevisionMailJobSource)).runAll()
self.assertEqual(len(stub.test_emails), 2)
[recommit_email, uncommit_email] = stub.test_emails
uncommit_email_body = uncommit_email[2]
- expected = b'1 revision was removed from the branch.'
+ expected = b"1 revision was removed from the branch."
self.assertIn(expected, uncommit_email_body)
subject = (
- 'Subject: [Branch %s] Test branch' % self.db_branch.unique_name)
+ "Subject: [Branch %s] Test branch" % self.db_branch.unique_name
+ )
self.assertIn(expected, uncommit_email_body)
recommit_email_msg = email.message_from_bytes(recommit_email[2])
recommit_email_body = recommit_email_msg.get_payload()[0].get_payload(
- decode=True)
- subject = '[Branch %s] Rev 1: second' % self.db_branch.unique_name
- self.assertEmailHeadersEqual(subject, recommit_email_msg['Subject'])
+ decode=True
+ )
+ subject = "[Branch %s] Rev 1: second" % self.db_branch.unique_name
+ self.assertEmailHeadersEqual(subject, recommit_email_msg["Subject"])
body_bits = [
- b'revno: 1',
- ('committer: %s' % author).encode('UTF-8'),
- ('branch nick: %s' % self.bzr_branch.nick).encode('UTF-8'),
- b'message:\n second',
- b'added:\n hello.txt',
- ]
+ b"revno: 1",
+ ("committer: %s" % author).encode("UTF-8"),
+ ("branch nick: %s" % self.bzr_branch.nick).encode("UTF-8"),
+ b"message:\n second",
+ b"added:\n hello.txt",
+ ]
for bit in body_bits:
self.assertIn(bit, recommit_email_body)
@@ -156,8 +157,9 @@ class TestViaCelery(TestCaseWithFactory):
layer = CeleryJobLayer
def prepare(self, job_name):
- self.useFixture(FeatureFixture(
- {'jobs.celery.enabled_classes': job_name}))
+ self.useFixture(
+ FeatureFixture({"jobs.celery.enabled_classes": job_name})
+ )
self.useBzrBranches(direct_database=True)
db_branch, tree = self.create_branch_and_tree()
add_subscriber(db_branch)
@@ -172,15 +174,15 @@ class TestViaCelery(TestCaseWithFactory):
def test_empty_branch(self):
"""RevisionMailJob for empty branches runs via Celery."""
- db_branch, tree = self.prepare('RevisionMailJob')
+ db_branch, tree = self.prepare("RevisionMailJob")
with block_on_job():
BzrSync(db_branch).syncBranchAndClose(tree.branch)
self.assertEqual(1, len(pop_remote_notifications()))
def test_uncommit_branch(self):
"""RevisionMailJob for removed revisions runs via Celery."""
- db_branch, tree = self.prepare('RevisionMailJob')
- tree.commit('message')
+ db_branch, tree = self.prepare("RevisionMailJob")
+ tree.commit("message")
bzr_sync = BzrSync(db_branch)
with block_on_job():
bzr_sync.syncBranchAndClose(tree.branch)
@@ -194,13 +196,13 @@ class TestViaCelery(TestCaseWithFactory):
"""RevisionsAddedJob for added revisions runs via Celery."""
# Enable RevisionMailJob to let celery activate a new connection
# before trying to flush sent emails calling pop_remote_notifications.
- db_branch, tree = self.prepare('RevisionMailJob RevisionsAddedJob')
- tree.commit('message')
+ db_branch, tree = self.prepare("RevisionMailJob RevisionsAddedJob")
+ tree.commit("message")
bzr_sync = BzrSync(db_branch)
with block_on_job():
bzr_sync.syncBranchAndClose(tree.branch)
pop_remote_notifications()
- tree.commit('message2')
+ tree.commit("message2")
with block_on_job():
bzr_sync.syncBranchAndClose(tree.branch)
self.assertEqual(1, len(pop_remote_notifications()))
@@ -219,7 +221,8 @@ class TestScanBranches(TestCaseWithFactory):
BranchSubscriptionNotificationLevel.FULL,
BranchSubscriptionDiffSize.WHOLEDIFF,
CodeReviewNotificationLevel.FULL,
- db_branch.registrant)
+ db_branch.registrant,
+ )
self.assertEqual(0, len(list(RevisionMailJob.iterReady())))
notify(events.TipChanged(db_branch, tree.branch, True))
self.assertEqual(1, len(list(RevisionMailJob.iterReady())))
@@ -233,9 +236,10 @@ class TestScanBranches(TestCaseWithFactory):
BranchSubscriptionNotificationLevel.FULL,
BranchSubscriptionDiffSize.WHOLEDIFF,
CodeReviewNotificationLevel.FULL,
- db_branch.registrant)
+ db_branch.registrant,
+ )
self.assertEqual(0, len(list(RevisionMailJob.iterReady())))
- notify(events.RevisionsRemoved(db_branch, tree.branch, ['x']))
+ notify(events.RevisionsRemoved(db_branch, tree.branch, ["x"]))
self.assertEqual(1, len(list(RevisionMailJob.iterReady())))
@@ -254,8 +258,11 @@ class TestBzrSyncNoEmail(BzrSyncTestCase):
self.assertEqual([], jobs, "There should be no pending emails.")
def test_no_subscribers(self):
- self.assertEqual(self.db_branch.subscribers.count(), 0,
- "There should be no subscribers to the branch.")
+ self.assertEqual(
+ self.db_branch.subscribers.count(),
+ 0,
+ "There should be no subscribers to the branch.",
+ )
def test_empty_branch(self):
bzrsync = self.makeBzrSync(self.db_branch)
@@ -280,14 +287,13 @@ class TestBzrSyncNoEmail(BzrSyncTestCase):
def test_import_recommit(self):
# No emails should have been generated.
- self.commitRevision('first')
+ self.commitRevision("first")
bzrsync = self.makeBzrSync(self.db_branch)
bzrsync.syncBranchAndClose()
stub.test_emails = []
self.uncommitRevision()
- self.writeToFile(filename="hello.txt",
- contents="Hello World\n")
- self.commitRevision('second')
+ self.writeToFile(filename="hello.txt", contents="Hello World\n")
+ self.commitRevision("second")
bzrsync = self.makeBzrSync(self.db_branch)
bzrsync.syncBranchAndClose()
self.assertNoPendingEmails()
diff --git a/lib/lp/codehosting/scanner/tests/test_mergedetection.py b/lib/lp/codehosting/scanner/tests/test_mergedetection.py
index 07dcf12..def12d1 100644
--- a/lib/lp/codehosting/scanner/tests/test_mergedetection.py
+++ b/lib/lp/codehosting/scanner/tests/test_mergedetection.py
@@ -5,38 +5,29 @@
import logging
-from breezy.revision import NULL_REVISION
-from lazr.lifecycle.event import ObjectModifiedEvent
import six
import transaction
+from breezy.revision import NULL_REVISION
+from lazr.lifecycle.event import ObjectModifiedEvent
from zope.component import getUtility
from zope.event import notify
-from lp.code.enums import (
- BranchLifecycleStatus,
- BranchMergeProposalStatus,
- )
+from lp.code.enums import BranchLifecycleStatus, BranchMergeProposalStatus
from lp.code.interfaces.branchlookup import IBranchLookup
from lp.code.model.branchmergeproposaljob import (
BranchMergeProposalJob,
BranchMergeProposalJobType,
- )
-from lp.codehosting.scanner import (
- events,
- mergedetection,
- )
+)
+from lp.codehosting.scanner import events, mergedetection
from lp.codehosting.scanner.tests.test_bzrsync import (
BzrSyncTestCase,
run_as_db_user,
- )
+)
from lp.services.config import config
from lp.services.database.interfaces import IStore
from lp.services.log.logger import DevNullLogger
from lp.services.osutils import override_environ
-from lp.testing import (
- TestCase,
- TestCaseWithFactory,
- )
+from lp.testing import TestCase, TestCaseWithFactory
from lp.testing.layers import LaunchpadZopelessLayer
from lp.testing.mail_helpers import pop_notifications
@@ -56,8 +47,10 @@ class TestAutoMergeDetectionForMergeProposals(BzrSyncTestCase):
def _createBranchesAndProposal(self):
# Create two branches where the trunk has the branch as a merge. Also
# create a merge proposal from the branch to the trunk.
- (db_trunk, trunk_tree), (db_branch, branch_tree) = (
- self.makeBranchWithMerge(b'base', b'trunk', b'branch', b'merge'))
+ (db_trunk, trunk_tree), (
+ db_branch,
+ branch_tree,
+ ) = self.makeBranchWithMerge(b"base", b"trunk", b"branch", b"merge")
trunk_id = db_trunk.id
branch_id = db_branch.id
self.createProposal(db_branch, db_trunk)
@@ -76,78 +69,97 @@ class TestAutoMergeDetectionForMergeProposals(BzrSyncTestCase):
def test_auto_merge_proposals_real_merge(self):
# If there is a merge proposal where the tip of the source is in the
# ancestry of the target, mark it as merged.
- proposal, db_trunk, db_branch, branch_tree = (
- self._createBranchesAndProposal())
+ (
+ proposal,
+ db_trunk,
+ db_branch,
+ branch_tree,
+ ) = self._createBranchesAndProposal()
self._scanTheBranches(db_branch, db_trunk)
# The proposal should now be merged.
self.assertEqual(
- BranchMergeProposalStatus.MERGED,
- proposal.queue_status)
+ BranchMergeProposalStatus.MERGED, proposal.queue_status
+ )
self.assertEqual(3, proposal.merged_revno)
def test_auto_merge_proposals_real_merge_target_scanned_first(self):
# If there is a merge proposal where the tip of the source is in the
# ancestry of the target, mark it as merged.
- proposal, db_trunk, db_branch, branch_tree = (
- self._createBranchesAndProposal())
+ (
+ proposal,
+ db_trunk,
+ db_branch,
+ branch_tree,
+ ) = self._createBranchesAndProposal()
self._scanTheBranches(db_trunk, db_branch)
# The proposal should now be merged.
self.assertEqual(
- BranchMergeProposalStatus.MERGED,
- proposal.queue_status)
+ BranchMergeProposalStatus.MERGED, proposal.queue_status
+ )
def test_auto_merge_proposals_rejected_proposal(self):
# If there is a merge proposal where the tip of the source is in the
# ancestry of the target but the proposal is in a final state the
# proposal is not marked as merged.
- proposal, db_trunk, db_branch, branch_tree = (
- self._createBranchesAndProposal())
+ (
+ proposal,
+ db_trunk,
+ db_branch,
+ branch_tree,
+ ) = self._createBranchesAndProposal()
- proposal.rejectBranch(db_trunk.owner, 'branch')
+ proposal.rejectBranch(db_trunk.owner, "branch")
self._scanTheBranches(db_branch, db_trunk)
        # The proposal should stay rejected.
self.assertEqual(
- BranchMergeProposalStatus.REJECTED,
- proposal.queue_status)
+ BranchMergeProposalStatus.REJECTED, proposal.queue_status
+ )
- def test_auto_merge_proposals_rejected_proposal_target_scanned_first(
- self):
+ def test_auto_merge_proposals_rejected_proposal_target_scanned_first(self):
# If there is a merge proposal where the tip of the source is in the
# ancestry of the target but the proposal is in a final state the
# proposal is not marked as merged.
- proposal, db_trunk, db_branch, branch_tree = (
- self._createBranchesAndProposal())
+ (
+ proposal,
+ db_trunk,
+ db_branch,
+ branch_tree,
+ ) = self._createBranchesAndProposal()
- proposal.rejectBranch(db_trunk.owner, 'branch')
+ proposal.rejectBranch(db_trunk.owner, "branch")
self._scanTheBranches(db_trunk, db_branch)
        # The proposal should stay rejected.
self.assertEqual(
- BranchMergeProposalStatus.REJECTED,
- proposal.queue_status)
+ BranchMergeProposalStatus.REJECTED, proposal.queue_status
+ )
def test_auto_merge_proposals_not_merged_proposal(self):
# If there is a merge proposal where the tip of the source is not in
# the ancestry of the target it is not marked as merged.
- proposal, db_trunk, db_branch, branch_tree = (
- self._createBranchesAndProposal())
+ (
+ proposal,
+ db_trunk,
+ db_branch,
+ branch_tree,
+ ) = self._createBranchesAndProposal()
# XXX: AaronBentley 2010-08-06 bug=614404: a bzr username is
# required to generate the revision-id.
- with override_environ(BRZ_EMAIL='me@xxxxxxxxxxx'):
- branch_tree.commit('another revision', rev_id=b'another-rev')
+ with override_environ(BRZ_EMAIL="me@xxxxxxxxxxx"):
+ branch_tree.commit("another revision", rev_id=b"another-rev")
current_proposal_status = proposal.queue_status
self.assertNotEqual(
- current_proposal_status,
- BranchMergeProposalStatus.MERGED)
+ current_proposal_status, BranchMergeProposalStatus.MERGED
+ )
self._scanTheBranches(db_branch, db_trunk)
@@ -158,17 +170,21 @@ class TestAutoMergeDetectionForMergeProposals(BzrSyncTestCase):
# If there is a merge proposal where the tip of the source is not in
# the ancestry of the target it is not marked as merged.
- proposal, db_trunk, db_branch, branch_tree = (
- self._createBranchesAndProposal())
+ (
+ proposal,
+ db_trunk,
+ db_branch,
+ branch_tree,
+ ) = self._createBranchesAndProposal()
# XXX: AaronBentley 2010-08-06 bug=614404: a bzr username is
# required to generate the revision-id.
- with override_environ(BRZ_EMAIL='me@xxxxxxxxxxx'):
- branch_tree.commit('another revision', rev_id=b'another-rev')
+ with override_environ(BRZ_EMAIL="me@xxxxxxxxxxx"):
+ branch_tree.commit("another revision", rev_id=b"another-rev")
current_proposal_status = proposal.queue_status
self.assertNotEqual(
- current_proposal_status,
- BranchMergeProposalStatus.MERGED)
+ current_proposal_status, BranchMergeProposalStatus.MERGED
+ )
self._scanTheBranches(db_trunk, db_branch)
@@ -198,8 +214,12 @@ class TestMergeDetection(TestCaseWithFactory):
def autoMergeBranches(self, db_branch, new_ancestry):
mergedetection.auto_merge_branches(
events.ScanCompleted(
- db_branch=db_branch, bzr_branch=None,
- logger=DevNullLogger(), new_ancestry=new_ancestry))
+ db_branch=db_branch,
+ bzr_branch=None,
+ logger=DevNullLogger(),
+ new_ancestry=new_ancestry,
+ )
+ )
def mergeDetected(self, logger, source, target):
# Record the merged branches
@@ -208,25 +228,25 @@ class TestMergeDetection(TestCaseWithFactory):
def test_own_branch_not_emitted(self):
# A merge is never emitted with the source branch being the same as
# the target branch.
- self.db_branch.last_scanned_id = 'revid'
- self.autoMergeBranches(self.db_branch, ['revid'])
+ self.db_branch.last_scanned_id = "revid"
+ self.autoMergeBranches(self.db_branch, ["revid"])
self.assertEqual([], self.merges)
def test_branch_tip_in_ancestry(self):
        # If there is another branch with its tip revision id in the
        # ancestry passed in, a merge is detected and emitted.
source = self.factory.makeProductBranch(product=self.product)
- source.last_scanned_id = 'revid'
- self.autoMergeBranches(self.db_branch, ['revid'])
+ source.last_scanned_id = "revid"
+ self.autoMergeBranches(self.db_branch, ["revid"])
self.assertEqual([(source, self.db_branch)], self.merges)
def test_branch_tip_in_ancestry_status_merged(self):
# Branches that are already merged do emit events.
source = self.factory.makeProductBranch(
- product=self.product,
- lifecycle_status=BranchLifecycleStatus.MERGED)
- source.last_scanned_id = 'revid'
- self.autoMergeBranches(self.db_branch, ['revid'])
+ product=self.product, lifecycle_status=BranchLifecycleStatus.MERGED
+ )
+ source.last_scanned_id = "revid"
+ self.autoMergeBranches(self.db_branch, ["revid"])
self.assertEqual([], self.merges)
def test_other_branch_with_no_last_scanned_id(self):
@@ -234,7 +254,7 @@ class TestMergeDetection(TestCaseWithFactory):
         # of the branch has not yet been set, no merge event is emitted for that
# branch.
self.factory.makeProductBranch(product=self.product)
- self.autoMergeBranches(self.db_branch, ['revid'])
+ self.autoMergeBranches(self.db_branch, ["revid"])
self.assertEqual([], self.merges)
def test_other_branch_with_NULL_REVISION_last_scanned_id(self):
@@ -243,16 +263,16 @@ class TestMergeDetection(TestCaseWithFactory):
# that branch.
source = self.factory.makeProductBranch(product=self.product)
source.last_scanned_id = six.ensure_text(NULL_REVISION)
- self.autoMergeBranches(self.db_branch, ['revid'])
+ self.autoMergeBranches(self.db_branch, ["revid"])
self.assertEqual([], self.merges)
def test_other_branch_same_tip_revision_not_emitted(self):
# If two different branches have the same tip revision, then they are
# conceptually the same branch, not one merged into the other.
source = self.factory.makeProductBranch(product=self.product)
- source.last_scanned_id = 'revid'
- self.db_branch.last_scanned_id = 'revid'
- self.autoMergeBranches(self.db_branch, ['revid'])
+ source.last_scanned_id = "revid"
+ self.db_branch.last_scanned_id = "revid"
+ self.autoMergeBranches(self.db_branch, ["revid"])
self.assertEqual([], self.merges)
@@ -268,41 +288,59 @@ class TestBranchMergeDetectionHandler(TestCaseWithFactory):
proposal = self.factory.makeBranchMergeProposal(product=product)
product.development_focus.branch = proposal.target_branch
self.assertNotEqual(
- BranchMergeProposalStatus.MERGED, proposal.queue_status)
+ BranchMergeProposalStatus.MERGED, proposal.queue_status
+ )
self.assertNotEqual(
BranchLifecycleStatus.MERGED,
- proposal.source_branch.lifecycle_status)
+ proposal.source_branch.lifecycle_status,
+ )
_, [event] = self.assertNotifies(
- [ObjectModifiedEvent], True, mergedetection.merge_detected,
+ [ObjectModifiedEvent],
+ True,
+ mergedetection.merge_detected,
logging.getLogger(),
- proposal.source_branch, proposal.target_branch, proposal)
+ proposal.source_branch,
+ proposal.target_branch,
+ proposal,
+ )
self.assertEqual(
- BranchMergeProposalStatus.MERGED, proposal.queue_status)
+ BranchMergeProposalStatus.MERGED, proposal.queue_status
+ )
self.assertEqual(
BranchLifecycleStatus.MERGED,
- proposal.source_branch.lifecycle_status)
+ proposal.source_branch.lifecycle_status,
+ )
self.assertEqual(proposal, event.object)
self.assertEqual(
BranchMergeProposalStatus.WORK_IN_PROGRESS,
- event.object_before_modification.queue_status)
+ event.object_before_modification.queue_status,
+ )
self.assertEqual(
- BranchMergeProposalStatus.MERGED, event.object.queue_status)
- job = IStore(proposal).find(
- BranchMergeProposalJob,
- BranchMergeProposalJob.branch_merge_proposal == proposal,
- BranchMergeProposalJob.job_type ==
- BranchMergeProposalJobType.MERGE_PROPOSAL_UPDATED).one()
+ BranchMergeProposalStatus.MERGED, event.object.queue_status
+ )
+ job = (
+ IStore(proposal)
+ .find(
+ BranchMergeProposalJob,
+ BranchMergeProposalJob.branch_merge_proposal == proposal,
+ BranchMergeProposalJob.job_type
+ == BranchMergeProposalJobType.MERGE_PROPOSAL_UPDATED,
+ )
+ .one()
+ )
derived_job = job.makeDerived()
derived_job.run()
notifications = pop_notifications()
self.assertIn(
- 'Work in progress => Merged',
- six.ensure_text(notifications[0].get_payload(decode=True)))
- self.assertEqual(proposal.address, notifications[0]['From'])
- recipients = {msg['x-envelope-to'] for msg in notifications}
+ "Work in progress => Merged",
+ six.ensure_text(notifications[0].get_payload(decode=True)),
+ )
+ self.assertEqual(proposal.address, notifications[0]["From"])
+ recipients = {msg["x-envelope-to"] for msg in notifications}
expected = {
proposal.source_branch.registrant.preferredemail.email,
- proposal.target_branch.registrant.preferredemail.email}
+ proposal.target_branch.registrant.preferredemail.email,
+ }
self.assertEqual(expected, recipients)
def test_mergeProposalMergeDetected_not_series(self):
@@ -311,18 +349,25 @@ class TestBranchMergeDetectionHandler(TestCaseWithFactory):
# branch is not updated.
proposal = self.factory.makeBranchMergeProposal()
self.assertNotEqual(
- BranchMergeProposalStatus.MERGED, proposal.queue_status)
+ BranchMergeProposalStatus.MERGED, proposal.queue_status
+ )
self.assertNotEqual(
BranchLifecycleStatus.MERGED,
- proposal.source_branch.lifecycle_status)
+ proposal.source_branch.lifecycle_status,
+ )
mergedetection.merge_detected(
logging.getLogger(),
- proposal.source_branch, proposal.target_branch, proposal)
+ proposal.source_branch,
+ proposal.target_branch,
+ proposal,
+ )
self.assertEqual(
- BranchMergeProposalStatus.MERGED, proposal.queue_status)
+ BranchMergeProposalStatus.MERGED, proposal.queue_status
+ )
self.assertNotEqual(
BranchLifecycleStatus.MERGED,
- proposal.source_branch.lifecycle_status)
+ proposal.source_branch.lifecycle_status,
+ )
def test_mergeOfTwoBranches_target_not_dev_focus(self):
# The target branch must be the development focus in order for the
@@ -331,7 +376,8 @@ class TestBranchMergeDetectionHandler(TestCaseWithFactory):
target = self.factory.makeProductBranch()
mergedetection.merge_detected(logging.getLogger(), source, target)
self.assertNotEqual(
- BranchLifecycleStatus.MERGED, source.lifecycle_status)
+ BranchLifecycleStatus.MERGED, source.lifecycle_status
+ )
def test_mergeOfTwoBranches_target_dev_focus(self):
# If the target branch is the development focus branch of the product,
@@ -341,8 +387,7 @@ class TestBranchMergeDetectionHandler(TestCaseWithFactory):
target = self.factory.makeProductBranch(product=product)
product.development_focus.branch = target
mergedetection.merge_detected(logging.getLogger(), source, target)
- self.assertEqual(
- BranchLifecycleStatus.MERGED, source.lifecycle_status)
+ self.assertEqual(BranchLifecycleStatus.MERGED, source.lifecycle_status)
def test_mergeOfTwoBranches_source_series_branch(self):
# If the source branch is associated with a series, its lifecycle
@@ -351,23 +396,23 @@ class TestBranchMergeDetectionHandler(TestCaseWithFactory):
source = self.factory.makeProductBranch(product=product)
target = self.factory.makeProductBranch(product=product)
product.development_focus.branch = target
- series = product.newSeries(product.owner, 'new', '')
+ series = product.newSeries(product.owner, "new", "")
series.branch = source
mergedetection.merge_detected(logging.getLogger(), source, target)
self.assertNotEqual(
- BranchLifecycleStatus.MERGED, source.lifecycle_status)
+ BranchLifecycleStatus.MERGED, source.lifecycle_status
+ )
def test_auto_merge_branches_subscribed(self):
"""Auto merging is triggered by ScanCompleted."""
source = self.factory.makeBranch()
- source.last_scanned_id = '23foo'
+ source.last_scanned_id = "23foo"
target = self.factory.makeBranchTargetBranch(source.target)
target.product.development_focus.branch = target
- logger = logging.getLogger('test')
- notify(events.ScanCompleted(target, None, logger, ['23foo']))
- self.assertEqual(
- BranchLifecycleStatus.MERGED, source.lifecycle_status)
+ logger = logging.getLogger("test")
+ notify(events.ScanCompleted(target, None, logger, ["23foo"]))
+ self.assertEqual(BranchLifecycleStatus.MERGED, source.lifecycle_status)
class TestFindMergedRevno(TestCase):
@@ -376,19 +421,19 @@ class TestFindMergedRevno(TestCase):
def get_merge_graph(self):
# Create a fake merge graph.
return [
- ('rev-3', 0, (3,), False),
- ('rev-3a', 1, (15, 4, 8), False),
- ('rev-3b', 1, (15, 4, 7), False),
- ('rev-3c', 1, (15, 4, 6), False),
- ('rev-2', 0, (2,), False),
- ('rev-2a', 1, (4, 4, 8), False),
- ('rev-2b', 1, (4, 4, 7), False),
- ('rev-2-1a', 2, (7, 2, 47), False),
- ('rev-2-1b', 2, (7, 2, 45), False),
- ('rev-2-1c', 2, (7, 2, 42), False),
- ('rev-2c', 1, (4, 4, 6), False),
- ('rev-1', 0, (1,), False),
- ]
+ ("rev-3", 0, (3,), False),
+ ("rev-3a", 1, (15, 4, 8), False),
+ ("rev-3b", 1, (15, 4, 7), False),
+ ("rev-3c", 1, (15, 4, 6), False),
+ ("rev-2", 0, (2,), False),
+ ("rev-2a", 1, (4, 4, 8), False),
+ ("rev-2b", 1, (4, 4, 7), False),
+ ("rev-2-1a", 2, (7, 2, 47), False),
+ ("rev-2-1b", 2, (7, 2, 45), False),
+ ("rev-2-1c", 2, (7, 2, 42), False),
+ ("rev-2c", 1, (4, 4, 6), False),
+ ("rev-1", 0, (1,), False),
+ ]
def assertFoundRevisionNumber(self, expected, rev_id):
merge_sorted = self.get_merge_graph()
@@ -401,10 +446,10 @@ class TestFindMergedRevno(TestCase):
def test_not_found(self):
# If the rev_id passed into the function isn't in the merge sorted
# graph, None is returned.
- self.assertFoundRevisionNumber(None, 'not-there')
+ self.assertFoundRevisionNumber(None, "not-there")
def test_existing_revision(self):
# If a revision is found, the last mainline revision is returned.
- self.assertFoundRevisionNumber(3, 'rev-3b')
- self.assertFoundRevisionNumber(2, 'rev-2-1c')
- self.assertFoundRevisionNumber(1, 'rev-1')
+ self.assertFoundRevisionNumber(3, "rev-3b")
+ self.assertFoundRevisionNumber(2, "rev-2-1c")
+ self.assertFoundRevisionNumber(1, "rev-1")
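
The TestFindMergedRevno fixture above pins down what the helper under test must do: walk a newest-first merge-sorted graph and report the mainline revision number under which a given revision landed. As a reading aid, here is a standalone sketch that satisfies those expectations; the function name is illustrative and this is not the Launchpad helper itself:

def find_mainline_revno(merge_sorted, rev_id):
    """Return the mainline revno under which rev_id landed, or None.

    merge_sorted is a newest-first list of
    (revision_id, depth, revno_tuple, end_of_merge) entries, the shape
    mimicked by get_merge_graph() above.
    """
    last_mainline_revno = None
    for entry_rev_id, depth, revno, _end_of_merge in merge_sorted:
        if depth == 0:
            # Mainline entries carry a single-element revno tuple, e.g. (3,).
            last_mainline_revno = revno[0]
        if entry_rev_id == rev_id:
            return last_mainline_revno
    return None

# Abbreviated from the fixture above; the expectations match the tests.
graph = [
    ("rev-3", 0, (3,), False),
    ("rev-3b", 1, (15, 4, 7), False),
    ("rev-2", 0, (2,), False),
    ("rev-2-1c", 2, (7, 2, 42), False),
    ("rev-1", 0, (1,), False),
]
assert find_mainline_revno(graph, "rev-3b") == 3
assert find_mainline_revno(graph, "rev-2-1c") == 2
assert find_mainline_revno(graph, "rev-1") == 1
assert find_mainline_revno(graph, "not-there") is None

Tracking the most recent depth-0 entry is enough because merge-sorted output lists each mainline revision immediately before the revisions merged into it.
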
diff --git a/lib/lp/codehosting/scripts/modifiedbranches.py b/lib/lp/codehosting/scripts/modifiedbranches.py
index 8f4a0c1..e111e17 100644
--- a/lib/lp/codehosting/scripts/modifiedbranches.py
+++ b/lib/lp/codehosting/scripts/modifiedbranches.py
@@ -3,14 +3,11 @@
"""Implementation of the Launchpad script to list modified branches."""
-__all__ = ['ModifiedBranchesScript']
+__all__ = ["ModifiedBranchesScript"]
-from datetime import (
- datetime,
- timedelta,
- )
import os
+from datetime import datetime, timedelta
from time import strptime
import pytz
@@ -20,10 +17,7 @@ from lp.code.enums import BranchType
from lp.code.interfaces.branchcollection import IAllBranches
from lp.codehosting.vfs import branch_id_to_path
from lp.services.config import config
-from lp.services.scripts.base import (
- LaunchpadScript,
- LaunchpadScriptFailure,
- )
+from lp.services.scripts.base import LaunchpadScript, LaunchpadScriptFailure
class ModifiedBranchesScript(LaunchpadScript):
@@ -36,7 +30,8 @@ class ModifiedBranchesScript(LaunchpadScript):
"""
description = (
- "List branch paths for branches modified since the specified time.")
+ "List branch paths for branches modified since the specified time."
+ )
def __init__(self, name, dbuser=None, test_args=None):
LaunchpadScript.__init__(self, name, dbuser, test_args)
@@ -45,45 +40,59 @@ class ModifiedBranchesScript(LaunchpadScript):
self.locations = set()
def add_my_options(self):
- self.parser.set_defaults(
- strip_prefix='/srv/',
- append_suffix='/**')
+ self.parser.set_defaults(strip_prefix="/srv/", append_suffix="/**")
self.parser.add_option(
- "-s", "--since", metavar="DATE",
+ "-s",
+ "--since",
+ metavar="DATE",
help="A date in the format YYYY-MM-DD. Branches that "
- "have been modified since this date will be returned.")
+ "have been modified since this date will be returned.",
+ )
self.parser.add_option(
- "-l", "--last-hours", metavar="HOURS", type="int",
+ "-l",
+ "--last-hours",
+ metavar="HOURS",
+ type="int",
help="Return the branches that have been modified in "
- "the last HOURS number of hours.")
+ "the last HOURS number of hours.",
+ )
self.parser.add_option(
- "--strip-prefix", metavar="PREFIX",
+ "--strip-prefix",
+ metavar="PREFIX",
help="The prefix to remove from the branch locations. "
- "Defaults to '/srv/'.")
+ "Defaults to '/srv/'.",
+ )
self.parser.add_option(
- "--append-suffix", metavar="SUFFIX",
+ "--append-suffix",
+ metavar="SUFFIX",
help="A suffix to append to the end of the branch locations. "
- "Defaults to '/**'.")
+ "Defaults to '/**'.",
+ )
def get_last_modified_epoch(self):
- """Return the timezone aware datetime for the last modified epoch. """
- if (self.options.last_hours is not None and
- self.options.since is not None):
+ """Return the timezone aware datetime for the last modified epoch."""
+ if (
+ self.options.last_hours is not None
+ and self.options.since is not None
+ ):
raise LaunchpadScriptFailure(
- "Only one of --since or --last-hours can be specified.")
+ "Only one of --since or --last-hours can be specified."
+ )
last_modified = None
if self.options.last_hours is not None:
- last_modified = (
- self.now_timestamp - timedelta(hours=self.options.last_hours))
+ last_modified = self.now_timestamp - timedelta(
+ hours=self.options.last_hours
+ )
elif self.options.since is not None:
try:
- parsed_time = strptime(self.options.since, '%Y-%m-%d')
+ parsed_time = strptime(self.options.since, "%Y-%m-%d")
last_modified = datetime(*(parsed_time[:3]))
except ValueError as e:
raise LaunchpadScriptFailure(str(e))
else:
raise LaunchpadScriptFailure(
- "One of --since or --last-hours needs to be specified.")
+ "One of --since or --last-hours needs to be specified."
+ )
# Make the datetime timezone aware.
return last_modified.replace(tzinfo=pytz.UTC)
@@ -96,26 +105,28 @@ class ModifiedBranchesScript(LaunchpadScript):
def process_location(self, location):
"""Strip the defined prefix, and append the suffix as configured."""
if location.startswith(self.options.strip_prefix):
- location = location[len(self.options.strip_prefix):]
+ location = location[len(self.options.strip_prefix) :]
return location + self.options.append_suffix
def update_locations(self, location):
"""Add the location, and all the possible parent directories."""
- paths = location.split('/')
+ paths = location.split("/")
curr = []
for segment in paths:
curr.append(segment)
# Don't add an empty string.
- if curr != ['']:
- self.locations.add('/'.join(curr))
+ if curr != [""]:
+ self.locations.add("/".join(curr))
def main(self):
last_modified = self.get_last_modified_epoch()
self.logger.info(
- "Looking for branches modified since %s", last_modified)
+ "Looking for branches modified since %s", last_modified
+ )
collection = getUtility(IAllBranches)
collection = collection.withBranchType(
- BranchType.HOSTED, BranchType.MIRRORED, BranchType.IMPORTED)
+ BranchType.HOSTED, BranchType.MIRRORED, BranchType.IMPORTED
+ )
collection = collection.scannedSince(last_modified)
for branch in collection.getBranches():
self.logger.info(branch.unique_name)
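
For readers following the reflowed get_last_modified_epoch hunk, here is a condensed standalone sketch of the same option logic. It substitutes the standard library's timezone.utc for pytz and a plain ValueError for LaunchpadScriptFailure so it runs outside Launchpad; treat it as an illustration, not the script itself:

from datetime import datetime, timedelta, timezone

def last_modified_epoch(since=None, last_hours=None, now=None):
    """Exactly one of since (YYYY-MM-DD) or last_hours must be given."""
    if since is not None and last_hours is not None:
        raise ValueError("Only one of --since or --last-hours can be specified.")
    if last_hours is not None:
        if now is None:
            now = datetime.now(timezone.utc)
        last_modified = now - timedelta(hours=last_hours)
    elif since is not None:
        last_modified = datetime.strptime(since, "%Y-%m-%d")
    else:
        raise ValueError("One of --since or --last-hours needs to be specified.")
    # Make the result timezone aware, as the script does with pytz.UTC.
    return last_modified.replace(tzinfo=timezone.utc)

assert last_modified_epoch(since="2009-03-02") == datetime(
    2009, 3, 2, tzinfo=timezone.utc
)
assert last_modified_epoch(
    last_hours=12, now=datetime(2009, 1, 1, 15, tzinfo=timezone.utc)
) == datetime(2009, 1, 1, 3, tzinfo=timezone.utc)
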
diff --git a/lib/lp/codehosting/scripts/sync_branches.py b/lib/lp/codehosting/scripts/sync_branches.py
index 7e22476..fc361a5 100644
--- a/lib/lp/codehosting/scripts/sync_branches.py
+++ b/lib/lp/codehosting/scripts/sync_branches.py
@@ -3,22 +3,18 @@
"""Sync branches from production to a staging environment."""
-__all__ = ['SyncBranchesScript']
+__all__ = ["SyncBranchesScript"]
import os.path
-from shlex import quote as shell_quote
import subprocess
+from shlex import quote as shell_quote
from zope.component import getUtility
from lp.code.interfaces.branch import IBranchSet
from lp.codehosting.vfs import branch_id_to_path
from lp.services.config import config
-from lp.services.scripts.base import (
- LaunchpadScript,
- LaunchpadScriptFailure,
- )
-
+from lp.services.scripts.base import LaunchpadScript, LaunchpadScriptFailure
# We don't want to spend too long syncing branches.
BRANCH_LIMIT = 35
@@ -29,35 +25,49 @@ class SyncBranchesScript(LaunchpadScript):
"""Sync branches from production to a staging environment."""
usage = "%prog [options] BRANCH_NAME [...]"
- description = __doc__ + "\n" + (
- 'Branch names may be given in any of the forms accepted for lp: '
- 'URLs, but without the leading "lp:".')
+ description = (
+ __doc__
+ + "\n"
+ + (
+ "Branch names may be given in any of the forms accepted for lp: "
+ 'URLs, but without the leading "lp:".'
+ )
+ )
def _syncBranch(self, branch):
branch_path = branch_id_to_path(branch.id)
branch_dir = os.path.join(
- config.codehosting.mirrored_branches_root, branch_path)
+ config.codehosting.mirrored_branches_root, branch_path
+ )
if not os.path.exists(branch_dir):
os.makedirs(branch_dir)
args = [
- "rsync", "-a", "--delete-after",
+ "rsync",
+ "-a",
+ "--delete-after",
"%s::mirrors/%s/" % (REMOTE_SERVER, branch_path),
"%s/" % branch_dir,
- ]
+ ]
try:
subprocess.check_output(args, universal_newlines=True)
except subprocess.CalledProcessError as e:
if "No such file or directory" in e.output:
self.logger.warning(
"Branch %s (%s) not found, ignoring",
- branch.identity, branch_path)
+ branch.identity,
+ branch_path,
+ )
else:
raise LaunchpadScriptFailure(
"There was an error running: %s\n"
"Status: %s\n"
- "Output: %s" % (
+ "Output: %s"
+ % (
" ".join(shell_quote(arg) for arg in args),
- e.returncode, e.output.rstrip("\n")))
+ e.returncode,
+ e.output.rstrip("\n"),
+ )
+ )
else:
self.logger.info("Rsynced %s (%s)", branch.identity, branch_path)
@@ -74,7 +84,8 @@ class SyncBranchesScript(LaunchpadScript):
if len(branches) > BRANCH_LIMIT:
raise LaunchpadScriptFailure(
- "Refusing to rsync more than %d branches" % BRANCH_LIMIT)
+ "Refusing to rsync more than %d branches" % BRANCH_LIMIT
+ )
for branch in branches:
self._syncBranch(branch)
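
The reformatted _syncBranch above reads more easily as a standalone sketch. The remote server name matches what the tests later in this diff expect (bazaar.launchpad.net), and RuntimeError stands in for LaunchpadScriptFailure; this illustrates the rsync call and its error handling, it is not the production script:

import logging
import os
import subprocess
from shlex import quote as shell_quote

def sync_branch(branch_path, branch_dir, remote_server="bazaar.launchpad.net"):
    """Rsync one branch from the remote mirror into branch_dir."""
    os.makedirs(branch_dir, exist_ok=True)
    args = [
        "rsync",
        "-a",
        "--delete-after",
        "%s::mirrors/%s/" % (remote_server, branch_path),
        "%s/" % branch_dir,
    ]
    try:
        subprocess.check_output(args, universal_newlines=True)
    except subprocess.CalledProcessError as e:
        if "No such file or directory" in e.output:
            # A branch missing on the remote side is only worth a warning.
            logging.warning("Branch %s not found, ignoring", branch_path)
        else:
            raise RuntimeError(
                "There was an error running: %s\nStatus: %s\nOutput: %s"
                % (
                    " ".join(shell_quote(arg) for arg in args),
                    e.returncode,
                    e.output.rstrip("\n"),
                )
            )
    else:
        logging.info("Rsynced %s", branch_path)
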
diff --git a/lib/lp/codehosting/scripts/tests/test_modifiedbranches.py b/lib/lp/codehosting/scripts/tests/test_modifiedbranches.py
index 7683988..d8cfac2 100644
--- a/lib/lp/codehosting/scripts/tests/test_modifiedbranches.py
+++ b/lib/lp/codehosting/scripts/tests/test_modifiedbranches.py
@@ -3,8 +3,8 @@
"""Test the modified branches script."""
-from datetime import datetime
import os
+from datetime import datetime
import pytz
@@ -13,10 +13,7 @@ from lp.codehosting.scripts.modifiedbranches import ModifiedBranchesScript
from lp.codehosting.vfs import branch_id_to_path
from lp.services.config import config
from lp.services.scripts.base import LaunchpadScriptFailure
-from lp.testing import (
- TestCase,
- TestCaseWithFactory,
- )
+from lp.testing import TestCase, TestCaseWithFactory
from lp.testing.layers import DatabaseFunctionalLayer
@@ -28,12 +25,13 @@ class TestModifiedBranchesLocations(TestCaseWithFactory):
def test_branch(self):
# A branch location is the physical disk directory.
branch = self.factory.makeAnyBranch(branch_type=BranchType.HOSTED)
- script = ModifiedBranchesScript('modified-branches', test_args=[])
+ script = ModifiedBranchesScript("modified-branches", test_args=[])
location = script.branch_location(branch)
path = branch_id_to_path(branch.id)
self.assertEqual(
os.path.join(config.codehosting.mirrored_branches_root, path),
- location)
+ location,
+ )
class TestModifiedBranchesLastModifiedEpoch(TestCase):
@@ -41,50 +39,55 @@ class TestModifiedBranchesLastModifiedEpoch(TestCase):
def test_no_args(self):
# The script needs one of --since or --last-hours to be specified.
- script = ModifiedBranchesScript(
- 'modified-branches', test_args=[])
+ script = ModifiedBranchesScript("modified-branches", test_args=[])
self.assertRaises(
- LaunchpadScriptFailure,
- script.get_last_modified_epoch)
+ LaunchpadScriptFailure, script.get_last_modified_epoch
+ )
def test_both_args(self):
# We don't like it if both --since and --last-hours are specified.
script = ModifiedBranchesScript(
- 'modified-branches',
- test_args=['--since=2009-03-02', '--last-hours=12'])
+ "modified-branches",
+ test_args=["--since=2009-03-02", "--last-hours=12"],
+ )
self.assertRaises(
- LaunchpadScriptFailure,
- script.get_last_modified_epoch)
+ LaunchpadScriptFailure, script.get_last_modified_epoch
+ )
def test_modified_since(self):
# The --since parameter is parsed into a datetime using the fairly
# standard YYYY-MM-DD format.
script = ModifiedBranchesScript(
- 'modified-branches', test_args=['--since=2009-03-02'])
+ "modified-branches", test_args=["--since=2009-03-02"]
+ )
self.assertEqual(
datetime(2009, 3, 2, tzinfo=pytz.UTC),
- script.get_last_modified_epoch())
+ script.get_last_modified_epoch(),
+ )
def test_modified_since_bad_format(self):
# Passing in a bad format string for the --since parameter errors.
script = ModifiedBranchesScript(
- 'modified-branches', test_args=['--since=2009-03'])
+ "modified-branches", test_args=["--since=2009-03"]
+ )
self.assertRaises(
- LaunchpadScriptFailure,
- script.get_last_modified_epoch)
+ LaunchpadScriptFailure, script.get_last_modified_epoch
+ )
def test_modified_last_hours(self):
# If last_hours is specified, that number of hours is removed from the
# current timestamp to work out the selection epoch.
script = ModifiedBranchesScript(
- 'modified-branches', test_args=['--last-hours=12'])
+ "modified-branches", test_args=["--last-hours=12"]
+ )
# Override the script's now_timestamp to have a definitive test.
# 3pm on the first of January.
script.now_timestamp = datetime(2009, 1, 1, 15, tzinfo=pytz.UTC)
# The last modified should be 3am on the same day.
self.assertEqual(
datetime(2009, 1, 1, 3, tzinfo=pytz.UTC),
- script.get_last_modified_epoch())
+ script.get_last_modified_epoch(),
+ )
class TestModifiedBranchesStripPrefix(TestCase):
@@ -94,34 +97,38 @@ class TestModifiedBranchesStripPrefix(TestCase):
         # The prefix defaults to '/srv/' for the ease of the main callers.
# Still need to pass in one of --since or --last-hours.
script = ModifiedBranchesScript(
- 'modified-branches', test_args=['--last-hours=12'])
- self.assertEqual('/srv/', script.options.strip_prefix)
+ "modified-branches", test_args=["--last-hours=12"]
+ )
+ self.assertEqual("/srv/", script.options.strip_prefix)
def test_override(self):
         # The default can be overridden with the --strip-prefix option.
# Still need to pass in one of --since or --last-hours.
script = ModifiedBranchesScript(
- 'modified-branches',
- test_args=['--last-hours=12', '--strip-prefix=foo'])
- self.assertEqual('foo', script.options.strip_prefix)
+ "modified-branches",
+ test_args=["--last-hours=12", "--strip-prefix=foo"],
+ )
+ self.assertEqual("foo", script.options.strip_prefix)
def test_prefix_is_stripped(self):
# If the location starts with the prefix, it is stripped.
script = ModifiedBranchesScript(
- 'modified-branches', test_args=['--last-hours=12'])
+ "modified-branches", test_args=["--last-hours=12"]
+ )
# Override the append_suffix as we aren't testing that here.
- script.options.append_suffix = ''
- location = script.process_location('/srv/testing')
- self.assertEqual('testing', location)
+ script.options.append_suffix = ""
+ location = script.process_location("/srv/testing")
+ self.assertEqual("testing", location)
def test_non_matching_location_unchanged(self):
# If the location doesn't match, it is left unchanged.
script = ModifiedBranchesScript(
- 'modified-branches', test_args=['--last-hours=12'])
+ "modified-branches", test_args=["--last-hours=12"]
+ )
# Override the append_suffix as we aren't testing that here.
- script.options.append_suffix = ''
- location = script.process_location('/var/testing')
- self.assertEqual('/var/testing', location)
+ script.options.append_suffix = ""
+ location = script.process_location("/var/testing")
+ self.assertEqual("/var/testing", location)
class TestModifiedBranchesAppendSuffix(TestCase):
@@ -131,23 +138,26 @@ class TestModifiedBranchesAppendSuffix(TestCase):
         # The suffix defaults to '/**' for the ease of the main callers.
# Still need to pass in one of --since or --last-hours.
script = ModifiedBranchesScript(
- 'modified-branches', test_args=['--last-hours=12'])
- self.assertEqual('/**', script.options.append_suffix)
+ "modified-branches", test_args=["--last-hours=12"]
+ )
+ self.assertEqual("/**", script.options.append_suffix)
def test_override(self):
         # The default can be overridden with the --append-suffix option.
# Still need to pass in one of --since or --last-hours.
script = ModifiedBranchesScript(
- 'modified-branches',
- test_args=['--last-hours=12', '--append-suffix=foo'])
- self.assertEqual('foo', script.options.append_suffix)
+ "modified-branches",
+ test_args=["--last-hours=12", "--append-suffix=foo"],
+ )
+ self.assertEqual("foo", script.options.append_suffix)
def test_suffix_appended(self):
# The suffix is appended to all branch locations.
script = ModifiedBranchesScript(
- 'modified-branches', test_args=['--last-hours=12'])
- location = script.process_location('/var/testing')
- self.assertEqual('/var/testing/**', location)
+ "modified-branches", test_args=["--last-hours=12"]
+ )
+ location = script.process_location("/var/testing")
+ self.assertEqual("/var/testing/**", location)
class TestModifiedBranchesUpdateLocations(TestCase):
@@ -156,7 +166,8 @@ class TestModifiedBranchesUpdateLocations(TestCase):
def setUp(self):
super().setUp()
self.script = ModifiedBranchesScript(
- 'modified-branches', test_args=['--last-hours=12'])
+ "modified-branches", test_args=["--last-hours=12"]
+ )
def test_locations_initially_empty(self):
# The locations starts out as an empty set.
@@ -164,24 +175,24 @@ class TestModifiedBranchesUpdateLocations(TestCase):
def test_single_path_element(self):
# Adding a single element should just add that.
- self.script.update_locations('foo')
- self.assertEqual({'foo'}, self.script.locations)
+ self.script.update_locations("foo")
+ self.assertEqual({"foo"}, self.script.locations)
def test_single_root_element(self):
# If the single element starts with a /, the locations do not include
# an empty string.
- self.script.update_locations('/foo')
- self.assertEqual({'/foo'}, self.script.locations)
+ self.script.update_locations("/foo")
+ self.assertEqual({"/foo"}, self.script.locations)
def test_multi_path_element(self):
# Adding a "real" path will also include all the parents.
- self.script.update_locations('foo/bar/baz')
- expected = {'foo', 'foo/bar', 'foo/bar/baz'}
+ self.script.update_locations("foo/bar/baz")
+ expected = {"foo", "foo/bar", "foo/bar/baz"}
self.assertEqual(expected, self.script.locations)
def test_duplicates(self):
# Adding paths with common parentage doesn't cause duplicates.
- self.script.update_locations('foo/bar/baz')
- self.script.update_locations('foo/bar/who')
- expected = {'foo', 'foo/bar', 'foo/bar/baz', 'foo/bar/who'}
+ self.script.update_locations("foo/bar/baz")
+ self.script.update_locations("foo/bar/who")
+ expected = {"foo", "foo/bar", "foo/bar/baz", "foo/bar/who"}
self.assertEqual(expected, self.script.locations)
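
These tests pin down two small path helpers: prefix stripping plus suffix appending, and expansion of a path into itself and all of its ancestors. A dependency-free sketch of the same behaviour follows; the function names here are illustrative, the real code lives on ModifiedBranchesScript as shown earlier in the diff:

def process_location(location, strip_prefix="/srv/", append_suffix="/**"):
    # Strip the configured prefix if present, then append the suffix, as
    # pinned down by TestModifiedBranchesStripPrefix/AppendSuffix above.
    if location.startswith(strip_prefix):
        location = location[len(strip_prefix):]
    return location + append_suffix

def parent_locations(location):
    # Return the location plus every ancestor path; a leading "/" must not
    # introduce an empty entry (TestModifiedBranchesUpdateLocations).
    locations = set()
    current = []
    for segment in location.split("/"):
        current.append(segment)
        if current != [""]:
            locations.add("/".join(current))
    return locations

assert process_location("/srv/testing", append_suffix="") == "testing"
assert process_location("/var/testing") == "/var/testing/**"
assert parent_locations("foo/bar/baz") == {"foo", "foo/bar", "foo/bar/baz"}
assert parent_locations("/foo") == {"/foo"}
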
diff --git a/lib/lp/codehosting/scripts/tests/test_sync_branches.py b/lib/lp/codehosting/scripts/tests/test_sync_branches.py
index f085078..3e35444 100644
--- a/lib/lp/codehosting/scripts/tests/test_sync_branches.py
+++ b/lib/lp/codehosting/scripts/tests/test_sync_branches.py
@@ -7,17 +7,8 @@ import os.path
import subprocess
from textwrap import dedent
-from fixtures import (
- MockPatch,
- TempDir,
- )
-from testtools.matchers import (
- DirExists,
- Equals,
- Matcher,
- MatchesListwise,
- Not,
- )
+from fixtures import MockPatch, TempDir
+from testtools.matchers import DirExists, Equals, Matcher, MatchesListwise, Not
from lp.codehosting.scripts.sync_branches import SyncBranchesScript
from lp.codehosting.vfs import branch_id_to_path
@@ -29,27 +20,38 @@ from lp.testing.layers import ZopelessDatabaseLayer
class BranchDirectoryCreated(Matcher):
-
def match(self, branch):
return DirExists().match(
os.path.join(
config.codehosting.mirrored_branches_root,
- branch_id_to_path(branch.id)))
+ branch_id_to_path(branch.id),
+ )
+ )
class BranchSyncProcessMatches(MatchesListwise):
-
def __init__(self, branch):
branch_path = branch_id_to_path(branch.id)
- super().__init__([
- Equals(([
- "rsync", "-a", "--delete-after",
- "bazaar.launchpad.net::mirrors/%s/" % branch_path,
- "%s/" % os.path.join(
- config.codehosting.mirrored_branches_root, branch_path),
- ],)),
- Equals({"universal_newlines": True}),
- ])
+ super().__init__(
+ [
+ Equals(
+ (
+ [
+ "rsync",
+ "-a",
+ "--delete-after",
+ "bazaar.launchpad.net::mirrors/%s/" % branch_path,
+ "%s/"
+ % os.path.join(
+ config.codehosting.mirrored_branches_root,
+ branch_path,
+ ),
+ ],
+ )
+ ),
+ Equals({"universal_newlines": True}),
+ ]
+ )
class TestSyncBranches(TestCaseWithFactory):
@@ -61,35 +63,45 @@ class TestSyncBranches(TestCaseWithFactory):
self.tempdir = self.useFixture(TempDir()).path
self.pushConfig("codehosting", mirrored_branches_root=self.tempdir)
self.mock_check_output = self.useFixture(
- MockPatch("subprocess.check_output")).mock
+ MockPatch("subprocess.check_output")
+ ).mock
self.logger = BufferLogger()
def _runScript(self, branch_names):
script = SyncBranchesScript(
- "sync-branches", test_args=branch_names, logger=self.logger)
+ "sync-branches", test_args=branch_names, logger=self.logger
+ )
script.main()
def test_unknown_branch(self):
branch = self.factory.makeBranch()
self._runScript(
- [branch.unique_name, branch.unique_name + "-nonexistent"])
+ [branch.unique_name, branch.unique_name + "-nonexistent"]
+ )
self.assertIn(
- "WARNING Branch %s-nonexistent does not exist\n" % (
- branch.unique_name),
- self.logger.getLogBuffer())
+ "WARNING Branch %s-nonexistent does not exist\n"
+ % (branch.unique_name),
+ self.logger.getLogBuffer(),
+ )
# Other branches are synced anyway.
self.assertThat(branch, BranchDirectoryCreated())
self.assertThat(
self.mock_check_output.call_args_list,
- MatchesListwise([
- BranchSyncProcessMatches(branch),
- ]))
+ MatchesListwise(
+ [
+ BranchSyncProcessMatches(branch),
+ ]
+ ),
+ )
def test_too_many_branches(self):
branches = [self.factory.makeBranch() for _ in range(36)]
self.assertRaisesWithContent(
- LaunchpadScriptFailure, "Refusing to rsync more than 35 branches",
- self._runScript, [branch.unique_name for branch in branches])
+ LaunchpadScriptFailure,
+ "Refusing to rsync more than 35 branches",
+ self._runScript,
+ [branch.unique_name for branch in branches],
+ )
for branch in branches:
self.assertThat(branch, Not(BranchDirectoryCreated()))
self.assertEqual([], self.mock_check_output.call_args_list)
@@ -101,10 +113,13 @@ class TestSyncBranches(TestCaseWithFactory):
def check_output_side_effect(args, **kwargs):
if "%s/%s/" % (self.tempdir, branch_paths[0]) in args:
raise subprocess.CalledProcessError(
- 23, args,
+ 23,
+ args,
output=(
'rsync: change_dir "/%s" (in mirrors) failed: '
- 'No such file or directory (2)' % branch_paths[0]))
+ "No such file or directory (2)" % branch_paths[0]
+ ),
+ )
else:
return None
@@ -112,27 +127,34 @@ class TestSyncBranches(TestCaseWithFactory):
self._runScript([branch.unique_name for branch in branches])
branch_displays = [
"%s (%s)" % (branch.identity, branch_path)
- for branch, branch_path in zip(branches, branch_paths)]
+ for branch, branch_path in zip(branches, branch_paths)
+ ]
self.assertEqual(
- dedent("""\
+ dedent(
+ """\
INFO There are 2 branches to rsync
WARNING Branch {} not found, ignoring
INFO Rsynced {}
- """).format(*branch_displays),
- self.logger.getLogBuffer())
+ """
+ ).format(*branch_displays),
+ self.logger.getLogBuffer(),
+ )
self.assertThat(
branches,
- MatchesListwise([BranchDirectoryCreated() for _ in branches]))
+ MatchesListwise([BranchDirectoryCreated() for _ in branches]),
+ )
self.assertThat(
self.mock_check_output.call_args_list,
- MatchesListwise([
- BranchSyncProcessMatches(branch) for branch in branches
- ]))
+ MatchesListwise(
+ [BranchSyncProcessMatches(branch) for branch in branches]
+ ),
+ )
def test_branch_other_rsync_error(self):
branch = self.factory.makeBranch()
self.mock_check_output.side_effect = subprocess.CalledProcessError(
- 1, [], output="rsync exploded\n")
+ 1, [], output="rsync exploded\n"
+ )
self.assertRaisesWithContent(
LaunchpadScriptFailure,
"There was an error running: "
@@ -141,12 +163,17 @@ class TestSyncBranches(TestCaseWithFactory):
"Status: 1\n"
"Output: rsync exploded".format(
branch_id_to_path(branch.id),
- self.tempdir, branch_id_to_path(branch.id)),
- self._runScript, [branch.unique_name])
+ self.tempdir,
+ branch_id_to_path(branch.id),
+ ),
+ self._runScript,
+ [branch.unique_name],
+ )
self.assertThat(branch, BranchDirectoryCreated())
self.assertThat(
self.mock_check_output.call_args_list,
- MatchesListwise([BranchSyncProcessMatches(branch)]))
+ MatchesListwise([BranchSyncProcessMatches(branch)]),
+ )
def test_success(self):
branches = [self.factory.makeBranch() for _ in range(3)]
@@ -154,20 +181,26 @@ class TestSyncBranches(TestCaseWithFactory):
self._runScript([branch.unique_name for branch in branches])
branch_displays = [
"%s (%s)" % (branch.identity, branch_path)
- for branch, branch_path in zip(branches, branch_paths)]
+ for branch, branch_path in zip(branches, branch_paths)
+ ]
self.assertEqual(
- dedent("""\
+ dedent(
+ """\
INFO There are 3 branches to rsync
INFO Rsynced {}
INFO Rsynced {}
INFO Rsynced {}
- """).format(*branch_displays),
- self.logger.getLogBuffer())
+ """
+ ).format(*branch_displays),
+ self.logger.getLogBuffer(),
+ )
self.assertThat(
branches,
- MatchesListwise([BranchDirectoryCreated() for _ in branches]))
+ MatchesListwise([BranchDirectoryCreated() for _ in branches]),
+ )
self.assertThat(
self.mock_check_output.call_args_list,
- MatchesListwise([
- BranchSyncProcessMatches(branch) for branch in branches
- ]))
+ MatchesListwise(
+ [BranchSyncProcessMatches(branch) for branch in branches]
+ ),
+ )
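
The nested Equals((...,)) / Equals({...}) structure in BranchSyncProcessMatches reflects how mock records calls: each entry in call_args_list is a pair of positional arguments and keyword arguments. A minimal illustration using unittest.mock alone (the real test wraps this in testtools matchers and fixtures):

from unittest import mock

check_output = mock.Mock()
check_output(
    ["rsync", "-a", "--delete-after", "src/", "dst/"],
    universal_newlines=True,
)

# Each recorded call unpacks into (positional args, keyword args); the
# positional part is a one-element tuple holding the rsync argv list.
args, kwargs = check_output.call_args_list[0]
assert args == (["rsync", "-a", "--delete-after", "src/", "dst/"],)
assert kwargs == {"universal_newlines": True}
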
diff --git a/lib/lp/codehosting/scripts/tests/test_upgrade_all_branches.py b/lib/lp/codehosting/scripts/tests/test_upgrade_all_branches.py
index d552e0b..87752a6 100644
--- a/lib/lp/codehosting/scripts/tests/test_upgrade_all_branches.py
+++ b/lib/lp/codehosting/scripts/tests/test_upgrade_all_branches.py
@@ -5,18 +5,14 @@ import logging
import os
from os.path import dirname
+import transaction
from breezy.bzr.groupcompress_repo import RepositoryFormat2a
from fixtures import TempDir
-import transaction
from lp.code.bzr import branch_changed
from lp.codehosting.upgrade import Upgrader
from lp.services.config import config
-from lp.testing import (
- person_logged_in,
- run_script,
- TestCaseWithFactory,
- )
+from lp.testing import TestCaseWithFactory, person_logged_in, run_script
from lp.testing.layers import AppServerLayer
@@ -33,21 +29,23 @@ class TestUpgradeAllBranchesScript(TestCaseWithFactory):
"""Run the script to upgrade all branches."""
transaction.commit()
if finish:
- flags = ' --finish '
+ flags = " --finish "
else:
- flags = ' '
+ flags = " "
return run_script(
- 'scripts/upgrade_all_branches.py' + flags + target, cwd=self.cwd)
+ "scripts/upgrade_all_branches.py" + flags + target, cwd=self.cwd
+ )
def prepare(self):
"""Prepare to run the script."""
self.useBzrBranches(direct_database=True)
- branch, tree = self.create_branch_and_tree(format='pack-0.92')
- tree.commit('foo', committer='jrandom@xxxxxxxxxxx')
+ branch, tree = self.create_branch_and_tree(format="pack-0.92")
+ tree.commit("foo", committer="jrandom@xxxxxxxxxxx")
with person_logged_in(branch.owner):
branch_changed(branch, tree.branch)
- target = self.useFixture(TempDir(
- rootdir=dirname(config.codehosting.mirrored_branches_root))).path
+ target = self.useFixture(
+ TempDir(rootdir=dirname(config.codehosting.mirrored_branches_root))
+ ).path
upgrader = Upgrader(branch, target, logging.getLogger(), tree.branch)
return upgrader
@@ -55,14 +53,13 @@ class TestUpgradeAllBranchesScript(TestCaseWithFactory):
"""Test that starting the upgrade behaves as expected."""
upgrader = self.prepare()
stdout, stderr, retcode = self.upgrade_all_branches(
- upgrader.target_dir)
- self.assertIn(
- 'INFO Upgrading branch %s' % upgrader.branch.unique_name,
- stderr)
- self.assertIn(
- 'INFO Converting repository with fetch.', stderr)
+ upgrader.target_dir
+ )
self.assertIn(
- 'INFO Skipped 0 already-upgraded branches.', stderr)
+ "INFO Upgrading branch %s" % upgrader.branch.unique_name, stderr
+ )
+ self.assertIn("INFO Converting repository with fetch.", stderr)
+ self.assertIn("INFO Skipped 0 already-upgraded branches.", stderr)
self.assertEqual(0, retcode)
upgraded = upgrader.get_bzrdir().open_repository()
self.assertIs(RepositoryFormat2a, upgraded._format.__class__)
@@ -72,11 +69,13 @@ class TestUpgradeAllBranchesScript(TestCaseWithFactory):
upgrader = self.prepare()
upgrader.start_upgrade()
stdout, stderr, retcode = self.upgrade_all_branches(
- upgrader.target_dir, finish=True)
+ upgrader.target_dir, finish=True
+ )
self.assertIn(
- 'INFO Upgrading branch %s' % upgrader.branch.unique_name,
- stderr)
+ "INFO Upgrading branch %s" % upgrader.branch.unique_name, stderr
+ )
self.assertEqual(0, retcode)
upgraded = upgrader.branch.getBzrBranch()
self.assertIs(
- RepositoryFormat2a, upgraded.repository._format.__class__)
+ RepositoryFormat2a, upgraded.repository._format.__class__
+ )
diff --git a/lib/lp/codehosting/sftp.py b/lib/lp/codehosting/sftp.py
index 238dcb8..728bb3e 100644
--- a/lib/lp/codehosting/sftp.py
+++ b/lib/lp/codehosting/sftp.py
@@ -13,37 +13,28 @@ We call such a transport a "Twisted Transport".
"""
__all__ = [
- 'avatar_to_sftp_server',
- 'TransportSFTPServer',
- ]
+ "avatar_to_sftp_server",
+ "TransportSFTPServer",
+]
-from copy import copy
import errno
import os
import stat
+from copy import copy
-from breezy import (
- errors as bzr_errors,
- osutils,
- urlutils,
- )
+from breezy import errors as bzr_errors
+from breezy import osutils, urlutils
from breezy.transport.local import LocalTransport
from lazr.sshserver.sftp import FileIsADirectory
-from twisted.conch.interfaces import (
- ISFTPFile,
- ISFTPServer,
- )
+from twisted.conch.interfaces import ISFTPFile, ISFTPServer
from twisted.conch.ls import lsLine
from twisted.conch.ssh import filetransfer
from twisted.internet import defer
from twisted.python import util
from zope.interface import implementer
-from lp.codehosting.vfs import (
- AsyncLaunchpadTransport,
- LaunchpadServer,
- )
+from lp.codehosting.vfs import AsyncLaunchpadTransport, LaunchpadServer
from lp.services.config import config
from lp.services.twistedsupport import gatherResults
@@ -89,10 +80,13 @@ def with_sftp_error(func):
See `TransportSFTPServer.translateError` for the details of the
translation.
"""
+
def decorator(*args, **kwargs):
deferred = func(*args, **kwargs)
- return deferred.addErrback(TransportSFTPServer.translateError,
- func.__name__)
+ return deferred.addErrback(
+ TransportSFTPServer.translateError, func.__name__
+ )
+
return util.mergeFunctionMetadata(func, decorator)
@@ -142,24 +136,25 @@ class TransportSFTPFile:
# The Twisted VFS adapter creates a file when any of these flags are
# set. It's possible that we only need to check for FXF_CREAT.
create_mask = (
- filetransfer.FXF_WRITE | filetransfer.FXF_APPEND |
- filetransfer.FXF_CREAT)
+ filetransfer.FXF_WRITE
+ | filetransfer.FXF_APPEND
+ | filetransfer.FXF_CREAT
+ )
return bool(self._flags & create_mask)
def _shouldTruncate(self):
"""Should we truncate the file?"""
- return (bool(self._flags & filetransfer.FXF_TRUNC)
- and not self._written)
+ return bool(self._flags & filetransfer.FXF_TRUNC) and not self._written
def _shouldWrite(self):
"""Is this file opened writable?"""
- write_mask = (filetransfer.FXF_WRITE | filetransfer.FXF_APPEND)
+ write_mask = filetransfer.FXF_WRITE | filetransfer.FXF_APPEND
return bool(self._flags & write_mask)
def _truncateFile(self):
"""Truncate this file."""
self._written = True
- return self.transport.put_bytes(self._escaped_path, b'')
+ return self.transport.put_bytes(self._escaped_path, b"")
@with_sftp_error
@defer.inlineCallbacks
@@ -168,7 +163,8 @@ class TransportSFTPFile:
if not self._shouldWrite():
raise filetransfer.SFTPError(
filetransfer.FX_PERMISSION_DENIED,
- "%r was opened read-only." % self._unescaped_relpath)
+ "%r was opened read-only." % self._unescaped_relpath,
+ )
if self._shouldTruncate():
yield self._truncateFile()
self._written = True
@@ -183,7 +179,8 @@ class TransportSFTPFile:
"""See `ISFTPFile`."""
try:
read_things = yield self.transport.readv(
- self._escaped_path, [(offset, length)])
+ self._escaped_path, [(offset, length)]
+ )
chunk = next(read_things)[1]
except bzr_errors.ShortReadvError as e:
# Handle short reads by reading what was available.
@@ -203,13 +200,15 @@ class TransportSFTPFile:
# XXX 2008-05-09 JonathanLange: This should at least raise an error,
# not do nothing silently.
return self._server.setAttrs(
- self._unescaped_relpath.encode('UTF-8'), attrs)
+ self._unescaped_relpath.encode("UTF-8"), attrs
+ )
@with_sftp_error
def getAttrs(self):
"""See `ISFTPFile`."""
return self._server.getAttrs(
- self._unescaped_relpath.encode('UTF-8'), False)
+ self._unescaped_relpath.encode("UTF-8"), False
+ )
@defer.inlineCallbacks
def close(self):
@@ -234,9 +233,11 @@ def _get_transport_for_dir(directory):
def avatar_to_sftp_server(avatar):
user_id = avatar.user_id
branch_transport = _get_transport_for_dir(
- config.codehosting.mirrored_branches_root)
+ config.codehosting.mirrored_branches_root
+ )
server = LaunchpadServer(
- avatar.codehosting_proxy, user_id, branch_transport)
+ avatar.codehosting_proxy, user_id, branch_transport
+ )
server.start_server()
transport = AsyncLaunchpadTransport(server, server.get_url())
return TransportSFTPServer(transport)
@@ -258,7 +259,7 @@ class TransportSFTPServer:
Twisted sends paths over SFTP as bytes, so we must decode them.
"""
try:
- return path.decode('UTF-8')
+ return path.decode("UTF-8")
except UnicodeDecodeError:
raise filetransfer.SFTPError(filetransfer.FX_BAD_MESSAGE, path)
@@ -282,8 +283,7 @@ class TransportSFTPServer:
deferreds = []
for filename in file_list:
escaped_file_path = os.path.join(escaped_dir_path, filename)
- deferreds.append(
- self.transport.stat(escaped_file_path))
+ deferreds.append(self.transport.stat(escaped_file_path))
return gatherResults(deferreds)
def _format_directory_entries(self, stat_results, filenames):
@@ -295,9 +295,9 @@ class TransportSFTPServer:
:return: An iterator of ``(shortname, longname, attributes)``.
"""
for stat_result, filename in zip(stat_results, filenames):
- shortname = urlutils.unescape(filename).encode('utf-8')
+ shortname = urlutils.unescape(filename).encode("utf-8")
stat_result = copy(stat_result)
- for attribute in ['st_uid', 'st_gid', 'st_mtime', 'st_nlink']:
+ for attribute in ["st_uid", "st_gid", "st_mtime", "st_nlink"]:
if getattr(stat_result, attribute, None) is None:
setattr(stat_result, attribute, 0)
longname = lsLine(shortname, stat_result)
@@ -326,7 +326,8 @@ class TransportSFTPServer:
return TransportSFTPFile(self.transport, path, flags, self)
else:
raise filetransfer.SFTPError(
- filetransfer.FX_NO_SUCH_FILE, directory)
+ filetransfer.FX_NO_SUCH_FILE, directory
+ )
def readLink(self, path):
"""See `ISFTPServer`."""
@@ -338,7 +339,7 @@ class TransportSFTPServer:
relpath = self._decodePath(relpath)
path = yield self.transport.local_realPath(urlutils.escape(relpath))
unescaped_path = urlutils.unescape(path)
- return unescaped_path.encode('utf-8')
+ return unescaped_path.encode("utf-8")
def setAttrs(self, path, attrs):
"""See `ISFTPServer`.
@@ -355,12 +356,12 @@ class TransportSFTPServer:
attributes as not all the Bazaar transports return full stat results.
"""
return {
- 'size': getattr(stat_val, 'st_size', 0),
- 'uid': getattr(stat_val, 'st_uid', 0),
- 'gid': getattr(stat_val, 'st_gid', 0),
- 'permissions': getattr(stat_val, 'st_mode', 0),
- 'atime': int(getattr(stat_val, 'st_atime', 0)),
- 'mtime': int(getattr(stat_val, 'st_mtime', 0)),
+ "size": getattr(stat_val, "st_size", 0),
+ "uid": getattr(stat_val, "st_uid", 0),
+ "gid": getattr(stat_val, "st_gid", 0),
+ "permissions": getattr(stat_val, "st_mode", 0),
+ "atime": int(getattr(stat_val, "st_atime", 0)),
+ "mtime": int(getattr(stat_val, "st_mtime", 0)),
}
@with_sftp_error
@@ -383,7 +384,8 @@ class TransportSFTPServer:
"""See `ISFTPServer`."""
path = self._decodePath(path)
return self.transport.mkdir(
- urlutils.escape(path), attrs['permissions'])
+ urlutils.escape(path), attrs["permissions"]
+ )
@with_sftp_error
def removeDirectory(self, path):
@@ -403,26 +405,26 @@ class TransportSFTPServer:
oldpath = self._decodePath(oldpath)
newpath = self._decodePath(newpath)
return self.transport.rename(
- urlutils.escape(oldpath), urlutils.escape(newpath))
+ urlutils.escape(oldpath), urlutils.escape(newpath)
+ )
@staticmethod
def translateError(failure, func_name):
"""Translate Bazaar errors to `filetransfer.SFTPError` instances."""
types_to_codes = {
bzr_errors.PermissionDenied: filetransfer.FX_PERMISSION_DENIED,
- bzr_errors.TransportNotPossible:
- filetransfer.FX_PERMISSION_DENIED,
+ bzr_errors.TransportNotPossible: filetransfer.FX_PERMISSION_DENIED,
bzr_errors.NoSuchFile: filetransfer.FX_NO_SUCH_FILE,
bzr_errors.FileExists: filetransfer.FX_FILE_ALREADY_EXISTS,
bzr_errors.DirectoryNotEmpty: filetransfer.FX_FAILURE,
bzr_errors.TransportError: filetransfer.FX_FAILURE,
FileIsADirectory: filetransfer.FX_FILE_IS_A_DIRECTORY,
- }
+ }
# Bazaar expects makeDirectory to fail with exactly the string "mkdir
# failed".
names_to_messages = {
- 'makeDirectory': 'mkdir failed',
- }
+ "makeDirectory": "mkdir failed",
+ }
try:
sftp_code = types_to_codes[failure.type]
except KeyError:
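
The with_sftp_error decorator and the translateError table above work together: any failure on the Deferred returned by a server method is mapped to an SFTP status code before it reaches the client. A small self-contained demonstration of that pattern follows; it needs Twisted (with conch) installed, and NoSuchFile here is a stand-in for the Breezy exception, not the real class:

from twisted.conch.ssh import filetransfer
from twisted.internet import defer
from twisted.python import util

class NoSuchFile(Exception):
    """Stand-in for breezy.errors.NoSuchFile, to keep the sketch standalone."""

TYPES_TO_CODES = {
    PermissionError: filetransfer.FX_PERMISSION_DENIED,
    NoSuchFile: filetransfer.FX_NO_SUCH_FILE,
}

def translate_error(failure, func_name):
    # Unknown exception types fall back to FX_FAILURE, as in the table above.
    code = TYPES_TO_CODES.get(failure.type, filetransfer.FX_FAILURE)
    raise filetransfer.SFTPError(code, failure.getErrorMessage())

def with_sftp_error(func):
    # Same shape as the decorator above: run the wrapped method, then
    # translate any failure on the resulting Deferred.
    def decorator(*args, **kwargs):
        deferred = func(*args, **kwargs)
        return deferred.addErrback(translate_error, func.__name__)
    return util.mergeFunctionMetadata(func, decorator)

@with_sftp_error
def read_file(path):
    def fail():
        raise NoSuchFile(path)
    # A real server method would return a Deferred from the VFS transport.
    return defer.maybeDeferred(fail)

d = read_file("missing.txt")
d.addErrback(lambda f: print(f.value.code == filetransfer.FX_NO_SUCH_FILE))  # True
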
diff --git a/lib/lp/codehosting/sshserver/daemon.py b/lib/lp/codehosting/sshserver/daemon.py
index 657e608..259d125 100644
--- a/lib/lp/codehosting/sshserver/daemon.py
+++ b/lib/lp/codehosting/sshserver/daemon.py
@@ -4,28 +4,22 @@
"""Glues the codehosting SSH daemon together."""
__all__ = [
- 'ACCESS_LOG_NAME',
- 'CodehostingAvatar',
- 'get_key_path',
- 'get_portal',
- 'LOG_NAME',
- 'make_portal',
- 'PRIVATE_KEY_FILE',
- 'PUBLIC_KEY_FILE',
- ]
+ "ACCESS_LOG_NAME",
+ "CodehostingAvatar",
+ "get_key_path",
+ "get_portal",
+ "LOG_NAME",
+ "make_portal",
+ "PRIVATE_KEY_FILE",
+ "PUBLIC_KEY_FILE",
+]
import os
-from lazr.sshserver.auth import (
- LaunchpadAvatar,
- PublicKeyFromLaunchpadChecker,
- )
+from lazr.sshserver.auth import LaunchpadAvatar, PublicKeyFromLaunchpadChecker
from twisted.conch.interfaces import ISession
from twisted.conch.ssh import filetransfer
-from twisted.cred.portal import (
- IRealm,
- Portal,
- )
+from twisted.cred.portal import IRealm, Portal
from twisted.python import components
from twisted.web.xmlrpc import Proxy
from zope.interface import implementer
@@ -34,14 +28,13 @@ from lp.codehosting import sftp
from lp.codehosting.sshserver.session import launch_smart_server
from lp.services.config import config
-
# The names of the key files of the server itself. The directory itself is
# given in config.codehosting.host_key_pair_path.
-PRIVATE_KEY_FILE = 'ssh_host_key_rsa'
-PUBLIC_KEY_FILE = 'ssh_host_key_rsa.pub'
+PRIVATE_KEY_FILE = "ssh_host_key_rsa"
+PUBLIC_KEY_FILE = "ssh_host_key_rsa.pub"
-LOG_NAME = 'codehosting'
-ACCESS_LOG_NAME = 'codehosting.access'
+LOG_NAME = "codehosting"
+ACCESS_LOG_NAME = "codehosting.access"
class CodehostingAvatar(LaunchpadAvatar):
@@ -59,20 +52,19 @@ class CodehostingAvatar(LaunchpadAvatar):
components.registerAdapter(launch_smart_server, CodehostingAvatar, ISession)
components.registerAdapter(
- sftp.avatar_to_sftp_server, CodehostingAvatar, filetransfer.ISFTPServer)
+ sftp.avatar_to_sftp_server, CodehostingAvatar, filetransfer.ISFTPServer
+)
@implementer(IRealm)
class Realm:
-
def __init__(self, authentication_proxy, codehosting_proxy):
self.authentication_proxy = authentication_proxy
self.codehosting_proxy = codehosting_proxy
def requestAvatar(self, avatar_id, mind, *interfaces):
# Fetch the user's details from the authserver
- deferred = mind.lookupUserDetails(
- self.authentication_proxy, avatar_id)
+ deferred = mind.lookupUserDetails(self.authentication_proxy, avatar_id)
# Once all those details are retrieved, we can construct the avatar.
def got_user_dict(user_dict):
@@ -85,8 +77,7 @@ class Realm:
def get_portal(authentication_proxy, codehosting_proxy):
"""Get a portal for connecting to Launchpad codehosting."""
portal = Portal(Realm(authentication_proxy, codehosting_proxy))
- portal.registerChecker(
- PublicKeyFromLaunchpadChecker(authentication_proxy))
+ portal.registerChecker(PublicKeyFromLaunchpadChecker(authentication_proxy))
return portal
@@ -102,7 +93,9 @@ def make_portal():
avatars (see `CodehostingAvatar`).
"""
authentication_proxy = Proxy(
- config.codehosting.authentication_endpoint.encode('UTF-8'))
+ config.codehosting.authentication_endpoint.encode("UTF-8")
+ )
codehosting_proxy = Proxy(
- config.codehosting.codehosting_endpoint.encode('UTF-8'))
+ config.codehosting.codehosting_endpoint.encode("UTF-8")
+ )
return get_portal(authentication_proxy, codehosting_proxy)
diff --git a/lib/lp/codehosting/sshserver/session.py b/lib/lp/codehosting/sshserver/session.py
index 4f0d549..7c8672d 100644
--- a/lib/lp/codehosting/sshserver/session.py
+++ b/lib/lp/codehosting/sshserver/session.py
@@ -4,15 +4,15 @@
"""SSH session implementations for the codehosting SSH server."""
__all__ = [
- 'launch_smart_server',
- ]
+ "launch_smart_server",
+]
import os
from urllib.parse import urlparse
+import six
from lazr.sshserver.events import AvatarEvent
from lazr.sshserver.session import DoNothingSession
-import six
from twisted.internet import process
from twisted.python import log
from zope.event import notify
@@ -23,12 +23,12 @@ from lp.services.config import config
class BazaarSSHStarted(AvatarEvent):
- template = '[%(session_id)s] %(username)s started bzr+ssh session.'
+ template = "[%(session_id)s] %(username)s started bzr+ssh session."
class BazaarSSHClosed(AvatarEvent):
- template = '[%(session_id)s] %(username)s closed bzr+ssh session.'
+ template = "[%(session_id)s] %(username)s closed bzr+ssh session."
class ForbiddenCommand(Exception):
@@ -47,6 +47,7 @@ class ExecOnlySession(DoNothingSession):
@classmethod
def getAvatarAdapter(klass, environment=None):
from twisted.internet import reactor
+
return lambda avatar: klass(avatar, reactor, environment)
def closed(self):
@@ -57,7 +58,7 @@ class ExecOnlySession(DoNothingSession):
# class knows nothing about Bazaar.
notify(BazaarSSHClosed(self.avatar))
try:
- self._transport.signalProcess('HUP')
+ self._transport.signalProcess("HUP")
except (OSError, process.ProcessExitedAlready):
pass
self._transport.loseConnection()
@@ -79,23 +80,26 @@ class ExecOnlySession(DoNothingSession):
try:
executable, arguments = self.getCommandToRun(command)
except ForbiddenCommand as e:
- self.errorWithMessage(protocol, str(e) + '\r\n')
+ self.errorWithMessage(protocol, str(e) + "\r\n")
return
- log.msg('Running: %r, %r' % (executable, arguments))
+ log.msg("Running: %r, %r" % (executable, arguments))
if self._transport is not None:
log.err(
"ERROR: %r already running a command on transport %r"
- % (self, self._transport))
+ % (self, self._transport)
+ )
# XXX: JonathanLange 2008-12-23: This is something of an abstraction
# violation. Apart from this line and its twin, this class knows
# nothing about Bazaar.
notify(BazaarSSHStarted(self.avatar))
- self._transport = self._spawn(protocol, executable, arguments,
- env=self.environment)
+ self._transport = self._spawn(
+ protocol, executable, arguments, env=self.environment
+ )
def _spawn(self, protocol, executable, arguments, env):
- return self.reactor.spawnProcess(protocol, executable, arguments,
- env=env)
+ return self.reactor.spawnProcess(
+ protocol, executable, arguments, env=env
+ )
def getCommandToRun(self, command):
"""Return the command that will actually be run given `command`.
@@ -112,8 +116,9 @@ class ExecOnlySession(DoNothingSession):
class RestrictedExecOnlySession(ExecOnlySession):
"""Conch session that only allows specific commands to be executed."""
- def __init__(self, avatar, reactor, lookup_command_template,
- environment=None):
+ def __init__(
+ self, avatar, reactor, lookup_command_template, environment=None
+ ):
"""Construct a RestrictedExecOnlySession.
:param avatar: See `ExecOnlySession`.
@@ -130,8 +135,10 @@ class RestrictedExecOnlySession(ExecOnlySession):
@classmethod
def getAvatarAdapter(klass, lookup_command_template, environment=None):
from twisted.internet import reactor
- return lambda avatar: klass(avatar, reactor, lookup_command_template,
- environment)
+
+ return lambda avatar: klass(
+ avatar, reactor, lookup_command_template, environment
+ )
def getCommandToRun(self, command):
"""As in ExecOnlySession, but only allow a particular command.
@@ -140,8 +147,8 @@ class RestrictedExecOnlySession(ExecOnlySession):
"""
executed_command_template = self.lookup_command_template(command)
return ExecOnlySession.getCommandToRun(
- self, executed_command_template
- % {'user_id': self.avatar.user_id})
+ self, executed_command_template % {"user_id": self.avatar.user_id}
+ )
def lookup_command_template(command):
@@ -152,15 +159,16 @@ def lookup_command_template(command):
:raise ForbiddenCommand: Raised when command isn't allowed
"""
python_command = "%(root)s/bin/py %(brz)s" % {
- 'root': config.root,
- 'brz': get_brz_path(),
- }
+ "root": config.root,
+ "brz": get_brz_path(),
+ }
args = " lp-serve --inet %(user_id)s"
command_template = python_command + args
if command in (
- b'bzr serve --inet --directory=/ --allow-writes',
- b'brz serve --inet --directory=/ --allow-writes'):
+ b"bzr serve --inet --directory=/ --allow-writes",
+ b"brz serve --inet --directory=/ --allow-writes",
+ ):
return command_template
# At the moment, only bzr/brz branch serving is allowed.
raise ForbiddenCommand("Not allowed to execute %r." % (command,))
@@ -173,7 +181,8 @@ def launch_smart_server(avatar):
# Extract the hostname from the supermirror root config.
hostname = urlparse(config.codehosting.supermirror_root)[1]
- environment['BRZ_EMAIL'] = '%s@%s' % (avatar.username, hostname)
+ environment["BRZ_EMAIL"] = "%s@%s" % (avatar.username, hostname)
return RestrictedExecOnlySession(
- avatar, reactor, lookup_command_template, environment=environment)
+ avatar, reactor, lookup_command_template, environment=environment
+ )
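
The command whitelist in lookup_command_template is the security-sensitive part of this file: only the exact bzr/brz smart-server invocation is accepted, and the returned template keeps a %(user_id)s placeholder that RestrictedExecOnlySession.getCommandToRun fills in later with the avatar's id. A standalone sketch follows; the paths stand in for config.root and get_brz_path(), which are supplied by the Launchpad config rather than shown here:

class ForbiddenCommand(Exception):
    """Raised for anything other than the whitelisted smart-server command."""

def lookup_command_template(
    command, root="/srv/launchpad", brz="/srv/launchpad/bin/brz"
):
    # root and brz are illustrative values for config.root / get_brz_path().
    if command in (
        b"bzr serve --inet --directory=/ --allow-writes",
        b"brz serve --inet --directory=/ --allow-writes",
    ):
        # The %(user_id)s placeholder is deliberately left unfilled; the
        # session object substitutes the authenticated user's id later.
        return "%s/bin/py %s lp-serve --inet %%(user_id)s" % (root, brz)
    raise ForbiddenCommand("Not allowed to execute %r." % (command,))

template = lookup_command_template(
    b"brz serve --inet --directory=/ --allow-writes"
)
print(template % {"user_id": 42})
# -> /srv/launchpad/bin/py /srv/launchpad/bin/brz lp-serve --inet 42
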
diff --git a/lib/lp/codehosting/sshserver/tests/test_daemon.py b/lib/lp/codehosting/sshserver/tests/test_daemon.py
index 0c2aef0..856c37e 100644
--- a/lib/lp/codehosting/sshserver/tests/test_daemon.py
+++ b/lib/lp/codehosting/sshserver/tests/test_daemon.py
@@ -3,21 +3,18 @@
"""Tests for the codehosting SSH server glue."""
-from lazr.sshserver.auth import (
- NoSuchPersonWithName,
- SSHUserAuthServer,
- )
+from lazr.sshserver.auth import NoSuchPersonWithName, SSHUserAuthServer
from lazr.sshserver.service import Factory
from twisted.conch.ssh.common import NS
from twisted.conch.ssh.keys import Key
from twisted.internet.testing import StringTransport
from lp.codehosting.sshserver.daemon import (
- get_key_path,
- get_portal,
PRIVATE_KEY_FILE,
PUBLIC_KEY_FILE,
- )
+ get_key_path,
+ get_portal,
+)
from lp.testing import TestCase
from lp.xmlrpc import faults
@@ -38,10 +35,9 @@ class TestFactory(TestCase):
"""Create and start the factory that our SSH server uses."""
factory = Factory(
get_portal(None, None),
- private_key=Key.fromFile(
- get_key_path(PRIVATE_KEY_FILE)),
- public_key=Key.fromFile(
- get_key_path(PUBLIC_KEY_FILE)))
+ private_key=Key.fromFile(get_key_path(PRIVATE_KEY_FILE)),
+ public_key=Key.fromFile(get_key_path(PUBLIC_KEY_FILE)),
+ )
factory.startFactory()
return factory
@@ -65,7 +61,7 @@ class TestFactory(TestCase):
has begun.
"""
server_transport = self.startConnecting(factory)
- server_transport.ssh_SERVICE_REQUEST(NS('ssh-userauth'))
+ server_transport.ssh_SERVICE_REQUEST(NS("ssh-userauth"))
self.addCleanup(server_transport.service.serviceStopped)
return server_transport
@@ -97,4 +93,5 @@ class TestXMLRPC(TestCase):
# that in lp.xmlrpc.faults.
self.assertEqual(
faults.NoSuchPersonWithName.error_code,
- NoSuchPersonWithName.error_code)
+ NoSuchPersonWithName.error_code,
+ )
diff --git a/lib/lp/codehosting/sshserver/tests/test_session.py b/lib/lp/codehosting/sshserver/tests/test_session.py
index 3184dd7..3aa6366 100644
--- a/lib/lp/codehosting/sshserver/tests/test_session.py
+++ b/lib/lp/codehosting/sshserver/tests/test_session.py
@@ -8,17 +8,14 @@ from twisted.conch.ssh import connection
from twisted.internet.process import ProcessExitedAlready
from twisted.internet.protocol import ProcessProtocol
-from lp.codehosting import (
- get_brz_path,
- get_BRZ_PLUGIN_PATH_for_subprocess,
- )
+from lp.codehosting import get_brz_path, get_BRZ_PLUGIN_PATH_for_subprocess
from lp.codehosting.sshserver.daemon import CodehostingAvatar
from lp.codehosting.sshserver.session import (
ExecOnlySession,
ForbiddenCommand,
- lookup_command_template,
RestrictedExecOnlySession,
- )
+ lookup_command_template,
+)
from lp.codehosting.tests.helpers import AvatarTestCase
from lp.services.config import config
from lp.testing import TestCase
@@ -32,14 +29,25 @@ class MockReactor:
def __init__(self):
self.log = []
- def spawnProcess(self, protocol, executable, args, env=None, path=None,
- uid=None, gid=None, usePTY=0, childFDs=None):
- self.log.append((protocol, executable, args, env, path, uid, gid,
- usePTY, childFDs))
+ def spawnProcess(
+ self,
+ protocol,
+ executable,
+ args,
+ env=None,
+ path=None,
+ uid=None,
+ gid=None,
+ usePTY=0,
+ childFDs=None,
+ ):
+ self.log.append(
+ (protocol, executable, args, env, path, uid, gid, usePTY, childFDs)
+ )
return MockProcessTransport(executable)
def addReader(self, reader):
- self.log.append(('addReader', reader))
+ self.log.append(("addReader", reader))
class MockSSHSession:
@@ -49,7 +57,7 @@ class MockSSHSession:
self.log = log
def writeExtended(self, channel, data):
- self.log.append(('writeExtended', channel, data))
+ self.log.append(("writeExtended", channel, data))
class MockProcessTransport:
@@ -64,26 +72,26 @@ class MockProcessTransport:
self.status = None
def closeStdin(self):
- self.log.append(('closeStdin',))
+ self.log.append(("closeStdin",))
def loseConnection(self):
- self.log.append(('loseConnection',))
+ self.log.append(("loseConnection",))
def childConnectionLost(self, childFD, reason=None):
- self.log.append(('childConnectionLost', childFD, reason))
+ self.log.append(("childConnectionLost", childFD, reason))
def signalProcess(self, signal):
- if self._executable == b'raise-os-error':
+ if self._executable == b"raise-os-error":
raise OSError()
- if self._executable == b'already-terminated':
+ if self._executable == b"already-terminated":
raise ProcessExitedAlready()
- self.log.append(('signalProcess', signal))
+ self.log.append(("signalProcess", signal))
def write(self, data):
- self.log.append(('write', data))
+ self.log.append(("write", data))
def processEnded(self, status):
- self.log.append(('processEnded', status))
+ self.log.append(("processEnded", status))
class TestExecOnlySession(AvatarTestCase):
@@ -112,24 +120,33 @@ class TestExecOnlySession(AvatarTestCase):
def test_openShellNotImplemented(self):
# openShell closes the connection.
- protocol = MockProcessTransport(b'bash')
+ protocol = MockProcessTransport(b"bash")
self.session.openShell(protocol)
self.assertEqual(
- [('writeExtended', connection.EXTENDED_DATA_STDERR,
- 'No shells on this server.\r\n'),
- ('loseConnection',)],
- protocol.log)
+ [
+ (
+ "writeExtended",
+ connection.EXTENDED_DATA_STDERR,
+ "No shells on this server.\r\n",
+ ),
+ ("loseConnection",),
+ ],
+ protocol.log,
+ )
def test_windowChangedNotImplemented(self):
# windowChanged raises a NotImplementedError. It doesn't matter what
# we pass it.
- self.assertRaises(NotImplementedError,
- self.session.windowChanged, None)
+ self.assertRaises(
+ NotImplementedError, self.session.windowChanged, None
+ )
def test_providesISession(self):
# ExecOnlySession must provide ISession.
- self.assertTrue(ISession.providedBy(self.session),
- "ExecOnlySession doesn't implement ISession")
+ self.assertTrue(
+ ISession.providedBy(self.session),
+ "ExecOnlySession doesn't implement ISession",
+ )
def test_closedDoesNothingWhenNoCommand(self):
# When no process has been created, 'closed' is a no-op.
@@ -144,11 +161,12 @@ class TestExecOnlySession(AvatarTestCase):
# inside, it tells the process transport to end the connection between
# the SSH server and the child process.
protocol = ProcessProtocol()
- self.session.execCommand(protocol, b'cat /etc/hostname')
+ self.session.execCommand(protocol, b"cat /etc/hostname")
self.session.closed()
self.assertEqual(
- [('signalProcess', 'HUP'), ('loseConnection',)],
- self.session._transport.log)
+ [("signalProcess", "HUP"), ("loseConnection",)],
+ self.session._transport.log,
+ )
def test_closedDisconnectsIfProcessCantBeTerminated(self):
# 'closed' still calls 'loseConnection' on the transport, even if the
@@ -156,11 +174,9 @@ class TestExecOnlySession(AvatarTestCase):
protocol = ProcessProtocol()
# MockTransport will raise an OSError on signalProcess if the executed
# command is 'raise-os-error'.
- self.session.execCommand(protocol, b'raise-os-error')
+ self.session.execCommand(protocol, b"raise-os-error")
self.session.closed()
- self.assertEqual(
- [('loseConnection',)],
- self.session._transport.log)
+ self.assertEqual([("loseConnection",)], self.session._transport.log)
def test_closedDisconnectsIfProcessAlreadyTerminated(self):
# 'closed' still calls 'loseConnection' on the transport, even if the
@@ -168,27 +184,40 @@ class TestExecOnlySession(AvatarTestCase):
protocol = ProcessProtocol()
# MockTransport will raise a ProcessExitedAlready on signalProcess if
# the executed command is 'already-terminated'.
- self.session.execCommand(protocol, b'already-terminated')
+ self.session.execCommand(protocol, b"already-terminated")
self.session.closed()
- self.assertEqual([('loseConnection',)], self.session._transport.log)
+ self.assertEqual([("loseConnection",)], self.session._transport.log)
def test_getCommandToRunSplitsCommandLine(self):
# getCommandToRun takes a command line and splits it into the name of
# an executable to run and a sequence of arguments.
- command = b'cat foo bar'
+ command = b"cat foo bar"
executable, arguments = self.session.getCommandToRun(command)
- self.assertEqual(b'cat', executable)
- self.assertEqual([b'cat', b'foo', b'bar'], list(arguments))
+ self.assertEqual(b"cat", executable)
+ self.assertEqual([b"cat", b"foo", b"bar"], list(arguments))
def test_execCommandSpawnsProcess(self):
# ExecOnlySession.execCommand spawns the appropriate process.
protocol = ProcessProtocol()
- command = b'cat /etc/hostname'
+ command = b"cat /etc/hostname"
self.session.execCommand(protocol, command)
executable, arguments = self.session.getCommandToRun(command)
- self.assertEqual([(protocol, executable, arguments, None, None,
- None, None, 0, None)],
- self.reactor.log)
+ self.assertEqual(
+ [
+ (
+ protocol,
+ executable,
+ arguments,
+ None,
+ None,
+ None,
+ None,
+ 0,
+ None,
+ )
+ ],
+ self.reactor.log,
+ )
def test_eofReceivedDoesNothingWhenNoCommand(self):
# When no process has been created, 'eofReceived' is a no-op.
@@ -200,20 +229,23 @@ class TestExecOnlySession(AvatarTestCase):
# 'eofReceived' closes standard input when called while a command is
# running.
protocol = ProcessProtocol()
- self.session.execCommand(protocol, b'cat /etc/hostname')
+ self.session.execCommand(protocol, b"cat /etc/hostname")
self.session.eofReceived()
- self.assertEqual([('closeStdin',)], self.session._transport.log)
+ self.assertEqual([("closeStdin",)], self.session._transport.log)
def test_getAvatarAdapter(self):
# getAvatarAdapter is a convenience classmethod so that
# ExecOnlySession can be easily registered as an adapter for Conch
# avatars.
from twisted.internet import reactor
+
adapter = ExecOnlySession.getAvatarAdapter()
session = adapter(self.avatar)
- self.assertTrue(isinstance(session, ExecOnlySession),
- "ISession(avatar) doesn't adapt to ExecOnlySession. "
- "Got %r instead." % (session,))
+ self.assertTrue(
+ isinstance(session, ExecOnlySession),
+ "ISession(avatar) doesn't adapt to ExecOnlySession. "
+ "Got %r instead." % (session,),
+ )
self.assertIs(self.avatar, session.avatar)
self.assertIs(reactor, session.reactor)
@@ -221,22 +253,34 @@ class TestExecOnlySession(AvatarTestCase):
# The environment for the executed process can be specified in the
# ExecOnlySession constructor.
session = ExecOnlySession(
- self.avatar, self.reactor, environment={'FOO': 'BAR'})
+ self.avatar, self.reactor, environment={"FOO": "BAR"}
+ )
protocol = ProcessProtocol()
- session.execCommand(protocol, b'yes')
- self.assertEqual({'FOO': 'BAR'}, session.environment)
+ session.execCommand(protocol, b"yes")
+ self.assertEqual({"FOO": "BAR"}, session.environment)
self.assertEqual(
- [(protocol, b'yes', [b'yes'], {'FOO': 'BAR'}, None, None, None, 0,
- None)],
- self.reactor.log)
+ [
+ (
+ protocol,
+ b"yes",
+ [b"yes"],
+ {"FOO": "BAR"},
+ None,
+ None,
+ None,
+ 0,
+ None,
+ )
+ ],
+ self.reactor.log,
+ )
def test_environmentInGetAvatarAdapter(self):
# We can pass the environment into getAvatarAdapter so that it is used
# when we adapt the session.
- adapter = ExecOnlySession.getAvatarAdapter(
- environment={'FOO': 'BAR'})
+ adapter = ExecOnlySession.getAvatarAdapter(environment={"FOO": "BAR"})
session = adapter(self.avatar)
- self.assertEqual({'FOO': 'BAR'}, session.environment)
+ self.assertEqual({"FOO": "BAR"}, session.environment)
class TestRestrictedExecOnlySession(AvatarTestCase):
@@ -257,12 +301,13 @@ class TestRestrictedExecOnlySession(AvatarTestCase):
self.reactor = MockReactor()
def lookup_template(command):
- if command == 'foo':
- return 'bar baz %(user_id)s'
+ if command == "foo":
+ return "bar baz %(user_id)s"
raise ForbiddenCommand("Not allowed to execute %r." % command)
self.session = RestrictedExecOnlySession(
- self.avatar, self.reactor, lookup_template)
+ self.avatar, self.reactor, lookup_template
+ )
def test_makeRestrictedExecOnlySession(self):
# A RestrictedExecOnlySession is constructed with an avatar, a reactor
@@ -270,13 +315,16 @@ class TestRestrictedExecOnlySession(AvatarTestCase):
self.assertTrue(
isinstance(self.session, RestrictedExecOnlySession),
"%r not an instance of RestrictedExecOnlySession"
- % (self.session,))
+ % (self.session,),
+ )
self.assertEqual(self.avatar, self.session.avatar)
self.assertEqual(self.reactor, self.session.reactor)
- self.assertEqual('bar baz %(user_id)s',
- self.session.lookup_command_template('foo'))
- self.assertRaises(ForbiddenCommand,
- self.session.lookup_command_template, 'notfoo')
+ self.assertEqual(
+ "bar baz %(user_id)s", self.session.lookup_command_template("foo")
+ )
+ self.assertRaises(
+ ForbiddenCommand, self.session.lookup_command_template, "notfoo"
+ )
def test_execCommandRejectsUnauthorizedCommands(self):
# execCommand rejects all commands except for the command specified in
@@ -285,24 +333,30 @@ class TestRestrictedExecOnlySession(AvatarTestCase):
# Note that Conch doesn't have a well-defined way of rejecting
# commands. Disconnecting in execCommand will do. We don't raise
# an exception to avoid logging an OOPS.
- protocol = MockProcessTransport(b'cat')
+ protocol = MockProcessTransport(b"cat")
+ self.assertEqual(None, self.session.execCommand(protocol, b"cat"))
self.assertEqual(
- None, self.session.execCommand(protocol, b'cat'))
- self.assertEqual(
- [('writeExtended', connection.EXTENDED_DATA_STDERR,
- "Not allowed to execute %r.\r\n" % b'cat'),
- ('loseConnection',)],
- protocol.log)
+ [
+ (
+ "writeExtended",
+ connection.EXTENDED_DATA_STDERR,
+ "Not allowed to execute %r.\r\n" % b"cat",
+ ),
+ ("loseConnection",),
+ ],
+ protocol.log,
+ )
def test_getCommandToRunReturnsTemplateCommand(self):
# When passed the allowed command, getCommandToRun always returns the
# executable and arguments corresponding to the provided executed
# command template.
- executable, arguments = self.session.getCommandToRun('foo')
- self.assertEqual(b'bar', executable)
+ executable, arguments = self.session.getCommandToRun("foo")
+ self.assertEqual(b"bar", executable)
self.assertEqual(
- [b'bar', b'baz', str(self.avatar.user_id).encode('UTF-8')],
- list(arguments))
+ [b"bar", b"baz", str(self.avatar.user_id).encode("UTF-8")],
+ list(arguments),
+ )
def test_getAvatarAdapter(self):
# getAvatarAdapter is a convenience classmethod so that
@@ -311,23 +365,23 @@ class TestRestrictedExecOnlySession(AvatarTestCase):
from twisted.internet import reactor
def lookup_template(command):
- if command == 'foo':
- return 'bar baz'
+ if command == "foo":
+ return "bar baz"
raise ForbiddenCommand(command)
- adapter = RestrictedExecOnlySession.getAvatarAdapter(
- lookup_template)
+ adapter = RestrictedExecOnlySession.getAvatarAdapter(lookup_template)
session = adapter(self.avatar)
self.assertTrue(
isinstance(session, RestrictedExecOnlySession),
"ISession(avatar) doesn't adapt to RestrictedExecOnlySession. "
- "Got %r instead." % (session,))
+ "Got %r instead." % (session,),
+ )
self.assertIs(self.avatar, session.avatar)
self.assertIs(reactor, session.reactor)
- self.assertEqual('bar baz',
- session.lookup_command_template('foo'))
- self.assertRaises(ForbiddenCommand,
- session.lookup_command_template, 'notfoo')
+ self.assertEqual("bar baz", session.lookup_command_template("foo"))
+ self.assertRaises(
+ ForbiddenCommand, session.lookup_command_template, "notfoo"
+ )
class TestSessionIntegration(AvatarTestCase):
@@ -345,41 +399,59 @@ class TestSessionIntegration(AvatarTestCase):
self.assertTrue(
isinstance(session, RestrictedExecOnlySession),
"ISession(avatar) doesn't adapt to ExecOnlySession. "
- "Got %r instead." % (session,))
+ "Got %r instead." % (session,),
+ )
self.assertEqual(
get_BRZ_PLUGIN_PATH_for_subprocess(),
- session.environment['BRZ_PLUGIN_PATH'])
+ session.environment["BRZ_PLUGIN_PATH"],
+ )
self.assertEqual(
- '%s@xxxxxxxxxxxxxxxxxxxxx' % self.avatar.username,
- session.environment['BRZ_EMAIL'])
+ "%s@xxxxxxxxxxxxxxxxxxxxx" % self.avatar.username,
+ session.environment["BRZ_EMAIL"],
+ )
executable, arguments = session.getCommandToRun(
- b'bzr serve --inet --directory=/ --allow-writes')
- interpreter = ('%s/bin/py' % config.root).encode('UTF-8')
+ b"bzr serve --inet --directory=/ --allow-writes"
+ )
+ interpreter = ("%s/bin/py" % config.root).encode("UTF-8")
self.assertEqual(interpreter, executable)
self.assertEqual(
- [interpreter, get_brz_path().encode('UTF-8'), b'lp-serve',
- b'--inet', str(self.avatar.user_id).encode('UTF-8')],
- list(arguments))
+ [
+ interpreter,
+ get_brz_path().encode("UTF-8"),
+ b"lp-serve",
+ b"--inet",
+ str(self.avatar.user_id).encode("UTF-8"),
+ ],
+ list(arguments),
+ )
self.assertRaises(
- ForbiddenCommand, session.getCommandToRun, b'rm -rf /')
+ ForbiddenCommand, session.getCommandToRun, b"rm -rf /"
+ )
class TestLookupCommand(TestCase):
-
def test_other(self):
- self.assertRaises(ForbiddenCommand, lookup_command_template, 'foo')
+ self.assertRaises(ForbiddenCommand, lookup_command_template, "foo")
def test_bzr(self):
self.assertEqual(
- config.root + '/bin/py ' + get_brz_path() +
- ' lp-serve --inet %(user_id)s',
+ config.root
+ + "/bin/py "
+ + get_brz_path()
+ + " lp-serve --inet %(user_id)s",
lookup_command_template(
- b'bzr serve --inet --directory=/ --allow-writes'))
+ b"bzr serve --inet --directory=/ --allow-writes"
+ ),
+ )
def test_brz(self):
self.assertEqual(
- config.root + '/bin/py ' + get_brz_path() +
- ' lp-serve --inet %(user_id)s',
+ config.root
+ + "/bin/py "
+ + get_brz_path()
+ + " lp-serve --inet %(user_id)s",
lookup_command_template(
- b'brz serve --inet --directory=/ --allow-writes'))
+ b"brz serve --inet --directory=/ --allow-writes"
+ ),
+ )
diff --git a/lib/lp/codehosting/tests/helpers.py b/lib/lp/codehosting/tests/helpers.py
index bb4a256..fbdb51d 100644
--- a/lib/lp/codehosting/tests/helpers.py
+++ b/lib/lp/codehosting/tests/helpers.py
@@ -4,22 +4,19 @@
"""Common helpers for codehosting tests."""
__all__ = [
- 'AvatarTestCase',
- 'create_branch_with_one_revision',
- 'force_stacked_on_url',
- 'LoomTestMixin',
- 'TestResultWrapper',
- ]
+ "AvatarTestCase",
+ "create_branch_with_one_revision",
+ "force_stacked_on_url",
+ "LoomTestMixin",
+ "TestResultWrapper",
+]
import os
from breezy.controldir import ControlDir
from breezy.errors import FileExists
from breezy.plugins.loom import branch as loom_branch
-from breezy.tests import (
- TestNotApplicable,
- TestSkipped,
- )
+from breezy.tests import TestNotApplicable, TestSkipped
from testtools.twistedsupport import AsynchronousDeferredRunTest
from lp.testing import TestCase
@@ -37,10 +34,10 @@ class AvatarTestCase(TestCase):
# A basic user dict, 'alice' is a member of no teams (aside from the
# user themself).
self.aliceUserDict = {
- 'id': 1,
- 'name': 'alice',
- 'teams': [{'id': 1, 'name': 'alice'}],
- 'initialBranches': [(1, [])]
+ "id": 1,
+ "name": "alice",
+ "teams": [{"id": 1, "name": "alice"}],
+ "initialBranches": [(1, [])],
}
@@ -48,20 +45,20 @@ class LoomTestMixin:
"""Mixin to provide Bazaar test classes with limited loom support."""
def loomify(self, branch):
- tree = branch.create_checkout('checkout')
+ tree = branch.create_checkout("checkout")
tree.lock_write()
try:
- tree.branch.nick = 'bottom-thread'
+ tree.branch.nick = "bottom-thread"
loom_branch.loomify(tree.branch)
finally:
tree.unlock()
loom_tree = tree.controldir.open_workingtree()
loom_tree.lock_write()
- loom_tree.branch.new_thread('bottom-thread')
- loom_tree.commit('this is a commit', rev_id=b'commit-1')
+ loom_tree.branch.new_thread("bottom-thread")
+ loom_tree.commit("this is a commit", rev_id=b"commit-1")
loom_tree.unlock()
- loom_tree.branch.record_loom('sample loom')
- self.get_transport().delete_tree('checkout')
+ loom_tree.branch.record_loom("sample loom")
+ self.get_transport().delete_tree("checkout")
return loom_tree
def makeLoomBranchAndTree(self, tree_directory):
@@ -69,16 +66,16 @@ class LoomTestMixin:
tree = self.make_branch_and_tree(tree_directory)
tree.lock_write()
try:
- tree.branch.nick = 'bottom-thread'
+ tree.branch.nick = "bottom-thread"
loom_branch.loomify(tree.branch)
finally:
tree.unlock()
loom_tree = tree.controldir.open_workingtree()
loom_tree.lock_write()
- loom_tree.branch.new_thread('bottom-thread')
- loom_tree.commit('this is a commit', rev_id=b'commit-1')
+ loom_tree.branch.new_thread("bottom-thread")
+ loom_tree.commit("this is a commit", rev_id=b"commit-1")
loom_tree.unlock()
- loom_tree.branch.record_loom('sample loom')
+ loom_tree.branch.record_loom("sample loom")
return loom_tree
@@ -90,10 +87,10 @@ def create_branch_with_one_revision(branch_dir, format=None):
tree = ControlDir.create_standalone_workingtree(branch_dir, format)
except FileExists:
return
- f = open(os.path.join(branch_dir, 'hello'), 'w')
- f.write('foo')
+ f = open(os.path.join(branch_dir, "hello"), "w")
+ f.write("foo")
f.close()
- tree.commit('message')
+ tree.commit("message")
return tree
@@ -104,7 +101,7 @@ def force_stacked_on_url(branch, url):
stacking. It's still worth testing that we don't blow up in the face of
them, so this function lets us create them anyway.
"""
- branch.get_config().set_user_option('stacked_on_location', url)
+ branch.get_config().set_user_option("stacked_on_location", url)
class TestResultWrapper:
diff --git a/lib/lp/codehosting/tests/servers.py b/lib/lp/codehosting/tests/servers.py
index 2bfa45e..fc00623 100644
--- a/lib/lp/codehosting/tests/servers.py
+++ b/lib/lp/codehosting/tests/servers.py
@@ -4,20 +4,17 @@
"""Server used in codehosting acceptance tests."""
__all__ = [
- 'CodeHostingTac',
- 'SSHCodeHostingServer',
- ]
+ "CodeHostingTac",
+ "SSHCodeHostingServer",
+]
import os
import shutil
import tempfile
-from breezy.transport import (
- get_transport,
- Server,
- )
import transaction
+from breezy.transport import Server, get_transport
from twisted.python.util import sibpath
from zope.component import getUtility
@@ -34,21 +31,23 @@ def set_up_test_user(test_user, test_team):
Also make sure that 'test_user' belongs to 'test_team'.
"""
person_set = getUtility(IPersonSet)
- testUser = person_set.getByName('no-priv')
+ testUser = person_set.getByName("no-priv")
testUser.name = test_user
testTeam = person_set.newTeam(
- testUser, test_team, test_team,
- membership_policy=TeamMembershipPolicy.OPEN)
+ testUser,
+ test_team,
+ test_team,
+ membership_policy=TeamMembershipPolicy.OPEN,
+ )
testUser.join(testTeam)
ssh_key_set = getUtility(ISSHKeySet)
- with open(sibpath(__file__, 'id_rsa.pub')) as f:
- pubkey_data = f.read().rstrip('\n')
+ with open(sibpath(__file__, "id_rsa.pub")) as f:
+ pubkey_data = f.read().rstrip("\n")
ssh_key_set.new(testUser, pubkey_data)
transaction.commit()
class CodeHostingTac(TacTestSetup):
-
def __init__(self, mirrored_area):
super().__init__()
# The mirrored area.
@@ -75,19 +74,19 @@ class CodeHostingTac(TacTestSetup):
@property
def tacfile(self):
return os.path.abspath(
- os.path.join(config.root, 'daemons', 'sftp.tac'))
+ os.path.join(config.root, "daemons", "sftp.tac")
+ )
@property
def logfile(self):
- return os.path.join(self.root, 'codehosting.log')
+ return os.path.join(self.root, "codehosting.log")
@property
def pidfile(self):
- return os.path.join(self.root, 'codehosting.pid')
+ return os.path.join(self.root, "codehosting.pid")
class SSHCodeHostingServer(Server):
-
def __init__(self, schema, tac_server):
Server.__init__(self)
self._schema = schema
@@ -98,20 +97,22 @@ class SSHCodeHostingServer(Server):
def setUpFakeHome(self):
user_home = os.path.abspath(tempfile.mkdtemp())
- os.makedirs(os.path.join(user_home, '.ssh'))
+ os.makedirs(os.path.join(user_home, ".ssh"))
shutil.copyfile(
- sibpath(__file__, 'id_rsa'),
- os.path.join(user_home, '.ssh', 'id_rsa'))
+ sibpath(__file__, "id_rsa"),
+ os.path.join(user_home, ".ssh", "id_rsa"),
+ )
shutil.copyfile(
- sibpath(__file__, 'id_rsa.pub'),
- os.path.join(user_home, '.ssh', 'id_rsa.pub'))
- os.chmod(os.path.join(user_home, '.ssh', 'id_rsa'), 0o600)
- real_home, os.environ['HOME'] = os.environ['HOME'], user_home
+ sibpath(__file__, "id_rsa.pub"),
+ os.path.join(user_home, ".ssh", "id_rsa.pub"),
+ )
+ os.chmod(os.path.join(user_home, ".ssh", "id_rsa"), 0o600)
+ real_home, os.environ["HOME"] = os.environ["HOME"], user_home
return real_home, user_home
def getTransport(self, path=None):
if path is None:
- path = ''
+ path = ""
transport = get_transport(self.get_url()).clone(path)
return transport
@@ -119,10 +120,10 @@ class SSHCodeHostingServer(Server):
self._real_home, self._fake_home = self.setUpFakeHome()
def stop_server(self):
- os.environ['HOME'] = self._real_home
+ os.environ["HOME"] = self._real_home
shutil.rmtree(self._fake_home)
def get_url(self, user=None):
if user is None:
- user = 'testuser'
- return '%s://%s@xxxxxxxxxxxxxxxxxxxxx:22222/' % (self._schema, user)
+ user = "testuser"
+ return "%s://%s@xxxxxxxxxxxxxxxxxxxxx:22222/" % (self._schema, user)
diff --git a/lib/lp/codehosting/tests/test_acceptance.py b/lib/lp/codehosting/tests/test_acceptance.py
index 835b4d8..c9bb224 100644
--- a/lib/lp/codehosting/tests/test_acceptance.py
+++ b/lib/lp/codehosting/tests/test_acceptance.py
@@ -5,43 +5,33 @@
import os
import re
-from urllib.request import urlopen
import xmlrpc.client
+from urllib.request import urlopen
import breezy.branch
+import six
from breezy.tests import TestCaseWithTransport
from breezy.tests.per_repository import all_repository_format_scenarios
from breezy.urlutils import local_path_from_url
from breezy.workingtree import WorkingTree
-import six
-from testscenarios import (
- load_tests_apply_scenarios,
- WithScenarios,
- )
+from testscenarios import WithScenarios, load_tests_apply_scenarios
from zope.component import getUtility
-from lp.code.bzr import (
- BranchFormat,
- ControlFormat,
- RepositoryFormat,
- )
+from lp.code.bzr import BranchFormat, ControlFormat, RepositoryFormat
from lp.code.enums import BranchType
from lp.code.interfaces.branch import IBranchSet
from lp.code.interfaces.branchnamespace import get_branch_namespace
from lp.code.tests.helpers import (
get_non_existant_source_package_branch_unique_name,
- )
-from lp.codehosting import (
- get_brz_path,
- get_BRZ_PLUGIN_PATH_for_subprocess,
- )
+)
+from lp.codehosting import get_brz_path, get_BRZ_PLUGIN_PATH_for_subprocess
from lp.codehosting.bzrutils import DenyingServer
from lp.codehosting.tests.helpers import LoomTestMixin
from lp.codehosting.tests.servers import (
CodeHostingTac,
- set_up_test_user,
SSHCodeHostingServer,
- )
+ set_up_test_user,
+)
from lp.codehosting.vfs import branch_id_to_path
from lp.registry.model.person import Person
from lp.registry.model.product import Product
@@ -59,7 +49,8 @@ class SSHServerLayer(ZopelessAppServerLayer):
def getTacHandler(cls):
if cls._tac_handler is None:
cls._tac_handler = CodeHostingTac(
- config.codehosting.mirrored_branches_root)
+ config.codehosting.mirrored_branches_root
+ )
return cls._tac_handler
@classmethod
@@ -85,7 +76,7 @@ class SSHServerLayer(ZopelessAppServerLayer):
@profiled
def testSetUp(cls):
SSHServerLayer._reset()
- set_up_test_user('testuser', 'testteam')
+ set_up_test_user("testuser", "testteam")
@classmethod
@profiled
@@ -110,17 +101,17 @@ class SSHTestCase(TestCaseWithTransport, LoomTestMixin, TestCaseWithFactory):
# Prevent creation of in-process sftp:// and bzr+ssh:// transports --
# such connections tend to leak threads and occasionally create
# uncollectable garbage.
- ssh_denier = DenyingServer(['bzr+ssh://', 'sftp://'])
+ ssh_denier = DenyingServer(["bzr+ssh://", "sftp://"])
ssh_denier.start_server()
self.addCleanup(ssh_denier.stop_server)
# Create a local branch with one revision
- tree = self.make_branch_and_tree('local')
+ tree = self.make_branch_and_tree("local")
self.local_branch = tree.branch
self.local_branch_path = local_path_from_url(self.local_branch.base)
- self.build_tree(['local/foo'])
- tree.add('foo')
- self.revid = six.ensure_text(tree.commit('Added foo'))
+ self.build_tree(["local/foo"])
+ tree.add("foo")
+ self.revid = six.ensure_text(tree.commit("Added foo"))
def __str__(self):
return self.id()
@@ -143,26 +134,29 @@ class SSHTestCase(TestCaseWithTransport, LoomTestMixin, TestCaseWithFactory):
(mainly so we can test the loom support).
"""
output, error = self.run_bzr_subprocess(
- args, env_changes={
- 'BRZ_SSH': 'paramiko',
- 'BRZ_PLUGIN_PATH': get_BRZ_PLUGIN_PATH_for_subprocess()
+ args,
+ env_changes={
+ "BRZ_SSH": "paramiko",
+ "BRZ_PLUGIN_PATH": get_BRZ_PLUGIN_PATH_for_subprocess(),
},
- allow_plugins=True, retcode=retcode)
- return output.decode('UTF-8'), error.decode('UTF-8')
+ allow_plugins=True,
+ retcode=retcode,
+ )
+ return output.decode("UTF-8"), error.decode("UTF-8")
def _run_bzr_error(self, args):
- """Run bzr expecting an error, returning the error message.
- """
+ """Run bzr expecting an error, returning the error message."""
output, error = self._run_bzr(args, retcode=3)
for line in error.splitlines():
if line.startswith("brz: ERROR"):
return line
raise AssertionError(
- "Didn't find error line in output:\n\n%s\n" % error)
+ "Didn't find error line in output:\n\n%s\n" % error
+ )
def branch(self, remote_url, local_directory):
"""Branch from the given URL to a local directory."""
- self._run_bzr(['branch', remote_url, local_directory])
+ self._run_bzr(["branch", remote_url, local_directory])
def get_brz_path(self):
"""See `breezy.tests.TestCase.get_brz_path`.
@@ -173,7 +167,7 @@ class SSHTestCase(TestCaseWithTransport, LoomTestMixin, TestCaseWithFactory):
def push(self, local_directory, remote_url, extra_args=None):
"""Push the local branch to the given URL."""
- args = ['push', '-d', local_directory, remote_url]
+ args = ["push", "-d", local_directory, remote_url]
if extra_args is not None:
args.extend(extra_args)
self._run_bzr(args)
@@ -189,27 +183,27 @@ class SSHTestCase(TestCaseWithTransport, LoomTestMixin, TestCaseWithFactory):
be the 'brz: ERROR: <repr of Exception>' line.
"""
error_line = self._run_bzr_error(
- ['push', '-d', local_directory, remote_url])
+ ["push", "-d", local_directory, remote_url]
+ )
# This will be the 'brz: ERROR: <repr of Exception>' line.
if not error_messages:
return error_line
for msg in error_messages:
- if error_line.startswith('brz: ERROR: ' + msg):
+ if error_line.startswith("brz: ERROR: " + msg):
return error_line
self.fail(
- "Error message %r didn't match any of those supplied."
- % error_line)
+ "Error message %r didn't match any of those supplied." % error_line
+ )
def getLastRevision(self, remote_url):
"""Get the last revision ID at the given URL."""
- output, error = self._run_bzr(
- ['revision-info', '-d', remote_url])
+ output, error = self._run_bzr(["revision-info", "-d", remote_url])
return output.split()[1]
def getTransportURL(self, relpath=None, username=None):
"""Return the base URL for the tests."""
if relpath is None:
- relpath = ''
+ relpath = ""
return self.server.get_url(username) + relpath
def getDatabaseBranch(self, personName, productName, branchName):
@@ -222,28 +216,33 @@ class SSHTestCase(TestCaseWithTransport, LoomTestMixin, TestCaseWithFactory):
namespace = get_branch_namespace(owner, product)
return namespace.getByName(branchName)
- def createBazaarBranch(self, user, product, branch, creator=None,
- branch_root=None):
+ def createBazaarBranch(
+ self, user, product, branch, creator=None, branch_root=None
+ ):
"""Create a new branch in the database and push our test branch there.
Used to create branches that the test user is not able to create, and
might not even be able to view.
"""
authserver = xmlrpc.client.ServerProxy(
- config.codehosting.authentication_endpoint)
+ config.codehosting.authentication_endpoint
+ )
codehosting_api = xmlrpc.client.ServerProxy(
- config.codehosting.codehosting_endpoint)
+ config.codehosting.codehosting_endpoint
+ )
if creator is None:
- creator_id = authserver.getUserAndSSHKeys(user)['id']
+ creator_id = authserver.getUserAndSSHKeys(user)["id"]
else:
- creator_id = authserver.getUserAndSSHKeys(creator)['id']
+ creator_id = authserver.getUserAndSSHKeys(creator)["id"]
if branch_root is None:
branch_root = self.server._mirror_root
branch_id = codehosting_api.createBranch(
- creator_id, '/~%s/%s/%s' % (user, product, branch))
- branch_url = 'file://' + os.path.abspath(
- os.path.join(branch_root, branch_id_to_path(branch_id)))
- self.push(self.local_branch_path, branch_url, ['--create-prefix'])
+ creator_id, "/~%s/%s/%s" % (user, product, branch)
+ )
+ branch_url = "file://" + os.path.abspath(
+ os.path.join(branch_root, branch_id_to_path(branch_id))
+ )
+ self.push(self.local_branch_path, branch_url, ["--create-prefix"])
return branch_url
@@ -251,26 +250,29 @@ class SmokeTest(WithScenarios, SSHTestCase):
"""Smoke test for repository support."""
scenarios = [
- scenario for scenario in all_repository_format_scenarios()
- if scenario[0] not in {
+ scenario
+ for scenario in all_repository_format_scenarios()
+ if scenario[0]
+ not in {
# RepositoryFormat4 is not initializable (breezy raises
# TestSkipped when you try).
- 'RepositoryFormat4',
+ "RepositoryFormat4",
# Fetching weave formats from the smart server is known to be
# broken. See bug 173807 and breezy.tests.test_repository.
- 'RepositoryFormat5',
- 'RepositoryFormat6',
- 'RepositoryFormat7',
- 'GitRepositoryFormat',
- 'SvnRepositoryFormat',
- }
- and not scenario[0].startswith('RemoteRepositoryFormat')]
+ "RepositoryFormat5",
+ "RepositoryFormat6",
+ "RepositoryFormat7",
+ "GitRepositoryFormat",
+ "SvnRepositoryFormat",
+ }
+ and not scenario[0].startswith("RemoteRepositoryFormat")
+ ]
def setUp(self):
- self.scheme = 'bzr+ssh'
+ self.scheme = "bzr+ssh"
super().setUp()
- self.first_tree = 'first'
- self.second_tree = 'second'
+ self.first_tree = "first"
+ self.second_tree = "second"
def make_branch_specifying_repo_format(self, relpath, repo_format):
bd = self.make_controldir(relpath, format=self.bzrdir_format)
@@ -279,7 +281,8 @@ class SmokeTest(WithScenarios, SSHTestCase):
def make_branch_and_tree(self, relpath):
b = self.make_branch_specifying_repo_format(
- relpath, self.repository_format)
+ relpath, self.repository_format
+ )
return b.controldir.create_workingtree()
def test_smoke(self):
@@ -287,12 +290,12 @@ class SmokeTest(WithScenarios, SSHTestCase):
tree = self.make_branch_and_tree(self.first_tree)
# Push up a new branch.
- remote_url = self.getTransportURL('~testuser/+junk/new-branch')
+ remote_url = self.getTransportURL("~testuser/+junk/new-branch")
self.push(self.first_tree, remote_url)
self.assertBranchesMatch(self.first_tree, remote_url)
# Commit to it.
- tree.commit('new revision', allow_pointless=True)
+ tree.commit("new revision", allow_pointless=True)
# Push it up again.
self.push(self.first_tree, remote_url)
@@ -311,61 +314,71 @@ class AcceptanceTests(WithScenarios, SSHTestCase):
"""
scenarios = [
- ('sftp', {'scheme': 'sftp'}),
- ('bzr+ssh', {'scheme': 'bzr+ssh'}),
- ]
+ ("sftp", {"scheme": "sftp"}),
+ ("bzr+ssh", {"scheme": "bzr+ssh"}),
+ ]
def assertNotBranch(self, url):
"""Assert that there's no branch at 'url'."""
error_line = self._run_bzr_error(
- ['cat-revision', '-r', 'branch:' + url])
+ ["cat-revision", "-r", "branch:" + url]
+ )
self.assertTrue(
- error_line.startswith('brz: ERROR: Not a branch:'),
- 'Expected "Not a branch", found %r' % error_line)
-
- def makeDatabaseBranch(self, owner_name, product_name, branch_name,
- branch_type=BranchType.HOSTED):
+ error_line.startswith("brz: ERROR: Not a branch:"),
+ 'Expected "Not a branch", found %r' % error_line,
+ )
+
+ def makeDatabaseBranch(
+ self,
+ owner_name,
+ product_name,
+ branch_name,
+ branch_type=BranchType.HOSTED,
+ ):
"""Create a new branch in the database."""
owner = Person.selectOneBy(name=owner_name)
- if product_name == '+junk':
+ if product_name == "+junk":
product = None
else:
product = Product.selectOneBy(name=product_name)
if branch_type == BranchType.MIRRORED:
- url = 'http://example.com'
+ url = "http://example.com"
else:
url = None
namespace = get_branch_namespace(owner, product)
return namespace.createBranch(
- branch_type=branch_type, name=branch_name, registrant=owner,
- url=url)
+ branch_type=branch_type,
+ name=branch_name,
+ registrant=owner,
+ url=url,
+ )
def test_push_to_new_branch(self):
- remote_url = self.getTransportURL('~testuser/+junk/test-branch')
+ remote_url = self.getTransportURL("~testuser/+junk/test-branch")
self.push(self.local_branch_path, remote_url)
self.assertBranchesMatch(self.local_branch_path, remote_url)
ZopelessAppServerLayer.txn.begin()
db_branch = getUtility(IBranchSet).getByUniqueName(
- '~testuser/+junk/test-branch')
+ "~testuser/+junk/test-branch"
+ )
self.assertEqual(
- RepositoryFormat.BZR_CHK_2A, db_branch.repository_format)
- self.assertEqual(
- BranchFormat.BZR_BRANCH_7, db_branch.branch_format)
- self.assertEqual(
- ControlFormat.BZR_METADIR_1, db_branch.control_format)
+ RepositoryFormat.BZR_CHK_2A, db_branch.repository_format
+ )
+ self.assertEqual(BranchFormat.BZR_BRANCH_7, db_branch.branch_format)
+ self.assertEqual(ControlFormat.BZR_METADIR_1, db_branch.control_format)
ZopelessAppServerLayer.txn.commit()
def test_push_to_existing_branch(self):
"""Pushing to an existing branch must work."""
# Initial push.
- remote_url = self.getTransportURL('~testuser/+junk/test-branch')
+ remote_url = self.getTransportURL("~testuser/+junk/test-branch")
self.push(self.local_branch_path, remote_url)
remote_revision = self.getLastRevision(remote_url)
self.assertEqual(self.revid, remote_revision)
# Add a single revision to the local branch.
tree = WorkingTree.open(self.local_branch.base)
- tree.commit('Empty commit', rev_id=b'rev2')
+ tree.commit("Empty commit", rev_id=b"rev2")
# Push the new revision.
self.push(self.local_branch_path, remote_url)
self.assertBranchesMatch(self.local_branch_path, remote_url)
@@ -381,48 +394,50 @@ class AcceptanceTests(WithScenarios, SSHTestCase):
everything in this test.
"""
# Push the local branch to the server
- remote_url = self.getTransportURL('~testuser/+junk/test-branch')
+ remote_url = self.getTransportURL("~testuser/+junk/test-branch")
self.push(self.local_branch_path, remote_url)
# Rename owner, product and branch in the database
ZopelessAppServerLayer.txn.begin()
- branch = self.getDatabaseBranch('testuser', None, 'test-branch')
- branch.owner.name = 'renamed-user'
- branch.setTarget(user=branch.owner, project=Product.byName('firefox'))
- branch.name = 'renamed-branch'
+ branch = self.getDatabaseBranch("testuser", None, "test-branch")
+ branch.owner.name = "renamed-user"
+ branch.setTarget(user=branch.owner, project=Product.byName("firefox"))
+ branch.name = "renamed-branch"
ZopelessAppServerLayer.txn.commit()
# Check that it's not at the old location.
self.assertNotBranch(
self.getTransportURL(
- '~testuser/+junk/test-branch', username='renamed-user'))
+ "~testuser/+junk/test-branch", username="renamed-user"
+ )
+ )
# Check that it *is* at the new location.
self.assertBranchesMatch(
self.local_branch_path,
self.getTransportURL(
- '~renamed-user/firefox/renamed-branch',
- username='renamed-user'))
+ "~renamed-user/firefox/renamed-branch", username="renamed-user"
+ ),
+ )
def test_push_team_branch(self):
- remote_url = self.getTransportURL('~testteam/firefox/a-new-branch')
+ remote_url = self.getTransportURL("~testteam/firefox/a-new-branch")
self.push(self.local_branch_path, remote_url)
self.assertBranchesMatch(self.local_branch_path, remote_url)
def test_push_new_branch_creates_branch_in_database(self):
# pushing creates a branch in the database with the correct name and
# last_mirrored_id.
- remote_url = self.getTransportURL(
- '~testuser/+junk/totally-new-branch')
+ remote_url = self.getTransportURL("~testuser/+junk/totally-new-branch")
self.push(self.local_branch_path, remote_url)
ZopelessAppServerLayer.txn.begin()
- branch = self.getDatabaseBranch(
- 'testuser', None, 'totally-new-branch')
+ branch = self.getDatabaseBranch("testuser", None, "totally-new-branch")
self.assertEqual(
- ['~testuser/+junk/totally-new-branch', self.revid],
- [branch.unique_name, branch.last_mirrored_id])
+ ["~testuser/+junk/totally-new-branch", self.revid],
+ [branch.unique_name, branch.last_mirrored_id],
+ )
ZopelessAppServerLayer.txn.abort()
def test_record_default_stacking(self):
@@ -434,21 +449,24 @@ class AcceptanceTests(WithScenarios, SSHTestCase):
ZopelessAppServerLayer.txn.begin()
- self.make_branch_and_tree('stacked-on')
- trunk_unique_name = '~testuser/%s/trunk' % product.name
- self.push('stacked-on', self.getTransportURL(trunk_unique_name))
+ self.make_branch_and_tree("stacked-on")
+ trunk_unique_name = "~testuser/%s/trunk" % product.name
+ self.push("stacked-on", self.getTransportURL(trunk_unique_name))
db_trunk = getUtility(IBranchSet).getByUniqueName(trunk_unique_name)
self.factory.enableDefaultStackingForProduct(
- db_trunk.product, db_trunk)
+ db_trunk.product, db_trunk
+ )
ZopelessAppServerLayer.txn.commit()
- stacked_unique_name = '~testuser/%s/stacked' % product.name
+ stacked_unique_name = "~testuser/%s/stacked" % product.name
self.push(
- self.local_branch_path, self.getTransportURL(stacked_unique_name))
+ self.local_branch_path, self.getTransportURL(stacked_unique_name)
+ )
db_stacked = getUtility(IBranchSet).getByUniqueName(
- stacked_unique_name)
+ stacked_unique_name
+ )
self.assertEqual(db_trunk, db_stacked.stacked_on)
@@ -460,16 +478,18 @@ class AcceptanceTests(WithScenarios, SSHTestCase):
product = self.factory.makeProduct()
ZopelessAppServerLayer.txn.commit()
- self.make_branch_and_tree('stacked-on')
- trunk_unique_name = '~testuser/%s/trunk' % product.name
+ self.make_branch_and_tree("stacked-on")
+ trunk_unique_name = "~testuser/%s/trunk" % product.name
trunk_url = self.getTransportURL(trunk_unique_name)
- self.push('stacked-on', self.getTransportURL(trunk_unique_name))
+ self.push("stacked-on", self.getTransportURL(trunk_unique_name))
- stacked_unique_name = '~testuser/%s/stacked' % product.name
+ stacked_unique_name = "~testuser/%s/stacked" % product.name
stacked_url = self.getTransportURL(stacked_unique_name)
self.push(
- self.local_branch_path, stacked_url,
- extra_args=['--stacked-on', trunk_url])
+ self.local_branch_path,
+ stacked_url,
+ extra_args=["--stacked-on", trunk_url],
+ )
branch_set = getUtility(IBranchSet)
db_trunk = branch_set.getByUniqueName(trunk_unique_name)
@@ -477,53 +497,58 @@ class AcceptanceTests(WithScenarios, SSHTestCase):
self.assertEqual(db_trunk, db_stacked.stacked_on)
- output, error = self._run_bzr(['info', stacked_url])
- actually_stacked_on = re.search('stacked on: (.*)$', output).group(1)
- self.assertEqual('/' + trunk_unique_name, actually_stacked_on)
+ output, error = self._run_bzr(["info", stacked_url])
+ actually_stacked_on = re.search("stacked on: (.*)$", output).group(1)
+ self.assertEqual("/" + trunk_unique_name, actually_stacked_on)
def test_cant_access_private_branch(self):
# Trying to get information about a private branch should fail as if
# the branch doesn't exist.
# 'salgado' is a member of landscape-developers.
- salgado = Person.selectOneBy(name='salgado')
- landscape_dev = Person.selectOneBy(
- name='landscape-developers')
+ salgado = Person.selectOneBy(name="salgado")
+ landscape_dev = Person.selectOneBy(name="landscape-developers")
self.assertTrue(
salgado.inTeam(landscape_dev),
- "salgado should be a member of landscape-developers, but isn't.")
+ "salgado should be a member of landscape-developers, but isn't.",
+ )
# Make a private branch.
branch_url = self.createBazaarBranch(
- 'landscape-developers', 'landscape', 'some-branch',
- creator='salgado')
+ "landscape-developers",
+ "landscape",
+ "some-branch",
+ creator="salgado",
+ )
# Sanity checking that the branch is actually there. We don't care
# about the result, only that the call succeeds.
self.getLastRevision(branch_url)
# Check that testuser can't access the branch.
remote_url = self.getTransportURL(
- '~landscape-developers/landscape/some-branch')
+ "~landscape-developers/landscape/some-branch"
+ )
self.assertNotBranch(remote_url)
def test_push_to_new_full_branch_alias(self):
# We can also push branches to URLs like /+branch/~foo/bar/baz.
- unique_name = '~testuser/firefox/new-branch'
- remote_url = self.getTransportURL('+branch/%s' % unique_name)
+ unique_name = "~testuser/firefox/new-branch"
+ remote_url = self.getTransportURL("+branch/%s" % unique_name)
self.push(self.local_branch_path, remote_url)
self.assertBranchesMatch(self.local_branch_path, remote_url)
self.assertBranchesMatch(
- self.local_branch_path, self.getTransportURL(unique_name))
+ self.local_branch_path, self.getTransportURL(unique_name)
+ )
def test_push_to_new_short_branch_alias(self):
# We can also push branches to URLs like /+branch/firefox
# Hack 'firefox' so we have permission to do this.
ZopelessAppServerLayer.txn.begin()
- firefox = Product.selectOneBy(name='firefox')
- testuser = Person.selectOneBy(name='testuser')
+ firefox = Product.selectOneBy(name="firefox")
+ testuser = Person.selectOneBy(name="testuser")
firefox.development_focus.owner = testuser
ZopelessAppServerLayer.txn.commit()
- remote_url = self.getTransportURL('+branch/firefox')
+ remote_url = self.getTransportURL("+branch/firefox")
self.push(self.local_branch_path, remote_url)
self.assertBranchesMatch(self.local_branch_path, remote_url)
@@ -532,39 +557,47 @@ class AcceptanceTests(WithScenarios, SSHTestCase):
# filesystem, and is writable by the user, then the user is able to
# push to it.
ZopelessAppServerLayer.txn.begin()
- branch = self.makeDatabaseBranch('testuser', 'firefox', 'some-branch')
+ branch = self.makeDatabaseBranch("testuser", "firefox", "some-branch")
remote_url = self.getTransportURL(branch.unique_name)
ZopelessAppServerLayer.txn.commit()
self.push(
- self.local_branch_path, remote_url,
- extra_args=['--use-existing-dir'])
+ self.local_branch_path,
+ remote_url,
+ extra_args=["--use-existing-dir"],
+ )
self.assertBranchesMatch(self.local_branch_path, remote_url)
def test_cant_push_to_existing_mirrored_branch(self):
# Users cannot push to mirrored branches.
ZopelessAppServerLayer.txn.begin()
branch = self.makeDatabaseBranch(
- 'testuser', 'firefox', 'some-branch', BranchType.MIRRORED)
+ "testuser", "firefox", "some-branch", BranchType.MIRRORED
+ )
remote_url = self.getTransportURL(branch.unique_name)
ZopelessAppServerLayer.txn.commit()
self.assertCantPush(
- self.local_branch_path, remote_url,
- ['Permission denied:', 'Transport operation not possible:'])
+ self.local_branch_path,
+ remote_url,
+ ["Permission denied:", "Transport operation not possible:"],
+ )
def test_cant_push_to_existing_unowned_hosted_branch(self):
# Users can only push to hosted branches that they own.
ZopelessAppServerLayer.txn.begin()
- branch = self.makeDatabaseBranch('mark', 'firefox', 'some-branch')
+ branch = self.makeDatabaseBranch("mark", "firefox", "some-branch")
remote_url = self.getTransportURL(branch.unique_name)
ZopelessAppServerLayer.txn.commit()
self.assertCantPush(
- self.local_branch_path, remote_url,
- ['Permission denied:', 'Transport operation not possible:'])
+ self.local_branch_path,
+ remote_url,
+ ["Permission denied:", "Transport operation not possible:"],
+ )
def test_push_new_branch_of_non_existant_source_package_name(self):
ZopelessAppServerLayer.txn.begin()
unique_name = get_non_existant_source_package_branch_unique_name(
- 'testuser', self.factory)
+ "testuser", self.factory
+ )
ZopelessAppServerLayer.txn.commit()
remote_url = self.getTransportURL(unique_name)
self.push(self.local_branch_path, remote_url)
@@ -572,27 +605,27 @@ class AcceptanceTests(WithScenarios, SSHTestCase):
def test_can_push_loom_branch(self):
# We can push and pull a loom branch.
- self.makeLoomBranchAndTree('loom')
- remote_url = self.getTransportURL('~testuser/+junk/loom')
- self.push('loom', remote_url)
- self.assertBranchesMatch('loom', remote_url)
+ self.makeLoomBranchAndTree("loom")
+ remote_url = self.getTransportURL("~testuser/+junk/loom")
+ self.push("loom", remote_url)
+ self.assertBranchesMatch("loom", remote_url)
class SmartserverTests(WithScenarios, SSHTestCase):
"""Acceptance tests for the codehosting smartserver."""
scenarios = [
- ('bzr+ssh', {'scheme': 'bzr+ssh'}),
- ]
+ ("bzr+ssh", {"scheme": "bzr+ssh"}),
+ ]
def makeMirroredBranch(self, person_name, product_name, branch_name):
ro_branch_url = self.createBazaarBranch(
- person_name, product_name, branch_name)
+ person_name, product_name, branch_name
+ )
# Mark as mirrored.
ZopelessAppServerLayer.txn.begin()
- branch = self.getDatabaseBranch(
- person_name, product_name, branch_name)
+ branch = self.getDatabaseBranch(person_name, product_name, branch_name)
branch.branch_type = BranchType.MIRRORED
branch.url = "http://example.com/smartservertest/branch"
ZopelessAppServerLayer.txn.commit()
@@ -600,45 +633,57 @@ class SmartserverTests(WithScenarios, SSHTestCase):
def test_can_read_readonly_branch(self):
# We can get information from a read-only branch.
- ro_branch_url = self.createBazaarBranch(
- 'mark', '+junk', 'ro-branch')
- revision = breezy.branch.Branch.open(
- ro_branch_url).last_revision().decode('UTF-8')
+ ro_branch_url = self.createBazaarBranch("mark", "+junk", "ro-branch")
+ revision = (
+ breezy.branch.Branch.open(ro_branch_url)
+ .last_revision()
+ .decode("UTF-8")
+ )
remote_revision = self.getLastRevision(
- self.getTransportURL('~mark/+junk/ro-branch'))
+ self.getTransportURL("~mark/+junk/ro-branch")
+ )
self.assertEqual(revision, remote_revision)
def test_cant_write_to_readonly_branch(self):
# We can't write to a read-only branch.
- self.createBazaarBranch('mark', '+junk', 'ro-branch')
+ self.createBazaarBranch("mark", "+junk", "ro-branch")
# Create a new revision on the local branch.
tree = WorkingTree.open(self.local_branch.base)
- tree.commit('Empty commit', rev_id=b'rev2')
+ tree.commit("Empty commit", rev_id=b"rev2")
# Push the local branch to the remote url
- remote_url = self.getTransportURL('~mark/+junk/ro-branch')
+ remote_url = self.getTransportURL("~mark/+junk/ro-branch")
self.assertCantPush(self.local_branch_path, remote_url)
def test_can_read_mirrored_branch(self):
# Users should be able to read mirrored branches that they own.
# Added to catch bug 126245.
ro_branch_url = self.makeMirroredBranch(
- 'testuser', 'firefox', 'mirror')
- revision = breezy.branch.Branch.open(
- ro_branch_url).last_revision().decode('UTF-8')
+ "testuser", "firefox", "mirror"
+ )
+ revision = (
+ breezy.branch.Branch.open(ro_branch_url)
+ .last_revision()
+ .decode("UTF-8")
+ )
remote_revision = self.getLastRevision(
- self.getTransportURL('~testuser/firefox/mirror'))
+ self.getTransportURL("~testuser/firefox/mirror")
+ )
self.assertEqual(revision, remote_revision)
def test_can_read_unowned_mirrored_branch(self):
# Users should be able to read mirrored branches even if they don't
# own those branches.
- ro_branch_url = self.makeMirroredBranch('mark', 'firefox', 'mirror')
- revision = breezy.branch.Branch.open(
- ro_branch_url).last_revision().decode('UTF-8')
+ ro_branch_url = self.makeMirroredBranch("mark", "firefox", "mirror")
+ revision = (
+ breezy.branch.Branch.open(ro_branch_url)
+ .last_revision()
+ .decode("UTF-8")
+ )
remote_revision = self.getLastRevision(
- self.getTransportURL('~mark/firefox/mirror'))
+ self.getTransportURL("~mark/firefox/mirror")
+ )
self.assertEqual(revision, remote_revision)
def test_authserver_error_propagation(self):
@@ -646,19 +691,20 @@ class SmartserverTests(WithScenarios, SSHTestCase):
# displayed sensibly by the client. We test this by pushing to a
# product that does not exist (the other error message possibilities
# are covered by unit tests).
- remote_url = self.getTransportURL('~mark/no-such-product/branch')
+ remote_url = self.getTransportURL("~mark/no-such-product/branch")
message = "Project 'no-such-product' does not exist."
last_line = self.assertCantPush(self.local_branch_path, remote_url)
self.assertTrue(
- message in last_line, '%r not in %r' % (message, last_line))
+ message in last_line, "%r not in %r" % (message, last_line)
+ )
def test_web_status_available(self):
# There is an HTTP service that reports whether the SSH server is
# available for new connections.
# Munge the config value in strport format into a URL.
- self.assertEqual('tcp:', config.codehosting.web_status_port[:4])
+ self.assertEqual("tcp:", config.codehosting.web_status_port[:4])
port = int(config.codehosting.web_status_port[4:])
- web_status_url = 'http://localhost:%d/' % port
+ web_status_url = "http://localhost:%d/" % port
urlopen(web_status_url)
diff --git a/lib/lp/codehosting/tests/test_bzrutils.py b/lib/lp/codehosting/tests/test_bzrutils.py
index 33e0dfa..449134a 100644
--- a/lib/lp/codehosting/tests/test_bzrutils.py
+++ b/lib/lp/codehosting/tests/test_bzrutils.py
@@ -6,40 +6,24 @@
import gc
import sys
-from breezy import (
- errors,
- trace,
- )
-from breezy.branch import (
- Branch,
- UnstackableBranchFormat,
- )
+from breezy import errors, trace
+from breezy.branch import Branch, UnstackableBranchFormat
from breezy.bzr.remote import RemoteBranch
from breezy.controldir import format_registry
from breezy.errors import AppendRevisionsOnlyViolation
-from breezy.tests import (
- test_server,
- TestCaseWithTransport,
- TestNotApplicable,
- )
-from breezy.tests.per_branch import (
- branch_scenarios,
- TestCaseWithControlDir,
- )
-from testscenarios import (
- load_tests_apply_scenarios,
- WithScenarios,
- )
+from breezy.tests import TestCaseWithTransport, TestNotApplicable, test_server
+from breezy.tests.per_branch import TestCaseWithControlDir, branch_scenarios
+from testscenarios import WithScenarios, load_tests_apply_scenarios
from lp.codehosting.bzrutils import (
- add_exception_logging_hook,
DenyingServer,
+ add_exception_logging_hook,
get_branch_stacked_on_url,
get_vfs_format_classes,
install_oops_handler,
is_branch_stackable,
remove_exception_logging_hook,
- )
+)
from lp.codehosting.tests.helpers import TestResultWrapper
from lp.testing import TestCase
@@ -48,12 +32,15 @@ class TestGetBranchStackedOnURL(WithScenarios, TestCaseWithControlDir):
"""Tests for get_branch_stacked_on_url()."""
scenarios = [
- scenario for scenario in branch_scenarios()
- if scenario[0] not in {
- 'BranchReferenceFormat',
- 'GitBranchFormat',
- 'SvnBranchFormat',
- }]
+ scenario
+ for scenario in branch_scenarios()
+ if scenario[0]
+ not in {
+ "BranchReferenceFormat",
+ "GitBranchFormat",
+ "SvnBranchFormat",
+ }
+ ]
def __str__(self):
"""Return the test id so that Zope test output shows the format."""
@@ -67,64 +54,70 @@ class TestGetBranchStackedOnURL(WithScenarios, TestCaseWithControlDir):
TestCaseWithControlDir.tearDown(self)
def run(self, result=None):
- """Run the test, with the result wrapped so that it knows about skips.
- """
+ """Run the test, with the result wrapped to know about skips."""
if result is None:
result = self.defaultTestResult()
super().run(TestResultWrapper(result))
def testGetBranchStackedOnUrl(self):
# get_branch_stacked_on_url returns the URL of the stacked-on branch.
- self.make_branch('stacked-on')
- stacked_branch = self.make_branch('stacked')
+ self.make_branch("stacked-on")
+ stacked_branch = self.make_branch("stacked")
try:
- stacked_branch.set_stacked_on_url('../stacked-on')
+ stacked_branch.set_stacked_on_url("../stacked-on")
except UnstackableBranchFormat:
- raise TestNotApplicable('This format does not support stacking.')
+ raise TestNotApplicable("This format does not support stacking.")
# Deleting the stacked-on branch ensures that Bazaar will raise an
# error if it tries to open the stacked-on branch.
- self.get_transport('.').delete_tree('stacked-on')
+ self.get_transport(".").delete_tree("stacked-on")
self.assertEqual(
- '../stacked-on',
- get_branch_stacked_on_url(stacked_branch.controldir))
+ "../stacked-on",
+ get_branch_stacked_on_url(stacked_branch.controldir),
+ )
def testGetBranchStackedOnUrlUnstackable(self):
# get_branch_stacked_on_url raises UnstackableBranchFormat if it's
# called on the bzrdir of a branch that cannot be stacked.
- branch = self.make_branch('source')
+ branch = self.make_branch("source")
try:
branch.get_stacked_on_url()
except errors.NotStacked:
- raise TestNotApplicable('This format supports stacked branches.')
+ raise TestNotApplicable("This format supports stacked branches.")
except UnstackableBranchFormat:
pass
self.assertRaises(
UnstackableBranchFormat,
- get_branch_stacked_on_url, branch.controldir)
+ get_branch_stacked_on_url,
+ branch.controldir,
+ )
def testGetBranchStackedOnUrlNotStacked(self):
# get_branch_stacked_on_url raises NotStacked if it's called on the
# bzrdir of a non-stacked branch.
- branch = self.make_branch('source')
+ branch = self.make_branch("source")
try:
branch.get_stacked_on_url()
except errors.NotStacked:
pass
except UnstackableBranchFormat:
raise TestNotApplicable(
- 'This format does not support stacked branches')
+ "This format does not support stacked branches"
+ )
self.assertRaises(
- errors.NotStacked, get_branch_stacked_on_url, branch.controldir)
+ errors.NotStacked, get_branch_stacked_on_url, branch.controldir
+ )
def testGetBranchStackedOnUrlNoBranch(self):
# get_branch_stacked_on_url raises a NotBranchError if it's called on
# a bzrdir that's not got a branch.
- a_bzrdir = self.make_controldir('source')
+ a_bzrdir = self.make_controldir("source")
if a_bzrdir.has_branch():
raise TestNotApplicable(
- 'This format does not support branchless bzrdirs.')
+ "This format does not support branchless bzrdirs."
+ )
self.assertRaises(
- errors.NotBranchError, get_branch_stacked_on_url, a_bzrdir)
+ errors.NotBranchError, get_branch_stacked_on_url, a_bzrdir
+ )
class TestIsBranchStackable(TestCaseWithTransport):
@@ -133,13 +126,15 @@ class TestIsBranchStackable(TestCaseWithTransport):
def test_packs_unstackable(self):
# The original packs are unstackable.
branch = self.make_branch(
- 'branch', format=format_registry.get("pack-0.92")())
+ "branch", format=format_registry.get("pack-0.92")()
+ )
self.assertFalse(is_branch_stackable(branch))
def test_1_9_stackable(self):
# The original packs are unstackable.
branch = self.make_branch(
- 'branch', format=format_registry.get("1.9")())
+ "branch", format=format_registry.get("1.9")()
+ )
self.assertTrue(is_branch_stackable(branch))
@@ -149,11 +144,12 @@ class TestDenyingServer(TestCaseWithTransport):
def test_denyingServer(self):
# DenyingServer prevents creations of transports for the given URL
# schemes between setUp() and tearDown().
- branch = self.make_branch('branch')
+ branch = self.make_branch("branch")
self.assertTrue(
- branch.base.startswith('file://'),
- "make_branch() didn't make branch with file:// URL")
- file_denier = DenyingServer(['file://'])
+ branch.base.startswith("file://"),
+ "make_branch() didn't make branch with file:// URL",
+ )
+ file_denier = DenyingServer(["file://"])
file_denier.start_server()
self.assertRaises(AssertionError, Branch.open, branch.base)
file_denier.stop_server()
@@ -162,7 +158,6 @@ class TestDenyingServer(TestCaseWithTransport):
class TestExceptionLoggingHooks(TestCase):
-
def logException(self, exception):
"""Log exception with Bazaar's exception logger."""
try:
@@ -180,7 +175,7 @@ class TestExceptionLoggingHooks(TestCase):
add_exception_logging_hook(hook)
self.addCleanup(remove_exception_logging_hook, hook)
- exception = RuntimeError('foo')
+ exception = RuntimeError("foo")
self.logException(exception)
self.assertEqual([(RuntimeError, exception)], exceptions)
@@ -203,13 +198,12 @@ class TestExceptionLoggingHooks(TestCase):
add_exception_logging_hook(hook)
remove_exception_logging_hook(hook)
- self.logException(RuntimeError('foo'))
+ self.logException(RuntimeError("foo"))
self.assertEqual([], exceptions)
class TestGetVfsFormatClasses(TestCaseWithTransport):
- """Tests for `lp.codehosting.bzrutils.get_vfs_format_classes`.
- """
+ """Tests for `lp.codehosting.bzrutils.get_vfs_format_classes`."""
def setUp(self):
super().setUp()
@@ -223,7 +217,7 @@ class TestGetVfsFormatClasses(TestCaseWithTransport):
# get_vfs_format_classes for a returns the underlying format classes
# of the branch, repo and bzrdir, even if the branch is a
# RemoteBranch.
- vfs_branch = self.make_branch('.')
+ vfs_branch = self.make_branch(".")
smart_server = test_server.SmartTCPServer_for_testing()
smart_server.start_server(self.get_vfs_only_server())
self.addCleanup(smart_server.stop_server)
@@ -236,7 +230,8 @@ class TestGetVfsFormatClasses(TestCaseWithTransport):
# information.
self.assertEqual(
get_vfs_format_classes(vfs_branch),
- get_vfs_format_classes(remote_branch))
+ get_vfs_format_classes(remote_branch),
+ )
load_tests = load_tests_apply_scenarios
diff --git a/lib/lp/codehosting/tests/test_format_comparison.py b/lib/lp/codehosting/tests/test_format_comparison.py
index f713b21..86fbb1d 100644
--- a/lib/lp/codehosting/tests/test_format_comparison.py
+++ b/lib/lp/codehosting/tests/test_format_comparison.py
@@ -7,9 +7,9 @@ import unittest
from lp.codehosting.bzrutils import identical_formats
-
# Define a bunch of different fake format classes to pass to identical_formats
+
class BzrDirFormatA:
pass
@@ -36,12 +36,14 @@ class RepoFormatB:
class StubObjectWithFormat:
"""A stub object with a _format attribute, like bzrdir and repositories."""
+
def __init__(self, format):
self._format = format
class StubBranch:
"""A stub branch object that just has formats."""
+
def __init__(self, bzrdir_format, repo_format, branch_format):
self.controldir = StubObjectWithFormat(bzrdir_format)
self.repository = StubObjectWithFormat(repo_format)
@@ -57,7 +59,9 @@ class IdenticalFormatsTestCase(unittest.TestCase):
self.assertTrue(
identical_formats(
StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatA()),
- StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatA())))
+ StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatA()),
+ )
+ )
def testDifferentBzrDirFormats(self):
# identical_formats should return False when both branches have the
@@ -65,7 +69,9 @@ class IdenticalFormatsTestCase(unittest.TestCase):
self.assertFalse(
identical_formats(
StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatA()),
- StubBranch(BzrDirFormatB(), RepoFormatA(), BranchFormatA())))
+ StubBranch(BzrDirFormatB(), RepoFormatA(), BranchFormatA()),
+ )
+ )
def testDifferentRepositoryFormats(self):
# identical_formats should return False when both branches have the
@@ -73,7 +79,9 @@ class IdenticalFormatsTestCase(unittest.TestCase):
self.assertFalse(
identical_formats(
StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatA()),
- StubBranch(BzrDirFormatA(), RepoFormatB(), BranchFormatA())))
+ StubBranch(BzrDirFormatA(), RepoFormatB(), BranchFormatA()),
+ )
+ )
def testDifferentBranchFormats(self):
# identical_formats should return False when both branches have the
@@ -81,4 +89,6 @@ class IdenticalFormatsTestCase(unittest.TestCase):
self.assertFalse(
identical_formats(
StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatA()),
- StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatB())))
+ StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatB()),
+ )
+ )
diff --git a/lib/lp/codehosting/tests/test_lpserve.py b/lib/lp/codehosting/tests/test_lpserve.py
index 7470f31..fa1570f 100644
--- a/lib/lp/codehosting/tests/test_lpserve.py
+++ b/lib/lp/codehosting/tests/test_lpserve.py
@@ -25,7 +25,7 @@ class TestLaunchpadServe(TestCaseWithSubprocess):
def assertFinishedCleanly(self, result):
"""Assert that a server process finished cleanly."""
- self.assertEqual((0, b'', b''), tuple(result))
+ self.assertEqual((0, b"", b""), tuple(result))
def finish_lpserve_subprocess(self, process):
"""Shut down the server process.
@@ -44,7 +44,7 @@ class TestLaunchpadServe(TestCaseWithSubprocess):
process.returncode,
stdout_and_stderr[0],
stdout_and_stderr[1],
- )
+ )
def start_server_inet(self, user_id=None):
"""Start an lp-serve server subprocess.
@@ -62,15 +62,17 @@ class TestLaunchpadServe(TestCaseWithSubprocess):
if user_id is None:
user_id = 1
process = self.start_bzr_subprocess(
- ['lp-serve', '--inet', str(user_id)])
+ ["lp-serve", "--inet", str(user_id)]
+ )
# Connect to the server.
# The transport needs a URL, but we don't have one for our server, so
# we're just going to give it this nearly-arbitrary-yet-well-formed
# one.
- url = 'bzr://localhost/'
+ url = "bzr://localhost/"
client_medium = medium.SmartSimplePipesClientMedium(
- process.stdout, process.stdin, url)
+ process.stdout, process.stdin, url
+ )
transport = remote.RemoteTransport(url, medium=client_medium)
return process, transport
@@ -99,14 +101,18 @@ class TestLaunchpadServe(TestCaseWithSubprocess):
# we need a new way of triggering errors in the smart server.
self.assertRaises(
errors.UnknownErrorFromSmartServer,
- transport.list_dir, 'foo/bar/baz')
+ transport.list_dir,
+ "foo/bar/baz",
+ )
result = self.finish_lpserve_subprocess(process)
self.assertFinishedCleanly(result)
capture.sync()
self.assertEqual(1, len(capture.oopses))
self.assertEqual(
- '[Errno 111] Connection refused', capture.oopses[0]['value'],
- capture.oopses)
+ "[Errno 111] Connection refused",
+ capture.oopses[0]["value"],
+ capture.oopses,
+ )
def test_suite():
diff --git a/lib/lp/codehosting/tests/test_rewrite.py b/lib/lp/codehosting/tests/test_rewrite.py
index 288f1cb..ff82882 100644
--- a/lib/lp/codehosting/tests/test_rewrite.py
+++ b/lib/lp/codehosting/tests/test_rewrite.py
@@ -19,16 +19,13 @@ from lp.services.config import config
from lp.services.log.logger import BufferLogger
from lp.testing import (
FakeTime,
- nonblocking_readline,
- person_logged_in,
TestCase,
TestCaseWithFactory,
- )
+ nonblocking_readline,
+ person_logged_in,
+)
from lp.testing.fixture import PGBouncerFixture
-from lp.testing.layers import (
- DatabaseFunctionalLayer,
- DatabaseLayer,
- )
+from lp.testing.layers import DatabaseFunctionalLayer, DatabaseLayer
class TestBranchRewriter(TestCaseWithFactory):
@@ -52,15 +49,18 @@ class TestBranchRewriter(TestCaseWithFactory):
branches = [
self.factory.makeProductBranch(),
self.factory.makePersonalBranch(),
- self.factory.makePackageBranch()]
+ self.factory.makePackageBranch(),
+ ]
transaction.commit()
output = [
rewriter.rewriteLine("/%s/.bzr/README" % branch.unique_name)
- for branch in branches]
+ for branch in branches
+ ]
expected = [
- 'file:///var/tmp/bazaar.launchpad.test/mirrors/%s/.bzr/README'
+ "file:///var/tmp/bazaar.launchpad.test/mirrors/%s/.bzr/README"
% branch_id_to_path(branch.id)
- for branch in branches]
+ for branch in branches
+ ]
self.assertEqual(expected, output)
def test_rewriteLine_found_not_dot_bzr(self):
@@ -70,14 +70,17 @@ class TestBranchRewriter(TestCaseWithFactory):
branches = [
self.factory.makeProductBranch(),
self.factory.makePersonalBranch(),
- self.factory.makePackageBranch()]
+ self.factory.makePackageBranch(),
+ ]
transaction.commit()
output = [
rewriter.rewriteLine("/%s/changes" % branch.unique_name)
- for branch in branches]
+ for branch in branches
+ ]
expected = [
- 'http://localhost:8080/%s/changes' % branch.unique_name
- for branch in branches]
+ "http://localhost:8080/%s/changes" % branch.unique_name
+ for branch in branches
+ ]
self.assertEqual(expected, output)
def test_rewriteLine_private(self):
@@ -86,17 +89,21 @@ class TestBranchRewriter(TestCaseWithFactory):
# handle them there.
rewriter = self.makeRewriter()
branch = self.factory.makeAnyBranch(
- information_type=InformationType.USERDATA)
+ information_type=InformationType.USERDATA
+ )
unique_name = removeSecurityProxy(branch).unique_name
transaction.commit()
output = [
rewriter.rewriteLine("/%s/changes" % unique_name),
- rewriter.rewriteLine("/%s/.bzr" % unique_name)
- ]
+ rewriter.rewriteLine("/%s/.bzr" % unique_name),
+ ]
self.assertEqual(
- ['http://localhost:8080/%s/changes' % unique_name,
- 'http://localhost:8080/%s/.bzr' % unique_name],
- output)
+ [
+ "http://localhost:8080/%s/changes" % unique_name,
+ "http://localhost:8080/%s/.bzr" % unique_name,
+ ],
+ output,
+ )
def test_rewriteLine_id_alias_found_dot_bzr(self):
# Requests for /+branch-id/$id/.bzr/... are redirected to where the
@@ -105,16 +112,18 @@ class TestBranchRewriter(TestCaseWithFactory):
branches = [
self.factory.makeProductBranch(),
self.factory.makePersonalBranch(),
- self.factory.makePackageBranch()]
+ self.factory.makePackageBranch(),
+ ]
transaction.commit()
output = [
- rewriter.rewriteLine(
- "%s/.bzr/README" % branch_id_alias(branch))
- for branch in branches]
+ rewriter.rewriteLine("%s/.bzr/README" % branch_id_alias(branch))
+ for branch in branches
+ ]
expected = [
- 'file:///var/tmp/bazaar.launchpad.test/mirrors/%s/.bzr/README'
+ "file:///var/tmp/bazaar.launchpad.test/mirrors/%s/.bzr/README"
% branch_id_to_path(branch.id)
- for branch in branches]
+ for branch in branches
+ ]
self.assertEqual(expected, output)
def test_rewriteLine_id_alias_private(self):
@@ -122,14 +131,15 @@ class TestBranchRewriter(TestCaseWithFactory):
# 'NULL'. This is translated by apache to a 404.
rewriter = self.makeRewriter()
branch = self.factory.makeAnyBranch(
- information_type=InformationType.USERDATA)
+ information_type=InformationType.USERDATA
+ )
path = branch_id_alias(removeSecurityProxy(branch))
transaction.commit()
output = [
rewriter.rewriteLine("%s/changes" % path),
- rewriter.rewriteLine("%s/.bzr" % path)
- ]
- self.assertEqual(['NULL', 'NULL'], output)
+ rewriter.rewriteLine("%s/.bzr" % path),
+ ]
+ self.assertEqual(["NULL", "NULL"], output)
def test_rewriteLine_id_alias_logs_cache_hit(self):
# The second request for a branch using the alias hits the cache.
@@ -139,22 +149,23 @@ class TestBranchRewriter(TestCaseWithFactory):
path = "%s/.bzr/README" % branch_id_alias(branch)
rewriter.rewriteLine(path)
rewriter.rewriteLine(path)
- logging_output_lines = self.getLoggerOutput(
- rewriter).strip().split('\n')
+ logging_output_lines = (
+ self.getLoggerOutput(rewriter).strip().split("\n")
+ )
self.assertEqual(2, len(logging_output_lines))
self.assertIsNot(
None,
- re.match("INFO .* -> .* (.*s, cache: HIT)",
- logging_output_lines[-1]),
- "No hit found in %r" % logging_output_lines[-1])
+ re.match(
+ "INFO .* -> .* (.*s, cache: HIT)", logging_output_lines[-1]
+ ),
+ "No hit found in %r" % logging_output_lines[-1],
+ )
def test_rewriteLine_static(self):
# Requests to /static are rewritten to codebrowse urls.
rewriter = self.makeRewriter()
output = rewriter.rewriteLine("/static/foo")
- self.assertEqual(
- 'http://localhost:8080/static/foo',
- output)
+ self.assertEqual("http://localhost:8080/static/foo", output)
def test_rewriteLine_not_found(self):
# If the request does not map to a branch, we redirect it to
@@ -162,91 +173,105 @@ class TestBranchRewriter(TestCaseWithFactory):
rewriter = self.makeRewriter()
not_found_path = "/~nouser/noproduct"
output = rewriter.rewriteLine(not_found_path)
- self.assertEqual(
- 'http://localhost:8080%s' % not_found_path,
- output)
+ self.assertEqual("http://localhost:8080%s" % not_found_path, output)
def test_rewriteLine_logs_cache_miss(self):
# The first request for a branch misses the cache and logs this fact.
rewriter = self.makeRewriter()
branch = self.factory.makeAnyBranch()
transaction.commit()
- rewriter.rewriteLine('/' + branch.unique_name + '/.bzr/README')
+ rewriter.rewriteLine("/" + branch.unique_name + "/.bzr/README")
logging_output = self.getLoggerOutput(rewriter)
self.assertIsNot(
None,
re.match("INFO .* -> .* (.*s, cache: MISS)", logging_output),
- "No miss found in %r" % logging_output)
+ "No miss found in %r" % logging_output,
+ )
def test_rewriteLine_logs_cache_hit(self):
# The second request for a branch misses the cache and logs this fact.
rewriter = self.makeRewriter()
branch = self.factory.makeAnyBranch()
transaction.commit()
- rewriter.rewriteLine('/' + branch.unique_name + '/.bzr/README')
- rewriter.rewriteLine('/' + branch.unique_name + '/.bzr/README')
- logging_output_lines = self.getLoggerOutput(
- rewriter).strip().split('\n')
+ rewriter.rewriteLine("/" + branch.unique_name + "/.bzr/README")
+ rewriter.rewriteLine("/" + branch.unique_name + "/.bzr/README")
+ logging_output_lines = (
+ self.getLoggerOutput(rewriter).strip().split("\n")
+ )
self.assertEqual(2, len(logging_output_lines))
self.assertIsNot(
None,
- re.match("INFO .* -> .* (.*s, cache: HIT)",
- logging_output_lines[-1]),
- "No hit found in %r" % logging_output_lines[-1])
+ re.match(
+ "INFO .* -> .* (.*s, cache: HIT)", logging_output_lines[-1]
+ ),
+ "No hit found in %r" % logging_output_lines[-1],
+ )
def test_rewriteLine_cache_expires(self):
# The second request for a branch misses the cache and logs this fact.
rewriter = self.makeRewriter()
branch = self.factory.makeAnyBranch()
transaction.commit()
- rewriter.rewriteLine('/' + branch.unique_name + '/.bzr/README')
+ rewriter.rewriteLine("/" + branch.unique_name + "/.bzr/README")
self.fake_time.advance(
- config.codehosting.branch_rewrite_cache_lifetime + 1)
- rewriter.rewriteLine('/' + branch.unique_name + '/.bzr/README')
- logging_output_lines = self.getLoggerOutput(
- rewriter).strip().split('\n')
+ config.codehosting.branch_rewrite_cache_lifetime + 1
+ )
+ rewriter.rewriteLine("/" + branch.unique_name + "/.bzr/README")
+ logging_output_lines = (
+ self.getLoggerOutput(rewriter).strip().split("\n")
+ )
self.assertEqual(2, len(logging_output_lines))
self.assertIsNot(
None,
- re.match("INFO .* -> .* (.*s, cache: MISS)",
- logging_output_lines[-1]),
- "No miss found in %r" % logging_output_lines[-1])
+ re.match(
+ "INFO .* -> .* (.*s, cache: MISS)", logging_output_lines[-1]
+ ),
+ "No miss found in %r" % logging_output_lines[-1],
+ )
def test_getBranchIdAndTrailingPath_cached(self):
"""When results come from cache, they should be the same."""
rewriter = self.makeRewriter()
branch = self.factory.makeAnyBranch()
transaction.commit()
- id_path = (branch.id, '/.bzr/README',)
+ id_path = (
+ branch.id,
+ "/.bzr/README",
+ )
result = rewriter._getBranchIdAndTrailingPath(
- '/' + branch.unique_name + '/.bzr/README')
- self.assertEqual(id_path + ('MISS',), result)
+ "/" + branch.unique_name + "/.bzr/README"
+ )
+ self.assertEqual(id_path + ("MISS",), result)
result = rewriter._getBranchIdAndTrailingPath(
- '/' + branch.unique_name + '/.bzr/README')
- self.assertEqual(id_path + ('HIT',), result)
+ "/" + branch.unique_name + "/.bzr/README"
+ )
+ self.assertEqual(id_path + ("HIT",), result)
def test_branch_id_alias_private(self):
# Private branches are not found at all (this is for anonymous access)
owner = self.factory.makePerson()
branch = self.factory.makeAnyBranch(
- owner=owner, information_type=InformationType.USERDATA)
+ owner=owner, information_type=InformationType.USERDATA
+ )
with person_logged_in(owner):
path = branch_id_alias(branch)
result = self.makeRewriter()._getBranchIdAndTrailingPath(path)
- self.assertEqual((None, None, 'MISS'), result)
+ self.assertEqual((None, None, "MISS"), result)
def test_branch_id_alias_transitive_private(self):
# Transitively private branches are not found at all
# (this is for anonymous access)
owner = self.factory.makePerson()
private_branch = self.factory.makeAnyBranch(
- owner=owner, information_type=InformationType.USERDATA)
+ owner=owner, information_type=InformationType.USERDATA
+ )
branch = self.factory.makeAnyBranch(
- stacked_on=private_branch, owner=owner)
+ stacked_on=private_branch, owner=owner
+ )
with person_logged_in(owner):
path = branch_id_alias(branch)
result = self.makeRewriter()._getBranchIdAndTrailingPath(path)
- self.assertEqual((None, None, 'MISS'), result)
+ self.assertEqual((None, None, "MISS"), result)
class TestBranchRewriterScript(TestCaseWithFactory):
@@ -261,34 +286,46 @@ class TestBranchRewriterScript(TestCaseWithFactory):
bs = [
self.factory.makeProductBranch(),
self.factory.makePersonalBranch(),
- self.factory.makePackageBranch()]
+ self.factory.makePackageBranch(),
+ ]
privbranch = removeSecurityProxy(
self.factory.makeProductBranch(
- information_type=InformationType.USERDATA))
+ information_type=InformationType.USERDATA
+ )
+ )
allbs = bs + [privbranch]
- input_lines = (
- ["/%s/.bzr/README" % branch.unique_name for branch in allbs]
- + ["/%s/changes" % branch.unique_name for branch in allbs])
+ input_lines = [
+ "/%s/.bzr/README" % branch.unique_name for branch in allbs
+ ] + ["/%s/changes" % branch.unique_name for branch in allbs]
expected_lines = (
- ['file:///var/tmp/bazaar.launchpad.test/mirrors/%s/.bzr/README'
- % branch_id_to_path(branch.id) for branch in bs]
- + ['http://localhost:8080/%s/.bzr/README' % privbranch.unique_name]
- + ['http://localhost:8080/%s/changes' % b.unique_name for b in bs]
- + ['http://localhost:8080/%s/changes' % privbranch.unique_name])
+ [
+ "file:///var/tmp/bazaar.launchpad.test/mirrors/%s/.bzr/README"
+ % branch_id_to_path(branch.id)
+ for branch in bs
+ ]
+ + ["http://localhost:8080/%s/.bzr/README" % privbranch.unique_name]
+ + ["http://localhost:8080/%s/changes" % b.unique_name for b in bs]
+ + ["http://localhost:8080/%s/changes" % privbranch.unique_name]
+ )
transaction.commit()
- script_file = os.path.join(
- config.root, 'scripts', 'branch-rewrite.py')
+ script_file = os.path.join(config.root, "scripts", "branch-rewrite.py")
proc = subprocess.Popen(
- [script_file], stdin=subprocess.PIPE, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE, bufsize=0, universal_newlines=True)
+ [script_file],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ bufsize=0,
+ universal_newlines=True,
+ )
output_lines = []
# For each complete line of input, the script should, without
# buffering, write a complete line of output.
for input_line in input_lines:
- proc.stdin.write(input_line + '\n')
+ proc.stdin.write(input_line + "\n")
proc.stdin.flush()
output_lines.append(
- nonblocking_readline(proc.stdout, 60).rstrip('\n'))
+ nonblocking_readline(proc.stdout, 60).rstrip("\n")
+ )
# If we create a new branch after the branch-rewrite.py script has
# connected to the database, or edit a branch name that has already
# been rewritten, both are rewritten successfully.
@@ -297,58 +334,64 @@ class TestBranchRewriterScript(TestCaseWithFactory):
edited_branch.name = self.factory.getUniqueString()
transaction.commit()
- new_branch_input = '/%s/.bzr/README' % new_branch.unique_name
+ new_branch_input = "/%s/.bzr/README" % new_branch.unique_name
expected_lines.append(
- 'file:///var/tmp/bazaar.launchpad.test/mirrors/%s/.bzr/README'
- % branch_id_to_path(new_branch.id))
- proc.stdin.write(new_branch_input + '\n')
+ "file:///var/tmp/bazaar.launchpad.test/mirrors/%s/.bzr/README"
+ % branch_id_to_path(new_branch.id)
+ )
+ proc.stdin.write(new_branch_input + "\n")
proc.stdin.flush()
- output_lines.append(
- nonblocking_readline(proc.stdout, 60).rstrip('\n'))
+ output_lines.append(nonblocking_readline(proc.stdout, 60).rstrip("\n"))
- edited_branch_input = '/%s/.bzr/README' % edited_branch.unique_name
+ edited_branch_input = "/%s/.bzr/README" % edited_branch.unique_name
expected_lines.append(
- 'file:///var/tmp/bazaar.launchpad.test/mirrors/%s/.bzr/README'
- % branch_id_to_path(edited_branch.id))
- proc.stdin.write(edited_branch_input + '\n')
+ "file:///var/tmp/bazaar.launchpad.test/mirrors/%s/.bzr/README"
+ % branch_id_to_path(edited_branch.id)
+ )
+ proc.stdin.write(edited_branch_input + "\n")
proc.stdin.flush()
- output_lines.append(
- nonblocking_readline(proc.stdout, 60).rstrip('\n'))
+ output_lines.append(nonblocking_readline(proc.stdout, 60).rstrip("\n"))
os.kill(proc.pid, signal.SIGINT)
err = proc.stderr.read()
# The script produces logging output, but not to stderr.
- self.assertEqual('', err)
+ self.assertEqual("", err)
self.assertEqual(expected_lines, output_lines)
class TestBranchRewriterScriptHandlesDisconnects(TestCase):
"""Ensure branch-rewrite.py survives fastdowntime deploys."""
+
layer = DatabaseLayer
def spawn(self):
- script_file = os.path.join(
- config.root, 'scripts', 'branch-rewrite.py')
+ script_file = os.path.join(config.root, "scripts", "branch-rewrite.py")
self.rewriter_proc = subprocess.Popen(
- [script_file], stdin=subprocess.PIPE, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE, bufsize=0, universal_newlines=True)
+ [script_file],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ bufsize=0,
+ universal_newlines=True,
+ )
self.addCleanup(self.rewriter_proc.terminate)
def request(self, query):
- self.rewriter_proc.stdin.write(query + '\n')
+ self.rewriter_proc.stdin.write(query + "\n")
self.rewriter_proc.stdin.flush()
# 60 second timeout as we might need to wait for the script to
# finish starting up.
result = nonblocking_readline(self.rewriter_proc.stdout, 60)
- if result.endswith('\n'):
+ if result.endswith("\n"):
return result[:-1]
self.fail(
"Incomplete line or no result retrieved from subprocess: %r"
- % result)
+ % result
+ )
def test_reconnects_when_disconnected(self):
pgbouncer = self.useFixture(PGBouncerFixture())
@@ -356,8 +399,8 @@ class TestBranchRewriterScriptHandlesDisconnects(TestCase):
self.spawn()
# Everything should be working, and we get valid output.
- out = self.request('foo')
- self.assertEndsWith(out, '/foo')
+ out = self.request("foo")
+ self.assertEndsWith(out, "/foo")
pgbouncer.stop()
@@ -366,14 +409,14 @@ class TestBranchRewriterScriptHandlesDisconnects(TestCase):
# once to ensure that we will keep trying to reconnect even
# after several failures.
for count in range(5):
- out = self.request('foo')
- self.assertEqual(out, 'NULL')
+ out = self.request("foo")
+ self.assertEqual(out, "NULL")
pgbouncer.start()
# Everything should be working, and we get valid output.
- out = self.request('foo')
- self.assertEndsWith(out, '/foo')
+ out = self.request("foo")
+ self.assertEndsWith(out, "/foo")
def test_starts_with_db_down(self):
pgbouncer = self.useFixture(PGBouncerFixture())
@@ -384,10 +427,10 @@ class TestBranchRewriterScriptHandlesDisconnects(TestCase):
self.spawn()
for count in range(5):
- out = self.request('foo')
- self.assertEqual(out, 'NULL')
+ out = self.request("foo")
+ self.assertEqual(out, "NULL")
pgbouncer.start()
- out = self.request('foo')
- self.assertEndsWith(out, '/foo')
+ out = self.request("foo")
+ self.assertEndsWith(out, "/foo")
diff --git a/lib/lp/codehosting/tests/test_sftp.py b/lib/lp/codehosting/tests/test_sftp.py
index 16df390..21d8247 100644
--- a/lib/lp/codehosting/tests/test_sftp.py
+++ b/lib/lp/codehosting/tests/test_sftp.py
@@ -3,13 +3,11 @@
"""Tests for the transport-backed SFTP server implementation."""
-from contextlib import closing
import os
+from contextlib import closing
-from breezy import (
- errors as bzr_errors,
- urlutils,
- )
+from breezy import errors as bzr_errors
+from breezy import urlutils
from breezy.tests import TestCaseInTempDir
from breezy.transport import get_transport
from breezy.transport.memory import MemoryTransport
@@ -24,14 +22,8 @@ from twisted.internet import defer
from twisted.python import failure
from twisted.python.util import mergeFunctionMetadata
-from lp.codehosting.inmemory import (
- InMemoryFrontend,
- XMLRPCWrapper,
- )
-from lp.codehosting.sftp import (
- FatLocalTransport,
- TransportSFTPServer,
- )
+from lp.codehosting.inmemory import InMemoryFrontend, XMLRPCWrapper
+from lp.codehosting.sftp import FatLocalTransport, TransportSFTPServer
from lp.codehosting.sshserver.daemon import CodehostingAvatar
from lp.services.utils import file_exists
from lp.testing import TestCase
@@ -61,27 +53,25 @@ class AsyncTransport:
class TestFatLocalTransport(TestCaseInTempDir):
-
def setUp(self):
TestCaseInTempDir.setUp(self)
- self.transport = FatLocalTransport(urlutils.local_path_to_url('.'))
+ self.transport = FatLocalTransport(urlutils.local_path_to_url("."))
def test_writeChunk(self):
# writeChunk writes a chunk of data to a file at a given offset.
- filename = 'foo'
- self.transport.put_bytes(filename, b'content')
- self.transport.writeChunk(filename, 1, b'razy')
- self.assertEqual(b'crazynt', self.transport.get_bytes(filename))
+ filename = "foo"
+ self.transport.put_bytes(filename, b"content")
+ self.transport.writeChunk(filename, 1, b"razy")
+ self.assertEqual(b"crazynt", self.transport.get_bytes(filename))
def test_localRealPath(self):
# localRealPath takes a URL-encoded relpath and returns a URL-encoded
# absolute path.
- filename = 'foo bar'
+ filename = "foo bar"
escaped_filename = urlutils.escape(filename)
self.assertNotEqual(filename, escaped_filename)
realpath = self.transport.local_realPath(escaped_filename)
- self.assertEqual(
- urlutils.escape(os.path.abspath(filename)), realpath)
+ self.assertEqual(urlutils.escape(os.path.abspath(filename)), realpath)
def test_clone_with_no_offset(self):
# FatLocalTransport.clone with no arguments returns a new instance of
@@ -98,14 +88,15 @@ class TestFatLocalTransport(TestCaseInTempDir):
transport = self.transport.clone("foo")
self.assertIsNot(self.transport, transport)
self.assertEqual(
- urlutils.join(self.transport.base, "foo").rstrip('/'),
- transport.base.rstrip('/'))
+ urlutils.join(self.transport.base, "foo").rstrip("/"),
+ transport.base.rstrip("/"),
+ )
self.assertIsInstance(transport, FatLocalTransport)
def test_clone_with_absolute_offset(self):
transport = self.transport.clone("/")
self.assertIsNot(self.transport, transport)
- self.assertEqual('file:///', transport.base)
+ self.assertEqual("file:///", transport.base)
self.assertIsInstance(transport, FatLocalTransport)
@@ -118,7 +109,8 @@ class TestSFTPAdapter(TestCase):
frontend = InMemoryFrontend()
self.factory = frontend.getLaunchpadObjectFactory()
self.codehosting_endpoint = XMLRPCWrapper(
- frontend.getCodehostingEndpoint())
+ frontend.getCodehostingEndpoint()
+ )
def makeCodehostingAvatar(self):
user = self.factory.makePerson()
@@ -136,9 +128,11 @@ class TestSFTPAdapter(TestCase):
product = self.factory.makeProduct()
branch_name = self.factory.getUniqueString()
deferred = server.makeDirectory(
- ('~%s/%s/%s' % (
- avatar.username, product.name, branch_name)).encode('UTF-8'),
- {'permissions': 0o777})
+ (
+ "~%s/%s/%s" % (avatar.username, product.name, branch_name)
+ ).encode("UTF-8"),
+ {"permissions": 0o777},
+ )
return deferred
@@ -150,12 +144,12 @@ class SFTPTestMixin:
def checkAttrs(self, attrs, stat_value):
"""Check that an attrs dictionary matches a stat result."""
- self.assertEqual(stat_value.st_size, attrs['size'])
- self.assertEqual(os.getuid(), attrs['uid'])
- self.assertEqual(os.getgid(), attrs['gid'])
- self.assertEqual(stat_value.st_mode, attrs['permissions'])
- self.assertEqual(int(stat_value.st_mtime), attrs['mtime'])
- self.assertEqual(int(stat_value.st_atime), attrs['atime'])
+ self.assertEqual(stat_value.st_size, attrs["size"])
+ self.assertEqual(os.getuid(), attrs["uid"])
+ self.assertEqual(os.getgid(), attrs["gid"])
+ self.assertEqual(stat_value.st_mode, attrs["permissions"])
+ self.assertEqual(int(stat_value.st_mtime), attrs["mtime"])
+ self.assertEqual(int(stat_value.st_atime), attrs["atime"])
def getPathSegment(self):
"""Return a unique path segment for testing.
@@ -164,7 +158,7 @@ class SFTPTestMixin:
exercises the interface between the sftp server and the Bazaar
transport, which expects escaped URL segments.
"""
- return self._factory.getUniqueString('%41%42%43-')
+ return self._factory.getUniqueString("%41%42%43-")
class TestSFTPFile(TestCaseInTempDir, SFTPTestMixin):
@@ -179,19 +173,20 @@ class TestSFTPFile(TestCaseInTempDir, SFTPTestMixin):
TestCaseInTempDir.setUp(self)
SFTPTestMixin.setUp(self)
transport = AsyncTransport(
- FatLocalTransport(urlutils.local_path_to_url('.')))
+ FatLocalTransport(urlutils.local_path_to_url("."))
+ )
self._sftp_server = TransportSFTPServer(transport)
@defer.inlineCallbacks
def assertSFTPError(self, sftp_code, function, *args, **kwargs):
"""Assert that calling functions fails with `sftp_code`."""
with ExpectedException(
- filetransfer.SFTPError,
- MatchesStructure.byEquality(code=sftp_code)):
+ filetransfer.SFTPError, MatchesStructure.byEquality(code=sftp_code)
+ ):
yield function(*args, **kwargs)
def openFile(self, path, flags, attrs):
- return self._sftp_server.openFile(path.encode('UTF-8'), flags, attrs)
+ return self._sftp_server.openFile(path.encode("UTF-8"), flags, attrs)
def test_openFileInNonexistingDirectory(self):
# openFile fails with a no such file error if we try to open a file in
@@ -200,18 +195,24 @@ class TestSFTPFile(TestCaseInTempDir, SFTPTestMixin):
return self.assertSFTPError(
filetransfer.FX_NO_SUCH_FILE,
self.openFile,
- '%s/%s' % (self.getPathSegment(), self.getPathSegment()), 0, {})
+ "%s/%s" % (self.getPathSegment(), self.getPathSegment()),
+ 0,
+ {},
+ )
def test_openFileInNonDirectory(self):
# openFile fails with a no such file error if we try to open a file
# that has another file as one of its "parents". The flags passed to
# openFile() do not have any effect.
nondirectory = self.getPathSegment()
- self.build_tree_contents([(nondirectory, b'content')])
+ self.build_tree_contents([(nondirectory, b"content")])
return self.assertSFTPError(
filetransfer.FX_NO_SUCH_FILE,
self.openFile,
- '%s/%s' % (nondirectory, self.getPathSegment()), 0, {})
+ "%s/%s" % (nondirectory, self.getPathSegment()),
+ 0,
+ {},
+ )
@defer.inlineCallbacks
def test_createEmptyFile(self):
@@ -220,37 +221,39 @@ class TestSFTPFile(TestCaseInTempDir, SFTPTestMixin):
filename = self.getPathSegment()
handle = yield self.openFile(filename, filetransfer.FXF_CREAT, {})
yield handle.close()
- self.assertFileEqual(b'', filename)
+ self.assertFileEqual(b"", filename)
@defer.inlineCallbacks
def test_createFileWithData(self):
# writeChunk writes data to the file.
filename = self.getPathSegment()
handle = yield self.openFile(
- filename, filetransfer.FXF_CREAT | filetransfer.FXF_WRITE, {})
- yield handle.writeChunk(0, b'bar')
+ filename, filetransfer.FXF_CREAT | filetransfer.FXF_WRITE, {}
+ )
+ yield handle.writeChunk(0, b"bar")
yield handle.close()
- self.assertFileEqual(b'bar', filename)
+ self.assertFileEqual(b"bar", filename)
@defer.inlineCallbacks
def test_writeChunkToFile(self):
filename = self.getPathSegment()
- self.build_tree_contents([(filename, b'contents')])
+ self.build_tree_contents([(filename, b"contents")])
handle = yield self.openFile(filename, filetransfer.FXF_WRITE, {})
- yield handle.writeChunk(1, b'qux')
+ yield handle.writeChunk(1, b"qux")
yield handle.close()
- self.assertFileEqual(b'cquxents', filename)
+ self.assertFileEqual(b"cquxents", filename)
@defer.inlineCallbacks
def test_writeTwoChunks(self):
# We can write one chunk after another.
filename = self.getPathSegment()
handle = yield self.openFile(
- filename, filetransfer.FXF_WRITE | filetransfer.FXF_TRUNC, {})
- yield handle.writeChunk(1, b'a')
- yield handle.writeChunk(2, b'a')
+ filename, filetransfer.FXF_WRITE | filetransfer.FXF_TRUNC, {}
+ )
+ yield handle.writeChunk(1, b"a")
+ yield handle.writeChunk(2, b"a")
yield handle.close()
- self.assertFileEqual(b'\0aa', filename)
+ self.assertFileEqual(b"\0aa", filename)
@defer.inlineCallbacks
def test_writeChunkToNonexistentFile(self):
@@ -260,62 +263,70 @@ class TestSFTPFile(TestCaseInTempDir, SFTPTestMixin):
# http://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/
filename = self.getPathSegment()
handle = yield self.openFile(filename, filetransfer.FXF_WRITE, {})
- yield handle.writeChunk(1, b'qux')
+ yield handle.writeChunk(1, b"qux")
yield handle.close()
- self.assertFileEqual(b'\0qux', filename)
+ self.assertFileEqual(b"\0qux", filename)
@defer.inlineCallbacks
def test_writeToReadOpenedFile(self):
# writeChunk raises an error if we try to write to a file that has
# been opened only for reading.
filename = self.getPathSegment()
- self.build_tree_contents([(filename, b'bar')])
+ self.build_tree_contents([(filename, b"bar")])
handle = yield self.openFile(filename, filetransfer.FXF_READ, {})
yield self.assertSFTPError(
filetransfer.FX_PERMISSION_DENIED,
- handle.writeChunk, 0, b'new content')
+ handle.writeChunk,
+ 0,
+ b"new content",
+ )
@defer.inlineCallbacks
def test_overwriteFile(self):
# writeChunk overwrites a file if write, create and trunk flags are
# set.
- self.build_tree_contents([('foo', b'contents')])
+ self.build_tree_contents([("foo", b"contents")])
handle = yield self.openFile(
- 'foo', filetransfer.FXF_CREAT | filetransfer.FXF_TRUNC |
- filetransfer.FXF_WRITE, {})
- yield handle.writeChunk(0, b'bar')
- self.assertFileEqual(b'bar', 'foo')
+ "foo",
+ filetransfer.FXF_CREAT
+ | filetransfer.FXF_TRUNC
+ | filetransfer.FXF_WRITE,
+ {},
+ )
+ yield handle.writeChunk(0, b"bar")
+ self.assertFileEqual(b"bar", "foo")
@defer.inlineCallbacks
def test_writeToAppendingFileIgnoresOffset(self):
# If a file is opened with the 'append' flag, writeChunk ignores its
# offset parameter.
filename = self.getPathSegment()
- self.build_tree_contents([(filename, b'bar')])
+ self.build_tree_contents([(filename, b"bar")])
handle = yield self.openFile(filename, filetransfer.FXF_APPEND, {})
- yield handle.writeChunk(0, b'baz')
- self.assertFileEqual(b'barbaz', filename)
+ yield handle.writeChunk(0, b"baz")
+ self.assertFileEqual(b"barbaz", filename)
@defer.inlineCallbacks
def test_openAndCloseExistingFileLeavesUnchanged(self):
# If we open a file with the 'create' flag and without the 'truncate'
# flag, the file remains unchanged.
filename = self.getPathSegment()
- self.build_tree_contents([(filename, b'bar')])
+ self.build_tree_contents([(filename, b"bar")])
handle = yield self.openFile(filename, filetransfer.FXF_CREAT, {})
yield handle.close()
- self.assertFileEqual(b'bar', filename)
+ self.assertFileEqual(b"bar", filename)
@defer.inlineCallbacks
def test_openAndCloseExistingFileTruncation(self):
# If we open a file with the 'create' flag and the 'truncate' flag,
# the file is reset to empty.
filename = self.getPathSegment()
- self.build_tree_contents([(filename, b'bar')])
+ self.build_tree_contents([(filename, b"bar")])
handle = yield self.openFile(
- filename, filetransfer.FXF_TRUNC | filetransfer.FXF_CREAT, {})
+ filename, filetransfer.FXF_TRUNC | filetransfer.FXF_CREAT, {}
+ )
yield handle.close()
- self.assertFileEqual(b'', filename)
+ self.assertFileEqual(b"", filename)
@defer.inlineCallbacks
def test_writeChunkOnDirectory(self):
@@ -324,36 +335,36 @@ class TestSFTPFile(TestCaseInTempDir, SFTPTestMixin):
os.mkdir(directory)
handle = yield self.openFile(directory, filetransfer.FXF_WRITE, {})
with ExpectedException(filetransfer.SFTPError):
- yield handle.writeChunk(0, b'bar')
+ yield handle.writeChunk(0, b"bar")
@defer.inlineCallbacks
def test_readChunk(self):
# readChunk reads a chunk of data from the file.
filename = self.getPathSegment()
- self.build_tree_contents([(filename, b'bar')])
+ self.build_tree_contents([(filename, b"bar")])
handle = yield self.openFile(filename, 0, {})
chunk = yield handle.readChunk(1, 2)
- self.assertEqual(b'ar', chunk)
+ self.assertEqual(b"ar", chunk)
@defer.inlineCallbacks
def test_readChunkPastEndOfFile(self):
# readChunk returns the rest of the file if it is asked to read past
# the end of the file.
filename = self.getPathSegment()
- self.build_tree_contents([(filename, b'bar')])
+ self.build_tree_contents([(filename, b"bar")])
handle = yield self.openFile(filename, 0, {})
chunk = yield handle.readChunk(2, 10)
- self.assertEqual(b'r', chunk)
+ self.assertEqual(b"r", chunk)
@defer.inlineCallbacks
def test_readChunkEOF(self):
# readChunk returns the empty string if it encounters end-of-file
# before reading any data.
filename = self.getPathSegment()
- self.build_tree_contents([(filename, b'bar')])
+ self.build_tree_contents([(filename, b"bar")])
handle = yield self.openFile(filename, 0, {})
chunk = yield handle.readChunk(3, 10)
- self.assertEqual(b'', chunk)
+ self.assertEqual(b"", chunk)
@defer.inlineCallbacks
def test_readChunkError(self):
@@ -367,7 +378,7 @@ class TestSFTPFile(TestCaseInTempDir, SFTPTestMixin):
def test_setAttrs(self):
# setAttrs on TransportSFTPFile does nothing.
filename = self.getPathSegment()
- self.build_tree_contents([(filename, b'bar')])
+ self.build_tree_contents([(filename, b"bar")])
handle = yield self.openFile(filename, 0, {})
yield handle.setAttrs({})
@@ -376,7 +387,7 @@ class TestSFTPFile(TestCaseInTempDir, SFTPTestMixin):
# getAttrs on TransportSFTPFile returns a dictionary consistent
# with the results of os.stat.
filename = self.getPathSegment()
- self.build_tree_contents([(filename, b'bar')])
+ self.build_tree_contents([(filename, b"bar")])
stat_value = os.stat(filename)
handle = yield self.openFile(filename, 0, {})
attrs = yield handle.getAttrs()
@@ -401,25 +412,27 @@ class TestSFTPServer(TestCaseInTempDir, SFTPTestMixin):
TestCaseInTempDir.setUp(self)
SFTPTestMixin.setUp(self)
transport = AsyncTransport(
- FatLocalTransport(urlutils.local_path_to_url('.')))
+ FatLocalTransport(urlutils.local_path_to_url("."))
+ )
self.sftp_server = TransportSFTPServer(transport)
@defer.inlineCallbacks
def test_serverSetAttrs(self):
# setAttrs on the TransportSFTPServer doesn't do anything either.
filename = self.getPathSegment()
- self.build_tree_contents([(filename, b'bar')])
- yield self.sftp_server.setAttrs(filename.encode('UTF-8'), {})
+ self.build_tree_contents([(filename, b"bar")])
+ yield self.sftp_server.setAttrs(filename.encode("UTF-8"), {})
@defer.inlineCallbacks
def test_serverGetAttrs(self):
# getAttrs on the TransportSFTPServer also returns a dictionary
# consistent with the results of os.stat.
filename = self.getPathSegment()
- self.build_tree_contents([(filename, b'bar')])
+ self.build_tree_contents([(filename, b"bar")])
stat_value = os.stat(filename)
attrs = yield self.sftp_server.getAttrs(
- filename.encode('UTF-8'), False)
+ filename.encode("UTF-8"), False
+ )
self.checkAttrs(attrs, stat_value)
@defer.inlineCallbacks
@@ -429,14 +442,15 @@ class TestSFTPServer(TestCaseInTempDir, SFTPTestMixin):
nonexistent_file = self.getPathSegment()
with ExpectedException(filetransfer.SFTPError):
yield self.sftp_server.getAttrs(
- nonexistent_file.encode('UTF-8'), False)
+ nonexistent_file.encode("UTF-8"), False
+ )
@defer.inlineCallbacks
def test_removeFile(self):
# removeFile removes the file.
filename = self.getPathSegment()
- self.build_tree_contents([(filename, b'bar')])
- yield self.sftp_server.removeFile(filename.encode('UTF-8'))
+ self.build_tree_contents([(filename, b"bar")])
+ yield self.sftp_server.removeFile(filename.encode("UTF-8"))
self.assertFalse(file_exists(filename))
@defer.inlineCallbacks
@@ -444,24 +458,25 @@ class TestSFTPServer(TestCaseInTempDir, SFTPTestMixin):
# Errors in removeFile are translated into SFTPErrors.
filename = self.getPathSegment()
with ExpectedException(filetransfer.SFTPError):
- yield self.sftp_server.removeFile(filename.encode('UTF-8'))
+ yield self.sftp_server.removeFile(filename.encode("UTF-8"))
@defer.inlineCallbacks
def test_removeFile_directory(self):
# Errors in removeFile are translated into SFTPErrors.
filename = self.getPathSegment()
- self.build_tree_contents([(filename + '/',)])
+ self.build_tree_contents([(filename + "/",)])
with ExpectedException(filetransfer.SFTPError):
- yield self.sftp_server.removeFile(filename.encode('UTF-8'))
+ yield self.sftp_server.removeFile(filename.encode("UTF-8"))
@defer.inlineCallbacks
def test_renameFile(self):
# renameFile renames the file.
orig_filename = self.getPathSegment()
new_filename = self.getPathSegment()
- self.build_tree_contents([(orig_filename, b'bar')])
+ self.build_tree_contents([(orig_filename, b"bar")])
yield self.sftp_server.renameFile(
- orig_filename.encode('UTF-8'), new_filename.encode('UTF-8'))
+ orig_filename.encode("UTF-8"), new_filename.encode("UTF-8")
+ )
self.assertFalse(file_exists(orig_filename))
self.assertTrue(file_exists(new_filename))
@@ -472,33 +487,37 @@ class TestSFTPServer(TestCaseInTempDir, SFTPTestMixin):
new_filename = self.getPathSegment()
with ExpectedException(filetransfer.SFTPError):
yield self.sftp_server.renameFile(
- orig_filename.encode('UTF-8'), new_filename.encode('UTF-8'))
+ orig_filename.encode("UTF-8"), new_filename.encode("UTF-8")
+ )
@defer.inlineCallbacks
def test_makeDirectory(self):
# makeDirectory makes the directory.
directory = self.getPathSegment()
yield self.sftp_server.makeDirectory(
- directory.encode('UTF-8'), {'permissions': 0o777})
+ directory.encode("UTF-8"), {"permissions": 0o777}
+ )
self.assertTrue(
- os.path.isdir(directory), '%r is not a directory' % directory)
+ os.path.isdir(directory), "%r is not a directory" % directory
+ )
self.assertEqual(0o40777, os.stat(directory).st_mode)
@defer.inlineCallbacks
def test_makeDirectoryError(self):
# Errors in makeDirectory are translated into SFTPErrors.
nonexistent = self.getPathSegment()
- nonexistent_child = '%s/%s' % (nonexistent, self.getPathSegment())
+ nonexistent_child = "%s/%s" % (nonexistent, self.getPathSegment())
with ExpectedException(filetransfer.SFTPError):
yield self.sftp_server.makeDirectory(
- nonexistent_child.encode('UTF-8'), {'permissions': 0o777})
+ nonexistent_child.encode("UTF-8"), {"permissions": 0o777}
+ )
@defer.inlineCallbacks
def test_removeDirectory(self):
# removeDirectory removes the directory.
directory = self.getPathSegment()
os.mkdir(directory)
- yield self.sftp_server.removeDirectory(directory.encode('UTF-8'))
+ yield self.sftp_server.removeDirectory(directory.encode("UTF-8"))
self.assertFalse(file_exists(directory))
@defer.inlineCallbacks
@@ -506,39 +525,46 @@ class TestSFTPServer(TestCaseInTempDir, SFTPTestMixin):
# Errors in removeDirectory are translated into SFTPErrors.
directory = self.getPathSegment()
with ExpectedException(filetransfer.SFTPError):
- yield self.sftp_server.removeDirectory(directory.encode('UTF-8'))
+ yield self.sftp_server.removeDirectory(directory.encode("UTF-8"))
def test_gotVersion(self):
# gotVersion returns an empty dictionary.
- extended = self.sftp_server.gotVersion('version', {})
+ extended = self.sftp_server.gotVersion("version", {})
self.assertEqual({}, extended)
def test_extendedRequest(self):
# We don't support any extensions.
self.assertRaises(
- NotImplementedError, self.sftp_server.extendedRequest,
- b'foo', b'bar')
+ NotImplementedError,
+ self.sftp_server.extendedRequest,
+ b"foo",
+ b"bar",
+ )
@defer.inlineCallbacks
def test_realPath(self):
# realPath returns the absolute path of the file.
src, dst = self.getPathSegment(), self.getPathSegment()
os.symlink(src, dst)
- path = yield self.sftp_server.realPath(dst.encode('UTF-8'))
- self.assertEqual(os.path.abspath(src).encode('UTF-8'), path)
+ path = yield self.sftp_server.realPath(dst.encode("UTF-8"))
+ self.assertEqual(os.path.abspath(src).encode("UTF-8"), path)
def test_makeLink(self):
# makeLink is not supported.
self.assertRaises(
- NotImplementedError, self.sftp_server.makeLink,
- self.getPathSegment().encode('UTF-8'),
- self.getPathSegment().encode('UTF-8'))
+ NotImplementedError,
+ self.sftp_server.makeLink,
+ self.getPathSegment().encode("UTF-8"),
+ self.getPathSegment().encode("UTF-8"),
+ )
def test_readLink(self):
# readLink is not supported.
self.assertRaises(
- NotImplementedError, self.sftp_server.readLink,
- self.getPathSegment().encode('UTF-8'))
+ NotImplementedError,
+ self.sftp_server.readLink,
+ self.getPathSegment().encode("UTF-8"),
+ )
@defer.inlineCallbacks
def test_openDirectory(self):
@@ -547,25 +573,31 @@ class TestSFTPServer(TestCaseInTempDir, SFTPTestMixin):
parent_dir = self.getPathSegment()
child_dir = self.getPathSegment()
child_file = self.getPathSegment()
- self.build_tree([
- parent_dir + '/',
- '%s/%s/' % (parent_dir, child_dir),
- '%s/%s' % (parent_dir, child_file)])
+ self.build_tree(
+ [
+ parent_dir + "/",
+ "%s/%s/" % (parent_dir, child_dir),
+ "%s/%s" % (parent_dir, child_file),
+ ]
+ )
directory = yield self.sftp_server.openDirectory(
- parent_dir.encode('UTF-8'))
+ parent_dir.encode("UTF-8")
+ )
entries = list(directory)
directory.close()
names = [entry[0] for entry in entries]
self.assertEqual(
- set(names),
- {child_dir.encode('UTF-8'), child_file.encode('UTF-8')})
+ set(names), {child_dir.encode("UTF-8"), child_file.encode("UTF-8")}
+ )
def check_entry(entries, filename):
- t = get_transport('.')
- stat = t.stat(urlutils.escape('%s/%s' % (parent_dir, filename)))
+ t = get_transport(".")
+ stat = t.stat(urlutils.escape("%s/%s" % (parent_dir, filename)))
named_entries = [
- entry for entry in entries
- if entry[0] == filename.encode('UTF-8')]
+ entry
+ for entry in entries
+ if entry[0] == filename.encode("UTF-8")
+ ]
self.assertEqual(1, len(named_entries))
name, longname, attrs = named_entries[0]
self.assertEqual(lsLine(name, stat), longname)
@@ -579,18 +611,18 @@ class TestSFTPServer(TestCaseInTempDir, SFTPTestMixin):
# Errors in openDirectory are translated into SFTPErrors.
nonexistent = self.getPathSegment()
with ExpectedException(filetransfer.SFTPError):
- yield self.sftp_server.openDirectory(nonexistent.encode('UTF-8'))
+ yield self.sftp_server.openDirectory(nonexistent.encode("UTF-8"))
@defer.inlineCallbacks
def test_openDirectoryMemory(self):
"""openDirectory works on MemoryTransport."""
transport = MemoryTransport()
- transport.put_bytes('hello', b'hello')
+ transport.put_bytes("hello", b"hello")
sftp_server = TransportSFTPServer(AsyncTransport(transport))
- directory = yield sftp_server.openDirectory(b'.')
+ directory = yield sftp_server.openDirectory(b".")
with closing(directory):
names = [entry[0] for entry in directory]
- self.assertEqual([b'hello'], names)
+ self.assertEqual([b"hello"], names)
def test__format_directory_entries_with_MemoryStat(self):
"""format_directory_entries works with MemoryStat.
@@ -599,25 +631,38 @@ class TestSFTPServer(TestCaseInTempDir, SFTPTestMixin):
around that.
"""
t = MemoryTransport()
- stat_result = t.stat('.')
+ stat_result = t.stat(".")
entries = self.sftp_server._format_directory_entries(
- [stat_result], ['filename'])
- self.assertEqual(list(entries), [
- (b'filename', 'drwxr-xr-x 0 0 0 0 '
- 'Jan 01 1970 filename',
- {'atime': 0,
- 'gid': 0,
- 'mtime': 0,
- 'permissions': 16877,
- 'size': 0,
- 'uid': 0})])
- self.assertIs(None, getattr(stat_result, 'st_mtime', None))
+ [stat_result], ["filename"]
+ )
+ self.assertEqual(
+ list(entries),
+ [
+ (
+ b"filename",
+ "drwxr-xr-x 0 0 0 0 "
+ "Jan 01 1970 filename",
+ {
+ "atime": 0,
+ "gid": 0,
+ "mtime": 0,
+ "permissions": 16877,
+ "size": 0,
+ "uid": 0,
+ },
+ )
+ ],
+ )
+ self.assertIs(None, getattr(stat_result, "st_mtime", None))
def do_translation_test(self, exception, sftp_code, method_name=None):
"""Test that `exception` is translated into the correct SFTPError."""
- result = self.assertRaises(filetransfer.SFTPError,
+ result = self.assertRaises(
+ filetransfer.SFTPError,
self.sftp_server.translateError,
- failure.Failure(exception), method_name)
+ failure.Failure(exception),
+ method_name,
+ )
self.assertEqual(sftp_code, result.code)
self.assertEqual(str(exception), result.message)
@@ -636,22 +681,26 @@ class TestSFTPServer(TestCaseInTempDir, SFTPTestMixin):
def test_translateFileExists(self):
exception = bzr_errors.FileExists(self.getPathSegment())
self.do_translation_test(
- exception, filetransfer.FX_FILE_ALREADY_EXISTS)
+ exception, filetransfer.FX_FILE_ALREADY_EXISTS
+ )
def test_translateFileIsADirectory(self):
exception = FileIsADirectory(self.getPathSegment())
self.do_translation_test(
- exception, filetransfer.FX_FILE_IS_A_DIRECTORY)
+ exception, filetransfer.FX_FILE_IS_A_DIRECTORY
+ )
def test_translateDirectoryNotEmpty(self):
exception = bzr_errors.DirectoryNotEmpty(self.getPathSegment())
- self.do_translation_test(
- exception, filetransfer.FX_FAILURE)
+ self.do_translation_test(exception, filetransfer.FX_FAILURE)
def test_translateRandomError(self):
# translateError re-raises unrecognized errors.
exception = KeyboardInterrupt()
- result = self.assertRaises(KeyboardInterrupt,
+ result = self.assertRaises(
+ KeyboardInterrupt,
self.sftp_server.translateError,
- failure.Failure(exception), 'methodName')
+ failure.Failure(exception),
+ "methodName",
+ )
self.assertIs(result, exception)
diff --git a/lib/lp/codehosting/tests/test_upgrade.py b/lib/lp/codehosting/tests/test_upgrade.py
index 7f23cd5..f8a2729 100644
--- a/lib/lp/codehosting/tests/test_upgrade.py
+++ b/lib/lp/codehosting/tests/test_upgrade.py
@@ -9,7 +9,7 @@ from breezy.bzr.bzrdir import BzrDir
from breezy.bzr.groupcompress_repo import (
RepositoryFormat2a,
RepositoryFormat2aSubtree,
- )
+)
from breezy.controldir import format_registry
from breezy.plugins.loom.branch import loomify
from breezy.revision import NULL_REVISION
@@ -17,11 +17,11 @@ from breezy.transport import get_transport
from fixtures import TempDir
from lp.code.bzr import (
- branch_changed,
BranchFormat,
- get_branch_formats,
RepositoryFormat,
- )
+ branch_changed,
+ get_branch_formats,
+)
from lp.codehosting.bzrutils import read_locked
from lp.codehosting.tests.helpers import force_stacked_on_url
from lp.codehosting.upgrade import Upgrader
@@ -34,7 +34,7 @@ class TestUpgrader(TestCaseWithFactory):
layer = ZopelessDatabaseLayer
- def prepare(self, format='pack-0.92', loomify_branch=False):
+ def prepare(self, format="pack-0.92", loomify_branch=False):
"""Prepare an upgrade test.
:param format: The branch format to use, as a string.
@@ -43,7 +43,8 @@ class TestUpgrader(TestCaseWithFactory):
self.useBzrBranches(direct_database=True)
branch, tree = self.create_branch_and_tree(format=format)
tree.commit(
- 'foo', rev_id=b'prepare-commit', committer='jrandom@xxxxxxxxxxx')
+ "foo", rev_id=b"prepare-commit", committer="jrandom@xxxxxxxxxxx"
+ )
if loomify_branch:
loomify(tree.branch)
bzr_branch = tree.controldir.open_branch()
@@ -52,8 +53,9 @@ class TestUpgrader(TestCaseWithFactory):
return self.getUpgrader(bzr_branch, branch)
def getTargetDir(self, bzr_branch):
- return self.useFixture(TempDir(
- rootdir=dirname(config.codehosting.mirrored_branches_root))).path
+ return self.useFixture(
+ TempDir(rootdir=dirname(config.codehosting.mirrored_branches_root))
+ ).path
def getUpgrader(self, bzr_branch, branch):
"""Return an upgrader for the specified branches.
@@ -62,8 +64,11 @@ class TestUpgrader(TestCaseWithFactory):
:param branch: The DB branch to use.
"""
return Upgrader(
- branch, self.getTargetDir(bzr_branch), logging.getLogger(),
- bzr_branch)
+ branch,
+ self.getTargetDir(bzr_branch),
+ logging.getLogger(),
+ bzr_branch,
+ )
def addTreeReference(self, tree):
"""Add a tree reference to a tree and commit.
@@ -71,12 +76,17 @@ class TestUpgrader(TestCaseWithFactory):
:param tree: A Bazaar WorkingTree to add a tree to.
"""
sub_branch = BzrDir.create_branch_convenience(
- tree.controldir.root_transport.clone('sub').base)
+ tree.controldir.root_transport.clone("sub").base
+ )
tree.add_reference(sub_branch.controldir.open_workingtree())
- tree.commit('added tree reference', committer='jrandom@xxxxxxxxxxx')
-
- def check_branch(self, upgraded, branch_format=BranchFormat.BZR_BRANCH_7,
- repository_format=RepositoryFormat.BZR_CHK_2A):
+ tree.commit("added tree reference", committer="jrandom@xxxxxxxxxxx")
+
+ def check_branch(
+ self,
+ upgraded,
+ branch_format=BranchFormat.BZR_BRANCH_7,
+ repository_format=RepositoryFormat.BZR_CHK_2A,
+ ):
"""Check that a branch matches expected post-upgrade formats."""
control, branch, repository = get_branch_formats(upgraded)
self.assertEqual(repository, repository_format)
@@ -87,12 +97,11 @@ class TestUpgrader(TestCaseWithFactory):
upgrader = self.prepare()
upgrader.start_upgrade()
upgrader.finish_upgrade()
- self.check_branch(
- upgrader.branch.getBzrBranch())
+ self.check_branch(upgrader.branch.getBzrBranch())
def test_subtree_upgrade(self):
"""Upgrade a pack-0.92-subtree branch."""
- upgrader = self.prepare('pack-0.92-subtree')
+ upgrader = self.prepare("pack-0.92-subtree")
upgrader.start_upgrade()
upgrader.finish_upgrade()
self.check_branch(upgrader.branch.getBzrBranch())
@@ -107,7 +116,7 @@ class TestUpgrader(TestCaseWithFactory):
def test_upgrade_subtree_loom(self):
"""Upgrade a loomified pack-0.92-subtree branch."""
- upgrader = self.prepare('pack-0.92-subtree', loomify_branch=True)
+ upgrader = self.prepare("pack-0.92-subtree", loomify_branch=True)
upgrader.start_upgrade()
upgrader.finish_upgrade()
upgraded = upgrader.branch.getBzrBranch()
@@ -118,23 +127,25 @@ class TestUpgrader(TestCaseWithFactory):
upgrader = self.prepare()
target_format = upgrader.get_target_format()
self.assertIs(
- target_format._repository_format.__class__, RepositoryFormat2a)
+ target_format._repository_format.__class__, RepositoryFormat2a
+ )
def test_subtree_format_repo_format(self):
"""Even subtree formats use 2a if they don't have tree references."""
self.useBzrBranches(direct_database=True)
- format = format_registry.make_controldir('pack-0.92-subtree')
+ format = format_registry.make_controldir("pack-0.92-subtree")
branch, tree = self.create_branch_and_tree(format=format)
upgrader = self.getUpgrader(tree.branch, branch)
with read_locked(upgrader.bzr_branch):
target_format = upgrader.get_target_format()
self.assertIs(
- target_format._repository_format.__class__, RepositoryFormat2a)
+ target_format._repository_format.__class__, RepositoryFormat2a
+ )
def test_tree_reference_repo_format(self):
"""Repos with tree references get 2aSubtree."""
self.useBzrBranches(direct_database=True)
- format = format_registry.make_controldir('pack-0.92-subtree')
+ format = format_registry.make_controldir("pack-0.92-subtree")
branch, tree = self.create_branch_and_tree(format=format)
upgrader = self.getUpgrader(tree.branch, branch)
self.addTreeReference(tree)
@@ -142,25 +153,27 @@ class TestUpgrader(TestCaseWithFactory):
target_format = upgrader.get_target_format()
self.assertIs(
target_format._repository_format.__class__,
- RepositoryFormat2aSubtree)
+ RepositoryFormat2aSubtree,
+ )
def test_add_upgraded_branch_preserves_tip(self):
"""Fetch-based upgrade preserves branch tip."""
- upgrader = self.prepare('pack-0.92-subtree')
+ upgrader = self.prepare("pack-0.92-subtree")
with read_locked(upgrader.bzr_branch):
upgrader.start_upgrade()
upgraded = upgrader.add_upgraded_branch().open_branch()
- self.assertEqual(b'prepare-commit', upgraded.last_revision())
+ self.assertEqual(b"prepare-commit", upgraded.last_revision())
def test_create_upgraded_repository_preserves_dead_heads(self):
"""Fetch-based upgrade preserves heads in the repository."""
- upgrader = self.prepare('pack-0.92-subtree')
+ upgrader = self.prepare("pack-0.92-subtree")
upgrader.bzr_branch.set_last_revision_info(0, NULL_REVISION)
with read_locked(upgrader.bzr_branch):
upgrader.create_upgraded_repository()
upgraded = upgrader.get_bzrdir().open_repository()
self.assertEqual(
- 'foo', upgraded.get_revision(b'prepare-commit').message)
+ "foo", upgraded.get_revision(b"prepare-commit").message
+ )
def test_create_upgraded_repository_uses_target_subdir(self):
"""The repository is created in the right place."""
@@ -171,17 +184,17 @@ class TestUpgrader(TestCaseWithFactory):
def test_add_upgraded_branch_preserves_tags(self):
"""Fetch-based upgrade preserves heads in the repository."""
- upgrader = self.prepare('pack-0.92-subtree')
- upgrader.bzr_branch.tags.set_tag('steve', b'rev-id')
+ upgrader = self.prepare("pack-0.92-subtree")
+ upgrader.bzr_branch.tags.set_tag("steve", b"rev-id")
with read_locked(upgrader.bzr_branch):
upgrader.start_upgrade()
upgraded = upgrader.add_upgraded_branch().open_branch()
- self.assertEqual(b'rev-id', upgraded.tags.lookup_tag('steve'))
+ self.assertEqual(b"rev-id", upgraded.tags.lookup_tag("steve"))
def test_has_tree_references(self):
"""Detects whether repo contains actual tree references."""
self.useBzrBranches(direct_database=True)
- format = format_registry.make_controldir('pack-0.92-subtree')
+ format = format_registry.make_controldir("pack-0.92-subtree")
branch, tree = self.create_branch_and_tree(format=format)
upgrader = self.getUpgrader(tree.branch, branch)
with read_locked(tree.branch.repository):
@@ -193,12 +206,13 @@ class TestUpgrader(TestCaseWithFactory):
def test_use_subtree_format_for_tree_references(self):
"""Subtree references cause RepositoryFormat2aSubtree to be used."""
self.useBzrBranches(direct_database=True)
- format = format_registry.make_controldir('pack-0.92-subtree')
+ format = format_registry.make_controldir("pack-0.92-subtree")
branch, tree = self.create_branch_and_tree(format=format)
sub_branch = BzrDir.create_branch_convenience(
- tree.controldir.root_transport.clone('sub').base, format=format)
+ tree.controldir.root_transport.clone("sub").base, format=format
+ )
tree.add_reference(sub_branch.controldir.open_workingtree())
- tree.commit('added tree reference', committer='jrandom@xxxxxxxxxxx')
+ tree.commit("added tree reference", committer="jrandom@xxxxxxxxxxx")
upgrader = self.getUpgrader(tree.branch, branch)
with read_locked(tree.branch):
upgrader.create_upgraded_repository()
@@ -220,32 +234,34 @@ class TestUpgrader(TestCaseWithFactory):
upgrader.add_upgraded_branch()
upgrader.swap_in()
t = get_transport(upgrader.branch.getInternalBzrUrl())
- t = t.clone('backup.bzr')
+ t = t.clone("backup.bzr")
branch = Branch.open_from_transport(t)
- self.check_branch(branch, BranchFormat.BZR_BRANCH_6,
- RepositoryFormat.BZR_KNITPACK_1)
+ self.check_branch(
+ branch, BranchFormat.BZR_BRANCH_6, RepositoryFormat.BZR_KNITPACK_1
+ )
def test_start_all_upgrades(self):
"""Start all upgrades starts upgrading all branches."""
upgrader = self.prepare()
branch_changed(upgrader.branch, upgrader.bzr_branch)
- Upgrader.start_all_upgrades(
- upgrader.target_dir, upgrader.logger)
+ Upgrader.start_all_upgrades(upgrader.target_dir, upgrader.logger)
upgraded = upgrader.get_bzrdir().open_repository()
self.assertIs(RepositoryFormat2a, upgraded._format.__class__)
self.assertEqual(
- 'foo', upgraded.get_revision(b'prepare-commit').message)
+ "foo", upgraded.get_revision(b"prepare-commit").message
+ )
def test_finish_upgrade_fetches(self):
"""finish_upgrade fetches new changes into the branch."""
upgrader = self.prepare()
upgrader.start_upgrade()
- tree = upgrader.bzr_branch.create_checkout('tree', lightweight=True)
- bar_id = tree.commit('bar', committer='jrandom@xxxxxxxxxxx')
+ tree = upgrader.bzr_branch.create_checkout("tree", lightweight=True)
+ bar_id = tree.commit("bar", committer="jrandom@xxxxxxxxxxx")
upgrader.finish_upgrade()
upgraded = upgrader.branch.getBzrBranch()
self.assertEqual(
- 'bar', upgraded.repository.get_revision(bar_id).message)
+ "bar", upgraded.repository.get_revision(bar_id).message
+ )
def test_finish_upgrade_updates_formats(self):
"""finish_upgrade updates branch and repository formats."""
@@ -253,28 +269,32 @@ class TestUpgrader(TestCaseWithFactory):
upgrader.start_upgrade()
upgrader.finish_upgrade()
self.assertEqual(
- upgrader.branch.branch_format, BranchFormat.BZR_BRANCH_7)
+ upgrader.branch.branch_format, BranchFormat.BZR_BRANCH_7
+ )
self.assertEqual(
- upgrader.branch.repository_format, RepositoryFormat.BZR_CHK_2A)
+ upgrader.branch.repository_format, RepositoryFormat.BZR_CHK_2A
+ )
def test_finish_all_upgrades(self):
"""Finish all upgrades behaves as expected."""
upgrader = self.prepare()
branch_changed(upgrader.branch, upgrader.bzr_branch)
upgrader.start_upgrade()
- Upgrader.finish_all_upgrades(
- upgrader.target_dir, upgrader.logger)
+ Upgrader.finish_all_upgrades(upgrader.target_dir, upgrader.logger)
upgraded = upgrader.branch.getBzrBranch()
- self.assertIs(RepositoryFormat2a,
- upgraded.repository._format.__class__)
+ self.assertIs(
+ RepositoryFormat2a, upgraded.repository._format.__class__
+ )
self.assertEqual(
- 'foo', upgraded.repository.get_revision(b'prepare-commit').message)
+ "foo", upgraded.repository.get_revision(b"prepare-commit").message
+ )
def test_invalid_stacking(self):
"""Upgrade tolerates branches stacked on different-format branches."""
self.useBzrBranches(direct_database=True)
- target, target_tree = self.create_branch_and_tree(format='1.6')
- trunk, trunk_tree = self.create_branch_and_tree(format='2a')
+ target, target_tree = self.create_branch_and_tree(format="1.6")
+ trunk, trunk_tree = self.create_branch_and_tree(format="2a")
force_stacked_on_url(target_tree.branch, trunk_tree.branch.base)
- Upgrader(target, self.getTargetDir(target_tree.branch),
- logging.getLogger())
+ Upgrader(
+ target, self.getTargetDir(target_tree.branch), logging.getLogger()
+ )
diff --git a/lib/lp/codehosting/upgrade.py b/lib/lp/codehosting/upgrade.py
index d558b87..1bffc40 100755
--- a/lib/lp/codehosting/upgrade.py
+++ b/lib/lp/codehosting/upgrade.py
@@ -10,7 +10,7 @@ Repositories that have no tree references are always upgraded to the standard
actually have tree references are converted to RepositoryFormat2aSubtree.
"""
-__all__ = ['Upgrader']
+__all__ = ["Upgrader"]
import os
from shutil import rmtree
@@ -20,20 +20,11 @@ from breezy.bzr.bzrdir import BzrDir
from breezy.bzr.groupcompress_repo import RepositoryFormat2aSubtree
from breezy.controldir import format_registry
from breezy.errors import UpToDateFormat
-from breezy.plugins.loom import (
- NotALoom,
- require_loom_branch,
- )
+from breezy.plugins.loom import NotALoom, require_loom_branch
from breezy.upgrade import upgrade
-from breezy.url_policy_open import (
- BranchOpener,
- SingleSchemePolicy,
- )
-
-from lp.code.bzr import (
- branch_changed,
- RepositoryFormat,
- )
+from breezy.url_policy_open import BranchOpener, SingleSchemePolicy
+
+from lp.code.bzr import RepositoryFormat, branch_changed
from lp.code.model.branch import Branch
from lp.codehosting.bzrutils import read_locked
from lp.codehosting.vfs.branchfs import get_real_branch_path
@@ -51,12 +42,12 @@ class Upgrader:
self.branch = branch
self.bzr_branch = bzr_branch
if self.bzr_branch is None:
- opener = BranchOpener(SingleSchemePolicy('lp-internal'))
+ opener = BranchOpener(SingleSchemePolicy("lp-internal"))
self.bzr_branch = opener.open(
- self.branch.getInternalBzrUrl(), ignore_fallbacks=True)
+ self.branch.getInternalBzrUrl(), ignore_fallbacks=True
+ )
self.target_dir = target_dir
- self.target_subdir = os.path.join(
- self.target_dir, str(self.branch.id))
+ self.target_subdir = os.path.join(self.target_dir, str(self.branch.id))
self.logger = logger
def get_bzrdir(self):
@@ -71,7 +62,7 @@ class Upgrader:
:param branch: The bzr branch to upgrade
:return: A Metadir format instance.
"""
- format = format_registry.make_controldir('2a')
+ format = format_registry.make_controldir("2a")
try:
require_loom_branch(self.bzr_branch)
except NotALoom:
@@ -79,8 +70,10 @@ class Upgrader:
else:
format._branch_format = self.bzr_branch._format
if getattr(
- self.bzr_branch.repository._format, 'supports_tree_reference',
- False):
+ self.bzr_branch.repository._format,
+ "supports_tree_reference",
+ False,
+ ):
if self.has_tree_references():
format._repository_format = RepositoryFormat2aSubtree()
return format
@@ -90,12 +83,13 @@ class Upgrader:
"""Iterate through Upgraders given a target and logger."""
store = IStore(Branch)
branches = store.find(
- Branch, Branch.repository_format != RepositoryFormat.BZR_CHK_2A)
+ Branch, Branch.repository_format != RepositoryFormat.BZR_CHK_2A
+ )
branches.order_by(Branch.unique_name)
for branch in branches:
logger.info(
- 'Upgrading branch %s (%d)', branch.unique_name,
- branch.id)
+ "Upgrading branch %s (%d)", branch.unique_name, branch.id
+ )
yield cls(branch, target_dir, logger)
@classmethod
@@ -111,7 +105,7 @@ class Upgrader:
upgrader.start_upgrade()
except AlreadyUpgraded:
skipped += 1
- logger.info('Skipped %d already-upgraded branches.', skipped)
+ logger.info("Skipped %d already-upgraded branches.", skipped)
@classmethod
def finish_all_upgrades(cls, target_dir, logger):
@@ -142,7 +136,8 @@ class Upgrader:
self.mirror_branch(self.bzr_branch, bd)
try:
exceptions = upgrade(
- bd.root_transport.base, self.get_target_format())
+ bd.root_transport.base, self.get_target_format()
+ )
if exceptions:
if len(exceptions) == 1:
# Compatibility with historical behaviour
@@ -166,7 +161,7 @@ class Upgrader:
:param upgrade_dir: The directory to create the repository in.
:return: The created repository.
"""
- self.logger.info('Converting repository with fetch.')
+ self.logger.info("Converting repository with fetch.")
upgrade_dir = mkdtemp(dir=self.target_dir)
try:
bzrdir = BzrDir.create(upgrade_dir, self.get_target_format())
@@ -181,7 +176,7 @@ class Upgrader:
def swap_in(self):
"""Swap the upgraded branch into place."""
real_location = get_real_branch_path(self.branch.id)
- backup_dir = os.path.join(self.target_subdir, 'backup.bzr')
+ backup_dir = os.path.join(self.target_subdir, "backup.bzr")
os.rename(real_location, backup_dir)
os.rename(self.target_subdir, real_location)
@@ -194,7 +189,7 @@ class Upgrader:
revision_ids = repo.all_revision_ids()
for tree in repo.revision_trees(revision_ids):
for path, entry in tree.iter_entries_by_dir():
- if entry.kind == 'tree-reference':
+ if entry.kind == "tree-reference":
return True
return False
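
As reformatted above, upgrade.py drives branch upgrades in two phases through the Upgrader classmethods. The following is a minimal sketch of that flow, not part of the proposed diff; it assumes a configured Launchpad environment with database access, and the staging directory and logger name are hypothetical placeholders.

import logging

from lp.codehosting.upgrade import Upgrader

logger = logging.getLogger("upgrade-branches")  # hypothetical logger name
target_dir = "/srv/branch-upgrades"  # hypothetical staging directory

# Phase 1: for every branch whose repository is not yet in 2a format, copy it
# into target_dir and start converting it there; branches that are already
# upgraded are counted and skipped.
Upgrader.start_all_upgrades(target_dir, logger)

# Phase 2: fetch any revisions pushed since phase 1, swap the converted
# repository into place, and record the new branch/repository formats.
Upgrader.finish_all_upgrades(target_dir, logger)
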
diff --git a/lib/lp/codehosting/vfs/__init__.py b/lib/lp/codehosting/vfs/__init__.py
index fd82790..32bb4ed 100644
--- a/lib/lp/codehosting/vfs/__init__.py
+++ b/lib/lp/codehosting/vfs/__init__.py
@@ -4,21 +4,21 @@
"""A virtual filesystem for hosting Bazaar branches."""
__all__ = [
- 'AsyncLaunchpadTransport',
- 'branch_id_to_path',
- 'BranchFileSystemClient',
- 'get_lp_server',
- 'get_ro_server',
- 'get_rw_server',
- 'LaunchpadServer',
- ]
+ "AsyncLaunchpadTransport",
+ "branch_id_to_path",
+ "BranchFileSystemClient",
+ "get_lp_server",
+ "get_ro_server",
+ "get_rw_server",
+ "LaunchpadServer",
+]
from lp.codehosting.vfs.branchfs import (
AsyncLaunchpadTransport,
+ LaunchpadServer,
branch_id_to_path,
get_lp_server,
get_ro_server,
get_rw_server,
- LaunchpadServer,
- )
+)
from lp.codehosting.vfs.branchfsclient import BranchFileSystemClient
diff --git a/lib/lp/codehosting/vfs/branchfs.py b/lib/lp/codehosting/vfs/branchfs.py
index 65e5494..a36597a 100644
--- a/lib/lp/codehosting/vfs/branchfs.py
+++ b/lib/lp/codehosting/vfs/branchfs.py
@@ -41,89 +41,70 @@ branch if appropriate.
"""
__all__ = [
- 'AsyncLaunchpadTransport',
- 'branch_id_to_path',
- 'DirectDatabaseLaunchpadServer',
- 'get_lp_server',
- 'get_real_branch_path',
- 'get_ro_server',
- 'get_rw_server',
- 'LaunchpadInternalServer',
- 'LaunchpadServer',
- ]
+ "AsyncLaunchpadTransport",
+ "branch_id_to_path",
+ "DirectDatabaseLaunchpadServer",
+ "get_lp_server",
+ "get_real_branch_path",
+ "get_ro_server",
+ "get_rw_server",
+ "LaunchpadInternalServer",
+ "LaunchpadServer",
+]
import os.path
import sys
import xmlrpc.client
+import six
from breezy import urlutils
from breezy.bzr.bzrdir import BzrDir
from breezy.bzr.smart.request import jail_info
from breezy.config import TransportConfig
from breezy.controldir import ControlDirFormat
-from breezy.errors import (
- NoSuchFile,
- PermissionDenied,
- TransportNotPossible,
- )
+from breezy.errors import NoSuchFile, PermissionDenied, TransportNotPossible
from breezy.transport import get_transport
from breezy.transport.memory import MemoryServer
from lazr.uri import URI
-import six
-from twisted.internet import (
- defer,
- error,
- )
-from twisted.python import (
- failure,
- log,
- )
+from twisted.internet import defer, error
+from twisted.python import failure, log
from zope.component import getUtility
-from zope.interface import (
- implementer,
- Interface,
- )
+from zope.interface import Interface, implementer
from lp.code.interfaces.branchlookup import IBranchLookup
from lp.code.interfaces.codehosting import (
BRANCH_TRANSPORT,
CONTROL_TRANSPORT,
LAUNCHPAD_SERVICES,
- )
-from lp.codehosting.bzrutils import (
- get_branch_info,
- get_stacked_on_url,
- )
+)
+from lp.codehosting.bzrutils import get_branch_info, get_stacked_on_url
from lp.codehosting.vfs.branchfsclient import BranchFileSystemClient
from lp.codehosting.vfs.transport import (
AsyncVirtualServer,
AsyncVirtualTransport,
+ TranslationError,
get_chrooted_transport,
get_readonly_transport,
- TranslationError,
- )
+)
from lp.services.config import config
from lp.services.twistedsupport import no_traceback_failures
-from lp.services.twistedsupport.xmlrpc import (
- DeferredBlockingProxy,
- trap_fault,
- )
+from lp.services.twistedsupport.xmlrpc import DeferredBlockingProxy, trap_fault
from lp.services.webapp import errorlog
from lp.xmlrpc import faults
-
# The directories allowed directly beneath a branch directory. These are the
# directories that Bazaar creates as part of regular operation. We support
# only two numbered backups to avoid indefinite space usage.
ALLOWED_DIRECTORIES = (
- '.bzr',
- '.bzr.backup',
- 'backup.bzr',
- 'backup.bzr.~1~',
- 'backup.bzr.~2~',
- )
+ ".bzr",
+ ".bzr.backup",
+ "backup.bzr",
+ "backup.bzr.~1~",
+ "backup.bzr.~2~",
+)
FORBIDDEN_DIRECTORY_ERROR = (
- "Cannot create '%s'. Only Bazaar branches are allowed.")
+ "Cannot create '%s'. Only Bazaar branches are allowed."
+)
class NotABranchPath(TranslationError):
@@ -133,8 +114,9 @@ class NotABranchPath(TranslationError):
the path itself.
"""
- _fmt = ("Could not translate %(virtual_url_fragment)r to branch. "
- "%(reason)s")
+ _fmt = (
+ "Could not translate %(virtual_url_fragment)r to branch. " "%(reason)s"
+ )
class UnknownTransportType(Exception):
@@ -151,7 +133,7 @@ def branch_id_to_path(branch_id):
to determine the splitting.
"""
h = "%08x" % int(branch_id)
- return '%s/%s/%s/%s' % (h[:2], h[2:4], h[4:6], h[6:])
+ return "%s/%s/%s/%s" % (h[:2], h[2:4], h[4:6], h[6:])
def get_path_segments(path, maximum_segments=-1):
@@ -160,12 +142,12 @@ def get_path_segments(path, maximum_segments=-1):
If 'path' ends with a trailing slash, then the final empty segment is
ignored.
"""
- return path.strip('/').split('/', maximum_segments)
+ return path.strip("/").split("/", maximum_segments)
def is_lock_directory(absolute_path):
"""Is 'absolute_path' a Bazaar branch lock directory?"""
- return absolute_path.endswith('/.bzr/branch/lock/held')
+ return absolute_path.endswith("/.bzr/branch/lock/held")
def get_ro_server():
@@ -173,9 +155,11 @@ def get_ro_server():
proxy = xmlrpc.client.ServerProxy(config.codehosting.codehosting_endpoint)
codehosting_endpoint = DeferredBlockingProxy(proxy)
branch_transport = get_readonly_transport(
- get_transport(config.codehosting.internal_branch_by_id_root))
+ get_transport(config.codehosting.internal_branch_by_id_root)
+ )
return LaunchpadInternalServer(
- 'lp-internal:///', codehosting_endpoint, branch_transport)
+ "lp-internal:///", codehosting_endpoint, branch_transport
+ )
def get_rw_server(direct_database=False):
@@ -189,15 +173,18 @@ def get_rw_server(direct_database=False):
implementation that talks to the internal XML-RPC server.
"""
transport = get_chrooted_transport(
- config.codehosting.mirrored_branches_root, mkdir=True)
+ config.codehosting.mirrored_branches_root, mkdir=True
+ )
if direct_database:
- return DirectDatabaseLaunchpadServer('lp-internal:///', transport)
+ return DirectDatabaseLaunchpadServer("lp-internal:///", transport)
else:
proxy = xmlrpc.client.ServerProxy(
- config.codehosting.codehosting_endpoint)
+ config.codehosting.codehosting_endpoint
+ )
codehosting_endpoint = DeferredBlockingProxy(proxy)
return LaunchpadInternalServer(
- 'lp-internal:///', codehosting_endpoint, transport)
+ "lp-internal:///", codehosting_endpoint, transport
+ )
def get_real_branch_path(branch_id):
@@ -250,12 +237,11 @@ class BranchTransportDispatch:
:raise PermissionDenied: if `path_on_branch` is forbidden.
"""
- if path_on_branch == '':
+ if path_on_branch == "":
return
segments = get_path_segments(path_on_branch)
if segments[0] not in ALLOWED_DIRECTORIES:
- raise PermissionDenied(
- FORBIDDEN_DIRECTORY_ERROR % (segments[0],))
+ raise PermissionDenied(FORBIDDEN_DIRECTORY_ERROR % (segments[0],))
def makeTransport(self, transport_tuple):
"""See `ITransportDispatch`.
@@ -267,7 +253,7 @@ class BranchTransportDispatch:
if transport_type != BRANCH_TRANSPORT:
raise UnknownTransportType(transport_type)
self._checkPath(trailing_path)
- transport = self.base_transport.clone(branch_id_to_path(data['id']))
+ transport = self.base_transport.clone(branch_id_to_path(data["id"]))
try:
transport.create_prefix()
except TransportNotPossible:
@@ -291,26 +277,29 @@ class TransportDispatch:
def __init__(self, rw_transport):
self._rw_dispatch = BranchTransportDispatch(rw_transport)
self._ro_dispatch = BranchTransportDispatch(
- get_readonly_transport(rw_transport))
+ get_readonly_transport(rw_transport)
+ )
self._transport_factories = {
BRANCH_TRANSPORT: self._makeBranchTransport,
CONTROL_TRANSPORT: self._makeControlTransport,
- }
+ }
def makeTransport(self, transport_tuple):
transport_type, data, trailing_path = transport_tuple
factory = self._transport_factories[transport_type]
- data['trailing_path'] = trailing_path
+ data["trailing_path"] = trailing_path
return factory(**data), trailing_path
- def _makeBranchTransport(self, id, writable, trailing_path='',
- private=False):
+ def _makeBranchTransport(
+ self, id, writable, trailing_path="", private=False
+ ):
if writable:
dispatch = self._rw_dispatch
else:
dispatch = self._ro_dispatch
transport, ignored = dispatch.makeTransport(
- (BRANCH_TRANSPORT, dict(id=id), trailing_path))
+ (BRANCH_TRANSPORT, dict(id=id), trailing_path)
+ )
return transport
def _makeControlTransport(self, default_stack_on, trailing_path=None):
@@ -329,12 +318,13 @@ class TransportDispatch:
memory_server = MemoryServer()
memory_server.start_server()
transport = get_transport(memory_server.get_url())
- if default_stack_on == '':
+ if default_stack_on == "":
return transport
format = ControlDirFormat.get_default_format()
bzrdir = format.initialize_on_transport(transport)
bzrdir.get_config().set_default_stack_on(
- urlutils.unescape(default_stack_on))
+ urlutils.unescape(default_stack_on)
+ )
return get_readonly_transport(transport)
@@ -355,8 +345,9 @@ class _BaseLaunchpadServer(AsyncVirtualServer):
path on that transport.
"""
- def __init__(self, scheme, codehosting_api, user_id,
- seen_new_branch_hook=None):
+ def __init__(
+ self, scheme, codehosting_api, user_id, seen_new_branch_hook=None
+ ):
"""Construct a LaunchpadServer.
:param scheme: The URL scheme to use.
@@ -368,8 +359,8 @@ class _BaseLaunchpadServer(AsyncVirtualServer):
"""
AsyncVirtualServer.__init__(self, scheme)
self._branchfs_client = BranchFileSystemClient(
- codehosting_api, user_id,
- seen_new_branch_hook=seen_new_branch_hook)
+ codehosting_api, user_id, seen_new_branch_hook=seen_new_branch_hook
+ )
self._is_start_server = False
def translateVirtualPath(self, virtual_url_fragment):
@@ -380,11 +371,13 @@ class _BaseLaunchpadServer(AsyncVirtualServer):
result into a transport and trailing path.
"""
deferred = self._branchfs_client.translatePath(
- '/' + virtual_url_fragment)
+ "/" + virtual_url_fragment
+ )
def path_not_translated(fail):
trap_fault(
- fail, faults.PathTranslationError, faults.PermissionDenied)
+ fail, faults.PathTranslationError, faults.PermissionDenied
+ )
return failure.Failure(NoSuchFile(virtual_url_fragment))
def unknown_transport_type(fail):
@@ -393,7 +386,8 @@ class _BaseLaunchpadServer(AsyncVirtualServer):
deferred.addCallbacks(
no_traceback_failures(self._transport_dispatch.makeTransport),
- path_not_translated)
+ path_not_translated,
+ )
deferred.addErrback(unknown_transport_type)
return deferred
@@ -432,12 +426,11 @@ class LaunchpadInternalServer(_BaseLaunchpadServer):
def destroy(self):
"""Delete the on-disk branches and tear down."""
- self._transport_dispatch.base_transport.delete_tree('.')
+ self._transport_dispatch.base_transport.delete_tree(".")
self.stop_server()
class DirectDatabaseLaunchpadServer(AsyncVirtualServer):
-
def __init__(self, scheme, branch_transport):
AsyncVirtualServer.__init__(self, scheme)
self._transport_dispatch = BranchTransportDispatch(branch_transport)
@@ -451,7 +444,7 @@ class DirectDatabaseLaunchpadServer(AsyncVirtualServer):
def destroy(self):
"""Delete the on-disk branches and tear down."""
- self._transport_dispatch.base_transport.delete_tree('.')
+ self._transport_dispatch.base_transport.delete_tree(".")
self.stop_server()
def translateVirtualPath(self, virtual_url_fragment):
@@ -461,7 +454,9 @@ class DirectDatabaseLaunchpadServer(AsyncVirtualServer):
"""
deferred = defer.succeed(
getUtility(IBranchLookup).getByHostingPath(
- virtual_url_fragment.lstrip('/')))
+ virtual_url_fragment.lstrip("/")
+ )
+ )
@no_traceback_failures
def process_result(result):
@@ -470,7 +465,8 @@ class DirectDatabaseLaunchpadServer(AsyncVirtualServer):
raise NoSuchFile(virtual_url_fragment)
else:
return self._transport_dispatch.makeTransport(
- (BRANCH_TRANSPORT, dict(id=branch.id), trailing[1:]))
+ (BRANCH_TRANSPORT, dict(id=branch.id), trailing[1:])
+ )
deferred.addCallback(process_result)
return deferred
@@ -494,7 +490,8 @@ class AsyncLaunchpadTransport(AsyncVirtualTransport):
# directory that has too little information to be translated into a
# Launchpad branch.
deferred = AsyncVirtualTransport._getUnderlyingTransportAndPath(
- self, relpath)
+ self, relpath
+ )
@no_traceback_failures
def maybe_make_branch_in_db(failure):
@@ -505,7 +502,7 @@ class AsyncLaunchpadTransport(AsyncVirtualTransport):
@no_traceback_failures
def real_mkdir(result):
transport, path = result
- return getattr(transport, 'mkdir')(path, mode)
+ return getattr(transport, "mkdir")(path, mode)
deferred.addCallback(real_mkdir)
deferred.addErrback(maybe_make_branch_in_db)
@@ -522,20 +519,24 @@ class AsyncLaunchpadTransport(AsyncVirtualTransport):
deferred = deferred.addCallback(
no_traceback_failures(
lambda ignored: AsyncVirtualTransport.rename(
- self, rel_from, rel_to)))
+ self, rel_from, rel_to
+ )
+ )
+ )
return deferred
def rmdir(self, relpath):
# We hook into rmdir in order to prevent users from deleting branches,
# products and people from the VFS.
virtual_url_fragment = self._abspath(relpath)
- path_segments = virtual_url_fragment.lstrip('/').split('/')
+ path_segments = virtual_url_fragment.lstrip("/").split("/")
# XXX: JonathanLange 2008-11-19 bug=300551: This code assumes stuff
# about the VFS! We need to figure out the best way to delegate the
# decision about permission-to-delete to the XML-RPC server.
if len(path_segments) <= 3:
return defer.fail(
- failure.Failure(PermissionDenied(virtual_url_fragment)))
+ failure.Failure(PermissionDenied(virtual_url_fragment))
+ )
return AsyncVirtualTransport.rmdir(self, relpath)
@@ -554,8 +555,13 @@ class LaunchpadServer(_BaseLaunchpadServer):
asyncTransportFactory = AsyncLaunchpadTransport
- def __init__(self, codehosting_api, user_id, branch_transport,
- seen_new_branch_hook=None):
+ def __init__(
+ self,
+ codehosting_api,
+ user_id,
+ branch_transport,
+ seen_new_branch_hook=None,
+ ):
"""Construct a `LaunchpadServer`.
See `_BaseLaunchpadServer` for more information.
@@ -571,9 +577,10 @@ class LaunchpadServer(_BaseLaunchpadServer):
:param seen_new_branch_hook: A callable that will be called once for
each branch accessed via this server.
"""
- scheme = 'lp-%d:///' % id(self)
+ scheme = "lp-%d:///" % id(self)
super().__init__(
- scheme, codehosting_api, user_id, seen_new_branch_hook)
+ scheme, codehosting_api, user_id, seen_new_branch_hook
+ )
self._transport_dispatch = TransportDispatch(branch_transport)
def createBranch(self, virtual_url_fragment):
@@ -605,11 +612,16 @@ class LaunchpadServer(_BaseLaunchpadServer):
# exist. You may supply --create-prefix to create all leading
# parent directories", which is just misleading.
fault = trap_fault(
- fail, faults.NotFound, faults.PermissionDenied,
- faults.InvalidSourcePackageName, faults.InvalidProductName)
+ fail,
+ faults.NotFound,
+ faults.PermissionDenied,
+ faults.InvalidSourcePackageName,
+ faults.InvalidProductName,
+ )
faultString = six.ensure_str(fault.faultString)
return failure.Failure(
- PermissionDenied(virtual_url_fragment, faultString))
+ PermissionDenied(virtual_url_fragment, faultString)
+ )
return deferred.addErrback(translate_fault)
@@ -631,11 +643,11 @@ class LaunchpadServer(_BaseLaunchpadServer):
stacked_on_url = get_stacked_on_url(branch)
if stacked_on_url is None:
return None
- if '://' not in stacked_on_url:
+ if "://" not in stacked_on_url:
# Assume it's a relative path.
return stacked_on_url
uri = URI(stacked_on_url)
- if uri.scheme not in ['http', 'bzr+ssh', 'sftp']:
+ if uri.scheme not in ["http", "bzr+ssh", "sftp"]:
return stacked_on_url
launchpad_domain = config.vhost.mainsite.hostname
if not uri.underDomain(launchpad_domain):
@@ -643,8 +655,8 @@ class LaunchpadServer(_BaseLaunchpadServer):
# We use TransportConfig directly because the branch
# is still locked at this point! We're effectively
# 'borrowing' the lock that is being released.
- branch_config = TransportConfig(branch._transport, 'branch.conf')
- branch_config.set_option(uri.path, 'stacked_on_location')
+ branch_config = TransportConfig(branch._transport, "branch.conf")
+ branch_config.set_option(uri.path, "stacked_on_location")
return uri.path
def branchChanged(self, virtual_url_fragment):
@@ -658,7 +670,8 @@ class LaunchpadServer(_BaseLaunchpadServer):
owned by a branch.
"""
deferred = self._branchfs_client.translatePath(
- '/' + virtual_url_fragment)
+ "/" + virtual_url_fragment
+ )
@no_traceback_failures
def got_path_info(result):
@@ -666,22 +679,22 @@ class LaunchpadServer(_BaseLaunchpadServer):
if transport_type != BRANCH_TRANSPORT:
raise NotABranchPath(virtual_url_fragment)
transport, _ = self._transport_dispatch.makeTransport(
- (transport_type, data, trailing_path))
+ (transport_type, data, trailing_path)
+ )
if jail_info.transports:
jail_info.transports.append(transport)
try:
branch = BzrDir.open_from_transport(transport).open_branch(
- ignore_fallbacks=True)
+ ignore_fallbacks=True
+ )
info = get_branch_info(branch)
- info['stacked_on_url'] = (
- self._normalize_stacked_on_url(branch))
+ info["stacked_on_url"] = self._normalize_stacked_on_url(branch)
finally:
if jail_info.transports:
jail_info.transports.remove(transport)
- if info['stacked_on_url'] is None:
- info['stacked_on_url'] = ''
- return self._branchfs_client.branchChanged(
- data['id'], **info)
+ if info["stacked_on_url"] is None:
+ info["stacked_on_url"] = ""
+ return self._branchfs_client.branchChanged(data["id"], **info)
@no_traceback_failures
def handle_error(failure=None, **kw):
@@ -696,17 +709,22 @@ class LaunchpadServer(_BaseLaunchpadServer):
# to log a bug with the oops information.
# See bugs 674305 and 675517 for details.
- request = errorlog.ScriptRequest([
- ('source', virtual_url_fragment),
- ('error-explanation', failure.getErrorMessage())])
+ request = errorlog.ScriptRequest(
+ [
+ ("source", virtual_url_fragment),
+ ("error-explanation", failure.getErrorMessage()),
+ ]
+ )
self.unexpectedError(failure, request)
fault = faults.OopsOccurred(
- "updating a Launchpad branch", request.oopsid)
+ "updating a Launchpad branch", request.oopsid
+ )
# Twisted's log.err used to write to stderr but it doesn't now so
# we will write to stderr as well as log.err.
print(repr(fault), file=sys.stderr)
log.err(repr(fault))
return fault
+
return deferred.addCallback(got_path_info).addErrback(handle_error)
def unexpectedError(self, failure, request=None):
@@ -715,15 +733,21 @@ class LaunchpadServer(_BaseLaunchpadServer):
# the Failure we've been passed.
traceback = None
if failure.check(error.ProcessTerminated):
- traceback = getattr(failure, 'error', None)
+ traceback = getattr(failure, "error", None)
if traceback is None:
traceback = failure.getTraceback()
errorlog.globalErrorUtility.raising(
- (failure.type, failure.value, traceback), request)
+ (failure.type, failure.value, traceback), request
+ )
-def get_lp_server(user_id, codehosting_endpoint_url=None, branch_url=None,
- seen_new_branch_hook=None, branch_transport=None):
+def get_lp_server(
+ user_id,
+ codehosting_endpoint_url=None,
+ branch_url=None,
+ seen_new_branch_hook=None,
+ branch_transport=None,
+):
"""Create a Launchpad server.
:param user_id: A unique database ID of the user whose branches are
@@ -746,10 +770,14 @@ def get_lp_server(user_id, codehosting_endpoint_url=None, branch_url=None,
branch_transport = get_chrooted_transport(branch_url)
else:
raise AssertionError(
- "can't supply both branch_url and branch_transport!")
+ "can't supply both branch_url and branch_transport!"
+ )
codehosting_client = xmlrpc.client.ServerProxy(codehosting_endpoint_url)
lp_server = LaunchpadServer(
- DeferredBlockingProxy(codehosting_client), user_id, branch_transport,
- seen_new_branch_hook)
+ DeferredBlockingProxy(codehosting_client),
+ user_id,
+ branch_transport,
+ seen_new_branch_hook,
+ )
return lp_server
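
A tiny self-contained illustration, not part of the proposed diff, of the branch_id_to_path() splitting reformatted above: the branch's database ID is rendered as eight zero-padded hex digits and split into four two-character path segments.

def branch_id_to_path(branch_id):
    # Same algorithm as in branchfs.py above: zero-padded hex, split 2/2/2/2.
    h = "%08x" % int(branch_id)
    return "%s/%s/%s/%s" % (h[:2], h[2:4], h[4:6], h[6:])

assert branch_id_to_path(1) == "00/00/00/01"
assert branch_id_to_path(0xABCDEF) == "00/ab/cd/ef"
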
diff --git a/lib/lp/codehosting/vfs/branchfsclient.py b/lib/lp/codehosting/vfs/branchfsclient.py
index b6f8e92..ecb5634 100644
--- a/lib/lp/codehosting/vfs/branchfsclient.py
+++ b/lib/lp/codehosting/vfs/branchfsclient.py
@@ -7,9 +7,9 @@ This code talks to the internal XML-RPC server for the branch filesystem.
"""
__all__ = [
- 'BranchFileSystemClient',
- 'NotInCache',
- ]
+ "BranchFileSystemClient",
+ "NotInCache",
+]
import time
@@ -36,8 +36,14 @@ class BranchFileSystemClient:
cache the results here.
"""
- def __init__(self, codehosting_endpoint, user_id, expiry_time=None,
- seen_new_branch_hook=None, _now=time.time):
+ def __init__(
+ self,
+ codehosting_endpoint,
+ user_id,
+ expiry_time=None,
+ seen_new_branch_hook=None,
+ _now=time.time,
+ ):
"""Construct a caching codehosting_endpoint.
:param codehosting_endpoint: An XML-RPC proxy that implements
@@ -64,7 +70,7 @@ class BranchFileSystemClient:
matched_part = path
else:
matched_part = path[:-trailing_length]
- return matched_part.rstrip('/')
+ return matched_part.rstrip("/")
def _addToCache(self, transport_tuple, path):
"""Cache the given 'transport_tuple' results for 'path'.
@@ -76,24 +82,26 @@ class BranchFileSystemClient:
matched_part = self._getMatchedPart(path, transport_tuple)
if transport_type == BRANCH_TRANSPORT:
if self.seen_new_branch_hook:
- self.seen_new_branch_hook(matched_part.strip('/'))
+ self.seen_new_branch_hook(matched_part.strip("/"))
self._cache[matched_part] = (transport_type, data, self._now())
return transport_tuple
def _getFromCache(self, path):
"""Get the cached 'transport_tuple' for 'path'."""
- split_path = path.strip('/').split('/')
+ split_path = path.strip("/").split("/")
for object_path, value in self._cache.items():
transport_type, data, inserted_time = value
- split_object_path = object_path.strip('/').split('/')
+ split_object_path = object_path.strip("/").split("/")
# Do a segment-by-segment comparison. Python sucks, lists should
# also have startswith.
- if split_path[:len(split_object_path)] == split_object_path:
- if (self.expiry_time is not None
- and self._now() > inserted_time + self.expiry_time):
+ if split_path[: len(split_object_path)] == split_object_path:
+ if (
+ self.expiry_time is not None
+ and self._now() > inserted_time + self.expiry_time
+ ):
del self._cache[object_path]
break
- trailing_path = '/'.join(split_path[len(split_object_path):])
+ trailing_path = "/".join(split_path[len(split_object_path) :])
return (transport_type, data, trailing_path)
raise NotInCache(path)
@@ -108,18 +116,32 @@ class BranchFileSystemClient:
:return: A `Deferred` that fires the ID of the created branch.
"""
return self._codehosting_endpoint.callRemote(
- 'createBranch', self._user_id, branch_path)
-
- def branchChanged(self, branch_id, stacked_on_url, last_revision_id,
- control_string, branch_string, repository_string):
+ "createBranch", self._user_id, branch_path
+ )
+
+ def branchChanged(
+ self,
+ branch_id,
+ stacked_on_url,
+ last_revision_id,
+ control_string,
+ branch_string,
+ repository_string,
+ ):
"""Mark a branch as needing to be mirrored.
:param branch_id: The database ID of the branch.
"""
return self._codehosting_endpoint.callRemote(
- 'branchChanged', self._user_id, branch_id, stacked_on_url,
- last_revision_id, control_string, branch_string,
- repository_string)
+
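
The _getFromCache() hunk above shows how BranchFileSystemClient caches translatePath() results by path prefix, with per-entry expiry. The following standalone sketch, not part of the proposed diff, mirrors that behaviour; the class and method names here are invented for illustration only.

import time


class NotInCache(Exception):
    """No cached prefix matches the requested path."""


class PrefixCache:
    """Simplified stand-in for BranchFileSystemClient's internal cache."""

    def __init__(self, expiry_time=None, _now=time.time):
        self._cache = {}  # matched path prefix -> (data, insertion time)
        self.expiry_time = expiry_time
        self._now = _now

    def add(self, matched_part, data):
        self._cache[matched_part.rstrip("/")] = (data, self._now())

    def get(self, path):
        split_path = path.strip("/").split("/")
        for object_path, (data, inserted_time) in list(self._cache.items()):
            split_object_path = object_path.strip("/").split("/")
            # Segment-by-segment prefix comparison, as in _getFromCache.
            if split_path[: len(split_object_path)] == split_object_path:
                if (
                    self.expiry_time is not None
                    and self._now() > inserted_time + self.expiry_time
                ):
                    # Expired: drop the entry and report a cache miss.
                    del self._cache[object_path]
                    break
                trailing = "/".join(split_path[len(split_object_path) :])
                return data, trailing
        raise NotInCache(path)


cache = PrefixCache(expiry_time=300)
cache.add("/~alice/project/branch", {"id": 42})
print(cache.get("/~alice/project/branch/.bzr/branch-format"))
# -> ({'id': 42}, '.bzr/branch-format')
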