launchpad-reviewers team mailing list archive
Message #23759
[Merge] lp:~cjwatson/launchpad/log-webhook-triggers into lp:launchpad
Colin Watson has proposed merging lp:~cjwatson/launchpad/log-webhook-triggers into lp:launchpad.
Commit message:
Log lightly-redacted payloads of scheduled webhook deliveries.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/log-webhook-triggers/+merge/369093
I'm hoping that this will make it easier to debug https://bugs.launchpad.net/launchpad/+bug/1833424.
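For a quick feel for the redaction behaviour without reading the whole diff, here is a minimal standalone sketch of the rules implemented by _redact_payload in lib/lp/services/webhooks/model.py below (simplified to Python 3 strings; the sample payloads are invented for illustration and are not taken from the patch):

    # Sketch of the proposed redaction rules; mirrors _redact_payload below.
    def redact_payload(event_type, payload):
        redacted = {}
        for key, value in payload.items():
            if isinstance(value, dict):
                # Nested payload sections are redacted recursively.
                redacted[key] = redact_payload(event_type, value)
            elif (event_type.startswith("merge-proposal:") and
                    key in ("commit_message", "whiteboard", "description")):
                # Known long user-supplied fields are dropped entirely.
                redacted[key] = "<redacted>"
            elif isinstance(value, str) and "\n" in value:
                # Any other multi-line string keeps only its first line.
                redacted[key] = "%s\n<redacted>" % value.split("\n", 1)[0]
            else:
                redacted[key] = value
        return redacted

    print(redact_payload("merge-proposal:0.1", {
        "action": "modified",
        "new": {"description": "Fix the frobnicator.", "queue_status": "Merged"},
    }))
    # {'action': 'modified', 'new': {'description': '<redacted>', 'queue_status': 'Merged'}}

    print(redact_payload("test", {"notes": "First line\nsecond\nthird"}))
    # {'notes': 'First line\n<redacted>'}

The actual implementation (which uses six.string_types for Python 2 compatibility) is called from WebhookDeliveryJob.create, which logs "Scheduled <WebhookDeliveryJob ...> (<event type>): <redacted payload>"; the new LogsScheduledWebhooks matcher in lib/lp/services/webhooks/testing.py asserts against those log lines.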
--
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~cjwatson/launchpad/log-webhook-triggers into lp:launchpad.
=== modified file 'lib/lp/code/model/tests/test_branchmergeproposal.py'
--- lib/lp/code/model/tests/test_branchmergeproposal.py 2019-05-22 14:57:45 +0000
+++ lib/lp/code/model/tests/test_branchmergeproposal.py 2019-06-20 13:26:17 +0000
@@ -1,4 +1,4 @@
-# Copyright 2009-2018 Canonical Ltd. This software is licensed under the
+# Copyright 2009-2019 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests for BranchMergeProposals."""
@@ -15,6 +15,7 @@
import hashlib
from unittest import TestCase
+from fixtures import FakeLogger
from lazr.lifecycle.event import ObjectCreatedEvent
from lazr.restfulclient.errors import BadRequest
from pytz import UTC
@@ -83,6 +84,7 @@
from lp.services.features.testing import FeatureFixture
from lp.services.job.interfaces.job import JobStatus
from lp.services.webapp import canonical_url
+from lp.services.webhooks.testing import LogsScheduledWebhooks
from lp.services.xref.interfaces import IXRefSet
from lp.testing import (
ExpectedException,
@@ -1036,7 +1038,7 @@
return None
@classmethod
- def getExpectedPayload(cls, proposal):
+ def getExpectedPayload(cls, proposal, redact=False):
payload = {
"registrant": "/~%s" % proposal.registrant.name,
"source_branch": cls.getURL(proposal.source_branch),
@@ -1052,9 +1054,10 @@
proposal.prerequisite_git_repository),
"prerequisite_git_path": proposal.prerequisite_git_path,
"queue_status": proposal.queue_status.title,
- "commit_message": proposal.commit_message,
- "whiteboard": proposal.whiteboard,
- "description": proposal.description,
+ "commit_message": (
+ "<redacted>" if redact else proposal.commit_message),
+ "whiteboard": "<redacted>" if redact else proposal.whiteboard,
+ "description": "<redacted>" if redact else proposal.description,
"preview_diff": cls.getURL(proposal.preview_diff),
}
return {k: Equals(v) for k, v in payload.items()}
@@ -1070,9 +1073,17 @@
hook.id, hook.target),
repr(delivery))
+ def assertCorrectLogging(self, expected_redacted_payload, hook, logger):
+ with dbuser(config.IWebhookDeliveryJobSource.dbuser):
+ self.assertThat(
+ logger.output, LogsScheduledWebhooks([
+ (hook, "merge-proposal:0.1",
+ MatchesDict(expected_redacted_payload))]))
+
def test_create_triggers_webhooks(self):
# When a merge proposal is created, any relevant webhooks are
# triggered.
+ logger = self.useFixture(FakeLogger())
source = self.makeBranch()
target = self.makeBranch(same_target_as=source)
registrant = self.factory.makePerson()
@@ -1088,9 +1099,14 @@
"action": Equals("created"),
"new": MatchesDict(self.getExpectedPayload(proposal)),
}
+ expected_redacted_payload = dict(
+ expected_payload,
+ new=MatchesDict(self.getExpectedPayload(proposal, redact=True)))
self.assertCorrectDelivery(expected_payload, hook, delivery)
+ self.assertCorrectLogging(expected_redacted_payload, hook, logger)
def test_modify_triggers_webhooks(self):
+ logger = self.useFixture(FakeLogger())
# When an existing merge proposal is modified, any relevant webhooks
# are triggered.
source = self.makeBranch()
@@ -1107,18 +1123,25 @@
"action": Equals("modified"),
"old": MatchesDict(self.getExpectedPayload(proposal)),
}
+ expected_redacted_payload = dict(
+ expected_payload,
+ old=MatchesDict(self.getExpectedPayload(proposal, redact=True)))
with BranchMergeProposalNoPreviewDiffDelta.monitor(proposal):
proposal.setStatus(
BranchMergeProposalStatus.CODE_APPROVED, user=target.owner)
proposal.description = "An excellent proposal."
expected_payload["new"] = MatchesDict(
self.getExpectedPayload(proposal))
+ expected_redacted_payload["new"] = MatchesDict(
+ self.getExpectedPayload(proposal, redact=True))
delivery = hook.deliveries.one()
self.assertCorrectDelivery(expected_payload, hook, delivery)
+ self.assertCorrectLogging(expected_redacted_payload, hook, logger)
def test_delete_triggers_webhooks(self):
# When an existing merge proposal is deleted, any relevant webhooks
# are triggered.
+ logger = self.useFixture(FakeLogger())
source = self.makeBranch()
target = self.makeBranch(same_target_as=source)
registrant = self.factory.makePerson()
@@ -1133,9 +1156,13 @@
"action": Equals("deleted"),
"old": MatchesDict(self.getExpectedPayload(proposal)),
}
+ expected_redacted_payload = dict(
+ expected_payload,
+ old=MatchesDict(self.getExpectedPayload(proposal, redact=True)))
proposal.deleteProposal()
delivery = hook.deliveries.one()
self.assertCorrectDelivery(expected_payload, hook, delivery)
+ self.assertCorrectLogging(expected_redacted_payload, hook, logger)
class TestMergeProposalWebhooksBzr(
=== modified file 'lib/lp/code/model/tests/test_branchmergeproposaljobs.py'
--- lib/lp/code/model/tests/test_branchmergeproposaljobs.py 2019-05-22 14:57:45 +0000
+++ lib/lp/code/model/tests/test_branchmergeproposaljobs.py 2019-06-20 13:26:17 +0000
@@ -1,4 +1,4 @@
-# Copyright 2010-2017 Canonical Ltd. This software is licensed under the
+# Copyright 2010-2019 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests for branch merge proposal jobs."""
@@ -13,6 +13,7 @@
)
import hashlib
+from fixtures import FakeLogger
from lazr.lifecycle.event import ObjectModifiedEvent
from lazr.lifecycle.interfaces import IObjectModifiedEvent
import pytz
@@ -72,6 +73,7 @@
)
from lp.services.osutils import override_environ
from lp.services.webapp import canonical_url
+from lp.services.webhooks.testing import LogsScheduledWebhooks
from lp.testing import (
EventRecorder,
TestCaseWithFactory,
@@ -373,22 +375,27 @@
expiry_delta = job.lease_expires - datetime.now(pytz.UTC)
self.assertTrue(500 <= expiry_delta.seconds, expiry_delta)
- def assertCorrectPreviewDiffDelivery(self, bmp, delivery):
+ def assertCorrectPreviewDiffDelivery(self, bmp, delivery, logger):
bmp_url = canonical_url(bmp, force_local_path=True)
diff_url = canonical_url(bmp.preview_diff, force_local_path=True)
+ payload_matcher = MatchesDict({
+ "merge_proposal": Equals(bmp_url),
+ "action": Equals("modified"),
+ "old": ContainsDict({"preview_diff": Is(None)}),
+ "new": ContainsDict({"preview_diff": Equals(diff_url)}),
+ })
self.assertThat(
delivery, MatchesStructure(
event_type=Equals("merge-proposal:0.1"),
- payload=MatchesDict({
- "merge_proposal": Equals(bmp_url),
- "action": Equals("modified"),
- "old": ContainsDict({"preview_diff": Is(None)}),
- "new": ContainsDict({"preview_diff": Equals(diff_url)}),
- })))
+ payload=payload_matcher))
+ self.assertThat(
+ logger.output, LogsScheduledWebhooks([
+ (delivery.webhook, "merge-proposal:0.1", payload_matcher)]))
def test_triggers_webhooks_bzr(self):
self.useFixture(FeatureFixture(
{BRANCH_MERGE_PROPOSAL_WEBHOOKS_FEATURE_FLAG: "on"}))
+ logger = self.useFixture(FakeLogger())
bmp = self.createExampleBzrMerge()[0]
hook = self.factory.makeWebhook(
target=bmp.target_branch, event_types=["merge-proposal:0.1"])
@@ -397,11 +404,13 @@
bmp.source_branch.next_mirror_time = None
with dbuser("merge-proposal-jobs"):
JobRunner([job]).runAll()
- self.assertCorrectPreviewDiffDelivery(bmp, hook.deliveries.one())
+ self.assertCorrectPreviewDiffDelivery(
+ bmp, hook.deliveries.one(), logger)
def test_triggers_webhooks_git(self):
self.useFixture(FeatureFixture(
{BRANCH_MERGE_PROPOSAL_WEBHOOKS_FEATURE_FLAG: "on"}))
+ logger = self.useFixture(FakeLogger())
bmp = self.createExampleGitMerge()[0]
hook = self.factory.makeWebhook(
target=bmp.target_git_repository,
@@ -409,7 +418,8 @@
job = UpdatePreviewDiffJob.create(bmp)
with dbuser("merge-proposal-jobs"):
JobRunner([job]).runAll()
- self.assertCorrectPreviewDiffDelivery(bmp, hook.deliveries.one())
+ self.assertCorrectPreviewDiffDelivery(
+ bmp, hook.deliveries.one(), logger)
def make_runnable_incremental_diff_job(test_case):
=== modified file 'lib/lp/code/model/tests/test_gitjob.py'
--- lib/lp/code/model/tests/test_gitjob.py 2018-10-21 17:38:05 +0000
+++ lib/lp/code/model/tests/test_gitjob.py 2019-06-20 13:26:17 +0000
@@ -1,4 +1,4 @@
-# Copyright 2015-2018 Canonical Ltd. This software is licensed under the
+# Copyright 2015-2019 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests for `GitJob`s."""
@@ -13,6 +13,7 @@
)
import hashlib
+from fixtures import FakeLogger
from lazr.lifecycle.snapshot import Snapshot
import pytz
from testtools.matchers import (
@@ -55,6 +56,7 @@
from lp.services.utils import seconds_since_epoch
from lp.services.webapp import canonical_url
from lp.services.webapp.snapshot import notify_modified
+from lp.services.webhooks.testing import LogsScheduledWebhooks
from lp.testing import (
person_logged_in,
TestCaseWithFactory,
@@ -179,6 +181,7 @@
def test_triggers_webhooks(self):
# Jobs trigger any relevant webhooks when they're enabled.
self.useFixture(FeatureFixture({'code.git.webhooks.enabled': 'on'}))
+ logger = self.useFixture(FakeLogger())
repository = self.factory.makeGitRepository()
self.factory.makeGitRefs(
repository, paths=['refs/heads/master', 'refs/tags/1.0'])
@@ -191,30 +194,35 @@
JobRunner([job]).runAll()
delivery = hook.deliveries.one()
sha1 = lambda s: hashlib.sha1(s).hexdigest()
+ payload_matcher = MatchesDict({
+ 'git_repository': Equals('/' + repository.unique_name),
+ 'git_repository_path': Equals(repository.unique_name),
+ 'ref_changes': Equals({
+ 'refs/tags/1.0': {
+ 'old': {'commit_sha1': sha1('refs/tags/1.0')},
+ 'new': None},
+ 'refs/tags/2.0': {
+ 'old': None,
+ 'new': {'commit_sha1': sha1('refs/tags/2.0')}},
+ })})
self.assertThat(
delivery,
MatchesStructure(
event_type=Equals('git:push:0.1'),
- payload=MatchesDict({
- 'git_repository': Equals('/' + repository.unique_name),
- 'git_repository_path': Equals(repository.unique_name),
- 'ref_changes': Equals({
- 'refs/tags/1.0': {
- 'old': {'commit_sha1': sha1('refs/tags/1.0')},
- 'new': None},
- 'refs/tags/2.0': {
- 'old': None,
- 'new': {'commit_sha1': sha1('refs/tags/2.0')}},
- })})))
+ payload=payload_matcher))
with dbuser(config.IWebhookDeliveryJobSource.dbuser):
self.assertEqual(
"<WebhookDeliveryJob for webhook %d on %r>" % (
hook.id, hook.target),
repr(delivery))
+ self.assertThat(
+ logger.output, LogsScheduledWebhooks([
+ (hook, "git:push:0.1", payload_matcher)]))
def test_merge_detection_triggers_webhooks(self):
self.useFixture(FeatureFixture(
{BRANCH_MERGE_PROPOSAL_WEBHOOKS_FEATURE_FLAG: 'on'}))
+ logger = self.useFixture(FakeLogger())
repository = self.factory.makeGitRepository()
target, source = self.factory.makeGitRefs(
repository, paths=['refs/heads/target', 'refs/heads/source'])
@@ -238,23 +246,26 @@
with dbuser('branchscanner'):
JobRunner([job]).runAll()
delivery = hook.deliveries.one()
+ payload_matcher = MatchesDict({
+ 'merge_proposal': Equals(
+ canonical_url(bmp, force_local_path=True)),
+ 'action': Equals('modified'),
+ 'old': ContainsDict({'queue_status': Equals('Work in progress')}),
+ 'new': ContainsDict({'queue_status': Equals('Merged')}),
+ })
self.assertThat(
delivery,
MatchesStructure(
event_type=Equals('merge-proposal:0.1'),
- payload=MatchesDict({
- 'merge_proposal': Equals(
- canonical_url(bmp, force_local_path=True)),
- 'action': Equals('modified'),
- 'old': ContainsDict(
- {'queue_status': Equals('Work in progress')}),
- 'new': ContainsDict({'queue_status': Equals('Merged')}),
- })))
+ payload=payload_matcher))
with dbuser(config.IWebhookDeliveryJobSource.dbuser):
self.assertEqual(
"<WebhookDeliveryJob for webhook %d on %r>" % (
hook.id, hook.target),
repr(delivery))
+ self.assertThat(
+ logger.output, LogsScheduledWebhooks([
+ (hook, "merge-proposal:0.1", payload_matcher)]))
def test_composeWebhookPayload(self):
repository = self.factory.makeGitRepository()
=== modified file 'lib/lp/codehosting/scanner/tests/test_bzrsync.py'
--- lib/lp/codehosting/scanner/tests/test_bzrsync.py 2018-01-02 16:10:26 +0000
+++ lib/lp/codehosting/scanner/tests/test_bzrsync.py 2019-06-20 13:26:17 +0000
@@ -1,6 +1,6 @@
#!/usr/bin/python
#
-# Copyright 2009-2016 Canonical Ltd. This software is licensed under the
+# Copyright 2009-2019 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
import datetime
@@ -14,7 +14,10 @@
)
from bzrlib.tests import TestCaseWithTransport
from bzrlib.uncommit import uncommit
-from fixtures import TempDir
+from fixtures import (
+ FakeLogger,
+ TempDir,
+ )
import pytz
from storm.locals import Store
from testtools.matchers import (
@@ -51,6 +54,7 @@
from lp.services.database.interfaces import IStore
from lp.services.features.testing import FeatureFixture
from lp.services.osutils import override_environ
+from lp.services.webhooks.testing import LogsScheduledWebhooks
from lp.testing import TestCaseWithFactory
from lp.testing.dbuser import (
dbuser,
@@ -777,6 +781,7 @@
def test_triggers_webhooks(self):
# On tip change, any relevant webhooks are triggered.
self.useFixture(FeatureFixture({"code.bzr.webhooks.enabled": "on"}))
+ logger = self.useFixture(FakeLogger())
self.syncAndCount()
old_revid = self.db_branch.last_scanned_id
with dbuser(config.launchpad.dbuser):
@@ -786,21 +791,25 @@
new_revid = self.bzr_branch.last_revision()
self.makeBzrSync(self.db_branch).syncBranchAndClose()
delivery = hook.deliveries.one()
+ payload_matcher = MatchesDict({
+ "bzr_branch": Equals("/" + self.db_branch.unique_name),
+ "bzr_branch_path": Equals(self.db_branch.shortened_path),
+ "old": Equals({"revision_id": old_revid}),
+ "new": Equals({"revision_id": new_revid}),
+ })
self.assertThat(
delivery,
MatchesStructure(
event_type=Equals("bzr:push:0.1"),
- payload=MatchesDict({
- "bzr_branch": Equals("/" + self.db_branch.unique_name),
- "bzr_branch_path": Equals(self.db_branch.shortened_path),
- "old": Equals({"revision_id": old_revid}),
- "new": Equals({"revision_id": new_revid}),
- })))
+ payload=payload_matcher))
with dbuser(config.IWebhookDeliveryJobSource.dbuser):
self.assertEqual(
"<WebhookDeliveryJob for webhook %d on %r>" % (
hook.id, hook.target),
repr(delivery))
+ self.assertThat(
+ logger.output, LogsScheduledWebhooks([
+ (hook, "bzr:push:0.1", payload_matcher)]))
class TestRevisionProperty(BzrSyncTestCase):
=== modified file 'lib/lp/services/webhooks/model.py'
--- lib/lp/services/webhooks/model.py 2019-04-13 08:58:02 +0000
+++ lib/lp/services/webhooks/model.py 2019-06-20 13:26:17 +0000
@@ -22,6 +22,7 @@
DBItem,
)
from pytz import utc
+import six
from storm.expr import Desc
from storm.properties import (
Bool,
@@ -63,6 +64,7 @@
Job,
)
from lp.services.job.runner import BaseRunnableJob
+from lp.services.scripts import log
from lp.services.webhooks.interfaces import (
IWebhook,
IWebhookClient,
@@ -370,6 +372,30 @@
return (cls(job) for job in jobs)
+def _redact_payload(event_type, payload):
+ """Redact a webhook payload for logging.
+
+ Webhook payloads don't generally contain anything that's particularly
+ sensitive for the purposes of logging (i.e. no credentials), although
+ they may contain information that's private to their target. However,
+ we still don't want logs to be clogged up with excessive detail from
+ long user-supplied text fields, so redact some fields that are known to
+ be boring, and truncate any multi-line strings that may slip through.
+ """
+ redacted = {}
+ for key, value in payload.items():
+ if isinstance(value, dict):
+ redacted[key] = _redact_payload(event_type, value)
+ elif (event_type.startswith("merge-proposal:") and
+ key in ("commit_message", "whiteboard", "description")):
+ redacted[key] = "<redacted>"
+ elif isinstance(value, six.string_types) and "\n" in value:
+ redacted[key] = "%s\n<redacted>" % value.split("\n", 1)[0]
+ else:
+ redacted[key] = value
+ return redacted
+
+
@provider(IWebhookDeliveryJobSource)
@implementer(IWebhookDeliveryJob)
class WebhookDeliveryJob(WebhookJobDerived):
@@ -400,6 +426,9 @@
{"event_type": event_type, "payload": payload})
job = cls(webhook_job)
job.celeryRunOnCommit()
+ log.info(
+ "Scheduled %r (%s): %s" %
+ (job, event_type, _redact_payload(event_type, payload)))
return job
@property
=== added file 'lib/lp/services/webhooks/testing.py'
--- lib/lp/services/webhooks/testing.py 1970-01-01 00:00:00 +0000
+++ lib/lp/services/webhooks/testing.py 2019-06-20 13:26:17 +0000
@@ -0,0 +1,55 @@
+# Copyright 2019 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Webhook testing helpers."""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+__metaclass__ = type
+__all__ = [
+ 'LogsScheduledWebhooks',
+ ]
+
+from testtools.matchers import (
+ AfterPreprocessing,
+ Matcher,
+ MatchesSetwise,
+ StartsWith,
+ )
+
+
+class LogsOneScheduledWebhook(Matcher):
+ """Matches a line of logger output indicating a scheduled webhook."""
+
+ def __init__(self, webhook, event_type, payload_matcher):
+ self.webhook = webhook
+ self.event_type = event_type
+ self.payload_matcher = payload_matcher
+
+ def match(self, line):
+ prefix = (
+ "Scheduled <WebhookDeliveryJob for webhook %d on %r> (%s): " %
+ (self.webhook.id, self.webhook.target, self.event_type))
+ mismatch = StartsWith(prefix).match(line)
+ if mismatch is not None:
+ return mismatch
+ return AfterPreprocessing(eval, self.payload_matcher).match(
+ line[len(prefix):])
+
+
+class LogsScheduledWebhooks(MatchesSetwise):
+ """Matches logger output indicating at least one scheduled webhook.
+
+ Construct this with a sequence of (webhook, event_type, payload_matcher)
+ tuples.
+ """
+
+ def __init__(self, expected_webhooks):
+ super(LogsScheduledWebhooks, self).__init__(*(
+ LogsOneScheduledWebhook(webhook, event_type, payload_matcher)
+ for webhook, event_type, payload_matcher in expected_webhooks))
+
+ def match(self, logger_output):
+ return super(LogsScheduledWebhooks, self).match(
+ [line for line in logger_output.splitlines()
+ if line.startswith("Scheduled <WebhookDeliveryJob ")])
=== modified file 'lib/lp/services/webhooks/tests/test_job.py'
--- lib/lp/services/webhooks/tests/test_job.py 2019-06-07 21:12:13 +0000
+++ lib/lp/services/webhooks/tests/test_job.py 2019-06-20 13:26:17 +0000
@@ -11,6 +11,7 @@
)
import re
+from fixtures import FakeLogger
from pytz import utc
import requests
import requests.exceptions
@@ -58,6 +59,7 @@
WebhookJobDerived,
WebhookJobType,
)
+from lp.services.webhooks.testing import LogsScheduledWebhooks
from lp.testing import (
login_person,
TestCaseWithFactory,
@@ -295,6 +297,7 @@
def test_create(self):
# `WebhookDeliveryJob` objects provide `IWebhookDeliveryJob`.
+ logger = self.useFixture(FakeLogger())
hook = self.factory.makeWebhook()
job = WebhookDeliveryJob.create(hook, 'test', payload={'foo': 'bar'})
self.assertProvides(job, IWebhookDeliveryJob)
@@ -302,6 +305,9 @@
job,
MatchesStructure.byEquality(
webhook=hook, event_type='test', payload={'foo': 'bar'}))
+ self.assertThat(
+ logger.output,
+ LogsScheduledWebhooks([(hook, 'test', Equals({'foo': 'bar'}))]))
def test_gitrepository__repr__(self):
# `WebhookDeliveryJob` objects for Git repositories have an
=== modified file 'lib/lp/snappy/tests/test_snap.py'
--- lib/lp/snappy/tests/test_snap.py 2019-05-16 10:21:14 +0000
+++ lib/lp/snappy/tests/test_snap.py 2019-06-20 13:26:17 +0000
@@ -16,7 +16,10 @@
from textwrap import dedent
from urlparse import urlsplit
-from fixtures import MockPatch
+from fixtures import (
+ FakeLogger,
+ MockPatch,
+ )
import iso8601
from pymacaroons import Macaroon
import pytz
@@ -85,6 +88,7 @@
from lp.services.webapp.interfaces import OAuthPermission
from lp.services.webapp.publisher import canonical_url
from lp.services.webapp.snapshot import notify_modified
+from lp.services.webhooks.testing import LogsScheduledWebhooks
from lp.snappy.interfaces.snap import (
BadSnapSearchContext,
CannotFetchSnapcraftYaml,
@@ -418,6 +422,7 @@
def test_requestBuild_triggers_webhooks(self):
# Requesting a build triggers webhooks.
+ logger = self.useFixture(FakeLogger())
processor = self.factory.makeProcessor(supports_virtualized=True)
distroarchseries = self.makeBuildableDistroArchSeries(
processor=processor)
@@ -447,6 +452,11 @@
"<WebhookDeliveryJob for webhook %d on %r>" % (
hook.id, hook.target),
repr(delivery))
+ self.assertThat(
+ logger.output,
+ LogsScheduledWebhooks([
+ (hook, "snap:build:0.1",
+ MatchesDict(expected_payload))]))
def test_requestBuilds(self):
# requestBuilds schedules a job and returns a corresponding
@@ -704,6 +714,7 @@
- build-on: avr
- build-on: riscv64
""")))
+ logger = self.useFixture(FakeLogger())
job = self.makeRequestBuildsJob(["avr", "riscv64", "sparc"])
hook = self.factory.makeWebhook(
target=job.snap, event_types=["snap:build:0.1"])
@@ -713,21 +724,29 @@
removeSecurityProxy(job.channels),
build_request=job.build_request)
self.assertEqual(2, len(builds))
+ payload_matchers = [
+ MatchesDict({
+ "snap_build": Equals(canonical_url(
+ build, force_local_path=True)),
+ "action": Equals("created"),
+ "snap": Equals(canonical_url(
+ job.snap, force_local_path=True)),
+ "build_request": Equals(canonical_url(
+ job.build_request, force_local_path=True)),
+ "status": Equals("Needs building"),
+ "store_upload_status": Equals("Unscheduled"),
+ })
+ for build in builds]
self.assertThat(hook.deliveries, MatchesSetwise(*(
MatchesStructure(
event_type=Equals("snap:build:0.1"),
- payload=MatchesDict({
- "snap_build": Equals(canonical_url(
- build, force_local_path=True)),
- "action": Equals("created"),
- "snap": Equals(canonical_url(
- job.snap, force_local_path=True)),
- "build_request": Equals(canonical_url(
- job.build_request, force_local_path=True)),
- "status": Equals("Needs building"),
- "store_upload_status": Equals("Unscheduled"),
- }))
- for build in builds)))
+ payload=payload_matcher)
+ for payload_matcher in payload_matchers)))
+ self.assertThat(
+ logger.output,
+ LogsScheduledWebhooks([
+ (hook, "snap:build:0.1", payload_matcher)
+ for payload_matcher in payload_matchers]))
def test_requestAutoBuilds(self):
# requestAutoBuilds creates new builds for all configured
=== modified file 'lib/lp/snappy/tests/test_snapbuild.py'
--- lib/lp/snappy/tests/test_snapbuild.py 2019-05-22 14:57:45 +0000
+++ lib/lp/snappy/tests/test_snapbuild.py 2019-06-20 13:26:17 +0000
@@ -16,9 +16,11 @@
urlopen,
)
+from fixtures import FakeLogger
from pymacaroons import Macaroon
import pytz
from testtools.matchers import (
+ ContainsDict,
Equals,
Is,
MatchesDict,
@@ -50,6 +52,7 @@
from lp.services.propertycache import clear_property_cache
from lp.services.webapp.interfaces import OAuthPermission
from lp.services.webapp.publisher import canonical_url
+from lp.services.webhooks.testing import LogsScheduledWebhooks
from lp.snappy.interfaces.snap import SNAP_TESTING_FLAGS
from lp.snappy.interfaces.snapbuild import (
CannotScheduleStoreUpload,
@@ -268,6 +271,7 @@
def test_updateStatus_triggers_webhooks(self):
# Updating the status of a SnapBuild triggers webhooks on the
# corresponding Snap.
+ logger = self.useFixture(FakeLogger())
hook = self.factory.makeWebhook(
target=self.build.snap, event_types=["snap:build:0.1"])
self.build.updateStatus(BuildStatus.FULLYBUILT)
@@ -291,22 +295,39 @@
"<WebhookDeliveryJob for webhook %d on %r>" % (
hook.id, hook.target),
repr(delivery))
+ self.assertThat(
+ logger.output, LogsScheduledWebhooks([
+ (hook, "snap:build:0.1", MatchesDict(expected_payload))]))
def test_updateStatus_no_change_does_not_trigger_webhooks(self):
# An updateStatus call that changes details such as the revision_id
# but that doesn't change the build's status attribute does not
# trigger webhooks.
+ logger = self.useFixture(FakeLogger())
hook = self.factory.makeWebhook(
target=self.build.snap, event_types=["snap:build:0.1"])
builder = self.factory.makeBuilder()
self.build.updateStatus(BuildStatus.BUILDING)
+ expected_logs = [
+ (hook, "snap:build:0.1", ContainsDict({
+ "action": Equals("status-changed"),
+ "status": Equals("Currently building"),
+ }))]
self.assertEqual(1, hook.deliveries.count())
+ self.assertThat(logger.output, LogsScheduledWebhooks(expected_logs))
self.build.updateStatus(
BuildStatus.BUILDING, builder=builder,
slave_status={"revision_id": "1"})
self.assertEqual(1, hook.deliveries.count())
+ self.assertThat(logger.output, LogsScheduledWebhooks(expected_logs))
self.build.updateStatus(BuildStatus.UPLOADING)
+ expected_logs.append(
+ (hook, "snap:build:0.1", ContainsDict({
+ "action": Equals("status-changed"),
+ "status": Equals("Uploading build"),
+ })))
self.assertEqual(2, hook.deliveries.count())
+ self.assertThat(logger.output, LogsScheduledWebhooks(expected_logs))
def test_updateStatus_failure_does_not_trigger_store_uploads(self):
# A failed SnapBuild does not trigger store uploads.
@@ -565,6 +586,7 @@
def test_scheduleStoreUpload_triggers_webhooks(self):
# Scheduling a store upload triggers webhooks on the corresponding
# snap.
+ logger = self.useFixture(FakeLogger())
self.setUpStoreUpload()
self.build.updateStatus(BuildStatus.FULLYBUILT)
self.factory.makeSnapFile(
@@ -593,6 +615,9 @@
"<WebhookDeliveryJob for webhook %d on %r>" % (
hook.id, hook.target),
repr(delivery))
+ self.assertThat(
+ logger.output, LogsScheduledWebhooks([
+ (hook, "snap:build:0.1", MatchesDict(expected_payload))]))
class TestSnapBuildSet(TestCaseWithFactory):
=== modified file 'lib/lp/snappy/tests/test_snapbuildjob.py'
--- lib/lp/snappy/tests/test_snapbuildjob.py 2019-06-19 13:38:32 +0000
+++ lib/lp/snappy/tests/test_snapbuildjob.py 2019-06-20 13:26:17 +0000
@@ -1,4 +1,4 @@
-# Copyright 2016-2018 Canonical Ltd. This software is licensed under the
+# Copyright 2016-2019 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests for snap build jobs."""
@@ -27,6 +27,7 @@
from lp.services.job.interfaces.job import JobStatus
from lp.services.job.runner import JobRunner
from lp.services.webapp.publisher import canonical_url
+from lp.services.webhooks.testing import LogsScheduledWebhooks
from lp.snappy.interfaces.snap import SNAP_TESTING_FLAGS
from lp.snappy.interfaces.snapbuildjob import (
ISnapBuildJob,
@@ -117,7 +118,7 @@
return snapbuild
def assertWebhookDeliveries(self, snapbuild,
- expected_store_upload_statuses):
+ expected_store_upload_statuses, logger):
hook = snapbuild.snap.webhooks.one()
deliveries = list(hook.deliveries)
deliveries.reverse()
@@ -143,10 +144,15 @@
"<WebhookDeliveryJob for webhook %d on %r>" % (
hook.id, hook.target),
repr(delivery))
+ self.assertThat(
+ logger.output, LogsScheduledWebhooks([
+ (hook, "snap:build:0.1", MatchesDict(expected_payload))
+ for expected_payload in expected_payloads]))
def test_run(self):
# The job uploads the build to the store and records the store URL
# and revision.
+ logger = self.useFixture(FakeLogger())
snapbuild = self.makeSnapBuild()
self.assertContentEqual([], snapbuild.store_upload_jobs)
job = SnapStoreUploadJob.create(snapbuild)
@@ -163,10 +169,12 @@
self.assertEqual(1, job.store_revision)
self.assertIsNone(job.error_message)
self.assertEqual([], pop_notifications())
- self.assertWebhookDeliveries(snapbuild, ["Pending", "Uploaded"])
+ self.assertWebhookDeliveries(
+ snapbuild, ["Pending", "Uploaded"], logger)
def test_run_failed(self):
# A failed run sets the store upload status to FAILED.
+ logger = self.useFixture(FakeLogger())
snapbuild = self.makeSnapBuild()
self.assertContentEqual([], snapbuild.store_upload_jobs)
job = SnapStoreUploadJob.create(snapbuild)
@@ -183,10 +191,11 @@
self.assertEqual("An upload failure", job.error_message)
self.assertEqual([], pop_notifications())
self.assertWebhookDeliveries(
- snapbuild, ["Pending", "Failed to upload"])
+ snapbuild, ["Pending", "Failed to upload"], logger)
def test_run_unauthorized_notifies(self):
# A run that gets 401 from the store sends mail.
+ logger = self.useFixture(FakeLogger())
requester = self.factory.makePerson(name="requester")
requester_team = self.factory.makeTeam(
owner=requester, name="requester-team", members=[requester])
@@ -231,12 +240,12 @@
"Your team Requester Team is the requester of the build.\n" %
snapbuild.id, footer)
self.assertWebhookDeliveries(
- snapbuild, ["Pending", "Failed to upload"])
+ snapbuild, ["Pending", "Failed to upload"], logger)
def test_run_502_retries(self):
# A run that gets a 502 error from the store schedules itself to be
# retried.
- self.useFixture(FakeLogger())
+ logger = self.useFixture(FakeLogger())
snapbuild = self.makeSnapBuild()
self.assertContentEqual([], snapbuild.store_upload_jobs)
job = SnapStoreUploadJob.create(snapbuild)
@@ -254,7 +263,7 @@
self.assertIsNone(job.error_message)
self.assertEqual([], pop_notifications())
self.assertEqual(JobStatus.WAITING, job.job.status)
- self.assertWebhookDeliveries(snapbuild, ["Pending"])
+ self.assertWebhookDeliveries(snapbuild, ["Pending"], logger)
# Try again. The upload part of the job is retried, and this time
# it succeeds.
job.scheduled_start = None
@@ -272,11 +281,13 @@
self.assertIsNone(job.error_message)
self.assertEqual([], pop_notifications())
self.assertEqual(JobStatus.COMPLETED, job.job.status)
- self.assertWebhookDeliveries(snapbuild, ["Pending", "Uploaded"])
+ self.assertWebhookDeliveries(
+ snapbuild, ["Pending", "Uploaded"], logger)
def test_run_refresh_failure_notifies(self):
# A run that gets a failure when trying to refresh macaroons sends
# mail.
+ logger = self.useFixture(FakeLogger())
requester = self.factory.makePerson(name="requester")
requester_team = self.factory.makeTeam(
owner=requester, name="requester-team", members=[requester])
@@ -321,11 +332,12 @@
"Your team Requester Team is the requester of the build.\n" %
snapbuild.id, footer)
self.assertWebhookDeliveries(
- snapbuild, ["Pending", "Failed to upload"])
+ snapbuild, ["Pending", "Failed to upload"], logger)
def test_run_upload_failure_notifies(self):
# A run that gets some other upload failure from the store sends
# mail.
+ logger = self.useFixture(FakeLogger())
requester = self.factory.makePerson(name="requester")
requester_team = self.factory.makeTeam(
owner=requester, name="requester-team", members=[requester])
@@ -371,14 +383,14 @@
"%s\nYour team Requester Team is the requester of the build.\n" %
build_url, footer)
self.assertWebhookDeliveries(
- snapbuild, ["Pending", "Failed to upload"])
+ snapbuild, ["Pending", "Failed to upload"], logger)
self.assertIn(
("error_detail", "The proxy exploded.\n"), job.getOopsVars())
def test_run_scan_pending_retries(self):
# A run that finds that the store has not yet finished scanning the
# package schedules itself to be retried.
- self.useFixture(FakeLogger())
+ logger = self.useFixture(FakeLogger())
snapbuild = self.makeSnapBuild()
self.assertContentEqual([], snapbuild.store_upload_jobs)
job = SnapStoreUploadJob.create(snapbuild)
@@ -396,7 +408,7 @@
self.assertIsNone(job.error_message)
self.assertEqual([], pop_notifications())
self.assertEqual(JobStatus.WAITING, job.job.status)
- self.assertWebhookDeliveries(snapbuild, ["Pending"])
+ self.assertWebhookDeliveries(snapbuild, ["Pending"], logger)
# Try again. The upload part of the job is not retried, and this
# time the scan completes.
job.scheduled_start = None
@@ -414,10 +426,12 @@
self.assertIsNone(job.error_message)
self.assertEqual([], pop_notifications())
self.assertEqual(JobStatus.COMPLETED, job.job.status)
- self.assertWebhookDeliveries(snapbuild, ["Pending", "Uploaded"])
+ self.assertWebhookDeliveries(
+ snapbuild, ["Pending", "Uploaded"], logger)
def test_run_scan_failure_notifies(self):
# A run that gets a scan failure from the store sends mail.
+ logger = self.useFixture(FakeLogger())
requester = self.factory.makePerson(name="requester")
requester_team = self.factory.makeTeam(
owner=requester, name="requester-team", members=[requester])
@@ -469,11 +483,12 @@
"Your team Requester Team is the requester of the build.\n" %
snapbuild.id, footer)
self.assertWebhookDeliveries(
- snapbuild, ["Pending", "Failed to upload"])
+ snapbuild, ["Pending", "Failed to upload"], logger)
def test_run_release(self):
# A run configured to automatically release the package to certain
# channels does so.
+ logger = self.useFixture(FakeLogger())
snapbuild = self.makeSnapBuild(store_channels=["stable", "edge"])
self.assertContentEqual([], snapbuild.store_upload_jobs)
job = SnapStoreUploadJob.create(snapbuild)
@@ -490,7 +505,8 @@
self.assertEqual(1, job.store_revision)
self.assertIsNone(job.error_message)
self.assertEqual([], pop_notifications())
- self.assertWebhookDeliveries(snapbuild, ["Pending", "Uploaded"])
+ self.assertWebhookDeliveries(
+ snapbuild, ["Pending", "Uploaded"], logger)
def test_retry_delay(self):
# The job is retried every minute, unless it just made one of its